from keras.src.api_export import keras_export
from keras.src.callbacks.callback import Callback


@keras_export("keras.callbacks.LambdaCallback")
class LambdaCallback(Callback):
    """Callback for creating simple, custom callbacks on-the-fly.

    This callback is constructed with anonymous functions that will be called
    at the appropriate time (during `Model.{fit | evaluate | predict}`).
    Note that the callbacks expect positional arguments, as follows:

    - `on_epoch_begin` and `on_epoch_end` expect two positional arguments:
      `epoch`, `logs`
    - `on_train_begin` and `on_train_end` expect one positional argument:
      `logs`
    - `on_train_batch_begin` and `on_train_batch_end` expect a positional
      argument `batch` and a keyword argument `logs`
    - See `Callback` class definition for the full list of functions and their
      expected arguments.

    Args:
        on_epoch_begin: called at the beginning of every epoch.
        on_epoch_end: called at the end of every epoch.
        on_train_begin: called at the beginning of model training.
        on_train_end: called at the end of model training.
        on_train_batch_begin: called at the beginning of every train batch.
        on_train_batch_end: called at the end of every train batch.
        kwargs: Any function in `Callback` that you want to override by
            passing `function_name=function`. For example,
            `LambdaCallback(.., on_train_end=train_end_fn)`. The custom
            function needs to have the same arguments as the ones defined in
            `Callback` (see the last example below).

    Example:

    ```python
    # Print the batch number at the beginning of every batch.
    batch_print_callback = LambdaCallback(
        on_train_batch_begin=lambda batch, logs: print(batch))

    # Stream the epoch loss to a file in JSON format. The file content
    # is not well-formed JSON but rather has a JSON object per line.
    import json
    json_log = open('loss_log.json', mode='wt', buffering=1)
    json_logging_callback = LambdaCallback(
        on_epoch_end=lambda epoch, logs: json_log.write(
            json.dumps({'epoch': epoch, 'loss': logs['loss']}) + '\n'),
        on_train_end=lambda logs: json_log.close()
    )

    # Terminate some processes after having finished model training.
    processes = ...
    cleanup_callback = LambdaCallback(
        on_train_end=lambda logs: [
            p.terminate() for p in processes if p.is_alive()])

    model.fit(...,
              callbacks=[batch_print_callback,
                         json_logging_callback,
                         cleanup_callback])
    ```
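
    The `kwargs` mechanism also covers hooks that are not explicit
    constructor arguments. A minimal sketch (the exact keys present in
    `logs` depend on the model's compiled metrics):

    ```python
    # Override `on_test_end` via kwargs to print evaluation logs.
    eval_print_callback = LambdaCallback(
        on_test_end=lambda logs: print("Evaluation logs:", logs))
    model.evaluate(..., callbacks=[eval_print_callback])
    ```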
    """

    def __init__(
        self,
        on_epoch_begin=None,
        on_epoch_end=None,
        on_train_begin=None,
        on_train_end=None,
        on_train_batch_begin=None,
        on_train_batch_end=None,
        **kwargs,
    ):
        super().__init__()
        self.__dict__.update(kwargs)
        if on_epoch_begin is not None:
            self.on_epoch_begin = on_epoch_begin
        if on_epoch_end is not None:
            self.on_epoch_end = on_epoch_end
        if on_train_begin is not None:
            self.on_train_begin = on_train_begin
        if on_train_end is not None:
            self.on_train_end = on_train_end
        if on_train_batch_begin is not None:
            self.on_train_batch_begin = on_train_batch_begin
        if on_train_batch_end is not None:
            self.on_train_batch_end = on_train_batch_end