from keras.src import activations
from keras.src.api_export import keras_export
from keras.src.layers.layer import Layer


@keras_export("keras.layers.ELU")
class ELU(Layer):
    """Applies an Exponential Linear Unit function to an output.

    Formula:

    ```
    f(x) = alpha * (exp(x) - 1.) for x < 0
    f(x) = x for x >= 0
    ```

    Args:
        alpha: float, slope of negative section. Defaults to `1.0`.
        **kwargs: Base layer keyword arguments, such as `name` and `dtype`.
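
    Example:

    A minimal usage sketch, assuming a standard Keras 3 setup where layers
    accept NumPy inputs directly; the output values in the comment are
    approximate:

    ```
    import numpy as np
    from keras import layers

    x = np.array([-1.0, 0.0, 1.0], dtype="float32")
    y = layers.ELU(alpha=1.0)(x)
    # y is approximately [-0.632, 0.0, 1.0]: negative inputs map to
    # alpha * (exp(x) - 1.), non-negative inputs pass through unchanged.
    ```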
    """

    def __init__(self, alpha=1.0, **kwargs):
        super().__init__(**kwargs)
        self.alpha = alpha
        self.supports_masking = True
        self._build_at_init()

    def call(self, inputs):
        return activations.elu(inputs, alpha=self.alpha)

    def compute_output_shape(self, input_shape):
        return input_shape