from keras.src import backend
from keras.src import ops
from keras.src.api_export import keras_export
from keras.src.optimizers import optimizer


@keras_export(["keras.optimizers.Nadam"])
class Nadam(optimizer.Optimizer):
    """Optimizer that implements the Nadam algorithm.

    Much like Adam is essentially RMSprop with momentum, Nadam is Adam with
    Nesterov momentum.

    Args:
        learning_rate: A float, a
            `keras.optimizers.schedules.LearningRateSchedule` instance, or
            a callable that takes no arguments and returns the actual value to
            use. The learning rate. Defaults to `0.001`.
        beta_1: A float value or a constant float tensor, or a callable
            that takes no arguments and returns the actual value to use. The
            exponential decay rate for the 1st moment estimates.
            Defaults to `0.9`.
        beta_2: A float value or a constant float tensor, or a callable
            that takes no arguments and returns the actual value to use. The
            exponential decay rate for the 2nd moment estimates. Defaults to
            `0.999`.
        epsilon: A small constant for numerical stability. This epsilon is
            "epsilon hat" in the Kingma and Ba paper (in the formula just before
            Section 2.1), not the epsilon in Algorithm 1 of the paper.
            Defaults to `1e-7`.
        {{base_optimizer_keyword_args}}

    Reference:

    - [Dozat, 2015](http://cs229.stanford.edu/proj2015/054_report.pdf).

    """

    def __init__(
        self,
        learning_rate=0.001,
        beta_1=0.9,
        beta_2=0.999,
        epsilon=1e-7,
        weight_decay=None,
        clipnorm=None,
        clipvalue=None,
        global_clipnorm=None,
        use_ema=False,
        ema_momentum=0.99,
        ema_overwrite_frequency=None,
        loss_scale_factor=None,
        gradient_accumulation_steps=None,
        name="nadam",
        **kwargs,
    ):
        super().__init__(
            learning_rate=learning_rate,
            name=name,
            weight_decay=weight_decay,
            clipnorm=clipnorm,
            clipvalue=clipvalue,
            global_clipnorm=global_clipnorm,
            use_ema=use_ema,
            ema_momentum=ema_momentum,
            ema_overwrite_frequency=ema_overwrite_frequency,
            loss_scale_factor=loss_scale_factor,
            gradient_accumulation_steps=gradient_accumulation_steps,
            **kwargs,
        )
        self.beta_1 = beta_1
        self.beta_2 = beta_2
        self.epsilon = epsilon

    def build(self, var_list):
        """Initialize optimizer variables.

        Nadam optimizer has 2 types of variables: momentums and velocities.

        Args:
            var_list: list of model variables to build Nadam variables on.
        """
        if self.built:
            return
        if var_list:
            dtype = var_list[0].dtype
        else:
            dtype = backend.floatx()
        super().build(var_list)
        self._momentums, self._velocities = self.add_optimizer_variables(
            var_list, ["momentum", "velocity"]
        )
        self._u_product = backend.Variable(1.0, dtype=dtype)

    def _backend_update_step(self, grads, trainable_variables, learning_rate):
        dtype = self._u_product.dtype
        self.assign(
            self._u_product,
            self._u_product
            * self.beta_1
            * (
                1.0
                - 0.5 * ops.power(0.96, ops.cast(self.iterations + 1, dtype))
            ),
        )
        super()._backend_update_step(grads, trainable_variables, learning_rate)

    def update_step(self, gradient, variable, learning_rate):
        """Update step given gradient and the associated model variable."""
        var_dtype = variable.dtype
        lr = ops.cast(learning_rate, var_dtype)
        gradient = ops.cast(gradient, var_dtype)

        local_step = ops.cast(self.iterations + 1, var_dtype)
        next_step = ops.cast(self.iterations + 2, var_dtype)
        decay = ops.cast(0.96, var_dtype)
        beta_1 = ops.cast(self.beta_1, var_dtype)
        beta_2 = ops.cast(self.beta_2, var_dtype)
        u_t = beta_1 * (1.0 - 0.5 * (ops.power(decay, local_step)))
        u_t_1 = beta_1 * (1.0 - 0.5 * (ops.power(decay, next_step)))
        u_product_t = ops.cast(self._u_product, var_dtype)
        u_product_t_1 = u_product_t * u_t_1
        beta_2_power = ops.power(beta_2, local_step)

        m = self._momentums[self._get_variable_index(variable)]
        v = self._velocities[self._get_variable_index(variable)]

        self.assign_add(
            m, ops.multiply(ops.subtract(gradient, m), 1 - beta_1)
        )
        self.assign_add(
            v, ops.multiply(ops.subtract(ops.square(gradient), v), 1 - beta_2)
        )
        m_hat = ops.add(
            ops.divide(ops.multiply(u_t_1, m), 1 - u_product_t_1),
            ops.divide(ops.multiply(1 - u_t, gradient), 1 - u_product_t),
        )
        v_hat = ops.divide(v, 1 - beta_2_power)

        self.assign_sub(
            variable,
            ops.divide(
                ops.multiply(m_hat, lr), ops.add(ops.sqrt(v_hat), self.epsilon)
            ),
        )

    def get_config(self):
        config = super().get_config()
        config.update(
            {
                "beta_1": self.beta_1,
                "beta_2": self.beta_2,
                "epsilon": self.epsilon,
            }
        )
        return config


Nadam.__doc__ = Nadam.__doc__.replace(
    "{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
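
# Note on the update rule: the following is a summary of what `update_step`
# above computes, in the notation of Dozat (2015). With the momentum schedule
#
#     mu_t = beta_1 * (1 - 0.5 * 0.96**t)
#
# and its running product prod_t = mu_1 * ... * mu_t (tracked incrementally
# in `_u_product` by `_backend_update_step`), the step for gradient g_t is
#
#     m_t     = beta_1 * m_{t-1} + (1 - beta_1) * g_t
#     v_t     = beta_2 * v_{t-1} + (1 - beta_2) * g_t**2
#     m_hat   = mu_{t+1} * m_t / (1 - prod_{t+1})
#               + (1 - mu_t) * g_t / (1 - prod_t)
#     v_hat   = v_t / (1 - beta_2**t)
#     theta_t = theta_{t-1} - lr * m_hat / (sqrt(v_hat) + epsilon)
#
# The Nesterov look-ahead enters through the mu_{t+1} term in m_hat, which
# applies the *next* step's momentum coefficient to the current first moment.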
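# Usage sketch (illustrative only, not part of this module). Assumes Keras 3
# is installed; the model and the `x_train`/`y_train` arrays are hypothetical
# placeholders:
#
#     import keras
#
#     model = keras.Sequential(
#         [keras.layers.Dense(10, activation="softmax")]
#     )
#     model.compile(
#         optimizer=keras.optimizers.Nadam(learning_rate=1e-3),
#         loss="sparse_categorical_crossentropy",
#     )
#     model.fit(x_train, y_train, epochs=5)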