"""Helper library for functions used during TPU compilation."""

import contextlib
import threading


class TpuContext(threading.local):
  """A context object holding state about the TPU computation being built."""

  def __init__(self):
    """Creates a new TpuContext."""
    self._number_of_shards = None

  @property
  def number_of_shards(self):
    return self._number_of_shards

  def set_number_of_shards(self, number_of_shards):
    self._number_of_shards = number_of_shards


_current_tpu_context = TpuContext()


@contextlib.contextmanager
def tpu_shard_context(number_of_shards):
  """A context manager setting current number of shards."""
  if _current_tpu_context.number_of_shards is not None:
    raise NotImplementedError("tpu_shard_context cannot be nested")
  try:
    _current_tpu_context.set_number_of_shards(number_of_shards)
    yield
  finally:
    _current_tpu_context.set_number_of_shards(None)


def get_tpu_context():
  return _current_tpu_context


def on_device_training_loop(func):
  setattr(func, "step_marker_location", "STEP_MARK_AT_TOP_LEVEL_WHILE_LOOP")
  return func

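# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the module's API): shows how
# tpu_shard_context, get_tpu_context and on_device_training_loop fit together.
# `build_model` below is a hypothetical stand-in for a user-supplied TPU
# computation; real callers would hand it to the TPU rewrite machinery rather
# than calling it directly as done here.
# ---------------------------------------------------------------------------
if __name__ == "__main__":

  @on_device_training_loop
  def build_model():
    # Inside the context manager, the shard count is visible to any code
    # building the computation without being threaded through every call.
    return get_tpu_context().number_of_shards

  # The decorator only tags the function; the attribute is read later by the
  # TPU compilation machinery.
  print(build_model.step_marker_location)  # STEP_MARK_AT_TOP_LEVEL_WHILE_LOOP

  with tpu_shard_context(8):
    print(build_model())  # 8
    # Entering another tpu_shard_context here would raise
    # NotImplementedError("tpu_shard_context cannot be nested").

  # On exit the shard count is reset.
  print(get_tpu_context().number_of_shards)  # None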