import functools
from typing import List

import torch
import torch.utils._pytree as pytree

try:
    import numpy as np
except ModuleNotFoundError:
    np = None


def is_compiling() -> bool:
    """
    Indicates whether we are tracing/compiling with torch.compile() or torch.export().

    If you need to check specifically that TorchDynamo is used, then use
    torch.compiler.is_dynamo_compiling().

    TODO(khabinov): we should deprecate this function and use one of these two:
    * torch.compiler.is_compiling(),
    * torch.compiler.is_dynamo_compiling().
    Which one to use will depend on the context.
    """
    return torch.compiler.is_compiling()


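# Illustrative usage (a sketch, not part of this file): is_compiling() lets
# code branch between eager execution and tracing. `log_stats` below is a
# hypothetical helper, shown only to illustrate an eager-only side effect.
#
#   def forward(x):
#       if not is_compiling():
#           log_stats(x)  # runs in eager mode only; skipped while tracing
#       return torch.sin(x)

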
def wrap_inline(fn):
    """
    Create an extra frame around fn that is not in skipfiles.
    """

    @functools.wraps(fn)
    def inner(*args, **kwargs):
        return fn(*args, **kwargs)

    return inner


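# Minimal sketch (an assumption about intent, based on the docstring above):
# wrapping is behavior-preserving; the extra `inner` frame only matters to
# Dynamo's skipfile checks when `fn` originates from a skipped file.
#
#   wrapped = wrap_inline(torch.neg)
#   assert torch.equal(wrapped(torch.ones(2)), torch.neg(torch.ones(2)))

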
def call_hook(hook, *args):
    """
    Used by compiled autograd to handle hook returning None.
    """
    result = hook(*args)
    if result is None:
        return args[0]
    return result


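# Illustrative sketch: a hook that returns None leaves the first argument
# unchanged, mirroring eager autograd hook semantics; any other return value
# replaces it.
#
#   grad = torch.ones(3)
#   assert call_hook(lambda g: None, grad) is grad
#   assert torch.equal(call_hook(lambda g: g * 2, grad), grad * 2)

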
def wrap_numpy(f):
    r"""Decorator that turns a function from ``np.ndarray``s to ``np.ndarray``s into a function
    from ``torch.Tensor``s to ``torch.Tensor``s.
    """
    if not np:
        return f

    @functools.wraps(f)
    def wrap(*args, **kwargs):
        # Convert Tensor arguments to ndarrays before calling f, then convert
        # any ndarrays in the output back to Tensors.
        args, kwargs = pytree.tree_map_only(
            torch.Tensor, lambda x: x.numpy(), (args, kwargs)
        )
        out = f(*args, **kwargs)
        return pytree.tree_map_only(np.ndarray, lambda x: torch.as_tensor(x), out)

    return wrap


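# Illustrative usage (`np_col_mean` is a hypothetical function): the decorated
# function is written against np.ndarray but can be called with Tensors;
# ndarray outputs come back as Tensors via torch.as_tensor.
#
#   @wrap_numpy
#   def np_col_mean(a):
#       return np.mean(a, axis=0)  # pure NumPy code
#
#   out = np_col_mean(torch.arange(6.0).reshape(2, 3))
#   assert isinstance(out, torch.Tensor)  # tensor([1.5000, 2.5000, 3.5000])

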
class FakeBackwardCFunction:
    def __init__(
        self,
        real: torch.autograd.function.BackwardCFunction,
        saved_tensors: List[torch.Tensor],
    ):
        self.real = real
        # saved_tensors is overridden here; everything else defers to `real`.
        self.saved_tensors = saved_tensors

    def __getattr__(self, name):
        # Forward any other attribute access to the wrapped BackwardCFunction.
        return getattr(self.real, name)


def call_backward(backward_c_function, saved_tensors, *args):
    fake = FakeBackwardCFunction(backward_c_function, saved_tensors)
    grads = fake._forward_cls.backward(fake, *args)

    # a single gradient comes back bare; normalize to a tuple
    if type(grads) is not tuple:
        grads = (grads,)

    return grads


def untyped_storage_size(x: torch.Tensor):
    return x.untyped_storage().size()


def call_hook_from_backward_state(*args, bw_state, hook_name: str, **kwargs):
    return getattr(bw_state, hook_name)(*args, **kwargs)


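# Illustrative sketch (SimpleNamespace stands in for the real backward-state
# object): the hook is resolved by attribute name at backward time rather than
# being captured by reference.
#
#   from types import SimpleNamespace
#   bw_state = SimpleNamespace(my_hook=lambda g: g * 2)
#   doubled = call_hook_from_backward_state(
#       torch.ones(3), bw_state=bw_state, hook_name="my_hook"
#   )  # tensor([2., 2., 2.])

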
def call_module_hooks_from_backward_state(
    _, result, *args, bw_state, hooks_name: str, module_name: str
):
    module = getattr(bw_state, module_name)
    hooks = getattr(bw_state, hooks_name)
    for hook in hooks:
        new_result = hook(module, result, *args)
        if new_result is not None:
            result = new_result
    return result


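# Illustrative sketch (hypothetical attribute names; SimpleNamespace stands in
# for the real backward-state object): hooks run in order, each seeing the
# latest result, and a None return leaves the result unchanged, matching
# call_hook above.
#
#   from types import SimpleNamespace
#   bw_state = SimpleNamespace(
#       mod=torch.nn.Linear(2, 2),
#       mod_hooks=[lambda m, res: None, lambda m, res: res + 1],
#   )
#   out = call_module_hooks_from_backward_state(
#       None, torch.zeros(2), bw_state=bw_state,
#       hooks_name="mod_hooks", module_name="mod",
#   )  # tensor([1., 1.])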
