U
    zh                     @   sj  d dl Z d dlZd dlZd dlZd dlZd dlmZmZ d dlZ	d dl
Z	d dlZ	d dlZ	d dlZ	d dlmZ d dlmZ ddlmZ ddlmZmZmZ dd	lmZ dd
lmZmZ ddlmZ ddlm Z  ddl!m"Z"m#Z# ddl$m%Z% ddl&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z- ddl.m/Z/ ddl0m1Z1m2Z2m3Z3 ddl4m5Z5m6Z6 ddl7m8Z8m9Z9 ddl:m;Z;m<Z< zd dl=Z>W n e?k
r   dZ>Y nX e@eAZBeCDe	jEjEjFe	jGjHjIe	jGjHjJe	jGjEjFe	jGjEjKe	jLjMe	jNjOjPe	jNjQjRe	jNjQjSe	jNjQjTe	jUjVjWe	jGjXjYe	jGjXjZe	jGjXj[e	jGjXj\e	jGj]j^e	j_jUjVjWe	j`jUjVjWgZaeCDe	jbjcjde	jegZfe	jge	jhjie	jLjje	jLjke	j`jle	j`jme	j4jme	jne	joe	jpe	jqe	jre	jse	jte	jue	jvjwjxjye	jze	jLj{gZ|e	j4m re|}e	j4j~e	j4je	j4jg eCDe|Z|e	jjde	jjde	jLjde	jjjde	jbjde	jjjde	jhjde	jjde	jjdi	ZeCDdddddgZG dd de/ZG dd  d eZG d!d" d"eZdS )#    N)DictList)warning_once)_StreamBase   )TracingContext   )configpolyfill	variables)	PyCodegen)new_parameter_placeholdertracable_create_parameter) get_registered_device_interfaces)unimplemented)GuardBuilderinstall_guard)SyntheticLocalSource)check_unspec_or_constant_argsguard_if_dynhas_torch_functionhashableproductproxy_args_kwargsunwrap_if_wrapper   )VariableTracker)AutocastModeVariableNullContextVariableTorchFunctionDisableVariable)DistributedVariableProcessGroupVariable)ListVariableTupleVariable)can_dispatch_torch_functiondispatch_torch_functionFTaddsubmuldivsqrtc                       s\   e Zd ZdZedd Z fddZdd Zdd	 Zd
d Z	dd Z
dd Zdd Z  ZS )BaseTorchVariablezHcommon base for all torch.* functions, classes, modules and other thingsc                 C   s   t |tj | ||dS )N)source)r   Z
make_guardr   ZFUNCTION_MATCH)clsvaluer,    r/   O/var/www/html/venv/lib/python3.8/site-packages/torch/_dynamo/variables/torch.pycreate_with_source   s
    z$BaseTorchVariable.create_with_sourcec                    s   t  jf | || _d S N)super__init__r.   )selfr.   kwargs	__class__r/   r0   r4      s    zBaseTorchVariable.__init__c                 C   sl   z| j j d| j j }W n$ tk
r>   dt| j  }Y nX dtdd| }|||| j d d S )N.Z
torch_obj___z[^a-zA-Z0-9_]+_F)	r.   
__module____name__	Exceptionidrer'   Zextend_outputZsetup_globally_cached)r5   codegennameZunique_var_namer/   r/   r0   reconstruct   s    zBaseTorchVariable.reconstructc                 C   s   | j S r2   r.   r5   r/   r/   r0   as_proxy   s    zBaseTorchVariable.as_proxyc                 C   s
   t | jS r2   )typer.   rE   r/   r/   r0   python_type   s    zBaseTorchVariable.python_typec                 C   s   | j S r2   rD   rE   r/   r/   r0   as_python_constant   s    z$BaseTorchVariable.as_python_constantc                 C   s   t | j|}tj|S r2   )hasattrr.   r   ConstantVariablecreate)r5   txrB   resultr/   r/   r0   call_hasattr   s    zBaseTorchVariable.call_hasattrc                 C   s    | j tkrdS t| j dd dkS )NTr<   math)r.   constant_fold_functionsgetattrrE   r/   r/   r0   can_constant_fold_through   s    
z+BaseTorchVariable.can_constant_fold_through)r=   r<   __qualname____doc__classmethodr1   r4   rC   rF   rH   rI   rO   rS   __classcell__r/   r/   r7   r0   r+      s   

r+   c                       s>   e Zd ZdZdd Zedd Zdddd	 fd
dZ  ZS )TorchCtxManagerClassVariablezLPoints to a context manager class in torch.* that dynamo has implementationsc                 C   s   d| j  dS )NzTorchCtxManagerClassVariable()rD   rE   r/   r/   r0   __repr__   s    z%TorchCtxManagerClassVariable.__repr__c                 C   s    t | } t| ot| o| tkS r2   )r   callabler   supported_ctx_manager_classesrD   r/   r/   r0   is_matching_cls   s
    z,TorchCtxManagerClassVariable.is_matching_clsList[VariableTracker]Dict[str, VariableTracker]r   argsr6   returnc              	      s  ddl m}m}m}m}m}m}	m}
m}m	}m
} | jtjkrt|dkrtt|d tjjrt||d}||||S ||dS n*| jtjkrt|dkrt|d tjjr||d}||||S ||dS | jtjkrt|dkr|j||d  ddS | jtjkr`t|dkr2t|dks6tt|dkrP|d  nd}|	||S t| jrt| jtrddlm} ||||j !d| jd	i S | jtj"j#j$tj%j"j$tj&j"j$fkrt'| j||S | jtj(j)tj(j*tj+j(j)tj+j(j*fkrt,t-d
| j t. S | jtj/j0kr>|s0|r4tt1|S | jtj2j3j4krxt|dksbt||dd |D S | jtj2j5j6krt|dkst|
|S | jtj+j7j8krt|dkst||dd |D S | jtj+j7j9krt|dkst||S | jtj2j5j:kr<t|dks2t||S | jtj2j5j;krvt|dks`t||dd |D S | jtj+j<j=krt|dkst|||d  S t> |||S )Nr   )
!DisabledSavedTensorsHooksVariableDualLevelContextManager&GradIncrementNestingCtxManagerVariable)GradInplaceRequiresGradCtxManagerVariableGradModeVariableInferenceModeVariable%JvpIncrementNestingCtxManagerVariableSetFwdGradEnabledContextManagerStreamVariable&VmapIncrementNestingCtxManagerVariabler   FT)Zinitialized)wrap_fx_proxy_clscall_functionr/   z$Profiler function %s will be ignoredr   c                 S   s   g | ]}t |qS r/   r   .0xr/   r/   r0   
<listcomp>  s     z>TorchCtxManagerClassVariable.call_function.<locals>.<listcomp>c                 S   s   g | ]}t |qS r/   ro   rp   r/   r/   r0   rs     s     c                 S   s   g | ]}t |qS r/   ro   rp   r/   r/   r0   rs   $  s     )? rc   rd   re   rf   rg   rh   ri   rj   rk   rl   r.   torchno_gradlen
isinstancer   Z	functionsZBaseUserFunctionVariablerL   rn   enable_gradset_grad_enabledrI   inference_modeAssertionErrorinspectisclass
issubclassr   Ztorch._dynamo.variables.builderrm   outputcreate_proxyampautocast_modeautocastcudacpur   profilerprofilerecord_functionautogradr   logr   _CDisableTorchFunctionSubclassr   
_functorchvmapvmap_increment_nestingeager_transformsjvp_increment_nesting
forward_ad_set_fwd_grad_enabled
dual_levelgrad_increment_nestingenable_inplace_requires_gradgraphdisable_saved_tensors_hooksr3   )r5   rM   ra   r6   rc   rd   re   rf   rg   rh   ri   rj   rk   rl   ctxZinf_moderm   r7   r/   r0   rn      s    0   
  




 
z*TorchCtxManagerClassVariable.call_function)	r=   r<   rT   rU   rZ   staticmethodr]   rn   rW   r/   r/   r7   r0   rX      s   
 rX   c                   @   sj   e Zd ZdZdd Zdd Zeeddd Z	d	d
ddddZ
dd ZedddZedd ZdS )TorchInGraphFunctionVariablez@Points to a torch function/method that should be put in FX graphc                 C   s   d| j  dS )NzTorchInGraphFunctionVariable(rY   rD   rE   r/   r/   r0   rZ   2  s    z%TorchInGraphFunctionVariable.__repr__c                 C   s   | j S r2   rD   rE   r/   r/   r0   get_function5  s    z)TorchInGraphFunctionVariable.get_functionNc            (         s  i fdd} ddl m} ddlm mmmmmm	 ddl
mm	m
 | t  fdd	}| tjjjfd
d}| tjjjjfdd}| tjfdd}| tjtjj fdd}| tjtj fdd}| tj fdd}| t fdd}	| tjj j!j"tjj j!j#tjj j!j$tjj j!j%tjj j!j&dd }
| tj' fdd}| tj(dOfdd	}| tj) fdd }| tj*j+ fd!d"}| tjj,tjj-tjj. fd#d$}| t/0d%d& t1 D  fd'd(}| tj2
fd)d*}| tj3j4d+d, }| tj5j6j7 fd-d.}| tj!j8j9d/d0 }| tjj:d1d2 }| tjj;j<tjj;j<j=dPd4d5}| tjj;j>tjj;j>j=dQd6d7}| tj?d8d9 }| tj@ fd:d;}| |	fd<d=}tAB r^dd>lCmD} dd?lEmF}mG}mH}mI}mJ}  | |||| | fd@dA}!| |jK	fdBdC}"| tjLjMdRd3dDdEdF}#| tjjNjOdGdH }$| tjPjQjRjS fdIdJ}%| tj*jTjUdKdL }&| tjVfdMdN}'S )SzBuild a dict from function -> method to handle it so that we are O(1)
        in terms of the number of function with special handling.c                     s"    fdd}t  d st|S )Nc                    s&    D ]}|kst || |< q| S r2   )r|   )handlerfn)fnshandlersr/   r0   	_register@  s    
zOTorchInGraphFunctionVariable._get_handlers.<locals>.register.<locals>._registerr   )r[   r|   )r   r   )r   )r   r0   register?  s    z<TorchInGraphFunctionVariable._get_handlers.<locals>.registerr   )
SDPAParamsr   )rK   DeterministicAlgorithmsVariablerg   StreamContextVariableSymNodeVariableTensorVariableUserDefinedObjectVariable)SourcelessBuilderwrap_fx_proxyrm   c                    sH   |s|rt | jtjjtjjjtjjtjjfkr8|	   
t| j S r2   )r|   r.   ru   _utilsis_compiling_dynamoexternal_utilscompileris_dynamo_compilingZmark_inconsistent_side_effectsrL   tracing_state_functionsr5   rM   ra   r6   rK   r/   r0   handle_tracing_state_functionsU  s    zRTorchInGraphFunctionVariable._get_handlers.<locals>.handle_tracing_state_functionsc                    s     |tj S r2   )rL   ru   	overridesget_default_nowrap_functionsr   r   r/   r0   #handle_get_default_nowrap_functionsb  s     zWTorchInGraphFunctionVariable._get_handlers.<locals>.handle_get_default_nowrap_functionsc                    s   |  |tj||S r2   )inline_user_function_returnrL   r
   Zaccumulate_gradr   r   r/   r0   handle_accumulate_grad_l  s
      zKTorchInGraphFunctionVariable._get_handlers.<locals>.handle_accumulate_grad_c                    s&   t ||s"| |tj||S d S r2   )r   r   rL   r
   radiansr   r   r/   r0   handle_radiansr  s    
  zBTorchInGraphFunctionVariable._get_handlers.<locals>.handle_radiansc                    sF   t |s.| jtjjkr8t |r8t|jdr8 dS  dS d S )NZ__torch_function__TF)rx   r.   ru   r   is_tensor_likerJ   rL   )r5   rM   arg)rK   r   r   r/   r0   handle_is_tensorz  s    


zDTorchInGraphFunctionVariable._get_handlers.<locals>.handle_is_tensorc                    s`   |}t |r\|jd k	r\| jtjkr2 |jjS | jtjkrL |jjS td| j d S )Nzcalling )rx   dtyper.   ru   is_floating_pointrL   
is_complexr|   )r5   rM   inputZ	input_argrK   r   r/   r0   handle_is_floating_point  s    zLTorchInGraphFunctionVariable._get_handlers.<locals>.handle_is_floating_pointc                    sB   t |r$|jd k	r$ t|jS t |r>||dg i S d S )Nnumel)rx   sizerL   r   call_methodr5   rM   r   r   r/   r0   handle_numel  s    
z@TorchInGraphFunctionVariable._get_handlers.<locals>.handle_numelc                    s   t | st||dg i S Nr   )rx   r|   r   r   )r   r/   r0   handle_tensor_size_rewrites  s    zOTorchInGraphFunctionVariable._get_handlers.<locals>.handle_tensor_size_rewritesc                 _   s   |  |||S r2   )_call_ntupler   r/   r/   r0   handle_ntuple  s    zATorchInGraphFunctionVariable._get_handlers.<locals>.handle_ntuplec                    s   t j  t S r2   )r   _guards_singletonrL   ru   is_grad_enabledr5   rM   )rK   rg   r/   r0   handle_is_grad_enabled  s    
zJTorchInGraphFunctionVariable._get_handlers.<locals>.handle_is_grad_enabledFc                    s$   |r|  rtd  ||  S )Nz2torch.use_deterministic_algorithms(warn_only=True))rI   r   rL   )r5   rM   modeZ	warn_only)r   r/   r0   #handle_use_deterministic_algorithms  s    zWTorchInGraphFunctionVariable._get_handlers.<locals>.handle_use_deterministic_algorithmsc                    s   t j  t S r2   )r   r   rL   ru   $are_deterministic_algorithms_enabledr   )rK   r   r/   r0   +handle_are_deterministic_algorithms_enabled  s    
z_TorchInGraphFunctionVariable._get_handlers.<locals>.handle_are_deterministic_algorithms_enabledc                    s   t tj  |jjS r2   )r   r   r   rL   r   Ztorch_function_enabledr   r   r/   r0    handle_is_torch_function_enabled  s    
zTTorchInGraphFunctionVariable._get_handlers.<locals>.handle_is_torch_function_enabledc                    sD   t |dkr(t|d tr(|d |n|} tdd |D S )Nr   r   c                 s   s   | ]}t |V  qd S r2   )r   rp   r/   r/   r0   	<genexpr>  s     z`TorchInGraphFunctionVariable._get_handlers.<locals>.handle_has_torch_function.<locals>.<genexpr>)rw   rx   r#   unpack_var_sequencerL   any)r5   rM   ra   Zelemsr   r/   r0   handle_has_torch_function  s    
zMTorchInGraphFunctionVariable._get_handlers.<locals>.handle_has_torch_functionc                 s   s   | ]\}}|j V  qd S r2   )stream)rq   r;   device_interfacer/   r/   r0   r     s   z=TorchInGraphFunctionVariable._get_handlers.<locals>.<genexpr>c                    s     ||S r2   )rL   )r5   rM   r   )r   r/   r0   handle_device_interface_stream  s    zRTorchInGraphFunctionVariable._get_handlers.<locals>.handle_device_interface_streamc                    sB   t jstd tstd  ||jjdtjft|i  d dS )Nz-torch.from_numpy. config.trace_numpy is Falsez(torch.from_numpy. NumPy is not availablern   )Z
target_clsrM   proxyexample_value)	r	   Ztrace_numpyr   npr   r   ru   Z	as_tensorr   )r5   rM   ra   )r   rm   r/   r0   handle_from_numpy  s    zETorchInGraphFunctionVariable._get_handlers.<locals>.handle_from_numpyc                 S   s   |S r2   r/   )r5   rM   Zthe_typeZ	the_valuer/   r/   r0   handle_jit_annotate  s    zGTorchInGraphFunctionVariable._get_handlers.<locals>.handle_jit_annotatec                    sF   |rt dt|st dtjd|j|jd} tjj	|S )Nz%Expect 1 input to cudnn.is_acceptablez2Expect input to cudnn.is_acceptable to be a tensorr   )r   device)
r|   rx   ru   tensorr   r   rL   backendscudnnis_acceptable)r5   rM   r   extraZ
tensor_inpr   r/   r0   handle_cudnn_is_acceptable  s     zNTorchInGraphFunctionVariable._get_handlers.<locals>.handle_cudnn_is_acceptablec                 _   s   t jj|f||S r2   )r   ZBackwardHookVariablerL   r   r/   r/   r0   handle_backward_hook  s    zHTorchInGraphFunctionVariable._get_handlers.<locals>.handle_backward_hookc                 _   s   | j |f||S r2   )call_nn_parameterr   r/   r/   r0   handle_parameter   s    zDTorchInGraphFunctionVariable._get_handlers.<locals>.handle_parameterNc                 S   s   |d k	r| |d|gi S d S r   r   Zself_rM   r5   dimr/   r/   r0   handle_sym_size  s    zCTorchInGraphFunctionVariable._get_handlers.<locals>.handle_sym_sizec                 S   s   |d k	r| |d|gi S d S )NZstrider   r   r/   r/   r0   handle_sym_stride
  s    zETorchInGraphFunctionVariable._get_handlers.<locals>.handle_sym_stridec                 _   sz   t |dkrvd|krvt |dkrvttj||dd  i }ttj|||d gi }ttj||d |gi S d S )Nr   r.   r   r   )rw   r   ru   r)   rn   r(   r&   )r5   rM   ra   r6   rN   r/   r/   r0   handle_addcdiv  s      
  
 
 
 
 zBTorchInGraphFunctionVariable._get_handlers.<locals>.handle_addcdivc                    s0   |  r| s$t|tjr,| r, d S d S r2   )is_python_constantrI   rx   r   r   Zevaluate_expr)r5   rM   	conditionmessager   r/   r0   handle_assert  s    
zATorchInGraphFunctionVariable._get_handlers.<locals>.handle_assertc                    s(    ||j jdtjjft|| |dS )Nrn   )r   Z
param_vars)r   r   ru   r   Z_SDPAParamsr   r   r   r/   r0   handle_sdpa_params&  s    zFTorchInGraphFunctionVariable._get_handlers.<locals>.handle_sdpa_params)DTensor)_get_group_size_by_name_get_group_tag_rank_not_in_group$_resolve_group_name_by_ranks_and_tagget_process_group_ranksc                    s   t |dkr$t|d t fshtnDt |dkrRt|d trLt|d  shtntd| d| j dd |D }| j| }||S )Nr   r   r   zInvalid group value (z) for constant pg function c                 S   s   g | ]}|  qS r/   rI   )rq   r   r/   r/   r0   rs   V  s     znTorchInGraphFunctionVariable._get_handlers.<locals>.handle_constant_processgroup_functions.<locals>.<listcomp>)rw   rx   r!   r|   r"   r.   rL   )r5   rM   ra   args_as_valueZinvocation_result)rK   r   r/   r0   &handle_constant_processgroup_functions<  s     

zZTorchInGraphFunctionVariable._get_handlers.<locals>.handle_constant_processgroup_functionsc                    sn   dd |dd  D  dd |  D  fdd}djj |_||jjd	|ft|d
 gi  dS )Nc                 S   s   g | ]}|  qS r/   r  rp   r/   r/   r0   rs   b  s     zYTorchInGraphFunctionVariable._get_handlers.<locals>.handle_from_local.<locals>.<listcomp>r   c                 S   s   i | ]\}}||  qS r/   r  rq   kvr/   r/   r0   
<dictcomp>c  s      zYTorchInGraphFunctionVariable._get_handlers.<locals>.handle_from_local.<locals>.<dictcomp>c                    s   j | f S r2   rD   rr   r  Zkwargs_as_valuer5   r/   r0   fn_with_prim_typese  s    zaTorchInGraphFunctionVariable._get_handlers.<locals>.handle_from_local.<locals>.fn_with_prim_typeszprim rn   r   rM   r   )itemsr.   r=   r   r   r   )r5   rM   ra   r6   r
  r   r	  r0   handle_from_local^  s    zETorchInGraphFunctionVariable._get_handlers.<locals>.handle_from_local)layoutc                _   s<   ddl m} |r&| tjkr&td t||s8td d S )Nr   )BaseListVariablez3torch.compile does not support strided NestedTensorz!nested_tensor with non-list input)listsr  rI   ru   Zstridedr   rx   )r5   rM   Ztensor_listr  ra   r6   r  r/   r/   r0   handle_nested_tensort  s
    
zHTorchInGraphFunctionVariable._get_handlers.<locals>.handle_nested_tensorc                 _   sH   t |t | dks<t |dkrD|d  rD|d  dkrDtd d S )Nr   r   z<torch.nn.functional.one_hot with data-dependent output shape)rw   r   rI   r   r   r/   r/   r0   handle_one_hot  s    

zBTorchInGraphFunctionVariable._get_handlers.<locals>.handle_one_hotc                    s6   t |r$tjtjjj|j	S t | r2|S d S r2   )
rx   r   rK   rL   ru   fxexperimentalsymbolic_shapesguard_size_obliviousZsym_num)r5   rM   expr)rK   r   r/   r0   handle_guard_size_oblivious  s    


zOTorchInGraphFunctionVariable._get_handlers.<locals>.handle_guard_size_obliviousc                 _   s    ddl m} t||||S )Nr   )_unsafe_set_version_counter)Ztensor_version_opr  r   rn   )r5   rM   ra   r6   r  r/   r/   r0   !handle_unsafe_set_version_counter  s      zUTorchInGraphFunctionVariable._get_handlers.<locals>.handle_unsafe_set_version_counterc                    s`    fdd d }|r"|d }nd|kr2|d }t |s\ |r\ttjj|||S d S )Nc                    s@   t |  frdS t | ttfr8tfdd| jD S dS d S )NTc                 3   s   | ]} |V  qd S r2   r/   )rq   ycheck_any_unspecr/   r0   r     s     ztTorchInGraphFunctionVariable._get_handlers.<locals>.handle_torch_tensor.<locals>.check_any_unspec.<locals>.<genexpr>F)rx   r"   r#   r   r  r  )r   r   r  r/   r0   r    s
    zaTorchInGraphFunctionVariable._get_handlers.<locals>.handle_torch_tensor.<locals>.check_any_unspecr   data)rx   r   ru   Z_refsr   rn   )r5   rM   ra   r6   Zdata_arg)r   r   r  r0   handle_torch_tensor  s    
  zGTorchInGraphFunctionVariable._get_handlers.<locals>.handle_torch_tensor)F)N)N)N)WZtorch.backends.cudar   rt   rK   r   rg   r   r   r   r   builderr   r   rm   r   ru   r   r   __wrapped__ZopsZinductorZaccumulate_grad_defaultrP   r   Z	is_tensorr   r   r   r   !REWRITE_OPS_TO_TENSOR_SIZE_METHODnnmodulesutilsZ_singleZ_pairZ_tripleZ
_quadruple_ntupler   Zuse_deterministic_algorithmsr   r   Z_is_torch_function_enabledr   Zhas_torch_function_variadicZhas_torch_function_unarydictfromkeysr   Z
from_numpyjitZannotater   r   r   hooksZBackwardHook	ParameterZatenZsym_sizeintZ
sym_strideZaddcdiv_assertr    is_availableZtorch.distributed._tensorr   Z"torch.distributed.distributed_c10dr   r   r   r   r   Z
from_localnestedZnested_tensor
functionalZone_hotr  r  r  r  Z	_autogradr  r   )(r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r  r  r  r  r  r  r   r/   )rK   r   rg   r   r   r   r   r   r   r   rm   r0   _get_handlers8  s    
$		


















 



z*TorchInGraphFunctionVariable._get_handlersr^   r_   r   r`   c                    s  ddl m}m m} ddlm} |  r^t||r^|| 	 dd |D dd |
 D S |  | j}|r|| f||}|r|S t||rt| ||S t fdd	|D }	td
d	 |D }
t| jdddkr | jjtkr |	r |
r dt| j d}t| t| | j}|	rdd| jj }t| jdd dkrdtt|rdtt|}|jjd|ft|| d}t||rd|kr|d 	 rtd d|krt|d tjr|d 	 d kst|t rt|d t t!fst"fdd|d j
D }t#|D ]&\}}|j$kr$|j
| j$|< q$t%|d j
|j
D ]R\}}|j&r^|jj'kr^t|tjr^t|tjr^|j(|j(kr^td q^nt||r|t|d |st"d|d j)j*j+kst"|j)j*j+d }|d j)j*j+d }|d j&r@|d jj'kr@|j,|j,kr@td tj-.|sVtd /|d }|j$kr|j$|< ntdt0|d   |S d S )Nr   )rK   r   r   r   c                 S   s   g | ]}|  qS r/   r  rp   r/   r/   r0   rs     s     z>TorchInGraphFunctionVariable.call_function.<locals>.<listcomp>c                 S   s   i | ]\}}||  qS r/   r  r  r/   r/   r0   r    s      z>TorchInGraphFunctionVariable.call_function.<locals>.<dictcomp>c                 3   s   | ]}t | V  qd S r2   )rx   rp   )r   r/   r0   r     s     z=TorchInGraphFunctionVariable.call_function.<locals>.<genexpr>c                 s   s    | ]}t |tjtjfV  qd S r2   )rx   r   rK   r   rp   r/   r/   r0   r     s   r<   rt   ru   zCalling z on only torch.SymInt arguments is not yet supported.
To support this behavior, we need to allow const-propping tensors that store symint data.
For now, dynamo will explicitly graph break when it encounters user code with this behavior.
Z_sym_rP   rn   r  requires_gradzfactory functions that return tensors that require grad are not supported.
Either create the tensor outside the compiled region, or do not set the tensor to require_gradoutc                    s   g | ]}  |qS r/   )find_symbolic_locals_namerp   )rM   r/   r0   rs     s    z*out variants with resizing on graph inputsr   z9out= op was called where output tensor was non-contiguouszout variant of )1rt   rK   r   r   r!  r   rS   r   rL   rI   r  r3  getr.   r$   r%   r   allrR   r=   bin_opsstrr   warningr   rJ   ru   r   r   r   rx   r   r#   r"   r|   	enumerateZsymbolic_localszipr,   Z	graphargsr   r   nodemetashapeZ_prims_commonZis_contiguousr6  rG   )r5   rM   ra   r6   rK   r   r   Zspecial_handlerrN   Zany_symints_or_symfloatsZall_ints_or_floatsmsgZfn_Ztorch_sym_opZtensor_variableZoutput_tensor_namesidxrB   Z
out_tensorZresult_tensorZfake_tensorZfake_outr/   )r   rM   r0   rn     s    
 

 




 



	
z*TorchInGraphFunctionVariable.call_functionc                    s~   | j tjjjjkr |d   n| j jd j t	 t
s<t|rDt fdd}| j tjjjjkrnt|S ||d S dS )z1inline behavior of torch.nn.modules.utils._ntupler   c                    sZ   |  rtt| S |  rFtjtj	j
j |  S td|  d d S )Nztorch.nn.modules.utils._ntuple(rY   )Zhas_unpack_var_sequencer   r#   listr   r   rK   rL   ru   r%  r&  r'  r(  rI   r   rD   countrM   r/   r0   r   H  s    
z@TorchInGraphFunctionVariable._call_ntuple.<locals>.handle_ntupleN)r.   ru   r%  r&  r'  r(  rI   __closure__cell_contentsrx   r.  r|   r   ZLambdaVariable)r5   rM   ra   r6   r   r/   rD  r0   r   ?  s    
z)TorchInGraphFunctionVariable._call_ntupleTc              
   C   sZ  t |tjr6z| }W n tk
r4   td Y nX t |tjsRtd| d |jrf| |||S z8t	|
|d }|
|d }|
|d }W n0 tk
r } ztd|  W 5 d}~X Y nX |jt||||g}|jr||d	g i }d
dlm}	 |	||jdt| | fi }
t |
tjs>ttjj|
_d|
_|j|
_|
S )z>A call to torch.nn.Parameter() gets lifted to before the graphz)Parameter(requires_grad=...) not constantzParameter(data=z) not implementedr@  r   r   zParameter not python_constant: Ndetachr   r   rn   F)rx   r   r   rI   NotImplementedErrorr   r   r,   _nn_param_via_prefix_inserttupleZvar_getattrr   Zsynthetic_graph_inputr   r4  r   r!  r   r   r   rF   r|   ru   r%  r-  Z
class_typeZhas_grad_fn)r-   rM   r  r4  r@  r   r   eplaceholderr   rN   r/   r/   r0   r   Z  sH      
	
z.TorchInGraphFunctionVariable.call_nn_parameterc           
      C   s   ddl m} | j }t| }|dd ||j |t| |	dd |
| | jj|  | j}|jdkrtd t|}tj| j| j}|| ||}	t jj| |	S )	Nr   )VariableBuilderztorch.nnr-  r   T)rM  Zget_attrzAUnexpected type of data placeholder op for parameter construction)r!  rN  r   Znew_varr   Zload_import_fromr,   r   rK   rn   storeZpregraph_bytecodeextendget_instructionsrF   r>  opr   r   ru   r%  r-  Zexample_value_from_input_noder   r7  Zguards_contextZdynamo_guardsZremove_guards_with_source)
rM   r  r4  rN  varnameZcgZ	data_noder,   r   rN   r/   r/   r0   rJ    s.    




z8TorchInGraphFunctionVariable._nn_param_via_prefix_insert)NT)r=   r<   rT   rU   rZ   r   r   	functools	lru_cacher3  rn   r   rV   r   rJ  r/   r/   r/   r0   r   /  s&        7r   )rT  r}   loggingrP   r@   typingr   r   Ztorch._Cru   Ztorch._refsZtorch.fxZtorch.nnZtorch.onnx.operatorsZtorch._loggingr   Ztorch._streambaser   Z_guardsr   rt   r	   r
   r   rA   r   Zcreate_parameter_opr   r   r   r   excr   Zguardsr   r   r,   r   r'  r   r   r   r   r   r   r   baser   Zctx_managerr   r   r   distributedr    r!   r  r"   r#   Ztorch_functionr$   r%   numpyr   ModuleNotFoundError	getLoggerr=   r   r)  r*  r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   Z	grad_modery   r{   rv   rz   r   r   r   r   r\   Zonnx	operatorsZshape_as_tensorZ_shape_as_tensorr$  r/  r   Z_get_device_indexZ_get_cublas_allow_tf32Z_is_any_autocast_enabledZget_device_propertiesr0  Zget_autocast_dtypeZget_autocast_gpu_dtypeZget_default_dtypeZis_autocast_cache_enabledZis_autocast_cpu_enabledZis_autocast_enabledr   r   r%  r2  Z
_ReductionZget_enumZpromote_typesZ_get_privateuse1_backend_namerQ   rP  Zis_initializedZget_rankZget_world_sizer+  Zis_scripting
is_tracingZ_get_tracing_stater  Z_symbolic_traceZis_fx_tracingZis_in_onnx_exportr   r   r   r   r   r   r9  r+   rX   r   r/   r/   r/   r0   <module>   s   $	





         , 