import functools
import importlib
import logging
import os
import sys
import tempfile
from types import MappingProxyType
from typing import Optional

import torch

from .common import device_from_inputs, fake_tensor_unsupported
from .registry import register_backend

log = logging.getLogger(__name__)


@register_backend
@fake_tensor_unsupported
def tvm(
    gm,
    example_inputs,
    *,
    options: Optional[MappingProxyType] = MappingProxyType(
        {"scheduler": None, "trials": 20000, "opt_level": 3}
    ),
):
    import tvm  # type: ignore[import]
    from tvm import relay  # type: ignore[import]
    from tvm.contrib import graph_executor  # type: ignore[import]

    jit_mod = torch.jit.trace(gm, example_inputs)
    device = device_from_inputs(example_inputs)
    shape_list = [(f"inp_{idx}", i.shape) for idx, i in enumerate(example_inputs)]
    example_outputs = gm(*example_inputs)
    if len(example_outputs) == 0:
        log.warning("Explicitly fall back to eager due to zero output")
        return gm.forward
    mod, params = relay.frontend.from_pytorch(jit_mod, shape_list)
    if device.type == "cuda":
        dev = tvm.cuda(device.index)
        target = tvm.target.cuda()
    else:
        dev = tvm.cpu(0)
        target = tvm.target.Target(llvm_target())

    scheduler = options.get("scheduler", None)
    if scheduler is None:
        scheduler = os.environ.get("TVM_SCHEDULER", None)

    trials = options.get("trials", 20000)
    opt_level = options.get("opt_level", 3)

    if scheduler == "auto_scheduler":
        from tvm import auto_scheduler

        log_file = tempfile.NamedTemporaryFile()

        if not os.path.exists(log_file):
            tasks, task_weights = auto_scheduler.extract_tasks(
                mod["main"], params, target
            )
            for task in tasks:
                print(task.compute_dag)
            else:
                print("No tasks")
            if len(tasks) != 0:
                tuner = auto_scheduler.TaskScheduler(tasks, task_weights)
                if not os.path.exists(log_file):
                    assert trials > 0
                    tune_option = auto_scheduler.TuningOptions(
                        num_measure_trials=trials,
                        measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
                        early_stopping=2000,
                    )
                    try:
                        tuner.tune(tune_option)
                    except Exception:
                        if os.path.exists(log_file):
                            os.unlink(log_file)
                        raise

        with auto_scheduler.ApplyHistoryBest(log_file):
            with tvm.transform.PassContext(
                opt_level=opt_level,
                config={"relay.backend.use_auto_scheduler": True},
            ):
                lib = relay.build(mod, target=target, params=params)
    elif scheduler == "meta_schedule":
        from tvm import meta_schedule as ms

        with tempfile.TemporaryDirectory() as work_dir:
            if device.type != "cuda":
                # meta_schedule needs the physical core count in the target string
                target = tvm.target.Target(
                    f"{llvm_target()} --num-cores {ms.utils.cpu_count(logical=False)}"
                )
            assert trials > 0
            database = ms.relay_integration.tune_relay(
                mod=mod,
                target=target,
                work_dir=work_dir,
                max_trials_global=20000,
                num_trials_per_iter=64,
                params=params,
                strategy="evolutionary",
                opt_level=opt_level,
            )
            lib = ms.relay_integration.compile_relay(
                database=database,
                mod=mod,
                target=target,
                params=params,
                opt_level=opt_level,
            )
    elif scheduler == "default" or not scheduler:
        # no autotuning
        with tvm.transform.PassContext(opt_level=opt_level):
            lib = relay.build(mod, target=target, params=params)
    else:
        raise NotImplementedError(
            "This tuning option is invalid/not implemented for torchdynamo's TVM-related backend. "
            "There are three available options: default, auto_scheduler and meta_schedule."
        )
    m = graph_executor.GraphModule(lib["default"](dev))

    def to_torch_tensor(nd_tensor):
        """A helper function to transfer a NDArray to torch.tensor."""
        if nd_tensor.dtype == "bool":
            # DLPack does not support bool, so round-trip through numpy instead
            # (at the cost of an extra copy).
            return torch.from_numpy(nd_tensor.numpy())
        return torch.utils.dlpack.from_dlpack(nd_tensor.to_dlpack())

    def to_tvm_tensor(torch_tensor):
        """A helper function to transfer a torch.tensor to NDArray."""
        if torch_tensor.dtype == torch.bool:
            # same reason as above: fall back to a numpy round trip for bool
            return tvm.nd.array(torch_tensor.cpu().numpy())
        return tvm.nd.from_dlpack(torch_tensor)

    def exec_tvm(*i_args):
        args = [a.contiguous() for a in i_args]
        shape_info, _ = m.get_input_info()
        active_inputs = {name for name, _ in shape_info.items()}
        for idx, arg in enumerate(args, 0):
            if arg.dim() != 0:
                if arg.requires_grad:
                    arg = arg.detach()
                inp_name = f"inp_{idx}"
                if inp_name not in active_inputs:
                    log.warning(
                        "input %s skipped as not found in tvm's runtime library",
                        inp_name,
                    )
                    continue
                m.set_input(inp_name, to_tvm_tensor(arg))
        m.run()
        return [to_torch_tensor(m.get_output(i)) for i in range(m.get_num_outputs())]

    return exec_tvm


tvm_meta_schedule = functools.partial(tvm, scheduler="meta_schedule")
tvm_auto_scheduler = functools.partial(tvm, scheduler="auto_scheduler")
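
# A minimal usage sketch (not part of the original module, shown only for
# illustration): the tuning behaviour can also be customised by binding an
# explicit `options` mapping, similar in spirit to the partials above.
# `toy_fn` and `quick_tvm` are hypothetical names; this assumes TVM is
# installed and that a callable backend can be passed to torch._dynamo.
#
#   import torch
#   import torch._dynamo as dynamo
#
#   def toy_fn(x):
#       return torch.sin(x) + x
#
#   quick_tvm = functools.partial(
#       tvm,
#       options=MappingProxyType(
#           {"scheduler": "default", "trials": 2000, "opt_level": 3}
#       ),
#   )
#   compiled = dynamo.optimize(quick_tvm)(toy_fn)
#   compiled(torch.randn(16))
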

def has_tvm():
    try:
        importlib.import_module("tvm")
        return True
    except ImportError:
        return False


@functools.lru_cache(None)
def llvm_target():
    if sys.platform == "linux":
        cpuinfo = open("/proc/cpuinfo").read()
        if "avx512" in cpuinfo:
            return "llvm -mcpu=skylake-avx512"
        if "avx2" in cpuinfo:
            return "llvm -mcpu=core-avx2"
    return "llvm"
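
# Usage sketch (an illustrative assumption, not part of the original file):
# because the backend above is registered via @register_backend under the
# function name "tvm", it can be selected by name once this module is
# importable. `model` and `x` are hypothetical example objects.
#
#   import torch
#
#   model = torch.nn.Linear(8, 8)
#   x = torch.randn(2, 8)
#   opt_model = torch.compile(model, backend="tvm")
#   out = opt_model(x)  # first call traces the graph, converts it via Relay,
#                       # and executes it through TVM's graph_executor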

 