from collections import OrderedDict
from typing import Callable, Dict, List, Optional, Tuple

import torch.nn.functional as F
from torch import nn, Tensor

from ..ops.misc import Conv2dNormActivation
from ..utils import _log_api_usage_once


class ExtraFPNBlock(nn.Module):
    """
    Base class for the extra block in the FPN.

    Args:
        results (List[Tensor]): the result of the FPN
        x (List[Tensor]): the original feature maps
        names (List[str]): the names for each one of the
            original feature maps

    Returns:
        results (List[Tensor]): the extended set of results
            of the FPN
        names (List[str]): the extended set of names for the results
    """

    def forward(
        self,
        results: List[Tensor],
        x: List[Tensor],
        names: List[str],
    ) -> Tuple[List[Tensor], List[str]]:
        pass


class FeaturePyramidNetwork(nn.Module):
    """
    Module that adds an FPN on top of a set of feature maps. This is based on
    `"Feature Pyramid Network for Object Detection" <https://arxiv.org/abs/1612.03144>`_.

    The feature maps are currently supposed to be in increasing depth
    order.

    The input to the model is expected to be an OrderedDict[Tensor], containing
    the feature maps on top of which the FPN will be added.

    Args:
        in_channels_list (list[int]): number of channels for each feature map that
            is passed to the module
        out_channels (int): number of channels of the FPN representation
        extra_blocks (ExtraFPNBlock or None): if provided, extra operations will
            be performed. It is expected to take the fpn features, the original
            features and the names of the original features as input, and returns
            a new list of feature maps and their corresponding names
        norm_layer (callable, optional): Module specifying the normalization layer to use. Default: None

    Examples::

        >>> m = torchvision.ops.FeaturePyramidNetwork([10, 20, 30], 5)
        >>> # get some dummy data
        >>> x = OrderedDict()
        >>> x['feat0'] = torch.rand(1, 10, 64, 64)
        >>> x['feat2'] = torch.rand(1, 20, 16, 16)
        >>> x['feat3'] = torch.rand(1, 30, 8, 8)
        >>> # compute the FPN on top of x
        >>> output = m(x)
        >>> print([(k, v.shape) for k, v in output.items()])
        >>> # returns
        >>>   [('feat0', torch.Size([1, 5, 64, 64])),
        >>>    ('feat2', torch.Size([1, 5, 16, 16])),
        >>>    ('feat3', torch.Size([1, 5, 8, 8]))]

    """

    _version = 2
   N.)in_channels_listout_channelsextra_blocks
norm_layerc           	   	      s   t    t|  t | _t | _|D ]R}|dkr>tdt||dd|d d}t||d|d d}| j	| | j	| q*| 
 D ]<}t|tjrtjj|jdd |jd k	rtj|jd q|d k	rt|tstdt| || _d S )	Nr   z(in_channels=0 is currently not supported   )kernel_sizepaddingr    activation_layer   )r"   r    r$   az1extra_blocks should be of type ExtraFPNBlock not )super__init__r   r   Z
ModuleListinner_blockslayer_blocks
ValueErrorr   appendmodules
isinstanceConv2dinitkaiming_uniform_weightbias	constant_r   	TypeErrortyper   )	r   r   r   r   r    in_channelsZinner_block_moduleZlayer_block_modulem	__class__r   r   r)   M   sB    


         

zFeaturePyramidNetwork.__init__c              	      s   | dd }|d ks|dk rt| j}	dD ]b}
t|	D ]T}dD ]J}| |
 d| d| }| |
 d| d| }||kr>||||< q>q6q*t ||||||| d S )Nversionr
   )r*   r+   )r3   r4   .z.0.)getlenr*   rangepopr(   _load_from_state_dict)r   Z
state_dictprefixZlocal_metadatastrictZmissing_keysZunexpected_keysZ
error_msgsr<   
num_blocksblockir7   old_keyZnew_keyr:   r   r   rB   p   s&    

z+FeaturePyramidNetwork._load_from_state_dict)r   idxr   c                 C   sF   t | j}|dk r||7 }|}t| jD ]\}}||kr(||}q(|S )zs
        This is equivalent to self.inner_blocks[idx](x),
        but torchscript doesn't support this yet
        r   )r?   r*   	enumerater   r   rI   rE   outrG   moduler   r   r   get_result_from_inner_blocks   s    

z2FeaturePyramidNetwork.get_result_from_inner_blocksc                 C   sF   t | j}|dk r||7 }|}t| jD ]\}}||kr(||}q(|S )zs
        This is equivalent to self.layer_blocks[idx](x),
        but torchscript doesn't support this yet
        r   )r?   r+   rJ   rK   r   r   r   get_result_from_layer_blocks   s    

z2FeaturePyramidNetwork.get_result_from_layer_blocks)r   r   c           

    def forward(self, x: Dict[str, Tensor]) -> Dict[str, Tensor]:
        """
        Computes the FPN for a set of feature maps.

        Args:
            x (OrderedDict[Tensor]): feature maps for each feature level.

        Returns:
            results (OrderedDict[Tensor]): feature maps after FPN layers.
                They are ordered from the highest resolution first.
        """
        # unpack the OrderedDict into two lists for easier handling
        names = list(x.keys())
        x = list(x.values())

        # start from the deepest (lowest-resolution) feature map
        last_inner = self.get_result_from_inner_blocks(x[-1], -1)
        results = []
        results.append(self.get_result_from_layer_blocks(last_inner, -1))

        # walk the pyramid top-down, merging each lateral connection with the
        # upsampled coarser map
        for idx in range(len(x) - 2, -1, -1):
            inner_lateral = self.get_result_from_inner_blocks(x[idx], idx)
            feat_shape = inner_lateral.shape[-2:]
            inner_top_down = F.interpolate(last_inner, size=feat_shape, mode="nearest")
            last_inner = inner_lateral + inner_top_down
            results.insert(0, self.get_result_from_layer_blocks(last_inner, idx))

        if self.extra_blocks is not None:
            results, names = self.extra_blocks(results, x, names)

        # make it back an OrderedDict
        out = OrderedDict([(k, v) for k, v in zip(names, results)])

        return out


class LastLevelMaxPool(ExtraFPNBlock):
    """
    Applies a max_pool2d (not actual max_pool2d, we just subsample) on top of the last feature map
    """

    def forward(
        self,
        x: List[Tensor],
        y: List[Tensor],
        names: List[str],
    ) -> Tuple[List[Tensor], List[str]]:
        names.append("pool")
        # kernel_size=1 with stride=2 simply subsamples the last feature map
        x.append(F.max_pool2d(x[-1], kernel_size=1, stride=2, padding=0))
        return x, names
  ZS )LastLevelP6P7zO
    This module is used in RetinaNet to generate extra layers, P6 and P7.
    )r8   r   c                    st   t    t||ddd| _t||ddd| _| j| jfD ]&}tjj|jdd tj	|j
d q>||k| _d S )Nr%   r
   r!   r&   r   )r(   r)   r   r0   p6p7r1   r2   r3   r5   r4   use_P5)r   r8   r   rM   r:   r   r   r)      s    
zLastLevelP6P7.__init__)pcr   r   c           	      C   s^   |d |d  }}| j r|n|}| |}| t|}|||g |ddg ||fS )NrP   rg   rh   )ri   rg   rh   r\   Zreluextend)	r   rj   rk   r   Zp5Zc5r   rg   rh   r   r   r   r      s    
zLastLevelP6P7.forward)r   r   r   r   r`   r)   r   r	   r   r   r   rb   r   r   r:   r   rf      s   rf   )collectionsr   typingr   r   r   r   r   Ztorch.nn.functionalr   Z
functionalr\   Ztorchr	   Zops.miscr   utilsr   ra   r   r   rc   rf   r   r   r   r   <module>   s    ,
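

# The block below is an illustrative sketch added for this write-up, not part of the
# upstream module: it builds a FeaturePyramidNetwork over three dummy feature maps and
# appends an extra pooled level via LastLevelMaxPool. The channel counts, spatial sizes,
# and dictionary keys ("feat0" etc.) are arbitrary placeholder values.
if __name__ == "__main__":
    import torch

    fpn = FeaturePyramidNetwork([10, 20, 30], out_channels=5, extra_blocks=LastLevelMaxPool())

    feats = OrderedDict()
    feats["feat0"] = torch.rand(1, 10, 64, 64)
    feats["feat1"] = torch.rand(1, 20, 32, 32)
    feats["feat2"] = torch.rand(1, 30, 16, 16)

    out = fpn(feats)
    # Every output level has 5 channels; "pool" is a stride-2 subsampling of the
    # coarsest FPN level (here 8x8), appended by LastLevelMaxPool.
    print([(k, tuple(v.shape)) for k, v in out.items()])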