from functools import reduce

import torch
import torch._utils
from ..function import Function


class Type(Function):

    @staticmethod
    def forward(ctx, i, dest_type):
        # Remember the input's tensor type and device so the gradient can be
        # cast back in backward; -1 marks a CPU tensor.
        ctx.input_type = type(i)
        ctx.input_device = -1 if not i.is_cuda else i.get_device()
        return i.type(dest_type)

    @staticmethod
    def backward(ctx, grad_output):
        if ctx.input_device == -1:
            return grad_output.type(ctx.input_type), None
        else:
            # Perform the cast on the CUDA device the input came from.
            with torch.cuda.device(ctx.input_device):
                return grad_output.type(ctx.input_type), None


class Resize(Function):

    @staticmethod
    def forward(ctx, tensor, sizes):
        ctx.sizes = sizes
        ctx.numel = reduce(lambda x, y: x * y, sizes, 1)
        if tensor.numel() != ctx.numel:
            raise RuntimeError(("requested resize to {} ({} elements in total), "
                                "but the given tensor has a size of {} ({} elements). "
                                "autograd's resize can only change the shape of a given "
                                "tensor, while preserving the number of elements. ").format(
                'x'.join(map(str, sizes)), ctx.numel,
                'x'.join(map(str, tensor.size())), tensor.numel()))
        ctx.input_sizes = tensor.size()
        if tensor.is_quantized:
            tensor.copy_(tensor)
            return tensor.contiguous().view(*sizes)
        if tensor.is_contiguous():
            result = tensor.new(tensor).contiguous().view(*sizes)
            return result
        else:
            return tensor.contiguous().view(*sizes)

    @staticmethod
    def backward(ctx, grad_output):
        # The gradient keeps the same number of elements; restore the input's shape.
        assert grad_output.numel() == ctx.numel
        return grad_output.contiguous().view(ctx.input_sizes), None
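

# Hedged usage sketch (illustrative, not part of the original module): both
# classes above are legacy autograd Functions and are driven through
# Function.apply. The shapes and values below are made up.
if __name__ == "__main__":
    base = torch.ones(3, 2, requires_grad=True)
    t = base.t()                        # non-contiguous view of shape (2, 3)
    out = Resize.apply(t, (6,))         # reshape, keeping all 6 elements
    out.sum().backward()                # gradient flows back to base's shape
    print(out.shape, base.grad.shape)   # torch.Size([6]) torch.Size([3, 2])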