from typing import List

import torch
from torch import Tensor
from torch._ops import ops

__all__ = ['FloatFunctional', 'FXFloatFunctional', 'QFunctional']


class FloatFunctional(torch.nn.Module):
    r"""State collector class for float operations.

    The instance of this class can be used instead of the ``torch.`` prefix for
    some operations. See example usage below.

    .. note::

        This class does not provide a ``forward`` hook. Instead, you must use
        one of the underlying functions (e.g. ``add``).

    Examples::

        >>> f_add = FloatFunctional()
        >>> a = torch.tensor(3.0)
        >>> b = torch.tensor(4.0)
        >>> f_add.add(a, b)  # Equivalent to ``torch.add(a, b)``

    Valid operation names:
        - add
        - cat
        - mul
        - add_relu
        - add_scalar
        - mul_scalar
    """

    def __init__(self):
        super(FloatFunctional, self).__init__()
        self.activation_post_process = torch.nn.Identity()

    def forward(self, x):
        raise RuntimeError("FloatFunctional is not intended to use the " +
                           "'forward'. Please use the underlying operation")

    r"""Operation equivalent to ``torch.add(Tensor, Tensor)``"""
    def add(self, x: Tensor, y: Tensor) -> Tensor:
        r = torch.add(x, y)
        r = self.activation_post_process(r)
        return r

    r"""Operation equivalent to ``torch.add(Tensor, float)``"""
    def add_scalar(self, x: Tensor, y: float) -> Tensor:
        r = torch.add(x, y)
        # Note: this operation is not observed because the observation is not
        # needed for the quantized op.
        return r

    r"""Operation equivalent to ``torch.mul(Tensor, Tensor)``"""
    def mul(self, x: Tensor, y: Tensor) -> Tensor:
        r = torch.mul(x, y)
        r = self.activation_post_process(r)
        return r

    r"""Operation equivalent to ``torch.mul(Tensor, float)``"""
    def mul_scalar(self, x: Tensor, y: float) -> Tensor:
        r = torch.mul(x, y)
        # Note: this operation is not observed because the observation is not
        # needed for the quantized op.
        return r

    r"""Operation equivalent to ``torch.cat``"""
    def cat(self, x: List[Tensor], dim: int = 0) -> Tensor:
        r = torch.cat(x, dim=dim)
        r = self.activation_post_process(r)
        return r

    r"""Operation equivalent to ``relu(torch.add(x,y))``"""
    def add_relu(self, x: Tensor, y: Tensor) -> Tensor:
        r = torch.add(x, y)
        r = torch.nn.functional.relu(r)
        r = self.activation_post_process(r)
        return r
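
# Usage sketch (illustrative comment only; the observer swap below reflects how
# eager-mode ``prepare`` is expected to use this class, but these exact lines
# are an assumption, not part of this module)::
#
#     ff = FloatFunctional()
#     ff.activation_post_process = torch.ao.quantization.MinMaxObserver()
#     ff.add_relu(torch.rand(4), torch.rand(4))  # output statistics are recorded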
dee eedddZeeedddZdS )r   a$   module to replace FloatFunctional module before FX graph mode quantization,
    since activation_post_process will be inserted in top level module directly

    Valid operation names:
        - add
        - cat
        - mul
        - add_relu
        - add_scalar
        - mul_scalar
    """
    def forward(self, x):
        raise RuntimeError("FloatFunctional is not intended to use the " +
                           "'forward'. Please use the underlying operation")

    r"""Operation equivalent to ``torch.add(Tensor, Tensor)``"""
    def add(self, x: Tensor, y: Tensor) -> Tensor:
        r = torch.add(x, y)
        return r

    r"""Operation equivalent to ``torch.add(Tensor, float)``"""
    def add_scalar(self, x: Tensor, y: float) -> Tensor:
        r = torch.add(x, y)
        return r

    r"""Operation equivalent to ``torch.mul(Tensor, Tensor)``"""
    def mul(self, x: Tensor, y: Tensor) -> Tensor:
        r = torch.mul(x, y)
        return r

    r"""Operation equivalent to ``torch.mul(Tensor, float)``"""
    def mul_scalar(self, x: Tensor, y: float) -> Tensor:
        r = torch.mul(x, y)
        return r

    r"""Operation equivalent to ``torch.cat``"""
    def cat(self, x: List[Tensor], dim: int = 0) -> Tensor:
        r = torch.cat(x, dim=dim)
        return r

    r"""Operation equivalent to ``relu(torch.add(x,y))``"""
    def add_relu(self, x: Tensor, y: Tensor) -> Tensor:
        r = torch.add(x, y)
        r = torch.nn.functional.relu(r)
        return r
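
# Note the contrast with ``FloatFunctional`` above: these methods return the
# raw result without an ``activation_post_process`` call, since FX graph mode
# quantization inserts observers directly into the top-level graph.
# Illustrative example (an assumption, not part of the module)::
#
#     ff = FXFloatFunctional()
#     ff.add(torch.tensor(1.0), torch.tensor(2.0))  # tensor(3.)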
dddZedd Z  ZS ) r   a  Wrapper class for quantized operations.

    The instance of this class can be used instead of the
    ``torch.ops.quantized`` prefix. See example usage below.

    .. note::

        This class does not provide a ``forward`` hook. Instead, you must use
        one of the underlying functions (e.g. ``add``).

    Examples::

        >>> q_add = QFunctional()
        >>> # xdoctest: +SKIP
        >>> a = torch.quantize_per_tensor(torch.tensor(3.0), 1.0, 0, torch.qint32)
        >>> b = torch.quantize_per_tensor(torch.tensor(4.0), 1.0, 0, torch.qint32)
        >>> q_add.add(a, b)  # Equivalent to ``torch.ops.quantized.add(a, b, 1.0, 0)``

    Valid operation names:
        - add
        - cat
        - mul
        - add_relu
        - add_scalar
        - mul_scalar
    """

    def __init__(self):
        super(QFunctional, self).__init__()
        self.scale = 1.0
        self.zero_point = 0
        self.activation_post_process = torch.nn.Identity()

    def _save_to_state_dict(self, destination, prefix, keep_vars):
        super(QFunctional, self)._save_to_state_dict(destination, prefix, keep_vars)
        destination[prefix + 'scale'] = torch.tensor(self.scale)
        destination[prefix + 'zero_point'] = torch.tensor(self.zero_point)

    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        self.scale = float(state_dict.pop(prefix + 'scale'))
        self.zero_point = int(state_dict.pop(prefix + 'zero_point'))
        super(QFunctional, self)._load_from_state_dict(
            state_dict, prefix, local_metadata, False,
            missing_keys, unexpected_keys, error_msgs)

    def _get_name(self):
        return 'QFunctional'

    def extra_repr(self):
        return 'scale={}, zero_point={}'.format(self.scale, self.zero_point)

    def forward(self, x):
        raise RuntimeError("Functional is not intended to use the " +
                           "'forward'. Please use the underlying operation")

    r"""Operation equivalent to ``torch.ops.quantized.add``"""
    def add(self, x: Tensor, y: Tensor) -> Tensor:
        r = ops.quantized.add(x, y, scale=self.scale, zero_point=self.zero_point)
        r = self.activation_post_process(r)
        return r

    r"""Operation equivalent to ``torch.ops.quantized.add(Tensor, float)``"""
    def add_scalar(self, x: Tensor, y: float) -> Tensor:
        r = ops.quantized.add_scalar(x, y)
        # Note: this operation is not observed because the observation is not
        # needed for the quantized op.
        return r

    r"""Operation equivalent to ``torch.ops.quantized.mul(Tensor, Tensor)``"""
    def mul(self, x: Tensor, y: Tensor) -> Tensor:
        r = ops.quantized.mul(x, y, scale=self.scale, zero_point=self.zero_point)
        r = self.activation_post_process(r)
        return r

    r"""Operation equivalent to ``torch.ops.quantized.mul(Tensor, float)``"""
    def mul_scalar(self, x: Tensor, y: float) -> Tensor:
        r = ops.quantized.mul_scalar(x, y)
        # Note: this operation is not observed because the observation is not
        # needed for the quantized op.
        return r

    r"""Operation equivalent to ``torch.ops.quantized.cat``"""
    def cat(self, x: List[Tensor], dim: int = 0) -> Tensor:
        r = ops.quantized.cat(x, scale=self.scale, zero_point=self.zero_point, dim=dim)
        r = self.activation_post_process(r)
        return r

    r"""Operation equivalent to ``torch.ops.quantized.add_relu``"""
    def add_relu(self, x: Tensor, y: Tensor) -> Tensor:
        r = ops.quantized.add_relu(x, y, scale=self.scale, zero_point=self.zero_point)
        r = self.activation_post_process(r)
        return r

    @classmethod
    def from_float(cls, mod):
        assert type(mod) == FloatFunctional, \
            "QFunctional.from_float expects an instance of FloatFunctional"
        scale, zero_point = mod.activation_post_process.calculate_qparams()
        new_mod = QFunctional()
        new_mod.scale = float(scale)
        new_mod.zero_point = int(zero_point)
        return new_mod