import torch
from torch import Tensor
import torch.nn as nn
import torch.nn.functional as F

__all__ = ['Embedding', 'EmbeddingBag']


class Embedding(nn.Embedding):
    r"""
    An embedding module attached with FakeQuantize modules for weight,
    used for quantization aware training.

    We adopt the same interface as `torch.nn.Embedding`, please see
    https://pytorch.org/docs/stable/generated/torch.nn.Embedding.html#torch.nn.Embedding
    for documentation.

    Similar to `torch.nn.Embedding`, with FakeQuantize modules initialized to
    default.

    Attributes:
        weight: fake quant module for weight
    """
    _FLOAT_MODULE = nn.Embedding

    def __init__(self, num_embeddings, embedding_dim, padding_idx=None,
                 max_norm=None, norm_type=2.0, scale_grad_by_freq=False,
                 sparse=False, _weight=None, device=None, dtype=None,
                 qconfig=None) -> None:
        factory_kwargs = {'device': device, 'dtype': dtype}
        super().__init__(num_embeddings, embedding_dim, padding_idx, max_norm,
                         norm_type, scale_grad_by_freq, sparse, _weight,
                         **factory_kwargs)
        assert qconfig, 'qconfig must be provided for QAT module'
        assert qconfig.weight().qscheme == torch.per_channel_affine_float_qparams, \
            'Embedding weights requires a qscheme of ' \
            'torch.per_channel_affine_float_qparams Got ' + \
            str(qconfig.weight().qscheme)
        self.qconfig = qconfig
        self.weight_fake_quant = qconfig.weight(factory_kwargs=factory_kwargs)

    def forward(self, input) -> Tensor:
        # Fake-quantize the weight on every forward pass so training sees the
        # quantization noise on the embedding table.
        return F.embedding(input, self.weight_fake_quant(self.weight),
                           self.padding_idx, self.max_norm, self.norm_type,
                           self.scale_grad_by_freq, self.sparse)

    @classmethod
    def from_float(cls, mod):
        r"""Create a qat module from a float module

            Args: `mod` a float module, either produced by torch.ao.quantization
            utilities or directly from user
        """
        assert type(mod) == cls._FLOAT_MODULE, \
            'qat.' + cls.__name__ + '.from_float only works for ' + \
            cls._FLOAT_MODULE.__name__
        assert hasattr(mod, 'qconfig'), \
            'Input float module must have qconfig defined'
        assert mod.qconfig, 'Input float module must have a valid qconfig'
        weight_qscheme = mod.qconfig.weight().qscheme
        assert weight_qscheme == torch.per_channel_affine_float_qparams, \
            'Embedding weights requires a qscheme of ' \
            'torch.per_channel_affine_float_qparams Got ' + \
            str(weight_qscheme)

        qconfig = mod.qconfig
        qat_embedding_bag = cls(mod.num_embeddings, mod.embedding_dim,
                                mod.padding_idx, mod.max_norm, mod.norm_type,
                                mod.scale_grad_by_freq, mod.sparse, mod.weight,
                                qconfig=qconfig)

        return qat_embedding_bag

    def to_float(self):
        # Rebuild a plain nn.Embedding with the same hyperparameters and the
        # current (float) weights, preserving the training flag.
        embedding_bag = torch.nn.Embedding(self.num_embeddings,
                                           self.embedding_dim,
                                           self.padding_idx, self.max_norm,
                                           self.norm_type,
                                           self.scale_grad_by_freq,
                                           self.sparse, None)
        embedding_bag.weight = torch.nn.Parameter(self.weight.detach())
        embedding_bag.train(self.training)
        return embedding_bag
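
# Illustrative usage sketch for the QAT Embedding above (not part of the
# module). It assumes `default_embedding_qat_qconfig` is available from
# torch.ao.quantization; any qconfig whose weight fake-quant uses the required
# torch.per_channel_affine_float_qparams qscheme would work the same way.
#
#     from torch.ao.quantization import default_embedding_qat_qconfig
#
#     qat_emb = Embedding(num_embeddings=10, embedding_dim=4,
#                         qconfig=default_embedding_qat_qconfig)
#     indices = torch.tensor([[1, 2, 4, 5], [4, 3, 2, 9]])
#     out = qat_emb(indices)          # weight is fake-quantized in forward
#     float_emb = qat_emb.to_float()  # back to a plain nn.Embedding
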
class EmbeddingBag(nn.EmbeddingBag):
    r"""
    An embedding bag module attached with FakeQuantize modules for weight,
    used for quantization aware training.

    We adopt the same interface as `torch.nn.EmbeddingBag`, please see
    https://pytorch.org/docs/stable/generated/torch.nn.EmbeddingBag.html#torch.nn.EmbeddingBag
    for documentation.

    Similar to `torch.nn.EmbeddingBag`, with FakeQuantize modules initialized to
    default.

    Attributes:
        weight: fake quant module for weight
    """
    _FLOAT_MODULE = nn.EmbeddingBag

    def __init__(self, num_embeddings, embedding_dim, max_norm=None,
                 norm_type=2.0, scale_grad_by_freq=False, mode='mean',
                 sparse=False, _weight=None, include_last_offset=False,
                 padding_idx=None, qconfig=None, device=None,
                 dtype=None) -> None:
        factory_kwargs = {'device': device, 'dtype': dtype}
        super().__init__(num_embeddings, embedding_dim, max_norm, norm_type,
                         scale_grad_by_freq, mode, sparse, _weight,
                         include_last_offset, padding_idx, **factory_kwargs)
        assert qconfig, 'qconfig must be provided for QAT module'
        assert qconfig.weight().qscheme == torch.per_channel_affine_float_qparams, \
            'Embedding Bag weights requires a qscheme of ' \
            'torch.per_channel_affine_float_qparams Got ' + \
            str(qconfig.weight().qscheme)
        self.qconfig = qconfig
        self.weight_fake_quant = qconfig.weight(factory_kwargs=factory_kwargs)

    def forward(self, input, offsets=None, per_sample_weights=None) -> Tensor:
        return F.embedding_bag(input, self.weight_fake_quant(self.weight),
                               offsets, self.max_norm, self.norm_type,
                               self.scale_grad_by_freq, self.mode, self.sparse,
                               per_sample_weights, self.include_last_offset,
                               self.padding_idx)

    @classmethod
    def from_float(cls, mod):
        r"""Create a qat module from a float module

            Args: `mod` a float module, either produced by torch.ao.quantization
            utilities or directly from user
        """
        assert type(mod) == cls._FLOAT_MODULE, \
            'qat.' + cls.__name__ + '.from_float only works for ' + \
            cls._FLOAT_MODULE.__name__
        assert hasattr(mod, 'qconfig'), \
            'Input float module must have qconfig defined'
        assert mod.qconfig, 'Input float module must have a valid qconfig'
        weight_qscheme = mod.qconfig.weight().qscheme
        assert weight_qscheme == torch.per_channel_affine_float_qparams, \
            'Embedding Bag weights requires a qscheme of ' \
            'torch.per_channel_affine_float_qparams Got ' + \
            str(weight_qscheme)

        qconfig = mod.qconfig
        qat_embedding_bag = cls(mod.num_embeddings, mod.embedding_dim,
                                mod.max_norm, mod.norm_type,
                                mod.scale_grad_by_freq, mod.mode, mod.sparse,
                                mod.weight, mod.include_last_offset,
                                mod.padding_idx, qconfig=qconfig)

        return qat_embedding_bag

    def to_float(self):
        embedding_bag = torch.nn.EmbeddingBag(self.num_embeddings,
                                              self.embedding_dim,
                                              self.max_norm, self.norm_type,
                                              self.scale_grad_by_freq,
                                              self.mode, self.sparse, None,
                                              self.include_last_offset,
                                              self.padding_idx)
        embedding_bag.weight = torch.nn.Parameter(self.weight.detach())
        embedding_bag.train(self.training)
        return embedding_bag
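

# Minimal smoke-test sketch for the QAT EmbeddingBag, runnable as a script.
# It assumes `default_embedding_qat_qconfig` is exposed by torch.ao.quantization;
# any qconfig whose weight observer uses torch.per_channel_affine_float_qparams
# would satisfy the assertions above equally well.
if __name__ == "__main__":
    from torch.ao.quantization import default_embedding_qat_qconfig

    # Start from a regular float module with a qconfig attached, then swap it
    # for its fake-quantized QAT counterpart via from_float.
    float_bag = nn.EmbeddingBag(num_embeddings=10, embedding_dim=4, mode='sum')
    float_bag.qconfig = default_embedding_qat_qconfig
    qat_bag = EmbeddingBag.from_float(float_bag)

    # A flat index tensor with offsets marking the start of each bag.
    indices = torch.tensor([1, 2, 4, 5, 4, 3, 2, 9])
    offsets = torch.tensor([0, 4])
    out = qat_bag(indices, offsets)  # weights are fake-quantized in forward
    print(out.shape)                 # expected: torch.Size([2, 4])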