import logging
import math

import torch
import torch.nn.functional as F


logger = logging.getLogger(__name__)


def swish(x):
    return x * torch.sigmoid(x)


def _gelu_python(x):
    """ Original Implementation of the gelu activation function in Google Bert repo when initially created.
        For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
        0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
        This is now written in C in torch.nn.functional
        Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))


def gelu_new(x):
    """ Implementation of the gelu activation function currently in Google Bert repo (identical to OpenAI GPT).
        Also see https://arxiv.org/abs/1606.08415
    """
    return 0.5 * x * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3.0))))


if torch.__version__ < "1.4.0":
    gelu = _gelu_python
else:
    gelu = F.gelu


def gelu_fast(x):
    return 0.5 * x * (1.0 + torch.tanh(x * 0.7978845608 * (1.0 + 0.044715 * x * x)))


ACT2FN = {"relu": F.relu, "swish": swish, "gelu": gelu, "tanh": torch.tanh, "gelu_new": gelu_new, "gelu_fast": gelu_fast}


def get_activation(activation_string):
    if activation_string in ACT2FN:
        return ACT2FN[activation_string]
    else:
        raise KeyError("function {} not found in ACT2FN mapping {}".format(activation_string, list(ACT2FN.keys())))
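
# A minimal usage sketch, assuming the module is executed directly: activation
# names are resolved to callables through get_activation / ACT2FN, and the
# erf-based gelu agrees only approximately with the tanh approximation
# gelu_new. The input shape and the chosen activation name below are
# illustrative assumptions, not values used by this module.
if __name__ == "__main__":
    example_input = torch.randn(2, 4)

    act_fn = get_activation("gelu_new")  # unknown names raise KeyError
    print(act_fn(example_input))

    # The exact (erf) gelu and its tanh approximation differ by a small amount.
    diff = (_gelu_python(example_input) - gelu_new(example_input)).abs().max()
    print("max |_gelu_python - gelu_new| =", diff.item())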
