""" ELECTRA model configuration """


import logging

from .configuration_utils import PretrainedConfig


logger = logging.getLogger(__name__)

ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "google/electra-small-generator": "https://s3.amazonaws.com/models.huggingface.co/bert/google/electra-small-generator/config.json",
    "google/electra-base-generator": "https://s3.amazonaws.com/models.huggingface.co/bert/google/electra-base-generator/config.json",
    "google/electra-large-generator": "https://s3.amazonaws.com/models.huggingface.co/bert/google/electra-large-generator/config.json",
    "google/electra-small-discriminator": "https://s3.amazonaws.com/models.huggingface.co/bert/google/electra-small-discriminator/config.json",
    "google/electra-base-discriminator": "https://s3.amazonaws.com/models.huggingface.co/bert/google/electra-base-discriminator/config.json",
    "google/electra-large-discriminator": "https://s3.amazonaws.com/models.huggingco.co/bert/google/electra-large-discriminator/config.json".replace("huggingco.co", "huggingface.co"),
}


class ElectraConfig(PretrainedConfig):
    r"""
        This is the configuration class to store the configuration of a :class:`~transformers.ElectraModel`.
        It is used to instantiate an ELECTRA model according to the specified arguments, defining the model
        architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of
        the ELECTRA `google/electra-small-discriminator <https://huggingface.co/google/electra-small-discriminator>`__
        architecture.

        Configuration objects inherit from :class:`~transformers.PretrainedConfig` and can be used
        to control the model outputs. Read the documentation from :class:`~transformers.PretrainedConfig`
        for more information.


        Args:
            vocab_size (:obj:`int`, optional, defaults to 30522):
                Vocabulary size of the ELECTRA model. Defines the number of different tokens that
                can be represented by the `input_ids` passed to the forward method of :class:`~transformers.ElectraModel`.
            embedding_size (:obj:`int`, optional, defaults to 128):
                Dimensionality of the embedding layer (projected to :obj:`hidden_size` inside the model if the two differ).
            hidden_size (:obj:`int`, optional, defaults to 256):
                Dimensionality of the encoder layers and the pooler layer.
            num_hidden_layers (:obj:`int`, optional, defaults to 12):
                Number of hidden layers in the Transformer encoder.
            num_attention_heads (:obj:`int`, optional, defaults to 4):
                Number of attention heads for each attention layer in the Transformer encoder.
            intermediate_size (:obj:`int`, optional, defaults to 1024):
                Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder.
            hidden_act (:obj:`str` or :obj:`function`, optional, defaults to "gelu"):
                The non-linear activation function (function or string) in the encoder and pooler.
                If string, "gelu", "relu", "swish" and "gelu_new" are supported.
            hidden_dropout_prob (:obj:`float`, optional, defaults to 0.1):
                The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
            attention_probs_dropout_prob (:obj:`float`, optional, defaults to 0.1):
                The dropout ratio for the attention probabilities.
            max_position_embeddings (:obj:`int`, optional, defaults to 512):
                The maximum sequence length that this model might ever be used with.
                Typically set this to something large just in case (e.g., 512 or 1024 or 2048).
            type_vocab_size (:obj:`int`, optional, defaults to 2):
                The vocabulary size of the `token_type_ids` passed into :class:`~transformers.ElectraModel`.
            initializer_range (:obj:`float`, optional, defaults to 0.02):
                The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
            layer_norm_eps (:obj:`float`, optional, defaults to 1e-12):
                The epsilon used by the layer normalization layers.

        Example::

            from transformers import ElectraModel, ElectraConfig

            # Initializing an ELECTRA google/electra-small-discriminator style configuration
            configuration = ElectraConfig()

            # Initializing a model from the google/electra-small-discriminator style configuration
            model = ElectraModel(configuration)

            # Accessing the model configuration
            configuration = model.config
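
        A configuration can also be loaded from one of the pre-trained checkpoints listed in
        ``ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP``. The snippet below is an illustrative sketch that relies on
        the generic :meth:`~transformers.PretrainedConfig.from_pretrained` method and assumes the checkpoint
        files are reachable for download::

            from transformers import ElectraConfig

            # Load the configuration of a published checkpoint and override a single field
            discriminator_config = ElectraConfig.from_pretrained("google/electra-small-discriminator")
            discriminator_config.hidden_dropout_prob = 0.2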

        Attributes:
            pretrained_config_archive_map (Dict[str, str]):
                A dictionary containing all the available pre-trained checkpoints.
    """

    pretrained_config_archive_map = ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP
    model_type = "electra"

    def __init__(
        self,
        vocab_size=30522,
        embedding_size=128,
        hidden_size=256,
        num_hidden_layers=12,
        num_attention_heads=4,
        intermediate_size=1024,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=512,
        type_vocab_size=2,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        pad_token_id=0,
        **kwargs
    ):
        super().__init__(pad_token_id=pad_token_id, **kwargs)

        self.vocab_size = vocab_size
        self.embedding_size = embedding_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
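
# --- Illustrative usage (editor's sketch, not part of the upstream module) ---
# Quick check that the defaults mirror the google/electra-small-discriminator sizes
# documented in the class docstring. Run with: python -m transformers.configuration_electra
if __name__ == "__main__":
    default_config = ElectraConfig()
    # Expected defaults: hidden_size=256, num_hidden_layers=12, num_attention_heads=4
    print(default_config.hidden_size, default_config.num_hidden_layers, default_config.num_attention_heads)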