""" TF 2.0 XLM-RoBERTa model. """


import logging

from .configuration_xlm_roberta import XLMRobertaConfig
from .file_utils import add_start_docstrings
from .modeling_tf_roberta import (
    TFRobertaForMaskedLM,
    TFRobertaForSequenceClassification,
    TFRobertaForTokenClassification,
    TFRobertaModel,
)


logger = logging.getLogger(__name__)

TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP = {}


XLM_ROBERTA_START_DOCSTRING = r"""

    .. note::

        TF 2.0 models accept two formats as inputs:

            - having all inputs as keyword arguments (like PyTorch models), or
            - having all inputs as a list, tuple or dict in the first positional argument.

        This second option is useful when using the :obj:`tf.keras.Model.fit()` method, which currently requires
        having all the tensors in the first argument of the model call function: :obj:`model(inputs)`.

        If you choose this second option, there are three possibilities you can use to gather all the input Tensors
        in the first positional argument:

        - a single Tensor with input_ids only and nothing else: :obj:`model(input_ids)`
        - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
          :obj:`model([input_ids, attention_mask])` or :obj:`model([input_ids, attention_mask, token_type_ids])`
        - a dictionary with one or several input Tensors associated with the input names given in the docstring:
          :obj:`model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`

    Parameters:
        config (:class:`~transformers.XLMRobertaConfig`): Model configuration class with all the parameters of the
            model. Initializing with a config file does not load the weights associated with the model, only the
            configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the
            model weights.
"""
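# A minimal sketch of the three input formats described in the note above,
# kept as a comment so nothing runs at import time. The checkpoint names used
# here are illustrative only (the archive map above is empty); any
# XLM-RoBERTa checkpoint with TF weights would work the same way:
#
#     from transformers import XLMRobertaTokenizer, TFXLMRobertaModel
#
#     tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")
#     model = TFXLMRobertaModel.from_pretrained("jplu/tf-xlm-roberta-base")
#
#     encoding = tokenizer.encode_plus("Hello world!", return_tensors="tf")
#     input_ids = encoding["input_ids"]
#     attention_mask = encoding["attention_mask"]
#
#     outputs = model(input_ids, attention_mask=attention_mask)  # 1. keyword arguments
#     outputs = model([input_ids, attention_mask])                # 2. list, in docstring order
#     outputs = model({"input_ids": input_ids,                    # 3. dict keyed by input names
#                      "attention_mask": attention_mask})
#     sequence_output = outputs[0]  # raw last hidden states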
zeThe bare XLM-RoBERTa Model transformer outputting raw hidden-states without any specific head on top.c                   @   s   e Zd ZdZeZeZdS )ÚTFXLMRobertaModelz¡
    This class overrides :class:`~transformers.TFRobertaModel`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    N©Ú__name__Ú
__module__Ú__qualname__Ú__doc__r   Zconfig_classÚ+TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAPZpretrained_model_archive_map© r   r   úH/tmp/pip-unpacked-wheel-ymerj3tt/transformers/modeling_tf_xlm_roberta.pyr	   @   s   r	   z:XLM-RoBERTa Model with a `language modeling` head on top. c                   @   s   e Zd ZdZeZeZdS )ÚTFXLMRobertaForMaskedLMz§
    This class overrides :class:`~transformers.TFRobertaForMaskedLM`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    Nr
   r   r   r   r   r   N   s   r   zšXLM-RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
    on top of the pooled output) e.g. for GLUE tasks. c                   @   s   e Zd ZdZeZeZdS )Ú%TFXLMRobertaForSequenceClassificationzµ
    This class overrides :class:`~transformers.TFRobertaForSequenceClassification`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    Nr
   r   r   r   r   r   [   s   r   z¡XLM-RoBERTa Model with a token classification head on top (a linear layer on top of
    the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks. c                   @   s   e Zd ZdZeZeZdS )Ú"TFXLMRobertaForTokenClassificationz²
    This class overrides :class:`~transformers.TFRobertaForTokenClassification`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    Nr
   r   r   r   r   r   j   s   r   )r   ÚloggingZconfiguration_xlm_robertar   Z
file_utilsr   Zmodeling_tf_robertar   r   r   r   Ú	getLoggerr   Úloggerr   ZXLM_ROBERTA_START_DOCSTRINGr	   r   r   r   r   r   r   r   Ú<module>   s6   
þ
 ÿ
ý
ý
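# A short sketch of the two initialization paths mentioned in the
# ``Parameters`` section of XLM_ROBERTA_START_DOCSTRING, again kept as a
# comment; the checkpoint name and ``num_labels`` value are illustrative:
#
#     from transformers import XLMRobertaConfig, TFXLMRobertaForSequenceClassification
#
#     # Building from a config creates the architecture with freshly
#     # initialized (random) weights; nothing is downloaded.
#     config = XLMRobertaConfig(num_labels=3)
#     model = TFXLMRobertaForSequenceClassification(config)
#
#     # from_pretrained() downloads the checkpoint and loads trained weights.
#     model = TFXLMRobertaForSequenceClassification.from_pretrained("jplu/tf-xlm-roberta-base")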