CSS-LM

modeling_xlm_roberta.py 
137 lines · 5.0 KB
# coding=utf-8
# Copyright 2019 Facebook AI Research and the HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch XLM-RoBERTa model."""


import logging

from .configuration_xlm_roberta import XLMRobertaConfig
from .file_utils import add_start_docstrings
from .modeling_roberta import (
    RobertaForMaskedLM,
    RobertaForMultipleChoice,
    RobertaForQuestionAnswering,
    RobertaForSequenceClassification,
    RobertaForTokenClassification,
    RobertaModel,
)


logger = logging.getLogger(__name__)

XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = [
    "xlm-roberta-base",
    "xlm-roberta-large",
    "xlm-roberta-large-finetuned-conll02-dutch",
    "xlm-roberta-large-finetuned-conll02-spanish",
    "xlm-roberta-large-finetuned-conll03-english",
    "xlm-roberta-large-finetuned-conll03-german",
    # See all XLM-RoBERTa models at https://huggingface.co/models?filter=xlm-roberta
]


XLM_ROBERTA_START_DOCSTRING = r"""

    This model is a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`_ sub-class.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matters related to general
    usage and behavior.

    Parameters:
        config (:class:`~transformers.XLMRobertaConfig`): Model configuration class with all the parameters of the
            model. Initializing with a config file does not load the weights associated with the model, only the configuration.
            Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
"""


@add_start_docstrings(
    "The bare XLM-RoBERTa Model transformer outputting raw hidden-states without any specific head on top.",
    XLM_ROBERTA_START_DOCSTRING,
)
class XLMRobertaModel(RobertaModel):
    """
    This class overrides :class:`~transformers.RobertaModel`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    """

    config_class = XLMRobertaConfig


@add_start_docstrings(
    """XLM-RoBERTa Model with a `language modeling` head on top. """, XLM_ROBERTA_START_DOCSTRING,
)
class XLMRobertaForMaskedLM(RobertaForMaskedLM):
    """
    This class overrides :class:`~transformers.RobertaForMaskedLM`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    """

    config_class = XLMRobertaConfig


@add_start_docstrings(
    """XLM-RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
    on top of the pooled output) e.g. for GLUE tasks. """,
    XLM_ROBERTA_START_DOCSTRING,
)
class XLMRobertaForSequenceClassification(RobertaForSequenceClassification):
    """
    This class overrides :class:`~transformers.RobertaForSequenceClassification`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    """

    config_class = XLMRobertaConfig


@add_start_docstrings(
    """XLM-RoBERTa Model with a multiple choice classification head on top (a linear layer on top of
    the pooled output and a softmax) e.g. for RocStories/SWAG tasks. """,
    XLM_ROBERTA_START_DOCSTRING,
)
class XLMRobertaForMultipleChoice(RobertaForMultipleChoice):
    """
    This class overrides :class:`~transformers.RobertaForMultipleChoice`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    """

    config_class = XLMRobertaConfig


@add_start_docstrings(
    """XLM-RoBERTa Model with a token classification head on top (a linear layer on top of
    the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks. """,
    XLM_ROBERTA_START_DOCSTRING,
)
class XLMRobertaForTokenClassification(RobertaForTokenClassification):
    """
    This class overrides :class:`~transformers.RobertaForTokenClassification`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    """

    config_class = XLMRobertaConfig


@add_start_docstrings(
    """XLM-RoBERTa Model with a span classification head on top for extractive question-answering tasks like SQuAD
    (linear layers on top of the hidden-states output to compute `span start logits` and `span end logits`).""",
    XLM_ROBERTA_START_DOCSTRING,
)
class XLMRobertaForQuestionAnswering(RobertaForQuestionAnswering):
    """
    This class overrides :class:`~transformers.RobertaForQuestionAnswering`. Please check the
    superclass for the appropriate documentation alongside usage examples.
    """

    config_class = XLMRobertaConfig
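
The file above is the complete module: every class is a thin subclass of its RoBERTa counterpart that overrides only `config_class`, because XLM-RoBERTa shares RoBERTa's architecture and differs only in its tokenizer, vocabulary, and multilingual pretraining data. What follows is a minimal usage sketch, not part of the original file; it assumes the pre-v4 `transformers` API this module targets (forward passes return tuples) and access to the "xlm-roberta-base" checkpoint listed above.

    # Hypothetical usage sketch, not part of modeling_xlm_roberta.py.
    # Assumes the pre-v4 transformers API, where a forward pass returns a
    # tuple whose first element is the masked-LM prediction scores.
    import torch
    from transformers import XLMRobertaForMaskedLM, XLMRobertaTokenizer

    tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")
    model = XLMRobertaForMaskedLM.from_pretrained("xlm-roberta-base")
    model.eval()

    # XLM-R uses "<mask>" as its mask token; mask one position and rank fills.
    input_ids = tokenizer.encode("The capital of France is <mask>.", return_tensors="pt")
    with torch.no_grad():
        prediction_scores = model(input_ids)[0]  # (batch, seq_len, vocab_size)

    # Locate the masked position and print the top-5 candidate tokens.
    mask_pos = (input_ids[0] == tokenizer.mask_token_id).nonzero()[0].item()
    top5 = prediction_scores[0, mask_pos].topk(5).indices.tolist()
    print([tokenizer.decode([i]).strip() for i in top5])

The same `from_pretrained` pattern applies to the other heads (`XLMRobertaForSequenceClassification`, `XLMRobertaForTokenClassification`, and so on); the `config_class` override is what routes each checkpoint's configuration to the right model family.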
