transformers / test_modeling_tf_distilbert.py
# coding=utf-8
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from __future__ import annotations

import unittest

from transformers import DistilBertConfig, is_tf_available
from transformers.testing_utils import require_tf, slow

from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask
from ...test_pipeline_mixin import PipelineTesterMixin


if is_tf_available():
    import tensorflow as tf

    from transformers.models.distilbert.modeling_tf_distilbert import (
        TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
        TFDistilBertForMaskedLM,
        TFDistilBertForMultipleChoice,
        TFDistilBertForQuestionAnswering,
        TFDistilBertForSequenceClassification,
        TFDistilBertForTokenClassification,
        TFDistilBertModel,
    )


class TFDistilBertModelTester:
    def __init__(
        self,
        parent,
    ):
        self.parent = parent
        self.batch_size = 13
        self.seq_length = 7
        self.is_training = True
        self.use_input_mask = True
        self.use_token_type_ids = False
        self.use_labels = True
        self.vocab_size = 99
        self.hidden_size = 32
        self.num_hidden_layers = 2
        self.num_attention_heads = 4
        self.intermediate_size = 37
        self.hidden_act = "gelu"
        self.hidden_dropout_prob = 0.1
        self.attention_probs_dropout_prob = 0.1
        self.max_position_embeddings = 512
        self.type_vocab_size = 16
        self.type_sequence_label_size = 2
        self.initializer_range = 0.02
        self.num_labels = 3
        self.num_choices = 4
        self.scope = None

    def prepare_config_and_inputs(self):
        input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)

        input_mask = None
        if self.use_input_mask:
            input_mask = random_attention_mask([self.batch_size, self.seq_length])

        sequence_labels = None
        token_labels = None
        choice_labels = None
        if self.use_labels:
            sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
            token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)
            choice_labels = ids_tensor([self.batch_size], self.num_choices)

        config = DistilBertConfig(
            vocab_size=self.vocab_size,
            dim=self.hidden_size,
            n_layers=self.num_hidden_layers,
            n_heads=self.num_attention_heads,
            hidden_dim=self.intermediate_size,
            hidden_act=self.hidden_act,
            dropout=self.hidden_dropout_prob,
            attention_dropout=self.attention_probs_dropout_prob,
            max_position_embeddings=self.max_position_embeddings,
            initializer_range=self.initializer_range,
        )

        return config, input_ids, input_mask, sequence_labels, token_labels, choice_labels

    def create_and_check_distilbert_model(
        self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        model = TFDistilBertModel(config=config)
        inputs = {"input_ids": input_ids, "attention_mask": input_mask}

        result = model(inputs)

        inputs = [input_ids, input_mask]

        result = model(inputs)

        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))

    def create_and_check_distilbert_for_masked_lm(
        self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        model = TFDistilBertForMaskedLM(config=config)
        inputs = {"input_ids": input_ids, "attention_mask": input_mask}
        result = model(inputs)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))

    def create_and_check_distilbert_for_question_answering(
        self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        model = TFDistilBertForQuestionAnswering(config=config)
        inputs = {
            "input_ids": input_ids,
            "attention_mask": input_mask,
        }
        result = model(inputs)
        self.parent.assertEqual(result.start_logits.shape, (self.batch_size, self.seq_length))
        self.parent.assertEqual(result.end_logits.shape, (self.batch_size, self.seq_length))

    def create_and_check_distilbert_for_sequence_classification(
        self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        config.num_labels = self.num_labels
        model = TFDistilBertForSequenceClassification(config)
        inputs = {"input_ids": input_ids, "attention_mask": input_mask}
        result = model(inputs)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))

    def create_and_check_distilbert_for_multiple_choice(
        self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        config.num_choices = self.num_choices
        model = TFDistilBertForMultipleChoice(config)
        multiple_choice_inputs_ids = tf.tile(tf.expand_dims(input_ids, 1), (1, self.num_choices, 1))
        multiple_choice_input_mask = tf.tile(tf.expand_dims(input_mask, 1), (1, self.num_choices, 1))
        inputs = {
            "input_ids": multiple_choice_inputs_ids,
            "attention_mask": multiple_choice_input_mask,
        }
        result = model(inputs)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_choices))

    def create_and_check_distilbert_for_token_classification(
        self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        config.num_labels = self.num_labels
        model = TFDistilBertForTokenClassification(config)
        inputs = {"input_ids": input_ids, "attention_mask": input_mask}
        result = model(inputs)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.num_labels))

    def prepare_config_and_inputs_for_common(self):
        config_and_inputs = self.prepare_config_and_inputs()
        (config, input_ids, input_mask, sequence_labels, token_labels, choice_labels) = config_and_inputs
        inputs_dict = {"input_ids": input_ids, "attention_mask": input_mask}
        return config, inputs_dict
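
For reference, the tiny configuration assembled by TFDistilBertModelTester above can be exercised on its own. The snippet below is an illustrative sketch, not part of the original test file: it assumes TensorFlow and transformers are installed and simply mirrors the tester's values (vocab_size=99, dim=32, n_layers=2, n_heads=4, hidden_dim=37, seq_length=7).

import tensorflow as tf

from transformers import DistilBertConfig, TFDistilBertModel

# Tiny DistilBERT with the same sizes the tester uses; note DistilBertConfig's
# argument names (dim, n_layers, n_heads, hidden_dim) differ from BERT-style configs.
config = DistilBertConfig(vocab_size=99, dim=32, n_layers=2, n_heads=4, hidden_dim=37)
model = TFDistilBertModel(config)

input_ids = tf.constant([[5, 6, 7, 8, 9, 10, 11]])  # batch_size=1, seq_length=7, ids < vocab_size
outputs = model(input_ids)
print(outputs.last_hidden_state.shape)  # (1, 7, 32), the shape create_and_check_distilbert_model asserts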


@require_tf
class TFDistilBertModelTest(TFModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
    all_model_classes = (
        (
            TFDistilBertModel,
            TFDistilBertForMaskedLM,
            TFDistilBertForQuestionAnswering,
            TFDistilBertForSequenceClassification,
            TFDistilBertForTokenClassification,
            TFDistilBertForMultipleChoice,
        )
        if is_tf_available()
        else None
    )
    pipeline_model_mapping = (
        {
            "feature-extraction": TFDistilBertModel,
            "fill-mask": TFDistilBertForMaskedLM,
            "question-answering": TFDistilBertForQuestionAnswering,
            "text-classification": TFDistilBertForSequenceClassification,
            "token-classification": TFDistilBertForTokenClassification,
            "zero-shot": TFDistilBertForSequenceClassification,
        }
        if is_tf_available()
        else {}
    )
    test_head_masking = False
    test_onnx = False

    def setUp(self):
        self.model_tester = TFDistilBertModelTester(self)
        self.config_tester = ConfigTester(self, config_class=DistilBertConfig, dim=37)

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_distilbert_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_distilbert_model(*config_and_inputs)

    def test_for_masked_lm(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_distilbert_for_masked_lm(*config_and_inputs)

    def test_for_question_answering(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_distilbert_for_question_answering(*config_and_inputs)

    def test_for_sequence_classification(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_distilbert_for_sequence_classification(*config_and_inputs)

    def test_for_multiple_choice(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_distilbert_for_multiple_choice(*config_and_inputs)

    def test_for_token_classification(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_distilbert_for_token_classification(*config_and_inputs)

    @slow
    def test_model_from_pretrained(self):
        for model_name in list(TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST[:1]):
            model = TFDistilBertModel.from_pretrained(model_name)
            self.assertIsNotNone(model)


@require_tf
class TFDistilBertModelIntegrationTest(unittest.TestCase):
    @slow
    def test_inference_masked_lm(self):
        model = TFDistilBertModel.from_pretrained("distilbert-base-uncased")
        input_ids = tf.constant([[0, 1, 2, 3, 4, 5]])
        output = model(input_ids)[0]

        expected_shape = [1, 6, 768]
        self.assertEqual(output.shape, expected_shape)

        expected_slice = tf.constant(
            [
                [
                    [0.19261885, -0.13732955, 0.4119799],
                    [0.22150156, -0.07422661, 0.39037204],
                    [0.22756018, -0.0896414, 0.3701467],
                ]
            ]
        )
        tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=1e-4)
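
The suite above runs under pytest like any other transformers test module; a typical invocation, assuming the file sits at its usual location in a transformers source checkout with TensorFlow installed:

    python -m pytest tests/models/distilbert/test_modeling_tf_distilbert.py -v

The @slow tests (test_model_from_pretrained and the integration check) are skipped by default and only run when RUN_SLOW=1 is set in the environment.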