transformers
285 lines · 12.7 KB
1# coding=utf-8
2# Copyright 2018 HuggingFace Inc..
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15"""
16isort:skip_file
17"""
18import os19import pickle20import tempfile21import unittest22from typing import Callable, Optional23
24import numpy as np25
26from transformers import (27BatchEncoding,28BertTokenizer,29BertTokenizerFast,30PreTrainedTokenizer,31PreTrainedTokenizerFast,32TensorType,33TokenSpan,34is_tokenizers_available,35)
36from transformers.models.gpt2.tokenization_gpt2 import GPT2Tokenizer37from transformers.testing_utils import CaptureStderr, require_flax, require_tf, require_tokenizers, require_torch, slow38
39
40if is_tokenizers_available():41from tokenizers import Tokenizer42from tokenizers.models import WordPiece43
44
class TokenizerUtilsTest(unittest.TestCase):
    """Tests for tokenizer plumbing that is independent of any one model:

    - loading tokenizers from pretrained checkpoints,
    - pickling/unpickling ``BatchEncoding`` (slow and fast tokenizers, with and
      without framework tensors),
    - ``BatchEncoding`` tensor conversion (np/pt/tf/jax) and batch-axis handling,
    - ``tokenizer.pad`` accepting already-tensorized features,
    - instantiating ``PreTrainedTokenizerFast`` from a ``tokenizers.Tokenizer``.
    """

    def check_tokenizer_from_pretrained(self, tokenizer_class):
        """Load the first known checkpoint of `tokenizer_class` and sanity-check it.

        Verifies the instance type and that every special token maps to an int id.
        """
        s3_models = list(tokenizer_class.max_model_input_sizes.keys())
        # Only the first checkpoint is exercised to keep the (slow) test cheap.
        for model_name in s3_models[:1]:
            tokenizer = tokenizer_class.from_pretrained(model_name)
            self.assertIsNotNone(tokenizer)
            self.assertIsInstance(tokenizer, tokenizer_class)
            self.assertIsInstance(tokenizer, PreTrainedTokenizer)

            for special_tok in tokenizer.all_special_tokens:
                self.assertIsInstance(special_tok, str)
                special_tok_id = tokenizer.convert_tokens_to_ids(special_tok)
                self.assertIsInstance(special_tok_id, int)

    def assert_dump_and_restore(self, be_original: BatchEncoding, equal_op: Optional[Callable] = None):
        """Round-trip `be_original` through pickle and check the restored copy.

        Args:
            be_original: the ``BatchEncoding`` to serialize.
            equal_op: optional binary predicate used to compare values (e.g.
                ``np.array_equal`` for tensors); falls back to ``assertEqual``.
        """
        batch_encoding_str = pickle.dumps(be_original)
        self.assertIsNotNone(batch_encoding_str)

        be_restored = pickle.loads(batch_encoding_str)

        # Ensure is_fast is correctly restored
        self.assertEqual(be_restored.is_fast, be_original.is_fast)

        # Only fast (Rust-backed) tokenizers carry `encodings`; slow ones
        # must restore it as None.
        if be_original.is_fast:
            self.assertIsNotNone(be_restored.encodings)
        else:
            self.assertIsNone(be_restored.encodings)

        # Ensure the values are the same
        for original_v, restored_v in zip(be_original.values(), be_restored.values()):
            if equal_op:
                self.assertTrue(equal_op(restored_v, original_v))
            else:
                self.assertEqual(restored_v, original_v)

    @slow
    def test_pretrained_tokenizers(self):
        self.check_tokenizer_from_pretrained(GPT2Tokenizer)

    def test_tensor_type_from_str(self):
        # The short framework aliases must resolve to the matching enum members.
        self.assertEqual(TensorType("tf"), TensorType.TENSORFLOW)
        self.assertEqual(TensorType("pt"), TensorType.PYTORCH)
        self.assertEqual(TensorType("np"), TensorType.NUMPY)

    @require_tokenizers
    def test_batch_encoding_pickle(self):
        # NOTE: np is already imported at module level; the previous
        # function-local `import numpy as np` was redundant and removed.
        tokenizer_p = BertTokenizer.from_pretrained("google-bert/bert-base-cased")
        tokenizer_r = BertTokenizerFast.from_pretrained("google-bert/bert-base-cased")

        # Python no tensor
        with self.subTest("BatchEncoding (Python, return_tensors=None)"):
            self.assert_dump_and_restore(tokenizer_p("Small example to encode"))

        with self.subTest("BatchEncoding (Python, return_tensors=NUMPY)"):
            self.assert_dump_and_restore(
                tokenizer_p("Small example to encode", return_tensors=TensorType.NUMPY), np.array_equal
            )

        with self.subTest("BatchEncoding (Rust, return_tensors=None)"):
            self.assert_dump_and_restore(tokenizer_r("Small example to encode"))

        with self.subTest("BatchEncoding (Rust, return_tensors=NUMPY)"):
            self.assert_dump_and_restore(
                tokenizer_r("Small example to encode", return_tensors=TensorType.NUMPY), np.array_equal
            )

    @require_tf
    @require_tokenizers
    def test_batch_encoding_pickle_tf(self):
        import tensorflow as tf

        def tf_array_equals(t1, t2):
            # tf.Tensor has no __eq__ suitable for assert; reduce elementwise equality.
            return tf.reduce_all(tf.equal(t1, t2))

        tokenizer_p = BertTokenizer.from_pretrained("google-bert/bert-base-cased")
        tokenizer_r = BertTokenizerFast.from_pretrained("google-bert/bert-base-cased")

        with self.subTest("BatchEncoding (Python, return_tensors=TENSORFLOW)"):
            self.assert_dump_and_restore(
                tokenizer_p("Small example to encode", return_tensors=TensorType.TENSORFLOW), tf_array_equals
            )

        with self.subTest("BatchEncoding (Rust, return_tensors=TENSORFLOW)"):
            self.assert_dump_and_restore(
                tokenizer_r("Small example to encode", return_tensors=TensorType.TENSORFLOW), tf_array_equals
            )

    @require_torch
    @require_tokenizers
    def test_batch_encoding_pickle_pt(self):
        import torch

        tokenizer_p = BertTokenizer.from_pretrained("google-bert/bert-base-cased")
        tokenizer_r = BertTokenizerFast.from_pretrained("google-bert/bert-base-cased")

        with self.subTest("BatchEncoding (Python, return_tensors=PYTORCH)"):
            self.assert_dump_and_restore(
                tokenizer_p("Small example to encode", return_tensors=TensorType.PYTORCH), torch.equal
            )

        with self.subTest("BatchEncoding (Rust, return_tensors=PYTORCH)"):
            self.assert_dump_and_restore(
                tokenizer_r("Small example to encode", return_tensors=TensorType.PYTORCH), torch.equal
            )

    @require_tokenizers
    def test_batch_encoding_is_fast(self):
        tokenizer_p = BertTokenizer.from_pretrained("google-bert/bert-base-cased")
        tokenizer_r = BertTokenizerFast.from_pretrained("google-bert/bert-base-cased")

        with self.subTest("Python Tokenizer"):
            self.assertFalse(tokenizer_p("Small example to_encode").is_fast)

        with self.subTest("Rust Tokenizer"):
            self.assertTrue(tokenizer_r("Small example to_encode").is_fast)

    @require_tokenizers
    def test_batch_encoding_word_to_tokens(self):
        tokenizer_r = BertTokenizerFast.from_pretrained("google-bert/bert-base-cased")
        # "\xad" (soft hyphen) tokenizes to nothing, so word 1 has no token span.
        encoded = tokenizer_r(["Test", "\xad", "test"], is_split_into_words=True)

        self.assertEqual(encoded.word_to_tokens(0), TokenSpan(start=1, end=2))
        self.assertEqual(encoded.word_to_tokens(1), None)
        self.assertEqual(encoded.word_to_tokens(2), TokenSpan(start=2, end=3))

    def test_batch_encoding_with_labels(self):
        batch = BatchEncoding({"inputs": [[1, 2, 3], [4, 5, 6]], "labels": [0, 1]})
        tensor_batch = batch.convert_to_tensors(tensor_type="np")
        self.assertEqual(tensor_batch["inputs"].shape, (2, 3))
        self.assertEqual(tensor_batch["labels"].shape, (2,))
        # test converting the converted
        with CaptureStderr() as cs:
            tensor_batch = batch.convert_to_tensors(tensor_type="np")
        self.assertFalse(len(cs.err), msg=f"should have no warning, but got {cs.err}")

        batch = BatchEncoding({"inputs": [1, 2, 3], "labels": 0})
        tensor_batch = batch.convert_to_tensors(tensor_type="np", prepend_batch_axis=True)
        self.assertEqual(tensor_batch["inputs"].shape, (1, 3))
        self.assertEqual(tensor_batch["labels"].shape, (1,))

    @require_torch
    def test_batch_encoding_with_labels_pt(self):
        batch = BatchEncoding({"inputs": [[1, 2, 3], [4, 5, 6]], "labels": [0, 1]})
        tensor_batch = batch.convert_to_tensors(tensor_type="pt")
        self.assertEqual(tensor_batch["inputs"].shape, (2, 3))
        self.assertEqual(tensor_batch["labels"].shape, (2,))
        # test converting the converted
        with CaptureStderr() as cs:
            tensor_batch = batch.convert_to_tensors(tensor_type="pt")
        self.assertFalse(len(cs.err), msg=f"should have no warning, but got {cs.err}")

        batch = BatchEncoding({"inputs": [1, 2, 3], "labels": 0})
        tensor_batch = batch.convert_to_tensors(tensor_type="pt", prepend_batch_axis=True)
        self.assertEqual(tensor_batch["inputs"].shape, (1, 3))
        self.assertEqual(tensor_batch["labels"].shape, (1,))

    @require_tf
    def test_batch_encoding_with_labels_tf(self):
        batch = BatchEncoding({"inputs": [[1, 2, 3], [4, 5, 6]], "labels": [0, 1]})
        tensor_batch = batch.convert_to_tensors(tensor_type="tf")
        self.assertEqual(tensor_batch["inputs"].shape, (2, 3))
        self.assertEqual(tensor_batch["labels"].shape, (2,))
        # test converting the converted
        with CaptureStderr() as cs:
            tensor_batch = batch.convert_to_tensors(tensor_type="tf")
        self.assertFalse(len(cs.err), msg=f"should have no warning, but got {cs.err}")

        batch = BatchEncoding({"inputs": [1, 2, 3], "labels": 0})
        tensor_batch = batch.convert_to_tensors(tensor_type="tf", prepend_batch_axis=True)
        self.assertEqual(tensor_batch["inputs"].shape, (1, 3))
        self.assertEqual(tensor_batch["labels"].shape, (1,))

    @require_flax
    def test_batch_encoding_with_labels_jax(self):
        batch = BatchEncoding({"inputs": [[1, 2, 3], [4, 5, 6]], "labels": [0, 1]})
        tensor_batch = batch.convert_to_tensors(tensor_type="jax")
        self.assertEqual(tensor_batch["inputs"].shape, (2, 3))
        self.assertEqual(tensor_batch["labels"].shape, (2,))
        # test converting the converted
        with CaptureStderr() as cs:
            tensor_batch = batch.convert_to_tensors(tensor_type="jax")
        self.assertFalse(len(cs.err), msg=f"should have no warning, but got {cs.err}")

        batch = BatchEncoding({"inputs": [1, 2, 3], "labels": 0})
        tensor_batch = batch.convert_to_tensors(tensor_type="jax", prepend_batch_axis=True)
        self.assertEqual(tensor_batch["inputs"].shape, (1, 3))
        self.assertEqual(tensor_batch["labels"].shape, (1,))

    def test_padding_accepts_tensors(self):
        features = [{"input_ids": np.array([0, 1, 2])}, {"input_ids": np.array([0, 1, 2, 3])}]
        tokenizer = BertTokenizer.from_pretrained("google-bert/bert-base-cased")

        batch = tokenizer.pad(features, padding=True)
        self.assertTrue(isinstance(batch["input_ids"], np.ndarray))
        self.assertEqual(batch["input_ids"].tolist(), [[0, 1, 2, tokenizer.pad_token_id], [0, 1, 2, 3]])
        batch = tokenizer.pad(features, padding=True, return_tensors="np")
        self.assertTrue(isinstance(batch["input_ids"], np.ndarray))
        self.assertEqual(batch["input_ids"].tolist(), [[0, 1, 2, tokenizer.pad_token_id], [0, 1, 2, 3]])

    @require_torch
    def test_padding_accepts_tensors_pt(self):
        import torch

        features = [{"input_ids": torch.tensor([0, 1, 2])}, {"input_ids": torch.tensor([0, 1, 2, 3])}]
        tokenizer = BertTokenizer.from_pretrained("google-bert/bert-base-cased")

        batch = tokenizer.pad(features, padding=True)
        self.assertTrue(isinstance(batch["input_ids"], torch.Tensor))
        self.assertEqual(batch["input_ids"].tolist(), [[0, 1, 2, tokenizer.pad_token_id], [0, 1, 2, 3]])
        batch = tokenizer.pad(features, padding=True, return_tensors="pt")
        self.assertTrue(isinstance(batch["input_ids"], torch.Tensor))
        self.assertEqual(batch["input_ids"].tolist(), [[0, 1, 2, tokenizer.pad_token_id], [0, 1, 2, 3]])

    @require_tf
    def test_padding_accepts_tensors_tf(self):
        import tensorflow as tf

        features = [{"input_ids": tf.constant([0, 1, 2])}, {"input_ids": tf.constant([0, 1, 2, 3])}]
        tokenizer = BertTokenizer.from_pretrained("google-bert/bert-base-cased")

        batch = tokenizer.pad(features, padding=True)
        self.assertTrue(isinstance(batch["input_ids"], tf.Tensor))
        self.assertEqual(batch["input_ids"].numpy().tolist(), [[0, 1, 2, tokenizer.pad_token_id], [0, 1, 2, 3]])
        batch = tokenizer.pad(features, padding=True, return_tensors="tf")
        self.assertTrue(isinstance(batch["input_ids"], tf.Tensor))
        self.assertEqual(batch["input_ids"].numpy().tolist(), [[0, 1, 2, tokenizer.pad_token_id], [0, 1, 2, 3]])

    @require_tokenizers
    def test_instantiation_from_tokenizers(self):
        # A fast tokenizer can wrap a raw `tokenizers.Tokenizer` object directly.
        bert_tokenizer = Tokenizer(WordPiece(unk_token="[UNK]"))
        PreTrainedTokenizerFast(tokenizer_object=bert_tokenizer)

    @require_tokenizers
    def test_instantiation_from_tokenizers_json_file(self):
        # ... or be loaded back from a serialized tokenizer.json file.
        bert_tokenizer = Tokenizer(WordPiece(unk_token="[UNK]"))
        with tempfile.TemporaryDirectory() as tmpdirname:
            bert_tokenizer.save(os.path.join(tmpdirname, "tokenizer.json"))
            PreTrainedTokenizerFast(tokenizer_file=os.path.join(tmpdirname, "tokenizer.json"))