test_tokenization_roformer.py
# coding=utf-8
# Copyright 2021 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import tempfile
import unittest

from transformers import RoFormerTokenizer, RoFormerTokenizerFast
from transformers.testing_utils import require_rjieba, require_tokenizers

from ...test_tokenization_common import TokenizerTesterMixin


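# Tests for the RoFormer tokenizers. @require_rjieba is needed because the
# slow tokenizer pre-segments Chinese text into words with the rjieba library
# before vocabulary lookup; @require_tokenizers guards the Rust-backed fast
# tokenizer.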
@require_rjieba
@require_tokenizers
class RoFormerTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
    tokenizer_class = RoFormerTokenizer
    rust_tokenizer_class = RoFormerTokenizerFast
    space_between_special_tokens = True
    test_rust_tokenizer = True

    def setUp(self):
        super().setUp()

    def get_tokenizer(self, **kwargs):
        return self.tokenizer_class.from_pretrained("junnyu/roformer_chinese_base", **kwargs)

    def get_rust_tokenizer(self, **kwargs):
        return self.rust_tokenizer_class.from_pretrained("junnyu/roformer_chinese_base", **kwargs)

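    # The rjieba word segmentation is visible in the expected output: segments
    # found in the vocabulary (永和, 服装, 饰品, 有限公司, 非常) are kept as whole
    # words, while the remaining text (今天天气) falls back to single characters.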
    def get_chinese_input_output_texts(self):
        input_text = "永和服装饰品有限公司,今天天气非常好"
        output_text = "永和 服装 饰品 有限公司 , 今 天 天 气 非常 好"
        return input_text, output_text

    def test_tokenizer(self):
        tokenizer = self.get_tokenizer()
        input_text, output_text = self.get_chinese_input_output_texts()
        tokens = tokenizer.tokenize(input_text)

        self.assertListEqual(tokens, output_text.split())

        input_tokens = tokens + [tokenizer.unk_token]
        exp_tokens = [22943, 21332, 34431, 45904, 117, 306, 1231, 1231, 2653, 33994, 1266, 100]
        self.assertListEqual(tokenizer.convert_tokens_to_ids(input_tokens), exp_tokens)

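    # Same checks as test_tokenizer, but against the Rust-backed fast
    # tokenizer; the slow and fast implementations must agree on both tokens
    # and ids.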
    def test_rust_tokenizer(self):
        tokenizer = self.get_rust_tokenizer()
        input_text, output_text = self.get_chinese_input_output_texts()
        tokens = tokenizer.tokenize(input_text)
        self.assertListEqual(tokens, output_text.split())
        input_tokens = tokens + [tokenizer.unk_token]
        exp_tokens = [22943, 21332, 34431, 45904, 117, 306, 1231, 1231, 2653, 33994, 1266, 100]
        self.assertListEqual(tokenizer.convert_tokens_to_ids(input_tokens), exp_tokens)

    # a new tokenizer can't be trained for RoFormer via the Tokenizers library,
    # so the inherited mixin test is disabled
    def test_training_new_tokenizer(self):
        pass

    # a new tokenizer can't be trained for RoFormer via the Tokenizers library,
    # so the inherited mixin test is disabled
    def test_training_new_tokenizer_with_special_tokens_change(self):
        pass

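    # Round-trip regression test: encodings must survive save_pretrained /
    # from_pretrained for both the slow and fast classes. With the
    # alchemab/antiberta2 checkpoint the Chinese input has no vocabulary
    # coverage, so every character encodes to the same id (4, presumably this
    # checkpoint's unknown token) between the special token ids 1 and 2.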
    def test_save_slow_from_fast_and_reload_fast(self):
        for cls in [RoFormerTokenizer, RoFormerTokenizerFast]:
            original = cls.from_pretrained("alchemab/antiberta2")
            self.assertEqual(original.encode("生活的真谛是"), [1, 4, 4, 4, 4, 4, 4, 2])

            with tempfile.TemporaryDirectory() as tmp_dir:
                original.save_pretrained(tmp_dir)
                new = cls.from_pretrained(tmp_dir)
            self.assertEqual(new.encode("生活的真谛是"), [1, 4, 4, 4, 4, 4, 4, 2])
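# A minimal way to run this module locally, assuming a development checkout of
# transformers with rjieba and tokenizers installed (the path reflects the
# current repository layout and may differ across versions):
#   python -m pytest tests/models/roformer/test_tokenization_roformer.py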
