optimum-habana

# coding=utf-8
# Copyright 2022 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# Mapping between model families and (model name, Gaudi configuration name) pairs used in the tests
MODELS_TO_TEST_MAPPING = {
    "bert": [
        # ("bert-base-uncased", "Habana/bert-base-uncased"),
        ("bert-large-uncased-whole-word-masking", "Habana/bert-large-uncased-whole-word-masking"),
    ],
    "roberta": [
        ("roberta-base", "Habana/roberta-base"),
        ("roberta-large", "Habana/roberta-large"),
    ],
    "albert": [
        ("albert-large-v2", "Habana/albert-large-v2"),
        ("albert-xxlarge-v1", "Habana/albert-xxlarge-v1"),
    ],
    "distilbert": [
        ("distilbert-base-uncased", "Habana/distilbert-base-uncased"),
    ],
    "gpt2": [
        ("gpt2", "Habana/gpt2"),
        ("gpt2-xl", "Habana/gpt2"),
    ],
    "t5": [
        ("t5-small", "Habana/t5"),
        ("google/flan-t5-xxl", "Habana/t5"),
    ],
    "vit": [
        ("google/vit-base-patch16-224-in21k", "Habana/vit"),
    ],
    "wav2vec2": [
        ("facebook/wav2vec2-base", "Habana/wav2vec2"),
        ("facebook/wav2vec2-large-lv60", "Habana/wav2vec2"),
    ],
    "swin": [("microsoft/swin-base-patch4-window7-224-in22k", "Habana/swin")],
    "clip": [("./clip-roberta", "Habana/clip")],
    "bridgetower": [("BridgeTower/bridgetower-large-itm-mlm-itc", "Habana/clip")],
    "gpt_neox": [("EleutherAI/gpt-neox-20b", "Habana/gpt2")],
    "llama": [("huggyllama/llama-7b", "Habana/gpt2")],
    "falcon": [("tiiuae/falcon-40b", "Habana/gpt2")],
    "bloom": [("bigscience/bloom-7b1", "Habana/roberta-base")],
    "whisper": [("openai/whisper-small", "Habana/whisper")],
}

MODELS_TO_TEST_FOR_QUESTION_ANSWERING = [
    "bert",
    "roberta",
    "albert",
    "distilbert",
]

# Only BERT has been officially validated for sequence classification
MODELS_TO_TEST_FOR_SEQUENCE_CLASSIFICATION = [
    "bert",
    # "roberta",
    # "albert",
    # "distilbert",
]

MODELS_TO_TEST_FOR_CAUSAL_LANGUAGE_MODELING = ["gpt2", "gpt_neox", "bloom"]

MODELS_TO_TEST_FOR_SEQ2SEQ = ["t5"]

MODELS_TO_TEST_FOR_IMAGE_CLASSIFICATION = ["vit", "swin"]

# Only RoBERTa is tested in CI for MLM
MODELS_TO_TEST_FOR_MASKED_LANGUAGE_MODELING = [
    # "bert",
    "roberta",
    # "albert",
    # "distilbert",
]

MODELS_TO_TEST_FOR_AUDIO_CLASSIFICATION = ["wav2vec2"]

MODELS_TO_TEST_FOR_SPEECH_RECOGNITION = ["wav2vec2", "whisper"]

MODELS_TO_TEST_FOR_IMAGE_TEXT = ["clip"]
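

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the per-task lists
# above are typically resolved against MODELS_TO_TEST_MAPPING to obtain the
# (model name, Gaudi configuration name) pairs to exercise, e.g. when
# parametrizing tests. The helper name `_models_for_task` and the usage
# example below are hypothetical, shown only to clarify how the data is laid out.
# ---------------------------------------------------------------------------
def _models_for_task(model_families):
    """Flatten the given model families into (model_name, gaudi_config_name) pairs."""
    return [pair for family in model_families for pair in MODELS_TO_TEST_MAPPING[family]]


# Example (assumed usage):
# _models_for_task(MODELS_TO_TEST_FOR_QUESTION_ANSWERING)
# -> [("bert-large-uncased-whole-word-masking", "Habana/bert-large-uncased-whole-word-masking"),
#     ("roberta-base", "Habana/roberta-base"), ...]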