transformers

Форк
0
/
check_support_list.py 
95 строк · 3.7 Кб
1
# coding=utf-8
2
# Copyright 2023 The HuggingFace Inc. team.
3
#
4
# Licensed under the Apache License, Version 2.0 (the "License");
5
# you may not use this file except in compliance with the License.
6
# You may obtain a copy of the License at
7
#
8
#     http://www.apache.org/licenses/LICENSE-2.0
9
#
10
# Unless required by applicable law or agreed to in writing, software
11
# distributed under the License is distributed on an "AS IS" BASIS,
12
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
# See the License for the specific language governing permissions and
14
# limitations under the License.
15
"""
16
Utility that checks the supports of 3rd party libraries are listed in the documentation file. Currently, this includes:
17
- flash attention support
18
- SDPA support
19

20
Use from the root of the repo with (as used in `make repo-consistency`):
21

22
```bash
23
python utils/check_support_list.py
24
```
25

26
It has no auto-fix mode.
27
"""
28
import os
29
from glob import glob
30

31

32
# All paths are set with the intent you should run this script from the root of the repo with the command
33
# python utils/check_doctest_list.py
34
REPO_PATH = "."
35

36

37
def check_flash_support_list():
    """Check that every architecture declaring FlashAttention-2 support is documented.

    Scans all PyTorch modeling files under `src/transformers/models` (TF and Flax
    variants are excluded) for `_supports_flash_attn_2 = True`, and verifies that each
    matching architecture name appears in the FlashAttention-2 section of
    `docs/source/en/perf_infer_gpu_one.md`.

    Raises:
        ValueError: if one or more supporting architectures are missing from the
            documentation list.
    """
    doc_path = os.path.join(REPO_PATH, "docs/source/en/perf_infer_gpu_one.md")
    with open(doc_path, "r", encoding="utf-8") as f:
        doctext = f.read()
        # Restrict the text to the section listing FlashAttention-2-compatible architectures.
        doctext = doctext.split("FlashAttention-2 is currently supported for the following architectures:")[1]
        doctext = doctext.split("You can request to add FlashAttention-2 support")[0]

    # NOTE(review): `**` without `recursive=True` matches exactly one directory level, which
    # happens to fit the `models/<name>/` layout — confirm if the layout ever deepens.
    patterns = glob(os.path.join(REPO_PATH, "src/transformers/models/**/modeling_*.py"))
    patterns_tf = glob(os.path.join(REPO_PATH, "src/transformers/models/**/modeling_tf_*.py"))
    patterns_flax = glob(os.path.join(REPO_PATH, "src/transformers/models/**/modeling_flax_*.py"))
    # Keep only the PyTorch modeling files.
    patterns = list(set(patterns) - set(patterns_tf) - set(patterns_flax))

    archs_supporting_fa2 = []
    for filename in patterns:
        with open(filename, "r", encoding="utf-8") as f:
            text = f.read()
        if "_supports_flash_attn_2 = True" in text:
            # e.g. "modeling_llama.py" -> "llama"
            model_name = os.path.basename(filename).replace(".py", "").replace("modeling_", "")
            archs_supporting_fa2.append(model_name)

    # Collect every missing architecture and report them all at once (sorted for a
    # deterministic message), instead of failing on the first one found.
    missing = sorted(arch for arch in archs_supporting_fa2 if arch not in doctext)
    if missing:
        raise ValueError(
            f"{', '.join(missing)} should be listed in the flash attention documentation but "
            "are not. Please update the documentation."
        )
62

63

64
def check_sdpa_support_list():
    """Check that every architecture declaring SDPA support is documented.

    Scans all PyTorch modeling files under `src/transformers/models` (TF and Flax
    variants are excluded) for `_supports_sdpa = True`, and verifies that each matching
    architecture name appears in the SDPA section of
    `docs/source/en/perf_infer_gpu_one.md`.

    Raises:
        ValueError: if one or more supporting architectures are missing from the
            documentation list.
    """
    doc_path = os.path.join(REPO_PATH, "docs/source/en/perf_infer_gpu_one.md")
    with open(doc_path, "r", encoding="utf-8") as f:
        doctext = f.read()
        # Restrict the text to the section listing SDPA-compatible architectures.
        doctext = doctext.split(
            "For now, Transformers supports SDPA inference and training for the following architectures:"
        )[1]
        doctext = doctext.split("Note that FlashAttention can only be used for models using the")[0]

    # NOTE(review): `**` without `recursive=True` matches exactly one directory level, which
    # happens to fit the `models/<name>/` layout — confirm if the layout ever deepens.
    patterns = glob(os.path.join(REPO_PATH, "src/transformers/models/**/modeling_*.py"))
    patterns_tf = glob(os.path.join(REPO_PATH, "src/transformers/models/**/modeling_tf_*.py"))
    patterns_flax = glob(os.path.join(REPO_PATH, "src/transformers/models/**/modeling_flax_*.py"))
    # Keep only the PyTorch modeling files.
    patterns = list(set(patterns) - set(patterns_tf) - set(patterns_flax))

    archs_supporting_sdpa = []
    for filename in patterns:
        with open(filename, "r", encoding="utf-8") as f:
            text = f.read()
        if "_supports_sdpa = True" in text:
            # e.g. "modeling_llama.py" -> "llama"
            model_name = os.path.basename(filename).replace(".py", "").replace("modeling_", "")
            archs_supporting_sdpa.append(model_name)

    # Collect every missing architecture and report them all at once (sorted for a
    # deterministic message), instead of failing on the first one found.
    missing = sorted(arch for arch in archs_supporting_sdpa if arch not in doctext)
    if missing:
        raise ValueError(
            f"{', '.join(missing)} should be listed in the SDPA documentation but are not. "
            "Please update the documentation."
        )
91

92

93
if __name__ == "__main__":
    # Run both documentation-consistency checks; each raises ValueError with the
    # offending architecture name(s) if the docs are out of date.
    check_flash_support_list()
    check_sdpa_support_list()
96

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.