transformers
168 lines · 7.1 KB
1# coding=utf-8
2# Copyright 2023 The HuggingFace Inc. team.
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15"""
16Utility that checks the list of models in the tips in the task-specific pages of the doc is up to date and potentially
17fixes it.
18
19Use from the root of the repo with:
20
21```bash
22python utils/check_task_guides.py
23```
24
25for a check that will error in case of inconsistencies (used by `make repo-consistency`).
26
27To auto-fix issues run:
28
29```bash
30python utils/check_task_guides.py --fix_and_overwrite
31```
32
33which is used by `make fix-copies`.
34"""
import argparse
import os
from typing import List, Tuple

from transformers.utils import direct_transformers_import
39
40
# All paths are set with the intent you should run this script from the root of the repo with the command
# python utils/check_task_guides.py
TRANSFORMERS_PATH = "src/transformers"  # where the library sources live, relative to the repo root
PATH_TO_TASK_GUIDES = "docs/source/en/tasks"  # where the task-guide Markdown pages live
45
46
47def _find_text_in_file(filename: str, start_prompt: str, end_prompt: str) -> str:
48"""
49Find the text in filename between two prompts.
50
51Args:
52filename (`str`): The file to search into.
53start_prompt (`str`): A string to look for at the start of the content searched.
54end_prompt (`str`): A string that will mark the end of the content to look for.
55
56Returns:
57`str`: The content between the prompts.
58"""
59with open(filename, "r", encoding="utf-8", newline="\n") as f:
60lines = f.readlines()
61# Find the start prompt.
62start_index = 0
63while not lines[start_index].startswith(start_prompt):
64start_index += 1
65start_index += 1
66
67# Now go until the end prompt.
68end_index = start_index
69while not lines[end_index].startswith(end_prompt):
70end_index += 1
71end_index -= 1
72
73while len(lines[start_index]) <= 1:
74start_index += 1
75while len(lines[end_index]) <= 1:
76end_index -= 1
77end_index += 1
78return "".join(lines[start_index:end_index]), start_index, end_index, lines
79
80
# This is to make sure the transformers module imported is the one in the repo.
transformers_module = direct_transformers_import(TRANSFORMERS_PATH)

# Map between a task guide and the corresponding auto class.
TASK_GUIDE_TO_MODELS = {
    "asr.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_CTC_MAPPING_NAMES,
    "audio_classification.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING_NAMES,
    "language_modeling.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_CAUSAL_LM_MAPPING_NAMES,
    "image_classification.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING_NAMES,
    "masked_language_modeling.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_MASKED_LM_MAPPING_NAMES,
    "multiple_choice.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_MULTIPLE_CHOICE_MAPPING_NAMES,
    "object_detection.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_OBJECT_DETECTION_MAPPING_NAMES,
    "question_answering.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_QUESTION_ANSWERING_MAPPING_NAMES,
    "semantic_segmentation.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_SEMANTIC_SEGMENTATION_MAPPING_NAMES,
    "sequence_classification.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING_NAMES,
    "summarization.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING_NAMES,
    "token_classification.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING_NAMES,
    "translation.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING_NAMES,
    "video_classification.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_VIDEO_CLASSIFICATION_MAPPING_NAMES,
    "document_question_answering.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_DOCUMENT_QUESTION_ANSWERING_MAPPING_NAMES,
    "monocular_depth_estimation.md": transformers_module.models.auto.modeling_auto.MODEL_FOR_DEPTH_ESTIMATION_MAPPING_NAMES,
}

# This list contains model types used in some task guides that are not in `CONFIG_MAPPING_NAMES` (therefore not in any
# `MODEL_MAPPING_NAMES` or any `MODEL_FOR_XXX_MAPPING_NAMES`).
SPECIAL_TASK_GUIDE_TO_MODEL_TYPES = {
    "summarization.md": ("nllb",),
    "translation.md": ("nllb",),
}
110
111
def get_model_list_for_task(task_guide: str) -> str:
    """
    Return the list of models supporting a given task.

    Args:
        task_guide (`str`): The name of the task guide to check.

    Returns:
        `str`: The list of models supporting this task, as links to their respective doc pages separated by commas
        and terminated by a newline (so it can be written back into the doc file as one line).
    """
    model_mapping_names = TASK_GUIDE_TO_MODELS[task_guide]
    # Some guides mention model types that have no auto-mapping entry (e.g. NLLB); they are listed separately.
    special_model_types = SPECIAL_TASK_GUIDE_TO_MODEL_TYPES.get(task_guide, set())
    model_names = {
        code: name
        for code, name in transformers_module.MODEL_NAMES_MAPPING.items()
        if (code in model_mapping_names or code in special_model_types)
    }
    return ", ".join([f"[{name}](../model_doc/{code})" for code, name in model_names.items()]) + "\n"
130
131
def check_model_list_for_task(task_guide: str, overwrite: bool = False):
    """
    For a given task guide, checks the model list in the generated tip for consistency with the state of the lib and
    updates it if needed.

    Args:
        task_guide (`str`):
            The name of the task guide to check.
        overwrite (`bool`, *optional*, defaults to `False`):
            Whether or not to overwrite the table when it's not up to date.
    """
    guide_path = os.path.join(PATH_TO_TASK_GUIDES, task_guide)
    current_list, start_index, end_index, lines = _find_text_in_file(
        filename=guide_path,
        start_prompt="<!--This tip is automatically generated by `make fix-copies`, do not fill manually!-->",
        end_prompt="<!--End of the generated tip-->",
    )
    new_list = get_model_list_for_task(task_guide)

    # Nothing to do when the doc already matches the current state of the library.
    if current_list == new_list:
        return

    if not overwrite:
        raise ValueError(
            f"The list of models that can be used in the {task_guide} guide needs an update. Run `make fix-copies`"
            " to fix this."
        )

    # Splice the regenerated list between the two prompts, keeping the rest of the file untouched.
    with open(guide_path, "w", encoding="utf-8", newline="\n") as f:
        f.writelines(lines[:start_index] + [new_list] + lines[end_index:])
160
161
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--fix_and_overwrite", action="store_true", help="Whether to fix inconsistencies.")
    args = parser.parse_args()

    # Check (or fix, with --fix_and_overwrite) every task guide that has an associated auto-model mapping.
    for task_guide in TASK_GUIDE_TO_MODELS:
        check_model_list_for_task(task_guide, args.fix_and_overwrite)
169