peft

Форк
0
/
test_encoder_decoder_models.py 
212 строк · 10.3 Кб
1
# Copyright 2023-present the HuggingFace Inc. team.
2
#
3
# Licensed under the Apache License, Version 2.0 (the "License");
4
# you may not use this file except in compliance with the License.
5
# You may obtain a copy of the License at
6
#
7
#     http://www.apache.org/licenses/LICENSE-2.0
8
#
9
# Unless required by applicable law or agreed to in writing, software
10
# distributed under the License is distributed on an "AS IS" BASIS,
11
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
# See the License for the specific language governing permissions and
13
# limitations under the License.
14
import tempfile
15
import unittest
16

17
import torch
18
from parameterized import parameterized
19
from transformers import AutoModelForSeq2SeqLM, AutoModelForTokenClassification
20

21
from peft import LoraConfig, TaskType, get_peft_model
22

23
from .testing_common import PeftCommonTester, PeftTestConfigManager
24

25

26
# Tiny hub checkpoints used to keep the encoder-decoder test suite fast.
# One T5-style and one BART-style model cover both major seq2seq families.
PEFT_ENCODER_DECODER_MODELS_TO_TEST = [
    "ybelkada/tiny-random-T5ForConditionalGeneration-calibrated",
    "hf-internal-testing/tiny-random-BartForConditionalGeneration",
]

# Default parameter grid expanded by PeftTestConfigManager.get_grid_parameters;
# tests that need non-default adapter kwargs build their own grid inline instead.
FULL_GRID = {"model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST, "task_type": "SEQ_2_SEQ_LM"}
32

33

34
class PeftEncoderDecoderModelTester(unittest.TestCase, PeftCommonTester):
    r"""
    Test if the PeftModel behaves as expected. This includes:
    - test if the model has the expected methods

    We use parametrized.expand for debugging purposes to test each model individually.

    Each ``test_*`` method below is a thin wrapper around the corresponding
    ``_test_*`` helper inherited from ``PeftCommonTester``; ``parameterized.expand``
    generates one test case per (model, adapter-config) combination in the grid.
    """

    # Auto-model class used by PeftCommonTester helpers to load each model_id.
    transformers_class = AutoModelForSeq2SeqLM

    def prepare_inputs_for_testing(self):
        """Return a minimal batch (2 sequences of length 3) for forward/generate tests.

        NOTE(review): ``self.torch_device`` is presumably provided by
        ``PeftCommonTester`` — confirm in testing_common.
        """
        input_ids = torch.tensor([[1, 1, 1], [1, 2, 1]]).to(self.torch_device)
        decoder_input_ids = torch.tensor([[1, 1, 1], [1, 2, 1]]).to(self.torch_device)
        # Second sequence masks out its middle token to exercise padding handling.
        attention_mask = torch.tensor([[1, 1, 1], [1, 0, 1]]).to(self.torch_device)

        input_dict = {
            "input_ids": input_ids,
            "decoder_input_ids": decoder_input_ids,
            "attention_mask": attention_mask,
        }

        return input_dict

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_attributes_parametrized(self, test_name, model_id, config_cls, config_kwargs):
        self._test_model_attr(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_adapter_name(self, test_name, model_id, config_cls, config_kwargs):
        self._test_adapter_name(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_prepare_for_training_parametrized(self, test_name, model_id, config_cls, config_kwargs):
        self._test_prepare_for_training(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained(self, test_name, model_id, config_cls, config_kwargs):
        self._test_save_pretrained(model_id, config_cls, config_kwargs)

    # Same as test_save_pretrained but forcing the pickle (torch.save) path.
    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained_pickle(self, test_name, model_id, config_cls, config_kwargs):
        self._test_save_pretrained(model_id, config_cls, config_kwargs, safe_serialization=False)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained_selected_adapters(self, test_name, model_id, config_cls, config_kwargs):
        self._test_save_pretrained_selected_adapters(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained_selected_adapters_pickle(self, test_name, model_id, config_cls, config_kwargs):
        self._test_save_pretrained_selected_adapters(model_id, config_cls, config_kwargs, safe_serialization=False)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_from_pretrained_config_construction(self, test_name, model_id, config_cls, config_kwargs):
        self._test_from_pretrained_config_construction(model_id, config_cls, config_kwargs)

    # Custom grid: merge only makes sense for weight-modifying adapters, so
    # LoRA/IA3 are initialized with non-identity weights to make merges observable.
    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_merge_layers(self, test_name, model_id, config_cls, config_kwargs):
        self._test_merge_layers(model_id, config_cls, config_kwargs)

    # NOTE(review): comment says non-LoRA models are skipped, but the full grid is
    # used here — presumably the skipping happens inside _test_generate; confirm.
    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_generate(self, test_name, model_id, config_cls, config_kwargs):
        self._test_generate(model_id, config_cls, config_kwargs)

    # skip non lora models - generate does not work for prefix tuning, prompt tuning
    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_generate_pos_args(self, test_name, model_id, config_cls, config_kwargs):
        # positional arguments are not supported for PeftModelForSeq2SeqLM
        self._test_generate_pos_args(model_id, config_cls, config_kwargs, raises_err=True)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_generate_half_prec(self, test_name, model_id, config_cls, config_kwargs):
        self._test_generate_half_prec(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_prefix_tuning_half_prec_conversion(self, test_name, model_id, config_cls, config_kwargs):
        self._test_prefix_tuning_half_prec_conversion(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_encoder_decoders(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_encoder_decoders_layer_indexing(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training_layer_indexing(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_encoder_decoders_gradient_checkpointing(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training_gradient_checkpointing(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_inference_safetensors(self, test_name, model_id, config_cls, config_kwargs):
        self._test_inference_safetensors(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_peft_model_device_map(self, test_name, model_id, config_cls, config_kwargs):
        self._test_peft_model_device_map(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_delete_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_delete_adapter(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_delete_inactive_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_delete_inactive_adapter(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_adding_multiple_adapters_with_bias_raises(self, test_name, model_id, config_cls, config_kwargs):
        self._test_adding_multiple_adapters_with_bias_raises(model_id, config_cls, config_kwargs)

    # Custom grid: unloading is only meaningful for weight-modifying adapters
    # (LoRA/AdaLoRA/IA3), initialized non-identity so unload effects are visible.
    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "adalora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_unload_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_unload_adapter(model_id, config_cls, config_kwargs)

    # Custom grid: weighted adapter combination is a LoRA-only feature.
    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_weighted_combination_of_adapters(self, test_name, model_id, config_cls, config_kwargs):
        self._test_weighted_combination_of_adapters(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_prompt_learning_tasks(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training_prompt_learning_tasks(model_id, config_cls, config_kwargs)

    # Custom grid: disable_adapter should restore base-model outputs, which is
    # only detectable when adapter weights are initialized non-identity.
    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "adalora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_disable_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_disable_adapter(model_id, config_cls, config_kwargs)
196

197

198
class PeftEncoderDecoderCustomModelTester(unittest.TestCase):
    """
    A custom class to write any custom test related with Enc-Dec models
    """

    def test_save_shared_tensors(self):
        """Regression test: safetensors serialization must succeed for a LoRA model
        whose base model (RoBERTa) contains shared/tied tensors."""
        model_id = "hf-internal-testing/tiny-random-RobertaModel"
        # bias="all" makes bias tensors part of the adapter state dict as well.
        lora_settings = {
            "task_type": TaskType.TOKEN_CLS,
            "inference_mode": False,
            "r": 16,
            "lora_alpha": 16,
            "lora_dropout": 0.1,
            "bias": "all",
        }
        base_model = AutoModelForTokenClassification.from_pretrained(model_id, num_labels=11)
        peft_model = get_peft_model(base_model, LoraConfig(**lora_settings))
        with tempfile.TemporaryDirectory() as tmp_dir:
            # This should work fine
            peft_model.save_pretrained(tmp_dir, safe_serialization=True)
213

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.