# Copyright 2023-present the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import unittest

import torch
from parameterized import parameterized
from transformers import AutoModelForSeq2SeqLM, AutoModelForTokenClassification

from peft import LoraConfig, TaskType, get_peft_model

from .testing_common import PeftCommonTester, PeftTestConfigManager
# Tiny encoder-decoder checkpoints used to keep the parametrized tests fast.
PEFT_ENCODER_DECODER_MODELS_TO_TEST = [
    "ybelkada/tiny-random-T5ForConditionalGeneration-calibrated",
    "hf-internal-testing/tiny-random-BartForConditionalGeneration",
]

# Default parameter grid: every model above, exercised as a seq2seq LM task.
FULL_GRID = {"model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST, "task_type": "SEQ_2_SEQ_LM"}
class PeftEncoderDecoderModelTester(unittest.TestCase, PeftCommonTester):
    """
    Test if the PeftModel behaves as expected. This includes:
    - test if the model has the expected methods

    We use parametrized.expand for debugging purposes to test each model individually.
    """

    # Model loader used by PeftCommonTester helpers for all encoder-decoder tests.
    transformers_class = AutoModelForSeq2SeqLM

    def prepare_inputs_for_testing(self):
        """Return a minimal batch of dummy seq2seq inputs on the test device."""
        input_ids = torch.tensor([[1, 1, 1], [1, 2, 1]]).to(self.torch_device)
        decoder_input_ids = torch.tensor([[1, 1, 1], [1, 2, 1]]).to(self.torch_device)
        attention_mask = torch.tensor([[1, 1, 1], [1, 0, 1]]).to(self.torch_device)

        return {
            "input_ids": input_ids,
            "decoder_input_ids": decoder_input_ids,
            "attention_mask": attention_mask,
        }

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_attributes_parametrized(self, test_name, model_id, config_cls, config_kwargs):
        self._test_model_attr(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_adapter_name(self, test_name, model_id, config_cls, config_kwargs):
        self._test_adapter_name(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_prepare_for_training_parametrized(self, test_name, model_id, config_cls, config_kwargs):
        self._test_prepare_for_training(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained(self, test_name, model_id, config_cls, config_kwargs):
        self._test_save_pretrained(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained_pickle(self, test_name, model_id, config_cls, config_kwargs):
        # Exercise the legacy (non-safetensors) serialization path.
        self._test_save_pretrained(model_id, config_cls, config_kwargs, safe_serialization=False)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained_selected_adapters(self, test_name, model_id, config_cls, config_kwargs):
        self._test_save_pretrained_selected_adapters(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_save_pretrained_selected_adapters_pickle(self, test_name, model_id, config_cls, config_kwargs):
        self._test_save_pretrained_selected_adapters(model_id, config_cls, config_kwargs, safe_serialization=False)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_from_pretrained_config_construction(self, test_name, model_id, config_cls, config_kwargs):
        self._test_from_pretrained_config_construction(model_id, config_cls, config_kwargs)

    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                # Random (non-identity) init so merging actually changes the weights.
                "lora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_merge_layers(self, test_name, model_id, config_cls, config_kwargs):
        self._test_merge_layers(model_id, config_cls, config_kwargs)

    # skip non lora models - generate does not work for prefix tuning, prompt tuning
    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_generate(self, test_name, model_id, config_cls, config_kwargs):
        self._test_generate(model_id, config_cls, config_kwargs)

    # skip non lora models - generate does not work for prefix tuning, prompt tuning
    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_generate_pos_args(self, test_name, model_id, config_cls, config_kwargs):
        # positional arguments are not supported for PeftModelForSeq2SeqLM
        self._test_generate_pos_args(model_id, config_cls, config_kwargs, raises_err=True)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_generate_half_prec(self, test_name, model_id, config_cls, config_kwargs):
        self._test_generate_half_prec(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_prefix_tuning_half_prec_conversion(self, test_name, model_id, config_cls, config_kwargs):
        self._test_prefix_tuning_half_prec_conversion(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_encoder_decoders(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_encoder_decoders_layer_indexing(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training_layer_indexing(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_encoder_decoders_gradient_checkpointing(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training_gradient_checkpointing(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_inference_safetensors(self, test_name, model_id, config_cls, config_kwargs):
        self._test_inference_safetensors(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_peft_model_device_map(self, test_name, model_id, config_cls, config_kwargs):
        self._test_peft_model_device_map(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_delete_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_delete_adapter(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_delete_inactive_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_delete_inactive_adapter(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_adding_multiple_adapters_with_bias_raises(self, test_name, model_id, config_cls, config_kwargs):
        self._test_adding_multiple_adapters_with_bias_raises(model_id, config_cls, config_kwargs)

    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "adalora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_unload_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_unload_adapter(model_id, config_cls, config_kwargs)

    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_weighted_combination_of_adapters(self, test_name, model_id, config_cls, config_kwargs):
        self._test_weighted_combination_of_adapters(model_id, config_cls, config_kwargs)

    @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
    def test_training_prompt_learning_tasks(self, test_name, model_id, config_cls, config_kwargs):
        self._test_training_prompt_learning_tasks(model_id, config_cls, config_kwargs)

    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_ENCODER_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "adalora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "SEQ_2_SEQ_LM",
            },
        )
    )
    def test_disable_adapter(self, test_name, model_id, config_cls, config_kwargs):
        self._test_disable_adapter(model_id, config_cls, config_kwargs)
class PeftEncoderDecoderCustomModelTester(unittest.TestCase):
    """
    A custom class to write any custom test related with Enc-Dec models
    """

    def test_save_shared_tensors(self):
        # Regression test: saving a model with shared tensors (tied weights)
        # must not fail under safetensors serialization.
        model_id = "hf-internal-testing/tiny-random-RobertaModel"
        peft_config = LoraConfig(
            task_type=TaskType.TOKEN_CLS, inference_mode=False, r=16, lora_alpha=16, lora_dropout=0.1, bias="all"
        )
        model = AutoModelForTokenClassification.from_pretrained(model_id, num_labels=11)
        model = get_peft_model(model, peft_config)
        with tempfile.TemporaryDirectory() as tmp_dir:
            # This should work fine
            model.save_pretrained(tmp_dir, safe_serialization=True)