# Copyright 2023-present the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest

import torch

from peft import LoraConfig, get_peft_model_state_dict, inject_adapter_in_model
from peft.utils import ModulesToSaveWrapper
class DummyModel(torch.nn.Module):
    """Minimal embedding -> linear -> lm_head model used as a LoRA injection target.

    All layers are sized 10x10 so the tests below stay tiny and fast; ``linear``
    is the module the LoRA config targets and ``embedding`` is the module saved
    via ``modules_to_save``.
    """

    def __init__(self):
        super().__init__()
        self.embedding = torch.nn.Embedding(10, 10)
        self.linear = torch.nn.Linear(10, 10)
        self.lm_head = torch.nn.Linear(10, 10)

    def forward(self, input_ids):
        """Run input ids through embedding, linear, and lm_head in sequence.

        Args:
            input_ids: integer tensor of token ids, shape ``(batch, seq_len)``.

        Returns:
            Float tensor of shape ``(batch, seq_len, 10)``.
        """
        # NOTE(review): the source was truncated after the embedding lookup;
        # the pass through the remaining two layers is reconstructed here —
        # confirm against the upstream file.
        x = self.embedding(input_ids)
        x = self.linear(x)
        x = self.lm_head(x)
        return x
class TestPeft(unittest.TestCase):
    """Tests for the low-level PEFT API: adapter injection, state-dict
    extraction, and ``modules_to_save`` wrapping."""

    def setUp(self):
        # Fresh model with a LoRA adapter injected into its `linear` layer.
        # NOTE(review): the LoraConfig keyword arguments other than
        # `target_modules` were lost to truncation; the values below are the
        # conventional ones from the upstream test — confirm against the
        # original file.
        self.model = DummyModel()

        lora_config = LoraConfig(
            lora_alpha=16,
            lora_dropout=0.1,
            r=64,
            bias="none",
            target_modules=["linear"],
        )

        self.model = inject_adapter_in_model(lora_config, self.model)

    def test_inject_adapter_in_model(self):
        # A forward pass must still work after injection, and the targeted
        # `linear` module must have gained LoRA A/B sub-layers.
        dummy_inputs = torch.LongTensor([[0, 1, 2, 3, 4, 5, 6, 7]])
        _ = self.model(dummy_inputs)

        for name, module in self.model.named_modules():
            if name == "linear":
                assert hasattr(module, "lora_A")
                assert hasattr(module, "lora_B")

    def test_get_peft_model_state_dict(self):
        # The PEFT state dict should contain only adapter (LoRA) weights.
        peft_state_dict = get_peft_model_state_dict(self.model)

        for key in peft_state_dict.keys():
            assert "lora" in key

    def test_modules_to_save(self):
        # Rebuild the model with `embedding` listed in modules_to_save: the
        # embedding must be wrapped in ModulesToSaveWrapper and its weight
        # must appear in the PEFT state dict alongside the LoRA weights.
        self.model = DummyModel()

        lora_config = LoraConfig(
            lora_alpha=16,
            lora_dropout=0.1,
            r=64,
            bias="none",
            target_modules=["linear"],
            modules_to_save=["embedding"],
        )

        self.model = inject_adapter_in_model(lora_config, self.model)

        for name, module in self.model.named_modules():
            if name == "linear":
                assert hasattr(module, "lora_A")
                assert hasattr(module, "lora_B")
            elif name == "embedding":
                assert isinstance(module, ModulesToSaveWrapper)

        state_dict = get_peft_model_state_dict(self.model)

        assert "embedding.weight" in state_dict.keys()

        assert hasattr(self.model.embedding, "weight")