#!/usr/bin/env python3

# coding=utf-8
# Copyright 2023-present the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
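# Tests for PEFT's low-level API: injecting a LoRA adapter directly into an
# existing torch.nn.Module and collecting the adapter state dict.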
import unittest

import torch

from peft import LoraConfig, get_peft_model_state_dict, inject_adapter_in_model
from peft.utils import ModulesToSaveWrapper


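# Minimal model with an embedding, a linear projection, and an LM head; the
# tests inject a LoRA adapter into its `linear` module.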
class DummyModel(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.embedding = torch.nn.Embedding(10, 10)
        self.linear = torch.nn.Linear(10, 10)
        self.lm_head = torch.nn.Linear(10, 10)

    def forward(self, input_ids):
        x = self.embedding(input_ids)
        x = self.linear(x)
        x = self.lm_head(x)
        return x


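# setUp runs before each test: it builds a fresh DummyModel and injects a
# LoRA adapter targeting the `linear` module.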
class TestPeft(unittest.TestCase):
    def setUp(self):
        self.model = DummyModel()

        lora_config = LoraConfig(
            lora_alpha=16,
            lora_dropout=0.1,
            r=64,
            bias="none",
            target_modules=["linear"],
        )

        # inject_adapter_in_model edits the model in place and returns it
        # without wrapping it in a PeftModel.
        self.model = inject_adapter_in_model(lora_config, self.model)

    def test_inject_adapter_in_model(self):
        dummy_inputs = torch.LongTensor([[0, 1, 2, 3, 4, 5, 6, 7]])
        _ = self.model(dummy_inputs)

        # The targeted `linear` module should now carry the low-rank LoRA
        # matrices A and B.
        for name, module in self.model.named_modules():
            if name == "linear":
                assert hasattr(module, "lora_A")
                assert hasattr(module, "lora_B")

    def test_get_peft_model_state_dict(self):
        # The PEFT state dict should contain only adapter weights, so every
        # key is a LoRA key.
        peft_state_dict = get_peft_model_state_dict(self.model)

        for key in peft_state_dict.keys():
            assert "lora" in key

    def test_modules_to_save(self):
        self.model = DummyModel()

        lora_config = LoraConfig(
            lora_alpha=16,
            lora_dropout=0.1,
            r=64,
            bias="none",
            target_modules=["linear"],
            modules_to_save=["embedding"],
        )

        self.model = inject_adapter_in_model(lora_config, self.model)

        for name, module in self.model.named_modules():
            if name == "linear":
                assert hasattr(module, "lora_A")
                assert hasattr(module, "lora_B")
            elif name == "embedding":
                # Modules listed in `modules_to_save` are wrapped so that a
                # trainable copy is kept alongside the frozen original.
                assert isinstance(module, ModulesToSaveWrapper)

        state_dict = get_peft_model_state_dict(self.model)

        # Besides the LoRA keys, the state dict includes the full weights of
        # the modules marked as `modules_to_save`.
        assert "embedding.weight" in state_dict.keys()

        assert hasattr(self.model.embedding, "weight")
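
# Assumed convenience addition (not part of the file above): lets the tests
# run directly with `python test_low_level_api.py`; unittest/pytest discovery
# does not require it.
if __name__ == "__main__":
    unittest.main()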
