from peft import LoraConfig, get_peft_model
class ModelWithModuleDict(nn.Module):
    """Toy model whose ``module`` attribute is an ``nn.ModuleDict``.

    Used to verify that pointing ``modules_to_save`` at a ModuleDict raises.
    """

    def __init__(self):
        super().__init__()
        self.other_layer = nn.Linear(10, 10)
        self.module = nn.ModuleDict({"foo": nn.Linear(10, 10)})

    def forward(self):
        # Takes no input on purpose: the test only needs a callable model.
        return self.module["foo"](torch.rand(1, 10))
class ModelWithModuleList(nn.Module):
    """Toy model whose ``module`` attribute is an ``nn.ModuleList``.

    Used to verify that pointing ``modules_to_save`` at a ModuleList raises.
    """

    def __init__(self):
        super().__init__()
        self.other_layer = nn.Linear(10, 10)
        self.module = nn.ModuleList([nn.Linear(10, 10)])

    def forward(self):
        # Takes no input on purpose: the test only needs a callable model.
        return self.module[0](torch.rand(1, 10))
class ModelWithParameterDict(nn.Module):
    """Toy model whose ``module`` attribute is an ``nn.ParameterDict``.

    Used to verify that pointing ``modules_to_save`` at a ParameterDict raises.
    """

    def __init__(self):
        super().__init__()
        self.other_layer = nn.Linear(10, 10)
        self.module = nn.ParameterDict({"foo": nn.Parameter(torch.rand(10, 10))})

    def forward(self):
        # Returns the raw parameter; no computation is needed for the test.
        return self.module["foo"]
class ModelWithParameterList(nn.Module):
    """Toy model whose ``module`` attribute is an ``nn.ParameterList``.

    Used to verify that pointing ``modules_to_save`` at a ParameterList raises.
    """

    def __init__(self):
        super().__init__()
        self.other_layer = nn.Linear(10, 10)
        self.module = nn.ParameterList([nn.Parameter(torch.rand(10, 10))])

    def forward(self):
        # NOTE(review): the original forward was lost in extraction; this
        # mirrors the sibling ParameterDict class — confirm against upstream.
        return self.module[0]
@pytest.mark.parametrize(
    "cls", [ModelWithModuleDict, ModelWithModuleList, ModelWithParameterDict, ModelWithParameterList]
)
def test_modules_to_save_targets_module_dict_raises(cls):
    """``modules_to_save`` must reject container modules.

    ModuleDict/ModuleList/ParameterDict/ParameterList cannot be wrapped by
    ``ModulesToSaveWrapper``, so ``get_peft_model`` should raise a TypeError.
    """
    model = cls()
    peft_config = LoraConfig(
        target_modules=["other_layer"],
        modules_to_save=["module"],
    )
    # pytest.raises matches `msg` as a regex against the error message.
    msg = "modules_to_save cannot be applied to modules of type"
    with pytest.raises(TypeError, match=msg):
        get_peft_model(model=model, peft_config=peft_config)