# Owner(s): ["module: fx"]

import unittest
import torch
import torch.fx

from torch.testing._internal.common_utils import TestCase


class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrix = self.get_mul_matrix()
        if self.no_relu():
            return torch.mm(x, matrix)
        else:
            return torch.relu(torch.mm(x, matrix))

    def get_mul_matrix(self):
        return self.param

    def no_relu(self):
        raise NotImplementedError("no_relu must be implemented by subclasses")
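

# Each subclass below routes the branch in forward() through a different
# constant parameter-shape accessor: .shape, .size(), .dim(), .ndim,
# .numel(), and .nelement().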

class MyModuleParamShape(MyModuleBase):
    def __init__(self, in_channels):
        super().__init__()
        self.param = torch.nn.Parameter(torch.randn(in_channels, 3))

    def no_relu(self):
        return self.param.shape[0] < 10


class MyModuleParamSize(MyModuleBase):
    def __init__(self, in_channels):
        super().__init__()
        self.param = torch.nn.Parameter(torch.randn(in_channels, 3))

    def no_relu(self):
        return self.param.size()[0] < 10


class MyModuleParamDim(MyModuleBase):
    def __init__(self, param):
        super().__init__()
        self.param = param

    def get_mul_matrix(self):
        return self.param[0] if (self.param.dim() == 3) else self.param

    def no_relu(self):
        return self.param.dim() == 3


class MyModuleParamNDim(MyModuleBase):
    def __init__(self, param):
        super().__init__()
        self.param = param

    def get_mul_matrix(self):
        return self.param[0] if (self.param.ndim == 3) else self.param

    def no_relu(self):
        return self.param.ndim == 3


class MyModuleParamNumEl(MyModuleBase):
    def __init__(self, in_channels):
        super().__init__()
        self.param = torch.nn.Parameter(torch.randn(in_channels, 3))

    def no_relu(self):
        return self.param.numel() < 10 * 3


class MyModuleParamNElement(MyModuleBase):
    def __init__(self, in_channels):
        super().__init__()
        self.param = torch.nn.Parameter(torch.randn(in_channels, 3))

    def no_relu(self):
        return self.param.nelement() < 10 * 3


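# A minimal standalone sketch (added for illustration; the helper name
# _demo_param_shapes_constant is hypothetical, not part of the original test)
# of what param_shapes_constant=True does: the tracer evaluates parameter
# shape accessors eagerly, so no_relu() returns a concrete bool at trace time
# and only one branch of forward() is baked into the graph.
def _demo_param_shapes_constant():
    mod = MyModuleParamShape(in_channels=5)  # shape[0] == 5 < 10 -> mm-only branch
    tracer = torch.fx.Tracer(param_shapes_constant=True)
    graph = tracer.trace(mod)
    # The relu branch was never taken during tracing, so no relu node exists.
    assert torch.relu not in [n.target for n in graph.nodes]

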
class TestConstParamShapeInControlFlow(TestCase):

    def verify_mm_relu_mods(self, mm_only_mod, relu_mod):
        """
        Verify that one module performs only an mm op while the other
        performs mm followed by relu.
        """
        x = torch.randn(10, 5)
        torch.testing.assert_close(mm_only_mod(x), torch.mm(x, mm_only_mod.get_mul_matrix()))
        # param_shapes_constant=True makes the tracer evaluate parameter shape
        # accessors eagerly, so no_relu() yields a concrete bool during tracing.
        tracer = torch.fx.Tracer(param_shapes_constant=True)
        traced_graph = tracer.trace(mm_only_mod)

        # Verify the traced graph module computes the same result.
        graph_mod_mm = torch.fx.GraphModule(mm_only_mod, traced_graph)
        torch.testing.assert_close(graph_mod_mm(x), torch.mm(x, mm_only_mod.get_mul_matrix()))

        # Use a module with a different parameter shape to go down the other
        # code path.
        x = torch.randn(10, 15)
        torch.testing.assert_close(relu_mod(x), torch.relu(torch.mm(x, relu_mod.get_mul_matrix())))

        tracer2 = torch.fx.Tracer(param_shapes_constant=True)
        traced_graph2 = tracer2.trace(relu_mod)

        # Verify the traced graph module computes the same result.
        graph_mod_relu = torch.fx.GraphModule(relu_mod, traced_graph2)
        torch.testing.assert_close(graph_mod_relu(x), torch.relu(torch.mm(x, relu_mod.get_mul_matrix())))

        graph1_node_targets = [n.target for n in traced_graph.nodes]
        graph2_node_targets = [n.target for n in traced_graph2.nodes]

        # The second graph has an extra relu function call node.
        assert torch.mm in graph1_node_targets and torch.mm in graph2_node_targets
        assert torch.relu not in graph1_node_targets and torch.relu in graph2_node_targets

    def test_param_shape_const(self):
        mymod = MyModuleParamShape(in_channels=5)
        mymod2 = MyModuleParamShape(in_channels=15)
        self.verify_mm_relu_mods(mymod, mymod2)

    def test_param_size_const(self):
        mymod = MyModuleParamSize(in_channels=5)
        mymod2 = MyModuleParamSize(in_channels=15)
        self.verify_mm_relu_mods(mymod, mymod2)

    def test_param_dim_const(self):
        mymod = MyModuleParamDim(torch.nn.Parameter(torch.randn(2, 5, 3)))
        mymod2 = MyModuleParamDim(torch.nn.Parameter(torch.randn(15, 3)))
        self.verify_mm_relu_mods(mymod, mymod2)

    def test_param_ndim_const(self):
        mymod = MyModuleParamNDim(torch.nn.Parameter(torch.randn(2, 5, 3)))
        mymod2 = MyModuleParamNDim(torch.nn.Parameter(torch.randn(15, 3)))
        self.verify_mm_relu_mods(mymod, mymod2)

    def test_param_numel_const(self):
        mymod = MyModuleParamNumEl(in_channels=5)
        mymod2 = MyModuleParamNumEl(in_channels=15)
        self.verify_mm_relu_mods(mymod, mymod2)

    def test_param_nelement_const(self):
        mymod = MyModuleParamNElement(in_channels=5)
        mymod2 = MyModuleParamNElement(in_channels=15)
        self.verify_mm_relu_mods(mymod, mymod2)


if __name__ == '__main__':
    unittest.main()