# Owner(s): ["module: onnx"]
from __future__ import annotations

import onnx

import pytorch_test_common

import torch
from torch.testing._internal import common_utils
def assert_op_in_onnx_model(model: onnx.ModelProto, op_type: str):
    """Assert that at least one node with the given ``op_type`` exists in ``model``.

    Local ONNX functions are inlined first, so ops nested inside function
    subgraphs are also visible in the flattened main graph.

    Args:
        model: The ONNX model to search.
        op_type: The ONNX operator type name to look for (e.g. "Resize").

    Raises:
        AssertionError: If no node with ``op_type`` is found in the inlined graph.
    """
    inlined = onnx.inliner.inline_local_functions(model)
    for node in inlined.graph.node:
        if node.op_type == op_type:
            # Found a matching op; the assertion holds.
            return
    raise AssertionError(f"Op {op_type} not found in model")
class TestDynamoExportDecompSkip(pytorch_test_common.ExportTestCase):
    """Tests that selected ATen ops skip decomposition during dynamo export,
    so they lower to a single ONNX op instead of a fine-grained subgraph."""

    def test_upsample_bilinear2d(self):
        class TestModel(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.upsample = torch.nn.Upsample(scale_factor=2, mode="bilinear")

            def forward(self, x):
                return self.upsample(x)

        onnx_program = torch.onnx.dynamo_export(TestModel(), torch.randn(1, 1, 2, 2))
        # If decomposition is skipped, the model will contain a Resize op instead of fine grained subgraph.
        assert_op_in_onnx_model(onnx_program.model_proto, "Resize")

    def test_upsample_bilinear2d_output_size(self):
        def func(x: torch.Tensor):
            return torch.nn.functional.interpolate(x, size=(4, 4), mode="bilinear")

        onnx_program = torch.onnx.dynamo_export(func, torch.randn(1, 1, 2, 2))
        # If decomposition is skipped, the model will contain a Resize op instead of fine grained subgraph.
        assert_op_in_onnx_model(onnx_program.model_proto, "Resize")

    def test_instance_norm(self):
        def func(x: torch.Tensor):
            return torch.nn.functional.instance_norm(x)

        onnx_program = torch.onnx.dynamo_export(func, torch.randn(1, 1, 2, 2))
        # If decomposition is skipped, the model will contain an InstanceNormalization op
        # instead of BatchNormalization op w/ training=True.
        assert_op_in_onnx_model(onnx_program.model_proto, "InstanceNormalization")
if __name__ == "__main__":
    # Standard PyTorch test entry point; discovers and runs the TestCase above.
    common_utils.run_tests()