pytorch
69 строк · 1.9 Кб
import argparse
import os.path
import sys

import torch
6
def get_custom_backend_library_path():
    """Return the absolute path to the library containing the custom backend.

    The filename is selected per platform (``.dll`` on Windows, ``.dylib`` on
    macOS, ``.so`` elsewhere) and is expected to live in the local ``build``
    directory.

    Returns:
        The absolute path to the custom backend object, customized by platform.

    Raises:
        FileNotFoundError: If the library does not exist at the expected path
            (i.e. the custom backend has not been built).
    """
    if sys.platform.startswith("win32"):
        library_filename = "custom_backend.dll"
    elif sys.platform.startswith("darwin"):
        library_filename = "libcustom_backend.dylib"
    else:
        library_filename = "libcustom_backend.so"
    path = os.path.abspath(os.path.join("build", library_filename))
    # An assert would be silently stripped under ``python -O``; raise an
    # explicit error so a missing build is always reported with the path.
    if not os.path.exists(path):
        raise FileNotFoundError(f"Custom backend library not found: {path}")
    return path
23
24
def to_custom_backend(module):
    """Lower a ScriptModule so that it can run on TestBackend.

    Thin helper around ``torch._C._jit_to_backend`` that compiles only the
    ``forward`` method, using an empty compile spec.

    Args:
        module: input ScriptModule.

    Returns:
        The module, lowered for the "custom_backend" backend.
    """
    compile_spec = {"forward": {"": ""}}
    return torch._C._jit_to_backend("custom_backend", module, compile_spec)
38
39
class Model(torch.nn.Module):
    """Tiny module for exercising the to_backend API.

    Used to test that a lowered module supports saving, loading, and
    executing in C++.
    """

    def forward(self, a, b):
        # Produce both the elementwise sum and difference of the inputs.
        total = a + b
        difference = a - b
        return (total, difference)
48
49
def main():
    """Lower an instance of ``Model`` to the custom backend and export it.

    Parses ``--export-module-to`` from the command line, loads the shared
    library that registers the custom backend, lowers a scripted ``Model``,
    and serializes the result to the requested location.
    """
    parser = argparse.ArgumentParser(
        description="Lower a Module to a custom backend"
    )
    parser.add_argument("--export-module-to", required=True)
    options = parser.parse_args()

    # Load the library containing the custom backend and confirm that
    # PyTorch actually registered it.
    library_path = get_custom_backend_library_path()
    torch.ops.load_library(library_path)
    assert library_path in torch.ops.loaded_libraries

    # Script the model, lower it to the custom backend, and export it to
    # the specified location.
    scripted = torch.jit.script(Model())
    torch.jit.save(to_custom_backend(scripted), options.export_module_to)
66
67
# Run the export only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
70