pytorch
# Template for torch/version.py used by the Bazel build.
# NOTE(review): the '{{...}}' placeholders are presumably substituted with real
# values when the build stamps this file -- confirm against the Bazel rules.
__version__ = '{{VERSION}}'  # PyTorch version string (stamped placeholder)
debug = False  # this build is not a debug build
cuda = '{{CUDA_VERSION}}'  # CUDA toolkit version (stamped placeholder)
# TODO: use workspace status to stamp the correct version
git_version = ""  # left empty until version stamping is wired up (see TODO above)
hip = None  # NOTE(review): presumably the ROCm/HIP version; None in this build -- confirm

# This is a gross monkey-patch hack that depends on the order of imports
# in torch/__init__.py
# TODO: find a more elegant solution to set `USE_GLOBAL_DEPS` for the bazel build
import torch
# Turn the flag off before torch/__init__.py reads it (relies on import order above).
torch.USE_GLOBAL_DEPS = False
13