pytorch

Форк
0
/
_utils_internal.py 
138 строк · 4.6 Кб
1
import functools
2
import logging
3
import os
4
import sys
5
import tempfile
6
from typing import Any, Dict
7

8
import torch
9

10
# Module-level logger; the OSS fallback functions below (signpost_event,
# log_compilation_event) route their events here instead of internal infra.
log = logging.getLogger(__name__)
11

12

13
# this arbitrary-looking assortment of functionality is provided here
14
# to have a central place for overrideable behavior. The motivating
15
# use is the FB build environment, where this source file is replaced
16
# by an equivalent.
17

18
if torch._running_with_deploy():
19
    # __file__ is meaningless in the context of frozen torch used in torch deploy.
20
    # setting empty torch_parent should allow below functions to operate without crashing,
21
    # but it's unclear if there is a valid use case for them in the context of deploy.
22
    torch_parent = ""
23
else:
24
    if os.path.basename(os.path.dirname(__file__)) == "shared":
25
        torch_parent = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
26
    else:
27
        torch_parent = os.path.dirname(os.path.dirname(__file__))
28

29

30
def get_file_path(*path_components: str) -> str:
    """Join *path_components* onto the torch parent directory."""
    parts = (torch_parent, *path_components)
    return os.path.join(*parts)
32

33

34
def get_file_path_2(*path_components: str) -> str:
    """Join the given components into one path, with no implicit root."""
    joined = os.path.join(*path_components)
    return joined
36

37

38
def get_writable_path(path: str) -> str:
    """Return *path* if it is writable; otherwise a fresh temp directory.

    The fallback temp directory's name ends with the basename of *path*.
    """
    if not os.access(path, os.W_OK):
        return tempfile.mkdtemp(suffix=os.path.basename(path))
    return path
42

43

44
def prepare_multiprocessing_environment(path: str) -> None:
    """No-op in OSS builds; the FB build replaces this file with a real impl."""
46

47

48
def resolve_library_path(path: str) -> str:
49
    return os.path.realpath(path)
50

51

52
def throw_abstract_impl_not_imported_error(opname, module, context):
    """Raise NotImplementedError for an operator with no abstract impl.

    Args:
        opname: fully qualified operator name used in the error message.
        module: Python module expected to register the abstract impl.
        context: extra caller-supplied diagnostic text.

    Raises:
        NotImplementedError: always. The message suggests importing
            ``module`` only when it is not already imported.
    """
    if module in sys.modules:
        # The module is already loaded, so telling the user to import it
        # would be misleading; surface the caller-provided context instead.
        # (Previously this branch dropped `context` entirely, producing a
        # truncated, unactionable message.)
        raise NotImplementedError(f"{opname}: {context}")
    else:
        raise NotImplementedError(
            f"{opname}: We could not find the abstract impl for this operator. "
            f"The operator specified that you may need to import the '{module}' "
            f"Python module to load the abstract impl. {context}"
        )
63

64

65
# Meta only, see
66
# https://www.internalfb.com/intern/wiki/ML_Workflow_Observability/User_Guides/Adding_instrumentation_to_your_code/
67
#
68
# This will cause an event to get logged to Scuba via the signposts API.  You
69
# can view samples on the API at https://fburl.com/scuba/workflow_signpost/zh9wmpqs
70
# we log to subsystem "torch", and the category and name you provide here.
71
# Each of the arguments translate into a Scuba column.  We're still figuring
72
# out local conventions in PyTorch, but category should be something like
73
# "dynamo" or "inductor", and name should be a specific string describing what
74
# kind of event happened.
75
#
76
# Killswitch is at
77
# https://www.internalfb.com/intern/justknobs/?name=pytorch%2Fsignpost#event
78
def signpost_event(category: str, name: str, parameters: Dict[str, Any]):
    """OSS fallback for the Meta-internal signpost API: log the event locally.

    Args:
        category: broad area, e.g. "dynamo" or "inductor".
        name: specific string describing what kind of event happened.
        parameters: arbitrary event payload, logged with %r.
    """
    log.info("%s %s: %r", category, name, parameters)
80

81

82
def log_compilation_event(metrics):
    """Record compilation metrics; the OSS build just emits them to the log."""
    log.info("%s", metrics)
84

85

86
def upload_graph(graph):
    """No-op in OSS builds; the FB build replaces this file with a real impl."""
88

89

90
def set_pytorch_distributed_envs_from_justknobs():
    """No-op in OSS builds; overridden by the Meta-internal equivalent file."""
92

93

94
def log_export_usage(**kwargs):
    """No-op in OSS builds; overridden by the Meta-internal equivalent file."""
96

97

98
def justknobs_check(name: str) -> bool:
    """OSS stand-in for the Meta-internal JustKnobs killswitch check.

    In FB prod the named knob can be flipped to False without a code push;
    in open source everything is always enabled, since downstream users can
    simply choose not to update PyTorch.  (If finer-grained enable/disable
    is ever needed, a name -> bool lookup table could be added here -- the
    point is that in OSS it is all tied to source code, as there is no live
    server to query.)

    This is the bare minimum needed for some killswitches; a more detailed
    plan lives at
    https://docs.google.com/document/d/1Ukerh9_42SeGh89J-tGtecpHBPwGlkQ043pddkKb3PU/edit
    In particular, some callers may need to read a knob once at process
    start and then use it consistently for the rest of the process; future
    work will codify such patterns into a better high-level API.

    WARNING: Do NOT call this function at module import time.  JK is not
    fork safe, and doing so will break anyone who forks the process and
    then hits JK again.
    """
    return True
122

123

124
@functools.lru_cache(None)
def max_clock_rate():
    """Return the GPU's maximum SM clock rate, queried once via nvidia-smi.

    Cached for the lifetime of the process.  Units are whatever ``nvsmi``
    reports for ``clocks.max.sm`` (presumably MHz -- confirm against triton).
    """
    # Lazy import: triton is an optional dependency of torch.
    from triton.testing import nvsmi

    readings = nvsmi(["clocks.max.sm"])
    return readings[0]
129

130

131
TEST_MASTER_ADDR = "127.0.0.1"
132
TEST_MASTER_PORT = 29500
133
# USE_GLOBAL_DEPS controls whether __init__.py tries to load
134
# libtorch_global_deps, see Note [Global dependencies]
135
USE_GLOBAL_DEPS = True
136
# USE_RTLD_GLOBAL_WITH_LIBTORCH controls whether __init__.py tries to load
137
# _C.so with RTLD_GLOBAL during the call to dlopen.
138
USE_RTLD_GLOBAL_WITH_LIBTORCH = False
139

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.