# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
14"""
15A simple launcher script for TPU training
16
17Inspired by https://github.com/pytorch/pytorch/blob/master/torch/distributed/launch.py
18
19::
20>>> python xla_spawn.py --num_cores=NUM_CORES_YOU_HAVE
21YOUR_TRAINING_SCRIPT.py (--arg1 --arg2 --arg3 and all other
22arguments of your training script)
23
24"""


import importlib
import sys
from argparse import REMAINDER, ArgumentParser
from pathlib import Path

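# torch_xla is the PyTorch/XLA package; it is only available in TPU-enabled
# environments, so this launcher cannot run elsewhere.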
import torch_xla.distributed.xla_multiprocessing as xmp


def parse_args():
    """
    Helper function parsing the command line options.

    @retval argparse.Namespace holding the parsed arguments
    """
    parser = ArgumentParser(
        description=(
            "PyTorch TPU distributed training launch helper utility that will spawn multiple distributed processes"
        )
    )

    # Optional arguments for the launch helper
    parser.add_argument("--num_cores", type=int, default=1, help="Number of TPU cores to use (1 or 8).")

    # positional
    parser.add_argument(
        "training_script",
        type=str,
        help=(
            "The full path to the single TPU training "
            "program/script to be launched in parallel, "
            "followed by all the arguments for the "
            "training script"
        ),
    )

    # rest from the training program
    parser.add_argument("training_script_args", nargs=REMAINDER)

    return parser.parse_args()


def main():
    args = parse_args()

    # Import training_script as a module.
    script_fpath = Path(args.training_script)
    sys.path.append(str(script_fpath.parent.resolve()))
    mod_name = script_fpath.stem
    mod = importlib.import_module(mod_name)
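    # (The script's parent directory is appended to sys.path so that importlib
    # can resolve the file by its bare stem, e.g. ``run_glue`` for
    # ``path/to/run_glue.py``.)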

    # Patch sys.argv
    sys.argv = [args.training_script] + args.training_script_args + ["--tpu_num_cores", str(args.num_cores)]
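
    # The appended ``--tpu_num_cores`` flag is consumed by the training
    # script's own argument parser (TrainingArguments in the Transformers
    # examples accepts it). The imported module must expose a top-level
    # ``_mp_fn(index)`` entry point: xmp.spawn invokes it once per TPU
    # process, passing the process ordinal as ``index``.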
    xmp.spawn(mod._mp_fn, args=(), nprocs=args.num_cores)


if __name__ == "__main__":
    main()
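

# A minimal sketch of a compatible training script (``my_trainer.py`` and its
# contents are hypothetical, shown only to illustrate the contract):
#
#     # my_trainer.py
#     def main():
#         ...  # parse sys.argv (including --tpu_num_cores) and run training
#
#     def _mp_fn(index):
#         # Called by xmp.spawn in each of the ``nprocs`` TPU processes.
#         main()
#
# Launched as:
#     python xla_spawn.py --num_cores 8 my_trainer.py --arg1 --arg2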