colossalai
/
setup.py
150 lines · 4.3 KB
import os
import sys
from typing import List

from setuptools import find_packages, setup

# Torch is optional at setup time: it is only needed when BUILD_EXT=1 (below)
# requests ahead-of-time compilation of the CUDA extensions.
try:
    import torch  # noqa
    from torch.utils.cpp_extension import BuildExtension

    TORCH_AVAILABLE = True
except ImportError:
    TORCH_AVAILABLE = False

# Absolute directory containing this setup.py.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
# Opt-in flag: set the environment variable BUILD_EXT=1 to build kernels at install time.
BUILD_EXT = int(os.environ.get("BUILD_EXT", "0")) == 1

# we do not support windows currently
if sys.platform == "win32":
    raise RuntimeError("Windows is not supported yet. Please try again within the Windows Subsystem for Linux (WSL).")
22
def fetch_requirements(path) -> List[str]:
    """Read a pip requirements file.

    Blank lines and ``#`` comment lines are dropped: passing them through to
    ``install_requires`` produces invalid requirement specifiers.

    Args:
        path (str): the path to the requirements file.

    Returns:
        The non-empty, non-comment requirement lines, stripped of whitespace.
    """
    with open(path, "r") as fd:
        stripped = (line.strip() for line in fd)
        return [line for line in stripped if line and not line.startswith("#")]
36
def fetch_readme() -> str:
    """Return the full text of the README.md file in the current directory.

    Returns:
        The README contents as a single string.
    """
    with open("README.md", encoding="utf-8") as readme_file:
        contents = readme_file.read()
    return contents
47
def get_version() -> str:
    """Read version.txt and generate the colossalai/version.py file.

    Returns:
        The library version stored in version.txt.
    """
    project_root = os.path.dirname(os.path.abspath(__file__))
    version_txt = os.path.join(project_root, "version.txt")
    version_py = os.path.join(project_root, "colossalai/version.py")

    with open(version_txt) as src:
        version = src.read().strip()

    # Persist the version so the installed package can report it at runtime.
    with open(version_py, "w") as dst:
        dst.write(f"__version__ = '{version}'\n")
    return version
69
if BUILD_EXT:
    if not TORCH_AVAILABLE:
        raise ModuleNotFoundError(
            "[extension] PyTorch is not found while BUILD_EXT=1. You need to install PyTorch first in order to build CUDA extensions"
        )

    from extensions import ALL_EXTENSIONS

    op_names = []
    ext_modules = []

    # Collect every extension that supports ahead-of-time compilation and
    # whose target hardware is present on this machine.
    for ext_cls in ALL_EXTENSIONS:
        ext = ext_cls()
        if ext.support_aot and ext.is_hardware_available():
            ext.assert_hardware_compatible()
            op_names.append(ext.name)
            ext_modules.append(ext.build_aot())

    # show log
    if len(ext_modules) == 0:
        raise RuntimeError("[extension] Could not find any kernel compatible with the current environment.")
    else:
        op_name_list = ", ".join(op_names)
        # bug fix: the original f-string lacked a separator, printing
        # "Building extensionsop1, op2" with the first name glued to the word.
        print(f"[extension] Building extensions: {op_name_list}")
else:
    # No AOT build requested: kernels will be compiled just-in-time at runtime.
    ext_modules = []
version = get_version()
package_name = "colossalai"

# Package metadata and build configuration. Note: the exclude tuple in the
# original listed "tests" and "extensions" twice each; duplicates removed.
setup(
    name=package_name,
    version=version,
    packages=find_packages(
        exclude=(
            "benchmark",
            "docker",
            "docs",
            "examples",
            "extensions",
            "requirements",
            "scripts",
            "tests",
            "*.egg-info",
        ),
    ),
    description="An integrated large-scale model training system with efficient parallelization techniques",
    long_description=fetch_readme(),
    long_description_content_type="text/markdown",
    license="Apache Software License 2.0",
    url="https://www.colossalai.org",
    project_urls={
        "Forum": "https://github.com/hpcaitech/ColossalAI/discussions",
        "Bug Tracker": "https://github.com/hpcaitech/ColossalAI/issues",
        "Examples": "https://github.com/hpcaitech/ColossalAI-Examples",
        "Documentation": "http://colossalai.readthedocs.io",
        "Github": "https://github.com/hpcaitech/ColossalAI",
    },
    # ext_modules is non-empty only when BUILD_EXT=1; BuildExtension drives
    # the torch C++/CUDA compilation in that case.
    ext_modules=ext_modules,
    cmdclass={"build_ext": BuildExtension} if ext_modules else {},
    install_requires=fetch_requirements("requirements/requirements.txt"),
    entry_points="""
        [console_scripts]
        colossalai=colossalai.cli:cli
    """,
    python_requires=">=3.6",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Environment :: GPU :: NVIDIA CUDA",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Topic :: System :: Distributed Computing",
    ],
    # ship the kernel C++/CUDA sources so JIT compilation works after install
    package_data={
        "colossalai": [
            "kernel/extensions/csrc/**/*",
        ]
    },
)
151