# pyproject.toml for openllm-core — build and packaging configuration.
# Build backend: hatchling, with VCS-derived versioning and a README
# assembled at build time by hatch-fancy-pypi-readme (see [tool.hatch.*]).
[build-system]
build-backend = "hatchling.build"
requires = [
  "hatchling==1.18.0",
  "hatch-vcs==0.3.0",
  "hatch-fancy-pypi-readme==23.1.0",
]

[project]
name = "openllm-core"
description = "OpenLLM Core: Core components for OpenLLM."
license = "Apache-2.0"
requires-python = ">=3.8"
# readme is assembled by hatch-fancy-pypi-readme; version comes from VCS tags
# (see [tool.hatch.version]) — both must stay dynamic.
dynamic = ["readme", "version"]
authors = [
  { name = "Aaron Pham", email = "aarnphm@bentoml.com" },
  { name = "BentoML Team", email = "contact@bentoml.com" },
]
keywords = [
  "MLOps",
  "AI",
  "BentoML",
  "Model Serving",
  "Model Deployment",
  "LLMOps",
  "Falcon",
  "Vicuna",
  "Llama 2",
  "Fine tuning",
  "Serverless",
  "Large Language Model",
  "Generative AI",
  "StableLM",
  "Alpaca",
  "PyTorch",
  "Transformers",
]
classifiers = [
  "Development Status :: 5 - Production/Stable",
  "Environment :: GPU :: NVIDIA CUDA",
  "Environment :: GPU :: NVIDIA CUDA :: 12",
  "Environment :: GPU :: NVIDIA CUDA :: 11.8",
  "Environment :: GPU :: NVIDIA CUDA :: 11.7",
  "License :: OSI Approved :: Apache Software License",
  "Topic :: Scientific/Engineering",
  "Topic :: Scientific/Engineering :: Artificial Intelligence",
  "Topic :: Software Development :: Libraries",
  "Operating System :: OS Independent",
  "Intended Audience :: Developers",
  "Intended Audience :: Science/Research",
  "Intended Audience :: System Administrators",
  "Typing :: Typed",
  "Programming Language :: Python",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: 3 :: Only",
  "Programming Language :: Python :: 3.8",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: Implementation :: CPython",
  "Programming Language :: Python :: Implementation :: PyPy",
]
dependencies = [
  "attrs>=23.1.0",
  # cattrs 23.2 introduced breaking changes; pin below it.
  "cattrs>=23.1.0,<23.2.0",
  "orjson",
  "inflection",
  "deepmerge",
  "typing_extensions",
  "mypy_extensions",
]

# Project links shown on the PyPI sidebar (alphabetical).
[project.urls]
Blog = "https://modelserving.com"
Chat = "https://l.bentoml.com/join-openllm-discord"
Documentation = "https://github.com/bentoml/OpenLLM/blob/main/openllm-core/README.md"
GitHub = "https://github.com/bentoml/OpenLLM/blob/main/openllm-core"
History = "https://github.com/bentoml/OpenLLM/blob/main/CHANGELOG.md"
Homepage = "https://bentoml.com"
Tracker = "https://github.com/bentoml/OpenLLM/issues"
Twitter = "https://twitter.com/bentomlai"

[project.optional-dependencies]
# "full" is a convenience meta-extra that pulls in all other extras.
full = ["openllm-core[vllm,fine-tune,bentoml]"]
vllm = ["vllm"]
bentoml = ["bentoml>=1.1.11,<1.2"]
fine-tune = [
  "transformers[torch,tokenizers,accelerate]>=4.34.0",
  "peft>=0.4.0",
  "bitsandbytes",
  "datasets",
  "accelerate",
  "trl",
]

# Version is derived from git tags via hatch-vcs (setuptools-scm under the
# hood); raw-options are passed straight through to setuptools-scm.
[tool.hatch.version]
source = "vcs"
fallback-version = "0.0.0"

[tool.hatch.version.raw-options]
git_describe_command = [
  "git",
  "describe",
  "--dirty",
  "--tags",
  "--long",
  "--first-parent",
]
# no-local-version keeps uploads PyPI-compatible (PyPI rejects local versions).
local_scheme = "no-local-version"
# Tags live at the monorepo root, one directory up from this package.
root = ".."

[tool.hatch.build.hooks.vcs]
version-file = "src/openllm_core/_version.py"

[tool.hatch.metadata]
# Needed for direct-reference requirements (e.g. extras referencing this package).
allow-direct-references = true

[tool.hatch.build.targets.wheel]
only-include = ["src/openllm_core"]
sources = ["src"]

[tool.hatch.build.targets.sdist]
exclude = [
  "/.git_archival.txt",
  "tests",
  "/.python-version-default",
  "ADDING_NEW_MODEL.md",
]

# Optional mypyc compilation of selected modules (off by default; enabled
# via HATCH_BUILD_HOOK_ENABLE_MYPYC or equivalent).
[tool.hatch.build.targets.wheel.hooks.mypyc]
enable-by-default = false
require-runtime-dependencies = true
dependencies = [
  "hatch-mypyc==0.16.0",
  "mypy==1.5.1",
  # avoid https://github.com/pallets/click/issues/2558
  "click==8.1.3",
  "bentoml==1.1.2",
  "transformers>=4.32.1",
  "pandas-stubs",
  "types-psutil",
  "types-tabulate",
  "types-PyYAML",
  "types-protobuf",
]
include = [
  "src/openllm_core/utils/__init__.py",
  "src/openllm_core/__init__.py",
  "src/openllm_core/_prompt.py",
  "src/openllm_core/_schemas.py",
  "src/openllm_core/_strategies.py",
  "src/openllm_core/exceptions.py",
]
exclude = ["src/openllm_core/_typing_compat.py"]
# NOTE: This is consistent with pyproject.toml
mypy-args = [
  "--strict",
  # this is because all transient library doesn't have types
  "--follow-imports=skip",
  "--allow-subclassing-any",
  "--check-untyped-defs",
  "--ignore-missing-imports",
  "--no-warn-return-any",
  "--warn-unreachable",
  "--no-warn-no-return",
  "--no-warn-unused-ignores",
  "--exclude='/src\\/openllm_core\\/_typing_compat\\.py$'",
]

# mypycify() options; equivalent to the former inline table, spelled out for
# readability (inline tables are best kept to 1-3 keys).
[tool.hatch.build.targets.wheel.hooks.mypyc.options]
verbose = true
strip_asserts = true
debug_level = "2"
opt_level = "3"
include_runtime_files = true

# The PyPI long description is stitched together from literal HTML fragments
# and marker-delimited slices of README.md, in declaration order.
[tool.hatch.metadata.hooks.fancy-pypi-readme]
content-type = "text/markdown"

# PyPI doesn't support the <picture> tag.
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """
<p align="center">
<a href="https://github.com/bentoml/openllm">
<img src="https://raw.githubusercontent.com/bentoml/openllm/main/.github/assets/main-banner.png" alt="Banner for OpenLLM" />
</a>
</p>

"""

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
path = "README.md"
start-after = "<!-- hatch-fancy-pypi-readme intro start -->\n"
end-before = "\n<!-- hatch-fancy-pypi-readme intro stop -->"

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

<p align="center">
<img src="https://raw.githubusercontent.com/bentoml/openllm/main/.github/assets/output.gif" alt="Gif showing OpenLLM Intro" />
</p>
"""

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

<p align="center">
<img src="https://raw.githubusercontent.com/bentoml/openllm/main/.github/assets/agent.gif" alt="Gif showing Agent integration" />
</p>
"""

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
path = "README.md"
start-after = "<!-- hatch-fancy-pypi-readme interim start -->\n"
end-before = "\n<!-- hatch-fancy-pypi-readme interim stop -->"

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

---

[Click me for full changelog](https://github.com/bentoml/openllm/blob/main/CHANGELOG.md)
"""