openllm / pyproject.toml
[build-system]
build-backend = "hatchling.build"
requires = [
    "hatchling==1.18.0",
    "hatch-vcs==0.3.0",
    "hatch-fancy-pypi-readme==23.1.0",
]

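# Note: 'readme' and 'version' in the [project] table below are declared dynamic; they are
# generated at build time by the hatch-fancy-pypi-readme and hatch-vcs plugins required above.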
[project]
authors = [
    { name = "Aaron Pham", email = "aarnphm@bentoml.com" },
    { name = "BentoML Team", email = "contact@bentoml.com" },
]
dynamic = ['readme', 'version']
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: GPU :: NVIDIA CUDA",
    "Environment :: GPU :: NVIDIA CUDA :: 12",
    "Environment :: GPU :: NVIDIA CUDA :: 11.8",
    "Environment :: GPU :: NVIDIA CUDA :: 11.7",
    "License :: OSI Approved :: Apache Software License",
    "Topic :: Scientific/Engineering",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Topic :: Software Development :: Libraries",
    "Operating System :: OS Independent",
    "Intended Audience :: Developers",
    "Intended Audience :: Science/Research",
    "Intended Audience :: System Administrators",
    "Typing :: Typed",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
]
description = "OpenLLM Core: Core components for OpenLLM."
keywords = [
    "MLOps",
    "AI",
    "BentoML",
    "Model Serving",
    "Model Deployment",
    "LLMOps",
    "Falcon",
    "Vicuna",
    "Llama 2",
    "Fine tuning",
    "Serverless",
    "Large Language Model",
    "Generative AI",
    "StableLM",
    "Alpaca",
    "PyTorch",
    "Transformers",
]
dependencies = [
    "attrs>=23.1.0",
    "cattrs>=23.1.0,<23.2.0",
    "orjson",
    "inflection",
    "deepmerge",
    "typing_extensions",
    "mypy_extensions",
]
license = "Apache-2.0"
name = "openllm-core"
requires-python = ">=3.8"
[project.urls]
Blog = "https://modelserving.com"
Chat = "https://l.bentoml.com/join-openllm-discord"
Documentation = "https://github.com/bentoml/OpenLLM/blob/main/openllm-core/README.md"
GitHub = "https://github.com/bentoml/OpenLLM/blob/main/openllm-core"
History = "https://github.com/bentoml/OpenLLM/blob/main/CHANGELOG.md"
Homepage = "https://bentoml.com"
Tracker = "https://github.com/bentoml/OpenLLM/issues"
Twitter = "https://twitter.com/bentomlai"
[project.optional-dependencies]
full = ["openllm-core[vllm,fine-tune,bentoml]"]
vllm = ["vllm"]
bentoml = ["bentoml>=1.1.11,<1.2"]
fine-tune = [
    "transformers[torch,tokenizers,accelerate]>=4.34.0",
    "peft>=0.4.0",
    "bitsandbytes",
    "datasets",
    "accelerate",
    "trl",
]

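# The extras above compose at install time, e.g. (illustrative commands, not part of this file):
#   pip install "openllm-core[vllm]"
#   pip install "openllm-core[full]"  # resolves to the vllm, fine-tune and bentoml extras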
[tool.hatch.version]
fallback-version = "0.0.0"
source = "vcs"
[tool.hatch.build.hooks.vcs]
version-file = "src/openllm_core/_version.py"
[tool.hatch.version.raw-options]
git_describe_command = [
    "git",
    "describe",
    "--dirty",
    "--tags",
    "--long",
    "--first-parent",
]
local_scheme = "no-local-version"
root = ".."
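# Versioning sketch: hatch-vcs derives the package version from the `git describe` command
# above, run against the repository root one level up (root = ".."), writes it to
# src/openllm_core/_version.py, and falls back to 0.0.0 when no VCS metadata is available.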
[tool.hatch.metadata]
allow-direct-references = true
[tool.hatch.build.targets.wheel]
only-include = ["src/openllm_core"]
sources = ["src"]
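# src layout: with sources = ["src"], the wheel ships src/openllm_core as the importable
# top-level openllm_core package.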
[tool.hatch.build.targets.sdist]
exclude = [
    "/.git_archival.txt",
    "tests",
    "/.python-version-default",
    "ADDING_NEW_MODEL.md",
]
[tool.hatch.build.targets.wheel.hooks.mypyc]
dependencies = [
    "hatch-mypyc==0.16.0",
    "mypy==1.5.1",
    # avoid https://github.com/pallets/click/issues/2558
    "click==8.1.3",
    "bentoml==1.1.2",
    "transformers>=4.32.1",
    "pandas-stubs",
    "types-psutil",
    "types-tabulate",
    "types-PyYAML",
    "types-protobuf",
]
enable-by-default = false
exclude = ["src/openllm_core/_typing_compat.py"]
include = [
    "src/openllm_core/utils/__init__.py",
    "src/openllm_core/__init__.py",
    "src/openllm_core/_prompt.py",
    "src/openllm_core/_schemas.py",
    "src/openllm_core/_strategies.py",
    "src/openllm_core/exceptions.py",
]
# NOTE: This is consistent with pyproject.toml
mypy-args = [
    "--strict",
    # skip imports because transitive libraries don't ship type information
    "--follow-imports=skip",
    "--allow-subclassing-any",
    "--check-untyped-defs",
    "--ignore-missing-imports",
    "--no-warn-return-any",
    "--warn-unreachable",
    "--no-warn-no-return",
    "--no-warn-unused-ignores",
    "--exclude='/src\\/openllm_core\\/_typing_compat\\.py$'",
]
options = { verbose = true, strip_asserts = true, debug_level = "2", opt_level = "3", include_runtime_files = true }
require-runtime-dependencies = true

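# Build note (assumption about Hatch's build-hook toggling, not stated in this file): with
# enable-by-default = false above, the mypyc compilation hook is skipped unless explicitly
# enabled (e.g. via the HATCH_BUILD_HOOK_ENABLE_MYPYC environment variable), so ordinary
# installs get pure-Python wheels.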
[tool.hatch.metadata.hooks.fancy-pypi-readme]
content-type = "text/markdown"
# PyPI doesn't support the <picture> tag.
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """
<p align="center">
  <a href="https://github.com/bentoml/openllm">
    <img src="https://raw.githubusercontent.com/bentoml/openllm/main/.github/assets/main-banner.png" alt="Banner for OpenLLM" />
  </a>
</p>

"""
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
end-before = "\n<!-- hatch-fancy-pypi-readme intro stop -->"
path = "README.md"
start-after = "<!-- hatch-fancy-pypi-readme intro start -->\n"
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

<p align="center">
  <img src="https://raw.githubusercontent.com/bentoml/openllm/main/.github/assets/output.gif" alt="Gif showing OpenLLM Intro" />
</p>
"""
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

<p align="center">
  <img src="https://raw.githubusercontent.com/bentoml/openllm/main/.github/assets/agent.gif" alt="Gif showing Agent integration" />
</p>
"""
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
end-before = "\n<!-- hatch-fancy-pypi-readme interim stop -->"
path = "README.md"
start-after = "<!-- hatch-fancy-pypi-readme interim start -->\n"
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

---

[Click me for full changelog](https://github.com/bentoml/openllm/blob/main/CHANGELOG.md)
"""

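# Readme assembly: the fragments above are concatenated in order to form the PyPI long
# description: a static banner, the intro and interim sections sliced out of README.md by
# the HTML comment markers, two GIFs referenced by absolute raw.githubusercontent.com URLs
# so they render on PyPI, and a trailing changelog link.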