# Lint as: python3
"""HuggingFace/Datasets is an open library of datasets.

Note:

   VERSION needs to be formatted following the MAJOR.MINOR.PATCH convention
   (we need to follow this convention to be able to retrieve versioned scripts)

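   For reference, a minimal, hypothetical sketch (not part of the release tooling)
   of the version strings this convention allows, including the dev/rc suffixes
   used further below:
     ```
     import re

     # MAJOR.MINOR.PATCH with an optional rc/dev suffix (an assumption inferred
     # from the "x.y.z.dev0, or x.y.z.rc1 or x.y.z" formats mentioned in this file)
     VERSION_RE = re.compile(r"^\d+\.\d+\.\d+(\.?rc\d+|\.dev\d+)?$")
     assert VERSION_RE.match("2.18.0")
     assert VERSION_RE.match("2.18.1.dev0")
     assert VERSION_RE.match("2.18.0.rc1")
     ```
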
Simple checklist for release, from the AllenNLP repo: https://github.com/allenai/allennlp/blob/master/setup.py

Steps to make a release:

0. Prerequisites:
   - Dependencies:
     - twine: `pip install twine`
   - Create an account (and join the 'datasets' project) on:
     - PyPI: https://pypi.org/
     - Test PyPI: https://test.pypi.org/
   - Don't break `transformers`: run the `transformers` CI against the `main` branch of `datasets` and make sure it's green.
     - In `transformers`, use `datasets @ git+https://github.com/huggingface/datasets@main#egg=datasets`.
       Add a step to install `datasets@main` after `save_cache` in .circleci/create_circleci_config.py:
       ```
       steps.append({"run": {"name": "Install `datasets@main`", "command": 'pip uninstall datasets -y && pip install "datasets @ git+https://github.com/huggingface/datasets@main#egg=datasets"'}})
       ```
     - Then run the CI.

1. Create the release branch from the main branch:
     ```
     git checkout main
     git pull upstream main
     git checkout -b release-VERSION
     ```

2. Change the version to the release VERSION (see the sketch below) in:
   - __init__.py
   - setup.py
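
   A hypothetical helper for this step (the file paths and the NEW_VERSION value
   are assumptions, not release tooling that exists in the repo):
     ```
     import pathlib
     import re

     NEW_VERSION = "2.18.0"  # the release VERSION

     for path in ("src/datasets/__init__.py", "setup.py"):
         text = pathlib.Path(path).read_text(encoding="utf-8")
         # Rewrite the quoted version string after `__version__ = ` or `version=`
         text = re.sub(r'(__version__ = |version=)"[^"]+"', rf'\g<1>"{NEW_VERSION}"', text)
         pathlib.Path(path).write_text(text, encoding="utf-8")
     ```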

3. Commit these changes, push and create a Pull Request:
     ```
     git add -u
     git commit -m "Release: VERSION"
     git push upstream release-VERSION
     ```
   - Go to: https://github.com/huggingface/datasets/pull/new/release-VERSION
   - Create the pull request

4. From your local release branch, build both the sources and the wheel. Do not change anything in setup.py between
   creating the wheel and the source distribution (obviously).
   - First, delete any build directories that may be left over from previous builds:
     - build
     - dist
   - From the top-level directory, build the wheel and the sources:
       ```
       python setup.py bdist_wheel
       python setup.py sdist
       ```
   - You should now have a /dist directory containing both a .whl (wheel) and a .tar.gz (source) distribution.

5. Check that everything looks correct by uploading the package to the Test PyPI server:
     ```
     twine upload dist/* -r testpypi
     ```
   Check that you can install it in a virtualenv/notebook by running (the dependencies are installed from the
   regular index first, since they may not all be available on Test PyPI):
     ```
     pip install huggingface_hub fsspec aiohttp pyarrow-hotfix
     pip install -U tqdm
     pip install -i https://test.pypi.org/simple/ datasets
     ```

6. Upload the final version to the actual PyPI:
     ```
     twine upload dist/* -r pypi
     ```

7. Make the release on GitHub once everything is looking hunky-dory:
   - Merge the release Pull Request
   - Create a new release: https://github.com/huggingface/datasets/releases/new
   - Choose a tag: enter the new VERSION as the tag; it will be created when you publish the release
     - Create new tag VERSION on publish
   - Release title: use the new VERSION as well
   - Describe the release
     - Use the "Generate release notes" button for automatic generation
   - Publish the release

8. Set the dev version:
   - Create the dev-version branch from the main branch:
       ```
       git checkout main
       git pull upstream main
       git branch -D dev-version
       git checkout -b dev-version
       ```
   - Change the version to X.X.(X+1).dev0 (e.g. VERSION=1.18.3 -> 1.18.4.dev0) in:
     - __init__.py
     - setup.py
   - Commit these changes, push and create a Pull Request:
       ```
       git add -u
       git commit -m "Set dev version"
       git push upstream dev-version
       ```
     - Go to: https://github.com/huggingface/datasets/pull/new/dev-version
     - Create the pull request
   - Merge the dev version Pull Request
"""

from setuptools import find_packages, setup


REQUIRED_PKGS = [
    # For file locking
    "filelock",
    # We use numpy>=1.17 to have np.random.Generator (Dataset shuffling)
    "numpy>=1.17",
    # Backend and serialization.
    # Minimum 12.0.0 to be able to concatenate extension arrays
    "pyarrow>=12.0.0",
    # As long as we allow pyarrow < 14.0.1, to fix vulnerability CVE-2023-47248
    "pyarrow-hotfix",
    # For smart caching dataset processing
    "dill>=0.3.0,<0.3.9",  # temporary pin until dill has official support for determinism; see https://github.com/uqfoundation/dill/issues/19
    # For performance gains with Apache Arrow
    "pandas",
    # For downloading datasets over HTTPS
    "requests>=2.19.0",
    # Progress bars in download and scripts
    "tqdm>=4.62.1",
    # For fast hashing
    "xxhash",
    # For better multiprocessing
    "multiprocess",
    # To save datasets locally or on any filesystem
    # Minimum 2023.1.0 to support protocol=kwargs in fsspec's `open`, `get_fs_token_paths`, etc.: see https://github.com/fsspec/filesystem_spec/pull/1143
    "fsspec[http]>=2023.1.0,<=2024.2.0",
    # For data streaming via HTTP
    "aiohttp",
    # To get datasets from the Datasets Hub on huggingface.co
    "huggingface_hub>=0.19.4",
    # Utilities from PyPA to, e.g., compare versions
    "packaging",
    # To parse YAML metadata from dataset cards
    "pyyaml>=5.1",
]

AUDIO_REQUIRE = [
    "soundfile>=0.12.1",
    "librosa",
]

VISION_REQUIRE = [
    "Pillow>=6.2.1",
]

BENCHMARKS_REQUIRE = [
    "tensorflow==2.12.0",
    "torch==2.0.1",
    "transformers==4.30.1",
]

TESTS_REQUIRE = [
    # test dependencies
    "absl-py",
    "joblib<1.3.0",  # joblibspark doesn't support recent joblib versions
    "joblibspark",
    "pytest",
    "pytest-datadir",
    "pytest-xdist",
    # optional dependencies
    "apache-beam>=2.26.0; sys_platform != 'win32' and python_version<'3.10'",  # doesn't support recent dill versions for recent python versions, and on windows requires pyarrow<12.0.0
    "elasticsearch<8.0.0",  # 8.0 asks users to provide hosts or cloud_id when instantiating ElasticSearch()
    "faiss-cpu>=1.6.4",
    "jax>=0.3.14; sys_platform != 'win32'",
    "jaxlib>=0.3.14; sys_platform != 'win32'",
    "lz4",
    "pyspark>=3.4",  # https://issues.apache.org/jira/browse/SPARK-40991 fixed in 3.4.0
    "py7zr",
    "rarfile>=4.0",
    "sqlalchemy",
    "s3fs>=2021.11.1",  # S3 filesystem support (via fsspec)
    "tensorflow>=2.3,!=2.6.0,!=2.6.1; sys_platform != 'darwin' or platform_machine != 'arm64'",
    "tensorflow-macos; sys_platform == 'darwin' and platform_machine == 'arm64'",
    "tiktoken",
    "torch>=2.0.0",
    "soundfile>=0.12.1",
    "transformers",
    "typing-extensions>=4.6.1",  # due to conflict between apache-beam and pydantic
    "zstandard",
]
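
# Note on the requirement strings above: the part after ";" is a PEP 508
# environment marker (e.g. sys_platform != 'win32'), which makes pip skip the
# requirement entirely on non-matching platforms or Python versions.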


METRICS_TESTS_REQUIRE = [
    # metrics dependencies
    "accelerate",  # for frugalscore (calls transformers' Trainer)
    "bert_score>=0.3.6",
    "jiwer",
    "langdetect",
    "mauve-text",
    "nltk",
    "rouge_score",
    "sacrebleu",
    "sacremoses",
    "scikit-learn",
    "scipy",
    "sentencepiece",  # for bleurt
    "seqeval",
    "spacy>=3.0.0",
    # to speed up pip backtracking
    "toml>=0.10.1",
    "typer<0.5.0",  # pinned to work with Spacy==3.4.3 on Windows: see https://github.com/tiangolo/typer/issues/427
    "requests_file>=1.5.1",
    "tldextract>=3.1.0",
    "texttable>=1.6.3",
    "Werkzeug>=1.0.1",
    "six~=1.15.0",
]

TESTS_REQUIRE.extend(VISION_REQUIRE)
TESTS_REQUIRE.extend(AUDIO_REQUIRE)

QUALITY_REQUIRE = ["ruff>=0.3.0"]

DOCS_REQUIRE = [
    # Might need to add doc-builder and some specific deps in the future
    "s3fs",
    # The following dependencies are required for the Python reference to be built properly
    "transformers",
    "torch",
    "tensorflow>=2.2.0,!=2.6.0,!=2.6.1; sys_platform != 'darwin' or platform_machine != 'arm64'",
    "tensorflow-macos; sys_platform == 'darwin' and platform_machine == 'arm64'",
]

EXTRAS_REQUIRE = {
    "audio": AUDIO_REQUIRE,
    "vision": VISION_REQUIRE,
    "apache-beam": ["apache-beam>=2.26.0"],
    "tensorflow": [
        "tensorflow>=2.2.0,!=2.6.0,!=2.6.1; sys_platform != 'darwin' or platform_machine != 'arm64'",
        "tensorflow-macos; sys_platform == 'darwin' and platform_machine == 'arm64'",
    ],
    "tensorflow_gpu": ["tensorflow-gpu>=2.2.0,!=2.6.0,!=2.6.1"],
    "torch": ["torch"],
    "jax": ["jax>=0.3.14", "jaxlib>=0.3.14"],
    "s3": ["s3fs"],
    "streaming": [],  # for backward compatibility
    "dev": TESTS_REQUIRE + QUALITY_REQUIRE + DOCS_REQUIRE,
    "tests": TESTS_REQUIRE,
    "metrics-tests": METRICS_TESTS_REQUIRE,
    "quality": QUALITY_REQUIRE,
    "benchmarks": BENCHMARKS_REQUIRE,
    "docs": DOCS_REQUIRE,
}
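
# Each key in EXTRAS_REQUIRE becomes a pip "extra" installed on top of
# REQUIRED_PKGS. For example (usage notes only, not executed by this file):
#   pip install "datasets[audio]"    # core dependencies + AUDIO_REQUIRE
#   pip install "datasets[vision]"   # core dependencies + VISION_REQUIRE
#   pip install "datasets[tests]"    # core dependencies + TESTS_REQUIRE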

setup(
    name="datasets",
    version="2.18.1.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
    description="HuggingFace community-driven open-source library of datasets",
    long_description=open("README.md", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    author="HuggingFace Inc.",
    author_email="thomas@huggingface.co",
    url="https://github.com/huggingface/datasets",
    download_url="https://github.com/huggingface/datasets/tags",
    license="Apache 2.0",
    package_dir={"": "src"},
    packages=find_packages("src"),
    package_data={
        "datasets": ["py.typed"],
        "datasets.utils.resources": ["*.json", "*.yaml", "*.tsv"],
    },
    entry_points={"console_scripts": ["datasets-cli=datasets.commands.datasets_cli:main"]},
    python_requires=">=3.8.0",
    install_requires=REQUIRED_PKGS,
    extras_require=EXTRAS_REQUIRE,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    keywords="datasets machine learning datasets metrics",
    zip_safe=False,  # Required for mypy to find the py.typed file
)
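
# Usage notes for the definitions above (hypothetical shell commands, not part
# of this file):
#   pip install -e ".[dev]"   # editable install with the tests + quality + docs extras
#   datasets-cli --help       # console script declared in entry_points above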