# Lint as: python3
"""HuggingFace/Datasets is an open library of datasets.

Note:

   VERSION needs to be formatted following the MAJOR.MINOR.PATCH convention
   (we need to follow this convention to be able to retrieve versioned scripts)

Simple check list for release from AllenNLP repo: https://github.com/allenai/allennlp/blob/master/setup.py

Steps to make a release:

0. Prerequisites:
   - Dependencies:
      - twine: `pip install twine`
   - Create an account in (and join the 'datasets' project):
      - PyPI: https://pypi.org/
      - Test PyPI: https://test.pypi.org/
   - Don't break `transformers`: run the `transformers` CI using the `main` branch and make sure it's green.
      - In `transformers`, use `datasets @ git+https://github.com/huggingface/datasets@main#egg=datasets`
        Add a step to install `datasets@main` after `save_cache` in .circleci/create_circleci_config.py:
        ```
        steps.append({"run": {"name": "Install `datasets@main`", "command": 'pip uninstall datasets -y && pip install "datasets @ git+https://github.com/huggingface/datasets@main#egg=datasets"'}})
        ```
      - and then run the CI

1. Create the release branch from main branch:
   ```
   git checkout main
   git pull upstream main
   git checkout -b release-VERSION
   ```

2. Change the version to the release VERSION in:
   - __init__.py
   - setup.py
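   For illustration only, with a hypothetical VERSION=2.18.0 (assuming, as in this repo's src layout,
   that the version lives in a `__version__` attribute in src/datasets/__init__.py and in the
   `version=` argument of the `setup()` call below), the two edits look roughly like:
   ```
   # src/datasets/__init__.py
   __version__ = "2.18.0"

   # setup.py
   version="2.18.0",
   ```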

3. Commit these changes, push and create a Pull Request:
   ```
   git add -u
   git commit -m "Release: VERSION"
   git push upstream release-VERSION
   ```
   - Go to: https://github.com/huggingface/datasets/pull/new/release-VERSION
   - Create pull request

4. From your local release branch, build both the sources and the wheel. Do not change anything in setup.py between
   creating the wheel and the source distribution (obviously).
   - First, delete any building directories that may exist from previous builds:
      - build
      - dist
   - From the top level directory, build the wheel and the sources:
     ```
     python setup.py bdist_wheel
     python setup.py sdist
     ```
   - You should now have a /dist directory with both .whl and .tar.gz source versions.
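   To clear the leftover build/ and dist/ directories mentioned at the top of this step in one go
   (an illustrative shortcut, assuming a POSIX shell at the repository root):
   ```
   rm -rf build/ dist/
   ```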

5. Check that everything looks correct by uploading the package to the test PyPI server:
   ```
   twine upload dist/* -r testpypi
   ```
   Check that you can install it in a virtualenv/notebook by running:
   ```
   pip install huggingface_hub fsspec aiohttp pyarrow-hotfix
   pip install -U tqdm
   pip install -i https://testpypi.python.org/pypi datasets
   ```
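   As an optional sanity check (not part of the original checklist), confirm that the test install
   picked up the release version:
   ```
   python -c "import datasets; print(datasets.__version__)"
   ```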

6. Upload the final version to the actual PyPI:
   ```
   twine upload dist/* -r pypi
   ```

7. Make the release on GitHub once everything is looking hunky-dory (a CLI alternative is sketched after this list):
   - Merge the release Pull Request
   - Create a new release: https://github.com/huggingface/datasets/releases/new
   - Choose a tag: introduce the new VERSION as the tag; it will be created when you publish the release
      - Create new tag VERSION on publish
   - Release title: introduce the new VERSION as well
   - Describe the release
      - Use the "Generate release notes" button for automatic generation
   - Publish release
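   As an alternative to the web UI, the GitHub CLI can create the tag and the release in one command
   (illustrative, assuming `gh` is installed and authenticated; VERSION is the new tag):
   ```
   gh release create VERSION --title "VERSION" --generate-notes
   ```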

8. Set the dev version
   - Create the dev-version branch from the main branch:
     ```
     git checkout main
     git pull upstream main
     git branch -D dev-version
     git checkout -b dev-version
     ```
   - Change the version to X.X.X+1.dev0 (e.g. VERSION=1.18.3 -> 1.18.4.dev0) in:
      - __init__.py
      - setup.py
   - Commit these changes, push and create a Pull Request:
     ```
     git add -u
     git commit -m "Set dev version"
     git push upstream dev-version
     ```
   - Go to: https://github.com/huggingface/datasets/pull/new/dev-version
   - Create pull request
   - Merge the dev version Pull Request
"""

from setuptools import find_packages, setup


REQUIRED_PKGS = [
    # For file locking
    "filelock",
    # We use numpy>=1.17 to have np.random.Generator (Dataset shuffling)
    "numpy>=1.17",
    # Backend and serialization.
    # Minimum 12.0.0 to be able to concatenate extension arrays
    "pyarrow>=12.0.0",
    # As long as we allow pyarrow < 14.0.1, to fix vulnerability CVE-2023-47248
    "pyarrow-hotfix",
    # For smart caching dataset processing
    "dill>=0.3.0,<0.3.9",  # tmp pin until dill has official support for determinism see https://github.com/uqfoundation/dill/issues/19
    # For performance gains with apache arrow
    "pandas",
    # for downloading datasets over HTTPS
    "requests>=2.19.0",
    # progress bars in download and scripts
    "tqdm>=4.62.1",
    # for fast hashing
    "xxhash",
    # for better multiprocessing
    "multiprocess",
    # to save datasets locally or on any filesystem
    # minimum 2023.1.0 to support protocol=kwargs in fsspec's `open`, `get_fs_token_paths`, etc.: see https://github.com/fsspec/filesystem_spec/pull/1143
    "fsspec[http]>=2023.1.0,<=2024.2.0",
    # for data streaming via http
    "aiohttp",
    # To get datasets from the Datasets Hub on huggingface.co
    "huggingface_hub>=0.19.4",
    # Utilities from PyPA to e.g., compare versions
    "packaging",
    # To parse YAML metadata from dataset cards
    "pyyaml>=5.1",
]

AUDIO_REQUIRE = [
    "soundfile>=0.12.1",
    "librosa",
]

VISION_REQUIRE = [
    "Pillow>=6.2.1",
]

BENCHMARKS_REQUIRE = [
    "tensorflow==2.12.0",
    "torch==2.0.1",
    "transformers==4.30.1",
]

TESTS_REQUIRE = [
    # test dependencies
    "absl-py",
    "joblib<1.3.0",  # joblibspark doesn't support recent joblib versions
    "joblibspark",
    "pytest",
    "pytest-datadir",
    "pytest-xdist",
    # optional dependencies
    "apache-beam>=2.26.0; sys_platform != 'win32' and python_version<'3.10'",  # doesn't support recent dill versions for recent python versions and on windows requires pyarrow<12.0.0
    "elasticsearch<8.0.0",  # 8.0 asks users to provide hosts or cloud_id when instantiating ElasticSearch()
    "faiss-cpu>=1.6.4",
    "jax>=0.3.14; sys_platform != 'win32'",
    "jaxlib>=0.3.14; sys_platform != 'win32'",
    "lz4",
    "pyspark>=3.4",  # https://issues.apache.org/jira/browse/SPARK-40991 fixed in 3.4.0
    "py7zr",
    "rarfile>=4.0",
    "sqlalchemy",
    "s3fs>=2021.11.1",  # aligned with fsspec[http]>=2021.11.1; test only on python 3.7 for now
    "tensorflow>=2.3,!=2.6.0,!=2.6.1; sys_platform != 'darwin' or platform_machine != 'arm64'",
    "tensorflow-macos; sys_platform == 'darwin' and platform_machine == 'arm64'",
    "tiktoken",
    "torch>=2.0.0",
    "soundfile>=0.12.1",
    "transformers",
    "typing-extensions>=4.6.1",  # due to conflict between apache-beam and pydantic
    "zstandard",
]


METRICS_TESTS_REQUIRE = [
    # metrics dependencies
    "accelerate",  # for frugalscore (calls transformers' Trainer)
    "bert_score>=0.3.6",
    "jiwer",
    "langdetect",
    "mauve-text",
    "nltk",
    "rouge_score",
    "sacrebleu",
    "sacremoses",
    "scikit-learn",
    "scipy",
    "sentencepiece",  # for bleurt
    "seqeval",
    "spacy>=3.0.0",
    "tldextract",
    # to speed up pip backtracking
    "toml>=0.10.1",
    "typer<0.5.0",  # pinned to work with Spacy==3.4.3 on Windows: see https://github.com/tiangolo/typer/issues/427
    "requests_file>=1.5.1",
    "tldextract>=3.1.0",
    "texttable>=1.6.3",
    "Werkzeug>=1.0.1",
    "six~=1.15.0",
]

TESTS_REQUIRE.extend(VISION_REQUIRE)
TESTS_REQUIRE.extend(AUDIO_REQUIRE)

QUALITY_REQUIRE = ["ruff>=0.3.0"]

DOCS_REQUIRE = [
    # Might need to add doc-builder and some specific deps in the future
    "s3fs",
    # Following dependencies are required for the Python reference to be built properly
    "transformers",
    "torch",
    "tensorflow>=2.2.0,!=2.6.0,!=2.6.1; sys_platform != 'darwin' or platform_machine != 'arm64'",
    "tensorflow-macos; sys_platform == 'darwin' and platform_machine == 'arm64'",
]

EXTRAS_REQUIRE = {
    "audio": AUDIO_REQUIRE,
    "vision": VISION_REQUIRE,
    "apache-beam": ["apache-beam>=2.26.0"],
    "tensorflow": [
        "tensorflow>=2.2.0,!=2.6.0,!=2.6.1; sys_platform != 'darwin' or platform_machine != 'arm64'",
        "tensorflow-macos; sys_platform == 'darwin' and platform_machine == 'arm64'",
    ],
    "tensorflow_gpu": ["tensorflow-gpu>=2.2.0,!=2.6.0,!=2.6.1"],
    "torch": ["torch"],
    "jax": ["jax>=0.3.14", "jaxlib>=0.3.14"],
    "s3": ["s3fs"],
    "streaming": [],  # for backward compatibility
    "dev": TESTS_REQUIRE + QUALITY_REQUIRE + DOCS_REQUIRE,
    "tests": TESTS_REQUIRE,
    "metrics-tests": METRICS_TESTS_REQUIRE,
    "quality": QUALITY_REQUIRE,
    "benchmarks": BENCHMARKS_REQUIRE,
    "docs": DOCS_REQUIRE,
}

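# Usage note (illustrative, not consumed by the build): each key of EXTRAS_REQUIRE becomes a pip
# "extra" installed on top of REQUIRED_PKGS, e.g.:
#   pip install "datasets[audio]"      # adds AUDIO_REQUIRE
#   pip install "datasets[vision,s3]"  # extras can be combined
#   pip install -e ".[dev]"            # from a local clone: tests + quality + docs dependencies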
setup(
    name="datasets",
    version="2.18.1.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
    description="HuggingFace community-driven open-source library of datasets",
    long_description=open("README.md", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    author="HuggingFace Inc.",
    author_email="thomas@huggingface.co",
    url="https://github.com/huggingface/datasets",
    download_url="https://github.com/huggingface/datasets/tags",
    license="Apache 2.0",
    package_dir={"": "src"},
    packages=find_packages("src"),
    package_data={
        "datasets": ["py.typed"],
        "datasets.utils.resources": ["*.json", "*.yaml", "*.tsv"],
    },
    entry_points={"console_scripts": ["datasets-cli=datasets.commands.datasets_cli:main"]},
    python_requires=">=3.8.0",
    install_requires=REQUIRED_PKGS,
    extras_require=EXTRAS_REQUIRE,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    keywords="datasets machine learning datasets metrics",
    zip_safe=False,  # Required for mypy to find the py.typed file
)