diff --git a/.github/workflows/test_models.yml b/.github/workflows/test_models.yml
index 4a022465..64ef23b5 100644
--- a/.github/workflows/test_models.yml
+++ b/.github/workflows/test_models.yml
@@ -55,9 +55,8 @@ jobs:
           if [ "${{ matrix.executorch-version }}" == "nightly" ]; then
             python install_dev.py
           else
-            pip install '.[tests]'
+            pip install '.[dev]'
             pip install executorch==${{ matrix.executorch-version }}
-            pip install git+https://github.com/pytorch-labs/tokenizers
           fi
           pip list
      - name: Run tests
diff --git a/install_dev.py b/install_dev.py
index c31e76a1..eb523b4a 100644
--- a/install_dev.py
+++ b/install_dev.py
@@ -7,7 +7,7 @@ def install_torch_nightly_deps():
     """Install torch related dependencies from pinned nightly"""
     EXECUTORCH_NIGHTLY_VERSION = "dev20250916"
     TORCHAO_NIGHTLY_VERSION = "dev20250916"
-    # Torch nightly is aligned with pinned nightly in https://github.com/pytorch/executorch/blob/main/install_requirements.py#L74
+    # Torch nightly is aligned with pinned nightly in https://github.com/pytorch/executorch/blob/main/torch_pin.py#L2
     TORCH_NIGHTLY_VERSION = "dev20250916"
     subprocess.check_call(
         [
@@ -43,7 +43,7 @@ def install_dep_from_source():
             "-m",
             "pip",
             "install",
-            "git+https://github.com/pytorch-labs/tokenizers@fc32028858020c4fcafe37aaaeaf5d1b480336a2#egg=pytorch-tokenizers",
+            "git+https://github.com/pytorch-labs/tokenizers@3aada3fe28c945d14d5ec62254eb56ccdf10eb11#egg=pytorch-tokenizers",
         ]
     )
 
diff --git a/pyproject.toml b/pyproject.toml
index 8fa22c11..d83f191b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,14 +1,83 @@
+[project]
+name = "optimum-executorch"
+dynamic = ["version"]
+description = "Optimum Executorch is an interface between the Hugging Face libraries and ExecuTorch"
+readme = { file = "README.md", content-type = "text/markdown" }
+license = { text = "Apache" }
+authors = [
Special Ops Team", email = "hardware@huggingface.co" }, +] +requires-python = ">=3.10.0" +keywords = ["transformers", "quantization", "inference", "executorch"] +classifiers = [ + "Development Status :: 2 - Pre-Alpha", + "License :: OSI Approved :: Apache Software License", + "Intended Audience :: Developers", + "Intended Audience :: Education", + "Intended Audience :: Science/Research", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Scientific/Engineering :: Artificial Intelligence", +] + +dependencies = [ + "optimum~=1.24", + "executorch>=1.0.0", + "transformers==4.56.1", + "pytorch-tokenizers>=1.0.1", + "accelerate>=0.26.0", +] + +[project.optional-dependencies] +dev = [ + "accelerate>=0.26.0", + "coremltools>=8.2.0", + "datasets==3.6.0", + "parameterized", + "pytest", + "safetensors", + "sentencepiece", + "numba!=0.58.0", + "librosa", + "soundfile", + "tiktoken", + "black~=23.1", + "ruff==0.4.4", +] + +[project.urls] +Homepage = "https://github.com/huggingface/optimum-executorch" + +# ---- setuptools config ---- + +[tool.setuptools] +# Equivalent of include_package_data=True +include-package-data = true + +[tool.setuptools.packages.find] +# Mirrors find_namespace_packages(include=["optimum*"]) +include = ["optimum*"] +namespaces = true + +[tool.setuptools.dynamic] +# Pull version from the Python attribute +version = { attr = "optimum.executorch.version.__version__" } + +# ---- your existing tool configs (kept, but one tweak suggested) ---- + [tool.black] line-length = 119 -target-version = ['py37'] +# Recommended to match your supported interpreters: +target-version = ["py310", "py311", "py312"] [tool.ruff] -# Never enforce `E501` (line length violations). ignore = ["C901", "E501", "E741", "W605"] select = ["C", "E", "F", "I", "W"] line-length = 119 -# Ignore import violations in all `__init__.py` files. 
 [tool.ruff.per-file-ignores]
 "__init__.py" = ["E402", "F401", "F403", "F811"]
 
@@ -18,6 +87,10 @@ known-first-party = ["optimum"]
 
 [tool.pytest.ini_options]
 markers = [
-  "run_slow",
-  "portable",
+    "run_slow",
+    "portable",
 ]
+
+[build-system]
+requires = ["setuptools >= 77.0.3", "wheel"]
+build-backend = "setuptools.build_meta"
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 0a8f85e5..00000000
--- a/setup.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import re
-
-from setuptools import find_namespace_packages, setup
-
-
-# Ensure we match the version set in optimum/executorch/version.py
-filepath = "optimum/executorch/version.py"
-try:
-    with open(filepath) as version_file:
-        (__version__,) = re.findall('__version__ = "(.*)"', version_file.read())
-except Exception as error:
-    assert False, "Error: Could not open '%s' due %s\n" % (filepath, error)
-
-INSTALL_REQUIRE = [
-    "optimum~=1.24",
-    "executorch>=1.0.0",
-    "transformers==4.56.1",
-]
-
-TESTS_REQUIRE = [
-    "accelerate>=0.26.0",
-    "coremltools>=8.2.0",
-    "datasets==3.6.0",  # Locked to 3.6.0 due to https://github.com/huggingface/datasets/issues/7707
-    "parameterized",
-    "pytest",
-    "safetensors",
-    "sentencepiece",
-    "numba!=0.58.0",  # Due to the bug https://github.com/numba/numba/issues/9209
-    "librosa",
-    "soundfile",
-    "tiktoken",
-]
-
-
-QUALITY_REQUIRE = ["black~=23.1", "ruff==0.4.4"]
-
-
-EXTRAS_REQUIRE = {
-    "tests": TESTS_REQUIRE,
-    "quality": QUALITY_REQUIRE,
-    "dev": TESTS_REQUIRE + QUALITY_REQUIRE,
-}
-
-
-setup(
-    name="optimum-executorch",
-    version=__version__,
-    description="Optimum Executorch is an interface between the Hugging Face libraries and ExecuTorch",
-    long_description=open("README.md", "r", encoding="utf-8").read(),
-    long_description_content_type="text/markdown",
-    classifiers=[
-        "Development Status :: 2 - Pre-Alpha",
-        "License :: OSI Approved :: Apache Software License",
-        "Intended Audience :: Developers",
-        "Intended Audience :: Education",
-        "Intended Audience :: Science/Research",
-        "Operating System :: OS Independent",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "Topic :: Scientific/Engineering :: Artificial Intelligence",
-    ],
-    keywords="transformers, quantization, inference, executorch",
-    url="https://github.com/huggingface/optimum",
-    author="HuggingFace Inc. Special Ops Team",
-    author_email="hardware@huggingface.co",
-    license="Apache",
-    packages=find_namespace_packages(include=["optimum*"]),
-    install_requires=INSTALL_REQUIRE,
-    extras_require=EXTRAS_REQUIRE,
-    python_requires=">=3.10.0",
-    include_package_data=True,
-    zip_safe=False,
-)
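
A quick local sanity check for the setup.py -> pyproject.toml migration (a minimal sketch, not part of the patch; it assumes a checkout of the repo with `pip` and the `build` package available):

# Hypothetical smoke test for the packaging change in this PR, run from the repo root.
import subprocess
import sys

# Build a wheel with the setuptools backend declared in [build-system]; the version
# is resolved dynamically from optimum.executorch.version.__version__ via
# [tool.setuptools.dynamic].
subprocess.check_call([sys.executable, "-m", "build", "--wheel"])

# Install with the consolidated `dev` extra, mirroring what CI now runs
# (`pip install '.[dev]'` replaces the old `.[tests]` plus separate tokenizers install).
subprocess.check_call([sys.executable, "-m", "pip", "install", ".[dev]"])

# The installed distribution should report the version from version.py.
from importlib.metadata import version
print(version("optimum-executorch"))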