Delete setup.py
setup.py
DELETED
@@ -1,301 +0,0 @@
-# Copyright 2024 The HuggingFace Team. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Simple check list from AllenNLP repo: https://github.com/allenai/allennlp/blob/main/setup.py
-
-To create the package for PyPI.
-
-1. Run `make pre-release` (or `make pre-patch` for a patch release) then run `make fix-copies` to fix the index of the
-   documentation.
-
-   If releasing on a special branch, copy the updated README.md on the main branch for the commit you will make
-   for the post-release and run `make fix-copies` on the main branch as well.
-
-2. Unpin specific versions from setup.py that use a git install.
-
-3. Checkout the release branch (v<RELEASE>-release, for example v4.19-release), and commit these changes with the
-   message: "Release: <RELEASE>" and push.
-
-4. Manually trigger the "Nightly and release tests on main/release branch" workflow from the release branch. Wait for
-   the tests to complete. We can safely ignore the known test failures.
-
-5. Wait for the tests on main to be completed and be green (otherwise revert and fix bugs).
-
-6. Add a tag in git to mark the release: "git tag v<RELEASE> -m 'Adds tag v<RELEASE> for PyPI'"
-   Push the tag to git: git push --tags origin v<RELEASE>-release
-
-7. Build both the sources and the wheel. Do not change anything in setup.py between
-   creating the wheel and the source distribution (obviously).
-
-   For the wheel, run: "python setup.py bdist_wheel" in the top level directory
-   (This will build a wheel for the Python version you use to build it).
-
-   For the sources, run: "python setup.py sdist"
-   You should now have a /dist directory with both .whl and .tar.gz source versions.
-
-   Long story cut short, you need to run both before you can upload the distribution to the
-   test PyPI and the actual PyPI servers:
-
-   python setup.py bdist_wheel && python setup.py sdist
-
-8. Check that everything looks correct by uploading the package to the PyPI test server:
-
-   twine upload dist/* -r pypitest
-   (pypi suggests using twine as other methods upload files via plaintext.)
-   You may have to specify the repository url, use the following command then:
-   twine upload dist/* -r pypitest --repository-url=https://test.pypi.org/legacy/
-
-   Check that you can install it in a virtualenv by running:
-   pip install -i https://testpypi.python.org/pypi diffusers
-
-   If you are testing from a Colab Notebook, for instance, then do:
-   pip install diffusers && pip uninstall diffusers
-   pip install -i https://testpypi.python.org/pypi diffusers
-
-   Check you can run the following commands:
-   python -c "from diffusers import __version__; print(__version__)"
-   python -c "from diffusers import DiffusionPipeline; pipe = DiffusionPipeline.from_pretrained('fusing/unet-ldm-dummy-update'); pipe()"
-   python -c "from diffusers import DiffusionPipeline; pipe = DiffusionPipeline.from_pretrained('hf-internal-testing/tiny-stable-diffusion-pipe', safety_checker=None); pipe('ah suh du')"
-   python -c "from diffusers import *"
-
-9. Upload the final version to the actual PyPI:
-   twine upload dist/* -r pypi
-
-10. Prepare the release notes and publish them on GitHub once everything is looking hunky-dory. You can use the following
-    Space to fetch all the commits applicable for the release: https://huggingface.co/spaces/sayakpaul/auto-release-notes-diffusers.
-    It automatically fetches the correct tag and branch but also provides the option to configure them.
-    `tag` should be the previous release tag (v0.26.1, for example), and `branch` should be
-    the latest release branch (v0.27.0-release, for example). It denotes all commits that have happened on branch
-    v0.27.0-release after the tag v0.26.1 was created.
-
-11. Run `make post-release` (or, for a patch release, `make post-patch`). If you were on a branch for the release,
-    you need to go back to main before executing this.
-"""
-
-import os
-import re
-import sys
-
-from setuptools import Command, find_packages, setup
-
-
-# IMPORTANT:
-# 1. all dependencies should be listed here with their version requirements if any
-# 2. once modified, run: `make deps_table_update` to update src/diffusers/dependency_versions_table.py
-_deps = [
-    "Pillow",  # keep the PIL.Image.Resampling deprecation away
-    "accelerate>=0.31.0",
-    "compel==0.1.8",
-    "datasets",
-    "filelock",
-    "flax>=0.4.1",
-    "hf-doc-builder>=0.3.0",
-    "huggingface-hub>=0.27.0",
-    "requests-mock==1.10.0",
-    "importlib_metadata",
-    "invisible-watermark>=0.2.0",
-    "isort>=5.5.4",
-    "jax>=0.4.1",
-    "jaxlib>=0.4.1",
-    "Jinja2",
-    "k-diffusion>=0.0.12",
-    "torchsde",
-    "note_seq",
-    "librosa",
-    "numpy",
-    "parameterized",
-    "peft>=0.15.0",
-    "protobuf>=3.20.3,<4",
-    "pytest",
-    "pytest-timeout",
-    "pytest-xdist",
-    "python>=3.8.0",
-    "ruff==0.9.10",
-    "safetensors>=0.3.1",
-    "sentencepiece>=0.1.91,!=0.1.92",
-    "GitPython<3.1.19",
-    "scipy",
-    "onnx",
-    "optimum_quanto>=0.2.6",
-    "gguf>=0.10.0",
-    "torchao>=0.7.0",
-    "bitsandbytes>=0.43.3",
-    "regex!=2019.12.17",
-    "requests",
-    "tensorboard",
-    "tiktoken>=0.7.0",
-    "torch>=1.4",
-    "torchvision",
-    "transformers>=4.41.2",
-    "urllib3<=2.0.0",
-    "black",
-    "phonemizer",
-    "opencv-python",
-]
-
-# this is a lookup table with items like:
-#
-# tokenizers: "huggingface-hub==0.8.0"
-# packaging: "packaging"
-#
-# some of the values are versioned whereas others aren't.
-deps = {b: a for a, b in (re.findall(r"^(([^!=<>~]+)(?:[!=<>~].*)?$)", x)[0] for x in _deps)}
-
-# since we save this data in src/diffusers/dependency_versions_table.py it can be easily accessed from
-# anywhere. If you need to quickly access the data from this table in a shell, you can do so easily with:
-#
-# python -c 'import sys; from diffusers.dependency_versions_table import deps; \
-# print(" ".join([deps[x] for x in sys.argv[1:]]))' tokenizers datasets
-#
-# Just pass the desired package names to that script as it's shown with 2 packages above.
-#
-# If diffusers is not yet installed and the work is done from the cloned repo remember to add `PYTHONPATH=src` to the script above
-#
-# You can then feed this for example to `pip`:
-#
-# pip install -U $(python -c 'import sys; from diffusers.dependency_versions_table import deps; \
-# print(" ".join([deps[x] for x in sys.argv[1:]]))' tokenizers datasets)
-#
-
-
-def deps_list(*pkgs):
-    return [deps[pkg] for pkg in pkgs]
-
-
-class DepsTableUpdateCommand(Command):
-    """
-    A custom command that updates the dependency table.
-    usage: python setup.py deps_table_update
-    """
-
-    description = "build runtime dependency table"
-    user_options = [
-        # format: (long option, short option, description).
-        (
-            "dep-table-update",
-            None,
-            "updates src/diffusers/dependency_versions_table.py",
-        ),
-    ]
-
-    def initialize_options(self):
-        pass
-
-    def finalize_options(self):
-        pass
-
-    def run(self):
-        entries = "\n".join([f'    "{k}": "{v}",' for k, v in deps.items()])
-        content = [
-            "# THIS FILE HAS BEEN AUTOGENERATED. To update:",
-            "# 1. modify the `_deps` dict in setup.py",
-            "# 2. run `make deps_table_update`",
-            "deps = {",
-            entries,
-            "}",
-            "",
-        ]
-        target = "src/diffusers/dependency_versions_table.py"
-        print(f"updating {target}")
-        with open(target, "w", encoding="utf-8", newline="\n") as f:
-            f.write("\n".join(content))
-
-
-extras = {}
-extras["quality"] = deps_list("urllib3", "isort", "ruff", "hf-doc-builder")
-extras["docs"] = deps_list("hf-doc-builder")
-extras["training"] = deps_list("accelerate", "datasets", "protobuf", "tensorboard", "Jinja2", "peft")
-extras["test"] = deps_list(
-    "compel",
-    "GitPython",
-    "datasets",
-    "Jinja2",
-    "invisible-watermark",
-    "k-diffusion",
-    "librosa",
-    "parameterized",
-    "pytest",
-    "pytest-timeout",
-    "pytest-xdist",
-    "requests-mock",
-    "safetensors",
-    "sentencepiece",
-    "scipy",
-    "tiktoken",
-    "torchvision",
-    "transformers",
-    "phonemizer",
-)
-extras["torch"] = deps_list("torch", "accelerate")
-
-extras["bitsandbytes"] = deps_list("bitsandbytes", "accelerate")
-extras["gguf"] = deps_list("gguf", "accelerate")
-extras["optimum_quanto"] = deps_list("optimum_quanto", "accelerate")
-extras["torchao"] = deps_list("torchao", "accelerate")
-
-if os.name == "nt":  # windows
-    extras["flax"] = []  # jax is not supported on windows
-else:
-    extras["flax"] = deps_list("jax", "jaxlib", "flax")
-
-extras["dev"] = (
-    extras["quality"] + extras["test"] + extras["training"] + extras["docs"] + extras["torch"] + extras["flax"]
-)
-
-install_requires = [
-    deps["importlib_metadata"],
-    deps["filelock"],
-    deps["huggingface-hub"],
-    deps["numpy"],
-    deps["regex"],
-    deps["requests"],
-    deps["safetensors"],
-    deps["Pillow"],
-]
-
-version_range_max = max(sys.version_info[1], 10) + 1
-
-setup(
-    name="diffusers",
-    version="0.34.0.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
-    description="State-of-the-art diffusion in PyTorch and JAX.",
-    long_description=open("README.md", "r", encoding="utf-8").read(),
-    long_description_content_type="text/markdown",
-    keywords="deep learning diffusion jax pytorch stable diffusion audioldm",
-    license="Apache 2.0 License",
-    author="The Hugging Face team (past and future) with the help of all our contributors (https://github.com/huggingface/diffusers/graphs/contributors)",
-    author_email="[email protected]",
-    url="https://github.com/huggingface/diffusers",
-    package_dir={"": "src"},
-    packages=find_packages("src"),
-    package_data={"diffusers": ["py.typed"]},
-    include_package_data=True,
-    python_requires=">=3.8.0",
-    install_requires=list(install_requires),
-    extras_require=extras,
-    entry_points={"console_scripts": ["diffusers-cli=diffusers.commands.diffusers_cli:main"]},
-    classifiers=[
-        "Development Status :: 5 - Production/Stable",
-        "Intended Audience :: Developers",
-        "Intended Audience :: Education",
-        "Intended Audience :: Science/Research",
-        "License :: OSI Approved :: Apache Software License",
-        "Operating System :: OS Independent",
-        "Topic :: Scientific/Engineering :: Artificial Intelligence",
-        "Programming Language :: Python :: 3",
-    ]
-    + [f"Programming Language :: Python :: 3.{i}" for i in range(8, version_range_max)],
-    cmdclass={"deps_table_update": DepsTableUpdateCommand},
-)
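
Note: the `deps = {...}` comprehension in the deleted file is terse. The following self-contained sketch (not part of the deleted file; it uses a three-entry excerpt of `_deps` for illustration) shows how that regex splits each requirement string into a bare package name while keeping the full pin as the value, and how `deps_list` consumes the table:

    import re

    # Excerpt of the pins from `_deps`, for illustration only.
    _deps = ["Pillow", "accelerate>=0.31.0", "protobuf>=3.20.3,<4"]

    # The regex captures (full requirement, bare name); the dict maps name -> requirement.
    deps = {b: a for a, b in (re.findall(r"^(([^!=<>~]+)(?:[!=<>~].*)?$)", x)[0] for x in _deps)}


    def deps_list(*pkgs):
        return [deps[pkg] for pkg in pkgs]


    print(deps["accelerate"])               # accelerate>=0.31.0
    print(deps_list("Pillow", "protobuf"))  # ['Pillow', 'protobuf>=3.20.3,<4']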
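
For reference, `DepsTableUpdateCommand.run()` regenerates src/diffusers/dependency_versions_table.py from that table; given the `_deps` list in the deleted file, the generated module would look roughly like this (only the first few entries shown; exact formatting may differ):

    # THIS FILE HAS BEEN AUTOGENERATED. To update:
    # 1. modify the `_deps` dict in setup.py
    # 2. run `make deps_table_update`
    deps = {
        "Pillow": "Pillow",
        "accelerate": "accelerate>=0.31.0",
        "compel": "compel==0.1.8",
        # ... one entry per item in `_deps` ...
    }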