John6666 committed on
Commit
0e95c75
·
verified ·
1 Parent(s): b45ac7a

Upload 9 files

Browse files
Files changed (5) hide show
  1. README.md +1 -1
  2. convert_url_to_diffusers_multi_gr.py +4 -11
  3. requirements.txt +4 -3
  4. sdutils.py +23 -10
  5. utils.py +53 -38
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 🎨➡️🧨
4
  colorFrom: indigo
5
  colorTo: purple
6
  sdk: gradio
7
- sdk_version: 5.1.0
8
  app_file: app.py
9
  pinned: false
10
  license: mit
 
4
  colorFrom: indigo
5
  colorTo: purple
6
  sdk: gradio
7
+ sdk_version: 5.6.0
8
  app_file: app.py
9
  pinned: false
10
  license: mit
convert_url_to_diffusers_multi_gr.py CHANGED
@@ -1,13 +1,5 @@
1
  import os
2
- if os.environ.get("SPACES_ZERO_GPU") is not None:
3
- import spaces
4
- else:
5
- class spaces:
6
- @staticmethod
7
- def GPU(func):
8
- def wrapper(*args, **kwargs):
9
- return func(*args, **kwargs)
10
- return wrapper
11
  import argparse
12
  from pathlib import Path
13
  import os
@@ -212,7 +204,7 @@ def convert_sd35_fp8_cpu(new_file: str, new_dir: str, dtype: str, base_repo: str
212
 
213
  #@spaces.GPU(duration=60)
214
  def load_and_save_pipeline(pipe, model_type: str, url: str, new_file: str, new_dir: str, dtype: str,
215
- scheduler: str, base_repo: str, civitai_key: str, lora_dict: dict,
216
  my_vae, my_clip_tokenizer, my_clip_encoder, my_t5_tokenizer, my_t5_encoder,
217
  kwargs: dict, dkwargs: dict, progress=gr.Progress(track_tqdm=True)):
218
  try:
@@ -289,6 +281,7 @@ def load_and_save_pipeline(pipe, model_type: str, url: str, new_file: str, new_d
289
  pipe = fuse_loras(pipe, lora_dict, temp_dir, civitai_key, dkwargs)
290
  pipe.save_pretrained(new_dir)
291
  except Exception as e:
 
292
  raise Exception("Failed to load pipeline.") from e
293
  finally:
294
  return pipe
@@ -369,7 +362,7 @@ def convert_url_to_diffusers(url: str, civitai_key: str="", is_upload_sf: bool=F
369
  if my_t5_tokenizer: kwargs["tokenizer_2"] = my_t5_tokenizer
370
  if my_t5_encoder: kwargs["text_encoder_2"] = my_t5_encoder
371
 
372
- pipe = load_and_save_pipeline(pipe, model_type, url, new_file, new_dir, dtype, scheduler, base_repo, civitai_key, lora_dict,
373
  my_vae, my_clip_tokenizer, my_clip_encoder, my_t5_tokenizer, my_t5_encoder, kwargs, dkwargs)
374
 
375
  if Path(new_dir).exists(): save_readme_md(new_dir, url)
 
1
  import os
2
+ import spaces
 
 
 
 
 
 
 
 
3
  import argparse
4
  from pathlib import Path
5
  import os
 
204
 
205
  #@spaces.GPU(duration=60)
206
  def load_and_save_pipeline(pipe, model_type: str, url: str, new_file: str, new_dir: str, dtype: str,
207
+ scheduler: str, ema: bool, base_repo: str, civitai_key: str, lora_dict: dict,
208
  my_vae, my_clip_tokenizer, my_clip_encoder, my_t5_tokenizer, my_t5_encoder,
209
  kwargs: dict, dkwargs: dict, progress=gr.Progress(track_tqdm=True)):
210
  try:
 
281
  pipe = fuse_loras(pipe, lora_dict, temp_dir, civitai_key, dkwargs)
282
  pipe.save_pretrained(new_dir)
283
  except Exception as e:
284
+ print(f"Failed to load pipeline. {e}")
285
  raise Exception("Failed to load pipeline.") from e
286
  finally:
287
  return pipe
 
362
  if my_t5_tokenizer: kwargs["tokenizer_2"] = my_t5_tokenizer
363
  if my_t5_encoder: kwargs["text_encoder_2"] = my_t5_encoder
364
 
365
+ pipe = load_and_save_pipeline(pipe, model_type, url, new_file, new_dir, dtype, scheduler, ema, base_repo, civitai_key, lora_dict,
366
  my_vae, my_clip_tokenizer, my_clip_encoder, my_t5_tokenizer, my_t5_encoder, kwargs, dkwargs)
367
 
368
  if Path(new_dir).exists(): save_readme_md(new_dir, url)
requirements.txt CHANGED
@@ -1,11 +1,12 @@
1
  huggingface_hub
2
  safetensors
3
- transformers==4.44.0
4
- diffusers==0.30.3
5
  peft
6
  sentencepiece
7
  torch
8
  pytorch_lightning
9
  gdown
10
  bitsandbytes
11
- accelerate
 
 
1
  huggingface_hub
2
  safetensors
3
+ transformers>=4.44.0
4
+ diffusers>=0.30.3
5
  peft
6
  sentencepiece
7
  torch
8
  pytorch_lightning
9
  gdown
10
  bitsandbytes
11
+ accelerate
12
+ numpy<2
sdutils.py CHANGED
@@ -51,16 +51,17 @@ from diffusers import (
51
 
52
 
53
  SCHEDULER_CONFIG_MAP = {
54
- "DPM++ 2M": (DPMSolverMultistepScheduler, {"use_karras_sigmas": False}),
55
- "DPM++ 2M Karras": (DPMSolverMultistepScheduler, {"use_karras_sigmas": True}),
56
  "DPM++ 2M SDE": (DPMSolverMultistepScheduler, {"use_karras_sigmas": False, "algorithm_type": "sde-dpmsolver++"}),
57
  "DPM++ 2M SDE Karras": (DPMSolverMultistepScheduler, {"use_karras_sigmas": True, "algorithm_type": "sde-dpmsolver++"}),
58
- "DPM++ 2S": (DPMSolverSinglestepScheduler, {"use_karras_sigmas": False}),
59
- "DPM++ 2S Karras": (DPMSolverSinglestepScheduler, {"use_karras_sigmas": True}),
60
- "DPM++ 1S": (DPMSolverMultistepScheduler, {"solver_order": 1}),
61
- "DPM++ 1S Karras": (DPMSolverMultistepScheduler, {"solver_order": 1, "use_karras_sigmas": True}),
62
- "DPM++ 3M": (DPMSolverMultistepScheduler, {"solver_order": 3}),
63
- "DPM++ 3M Karras": (DPMSolverMultistepScheduler, {"solver_order": 3, "use_karras_sigmas": True}),
 
64
  "DPM++ SDE": (DPMSolverSDEScheduler, {"use_karras_sigmas": False}),
65
  "DPM++ SDE Karras": (DPMSolverSDEScheduler, {"use_karras_sigmas": True}),
66
  "DPM2": (KDPM2DiscreteScheduler, {}),
@@ -87,8 +88,8 @@ SCHEDULER_CONFIG_MAP = {
87
  "DPM++ 2M EDM Karras": (EDMDPMSolverMultistepScheduler, {"use_karras_sigmas": True, "solver_order": 2, "solver_type": "midpoint", "final_sigmas_type": "zero", "algorithm_type": "dpmsolver++"}),
88
  "DDPM": (DDPMScheduler, {}),
89
 
90
- "DPM++ 2M Lu": (DPMSolverMultistepScheduler, {"use_lu_lambdas": True}),
91
- "DPM++ 2M Ef": (DPMSolverMultistepScheduler, {"euler_at_final": True}),
92
  "DPM++ 2M SDE Lu": (DPMSolverMultistepScheduler, {"use_lu_lambdas": True, "algorithm_type": "sde-dpmsolver++"}),
93
  "DPM++ 2M SDE Ef": (DPMSolverMultistepScheduler, {"algorithm_type": "sde-dpmsolver++", "euler_at_final": True}),
94
 
@@ -98,6 +99,18 @@ SCHEDULER_CONFIG_MAP = {
98
  "TCD trailing": (TCDScheduler, {"timestep_spacing": "trailing"}),
99
  "LCM Auto-Loader": (LCMScheduler, {}),
100
  "TCD Auto-Loader": (TCDScheduler, {}),
 
 
 
 
 
 
 
 
 
 
 
 
101
  }
102
 
103
 
 
51
 
52
 
53
  SCHEDULER_CONFIG_MAP = {
54
+ "DPM++ 2M": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "use_karras_sigmas": False}),
55
+ "DPM++ 2M Karras": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "use_karras_sigmas": True}),
56
  "DPM++ 2M SDE": (DPMSolverMultistepScheduler, {"use_karras_sigmas": False, "algorithm_type": "sde-dpmsolver++"}),
57
  "DPM++ 2M SDE Karras": (DPMSolverMultistepScheduler, {"use_karras_sigmas": True, "algorithm_type": "sde-dpmsolver++"}),
58
+ "DPM++ 2S": (DPMSolverSinglestepScheduler, {"algorithm_type": "dpmsolver++", "use_karras_sigmas": False}),
59
+ "DPM++ 2S Karras": (DPMSolverSinglestepScheduler, {"algorithm_type": "dpmsolver++", "use_karras_sigmas": True}),
60
+ "DPM++ 1S": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "solver_order": 1}),
61
+ "DPM++ 1S Karras": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "solver_order": 1, "use_karras_sigmas": True}),
62
+ "DPM++ 3M": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "solver_order": 3}),
63
+ "DPM++ 3M Karras": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "solver_order": 3, "use_karras_sigmas": True}),
64
+ "DPM 3M": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver", "final_sigmas_type": "sigma_min", "solver_order": 3}),
65
  "DPM++ SDE": (DPMSolverSDEScheduler, {"use_karras_sigmas": False}),
66
  "DPM++ SDE Karras": (DPMSolverSDEScheduler, {"use_karras_sigmas": True}),
67
  "DPM2": (KDPM2DiscreteScheduler, {}),
 
88
  "DPM++ 2M EDM Karras": (EDMDPMSolverMultistepScheduler, {"use_karras_sigmas": True, "solver_order": 2, "solver_type": "midpoint", "final_sigmas_type": "zero", "algorithm_type": "dpmsolver++"}),
89
  "DDPM": (DDPMScheduler, {}),
90
 
91
+ "DPM++ 2M Lu": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "use_lu_lambdas": True}),
92
+ "DPM++ 2M Ef": (DPMSolverMultistepScheduler, {"algorithm_type": "dpmsolver++", "euler_at_final": True}),
93
  "DPM++ 2M SDE Lu": (DPMSolverMultistepScheduler, {"use_lu_lambdas": True, "algorithm_type": "sde-dpmsolver++"}),
94
  "DPM++ 2M SDE Ef": (DPMSolverMultistepScheduler, {"algorithm_type": "sde-dpmsolver++", "euler_at_final": True}),
95
 
 
99
  "TCD trailing": (TCDScheduler, {"timestep_spacing": "trailing"}),
100
  "LCM Auto-Loader": (LCMScheduler, {}),
101
  "TCD Auto-Loader": (TCDScheduler, {}),
102
+
103
+ "EDM": (EDMDPMSolverMultistepScheduler, {}),
104
+ "EDM Karras": (EDMDPMSolverMultistepScheduler, {"use_karras_sigmas": True}),
105
+
106
+ "Euler (V-Prediction)": (EulerDiscreteScheduler, {"prediction_type": "v_prediction", "rescale_betas_zero_snr": True}),
107
+ "Euler a (V-Prediction)": (EulerAncestralDiscreteScheduler, {"prediction_type": "v_prediction", "rescale_betas_zero_snr": True}),
108
+ "Euler EDM (V-Prediction)": (EDMEulerScheduler, {"prediction_type": "v_prediction"}),
109
+ "Euler EDM Karras (V-Prediction)": (EDMEulerScheduler, {"use_karras_sigmas": True, "prediction_type": "v_prediction"}),
110
+ "DPM++ 2M EDM (V-Prediction)": (EDMDPMSolverMultistepScheduler, {"solver_order": 2, "solver_type": "midpoint", "final_sigmas_type": "zero", "algorithm_type": "dpmsolver++", "prediction_type": "v_prediction"}),
111
+ "DPM++ 2M EDM Karras (V-Prediction)": (EDMDPMSolverMultistepScheduler, {"use_karras_sigmas": True, "solver_order": 2, "solver_type": "midpoint", "final_sigmas_type": "zero", "algorithm_type": "dpmsolver++", "prediction_type": "v_prediction"}),
112
+ "EDM (V-Prediction)": (EDMDPMSolverMultistepScheduler, {"prediction_type": "v_prediction"}),
113
+ "EDM Karras (V-Prediction)": (EDMDPMSolverMultistepScheduler, {"use_karras_sigmas": True, "prediction_type": "v_prediction"}),
114
  }
115
 
116
 
utils.py CHANGED
@@ -8,6 +8,7 @@ import re
8
  import urllib.parse
9
  import subprocess
10
  import time
 
11
 
12
 
13
  def get_token():
@@ -25,6 +26,17 @@ def set_token(token):
25
  print(f"Error: Failed to save token.")
26
 
27
 
 
 
 
 
 
 
 
 
 
 
 
28
  def get_user_agent():
29
  return 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0'
30
 
@@ -113,26 +125,29 @@ def download_hf_file(directory, url, progress=gr.Progress(track_tqdm=True)):
113
 
114
 
115
  def download_thing(directory, url, civitai_api_key="", progress=gr.Progress(track_tqdm=True)): # requires aria2, gdown
116
- url = url.strip()
117
- if "drive.google.com" in url:
118
- original_dir = os.getcwd()
119
- os.chdir(directory)
120
- os.system(f"gdown --fuzzy {url}")
121
- os.chdir(original_dir)
122
- elif "huggingface.co" in url:
123
- url = url.replace("?download=true", "")
124
- if "/blob/" in url: url = url.replace("/blob/", "/resolve/")
125
- download_hf_file(directory, url)
126
- elif "civitai.com" in url:
127
- if "?" in url:
128
- url = url.split("?")[0]
129
- if civitai_api_key:
130
- url = url + f"?token={civitai_api_key}"
131
- os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {directory} {url}")
 
 
 
132
  else:
133
- print("You need an API key to download Civitai models.")
134
- else:
135
- os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {directory} {url}")
136
 
137
 
138
  def get_local_file_list(dir_path):
@@ -145,30 +160,30 @@ def get_local_file_list(dir_path):
145
 
146
 
147
  def get_download_file(temp_dir, url, civitai_key, progress=gr.Progress(track_tqdm=True)):
148
- if not "http" in url and is_repo_name(url) and not Path(url).exists():
149
- print(f"Use HF Repo: {url}")
150
- new_file = url
151
- elif not "http" in url and Path(url).exists():
152
- print(f"Use local file: {url}")
153
- new_file = url
154
- elif Path(f"{temp_dir}/{url.split('/')[-1]}").exists():
155
- print(f"File to download alreday exists: {url}")
156
- new_file = f"{temp_dir}/{url.split('/')[-1]}"
157
- else:
158
- print(f"Start downloading: {url}")
159
- before = get_local_file_list(temp_dir)
160
- try:
161
  download_thing(temp_dir, url.strip(), civitai_key)
162
- except Exception:
 
 
163
  print(f"Download failed: {url}")
164
  return ""
165
- after = get_local_file_list(temp_dir)
166
- new_file = list_sub(after, before)[0] if list_sub(after, before) else ""
167
- if not new_file:
168
  print(f"Download failed: {url}")
169
  return ""
170
- print(f"Download completed: {url}")
171
- return new_file
172
 
173
 
174
  def download_repo(repo_id: str, dir_path: str, progress=gr.Progress(track_tqdm=True)): # for diffusers repo
 
8
  import urllib.parse
9
  import subprocess
10
  import time
11
+ from typing import Any
12
 
13
 
14
  def get_token():
 
26
  print(f"Error: Failed to save token.")
27
 
28
 
29
+ def get_state(state: dict, key: str):
30
+ if key in state.keys(): return state[key]
31
+ else:
32
+ print(f"State '{key}' not found.")
33
+ return None
34
+
35
+
36
+ def set_state(state: dict, key: str, value: Any):
37
+ state[key] = value
38
+
39
+
40
  def get_user_agent():
41
  return 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0'
42
 
 
125
 
126
 
127
  def download_thing(directory, url, civitai_api_key="", progress=gr.Progress(track_tqdm=True)): # requires aria2, gdown
128
+ try:
129
+ url = url.strip()
130
+ if "drive.google.com" in url:
131
+ original_dir = os.getcwd()
132
+ os.chdir(directory)
133
+ os.system(f"gdown --fuzzy {url}")
134
+ os.chdir(original_dir)
135
+ elif "huggingface.co" in url:
136
+ url = url.replace("?download=true", "")
137
+ if "/blob/" in url: url = url.replace("/blob/", "/resolve/")
138
+ download_hf_file(directory, url)
139
+ elif "civitai.com" in url:
140
+ if "?" in url:
141
+ url = url.split("?")[0]
142
+ if civitai_api_key:
143
+ url = url + f"?token={civitai_api_key}"
144
+ os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {directory} {url}")
145
+ else:
146
+ print("You need an API key to download Civitai models.")
147
  else:
148
+ os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {directory} {url}")
149
+ except Exception as e:
150
+ print(f"Failed to download: {e}")
151
 
152
 
153
  def get_local_file_list(dir_path):
 
160
 
161
 
162
  def get_download_file(temp_dir, url, civitai_key, progress=gr.Progress(track_tqdm=True)):
163
+ try:
164
+ if not "http" in url and is_repo_name(url) and not Path(url).exists():
165
+ print(f"Use HF Repo: {url}")
166
+ new_file = url
167
+ elif not "http" in url and Path(url).exists():
168
+ print(f"Use local file: {url}")
169
+ new_file = url
170
+ elif Path(f"{temp_dir}/{url.split('/')[-1]}").exists():
171
+ print(f"File to download alreday exists: {url}")
172
+ new_file = f"{temp_dir}/{url.split('/')[-1]}"
173
+ else:
174
+ print(f"Start downloading: {url}")
175
+ before = get_local_file_list(temp_dir)
176
  download_thing(temp_dir, url.strip(), civitai_key)
177
+ after = get_local_file_list(temp_dir)
178
+ new_file = list_sub(after, before)[0] if list_sub(after, before) else ""
179
+ if not new_file:
180
  print(f"Download failed: {url}")
181
  return ""
182
+ print(f"Download completed: {url}")
183
+ return new_file
184
+ except Exception:
185
  print(f"Download failed: {url}")
186
  return ""
 
 
187
 
188
 
189
  def download_repo(repo_id: str, dir_path: str, progress=gr.Progress(track_tqdm=True)): # for diffusers repo