Update app.py
app.py CHANGED
@@ -115,7 +115,7 @@ def load_and_prepare_model(model_id):
     #pipeX = StableDiffusionXLPipeline.from_pretrained("ford442/Juggernaut-XI-v11-fp32",torch_dtype=torch.float32)
     pipe = StableDiffusionXLPipeline.from_pretrained(
         model_id,
-
+        torch_dtype=torch.bfloat16,
         add_watermarker=False,
         # use_safetensors=True,
         # vae=AutoencoderKL.from_pretrained("BeastHF/MyBack_SDXL_Juggernaut_XL_VAE/MyBack_SDXL_Juggernaut_XL_VAE_V10(version_X).safetensors",repo_type='model',safety_checker=None),
@@ -137,7 +137,7 @@ def load_and_prepare_model(model_id):
     #pipe.unet.to(torch.bfloat16)
     pipe.to(device)
     #pipe.vae.to(torch.bfloat16)
-    pipe.to(torch.bfloat16)
+    #pipe.to(torch.bfloat16)
     #pipe.to(device, torch.bfloat16)
     del pipeX
     #sched = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear", algorithm_type="dpmsolver++")
@@ -222,7 +222,7 @@ def generate_30(
         f.write(f"Steps: {num_inference_steps} \n")
         f.write(f"Guidance Scale: {guidance_scale} \n")
         f.write(f"SPACE SETUP: \n")
-        f.write(f"Use Model Dtype:
+        f.write(f"Use Model Dtype: yes \n")
         f.write(f"Model Scheduler: Euler_a custom before cuda \n")
         f.write(f"Model VAE: default \n")
         f.write(f"Model UNET: default \n")
@@ -288,7 +288,7 @@ def generate_60(
         f.write(f"Steps: {num_inference_steps} \n")
         f.write(f"Guidance Scale: {guidance_scale} \n")
         f.write(f"SPACE SETUP: \n")
-        f.write(f"Use Model Dtype:
+        f.write(f"Use Model Dtype: yes \n")
         f.write(f"Model Scheduler: Euler_a custom before cuda \n")
         f.write(f"Model VAE: default \n")
         f.write(f"Model UNET: default \n")
@@ -354,7 +354,7 @@ def generate_90(
         f.write(f"Steps: {num_inference_steps} \n")
         f.write(f"Guidance Scale: {guidance_scale} \n")
         f.write(f"SPACE SETUP: \n")
-        f.write(f"Use Model Dtype:
+        f.write(f"Use Model Dtype: yes \n")
         f.write(f"Model Scheduler: Euler_a custom before cuda \n")
         f.write(f"Model VAE: default \n")
         f.write(f"Model UNET: default \n")
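
The substance of this commit is the dtype change in load_and_prepare_model: instead of loading the SDXL pipeline and casting it afterwards with pipe.to(torch.bfloat16), the dtype is now passed directly to from_pretrained, and the post-load cast is commented out. The remaining hunks only update the settings log written by generate_30/generate_60/generate_90. Below is a minimal sketch of the new loading pattern, assuming the surrounding code matches the snippets above; the checkpoint name and device string are placeholders, not values taken from this Space.

import torch
from diffusers import StableDiffusionXLPipeline

# Placeholder checkpoint; the Space passes its own model_id into
# load_and_prepare_model().
model_id = "stabilityai/stable-diffusion-xl-base-1.0"

# New behaviour in this commit: the weights are materialized in bfloat16
# at load time via torch_dtype, matching the added line at 118.
pipe = StableDiffusionXLPipeline.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    add_watermarker=False,
)

# Move to the GPU; the post-load cast pipe.to(torch.bfloat16) from the old
# version (line 140) is no longer needed.
pipe.to("cuda")

Passing torch_dtype at load time generally keeps peak memory lower than a post-load cast, since a full-precision copy of the weights never has to be resident before conversion.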