Update app.py
app.py CHANGED
@@ -116,14 +116,15 @@ def load_and_prepare_model(model_id):
         add_watermarker=False,
         use_safetensors=True,
         # vae=AutoencoderKL.from_pretrained("BeastHF/MyBack_SDXL_Juggernaut_XL_VAE/MyBack_SDXL_Juggernaut_XL_VAE_V10(version_X).safetensors",repo_type='model',safety_checker=None),
-        vae=AutoencoderKL.from_pretrained("stabilityai/sdxl-vae",repo_type='model',safety_checker=None).to(torch.bfloat16),
+        #vae=AutoencoderKL.from_pretrained("stabilityai/sdxl-vae",repo_type='model',safety_checker=None).to(torch.bfloat16),
+        vae=AutoencoderKL.from_pretrained("ford442/sdxl-vae-bf16", torch_dtype=torch.bfloat16,repo_type='model',safety_checker=None),
         #scheduler=EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012, steps_offset =1)
     )
     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012, steps_offset=1)
     #pipe.to('cuda')
     # pipe.scheduler=EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
 
-
+    # pipe.unet = pipeX.unet
     pipe.to(device)
     #pipe.to(dtype=torch.bfloat16)
 
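For context, the net effect of this hunk is to stop loading the fp32 "stabilityai/sdxl-vae" and casting it to bfloat16 afterwards, and instead pull a VAE whose weights are already stored in bf16 ("ford442/sdxl-vae-bf16"). A minimal standalone sketch of the resulting load path follows; the base checkpoint name and the device-selection line are assumptions for illustration, not taken from this Space's code, and the stray repo_type/safety_checker kwargs from the diff are dropped since AutoencoderKL does not use them.

# Sketch (assumptions: base checkpoint and device selection are illustrative)
import torch
from diffusers import AutoencoderKL, StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

device = "cuda" if torch.cuda.is_available() else "cpu"
model_id = "stabilityai/stable-diffusion-xl-base-1.0"  # placeholder checkpoint

# Load the VAE directly in bfloat16 from the repo referenced in the diff,
# instead of loading fp32 weights and calling .to(torch.bfloat16) afterwards.
vae = AutoencoderKL.from_pretrained(
    "ford442/sdxl-vae-bf16",
    torch_dtype=torch.bfloat16,
)

pipe = StableDiffusionXLPipeline.from_pretrained(
    model_id,
    vae=vae,
    add_watermarker=False,
    use_safetensors=True,
)

# Same scheduler override as in the diff.
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(
    pipe.scheduler.config,
    beta_schedule="scaled_linear",
    beta_start=0.00085,
    beta_end=0.012,
    steps_offset=1,
)

pipe.to(device)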