Spaces:
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -107,22 +107,19 @@ def apply_style(style_name: str, positive: str, negative: str = "") -> Tuple[str
|
|
107 |
return p.replace("{prompt}", positive), n + negative
|
108 |
|
109 |
def load_and_prepare_model(model_id):
    """Load an SDXL pipeline for ``model_id`` with a bf16 VAE and an
    Euler-ancestral scheduler.

    Parameters
    ----------
    model_id : str
        Hugging Face repo id of the SDXL checkpoint to load.

    Returns
    -------
    StableDiffusionXLPipeline
        The configured pipeline (scheduler and VAE already swapped in).
    """
    # Per-model dtype overrides; anything not listed falls back to bfloat16.
    model_dtypes = {
        "ford442/RealVisXL_V5.0_BF16": torch.bfloat16,
    }
    dtype = model_dtypes.get(model_id, torch.bfloat16)  # default to bfloat16 if not found

    # bf16 VAE. NOTE(review): removed bogus `safety_checker=None` — that is a
    # StableDiffusionPipeline argument, not an AutoencoderKL one; it was ignored.
    vae = AutoencoderKL.from_pretrained("ford442/sdxl-vae-bf16", torch_dtype=torch.bfloat16)
    pipe = StableDiffusionXLPipeline.from_pretrained(
        model_id,
        torch_dtype=dtype,  # was hardcoded bfloat16, leaving `dtype` unused
        add_watermarker=False,
        use_safetensors=True,
        vae=vae,
    )
    # BUG FIX: the scheduler must be built AFTER the pipeline exists —
    # `pipe.scheduler.config` was previously referenced before `pipe` was
    # assigned, raising NameError at the first call.
    # NOTE(review): `algorithm_type` is a DPM-Solver++ option; the
    # Euler-ancestral scheduler most likely ignores it — confirm intent.
    sched = EulerAncestralDiscreteScheduler.from_config(
        pipe.scheduler.config,
        beta_schedule="scaled_linear",
        algorithm_type="sde-dpmsolver++",
    )
    pipe.scheduler = sched
    return pipe
|
127 |
|
128 |
# Preload and compile both models
|
|
|
107 |
return p.replace("{prompt}", positive), n + negative
|
108 |
|
109 |
def load_and_prepare_model(model_id):
    """Load an SDXL pipeline for ``model_id``, swap in a bf16 VAE and an
    Euler-ancestral scheduler, and move it to the GPU.

    Parameters
    ----------
    model_id : str
        Hugging Face repo id of the SDXL checkpoint to load.

    Returns
    -------
    StableDiffusionXLPipeline
        The configured pipeline, already on ``cuda``.
    """
    # Per-model dtype overrides; anything not listed falls back to bfloat16.
    model_dtypes = {
        "ford442/RealVisXL_V5.0_BF16": torch.bfloat16,
    }
    dtype = model_dtypes.get(model_id, torch.bfloat16)  # default to bfloat16 if not found

    # bf16 VAE. NOTE(review): removed bogus `safety_checker=None` — that is a
    # StableDiffusionPipeline argument, not an AutoencoderKL one; it was ignored.
    vae = AutoencoderKL.from_pretrained("ford442/sdxl-vae-bf16", torch_dtype=torch.bfloat16)
    pipe = StableDiffusionXLPipeline.from_pretrained(
        model_id,
        torch_dtype=dtype,  # was hardcoded bfloat16, leaving `dtype` unused
        add_watermarker=False,
        use_safetensors=True,
        vae=vae,
    )
    # The scheduler is derived from the loaded pipeline's config, so it must be
    # created after `pipe` exists.
    # NOTE(review): `algorithm_type` is a DPM-Solver++ option; the
    # Euler-ancestral scheduler most likely ignores it — confirm intent.
    sched = EulerAncestralDiscreteScheduler.from_config(
        pipe.scheduler.config,
        beta_schedule="scaled_linear",
        algorithm_type="sde-dpmsolver++",
    )
    pipe.scheduler = sched
    pipe.to('cuda')
    return pipe
|
124 |
|
125 |
# Preload and compile both models
|