Update app.py
app.py CHANGED
@@ -223,7 +223,7 @@ def scheduler_swap_callback(pipeline, step_index, timestep, callback_kwargs):
     torch.set_float32_matmul_precision("highest")
     #callback_kwargs["latents"] = callback_kwargs["latents"].to(torch.bfloat16)
     #pipe.unet.to(torch.float64)
-
+    pipeline.unet.set_default_attn_processor() ## custom ##
     # pipe.vae = vae_a
     # pipe.unet = unet_a
     torch.backends.cudnn.deterministic = False
@@ -263,7 +263,7 @@ def uploadNote(prompt,num_inference_steps,guidance_scale,timestamp):
 
 '''
 
-pyx = cyper.inline(code, fast_indexing=True)
+pyx = cyper.inline(code, fast_indexing=True, directives=dict(boundscheck=False, wraparound=False, language_level=3))
 
 @spaces.GPU(duration=30)
 def generate_30(
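The first hunk adds a per-step reset of the UNet attention processors inside the denoising callback. A minimal sketch of how such a callback plugs into a diffusers pipeline, assuming an SDXL checkpoint and a CUDA device; the model id and prompt below are illustrative, not taken from this Space:

```python
import torch
from diffusers import StableDiffusionXLPipeline

# Assumed checkpoint; any pipeline with a UNet2DConditionModel behaves the same.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.bfloat16
).to("cuda")

def scheduler_swap_callback(pipeline, step_index, timestep, callback_kwargs):
    # Runs once per denoising step. Resetting the attention processors drops
    # any custom/fused processors and falls back to the library default.
    torch.set_float32_matmul_precision("highest")
    pipeline.unet.set_default_attn_processor()
    torch.backends.cudnn.deterministic = False
    # The callback must hand its kwargs dict back to the pipeline.
    return callback_kwargs

image = pipe(
    "a photo of an astronaut riding a horse",
    num_inference_steps=30,
    callback_on_step_end=scheduler_swap_callback,
).images[0]
```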
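The second hunk passes explicit Cython compiler directives to cyper.inline, disabling bounds and wraparound checks and forcing Python-3 semantics. A minimal sketch of that call with a standalone snippet; the sum_arrays kernel is illustrative, not the Space's actual code string:

```python
import cyper
import numpy as np

code = r'''
def sum_arrays(double[:] a, double[:] b):
    # Sum of the element-wise totals over typed memoryviews; with
    # boundscheck/wraparound disabled, indexing compiles to bare C access.
    cdef Py_ssize_t i, n = a.shape[0]
    cdef double total = 0.0
    for i in range(n):
        total += a[i] + b[i]
    return total
'''

# Mirrors the change in this commit: fast_indexing plus explicit directives.
pyx = cyper.inline(
    code,
    fast_indexing=True,
    directives=dict(boundscheck=False, wraparound=False, language_level=3),
)

a = np.arange(4, dtype=np.float64)
b = np.ones(4)
print(pyx.sum_arrays(a, b))  # -> 10.0
```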