Ryukijano committed (verified)
Commit 70eea75 · Parent(s): fabdc5a

Update app.py

Files changed (1): app.py (+10 -2)
app.py CHANGED
@@ -35,11 +35,19 @@ pipe.set_adapters(["better"], adapter_weights=[1.0])
 pipe.fuse_lora(adapter_name=["better"], lora_scale=1.0)
 pipe.unload_lora_weights()
 
-# Corrected: Access 'transformer' instead of 'unet'
+# Correctly set memory format
 pipe.transformer.to(memory_format=torch.channels_last)
 pipe.vae.to(memory_format=torch.channels_last)
 
-pipe.enable_xformers_memory_efficient_attention()
+# Conditionally enable xformers only for the transformer
+if hasattr(pipe, "transformer") and torch.cuda.is_available():
+    try:
+        pipe.transformer.enable_xformers_memory_efficient_attention()
+    except Exception as e:
+        print(
+            "Warning: Could not enable xformers for the transformer due to the following error:"
+        )
+        print(e)
 
 torch.cuda.empty_cache()
 
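For context, the guarded block in this hunk can be exercised on its own. The sketch below is a reconstruction under stated assumptions, not the full app.py: the checkpoint id and dtype are placeholders, the LoRA setup shown in the hunk context is omitted, and a transformer-based pipeline (one exposing `pipe.transformer`, e.g. Flux/SD3-style) is assumed. It mirrors the diff's logic: set channels_last on the transformer and VAE, then attempt the module-level xformers call and degrade gracefully, since the pipeline-wide `enable_xformers_memory_efficient_attention()` that this commit removes can fail on backbones whose attention processors have no xformers path.

import torch
from diffusers import DiffusionPipeline

# Hypothetical checkpoint id for illustration; app.py's actual model
# is not shown in this diff.
MODEL_ID = "black-forest-labs/FLUX.1-dev"

pipe = DiffusionPipeline.from_pretrained(MODEL_ID, torch_dtype=torch.bfloat16)
if torch.cuda.is_available():
    pipe = pipe.to("cuda")

# channels_last mainly benefits convolutional modules (e.g. the VAE);
# assumes the pipeline exposes `transformer` and `vae`, as in the diff.
pipe.transformer.to(memory_format=torch.channels_last)
pipe.vae.to(memory_format=torch.channels_last)

# Enable xformers on the transformer module only, tolerating failure:
# ModelMixin subclasses expose enable_xformers_memory_efficient_attention(),
# but not every attention processor supports the xformers path.
if hasattr(pipe, "transformer") and torch.cuda.is_available():
    try:
        pipe.transformer.enable_xformers_memory_efficient_attention()
    except Exception as e:
        print("Warning: could not enable xformers for the transformer:")
        print(e)

torch.cuda.empty_cache()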