ovi054 committed
Commit 85f6fcb · verified · 1 Parent(s): 188c227

Update app.py

Files changed (1): app.py (+4 -2)
app.py CHANGED
@@ -1,7 +1,8 @@
 import torch
 from diffusers import UniPCMultistepScheduler
 from diffusers import WanPipeline, AutoencoderKLWan # Use Wan-specific VAE
-from diffusers.hooks import apply_first_block_cache, FirstBlockCacheConfig
+# from diffusers.hooks import apply_first_block_cache, FirstBlockCacheConfig
+from para_attn.first_block_cache.diffusers_adapters import apply_cache_on_pipe
 from diffusers.models import UNetSpatioTemporalConditionModel
 from transformers import T5EncoderModel, T5Tokenizer
 
@@ -24,7 +25,8 @@ def generate(prompt, negative_prompt, width=1024, height=1024, num_inference_ste
     pipe.unload_lora_weights()
     pipe.load_lora_weights(lora_id.strip())
     pipe.to("cuda")
-    apply_first_block_cache(pipe.transformer, FirstBlockCacheConfig(threshold=0.2))
+    # apply_first_block_cache(pipe.transformer, FirstBlockCacheConfig(threshold=0.2))
+    apply_cache_on_pipe(pipe)
     try:
         output = pipe(
             prompt=prompt,
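
For context, this change swaps diffusers' experimental first-block cache hooks (apply_first_block_cache / FirstBlockCacheConfig, applied to pipe.transformer) for ParaAttention's pipeline-level adapter, which is applied to the whole pipeline and left at its defaults. Below is a minimal, self-contained sketch of how the new call could be wired up; the model id, the residual_diff_threshold keyword, and the 0.2 value (carried over from the old FirstBlockCacheConfig) are assumptions for illustration, not part of this commit.

```python
# Minimal sketch of the new caching setup (assumptions noted; not part of this commit).
import torch
from diffusers import WanPipeline, AutoencoderKLWan  # Wan-specific VAE

# ParaAttention's first-block-cache adapter, as imported in the diff above.
from para_attn.first_block_cache.diffusers_adapters import apply_cache_on_pipe

model_id = "Wan-AI/Wan2.1-T2V-1.3B-Diffusers"  # assumed model id, for illustration only

# The Wan VAE is typically loaded in float32 for stability; the pipeline in bfloat16.
vae = AutoencoderKLWan.from_pretrained(model_id, subfolder="vae", torch_dtype=torch.float32)
pipe = WanPipeline.from_pretrained(model_id, vae=vae, torch_dtype=torch.bfloat16)
pipe.to("cuda")

# Caching is applied to the whole pipeline rather than to pipe.transformer.
# residual_diff_threshold=0.2 mirrors the old FirstBlockCacheConfig(threshold=0.2);
# the keyword and value are assumptions here, since the commit itself calls
# apply_cache_on_pipe(pipe) with the adapter's defaults.
apply_cache_on_pipe(pipe, residual_diff_threshold=0.2)

# Standard text-to-video call; first-block caching skips transformer blocks on
# steps whose first-block residual changes little, trading some quality for speed.
frames = pipe(
    prompt="a cat walking on grass",
    negative_prompt="blurry, low quality",
    num_inference_steps=30,
).frames[0]
```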