disable xformers memory efficient attention on unet
app.py CHANGED
```diff
@@ -136,16 +136,17 @@ def main(video_path, audio_path, progress=gr.Progress(track_tqdm=True)):
     unet, _ = UNet3DConditionModel.from_pretrained(
         OmegaConf.to_container(config.model),
         inference_ckpt_path,  # load checkpoint
-
+        device="cpu",
     )

     unet = unet.to(dtype=torch.float16)

+    """
     # set xformers

     if is_xformers_available():
         unet.enable_xformers_memory_efficient_attention()
-
+    """

     pipeline = LipsyncPipeline(
         vae=vae,
```
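Read as a whole, the change loads the UNet weights on the CPU (`device="cpu"`) and wraps the xformers section in a bare triple-quoted string, which Python evaluates as an expression and discards, so `unet.enable_xformers_memory_efficient_attention()` is never called. Below is a minimal, self-contained sketch of the same pattern; it is not the Space's code: a tiny `UNet2DConditionModel` stands in for `UNet3DConditionModel`, the import paths are assumptions, and `ENABLE_XFORMERS` is a hypothetical toggle added for illustration.

```python
# Sketch only: a tiny diffusers UNet2DConditionModel stands in for the
# Space's UNet3DConditionModel; ENABLE_XFORMERS is a hypothetical flag.
import torch
from diffusers import UNet2DConditionModel
from diffusers.utils.import_utils import is_xformers_available

ENABLE_XFORMERS = False  # the commit above effectively hard-codes this to "off"

# Build a small UNet on the CPU, then cast it to float16, mirroring
# `unet = unet.to(dtype=torch.float16)` in the diff.
unet = UNet2DConditionModel(
    sample_size=8,
    block_out_channels=(32, 64),
    down_block_types=("DownBlock2D", "CrossAttnDownBlock2D"),
    up_block_types=("CrossAttnUpBlock2D", "UpBlock2D"),
    cross_attention_dim=32,
)
unet = unet.to(dtype=torch.float16)

if ENABLE_XFORMERS and is_xformers_available():
    # Swap the attention processors for xformers' memory-efficient kernels.
    unet.enable_xformers_memory_efficient_attention()
```

A guard flag like this is functionally equivalent to the triple-quoted string used in the commit; either way the UNet stays on PyTorch's default attention implementation.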