Spaces:
Runtime error
Commit 7bded4c · 1 Parent(s): f4db27b
Update app.py
app.py CHANGED
@@ -5,6 +5,10 @@ import torch
 from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
 from diffusers.utils import export_to_video
 from base64 import b64encode
+import torch
+
+device = "cpu"  # Force CPU usage
+
 
 # Load pipeline (outside the function for efficiency)
 pipe = DiffusionPipeline.from_pretrained("damo-vilab/text-to-video-ms-1.7b", torch_dtype=torch.float16, variant="fp16")
@@ -15,7 +19,7 @@ pipe.enable_vae_slicing()
 def Generate_video(prompt, video_duration_seconds):
     num_frames = video_duration_seconds * 10
     video_frames = pipe(prompt=prompt, negative_prompt="low quality",
-
+                        num_inference_steps=25, num_frames=num_frames, device=device).frames
     video_path = export_to_video(video_frames)  # Assuming you have this function defined
     return video_path
 
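The hunks above only cover the touched region of app.py. Below is a minimal sketch of how the relevant part of the file could look after this commit; it is a reconstruction under stated assumptions, not the Space's exact code. The DPMSolverMultistepScheduler setup and pipe.enable_vae_slicing() call are inferred from the hunk headers, the model is moved to the CPU with pipe.to(device) instead of the commit's device= call argument (which is not a documented parameter of the diffusers pipeline call), and float32 is used because fp16 weights generally do not run on CPU.

import torch
from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
from diffusers.utils import export_to_video

device = "cpu"  # Force CPU usage, as in the commit

# Load pipeline (outside the function for efficiency)
pipe = DiffusionPipeline.from_pretrained(
    "damo-vilab/text-to-video-ms-1.7b",
    torch_dtype=torch.float32,  # assumption: float32 for CPU; the commit loads float16 / variant="fp16"
)
# Inferred from the hunk headers; the exact scheduler setup lines are not shown in the diff.
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
pipe.enable_vae_slicing()
pipe.to(device)  # assumption: device applied here rather than passed to pipe(...)


def Generate_video(prompt, video_duration_seconds):
    num_frames = int(video_duration_seconds * 10)  # 10 frames per requested second, as in the commit
    video_frames = pipe(
        prompt=prompt,
        negative_prompt="low quality",
        num_inference_steps=25,
        num_frames=num_frames,
    ).frames  # note: on recent diffusers versions .frames is batched, so .frames[0] may be needed
    video_path = export_to_video(video_frames)  # writes an .mp4 to a temp file and returns its path
    return video_path

For example, Generate_video("an astronaut riding a horse", 2) would run 25 denoising steps for 20 frames and return the path of the exported clip.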