Update app.py
app.py CHANGED
@@ -11,17 +11,13 @@ def download_model(repo_id, model_name):
 def run_inference(model_name, prompt_path):
     repo_id = "hpcai-tech/Open-Sora"
 
-    # Map model names to their respective configuration files
     config_mapping = {
         "OpenSora-v1-16x256x256.pth": "configs/opensora/inference/16x256x256.py",
         "OpenSora-v1-HQ-16x256x256.pth": "configs/opensora/inference/16x512x512.py",
         "OpenSora-v1-HQ-16x512x512.pth": "configs/opensora/inference/64x512x512.py"
     }
 
-    # Get the configuration path based on the model name
     config_path = config_mapping[model_name]
-
-    # Download the selected model
     ckpt_path = download_model(repo_id, model_name)
 
     with open(config_path, 'r') as file:
@@ -37,14 +33,12 @@ def run_inference(model_name, prompt_path):
         "scripts/inference.py", temp_config_path,
         "--ckpt-path", ckpt_path
     ]
-
-
+    subprocess.run(cmd, capture_output=True, text=True)
     shutil.rmtree(temp_file.name)
 
-
-
-
-    return "Error occurred:", result.stderr
+    # Assuming the output video is saved in a known path, e.g., "./output/video.mp4"
+    output_video_path = "./output/video.mp4"
+    return output_video_path
 
 def main():
     gr.Interface(
@@ -57,10 +51,7 @@ def main():
             ], label="Model Selection"),
             gr.Textbox(label="Prompt Path", value="./assets/texts/t2v_samples.txt")
         ],
-        outputs=[
-            gr.Text(label="Status"),
-            gr.Text(label="Output")
-        ],
+        outputs=gr.Video(label="Output Video"),
         title="Open-Sora Inference",
         description="Run Open-Sora Inference with Custom Parameters"
     ).launch()
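
After this change, run_inference launches the inference script but never inspects the result of subprocess.run, and the hard-coded "./output/video.mp4" path is returned whether or not the run succeeded. A minimal sketch of how the captured result could be checked before handing the path to the gr.Video output; run_and_check is a hypothetical helper and the error handling is an assumption, not part of the commit:

import subprocess

def run_and_check(cmd, output_video_path="./output/video.mp4"):
    # Run the inference command and capture its output so failures are visible.
    result = subprocess.run(cmd, capture_output=True, text=True)
    # Raise instead of returning a path that may not exist; Gradio reports
    # exceptions raised inside the handler as errors in the UI.
    if result.returncode != 0:
        raise RuntimeError(f"Inference failed:\n{result.stderr}")
    # Path assumed by the commit; adjust to wherever scripts/inference.py
    # actually writes its output.
    return output_video_path

With outputs=gr.Video(label="Output Video"), the value returned by the handler is expected to be a path to a playable video file, which is why the function now returns a single string rather than the old ("Error occurred:", result.stderr) pair.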