kadirnar committed
Commit d3f4bbe · verified · 1 Parent(s): b117894

Update app.py

Files changed (1)
  1. app.py +9 -4
app.py CHANGED
@@ -4,11 +4,14 @@ import subprocess
 import tempfile
 import shutil
 import os
+import spaces
 
 def download_model(repo_id, model_name):
     model_path = hf_hub_download(repo_id=repo_id, filename=model_name)
     return model_path
 
+
+@spaces.GPU
 def run_inference(model_name, prompt_text):
     repo_id = "hpcai-tech/Open-Sora"
 
@@ -32,7 +35,6 @@ def run_inference(model_name, prompt_text):
         config_content = file.read()
         config_content = config_content.replace('prompt_path = "./assets/texts/t2v_samples.txt"', f'prompt_path = "{prompt_file.name}"')
 
-    # Create a temporary file for the updated configuration
     with tempfile.NamedTemporaryFile('w', delete=False) as temp_file:
         temp_file.write(config_content)
         temp_config_path = temp_file.name
@@ -49,9 +51,12 @@ def run_inference(model_name, prompt_text):
     os.remove(prompt_file.name)
 
     if result.returncode == 0:
-        return "Inference completed successfully.", result.stdout
+        # Assuming the output video is saved at a known location, for example "./output/video.mp4"
+        output_video_path = "./output/video.mp4"
+        return output_video_path
     else:
-        return "Error occurred:", result.stderr
+        print("Error occurred:", result.stderr)
+        return None # You might want to handle errors differently
 
 def main():
     gr.Interface(
@@ -64,7 +69,7 @@ def main():
             ], label="Model Selection"),
             gr.Textbox(label="Prompt Text", placeholder="Enter prompt text here")
         ],
-        outputs="text",
+        outputs=gr.Video(label="Output Video"),
         title="Open-Sora Inference",
         description="Run Open-Sora Inference with Custom Parameters",
     ).launch()
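
Taken together, the changes wire the Space up for video output on ZeroGPU: `import spaces` plus the `@spaces.GPU` decorator request a GPU for each call to `run_inference`, the function now returns a video file path instead of stdout text, and the interface output switches from "text" to `gr.Video`. Below is a minimal, self-contained sketch of that pattern; the subprocess command, the dropdown choices, and the `./output/video.mp4` path are assumptions taken from or standing in for parts of the app that are outside these hunks.

# Minimal sketch of the pattern this commit moves to (assumptions noted inline).
import subprocess

import gradio as gr
import spaces


@spaces.GPU  # on ZeroGPU Spaces, allocates a GPU for the duration of each call
def run_inference(model_name, prompt_text):
    # Placeholder command: the real app rewrites an Open-Sora config and shells
    # out to its inference script, which is not shown in these hunks.
    result = subprocess.run(["echo", f"{model_name}: {prompt_text}"],
                            capture_output=True, text=True)
    if result.returncode == 0:
        # gr.Video accepts a filepath string; this mirrors the hard-coded path in the diff.
        return "./output/video.mp4"
    print("Error occurred:", result.stderr)
    return None  # an empty video component is shown when None is returned


def main():
    gr.Interface(
        fn=run_inference,
        inputs=[
            # The actual dropdown choices are outside the diff context; this entry is a placeholder.
            gr.Dropdown(choices=["example-checkpoint.pth"], label="Model Selection"),
            gr.Textbox(label="Prompt Text", placeholder="Enter prompt text here"),
        ],
        outputs=gr.Video(label="Output Video"),
        title="Open-Sora Inference",
        description="Run Open-Sora Inference with Custom Parameters",
    ).launch()


if __name__ == "__main__":
    main()

Note that the success path simply assumes the inference script writes its result to the fixed output location named in the diff; a more robust version would derive the save path from the config or from the script's output instead of hard-coding it.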