mrcuddle committed on
Commit
e2a74c9
·
verified ·
1 Parent(s): 73f5f53

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -3,7 +3,9 @@ import torch
3
  from diffusers import I2VGenXLPipeline
4
  from diffusers.utils import export_to_gif, load_image
5
  import tempfile
 
6
 
 
7
  def initialize_pipeline(device):
8
  # Initialize the pipeline with CUDA support
9
  pipeline = I2VGenXLPipeline.from_pretrained("ali-vilab/i2vgen-xl", torch_dtype=torch.float16, variant="fp16")
@@ -12,7 +14,6 @@ def initialize_pipeline(device):
12
 
13
  def generate_gif(prompt, image, negative_prompt, num_inference_steps, guidance_scale, seed):
14
  # Check if CUDA is available and set the device
15
- device = "cuda" if torch.cuda.is_available() else "cpu"
16
 
17
  # Initialize the pipeline within the function
18
  pipeline = initialize_pipeline(device)
@@ -65,7 +66,7 @@ with gr.Blocks() as demo:
65
 
66
  text_generate_button.click(
67
  fn=generate_gif,
68
- inputs=[text_prompt, None, text_negative_prompt, text_num_inference_steps, text_guidance_scale, text_seed],
69
  outputs=text_output_video
70
  )
71
 
 
3
  from diffusers import I2VGenXLPipeline
4
  from diffusers.utils import export_to_gif, load_image
5
  import tempfile
6
+ import spaces
7
 
8
+ @spaces.GPU
9
  def initialize_pipeline(device):
10
  # Initialize the pipeline with CUDA support
11
  pipeline = I2VGenXLPipeline.from_pretrained("ali-vilab/i2vgen-xl", torch_dtype=torch.float16, variant="fp16")
 
14
 
15
  def generate_gif(prompt, image, negative_prompt, num_inference_steps, guidance_scale, seed):
16
  # Check if CUDA is available and set the device
 
17
 
18
  # Initialize the pipeline within the function
19
  pipeline = initialize_pipeline(device)
 
66
 
67
  text_generate_button.click(
68
  fn=generate_gif,
69
+ inputs=[text_prompt, gr.Image(type="filepath", label="Input Image (optional)", visible=False), text_negative_prompt, text_num_inference_steps, text_guidance_scale, text_seed],
70
  outputs=text_output_video
71
  )
72