talalif committed (verified)
Commit 44477f1 · Parent(s): 4665864

Update app2.py

Files changed (1):
  1. app2.py +9 -7
app2.py CHANGED
@@ -45,7 +45,7 @@ with open ("./MuseV/configs/model/T2I_all_model.py","r+") as scrip:
 
 from PIL import Image
 
-def add_new_image(image):
+def add_new_image(prompt,image):
 image = Image.fromarray(image)
 height = image.height
 width = image.width
@@ -58,7 +58,7 @@ def add_new_image(image):
 img_length_ratio: {lr}
 ipadapter_image: {ip_img}
 name: image
-prompt: (masterpiece, best quality, highres:1),(1person, solo:1),(eye blinks:1.8),(head wave:1.3)
+prompt: {prompt}
 refer_image: {ip_img}
 video_path: null
 width: {width}"""
@@ -89,12 +89,11 @@ def add_new_video(video):
 configs.seek(0)
 
 
-def run():
+def run(frames,fps):
 #subprocess.run(["python", "./MuseV/text2video.py", "--sd_model_name", "majicmixRealv6Fp16", "--unet_model_name", "musev", "-test_data_path", "./MuseV/configs/tasks/example.yaml", "--n_batch", "1", "--target_datas", "image", "--vae_model_path", "./MuseV/checkpoints/vae/sd-vae-ft-mse", "--motion_speed", "5.0", "--time_size", "12", "--fps", "12"])
-subprocess.run(["python", "./MuseV/text2video.py", "--sd_model_name", "majicmixRealv6Fp16", "--unet_model_name", "musev_referencenet", "--referencenet_model_name", "musev_referencenet", "--ip_adapter_model_name", "musev_referencenet", "-test_data_path", "./MuseV/configs/tasks/example.yaml", "--output_dir", "./MuseV", "--n_batch", "1", "--target_datas", "image", "--vision_clip_extractor_class_name", "ImageClipVisionFeatureExtractor", "--vision_clip_model_path", "./MuseV/checkpoints/IP-Adapter/models/image_encoder", "--motion_speed", "5.0", "--vae_model_path", "./MuseV/checkpoints/vae/sd-vae-ft-mse", "--time_size", "12", "--fps", "12"])
+subprocess.run(["python", "./MuseV/text2video.py", "--sd_model_name", "majicmixRealv6Fp16", "--unet_model_name", "musev_referencenet", "--referencenet_model_name", "musev_referencenet", "--ip_adapter_model_name", "musev_referencenet", "-test_data_path", "./MuseV/configs/tasks/example.yaml", "--output_dir", "./MuseV", "--n_batch", "1", "--target_datas", "image", "--vision_clip_extractor_class_name", "ImageClipVisionFeatureExtractor", "--vision_clip_model_path", "./MuseV/checkpoints/IP-Adapter/models/image_encoder", "--motion_speed", "5.0", "--vae_model_path", "./MuseV/checkpoints/vae/sd-vae-ft-mse", "--time_size", frames, "--fps", fps])
 return "./output.mp4"
 
-
 def run_video():
 subprocess.run(["python", "./MuseV/video2video.py", "--sd_model_name", "fantasticmix_v10", "--unet_model_name", "musev", "-test_data_path", "./MuseV/configs/tasks/example.yaml", "--output_dir", "./output", "--n_batch", "1", "--controlnet_name", "dwpose_body_hand", "--which2video", "video_middle", "--target_datas", "dance1", "--fps", "12", "--time_size", "12"])
 return "./output.mp4"
@@ -104,10 +103,13 @@ with gr.Blocks() as demo:
 subtitle1 = gr.Markdown("""Image2Vid""")
 image = gr.Image()
 button1 = gr.Button(value="Save Image")
-button1.click(fn=add_new_image,inputs=[image])
+frames = gr.Number(value=12)
+fps = gr.Number(value=12)
+prompt = gr.Text(value="(masterpiece, best quality, highres:1),(1person, solo:1),(eye blinks:1.8),(head wave:1.3)")
+button1.click(fn=add_new_image,inputs=[prompt,image])
 button2 = gr.Button(value="Generate Img2Vid")
 video = gr.Video()
-button2.click(fn=run,outputs=video)
+button2.click(fn=run,inputs=[frames,fps],outputs=video)
 subtitle2 = gr.Markdown("""Vid2Vid""")
 video_in = gr.Video()
 button3 = gr.Button(value="Save Video")
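
Note on the new run(frames,fps) signature: gr.Number components pass their values to the callback as Python floats, while every element of the list given to subprocess.run() must be a string, so the frames and fps values would need to be cast before being spliced into the command. A minimal sketch of that cast (an illustration only, not part of the commit; the elided flags stand for the same ones shown in the diff above):

    def run(frames, fps):
        # gr.Number yields floats (e.g. 12.0); the CLI expects string arguments
        frames = str(int(frames))
        fps = str(int(fps))
        subprocess.run(["python", "./MuseV/text2video.py",
                        # ... same flags as in the diff above ...
                        "--time_size", frames, "--fps", fps])
        return "./output.mp4"

With such a cast in place, button2.click(fn=run,inputs=[frames,fps],outputs=video) feeds the two numeric fields into the command and displays the returned ./output.mp4 path in the gr.Video component.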