linoyts committed (verified)
Commit: e57b7d5
Parent(s): c4018f6

Update app.py

Files changed (1)
  1. app.py +6 -7
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 import numpy as np
 import random
-
+from PIL import Image
 import spaces
 import torch
 from huggingface_hub import hf_hub_download
@@ -28,6 +28,8 @@ pipe_prior_redux = FluxPriorReduxPipeline.from_pretrained(
     torch_dtype=torch.bfloat16
 ).to("cuda")
 
+examples = [[Image.open("mona_lisa.jpg"), "pink hair, at the beach", "", "", 0.35]]
+
 @spaces.GPU
 def infer(control_image, prompt, image_2, prompt_2, reference_scale= 0.03 ,
           prompt_embeds_scale_1 =1, prompt_embeds_scale_2 =1, pooled_prompt_embeds_scale_1 =1, pooled_prompt_embeds_scale_2 =1,
@@ -180,16 +182,13 @@ Hyper FLUX 8 Steps LoRA](https://huggingface.co/ByteDance/Hyper-SD)
         num_inference_steps = gr.Slider(
             label="Number of inference steps",
             minimum=1,
-            maximum=50,
+            maximum=30,
             step=1,
-            value=28,
+            value=8,
         )
 
     gr.Examples(
-        examples=[
-            ["mona_lisa.jpg", "pink hair, at the beach", "", "", 0.35],
-
-        ],
+        examples=examples,
         inputs=[input_image, prompt, image_2, prompt_2, reference_scale],
         outputs=[result],
         fn=infer,
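
For reference, below is a minimal, self-contained sketch of the pattern this commit moves to: the example image is opened once at module level with PIL and the preloaded list is passed to gr.Examples, while the inference-steps slider is capped at 30 with a default of 8 (matching the 8-step Hyper FLUX LoRA). The stub infer() and the component types and labels other than the sliders are placeholder assumptions, not the Space's real pipeline code.

# Minimal sketch of the gr.Examples pattern adopted in this commit.
# NOTE: infer() is a stub; the real app.py runs the FLUX Redux pipelines on GPU,
# and component labels here are illustrative, not taken from the Space.
import gradio as gr
from PIL import Image

# Open the example image once at import time instead of listing a bare path
# inline inside gr.Examples; assumes mona_lisa.jpg sits next to this script.
examples = [[Image.open("mona_lisa.jpg"), "pink hair, at the beach", "", "", 0.35]]

def infer(control_image, prompt, image_2, prompt_2, reference_scale=0.03):
    # Placeholder: echo the first image back instead of running diffusion.
    return control_image

with gr.Blocks() as demo:
    input_image = gr.Image(label="Image 1", type="pil")
    prompt = gr.Textbox(label="Prompt 1")
    image_2 = gr.Image(label="Image 2 (optional)", type="pil")
    prompt_2 = gr.Textbox(label="Prompt 2 (optional)")
    reference_scale = gr.Slider(label="Reference scale", minimum=0.0, maximum=1.0, value=0.03)
    num_inference_steps = gr.Slider(
        label="Number of inference steps",
        minimum=1,
        maximum=30,  # lowered from 50 in this commit
        step=1,
        value=8,     # default matches the 8-step Hyper FLUX LoRA
    )
    result = gr.Image(label="Result")

    gr.Examples(
        examples=examples,
        inputs=[input_image, prompt, image_2, prompt_2, reference_scale],
        outputs=[result],
        fn=infer,
        cache_examples=False,  # keep the stub from running at startup
    )

if __name__ == "__main__":
    demo.launch()

Preloading the image with PIL also means a broken path fails loudly at import time rather than when a user clicks the example row.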