zwl committed on
Commit
f33ec8f
·
1 Parent(s): 7506484

scaled linear + bh2

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -12,7 +12,7 @@ scheduler = UniPCMultistepScheduler(
12
  prediction_type="epsilon",
13
  predict_x0=True,
14
  thresholding=False,
15
- solver_type='bh1',
16
  lower_order_final=True,
17
  disable_corrector=[0],
18
  )
@@ -64,7 +64,7 @@ else:
64
 
65
  device = "GPU 🔥" if torch.cuda.is_available() else "CPU 🥶"
66
 
67
- def inference(model_name, prompt, guidance, steps, width=512, height=512, seed=0, img=None, strength=0.5, neg_prompt=""):
68
 
69
  global current_model
70
  for model in models:
@@ -257,7 +257,7 @@ with gr.Blocks(css=css) as demo:
257
 
258
  # model_name.change(lambda x: gr.update(visible = x == models[0].name), inputs=model_name, outputs=custom_model_group)
259
 
260
- inputs = [model_name, prompt, guidance, steps, width, height, seed, image, strength, neg_prompt]
261
  prompt.submit(inference, inputs=inputs, outputs=image_out)
262
 
263
  generate.click(inference, inputs=inputs, outputs=image_out)
 
12
  prediction_type="epsilon",
13
  predict_x0=True,
14
  thresholding=False,
15
+ solver_type='bh2',
16
  lower_order_final=True,
17
  disable_corrector=[0],
18
  )
 
64
 
65
  device = "GPU 🔥" if torch.cuda.is_available() else "CPU 🥶"
66
 
67
+ def inference(model_name, prompt, guidance, steps, width=512, height=512, seed=0, img=None, strength=0.5, neg_prompt="", order=2, solver_type="bh1"):
68
 
69
  global current_model
70
  for model in models:
 
257
 
258
  # model_name.change(lambda x: gr.update(visible = x == models[0].name), inputs=model_name, outputs=custom_model_group)
259
 
260
+ inputs = [model_name, prompt, guidance, steps, width, height, seed, image, strength, neg_prompt, order, solver_type]
261
  prompt.submit(inference, inputs=inputs, outputs=image_out)
262
 
263
  generate.click(inference, inputs=inputs, outputs=image_out)