Spaces: Runtime error

Upload app.py

app.py CHANGED
@@ -40,7 +40,7 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
     with gr.Row():
         run_button = gr.Button("Generate Image", variant="primary", scale=6)
         random_button = gr.Button("Random Model π²", variant="secondary", scale=3)
-        stop_button = gr.Button('Stop', interactive=False, variant="stop", scale=1)
+        #stop_button = gr.Button('Stop', interactive=False, variant="stop", scale=1)
     with gr.Group():
         model_name = gr.Dropdown(label="Select Model", choices=list(loaded_models.keys()), value=list(loaded_models.keys())[0], allow_custom_value=True)
         model_info = gr.Markdown(value=get_model_info_md(list(loaded_models.keys())[0]), elem_classes="model_info")
@@ -86,14 +86,14 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
         fn=lambda i, n, m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4: infer_fn(m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4) if (i < n) else None,
         inputs=[img_i, image_num, model_name, prompt, neg_prompt, height, width, steps, cfg, seed,
                 positive_prefix, positive_suffix, negative_prefix, negative_suffix],
-        outputs=[o], queue=
+        outputs=[o], queue=False, show_api=False) # Be sure to delete ", queue=False" when activating the stop button
     gen_event2 = gr.on(triggers=[random_button.click],
         fn=lambda i, n, m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4: infer_rand_fn(m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4) if (i < n) else None,
         inputs=[img_i, image_num, model_name, prompt, neg_prompt, height, width, steps, cfg, seed,
                 positive_prefix, positive_suffix, negative_prefix, negative_suffix],
-        outputs=[o], queue=
+        outputs=[o], queue=False, show_api=False) # Be sure to delete ", queue=False" when activating the stop button
     o.change(save_gallery, [o, results], [results, image_files], show_api=False)
-    stop_button.click(lambda: gr.update(interactive=False), None, stop_button, cancels=[gen_event, gen_event2], show_api=False)
+    #stop_button.click(lambda: gr.update(interactive=False), None, stop_button, cancels=[gen_event, gen_event2], show_api=False)
 
     clear_prompt.click(lambda: None, None, [prompt], queue=False, show_api=False)
     clear_results.click(lambda: (None, None), None, [results, image_files], queue=False, show_api=False)
@@ -102,3 +102,4 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
 
 demo.queue(default_concurrency_limit=200, max_size=200)
 demo.launch(max_threads=400)
+# https://github.com/gradio-app/gradio/issues/6339
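Aside on the change above: the Stop button is commented out and queue=False is added because Gradio's cancels= argument can generally only interrupt events that run through the queue, so queue=False and a working Stop button cannot coexist (the Gradio issue linked at the end of the diff appears related). Below is a minimal, hedged sketch of that stop/cancel pattern, not this Space's code: slow_generate and the component names are invented stand-ins for infer_fn and the real UI.

import time
import gradio as gr

# Hypothetical stand-in for the Space's infer_fn; yields intermediate text so
# there is something long-running to cancel.
def slow_generate(prompt):
    for step in range(10):
        time.sleep(1)  # simulate one diffusion step
        yield f"{prompt}: step {step + 1}/10"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Textbox(label="Output")
    run_button = gr.Button("Generate", variant="primary")
    stop_button = gr.Button("Stop", variant="stop")

    # The generation event must go through the queue (enabled by demo.queue()
    # below) for cancels= to be able to interrupt it.
    gen_event = run_button.click(slow_generate, inputs=prompt, outputs=output)

    # Stop cancels the running generation event.
    stop_button.click(lambda: None, None, None, cancels=[gen_event], show_api=False)

demo.queue()
demo.launch()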