Spaces: Running on Zero
Upload app.py
app.py CHANGED
@@ -75,7 +75,7 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
 
         with gr.Accordion("Advanced Settings", open=True):
             task = gr.Dropdown(label="Task", choices=SDXL_TASK, value=TASK_MODEL_LIST[0])
-            with gr.Tab("
+            with gr.Tab("Generation Settings"):
                 with gr.Row():
                     negative_prompt = gr.Text(label="Negative prompt", lines=1, max_lines=6, placeholder="Enter a negative prompt", show_copy_button=True,
                                               value="(low quality, worst quality:1.2), very displeasing, watermark, signature, ugly")
@@ -87,12 +87,12 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
                     model_info = gr.Markdown(elem_classes="info")
                     with gr.Column(scale=1):
                         model_detail = gr.Checkbox(label="Show detail of model in list", value=False)
-                with gr.
-
-
+                with gr.Accordion("Prompt Settings", open=False):
+                    with gr.Row():
+                        quality_selector = gr.Radio(label="Quality Tag Presets", interactive=True, choices=list(preset_quality.keys()), value="None", scale=3)
+                        style_selector = gr.Radio(label="Style Presets", interactive=True, choices=list(preset_styles.keys()), value="None", scale=3)
                         recom_prompt = gr.Checkbox(label="Recommended prompt", value=True, scale=1)
-
-            with gr.Tab("Generation Settings"):
+                        prompt_syntax = gr.Dropdown(label="Prompt Syntax", choices=PROMPT_W_OPTIONS, value=PROMPT_W_OPTIONS[1][1])
                 with gr.Row():
                     seed = gr.Slider(label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=0)
                     randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
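Note on the new "Prompt Settings" accordion: quality_selector and style_selector default to "None" and expose the keys of the preset_quality and preset_styles dicts that app.py defines elsewhere. Below is a minimal sketch of how such preset radios are typically folded into the prompt before generation; the apply_presets helper, and the assumption that the preset dicts map a preset name to a plain tag string, are illustrative and not taken from this commit.

    def apply_presets(prompt: str, quality: str, style: str) -> str:
        """Append preset tags to the user prompt; "None" leaves it unchanged."""
        # Assumed shape: preset_quality / preset_styles map a preset name to a tag string.
        if quality and quality != "None":
            prompt = f"{prompt}, {preset_quality[quality]}"
        if style and style != "None":
            prompt = f"{prompt}, {preset_styles[style]}"
        return prompt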
@@ -112,7 +112,10 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
                     schedule_type = gr.Dropdown(label="Schedule type", choices=SCHEDULE_TYPE_OPTIONS, value=SCHEDULE_TYPE_OPTIONS[0])
                     schedule_prediction_type = gr.Dropdown(label="Discrete Sampling Type", choices=SCHEDULE_PREDICTION_TYPE_OPTIONS, value=SCHEDULE_PREDICTION_TYPE_OPTIONS[0])
                     vae_model = gr.Dropdown(label="VAE Model", choices=get_vaes(), value=get_vaes()[0])
-
+                with gr.Accordion("Other Settings", open=False):
+                    with gr.Accordion("Textual inversion", open=True):
+                        active_textual_inversion = gr.Checkbox(value=False, label="Active Textual Inversion in prompt")
+                        use_textual_inversion = gr.CheckboxGroup(choices=get_ti_choices(model_name.value) if active_textual_inversion.value else [], value=None, label="Use Textual Inversion in prompt")
 
             with gr.Tab("LoRA"):
                 def lora_dropdown(label, visible=True):
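The added use_textual_inversion CheckboxGroup computes its choices once, at UI build time, from active_textual_inversion.value (False by default, so the list starts empty). A sketch of the event wiring one would normally add so that toggling the checkbox repopulates the group at runtime follows; the toggle_ti_choices callback is an assumption for illustration, not code from this commit, while get_ti_choices and model_name are the names already used in the diff above.

    def toggle_ti_choices(active: bool, model: str):
        # Repopulate the CheckboxGroup only while textual inversion is enabled.
        return gr.update(choices=get_ti_choices(model) if active else [], value=[])

    active_textual_inversion.change(
        toggle_ti_choices,
        inputs=[active_textual_inversion, model_name],
        outputs=[use_textual_inversion],
        queue=False,
    )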
@@ -276,65 +279,61 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
                 cm_btn_send_ip1.click(send_img, [img_source, img_result], [image_ip1, mask_ip1], queue=False, show_api=False)
                 cm_btn_send_ip2.click(send_img, [img_source, img_result], [image_ip2, mask_ip2], queue=False, show_api=False)
 
-            with gr.Tab("Hires fix"):
-                with gr.
[old lines 281-333: 53 deleted lines are truncated in the source diff view]
-            with gr.Tab("Textual inversion"):
-                active_textual_inversion = gr.Checkbox(value=False, label="Active Textual Inversion in prompt")
-                use_textual_inversion = gr.CheckboxGroup(choices=get_ti_choices(model_name.value) if active_textual_inversion.value else [], value=None, label="Use Textual Invertion in prompt")
-
+            with gr.Tab("Hires fix / Detailfix"):
+                with gr.Accordion("Hires fix", open=True):
+                    with gr.Row():
+                        upscaler_model_path = gr.Dropdown(label="Upscaler", choices=UPSCALER_KEYS, value=UPSCALER_KEYS[0])
+                        upscaler_increases_size = gr.Slider(minimum=1.1, maximum=4., step=0.1, value=1.2, label="Upscale by")
+                        esrgan_tile = gr.Slider(minimum=0, value=0, maximum=500, step=1, label="ESRGAN Tile")
+                        esrgan_tile_overlap = gr.Slider(minimum=1, maximum=200, step=1, value=8, label="ESRGAN Tile Overlap")
+                    with gr.Row():
+                        hires_steps = gr.Slider(minimum=0, value=30, maximum=100, step=1, label="Hires Steps")
+                        hires_denoising_strength = gr.Slider(minimum=0.1, maximum=1.0, step=0.01, value=0.55, label="Hires Denoising Strength")
+                        hires_sampler = gr.Dropdown(label="Hires Sampler", choices=POST_PROCESSING_SAMPLER, value=POST_PROCESSING_SAMPLER[0])
+                        hires_schedule_list = ["Use same schedule type"] + SCHEDULE_TYPE_OPTIONS
+                        hires_schedule_type = gr.Dropdown(label="Hires Schedule type", choices=hires_schedule_list, value=hires_schedule_list[0])
+                        hires_guidance_scale = gr.Slider(minimum=-1., maximum=30., step=0.5, value=-1., label="Hires CFG", info="If the value is -1, the main CFG will be used")
+                    with gr.Row():
+                        hires_prompt = gr.Textbox(label="Hires Prompt", placeholder="Main prompt will be used", lines=3)
+                        hires_negative_prompt = gr.Textbox(label="Hires Negative Prompt", placeholder="Main negative prompt will be used", lines=3)
+                with gr.Accordion("Detail fix", open=True):
+                    with gr.Row():
+                        # Adetailer Inpaint Only
+                        adetailer_inpaint_only = gr.Checkbox(label="Inpaint only", value=True)
+                        # Adetailer Verbose
+                        adetailer_verbose = gr.Checkbox(label="Verbose", value=False)
+                        # Adetailer Sampler
+                        adetailer_sampler = gr.Dropdown(label="Adetailer sampler:", choices=POST_PROCESSING_SAMPLER, value=POST_PROCESSING_SAMPLER[0])
+                    with gr.Row():
+                        with gr.Accordion("Detailfix A", open=True, visible=True):
+                            # Adetailer A
+                            adetailer_active_a = gr.Checkbox(label="Enable Adetailer A", value=False)
+                            prompt_ad_a = gr.Textbox(label="Main prompt", placeholder="Main prompt will be used", lines=3)
+                            negative_prompt_ad_a = gr.Textbox(label="Negative prompt", placeholder="Main negative prompt will be used", lines=3)
+                            with gr.Row():
+                                strength_ad_a = gr.Number(label="Strength:", value=0.35, step=0.01, minimum=0.01, maximum=1.0)
+                                face_detector_ad_a = gr.Checkbox(label="Face detector", value=False)
+                                person_detector_ad_a = gr.Checkbox(label="Person detector", value=True)
+                                hand_detector_ad_a = gr.Checkbox(label="Hand detector", value=False)
+                            with gr.Row():
+                                mask_dilation_a = gr.Number(label="Mask dilation:", value=4, minimum=1)
+                                mask_blur_a = gr.Number(label="Mask blur:", value=4, minimum=1)
+                                mask_padding_a = gr.Number(label="Mask padding:", value=32, minimum=1)
+                        with gr.Accordion("Detailfix B", open=True, visible=True):
+                            # Adetailer B
+                            adetailer_active_b = gr.Checkbox(label="Enable Adetailer B", value=False)
+                            prompt_ad_b = gr.Textbox(label="Main prompt", placeholder="Main prompt will be used", lines=3)
+                            negative_prompt_ad_b = gr.Textbox(label="Negative prompt", placeholder="Main negative prompt will be used", lines=3)
+                            with gr.Row():
+                                strength_ad_b = gr.Number(label="Strength:", value=0.35, step=0.01, minimum=0.01, maximum=1.0)
+                                face_detector_ad_b = gr.Checkbox(label="Face detector", value=False)
+                                person_detector_ad_b = gr.Checkbox(label="Person detector", value=True)
+                                hand_detector_ad_b = gr.Checkbox(label="Hand detector", value=False)
+                            with gr.Row():
+                                mask_dilation_b = gr.Number(label="Mask dilation:", value=4, minimum=1)
+                                mask_blur_b = gr.Number(label="Mask blur:", value=4, minimum=1)
+                                mask_padding_b = gr.Number(label="Mask padding:", value=32, minimum=1)
+
             with gr.Tab("Translation Settings"):
                 chatbot = gr.Chatbot(render_markdown=False, visible=False)  # component for auto-translation
                 chat_model = gr.Dropdown(choices=get_dolphin_models(), value=get_dolphin_models()[0][1], allow_custom_value=True, label="Model")
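The Hires CFG slider added above uses -1 as a sentinel ("If the value is -1, the main CFG will be used"), and the empty Hires prompt boxes fall back to the main prompts per their placeholders. A small sketch of how such fallbacks are commonly resolved before calling the pipeline; resolve_hires_params is a hypothetical helper, not the generation code actually used by this Space.

    def resolve_hires_params(guidance_scale: float, hires_guidance_scale: float,
                             prompt: str, hires_prompt: str,
                             negative_prompt: str, hires_negative_prompt: str):
        # -1 means "inherit the main CFG"; empty hires prompts inherit the main prompts.
        cfg = guidance_scale if hires_guidance_scale == -1 else hires_guidance_scale
        pos = hires_prompt.strip() or prompt
        neg = hires_negative_prompt.strip() or negative_prompt
        return cfg, pos, neg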