John6666 committed
Commit e9a2b09 · verified · 1 Parent(s): 40e5374

Upload 2 files

Files changed (2)
  1. app.py +8 -9
  2. dc.py +2 -2
app.py CHANGED
@@ -57,15 +57,14 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
             run_translate_button = gr.Button("Run with LLM Enhance", variant="secondary", scale=3)
             auto_trans = gr.Checkbox(label="Auto translate to English", value=False, scale=2)
 
-        with gr.Group():
-            result = gr.Image(label="Result", elem_id="result", format="png", show_label=False, interactive=False,
-                              show_download_button=True, show_share_button=False, container=True)
-            with gr.Accordion("History", open=False):
-                history_gallery = gr.Gallery(label="History", columns=6, object_fit="contain", format="png", interactive=False, show_share_button=False,
-                                             show_download_button=True)
-                history_files = gr.Files(interactive=False, visible=False)
-                history_clear_button = gr.Button(value="Clear History", variant="secondary")
-                history_clear_button.click(lambda: ([], []), None, [history_gallery, history_files], queue=False, show_api=False)
+        result = gr.Image(label="Result", elem_id="result", format="png", show_label=False, interactive=False,
+                          show_download_button=True, show_share_button=False, container=True)
+        with gr.Accordion("History", open=False):
+            history_gallery = gr.Gallery(label="History", columns=6, object_fit="contain", format="png", interactive=False, show_share_button=False,
+                                         show_download_button=True)
+            history_files = gr.Files(interactive=False, visible=False)
+            history_clear_button = gr.Button(value="Clear History", variant="secondary")
+            history_clear_button.click(lambda: ([], []), None, [history_gallery, history_files], queue=False, show_api=False)
 
         with gr.Accordion("Advanced Settings", open=False):
             with gr.Row():
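
For reference, a minimal standalone sketch of the result/history layout as it reads after this change, with the gr.Group() wrapper dropped. The component arguments are taken from the diff; the bare gr.Blocks() context and launch() call are added only so the sketch runs on its own (the real app.py passes fill_width, css, delete_cache, etc. and nests this inside a larger layout):

import gradio as gr

with gr.Blocks() as demo:
    # The result image and the history accordion now sit directly in the
    # layout instead of inside a gr.Group() wrapper.
    result = gr.Image(label="Result", elem_id="result", format="png", show_label=False, interactive=False,
                      show_download_button=True, show_share_button=False, container=True)
    with gr.Accordion("History", open=False):
        history_gallery = gr.Gallery(label="History", columns=6, object_fit="contain", format="png",
                                     interactive=False, show_share_button=False, show_download_button=True)
        history_files = gr.Files(interactive=False, visible=False)
        history_clear_button = gr.Button(value="Clear History", variant="secondary")
        # Clearing the history empties both the gallery and the hidden file list.
        history_clear_button.click(lambda: ([], []), None, [history_gallery, history_files],
                                   queue=False, show_api=False)

if __name__ == "__main__":
    demo.launch()
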
dc.py CHANGED
@@ -162,7 +162,7 @@ class GuiSD:
                 break
             time.sleep(0.5)
             print(f"Waiting queue {i}")
-            yield "Waiting queue"
+            #yield "Waiting queue"
 
         self.status_loading = True
 
@@ -617,6 +617,7 @@ def esrgan_upscale(image, upscaler_name, upscaler_size):
 
 dynamic_gpu_duration.zerogpu = True
 sd_gen_generate_pipeline.zerogpu = True
+sd_gen = GuiSD()
 
 
 from pathlib import Path
@@ -628,7 +629,6 @@ from modutils import (safe_float, escape_lora_basename, to_lora_key, to_lora_pat
     normalize_prompt_list, get_civitai_info, search_lora_on_civitai, translate_to_en, get_t2i_model_info, get_civitai_tag, save_image_history)
 
 
-sd_gen = GuiSD()
 #@spaces.GPU
 def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps,
           model_name = load_diffusers_format_model[0], lora1 = None, lora1_wt = 1.0, lora2 = None, lora2_wt = 1.0,
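
For context, a minimal self-contained sketch of the waiting-queue polling pattern touched by the @@ -162,7 +162,7 @@ hunk above. The function name wait_for_slot and the busy callable are hypothetical stand-ins; in dc.py the loop runs inside a GuiSD generator method and checks shared loading state:

import time

def wait_for_slot(busy, max_polls: int = 240) -> None:
    # Poll until the shared model loader is free. Progress is only printed;
    # the intermediate yield "Waiting queue" status string is commented out
    # in this commit.
    for i in range(max_polls):
        if not busy():
            break
        time.sleep(0.5)
        print(f"Waiting queue {i}")
        # yield "Waiting queue"

# Example: the loader is busy for the first two polls, then frees up.
_polls = iter([True, True, False])
wait_for_slot(lambda: next(_polls, False))
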