charliebaby2023 committed on
Commit
f1c6c08
·
verified ·
1 Parent(s): 2c694c9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -99
app.py CHANGED
@@ -6,7 +6,7 @@ from concurrent.futures import ThreadPoolExecutor
6
  import requests
7
 
8
  now2 = 0
9
- kii=" mohawk femboy racecar driver ";
10
 
11
 
12
  def get_current_time():
@@ -16,6 +16,7 @@ def get_current_time():
16
  ki = f'{kii} {current_time}'
17
  return ki
18
 
 
19
  def load_fn(models):
20
  global models_load
21
  models_load = {}
@@ -35,16 +36,10 @@ num_models = len(models)
35
  default_models = models[:num_models]
36
 
37
 
38
-
39
-
40
-
41
-
42
-
43
-
44
-
45
  def extend_choices(choices):
46
  return choices + (num_models - len(choices)) * ['NA']
47
 
 
48
  def update_imgbox(choices):
49
  choices_plus = extend_choices(choices)
50
  return [gr.Image(None, label=m, visible=(m != 'NA')) for m in choices_plus]
@@ -52,6 +47,7 @@ def update_imgbox(choices):
52
 
53
  executor = ThreadPoolExecutor(max_workers=num_models)
54
 
 
55
  def gen_fn(model_str, prompt, negative_prompt):
56
  if model_str == 'NA':
57
  return None
@@ -60,23 +56,13 @@ def gen_fn(model_str, prompt, negative_prompt):
60
  combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
61
  print(f"Generating with prompt: {combined_prompt}")
62
 
63
- # try:
64
- # future = executor.submit(models_load[model_str], f'{prompt} {negative_prompt} {noise}')
65
- # result = future.result()
66
- # return result
67
- # except requests.exceptions.Timeout:
68
- # print(f"Timeout occurred for model {model_str}. Please try again later.")
69
- # return None
70
- # except Exception as e:
71
- # print(f"Error occurred: {e}")
72
- # return None
73
  try:
74
  # Attempt to generate the image
75
  image_response = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
76
 
77
  # Check if the image_response is a tuple, handle accordingly
78
  if isinstance(image_response, tuple):
79
- # If the response is a tuple, return the first item assuming it's the image
80
  image_response = image_response[0]
81
 
82
  # Ensure the response is an image or image-like object
@@ -91,71 +77,31 @@ def gen_fn(model_str, prompt, negative_prompt):
91
  print(f"Error occurred: {e}")
92
  return None
93
 
94
-
95
-
96
- # noise = str(randint(0, 9999))
97
- # combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
98
- # print(f"Generating with prompt: {combined_prompt}") # Debug line
99
-
100
- # result = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
101
-
102
- # end_time = time.time() # End timing
103
- # runtime = end_time - start_time
104
- # model_timings[model_str] = runtime # Log the model's execution time
105
-
106
- # queue_size -= 1 # Decrement queue size after processing
107
- # return f"Model {model_str} ran for {runtime:.2f} seconds", result
108
-
109
-
110
-
111
-
112
-
113
- # return models_load[model_str](f'{prompt} {negative_prompt} {noise}')
114
-
115
-
116
-
117
-
118
-
119
-
120
 
121
  def make_me():
122
- # with gr.Tab('The Dream'):
123
- with gr.Row():
124
- txt_input = gr.Textbox(lines=2, value=kii )
125
- #txt_input = gr.Textbox(label='Your prompt:', lines=2, value=kii)
126
- negative_prompt_input = gr.Textbox(lines=2, value="", label="Negative Prompt" )
127
- gen_button = gr.Button('Generate images')
128
- stop_button = gr.Button('Stop', variant='secondary', interactive=False)
129
- gen_button.click(lambda _: gr.update(interactive=True), None, stop_button)
130
- gr.HTML("""
131
- <div style="text-align: center; max-width: 100%; margin: 0 auto;">
132
- <body>
133
- </body>
134
- </div>
135
- """)
136
- with gr.Row():
137
- output = [gr.Image(label=m ) for m in default_models]
138
- current_models = [gr.Textbox(m, visible=False) for m in default_models]
139
- for m, o in zip(current_models, output):
140
- gen_event = gen_button.click(gen_fn, [m, txt_input, negative_prompt_input], o, queue = False)
141
- # stop_button.click(lambda _: gr.update(interactive=False), None, stop_button, cancels=[gen_event])
142
-
143
-
144
- with gr.Accordion('Model selection'):
145
- # model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, multiselect=True, max_choices=num_models, interactive=True, filterable=False)
146
- # model_choice.change(update_imgbox, model_choice, output)
147
- # model_choice.change(extend_choices, model_choice, current_models)
148
- model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, interactive=True )
149
- model_choice.change(update_imgbox, model_choice, output)
150
- model_choice.change(extend_choices, model_choice, current_models)
151
-
152
- # with gr.Row():
153
- # gr.HTML("""
154
- # <div class="footer">
155
- # <p> Based on the <a href="https://huggingface.co/spaces/derwahnsinn/TestGen">TestGen</a> Space by derwahnsinn, the <a href="https://huggingface.co/spaces/RdnUser77/SpacIO_v1">SpacIO</a> Space by RdnUser77 and Omnibus's Maximum Multiplier!
156
- # </p>
157
- # """)
158
 
 
 
 
 
159
 
160
 
161
  js_code = """
@@ -171,24 +117,6 @@ js_code = """
171
  </script>
172
  """
173
 
174
-
175
-
176
-
177
-
178
-
179
-
180
-
181
-
182
-
183
-
184
-
185
-
186
-
187
-
188
-
189
-
190
-
191
-
192
  with gr.Blocks(css="""
193
  label.float.svelte-i3tvor { top:auto!important; bottom: 0; position: absolute; background: rgba(0,0,0,0.0); left: var(--block-label-margin); color: rgba(200,200,200,.7);}
194
  .genbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
 
6
  import requests
7
 
8
  now2 = 0
9
+ kii=" mohawk femboy racecar driver "
10
 
11
 
12
  def get_current_time():
 
16
  ki = f'{kii} {current_time}'
17
  return ki
18
 
19
+
20
  def load_fn(models):
21
  global models_load
22
  models_load = {}
 
36
  default_models = models[:num_models]
37
 
38
 
 
 
 
 
 
 
 
39
def extend_choices(choices):
    """Pad *choices* with 'NA' placeholders so the list is exactly num_models long."""
    padding = ['NA'] * (num_models - len(choices))
    return choices + padding
41
 
42
+
43
def update_imgbox(choices):
    """Return one gr.Image per model slot; padded 'NA' slots are hidden."""
    padded = extend_choices(choices)
    boxes = []
    for model_name in padded:
        boxes.append(gr.Image(None, label=model_name, visible=model_name != 'NA'))
    return boxes
 
47
 
48
  executor = ThreadPoolExecutor(max_workers=num_models)
49
 
50
+
51
  def gen_fn(model_str, prompt, negative_prompt):
52
  if model_str == 'NA':
53
  return None
 
56
  combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
57
  print(f"Generating with prompt: {combined_prompt}")
58
 
 
 
 
 
 
 
 
 
 
 
59
  try:
60
  # Attempt to generate the image
61
  image_response = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
62
 
63
  # Check if the image_response is a tuple, handle accordingly
64
  if isinstance(image_response, tuple):
65
+ # If the response is a tuple, assume the first item is the image
66
  image_response = image_response[0]
67
 
68
  # Ensure the response is an image or image-like object
 
77
  print(f"Error occurred: {e}")
78
  return None
79
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80
 
81
def make_me():
    """Assemble the Gradio UI inside the current Blocks context.

    Builds three rows: the prompt/negative-prompt inputs with generate and
    stop buttons, one output image per default model, and an accordion with
    a model-selection checkbox group wired to refresh the image grid.

    Reads module globals: kii, default_models, models, num_models, gen_fn,
    update_imgbox, extend_choices.
    """
    with gr.Row():
        txt_input = gr.Textbox(lines=2, value=kii)
        negative_prompt_input = gr.Textbox(lines=2, value="", label="Negative Prompt")
        gen_button = gr.Button('Generate images')
        stop_button = gr.Button('Stop', variant='secondary', interactive=False)
        # Clicking "Generate" enables the stop button; NOTE(review): nothing
        # wires stop_button to cancel the generation events (the old
        # `cancels=[gen_event]` hookup was removed), so Stop is cosmetic.
        gen_button.click(lambda _: gr.update(interactive=True), None, stop_button)
        gr.HTML("""
        <div style="text-align: center; max-width: 100%; margin: 0 auto;">
        <body>
        </body>
        </div>
        """)

    with gr.Row():
        output = [gr.Image(label=m) for m in default_models]
        current_models = [gr.Textbox(m, visible=False) for m in default_models]
        # Wire each hidden model-name textbox to its own image output.
        # The click-event handle was previously bound to an unused local
        # (`gen_event`); the dead binding is removed here.
        for m, o in zip(current_models, output):
            gen_button.click(gen_fn, [m, txt_input, negative_prompt_input], o, queue=False)

    with gr.Accordion('Model selection'):
        model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, interactive=True)
        # Re-render the image grid and the hidden model-name textboxes
        # whenever the selection changes.
        model_choice.change(update_imgbox, model_choice, output)
        model_choice.change(extend_choices, model_choice, current_models)
105
 
106
 
107
  js_code = """
 
117
  </script>
118
  """
119
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
120
  with gr.Blocks(css="""
121
  label.float.svelte-i3tvor { top:auto!important; bottom: 0; position: absolute; background: rgba(0,0,0,0.0); left: var(--block-label-margin); color: rgba(200,200,200,.7);}
122
  .genbut { max-width: 50px; max-height: 30px; width:150px; height:30px}