Spaces: Running on Zero
v3p6

app.py CHANGED
@@ -66,11 +66,25 @@ def load_pipeline(model_name):
 def parse_json_parameters(json_str):
     try:
         params = json.loads(json_str)
-        required_keys = ['prompt', 'negative_prompt', '
+        required_keys = ['prompt', 'negative_prompt', 'resolution', 'guidance_scale', 'num_inference_steps', 'seed', 'sampler']
         for key in required_keys:
             if key not in params:
                 raise ValueError(f"Missing required key: {key}")
-
+
+        # Parse resolution
+        width, height = map(int, params['resolution'].split(' x '))
+
+        return {
+            'prompt': params['prompt'],
+            'negative_prompt': params['negative_prompt'],
+            'seed': params['seed'],
+            'width': width,
+            'height': height,
+            'guidance_scale': params['guidance_scale'],
+            'num_inference_steps': params['num_inference_steps'],
+            'sampler': params['sampler'],
+            'use_upscaler': params.get('use_upscaler', False)
+        }
     except json.JSONDecodeError:
         raise ValueError("Invalid JSON format")
     except Exception as e:
@@ -104,6 +118,7 @@ def generate(
         guidance_scale = params['guidance_scale']
         num_inference_steps = params['num_inference_steps']
         sampler = params['sampler']
+        use_upscaler = params['use_upscaler']
     except ValueError as e:
         raise gr.Error(str(e))
 
@@ -200,24 +215,14 @@ generation_history = []
 
 # Function to update the history list
 def update_history_list():
-
-
-
-
-            <img src='data:image/png;base64,{utils.image_to_base64(item["image"])}'
-                 style='width: 100px; height: 100px; object-fit: cover;'
-                 onclick='handle_image_click({idx})' />
-            <p style='width: 100px; white-space: nowrap; overflow: hidden; text-overflow: ellipsis;'>
-                {item["prompt"]}
-            </p>
-        </div>
-        """
-    html += "</div>"
-    return html
+    return [
+        gr.Image.update(value=item["image"], visible=True)
+        for item in generation_history
+    ]
 
 # Function to handle image click in history
-def handle_image_click(
-    selected = generation_history[
+def handle_image_click(evt: gr.SelectData):
+    selected = generation_history[evt.index]
     return selected["image"], json.dumps(selected["metadata"], indent=2)
 
 # Modify the generate function to add results to the history
@@ -232,7 +237,7 @@ def generate_and_update_history(*args, **kwargs):
     })
     if len(generation_history) > 10:  # Limit history to 10 items
         generation_history.pop()
-    return images, metadata, gr.update(
+    return images[0], json.dumps(metadata, indent=2), gr.Gallery.update(value=update_history_list())
 
 if torch.cuda.is_available():
     pipe = load_pipeline(MODEL)
@@ -268,10 +273,8 @@ with gr.Blocks(css="style.css") as demo:
                 variant="primary",
                 scale=0
             )
-            result = gr.
+            result = gr.Image(
                 label="Result",
-                columns=1,
-                preview=True,
                 show_label=False
             )
         with gr.Accordion(label="Advanced Settings", open=False):
@@ -354,8 +357,8 @@ with gr.Blocks(css="style.css") as demo:
         generate_from_json = gr.Button("Generate from JSON")
 
         # Add history accordion
-        with gr.Accordion("Generation History",
-
+        with gr.Accordion("Generation History", open=False) as history_accordion:
+            history_gallery = gr.Gallery(label="History", show_label=False, elem_id="history_gallery").style(grid=5)
             with gr.Row():
                 selected_image = gr.Image(label="Selected Image", interactive=False)
                 selected_metadata = gr.JSON(label="Selected Metadata", show_label=False)
@@ -408,10 +411,7 @@ with gr.Blocks(css="style.css") as demo:
     ).then(
         fn=generate_and_update_history,
         inputs=inputs,
-        outputs=[result, gr_metadata,
-    ).then(
-        fn=update_history_list,
-        outputs=history_list
+        outputs=[result, gr_metadata, history_gallery],
     )
 
     negative_prompt.submit(
@@ -423,10 +423,7 @@ with gr.Blocks(css="style.css") as demo:
     ).then(
         fn=generate_and_update_history,
         inputs=inputs,
-        outputs=[result, gr_metadata,
-    ).then(
-        fn=update_history_list,
-        outputs=history_list
+        outputs=[result, gr_metadata, history_gallery],
     )
 
     run_button.click(
@@ -438,23 +435,20 @@ with gr.Blocks(css="style.css") as demo:
    ).then(
        fn=generate_and_update_history,
        inputs=inputs,
-        outputs=[result, gr_metadata,
-    ).then(
-        fn=update_history_list,
-        outputs=history_list
+        outputs=[result, gr_metadata, history_gallery],
     )
 
     # Add event handler for generate_from_json button
     generate_from_json.click(
         fn=generate_and_update_history,
         inputs=inputs,
-        outputs=[result, gr_metadata,
-    ).then(
-        fn=update_history_list,
-        outputs=history_list
+        outputs=[result, gr_metadata, history_gallery],
     )
 
-    # Add
-
+    # Add event handler for image selection in history
+    history_gallery.select(
+        fn=handle_image_click,
+        outputs=[selected_image, selected_metadata],
+    )
 
 demo.queue(max_size=20).launch(debug=IS_COLAB, share=IS_COLAB)
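
For reference, a minimal usage sketch of the new parse_json_parameters, assuming the function from this app.py is in scope. The payload values below are made up; the 'resolution' string must use the "W x H" form that split(' x ') expects, and use_upscaler is optional (params.get defaults it to False).

import json

# Hypothetical payload; every key listed in required_keys must be present.
example = json.dumps({
    "prompt": "a lighthouse at dawn",     # made-up value
    "negative_prompt": "lowres, blurry",  # made-up value
    "resolution": "1024 x 1024",          # must be "W x H" so split(' x ') yields two ints
    "guidance_scale": 7.0,
    "num_inference_steps": 28,
    "seed": 42,
    "sampler": "Euler a",                 # must match a sampler the app exposes
    "use_upscaler": False,                # optional; defaults to False when omitted
})

params = parse_json_parameters(example)   # returns a dict with width/height split out
print(params["width"], params["height"])  # 1024 1024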
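
The history accordion is wired through Gradio's gallery selection event: history_gallery.select(...) invokes handle_image_click, whose gr.SelectData argument carries the index of the clicked thumbnail. Below is a stripped-down sketch of that pattern on its own; the component names and image paths are placeholders, not part of this Space.

import gradio as gr

history = ["outputs/img_0.png", "outputs/img_1.png"]  # placeholder paths, assumed to exist on disk

def on_select(evt: gr.SelectData):
    # evt.index is the position of the clicked thumbnail in the gallery
    return history[evt.index], {"index": evt.index}

with gr.Blocks() as demo:
    gallery = gr.Gallery(value=history, label="History", show_label=False)
    picked = gr.Image(label="Selected Image", interactive=False)
    meta = gr.JSON(label="Selection")
    gallery.select(fn=on_select, outputs=[picked, meta])

demo.launch()

In app.py the same event returns the stored image and the JSON-encoded metadata of the selected history entry.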