v3p3
app.py CHANGED
@@ -183,12 +183,7 @@ def generate(
         filepath = utils.save_image(image, metadata, OUTPUT_DIR)
         logger.info(f"Image saved as {filepath} with metadata")
 
-        #
-        history = gr.get_state("history") or []
-        history.insert(0, {"prompt": prompt, "image": images[0], "metadata": metadata})
-        gr.set_state("history", history[:10]) # Keep only the last 10 entries
-
-        return images, metadata, gr.update(choices=[h["prompt"] for h in history])
+        return images, json.dumps(metadata) # Return metadata as a JSON string
     except Exception as e:
         logger.exception(f"An error occurred: {e}")
         raise
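With the session-state bookkeeping removed, `generate()` now returns just the images and the metadata serialized with `json.dumps`; downstream callbacks parse it back wherever a dict is needed. A minimal sketch of that return contract, using a made-up `fake_generate` and made-up metadata fields purely for illustration:

```python
import json

def fake_generate(prompt: str):
    # Stand-in for the app's generate(): build some metadata, "render" an image,
    # and hand the metadata back as a JSON string rather than a dict.
    metadata = {"prompt": prompt, "seed": 42, "steps": 28}  # illustrative fields
    images = ["image-0.png"]                                # placeholder for PIL images
    return images, json.dumps(metadata)

images, metadata_json = fake_generate("1girl, solo")
metadata = json.loads(metadata_json)   # callbacks recover the dict this way
print(metadata["prompt"])              # -> 1girl, solo
```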
@@ -214,7 +209,6 @@ def get_random_prompt():
 
     return f"{score}, {character}, {style}, show accurate"
 
-
 if torch.cuda.is_available():
     pipe = load_pipeline(MODEL)
     logger.info("Loaded on Device!")
@@ -390,11 +384,18 @@ with gr.Blocks(css="style.css") as demo:
         clear_button = gr.Button("Clear All")
         random_prompt_button = gr.Button("Random Prompt")
 
+        history = gr.State([]) # Add a state component to store history
         history_dropdown = gr.Dropdown(label="Generation History", choices=[], interactive=True, elem_id="history-dropdown")
 
         with gr.Accordion(label="Generation Parameters", open=False):
             gr_metadata = gr.JSON(label="Metadata", show_label=False)
 
+    def update_history(images, metadata, history):
+        if images:
+            new_entry = {"prompt": json.loads(metadata)["prompt"], "image": images[0]}
+            history = [new_entry] + history[:9] # Keep only the last 10 entries
+        return gr.update(choices=[h["prompt"] for h in history]), history
+
     gr.Examples(
         examples=config.examples,
         inputs=prompt,
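The `update_history` helper added above parses that JSON string and keeps a newest-first list capped at ten entries. The snippet below exercises the same logic outside the UI to show the trimming behavior; the driver data is invented, and `gr.update` is only used the way the patch itself uses it:

```python
import json
import gradio as gr

def update_history(images, metadata, history):
    # Same logic as the function added in the diff: prepend the newest prompt
    # and keep at most 10 entries.
    if images:
        new_entry = {"prompt": json.loads(metadata)["prompt"], "image": images[0]}
        history = [new_entry] + history[:9]
    return gr.update(choices=[h["prompt"] for h in history]), history

history = []
for i in range(12):
    meta = json.dumps({"prompt": f"prompt {i}"})
    _, history = update_history([f"img-{i}.png"], meta, history)

print(len(history))            # -> 10
print(history[0]["prompt"])    # -> prompt 11  (newest entry first)
```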
@@ -442,9 +443,14 @@ with gr.Blocks(css="style.css") as demo:
     ).then(
         fn=generate,
         inputs=inputs,
-        outputs=[result, gr_metadata
+        outputs=[result, gr_metadata],
         api_name="run",
+    ).then(
+        fn=update_history,
+        inputs=[result, gr_metadata, history],
+        outputs=[history_dropdown, history],
     )
+
     negative_prompt.submit(
         fn=utils.randomize_seed_fn,
         inputs=[seed, randomize_seed],
@@ -454,9 +460,14 @@ with gr.Blocks(css="style.css") as demo:
     ).then(
         fn=generate,
         inputs=inputs,
-        outputs=[result, gr_metadata
+        outputs=[result, gr_metadata],
         api_name=False,
+    ).then(
+        fn=update_history,
+        inputs=[result, gr_metadata, history],
+        outputs=[history_dropdown, history],
     )
+
     run_button.click(
         fn=utils.randomize_seed_fn,
         inputs=[seed, randomize_seed],
@@ -466,8 +477,12 @@ with gr.Blocks(css="style.css") as demo:
     ).then(
         fn=generate,
         inputs=inputs,
-        outputs=[result, gr_metadata
+        outputs=[result, gr_metadata],
         api_name=False,
+    ).then(
+        fn=update_history,
+        inputs=[result, gr_metadata, history],
+        outputs=[history_dropdown, history],
     )
 
     apply_json_button.click(
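The three hunks above repeat the same wiring for `prompt.submit`, `negative_prompt.submit`, and `run_button.click`: generation writes to `result` and `gr_metadata`, then a chained `.then()` feeds those plus the `history` state into `update_history`. Below is a minimal, self-contained sketch of that `gr.State` + chained `.then()` pattern; the component names and the `echo` function are illustrative, not taken from the app. Because `history` is a `gr.State`, each browser session gets its own independent list.

```python
import gradio as gr

def echo(prompt, history):
    # Stand-in for the real generation step: produce a result and update the
    # per-session history list (newest first, capped at 10 entries).
    result = prompt.upper()
    history = [{"prompt": prompt}] + history[:9]
    return result, history

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Textbox(label="Result")
    history = gr.State([])  # per-session storage, like the diff's history state
    history_dropdown = gr.Dropdown(label="History", choices=[], interactive=True)
    run = gr.Button("Run")

    run.click(
        fn=echo,
        inputs=[prompt, history],
        outputs=[output, history],
    ).then(
        fn=lambda h: gr.update(choices=[e["prompt"] for e in h]),
        inputs=history,
        outputs=history_dropdown,
    )

if __name__ == "__main__":
    demo.launch()
```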
@@ -498,8 +513,8 @@ with gr.Blocks(css="style.css") as demo:
     )
 
     history_dropdown.change(
-        fn=lambda x:
-        inputs=history_dropdown,
+        fn=lambda x, history: next((h["prompt"] for h in history if h["prompt"] == x), ""),
+        inputs=[history_dropdown, history],
         outputs=prompt
     )
 
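Finally, the rewired `history_dropdown.change` callback receives both the selected value and the `history` state, and looks the prompt up with `next()`: it returns the selected prompt when it is still present in the history and an empty string otherwise. A standalone look at that lookup, with made-up sample data:

```python
history = [
    {"prompt": "1girl, solo, night sky", "image": "a.png"},
    {"prompt": "scenery, forest, rain", "image": "b.png"},
]

# Same shape as the lambda in the diff: find the matching entry or fall back to "".
lookup = lambda x, history: next((h["prompt"] for h in history if h["prompt"] == x), "")

print(lookup("scenery, forest, rain", history))  # -> scenery, forest, rain
print(lookup("not in history", history))         # -> (empty string)
```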