# Source provenance (from the Hugging Face Spaces page this was copied from):
# charliebaby2023 — "Update app.py" — commit f1c6c08 (verified)
# page chrome: raw / history blame / 4.69 kB
import gradio as gr
from random import randint
from all_models import models
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor
import requests
# Vestigial module global: get_current_time only ever *shadows* this with a
# local, so it stays 0 forever — candidate for removal.
now2 = 0
# Default prompt text pre-filled into the prompt textbox.
kii=" mohawk femboy racecar driver "
def get_current_time() -> str:
    """Return the default prompt prefix `kii` followed by the current timestamp.

    Returns:
        str: "<kii> YYYY-MM-DD HH:MM:SS".
    """
    # The original copied datetime.now() into a local named `now2`, shadowing
    # the module-level `now2` without updating it — dead code, removed.
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    return f'{kii} {current_time}'
def load_fn(models):
    """Populate the global `models_load` dict, mapping model id -> loaded interface.

    A model that fails to load gets a stub interface returning None, so the
    rest of the UI keeps working with that slot blank.

    Args:
        models: iterable of Hugging Face model ids (e.g. "user/model").
    """
    global models_load
    models_load = {}
    for model in models:
        # Membership test directly on the dict (no .keys()).
        if model in models_load:
            continue
        try:
            m = gr.load(f'models/{model}')
        except Exception as error:
            print(f"Error loading model {model}: {error}")
            # NOTE(review): `enable_queue` was deprecated and later removed from
            # gr.Interface — kept here for the Gradio version this file targets;
            # confirm against the installed version.
            m = gr.Interface(lambda _: None, inputs=gr.Textbox(), outputs=gr.Image(), enable_queue=False)
        models_load[model] = m
# Import-time side effect: loads every model and fills the global models_load.
load_fn(models)
num_models = len(models)
# models[:num_models] is the full list — every model is selected by default.
default_models = models[:num_models]
def extend_choices(choices):
    """Pad `choices` with 'NA' placeholders so it always has `num_models` entries."""
    padding = ['NA'] * (num_models - len(choices))
    return choices + padding
def update_imgbox(choices):
    """Return one image component per slot: visible for selected models, hidden for 'NA' pads."""
    padded = extend_choices(choices)
    boxes = []
    for name in padded:
        boxes.append(gr.Image(None, label=name, visible=(name != 'NA')))
    return boxes
# Thread pool sized to one worker per model. NOTE(review): never used anywhere
# in this file — generation runs through Gradio event handlers instead; looks
# like leftover scaffolding.
executor = ThreadPoolExecutor(max_workers=num_models)
def gen_fn(model_str, prompt, negative_prompt):
    """Generate one image with the selected model.

    Args:
        model_str: model id, or the 'NA' placeholder for an unused slot.
        prompt: positive prompt text.
        negative_prompt: appended verbatim to the prompt (the HF inference
            wrapper here takes a single text input, so there is no separate
            negative-prompt channel).

    Returns:
        The model's image response (or a gr.Image wrapping a path/URL), or
        None for 'NA', on error, or on an unrecognized response type.
    """
    if model_str == 'NA':
        return None
    # Random noise token defeats caching so repeated prompts give fresh images.
    noise = str(randint(0, 9999))
    # Build the request string once; the original logged a different string
    # (with model_str spliced in) than it actually sent — misleading log fixed.
    request_prompt = f'{prompt} {negative_prompt} {noise}'
    print(f"Generating with prompt: {request_prompt}")
    try:
        image_response = models_load[model_str](request_prompt)
        # Some backends return a tuple; assume the first item is the image.
        if isinstance(image_response, tuple):
            image_response = image_response[0]
        if isinstance(image_response, gr.Image):
            return image_response
        if isinstance(image_response, str):
            # Path or URL — wrap so Gradio can render it.
            return gr.Image(image_response)
        print(f"Unexpected response type: {type(image_response)}")
        return None
    except Exception as e:
        # Best-effort: log and blank the slot rather than crash the whole grid.
        print(f"Error occurred: {e}")
        return None
def make_me():
    """Build the UI: prompt inputs, generate/stop buttons, one image slot per
    model, and a checkbox group to choose which models are active."""
    with gr.Row():
        txt_input = gr.Textbox(lines=2, value=kii)
        negative_prompt_input = gr.Textbox(lines=2, value="", label="Negative Prompt")
        gen_button = gr.Button('Generate images')
        stop_button = gr.Button('Stop', variant='secondary', interactive=False)
        # BUG FIX: with inputs=None Gradio calls the handler with *no*
        # arguments, so the original `lambda _: ...` raised TypeError on
        # click; a zero-argument lambda is required here.
        gen_button.click(lambda: gr.update(interactive=True), None, stop_button)
    gr.HTML("""
        <div style="text-align: center; max-width: 100%; margin: 0 auto;">
        <body>
        </body>
        </div>
    """)
    with gr.Row():
        output = [gr.Image(label=m) for m in default_models]
        current_models = [gr.Textbox(m, visible=False) for m in default_models]
        # Wire each hidden model-name textbox + the prompts to its image slot.
        for m, o in zip(current_models, output):
            gen_event = gen_button.click(gen_fn, [m, txt_input, negative_prompt_input], o, queue=False)
    with gr.Accordion('Model selection'):
        model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, interactive=True)
        # Selecting/deselecting models shows/hides image slots and rewrites
        # the hidden model-name textboxes ('NA' pads for empty slots).
        model_choice.change(update_imgbox, model_choice, output)
        model_choice.change(extend_choices, model_choice, current_models)
# JavaScript meant to suppress page auto-scroll while a Gradio toast shows.
# NOTE(review): this is injected via gr.Markdown below, which sanitizes HTML,
# so the script most likely never executes — confirm against the installed
# Gradio version.
# NOTE(review): 300000 ms is 5 minutes, but the inline comment says
# "3 seconds" — one of the two is wrong; confirm intent before changing.
js_code = """
<script>
const originalScroll = window.scrollTo;
const originalShowToast = gradio.Toast.show;
gradio.Toast.show = function() {
originalShowToast.apply(this, arguments);
window.scrollTo = function() {};};
setTimeout(() => {
window.scrollTo = originalScroll;
}, 300000); // Restore scroll function after 3 seconds
</script>
"""
# Top-level app assembly: custom CSS tweaks Gradio's floating labels and
# button sizes, then the UI is built and launched.
with gr.Blocks(css="""
label.float.svelte-i3tvor { top:auto!important; bottom: 0; position: absolute; background: rgba(0,0,0,0.0); left: var(--block-label-margin); color: rgba(200,200,200,.7);}
.genbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
.stopbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
.float.svelte-1mwvhlq { position: absolute; top: var(--block-label-margin); left: var(--block-label-margin); background: none; border: none;}
""") as demo:
    # NOTE(review): js_code already contains its own <script> tags, so this
    # wraps them twice; Markdown also sanitizes HTML, so the script likely
    # never runs — confirm intent.
    gr.Markdown("<script>" + js_code + "</script>")
    make_me()
demo.queue()
# NOTE(review): this rebinds the bound *method* demo.queue to the boolean
# False — any later demo.queue() call would crash. Presumably an attempt to
# disable queueing after enabling it one line above; verify which behavior
# is actually wanted and use the supported launch/queue kwargs instead.
demo.queue = False
demo.config["queue"] = False
demo.launch(max_threads=200)