import gradio as gr
from random import randint
from datetime import datetime

from all_models import models

now2 = 0
kii = " mohawk femboy racecar driver "


def get_current_time():
    """Return the base prompt stamped with the current date and time."""
    now = datetime.now()
    current_time = now.strftime("%Y-%m-%d %H:%M:%S")
    return f'{kii} {current_time}'


def load_fn(models):
    """Load a Gradio interface for every model; fall back to a no-op interface on failure."""
    global models_load
    models_load = {}
    for model in models:
        if model not in models_load:
            try:
                m = gr.load(f'models/{model}')
            except Exception:
                # Placeholder so the UI still renders when a model fails to load.
                m = gr.Interface(lambda txt: None, ['text'], ['image'])
            models_load[model] = m


load_fn(models)

num_models = len(models)
default_models = models[:num_models]


def extend_choices(choices):
    """Pad the selected-model list with 'NA' entries up to num_models."""
    return choices + (num_models - len(choices)) * ['NA']


def update_imgbox(choices):
    """Show an image box for each selected model and hide the 'NA' padding slots."""
    choices_plus = extend_choices(choices)
    return [gr.Image(None, label=m, visible=(m != 'NA')) for m in choices_plus]


def gen_fn(model_str, prompt, negative_prompt):
    """Generate an image with one model, appending a random noise token to vary results."""
    if model_str == 'NA':
        return None
    noise = str(randint(0, 9999))
    combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
    print(f"Generating with prompt: {combined_prompt}")  # Debug line
    # result = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
    # end_time = time.time()  # End timing
    # runtime = end_time - start_time
    # model_timings[model_str] = runtime  # Log the model's execution time
    # queue_size -= 1  # Decrement queue size after processing
    # return f"Model {model_str} ran for {runtime:.2f} seconds", result
    return models_load[model_str](f'{prompt} {negative_prompt} {noise}')


def make_me():
    # with gr.Tab('The Dream'):
    with gr.Row():
        txt_input = gr.Textbox(lines=2, value=kii)
        # txt_input = gr.Textbox(label='Your prompt:', lines=2, value=kii)
        negative_prompt_input = gr.Textbox(lines=2, value="", label="Negative Prompt")

        gen_button = gr.Button('Generate images')
        stop_button = gr.Button('Stop', variant='secondary', interactive=False)
        # No inputs are passed to this handler, so the lambda must take no arguments.
        gen_button.click(lambda: gr.update(interactive=True), None, stop_button)

    gr.HTML("""
""")

    with gr.Row():
        output = [gr.Image(label=m) for m in default_models]
        current_models = [gr.Textbox(m, visible=False) for m in default_models]

    for m, o in zip(current_models, output):
        gen_event = gen_button.click(gen_fn, [m, txt_input, negative_prompt_input], o)
        # Each Stop click disables itself and cancels the corresponding generation event.
        stop_button.click(lambda: gr.update(interactive=False), None, stop_button, cancels=[gen_event])

    with gr.Accordion('Model selection'):
        # model_choice = gr.CheckboxGroup(models, label=f'{num_models} different models selected', value=default_models, multiselect=True, max_choices=num_models, interactive=True, filterable=False)
        # model_choice.change(update_imgbox, model_choice, output)
        # model_choice.change(extend_choices, model_choice, current_models)
        model_choice = gr.CheckboxGroup(models, label=f'{num_models} different models selected', value=default_models, interactive=True)
        model_choice.change(update_imgbox, model_choice, output)
        model_choice.change(extend_choices, model_choice, current_models)

    # with gr.Row():
    #     gr.HTML("""
    #