import gradio as gr
from random import randint
from all_models import models
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor
import time
import requests
now2 = 0
kii = "This is your prompt input window (still a WIP)"
combined_prompt = ""
# Build the textbox placeholder text: the static hint plus the current timestamp.
def get_current_time():
    now = datetime.now()
    current_time = now.strftime("%Y-%m-%d %H:%M:%S")
    ki = f'{kii} {current_time}'
    return ki
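# Load every model listed in all_models through the Hugging Face hub loader.
# A model that fails to load is replaced with a dummy Interface so the image
# grid below still renders a placeholder for it.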
def load_fn(models):
    global models_load
    models_load = {}
    for model in models:
        if model not in models_load:
            try:
                m = gr.load(f'models/{model}')
                print(f"{m}")
            except Exception as error:
                print(f"Error loading model {model}: {error}")
                m = gr.Interface(lambda _: None, inputs=gr.Textbox(), outputs=gr.Image(), enable_queue=False)
            models_load.update({model: m})
load_fn(models)
num_models = len(models)
default_models = models[:num_models]
def extend_choices(choices):
    return choices + (num_models - len(choices)) * ['NA']
def update_imgbox(choices):
    choices_plus = extend_choices(choices)
    return [gr.Image(None, label=m, visible=(m != 'NA')) for m in choices_plus]
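# One worker per model; reserved for running generations in parallel
# (not currently wired into gen_fn).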
executor = ThreadPoolExecutor(max_workers=num_models)
# def gen_fn(model_str, prompt):
#     if model_str == 'NA':
#         return None
#
#     noise = str(randint(0, 9999))
#     combined_prompt = f'{prompt}'
#     print(f"Generating with prompt: {combined_prompt}")
def gen_fn(model_str, prompt):
    global models_load
    if model_str == 'NA':
        return None
    if models_load.get(model_str) is None:
        # Load the model if not already loaded (same hub loader as load_fn).
        models_load[model_str] = gr.load(f'models/{model_str}')
    # Random suffix so repeated prompts are not served from cache.
    noise = str(randint(0, 9999))
    return models_load[model_str](f'{prompt} {noise}')
    # try:
    #     image_response = models_load[model_str](f'{prompt} {noise}')
    #     if isinstance(image_response, gr.Image):
    #         return image_response
    #     elif isinstance(image_response, str):  # If the response is a path or URL, pass it as a string
    #         return gr.Image(image_response)    # You can handle it based on your model's return type
    #     else:
    #         print(f"Unexpected response type: {type(image_response)}")
    #         return None
    # except Exception as e:
    #     print(f"Error occurred: {e}")
    #     return None
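# Build the UI: a prompt textbox, a generate button, one output image per
# model, and a hidden checkbox group that selects which models are active.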
def make_me():
    with gr.Row():
        txt_input = gr.Textbox(lines=2, value=kii, label=None)
        gen_button = gr.Button('Generate images')
        # stop_button = gr.Button('Stop', variant='secondary', interactive=False)
        # gen_button.click(lambda _: gr.update(interactive=True), None, stop_button)
        # gen_button.click(lambda _: gr.update(interactive=True), None)  # disabled: its stop_button target is commented out above
    gr.HTML("""
        <div style="text-align: center; max-width: 100%; margin: 0 auto;">
        </div>
    """)
    with gr.Row():
        output = [gr.Image(label=m) for m in default_models]
    current_models = [gr.Textbox(m, visible=False) for m in default_models]
    for m, o in zip(current_models, output):
        gen_event = gen_button.click(gen_fn, [m, txt_input], o, queue=False)
    with gr.Accordion('Model selection', visible=False):
        model_choice = gr.CheckboxGroup(models, label=f'{num_models} different models selected', value=default_models, interactive=True)
        model_choice.change(update_imgbox, model_choice, output)
        model_choice.change(extend_choices, model_choice, current_models)
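# JS snippet meant to stop Gradio toast notifications from yanking the page
# scroll position while images are generating.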
js_code = """
<script>
const originalScroll = window.scrollTo;
const originalShowToast = gradio.Toast.show;
gradio.Toast.show = function() {
originalShowToast.apply(this, arguments);
window.scrollTo = function() {};};
setTimeout(() => {
window.scrollTo = originalScroll;
}, 1000); // Restore scroll function after 3 seconds
</script>
"""
with gr.Blocks(css="""
label.float.svelte-i3tvor { top:auto!important; bottom: 0; position: absolute; background: rgba(0,0,0,0.0); left: var(--block-label-margin); color: rgba(200,200,200,.7);}
.genbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
.stopbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
.float.svelte-1mwvhlq { position: absolute; top: var(--block-label-margin); left: var(--block-label-margin); background: none; border: none;}
textarea:hover { background:#55555555;}
textarea { overflow-y: scroll; top:0px; width: 100%; height:100%!important;
font-size: 1.5em;
letter-spacing: 3px;
color: limegreen;
border: none!important;
background: none;
outline: none !important; }
.svelte-5y6bt2 {max-height:161px;min-height:160px;}
.hide-container { max-height: 2px; position: fixed; min-height: 1px;}
.svelte-1gfkn6j {display:none;}
.gradio-container .gri-textbox .gri-input { border: none; padding: 0; background: transparent; box-shadow: none; }
.padded.svelte-5y6bt2:hover { border:1px solid cyan;}
.padded.svelte-5y6bt2 {
border: none;
background: none!important; padding: 0px!important; min-width:100%!important; max-width:101%!important;position:relative;right:0px;max-height:100%; }
.secondary.svelte-1137axg {
width: 200px;
flex: none!important;
position: relative;
min-width: 160px;
border: var(--button-border-width) solid
var(--button-secondary-border-color);
background: var(--button-secondary-background-fill);
color: var(--button-secondary-text-color);
box-shadow: var(--button-secondary-shadow);
left: 0px;
float: left;
}
div.svelte-633qhp {
/* display: flex; */
/* flex-direction: inherit; */
/* flex-wrap: wrap; */
gap: var(--form-gap-width);
box-shadow: var(--block-shadow);
height: 20px;
width: 250px;
position:fixed;
left:calc(50% - 100px);
flex: none!important;
/* border: var(--block-border-width) solid var(--block-border-color); */
/* border-radius: var(--block-radius); */
/* background: var(--border-color-primary); */
/* overflow-y: hidden; */
}
.form.svelte-633qhp{
height:50px;
width:auto!important;
z-index: 4000;
position: fixed;
flex: auto!important;
border: none!important;
background: none!important;
min-width: 30%!important;
min-height: 45px !important;
resize:both;
left: 50%;
transform: translate(-50%,0);
}
.input-container.svelte-11mx0st.svelte-11mx0st {
/* display: flex; */
top: 0px;
position: absolute;
align-items: flex-end;
bottom: 0px;
background: none;
border: none;
left: 0px;
right: 0px;
}
""") as demo:
    gr.Markdown(js_code)  # js_code already includes its own <script> wrapper
    make_me()

demo.queue()
# demo.queue = False            # earlier attempts to force the queue off, kept for reference
# demo.config["queue"] = False
demo.launch(max_threads=200)