Update app.py
app.py
CHANGED
@@ -21,33 +21,12 @@ def load_fn(models):
     models_load = {}
     for model in models:
         if model not in models_load.keys():
-            # try:
-            #     m = gr.load(f'models/{model}')
-            # except Exception as error:
-            #     print(f"Error loading model {model}: {error}")
-            #     m = gr.Interface(lambda _: None, inputs=gr.Textbox(), outputs=gr.Image(), enable_queue=False)
-            # models_load.update({model: m})
             try:
-
-
-
-
-
-                # If the response is a tuple, return the first item assuming it's the image
-                image_response = image_response[0]
-
-                # Ensure the response is an image or image-like object
-                if isinstance(image_response, gr.Image):
-                    return image_response
-                elif isinstance(image_response, str):  # If the response is a path or URL, pass it as a string
-                    return gr.Image(image_response)  # You can handle it based on your model's return type
-                else:
-                    print(f"Unexpected response type: {type(image_response)}")
-                    return None
-
-            except Exception as e:
-                print(f"Error occurred: {e}")
-                return None
+                m = gr.load(f'models/{model}')
+            except Exception as error:
+                print(f"Error loading model {model}: {error}")
+                m = gr.Interface(lambda _: None, inputs=gr.Textbox(), outputs=gr.Image(), enable_queue=False)
+            models_load.update({model: m})
 
 
 load_fn(models)
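The restored loader falls back to a placeholder gr.Interface whenever gr.load raises, so every requested model still ends up in models_load and gen_fn can look it up by name. Below is a minimal runnable sketch of that fallback path, assuming only that Gradio is installed; the invalid model ID is hypothetical, used to force the except branch, and enable_queue=False from the diff is dropped here since newer Gradio releases no longer accept that argument.

import gradio as gr

models_load = {}

def load_fn(models):
    for model in models:
        if model not in models_load.keys():
            try:
                # Load the hosted model as a callable Gradio interface
                m = gr.load(f'models/{model}')
            except Exception as error:
                print(f"Error loading model {model}: {error}")
                # Placeholder: accepts a prompt, returns no image
                m = gr.Interface(lambda _: None, inputs=gr.Textbox(), outputs=gr.Image())
            models_load.update({model: m})

load_fn(['no-such-org/no-such-model'])  # hypothetical ID; triggers the fallback
print(type(models_load['no-such-org/no-such-model']))  # the fallback Interface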
@@ -81,16 +60,37 @@ def gen_fn(model_str, prompt, negative_prompt):
     combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
     print(f"Generating with prompt: {combined_prompt}")
 
-    try:
-        future = executor.submit(models_load[model_str], f'{prompt} {negative_prompt} {noise}')
-        result = future.result()
-        return result
-    except requests.exceptions.Timeout:
-        print(f"Timeout occurred for model {model_str}. Please try again later.")
-        return None
-    except Exception as e:
-        print(f"Error occurred: {e}")
-        return None
+    # try:
+    #     future = executor.submit(models_load[model_str], f'{prompt} {negative_prompt} {noise}')
+    #     result = future.result()
+    #     return result
+    # except requests.exceptions.Timeout:
+    #     print(f"Timeout occurred for model {model_str}. Please try again later.")
+    #     return None
+    # except Exception as e:
+    #     print(f"Error occurred: {e}")
+    #     return None
+    try:
+        # Attempt to generate the image
+        image_response = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
+
+        # Check if the image_response is a tuple, handle accordingly
+        if isinstance(image_response, tuple):
+            # If the response is a tuple, return the first item assuming it's the image
+            image_response = image_response[0]
+
+        # Ensure the response is an image or image-like object
+        if isinstance(image_response, gr.Image):
+            return image_response
+        elif isinstance(image_response, str):  # If the response is a path or URL, pass it as a string
+            return gr.Image(image_response)  # You can handle it based on your model's return type
+        else:
+            print(f"Unexpected response type: {type(image_response)}")
+            return None
+
+    except Exception as e:
+        print(f"Error occurred: {e}")
+        return None
 
 
 
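Taken together, the two hunks swap gen_fn from the removed executor.submit future to a direct call on the loaded interface, then normalize tuple, component, and string responses before returning. Below is a sketch of wiring that into a UI; the stub generator, the noise construction, and the Blocks layout are assumptions standing in for parts of app.py this diff does not show.

import random

import gradio as gr
from PIL import Image

# Stub standing in for models_load as populated by load_fn: it writes a
# placeholder file and returns its path, the string case gen_fn handles.
def stub_model(combined_prompt):
    Image.new('RGB', (64, 64), 'gray').save('stub.png')
    return 'stub.png'

models_load = {'stub/model': stub_model}
models = list(models_load.keys())

def gen_fn(model_str, prompt, negative_prompt):
    # Assumption: the random suffix keeps repeated prompts from hitting a cached result
    noise = str(random.randint(0, 99999999999))
    print(f"Generating with prompt: {prompt} {model_str} {negative_prompt} {noise}")
    try:
        image_response = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
        if isinstance(image_response, tuple):
            image_response = image_response[0]  # first element assumed to be the image
        if isinstance(image_response, gr.Image):
            return image_response
        elif isinstance(image_response, str):
            return gr.Image(image_response)  # wrap a path or URL for the output component
        else:
            print(f"Unexpected response type: {type(image_response)}")
            return None
    except Exception as e:
        print(f"Error occurred: {e}")
        return None

with gr.Blocks() as demo:
    model_choice = gr.Dropdown(models, value=models[0], label='Model')
    prompt = gr.Textbox(label='Prompt')
    negative_prompt = gr.Textbox(label='Negative prompt')
    output = gr.Image(label='Result')
    gr.Button('Generate').click(gen_fn, [model_choice, prompt, negative_prompt], output)

demo.launch()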