Spaces: Running on Zero

Update app.py

app.py CHANGED
@@ -66,7 +66,7 @@ def initialize_model():
 
     # Do not reload if the model is already loaded
     if pipe is not None:
-        return
+        return True
 
     try:
         if not path.exists(cache_path):
@@ -161,9 +161,20 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
     </div>
     """)
 
-    # Status display variables
-    error_message = gr.…
-    loading_status = gr.…
+    # Status display variables (Textbox instead of HTML)
+    error_message = gr.Textbox(
+        value="",
+        label="Error",
+        visible=False,
+        elem_classes=["error-message"]
+    )
+
+    loading_status = gr.Textbox(
+        value="",
+        label="Status",
+        visible=False,
+        elem_classes=["loading-indicator"]
+    )
 
     with gr.Row():
         with gr.Column(scale=3):
@@ -257,32 +268,24 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
 
     @spaces.GPU
     def process_image(height, width, steps, scales, prompt, seed):
+        global pipe
+
         # Check whether the model is initialized
         if pipe is None:
-            …
+            return None, "Loading the model... the first run may take a while.", True, "", False
 
             model_loaded = initialize_model()
             if not model_loaded:
-                …
-                loading_status.update(visible=False)
-                return None
-
-            loading_status.update(visible=False)
+                return None, "", False, "An error occurred while loading the model. Please refresh the page and try again.", True
 
         # Validate the input
         if not prompt or prompt.strip() == "":
-            …
-            return None
+            return None, "", False, "Please enter an image description.", True
 
         # Filter the prompt
         is_safe, filtered_prompt = filter_prompt(prompt)
         if not is_safe:
-            …
-            return None
-
-        # Reset the error message
-        error_message.update(visible=False)
-        loading_status.update("Generating the image...", visible=True)
+            return None, "", False, "The prompt contains inappropriate content.", True
 
         try:
             # Garbage collection to free memory
@@ -295,6 +298,9 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
         else:
             seed = int(seed)  # handle the type conversion safely
 
+        # Status message for image generation
+        loading_message = "Generating the image..."
+
         # Generate the image
         with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16), timer("inference"):
             generator = torch.Generator(device="cuda").manual_seed(seed)
@@ -317,42 +323,46 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
                 max_sequence_length=256
             ).images[0]
 
-            …
-            return generated_image
+            # On success, return the image and hide the status messages
+            return generated_image, "", False, "", False
 
         except Exception as e:
             error_msg = f"An error occurred while generating the image: {str(e)}"
             print(error_msg)
             traceback.print_exc()
-            error_message.update(error_msg, visible=True)
-            loading_status.update(visible=False)
 
             # Clean up memory after the error
             gc.collect()
             torch.cuda.empty_cache()
 
-            return None
+            return None, "", False, error_msg, True
 
     def update_seed():
         return get_random_seed()
-
-    # …
-    def …
-        …
+
+    # Image generation preparation function
+    def prepare_generation(height, width, steps, scales, prompt, seed):
+        # Load the model if it has not been loaded yet
+        if pipe is None:
+            is_loaded = initialize_model()
+            if not is_loaded:
+                return None, "Model loading failed. Please refresh the page and try again.", True, "", False
+
+        # Start the generation process
         return process_image(height, width, steps, scales, prompt, seed)
 
+    # Wire up the button click event
     generate_btn.click(
-        …
+        fn=prepare_generation,
         inputs=[height, width, steps, scales, prompt, seed],
-        outputs=[output]
+        outputs=[output, loading_status, loading_status, error_message, error_message]
     )
 
     randomize_seed.click(
-        update_seed,
+        fn=update_seed,
         outputs=[seed]
     )
 
 if __name__ == "__main__":
     # Do not preload the model at app start (lazy-load on first request)
-    demo.queue(max_size=10).launch()
-
+    demo.queue(max_size=10).launch()
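For reference, the status-reporting pattern this commit wires up (a separate return slot for each component's value and for its visibility, with components repeated in outputs) is commonly written with gr.update(), where each component appears only once. Below is a minimal self-contained sketch, assuming a recent Gradio release; the handler, component names, and messages are illustrative and not the Space's actual code.

import gradio as gr

def generate(prompt):
    # Illustrative handler: on bad input, hide the status box and surface an
    # error; on success, return the image and hide both status components.
    if not prompt.strip():
        return (
            None,
            gr.update(visible=False),                                       # loading_status
            gr.update(value="Please enter a description.", visible=True),   # error_message
        )
    image = None  # placeholder for the real pipeline call
    return image, gr.update(visible=False), gr.update(visible=False)

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Image(label="Result")
    loading_status = gr.Textbox(label="Status", visible=False)
    error_message = gr.Textbox(label="Error", visible=False)
    generate_btn = gr.Button("Generate")
    # Each output component is listed once; gr.update() carries both the new
    # value and the visibility flag for that component.
    generate_btn.click(
        fn=generate,
        inputs=[prompt],
        outputs=[output, loading_status, error_message],
    )

if __name__ == "__main__":
    demo.launch()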