Update app.py

app.py (CHANGED)
@@ -316,25 +316,33 @@ class Demo:
     async def generation_code(self, query: Optional[str], _setting: Dict[str, str]):
         if not query or query.strip() == '':
             query = get_random_placeholder()
-
+
+        # Check whether image generation is needed
+        needs_image = '이미지' in query or '그림' in query or 'image' in query.lower()
+        image_prompt = None
+
+        # Extract the image prompt
+        if needs_image:
+            for keyword in ['이미지:', '그림:', 'image:']:
+                if keyword in query.lower():
+                    image_prompt = query.split(keyword)[1].strip()
+                    break
+            if not image_prompt:
+                image_prompt = query  # Use the whole query if no explicit prompt is given
+
         messages = [{'role': Role.SYSTEM, 'content': _setting['system']}]
         messages.append({'role': Role.USER, 'content': query})
-
+
         system_message = messages[0]['content']
-
-        claude_messages = [
-            {"role": "user", "content": query}
-        ]
-
+        claude_messages = [{"role": "user", "content": query}]
         openai_messages = [
             {"role": "system", "content": system_message},
             {"role": "user", "content": query}
         ]

         try:
-            # Start loading
             yield [
-                "",
+                "",
                 None,
                 gr.update(active_key="loading"),
                 gr.update(open=True)
@@ -345,7 +353,7 @@ class Demo:
             try:
                 async for content in try_claude_api(system_message, claude_messages):
                     yield [
-                        "",
+                        "",
                         None,
                         gr.update(active_key="loading"),
                         gr.update(open=True)
@@ -358,7 +366,7 @@ class Demo:

                 async for content in try_openai_api(openai_messages):
                     yield [
-                        "",
+                        "",
                         None,
                         gr.update(active_key="loading"),
                         gr.update(open=True)
@@ -367,14 +375,72 @@ class Demo:
                     collected_content = content

             if collected_content:
-                #
+                # If image generation is needed, create the image and embed it
+                if needs_image and image_prompt:
+                    try:
+                        print(f"Generating image for prompt: {image_prompt}")
+                        # Generate the image with the FLUX model
+                        if pipe is not None:
+                            image = generate_image(
+                                prompt=image_prompt,
+                                height=512,
+                                width=512,
+                                steps=8,
+                                scales=3.5,
+                                seed=random.randint(1, 10000)
+                            )
+
+                            # Encode the image as Base64
+                            buffered = BytesIO()
+                            image.save(buffered, format="PNG")
+                            img_str = base64.b64encode(buffered.getvalue()).decode()
+
+                            # Build the HTML for the image
+                            image_html = f'''
+                            <div class="generated-image" style="margin: 20px 0; text-align: center;">
+                                <h3 style="color: #333; margin-bottom: 10px;">Generated Image:</h3>
+                                <img src="data:image/png;base64,{img_str}"
+                                     style="max-width: 100%;
+                                            border-radius: 10px;
+                                            box-shadow: 0 4px 8px rgba(0,0,0,0.1);">
+                                <p style="color: #666; margin-top: 10px; font-style: italic;">
+                                    Prompt: {html.escape(image_prompt)}
+                                </p>
+                            </div>
+                            '''
+
+                            # Insert the image into the HTML response
+                            if '```html' in collected_content:
+                                # Add the image inside the existing HTML code block
+                                collected_content = collected_content.replace('```html\n', f'```html\n{image_html}')
+                            else:
+                                # Wrap the image in an HTML code block and add it
+                                collected_content = f'```html\n{image_html}\n```\n{collected_content}'
+
+                            print("Image generation successful")
+                        else:
+                            raise Exception("FLUX model not initialized")
+
+                    except Exception as e:
+                        print(f"Image generation error: {str(e)}")
+                        error_message = f'''
+                        <div style="color: #ff4d4f; padding: 10px; margin: 10px 0;
+                                    border-left: 4px solid #ff4d4f; background: #fff2f0;">
+                            <p>Failed to generate image: {str(e)}</p>
+                        </div>
+                        '''
+                        if '```html' in collected_content:
+                            collected_content = collected_content.replace('```html\n', f'```html\n{error_message}')
+                        else:
+                            collected_content = f'```html\n{error_message}\n```\n{collected_content}'
+
+                # Show the final result
                 yield [
                     collected_content,
                     send_to_sandbox(remove_code_block(collected_content)),
                     gr.update(active_key="render"),
                     gr.update(open=False)
                 ]
-
             else:
                 raise ValueError("No content was generated from either API")

@@ -382,6 +448,9 @@ class Demo:
             print(f"Error details: {str(e)}")
             raise ValueError(f'Error calling APIs: {str(e)}')

+    def clear_history(self):
+        return []
+
 def remove_code_block(text):
     pattern = r'```html\n(.+?)\n```'
     match = re.search(pattern, text, re.DOTALL)
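A note on the keyword scan added in this commit: each keyword is tested against query.lower(), but the split runs on the original query, so a mixed-case keyword such as "Image:" would match yet leave query.split(keyword) without a second element. The sketch below is illustrative only (the helper name is not in app.py); it matches and slices on the lowercased text instead, using the same keyword list:

    from typing import Optional

    def extract_image_prompt(query: str) -> Optional[str]:
        # Illustrative variant of the commit's keyword scan: match and slice on the
        # lowercased text so mixed-case keywords are handled consistently.
        lowered = query.lower()
        for keyword in ['이미지:', '그림:', 'image:']:
            idx = lowered.find(keyword)
            if idx != -1:
                return query[idx + len(keyword):].strip()
        # The committed code falls back to the whole query when no keyword is found.
        return query.strip() or None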
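Both the success and the error branches splice their markup into collected_content the same way: drop the snippet right after the opening ```html fence when one is present, otherwise wrap it in a new fenced block placed in front of the response. A minimal standalone sketch of that splice (the helper name is illustrative, not part of app.py):

    def splice_into_html_block(content: str, snippet: str) -> str:
        # Mirrors the replace() used in the commit: the snippet lands after every
        # opening ```html fence; without a fence it gets its own block, prepended
        # to the original content.
        if '```html' in content:
            return content.replace('```html\n', f'```html\n{snippet}')
        return f'```html\n{snippet}\n```\n{content}'

With such a helper, the two branches above reduce to splice_into_html_block(collected_content, image_html) and splice_into_html_block(collected_content, error_message).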
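remove_code_block is only partially visible in the final hunk; the pattern r'```html\n(.+?)\n```' with re.DOTALL suggests it extracts the inner HTML of a fenced block before the result is passed to send_to_sandbox. A minimal sketch under that assumption (the fallback of returning the text unchanged is a guess, since the rest of the function lies outside the hunk):

    import re

    def remove_code_block(text: str) -> str:
        # Pull the body out of a ```html fenced block; assumed fallback: return the
        # text as-is when no fenced block is found.
        pattern = r'```html\n(.+?)\n```'
        match = re.search(pattern, text, re.DOTALL)
        return match.group(1).strip() if match else text.strip()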