openfree committed on
Commit 37929fa · verified · 1 Parent(s): 27cc406

Update app.py

Files changed (1)
  1. app.py +89 -17
app.py CHANGED
@@ -433,12 +433,20 @@ async def try_openai_api(openai_messages):
 
 class Demo:
     def __init__(self):
-        pass
+        self.file_state = FileAnalysisState()  # file state management
 
     async def generation_code(self, query: Optional[str], _setting: Dict[str, str]):
         if not query or query.strip() == '':
             query = get_random_placeholder()
 
+        # If file analysis results exist, prepend them to the query as context
+        context = ""
+        if self.file_state.last_analysis:
+            context = "Based on the uploaded files:\n"
+            for filename, analysis in self.file_state.last_analysis.items():
+                context += f"\nFile '{filename}':\n{analysis}\n"
+            query = f"{context}\n\nUser Query: {query}"
+
         # Check whether image generation is needed
         needs_image = '이미지' in query or '그림' in query or 'image' in query.lower()
         image_prompt = None
@@ -450,13 +458,40 @@ class Demo:
                 image_prompt = query.split(keyword)[1].strip()
                 break
         if not image_prompt:
-            image_prompt = query  # use the whole query if there is no explicit prompt
-
+            image_prompt = query
+
         messages = [{'role': Role.SYSTEM, 'content': _setting['system']}]
         messages.append({'role': Role.USER, 'content': query})
 
         system_message = messages[0]['content']
         claude_messages = [{"role": "user", "content": query}]
+
+        # If an image was uploaded, attach it to the Claude API message
+        for filename, file in self.file_state.uploaded_files.items():
+            if any(filename.lower().endswith(ext) for ext in ['.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp']):
+                try:
+                    image = Image.open(file)
+                    image_bytes = io.BytesIO()
+                    image.save(image_bytes, format=image.format)
+                    image_base64 = base64.b64encode(image_bytes.getvalue()).decode('utf-8')
+
+                    claude_messages[0]["content"] = [
+                        {
+                            "type": "image",
+                            "source": {
+                                "type": "base64",
+                                "media_type": f"image/{image.format.lower()}",
+                                "data": image_base64
+                            }
+                        },
+                        {
+                            "type": "text",
+                            "text": query
+                        }
+                    ]
+                except Exception as e:
+                    print(f"Error processing uploaded image: {str(e)}")
+
         openai_messages = [
             {"role": "system", "content": system_message},
             {"role": "user", "content": query}
@@ -497,11 +532,10 @@ class Demo:
             collected_content = content
 
         if collected_content:
-            # If image generation is needed
+            # If image generation with FLUX is needed
             if needs_image and image_prompt:
                 try:
                     print(f"Generating image for prompt: {image_prompt}")
-                    # Generate the image with the FLUX model
                     if pipe is not None:
                         image = generate_image(
                             prompt=image_prompt,
@@ -512,12 +546,10 @@ class Demo:
                             seed=random.randint(1, 10000)
                         )
 
-                        # Encode the image as Base64
                        buffered = BytesIO()
                         image.save(buffered, format="PNG")
                         img_str = base64.b64encode(buffered.getvalue()).decode()
 
-                        # Add the image to the HTML
                         image_html = f'''
                         <div class="generated-image" style="margin: 20px 0; text-align: center;">
                             <h3 style="color: #333; margin-bottom: 10px;">Generated Image:</h3>
@@ -531,12 +563,9 @@ class Demo:
                         </div>
                         '''
 
-                        # Insert the image into the HTML response
                         if '```html' in collected_content:
-                            # Add the image inside the HTML code block
                             collected_content = collected_content.replace('```html\n', f'```html\n{image_html}')
                         else:
-                            # Wrap it in a new HTML code block and add the image
                             collected_content = f'```html\n{image_html}\n```\n{collected_content}'
 
                         print("Image generation successful")
@@ -556,7 +585,32 @@ class Demo:
                     else:
                         collected_content = f'```html\n{error_message}\n```\n{collected_content}'
 
-            # Display the final result
+            # Display uploaded images
+            for filename, file in self.file_state.uploaded_files.items():
+                if any(filename.lower().endswith(ext) for ext in ['.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp']):
+                    try:
+                        image = Image.open(file)
+                        buffered = BytesIO()
+                        image.save(buffered, format=image.format)
+                        img_str = base64.b64encode(buffered.getvalue()).decode()
+
+                        uploaded_image_html = f'''
+                        <div class="uploaded-image" style="margin: 20px 0; text-align: center;">
+                            <h3 style="color: #333; margin-bottom: 10px;">Uploaded Image: {html.escape(filename)}</h3>
+                            <img src="data:image/{image.format.lower()};base64,{img_str}"
+                                 style="max-width: 100%;
+                                        border-radius: 10px;
+                                        box-shadow: 0 4px 8px rgba(0,0,0,0.1);">
+                        </div>
+                        '''
+
+                        if '```html' in collected_content:
+                            collected_content = collected_content.replace('```html\n', f'```html\n{uploaded_image_html}')
+                        else:
+                            collected_content = f'```html\n{uploaded_image_html}\n```\n{collected_content}'
+                    except Exception as e:
+                        print(f"Error displaying uploaded image: {str(e)}")
+
             yield [
                 collected_content,
                 send_to_sandbox(remove_code_block(collected_content)),
@@ -571,8 +625,15 @@ class Demo:
             raise ValueError(f'Error calling APIs: {str(e)}')
 
     def clear_history(self):
+        self.file_state.last_analysis.clear()
+        self.file_state.uploaded_files.clear()
         return []
 
+    def update_file_state(self, analysis_result, files):
+        """Update the stored file analysis results and file objects"""
+        self.file_state.last_analysis.update(analysis_result)
+        self.file_state.uploaded_files.update(files)
+
 def remove_code_block(text):
     pattern = r'```html\n(.+?)\n```'
     match = re.search(pattern, text, re.DOTALL)
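`remove_code_block` (context lines above) extracts the body of the first ```html fence with a non-greedy, DOTALL regex, which is why both insertion branches in this commit place new markup immediately after the opening fence so it survives extraction. A quick standalone check of that behaviour; the return line is an assumption, since only the first lines of the function appear in this diff:

```python
import re

def remove_code_block(text):
    # Same pattern as in app.py; the return statement is assumed from context.
    pattern = r'```html\n(.+?)\n```'
    match = re.search(pattern, text, re.DOTALL)
    return match.group(1).strip() if match else text.strip()

sample = "```html\n<div>injected image</div>\n<p>page body</p>\n```\ntrailing commentary"
print(remove_code_block(sample))
# Prints only the fenced HTML: the injected <div> plus the page body.
```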
@@ -686,7 +747,13 @@ def get_random_placeholder():
 def update_placeholder():
     return gr.update(placeholder=get_random_placeholder())
 
+# Global state for storing file analysis results
+class FileAnalysisState:
+    def __init__(self):
+        self.last_analysis = {}   # stored as {filename: analysis result}
+        self.uploaded_files = {}  # stored as {filename: file object}
 
+file_state = FileAnalysisState()
 
 def create_main_interface():
     """Create the main interface"""
@@ -1140,29 +1207,34 @@ Use the "Generate" button for basic creation, "Enhance" button for prompt improv
             with antd.Tabs.Item(key="render"):
                 sandbox = gr.HTML(elem_classes="html_content")
 
-    # Define event handlers
     async def handle_file_upload(files):
         if not files:
             return "No files uploaded"
-
+
         if len(files) > FileProcessor.MAX_FILES:
             return f"Maximum {FileProcessor.MAX_FILES} files allowed"
-
+
         results = []
+        file_state.last_analysis.clear()
+        file_state.uploaded_files.clear()
+
         for file in files:
             if not FileProcessor.is_allowed_file(file.name):
                 results.append(f"Unsupported file: {file.name}")
                 continue
-
+
             if os.path.getsize(file.name) > FileProcessor.MAX_FILE_SIZE:
                 results.append(f"File too large: {file.name}")
                 continue
-
+
             result = await FileProcessor.process_file(file)
+            file_state.last_analysis[file.name] = result
+            file_state.uploaded_files[file.name] = file
             results.append(f"Analysis for {file.name}:\n{result}\n")
-
+
         return "\n".join(results)
 
+
     # Connect the event handlers
     analyze_btn.click(
         fn=handle_file_upload,