seawolf2357 committed on
Commit
56354e9
·
verified ·
1 Parent(s): 5718b5c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -5
app.py CHANGED
@@ -251,9 +251,9 @@ def process_new_user_message(message: dict) -> list[dict]:
251
  return content_list
252
 
253
  # 7) 이미지 처리
254
- if "<image>" in message["text"]:
255
  # interleaved
256
- return process_interleaved_images(message)
257
  else:
258
  # 일반 여러 장
259
  for img_path in image_files:
@@ -261,7 +261,6 @@ def process_new_user_message(message: dict) -> list[dict]:
261
 
262
  return content_list
263
 
264
-
265
  ##################################################
266
  # history -> LLM 메시지 변환
267
  ##################################################
@@ -300,8 +299,26 @@ def run(message: dict, history: list[dict], system_prompt: str = "", max_new_tok
300
  if system_prompt:
301
  messages.append({"role": "system", "content": [{"type": "text", "text": system_prompt}]})
302
  messages.extend(process_history(history))
303
- messages.append({"role": "user", "content": process_new_user_message(message)})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
304
 
 
305
  inputs = processor.apply_chat_template(
306
  messages,
307
  add_generation_prompt=True,
@@ -324,7 +341,6 @@ def run(message: dict, history: list[dict], system_prompt: str = "", max_new_tok
324
  output += new_text
325
  yield output
326
 
327
-
328
  ##################################################
329
  # 예시들 (기존)
330
  ##################################################
 
251
  return content_list
252
 
253
  # 7) 이미지 처리
254
+ if "<image>" in message["text"] and image_files: # 이미지 파일이 있는 경우에만
255
  # interleaved
256
+ return process_interleaved_images({"text": message["text"], "files": image_files})
257
  else:
258
  # 일반 여러 장
259
  for img_path in image_files:
 
261
 
262
  return content_list
263
 
 
264
  ##################################################
265
  # history -> LLM 메시지 변환
266
  ##################################################
 
299
  if system_prompt:
300
  messages.append({"role": "system", "content": [{"type": "text", "text": system_prompt}]})
301
  messages.extend(process_history(history))
302
+
303
+ # 사용자 메시지 처리
304
+ user_content = process_new_user_message(message)
305
+
306
+ # 이미지가 아닌 파일들만 텍스트로 변환
307
+ processed_content = []
308
+ for item in user_content:
309
+ if item["type"] == "image":
310
+ # ์ด๋ฏธ์ง€ ํŒŒ์ผ์ธ์ง€ ํ™•์ธ
311
+ if re.search(r"\.(png|jpg|jpeg|gif|webp)$", item["url"], re.IGNORECASE):
312
+ processed_content.append(item)
313
+ else:
314
+ # 이미지가 아닌 파일은 텍스트로 변환
315
+ processed_content.append({"type": "text", "text": f"[File: {os.path.basename(item['url'])}]"})
316
+ else:
317
+ processed_content.append(item)
318
+
319
+ messages.append({"role": "user", "content": processed_content})
320
 
321
+ # LLM 처리는 그대로 진행
322
  inputs = processor.apply_chat_template(
323
  messages,
324
  add_generation_prompt=True,
 
341
  output += new_text
342
  yield output
343
 
 
344
  ##################################################
345
  # 예시들 (기존)
346
  ##################################################