Update app.py

app.py CHANGED
@@ -5,8 +5,8 @@ import re
 import tempfile
 from collections.abc import Iterator
 from threading import Thread
-import json
-import requests
+import json
+import requests
 import cv2
 import gradio as gr
 import spaces
@@ -17,26 +17,18 @@ from transformers import AutoProcessor, Gemma3ForConditionalGeneration, TextIteratorStreamer

 # CSV/TXT analysis
 import pandas as pd
-
 # PDF text extraction
 import PyPDF2

 ##############################################################################
-# SERPHouse API key from environment variable
+# SERPHouse API key from environment variable
 ##############################################################################
 SERPHOUSE_API_KEY = os.getenv("SERPHOUSE_API_KEY", "")

 ##############################################################################
-# Simple keyword extraction function
+# Simple keyword extraction function
 ##############################################################################
 def extract_keywords(text: str, top_k: int = 5) -> str:
-    """
-    Simplest approach:
-      1) lowercase the text
-      2) remove every character except letters, digits, and whitespace
-      3) split on whitespace
-      4) take the first top_k tokens
-    """
     text = text.lower()
     text = re.sub(r"[^a-z0-9\s]", "", text)
     tokens = text.split()
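
For reference, a quick sketch of what this keyword extraction does to a query (the final join/return step is an assumption, since the diff cuts off before the end of extract_keywords):

    # Hypothetical usage; assumes extract_keywords joins the first top_k tokens with spaces.
    query = "What's new in Gemma 3 27B multimodal support?"
    print(extract_keywords(query, top_k=5))
    # -> "whats new in gemma 3"
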
@@ -45,11 +37,12 @@ def extract_keywords(text: str, top_k: int = 5) -> str:

 ##############################################################################
 # Call the SERPHouse Live endpoint
-# - top 20 results
+# - when passing the top-20 result JSON to the LLM, include every field (link, snippet, etc.)
 ##############################################################################
 def do_web_search(query: str) -> str:
     """
-
+    Return the full top-20 'organic' result items (title, link, snippet, etc.)
+    as a JSON string.
     """
     try:
         url = "https://api.serphouse.com/serp/live"
@@ -59,11 +52,11 @@ def do_web_search(query: str) -> str:
             "lang": "en",
             "device": "desktop",
             "serp_type": "web",
-            "num_result": "20",
+            "num_result": "20",
             "api_token": SERPHOUSE_API_KEY,
         }
         resp = requests.get(url, params=params, timeout=30)
-        resp.raise_for_status()
+        resp.raise_for_status()
         data = resp.json()

         results = data.get("results", {})
@@ -71,10 +64,9 @@ def do_web_search(query: str) -> str:
         if not organic:
             return "No web search results found."

-        # Convert each item to JSON (all fields) and store it
         summary_lines = []
         for idx, item in enumerate(organic[:20], start=1):
-            # the entire item as a JSON string
+            # the entire item as a JSON string
             item_json = json.dumps(item, ensure_ascii=False, indent=2)
             summary_lines.append(f"Result {idx}:\n{item_json}\n")

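
A minimal usage sketch for the search helper (it assumes SERPHOUSE_API_KEY is set in the Space's environment; the exact fields inside each organic item depend on the SERPHouse response):

    # Sketch: only call the live endpoint when a key is configured.
    if SERPHOUSE_API_KEY:
        ws_result = do_web_search("gemma 3 27b vision")
        print(ws_result[:300])   # "Result 1:\n{ ...full organic item as JSON... }"
    else:
        print("SERPHOUSE_API_KEY is not set; skipping web search.")
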
@@ -85,9 +77,9 @@ def do_web_search(query: str) -> str:


 ##############################################################################
-#
+# Load the model/processor
 ##############################################################################
-MAX_CONTENT_CHARS = 4000
+MAX_CONTENT_CHARS = 4000
 model_id = os.getenv("MODEL_ID", "google/gemma-3-27b-it")
 processor = AutoProcessor.from_pretrained(model_id, padding_side="left")
 model = Gemma3ForConditionalGeneration.from_pretrained(
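
The diff truncates the from_pretrained call; a minimal sketch of a typical loading configuration for this model class (torch_dtype and device_map here are assumptions, not values taken from app.py):

    import torch

    # Sketch only: load the 27B checkpoint with automatic device placement.
    model = Gemma3ForConditionalGeneration.from_pretrained(
        model_id,
        device_map="auto",           # assumption: let accelerate place the weights
        torch_dtype=torch.bfloat16,  # assumption: bf16 weights
    )
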
@@ -108,7 +100,6 @@ def analyze_csv_file(path: str) -> str:
     """
     try:
         df = pd.read_csv(path)
-        # show at most 50 rows and 10 columns
         if df.shape[0] > 50 or df.shape[1] > 10:
             df = df.iloc[:50, :10]
         df_str = df.to_string()
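
The hunk shows only the start of analyze_csv_file; a sketch of how the truncation step fits into the full helper (the return formatting and error wording are assumptions):

    def analyze_csv_file_sketch(path: str) -> str:
        """Hypothetical stand-in mirroring the truncation logic shown above."""
        try:
            df = pd.read_csv(path)
            if df.shape[0] > 50 or df.shape[1] > 10:   # keep at most 50 rows / 10 columns
                df = df.iloc[:50, :10]
            return f"[CSV: {os.path.basename(path)}]\n{df.to_string()}"   # assumed format
        except Exception as e:
            return f"Failed to read CSV file: {e}"                        # assumed wording
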
@@ -193,13 +184,6 @@ def count_files_in_history(history: list[dict]) -> tuple[int, int]:


 def validate_media_constraints(message: dict, history: list[dict]) -> bool:
-    """
-    - no more than one video
-    - video and images cannot be mixed
-    - no more than MAX_NUM_IMAGES images
-    - if <image> tags are present, the tag count must match the number of actual images
-    - CSV, TXT, PDF, etc. are not restricted here
-    """
     media_files = []
     for f in message["files"]:
         if re.search(r"\.(png|jpg|jpeg|gif|webp)$", f, re.IGNORECASE) or f.endswith(".mp4"):
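
The removed docstring listed the validation rules; a sketch of checks implementing those rules (the MAX_NUM_IMAGES value and the helper name are hypothetical, not taken from app.py):

    MAX_NUM_IMAGES = 5  # assumption: the real limit is defined elsewhere in app.py

    def media_rules_sketch(media_files: list[str], text: str) -> bool:
        """Hypothetical helper mirroring the rules from the removed docstring."""
        video_count = sum(1 for f in media_files if f.endswith(".mp4"))
        image_count = len(media_files) - video_count
        if video_count > 1:
            return False   # no more than one video
        if video_count >= 1 and image_count > 0:
            return False   # video and images cannot be mixed
        if image_count > MAX_NUM_IMAGES:
            return False   # too many images
        if "<image>" in text and text.count("<image>") != image_count:
            return False   # <image> tag count must match the real image count
        return True
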
@@ -381,7 +365,7 @@ def process_history(history: list[dict]) -> list[dict]:


 ##############################################################################
-# Main inference function (when web search is checked: automatic keyword extraction -> search -> results into a system msg
+# Main inference function (when web search is checked: automatic keyword extraction -> search -> results into a system msg)
 ##############################################################################
 @spaces.GPU(duration=120)
 def run(
@@ -398,47 +382,47 @@ def run(
         return

     try:
-        #
-
+        # (1) Buffer the system messages up front so they can be merged into one
+        combined_system_msg = ""
+
+        # If the user entered a system_prompt
+        if system_prompt.strip():
+            combined_system_msg += f"[System Prompt]\n{system_prompt.strip()}\n\n"
+
+        # (2) When web search is checked, extract keywords
         if use_web_search:
             user_text = message["text"]
-            # 1) Extract keywords
             ws_query = extract_keywords(user_text, top_k=5)
-
-
-
-
-
-
-            history_system_msg = {
-                "role": "system",
-                "content": [{"type": "text", "text": system_search_content}]
-            }
+            # If the extracted keywords are empty, skip the search
+            if ws_query.strip():
+                logger.info(f"[Auto WebSearch Keyword] {ws_query!r}")
+                ws_result = do_web_search(ws_query)
+                # Append the search results to the system message
+                combined_system_msg += f"[Search top-20 Full Items Based on user prompt]\n{ws_result}\n\n"
         else:
-
-
-                "content": [{"type": "text", "text": "No web search results"}]
-            }
+            # No keywords were extracted, so skip the search entirely
+            combined_system_msg += "[No valid keywords found, skipping WebSearch]\n\n"

-        #
+        # (3) If the combined system message is not empty in the end
         messages = []
-        if
-
-
-
-
-
-
+        if combined_system_msg.strip():
+            # Create a single system-role message
+            messages.append({
+                "role": "system",
+                "content": [{"type": "text", "text": combined_system_msg.strip()}],
+            })
+
+        # (4) Previous conversation history
         messages.extend(process_history(history))
-
-        # The user's message
+
+        # (5) The user's message
         user_content = process_new_user_message(message)
         for item in user_content:
             if item["type"] == "text" and len(item["text"]) > MAX_CONTENT_CHARS:
                 item["text"] = item["text"][:MAX_CONTENT_CHARS] + "\n...(truncated)..."
         messages.append({"role": "user", "content": user_content})

-        # Build the LLM inputs
+        # (6) Build the LLM inputs
         inputs = processor.apply_chat_template(
             messages,
             add_generation_prompt=True,
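
For orientation, the messages list that the new run() logic assembles has roughly this shape before apply_chat_template is called (contents abbreviated, illustrative only):

    messages = [
        # Present only when combined_system_msg is non-empty
        {"role": "system",
         "content": [{"type": "text", "text": "[System Prompt]\n...\n[Search top-20 Full Items Based on user prompt]\n..."}]},
        # ...earlier turns from process_history(history)...
        {"role": "user",
         "content": [{"type": "text", "text": "latest user message, truncated to MAX_CONTENT_CHARS"}]},
    ]
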
@@ -453,7 +437,7 @@ def run(
             streamer=streamer,
             max_new_tokens=max_new_tokens,
         )
-
+
         t = Thread(target=model.generate, kwargs=gen_kwargs)
         t.start()

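
Generation runs on a background thread and streams partial text back to the UI; a self-contained sketch of that pattern (the helper name and buffer handling are illustrative, not lifted from app.py):

    from collections.abc import Iterator
    from threading import Thread
    from transformers import TextIteratorStreamer

    def stream_generate_sketch(model, processor, inputs, max_new_tokens: int) -> Iterator[str]:
        """Hypothetical helper showing the streamer/Thread pattern used in run()."""
        streamer = TextIteratorStreamer(
            processor.tokenizer, skip_prompt=True, skip_special_tokens=True
        )
        gen_kwargs = dict(inputs, streamer=streamer, max_new_tokens=max_new_tokens)
        Thread(target=model.generate, kwargs=gen_kwargs).start()
        output = ""
        for chunk in streamer:   # decoded text arrives as generation proceeds
            output += chunk
            yield output         # gr.ChatInterface renders each partial string
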
@@ -632,7 +616,7 @@ title_html = """
 </p>
 """

-with gr.Blocks(css=css, title="Vidraft-G3-27B-Multi-Search") as demo:
+with gr.Blocks(css=css, title="Vidraft-Gemma-3-27B") as demo:
     gr.Markdown(title_html)

     with gr.Row():
@@ -669,9 +653,9 @@ with gr.Blocks(css=css, title="Vidraft-G3-27B-Multi-Search") as demo:
                 value=2000,
             )

-            gr.Markdown("<br><br>")
+            gr.Markdown("<br><br>")

-        # Main ChatInterface
+        # Main ChatInterface
         with gr.Column(scale=7):
             chat = gr.ChatInterface(
                 fn=run,
@@ -690,7 +674,7 @@ with gr.Blocks(css=css, title="Vidraft-G3-27B-Multi-Search") as demo:
                     system_prompt_box,
                     max_tokens_slider,
                     web_search_checkbox,
-                    web_search_text,
+                    web_search_text,
                 ],
                 stop_btn=False,
                 title="Vidraft-Gemma-3-27B",
@@ -706,9 +690,11 @@ with gr.Blocks(css=css, title="Vidraft-G3-27B-Multi-Search") as demo:
     gr.Markdown("### Example Inputs (click to load)")
     gr.Examples(
         examples=examples,
-        inputs=[],  #
+        inputs=[],  # empty list because there are no inputs to connect
         cache_examples=False
     )

 if __name__ == "__main__":
-
+    # With share=True, a public URL becomes available
+    demo.launch(share=True)
+