import gradio as gr
import os
import requests
import json
import time
from dotenv import load_dotenv

load_dotenv()
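

# Builds the complete Gradio demo: LLM keyword extraction and streaming chat
# through Fireworks AI's DeepSeek V3 endpoint, with optional SerpHouse web
# search for the Deep Research mode.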
def create_deepseek_interface():
    api_key = os.getenv("FW_API_KEY")
    serphouse_api_key = os.getenv("SERPHOUSE_API_KEY")

    if not api_key:
        print("Warning: the FW_API_KEY environment variable is not set.")
    if not serphouse_api_key:
        print("Warning: the SERPHOUSE_API_KEY environment variable is not set.")

    # Extract 3-5 web-search keywords from the user's question with the LLM.
    def extract_keywords_with_llm(query):
        if not api_key:
            return "FW_API_KEY for LLM keyword extraction is not set.", query

        url = "https://api.fireworks.ai/inference/v1/chat/completions"
        payload = {
            "model": "accounts/fireworks/models/deepseek-v3-0324",
            "max_tokens": 200,
            "temperature": 0.1,
            "messages": [
                {
                    "role": "system",
                    "content": (
                        "Extract 3-5 key keywords from the user's question that are "
                        "effective for a web search. Output only the keywords, separated "
                        "by commas, and do not provide any other explanation or additional information."
                    )
                },
                {
                    "role": "user",
                    "content": query
                }
            ]
        }
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}"
        }

        try:
            response = requests.post(url, headers=headers, json=payload)
            response.raise_for_status()
            result = response.json()

            keywords = result["choices"][0]["message"]["content"].strip()

            # Fall back to the original query if the output does not look like a keyword list.
            if len(keywords) > 100 or "," not in keywords:
                return f"Extracted keywords: {keywords}", query

            return f"Extracted keywords: {keywords}", keywords

        except Exception as e:
            print(f"Error during keyword extraction: {str(e)}")
            return f"Error during keyword extraction: {str(e)}", query

    # Run a SerpHouse web search using the extracted keywords as the query.
    def search_with_serphouse(query):
        if not serphouse_api_key:
            return "SERPHOUSE_API_KEY is not set."

        extraction_result, search_query = extract_keywords_with_llm(query)
        print(f"Original query: {query}")
        print(extraction_result)

        url = "https://api.serphouse.com/serp/live"
        payload = {
            "q": search_query,
            "domain": "google.com",
            "loc": "us",
            "lang": "en",
            "device": "desktop",
            "serp_type": "web",
            "page": 1,
            "num": 5
        }
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {serphouse_api_key}"
        }

        try:
            response = requests.post(url, headers=headers, json=payload)
            response.raise_for_status()

            search_results = response.json()

            # Format the top organic results as plain text for the LLM context.
            formatted_results = []
            formatted_results.append(f"Search query: {search_query}\n\n")

            if "organic" in search_results and len(search_results["organic"]) > 0:
                for result in search_results["organic"][:5]:
                    title = result.get("title", "No title")
                    snippet = result.get("snippet", "No content")
                    link = result.get("link", "#")
                    formatted_results.append(f"Title: {title}\nContent: {snippet}\nLink: {link}\n\n")

                return "".join(formatted_results)
            else:
                return f"No search results found for '{search_query}'."

        except Exception as e:
            return f"Error during search: {str(e)}"

    # Stream a chat completion from DeepSeek V3, optionally grounded in fresh web search results.
    def query_deepseek_streaming(message, history, use_deep_research):
        if not api_key:
            yield history, "The FW_API_KEY environment variable is not set. Please check the environment variables on the server."
            return

        search_context = ""
        search_info = ""
        if use_deep_research:
            # Show an interim status while keywords are extracted and the web is searched.
            yield history + [(message, "🔍 Extracting optimal keywords and searching the web...")], ""

            search_results = search_with_serphouse(message)
            if not search_results.startswith("Error during search") and not search_results.startswith("SERPHOUSE_API_KEY"):
                search_context = f"""
The following are recent search results related to the user's question. Use this information to provide an accurate, up-to-date response:

{search_results}

Answer the user's question below based on the search results above. If a clear answer cannot be found in the search results, use your own knowledge to give the best possible answer.
When citing the search results, state the source and make sure the answer reflects the latest information.
"""
                search_info = "🔍 Deep Research enabled: generating a response based on relevant web search results..."

        # Rebuild the conversation history as a list of chat messages.
        messages = []
        for user, assistant in history:
            messages.append({"role": "user", "content": user})
            messages.append({"role": "assistant", "content": assistant})

        # Prepend the search context as a system message when available.
        if search_context:
            messages.insert(0, {"role": "system", "content": search_context})

        messages.append({"role": "user", "content": message})

        url = "https://api.fireworks.ai/inference/v1/chat/completions"
        payload = {
            "model": "accounts/fireworks/models/deepseek-v3-0324",
            "max_tokens": 20480,
            "top_p": 1,
            "top_k": 40,
            "presence_penalty": 0,
            "frequency_penalty": 0,
            "temperature": 0.6,
            "messages": messages,
            "stream": True
        }
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}"
        }

        try:
            response = requests.post(url, headers=headers, json=payload, stream=True)
            response.raise_for_status()

            new_history = history.copy()

            # Start the new turn with the search status (if any) so it stays visible while streaming.
            start_msg = search_info if search_info else ""
            new_history.append((message, start_msg))

            full_response = start_msg

            # Parse the server-sent event stream line by line.
            for line in response.iter_lines():
                if line:
                    line_text = line.decode('utf-8')

                    if line_text.startswith("data: "):
                        line_text = line_text[6:]

                    if line_text == "[DONE]":
                        break

                    try:
                        chunk = json.loads(line_text)
                        chunk_content = chunk.get("choices", [{}])[0].get("delta", {}).get("content", "")

                        if chunk_content:
                            full_response += chunk_content
                            # Update the last chat turn with the partial response.
                            new_history[-1] = (message, full_response)
                            yield new_history, ""
                    except json.JSONDecodeError:
                        continue

            yield new_history, ""

        except requests.exceptions.RequestException as e:
            error_msg = f"API error: {str(e)}"
            # A 4xx Response is falsy, so compare against None instead of relying on truthiness.
            if hasattr(e, 'response') and e.response is not None and e.response.status_code == 401:
                error_msg = "Authentication failed. Please check the FW_API_KEY environment variable."
            yield history, error_msg

    with gr.Blocks(theme="soft", fill_height=True) as demo:
        gr.Markdown(
            """
            # 🤖 DeepSeek V3 Streaming Interface
            ### An advanced AI model served by Fireworks AI - with real-time streaming responses
            """
        )

        with gr.Row():
            with gr.Column():
                chatbot = gr.Chatbot(
                    height=500,
                    show_label=False,
                    container=True
                )

                with gr.Row():
                    use_deep_research = gr.Checkbox(
                        label="Enable Deep Research",
                        info="Extract optimal keywords and use up-to-date information from a web search"
                    )

                with gr.Row():
                    msg = gr.Textbox(
                        label="Message",
                        placeholder="Type your prompt here...",
                        show_label=False,
                        scale=9
                    )
                    submit = gr.Button("Send", variant="primary", scale=1)

                with gr.Row():
                    clear = gr.ClearButton([msg, chatbot], value="🧹 Clear conversation")

                gr.Examples(
                    examples=[
                        "Explain the difference between Transformers and RNNs in deep learning.",
                        "Write a Python function that finds prime numbers within a given range.",
                        "Summarize the key concepts of reinforcement learning."
                    ],
                    inputs=msg
                )

        # Markdown area used to surface API errors below the chat.
        error_box = gr.Markdown("")

        submit.click(
            query_deepseek_streaming,
            inputs=[msg, chatbot, use_deep_research],
            outputs=[chatbot, error_box]
        ).then(
            lambda: "",
            None,
            [msg]
        )

        msg.submit(
            query_deepseek_streaming,
            inputs=[msg, chatbot, use_deep_research],
            outputs=[chatbot, error_box]
        ).then(
            lambda: "",
            None,
            [msg]
        )

    return demo


if __name__ == "__main__":
    demo = create_deepseek_interface()
    demo.launch(debug=True)