import gradio as gr
from huggingface_hub import InferenceClient
import os
import pandas as pd
from typing import List, Dict, Tuple, Generator
import io
import traceback
import csv
import re
from openai import OpenAI
# CSS settings
css = """
footer {
visibility: hidden;
}
#chatbot-container, #chatbot-data-upload {
height: 700px;
overflow-y: scroll;
}
#chatbot-container .message, #chatbot-data-upload .message {
font-size: 14px;
}
/* Input field background and text color */
textarea, input[type="text"] {
background-color: #ffffff;
color: #000000;
}
/* File upload area height */
#parquet-upload-area {
max-height: 150px;
overflow-y: auto;
}
/* Intro description font size */
#initial-description {
font-size: 14px;
}
/* API key input section style */
.api-key-section {
margin: 10px 0;
padding: 10px;
border: 1px solid #ddd;
border-radius: 5px;
}
.api-key-status {
margin-top: 5px;
font-weight: bold;
}
"""
# Hugging Face inference client (defined here but not referenced elsewhere in this app)
hf_client = InferenceClient(
"CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN")
)
def load_code(filename: str) -> str:
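    """Read a text file and return its contents, or an error message on failure."""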
try:
with open(filename, 'r', encoding='utf-8') as file:
return file.read()
except FileNotFoundError:
return f"{filename} νŒŒμΌμ„ 찾을 수 μ—†μŠ΅λ‹ˆλ‹€."
except Exception as e:
return f"νŒŒμΌμ„ μ½λŠ” 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
def load_parquet(filename: str) -> str:
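    """Load a Parquet file and return its first 10 rows as a Markdown table."""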
try:
df = pd.read_parquet(filename, engine='pyarrow')
return df.head(10).to_markdown(index=False)
except FileNotFoundError:
return f"{filename} νŒŒμΌμ„ 찾을 수 μ—†μŠ΅λ‹ˆλ‹€."
except Exception as e:
return f"νŒŒμΌμ„ μ½λŠ” 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
def clean_response(text: str) -> str:
"""응닡 ν…μŠ€νŠΈ μ •μ œ ν•¨μˆ˜"""
sentences = [s.strip() for s in text.split('.') if s.strip()]
unique_sentences = []
seen = set()
for sentence in sentences:
normalized = ' '.join(sentence.lower().split())
if normalized not in seen:
seen.add(normalized)
unique_sentences.append(sentence)
cleaned_text = '. '.join(unique_sentences)
if cleaned_text and not cleaned_text.endswith('.'):
cleaned_text += '.'
return cleaned_text
def remove_duplicates(text: str) -> str:
"""쀑볡 λ¬Έμž₯ 제거 ν•¨μˆ˜"""
sentences = text.split('.')
unique_sentences = []
seen = set()
for sentence in sentences:
sentence = sentence.strip()
if sentence and sentence not in seen:
seen.add(sentence)
unique_sentences.append(sentence)
return '. '.join(unique_sentences)
def upload_csv(file_path: str) -> Tuple[str, str]:
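    """Validate an uploaded CSV (requires id, text, label, metadata columns),
    deduplicate and type-cast it, and save it as a snappy-compressed Parquet file.

    Returns a status message and the Parquet filename ("" on failure).
    """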
try:
df = pd.read_csv(file_path, sep=',')
required_columns = {'id', 'text', 'label', 'metadata'}
available_columns = set(df.columns)
missing_columns = required_columns - available_columns
if missing_columns:
return f"CSV νŒŒμΌμ— λ‹€μŒ ν•„μˆ˜ 컬럼이 λˆ„λ½λ˜μ—ˆμŠ΅λ‹ˆλ‹€: {', '.join(missing_columns)}", ""
df.drop_duplicates(inplace=True)
df.fillna('', inplace=True)
df = df.astype({'id': 'int32', 'text': 'string', 'label': 'category', 'metadata': 'string'})
parquet_filename = os.path.splitext(os.path.basename(file_path))[0] + '.parquet'
df.to_parquet(parquet_filename, engine='pyarrow', compression='snappy')
return f"{parquet_filename} 파일이 μ„±κ³΅μ μœΌλ‘œ μ—…λ‘œλ“œλ˜κ³  λ³€ν™˜λ˜μ—ˆμŠ΅λ‹ˆλ‹€.", parquet_filename
except Exception as e:
return f"CSV 파일 μ—…λ‘œλ“œ 및 λ³€ν™˜ 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}", ""
def upload_parquet(file_path: str) -> Tuple[str, str, str]:
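    """Load a Parquet file and build a Markdown summary of it (record count,
    columns, per-column statistics, 10-row preview).

    Returns a status message, the summary text, and the dataset as JSON records.
    """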
try:
df = pd.read_parquet(file_path, engine='pyarrow')
data_info = {
"총 λ ˆμ½”λ“œ 수": len(df),
"컬럼 λͺ©λ‘": list(df.columns),
"데이터 νƒ€μž…": df.dtypes.to_dict(),
"결츑치 정보": df.isnull().sum().to_dict()
}
summary = []
summary.append(f"### 데이터셋 κΈ°λ³Έ 정보:")
summary.append(f"- 총 λ ˆμ½”λ“œ 수: {data_info['총 λ ˆμ½”λ“œ 수']}")
summary.append(f"- 컬럼 λͺ©λ‘: {', '.join(data_info['컬럼 λͺ©λ‘'])}")
summary.append("\n### μ»¬λŸΌλ³„ 정보:")
for col in df.columns:
if df[col].dtype in ['int64', 'float64']:
stats = df[col].describe()
summary.append(f"\n{col} (μˆ˜μΉ˜ν˜•):")
summary.append(f"- 평균: {stats['mean']:.2f}")
summary.append(f"- μ΅œμ†Œ: {stats['min']}")
summary.append(f"- μ΅œλŒ€: {stats['max']}")
elif df[col].dtype == 'object' or df[col].dtype == 'string':
unique_count = df[col].nunique()
summary.append(f"\n{col} (ν…μŠ€νŠΈ):")
summary.append(f"- κ³ μœ κ°’ 수: {unique_count}")
if unique_count < 10:
value_counts = df[col].value_counts().head(5)
summary.append("- μƒμœ„ 5개 κ°’:")
for val, count in value_counts.items():
summary.append(f" β€’ {val}: {count}개")
preview = df.head(10).to_markdown(index=False)
summary.append("\n### 데이터 미리보기:")
summary.append(preview)
parquet_content = "\n".join(summary)
parquet_json = df.to_json(orient='records', force_ascii=False)
return "Parquet 파일이 μ„±κ³΅μ μœΌλ‘œ μ—…λ‘œλ“œλ˜μ—ˆμŠ΅λ‹ˆλ‹€.", parquet_content, parquet_json
except Exception as e:
return f"Parquet 파일 μ—…λ‘œλ“œ 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}", "", ""
def text_to_parquet(text: str) -> Tuple[str, str, str]:
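    """Parse lines of `id,text,label,metadata` text into a DataFrame and write
    it to text_to_parquet.parquet.

    Returns a status message, a Markdown preview, and the output filename.
    """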
try:
lines = [line.strip() for line in text.split('\n') if line.strip()]
data = []
for line in lines:
try:
pattern = r'(\d+),([^,]+),([^,]+),(.+)'
match = re.match(pattern, line)
if match:
id_val, text_val, label_val, metadata_val = match.groups()
text_val = text_val.strip().strip('"')
label_val = label_val.strip().strip('"')
metadata_val = metadata_val.strip().strip('"')
data.append({
'id': int(id_val),
'text': text_val,
'label': label_val,
'metadata': metadata_val
})
except Exception as e:
print(f"라인 νŒŒμ‹± 였λ₯˜: {line}\n{str(e)}")
continue
if not data:
return "λ³€ν™˜ν•  데이터가 μ—†μŠ΅λ‹ˆλ‹€.", "", ""
df = pd.DataFrame(data)
df = df.astype({
'id': 'int32',
'text': 'string',
'label': 'string',
'metadata': 'string'
})
parquet_filename = 'text_to_parquet.parquet'
df.to_parquet(parquet_filename, engine='pyarrow', compression='snappy')
preview = df.to_markdown(index=False)
return (
f"{parquet_filename} 파일이 μ„±κ³΅μ μœΌλ‘œ λ³€ν™˜λ˜μ—ˆμŠ΅λ‹ˆλ‹€. 총 {len(df)}개의 λ ˆμ½”λ“œκ°€ μ²˜λ¦¬λ˜μ—ˆμŠ΅λ‹ˆλ‹€.",
preview,
parquet_filename
)
except Exception as e:
error_message = f"ν…μŠ€νŠΈ λ³€ν™˜ 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
print(f"{error_message}\n{traceback.format_exc()}")
return error_message, "", ""
def respond(message: str, history: List[Dict[str, str]], system_message: str = "", max_tokens: int = 4000, temperature: float = 0.5, top_p: float = 0.9, parquet_data: str = None, api_key: str = None) -> Generator[str, None, None]:
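    """Stream a chat completion for `message`, optionally grounding the system
    prompt in a summary of the uploaded dataset (`parquet_data` as JSON records).

    Yields progressively longer, deduplicated responses.
    """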
if not api_key:
yield "⚠️ API Keyκ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. μ„œλΉ„μŠ€ μ΄μš©μ„ μœ„ν•΄ API Keyλ₯Ό μž…λ ₯ν•΄μ£Όμ„Έμš”."
return
    # Initialize the OpenAI client
client = OpenAI(api_key=api_key)
system_prefix = """λ°˜λ“œμ‹œ ν•œκΈ€λ‘œ λ‹΅λ³€ν•  것. λ„ˆλŠ” μ—…λ‘œλ“œλœ 데이터λ₯Ό 기반으둜 μ§ˆλ¬Έμ— λ‹΅λ³€ν•˜λŠ” 역할을 ν•œλ‹€.
μ£Όμš” μ§€μΉ¨:
1. 질문과 직접 κ΄€λ ¨λœ λ‚΄μš©λ§Œ 간단λͺ…λ£Œν•˜κ²Œ λ‹΅λ³€ν•  것
2. 이전 λ‹΅λ³€κ³Ό μ€‘λ³΅λ˜λŠ” λ‚΄μš©μ€ μ œμ™Έν•  것
3. λΆˆν•„μš”ν•œ μ˜ˆμ‹œλ‚˜ λΆ€μ—° μ„€λͺ…은 ν•˜μ§€ 말 것
4. λ™μΌν•œ λ‚΄μš©μ„ λ‹€λ₯Έ ν‘œν˜„μœΌλ‘œ λ°˜λ³΅ν•˜μ§€ 말 것
5. 핡심 μ •λ³΄λ§Œ 전달할 것
"""
if parquet_data:
try:
df = pd.read_json(io.StringIO(parquet_data))
data_summary = df.describe(include='all').to_string()
system_prefix += f"\n\n데이터 μš”μ•½:\n{data_summary}"
except Exception as e:
print(f"데이터 λ‘œλ“œ 였λ₯˜: {str(e)}")
messages = [{"role": "system", "content": system_prefix}]
recent_history = history[-3:] if history else []
for chat in recent_history:
messages.append({"role": chat["role"], "content": chat["content"]})
messages.append({"role": "user", "content": message})
try:
response = client.chat.completions.create(
model="gpt-4o-mini",
messages=messages,
max_tokens=max_tokens,
temperature=temperature,
top_p=top_p,
stream=True
)
full_response = ""
for chunk in response:
if chunk.choices[0].delta.content:
full_response += chunk.choices[0].delta.content
yield clean_response(full_response)
except Exception as e:
error_message = f"응닡 생성 쀑 였λ₯˜ λ°œμƒ: {str(e)}"
print(f"{error_message}\n{traceback.format_exc()}")
yield error_message
def preprocess_text_with_llm(input_text: str, api_key: str = None) -> str:
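    """Ask the LLM to convert raw text into `id,text,label,metadata` CSV rows
    and return the cleaned result (or an error message).
    """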
if not api_key:
return "⚠️ API Keyκ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. μ„œλΉ„μŠ€ μ΄μš©μ„ μœ„ν•΄ API Keyλ₯Ό μž…λ ₯ν•΄μ£Όμ„Έμš”."
    # Initialize the OpenAI client
client = OpenAI(api_key=api_key)
system_prompt = """λ°˜λ“œμ‹œ ν•œκΈ€(ν•œκ΅­μ–΄)둜 λ‹΅λ³€ν•˜μ‹œμ˜€. 당신은 데이터 μ „μ²˜λ¦¬ μ „λ¬Έκ°€μž…λ‹ˆλ‹€. μž…λ ₯된 ν…μŠ€νŠΈλ₯Ό CSV 데이터셋 ν˜•μ‹μœΌλ‘œ λ³€ν™˜ν•˜μ„Έμš”.
κ·œμΉ™:
1. 좜λ ₯ ν˜•μ‹: id,text,label,metadata
2. id: 1λΆ€ν„° μ‹œμž‘ν•˜λŠ” 순차적 번호
3. text: 의미 μžˆλŠ” λ‹¨μœ„λ‘œ λΆ„λ¦¬λœ ν…μŠ€νŠΈ
4. label: ν…μŠ€νŠΈμ˜ μ£Όμ œλ‚˜ μΉ΄ν…Œκ³ λ¦¬λ₯Ό μ•„λž˜ κΈ°μ€€μœΌλ‘œ μ •ν™•ν•˜κ²Œ ν•œ 개만 선택
- Historical_Figure (역사적 인물)
- Military_History (ꡰ사 역사)
- Technology (기술)
- Politics (μ •μΉ˜)
- Culture (λ¬Έν™”)
5. metadata: λ‚ μ§œ, 좜처 λ“± μΆ”κ°€ 정보"""
try:
response = client.chat.completions.create(
model="gpt-4-0125-preview",
messages=[
{"role": "system", "content": system_prompt},
{"role": "user", "content": input_text}
],
max_tokens=4000,
temperature=0.1,
stream=True
)
full_response = ""
for chunk in response:
if chunk.choices[0].delta.content:
full_response += chunk.choices[0].delta.content
processed_text = clean_response(full_response)
        try:
            # Iterate the reader so malformed CSV actually raises csv.Error;
            # a bare csv.reader(...) call alone never parses anything.
            list(csv.reader(io.StringIO(processed_text)))
            return processed_text
        except csv.Error:
            return "LLM이 μ˜¬λ°”λ₯Έ CSV ν˜•μ‹μ„ μƒμ„±ν•˜μ§€ λͺ»ν–ˆμŠ΅λ‹ˆλ‹€. λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
except Exception as e:
error_message = f"μ „μ²˜λ¦¬ 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
print(error_message)
return error_message
# Gradio Blocks interface
with gr.Blocks(css=css) as demo:
    api_key_state = gr.State("")  # State that stores the user's API key
gr.Markdown("# MyEzRAG: LLM이 λ‚˜λ§Œμ˜ λ°μ΄ν„°λ‘œ ν•™μŠ΅ν•œ μ½˜ν…μΈ  생성/λ‹΅λ³€", elem_id="initial-description")
    # API key input section
with gr.Row(elem_classes="api-key-section"):
with gr.Column(scale=3):
api_key_input = gr.Textbox(
label="OpenAI API Key",
placeholder="sk-...",
type="password",
show_label=True
)
with gr.Column(scale=1):
api_key_button = gr.Button("API Key μ„€μ •", variant="primary")
    # API key status display
api_key_status = gr.Markdown("⚠️ API Keyκ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. μ„œλΉ„μŠ€ μ΄μš©μ„ μœ„ν•΄ API Keyλ₯Ό μž…λ ₯ν•΄μ£Όμ„Έμš”.", elem_classes="api-key-status")
    # API key setup handler
def set_api_key(api_key: str):
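        """Validate the entered key and return a status message plus the key to store."""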
if not api_key.strip():
return "⚠️ API Keyκ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. μ„œλΉ„μŠ€ μ΄μš©μ„ μœ„ν•΄ API Keyλ₯Ό μž…λ ₯ν•΄μ£Όμ„Έμš”.", ""
if not api_key.startswith("sk-"):
return "❌ μ˜¬λ°”λ₯΄μ§€ μ•Šμ€ API Key ν˜•μ‹μž…λ‹ˆλ‹€. λ‹€μ‹œ ν™•μΈν•΄μ£Όμ„Έμš”.", ""
return "βœ… API Keyκ°€ μ„±κ³΅μ μœΌλ‘œ μ„€μ •λ˜μ—ˆμŠ΅λ‹ˆλ‹€.", api_key
    # Wire the API key button to the handler
api_key_button.click(
set_api_key,
inputs=[api_key_input],
outputs=[api_key_status, api_key_state]
)
gr.Markdown(
"### 'μ‚¬μš© 방법' 탭을 톡해 μžμ„Έν•œ 이용 방법을 μ°Έκ³ ν•˜μ„Έμš”.\n"
"### Tip) '예제'λ₯Ό 톡해 λ‹€μ–‘ν•œ ν™œμš© 방법을 μ²΄ν—˜ν•˜κ³  μ‘μš©ν•΄ λ³΄μ„Έμš”, 데이터셋 μ—…λ‘œλ“œμ‹œ λ―Έλ¦¬λ³΄κΈ°λŠ” 10건만 좜λ ₯",
elem_id="initial-description"
)
    # First tab: My Dataset + LLM
with gr.Tab("My 데이터셋+LLM"):
gr.Markdown("### LLMκ³Ό λŒ€ν™”ν•˜κΈ°")
chatbot_data_upload = gr.Chatbot(label="챗봇", type="messages", elem_id="chatbot-data-upload")
msg_data_upload = gr.Textbox(label="λ©”μ‹œμ§€ μž…λ ₯", placeholder="여기에 λ©”μ‹œμ§€λ₯Ό μž…λ ₯ν•˜μ„Έμš”...")
send_data_upload = gr.Button("전솑")
with gr.Accordion("μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ 및 μ˜΅μ…˜ μ„€μ •", open=False):
system_message = gr.Textbox(label="System Message", value="λ„ˆλŠ” AI μ‘°μ–Έμž 역할이닀.")
max_tokens = gr.Slider(minimum=1, maximum=8000, value=1000, label="Max Tokens")
temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")
parquet_data_state = gr.State()
def handle_message_data_upload(message: str, history: List[Dict[str, str]], system_message: str, max_tokens: int, temperature: float, top_p: float, parquet_data: str, api_key: str):
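            """Run one chat turn: guard against a missing API key and repeated
            questions, then stream the assistant's reply into the chat history.
            """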
if not api_key:
history = history or []
history.append({"role": "assistant", "content": "⚠️ API Keyκ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. μ„œλΉ„μŠ€ μ΄μš©μ„ μœ„ν•΄ API Keyλ₯Ό μž…λ ₯ν•΄μ£Όμ„Έμš”."})
yield history, ""
return
history = history or []
recent_questions = [chat['content'].strip().lower() for chat in history[-3:] if chat['role'] == 'user']
if message.strip().lower() in recent_questions:
yield history + [{"role": "assistant", "content": "λ™μΌν•œ 질문이 μ΅œκ·Όμ— μžˆμ—ˆμŠ΅λ‹ˆλ‹€. λ‹€λ₯Έ μ§ˆλ¬Έμ„ ν•΄μ£Όμ„Έμš”."}], ""
return
try:
history.append({"role": "user", "content": message})
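                # Note: temperature is fixed at 0.3 for this call, so the value from
                # the Temperature slider is received by this handler but not applied.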
response_gen = respond(
message,
history,
system_message,
max_tokens,
temperature=0.3,
top_p=top_p,
parquet_data=parquet_data,
api_key=api_key
)
partial_response = ""
for partial in response_gen:
partial_response = partial
display_history = history + [{"role": "assistant", "content": partial_response}]
yield display_history, ""
history.append({"role": "assistant", "content": partial_response})
except Exception as e:
response = f"였λ₯˜ λ°œμƒ: {str(e)}"
history.append({"role": "assistant", "content": response})
yield history, ""
send_data_upload.click(
handle_message_data_upload,
inputs=[
msg_data_upload,
chatbot_data_upload,
system_message,
max_tokens,
temperature,
top_p,
parquet_data_state,
api_key_state,
],
outputs=[chatbot_data_upload, msg_data_upload],
queue=True
)
        # Examples
with gr.Accordion("예제", open=False):
gr.Examples(
examples=[
["μ—…λ‘œλ“œλœ 데이터셋에 λŒ€ν•΄ μš”μ•½ μ„€λͺ…ν•˜λΌ."],
["μ—…λ‘œλ“œλœ 데이터셋 νŒŒμΌμ„ ν•™μŠ΅ λ°μ΄ν„°λ‘œ ν™œμš©ν•˜μ—¬, λ³Έ μ„œλΉ„μŠ€λ₯Ό SEO μ΅œμ ν™”ν•˜μ—¬ λΈ”λ‘œκ·Έ 포슀트(κ°œμš”, λ°°κ²½ 및 ν•„μš”μ„±, κΈ°μ‘΄ μœ μ‚¬ μ œν’ˆ/μ„œλΉ„μŠ€μ™€ λΉ„κ΅ν•˜μ—¬ 특μž₯점, ν™œμš©μ²˜, κ°€μΉ˜, κΈ°λŒ€νš¨κ³Ό, 결둠을 포함)둜 4000 토큰 이상 μž‘μ„±ν•˜λΌ"],
["μ—…λ‘œλ“œλœ 데이터셋 νŒŒμΌμ„ ν•™μŠ΅ λ°μ΄ν„°λ‘œ ν™œμš©ν•˜μ—¬, μ‚¬μš© 방법과 차별점, νŠΉμ§•, 강점을 μ€‘μ‹¬μœΌλ‘œ 4000 토큰 이상 유튜브 μ˜μƒ 슀크립트 ν˜•νƒœλ‘œ μž‘μ„±ν•˜λΌ"],
["μ—…λ‘œλ“œλœ 데이터셋 νŒŒμΌμ„ ν•™μŠ΅ λ°μ΄ν„°λ‘œ ν™œμš©ν•˜μ—¬, μ œν’ˆ 상세 νŽ˜μ΄μ§€ ν˜•μ‹μ˜ λ‚΄μš©μ„ 4000 토큰 이상 μžμ„Ένžˆ μ„€λͺ…ν•˜λΌ"],
["μ—…λ‘œλ“œλœ 데이터셋 νŒŒμΌμ„ ν•™μŠ΅ λ°μ΄ν„°λ‘œ ν™œμš©ν•˜μ—¬, FAQ 20건을 μƒμ„Έν•˜κ²Œ μž‘μ„±ν•˜λΌ. 4000토큰 이상 μ‚¬μš©ν•˜λΌ."],
["μ—…λ‘œλ“œλœ 데이터셋 νŒŒμΌμ„ ν•™μŠ΅ λ°μ΄ν„°λ‘œ ν™œμš©ν•˜μ—¬, νŠΉν—ˆ μΆœμ›μ— ν™œμš©ν•  기술 및 λΉ„μ¦ˆλ‹ˆμŠ€ λͺ¨λΈ 츑면을 ν¬ν•¨ν•˜μ—¬ νŠΉν—ˆ μΆœμ›μ„œ ꡬ성에 맞게 ν˜μ‹ μ μΈ 창의 발λͺ… λ‚΄μš©μ„ μ€‘μ‹¬μœΌλ‘œ 4000 토큰 이상 μž‘μ„±ν•˜λΌ."],
],
inputs=msg_data_upload,
label="예제 선택",
)
        # Parquet file upload
gr.Markdown("### Parquet 파일 μ—…λ‘œλ“œ")
with gr.Row():
with gr.Column():
parquet_upload = gr.File(
label="Parquet 파일 μ—…λ‘œλ“œ", type="filepath", elem_id="parquet-upload-area"
)
parquet_upload_button = gr.Button("μ—…λ‘œλ“œ")
parquet_upload_status = gr.Textbox(label="μ—…λ‘œλ“œ μƒνƒœ", interactive=False)
parquet_preview_chat = gr.Markdown(label="Parquet 파일 미리보기")
def handle_parquet_upload(file_path: str):
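            """Upload a Parquet file and return status, Markdown preview, and JSON records for chat grounding."""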
message, parquet_content, parquet_json = upload_parquet(file_path)
if parquet_json:
return message, parquet_content, parquet_json
else:
return message, "", ""
parquet_upload_button.click(
handle_parquet_upload,
inputs=parquet_upload,
outputs=[parquet_upload_status, parquet_preview_chat, parquet_data_state]
)
    # Second tab: CSV to My Dataset
with gr.Tab("CSV to My 데이터셋"):
gr.Markdown("### CSV 파일 μ—…λ‘œλ“œ 및 Parquet λ³€ν™˜")
with gr.Row():
with gr.Column():
csv_file = gr.File(label="CSV 파일 μ—…λ‘œλ“œ", type="filepath")
upload_button = gr.Button("μ—…λ‘œλ“œ 및 λ³€ν™˜")
upload_status = gr.Textbox(label="μ—…λ‘œλ“œ μƒνƒœ", interactive=False)
parquet_preview = gr.Markdown(label="Parquet 파일 미리보기")
download_button = gr.File(label="Parquet 파일 λ‹€μš΄λ‘œλ“œ", interactive=False)
def handle_csv_upload(file_path: str):
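            """Convert the uploaded CSV to Parquet and return status, preview, and download path."""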
message, parquet_filename = upload_csv(file_path)
if parquet_filename:
parquet_content = load_parquet(parquet_filename)
return message, parquet_content, parquet_filename
else:
return message, "", None
upload_button.click(
handle_csv_upload,
inputs=csv_file,
outputs=[upload_status, parquet_preview, download_button]
)
    # Third tab: Text to My Dataset
with gr.Tab("Text to My 데이터셋"):
gr.Markdown("### ν…μŠ€νŠΈλ₯Ό μž…λ ₯ν•˜λ©΄ CSV둜 λ³€ν™˜ ν›„ Parquet으둜 μžλ™ μ „ν™˜λ©λ‹ˆλ‹€.")
with gr.Row():
with gr.Column():
text_input = gr.Textbox(
label="ν…μŠ€νŠΈ μž…λ ₯ (각 행은 `id,text,label,metadata` ν˜•μ‹μœΌλ‘œ μž…λ ₯)",
lines=10,
placeholder='예: 1,"μ΄μˆœμ‹ ","μž₯κ΅°","거뢁선"\n2,"원균","μž₯κ΅°","λͺ¨ν•¨"\n3,"μ„ μ‘°","μ™•","μ‹œκΈ°"\n4,"λ„μš”ν† λ―Έ νžˆλ°μš”μ‹œ","μ™•","침랡"'
)
convert_button = gr.Button("λ³€ν™˜ 및 λ‹€μš΄λ‘œλ“œ")
convert_status = gr.Textbox(label="λ³€ν™˜ μƒνƒœ", interactive=False)
parquet_preview_convert = gr.Markdown(label="Parquet 파일 미리보기")
download_parquet_convert = gr.File(label="Parquet 파일 λ‹€μš΄λ‘œλ“œ", interactive=False)
def handle_text_to_parquet(text: str):
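            """Convert the entered text to Parquet and return status, preview, and download path."""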
message, parquet_content, parquet_filename = text_to_parquet(text)
if parquet_filename:
return message, parquet_content, parquet_filename
else:
return message, "", None
convert_button.click(
handle_text_to_parquet,
inputs=text_input,
outputs=[convert_status, parquet_preview_convert, download_parquet_convert]
)
    # Fourth tab: Text Preprocessing with LLM
with gr.Tab("Text Preprocessing with LLM"):
gr.Markdown("### ν…μŠ€νŠΈλ₯Ό μž…λ ₯ν•˜λ©΄ LLM이 데이터셋 ν˜•μ‹μ— 맞게 μ „μ²˜λ¦¬ν•˜μ—¬ 좜λ ₯ν•©λ‹ˆλ‹€.")
with gr.Row():
with gr.Column():
raw_text_input = gr.Textbox(
label="ν…μŠ€νŠΈ μž…λ ₯",
lines=15,
placeholder="여기에 μ „μ²˜λ¦¬ν•  ν…μŠ€νŠΈλ₯Ό μž…λ ₯ν•˜μ„Έμš”..."
)
with gr.Row():
preprocess_button = gr.Button("μ „μ²˜λ¦¬ μ‹€ν–‰", variant="primary")
clear_button = gr.Button("μ΄ˆκΈ°ν™”")
preprocess_status = gr.Textbox(
label="μ „μ²˜λ¦¬ μƒνƒœ",
interactive=False,
value="λŒ€κΈ° 쀑..."
)
processed_text_output = gr.Textbox(
label="μ „μ²˜λ¦¬λœ 데이터셋 좜λ ₯",
lines=15,
interactive=False
)
convert_to_parquet_button = gr.Button("Parquet으둜 λ³€ν™˜")
download_parquet = gr.File(label="λ³€ν™˜λœ Parquet 파일 λ‹€μš΄λ‘œλ“œ")
def handle_text_preprocessing(input_text: str, api_key: str):
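            """Stream a status update, then the LLM-preprocessed dataset text."""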
if not api_key:
yield "⚠️ API Keyκ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€.", ""
return
if not input_text.strip():
yield "μž…λ ₯ ν…μŠ€νŠΈκ°€ μ—†μŠ΅λ‹ˆλ‹€.", ""
return
try:
yield "μ „μ²˜λ¦¬λ₯Ό μ‹œμž‘ν•©λ‹ˆλ‹€...", ""
processed_text = preprocess_text_with_llm(input_text, api_key)
if processed_text:
yield "μ „μ²˜λ¦¬κ°€ μ™„λ£Œλ˜μ—ˆμŠ΅λ‹ˆλ‹€.", processed_text
else:
yield "μ „μ²˜λ¦¬ κ²°κ³Όκ°€ μ—†μŠ΅λ‹ˆλ‹€.", ""
except Exception as e:
yield f"처리 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}", ""
def clear_inputs():
return "", "λŒ€κΈ° 쀑...", ""
def convert_to_parquet_file(processed_text: str):
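            """Convert the preprocessed text to a Parquet file and return status and file path."""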
if not processed_text.strip():
return "λ³€ν™˜ν•  ν…μŠ€νŠΈκ°€ μ—†μŠ΅λ‹ˆλ‹€.", None
try:
message, parquet_content, parquet_filename = text_to_parquet(processed_text)
if parquet_filename:
return message, parquet_filename
return message, None
except Exception as e:
return f"Parquet λ³€ν™˜ 쀑 였λ₯˜ λ°œμƒ: {str(e)}", None
preprocess_button.click(
handle_text_preprocessing,
inputs=[raw_text_input, api_key_state],
outputs=[preprocess_status, processed_text_output],
queue=True
)
clear_button.click(
clear_inputs,
outputs=[raw_text_input, preprocess_status, processed_text_output]
)
convert_to_parquet_button.click(
convert_to_parquet_file,
inputs=[processed_text_output],
outputs=[preprocess_status, download_parquet]
)
with gr.Accordion("예제 ν…μŠ€νŠΈ", open=False):
gr.Examples(
examples=[
["μ΄μˆœμ‹ μ€ μ‘°μ„  μ€‘κΈ°μ˜ 무신이닀. κ·ΈλŠ” μž„μ§„μ™œλž€ λ‹Ήμ‹œ 해ꡰ을 μ΄λŒμ—ˆλ‹€. 거뢁선을 λ§Œλ“€μ–΄ μ™œκ΅°κ³Ό μ‹Έμ› λ‹€."],
["인곡지λŠ₯은 컴퓨터 κ³Όν•™μ˜ ν•œ 뢄야이닀. κΈ°κ³„ν•™μŠ΅μ€ 인곡지λŠ₯의 ν•˜μœ„ 뢄야이닀. λ”₯λŸ¬λ‹μ€ κΈ°κ³„ν•™μŠ΅μ˜ ν•œ 방법이닀."]
],
inputs=raw_text_input,
label="예제 선택"
)
    # Usage guide tab
with gr.Tab("πŸ“š μ‚¬μš© 방법"):
gr.Markdown("""
# MyEzRAG μ‚¬μš© κ°€μ΄λ“œ
## πŸ”‘ API Key μ„€μ •
1. OpenAI API Keyλ₯Ό 상단 μž…λ ₯창에 μž…λ ₯
2. 'API Key μ„€μ •' λ²„νŠΌ 클릭
3. μ„€μ • 성곡 λ©”μ‹œμ§€ 확인
## 1️⃣ My 데이터셋+LLM νƒ­
### κΈ°λŠ₯
- μ—…λ‘œλ“œλœ Parquet 데이터셋을 기반으둜 LLMκ³Ό λŒ€ν™”
- λ°μ΄ν„°μ…‹μ˜ λ‚΄μš©μ„ ν™œμš©ν•œ μ½˜ν…μΈ  생성
### μ‚¬μš© 방법
1. Parquet 파일 μ—…λ‘œλ“œ μ„Ήμ…˜μ—μ„œ 데이터셋 νŒŒμΌμ„ μ—…λ‘œλ“œ
2. μ±„νŒ…μ°½μ— μ›ν•˜λŠ” μ§ˆλ¬Έμ΄λ‚˜ μš”μ²­μ‚¬ν•­ μž…λ ₯
3. 예제 λ²„νŠΌμ„ ν™œμš©ν•˜μ—¬ λ‹€μ–‘ν•œ ν™œμš© 사둀 μ²΄ν—˜
### 팁
- μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ μ„€μ •μœΌλ‘œ 응닡 μŠ€νƒ€μΌ μ‘°μ • κ°€λŠ₯
- μƒμ„Έν•œ 질문일수둝 더 μ •ν™•ν•œ λ‹΅λ³€ 제곡
---
## 2️⃣ CSV to My 데이터셋 νƒ­
### κΈ°λŠ₯
- CSV νŒŒμΌμ„ Parquet ν˜•μ‹μœΌλ‘œ λ³€ν™˜
- 데이터 μ΅œμ ν™” 및 μ •μ œ
### μ‚¬μš© 방법
1. CSV 파일 μ€€λΉ„ (ν•„μˆ˜ 컬럼: id, text, label, metadata)
2. 파일 μ—…λ‘œλ“œ ν›„ 'μ—…λ‘œλ“œ 및 λ³€ν™˜' λ²„νŠΌ 클릭
3. λ³€ν™˜λœ Parquet 파일 λ‹€μš΄λ‘œλ“œ
### μ£Όμ˜μ‚¬ν•­
- CSV νŒŒμΌμ€ λ°˜λ“œμ‹œ ν•„μˆ˜ μ»¬λŸΌμ„ 포함해야 함
- 인코딩은 UTF-8 ꢌμž₯
---
## 3️⃣ Text to My 데이터셋 νƒ­
### κΈ°λŠ₯
- ν…μŠ€νŠΈ ν˜•μ‹μ˜ 데이터λ₯Ό Parquet으둜 λ³€ν™˜
- μˆ˜λ™ 데이터 μž…λ ₯ 지원
### μ‚¬μš© 방법
1. μ§€μ •λœ ν˜•μ‹μœΌλ‘œ ν…μŠ€νŠΈ μž…λ ₯
```
1,"μ΄μˆœμ‹ ","μž₯κ΅°","거뢁선"
2,"원균","μž₯κ΅°","λͺ¨ν•¨"
```
2. 'λ³€ν™˜ 및 λ‹€μš΄λ‘œλ“œ' λ²„νŠΌ 클릭
3. λ³€ν™˜λœ 파일 확인 및 λ‹€μš΄λ‘œλ“œ
### μž…λ ₯ ν˜•μ‹
- id: 순차적 번호
- text: μ‹€μ œ ν…μŠ€νŠΈ λ‚΄μš©
- label: λΆ„λ₯˜ 라벨
- metadata: λΆ€κ°€ 정보
---
## 4️⃣ Text Preprocessing with LLM νƒ­
### κΈ°λŠ₯
- LLM을 ν™œμš©ν•œ μžλ™ ν…μŠ€νŠΈ μ „μ²˜λ¦¬
- κ΅¬μ‘°ν™”λœ 데이터셋 생성
### μ‚¬μš© 방법
1. 원문 ν…μŠ€νŠΈ μž…λ ₯
2. 'μ „μ²˜λ¦¬ μ‹€ν–‰' λ²„νŠΌ 클릭
3. κ²°κ³Ό 확인 ν›„ ν•„μš”μ‹œ Parquet λ³€ν™˜
### νŠΉμ§•
- μžλ™ λ ˆμ΄λΈ”λ§
- λ¬Έμž₯ λ‹¨μœ„ 뢄리
- 쀑볡 제거
- 데이터 μ •κ·œν™”
## πŸ’‘ 일반적인 팁
- API KeyλŠ” μ•ˆμ „ν•˜κ²Œ λ³΄κ΄€ν•˜κ³  주기적으둜 κ°±μ‹ 
- 각 νƒ­μ˜ 예제λ₯Ό μ°Έκ³ ν•˜μ—¬ μ‚¬μš©λ²• 읡히기
- 데이터 ν’ˆμ§ˆμ΄ μ’‹μ„μˆ˜λ‘ 더 λ‚˜μ€ κ²°κ³Ό 제곡
- 였λ₯˜ λ°œμƒ μ‹œ μž…λ ₯ 데이터 ν˜•μ‹ 확인
- λŒ€μš©λŸ‰ 처리 μ‹œ μ μ ˆν•œ 청크 크기둜 λΆ„ν•  처리
## ⚠️ μ£Όμ˜μ‚¬ν•­
- API Keyλ₯Ό 타인과 κ³΅μœ ν•˜μ§€ μ•ŠκΈ°
- λ―Όκ°ν•œ κ°œμΈμ •λ³΄ ν¬ν•¨ν•˜μ§€ μ•ŠκΈ°
- 데이터 λ°±μ—… ꢌμž₯
- λ„€νŠΈμ›Œν¬ μƒνƒœ 확인
- λΈŒλΌμš°μ € μΊμ‹œ 주기적 정리
## πŸ” 문제 ν•΄κ²°
- API Key 였λ₯˜: ν‚€ ν˜•μ‹ 및 μœ νš¨μ„± 확인
- 였λ₯˜ λ°œμƒ μ‹œ μž…λ ₯ 데이터 ν˜•μ‹ 확인
- 파일 μ—…λ‘œλ“œ μ‹€νŒ¨ μ‹œ 파일 크기 및 ν˜•μ‹ 확인
- λ³€ν™˜ μ‹€νŒ¨ μ‹œ 데이터 인코딩 확인
- 응닡이 느릴 경우 데이터 크기 μ‘°μ •
""")
gr.Markdown("### [email protected]", elem_id="initial-description")
if __name__ == "__main__":
demo.launch(share=True)