# ChatExample / app.py
import os
import pymssql
import pandas as pd
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.llms import OpenAI
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.document_loaders import TextLoader
from langchain.document_loaders import DirectoryLoader
from langchain.document_loaders import CSVLoader
from langchain.memory import ConversationBufferMemory
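
# NOTE: pymssql and pandas are imported above but never used in the live code path.
# The helper below is an illustrative sketch (not called by the app) of how source
# rows might be exported from MSSQL into a CSV for ingestion; the query, table name
# and output path are placeholders, not taken from the original code.
def export_rows_to_csv(server, user, password, database,
                       query="SELECT * FROM qa_table",   # hypothetical table
                       out_path="qa_export.csv"):
    conn = pymssql.connect(server=server, user=user,
                           password=password, database=database)
    try:
        df = pd.read_sql(query, conn)      # load rows into a DataFrame
        df.to_csv(out_path, index=False)   # write a CSV the document loaders can read
    finally:
        conn.close()
    return out_path
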
def Loading():
    return "Loading data..."

def LoadData(openai_key):
    if openai_key:  # an empty string or None means no key was provided
        os.environ["OPENAI_API_KEY"] = openai_key
        persist_directory = 'realdb_LLM'
        embedding = OpenAIEmbeddings()
        # Open the pre-built Chroma vector store persisted on disk.
        vectordb = Chroma(
            persist_directory=persist_directory,
            embedding_function=embedding
        )
        global retriever
        retriever = vectordb.as_retriever(search_kwargs={"k": 1})
        return "Ready"
    else:
        return "Please enter your OpenAI API key."
# Function that handles the chatbot's answer.
def respond(message, chat_history):
    try:
        # if freelv == "":
        #     freelv = 0
        qa_chain = RetrievalQA.from_chain_type(
            llm=OpenAI(temperature=0.4),
            # llm=ChatOpenAI(temperature=0),
            chain_type="stuff",
            retriever=retriever
        )
        result = qa_chain(message)
        bot_message = result['result']
        # bot_message += '\n\n' + ' [Sources]'
        # # Append the sources of the answer
        # # (requires return_source_documents=True in from_chain_type above).
        # for i, doc in enumerate(result['source_documents']):
        #     bot_message += str(i+1) + '. ' + doc.metadata['source'] + ' '
        # Append the user's message and the bot's response to the chat history.
        chat_history.append((message, bot_message))
        return "", chat_history
    except Exception:
        # Most likely the retriever is not ready or the API key is missing/invalid.
        chat_history.append(("", "Please enter your OpenAI API key."))
        return " ", chat_history
import gradio as gr
# Chatbot description (header HTML)
title = """
<div style="text-align: center; max-width: 500px; margin: 0 auto;">
    <div>
        <h1>Pretraining Chatbot V2 Real</h1>
    </div>
    <p style="margin-bottom: 10px; font-size: 94%">
        Chatbot using an OpenAI LLM (similarity search)
    </p>
</div>
"""
# Styling
css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
with gr.Blocks(css=css) as UnivChatbot:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)

        with gr.Row():
            with gr.Column(scale=3):
                openai_key = gr.Textbox(label="Your OpenAI API key", type="password", placeholder="Enter your OpenAI API key", elem_id="InputKey", show_label=False, container=False)
            with gr.Column(scale=1):
                langchain_status = gr.Textbox(placeholder="Status", interactive=False, show_label=False, container=False)
            # with gr.Column(scale=1):
            #     freelv = gr.Textbox(placeholder="Flexibility", show_label=False, container=False)
            with gr.Column(scale=1):
                chk_key = gr.Button("Confirm", variant="primary")

        chatbot = gr.Chatbot(label="University Chatbot System (OpenAI LLM)", elem_id="chatbot")  # top left

        with gr.Row():
            with gr.Column(scale=9):
                msg = gr.Textbox(label="Input", placeholder="Please enter your question.", elem_id="InputQuery", show_label=False, container=False)

        with gr.Row():
            with gr.Column(scale=1):
                submit = gr.Button("Send", variant="primary")
            with gr.Column(scale=1):
                clear = gr.Button("Clear", variant="stop")

        # chk_key.click(Loading, None, langchain_status, queue=False)
        chk_key.click(LoadData, openai_key, outputs=[langchain_status], queue=False)

        # When the user submits input, the respond function is called.
        msg.submit(respond, [msg, chatbot], [msg, chatbot])
        submit.click(respond, [msg, chatbot], [msg, chatbot])

        # Clicking the 'Clear' button resets the chat history.
        clear.click(lambda: None, None, chatbot, queue=False)

UnivChatbot.launch()