# ChatExample / app.py
import os
import pymssql
import pandas as pd
# os.environ["OPENAI_API_KEY"] = "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"
os.environ["OPENAI_API_KEY"] = "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"
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.llms import OpenAI
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.document_loaders import TextLoader
from langchain.document_loaders import DirectoryLoader
from langchain.document_loaders import CSVLoader
from langchain.memory import ConversationBufferMemory
# # Connect to the DB so user questions can be stored and later used as training data
# # Connect to MSSQL
# conn = pymssql.connect(host=r"(local)", database='Chatbot_Manage')
# conn.autocommit(True)  # enable autocommit
# # Create a cursor from the connection
# _cursor = conn.cursor()
# _cursorConfig = conn.cursor()
# # _query = "SELECT UserInput as query, SystemAnswer as answer FROM ChatHistory WHERE AcceptFlag = 'Y'"
# _queryConfig = "SELECT Temperature FROM ChatConfig WHERE IsUse = 1"
# # _cursor.execute(_query)
# _cursorConfig.execute(_queryConfig)
# # Store the fetched values and column names in a DataFrame
# # dfsql = ['query', 'answer']  # names for the DataFrame columns
# # _row = _cursor.fetchall()
# # df1 = pd.DataFrame(_row, columns=dfsql)
# _rowConfig = _cursorConfig.fetchone()
# while _rowConfig:
#     for col in range(len(_rowConfig)):
#         temperature = _rowConfig[col]
#     _rowConfig = _cursorConfig.fetchone()
# conn.close()  # close the connection
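# Load the persisted Chroma vector store (built with OpenAI embeddings) and expose it as a retriever.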
persist_directory = 'realdb_LLM'
embedding = OpenAIEmbeddings()
vectordb = Chroma(
    persist_directory=persist_directory,
    embedding_function=embedding
)
retriever = vectordb.as_retriever(search_kwargs={"k": 1})  # return only the single most similar chunk per query
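# Debug helper: prints a chain result and the source documents it came from.
# Note: it expects a chain created with return_source_documents=True.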
def process_llm_response(llm_response):
    print(llm_response['result'])
    print('\n\nSources:')
    for source in llm_response["source_documents"]:
        print(source.metadata['source'])
# Build the chatbot's answer to a user message and append it to the chat history.
def respond(message, chat_history, temperature=0.4):
    qa_chain = RetrievalQA.from_chain_type(
        llm=OpenAI(temperature=temperature),
        # llm=ChatOpenAI(temperature=0),
        chain_type="stuff",
        retriever=retriever
    )
    result = qa_chain(message)
    bot_message = result['result']
    # bot_message += '\n\n' + ' [Sources]'
    # # Append the sources of the answer
    # for i, doc in enumerate(result['source_documents']):
    #     bot_message += str(i+1) + '. ' + doc.metadata['source'] + ' '
    # Add the user's message and the bot's response to the chat history.
    chat_history.append((message, bot_message))
    historySave(message=message, answer=str(result['result']).replace("'", ""))
    # historySave(message=message, answer="")
    return "", chat_history
def historySave(message, answer):
    conn = pymssql.connect(host=r"(local)", database='Chatbot_Manage', charset='utf8')
    conn.autocommit(True)  # enable autocommit
    # Create a cursor from the connection
    cursor = conn.cursor()
    SystemType = "OpenAI(Real LLM)"
    # Run the stored procedure with bound parameters instead of string concatenation,
    # which avoids quoting errors and SQL injection.
    cursor.execute("EXEC ChatHistory_InsUpd %s, %s, %s", (SystemType, message, answer))
    conn.close()  # close the connection
import gradio as gr
# Chatbot description (header HTML)
title = """
<div style="text-align: center; max-width: 500px; margin: 0 auto;">
<div>
<h1>Pretraining Chatbot V2 Real</h1>
</div>
<p style="margin-bottom: 10px; font-size: 94%">
Chatbot using an OpenAI LLM (Similarity)
</p>
</div>
"""
# Styling
css="""
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
with gr.Blocks(css=css) as UnivChatbot:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)

        # with gr.Row():
        #     with gr.Column(scale=3):
        #         openai_key = gr.Textbox(label="Your OpenAI API key", type="password", placeholder="OpenAI Key Type", elem_id="InputKey", show_label=False, container=False)
        #     with gr.Column(scale=1):
        #         langchain_status = gr.Textbox(placeholder="Status", interactive=False, show_label=False, container=False)
        #     with gr.Column(scale=1):
        #         chk_key = gr.Button("Confirm", variant="primary")

        chatbot = gr.Chatbot(label="University Chatbot System (OpenAI LLM)", elem_id="chatbot")  # top left

        with gr.Row():
            with gr.Column(scale=9):
                msg = gr.Textbox(label="Input", placeholder="Please enter your question.", elem_id="InputQuery", show_label=False, container=False)

        with gr.Row():
            with gr.Column(scale=1):
                submit = gr.Button("Send", variant="primary")
            with gr.Column(scale=1):
                clear = gr.Button("Reset", variant="stop")

        # When the user submits a message, the respond function is called.
        msg.submit(respond, [msg, chatbot], [msg, chatbot])
        submit.click(respond, [msg, chatbot], [msg, chatbot])

        # Clicking the Reset button clears the chat history.
        clear.click(lambda: None, None, chatbot, queue=False)
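# Serve the Gradio app on local port 60001.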
UnivChatbot.launch(server_port=60001)