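# Gradio chatbot that answers questions with a LangChain RetrievalQA chain over a
# persisted Chroma vector store (OpenAI embeddings + OpenAI LLM). Each question/answer
# pair is logged to an MSSQL database through a stored procedure.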
import os
import pymssql
import pandas as pd
# os.environ["OPENAI_API_KEY"] = "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"
os.environ["OPENAI_API_KEY"] = "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"
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.llms import OpenAI
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.document_loaders import TextLoader
from langchain.document_loaders import DirectoryLoader
from langchain.document_loaders import CSVLoader
from langchain.memory import ConversationBufferMemory
# # Connect to the DB to store user questions for later use as training data
# # Connect to MSSQL
# conn = pymssql.connect(host=r"(local)", database='Chatbot_Manage')
# conn.autocommit(True)  # enable autocommit
# # Create a cursor from the connection
# _cursor = conn.cursor()
# _cursorConfig = conn.cursor()
# # _query = "SELECT UserInput as query, SystemAnswer as answer FROM ChatHistory WHERE AcceptFlag = 'Y'"
# _queryConfig = "SELECT Temperature FROM ChatConfig WHERE IsUse = 1"
# # _cursor.execute(_query)
# _cursorConfig.execute(_queryConfig)
# # Store the fetched rows and column names in a DataFrame
# # dfsql = ['query', 'answer']  # DataFrame column names
# # _row = cursor.fetchall()
# # df1 = pd.DataFrame(_row, columns=dfsql)
# _rowConfing = _cursorConfig.fetchone()
# while _rowConfing:
#     for col in range(len(_rowConfing)):
#         temperature = _rowConfing[col]
#     _rowConfing = _cursorConfig.fetchone()
# conn.close()  # close the connection
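# Load the persisted Chroma vector store; the embedding function must be the same
# kind used when the store was built.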
persist_directory = 'realdb_LLM'
embedding = OpenAIEmbeddings()
vectordb = Chroma(
    persist_directory=persist_directory,
    embedding_function=embedding
)
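# The retriever returns only the single most similar chunk per query (k=1).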
retriever = vectordb.as_retriever(search_kwargs={"k": 1})
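# Debug helper that prints an answer and its sources. It is not wired into the
# Gradio app and requires a chain built with return_source_documents=True.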
def process_llm_response(llm_response):
    print(llm_response['result'])
    print('\n\nSources:')
    for source in llm_response["source_documents"]:
        print(source.metadata['source'])
# Generate the chatbot's answer for a user message and update the chat history.
def respond(message, chat_history, temperature=0.4):
    # A default temperature keeps the Gradio callbacks working, since they only
    # pass (message, chat_history).
    qa_chain = RetrievalQA.from_chain_type(
        llm=OpenAI(temperature=temperature),
        # llm=ChatOpenAI(temperature=0),
        chain_type="stuff",
        retriever=retriever
    )
    result = qa_chain(message)
    bot_message = result['result']
    # bot_message += '\n\n' + ' [Sources]'
    # # Append the sources of the answer
    # for i, doc in enumerate(result['source_documents']):
    #     bot_message += str(i+1) + '. ' + doc.metadata['source'] + ' '
    # Add the user message and the bot's answer to the chat history.
    chat_history.append((message, bot_message))
    historySave(message=message, answer=str(result['result']).replace("'", ""))
    # historySave(message=message, answer="")
    return "", chat_history
def historySave(message, answer):
    conn = pymssql.connect(host=r"(local)", database='Chatbot_Manage', charset='utf8')
    conn.autocommit(True)  # enable autocommit
    # Create a cursor from the connection
    cursor = conn.cursor()
    SystemType = "OpenAI(Real LLM)"
    # Execute the stored procedure with bound parameters instead of string
    # concatenation, which avoids quoting problems and SQL injection.
    cursor.execute("EXEC ChatHistory_InsUpd %s, %s, %s", (SystemType, message, answer))
    conn.close()  # close the connection
import gradio as gr
# Chatbot description (header HTML)
title = """
<div style="text-align: center; max-width: 500px; margin: 0 auto;">
    <div>
        <h1>Pretraining Chatbot V2 Real</h1>
    </div>
    <p style="margin-bottom: 10px; font-size: 94%">
        Chatbot using an OpenAI LLM (Similarity)
    </p>
</div>
"""
# Styling
css="""
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
with gr.Blocks(css=css) as UnivChatbot:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)
        # with gr.Row():
        #     with gr.Column(scale=3):
        #         openai_key = gr.Textbox(label="Your OpenAI API key", type="password", placeholder="OpenAI Key Type", elem_id="InputKey", show_label=False, container=False)
        #     with gr.Column(scale=1):
        #         langchain_status = gr.Textbox(placeholder="Status", interactive=False, show_label=False, container=False)
        #     with gr.Column(scale=1):
        #         chk_key = gr.Button("Confirm", variant="primary")
        chatbot = gr.Chatbot(label="University chatbot system (OpenAI LLM)", elem_id="chatbot")  # top left
        with gr.Row():
            with gr.Column(scale=9):
                msg = gr.Textbox(label="Input", placeholder="Please enter your question.", elem_id="InputQuery", show_label=False, container=False)
        with gr.Row():
            with gr.Column(scale=1):
                submit = gr.Button("Send", variant="primary")
            with gr.Column(scale=1):
                clear = gr.Button("Reset", variant="stop")

    # Submitting the input (Enter key or the Send button) calls respond.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    submit.click(respond, [msg, chatbot], [msg, chatbot])
    # Clicking the Reset button clears the chat history.
    clear.click(lambda: None, None, chatbot, queue=False)

UnivChatbot.launch(server_port=60001)