# Hugging Face Space page header (scrape artifact) — status: Sleeping
import os

import pymssql
import pandas as pd

# SECURITY: previous revisions hard-coded live OpenAI API keys here, which
# leaks the secret to anyone who can read the source. Require the key from
# the environment instead and fail fast with a clear message when absent.
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError(
        "OPENAI_API_KEY is not set. Export it in the environment before "
        "starting this app; never commit API keys to source code."
    )

from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.llms import OpenAI
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.document_loaders import TextLoader
from langchain.document_loaders import DirectoryLoader
from langchain.document_loaders import CSVLoader
from langchain.memory import ConversationBufferMemory
| # # ์ฌ์ฉ์๊ฐ ์ง๋ฌธํ ๋ด์ญ ์ ์ฅํด์ ๋์ค์ ํ์ต์ฉ์ผ๋ก ์ฐ๊ธฐ ์ํด DB ์ ์ | |
| # # MSSQL ์ ์ | |
| # conn = pymssql.connect(host=r"(local)", database='Chatbot_Manage') | |
| # conn.autocommit(True) # ์คํ ์ปค๋ฐ ํ์ฑํ | |
| # # Connection ์ผ๋ก๋ถํฐ Cursor ์์ฑ | |
| # _cursor = conn.cursor() | |
| # _cursorConfig = conn.cursor() | |
| # # _query = "SELECT UserInput as query, SystemAnswer as answer FROM ChatHistory WHERE AcceptFlag = 'Y'" | |
| # _queryConfig = "SELECT Temperature FROM ChatConfig WHERE IsUse = 1" | |
| # # _cursor.execute(_query) | |
| # _cursorConfig.execute(_queryConfig) | |
| # #์คํํ ๊ฐ, ์ด๋ฆ ๊ฐ์ DataFrame์ ์ ์ฅ | |
| # #dfsql = ['query','answer'] #๋ฐ์ดํฐํ๋ ์ ์ปฌ๋ผ์ ์ด๋ฆ ์ค์ . | |
| # # _row = cursor.fetchall() | |
| # # df1 = pd.DataFrame(_row, columns=dfsql) | |
| # _rowConfing = _cursorConfig.fetchone() | |
| # while _rowConfing: | |
| # for col in range(len(_rowConfing)): | |
| # temperature = _rowConfing[col] | |
| # _rowConfing = _cursorConfig.fetchone() | |
| # conn.close() ## ์ฐ๊ฒฐ ๋๊ธฐ | |
# Where the pre-built Chroma collection lives on disk.
persist_directory = 'realdb_LLM'

# Embed queries with OpenAI embeddings and reopen the persisted vector store.
embedding = OpenAIEmbeddings()
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding)

# Hand back only the single most similar document for each query.
retriever = vectordb.as_retriever(search_kwargs={"k": 1})
def process_llm_response(llm_response):
    """Print the chain's answer, then the source path of each retrieved document."""
    answer = llm_response['result']
    print(answer)
    print('\n\nSources:')
    paths = [doc.metadata['source'] for doc in llm_response["source_documents"]]
    for path in paths:
        print(path)
def respond(message, chat_history, temperature=0.4):
    """Answer one user message with the retrieval QA chain and update the chat history.

    Parameters
    ----------
    message : str
        The user's question from the textbox.
    chat_history : list
        Gradio chat history; a ``(message, answer)`` pair is appended in place.
    temperature : float, optional
        LLM sampling temperature. Defaults to 0.4. The previous revision
        declared this parameter without a default and then ignored it — the
        Gradio callbacks pass only two arguments, so every call raised
        TypeError; the default fixes that while keeping the old 0.4 behavior.

    Returns
    -------
    tuple
        ``("", chat_history)`` — an empty string to clear the input textbox,
        plus the updated history.
    """
    qa_chain = RetrievalQA.from_chain_type(
        llm=OpenAI(temperature=temperature),  # temperature is now honored
        chain_type="stuff",
        retriever=retriever,
    )
    result = qa_chain(message)
    bot_message = result['result']
    # Append the (question, answer) turn for the chatbot widget.
    chat_history.append((message, bot_message))
    # Persist the turn to MSSQL for later curation/training.
    historySave(message=message, answer=str(result['result']).replace("'", ""))
    return "", chat_history
def historySave(message, answer):
    """Persist one chat turn (user question + bot answer) to MSSQL.

    Executes the ``ChatHistory_InsUpd`` stored procedure with the system
    type, the user's message, and the chatbot's answer.

    Parameters
    ----------
    message : str
        The user's input text.
    answer : str
        The chatbot's answer text.
    """
    conn = pymssql.connect(host=r"(local)", database='Chatbot_Manage', charset='utf8')
    try:
        conn.autocommit(True)  # commit each statement immediately
        cursor = conn.cursor()
        system_type = "OpenAI(Real LLM)"
        # Parameterized call: the previous string-concatenated EXEC statement
        # was vulnerable to SQL injection through the user-controlled
        # message/answer text.
        cursor.execute(
            "EXEC ChatHistory_InsUpd %s, %s, %s",
            (system_type, message, answer),
        )
    finally:
        conn.close()  # always release the connection, even on error
| import gradio as gr | |
# Chatbot description: HTML header rendered at the top of the Gradio page.
title = """
<div style="text-align: center; max-width: 500px; margin: 0 auto;">
<div>
<h1>Pretraining Chatbot V2 Real</h1>
</div>
<p style="margin-bottom: 10px; font-size: 94%">
OpenAI LLM๋ฅผ ์ด์ฉํ Chatbot (Similarity)
</p>
</div>
"""
# Styling: constrain the main column's width and center it on the page.
css="""
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
# Assemble the Gradio UI: one centered column holding the chat pane, an
# input textbox, and send/reset buttons; then launch the app.
with gr.Blocks(css=css) as UnivChatbot:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)
        # with gr.Row():
        #     with gr.Column(scale=3):
        #         openai_key = gr.Textbox(label="You OpenAI API key", type="password", placeholder="OpenAI Key Type", elem_id="InputKey", show_label=False, container=False)
        #     with gr.Column(scale=1):
        #         langchain_status = gr.Textbox(placeholder="Status", interactive=False, show_label=False, container=False)
        #     with gr.Column(scale=1):
        #         chk_key = gr.Button("ํ์ธ", variant="primary")
        chatbot = gr.Chatbot(label="๋ํ ์ฑ๋ด์์คํ (OpenAI LLM)", elem_id="chatbot")  # chat pane, top area
        with gr.Row():
            with gr.Column(scale=9):
                msg = gr.Textbox(label="์ ๋ ฅ", placeholder="๊ถ๊ธํ์ ๋ด์ญ์ ์ ๋ ฅํ์ฌ ์ฃผ์ธ์.", elem_id="InputQuery", show_label=False, container=False)
        with gr.Row():
            with gr.Column(scale=1):
                submit = gr.Button("์ ์ก", variant="primary")
            with gr.Column(scale=1):
                clear = gr.Button("์ด๊ธฐํ", variant="stop")
        # Pressing Enter in the textbox or clicking the send button calls respond().
        # NOTE(review): respond is declared with a third `temperature` parameter,
        # but only [msg, chatbot] are passed here — confirm that parameter has a
        # default, otherwise these callbacks raise TypeError at runtime.
        msg.submit(respond, [msg, chatbot], [msg, chatbot])
        submit.click(respond, [msg, chatbot], [msg, chatbot])
        # The reset button clears the chat history.
        clear.click(lambda: None, None, chatbot, queue=False)
UnivChatbot.launch(server_port=60001)