Spaces:
Runtime error
Runtime error
Create chatbot.py
Browse files — modules/chatbot.py (+49 lines, −0 lines)
modules/chatbot.py
ADDED
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
from langchain.chat_models import ChatOpenAI
|
3 |
+
from langchain.chains import ConversationalRetrievalChain
|
4 |
+
from langchain.prompts.prompt import PromptTemplate
|
5 |
+
|
6 |
+
|
7 |
+
class Chatbot:
    """Conversational-retrieval chatbot over a CSV-backed vector store.

    Wraps a LangChain ``ConversationalRetrievalChain`` driven by an OpenAI
    chat model, keeping the running conversation in Streamlit's
    ``st.session_state["history"]``.
    """

    # Prompt used to condense a follow-up question plus chat history into a
    # single standalone question for retrieval.
    # NOTE(review): the Korean text below is mojibake'd (encoding round-trip
    # in the scrape); it is kept verbatim so runtime behavior is unchanged —
    # restore from the original UTF-8 source when available.
    _template = """λ€μ λνμ νμ μ§λ¬Έμ΄ μ£Όμ΄μ§λ©΄ νμ μ§λ¬Έμ λ립ν μ§λ¬ΈμΌλ‘ λ°κΎΈμμμ€.
μ§λ¬Έμ΄ CSV νμΌμ μ 보μ κ΄ν κ²μ΄λΌκ³ κ°μ ν μ μμ΅λλ€.
Chat History:
{chat_history}
Follow-up entry: {question}
Standalone question:"""

    CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

    # Prompt for the answer-generation (combine-docs) step.
    # FIX: the original opened with four quotes (''""""csv ...''), which made
    # the prompt text itself begin with a stray literal '"' character.
    qa_template = """csv νμΌμ μ 보λ₯Ό κΈ°λ°μΌλ‘ μ§λ¬Έμ λ΅νλ AI λν λΉμμλλ€.
csv νμΌμ λ°μ΄ν°μ μ§λ¬Έμ΄ μ 곡λλ©° μ¬μ©μκ° νμν μ 보λ₯Ό μ°Ύλλ‘ λμμΌ ν©λλ€.
μκ³ μλ μ 보μ λν΄μλ§ μλ΅νμμμ€. λ΅μ μ§μ΄λ΄λ €κ³ νμ§ λ§μΈμ.
κ·νμ λ΅λ³μ μ§§κ³ μΉκ·Όνλ©° λμΌν μΈμ΄λ‘ μμ±λμ΄μΌ ν©λλ€.
question: {question}
=========
{context}
=======
"""

    QA_PROMPT = PromptTemplate(
        template=qa_template, input_variables=["question", "context"]
    )

    def __init__(self, model_name, temperature, vectors):
        # OpenAI chat model identifier (e.g. "gpt-3.5-turbo").
        self.model_name = model_name
        # Sampling temperature forwarded to ChatOpenAI.
        self.temperature = temperature
        # Vector store exposing .as_retriever() (e.g. a FAISS index).
        self.vectors = vectors

    def conversational_chat(self, query):
        """Run one turn of retrieval-augmented chat via LangChain.

        Args:
            query: The user's question for this turn.

        Returns:
            The model's answer string; the (query, answer) pair is also
            appended to ``st.session_state["history"]`` as a side effect.
        """
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(model_name=self.model_name, temperature=self.temperature),
            condense_question_prompt=self.CONDENSE_QUESTION_PROMPT,
            # FIX: ``from_llm`` does not accept a ``qa_prompt`` keyword; the
            # QA prompt must be routed to the combine-docs chain via
            # ``combine_docs_chain_kwargs`` — passing ``qa_prompt=`` raises a
            # TypeError at runtime (the Space's reported "Runtime error").
            combine_docs_chain_kwargs={"prompt": self.QA_PROMPT},
            retriever=self.vectors.as_retriever(),
        )

        # Guard: ensure the history list exists before the first turn, so a
        # direct call no longer KeyErrors if the app didn't pre-seed it.
        history = st.session_state.setdefault("history", [])

        result = chain({"question": query, "chat_history": history})

        history.append((query, result["answer"]))

        return result["answer"]