# CSV-ChatBot — modules/chatbot.py
# Author: RustX — "Create chatbot.py" (commit a9d3fa8)
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts.prompt import PromptTemplate
class Chatbot:
    """Conversational Q&A assistant over a CSV file via LangChain.

    Wraps a ``ConversationalRetrievalChain`` built from a chat model and a
    vector store of the CSV contents, and keeps the running chat history in
    Streamlit's ``st.session_state["history"]``.
    """

    # Prompt that condenses the chat history plus a follow-up question into a
    # single standalone question (template text intentionally in Korean —
    # it is a runtime string sent to the model, not a comment).
    _template = """다음 대화와 후속 질문이 주어지면 후속 질문을 독립형 질문으로 바꾸십시오.
질문이 CSV 파일의 정보에 관한 것이라고 가정할 수 있습니다.
Chat History:
{chat_history}
Follow-up entry: {question}
Standalone question:"""
    CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

    # QA prompt. Fix: the original opened with four quotes (""""csv ...),
    # which leaked a stray literal '"' into the prompt seen by the model.
    qa_template = """csv 파일의 정보를 기반으로 질문에 답하는 AI 대화 비서입니다.
csv 파일의 데이터와 질문이 제공되며 사용자가 필요한 정보를 찾도록 도와야 합니다.
알고 있는 정보에 대해서만 응답하십시오. 답을 지어내려고 하지 마세요.
귀하의 답변은 짧고 친근하며 동일한 언어로 작성되어야 합니다.
question: {question}
=========
{context}
=======
"""
    QA_PROMPT = PromptTemplate(template=qa_template, input_variables=["question", "context"])

    def __init__(self, model_name, temperature, vectors):
        """Store model configuration and the retriever's vector store.

        :param model_name: OpenAI chat model name (e.g. "gpt-3.5-turbo").
        :param temperature: sampling temperature passed to the model.
        :param vectors: vector store exposing ``as_retriever()``.
        """
        self.model_name = model_name
        self.temperature = temperature
        self.vectors = vectors

    def conversational_chat(self, query):
        """Run one conversational turn and return the model's answer.

        Appends ``(query, answer)`` to ``st.session_state["history"]`` as a
        side effect.
        """
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(model_name=self.model_name, temperature=self.temperature),
            condense_question_prompt=self.CONDENSE_QUESTION_PROMPT,
            # Fix: `qa_prompt` is not a keyword accepted by `from_llm` and
            # raises ValueError; the combine-docs prompt must be passed via
            # `combine_docs_chain_kwargs`.
            combine_docs_chain_kwargs={"prompt": self.QA_PROMPT},
            retriever=self.vectors.as_retriever(),
        )
        # Robustness: avoid KeyError on the very first turn, before any
        # caller has initialized the history list.
        history = st.session_state.setdefault("history", [])
        result = chain({"question": query, "chat_history": history})
        history.append((query, result["answer"]))
        return result["answer"]