# NOTE: "Spaces: / Runtime error" below the title was status text scraped from the
# hosting page (Hugging Face Spaces), not part of this source file.
import streamlit as st
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.prompts.prompt import PromptTemplate
class Chatbot:
    """Conversational Q&A assistant over a CSV-backed vector store.

    Wraps a LangChain ``ConversationalRetrievalChain``: each query is first
    condensed (with chat history) into a standalone question, then answered
    from documents retrieved out of ``vectors``.

    NOTE(review): the non-ASCII prompt text below is mojibake (it looks like
    Korean UTF-8 decoded with the wrong codec somewhere upstream). It is kept
    byte-for-byte here because the original bytes cannot be reconstructed from
    this copy — restore the original Korean text from the upstream repo.
    """

    # Prompt that rewrites (chat history + follow-up) into a standalone question.
    _template = """λ€μ λνμ νμ μ§λ¬Έμ΄ μ£Όμ΄μ§λ©΄ νμ μ§λ¬Έμ λ 립ν μ§λ¬ΈμΌλ‘ λ°κΎΈμμμ€.
μ§λ¬Έμ΄ CSV νμΌμ μ 보μ κ΄ν κ²μ΄λΌκ³ κ°μ ν μ μμ΅λλ€.
Chat History:
{chat_history}
Follow-up entry: {question}
Standalone question:"""
    CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

    # Prompt for the final answer-from-context step.
    # FIX: the original had four quotes (""""csv...), which put a stray literal
    # '"' at the start of the prompt sent to the model.
    qa_template = """csv νμΌμ μ 보λ₯Ό κΈ°λ°μΌλ‘ μ§λ¬Έμ λ΅νλ AI λν λΉμμ λλ€.
csv νμΌμ λ°μ΄ν°μ μ§λ¬Έμ΄ μ 곡λλ©° μ¬μ©μκ° νμν μ 보λ₯Ό μ°Ύλλ‘ λμμΌ ν©λλ€.
μκ³ μλ μ 보μ λν΄μλ§ μλ΅νμμμ€. λ΅μ μ§μ΄λ΄λ €κ³ νμ§ λ§μΈμ.
κ·νμ λ΅λ³μ μ§§κ³ μΉκ·Όνλ©° λμΌν μΈμ΄λ‘ μμ±λμ΄μΌ ν©λλ€.
question: {question}
=========
{context}
=======
"""
    QA_PROMPT = PromptTemplate(template=qa_template, input_variables=["question", "context"])

    def __init__(self, model_name, temperature, vectors):
        # model_name: OpenAI chat model identifier (e.g. "gpt-3.5-turbo").
        # temperature: sampling temperature forwarded to ChatOpenAI.
        # vectors: a vector store exposing .as_retriever() (e.g. FAISS).
        self.model_name = model_name
        self.temperature = temperature
        self.vectors = vectors

    def conversational_chat(self, query):
        """Run one conversational turn via LangChain and return the answer.

        Reads and appends to ``st.session_state["history"]`` (a list of
        ``(question, answer)`` tuples), so the Streamlit session carries the
        chat history between turns.
        """
        # FIX: `qa_prompt` is not a keyword accepted by
        # ConversationalRetrievalChain.from_llm — passing it raises
        # "ValueError: Unexpected keyword arguments: {'qa_prompt': ...}" at
        # runtime (the "Runtime error" this Space showed). The QA prompt must
        # be routed to the combine-docs chain via combine_docs_chain_kwargs.
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(model_name=self.model_name, temperature=self.temperature),
            condense_question_prompt=self.CONDENSE_QUESTION_PROMPT,
            combine_docs_chain_kwargs={"prompt": self.QA_PROMPT},
            retriever=self.vectors.as_retriever(),
        )

        result = chain({"question": query, "chat_history": st.session_state["history"]})
        st.session_state["history"].append((query, result["answer"]))
        return result["answer"]