Spaces:
Runtime error
Runtime error
File size: 1,958 Bytes
a9d3fa8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 |
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts.prompt import PromptTemplate
class Chatbot:
    """Conversational Q&A assistant over the contents of a CSV file.

    Wires an OpenAI chat model and a vector-store retriever into a LangChain
    ``ConversationalRetrievalChain``, keeping the running conversation in
    ``st.session_state["history"]`` (which callers must initialise to a list
    before the first call).
    """

    # Prompt that condenses a follow-up question plus the chat history into a
    # single standalone question for retrieval.
    # NOTE(review): the Korean text below is mojibake from a bad encoding
    # round-trip — restore it from the original source rather than editing
    # the garbled characters by hand.
    _template = """λ€μ λνμ νμ μ§λ¬Έμ΄ μ£Όμ΄μ§λ©΄ νμ μ§λ¬Έμ λ
립ν μ§λ¬ΈμΌλ‘ λ°κΎΈμμμ€.
μ§λ¬Έμ΄ CSV νμΌμ μ 보μ κ΄ν κ²μ΄λΌκ³ κ°μ ν μ μμ΅λλ€.
Chat History:
{chat_history}
Follow-up entry: {question}
Standalone question:"""
    CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

    # Prompt used by the combine-docs (answering) step. The original source
    # had a stray fourth quote (""""csv) which injected a literal leading
    # double-quote into the prompt text; fixed here.
    # NOTE(review): Korean text is mojibake here too — see note above.
    qa_template = """csv νμΌμ μ 보λ₯Ό κΈ°λ°μΌλ‘ μ§λ¬Έμ λ΅νλ AI λν λΉμμ
λλ€.
csv νμΌμ λ°μ΄ν°μ μ§λ¬Έμ΄ μ 곡λλ©° μ¬μ©μκ° νμν μ 보λ₯Ό μ°Ύλλ‘ λμμΌ ν©λλ€.
μκ³ μλ μ 보μ λν΄μλ§ μλ΅νμμμ€. λ΅μ μ§μ΄λ΄λ €κ³ νμ§ λ§μΈμ.
κ·νμ λ΅λ³μ μ§§κ³ μΉκ·Όνλ©° λμΌν μΈμ΄λ‘ μμ±λμ΄μΌ ν©λλ€.
question: {question}
=========
{context}
=======
"""
    QA_PROMPT = PromptTemplate(template=qa_template, input_variables=["question", "context"])

    def __init__(self, model_name, temperature, vectors):
        # model_name: OpenAI chat model identifier (e.g. "gpt-3.5-turbo").
        # temperature: sampling temperature forwarded to ChatOpenAI.
        # vectors: a vector store exposing .as_retriever() (e.g. FAISS).
        self.model_name = model_name
        self.temperature = temperature
        self.vectors = vectors

    def conversational_chat(self, query):
        """Answer *query* against the CSV retriever and update chat history.

        Returns the model's answer string and appends the
        ``(query, answer)`` pair to ``st.session_state["history"]``.
        """
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(model_name=self.model_name, temperature=self.temperature),
            condense_question_prompt=self.CONDENSE_QUESTION_PROMPT,
            # BUGFIX: `qa_prompt` is not a parameter of from_llm() and caused a
            # runtime error; the QA prompt must be routed to the combine-docs
            # chain via combine_docs_chain_kwargs.
            combine_docs_chain_kwargs={"prompt": self.QA_PROMPT},
            retriever=self.vectors.as_retriever(),
        )

        result = chain({"question": query, "chat_history": st.session_state["history"]})
        st.session_state["history"].append((query, result["answer"]))
        return result["answer"]