Update app.py
app.py CHANGED
@@ -11,7 +11,7 @@ from langchain.document_loaders import TextLoader
 from langchain.chains.combine_documents import create_stuff_documents_chain
 from langchain_community.chat_message_histories.streamlit import StreamlitChatMessageHistory
 from langchain.prompts import PromptTemplate
-from langchain.chains
+from langchain.chains import create_history_aware_retriever, create_retrieval_chain
 from langchain.vectorstores import Chroma
 from utills import load_txt_documents , split_docs, load_uploaded_documents,retriever_from_chroma
 
@@ -72,11 +72,7 @@ which maybe reference context in the chat history, formulate a standalone questi
 which can be understood without the chat history. Do NOT answer the question,
 just reformulate it if needed and otherwise return it as is."""
 
-
-def create_history_aware_retriever():
-    return history_aware_retriever(llm, retriever, contextualize_q_system_prompt)
-
-ha_retriever = create_history_aware_retriever()
+ha_retriever = history_aware_retriever(llm, retriever, contextualize_q_system_prompt)
 
 qa_system_prompt = """You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Be as informative as possible, be polite and formal.\n{context}"""
 
@@ -88,17 +84,10 @@ qa_prompt = ChatPromptTemplate.from_messages(
 ]
 )
 
-
-def create_question_answer_chain():
-    return create_stuff_documents_chain(llm, qa_prompt)
+question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
 
-question_answer_chain = create_question_answer_chain()
-
-@st.cache_resource
-def create_rag_chain():
-    return create_retrieval_chain(ha_retriever, question_answer_chain)
 
-rag_chain =
+rag_chain = create_retrieval_chain(ha_retriever, question_answer_chain)
 msgs = StreamlitChatMessageHistory(key="special_app_key")
 
 @st.cache_resource