Update app.py
app.py CHANGED
@@ -44,22 +44,27 @@ def main():


     docs = split_docs(documents, 350, 40)
+    vectorstore = get_vectorstore(docs)
+    if "conversation_chain" not in st.session_state:
+        st.session_state.conversation_chain = create_conversational_rag_chain(vectorstore)


     if prompt := st.text_input("Enter your question:"):
         msgs = st.session_state.get("chat_history", StreamlitChatMessageHistory(key="special_app_key"))
         st.chat_message("human").write(prompt)

-
-
-
-
-
+        if st.session_state.conversation_chain is not None:
+            input_dict = {"input": prompt, "chat_history": msgs.messages}
+            config = {"configurable": {"session_id": "any"}}
+            response = st.session_state.conversation_chain.invoke(input_dict, config)
+            st.chat_message("ai").write(response["answer"])

-
-
-
-
+            if "documents" in response and response["documents"]:
+                for index, doc in enumerate(response["documents"]):
+                    with st.expander(f"Document {index + 1}"):
+                        st.write(doc)
+        else:
+            st.error("Conversation chain is not available.")

         st.session_state["chat_history"] = msgs


@@ -112,5 +117,19 @@ def create_conversational_rag_chain(vectorstore):
     )
     return conversation_chain

+
+def get_vectorstore(text_chunks):
+    model_name = "sentence-transformers/all-mpnet-base-v2"
+    model_kwargs = {'device': 'cpu'}
+    encode_kwargs = {'normalize_embeddings': True}
+    embeddings = HuggingFaceEmbeddings(
+        model_name=model_name,
+        model_kwargs=model_kwargs,
+        encode_kwargs=encode_kwargs
+    )
+    vectorstore = Chroma.from_documents(
+        documents=text_chunks, embedding=embeddings, persist_directory="docs/chroma/")
+    return vectorstore
+
 if __name__ == "__main__":
     main()
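
The added code relies on imports that are presumably already present near the top of app.py and therefore do not appear in this diff. A minimal sketch of what they would look like, assuming the langchain_community packages (exact module paths vary between LangChain versions):

    # Assumed imports, not part of this commit; adjust paths to the installed LangChain version.
    import streamlit as st
    from langchain_community.embeddings import HuggingFaceEmbeddings
    from langchain_community.vectorstores import Chroma
    from langchain_community.chat_message_histories import StreamlitChatMessageHistory

The config={"configurable": {"session_id": "any"}} passed to invoke follows LangChain's RunnableWithMessageHistory convention, so create_conversational_rag_chain presumably wraps the retrieval chain with a message-history runnable that returns its result under the "answer" key.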