reducing chat window
utils.py (changed)
@@ -41,7 +41,7 @@ message_history = CustomMongoDBChatMessageHistory(
     collection_name='3d_printing_applications'
 )
 
-memory = ConversationBufferWindowMemory(memory_key="chat_history", k=
+memory = ConversationBufferWindowMemory(memory_key="chat_history", k=4)
 
 vectorstore_index = None
 
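For context on the k=4 window set above, here is a minimal, self-contained sketch of how ConversationBufferWindowMemory behaves. The sample turns and the return_messages=True flag are illustrative only; the commit itself constructs the memory with just memory_key and k.

from langchain.memory import ConversationBufferWindowMemory

# k=4 means the buffer exposes only the last 4 human/AI exchanges (8 messages).
memory = ConversationBufferWindowMemory(
    memory_key="chat_history", k=4, return_messages=True)

# Simulate six exchanges; only the four most recent show up in the window.
for i in range(6):
    memory.save_context({"input": f"question {i}"}, {"output": f"answer {i}"})

window = memory.load_memory_variables({})["chat_history"]
print(len(window))                       # 8 messages = the last 4 exchanges
print(len(memory.chat_memory.messages))  # 12: the full history is still stored

The window is applied when the buffer is read; memory.chat_memory itself keeps every saved message, which is what generate_answer slices further down in this commit.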
@@ -193,7 +193,7 @@ def get_qa_chain(vectorstore_index):
     # embeddings_filter = EmbeddingsFilter(embeddings=embeddings, similarity_threshold=0.76)
     # compression_retriever = ContextualCompressionRetriever(base_compressor=embeddings_filter, base_retriever=gpt_3_5_index.as_retriever())
     retriever = vectorstore_index.as_retriever(search_type="similarity_score_threshold",
-                                               search_kwargs={"score_threshold": .
+                                               search_kwargs={"score_threshold": .76})
 
     chain = ConversationalRetrievalChain.from_llm(llm, retriever, return_source_documents=True,
                                                   verbose=True, get_chat_history=get_chat_history,
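As a rough illustration of what the 0.76 cut-off does, the sketch below builds a throwaway index; FAISS, OpenAIEmbeddings and the sample texts are stand-ins for whatever utils.py actually uses, and running it needs faiss-cpu plus an OpenAI key.

from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS

# Placeholder corpus; the real vectorstore_index is built elsewhere in utils.py.
index = FAISS.from_texts(
    ["PLA and ABS are common 3D printing materials.",
     "Bananas are yellow."],
    OpenAIEmbeddings())

# Chunks whose relevance score falls below 0.76 are dropped rather than padded in.
retriever = index.as_retriever(
    search_type="similarity_score_threshold",
    search_kwargs={"score_threshold": 0.76})

docs = retriever.get_relevant_documents("Which materials can I print with?")
print([d.page_content for d in docs])

The value mirrors the 0.76 used by the commented-out EmbeddingsFilter just above it.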
@@ -211,7 +211,8 @@ def get_chat_history(inputs) -> str:
 def generate_answer(question) -> str:
     global vectorstore_index
     chain = get_qa_chain(vectorstore_index)
-
+    # get last 4 messages from chat history
+    history = memory.chat_memory.messages[-4:]
     result = chain(
         {"question": question, "chat_history": history})
 
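And a small sketch of what the new [-4:] slice yields; the ChatMessageHistory below is a hypothetical stand-in for the MongoDB-backed memory.chat_memory used in utils.py.

from langchain.memory import ChatMessageHistory

# Stand-in for memory.chat_memory (a MongoDB-backed history in the real code).
chat_memory = ChatMessageHistory()
for i in range(4):
    chat_memory.add_user_message(f"question {i}")
    chat_memory.add_ai_message(f"answer {i}")

# [-4:] keeps the last 4 messages, i.e. the 2 most recent question/answer pairs.
history = chat_memory.messages[-4:]
print([m.content for m in history])
# ['question 2', 'answer 2', 'question 3', 'answer 3']

Worth noting: k in ConversationBufferWindowMemory counts exchanges (human/AI pairs), while this slice counts individual messages, so the chain effectively sees the last two question/answer pairs.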