Update app.py
app.py CHANGED
@@ -34,9 +34,6 @@ def insert_or_fetch_embeddings(index_name):
 # Initialize or fetch Pinecone vector store
 vector_store = insert_or_fetch_embeddings(index_name)
 
-# Define the metadata for filtering
-# metadata = {'source': '/Users/cheynelevesseur/Desktop/Python_Code/Projects/LLM/Intensifying Literacy Instruction - Essential Practices (NATIONAL).pdf'}
-
 # calculate embedding cost using tiktoken
 def calculate_embedding_cost(text):
     import tiktoken
@@ -56,7 +53,7 @@ def ask_with_memory(vector_store, query, chat_history=[]):
     # The retriever is created with metadata filter directly in search_kwargs
     # retriever = vector_store.as_retriever(search_type='similarity', search_kwargs={'k': 3, 'filter': {'source': {'$eq': 'https://mimtsstac.org/sites/default/files/session-documents/Intensifying%20Literacy%20Instruction%20-%20Essential%20Practices%20%28NATIONAL%29.pdf'}}})
     retriever = vector_store.as_retriever(search_type='similarity', search_kwargs={'k': 3, 'filter': {'source':'https://mimtsstac.org/sites/default/files/session-documents/Intensifying%20Literacy%20Instruction%20-%20Essential%20Practices%20%28NATIONAL%29.pdf'}})
-
+
     chain= ConversationalRetrievalChain.from_llm(llm, retriever)
     result = chain({'question': query, 'chat_history': st.session_state['history']})
     # Append to chat history as a dictionary
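The retriever line is the substance of this commit: the old local-path metadata filter (the removed comment block in the first hunk) is dropped, and retrieval is pinned to the hosted PDF URL instead. As a rough sketch of how that retriever feeds the conversational chain, assuming the classic LangChain API that app.py already uses (ChatOpenAI, ConversationalRetrievalChain) and reusing insert_or_fetch_embeddings from this file, with an illustrative index name and question:

# Sketch only -- assumes OpenAI / Pinecone credentials are configured and that
# insert_or_fetch_embeddings(index_name) returns a LangChain Pinecone vector store,
# as in app.py above. Index name, model settings, and the question are illustrative.
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain

pdf_url = ('https://mimtsstac.org/sites/default/files/session-documents/'
           'Intensifying%20Literacy%20Instruction%20-%20Essential%20Practices%20%28NATIONAL%29.pdf')

vector_store = insert_or_fetch_embeddings('your-index-name')  # hypothetical index name
llm = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=0)   # assumed model settings

# Only chunks whose 'source' metadata equals the hosted PDF URL are retrieved
retriever = vector_store.as_retriever(
    search_type='similarity',
    search_kwargs={'k': 3, 'filter': {'source': pdf_url}},
)

chain = ConversationalRetrievalChain.from_llm(llm, retriever)
result = chain({'question': 'What are the essential practices?', 'chat_history': []})
print(result['answer'])

In Pinecone's filter syntax, {'source': pdf_url} is shorthand for {'source': {'$eq': pdf_url}}, which is why the commented-out $eq form could be simplified without changing behaviour.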
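The first hunk's context also references the calculate_embedding_cost helper (old lines 40-42), of which only the signature and the tiktoken import are visible. A plausible completion, assuming the text argument is a list of LangChain Document objects and assuming text-embedding-ada-002 pricing, could look like:

import tiktoken

def calculate_embedding_cost(text):
    # Assumption: `text` is a list of LangChain Document objects (e.g. PDF pages)
    enc = tiktoken.encoding_for_model('text-embedding-ada-002')
    total_tokens = sum(len(enc.encode(page.page_content)) for page in text)
    # Assumed ada-002 rate of $0.0001 per 1K tokens; verify against current pricing
    return total_tokens, total_tokens / 1000 * 0.0001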