chalisesagun committed · verified
Commit dd76442 · Parent(s): f032aa3

Update app.py

Files changed (1)
  1. app.py +9 -5
app.py CHANGED

@@ -21,8 +21,11 @@ if api_key:
     # Step 2: Upload PDF Document
     uploaded_file = st.file_uploader("📂 Upload a PDF document", type=["pdf"])
 
-    if uploaded_file:
-        # Load and process the document
+    # Use session state to persist the vector_store
+    if "vector_store" not in st.session_state:
+        st.session_state.vector_store = None
+
+    if uploaded_file and st.session_state.vector_store is None:
         try:
             with st.spinner("Processing document..."):
                 # Save the uploaded file temporarily
@@ -43,21 +46,22 @@ if api_key:
 
             # Generate embeddings and store them in a vector database
             embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
-            vector_store = FAISS.from_documents(chunks, embeddings)
+            st.session_state.vector_store = FAISS.from_documents(chunks, embeddings)
 
             st.success("Document processed successfully!")
         except Exception as e:
             st.error(f"Error processing document: {e}")
             st.stop()
 
-    # Step 3: Ask Questions About the Document
+    # Step 3: Ask Questions About the Document
+    if st.session_state.vector_store:
     st.subheader("💬 Chat with Your Document")
     user_query = st.text_input("Ask a question:")
 
     if user_query:
         try:
             # Set up the RAG pipeline with DeepSeek LLM
-            retriever = vector_store.as_retriever()
+            retriever = st.session_state.vector_store.as_retriever()
             llm = ChatOpenAI(
                 model="deepseek-chat",
                 openai_api_key=api_key,
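
For context, the change leans on Streamlit's execution model: every widget interaction re-runs the script from the top, so a FAISS index held in a plain local variable is rebuilt or lost on the next rerun, and `vector_store` is undefined when the user asks a question without the file having just been processed. Keeping the index in `st.session_state` lets it survive reruns for the lifetime of the browser session. What follows is a minimal, self-contained sketch of that pattern, not a copy of app.py: the import paths, the temp-file handling, `PyPDFLoader`, `RecursiveCharacterTextSplitter` and the chunking parameters are assumptions for illustration, since those parts of the file are not shown in this diff.

import tempfile

import streamlit as st
from langchain_community.document_loaders import PyPDFLoader          # assumed import path
from langchain_community.embeddings import HuggingFaceEmbeddings      # assumed import path
from langchain_community.vectorstores import FAISS
from langchain_text_splitters import RecursiveCharacterTextSplitter   # assumed import path

# Initialise the slot exactly once per browser session; st.session_state
# survives the script reruns that Streamlit triggers on every interaction.
if "vector_store" not in st.session_state:
    st.session_state.vector_store = None

uploaded_file = st.file_uploader("Upload a PDF document", type=["pdf"])

# Build the index only when a file is present and nothing is cached yet, so
# later reruns (e.g. when the user types a question) reuse the cached index.
if uploaded_file and st.session_state.vector_store is None:
    with st.spinner("Processing document..."):
        # Write the upload to disk because PyPDFLoader expects a file path.
        with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp:
            tmp.write(uploaded_file.getvalue())
            pdf_path = tmp.name

        docs = PyPDFLoader(pdf_path).load()
        splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
        chunks = splitter.split_documents(docs)

        embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
        st.session_state.vector_store = FAISS.from_documents(chunks, embeddings)
    st.success("Document processed successfully!")

# The chat UI lives inside this guard, so it only renders once an index exists.
if st.session_state.vector_store:
    query = st.text_input("Ask a question:")
    if query:
        retriever = st.session_state.vector_store.as_retriever()
        # ...hand `retriever` to the RAG chain / LLM as app.py does.

An alternative would be `st.cache_resource` keyed on the uploaded file, but a `session_state` slot, as used in this commit, keeps the caching explicit and is easy to reset when a new document should replace the old index.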