pankajsingh3012 committed
Commit f2e1e75 · verified
1 Parent(s): 445dac6

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -70,7 +70,6 @@ def chunk_text(text: str, max_chunk_size: int = 1000):
 
     return chunks
 
-
 # Streamlit UI
 st.title("CUDA Documentation QA System")
 
@@ -110,11 +109,12 @@ if query and st.session_state.documents_loaded:
     # Create a PromptTemplate for the QA chain
     qa_prompt = PromptTemplate(template="Answer the following question based on the context provided:\n\n{context}\n\nQuestion: {question}\nAnswer:", input_variables=["context", "question"])
 
-    # Create the retrieval QA chain
-    qa_chain = RetrievalQA(
+    # Load the QA chain
+    qa_chain = load_qa_chain(
+        prompt=qa_prompt,
         retriever=st.session_state.vector_store.as_retriever(),
         llm=llm,
+        combine_documents_chain=True
-        prompt=qa_prompt
     )
 
     response = qa_chain({"question": query})
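
For reference, below is a minimal sketch of how this step is commonly wired in classic LangChain, where load_qa_chain builds only the document-combining chain and documents are retrieved separately before being passed in. The names llm, qa_prompt, query, and st.session_state.vector_store are taken from app.py above; the "stuff" chain type, the explicit retrieval call, and the output handling are assumptions for illustration, not part of this commit.

# Sketch only: a common classic-LangChain pattern, not the code committed above.
from langchain.chains.question_answering import load_qa_chain

# Build the combine-documents ("stuff") chain; qa_prompt supplies {context} and {question}.
qa_chain = load_qa_chain(llm, chain_type="stuff", prompt=qa_prompt)

# Retrieve relevant chunks first, since load_qa_chain does not take a retriever itself.
retriever = st.session_state.vector_store.as_retriever()
docs = retriever.get_relevant_documents(query)

# Pass the retrieved documents and the question to the chain.
response = qa_chain({"input_documents": docs, "question": query})
st.write(response["output_text"])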