Hugging Face Spaces (Space status: Sleeping)
Commit: "Update app.py" — Browse files
Changed file: app.py (CHANGED)
@@ -70,7 +70,6 @@ def chunk_text(text: str, max_chunk_size: int = 1000):
 
     return chunks
 
-
 # Streamlit UI
 st.title("CUDA Documentation QA System")
 
@@ -110,11 +109,12 @@ if query and st.session_state.documents_loaded:
     # Create a PromptTemplate for the QA chain
     qa_prompt = PromptTemplate(template="Answer the following question based on the context provided:\n\n{context}\n\nQuestion: {question}\nAnswer:", input_variables=["context", "question"])
 
-    #
-    qa_chain =
+    # Load the QA chain
+    qa_chain = load_qa_chain(
+        prompt=qa_prompt,
         retriever=st.session_state.vector_store.as_retriever(),
         llm=llm,
-
+        combine_documents_chain=True
     )
 
     response = qa_chain({"question": query})
Resulting app.py after the change (excerpt, new line numbers 70-76 and 109-120):

    return chunks

# Streamlit UI
st.title("CUDA Documentation QA System")

...

    # Create a PromptTemplate for the QA chain
    qa_prompt = PromptTemplate(template="Answer the following question based on the context provided:\n\n{context}\n\nQuestion: {question}\nAnswer:", input_variables=["context", "question"])

    # Load the QA chain
    qa_chain = load_qa_chain(
        prompt=qa_prompt,
        retriever=st.session_state.vector_store.as_retriever(),
        llm=llm,
        combine_documents_chain=True
    )

    response = qa_chain({"question": query})