Spaces:
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -66,15 +66,15 @@ def main():
|
|
66 |
#PATH = 'model/'
|
67 |
#llm = AutoModelForCausalLM.from_pretrained("CohereForAI/aya-101")
|
68 |
# llm = AutoModelForCausalLM.from_pretrained(PATH,local_files_only=True)
|
69 |
-
llm = huggingface_hub.HuggingFaceHub(repo_id="google/flan-t5-large",
|
70 |
model_kwargs={"temperature":1.0, "max_length":256})
|
71 |
if query == 'Summarize':
|
72 |
#docs = pdf_reader.load_and_split()
|
73 |
chain = load_summarize_chain(llm, chain_type="map_reduce")
|
74 |
response = chain.run(chunks_doc[0])
|
75 |
else:
|
76 |
-
docs = vector_store.similarity_search(query=query, k=3)
|
77 |
-
|
78 |
chain = load_qa_chain(llm=llm, chain_type="stuff")
|
79 |
response = chain.run(input_documents=docs, question=query)
|
80 |
#retriever=vector_store.as_retriever()
|
|
|
66 |
#PATH = 'model/'
|
67 |
#llm = AutoModelForCausalLM.from_pretrained("CohereForAI/aya-101")
|
68 |
# llm = AutoModelForCausalLM.from_pretrained(PATH,local_files_only=True)
|
69 |
+
llm = huggingface_hub.HuggingFaceHub(repo_id="google/flan-t5-large",
|
70 |
model_kwargs={"temperature":1.0, "max_length":256})
|
71 |
if query == 'Summarize':
|
72 |
#docs = pdf_reader.load_and_split()
|
73 |
chain = load_summarize_chain(llm, chain_type="map_reduce")
|
74 |
response = chain.run(chunks_doc[0])
|
75 |
else:
|
76 |
+
docs = vector_store.similarity_search(query=query, k=3)
|
77 |
+
st.write(docs)
|
78 |
chain = load_qa_chain(llm=llm, chain_type="stuff")
|
79 |
response = chain.run(input_documents=docs, question=query)
|
80 |
#retriever=vector_store.as_retriever()
|