Update app.py
app.py
CHANGED
@@ -58,7 +58,7 @@ def load_pdf(file_path):
     return VectorStore
 
 def load_chatbot(max_tokens=120):
-    return load_qa_chain(llm=OpenAI(temperature=0.
+    return load_qa_chain(llm=OpenAI(temperature=0.1, max_tokens=max_tokens), chain_type="stuff")
 
 def display_chat_history(chat_history):
     for chat in chat_history:
@@ -105,7 +105,7 @@ def main():
     loading_message.text('Bot is thinking...')
 
     VectorStore = load_pdf(pdf)
-    max_tokens =
+    max_tokens = 120
     chain = load_chatbot(max_tokens=max_tokens)
     docs = VectorStore.similarity_search(query=query, k=2)
 
@@ -118,7 +118,7 @@ def main():
 
     # Check if the response ends with a sentence-ending punctuation
     while not filtered_response.strip().endswith(('.', '!', '?')) and max_tokens < MAX_TOKEN_LIMIT:
-        max_tokens +=
+        max_tokens += 100  # Increase the max_tokens limit
         chain = load_chatbot(max_tokens=max_tokens)
         additional_response = chain.run(input_documents=docs, question=query)
         filtered_response += additional_response  # Append the additional response to the filtered_response
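For context, here is a minimal sketch of how the pieces touched by this commit fit together: the max_tokens budget now starts at 120, is passed through load_chatbot into the OpenAI LLM, and grows by 100 on each retry until the answer ends in sentence punctuation or hits MAX_TOKEN_LIMIT. The MAX_TOKEN_LIMIT value, the answer() wrapper, and how the first filtered_response is produced are assumptions for illustration; they are not shown in this diff.

# Sketch of the retry-with-larger-budget pattern introduced in this commit.
# Assumed for illustration: MAX_TOKEN_LIMIT's value and the answer() wrapper.
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain

MAX_TOKEN_LIMIT = 520  # assumed cap; the real value lives elsewhere in app.py


def load_chatbot(max_tokens=120):
    # Pass the token budget through to the LLM so each retry can raise it.
    return load_qa_chain(llm=OpenAI(temperature=0.1, max_tokens=max_tokens),
                         chain_type="stuff")


def answer(VectorStore, query):
    max_tokens = 120  # starting budget (new in this commit)
    chain = load_chatbot(max_tokens=max_tokens)
    docs = VectorStore.similarity_search(query=query, k=2)

    filtered_response = chain.run(input_documents=docs, question=query)

    # Retry with a larger budget until the reply ends in ., ! or ?
    while (not filtered_response.strip().endswith(('.', '!', '?'))
           and max_tokens < MAX_TOKEN_LIMIT):
        max_tokens += 100  # grow the budget each retry
        chain = load_chatbot(max_tokens=max_tokens)
        additional_response = chain.run(input_documents=docs, question=query)
        filtered_response += additional_response  # append the continuation

    return filtered_response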