Anne31415 committed
Commit ffab811 · Parent: f7d3ba1

Update app.py

Files changed (1)
  1. app.py +14 -3
app.py CHANGED
@@ -61,8 +61,8 @@ def load_pdf(file_path):
     return VectorStore
 
 
-def load_chatbot():
-    return load_qa_chain(llm=OpenAI(temperature=0.1, max_tokens=120), chain_type="stuff")
+def load_chatbot(max_tokens=120):
+    return load_qa_chain(llm=OpenAI(temperature=0.5, max_tokens=max_tokens), chain_type="stuff")
 
 
 def display_chat_history(chat_history):
@@ -97,10 +97,18 @@ def main():
     loading_message.text('Bot is thinking...')
 
     VectorStore = load_pdf(pdf)
-    chain = load_chatbot()
+    max_tokens = 100  # Initial max tokens
+    chain = load_chatbot(max_tokens=max_tokens)
     docs = VectorStore.similarity_search(query=query, k=3)
     with get_openai_callback() as cb:
         response = chain.run(input_documents=docs, question=query)
+
+    # Check if the response ends with a sentence-ending punctuation
+    while not response.strip().endswith(('.', '!', '?')) and max_tokens < MAX_TOKEN_LIMIT:
+        max_tokens += 50  # Increase the max_tokens limit
+        chain = load_chatbot(max_tokens=max_tokens)
+        additional_response = chain.run(input_documents=docs, question=query)
+        response += additional_response  # Append the additional response to the original response
 
     st.session_state['chat_history'].append(("Bot", response, "new"))
 
@@ -121,5 +129,8 @@ def main():
     # Mark all messages as old after displaying
     st.session_state['chat_history'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history']]
 
+# Define a maximum token limit to avoid infinite loops
+MAX_TOKEN_LIMIT = 400
+
 if __name__ == "__main__":
     main()
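
Taken together, the commit makes the completion budget of load_chatbot configurable and retries apparently truncated answers with a larger budget. Below is a minimal sketch of how the changed pieces fit together outside the Streamlit flow, assuming the legacy LangChain imports (langchain.llms.OpenAI, langchain.chains.question_answering.load_qa_chain, langchain.callbacks.get_openai_callback) that these calls came from at the time; the answer() helper and the vector_store parameter are illustrative names, not part of the actual app.py.

# Sketch only: assumes legacy LangChain (~0.0.x); `answer` and `vector_store`
# are illustrative names, not taken from the real app.py.
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.callbacks import get_openai_callback

MAX_TOKEN_LIMIT = 400  # hard ceiling so the retry loop always terminates


def load_chatbot(max_tokens=120):
    # "stuff" QA chain with a configurable completion budget (previously fixed at 120)
    return load_qa_chain(llm=OpenAI(temperature=0.5, max_tokens=max_tokens),
                         chain_type="stuff")


def answer(vector_store, query):
    docs = vector_store.similarity_search(query=query, k=3)
    max_tokens = 100  # initial budget, as in the commit
    chain = load_chatbot(max_tokens=max_tokens)
    with get_openai_callback() as cb:  # cb tracks token usage/cost (unused here)
        response = chain.run(input_documents=docs, question=query)
        # If the answer does not end in ., ! or ?, assume it was cut off:
        # raise the budget by 50 tokens, re-run the chain, and append the output.
        while (not response.strip().endswith(('.', '!', '?'))
               and max_tokens < MAX_TOKEN_LIMIT):
            max_tokens += 50
            chain = load_chatbot(max_tokens=max_tokens)
            response += chain.run(input_documents=docs, question=query)
    return response

Note the design choice in the retry loop: each pass regenerates a full answer with a larger max_tokens and appends it to the previous text rather than continuing it mid-sentence, so the combined response may repeat content; MAX_TOKEN_LIMIT bounds the budget at 400 tokens per attempt and keeps the loop finite.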