Update app.py
app.py CHANGED
@@ -175,6 +175,11 @@ def ask_bot(query):
     # Combine the default prompt with the user query
     full_query = standard_prompt + query
     return full_query
+
+# Function to combine the chat history with the current query
+def combine_history_with_query(current_query, history):
+    combined_history = " ".join([f"{chat[0]}: {chat[1]}" for chat in history])
+    return combined_history + " " + current_query


 def page1():
@@ -240,34 +245,50 @@ def page1():
     query = "Was bedeutet die Vorhaltefinanzierung?"


-
     if query:
         full_query = ask_bot(query)
-
+        # Combine the entire session history with the current query
+        combined_query = combine_history_with_query(full_query, st.session_state['chat_history_page1'])
+
+        with st.spinner('Bot is thinking...'):
+            chain = load_chatbot()
+            docs = VectorStore.similarity_search(query=combined_query, k=5)
+            with get_openai_callback() as cb:
+                response = chain.run(input_documents=docs, question=combined_query)
+                response = handle_no_answer(response)
+
+        # Update the chat history after the bot's response
+        st.session_state['chat_history_page1'].append(("User", query))
+        st.session_state['chat_history_page1'].append(("Bot", response))
+
+
+        #if query:
+        #full_query = ask_bot(query)
+        #st.session_state['chat_history_page1'].append(("User", query, "new"))

         # Start timing
-        start_time = time.time()
+        #start_time = time.time()


-        with st.spinner('Bot is thinking...'):
-            chain = load_chatbot()
-            docs = VectorStore.similarity_search(query=query, k=5)
-            with get_openai_callback() as cb:
-                response = chain.run(input_documents=docs, question=full_query)
-                response = handle_no_answer(response)  # Process the response through the new function
+        #with st.spinner('Bot is thinking...'):
+        #chain = load_chatbot()
+        #docs = VectorStore.similarity_search(query=query, k=5)
+        #with get_openai_callback() as cb:
+        #response = chain.run(input_documents=docs, question=full_query)
+        #response = handle_no_answer(response)  # Process the response through the new function



         # Stop timing
-        end_time = time.time()
+        #end_time = time.time()

         # Calculate duration
-        duration = end_time - start_time
+        #duration = end_time - start_time

         # You can use Streamlit's text function to display the timing
-        st.text(f"Response time: {duration:.2f} seconds")
+        #st.text(f"Response time: {duration:.2f} seconds")

-        st.session_state['chat_history_page1'].append(("Bot", response, "new"))
+        #st.session_state['chat_history_page1'].append(("Bot", response, "new"))


     # Display new messages at the bottom
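For reference, the sketch below restates the new combine_history_with_query helper from the diff and shows, on made-up sample data (the sample_history entries and the printed output are illustrative assumptions, not taken from app.py), how it flattens the session history into the single string that page1() now passes to both VectorStore.similarity_search and chain.run.

    # Sketch only: the helper is copied from the diff above; the sample
    # history is illustrative data, not part of app.py.
    def combine_history_with_query(current_query, history):
        combined_history = " ".join([f"{chat[0]}: {chat[1]}" for chat in history])
        return combined_history + " " + current_query

    sample_history = [
        ("User", "Hallo"),
        ("Bot", "Hallo! Wie kann ich helfen?"),
    ]
    print(combine_history_with_query("Was bedeutet die Vorhaltefinanzierung?", sample_history))
    # Prints: User: Hallo Bot: Hallo! Wie kann ich helfen? Was bedeutet die Vorhaltefinanzierung?

Because the helper only reads chat[0] and chat[1], it is indifferent to whether history entries are the new two-element ("User", query) / ("Bot", response) tuples or carry a third element such as the "new" flag used elsewhere in the file.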