Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -109,31 +109,8 @@ def load_pdf_text(file_path):
|
|
109 |
return text
|
110 |
|
111 |
def load_chatbot():
|
112 |
-
#
|
113 |
-
|
114 |
-
standard_prompt = "Schreibe immer höflich und auf Deutsch und frage immer nach Jahren, wenn keines explizit angegeben ist. "
|
115 |
-
return load_qa_chain(llm=OpenAI(model_name="gpt-3.5-turbo-instruct", prompt=standard_prompt), chain_type="stuff")
|
116 |
-
|
117 |
-
|
118 |
-
def ask_bot(query):
|
119 |
-
chain = load_chatbot()
|
120 |
-
|
121 |
-
# Definiere hier den standardmäßigen Prompt
|
122 |
-
standard_prompt = "Schreibe immer höflich und auf Deutsch und frage immer nach Jahren, wenn keines explizit angegeben ist. "
|
123 |
-
|
124 |
-
# Kombiniere den standardmäßigen Prompt mit der Benutzeranfrage
|
125 |
-
full_query = standard_prompt + query
|
126 |
-
|
127 |
-
# Suche nach den ähnlichsten Dokumenten
|
128 |
-
docs = VectorStore.similarity_search(query=full_query, k=5)
|
129 |
-
|
130 |
-
with get_openai_callback() as cb:
|
131 |
-
# Übergib die kombinierte Anfrage an die run-Funktion
|
132 |
-
response = chain.run(input_documents=docs, question=full_query)
|
133 |
-
response = handle_no_answer(response) # Verarbeite die Antwort durch die neue Funktion
|
134 |
-
|
135 |
-
return response
|
136 |
-
|
137 |
|
138 |
|
139 |
def display_chat_history(chat_history):
|
@@ -193,6 +170,11 @@ def handle_no_answer(response):
|
|
193 |
return response
|
194 |
|
195 |
|
|
|
|
|
|
|
|
|
|
|
196 |
|
197 |
|
198 |
def page1():
|
@@ -258,25 +240,26 @@ def page1():
|
|
258 |
query = "Was bedeutet die Vorhaltefinanzierung?"
|
259 |
|
260 |
|
261 |
-
|
262 |
if query:
|
263 |
st.session_state['chat_history_page1'].append(("User", query, "new"))
|
264 |
-
|
265 |
# Start timing
|
266 |
start_time = time.time()
|
267 |
-
|
268 |
with st.spinner('Bot is thinking...'):
|
|
|
269 |
chain = load_chatbot()
|
270 |
-
|
271 |
-
|
272 |
-
|
|
|
|
|
|
|
273 |
with get_openai_callback() as cb:
|
274 |
-
|
275 |
-
response =
|
276 |
-
|
277 |
-
|
278 |
-
|
279 |
-
|
280 |
|
281 |
# Stop timing
|
282 |
end_time = time.time()
|
|
|
109 |
return text
|
110 |
|
111 |
def load_chatbot():
    """Build the question-answering chain used by the app.

    Returns:
        A LangChain "stuff" QA chain backed by the OpenAI
        ``gpt-3.5-turbo-instruct`` completion model. "stuff" means all
        retrieved documents are concatenated into a single prompt.
    """
    # NOTE(review): dead commented-out variant (default OpenAI model) removed;
    # the model is pinned explicitly so behaviour is reproducible.
    return load_qa_chain(
        llm=OpenAI(model_name="gpt-3.5-turbo-instruct"),
        chain_type="stuff",
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
114 |
|
115 |
|
116 |
def display_chat_history(chat_history):
|
|
|
170 |
return response
|
171 |
|
172 |
|
173 |
+
def create_system_message():
    """Build the system-role message that steers the chatbot.

    Returns:
        dict: An OpenAI-style chat message with ``'role'`` and
        ``'content'`` keys. The content tells the bot to be polite and
        accurate, to ask for missing specifics (notably the year), and
        to always answer in English.
    """
    instructions = (
        'You are a friendly and helpful chatbot. Remember to be polite, '
        'provide accurate information, and ask for specifics if the user '
        'query is vague - especially for years if the user isnt telling '
        'you the year. Always answer in english, even if the users asks '
        'in german.'
    )
    return dict(role='system', content=instructions)
|
178 |
|
179 |
|
180 |
def page1():
|
|
|
240 |
query = "Was bedeutet die Vorhaltefinanzierung?"
|
241 |
|
242 |
|
243 |
+
|
244 |
if query:
|
245 |
st.session_state['chat_history_page1'].append(("User", query, "new"))
|
246 |
+
|
247 |
# Start timing
|
248 |
start_time = time.time()
|
249 |
+
|
250 |
with st.spinner('Bot is thinking...'):
|
251 |
+
system_message = create_system_message() # Create system message
|
252 |
chain = load_chatbot()
|
253 |
+
docs = VectorStore.similarity_search(query=query, k=5)
|
254 |
+
|
255 |
+
# Include system message in conversation history
|
256 |
+
conversation_history = [{'role': 'system', 'content': system_message['content']}]
|
257 |
+
conversation_history.extend(docs) # Add the document search results
|
258 |
+
|
259 |
with get_openai_callback() as cb:
|
260 |
+
response = chain.run(input_documents=conversation_history, question=query)
|
261 |
+
response = handle_no_answer(response)
|
262 |
+
|
|
|
|
|
|
|
263 |
|
264 |
# Stop timing
|
265 |
end_time = time.time()
|