Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -177,12 +177,30 @@ def conversation(qa_chain, message, history):
|
|
177 |
new_history = history + [(message, response_answer)]
|
178 |
return qa_chain, gr.update(value=""), new_history, response_source1, response_source1_page, response_source2, response_source2_page, response_source3, response_source3_page
|
179 |
|
180 |
-
def conversation_no_doc(
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
|
185 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
186 |
|
187 |
def upload_file(file_obj):
|
188 |
list_file_path = []
|
@@ -336,7 +354,7 @@ def demo():
|
|
336 |
submit_btn_no_doc = gr.Button("Submit message")
|
337 |
clear_btn_no_doc = gr.ClearButton([msg_no_doc, chatbot_no_doc], value="Clear conversation")
|
338 |
|
339 |
-
gr.ChatInterface(
|
340 |
fn=generate,
|
341 |
chatbot=chatbot_no_doc,
|
342 |
additional_inputs=additional_inputs,
|
@@ -373,8 +391,8 @@ def demo():
|
|
373 |
|
374 |
# Initialize LLM without document for conversation
|
375 |
submit_btn_no_doc.click(conversation_no_doc,
|
376 |
-
inputs=[
|
377 |
-
outputs=[
|
378 |
queue=False)
|
379 |
clear_btn_no_doc.click(lambda:[None,""],
|
380 |
inputs=None,
|
|
|
177 |
new_history = history + [(message, response_answer)]
|
178 |
return qa_chain, gr.update(value=""), new_history, response_source1, response_source1_page, response_source2, response_source2_page, response_source3, response_source3_page
|
179 |
|
180 |
+
def conversation_no_doc(prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
    """Stream a chat completion for *prompt* with no document context.

    Formats the prompt together with the running *history* and streams the
    model's reply token by token, yielding the accumulated text after each
    token so the Gradio UI can render the answer incrementally.

    Parameters
    ----------
    prompt : str
        The user's latest message.
    history : list
        Prior (user, assistant) turns, consumed by ``format_prompt``.
    temperature, max_new_tokens, top_p, repetition_penalty :
        Sampling knobs forwarded to ``client.text_generation``. Gradio
        sliders may deliver them as strings, so numeric ones are coerced.

    Yields
    ------
    str
        The response text accumulated so far.
    """
    # Coerce and clamp: extremely low temperatures break sampling, so floor at 1e-2.
    temperature = max(float(temperature), 1e-2)
    top_p = float(top_p)

    # NOTE(review): fixed seed makes sampling reproducible across calls — confirm intended.
    generate_kwargs = {
        "temperature": temperature,
        "max_new_tokens": max_new_tokens,
        "top_p": top_p,
        "repetition_penalty": repetition_penalty,
        "do_sample": True,
        "seed": 42,
    }

    formatted_prompt = format_prompt(prompt, history)

    # Stream tokens from the inference endpoint; yield the running text each step.
    token_stream = client.text_generation(
        formatted_prompt,
        **generate_kwargs,
        stream=True,
        details=True,
        return_full_text=False,
    )
    output = ""
    for chunk in token_stream:
        output += chunk.token.text
        yield output
    # Generator return value (StopIteration.value); kept for parity with the original.
    return output
|
204 |
|
205 |
def upload_file(file_obj):
|
206 |
list_file_path = []
|
|
|
354 |
submit_btn_no_doc = gr.Button("Submit message")
|
355 |
clear_btn_no_doc = gr.ClearButton([msg_no_doc, chatbot_no_doc], value="Clear conversation")
|
356 |
|
357 |
+
chat_interface = gr.ChatInterface(
|
358 |
fn=generate,
|
359 |
chatbot=chatbot_no_doc,
|
360 |
additional_inputs=additional_inputs,
|
|
|
391 |
|
392 |
# Initialize LLM without document for conversation
|
393 |
submit_btn_no_doc.click(conversation_no_doc,
|
394 |
+
inputs=[msg_no_doc, chatbot_no_doc],
|
395 |
+
outputs=[msg_no_doc, chatbot_no_doc],
|
396 |
queue=False)
|
397 |
clear_btn_no_doc.click(lambda:[None,""],
|
398 |
inputs=None,
|