Shreyas094 committed on
Commit
1289a5c
·
verified ·
1 Parent(s): 0090ae2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +53 -22
app.py CHANGED
@@ -188,14 +188,16 @@ class CitingSources(BaseModel):
188
  ...,
189
  description="List of sources to cite. Should be an URL of the source."
190
  )
191
- def chatbot_interface(message, history, use_web_search, model, temperature, num_calls):
 
 
 
192
  if not message.strip():
193
  return "", history
194
 
195
  history = history + [(message, "")]
196
-
197
  try:
198
- for response in respond(message, history, model, temperature, num_calls, use_web_search):
199
  history[-1] = (message, response)
200
  yield history
201
  except gr.CancelledError:
@@ -214,7 +216,7 @@ def retry_last_response(history, use_web_search, model, temperature, num_calls):
214
 
215
  return chatbot_interface(last_user_msg, history, use_web_search, model, temperature, num_calls)
216
 
217
- def respond(message, history, model, temperature, num_calls, use_web_search):
218
  logging.info(f"User Query: {message}")
219
  logging.info(f"Model Used: {model}")
220
  logging.info(f"Search Type: {'Web Search' if use_web_search else 'PDF Search'}")
@@ -223,9 +225,11 @@ def respond(message, history, model, temperature, num_calls, use_web_search):
223
  if use_web_search:
224
  for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature):
225
  response = f"{main_content}\n\n{sources}"
226
- first_line = response.split('\n')[0] if response else ''
227
- logging.info(f"Generated Response (first line): {first_line}")
228
- yield response
 
 
229
  else:
230
  embed = get_embeddings()
231
  if os.path.exists("faiss_database"):
@@ -235,19 +239,21 @@ def respond(message, history, model, temperature, num_calls, use_web_search):
235
  context_str = "\n".join([doc.page_content for doc in relevant_docs])
236
  else:
237
  context_str = "No documents available."
238
-
239
  if model == "@cf/meta/llama-3.1-8b-instruct":
240
- # Use Cloudflare API
241
- for partial_response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
242
- first_line = partial_response.split('\n')[0] if partial_response else ''
243
- logging.info(f"Generated Response (first line): {first_line}")
244
- yield partial_response
 
245
  else:
246
- # Use Hugging Face API
247
- for partial_response in get_response_from_pdf(message, model, num_calls=num_calls, temperature=temperature):
248
- first_line = partial_response.split('\n')[0] if partial_response else ''
249
- logging.info(f"Generated Response (first line): {first_line}")
250
- yield partial_response
 
251
  except Exception as e:
252
  logging.error(f"Error with {model}: {str(e)}")
253
  if "microsoft/Phi-3-mini-4k-instruct" in model:
@@ -255,8 +261,7 @@ def respond(message, history, model, temperature, num_calls, use_web_search):
255
  fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
256
  yield from respond(message, history, fallback_model, temperature, num_calls, use_web_search)
257
  else:
258
- yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
259
-
260
  logging.basicConfig(level=logging.DEBUG)
261
 
262
  def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2, search_type="pdf"):
@@ -395,12 +400,14 @@ css = """
395
  use_web_search = gr.Checkbox(label="Use Web Search", value=False)
396
 
397
  demo = gr.ChatInterface(
398
- respond,
399
  additional_inputs=[
400
  gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[0]),
401
  gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),
402
  gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
403
- use_web_search # Add this line to include the checkbox
 
 
404
  ],
405
  title="AI-powered Web Search and PDF Chat Assistant",
406
  description="Chat with your PDFs or use web search to answer questions.",
@@ -422,6 +429,30 @@ demo = gr.ChatInterface(
422
  color_accent_soft_dark="transparent",
423
  code_background_fill_dark="#140b0b"
424
  ),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
425
 
426
  css=css,
427
  examples=[
 
188
  ...,
189
  description="List of sources to cite. Should be an URL of the source."
190
  )
191
+ Here's a modified version of the `chatbot_interface` function that includes the "Continue Generation" button logic:
192
+
193
+ ```
194
+ def chatbot_interface(message, history, use_web_search, model, temperature, num_calls, is_interrupted=False, partial_response=""):
195
  if not message.strip():
196
  return "", history
197
 
198
  history = history + [(message, "")]
 
199
  try:
200
+ for response in respond(message, history, model, temperature, num_calls, use_web_search, is_interrupted, partial_response):
201
  history[-1] = (message, response)
202
  yield history
203
  except gr.CancelledError:
 
216
 
217
  return chatbot_interface(last_user_msg, history, use_web_search, model, temperature, num_calls)
218
 
219
+ def respond(message, history, model, temperature, num_calls, use_web_search, is_interrupted=False, partial_response=""):
220
  logging.info(f"User Query: {message}")
221
  logging.info(f"Model Used: {model}")
222
  logging.info(f"Search Type: {'Web Search' if use_web_search else 'PDF Search'}")
 
225
  if use_web_search:
226
  for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature):
227
  response = f"{main_content}\n\n{sources}"
228
+ if is_interrupted:
229
+ partial_response += response
230
+ yield partial_response
231
+ else:
232
+ yield response
233
  else:
234
  embed = get_embeddings()
235
  if os.path.exists("faiss_database"):
 
239
  context_str = "\n".join([doc.page_content for doc in relevant_docs])
240
  else:
241
  context_str = "No documents available."
242
+
243
  if model == "@cf/meta/llama-3.1-8b-instruct":
244
+ for response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
245
+ if is_interrupted:
246
+ partial_response += response
247
+ yield partial_response
248
+ else:
249
+ yield response
250
  else:
251
+ for response in get_response_from_pdf(message, model, num_calls=num_calls, temperature=temperature):
252
+ if is_interrupted:
253
+ partial_response += response
254
+ yield partial_response
255
+ else:
256
+ yield response
257
  except Exception as e:
258
  logging.error(f"Error with {model}: {str(e)}")
259
  if "microsoft/Phi-3-mini-4k-instruct" in model:
 
261
  fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
262
  yield from respond(message, history, fallback_model, temperature, num_calls, use_web_search)
263
  else:
264
+ yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
 
265
  logging.basicConfig(level=logging.DEBUG)
266
 
267
  def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2, search_type="pdf"):
 
400
  use_web_search = gr.Checkbox(label="Use Web Search", value=False)
401
 
402
  demo = gr.ChatInterface(
403
+ chatbot_interface,
404
  additional_inputs=[
405
  gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[0]),
406
  gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),
407
  gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
408
+ use_web_search,
409
+ gr.Checkbox(value=False, label="Is Interrupted"), # Flag to indicate interruption (Gradio has no gr.Boolean; Checkbox is the boolean input component)
410
+ gr.Textbox(label="Partial Response"), # Store partial response
411
  ],
412
  title="AI-powered Web Search and PDF Chat Assistant",
413
  description="Chat with your PDFs or use web search to answer questions.",
 
429
  color_accent_soft_dark="transparent",
430
  code_background_fill_dark="#140b0b"
431
  ),
432
+ css=css,
433
+ examples=[
434
+ ["Tell me about the contents of the uploaded PDFs."],
435
+ ["What are the main topics discussed in the documents?"],
436
+ ["Can you summarize the key points from the PDFs?"],
437
+ ],
438
+ cache_examples=False,
439
+ analytics_enabled=False,
440
+ )
441
+
442
+ continue_button = gr.Button("Continue Generation")
443
+ continue_output = gr.Textbox(label="Response")
444
+
445
+ continue_button.click(
446
+ chatbot_interface,
447
+ inputs=[
448
+ message_input,
449
+ history,
450
+ use_web_search,
451
+ model_dropdown,
452
+ temperature_slider,
453
+ num_calls_slider,
454
+ gr.Boolean(value=True, label="Is Interrupted"), # Set is_interrupted to True
455
+ gr.Textbox(label="Partial Response"),
456
 
457
  css=css,
458
  examples=[