Shreyas094 committed
Commit f6f35dc · verified · 1 Parent(s): a71bb6a

Update app.py

Files changed (1): app.py +42 -0
app.py CHANGED
@@ -381,6 +381,37 @@ Write a detailed and complete response that answers the following user question:
                 response += chunk
                 yield response  # Yield partial response
 
+def continue_generation(message, last_response, model, temperature, num_calls):
+    continue_prompt = f"""
+    Original query: {message}
+
+    Previously generated response:
+    {last_response}
+
+    Please continue the response from where it left off, maintaining coherence and relevance to the original query.
+    """
+
+    if model == "@cf/meta/llama-3.1-8b-instruct":
+        # Use Cloudflare API
+        for response in get_response_from_cloudflare(prompt=continue_prompt, context="", query="", num_calls=num_calls, temperature=temperature, search_type="continue"):
+            yield last_response + response
+    else:
+        # Use Hugging Face API
+        client = InferenceClient(model, token=huggingface_token)
+
+        continued_response = last_response
+        for i in range(num_calls):
+            for message in client.chat_completion(
+                messages=[{"role": "user", "content": continue_prompt}],
+                max_tokens=1000,
+                temperature=temperature,
+                stream=True,
+            ):
+                if message.choices and message.choices[0].delta and message.choices[0].delta.content:
+                    chunk = message.choices[0].delta.content
+                    continued_response += chunk
+                    yield continued_response
+
 def vote(data: gr.LikeData):
     if data.liked:
         print(f"You upvoted this response: {data.value}")
@@ -433,6 +464,17 @@ demo = gr.ChatInterface(
     analytics_enabled=False,
 )
 
+# In the Gradio interface
+with demo:
+    continue_button = gr.Button("Continue Generation")
+    continue_button.click(continue_generation,
+                          inputs=[gr.State(lambda: history[-1][0] if history else ""),
+                                  gr.State(lambda: history[-1][1] if history else ""),
+                                  gr.Dropdown(choices=MODELS, label="Select Model"),
+                                  gr.Slider(label="Temperature"),
+                                  gr.Slider(label="Number of API Calls")],
+                          outputs=gr.Chatbot())
+
 # Add file upload functionality
 with demo:
     gr.Markdown("## Upload PDF Documents")
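
For orientation, a minimal sketch of how the new continue_generation generator could be driven outside the Gradio UI. It assumes MODELS, huggingface_token, and get_response_from_cloudflare are defined elsewhere in app.py (the diff references them but does not add them); the query and partial answer below are made-up placeholders.

# Sketch only, not part of this commit: call continue_generation directly and
# print just the newly streamed text. Each yield is the cumulative text so far
# (last_response plus the continuation), so we track how much has been printed.
query = "Summarise the causes of the 1929 stock market crash"
last_answer = "The crash had three broad causes: speculative leverage, ..."

printed = len(last_answer)
for partial in continue_generation(
    message=query,
    last_response=last_answer,
    model=MODELS[0],  # assumption: the first entry is a Hugging Face model id
    temperature=0.7,
    num_calls=1,
):
    print(partial[printed:], end="", flush=True)  # print only the new tail
    printed = len(partial)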
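
The click wiring above snapshots a history variable through gr.State(lambda: ...) and creates fresh Dropdown, Slider, and Chatbot components inline. As a point of comparison only, here is a rough sketch of another common Gradio pattern, where the handler reads the last turn out of the Chatbot value itself and streams the continuation back into it; chatbot, model_dropdown, temperature_slider, and calls_slider are hypothetical names, not components defined by this commit.

# Sketch under assumptions, not the committed implementation.
def continue_from_history(chat_history, model, temperature, num_calls):
    if not chat_history:
        yield chat_history
        return
    last_user, last_bot = chat_history[-1]
    for partial in continue_generation(last_user, last_bot or "", model, temperature, num_calls):
        chat_history[-1] = (last_user, partial)  # replace the last bot turn with the growing text
        yield chat_history

with demo:
    continue_button = gr.Button("Continue Generation")
    continue_button.click(
        continue_from_history,
        inputs=[chatbot, model_dropdown, temperature_slider, calls_slider],
        outputs=chatbot,
    )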