sagar007 committed
Commit 379919c · verified
1 Parent(s): ed0c3c5

Update app.py

Files changed (1):
  app.py +12 -9
app.py CHANGED
@@ -4,7 +4,7 @@ from transformers import pipeline
 from duckduckgo_search import DDGS
 from datetime import datetime
 
-# Initialize a lightweight text generation model on CPU (moves to GPU when decorated)
+# Initialize a lightweight text generation model on CPU
 generator = pipeline("text-generation", model="distilgpt2", device=-1)  # -1 ensures CPU by default
 
 # Web search function (CPU-based)
@@ -57,7 +57,7 @@ def process_deep_research(query: str, history: list):
     """Handle the deep research process."""
     if not history:
         history = []
-
+
     # Fetch web results (CPU)
     web_results = get_web_results(query)
     sources_html = format_sources(web_results)
@@ -65,9 +65,11 @@ def process_deep_research(query: str, history: list):
     # Generate answer (GPU via @spaces.GPU)
     prompt = format_prompt(query, web_results)
     answer = generate_answer(prompt)
-    final_history = history + [[query, answer]]
-
-    return answer, sources_html, final_history
+
+    # Convert tuple history to messages format (role/content)
+    new_history = history + [{"role": "user", "content": query}, {"role": "assistant", "content": answer}]
+
+    return answer, sources_html, new_history
 
 # Custom CSS for a cool, lightweight UI
 css = """
@@ -163,26 +165,27 @@ with gr.Blocks(title="Deep Research Engine - ZeroGPU", css=css) as demo:
         with gr.Column():
             sources_output = gr.HTML(label="Sources", elem_classes="sources-list")
 
-    # Chat history
+    # Chat history (using messages format)
     with gr.Row():
-        history_display = gr.Chatbot(label="History", elem_classes="history-box")
+        history_display = gr.Chatbot(label="History", elem_classes="history-box", type="messages")
 
     # Event handling
     def handle_search(query, history):
         answer, sources, new_history = process_deep_research(query, history)
         return answer, sources, new_history
 
+    # Remove _js parameter from click event
     search_btn.click(
         fn=handle_search,
         inputs=[search_input, history_state],
-        outputs=[answer_output, sources_output, history_display],
-        _js="() => [document.querySelector('.search-box input').value, null]"
+        outputs=[answer_output, sources_output, history_display]
     ).then(
         fn=lambda x: x,
        inputs=[history_display],
        outputs=[history_state]
     )
 
+    # Remove _js parameter from submit event
     search_input.submit(
         fn=handle_search,
         inputs=[search_input, history_state],
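For readers following the history-format change above: the sketch below shows, in minimal self-contained form, how the messages-style history is expected to flow through a Blocks app. It assumes Gradio 4.x or later, where gr.Chatbot(type="messages") takes a list of {"role": ..., "content": ...} dicts and event listeners no longer accept the legacy _js argument (newer releases expose a js argument instead). The answer_for helper is hypothetical, standing in for the Space's process_deep_research pipeline; the component names simply mirror the diff.

```python
import gradio as gr

def answer_for(query: str) -> str:
    # Hypothetical stand-in for the Space's search + generation step.
    return f"(stub answer for: {query})"

def handle_search(query, history):
    history = history or []
    answer = answer_for(query)
    # Messages format: one dict per turn, matching gr.Chatbot(type="messages").
    new_history = history + [
        {"role": "user", "content": query},
        {"role": "assistant", "content": answer},
    ]
    return answer, new_history

with gr.Blocks() as demo:
    search_input = gr.Textbox(label="Query")
    answer_output = gr.Markdown()
    history_display = gr.Chatbot(label="History", type="messages")
    history_state = gr.State([])
    search_btn = gr.Button("Search")

    # No _js/js hook needed: inputs are read directly from the components.
    search_btn.click(
        fn=handle_search,
        inputs=[search_input, history_state],
        outputs=[answer_output, history_display],
    ).then(
        fn=lambda h: h,
        inputs=[history_display],
        outputs=[history_state],
    )

if __name__ == "__main__":
    demo.launch()
```

Chaining .then() to copy the Chatbot value back into gr.State mirrors the diff's pattern of keeping history_state in sync with the displayed history between turns.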