refacto into different tabs
Browse files- app.py +49 -677
- climateqa/chat.py +197 -0
- climateqa/{event_handler.py → handle_stream_events.py} +0 -0
- front/deprecated.py +46 -0
- front/event_listeners.py +0 -0
- front/tabs/__init__.py +6 -0
- front/tabs/chat_interface.py +55 -0
- front/tabs/main_tab.py +69 -0
- front/tabs/tab_about.py +38 -0
- front/tabs/tab_config.py +123 -0
- front/tabs/tab_examples.py +40 -0
- front/tabs/tab_figures.py +31 -0
- front/tabs/tab_papers.py +36 -0
- front/tabs/tab_recommended_content.py +0 -0
app.py
CHANGED
@@ -1,18 +1,7 @@
|
|
1 |
# Import necessary libraries
|
2 |
import os
|
3 |
-
import json
|
4 |
-
import time
|
5 |
-
import re
|
6 |
-
import base64
|
7 |
-
from datetime import datetime
|
8 |
-
from io import BytesIO
|
9 |
-
|
10 |
-
import numpy as np
|
11 |
-
import pandas as pd
|
12 |
import gradio as gr
|
13 |
-
|
14 |
-
from gradio_modal import Modal
|
15 |
-
from sentence_transformers import CrossEncoder
|
16 |
from azure.storage.fileshare import ShareServiceClient
|
17 |
|
18 |
# Import custom modules
|
@@ -20,21 +9,15 @@ from climateqa.engine.embeddings import get_embeddings_function
|
|
20 |
from climateqa.engine.llm import get_llm
|
21 |
from climateqa.engine.vectorstore import get_pinecone_vectorstore
|
22 |
from climateqa.engine.reranker import get_reranker
|
23 |
-
from climateqa.sample_questions import QUESTIONS
|
24 |
-
from climateqa.constants import POSSIBLE_REPORTS
|
25 |
-
from climateqa.utils import get_image_from_azure_blob_storage
|
26 |
from climateqa.engine.graph import make_graph_agent
|
27 |
from climateqa.engine.chains.retrieve_papers import find_papers
|
28 |
-
from
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
convert_to_docs_to_html
|
35 |
-
)
|
36 |
from utils import create_user_id
|
37 |
-
from front.utils import make_html_source
|
38 |
import logging
|
39 |
|
40 |
logging.basicConfig(level=logging.WARNING)
|
@@ -42,6 +25,7 @@ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Suppresses INFO and WARNING logs
|
|
42 |
logging.getLogger().setLevel(logging.WARNING)
|
43 |
|
44 |
|
|
|
45 |
# Load environment variables in local mode
|
46 |
try:
|
47 |
from dotenv import load_dotenv
|
@@ -57,25 +41,6 @@ theme = gr.themes.Base(
|
|
57 |
font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
|
58 |
)
|
59 |
|
60 |
-
# Initialize prompt and system template
|
61 |
-
init_prompt = """
|
62 |
-
Hello, I am ClimateQ&A, a conversational assistant designed to help you understand climate change and biodiversity loss. I will answer your questions by **sifting through the IPCC and IPBES scientific reports**.
|
63 |
-
|
64 |
-
❓ How to use
|
65 |
-
- **Language**: You can ask me your questions in any language.
|
66 |
-
- **Audience**: You can specify your audience (children, general public, experts) to get a more adapted answer.
|
67 |
-
- **Sources**: You can choose to search in the IPCC or IPBES reports, or both.
|
68 |
-
- **Relevant content sources**: You can choose to search for figures, papers, or graphs that can be relevant for your question.
|
69 |
-
|
70 |
-
⚠️ Limitations
|
71 |
-
*Please note that the AI is not perfect and may sometimes give irrelevant answers. If you are not satisfied with the answer, please ask a more specific question or report your feedback to help us improve the system.*
|
72 |
-
|
73 |
-
🛈 Information
|
74 |
-
Please note that we log your questions for meta-analysis purposes, so avoid sharing any sensitive or personal information.
|
75 |
-
|
76 |
-
What do you want to learn ?
|
77 |
-
"""
|
78 |
-
|
79 |
# Azure Blob Storage credentials
|
80 |
account_key = os.environ["BLOB_ACCOUNT_KEY"]
|
81 |
if len(account_key) == 86:
|
@@ -93,20 +58,7 @@ share_client = service.get_share_client(file_share_name)
|
|
93 |
|
94 |
user_id = create_user_id()
|
95 |
|
96 |
-
|
97 |
-
CITATION_LABEL = "BibTeX citation for ClimateQ&A"
|
98 |
-
CITATION_TEXT = r"""@misc{climateqa,
|
99 |
-
author={Théo Alves Da Costa, Timothée Bohe},
|
100 |
-
title={ClimateQ&A, AI-powered conversational assistant for climate change and biodiversity loss},
|
101 |
-
year={2024},
|
102 |
-
howpublished= {\url{https://climateqa.com}},
|
103 |
-
}
|
104 |
-
@software{climateqa,
|
105 |
-
author = {Théo Alves Da Costa, Timothée Bohe},
|
106 |
-
publisher = {ClimateQ&A},
|
107 |
-
title = {ClimateQ&A, AI-powered conversational assistant for climate change and biodiversity loss},
|
108 |
-
}
|
109 |
-
"""
|
110 |
|
111 |
# Create vectorstore and retriever
|
112 |
embeddings_function = get_embeddings_function()
|
@@ -119,224 +71,21 @@ reranker = get_reranker("nano")
|
|
119 |
|
120 |
agent = make_graph_agent(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, vectorstore_region = vectorstore_region, reranker=reranker, threshold_docs=0)#TODO put back default 0.2
|
121 |
|
122 |
-
# Function to update modal visibility
|
123 |
-
def update_config_modal_visibility(config_open):
|
124 |
-
new_config_visibility_status = not config_open
|
125 |
-
return gr.update(visible=new_config_visibility_status), new_config_visibility_status
|
126 |
-
|
127 |
-
# Main chat function
|
128 |
-
async def chat(
|
129 |
-
query: str,
|
130 |
-
history: list[ChatMessage],
|
131 |
-
audience: str,
|
132 |
-
sources: list[str],
|
133 |
-
reports: list[str],
|
134 |
-
relevant_content_sources_selection: list[str],
|
135 |
-
search_only: bool
|
136 |
-
) -> tuple[list, str, str, str, list, str]:
|
137 |
-
"""Process a chat query and return response with relevant sources and visualizations.
|
138 |
-
|
139 |
-
Args:
|
140 |
-
query (str): The user's question
|
141 |
-
history (list): Chat message history
|
142 |
-
audience (str): Target audience type
|
143 |
-
sources (list): Knowledge base sources to search
|
144 |
-
reports (list): Specific reports to search within sources
|
145 |
-
relevant_content_sources_selection (list): Types of content to retrieve (figures, papers, etc)
|
146 |
-
search_only (bool): Whether to only search without generating answer
|
147 |
-
|
148 |
-
Yields:
|
149 |
-
tuple: Contains:
|
150 |
-
- history: Updated chat history
|
151 |
-
- docs_html: HTML of retrieved documents
|
152 |
-
- output_query: Processed query
|
153 |
-
- output_language: Detected language
|
154 |
-
- related_contents: Related content
|
155 |
-
- graphs_html: HTML of relevant graphs
|
156 |
-
"""
|
157 |
-
# Log incoming question
|
158 |
-
date_now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
159 |
-
print(f">> NEW QUESTION ({date_now}) : {query}")
|
160 |
-
|
161 |
-
audience_prompt = init_audience(audience)
|
162 |
-
sources = sources or ["IPCC", "IPBES"]
|
163 |
-
reports = reports or []
|
164 |
-
|
165 |
-
# Prepare inputs for agent
|
166 |
-
inputs = {
|
167 |
-
"user_input": query,
|
168 |
-
"audience": audience_prompt,
|
169 |
-
"sources_input": sources,
|
170 |
-
"relevant_content_sources_selection": relevant_content_sources_selection,
|
171 |
-
"search_only": search_only,
|
172 |
-
"reports": reports
|
173 |
-
}
|
174 |
-
|
175 |
-
# Get streaming events from agent
|
176 |
-
result = agent.astream_events(inputs, version="v1")
|
177 |
-
|
178 |
-
# Initialize state variables
|
179 |
-
docs = []
|
180 |
-
related_contents = []
|
181 |
-
docs_html = ""
|
182 |
-
new_docs_html = ""
|
183 |
-
output_query = ""
|
184 |
-
output_language = ""
|
185 |
-
output_keywords = ""
|
186 |
-
start_streaming = False
|
187 |
-
graphs_html = ""
|
188 |
-
used_documents = []
|
189 |
-
answer_message_content = ""
|
190 |
-
|
191 |
-
# Define processing steps
|
192 |
-
steps_display = {
|
193 |
-
"categorize_intent": ("🔄️ Analyzing user message", True),
|
194 |
-
"transform_query": ("🔄️ Thinking step by step to answer the question", True),
|
195 |
-
"retrieve_documents": ("🔄️ Searching in the knowledge base", False),
|
196 |
-
"retrieve_local_data": ("🔄️ Searching in the knowledge base", False),
|
197 |
-
}
|
198 |
-
|
199 |
-
try:
|
200 |
-
# Process streaming events
|
201 |
-
async for event in result:
|
202 |
-
|
203 |
-
if "langgraph_node" in event["metadata"]:
|
204 |
-
node = event["metadata"]["langgraph_node"]
|
205 |
-
|
206 |
-
# Handle document retrieval
|
207 |
-
if event["event"] == "on_chain_end" and event["name"] in ["retrieve_documents","retrieve_local_data"] and event["data"]["output"] != None:
|
208 |
-
history, used_documents = handle_retrieved_documents(
|
209 |
-
event, history, used_documents
|
210 |
-
)
|
211 |
-
if event["event"] == "on_chain_end" and event["name"] == "answer_search" :
|
212 |
-
docs = event["data"]["input"]["documents"]
|
213 |
-
docs_html = convert_to_docs_to_html(docs)
|
214 |
-
related_contents = event["data"]["input"]["related_contents"]
|
215 |
-
|
216 |
-
# Handle intent categorization
|
217 |
-
elif (event["event"] == "on_chain_end" and
|
218 |
-
node == "categorize_intent" and
|
219 |
-
event["name"] == "_write"):
|
220 |
-
intent = event["data"]["output"]["intent"]
|
221 |
-
output_language = event["data"]["output"].get("language", "English")
|
222 |
-
history[-1].content = f"Language identified: {output_language}\nIntent identified: {intent}"
|
223 |
-
|
224 |
-
# Handle processing steps display
|
225 |
-
elif event["name"] in steps_display and event["event"] == "on_chain_start":
|
226 |
-
event_description, display_output = steps_display[node]
|
227 |
-
if (not hasattr(history[-1], 'metadata') or
|
228 |
-
history[-1].metadata["title"] != event_description):
|
229 |
-
history.append(ChatMessage(
|
230 |
-
role="assistant",
|
231 |
-
content="",
|
232 |
-
metadata={'title': event_description}
|
233 |
-
))
|
234 |
-
|
235 |
-
# Handle answer streaming
|
236 |
-
elif (event["name"] != "transform_query" and
|
237 |
-
event["event"] == "on_chat_model_stream" and
|
238 |
-
node in ["answer_rag","answer_rag_no_docs", "answer_search", "answer_chitchat"]):
|
239 |
-
history, start_streaming, answer_message_content = stream_answer(
|
240 |
-
history, event, start_streaming, answer_message_content
|
241 |
-
)
|
242 |
-
|
243 |
-
# Handle graph retrieval
|
244 |
-
elif event["name"] in ["retrieve_graphs", "retrieve_graphs_ai"] and event["event"] == "on_chain_end":
|
245 |
-
graphs_html = handle_retrieved_owid_graphs(event, graphs_html)
|
246 |
-
|
247 |
-
# Handle query transformation
|
248 |
-
if event["name"] == "transform_query" and event["event"] == "on_chain_end":
|
249 |
-
if hasattr(history[-1], "content"):
|
250 |
-
sub_questions = [q["question"] for q in event["data"]["output"]["questions_list"]]
|
251 |
-
history[-1].content += "Decompose question into sub-questions:\n\n - " + "\n - ".join(sub_questions)
|
252 |
-
|
253 |
-
yield history, docs_html, output_query, output_language, related_contents, graphs_html
|
254 |
-
|
255 |
-
except Exception as e:
|
256 |
-
print(f"Event {event} has failed")
|
257 |
-
raise gr.Error(str(e))
|
258 |
-
|
259 |
-
try:
|
260 |
-
# Log interaction to Azure if not in local environment
|
261 |
-
if os.getenv("GRADIO_ENV") != "local":
|
262 |
-
timestamp = str(datetime.now().timestamp())
|
263 |
-
prompt = history[1]["content"]
|
264 |
-
logs = {
|
265 |
-
"user_id": str(user_id),
|
266 |
-
"prompt": prompt,
|
267 |
-
"query": prompt,
|
268 |
-
"question": output_query,
|
269 |
-
"sources": sources,
|
270 |
-
"docs": serialize_docs(docs),
|
271 |
-
"answer": history[-1].content,
|
272 |
-
"time": timestamp,
|
273 |
-
}
|
274 |
-
log_on_azure(f"{timestamp}.json", logs, share_client)
|
275 |
-
except Exception as e:
|
276 |
-
print(f"Error logging on Azure Blob Storage: {e}")
|
277 |
-
error_msg = f"ClimateQ&A Error: {str(e)[:100]} - The error has been noted, try another question and if the error remains, you can contact us :)"
|
278 |
-
raise gr.Error(error_msg)
|
279 |
-
|
280 |
-
yield history, docs_html, output_query, output_language, related_contents, graphs_html
|
281 |
-
|
282 |
-
# Function to save feedback
|
283 |
-
def save_feedback(feed: str, user_id):
|
284 |
-
if len(feed) > 1:
|
285 |
-
timestamp = str(datetime.now().timestamp())
|
286 |
-
file = user_id + timestamp + ".json"
|
287 |
-
logs = {
|
288 |
-
"user_id": user_id,
|
289 |
-
"feedback": feed,
|
290 |
-
"time": timestamp,
|
291 |
-
}
|
292 |
-
log_on_azure(file, logs, share_client)
|
293 |
-
return "Feedback submitted, thank you!"
|
294 |
-
|
295 |
-
# Function to log data on Azure
|
296 |
-
def log_on_azure(file, logs, share_client):
|
297 |
-
logs = json.dumps(logs)
|
298 |
-
file_client = share_client.get_file_client(file)
|
299 |
-
file_client.upload_file(logs)
|
300 |
-
|
301 |
-
|
302 |
|
|
|
|
|
|
|
303 |
|
304 |
|
305 |
# --------------------------------------------------------------------
|
306 |
# Gradio
|
307 |
# --------------------------------------------------------------------
|
308 |
|
309 |
-
|
310 |
-
|
311 |
-
|
312 |
-
|
313 |
-
|
314 |
-
else:
|
315 |
-
print(data)
|
316 |
-
|
317 |
-
def save_graph(saved_graphs_state, embedding, category):
|
318 |
-
print(f"\nCategory:\n{saved_graphs_state}\n")
|
319 |
-
if category not in saved_graphs_state:
|
320 |
-
saved_graphs_state[category] = []
|
321 |
-
if embedding not in saved_graphs_state[category]:
|
322 |
-
saved_graphs_state[category].append(embedding)
|
323 |
-
return saved_graphs_state, gr.Button("Graph Saved")
|
324 |
-
|
325 |
-
|
326 |
-
# Functions to toggle visibility
|
327 |
-
def toggle_summary_visibility():
|
328 |
-
global summary_visible
|
329 |
-
summary_visible = not summary_visible
|
330 |
-
return gr.update(visible=summary_visible)
|
331 |
-
|
332 |
-
def toggle_relevant_visibility():
|
333 |
-
global relevant_visible
|
334 |
-
relevant_visible = not relevant_visible
|
335 |
-
return gr.update(visible=relevant_visible)
|
336 |
-
|
337 |
-
def change_completion_status(current_state):
|
338 |
-
current_state = 1 - current_state
|
339 |
-
return current_state
|
340 |
|
341 |
def update_sources_number_display(sources_textbox, figures_cards, current_graphs, papers_html):
|
342 |
sources_number = sources_textbox.count("<h2>")
|
@@ -351,240 +100,8 @@ def update_sources_number_display(sources_textbox, figures_cards, current_graphs
|
|
351 |
|
352 |
return gr.update(label=recommended_content_notif_label), gr.update(label=sources_notif_label), gr.update(label=figures_notif_label), gr.update(label=graphs_notif_label), gr.update(label=papers_notif_label)
|
353 |
|
354 |
-
def change_sample_questions(key):
|
355 |
-
index = list(QUESTIONS.keys()).index(key)
|
356 |
-
visible_bools = [False] * len(samples)
|
357 |
-
visible_bools[index] = True
|
358 |
-
return [gr.update(visible=visible_bools[i]) for i in range(len(samples))]
|
359 |
-
|
360 |
-
|
361 |
-
|
362 |
-
# Chat functions
|
363 |
-
def start_chat(query, history, search_only):
|
364 |
-
history = history + [ChatMessage(role="user", content=query)]
|
365 |
-
if not search_only:
|
366 |
-
return (gr.update(interactive=False), gr.update(selected=1), history, [])
|
367 |
-
else:
|
368 |
-
return (gr.update(interactive=False), gr.update(selected=2), history, [])
|
369 |
-
|
370 |
-
def finish_chat():
|
371 |
-
return gr.update(interactive=True, value="")
|
372 |
-
|
373 |
-
# Initialize visibility states
|
374 |
-
summary_visible = False
|
375 |
-
relevant_visible = False
|
376 |
-
|
377 |
-
# UI Layout Components
|
378 |
-
def create_chat_interface():
|
379 |
-
chatbot = gr.Chatbot(
|
380 |
-
value=[ChatMessage(role="assistant", content=init_prompt)],
|
381 |
-
type="messages",
|
382 |
-
show_copy_button=True,
|
383 |
-
show_label=False,
|
384 |
-
elem_id="chatbot",
|
385 |
-
layout="panel",
|
386 |
-
avatar_images=(None, "https://i.ibb.co/YNyd5W2/logo4.png"),
|
387 |
-
max_height="80vh",
|
388 |
-
height="100vh"
|
389 |
-
)
|
390 |
-
|
391 |
-
with gr.Row(elem_id="input-message"):
|
392 |
-
|
393 |
-
textbox = gr.Textbox(
|
394 |
-
placeholder="Ask me anything here!",
|
395 |
-
show_label=False,
|
396 |
-
scale=12,
|
397 |
-
lines=1,
|
398 |
-
interactive=True,
|
399 |
-
elem_id="input-textbox"
|
400 |
-
)
|
401 |
-
|
402 |
-
config_button = gr.Button("", elem_id="config-button")
|
403 |
-
|
404 |
-
return chatbot, textbox, config_button
|
405 |
-
|
406 |
-
def create_examples_tab():
|
407 |
-
examples_hidden = gr.Textbox(visible=False)
|
408 |
-
first_key = list(QUESTIONS.keys())[0]
|
409 |
-
dropdown_samples = gr.Dropdown(
|
410 |
-
choices=QUESTIONS.keys(),
|
411 |
-
value=first_key,
|
412 |
-
interactive=True,
|
413 |
-
label="Select a category of sample questions",
|
414 |
-
elem_id="dropdown-samples"
|
415 |
-
)
|
416 |
-
|
417 |
-
samples = []
|
418 |
-
for i, key in enumerate(QUESTIONS.keys()):
|
419 |
-
examples_visible = (i == 0)
|
420 |
-
with gr.Row(visible=examples_visible) as group_examples:
|
421 |
-
examples_questions = gr.Examples(
|
422 |
-
examples=QUESTIONS[key],
|
423 |
-
inputs=[examples_hidden],
|
424 |
-
examples_per_page=8,
|
425 |
-
run_on_click=False,
|
426 |
-
elem_id=f"examples{i}",
|
427 |
-
api_name=f"examples{i}"
|
428 |
-
)
|
429 |
-
samples.append(group_examples)
|
430 |
-
|
431 |
-
return examples_hidden, dropdown_samples, samples
|
432 |
-
|
433 |
-
def create_figures_tab():
|
434 |
-
sources_raw = gr.State()
|
435 |
-
new_figures = gr.State([])
|
436 |
-
used_figures = gr.State([])
|
437 |
-
|
438 |
-
with Modal(visible=False, elem_id="modal_figure_galery") as figure_modal:
|
439 |
-
gallery_component = gr.Gallery(
|
440 |
-
object_fit='scale-down',
|
441 |
-
elem_id="gallery-component",
|
442 |
-
height="80vh"
|
443 |
-
)
|
444 |
-
|
445 |
-
show_full_size_figures = gr.Button(
|
446 |
-
"Show figures in full size",
|
447 |
-
elem_id="show-figures",
|
448 |
-
interactive=True
|
449 |
-
)
|
450 |
-
show_full_size_figures.click(
|
451 |
-
lambda: Modal(visible=True),
|
452 |
-
None,
|
453 |
-
figure_modal
|
454 |
-
)
|
455 |
-
|
456 |
-
figures_cards = gr.HTML(show_label=False, elem_id="sources-figures")
|
457 |
-
|
458 |
-
return sources_raw, new_figures, used_figures, gallery_component, figures_cards, figure_modal
|
459 |
-
|
460 |
-
def create_papers_tab():
|
461 |
-
with gr.Accordion(
|
462 |
-
visible=True,
|
463 |
-
elem_id="papers-summary-popup",
|
464 |
-
label="See summary of relevant papers",
|
465 |
-
open=False
|
466 |
-
) as summary_popup:
|
467 |
-
papers_summary = gr.Markdown("", visible=True, elem_id="papers-summary")
|
468 |
-
|
469 |
-
with gr.Accordion(
|
470 |
-
visible=True,
|
471 |
-
elem_id="papers-relevant-popup",
|
472 |
-
label="See relevant papers",
|
473 |
-
open=False
|
474 |
-
) as relevant_popup:
|
475 |
-
papers_html = gr.HTML(show_label=False, elem_id="papers-textbox")
|
476 |
-
|
477 |
-
btn_citations_network = gr.Button("Explore papers citations network")
|
478 |
-
with Modal(visible=False) as papers_modal:
|
479 |
-
citations_network = gr.HTML(
|
480 |
-
"<h3>Citations Network Graph</h3>",
|
481 |
-
visible=True,
|
482 |
-
elem_id="papers-citations-network"
|
483 |
-
)
|
484 |
-
btn_citations_network.click(
|
485 |
-
lambda: Modal(visible=True),
|
486 |
-
None,
|
487 |
-
papers_modal
|
488 |
-
)
|
489 |
-
|
490 |
-
return papers_summary, papers_html, citations_network, papers_modal
|
491 |
-
|
492 |
-
def create_config_modal(config_open):
|
493 |
-
with Modal(visible=False, elem_id="modal-config") as config_modal:
|
494 |
-
gr.Markdown("Reminders: You can talk in any language, ClimateQ&A is multi-lingual!")
|
495 |
-
|
496 |
-
dropdown_sources = gr.CheckboxGroup(
|
497 |
-
choices=["IPCC", "IPBES", "IPOS"],
|
498 |
-
label="Select source (by default search in all sources)",
|
499 |
-
value=["IPCC"],
|
500 |
-
interactive=True
|
501 |
-
)
|
502 |
-
|
503 |
-
dropdown_reports = gr.Dropdown(
|
504 |
-
choices=POSSIBLE_REPORTS,
|
505 |
-
label="Or select specific reports",
|
506 |
-
multiselect=True,
|
507 |
-
value=None,
|
508 |
-
interactive=True
|
509 |
-
)
|
510 |
-
|
511 |
-
dropdown_external_sources = gr.CheckboxGroup(
|
512 |
-
choices=["Figures (IPCC/IPBES)", "Papers (OpenAlex)", "Graphs (OurWorldInData)","POC region"],
|
513 |
-
label="Select database to search for relevant content",
|
514 |
-
value=["Figures (IPCC/IPBES)","POC region"],
|
515 |
-
interactive=True
|
516 |
-
)
|
517 |
-
|
518 |
-
search_only = gr.Checkbox(
|
519 |
-
label="Search only for recommended content without chating",
|
520 |
-
value=False,
|
521 |
-
interactive=True,
|
522 |
-
elem_id="checkbox-chat"
|
523 |
-
)
|
524 |
-
|
525 |
-
dropdown_audience = gr.Dropdown(
|
526 |
-
choices=["Children", "General public", "Experts"],
|
527 |
-
label="Select audience",
|
528 |
-
value="Experts",
|
529 |
-
interactive=True
|
530 |
-
)
|
531 |
-
|
532 |
-
after = gr.Slider(
|
533 |
-
minimum=1950,
|
534 |
-
maximum=2023,
|
535 |
-
step=1,
|
536 |
-
value=1960,
|
537 |
-
label="Publication date",
|
538 |
-
show_label=True,
|
539 |
-
interactive=True,
|
540 |
-
elem_id="date-papers",
|
541 |
-
visible=False
|
542 |
-
)
|
543 |
-
|
544 |
-
output_query = gr.Textbox(
|
545 |
-
label="Query used for retrieval",
|
546 |
-
show_label=True,
|
547 |
-
elem_id="reformulated-query",
|
548 |
-
lines=2,
|
549 |
-
interactive=False,
|
550 |
-
visible=False
|
551 |
-
)
|
552 |
-
|
553 |
-
output_language = gr.Textbox(
|
554 |
-
label="Language",
|
555 |
-
show_label=True,
|
556 |
-
elem_id="language",
|
557 |
-
lines=1,
|
558 |
-
interactive=False,
|
559 |
-
visible=False
|
560 |
-
)
|
561 |
-
|
562 |
-
dropdown_external_sources.change(
|
563 |
-
lambda x: gr.update(visible="Papers (OpenAlex)" in x),
|
564 |
-
inputs=[dropdown_external_sources],
|
565 |
-
outputs=[after]
|
566 |
-
)
|
567 |
-
|
568 |
-
close_config_modal = gr.Button("Validate and Close", elem_id="close-config-modal")
|
569 |
-
close_config_modal.click(
|
570 |
-
fn=update_config_modal_visibility,
|
571 |
-
inputs=[config_open],
|
572 |
-
outputs=[config_modal, config_open]
|
573 |
-
)
|
574 |
-
|
575 |
-
return {
|
576 |
-
"config_open" : config_open,
|
577 |
-
"config_modal": config_modal,
|
578 |
-
"dropdown_sources": dropdown_sources,
|
579 |
-
"dropdown_reports": dropdown_reports,
|
580 |
-
"dropdown_external_sources": dropdown_external_sources,
|
581 |
-
"search_only": search_only,
|
582 |
-
"dropdown_audience": dropdown_audience,
|
583 |
-
"after": after,
|
584 |
-
"output_query": output_query,
|
585 |
-
"output_language": output_language,
|
586 |
-
}
|
587 |
|
|
|
588 |
def cqa_tab(tab_name):
|
589 |
# State variables
|
590 |
current_graphs = gr.State([])
|
@@ -599,7 +116,7 @@ def cqa_tab(tab_name):
|
|
599 |
with gr.Tabs(elem_id="right_panel_tab") as tabs:
|
600 |
# Examples tab
|
601 |
with gr.TabItem("Examples", elem_id="tab-examples", id=0):
|
602 |
-
examples_hidden
|
603 |
|
604 |
# Sources tab
|
605 |
with gr.Tab("Sources", elem_id="tab-sources", id=1) as tab_sources:
|
@@ -631,8 +148,6 @@ def cqa_tab(tab_name):
|
|
631 |
"new_figures": new_figures,
|
632 |
"current_graphs": current_graphs,
|
633 |
"examples_hidden": examples_hidden,
|
634 |
-
"dropdown_samples": dropdown_samples,
|
635 |
-
"samples": samples,
|
636 |
"sources_textbox": sources_textbox,
|
637 |
"figures_cards": figures_cards,
|
638 |
"gallery_component": gallery_component,
|
@@ -648,27 +163,7 @@ def cqa_tab(tab_name):
|
|
648 |
"graph_container": graphs_container
|
649 |
}
|
650 |
|
651 |
-
|
652 |
-
with gr.Tab("About", elem_classes="max-height other-tabs"):
|
653 |
-
with gr.Row():
|
654 |
-
with gr.Column(scale=1):
|
655 |
-
gr.Markdown(
|
656 |
-
"""
|
657 |
-
### More info
|
658 |
-
- See more info at [https://climateqa.com](https://climateqa.com/docs/intro/)
|
659 |
-
- Feedbacks on this [form](https://forms.office.com/e/1Yzgxm6jbp)
|
660 |
-
|
661 |
-
### Citation
|
662 |
-
"""
|
663 |
-
)
|
664 |
-
with gr.Accordion(CITATION_LABEL, elem_id="citation", open=False):
|
665 |
-
gr.Textbox(
|
666 |
-
value=CITATION_TEXT,
|
667 |
-
label="",
|
668 |
-
interactive=False,
|
669 |
-
show_copy_button=True,
|
670 |
-
lines=len(CITATION_TEXT.split('\n')),
|
671 |
-
)
|
672 |
|
673 |
def event_handling(
|
674 |
main_tab_components,
|
@@ -682,8 +177,6 @@ def event_handling(
|
|
682 |
new_figures = main_tab_components["new_figures"]
|
683 |
current_graphs = main_tab_components["current_graphs"]
|
684 |
examples_hidden = main_tab_components["examples_hidden"]
|
685 |
-
dropdown_samples = main_tab_components["dropdown_samples"]
|
686 |
-
samples = main_tab_components["samples"]
|
687 |
sources_textbox = main_tab_components["sources_textbox"]
|
688 |
figures_cards = main_tab_components["figures_cards"]
|
689 |
gallery_component = main_tab_components["gallery_component"]
|
@@ -708,183 +201,62 @@ def event_handling(
|
|
708 |
after = config_components["after"]
|
709 |
output_query = config_components["output_query"]
|
710 |
output_language = config_components["output_language"]
|
|
|
711 |
|
712 |
new_sources_hmtl = gr.State([])
|
713 |
|
714 |
|
715 |
|
716 |
-
config_button
|
717 |
-
|
718 |
-
|
719 |
-
|
720 |
-
|
721 |
-
|
722 |
-
|
723 |
(textbox
|
724 |
-
.submit(start_chat, [textbox, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name=f"
|
725 |
-
.then(chat, [textbox, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name="
|
726 |
-
.then(finish_chat, None, [textbox], api_name=f"
|
727 |
)
|
728 |
-
|
729 |
-
|
730 |
-
|
731 |
(examples_hidden
|
732 |
-
.change(start_chat, [examples_hidden, chatbot, search_only], [
|
733 |
-
.then(chat, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name="
|
734 |
-
.then(finish_chat, None, [
|
735 |
)
|
736 |
-
|
737 |
new_sources_hmtl.change(lambda x : x, inputs = [new_sources_hmtl], outputs = [sources_textbox])
|
738 |
current_graphs.change(lambda x: x, inputs=[current_graphs], outputs=[graphs_container])
|
739 |
new_figures.change(process_figures, inputs=[sources_raw, new_figures], outputs=[sources_raw, figures_cards, gallery_component])
|
740 |
|
741 |
# Update sources numbers
|
742 |
-
|
743 |
-
|
744 |
-
current_graphs.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])
|
745 |
-
papers_html.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])
|
746 |
-
|
747 |
-
# Other questions examples
|
748 |
-
dropdown_samples.change(change_sample_questions, dropdown_samples, samples)
|
749 |
-
|
750 |
-
# Search for papers
|
751 |
-
textbox.submit(find_papers, [textbox, after, dropdown_external_sources], [papers_html, citations_network, papers_summary])
|
752 |
-
examples_hidden.change(find_papers, [examples_hidden, after, dropdown_external_sources], [papers_html, citations_network, papers_summary])
|
753 |
|
|
|
|
|
|
|
|
|
754 |
|
755 |
def main_ui():
|
756 |
-
config_open = gr.State(
|
757 |
with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme, elem_id="main-component") as demo:
|
758 |
-
config_components = create_config_modal(
|
|
|
759 |
with gr.Tabs():
|
760 |
cqa_components = cqa_tab(tab_name = "ClimateQ&A")
|
761 |
-
local_cqa_components = cqa_tab(tab_name = "Beta - POC Adapt'Action")
|
762 |
|
763 |
-
|
764 |
|
765 |
event_handling(cqa_components, config_components, tab_name = 'ClimateQ&A')
|
766 |
-
event_handling(local_cqa_components, config_components, tab_name = 'Beta - POC Adapt\'Action')
|
|
|
767 |
demo.queue()
|
768 |
|
769 |
return demo
|
770 |
|
771 |
-
|
772 |
-
|
773 |
-
|
774 |
-
|
775 |
-
|
776 |
-
# # Main UI Assembly
|
777 |
-
# with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme, elem_id="main-component") as demo:
|
778 |
-
|
779 |
-
# # State variables
|
780 |
-
# # chat_completed_state = gr.State(0)
|
781 |
-
# current_graphs = gr.State([])
|
782 |
-
# saved_graphs = gr.State({})
|
783 |
-
# new_sources_hmtl = gr.State([])
|
784 |
-
|
785 |
-
# config_open = gr.State(False)
|
786 |
-
|
787 |
-
# with gr.Tab("ClimateQ&A"):
|
788 |
-
# with gr.Row(elem_id="chatbot-row"):
|
789 |
-
# # Left column - Chat interface
|
790 |
-
# with gr.Column(scale=2):
|
791 |
-
# chatbot, textbox, config_button = create_chat_interface()
|
792 |
-
|
793 |
-
# # Right column - Content panels
|
794 |
-
# with gr.Column(scale=2, variant="panel", elem_id="right-panel"):
|
795 |
-
# with gr.Tabs(elem_id="right_panel_tab") as tabs:
|
796 |
-
# # Examples tab
|
797 |
-
# with gr.TabItem("Examples", elem_id="tab-examples", id=0):
|
798 |
-
# examples_hidden, dropdown_samples, samples = create_examples_tab()
|
799 |
-
|
800 |
-
# # Sources tab
|
801 |
-
# with gr.Tab("Sources", elem_id="tab-sources", id=1) as tab_sources:
|
802 |
-
# sources_textbox = gr.HTML(show_label=False, elem_id="sources-textbox")
|
803 |
-
|
804 |
-
|
805 |
-
# # Recommended content tab
|
806 |
-
# with gr.Tab("Recommended content", elem_id="tab-recommended_content", id=2) as tab_recommended_content:
|
807 |
-
# with gr.Tabs(elem_id="group-subtabs") as tabs_recommended_content:
|
808 |
-
# # Figures subtab
|
809 |
-
# with gr.Tab("Figures", elem_id="tab-figures", id=3) as tab_figures:
|
810 |
-
# sources_raw, new_figures, used_figures, gallery_component, figures_cards, figure_modal = create_figures_tab()
|
811 |
-
|
812 |
-
# # Papers subtab
|
813 |
-
# with gr.Tab("Papers", elem_id="tab-citations", id=4) as tab_papers:
|
814 |
-
# papers_summary, papers_html, citations_network, papers_modal = create_papers_tab()
|
815 |
-
|
816 |
-
# # Graphs subtab
|
817 |
-
# with gr.Tab("Graphs", elem_id="tab-graphs", id=5) as tab_graphs:
|
818 |
-
# graphs_container = gr.HTML(
|
819 |
-
# "<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>",
|
820 |
-
# elem_id="graphs-container"
|
821 |
-
# )
|
822 |
-
|
823 |
-
|
824 |
-
|
825 |
-
# with gr.Tab("About", elem_classes="max-height other-tabs"):
|
826 |
-
# with gr.Row():
|
827 |
-
# with gr.Column(scale=1):
|
828 |
-
# gr.Markdown(
|
829 |
-
# """
|
830 |
-
# ### More info
|
831 |
-
# - See more info at [https://climateqa.com](https://climateqa.com/docs/intro/)
|
832 |
-
# - Feedbacks on this [form](https://forms.office.com/e/1Yzgxm6jbp)
|
833 |
-
|
834 |
-
# ### Citation
|
835 |
-
# """
|
836 |
-
# )
|
837 |
-
# with gr.Accordion(CITATION_LABEL, elem_id="citation", open=False):
|
838 |
-
# gr.Textbox(
|
839 |
-
# value=CITATION_TEXT,
|
840 |
-
# label="",
|
841 |
-
# interactive=False,
|
842 |
-
# show_copy_button=True,
|
843 |
-
# lines=len(CITATION_TEXT.split('\n')),
|
844 |
-
# )
|
845 |
-
# # Configuration pannel
|
846 |
-
# config_modal, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only, dropdown_audience, after, output_query, output_language = create_config_modal(config_open)
|
847 |
-
|
848 |
-
# # Event handlers
|
849 |
-
# config_button.click(
|
850 |
-
# fn=update_config_modal_visibility,
|
851 |
-
# inputs=[config_open],
|
852 |
-
# outputs=[config_modal, config_open]
|
853 |
-
# )
|
854 |
-
|
855 |
-
|
856 |
-
# (textbox
|
857 |
-
# .submit(start_chat, [textbox, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name="start_chat_textbox")
|
858 |
-
# .then(chat, [textbox, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name="chat_textbox")
|
859 |
-
# .then(finish_chat, None, [textbox], api_name="finish_chat_textbox")
|
860 |
-
# )
|
861 |
-
|
862 |
-
|
863 |
-
|
864 |
-
# (examples_hidden
|
865 |
-
# .change(start_chat, [examples_hidden, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name="start_chat_examples")
|
866 |
-
# .then(chat, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name="chat_textbox")
|
867 |
-
# .then(finish_chat, None, [textbox], api_name="finish_chat_examples")
|
868 |
-
# )
|
869 |
-
|
870 |
-
# new_sources_hmtl.change(lambda x : x, inputs = [new_sources_hmtl], outputs = [sources_textbox])
|
871 |
-
# new_figures.change(process_figures, inputs=[sources_raw, new_figures], outputs=[sources_raw, figures_cards, gallery_component])
|
872 |
-
# current_graphs.change(lambda x: x, inputs=[current_graphs], outputs=[graphs_container])
|
873 |
-
|
874 |
-
# # Update sources numbers
|
875 |
-
# sources_textbox.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])
|
876 |
-
# figures_cards.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])
|
877 |
-
# current_graphs.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])
|
878 |
-
# papers_html.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])
|
879 |
-
|
880 |
-
# # Other questions examples
|
881 |
-
# dropdown_samples.change(change_sample_questions, dropdown_samples, samples)
|
882 |
-
|
883 |
-
# # Search for papers
|
884 |
-
# textbox.submit(find_papers, [textbox, after, dropdown_external_sources], [papers_html, citations_network, papers_summary])
|
885 |
-
# examples_hidden.change(find_papers, [examples_hidden, after, dropdown_external_sources], [papers_html, citations_network, papers_summary])
|
886 |
-
|
887 |
-
# demo.queue()
|
888 |
|
889 |
demo = main_ui()
|
890 |
demo.launch(ssr_mode=False)
|
|
|
1 |
# Import necessary libraries
|
2 |
import os
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
3 |
import gradio as gr
|
4 |
+
|
|
|
|
|
5 |
from azure.storage.fileshare import ShareServiceClient
|
6 |
|
7 |
# Import custom modules
|
|
|
9 |
from climateqa.engine.llm import get_llm
|
10 |
from climateqa.engine.vectorstore import get_pinecone_vectorstore
|
11 |
from climateqa.engine.reranker import get_reranker
|
|
|
|
|
|
|
12 |
from climateqa.engine.graph import make_graph_agent
|
13 |
from climateqa.engine.chains.retrieve_papers import find_papers
|
14 |
+
from climateqa.chat import start_chat, chat_stream, finish_chat
|
15 |
+
|
16 |
+
from front.tabs import (create_config_modal, create_examples_tab, create_papers_tab, create_figures_tab, create_chat_interface, create_about_tab)
|
17 |
+
from front.utils import process_figures
|
18 |
+
|
19 |
+
|
|
|
|
|
20 |
from utils import create_user_id
|
|
|
21 |
import logging
|
22 |
|
23 |
logging.basicConfig(level=logging.WARNING)
|
|
|
25 |
logging.getLogger().setLevel(logging.WARNING)
|
26 |
|
27 |
|
28 |
+
|
29 |
# Load environment variables in local mode
|
30 |
try:
|
31 |
from dotenv import load_dotenv
|
|
|
41 |
font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
|
42 |
)
|
43 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
44 |
# Azure Blob Storage credentials
|
45 |
account_key = os.environ["BLOB_ACCOUNT_KEY"]
|
46 |
if len(account_key) == 86:
|
|
|
58 |
|
59 |
user_id = create_user_id()
|
60 |
|
61 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
62 |
|
63 |
# Create vectorstore and retriever
|
64 |
embeddings_function = get_embeddings_function()
|
|
|
71 |
|
72 |
agent = make_graph_agent(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, vectorstore_region = vectorstore_region, reranker=reranker, threshold_docs=0)#TODO put back default 0.2
|
73 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
74 |
|
75 |
+
async def chat(query, history, audience, sources, reports, relevant_content_sources_selection, search_only):
|
76 |
+
async for event in chat_stream(agent, query, history, audience, sources, reports, relevant_content_sources_selection, search_only, share_client, user_id):
|
77 |
+
yield event
|
78 |
|
79 |
|
80 |
# --------------------------------------------------------------------
|
81 |
# Gradio
|
82 |
# --------------------------------------------------------------------
|
83 |
|
84 |
+
# Function to update modal visibility
|
85 |
+
def update_config_modal_visibility(config_open):
|
86 |
+
new_config_visibility_status = not config_open
|
87 |
+
return gr.update(visible=new_config_visibility_status), new_config_visibility_status
|
88 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
89 |
|
90 |
def update_sources_number_display(sources_textbox, figures_cards, current_graphs, papers_html):
|
91 |
sources_number = sources_textbox.count("<h2>")
|
|
|
100 |
|
101 |
return gr.update(label=recommended_content_notif_label), gr.update(label=sources_notif_label), gr.update(label=figures_notif_label), gr.update(label=graphs_notif_label), gr.update(label=papers_notif_label)
|
102 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
103 |
|
104 |
+
# # UI Layout Components
|
105 |
def cqa_tab(tab_name):
|
106 |
# State variables
|
107 |
current_graphs = gr.State([])
|
|
|
116 |
with gr.Tabs(elem_id="right_panel_tab") as tabs:
|
117 |
# Examples tab
|
118 |
with gr.TabItem("Examples", elem_id="tab-examples", id=0):
|
119 |
+
examples_hidden = create_examples_tab()
|
120 |
|
121 |
# Sources tab
|
122 |
with gr.Tab("Sources", elem_id="tab-sources", id=1) as tab_sources:
|
|
|
148 |
"new_figures": new_figures,
|
149 |
"current_graphs": current_graphs,
|
150 |
"examples_hidden": examples_hidden,
|
|
|
|
|
151 |
"sources_textbox": sources_textbox,
|
152 |
"figures_cards": figures_cards,
|
153 |
"gallery_component": gallery_component,
|
|
|
163 |
"graph_container": graphs_container
|
164 |
}
|
165 |
|
166 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
167 |
|
168 |
def event_handling(
|
169 |
main_tab_components,
|
|
|
177 |
new_figures = main_tab_components["new_figures"]
|
178 |
current_graphs = main_tab_components["current_graphs"]
|
179 |
examples_hidden = main_tab_components["examples_hidden"]
|
|
|
|
|
180 |
sources_textbox = main_tab_components["sources_textbox"]
|
181 |
figures_cards = main_tab_components["figures_cards"]
|
182 |
gallery_component = main_tab_components["gallery_component"]
|
|
|
201 |
after = config_components["after"]
|
202 |
output_query = config_components["output_query"]
|
203 |
output_language = config_components["output_language"]
|
204 |
+
close_config_modal = config_components["close_config_modal_button"]
|
205 |
|
206 |
new_sources_hmtl = gr.State([])
|
207 |
|
208 |
|
209 |
|
210 |
+
for button in [config_button, close_config_modal]:
|
211 |
+
button.click(
|
212 |
+
fn=update_config_modal_visibility,
|
213 |
+
inputs=[config_open],
|
214 |
+
outputs=[config_modal, config_open]
|
215 |
+
)
|
216 |
+
# Event for textbox
|
217 |
(textbox
|
218 |
+
.submit(start_chat, [textbox, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{textbox.elem_id}")
|
219 |
+
.then(chat, [textbox, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{textbox.elem_id}")
|
220 |
+
.then(finish_chat, None, [textbox], api_name=f"finish_chat_{textbox.elem_id}")
|
221 |
)
|
222 |
+
# Event for examples_hidden
|
|
|
|
|
223 |
(examples_hidden
|
224 |
+
.change(start_chat, [examples_hidden, chatbot, search_only], [examples_hidden, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{examples_hidden.elem_id}")
|
225 |
+
.then(chat, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
|
226 |
+
.then(finish_chat, None, [examples_hidden], api_name=f"finish_chat_{examples_hidden.elem_id}")
|
227 |
)
|
228 |
+
|
229 |
new_sources_hmtl.change(lambda x : x, inputs = [new_sources_hmtl], outputs = [sources_textbox])
|
230 |
current_graphs.change(lambda x: x, inputs=[current_graphs], outputs=[graphs_container])
|
231 |
new_figures.change(process_figures, inputs=[sources_raw, new_figures], outputs=[sources_raw, figures_cards, gallery_component])
|
232 |
|
233 |
# Update sources numbers
|
234 |
+
for component in [sources_textbox, figures_cards, current_graphs, papers_html]:
|
235 |
+
component.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
236 |
|
237 |
+
|
238 |
+
# Search for papers
|
239 |
+
for component in [textbox, examples_hidden]:
|
240 |
+
component.submit(find_papers, [component, after, dropdown_external_sources], [papers_html, citations_network, papers_summary])
|
241 |
|
242 |
def main_ui():
|
243 |
+
# config_open = gr.State(True)
|
244 |
with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme, elem_id="main-component") as demo:
|
245 |
+
config_components = create_config_modal()
|
246 |
+
|
247 |
with gr.Tabs():
|
248 |
cqa_components = cqa_tab(tab_name = "ClimateQ&A")
|
249 |
+
# local_cqa_components = cqa_tab(tab_name = "Beta - POC Adapt'Action")
|
250 |
|
251 |
+
create_about_tab()
|
252 |
|
253 |
event_handling(cqa_components, config_components, tab_name = 'ClimateQ&A')
|
254 |
+
# event_handling(local_cqa_components, config_components, tab_name = 'Beta - POC Adapt\'Action')
|
255 |
+
|
256 |
demo.queue()
|
257 |
|
258 |
return demo
|
259 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
260 |
|
261 |
demo = main_ui()
|
262 |
demo.launch(ssr_mode=False)
|
climateqa/chat.py
ADDED
@@ -0,0 +1,197 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from datetime import datetime
|
3 |
+
import gradio as gr
|
4 |
+
# from .agent import agent
|
5 |
+
from gradio import ChatMessage
|
6 |
+
from langgraph.graph.state import CompiledStateGraph
|
7 |
+
import json
|
8 |
+
|
9 |
+
from .handle_stream_events import (
|
10 |
+
init_audience,
|
11 |
+
handle_retrieved_documents,
|
12 |
+
convert_to_docs_to_html,
|
13 |
+
stream_answer,
|
14 |
+
handle_retrieved_owid_graphs,
|
15 |
+
serialize_docs,
|
16 |
+
)
|
17 |
+
|
18 |
+
# Function to log data on Azure
|
19 |
+
def log_on_azure(file, logs, share_client):
|
20 |
+
logs = json.dumps(logs)
|
21 |
+
file_client = share_client.get_file_client(file)
|
22 |
+
file_client.upload_file(logs)
|
23 |
+
|
24 |
+
# Chat functions
|
25 |
+
def start_chat(query, history, search_only):
|
26 |
+
history = history + [ChatMessage(role="user", content=query)]
|
27 |
+
if not search_only:
|
28 |
+
return (gr.update(interactive=False), gr.update(selected=1), history, [])
|
29 |
+
else:
|
30 |
+
return (gr.update(interactive=False), gr.update(selected=2), history, [])
|
31 |
+
|
32 |
+
def finish_chat():
|
33 |
+
return gr.update(interactive=True, value="")
|
34 |
+
|
35 |
+
def log_interaction_to_azure(history, output_query, sources, docs, share_client, user_id):
|
36 |
+
try:
|
37 |
+
# Log interaction to Azure if not in local environment
|
38 |
+
if os.getenv("GRADIO_ENV") != "local":
|
39 |
+
timestamp = str(datetime.now().timestamp())
|
40 |
+
prompt = history[1]["content"]
|
41 |
+
logs = {
|
42 |
+
"user_id": str(user_id),
|
43 |
+
"prompt": prompt,
|
44 |
+
"query": prompt,
|
45 |
+
"question": output_query,
|
46 |
+
"sources": sources,
|
47 |
+
"docs": serialize_docs(docs),
|
48 |
+
"answer": history[-1].content,
|
49 |
+
"time": timestamp,
|
50 |
+
}
|
51 |
+
log_on_azure(f"{timestamp}.json", logs, share_client)
|
52 |
+
except Exception as e:
|
53 |
+
print(f"Error logging on Azure Blob Storage: {e}")
|
54 |
+
error_msg = f"ClimateQ&A Error: {str(e)[:100]} - The error has been noted, try another question and if the error remains, you can contact us :)"
|
55 |
+
raise gr.Error(error_msg)
|
56 |
+
|
57 |
+
# Main chat function
|
58 |
+
async def chat_stream(
|
59 |
+
agent : CompiledStateGraph,
|
60 |
+
query: str,
|
61 |
+
history: list[ChatMessage],
|
62 |
+
audience: str,
|
63 |
+
sources: list[str],
|
64 |
+
reports: list[str],
|
65 |
+
relevant_content_sources_selection: list[str],
|
66 |
+
search_only: bool,
|
67 |
+
share_client,
|
68 |
+
user_id: str
|
69 |
+
) -> tuple[list, str, str, str, list, str]:
|
70 |
+
"""Process a chat query and return response with relevant sources and visualizations.
|
71 |
+
|
72 |
+
Args:
|
73 |
+
query (str): The user's question
|
74 |
+
history (list): Chat message history
|
75 |
+
audience (str): Target audience type
|
76 |
+
sources (list): Knowledge base sources to search
|
77 |
+
reports (list): Specific reports to search within sources
|
78 |
+
relevant_content_sources_selection (list): Types of content to retrieve (figures, papers, etc)
|
79 |
+
search_only (bool): Whether to only search without generating answer
|
80 |
+
|
81 |
+
Yields:
|
82 |
+
tuple: Contains:
|
83 |
+
- history: Updated chat history
|
84 |
+
- docs_html: HTML of retrieved documents
|
85 |
+
- output_query: Processed query
|
86 |
+
- output_language: Detected language
|
87 |
+
- related_contents: Related content
|
88 |
+
- graphs_html: HTML of relevant graphs
|
89 |
+
"""
|
90 |
+
# Log incoming question
|
91 |
+
date_now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
92 |
+
print(f">> NEW QUESTION ({date_now}) : {query}")
|
93 |
+
|
94 |
+
audience_prompt = init_audience(audience)
|
95 |
+
sources = sources or ["IPCC", "IPBES"]
|
96 |
+
reports = reports or []
|
97 |
+
|
98 |
+
# Prepare inputs for agent
|
99 |
+
inputs = {
|
100 |
+
"user_input": query,
|
101 |
+
"audience": audience_prompt,
|
102 |
+
"sources_input": sources,
|
103 |
+
"relevant_content_sources_selection": relevant_content_sources_selection,
|
104 |
+
"search_only": search_only,
|
105 |
+
"reports": reports
|
106 |
+
}
|
107 |
+
|
108 |
+
# Get streaming events from agent
|
109 |
+
result = agent.astream_events(inputs, version="v1")
|
110 |
+
|
111 |
+
# Initialize state variables
|
112 |
+
docs = []
|
113 |
+
related_contents = []
|
114 |
+
docs_html = ""
|
115 |
+
new_docs_html = ""
|
116 |
+
output_query = ""
|
117 |
+
output_language = ""
|
118 |
+
output_keywords = ""
|
119 |
+
start_streaming = False
|
120 |
+
graphs_html = ""
|
121 |
+
used_documents = []
|
122 |
+
answer_message_content = ""
|
123 |
+
|
124 |
+
# Define processing steps
|
125 |
+
steps_display = {
|
126 |
+
"categorize_intent": ("🔄️ Analyzing user message", True),
|
127 |
+
"transform_query": ("🔄️ Thinking step by step to answer the question", True),
|
128 |
+
"retrieve_documents": ("🔄️ Searching in the knowledge base", False),
|
129 |
+
"retrieve_local_data": ("🔄️ Searching in the knowledge base", False),
|
130 |
+
}
|
131 |
+
|
132 |
+
try:
|
133 |
+
# Process streaming events
|
134 |
+
async for event in result:
|
135 |
+
|
136 |
+
if "langgraph_node" in event["metadata"]:
|
137 |
+
node = event["metadata"]["langgraph_node"]
|
138 |
+
|
139 |
+
# Handle document retrieval
|
140 |
+
if event["event"] == "on_chain_end" and event["name"] in ["retrieve_documents","retrieve_local_data"] and event["data"]["output"] != None:
|
141 |
+
history, used_documents = handle_retrieved_documents(
|
142 |
+
event, history, used_documents
|
143 |
+
)
|
144 |
+
if event["event"] == "on_chain_end" and event["name"] == "answer_search" :
|
145 |
+
docs = event["data"]["input"]["documents"]
|
146 |
+
docs_html = convert_to_docs_to_html(docs)
|
147 |
+
related_contents = event["data"]["input"]["related_contents"]
|
148 |
+
|
149 |
+
# Handle intent categorization
|
150 |
+
elif (event["event"] == "on_chain_end" and
|
151 |
+
node == "categorize_intent" and
|
152 |
+
event["name"] == "_write"):
|
153 |
+
intent = event["data"]["output"]["intent"]
|
154 |
+
output_language = event["data"]["output"].get("language", "English")
|
155 |
+
history[-1].content = f"Language identified: {output_language}\nIntent identified: {intent}"
|
156 |
+
|
157 |
+
# Handle processing steps display
|
158 |
+
elif event["name"] in steps_display and event["event"] == "on_chain_start":
|
159 |
+
event_description, display_output = steps_display[node]
|
160 |
+
if (not hasattr(history[-1], 'metadata') or
|
161 |
+
history[-1].metadata["title"] != event_description):
|
162 |
+
history.append(ChatMessage(
|
163 |
+
role="assistant",
|
164 |
+
content="",
|
165 |
+
metadata={'title': event_description}
|
166 |
+
))
|
167 |
+
|
168 |
+
# Handle answer streaming
|
169 |
+
elif (event["name"] != "transform_query" and
|
170 |
+
event["event"] == "on_chat_model_stream" and
|
171 |
+
node in ["answer_rag","answer_rag_no_docs", "answer_search", "answer_chitchat"]):
|
172 |
+
history, start_streaming, answer_message_content = stream_answer(
|
173 |
+
history, event, start_streaming, answer_message_content
|
174 |
+
)
|
175 |
+
|
176 |
+
# Handle graph retrieval
|
177 |
+
elif event["name"] in ["retrieve_graphs", "retrieve_graphs_ai"] and event["event"] == "on_chain_end":
|
178 |
+
graphs_html = handle_retrieved_owid_graphs(event, graphs_html)
|
179 |
+
|
180 |
+
# Handle query transformation
|
181 |
+
if event["name"] == "transform_query" and event["event"] == "on_chain_end":
|
182 |
+
if hasattr(history[-1], "content"):
|
183 |
+
sub_questions = [q["question"] for q in event["data"]["output"]["questions_list"]]
|
184 |
+
history[-1].content += "Decompose question into sub-questions:\n\n - " + "\n - ".join(sub_questions)
|
185 |
+
|
186 |
+
yield history, docs_html, output_query, output_language, related_contents, graphs_html
|
187 |
+
|
188 |
+
except Exception as e:
|
189 |
+
print(f"Event {event} has failed")
|
190 |
+
raise gr.Error(str(e))
|
191 |
+
|
192 |
+
|
193 |
+
|
194 |
+
# Call the function to log interaction
|
195 |
+
log_interaction_to_azure(history, output_query, sources, docs, share_client, user_id)
|
196 |
+
|
197 |
+
yield history, docs_html, output_query, output_language, related_contents, graphs_html
|
climateqa/{event_handler.py → handle_stream_events.py}
RENAMED
File without changes
|
front/deprecated.py
ADDED
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
# Functions to toggle visibility
|
3 |
+
def toggle_summary_visibility():
|
4 |
+
global summary_visible
|
5 |
+
summary_visible = not summary_visible
|
6 |
+
return gr.update(visible=summary_visible)
|
7 |
+
|
8 |
+
def toggle_relevant_visibility():
|
9 |
+
global relevant_visible
|
10 |
+
relevant_visible = not relevant_visible
|
11 |
+
return gr.update(visible=relevant_visible)
|
12 |
+
|
13 |
+
def change_completion_status(current_state):
|
14 |
+
current_state = 1 - current_state
|
15 |
+
return current_state
|
16 |
+
|
17 |
+
|
18 |
+
|
19 |
+
def vote(data: gr.LikeData):
|
20 |
+
if data.liked:
|
21 |
+
print(data.value)
|
22 |
+
else:
|
23 |
+
print(data)
|
24 |
+
|
25 |
+
def save_graph(saved_graphs_state, embedding, category):
|
26 |
+
print(f"\nCategory:\n{saved_graphs_state}\n")
|
27 |
+
if category not in saved_graphs_state:
|
28 |
+
saved_graphs_state[category] = []
|
29 |
+
if embedding not in saved_graphs_state[category]:
|
30 |
+
saved_graphs_state[category].append(embedding)
|
31 |
+
return saved_graphs_state, gr.Button("Graph Saved")
|
32 |
+
|
33 |
+
|
34 |
+
# Function to save feedback
|
35 |
+
def save_feedback(feed: str, user_id):
|
36 |
+
if len(feed) > 1:
|
37 |
+
timestamp = str(datetime.now().timestamp())
|
38 |
+
file = user_id + timestamp + ".json"
|
39 |
+
logs = {
|
40 |
+
"user_id": user_id,
|
41 |
+
"feedback": feed,
|
42 |
+
"time": timestamp,
|
43 |
+
}
|
44 |
+
log_on_azure(file, logs, share_client)
|
45 |
+
return "Feedback submitted, thank you!"
|
46 |
+
|
front/event_listeners.py
ADDED
File without changes
|
front/tabs/__init__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from .tab_config import create_config_modal
|
2 |
+
from .tab_examples import create_examples_tab
|
3 |
+
from .tab_papers import create_papers_tab
|
4 |
+
from .tab_figures import create_figures_tab
|
5 |
+
from .chat_interface import create_chat_interface
|
6 |
+
from .tab_about import create_about_tab
|
front/tabs/chat_interface.py
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from gradio.components import ChatMessage
|
3 |
+
|
4 |
+
# Initialize prompt and system template
|
5 |
+
init_prompt = """
|
6 |
+
Hello, I am ClimateQ&A, a conversational assistant designed to help you understand climate change and biodiversity loss. I will answer your questions by **sifting through the IPCC and IPBES scientific reports**.
|
7 |
+
|
8 |
+
❓ How to use
|
9 |
+
- **Language**: You can ask me your questions in any language.
|
10 |
+
- **Audience**: You can specify your audience (children, general public, experts) to get a more adapted answer.
|
11 |
+
- **Sources**: You can choose to search in the IPCC or IPBES reports, or both.
|
12 |
+
- **Relevant content sources**: You can choose to search for figures, papers, or graphs that can be relevant for your question.
|
13 |
+
|
14 |
+
⚠️ Limitations
|
15 |
+
*Please note that the AI is not perfect and may sometimes give irrelevant answers. If you are not satisfied with the answer, please ask a more specific question or report your feedback to help us improve the system.*
|
16 |
+
|
17 |
+
🛈 Information
|
18 |
+
Please note that we log your questions for meta-analysis purposes, so avoid sharing any sensitive or personal information.
|
19 |
+
|
20 |
+
What do you want to learn ?
|
21 |
+
"""
|
22 |
+
|
23 |
+
|
24 |
+
|
25 |
+
# UI Layout Components
|
26 |
+
def create_chat_interface():
|
27 |
+
chatbot = gr.Chatbot(
|
28 |
+
value=[ChatMessage(role="assistant", content=init_prompt)],
|
29 |
+
type="messages",
|
30 |
+
show_copy_button=True,
|
31 |
+
show_label=False,
|
32 |
+
elem_id="chatbot",
|
33 |
+
layout="panel",
|
34 |
+
avatar_images=(None, "https://i.ibb.co/YNyd5W2/logo4.png"),
|
35 |
+
max_height="80vh",
|
36 |
+
height="100vh"
|
37 |
+
)
|
38 |
+
|
39 |
+
with gr.Row(elem_id="input-message"):
|
40 |
+
|
41 |
+
textbox = gr.Textbox(
|
42 |
+
placeholder="Ask me anything here!",
|
43 |
+
show_label=False,
|
44 |
+
scale=12,
|
45 |
+
lines=1,
|
46 |
+
interactive=True,
|
47 |
+
elem_id="input-textbox"
|
48 |
+
)
|
49 |
+
|
50 |
+
config_button = gr.Button("", elem_id="config-button")
|
51 |
+
|
52 |
+
return chatbot, textbox, config_button
|
53 |
+
|
54 |
+
|
55 |
+
|
front/tabs/main_tab.py
ADDED
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from .chat_interface import create_chat_interface
|
3 |
+
from .tab_examples import create_examples_tab
|
4 |
+
from .tab_papers import create_papers_tab
|
5 |
+
from .tab_figures import create_figures_tab
|
6 |
+
from .chat_interface import create_chat_interface
|
7 |
+
|
8 |
+
def cqa_tab(tab_name):
|
9 |
+
# State variables
|
10 |
+
current_graphs = gr.State([])
|
11 |
+
with gr.Tab(tab_name):
|
12 |
+
with gr.Row(elem_id="chatbot-row"):
|
13 |
+
# Left column - Chat interface
|
14 |
+
with gr.Column(scale=2):
|
15 |
+
chatbot, textbox, config_button = create_chat_interface()
|
16 |
+
|
17 |
+
# Right column - Content panels
|
18 |
+
with gr.Column(scale=2, variant="panel", elem_id="right-panel"):
|
19 |
+
with gr.Tabs(elem_id="right_panel_tab") as tabs:
|
20 |
+
# Examples tab
|
21 |
+
with gr.TabItem("Examples", elem_id="tab-examples", id=0):
|
22 |
+
examples_hidden, dropdown_samples, samples = create_examples_tab()
|
23 |
+
|
24 |
+
# Sources tab
|
25 |
+
with gr.Tab("Sources", elem_id="tab-sources", id=1) as tab_sources:
|
26 |
+
sources_textbox = gr.HTML(show_label=False, elem_id="sources-textbox")
|
27 |
+
|
28 |
+
|
29 |
+
# Recommended content tab
|
30 |
+
with gr.Tab("Recommended content", elem_id="tab-recommended_content", id=2) as tab_recommended_content:
|
31 |
+
with gr.Tabs(elem_id="group-subtabs") as tabs_recommended_content:
|
32 |
+
# Figures subtab
|
33 |
+
with gr.Tab("Figures", elem_id="tab-figures", id=3) as tab_figures:
|
34 |
+
sources_raw, new_figures, used_figures, gallery_component, figures_cards, figure_modal = create_figures_tab()
|
35 |
+
|
36 |
+
# Papers subtab
|
37 |
+
with gr.Tab("Papers", elem_id="tab-citations", id=4) as tab_papers:
|
38 |
+
papers_summary, papers_html, citations_network, papers_modal = create_papers_tab()
|
39 |
+
|
40 |
+
# Graphs subtab
|
41 |
+
with gr.Tab("Graphs", elem_id="tab-graphs", id=5) as tab_graphs:
|
42 |
+
graphs_container = gr.HTML(
|
43 |
+
"<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>",
|
44 |
+
elem_id="graphs-container"
|
45 |
+
)
|
46 |
+
return {
|
47 |
+
"chatbot": chatbot,
|
48 |
+
"textbox": textbox,
|
49 |
+
"tabs": tabs,
|
50 |
+
"sources_raw": sources_raw,
|
51 |
+
"new_figures": new_figures,
|
52 |
+
"current_graphs": current_graphs,
|
53 |
+
"examples_hidden": examples_hidden,
|
54 |
+
"dropdown_samples": dropdown_samples,
|
55 |
+
"samples": samples,
|
56 |
+
"sources_textbox": sources_textbox,
|
57 |
+
"figures_cards": figures_cards,
|
58 |
+
"gallery_component": gallery_component,
|
59 |
+
"config_button": config_button,
|
60 |
+
"papers_html": papers_html,
|
61 |
+
"citations_network": citations_network,
|
62 |
+
"papers_summary": papers_summary,
|
63 |
+
"tab_recommended_content": tab_recommended_content,
|
64 |
+
"tab_sources": tab_sources,
|
65 |
+
"tab_figures": tab_figures,
|
66 |
+
"tab_graphs": tab_graphs,
|
67 |
+
"tab_papers": tab_papers,
|
68 |
+
"graph_container": graphs_container
|
69 |
+
}
|
front/tabs/tab_about.py
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
|
3 |
+
# Citation information
|
4 |
+
CITATION_LABEL = "BibTeX citation for ClimateQ&A"
|
5 |
+
CITATION_TEXT = r"""@misc{climateqa,
|
6 |
+
author={Théo Alves Da Costa, Timothée Bohe},
|
7 |
+
title={ClimateQ&A, AI-powered conversational assistant for climate change and biodiversity loss},
|
8 |
+
year={2024},
|
9 |
+
howpublished= {\url{https://climateqa.com}},
|
10 |
+
}
|
11 |
+
@software{climateqa,
|
12 |
+
author = {Théo Alves Da Costa, Timothée Bohe},
|
13 |
+
publisher = {ClimateQ&A},
|
14 |
+
title = {ClimateQ&A, AI-powered conversational assistant for climate change and biodiversity loss},
|
15 |
+
}
|
16 |
+
"""
|
17 |
+
|
18 |
+
def create_about_tab():
|
19 |
+
with gr.Tab("About", elem_classes="max-height other-tabs"):
|
20 |
+
with gr.Row():
|
21 |
+
with gr.Column(scale=1):
|
22 |
+
gr.Markdown(
|
23 |
+
"""
|
24 |
+
### More info
|
25 |
+
- See more info at [https://climateqa.com](https://climateqa.com/docs/intro/)
|
26 |
+
- Feedbacks on this [form](https://forms.office.com/e/1Yzgxm6jbp)
|
27 |
+
|
28 |
+
### Citation
|
29 |
+
"""
|
30 |
+
)
|
31 |
+
with gr.Accordion(CITATION_LABEL, elem_id="citation", open=False):
|
32 |
+
gr.Textbox(
|
33 |
+
value=CITATION_TEXT,
|
34 |
+
label="",
|
35 |
+
interactive=False,
|
36 |
+
show_copy_button=True,
|
37 |
+
lines=len(CITATION_TEXT.split('\n')),
|
38 |
+
)
|
front/tabs/tab_config.py
ADDED
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from gradio_modal import Modal
|
3 |
+
from climateqa.constants import POSSIBLE_REPORTS
|
4 |
+
from typing import TypedDict
|
5 |
+
|
6 |
+
class ConfigPanel(TypedDict):
|
7 |
+
config_open: gr.State
|
8 |
+
config_modal: Modal
|
9 |
+
dropdown_sources: gr.CheckboxGroup
|
10 |
+
dropdown_reports: gr.Dropdown
|
11 |
+
dropdown_external_sources: gr.CheckboxGroup
|
12 |
+
search_only: gr.Checkbox
|
13 |
+
dropdown_audience: gr.Dropdown
|
14 |
+
after: gr.Slider
|
15 |
+
output_query: gr.Textbox
|
16 |
+
output_language: gr.Textbox
|
17 |
+
|
18 |
+
|
19 |
+
def create_config_modal():
|
20 |
+
config_open = gr.State(value=True)
|
21 |
+
with Modal(visible=False, elem_id="modal-config") as config_modal:
|
22 |
+
gr.Markdown("Reminders: You can talk in any language, ClimateQ&A is multi-lingual!")
|
23 |
+
|
24 |
+
dropdown_sources = gr.CheckboxGroup(
|
25 |
+
choices=["IPCC", "IPBES", "IPOS"],
|
26 |
+
label="Select source (by default search in all sources)",
|
27 |
+
value=["IPCC"],
|
28 |
+
interactive=True
|
29 |
+
)
|
30 |
+
|
31 |
+
dropdown_reports = gr.Dropdown(
|
32 |
+
choices=POSSIBLE_REPORTS,
|
33 |
+
label="Or select specific reports",
|
34 |
+
multiselect=True,
|
35 |
+
value=None,
|
36 |
+
interactive=True
|
37 |
+
)
|
38 |
+
|
39 |
+
dropdown_external_sources = gr.CheckboxGroup(
|
40 |
+
choices=["Figures (IPCC/IPBES)", "Papers (OpenAlex)", "Graphs (OurWorldInData)","POC region"],
|
41 |
+
label="Select database to search for relevant content",
|
42 |
+
value=["Figures (IPCC/IPBES)","POC region"],
|
43 |
+
interactive=True
|
44 |
+
)
|
45 |
+
|
46 |
+
search_only = gr.Checkbox(
|
47 |
+
label="Search only for recommended content without chating",
|
48 |
+
value=False,
|
49 |
+
interactive=True,
|
50 |
+
elem_id="checkbox-chat"
|
51 |
+
)
|
52 |
+
|
53 |
+
dropdown_audience = gr.Dropdown(
|
54 |
+
choices=["Children", "General public", "Experts"],
|
55 |
+
label="Select audience",
|
56 |
+
value="Experts",
|
57 |
+
interactive=True
|
58 |
+
)
|
59 |
+
|
60 |
+
after = gr.Slider(
|
61 |
+
minimum=1950,
|
62 |
+
maximum=2023,
|
63 |
+
step=1,
|
64 |
+
value=1960,
|
65 |
+
label="Publication date",
|
66 |
+
show_label=True,
|
67 |
+
interactive=True,
|
68 |
+
elem_id="date-papers",
|
69 |
+
visible=False
|
70 |
+
)
|
71 |
+
|
72 |
+
output_query = gr.Textbox(
|
73 |
+
label="Query used for retrieval",
|
74 |
+
show_label=True,
|
75 |
+
elem_id="reformulated-query",
|
76 |
+
lines=2,
|
77 |
+
interactive=False,
|
78 |
+
visible=False
|
79 |
+
)
|
80 |
+
|
81 |
+
output_language = gr.Textbox(
|
82 |
+
label="Language",
|
83 |
+
show_label=True,
|
84 |
+
elem_id="language",
|
85 |
+
lines=1,
|
86 |
+
interactive=False,
|
87 |
+
visible=False
|
88 |
+
)
|
89 |
+
|
90 |
+
dropdown_external_sources.change(
|
91 |
+
lambda x: gr.update(visible="Papers (OpenAlex)" in x),
|
92 |
+
inputs=[dropdown_external_sources],
|
93 |
+
outputs=[after]
|
94 |
+
)
|
95 |
+
|
96 |
+
close_config_modal_button = gr.Button("Validate and Close", elem_id="close-config-modal")
|
97 |
+
|
98 |
+
|
99 |
+
# return ConfigPanel(
|
100 |
+
# config_open=config_open,
|
101 |
+
# config_modal=config_modal,
|
102 |
+
# dropdown_sources=dropdown_sources,
|
103 |
+
# dropdown_reports=dropdown_reports,
|
104 |
+
# dropdown_external_sources=dropdown_external_sources,
|
105 |
+
# search_only=search_only,
|
106 |
+
# dropdown_audience=dropdown_audience,
|
107 |
+
# after=after,
|
108 |
+
# output_query=output_query,
|
109 |
+
# output_language=output_language
|
110 |
+
# )
|
111 |
+
return {
|
112 |
+
"config_open" : config_open,
|
113 |
+
"config_modal": config_modal,
|
114 |
+
"dropdown_sources": dropdown_sources,
|
115 |
+
"dropdown_reports": dropdown_reports,
|
116 |
+
"dropdown_external_sources": dropdown_external_sources,
|
117 |
+
"search_only": search_only,
|
118 |
+
"dropdown_audience": dropdown_audience,
|
119 |
+
"after": after,
|
120 |
+
"output_query": output_query,
|
121 |
+
"output_language": output_language,
|
122 |
+
"close_config_modal_button": close_config_modal_button
|
123 |
+
}
|
front/tabs/tab_examples.py
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import gradio as gr

from climateqa.sample_questions import QUESTIONS


def create_examples_tab():
    """Build the sample-questions tab: a category dropdown plus one
    gr.Examples group per category, only one visible at a time.

    Returns:
        gr.Textbox: hidden textbox that receives the clicked example's
        text; the caller wires it to the chat input.
    """
    # Hidden relay component: clicking an example writes its text here.
    examples_hidden = gr.Textbox(visible=False)

    # Materialize the categories once instead of rebuilding the dict view
    # three times; a concrete list is also what gr.Dropdown expects.
    categories = list(QUESTIONS.keys())
    dropdown_samples = gr.Dropdown(
        choices=categories,
        value=categories[0],
        interactive=True,
        label="Select a category of sample questions",
        elem_id="dropdown-samples"
    )

    # One row of examples per category; only the first is visible initially.
    samples = []
    for i, key in enumerate(categories):
        with gr.Row(visible=(i == 0)) as group_examples:
            gr.Examples(
                examples=QUESTIONS[key],
                inputs=[examples_hidden],
                examples_per_page=8,
                run_on_click=False,
                elem_id=f"examples{i}",
                api_name=f"examples{i}"
            )
        samples.append(group_examples)

    def change_sample_questions(key):
        # Show only the example group matching the selected category.
        index = categories.index(key)
        return [gr.update(visible=(i == index)) for i in range(len(samples))]

    # Event listener: switching category toggles group visibility.
    dropdown_samples.change(change_sample_questions, dropdown_samples, samples)

    return examples_hidden
front/tabs/tab_figures.py
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import gradio as gr
from gradio_modal import Modal


def create_figures_tab():
    """Build the figures tab: state holders for retrieved figures, an HTML
    card list, and a modal gallery that shows the figures at full size.

    Returns:
        tuple: (sources_raw, new_figures, used_figures, gallery_component,
        figures_cards, figure_modal) for the caller to wire up.
    """
    # Per-session state for the figures produced by the retrieval step.
    sources_raw = gr.State()
    new_figures = gr.State([])
    used_figures = gr.State([])

    # The full-size gallery lives inside a modal, hidden until requested.
    with Modal(visible=False, elem_id="modal_figure_galery") as figure_modal:
        gallery_component = gr.Gallery(object_fit='scale-down', elem_id="gallery-component", height="80vh")

    show_full_size_figures = gr.Button("Show figures in full size", elem_id="show-figures", interactive=True)
    # Opening the modal takes no inputs — the click just flips visibility.
    show_full_size_figures.click(lambda: Modal(visible=True), None, figure_modal)

    figures_cards = gr.HTML(show_label=False, elem_id="sources-figures")

    return sources_raw, new_figures, used_figures, gallery_component, figures_cards, figure_modal
front/tabs/tab_papers.py
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import gradio as gr
from gradio_modal import Modal


def create_papers_tab():
    """Build the papers tab: accordions holding a summary and a list of
    relevant papers, plus a modal with the citations-network graph.

    Returns:
        tuple: (papers_summary, papers_html, citations_network, papers_modal).
    """
    # Collapsed accordion with the generated summary of retrieved papers.
    with gr.Accordion(visible=True, elem_id="papers-summary-popup",
                      label="See summary of relevant papers", open=False) as summary_popup:
        papers_summary = gr.Markdown("", visible=True, elem_id="papers-summary")

    # Collapsed accordion listing the retrieved papers as HTML cards.
    with gr.Accordion(visible=True, elem_id="papers-relevant-popup",
                      label="See relevant papers", open=False) as relevant_popup:
        papers_html = gr.HTML(show_label=False, elem_id="papers-textbox")

    btn_citations_network = gr.Button("Explore papers citations network")
    with Modal(visible=False) as papers_modal:
        citations_network = gr.HTML("<h3>Citations Network Graph</h3>", visible=True,
                                    elem_id="papers-citations-network")
    # The button only opens the modal; no inputs are needed.
    btn_citations_network.click(lambda: Modal(visible=True), None, papers_modal)

    return papers_summary, papers_html, citations_network, papers_modal
front/tabs/tab_recommended_content.py
ADDED
File without changes
|