# NegotiateAI: src/application/views/knowledge_hub_page.py
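"""Streamlit page for the NegotiateAI Knowledge Hub.

Lets users filter Plastic Knowledge Hub documents by organization, region, and
type of document, list the matching documents with direct links, and ask
questions that are answered by a retrieval-augmented generation (RAG) pipeline
over the filtered document set.
"""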
import base64
import time
from pathlib import Path
import pandas as pd
import streamlit as st
from haystack_integrations.document_stores.qdrant import QdrantDocumentStore
from src.document_store.get_index import get_index
from src.rag.pipeline import RAGPipeline
from src.utils.data import load_css, load_json
from src.utils.writer import typewriter
DATA_BASE_PATH = Path(__file__).parent.parent.parent.parent / "data"
# Function to load and encode the image
def get_base64_image(image_path):
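    """Load an image from disk and return it as a base64-encoded string for inline HTML embedding."""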
with open(image_path, "rb") as img_file:
return base64.b64encode(img_file.read()).decode()
@st.cache_data
def load_css_style() -> None:
load_css(Path(__file__).parent.parent.parent.parent / "style" / "style.css")
@st.cache_resource
def load_knowledge_hub_pipeline(
template: str,
) -> tuple[QdrantDocumentStore, RAGPipeline]:
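    """Load the Qdrant index for the knowledge hub and wrap it in a RAG pipeline (cached as a Streamlit resource)."""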
knowledge_hub_index = get_index(index="knowledge_hub_data")
knowledge_hub_rag = RAGPipeline(
document_store=knowledge_hub_index, top_k=5, template=template
)
return knowledge_hub_index, knowledge_hub_rag
@st.cache_resource
def get_organization_filter() -> dict | list:
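    """Load the organization taxonomy that populates the organization filter."""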
return load_json(DATA_BASE_PATH / "taxonomies" / "organization.json")
@st.cache_data
def load_template() -> str:
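    """Read the prompt template used by the knowledge hub RAG pipeline."""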
path = (
Path(__file__).parent.parent.parent
/ "rag"
/ "prompt_templates"
/ "inc_template.txt"
)
with open(path, "r") as file:
template = file.read()
return template
@st.cache_data
def get_example_prompts() -> list[str]:
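    """Load the example questions offered below the question text area."""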
return [
example["question"]
for example in load_json(
DATA_BASE_PATH / "example_prompts" / "example_prompts_knowledge_hub.json"
)
]
@st.cache_resource
def get_region_filter() -> dict | list:
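    """Load the region taxonomy that populates the region filter."""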
return load_json(DATA_BASE_PATH / "taxonomies" / "region.json")
@st.cache_resource
def get_type_of_document_filter() -> dict | list:
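    """Load the document-type taxonomy that populates the type-of-document filter."""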
return load_json(DATA_BASE_PATH / "taxonomies" / "type_of_document.json")
@st.cache_data
def set_trigger_state_values() -> tuple[bool, bool]:
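    """Initialize the session-state flags that control rendering of the filter and ask output columns."""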
    trigger_filter_k = st.session_state.setdefault("trigger_filter_k", False)
    trigger_ask_k = st.session_state.setdefault("trigger_ask_k", False)
return trigger_filter_k, trigger_ask_k
@st.cache_data
def about_knowledge_hub() -> None:
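    """Render the feedback and survey links plus the 'About' footer with the GIZ logo."""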
st.markdown("""<p class="header"> Help us Improve! </p>""", unsafe_allow_html=True)
st.markdown(
"""<p class="description"> We would appreciate your feedback and support to improve the app. You can fill out a quick feedback form (maximal 5 minutes) or use the in-depth survey (maximal 15 minutes). </p>""",
unsafe_allow_html=True,
)
review, in_depth_review, _ = st.columns(spec=[0.7, 1.0, 4], gap="large")
with review:
st.link_button(
label="Feedback",
url="https://forms.gle/PPT5g558utGDUAGh6",
icon=":material/reviews:",
)
with in_depth_review:
st.link_button(
label="Survey",
url="https://docs.google.com/forms/d/1-WNS0ZdAuystajf2i6iSR5HpRfvV1LYq_TcQfaIMvkA",
icon=":material/rate_review:",
)
logo = get_base64_image("static/images/logo.png")
st.write("\n")
st.write("\n")
st.write("\n")
st.markdown(
f"""<div class="footer">
<h3>About</h3>
<div class="content">
        The Deutsche Gesellschaft für Internationale Zusammenarbeit (GIZ) GmbH <br>
        is a globally active service provider dedicated to international cooperation <br>
        for sustainable development and is active in over 120 countries. <br> <br>
The GIZ Data Lab specializes in harnessing data for development, <br>
driving innovative solutions in international cooperation
to address <br> real-world challenges. <br> <br>
Our work on NegotiateAI started in 2023. You can find more information <br>
about the NegotiateAI project on our <a href="https://www.blog-datalab.com/home/negotiateai/">website</a>.
</div>
<img src="data:image/png;base64,{logo}" class="logo" />
</div>
""",
unsafe_allow_html=True,
)
@st.cache_data
def load_app_init() -> None:
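    """Render the introductory 'About' expander describing the knowledge hub."""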
description_knowledge_hub_col_1, _ = st.columns([0.66, 1])
with description_knowledge_hub_col_1:
with st.expander("About", icon=":material/info:"):
st.markdown(
"""
<p class="description"> Query and engage with the Plastic Knowledge Hub to obtain a wealth of resources that will support you to learn more about the main topics of plastic pollution. Work around with the filters to get more precise information. <br>
While the generated answers take into account up to eight documents at a time due to technical limitations, users can still access the full set of filtered documents via direct links for comprehensive exploration.
</p>
""",
unsafe_allow_html=True,
)
st.write("\n")
st.write("\n")
def init_knowledge_hub_page():
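    """Entry point for the knowledge hub page: load styles and cached resources, render the filter widgets, and drive the ask/filter tabs."""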
load_css_style()
load_app_init()
# Load Cache Data and Resources
trigger_filter_k, trigger_ask_k = set_trigger_state_values()
knowledge_hub_template = load_template()
knowledge_hub_index, knowledge_hub_rag = load_knowledge_hub_pipeline(
template=knowledge_hub_template
)
example_prompts_k = get_example_prompts()
organization_filter = get_organization_filter()
region_filter = get_region_filter()
type_of_document_filter = get_type_of_document_filter()
# Application Column
application_col = st.columns(1)
with application_col[0]:
st.write("\n")
organization, region, type_of_document = st.columns(3)
with organization:
selected_organization = st.multiselect(
label="Organization",
options=organization_filter,
label_visibility="visible",
placeholder="Select organization",
)
with region:
selected_region = st.multiselect(
label="Region",
options=region_filter,
label_visibility="visible",
placeholder="Select region",
)
with type_of_document:
selected_type_of_document = st.multiselect(
label="Type of Document",
options=type_of_document_filter,
label_visibility="visible",
placeholder="Select type of document",
)
st.write("\n")
asking_k, filtering_k = st.tabs(["Ask a question", "Filter documents"])
with asking_k:
application_col_ask_k, output_col_ask_k = st.columns([1, 1.5])
with application_col_ask_k:
st.markdown(
"""
<p class="description">
                    Please ask a question to get an answer based on the documents that match the selected filters. Selecting filters is optional.</p>
""",
unsafe_allow_html=True,
)
if "prompt_k" not in st.session_state:
prompt_k = st.text_area(label="")
if (
"prompt_k" in st.session_state
and st.session_state.prompt_k in example_prompts_k
):
prompt_k = st.text_area(
label="Enter a question",
value=st.session_state.prompt_k,
)
if (
"prompt_k" in st.session_state
and st.session_state.prompt_k not in example_prompts_k
):
del st.session_state["prompt_k"]
prompt_k = st.text_area(label="Enter a question")
                trigger_ask_k = st.session_state.setdefault("trigger_ask_k", False)
if st.button(
"Ask", key="ask_k", type="primary", icon=":material/send:"
):
if prompt_k == "":
st.error(
"Please enter a question. Reloading the app in few seconds",
icon=":material/error:",
)
time.sleep(3)
st.rerun()
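                    # Build metadata filters from the selected organization, region, and
                    # document type, and check that at least one document matches before answering.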
with st.spinner("Querying Documents..."):
filter_selection = {
"organization": selected_organization,
"region": selected_region,
"type_of_document": selected_type_of_document,
}
if (
not selected_region
and not selected_organization
and not selected_type_of_document
):
st.warning(
"No filters selected. All documents will be used for the question. Longer processing time expected. Please consider using the filter functions to narrow down the data.",
icon=":material/warning:",
)
filters = knowledge_hub_rag.build_filter(
filter_selections=filter_selection
)
try:
docs = knowledge_hub_index.filter_documents(filters=filters)
except Exception:
st.error(
"An error occured while filtering the documents. Please try again. App will reload in a few seconds. If the app does not reload, please refresh the page.",
icon=":material/error:",
)
trigger_ask_k = False
time.sleep(3)
st.rerun()
if not docs:
st.error(
"The combination of filters you've chosen does not match any documents. Please try another combination of filters. If a filter combination does not return any documents, it means that there are no documents that match the selected filters and therefore no answer can be given.",
icon=":material/error:",
)
trigger_ask_k = False
st.stop()
else:
st.success("Filtering completed.", icon=":material/check:")
with st.spinner("Answering question..."):
try:
result = knowledge_hub_rag.run(
query=prompt_k, filter_selections=filter_selection
)
except Exception:
st.error(
"An error occured while querying the documents. Please try again. App will reload in a few seconds. If the app does not reload, please refresh the page.",
icon=":material/error:",
)
trigger_ask_k = False
time.sleep(3)
st.rerun()
trigger_ask_k = True
st.success(
"Answering question completed.", icon=":material/check:"
)
st.markdown(
"***≡ Examples***",
help="These are example prompts that can be used to ask questions to the model. Click on a prompt to use it as a question. You can also type your own question in the text area above. In general we highly recommend to use the filter functions to narrow down the data.",
)
st.caption("Double click to select the prompt")
for i, prompt_inc in enumerate(example_prompts_k):
# with col[i % 4]:
if st.button(prompt_inc):
if "key" not in st.session_state:
st.session_state["prompt_k"] = prompt_inc
with filtering_k:
application_col_filter_k, output_col_filter_k = st.columns([1, 1.5])
with application_col_filter_k:
st.markdown(
"""
<p class="description"> This filter function allows you to see all documents that match the selected filters. The documents can be accessed via a link \n </p>
""",
unsafe_allow_html=True,
)
if st.button(
"Filter documents",
key="filter_docuemts_k",
type="primary",
icon=":material/filter_alt:",
):
if (
not selected_region
and not selected_organization
and not selected_type_of_document
):
st.info(
"No filteres selected. All documents will be shown. Longer processing time expected."
)
with st.spinner("Filtering documents..."):
                        filters = knowledge_hub_rag.build_filter(
filter_selections={
"organization": selected_organization,
"region": selected_region,
"type_of_document": selected_type_of_document,
}
)
try:
result = knowledge_hub_index.filter_documents(
                                filters=filters
)
except Exception:
st.error(
"An error occured while filtering the documents. Please try again. App will reload in a few seconds. If the app does not reload, please refresh the page.",
icon=":material/error:",
)
trigger_filter_k = False
time.sleep(3)
st.rerun()
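                        # Deduplicate by retriever_id so each source document appears only once
                        # in the results table.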
retriever_ids = set()
result_meta = []
for doc in result:
retriever_id = doc.meta["retriever_id"]
if retriever_id not in retriever_ids:
result_meta.append(
{
"organization": doc.meta["organization"],
"title": doc.meta["title"],
"year": doc.meta["year"],
"region": doc.meta["region"],
"keywords": doc.meta["key_words"],
"type_of_document": doc.meta[
"type_of_document"
],
"type_of_organization": doc.meta[
"type_of_organization"
],
"href": doc.meta["href"],
}
)
retriever_ids.add(retriever_id)
else:
continue
result_df = pd.DataFrame(result_meta)
if result_df.empty:
st.info(
"No documents found for the combination of filters you've chosen. All countries are represented at least once in the data. Remove the draft categories to see all documents for the countries selected or try other draft categories and/or rounds"
)
trigger_filter_k = False
else:
trigger_filter_k = True
if trigger_filter_k:
with output_col_filter_k:
st.markdown("### Overview of all filtered documents")
st.dataframe(
result_df,
hide_index=True,
use_container_width=True,
column_config={
"organization": st.column_config.ListColumn("Organization"),
"title": st.column_config.TextColumn("Title"),
"year": st.column_config.TextColumn("Year"),
"region": st.column_config.ListColumn("Region"),
"keywords": st.column_config.ListColumn("Keywords"),
"type_of_document": st.column_config.TextColumn(
"Type of Document"
),
"type_of_organization": st.column_config.TextColumn(
"Type of Organization"
),
"href": st.column_config.LinkColumn("Link"),
},
)
trigger_filter_k = False
if trigger_ask_k:
with output_col_ask_k:
if result is None:
st.error(
"Open AI rate limit exceeded. Please try again in a few seconds."
)
st.stop()
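                # Collect (retriever_id, href) pairs from the retrieved documents to build
                # the de-duplicated reference list shown beneath the generated answer.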
reference_data = [
(doc.meta["retriever_id"], doc.meta["href"])
for doc in result["retriever"]["documents"]
]
references = ["\n"]
for retriever_id, href in reference_data:
references.append(f"-[{retriever_id}]: {href} \n")
references = list(set(references))
st.markdown(
"""<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#077493"><path d="m640-480 80 80v80H520v240l-40 40-40-40v-240H240v-80l80-80v-280h-40v-80h400v80h-40v280Zm-286 80h252l-46-46v-314H400v314l-46 46Zm126 0Z"/></svg> <b>Answer</b>""",
unsafe_allow_html=True,
)
typewriter(
text=result["llm"]["replies"][0],
references=references,
speed=100,
app="knowledge_hub",
)
with st.expander("Show more information to the documents"):
sorted_docs = sorted(
result["retriever"]["documents"],
key=lambda x: x.meta["retriever_id"],
)
current_doc = None
markdown_text = ""
for doc in sorted_docs:
if doc.meta["retriever_id"] != current_doc:
markdown_text += f"- Document: {doc.meta['retriever_id']}\n"
markdown_text += " - Text passages\n"
markdown_text += f" - {doc.content}\n"
else:
markdown_text += f" - {doc.content}\n"
current_doc = doc.meta["retriever_id"]
st.write(markdown_text)
trigger_ask_k = False
st.markdown(
"""<hr style="height:2px;border:none;color:#077493;background-color:#077493;" /> """,
unsafe_allow_html=True,
)
about_knowledge_hub()