Pijush2023 committed
Commit b2042b7 · verified · 1 Parent(s): f3ac855

Update app.py

Files changed (1)
  1. app.py +8 -8
app.py CHANGED
@@ -259,7 +259,7 @@ def generate_answer(message, choice, retrieval_mode):
 
     prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
 
-    if retrieval_mode == "Vector":
+    if retrieval_mode == "VDB":
         qa_chain = RetrievalQA.from_chain_type(
             llm=chat_model,
             chain_type="stuff",
@@ -269,7 +269,7 @@ def generate_answer(message, choice, retrieval_mode):
         response = qa_chain({"query": message})
         logging.debug(f"Vector response: {response}")
         return response['result'], extract_addresses(response['result'])
-    elif retrieval_mode == "Knowledge-Graph":
+    elif retrieval_mode == "KGF":
        response = chain_neo4j.invoke({"question": message})
        logging.debug(f"Knowledge-Graph response: {response}")
        return response, extract_addresses(response)
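
The two hunks above only rename the mode strings ("Vector" becomes "VDB", "Knowledge-Graph" becomes "KGF"); the retrieval logic itself is unchanged. For orientation, the diff context cuts off before the retriever arguments of the vector branch; a minimal sketch of how that branch is typically assembled is below, where the retriever and prompt wiring are assumptions not shown in this commit:

# Minimal sketch of the "VDB" branch, assuming a vector-store `retriever`
# and the same chat_model / prompt_template selected earlier in generate_answer().
from langchain.chains import RetrievalQA

def answer_with_vdb(message, chat_model, retriever, prompt_template):
    qa_chain = RetrievalQA.from_chain_type(
        llm=chat_model,
        chain_type="stuff",                             # stuff retrieved docs into a single prompt
        retriever=retriever,                            # assumption: not visible in this diff
        chain_type_kwargs={"prompt": prompt_template},  # assumption: not visible in this diff
    )
    response = qa_chain({"query": message})
    return response["result"]
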
@@ -795,7 +795,7 @@ def fetch_local_weather():
 
 
 def handle_retrieval_mode_change(choice):
-    if choice == "Knowledge-Graph":
+    if choice == "KGF":
         return gr.update(interactive=False), gr.update(interactive=False)
     else:
         return gr.update(interactive=True), gr.update(interactive=True)
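
The renamed guard can be exercised outside the UI: gr.update(...) returns a payload of property overrides, and the two return values map positionally onto the outputs=[choice, choice] wiring further down. A minimal standalone check, mirroring the function as it appears in this commit:

# Quick standalone check of the renamed guard (mirrors the function in app.py).
import gradio as gr

def handle_retrieval_mode_change(choice):
    if choice == "KGF":
        return gr.update(interactive=False), gr.update(interactive=False)
    return gr.update(interactive=True), gr.update(interactive=True)

print(handle_retrieval_mode_change("KGF"))   # both payloads carry interactive=False
print(handle_retrieval_mode_change("VDB"))   # both payloads carry interactive=True
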
@@ -808,7 +808,7 @@ with gr.Blocks(theme='Pijush2023/scikit-learn-pijush') as demo:
 
     chatbot = gr.Chatbot([], elem_id="RADAR:Channel 94.1", bubble_full_width=False)
     choice = gr.Radio(label="Select Style", choices=["Details", "Conversational"], value="Conversational")
-    retrieval_mode = gr.Radio(label="Retrieval Mode", choices=["Vector", "Knowledge-Graph"], value="Vector")
+    retrieval_mode = gr.Radio(label="Retrieval Mode", choices=["VDB", "KGF"], value="VDB")
 
     gr.Markdown("<h1 style='color: red;'>Talk to RADAR</h1>", elem_id="voice-markdown")
 
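
The Radio's choice strings are the values handed to event handlers, so after this rename the three comparison sites ("VDB"/"KGF" in generate_answer, handle_retrieval_mode_change, and this Radio) must agree exactly. A small self-contained sketch of that hand-off, using a hypothetical echo textbox purely for illustration:

# Hedged sketch: the selected choice string ("VDB" or "KGF") is what handlers receive.
import gradio as gr

with gr.Blocks() as demo_sketch:                      # hypothetical mini-app, not app.py itself
    retrieval_mode = gr.Radio(label="Retrieval Mode", choices=["VDB", "KGF"], value="VDB")
    mode_echo = gr.Textbox(label="Selected mode")     # hypothetical component for illustration
    retrieval_mode.change(fn=lambda mode: mode, inputs=retrieval_mode, outputs=mode_echo)

# demo_sketch.launch()
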
@@ -854,10 +854,10 @@ with gr.Blocks(theme='Pijush2023/scikit-learn-pijush') as demo:
     # Handle retrieval mode change
     retrieval_mode.change(fn=handle_retrieval_mode_change, inputs=retrieval_mode, outputs=[choice, choice])
 
-    # with gr.Column():
-    #     weather_output = gr.HTML(value=fetch_local_weather())
-    #     news_output = gr.HTML(value=fetch_local_news())
-    #     events_output = gr.HTML(value=fetch_local_events())
+    with gr.Column():
+        weather_output = gr.HTML(value=fetch_local_weather())
+        news_output = gr.HTML(value=fetch_local_news())
+        events_output = gr.HTML(value=fetch_local_events())
 
 
     with gr.Column():
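
Re-enabling this column means fetch_local_weather(), fetch_local_news() and fetch_local_events() run once while the Blocks layout is built, so the three panels show a snapshot taken at startup. If per-visit freshness were wanted, one common Gradio pattern (not what this commit does) is to fill the panels from the Blocks load event instead; a sketch under that assumption:

# Hedged alternative sketch: refresh the three HTML panels on every page load
# instead of baking values in at build time. Assumes the fetch_local_* helpers
# defined in app.py are in scope.
import gradio as gr

with gr.Blocks() as demo_sketch:
    with gr.Column():
        weather_output = gr.HTML()
        news_output = gr.HTML()
        events_output = gr.HTML()
    demo_sketch.load(
        fn=lambda: (fetch_local_weather(), fetch_local_news(), fetch_local_events()),
        outputs=[weather_output, news_output, events_output],
    )
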
 