Pijush2023 committed
Commit 76bdb22 · verified · 1 Parent(s): 2afb07b

Update app.py

Files changed (1)
  1. app.py +8 -57
app.py CHANGED
@@ -787,11 +787,7 @@ def get_current_time_and_date():

current_time_and_date = get_current_time_and_date()

-
-
def fetch_local_events():
-
-
    api_key = os.environ['SERP_API']
    url = f'https://serpapi.com/search.json?engine=google_events&q=Events+in+Birmingham&hl=en&gl=us&api_key={api_key}'
    response = requests.get(url)
@@ -854,9 +850,6 @@ def fetch_local_events():
    else:
        return "<p>Failed to fetch local events</p>"

-
-
-
def fetch_local_weather():
    try:
        api_key = os.environ['WEATHER_API']
@@ -950,14 +943,6 @@ event type and description.And also add this Birmingham,AL at the end of each ad
    Question: {question}
    Helpful Answer:"""

-
- # template2 = """You are an expert concierge who is helpful and a renowned guide for Birmingham,Alabama. Based on today's weather being a sunny bright day and today's date is 1st july 2024, take the location or address but don't show the location or address on the output prompts. Use the following pieces of context,
- # memory, and message history, along with your knowledge of perennial events in Birmingham,Alabama, to answer the question at the end. If you don't know the answer, just say "Homie, I need to get more data for this," and don't try to make up an answer.
- # Keep the answer short and sweet and crisp. Always say "It was my pleasure!" at the end of the answer.
- # {context}
- # Question: {question}
- # Helpful Answer:""""
-
template2 = """You are an expert concierge who is helpful and a renowned guide for Birmingham,Alabama. Based on today's weather being a sunny bright day and today's date is 16th july 2024, take the location or address but don't show the location or address on the output prompts. Use the following pieces of context,
memory, and message history, along with your knowledge of perennial events in Birmingham,Alabama, to answer the question at the end. If you don't know the answer, just say "Homie, I need to get more data for this," and don't try to make up an answer.
Keep the answer short ,sweet and crisp and in one shot. Always say "It was my pleasure!" at the end of the answer.
@@ -997,7 +982,6 @@ def initialize_agent_with_prompt(prompt_template):
    )
    return agent

-
def generate_answer(message, choice):
    logging.debug(f"generate_answer called with prompt_choice: {choice}")

@@ -1013,8 +997,6 @@ def generate_answer(message, choice):
    addresses = extract_addresses(response['output'])
    return response['output'], addresses

-
-
def bot(history, choice, tts_choice, state):
    if not history:
        return history
@@ -1041,7 +1023,11 @@ def add_message(history, message):
    history.append((message, None))
    return history, gr.Textbox(value="", interactive=True, placeholder="Enter message or upload file...", show_label=False)

+ def clear_textbox():
+     return ""

+ def clear_textbox_after_retrieval(chatbot, chat_input, state):
+     return clear_textbox(), clear_textbox()

def print_like_dislike(x: gr.LikeData):
    print(x.index, x.value, x.liked)
@@ -1068,8 +1054,6 @@ def extract_addresses(response):

all_addresses = []

-
-
def generate_map(location_names):
    global all_addresses
    all_addresses.extend(location_names)
@@ -1084,14 +1068,13 @@ def generate_map(location_names):
    if geocode_result:
        location = geocode_result[0]['geometry']['location']
        folium.Marker(
-           [location['lat'], location['lng']],
+           [location['lat'], 'lng'],
            tooltip=f"{geocode_result[0]['formatted_address']}"
        ).add_to(m)

    map_html = m._repr_html_()
    return map_html

-
def fetch_local_news():
    api_key = os.environ['SERP_API']
    url = f'https://serpapi.com/search.json?engine=google_news&q=birmingham headline&api_key={api_key}'
@@ -1189,7 +1172,7 @@ def transcribe_function(stream, new_chunk):
    stream = y
    result = pipe_asr({"array": stream, "sampling_rate": sr}, return_timestamps=False)
    full_text = result.get("text", "")
-   return stream, full_text  # Return the transcribed text
+   return stream, full_text

def update_map_with_response(history):
    if not history:
@@ -1198,10 +1181,7 @@ def update_map_with_response(history):
    addresses = extract_addresses(response)
    return generate_map(addresses)

- def clear_textbox():
-     return ""
-
- def show_map_if_details(history,choice):
+ def show_map_if_details(history, choice):
    if choice in ["Details", "Conversational"]:
        return gr.update(visible=True), update_map_with_response(history)
    else:
@@ -1334,7 +1314,6 @@ def generate_audio_mars5(text):
        cfg = config_class(**{k: kwargs_dict[k] for k in kwargs_dict if k in config_class.__dataclass_fields__})
        ar_codes, wav_out = mars5.tts(chunk, wav, "", cfg=cfg)

-
        temp_audio_path = os.path.join(tempfile.gettempdir(), f"mars5_audio_{len(audio_segments)}.wav")
        torchaudio.save(temp_audio_path, wav_out.unsqueeze(0), mars5.sr)
        audio_segments.append(AudioSegment.from_wav(temp_audio_path))
@@ -1387,6 +1366,7 @@ with gr.Blocks(theme='Pijush2023/scikit-learn-pijush') as demo:
    location_output = gr.HTML()
    retriver_button.click(fn=add_message, inputs=[chatbot, chat_input], outputs=[chatbot, chat_input]).then(
        fn=bot, inputs=[chatbot, choice, tts_choice, state], outputs=[chatbot, gr.Audio(interactive=False, autoplay=True)], api_name="Ask_Retriever").then(
+       fn=clear_textbox_after_retrieval, inputs=[chatbot, chat_input, state], outputs=[chat_input, chat_input]).then(
        fn=show_map_if_details, inputs=[chatbot, choice], outputs=[location_output, location_output], api_name="map_finder")

    bot_msg = chat_msg.then(bot, [chatbot, choice, tts_choice], [chatbot], api_name="generate_voice_response")
@@ -1398,25 +1378,6 @@ with gr.Blocks(theme='Pijush2023/scikit-learn-pijush') as demo:
    audio_input = gr.Audio(sources=["microphone"], streaming=True, type='numpy')
    audio_input.stream(transcribe_function, inputs=[state, audio_input], outputs=[state, chat_input], api_name="voice_query_to_text")

-   # New components for recording and sending chunks
-   audio_recorder = gr.Audio(sources="microphone", type="numpy", label="Record Audio Chunk")
-   send_chunk_button = gr.Button("Send Chunk")
-
-   def transcribe_chunk(audio):
-       sr, y = audio
-       y = y.astype(np.float32) / np.max(np.abs(y))
-       result = pipe_asr({"array": y, "sampling_rate": sr}, return_timestamps=False)
-       return result["text"]
-
-   send_chunk_button.click(fn=transcribe_chunk, inputs=audio_recorder, outputs=chat_input).then(
-       fn=add_message, inputs=[chatbot, chat_input], outputs=[chatbot, chat_input])
-
-   with gr.Column():
-       weather_output = gr.HTML(value=fetch_local_weather())
-       news_output = gr.HTML(value=fetch_local_news())
-       events_output = gr.HTML(value=fetch_local_events())
-
-
    with gr.Column():
        image_output_1 = gr.Image(value=generate_image(hardcoded_prompt_1), width=400, height=400)
        image_output_2 = gr.Image(value=generate_image(hardcoded_prompt_2), width=400, height=400)
@@ -1426,8 +1387,6 @@ with gr.Blocks(theme='Pijush2023/scikit-learn-pijush') as demo:
    refresh_button.click(fn=update_images, inputs=None, outputs=[image_output_1, image_output_2, image_output_3])
    location_output = gr.HTML()
    bot_msg.then(show_map_if_details, [chatbot, choice], [location_output, location_output], api_name="map_finder")
-
-

demo.queue()
demo.launch(share=True)
@@ -1435,11 +1394,3 @@ demo.launch(share=True)



-
-
-
-
-
-
-
-
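For context on the behavioral change: the commit adds clear_textbox and clear_textbox_after_retrieval and chains the latter onto retriver_button.click, so the input textbox is emptied once the retriever has answered. The snippet below is a minimal, self-contained sketch of that Gradio .click(...).then(...) chaining pattern, for illustration only; the component names and the placeholder bot logic are assumptions, not code taken from app.py.

import gradio as gr

def add_message(history, message):
    # Append the user turn; the reply slot stays empty until the bot step runs.
    history.append((message, None))
    return history, message

def bot(history):
    # Placeholder responder; the real app calls its retrieval agent here.
    user_msg = history[-1][0]
    history[-1] = (user_msg, f"You said: {user_msg}")
    return history

def clear_textbox():
    # Returning an empty string resets the textbox component.
    return ""

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    chat_input = gr.Textbox(placeholder="Enter message...")
    ask = gr.Button("Ask")

    # Each .then() step starts after the previous one finishes, so the textbox
    # is cleared only after the bot's reply has been written to the chatbot.
    ask.click(fn=add_message, inputs=[chatbot, chat_input], outputs=[chatbot, chat_input]).then(
        fn=bot, inputs=chatbot, outputs=chatbot
    ).then(
        fn=clear_textbox, inputs=None, outputs=chat_input
    )

demo.queue()
demo.launch()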
 