brian-yu-nexusflow committed on
Commit 24a8d18 · 1 Parent(s): 6db0fcc

Update app.py

Files changed (1):
  1. app.py +14 -9

app.py CHANGED
@@ -43,7 +43,7 @@ FUNCTIONS = [
     Function(
         name="sort_results",
         short_description="Sorting results",
-        description_function=lambda places, sort, descending=True, first_n = None: f"Sorting results by {sort} from "
+        description_function=lambda places, sort, descending=True, first_n=None: f"Sorting results by {sort} from "
         + ("lowest to highest" if not descending else "highest to lowest"),
         explanation_function=lambda result: "Done!",
     ),
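The only change in this hunk is cosmetic (spacing around the `first_n` default). As a minimal sketch, calling an equivalent lambda directly shows the rendered status text is unchanged with or without the optional arguments; the standalone `describe` name below is for illustration only and is not part of app.py.

describe = lambda places, sort, descending=True, first_n=None: (
    f"Sorting results by {sort} from "
    + ("lowest to highest" if not descending else "highest to lowest")
)

print(describe([], "rating"))                       # Sorting results by rating from highest to lowest
print(describe([], "distance", descending=False))   # Sorting results by distance from lowest to highest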
@@ -56,8 +56,8 @@ FUNCTIONS = [
     Function(
         name="get_distance",
         short_description="Calcuate distance",
-        description_function=lambda place_1, place_2: f"Calculating the distance between various places...",
-        explanation_function=lambda result: result[0],
+        description_function=lambda place_1, place_2: "Calculating distances",
+        explanation_function=lambda result: result[2],
     ),
     Function(
         name="get_recommendations",
@@ -71,13 +71,14 @@ FUNCTIONS = [
     Function(
         name="find_places_near_location",
         short_description="Look for places",
-        description_function=lambda type_of_place, location, radius_miles = 50: f"Looking for places near {location} within {radius_miles} with the following "
+        description_function=lambda type_of_place, location, radius_miles=50: f"Looking for places near {location} within {radius_miles} with the following "
         + (
             f"types: {', '.join(type_of_place)}"
             if isinstance(type_of_place, list)
             else f"type: {type_of_place}"
         ),
-        explanation_function=lambda result: f"Found {len(result)} places!",
+        explanation_function=lambda result: f"Found "
+        + (f"{len(result)} places!" if len(result) > 1 else f"1 place!"),
     ),
     Function(
         name="get_some_reviews",
@@ -304,7 +305,9 @@ class RavenDemo(gr.Blocks):
         steps = [gr.Textbox(value="", visible=False) for _ in range(self.max_num_steps)]
         yield get_returns()

-        raven_prompt = self.functions_helper.get_prompt(query.replace("'", r"\'").replace('"', r'\"'))
+        raven_prompt = self.functions_helper.get_prompt(
+            query.replace("'", r"\'").replace('"', r"\"")
+        )
         print(f"{'-' * 80}\nPrompt sent to Raven\n\n{raven_prompt}\n\n{'-' * 80}\n")
         stream = self.raven_client.text_generation(
             raven_prompt, **RAVEN_GENERATION_KWARGS
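This hunk only re-wraps the existing call across multiple lines; the escaping itself is unchanged. As a sketch, this is what that escaping does to a query containing quotes before it is passed to `functions_helper.get_prompt` (the sample query is made up):

query = 'Find a cafe called "Joe\'s" near me'
escaped = query.replace("'", r"\'").replace('"', r"\"")
print(escaped)  # Find a cafe called \"Joe\'s\" near me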
@@ -385,13 +388,15 @@ class RavenDemo(gr.Blocks):
             for s in stream:
                 for c in s:
                     summary_model_summary += c
-                    summary_model_summary = summary_model_summary.lstrip().removesuffix(
-                        "<|end_of_turn|>"
+                    summary_model_summary = (
+                        summary_model_summary.lstrip().removesuffix(
+                            "<|end_of_turn|>"
+                        )
                     )
                     yield get_returns()
         except huggingface_hub.inference._text_generation.ValidationError:
             if len(results) > 1:
-                new_length = (3*len(results)) // 4
+                new_length = (3 * len(results)) // 4
                 results = results[:new_length]
                 continue
             else:
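Two patterns are touched here: the streamed summary is trimmed of the chat template's `<|end_of_turn|>` marker as tokens arrive (the assignment is merely re-wrapped for formatting), and on a `ValidationError` (presumably a prompt that exceeds the endpoint's input limit) the demo retries with only the first three quarters of `results`. A standalone sketch of both, outside the Gradio class and with a made-up stream:

# Strip the end-of-turn marker while accumulating streamed chunks.
summary = ""
for chunk in ["Hello", " world", "<|end_of_turn|>"]:
    summary += chunk
    summary = summary.lstrip().removesuffix("<|end_of_turn|>")
print(summary)  # Hello world

# Back off by keeping 3/4 of the intermediate results and retrying.
results = list(range(16))
new_length = (3 * len(results)) // 4
results = results[:new_length]
print(len(results))  # 12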
 