prithivMLmods committed
Commit 64dec74 · verified · 1 Parent(s): 7273d80

Update app.py

Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -28,7 +28,7 @@ def search(query):
     with requests.Session() as session:
         resp = session.get(
             url="https://www.google.com/search",
-            headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36"},
+            headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"},
             params={"q": term, "num": 3, "udm": 14},
             timeout=5,
             verify=None,
@@ -40,7 +40,7 @@ def search(query):
             link = result.find("a", href=True)
             link = link["href"]
             try:
-                webpage = session.get(link, headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36"}, timeout=5, verify=False)
+                webpage = session.get(link, headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"}, timeout=5, verify=False)
                 webpage.raise_for_status()
                 visible_text = extract_text_from_webpage(webpage.text)
                 if len(visible_text) > max_chars_per_page:
@@ -50,13 +50,14 @@ def search(query):
             all_results.append({"link": link, "text": None})
     return all_results
 
-
+# Initialize inference clients for different models
 client_gemma = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
 client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 
 func_caller = []
 
+# Define the main chat function
 def respond(message, history):
     func_caller = []
 
@@ -92,7 +93,7 @@ def respond(message, history):
             web_results = search(query)
             gr.Info("Extracting relevant Info")
             web2 = ' '.join([f"Link: {res['link']}\nText: {res['text']}\n\n" for res in web_results if res['text']])
-            messages = f""
+            messages = f"system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You are provided with WEB results from which you can find informations to answer users query in Structured and More better way. You do not say Unnecesarry things Only say thing which is important and relevant. You also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions."
             for msg in history:
                 messages += f"\nuser\n{str(msg[0])}"
                 messages += f"\nassistant\n{str(msg[1])}"
@@ -104,7 +105,7 @@ def respond(message, history):
                 output += response.token.text
                 yield output
         else:
-            messages = f""
+            messages = f"system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions."
             for msg in history:
                 messages += f"\nuser\n{str(msg[0])}"
                 messages += f"\nassistant\n{str(msg[1])}"
@@ -116,7 +117,7 @@ def respond(message, history):
                 output += response.token.text
                 yield output
     except:
-        messages = f""
+        messages = f"system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions."
         for msg in history:
             messages += f"\nuser\n{str(msg[0])}"
             messages += f"\nassistant\n{str(msg[1])}"
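
For context, a minimal sketch of how the updated search() helper reads once these changes are applied. The result selector ("div", class_="g"), the max_chars_per_page default, and the inlined text extraction (standing in for app.py's extract_text_from_webpage helper) are illustrative assumptions, not lines from the commit itself.

# Sketch only: the result selector, max_chars_per_page default, and inline
# text extraction are assumptions; the User-Agent and request parameters
# match the updated lines in this diff.
import requests
from bs4 import BeautifulSoup

USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"

def search(query, max_chars_per_page=6000):
    all_results = []
    with requests.Session() as session:
        resp = session.get(
            url="https://www.google.com/search",
            headers={"User-Agent": USER_AGENT},
            params={"q": query, "num": 3, "udm": 14},
            timeout=5,
        )
        resp.raise_for_status()
        soup = BeautifulSoup(resp.text, "html.parser")
        for result in soup.find_all("div", class_="g"):  # assumed result container
            link = result.find("a", href=True)
            if link is None:
                continue
            link = link["href"]
            try:
                webpage = session.get(link, headers={"User-Agent": USER_AGENT}, timeout=5, verify=False)
                webpage.raise_for_status()
                visible_text = BeautifulSoup(webpage.text, "html.parser").get_text(" ", strip=True)
                all_results.append({"link": link, "text": visible_text[:max_chars_per_page]})
            except requests.RequestException:
                all_results.append({"link": link, "text": None})
    return all_results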
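
Likewise, a minimal sketch of the prompt-building and streaming pattern the new system prompts feed into, assuming InferenceClient.text_generation with stream=True and details=True (whose streamed chunks expose .token.text, matching the "output += response.token.text" lines in the diff). The stream_reply wrapper, its parameters, and the max_new_tokens value are illustrative, not part of this commit.

# Sketch only: stream_reply and max_new_tokens are illustrative; the flat
# "system / user / assistant" prompt layout mirrors how respond() builds
# the messages string in this diff.
from huggingface_hub import InferenceClient

client_gemma = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")

def stream_reply(system_prompt, history, message):
    # Flatten the chat history into a single prompt string.
    messages = f"system\n{system_prompt}"
    for user_turn, assistant_turn in history:
        messages += f"\nuser\n{str(user_turn)}"
        messages += f"\nassistant\n{str(assistant_turn)}"
    messages += f"\nuser\n{str(message)}\nassistant\n"

    # Stream tokens back as they are generated.
    output = ""
    stream = client_gemma.text_generation(
        messages, max_new_tokens=512, stream=True, details=True, return_full_text=False
    )
    for response in stream:
        output += response.token.text
        yield output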