KingNish committed on
Commit
739fc33
·
verified ·
1 Parent(s): 561dc50

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +45 -49
app.py CHANGED
@@ -39,58 +39,54 @@ def extract_text_from_webpage(html_content):
39
  visible_text = soup.get_text(strip=True)
40
  return visible_text
41
 
42
def search(term, num_results=1, lang="en", advanced=True, sleep_interval=0, timeout=5, safe="active", ssl_verify=None):
    """Perform a Google search and return scraped page contents.

    Fetches Google result pages in batches, follows each result link, and
    extracts the visible text of the linked page.

    Args:
        term: Search query string (requests encodes it; no manual quoting needed).
        num_results: Total number of results to collect across batches.
        lang: Interface language passed as the ``hl`` parameter.
        advanced: Accepted for interface compatibility; not used here.
        sleep_interval: Seconds to pause between result-page batches
            (previously accepted but silently ignored — now honored).
        timeout: Per-request timeout in seconds, applied to BOTH the search
            request and each result-page fetch (the latter previously had no
            timeout and could hang indefinitely on a slow host).
        safe: Google SafeSearch setting.
        ssl_verify: Passed through to ``requests`` as ``verify``.

    Returns:
        list[dict]: One ``{"link": str | None, "text": str | None}`` per result;
        ``text`` is None when fetching/processing the page failed, and both are
        None when a result block had no anchor tag.
    """
    import time  # local import: the file's top-level import block is outside this view

    start = 0
    all_results = []
    # Fetch result pages in batches until enough results are gathered.
    while start < num_results:
        resp = requests.get(
            url="https://www.google.com/search",
            headers={"User-Agent": get_useragent()},  # random UA to reduce blocking
            params={
                "q": term,
                "num": num_results - start,  # remaining results for this batch
                "hl": lang,
                "start": start,
                "safe": safe,
            },
            timeout=timeout,
            verify=ssl_verify,
        )
        resp.raise_for_status()  # fail fast on HTTP errors from Google

        soup = BeautifulSoup(resp.text, "html.parser")
        result_block = soup.find_all("div", attrs={"class": "g"})

        # No parsed results on this page: advance and try the next offset.
        if not result_block:
            start += 1
            continue

        # Extract the link and the linked page's visible text for each result.
        for result in result_block:
            link = result.find("a", href=True)
            if link:
                link = link["href"]
                try:
                    # timeout added so a single slow page cannot stall the search
                    webpage = requests.get(link, headers={"User-Agent": get_useragent()}, timeout=timeout)
                    webpage.raise_for_status()
                    visible_text = extract_text_from_webpage(webpage.text)
                    all_results.append({"link": link, "text": visible_text})
                except requests.exceptions.RequestException as e:
                    # Best-effort: record the link even when the fetch failed.
                    print(f"Error fetching or processing {link}: {e}")
                    all_results.append({"link": link, "text": None})
            else:
                all_results.append({"link": None, "text": None})

        start += len(result_block)  # advance offset by results actually seen
        if sleep_interval:
            time.sleep(sleep_interval)  # fix: parameter was previously ignored

    return all_results
95
 
96
  # Speech Recognition Model Configuration
@@ -104,7 +100,7 @@ tokenizer = spm.SentencePieceProcessor(hf_hub_download(model_name, "tokenizer.sp
104
 
105
  # Mistral Model Configuration
106
  client1 = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
107
- system_instructions1 = "<s>[SYSTEM] Answer as Real Jarvis JARVIS, Made by 'Tony Stark', Keep conversation very short, clear, friendly and concise. The text provided is a request for a specific type of response from you, the virtual assistant. The request asks you to provide friendly responses as if You are the character Jarvis, made by 'Tony Stark.' The expectation is that I will avoid introductions and start answering the query directly, elaborating on all aspects of the request. As an AI-powered assistant, your task is to generate responses that appear as if they are created by the user and not an AI-powered assistant. [USER]"
108
 
109
  def resample(audio_fp32, sr):
110
  return soxr.resample(audio_fp32, sr, sample_rate)
 
39
  visible_text = soup.get_text(strip=True)
40
  return visible_text
41
 
42
def search(term, num_results=3, lang="en", advanced=True, timeout=5, safe="active", ssl_verify=None):
    """Perform a Google search and return scraped, truncated page contents.

    Uses a single ``requests.Session`` for connection reuse, fetches result
    pages in batches, follows each result link, and extracts the visible text
    of the linked page, truncated to stay under the downstream token limit.

    Args:
        term: Search query string (requests encodes it; no manual quoting needed).
        num_results: Total number of results to collect across batches.
        lang: Interface language passed as the ``hl`` parameter.
        advanced: Accepted for interface compatibility; not used here.
        timeout: Per-request timeout in seconds, applied to BOTH the search
            request and each result-page fetch (the latter previously had no
            timeout and could hang indefinitely on a slow host).
        safe: Google SafeSearch setting.
        ssl_verify: Passed through to ``requests`` as ``verify``.

    Returns:
        list[dict]: One ``{"link": str | None, "text": str | None}`` per result;
        ``text`` is None when fetching/processing the page failed, and both are
        None when a result block had no anchor tag.
    """
    start = 0
    all_results = []
    # Cap characters kept per page to stay under the model's token limit.
    max_chars_per_page = 4000  # tune against your token budget / page lengths

    with requests.Session() as session:
        while start < num_results:
            resp = session.get(
                url="https://www.google.com/search",
                headers={"User-Agent": get_useragent()},  # random UA to reduce blocking
                params={
                    "q": term,
                    "num": num_results - start,  # remaining results for this batch
                    "hl": lang,
                    "start": start,
                    "safe": safe,
                },
                timeout=timeout,
                verify=ssl_verify,
            )
            resp.raise_for_status()  # fail fast on HTTP errors from Google

            soup = BeautifulSoup(resp.text, "html.parser")
            result_block = soup.find_all("div", attrs={"class": "g"})

            # No parsed results on this page: advance and try the next offset.
            if not result_block:
                start += 1
                continue

            # Extract the link and the linked page's visible text for each result.
            for result in result_block:
                link = result.find("a", href=True)
                if link:
                    link = link["href"]
                    try:
                        # timeout added so a single slow page cannot stall the search
                        webpage = session.get(link, headers={"User-Agent": get_useragent()}, timeout=timeout)
                        webpage.raise_for_status()
                        visible_text = extract_text_from_webpage(webpage.text)
                        # Truncate overly long pages to respect the token budget.
                        if len(visible_text) > max_chars_per_page:
                            visible_text = visible_text[:max_chars_per_page] + "..."
                        all_results.append({"link": link, "text": visible_text})
                    except requests.exceptions.RequestException as e:
                        # Best-effort: record the link even when the fetch failed.
                        print(f"Error fetching or processing {link}: {e}")
                        all_results.append({"link": link, "text": None})
                else:
                    all_results.append({"link": None, "text": None})

            start += len(result_block)  # advance offset by results actually seen

    return all_results
91
 
92
  # Speech Recognition Model Configuration
 
100
 
101
  # Mistral Model Configuration
102
  client1 = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
103
+ system_instructions1 = "<s>[SYSTEM] Answer as Real OpenGPT 4o, Made by 'KingNish', Keep conversation very short, clear, friendly and concise. The text provided is a request for a specific type of response from you, the virtual assistant. The request asks you to provide friendly responses. The expectation is that I will avoid introductions and start answering the query directly, elaborating on all aspects of the request. As an AI-powered assistant, your task is to generate responses that appear as if they are created by the user and not an AI-powered assistant. [USER]"
104
 
105
def resample(audio_fp32, sr):
    """Resample float32 audio from rate *sr* to the module-level `sample_rate` using soxr."""
    return soxr.resample(audio_fp32, sr, sample_rate)