"""A minimal Google-search clone built with Streamlit.

Fetches a Google results page with ``requests``, parses the result cards
with BeautifulSoup, and renders them in a Streamlit UI.

NOTE(review): this file arrived whitespace-mangled, with the embedded
CSS/HTML string literals stripped out (leaving ``query`` and
``search_button`` undefined).  The UI below is reconstructed with plain
Streamlit widgets; restore the original custom HTML/CSS if it can be
recovered.
"""

import requests
import streamlit as st
from bs4 import BeautifulSoup

# Desktop Chrome UA so Google serves the full (parseable) results markup.
_USER_AGENT = (
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
    "(KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
)


# Custom CSS to mimic Google's interface
def load_css():
    """Inject custom CSS into the page.

    The original stylesheet was lost when this file was mangled;
    the call is kept so the app structure is unchanged.
    TODO: restore the Google-look CSS payload.
    """
    st.markdown("""<style></style>""", unsafe_allow_html=True)


# Function to perform a Google search
def search_google(query, proxy=None):
    """Fetch the Google results page for *query*.

    Args:
        query: Search terms (plain text; spaces are converted to ``+``).
        proxy: Optional proxy URL applied to both http and https.

    Returns:
        ``(html, captcha_detected)`` — *html* is the response body, or
        ``None`` when the request failed or Google served a CAPTCHA page;
        *captcha_detected* is True only in the CAPTCHA case.
    """
    headers = {"User-Agent": _USER_AGENT}
    url = f'https://www.google.com/search?q={query.replace(" ", "+")}&hl=en'
    proxies = {"http": proxy, "https": proxy} if proxy else None
    try:
        response = requests.get(url, headers=headers, proxies=proxies, timeout=10)
        response.raise_for_status()
    except requests.RequestException:
        # Network failure / HTTP error: surface as "no result, no CAPTCHA"
        # rather than crashing the Streamlit app.
        return None, False
    if "detected unusual traffic" in response.text.lower():  # Detect CAPTCHA
        return None, True
    return response.text, False


# Function to parse Google search results
def parse_search_results(html_content):
    """Extract ``{'title', 'link', 'snippet'}`` dicts from a results page.

    Returns an empty list when *html_content* is None/empty (e.g. after a
    failed request) or when no result cards are found.  The CSS classes
    (``tF2Cxc``, ``aCOpRe``) are Google-internal and change over time.
    """
    if not html_content:
        return []
    soup = BeautifulSoup(html_content, "html.parser")
    results = []
    for g in soup.find_all("div", class_="tF2Cxc"):
        # Look each element up once instead of twice per field.
        h3 = g.find("h3")
        anchor = g.find("a")
        span = g.find("span", class_="aCOpRe")
        results.append(
            {
                "title": h3.text if h3 else "No title",
                "link": anchor["href"] if anchor else "No link",
                "snippet": span.text if span else "No snippet",
            }
        )
    return results


# Streamlit App
def main():
    """Render the search UI and show results for the submitted query."""
    st.set_page_config(page_title="Google Search Clone", layout="centered")
    load_css()
    st.title("Google Search Clone")

    # Reconstructed widgets: the original custom-HTML search box was lost,
    # but downstream code expects `query` and `search_button` to exist.
    query = st.text_input("Search", placeholder="Search Google...")
    search_button = st.button("Google Search")

    if search_button and query.strip():
        with st.spinner("Searching..."):
            html_content, captcha_detected = search_google(query)
        if captcha_detected:
            st.error(
                "Google has detected unusual traffic from this application. "
                "Please solve the CAPTCHA manually or try again later."
            )
        elif html_content is None:
            st.error("The search request failed. Please try again.")
        else:
            results = parse_search_results(html_content)
            if not results:
                st.info("No results found.")
            for result in results:
                st.markdown(f"### [{result['title']}]({result['link']})")
                st.write(result["snippet"])


if __name__ == "__main__":
    main()