import requests
from bs4 import BeautifulSoup
import streamlit as st
from urllib.parse import quote_plus


# Custom CSS for clean UI
def load_css():
    st.markdown("""
    <style>
    /* Styles for the result cards and preview frame go here. */
    </style>
    """, unsafe_allow_html=True)


# Function to perform a Google search
def search_google(query, proxy=None):
    """Fetch Google's results page for `query`, optionally through an HTTP(S) proxy.

    Returns a (html, captcha_detected) pair; html is None when Google serves
    its "unusual traffic" / CAPTCHA page instead of results.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }
    url = f'https://www.google.com/search?q={query.replace(" ", "+")}&hl=en'
    proxies = {"http": proxy, "https": proxy} if proxy else None
    response = requests.get(url, headers=headers, proxies=proxies, timeout=10)
    if "detected unusual traffic" in response.text.lower():
        return None, True
    return response.text, False


# Function to parse Google search results
def parse_search_results(html_content):
    soup = BeautifulSoup(html_content, 'html.parser')
    results = []
    # 'tF2Cxc' and 'aCOpRe' are Google's result-card classes; they change
    # periodically, so each field falls back to a placeholder when missing.
    for g in soup.find_all('div', class_='tF2Cxc'):
        title = g.find('h3').text if g.find('h3') else "No title"
        link = g.find('a')['href'] if g.find('a') else "No link"
        snippet = g.find('span', class_='aCOpRe').text if g.find('span', class_='aCOpRe') else "No snippet"
        results.append({'title': title, 'link': link, 'snippet': snippet})
    return results


# Streamlit App
def main():
    st.set_page_config(page_title="Google Search Clone with Safe Navigation", layout="centered")
    load_css()
    st.markdown('<h1>Google Search Clone with Safe Navigation</h1>', unsafe_allow_html=True)

    # Search box and trigger button
    query = st.text_input("Search", placeholder="Type a query")
    search_button = st.button("Search")

    if search_button and query.strip():
        with st.spinner("Searching..."):
            html_content, captcha_detected = search_google(query)
            if captcha_detected:
                st.error("CAPTCHA detected. Please try again later or reduce search frequency.")
                return
            if html_content:
                search_results = parse_search_results(html_content)
                for idx, result in enumerate(search_results, start=1):
                    # Minimal result card: the direct link opens in a new tab,
                    # the "Safe view" link reopens the result in the sandboxed
                    # preview frame rendered further down.
                    st.markdown(f"""
                    <div class="result-card">
                        <a href="{result['link']}" target="_blank"><h3>{idx}. {result['title']}</h3></a>
                        <p>{result['snippet']}</p>
                        <a href="?safe_view=true&url={quote_plus(result['link'])}">Safe view</a>
                    </div>
                    """, unsafe_allow_html=True)

    # Iframe secure preview for websites opened via a result's "Safe view" link
    if "safe_view" in st.query_params:
        url = st.query_params.get("url")
        if url:
            st.markdown(
                f'<iframe src="{url}" width="100%" height="600" sandbox="allow-scripts allow-forms"></iframe>',
                unsafe_allow_html=True,
            )


if __name__ == "__main__":
    main()