# MoneyRadar / app.py

import gradio as gr
import requests
import json
import os
from datetime import datetime, timedelta
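
# The SERPHouse API key is read from the environment (for example a Hugging Face
# Space secret named SERPHOUSE_API_KEY); every request fails authorization without it.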
API_KEY = os.getenv("SERPHOUSE_API_KEY")
COUNTRY_LOCATIONS = {
"United States": "1026201", # Alba,Texas,United States
"United Kingdom": "2635167", # Birmingham,England,United Kingdom
"Canada": "5907364", # Burnaby,British Columbia,Canada
"Australia": "2147714", # Sydney,New South Wales,Australia
"Germany": "2950159", # Berlin,Berlin,Germany
"France": "2988507", # Paris,รŽle-de-France,France
"Japan": "1850147", # Tokyo,Tokyo,Japan
"South Korea": "1835848", # Seoul,Seoul,South Korea
"China": "1816670", # Beijing,Beijing,China
"India": "1261481", # New Delhi,Delhi,India
"Brazil": "3448439", # Sรฃo Paulo,Sรฃo Paulo,Brazil
"Mexico": "3530597", # Mexico City,Mexico City,Mexico
"Russia": "524901", # Moscow,Moscow,Russia
"Italy": "3169070", # Rome,Lazio,Italy
"Spain": "3117735", # Madrid,Madrid,Spain
"Netherlands": "2759794", # Amsterdam,North Holland,Netherlands
"Singapore": "1880252", # Singapore,Central Singapore,Singapore
"Hong Kong": "1819729" # Hong Kong,Central and Western District,Hong Kong
}
MAJOR_COUNTRIES = list(COUNTRY_LOCATIONS.keys())
def search_serphouse(query, country, page=1, num_result=100):
url = "https://api.serphouse.com/serp/live"
payload = {
"data": {
"q": query,
"domain": "google.com",
"loc_id": COUNTRY_LOCATIONS.get(country, "1026201"),
"lang": "en",
"device": "desktop",
"serp_type": "news",
"page": "1",
"verbatim": "0",
"gfilter": "0",
"num_result": "100" # num_result๋กœ ์ˆ˜์ •
}
}
headers = {
"accept": "application/json",
"content-type": "application/json",
"authorization": f"Bearer {API_KEY}"
}
try:
        response = requests.post(url, json=payload, headers=headers, timeout=30)  # timeout keeps the UI from hanging on a stalled API call
response.raise_for_status()
return response.json()
except requests.RequestException as e:
return {"error": f"Error: {str(e)}"}
def format_results_from_raw(results):
if isinstance(results, dict) and "error" in results:
return "Error: " + results["error"], []
try:
news_results = results.get('results', {}).get('results', {}).get('news', [])
if not news_results:
return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", []
articles = []
for idx, result in enumerate(news_results, 1):
articles.append({
"index": idx,
"title": result.get("title", "์ œ๋ชฉ ์—†์Œ"),
"link": result.get("url", result.get("link", "#")),
"snippet": result.get("snippet", "๋‚ด์šฉ ์—†์Œ"),
"channel": result.get("channel", result.get("source", "์•Œ ์ˆ˜ ์—†์Œ")),
"time": result.get("time", result.get("date", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„")),
"image_url": result.get("img", result.get("thumbnail", ""))
})
return "", articles
except Exception as e:
return f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}", []
def serphouse_search(query, country):
results = search_serphouse(query, country)
return format_results_from_raw(results)
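
# Illustrative call of the combined helper (returns an (error_message, articles)
# tuple; commented out so nothing runs at import time):
#   error, articles = serphouse_search("semiconductors", "South Korea")
#   if not error:
#       print(len(articles), "articles;", articles[0]["title"], articles[0]["link"])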
css = """
footer {visibility: hidden;}
"""
with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI Service") as iface:
    gr.Markdown("Enter a search term and select a country, and the app lists up to 100 matching news articles from the last 24 hours.")
with gr.Column():
with gr.Row():
            query = gr.Textbox(label="Search term")
            country = gr.Dropdown(MAJOR_COUNTRIES, label="Country", value="South Korea")
            search_button = gr.Button("Search", variant="primary")
        # Progress indicator (the handle actually used is the gr.Progress() argument of search_and_display below)
progress = gr.Progress()
status_message = gr.Markdown(visible=False)
articles_state = gr.State([])
article_components = []
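        # Gradio Blocks needs every output component to exist at build time, so 100
        # hidden article slots are created up front and toggled visible per search.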
for i in range(100):
with gr.Group(visible=False) as article_group:
title = gr.Markdown()
image = gr.Image(width=200, height=150)
snippet = gr.Markdown()
info = gr.Markdown()
article_components.append({
'group': article_group,
'title': title,
'image': image,
'snippet': snippet,
'info': info,
'index': i,
})
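    # The update list returned by search_and_display must follow the same order as
    # search_outputs wired to search_button.click: status markdown first, then five
    # updates per article slot (group, title, image, snippet, info), then the state.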
def search_and_display(query, country, articles_state, progress=gr.Progress()):
        progress(0, desc="Starting search...")
        error_message, articles = serphouse_search(query, country)
        progress(0.5, desc="Processing results...")
outputs = []
if error_message:
outputs.append(gr.update(value=error_message, visible=True))
for comp in article_components:
outputs.extend([
gr.update(visible=False), gr.update(), gr.update(),
gr.update(), gr.update()
])
articles_state = []
else:
outputs.append(gr.update(value="", visible=False))
            # Drive the bar over all slots so the reported fraction never exceeds 1.0
            total_slots = len(article_components)
            for idx, comp in enumerate(article_components):
                progress((idx + 1) / total_slots, desc=f"Rendering results... {idx + 1}/{total_slots}")
if idx < len(articles):
article = articles[idx]
image_url = article['image_url']
image_update = gr.update(value=image_url, visible=True) if image_url and not image_url.startswith('data:image') else gr.update(value=None, visible=False)
outputs.extend([
gr.update(visible=True),
gr.update(value=f"### [{article['title']}]({article['link']})"),
image_update,
gr.update(value=f"**์š”์•ฝ:** {article['snippet']}"),
gr.update(value=f"**์ถœ์ฒ˜:** {article['channel']} | **์‹œ๊ฐ„:** {article['time']}")
])
else:
outputs.extend([
gr.update(visible=False), gr.update(), gr.update(),
gr.update(), gr.update()
])
articles_state = articles
        progress(1.0, desc="Done!")
        outputs.append(articles_state)
return outputs
    # The first output targets status_message so error text appears at the top of the page.
    search_outputs = [status_message]
    for comp in article_components:
        search_outputs.extend([comp['group'], comp['title'], comp['image'],
                               comp['snippet'], comp['info']])
    search_outputs.append(articles_state)
search_button.click(
search_and_display,
inputs=[query, country, articles_state],
outputs=search_outputs,
show_progress=True
)
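# launch() with defaults is enough on Hugging Face Spaces; locally you could pass
# e.g. server_name="0.0.0.0" or share=True if the app needs to be reachable remotely.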
iface.launch()