Update app.py
app.py
CHANGED
@@ -1,40 +1,76 @@
 import gradio as gr
 import requests
 from datetime import datetime, timezone
+import logging
+
+# Configure logging for debugging purposes
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

 API_URL = "https://huggingface.co/api/daily_papers"

 class PaperManager:
-    def __init__(self, papers_per_page=10):
+    def __init__(self, papers_per_page=10, max_papers=100):
         self.papers_per_page = papers_per_page
+        self.max_papers = max_papers
         self.cache = []
         self.last_fetch_date = None
         self.total_pages = 1

     def fetch_papers(self):
         """
-        Fetch papers from the API
+        Fetch the first 100 most recent papers from the API, sorted by upvotes descending.
+        Caches the result and fetches only once a day.
         Returns:
             tuple: (success: bool, data: list, error_message: str)
         """
         today = datetime.now(timezone.utc).date()
         if self.last_fetch_date == today and self.cache:
-
+            logging.info("Using cached papers.")
             return True, self.cache, ""
         else:
             try:
-                #
-                response = requests.get(f"{API_URL}?page=1&limit=
+                # Fetch up to max_papers; adjust 'limit' as per API's capability
+                response = requests.get(f"{API_URL}?page=1&limit={self.max_papers}")
                 response.raise_for_status()
                 data = response.json()
-
+
+                if not isinstance(data, list):
+                    raise ValueError("API response is not a list of papers.")
+
+                # Sort papers by upvotes descending
+                sorted_papers = sorted(
+                    data,
+                    key=lambda x: x.get('paper', {}).get('upvotes', 0),
+                    reverse=True
+                )
+
+                # Limit to max_papers
+                self.cache = sorted_papers[:self.max_papers]
                 self.last_fetch_date = today
                 self.total_pages = (len(self.cache) + self.papers_per_page - 1) // self.papers_per_page
-
+
+                logging.info(f"Fetched {len(self.cache)} papers. Total pages: {self.total_pages}")
                 return True, self.cache, ""
-            except requests.
-
-
+            except requests.HTTPError as http_err:
+                error_message = f"HTTP error occurred: {http_err}"
+                logging.error(error_message)
+                return False, [], error_message
+            except requests.ConnectionError as conn_err:
+                error_message = f"Connection error occurred: {conn_err}"
+                logging.error(error_message)
+                return False, [], error_message
+            except requests.Timeout as timeout_err:
+                error_message = f"Timeout error occurred: {timeout_err}"
+                logging.error(error_message)
+                return False, [], error_message
+            except requests.RequestException as req_err:
+                error_message = f"An error occurred: {req_err}"
+                logging.error(error_message)
+                return False, [], error_message
+            except ValueError as val_err:
+                error_message = f"Data format error: {val_err}"
+                logging.error(error_message)
+                return False, [], error_message

     def format_paper(self, paper):
         """
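The sorting and paging logic above is compact: the sort key reads the nested upvote count defensively, and total_pages is a ceiling division over the cached list. A minimal standalone sketch with mock items (the field names are assumptions mirroring what fetch_papers reads, not the documented API schema):

import math  # only used here to cross-check the integer ceiling trick

mock_data = [
    {"paper": {"upvotes": 3}, "numComments": 1},
    {"paper": {"upvotes": 12}, "numComments": 4},
    {},  # missing fields fall back to 0 via .get()
]
papers_per_page = 10
sorted_papers = sorted(mock_data, key=lambda x: x.get('paper', {}).get('upvotes', 0), reverse=True)
total_pages = (len(sorted_papers) + papers_per_page - 1) // papers_per_page
print([p.get('paper', {}).get('upvotes', 0) for p in sorted_papers])  # [12, 3, 0]
print(total_pages, math.ceil(len(sorted_papers) / papers_per_page))   # 1 1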
@@ -47,6 +83,7 @@ class PaperManager:
         upvotes = paper.get('paper', {}).get('upvotes', 0)
         comments = paper.get('numComments', 0)
         published_at = paper.get('publishedAt', datetime.now(timezone.utc).isoformat())
+
         try:
             published_time = datetime.fromisoformat(published_at.replace('Z', '+00:00'))
             time_ago = (datetime.now(timezone.utc) - published_time).days
@@ -54,11 +91,11 @@
             time_ago = "Unknown"

         return f"""<div style='border-bottom: 1px solid #eee; padding: 10px 0;'>
-            <a href='{url}' target='_blank' style='color: #000; text-decoration: none; font-weight: bold;'>{title}</a>
-            <div style='font-size: 0.8em; color: #666; margin-top: 5px;'>
-                {upvotes} upvotes | by {authors} | {time_ago} days ago | {comments} comments
-            </div>
-        </div>"""
+            <a href='{url}' target='_blank' style='color: #000; text-decoration: none; font-weight: bold;'>{title}</a>
+            <div style='font-size: 0.8em; color: #666; margin-top: 5px;'>
+                {upvotes} upvotes | by {authors} | {time_ago} days ago | {comments} comments
+            </div>
+        </div>"""

     def render_papers(self, papers, page=1):
         """
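One detail in format_paper worth noting: datetime.fromisoformat does not accept a trailing 'Z' suffix on Python versions before 3.11, so the replace('Z', '+00:00') keeps the timestamp parseable and timezone-aware. A quick standalone check (the sample value is illustrative, not taken from the API):

from datetime import datetime, timezone

published_at = "2024-01-01T12:00:00Z"
published_time = datetime.fromisoformat(published_at.replace('Z', '+00:00'))
time_ago = (datetime.now(timezone.utc) - published_time).days
print(published_time.tzinfo, time_ago)  # UTC <days since the sample date>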
@@ -167,4 +204,123 @@ def refresh_papers(current_page, query):
         page_info_text = f"Page {new_page} of {total_pages}"
         return page_papers, new_page, total_pages, papers, page_info_text, ""
     else:
-        error_html = f"<div
+        error_html = f"<div class='error-message'>Error fetching papers: {error_message}</div>"
+        return error_html, current_page, paper_manager.total_pages, [], f"Page {current_page} of {paper_manager.total_pages}", error_message
+
+def search_papers(query, papers):
+    """
+    Search for papers based on the query and update the display.
+    """
+    if query:
+        filtered = paper_manager.search_papers(query)
+    else:
+        filtered = papers
+    total_pages = (len(filtered) + paper_manager.papers_per_page - 1) // paper_manager.papers_per_page if filtered else 1
+    page = 1
+    page_papers = paper_manager.render_papers(filtered, page)
+    page_info_text = f"Page {page} of {total_pages}"
+    if not filtered:
+        error_html = "<div class='error-message'>No papers match your search query.</div>"
+    else:
+        error_html = ""
+    return page_papers, page, total_pages, filtered, page_info_text, error_html
+
+def change_page(direction, current_page, total_pages, papers, query):
+    """
+    Change the current page based on the direction ('next' or 'prev').
+    """
+    if direction == "next" and current_page < total_pages:
+        new_page = current_page + 1
+    elif direction == "prev" and current_page > 1:
+        new_page = current_page - 1
+    else:
+        new_page = current_page  # No change if limits are reached
+
+    if not papers:
+        page_papers = "<div>No papers available for this page.</div>"
+    else:
+        page_papers = paper_manager.render_papers(papers, new_page)
+    page_info_text = f"Page {new_page} of {total_pages}"
+    return page_papers, new_page, total_pages, papers, page_info_text, ""
+
+def go_prev(current_page, total_pages, papers, query):
+    """
+    Handle the 'Previous Page' button click.
+    """
+    return change_page("prev", current_page, total_pages, papers, query)
+
+def go_next(current_page, total_pages, papers, query):
+    """
+    Handle the 'Next Page' button click.
+    """
+    return change_page("next", current_page, total_pages, papers, query)
+
+# Define the Gradio Blocks interface
+demo = gr.Blocks(css=css)
+
+with demo:
+    with gr.Column(elem_classes=["container"]):
+        # Title
+        gr.Markdown("# Daily Papers - HackerNews Style", elem_classes=["title"])
+
+        # Search and Refresh Row
+        with gr.Row(elem_classes=["search-row"]):
+            search_input = gr.Textbox(label="Search papers", placeholder="Enter search term...")
+            refresh_button = gr.Button("Refresh")
+
+        # Error Message Display
+        error_display = gr.HTML(elem_classes=["error-message"])
+
+        # Paper List Display
+        paper_list = gr.HTML(elem_classes=["paper-list"])
+
+        # Pagination Controls
+        with gr.Row(elem_classes=["footer"]):
+            prev_button = gr.Button("Previous Page")
+            page_info = gr.Markdown("Page 1 of 1")
+            next_button = gr.Button("Next Page")
+
+        # Hidden States
+        current_page_state = gr.State(1)
+        total_pages_state = gr.State(1)
+        papers_state = gr.State([])
+
+    # Initialize the app on load
+    demo.load(
+        initialize,
+        outputs=[paper_list, current_page_state, total_pages_state, papers_state, page_info, error_display]
+    )
+
+    # Search Functionality
+    search_input.submit(
+        search_papers,
+        inputs=[search_input, papers_state],
+        outputs=[paper_list, current_page_state, total_pages_state, papers_state, page_info, error_display]
+    )
+    search_input.change(
+        search_papers,
+        inputs=[search_input, papers_state],
+        outputs=[paper_list, current_page_state, total_pages_state, papers_state, page_info, error_display]
+    )
+
+    # Refresh Functionality
+    refresh_button.click(
+        refresh_papers,
+        inputs=[current_page_state, search_input],
+        outputs=[paper_list, current_page_state, total_pages_state, papers_state, page_info, error_display]
+    )
+
+    # Pagination Buttons
+    prev_button.click(
+        go_prev,
+        inputs=[current_page_state, total_pages_state, papers_state, search_input],
+        outputs=[paper_list, current_page_state, total_pages_state, papers_state, page_info, error_display]
+    )
+    next_button.click(
+        go_next,
+        inputs=[current_page_state, total_pages_state, papers_state, search_input],
+        outputs=[paper_list, current_page_state, total_pages_state, papers_state, page_info, error_display]
+    )
+
+# Launch the Gradio app
+demo.launch()
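As a quick way to see what the endpoint actually returns before trusting the fields the app reads (upvotes nested under 'paper', plus publishedAt and numComments at the top level), a standalone probe like the following can help; the page/limit parameters mirror the call in fetch_papers, while the explicit timeout is an addition for this sketch and not part of the app code:

import requests

API_URL = "https://huggingface.co/api/daily_papers"

resp = requests.get(f"{API_URL}?page=1&limit=5", timeout=10)
resp.raise_for_status()
for item in resp.json():
    paper = item.get("paper", {})
    # Field names follow what app.py reads; treat the response shape as an assumption.
    print(paper.get("upvotes", 0), item.get("publishedAt", "?"), item.get("numComments", 0))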