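"""A small Gradio app that lists Hugging Face daily papers in a Hacker News-style feed.

Papers are fetched from the daily_papers API, sorted by upvotes, and paginated
client-side; the search box filters the cached list by title.
"""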
import gradio as gr
import requests
from datetime import datetime, timezone

API_URL = "https://huggingface.co/api/daily_papers"

class PaperManager:
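    """Holds the fetched daily papers and paginates them client-side."""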
    def __init__(self, papers_per_page=10):
        self.papers_per_page = papers_per_page
        self.current_page = 1
        self.papers = []
        self.total_pages = 0

    def fetch_papers(self):
        """Fetch the daily papers feed and sort it by upvotes."""
        try:
            response = requests.get(API_URL, timeout=10)
            response.raise_for_status()
            data = response.json()
            # Sort once by upvotes; render_papers() paginates the cached list client-side.
            self.papers = sorted(data, key=lambda x: x.get('paper', {}).get('upvotes', 0), reverse=True)
            self.total_pages = max(1, (len(self.papers) + self.papers_per_page - 1) // self.papers_per_page)
            return True
        except requests.RequestException as e:
            print(f"Error fetching papers: {e}")
            return False

    def format_paper(self, paper):
        """Render one paper entry as an HTML snippet."""
        info = paper.get('paper', {})
        title = paper.get('title', 'No title')
        url = f"https://huggingface.co/papers/{info.get('id', '')}"
        authors = ', '.join(author.get('name', '') for author in info.get('authors', []))
        upvotes = info.get('upvotes', 0)
        comments = paper.get('numComments', 0)
        # publishedAt is an ISO-8601 timestamp ending in 'Z'; normalise it for fromisoformat().
        published_raw = paper.get('publishedAt', datetime.now(timezone.utc).isoformat())
        published_time = datetime.fromisoformat(published_raw.replace('Z', '+00:00'))
        time_ago = (datetime.now(timezone.utc) - published_time).days
        
        return f"""<div style='border-bottom: 1px solid #eee; padding: 10px 0;'>
        <a href='{url}' target='_blank' style='color: #000; text-decoration: none; font-weight: bold;'>{title}</a>
        <div style='font-size: 0.8em; color: #666; margin-top: 5px;'>
        {upvotes} upvotes | by {authors} | {time_ago} days ago | {comments} comments
        </div>
        </div>"""

    def render_papers(self):
        if not self.papers:
            if not self.fetch_papers():
                return "<div>Failed to fetch papers. Please try again later.</div>"
        
        start_index = (self.current_page - 1) * self.papers_per_page
        end_index = start_index + self.papers_per_page
        current_papers = self.papers[start_index:end_index]
        
        if not current_papers:
            return "<div>No more papers available.</div>"
        
        return "".join([self.format_paper(paper) for paper in current_papers])

    def search_papers(self, query):
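        """Filter the cached papers by a case-insensitive title match."""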
        if not query:
            return self.render_papers()
        
        filtered_papers = [paper for paper in self.papers if query.lower() in paper.get('title', '').lower()]
        return "".join([self.format_paper(paper) for paper in filtered_papers[:self.papers_per_page]])

    def next_page(self):
        # Pagination is client-side, so moving the page pointer is enough; no refetch needed.
        if self.current_page < self.total_pages:
            self.current_page += 1
        return self.render_papers(), f"Page {self.current_page} of {self.total_pages}"

    def prev_page(self):
        if self.current_page > 1:
            self.current_page -= 1
        return self.render_papers(), f"Page {self.current_page} of {self.total_pages}"

css = """
body { 
    font-family: Arial, sans-serif; 
    max-width: 800px;
    margin: 0 auto;
    padding: 20px;
}
.paper-list { 
    max-height: 600px; 
    overflow-y: auto;
    border: 1px solid #eee;
    border-radius: 5px;
    padding: 10px;
}
.search-row {
    display: flex;
    gap: 10px;
    margin-bottom: 20px;
}
"""

paper_manager = PaperManager()

def refresh_papers():
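    """Reset to the first page and re-fetch the paper feed."""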
    paper_manager.current_page = 1
    paper_manager.fetch_papers()
    return paper_manager.render_papers(), f"Page {paper_manager.current_page} of {paper_manager.total_pages}"

demo = gr.Blocks(css=css)

with demo:
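    # Layout: search box and refresh button on top, then the paper list, then pagination controls.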
    gr.Markdown("# Daily Papers - HackerNews Style")
    with gr.Row(elem_classes=["search-row"]):
        search_input = gr.Textbox(label="Search papers", placeholder="Enter search term...")
        refresh_button = gr.Button("Refresh")
    paper_list = gr.HTML(paper_manager.render_papers(), elem_classes=["paper-list"])
    
    with gr.Row():
        prev_button = gr.Button("Previous Page")
        next_button = gr.Button("Next Page")
        page_info = gr.Markdown(f"Page {paper_manager.current_page} of {paper_manager.total_pages}")
    
    search_input.change(paper_manager.search_papers, inputs=[search_input], outputs=[paper_list])
    refresh_button.click(refresh_papers, outputs=[paper_list, page_info])
    prev_button.click(paper_manager.prev_page, outputs=[paper_list, page_info])
    next_button.click(paper_manager.next_page, outputs=[paper_list, page_info])

demo.launch()