ntphuc149 committed on
Commit
18160a7
·
verified ·
1 Parent(s): ce57c5d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -30
app.py CHANGED
@@ -1,31 +1,31 @@
1
- import time
2
- import requests
3
- import pandas as pd
4
- import streamlit as st
5
- from utils.basic_crawl_gg_scholar import scrape_gg_scholar
6
- from utils.retrieve_doi_by_name import get_doi_by_title
7
- from utils.get_abstract_by_doi import get_abstract_by_doi
8
-
9
-
10
- st.set_page_config(page_title="GG Scholar Crawler :v", page_icon=":book:", layout="centered")
11
-
12
- st.title("Google Scholar Crawler :book:")
13
-
14
- col_1, col_2, col_3, col_4 = st.columns(spec=[5, 1, 1, 1])
15
-
16
- keyword = col_1.text_input("Keyword to search:", key="keyword", placeholder="Enter keyword to search...", label_visibility="hidden")
17
- num_pages = col_2.number_input("Pages:", key="pages", placeholder="Number of pages:...", min_value=1, max_value=9999999, value=1, step=1)
18
- start_year = col_3.number_input("Start:", min_value=1900, max_value=2025, value=2020, key="start_year")
19
- end_year = col_4.number_input("End:", min_value=1900, max_value=2025, value=2025, key="end_year")
20
- is_start = st.button("Crawl!", key="crawl_button")
21
-
22
- if is_start:
23
- with st.spinner("Crawling basic info..."):
24
- basic_crawled_data = scrape_gg_scholar(query=keyword, num_pages=num_pages, start_year=start_year, end_year=end_year)
25
- st.dataframe(basic_crawled_data, use_container_width=True)
26
- st.success("Crawled basic info successfully!")
27
-
28
- # with st.spinner("Retrieving DOI..."):
29
- # doi_crawled_data = get_doi_by_title(basic_crawled_data)
30
- # st.dataframe(doi_crawled_data, use_container_width=True)
31
  # st.success("Retrieved DOI successfully!")
 
1
"""Streamlit front-end for crawling basic publication info from Google Scholar.

The user enters a search keyword, a number of result pages, and a year
range; the app then runs ``scrape_gg_scholar`` and renders the results
in a dataframe. The DOI/abstract retrieval steps are currently disabled
(kept as commented-out code for a later stage of the pipeline).
"""
import time
import requests
import pandas as pd
import streamlit as st
from datetime import date

from utils.basic_crawl_gg_scholar import scrape_gg_scholar
# from utils.retrieve_doi_by_name import get_doi_by_title
# from utils.get_abstract_by_doi import get_abstract_by_doi


# Upper bound for the year pickers; derived from today's date instead of a
# hard-coded constant so the app does not go stale at year rollover.
_CURRENT_YEAR = date.today().year

st.set_page_config(page_title="GG Scholar Crawler :v", page_icon=":book:", layout="centered")

st.title("Google Scholar Crawler :book:")

# One wide column for the keyword, three narrow ones for the numeric inputs.
col_1, col_2, col_3, col_4 = st.columns(spec=[5, 1, 1, 1])

keyword = col_1.text_input("Keyword to search:", key="keyword", placeholder="Enter keyword to search...", label_visibility="hidden")
num_pages = col_2.number_input("Pages:", key="pages", placeholder="Number of pages:...", min_value=1, max_value=9999999, value=1, step=1)
start_year = col_3.number_input("Start:", min_value=1900, max_value=_CURRENT_YEAR, value=2020, key="start_year")
end_year = col_4.number_input("End:", min_value=1900, max_value=_CURRENT_YEAR, value=_CURRENT_YEAR, key="end_year")
is_start = st.button("Crawl!", key="crawl_button")

if is_start:
    # Validate the inputs before hitting Google Scholar: an empty query or
    # an inverted year range would only waste requests (and risk blocking).
    if not keyword.strip():
        st.warning("Please enter a keyword before crawling.")
    elif start_year > end_year:
        st.warning("Start year must not be greater than end year.")
    else:
        with st.spinner("Crawling basic info..."):
            basic_crawled_data = scrape_gg_scholar(query=keyword, num_pages=num_pages, start_year=start_year, end_year=end_year)
            st.dataframe(basic_crawled_data, use_container_width=True)
            st.success("Crawled basic info successfully!")

        # with st.spinner("Retrieving DOI..."):
        #     doi_crawled_data = get_doi_by_title(basic_crawled_data)
        #     st.dataframe(doi_crawled_data, use_container_width=True)
        #     st.success("Retrieved DOI successfully!")