Update app.py
app.py
CHANGED
@@ -341,32 +341,32 @@ class PaperDownloader:

            pdf_content = await self.download_with_retry_async(doi)
            if pdf_content:
+               logger.info(f"Downloaded PDF for DOI: {doi}")
+               filename = f"{str(doi).replace('/', '_').replace('.', '_')}.pdf"
+               filepath = os.path.join(self.output_dir, filename)

+               # Write the PDF content
+               with open(filepath, 'wb') as f:
+                   f.write(pdf_content)

+               logger.info(f"Saved PDF to file: {filepath}")
+               logger.info(f"Descarga exitosa: {filename}")

+               return filepath, f"Descargado exitosamente: <a href='https://doi.org/{doi}'>{doi}</a>", ""

            else:
+               logger.warning(f"No se pudo descargar: {doi}")
+               return None, f"No se pudo descargar {doi}", f'<a href="https://doi.org/{doi}">{doi}</a>'

        except CancelledError:
+           logger.info(f"Download Cancelled DOI: {doi}")
+           return None, f"Download cancelled {doi}", "Download Cancelled"

        except Exception as e:
+           logger.error(f"Error processing {doi}: {e}")
+           return None, f"Error processing {doi}: {e}", f"Error processing {doi}: {e}"


    async def download_multiple_dois(self, dois_text, cancel_event):
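The hunk above relies on self.download_with_retry_async(doi), which is defined elsewhere in app.py and is not part of this diff. As a reference point, here is a minimal sketch of such a retry wrapper; the fetch_pdf parameter, retry count, and delay are illustrative assumptions, not taken from the actual implementation.

import asyncio
import logging

logger = logging.getLogger(__name__)

async def download_with_retry(fetch_pdf, doi, retries=3, delay=2.0):
    """Try fetch_pdf(doi) up to `retries` times; return the PDF bytes or None."""
    for attempt in range(1, retries + 1):
        try:
            pdf = await fetch_pdf(doi)  # fetch_pdf is a hypothetical coroutine
            if pdf:
                return pdf
            logger.warning(f"Attempt {attempt}/{retries}: no content for {doi}")
        except asyncio.CancelledError:
            raise  # let cancellation propagate, as _download_single_doi expects it
        except Exception as e:
            logger.warning(f"Attempt {attempt}/{retries} failed for {doi}: {e}")
        await asyncio.sleep(delay)
    return None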
@@ -387,16 +387,13 @@ class PaperDownloader:
        failed_dois = []  # DOIs that could not be downloaded
        downloaded_links = []  # links for the DOIs that were downloaded

-       for doi in dois:
-           result = await self._download_single_doi(doi,cancel_event) #await all of it and only collect results
+       for i, doi in enumerate(dois):
+           result = await self._download_single_doi(doi,cancel_event)
            if cancel_event.is_set():
+               logger.info("Downloads cancelled on multiple dois download")
+               return None,"Downloads cancelled","Downloads cancelled", "" # early return on cancelled
            if result is None:
+               continue

            if isinstance(result, Exception):
                # Unexpected exception
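The removed comment ("await all of it and only collect results") together with the isinstance(result, Exception) check suggests the earlier version collected all downloads at once with gather(..., return_exceptions=True), whereas the new loop awaits each DOI in turn so the cancel_event check can return early. A rough sketch of the gather-based pattern the isinstance check was written for (illustrative, not the removed code):

import asyncio

async def download_all(downloader, dois, cancel_event):
    # With return_exceptions=True, gather places raised exceptions into the
    # results list instead of propagating them, which is why callers then
    # test isinstance(result, Exception) for every entry.
    return await asyncio.gather(
        *(downloader._download_single_doi(doi, cancel_event) for doi in dois),
        return_exceptions=True,
    )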
@@ -406,31 +403,30 @@ class PaperDownloader:

            elif result[0] is None:
                # Failed download (result from download_single_doi_async)
+               error_msg = result[1]
+               logger.warning(f"Failed to download {doi}: {error_msg}")
+               failed_dois.append(f'<a href="https://doi.org/{doi}">{doi}</a> - {error_msg}')

            else:
                # Successful download
                filepath = result[0]

                # Generate a unique filename
+               filename = f"{str(doi).replace('/', '_').replace('.', '_')}.pdf" #Fixed indent
+               filepath_unique = os.path.join(self.output_dir, filename) # Fixed indent

+               try:
+                   # Rename the file
+                   os.rename(filepath, filepath_unique) #Fixed indent

+                   # Add to the list of downloaded files
+                   downloaded_files.append(filepath_unique) #Fixed indent
+                   downloaded_links.append(f'<a href="https://doi.org/{doi}">{doi}</a>') #Fixed indent

                except Exception as rename_error:
+                   logger.error(f"Error renaming file for {doi}: {rename_error}")
+                   failed_dois.append(f'<a href="https://doi.org/{doi}">{doi}</a> - Error saving file') #Fixed indent

        # Create the ZIP file if any files were downloaded
        zip_filename = None
        if downloaded_files:
@@ -443,8 +439,7 @@ class PaperDownloader:
                lambda: self.create_zip(zip_filename, downloaded_files)
            )
            logger.info(f"ZIP file created: {zip_filename}")
-       return zip_filename if downloaded_files else None, "\n".join(downloaded_links),"\n".join(failed_dois), ""
+       return zip_filename if downloaded_files else None, "\n".join(downloaded_links),"\n".join(failed_dois),""

    async def process_bibtex(self, bib_file, cancel_event):
        """Process BibTeX file and download papers with multiple strategies and reports UI updates using a callback"""
@@ -471,47 +466,47 @@ class PaperDownloader:
        downloaded_files = []
        failed_dois = []
        downloaded_links = []

-           return None, "Download Cancelled", "Download Cancelled", "" # early return on cancelled
-       if result is None:
-           continue
-       if isinstance(result, Exception):
-           # Unexpected exception
-           error_msg = f"Unexpected error: {str(result)}"
-           logger.error(f"Error downloading {doi}: {error_msg}")
-           failed_dois.append(f'<a href="https://doi.org/{doi}">{doi}</a> - {error_msg}')
-       elif result[0] is None:
-           # Failed download (result from download_single_doi_async)
-           error_msg = result[1]
-           logger.warning(f"Failed to download {doi}: {error_msg}")
-           failed_dois.append(f'<a href="https://doi.org/{doi}">{doi}</a> - {error_msg}')
+       for i,doi in enumerate(dois):
+           result = await self._download_single_doi(doi, cancel_event)

+           if cancel_event.is_set():
+               logger.info("Download Cancelled in bibtex mode")
+               return None, "Download Cancelled", "Download Cancelled", "" # cancel if requested

+           if result is None:
+               continue

+           if isinstance(result, Exception):
+               # Unexpected exception
+               error_msg = f"Unexpected error: {str(result)}"
+               logger.error(f"Error downloading {doi}: {error_msg}")
+               failed_dois.append(f'<a href="https://doi.org/{doi}">{doi}</a> - {error_msg}')

+           elif result[0] is None:
+               # Failed download (result from download_single_doi_async)
+               error_msg = result[1]
+               logger.warning(f"Failed to download {doi}: {error_msg}")
+               failed_dois.append(f'<a href="https://doi.org/{doi}">{doi}</a> - {error_msg}')

+           else:
+               # Successful download
+               filepath = result[0]

+               # Unique filename for zip
+               filename = f"{str(doi).replace('/', '_').replace('.', '_')}_{i}.pdf"
+               filepath_unique = os.path.join(self.output_dir, filename)
+               os.rename(filepath, filepath_unique)
+               downloaded_files.append(filepath_unique)
+               downloaded_links.append(f'<a href="https://doi.org/{doi}">{doi}</a>')

        if downloaded_files:
            zip_filename = 'papers.zip'
            loop = asyncio.get_running_loop()
            loop.run_in_executor(self.executor, lambda: self.create_zip(zip_filename,downloaded_files))
            logger.info(f"ZIP file created: {zip_filename}")

-       return zip_filename, "\n".join(downloaded_links), "\n".join(failed_dois),""
+       return zip_filename, "\n".join(downloaded_links), "\n".join(failed_dois), ""

    def create_zip(self, zip_filename, files):
        """Creates a zip archive with the downloaded PDFs"""
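create_zip is called above but its body is unchanged and not part of this diff; judging by its signature and docstring, it presumably wraps the standard zipfile module along these lines (an assumed sketch, not the actual implementation):

import os
import zipfile

def create_zip(zip_filename, files):
    """Creates a zip archive containing the downloaded PDFs."""
    with zipfile.ZipFile(zip_filename, 'w', zipfile.ZIP_DEFLATED) as zf:
        for path in files:
            # store each PDF under its basename for a flat archive layout
            zf.write(path, arcname=os.path.basename(path))
    return zip_filename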
@@ -531,31 +526,35 @@ def create_gradio_interface():


    def update_progress( message="", logs=""):
+       return gr.Textbox.update(value=f"{message}"),gr.Textbox.update(value=f"<pre>{logs}</pre>")


    async def download_papers(bib_file, doi_input, dois_input, output_file, downloaded_dois_textbox,failed_dois_textbox,logs, single_file):
+       cancel_event = asyncio.Event() # Create a cancellation event for every submission.
+       downloader.cancel_event = cancel_event # store the event so it is available to stop the process
+       if bib_file:
+           # Check file type
            if not bib_file.name.lower().endswith('.bib'):
+               return None, "Error: Please upload a .bib file", "Error: Please upload a .bib file", "", None # Added for consistent return values

+           zip_file, downloaded_dois, failed_dois, logs_text= await downloader.process_bibtex(bib_file, cancel_event)

+           return zip_file, downloaded_dois, failed_dois, logs_text, None # return a 5-value tuple, as the outputs require

+       elif doi_input:
-           filepath, message, error= await downloader._download_single_doi(doi_input,cancel_event)
-           return None, message, error, "", filepath
+           filepath, message, error= await downloader._download_single_doi(doi_input, cancel_event)
+           return None, message, error,"", filepath # return the single-DOI result here

+       elif dois_input:
-           zip_file, downloaded_dois, failed_dois, logs_text= await downloader.download_multiple_dois(dois_input,cancel_event)
+           zip_file, downloaded_dois, failed_dois, logs_text = await downloader.download_multiple_dois(dois_input, cancel_event)

+           return zip_file, downloaded_dois, failed_dois, logs_text, None # send None for the single-file output

+       else:
+           return None, "Please provide a .bib file, a single DOI, or a list of DOIs", "Please provide a .bib file, a single DOI, or a list of DOIs", "", None # must also return five values to satisfy the Gradio outputs

    with gr.Blocks(theme="Hev832/Applio", css="""
    .gradio-container {
@@ -614,11 +613,11 @@ def create_gradio_interface():
        stop_button.click(lambda: downloader.cancel_download(), outputs=None) # added function in object downloader

        submit_button.click(
+           download_papers,
+           inputs=[bib_file, doi_input, dois_input],
+           outputs=[output_file, downloaded_dois_textbox, failed_dois_textbox, logs, single_file] # the new output should be a tuple and we output logs too for debugging.
        )

        interface.title="🔬 Academic Paper Batch Downloader"
        interface.description="Upload a BibTeX file or enter DOIs to download PDFs. We'll attempt to fetch PDFs from multiple sources like Sci-Hub, Libgen, Google Scholar and Crossref. You can use any of the three inputs at any moment."
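stop_button.click invokes downloader.cancel_download(), and download_papers stores each submission's asyncio.Event on the downloader, but cancel_download itself is outside this diff. A minimal sketch of how the two pieces presumably fit together (assumed, simplified):

class PaperDownloader:
    # Only the cancellation plumbing is sketched; the real __init__ sets up
    # more state (output_dir, executor, ...).
    def __init__(self):
        self.cancel_event = None  # replaced per submission by download_papers

    def cancel_download(self):
        # Setting the event makes the download loops bail out at their next
        # cancel_event.is_set() check and report "Download Cancelled".
        if self.cancel_event is not None:
            self.cancel_event.set()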