# BookNLP-Demo / app.py
# Hugging Face Space by drewThomasson (commit e18eff0, "Update app.py")
import spacy
import os
import shutil
import gradio as gr
from booknlp.booknlp import BookNLP
from spacy.cli import download
# This will download the BookNLP model files using my Hugging Face backup
import download_missing_booknlp_models
# Make sure the Spacy model BookNLP depends on is present.
def ensure_spacy_model():
    """Load ``en_core_web_sm``, downloading it first if it is not installed."""
    model_name = "en_core_web_sm"
    try:
        # Loading succeeds only when the model package is already installed.
        spacy.load(model_name)
    except OSError:
        # spacy raises OSError for a missing model: fetch it via the CLI helper.
        download(model_name)
# Initialize Spacy model (downloads it on first run if missing).
ensure_spacy_model()

# Initialize BookNLP model once at import time so every request reuses the
# same loaded pipeline instead of paying the model-load cost per upload.
model_params = {
    # Full annotation pipeline: entities, quotes, supersenses, events, coref.
    "pipeline": "entity,quote,supersense,event,coref",
    # "big" selects the larger, more accurate BookNLP model variant.
    "model": "big"
}
booknlp = BookNLP("en", model_params)
# Process an uploaded book through BookNLP and hand back a zip of the results.
def process_book(file):
    """Run BookNLP over an uploaded .txt file and zip the output.

    Args:
        file: Gradio upload object; ``file.name`` is the path of the
            uploaded file on disk.

    Returns:
        Path to a zip archive containing every BookNLP output file.
    """
    input_file = file.name
    output_dir = "output_dir/booknlp_output/"
    # BookNLP prefixes every output file with this id: the upload's base
    # filename without its extension.
    book_id = os.path.splitext(os.path.basename(input_file))[0]

    # Start from a clean directory so files from a previous upload never
    # leak into this run's archive.
    if os.path.exists(output_dir):
        shutil.rmtree(output_dir)
    os.makedirs(output_dir)

    # Run the configured BookNLP pipeline.
    booknlp.process(input_file, output_dir, book_id)

    # Create the zip OUTSIDE the directory being archived. The original code
    # wrote it inside output_dir, so make_archive's directory walk could pick
    # up the partially written archive itself.
    archive_base = os.path.join("output_dir", f"{book_id}_output")
    # make_archive returns the full path of the archive it created.
    return shutil.make_archive(archive_base, "zip", output_dir)
# Gradio Interface
def gradio_interface():
    """Build and launch the Gradio app for uploading and processing books."""
    # gr.inputs.File / gr.outputs.File were deprecated in Gradio 3 and removed
    # in Gradio 4; the unified gr.File component works as both input and output.
    file_input = gr.File(file_types=[".txt"], label="Upload a .txt file")
    file_output = gr.File(label="Download the output files")
    gr.Interface(
        fn=process_book,
        inputs=file_input,
        outputs=file_output,
        title="BookNLP Processor",
        description="Upload a .txt book and process it using BookNLP. Download the processed output files.",
    ).launch()
# Script entry point: launch the web UI when run directly.
if __name__ == "__main__":
    gradio_interface()