import gradio as gr
from transformers import pipeline

# Load the text summarization pipeline
summarizer = pipeline("summarization", model="astro21/bart-cls")

chunk_counter = 0


def summarize_text(input_text):
    global chunk_counter  # Use a global variable to keep track of the chunk number
    chunk_counter = 0  # Reset the chunk counter for each new request

    # Split the input text into chunks with a maximum size of 512 characters
    max_chunk_size = 512
    chunks = [input_text[i:i + max_chunk_size] for i in range(0, len(input_text), max_chunk_size)]

    summarized_chunks = []
    for chunk in chunks:
        chunk_counter += 1
        print(f"Chunk {chunk_counter}:")  # Log the chunk number
        # Summarize each chunk
        summarized_chunk = summarizer(chunk, max_length=128, min_length=64, do_sample=False)[0]["summary_text"]
        summarized_chunks.append(summarized_chunk)

    # Concatenate the chunk summaries
    summarized_text = "\n".join(summarized_chunks)

    return summarized_text


def summarize_text_files(files):
    # With type="binary", each uploaded file arrives as raw bytes
    if not files:
        return ""
    content = ""
    for file_bytes in files:
        content += file_bytes.decode("utf-8")
    return summarize_text(content)


input_type = gr.File(label="Upload Text Files", file_count="multiple", type="binary")
demo = gr.Interface(fn=summarize_text_files, inputs=input_type, outputs="text", live=True)

demo.launch()
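
# --- Optional local sanity check (a minimal sketch, not part of the app) ---
# The commented lines below assume you want to verify the chunking and
# summarization logic from a terminal without starting the Gradio server;
# "sample.txt" is a hypothetical file name used only for illustration.
# Uncomment and run them instead of (or before) demo.launch().
#
# with open("sample.txt", "r", encoding="utf-8") as f:
#     sample_text = f.read()
# print(summarize_text(sample_text))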