aryan79 committed
Commit eba2294 · verified · 1 Parent(s): d69b456

Create app.py

Files changed (1):
  app.py  +37  -0
app.py ADDED
@@ -0,0 +1,37 @@
+ import gradio as gr
+ from transformers import pipeline
+
+ # Load the summarization model pipeline
+ summarizer = pipeline("summarization", model="Falconsai/text_summarization")
+
+ # Define the function to handle dynamic chunking of long text
+ def summarize_text(text):
+     # Maximum chunk size, measured here in characters for simplicity
+     max_chunk_size = 1024  # Can be adjusted based on the model's token limit (often 1024-2048 tokens)
+
+     # Split the text into chunks if it's longer than the max chunk size
+     text_chunks = []
+     if len(text) > max_chunk_size:
+         # Use the maximum chunk size for each slice of the long input
+         chunk_size = max_chunk_size
+         text_chunks = [text[i:i + chunk_size] for i in range(0, len(text), chunk_size)]
+     else:
+         # If the text is small enough, use it as one chunk
+         text_chunks = [text]
+
+     # Summarize each chunk
+     summaries = [summarizer(chunk)[0]['summary_text'] for chunk in text_chunks]
+
+     # Combine all summaries into one
+     full_summary = " ".join(summaries)
+     return full_summary
+
+ # Set up the Gradio interface
+ interface = gr.Interface(fn=summarize_text,
+                          inputs="text",
+                          outputs="text",
+                          title="Text Summarizer",
+                          description="Enter long text to get a detailed summarized version.")
+
+ # Launch the Gradio interface
+ interface.launch(share=True)
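
Note: the character-based chunking in the commit is only a proxy for the model's real token limit, as its inline comment acknowledges. Below is a minimal sketch (not part of the commit) of token-aware chunking using the same Falconsai/text_summarization checkpoint; the function name summarize_by_tokens and the 512-token window are illustrative assumptions, not values confirmed by the source.

from transformers import AutoTokenizer, pipeline

tokenizer = AutoTokenizer.from_pretrained("Falconsai/text_summarization")
summarizer = pipeline("summarization", model="Falconsai/text_summarization")

def summarize_by_tokens(text, max_tokens=512):
    # Tokenize once, then slice the token ids into windows within the assumed limit
    ids = tokenizer.encode(text, add_special_tokens=False)
    chunks = [tokenizer.decode(ids[i:i + max_tokens]) for i in range(0, len(ids), max_tokens)]
    # Summarize each window and join the partial summaries, mirroring summarize_text above
    return " ".join(summarizer(chunk)[0]["summary_text"] for chunk in chunks)

With this variant the chunk boundaries follow the tokenizer rather than raw character counts, so no window exceeds the assumed token budget regardless of how the input text tokenizes.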