pavishnikarthikeyan committed on
Commit
84929e1
·
verified ·
1 Parent(s): b166c3b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -55
app.py CHANGED
@@ -1,73 +1,24 @@
1
  import torch
2
  import gradio as gr
3
  from transformers import pipeline
4
- import logging
5
-
6
- # Set up logging
7
- logging.basicConfig(level=logging.INFO)
8
 
9
  # Use a pipeline as a high-level helper
10
  device = 0 if torch.cuda.is_available() else -1
11
- text_summary = pipeline("summarization", model="facebook/bart-large-cnn", device=device, torch_dtype=torch.bfloat16)
12
-
13
- # Function for summarization with enhancements
14
- def summary(input, summary_type="medium"):
15
- # Check for empty input
16
- if not input.strip():
17
- return "Error: Please provide some text to summarize."
18
-
19
- # Calculate input length
20
- input_length = len(input.split())
21
- logging.info(f"Input length: {input_length} words")
22
-
23
- # Handle input that's too short
24
- if input_length < 10:
25
- return "Error: Input is too short. Please provide at least 10 words."
26
 
27
- # Handle input that's too long for the model
28
- if input_length > 512:
29
- return "Warning: Input exceeds the model's limit of 512 tokens. Please shorten the input text."
30
-
31
- # Adjust max/min lengths based on the summary type
32
- if summary_type == "short":
33
- max_output_tokens = max(10, input_length // 4)
34
- elif summary_type == "medium":
35
- max_output_tokens = max(20, input_length // 2)
36
- elif summary_type == "long":
37
- max_output_tokens = max(30, (3 * input_length) // 4)
38
- min_output_tokens = max(10, input_length // 6)
39
-
40
- # Generate summary
41
- output = text_summary(input, max_length=max_output_tokens, min_length=min_output_tokens, truncation=True)
42
  return output[0]['summary_text']
43
 
44
- # Function to save the output summary to a file
45
- def save_summary(summary_text):
46
- """Save the summarized text to a file."""
47
- with open("summary_output.txt", "w") as file:
48
- file.write(summary_text)
49
- return "Summary saved to 'summary_output.txt'."
50
-
51
- # Gradio interface setup
52
  gr.close_all()
53
 
54
  # Create the Gradio interface
55
  demo = gr.Interface(
56
  fn=summary,
57
- inputs=[
58
- gr.Textbox(label="INPUT THE PASSAGE TO SUMMARIZE", lines=15, placeholder="Paste your text here."),
59
- gr.Dropdown(["short", "medium", "long"], label="SUMMARY LENGTH", value="medium")
60
- ],
61
- outputs=[
62
- gr.Textbox(label="SUMMARIZED TEXT", lines=10, placeholder="Your summarized text will appear here."),
63
- gr.Button("Save Summary", click=save_summary)
64
- ],
65
  title="PAVISHINI @ GenAI Project 1: Text Summarizer",
66
- description=(
67
- "This application summarizes input text. "
68
- "The output length can be short, medium, or long based on your selection."
69
- ),
70
- live=True
71
  )
72
 
73
  demo.launch()
 
1
  import torch
2
  import gradio as gr
3
  from transformers import pipeline
 
 
 
 
4
 
5
# Summarization pipeline: run on the first GPU when CUDA is present, else CPU.
if torch.cuda.is_available():
    device = 0
else:
    device = -1

# High-level transformers pipeline; bfloat16 halves the model's memory footprint.
text_summary = pipeline(
    "summarization",
    model="Falconsai/text_summarization",
    device=device,
    torch_dtype=torch.bfloat16,
)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
 
9
def summary(input):
    """Summarize the given passage with the pre-loaded summarization pipeline.

    Args:
        input: The passage to summarize (plain text).

    Returns:
        The summary string, or a human-readable error message when the
        input is empty or whitespace-only.
    """
    # Guard: the pipeline raises on empty text; return a readable message
    # instead so the Gradio output box shows something useful.
    if not input or not input.strip():
        return "Error: Please provide some text to summarize."
    # truncation=True keeps passages longer than the model's context limit
    # from raising — the pipeline clips them to the maximum length instead.
    output = text_summary(input, truncation=True)
    return output[0]['summary_text']
12
 
 
 
 
 
 
 
 
 
13
# Tear down any interfaces left over from a previous run before serving.
gr.close_all()

# Wire the summarizer into a simple one-input / one-output Gradio UI.
demo = gr.Interface(
    fn=summary,
    inputs=[gr.Textbox(label="INPUT THE PASSAGE TO SUMMARIZE", lines=10)],
    outputs=[gr.Textbox(label="SUMMARIZED TEXT", lines=4)],
    title="PAVISHINI @ GenAI Project 1: Text Summarizer",
    description="This application is used to summarize the text",
)

demo.launch()