waqasali1707 committed on
Commit cdf15fe · verified · 1 Parent(s): 769beea

Update app.py

Files changed (1)
  1. app.py +32 -39
app.py CHANGED
@@ -1,41 +1,34 @@
  import streamlit as st
  import torch
- from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, GenerationConfig
-
- # Your Hugging Face token
- hf_token = "HUGGINGFACE_TOKEN"
-
- # Update this to your model's Hugging Face path
- model_name = "waqasali1707/Bart-Base-Summarization"
-
- try:
-     tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=hf_token)
-     model = AutoModelForSeq2SeqLM.from_pretrained(model_name, use_auth_token=hf_token)
- except Exception as e:
-     st.error(f"Failed to load model: {e}")
-
- # Streamlit UI
- st.title("Text Summarizer")
- text = st.text_area("Enter the text to generate its Summary:")
-
- # Configuration for generation
- generation_config = GenerationConfig(max_new_tokens=100, do_sample=True, temperature=0.7)
-
- if text:
-     try:
-         # Encode input
-         inputs_encoded = tokenizer(text, return_tensors='pt')
-
-         # Generate output
-         with torch.no_grad():
-             model_output = model.generate(inputs_encoded["input_ids"], generation_config=generation_config)[0]
-
-         # Decode output
-         output = tokenizer.decode(model_output, skip_special_tokens=True)
-
-         # Display results in a box with a title
-         with st.expander("Output", expanded=True):
-             st.write(output)
-
-     except Exception as e:
-         st.error(f"An error occurred during summarization: {e}")
 
  import streamlit as st
+ import requests
  import torch
+ from transformers import pipeline
+ from transformers import BartTokenizer, BartForConditionalGeneration
+
+ # Replace with your Hugging Face model repository path
+ model_repo_path = 'waqasali1707/Bart-Base-Summarization'
+
+ # Load the model and tokenizer
+ model = BartForConditionalGeneration.from_pretrained(model_repo_path)
+ tokenizer = BartTokenizer.from_pretrained(model_repo_path)
+
+ # Initialize the summarization pipeline
+ summarizer = pipeline('summarization', model=model, tokenizer=tokenizer)
+
+ # Streamlit app layout
+ st.title("Text Summarization App")
+
+ # User input
+ text_input = st.text_area("Enter text to summarize", height=300)
+
+ # Summarize the text
+ if st.button("Summarize"):
+     if text_input:
+         with st.spinner("Generating summary..."):
+             try:
+                 summary = summarizer(text_input, max_length=150, min_length=30, do_sample=False)
+                 st.subheader("Summary")
+                 st.write(summary[0]['summary_text'])
+             except Exception as e:
+                 st.error(f"Error during summarization: {e}")
+     else:
+         st.warning("Please enter some text to summarize.")