Build error
Provided More Models For Selection
app.py CHANGED
@@ -1,32 +1,62 @@
 import streamlit as st
 from transformers import pipeline
 
-#
+# Assume you have fine-tuned models and their names are listed here
 available_models = [
     "facebook/t5-small",
     "google/pegasus-xsum",
     "sshleifer/distilbart-cnn-12-6",
+    "your_fine_tuned_news_model",  # Replace with your fine-tuned model name
+    "your_fine_tuned_long_doc_model",  # Replace with another fine-tuned model name
+    # Add more of your fine-tuned models here
 ]
 
 @st.cache_resource
 def load_summarizer(model_name):
     """Loads the summarization pipeline for a given model."""
-    summarizer = pipeline("summarization", model=model_name)
-    return summarizer
+    try:
+        summarizer = pipeline("summarization", model=model_name)
+        return summarizer
+    except Exception as e:
+        st.error(f"Error loading model {model_name}: {e}")
+        return None
 
-st.title("Text Summarization App")
+st.title("Advanced Text Summarization App")
 
 text_to_summarize = st.text_area("Enter text to summarize:", height=300)
 
 selected_model = st.selectbox("Choose a summarization model:", available_models)
 
+# Parameters for controlling summarization
+max_length = st.sidebar.slider("Max Summary Length:", min_value=50, max_value=500, value=150)
+min_length = st.sidebar.slider("Min Summary Length:", min_value=10, max_value=250, value=30)
+temperature = st.sidebar.slider("Temperature (for sampling):", min_value=0.0, max_value=1.0, value=0.0, step=0.01, help="Higher values make the output more random.")
+repetition_penalty = st.sidebar.slider("Repetition Penalty:", min_value=1.0, max_value=2.5, value=1.0, step=0.01, help="Penalizes repeated tokens to improve coherence.")
+num_beams = st.sidebar.slider("Number of Beams (for beam search):", min_value=1, max_value=10, value=1, help="More beams improve quality but increase computation.")
+do_sample = st.sidebar.checkbox("Enable Sampling?", value=False, help="Whether to use sampling; set to False for deterministic output.")
+
 if st.button("Summarize"):
     if text_to_summarize:
-
-
-
-
-
+        summarizer = load_summarizer(selected_model)
+        if summarizer:
+            with st.spinner(f"Summarizing using {selected_model}..."):
+                try:
+                    summary = summarizer(
+                        text_to_summarize,
+                        max_length=max_length,
+                        min_length=min_length,
+                        do_sample=do_sample,
+                        temperature=temperature if do_sample else None,
+                        repetition_penalty=repetition_penalty,
+                        num_beams=num_beams if not do_sample else 1,  # Beam search is usually not used with sampling
+                        early_stopping=True,
+                    )[0]['summary_text']
+                    st.subheader("Summary:")
+                    st.write(summary)
+                except Exception as e:
+                    st.error(f"Error during summarization: {e}")
+        else:
+            st.warning("Failed to load the selected model.")
     else:
         st.warning("Please enter some text to summarize.")
 
@@ -34,5 +64,6 @@ st.sidebar.header("About")
 st.sidebar.info(
     "This app uses the `transformers` library from Hugging Face "
     "to perform text summarization. You can select from various "
-    "pre-trained models."
+    "pre-trained and potentially fine-tuned models. Experiment with "
+    "the parameters in the sidebar to control the summarization process."
 )
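
The new "your_fine_tuned_news_model" and "your_fine_tuned_long_doc_model" entries are placeholders: pipeline("summarization", model=...) resolves each name against the Hugging Face Hub (or a local path), so any entry that does not exist will fail inside load_summarizer the first time a user selects it. One way to catch that before pushing to the Space is to smoke-test every entry in available_models outside Streamlit. The sketch below is illustrative, not part of the commit; the script name check_models.py and the sample text are assumptions, and the placeholder entries are expected to show up as failures until real repo IDs or local paths are substituted.

# check_models.py -- illustrative pre-deployment smoke test (not part of the Space)
from transformers import pipeline

# Mirror the list from app.py; placeholder entries will (correctly) be reported
# as failures until they point at real Hub repo IDs or local model paths.
available_models = [
    "facebook/t5-small",
    "google/pegasus-xsum",
    "sshleifer/distilbart-cnn-12-6",
]

SAMPLE = (
    "Streamlit lets you turn Python scripts into shareable web apps. "
    "This paragraph exists only to give the summarization models some input to condense."
)

for name in available_models:
    try:
        summarizer = pipeline("summarization", model=name)
        summary = summarizer(SAMPLE, max_length=40, min_length=5, do_sample=False)[0]["summary_text"]
        print(f"OK   {name}: {summary!r}")
    except Exception as exc:
        # Any entry that fails here would also fail inside load_summarizer() at runtime.
        print(f"FAIL {name}: {exc}")

Running the app locally with streamlit run app.py (with streamlit, transformers, and a backend such as torch installed) exercises the same model-loading path interactively and is a reasonable way to reproduce loading problems before they surface in the Space.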