import streamlit as st
from transformers import pipeline

available_models = [
    "t5-small",
    "t5-base",
    "t5-large",
    "google/pegasus-xsum",
    "google/pegasus-cnn_dailymail",
    "sshleifer/distilbart-cnn-12-6",
    "allenai/led-base-16384",
    "google/mt5-small",
    "google/mt5-base",
    # Add more models as needed
]


@st.cache_resource
def load_summarizer(model_name):
    """Loads the summarization pipeline for a given model from Hugging Face."""
    try:
        summarizer = pipeline("summarization", model=model_name)
        return summarizer
    except Exception as e:
        st.error(f"Error loading model {model_name}: {e}")
        return None


st.title("Hugging Face Text Summarization App")

text_to_summarize = st.text_area("Enter text to summarize:", height=300)
selected_model = st.selectbox("Choose a summarization model from Hugging Face:", available_models)

st.sidebar.header("Summarization Parameters")
max_length = st.sidebar.slider("Max Summary Length:", min_value=50, max_value=500, value=150)
min_length = st.sidebar.slider("Min Summary Length:", min_value=10, max_value=250, value=30)
temperature = st.sidebar.slider(
    "Temperature (for sampling):", min_value=0.1, max_value=1.0, value=1.0, step=0.01,
    help="Higher values make the output more random. Only used when sampling is enabled.",
)
repetition_penalty = st.sidebar.slider(
    "Repetition Penalty:", min_value=1.0, max_value=2.5, value=1.0, step=0.01,
    help="Penalizes repeated tokens to reduce repetition.",
)
num_beams = st.sidebar.slider(
    "Number of Beams (for beam search):", min_value=1, max_value=10, value=1,
    help="More beams can improve quality but increase computation.",
)
do_sample = st.sidebar.checkbox(
    "Enable Sampling?", value=False,
    help="Whether to use sampling; leave unchecked for deterministic output.",
)

if st.button("Summarize"):
    if text_to_summarize:
        summarizer = load_summarizer(selected_model)
        if summarizer:
            with st.spinner(f"Summarizing using {selected_model}..."):
                try:
                    # Build generation kwargs conditionally so sampling-only options
                    # are not passed when sampling is disabled, and vice versa.
                    generate_kwargs = {
                        "max_length": max_length,
                        "min_length": min_length,
                        "do_sample": do_sample,
                        "repetition_penalty": repetition_penalty,
                        "truncation": True,  # truncate over-long inputs instead of erroring
                    }
                    if do_sample:
                        generate_kwargs["temperature"] = temperature
                        generate_kwargs["num_beams"] = 1  # beam search is usually not combined with sampling
                    else:
                        generate_kwargs["num_beams"] = num_beams
                        generate_kwargs["early_stopping"] = True  # only relevant to beam search
                    summary = summarizer(text_to_summarize, **generate_kwargs)[0]["summary_text"]
                    st.subheader("Summary:")
                    st.write(summary)
                except Exception as e:
                    st.error(f"Error during summarization: {e}")
        else:
            st.warning("Failed to load the selected model.")
    else:
        st.warning("Please enter some text to summarize.")

st.sidebar.header("About")
st.sidebar.info(
    "This app uses the `transformers` library from Hugging Face "
    "to perform text summarization. You can select from a variety of "
    "pre-trained models available on the Hugging Face Model Hub. "
    "Experiment with the parameters in the sidebar to control the "
    "summarization process."
)
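
# A minimal sketch of how to launch the app locally, assuming this script is
# saved as app.py (the filename is not specified in the source) and that
# PyTorch is the installed backend for `transformers`:
#
#     pip install streamlit transformers torch
#     streamlit run app.py
#
# The first summarization with a given model downloads its weights from the
# Hugging Face Hub, so the initial run may take a while.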