import re

import gradio as gr
import textstat
from langdetect import detect
from langdetect.lang_detect_exception import LangDetectException
from transformers import pipeline

# Load a summarization model once at startup (default model weights are
# downloaded on first use).
summarizer = pipeline("summarization")


def text_analysis(text):
    """Analyze text: language, sentence/word/character counts, readability.

    Args:
        text: Raw user input from the textbox.

    Returns:
        A dict of labeled metrics. Empty or whitespace-only input yields
        zeroed metrics instead of letting textstat/langdetect raise.
    """
    if not text or not text.strip():
        return {
            "Language": "unknown",
            "Sentences": 0,
            "Words": 0,
            "Characters": 0,
            "Readability (Flesch Reading Ease)": 0.0,
        }

    words = re.findall(r"\w+", text.lower())
    # Count non-empty fragments rather than len(split) - 1: the subtraction
    # undercounted texts that do not end with terminal punctuation
    # ("Hi. Bye" -> 1 instead of 2).
    sentences = [s for s in re.split(r"[.!?]+", text) if s.strip()]
    num_words = len(words)
    # Characters within words only — spaces and punctuation excluded.
    num_chars = len("".join(words))
    reading_ease = textstat.flesch_reading_ease(text)

    try:
        language = detect(text)
    except LangDetectException:
        # detect() raises on input with no detectable language features
        # (e.g. digits or symbols only); report "unknown" instead of crashing.
        language = "unknown"

    return {
        "Language": language,
        "Sentences": len(sentences),
        "Words": num_words,
        "Characters": num_chars,
        "Readability (Flesch Reading Ease)": reading_ease,
    }


def text_summarization(text):
    """Summarize text with the transformer pipeline.

    Empty/whitespace input is short-circuited: the pipeline errors on inputs
    shorter than min_length, so return an empty string instead.
    """
    if not text or not text.strip():
        return ""
    result = summarizer(text, max_length=130, min_length=30, do_sample=False)
    return result[0]["summary_text"]


# Define interfaces for each function.
text_analysis_interface = gr.Interface(
    fn=text_analysis,
    inputs=gr.Textbox(lines=4, placeholder="Type something here..."),
    outputs=gr.JSON(label="Text Analysis"),
)
text_summarization_interface = gr.Interface(
    fn=text_summarization,
    inputs=gr.Textbox(lines=4, placeholder="Type something here..."),
    outputs="text",
)

# Combine interfaces using gr.Parallel: both run on the same input.
iface = gr.Parallel(text_analysis_interface, text_summarization_interface)

# Launch the app
if __name__ == "__main__":
    iface.launch()