import gradio as gr
from collections import Counter
import re
import textstat
from transformers import pipeline
from langdetect import detect
# Load a summarization model
summarizer = pipeline("summarization")
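# Note: no model is passed here, so transformers falls back to its default summarization
# checkpoint (a distilled BART model at the time of writing) and logs a warning about the implicit choice.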
def text_analysis(text):
    # Analyze text: word count, character count, language detection, and readability
    words = re.findall(r'\w+', text.lower())
    sentences = re.split(r'[.!?]+', text)
    # Count only non-empty segments so trailing punctuation (or its absence) does not skew the total
    num_sentences = len([s for s in sentences if s.strip()])
    num_words = len(words)
    num_chars = len("".join(words))
    # Flesch Reading Ease: higher scores (roughly 0-100) mean easier-to-read text
    reading_ease = textstat.flesch_reading_ease(text)
    language = detect(text)
    # Format the results
    return {
        "Language": language,
        "Sentences": num_sentences,
        "Words": num_words,
        "Characters": num_chars,
        "Readability (Flesch Reading Ease)": reading_ease
    }
def text_summarization(text):
    # Summarize text using the transformer model
    summary = summarizer(text, max_length=130, min_length=30, do_sample=False)[0]['summary_text']
    return summary
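# Caveat (depends on the underlying checkpoint): summarization models have a fixed maximum input
# length, so very long texts may be truncated or raise an error; shortening the input before
# calling the pipeline is a reasonable safeguard.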
# Define interfaces for each function
text_analysis_interface = gr.Interface(fn=text_analysis,
                                       inputs=gr.Textbox(lines=4, placeholder="Type something here..."),
                                       outputs=gr.JSON(label="Text Analysis"))
text_summarization_interface = gr.Interface(fn=text_summarization,
                                            inputs=gr.Textbox(lines=4, placeholder="Type something here..."),
                                            outputs="text")
# Combine interfaces using gr.Parallel
iface = gr.Parallel(text_analysis_interface, text_summarization_interface)
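# gr.Parallel renders the combined app with a single shared input box: the same text is sent to
# both text_analysis and text_summarization, and their outputs are displayed together.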
# Launch the app
if __name__ == "__main__":
    iface.launch()