import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
import torch.nn.functional as F

# Load model and tokenizer
model_name = "cardiffnlp/twitter-roberta-base-sentiment"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Label map for 3-level sentiment
labels = ['Negative', 'Neutral', 'Positive']

def advanced_sentiment_analysis(text):
    # Tokenize input
    inputs = tokenizer(text, return_tensors="pt", truncation=True)

    # Get model logits
    with torch.no_grad():
        logits = model(**inputs).logits

    # Convert logits to probabilities
    probs = F.softmax(logits, dim=1)[0]

    # Format result
    results = ""
    for i, prob in enumerate(probs):
        results += f"{labels[i]}: {prob.item() * 100:.2f}%\n"

    return results.strip()

# Gradio UI
with gr.Blocks() as demo:
| gr.Markdown("### Welcome, please enter a sample of what you may respond or tell a customer, let's tell you how cool it is") | |
    text_input = gr.Textbox(lines=4, placeholder="Type your message here...", label="Customer Message")
    output = gr.Textbox(label="Sentiment Analysis Result")
    analyze_button = gr.Button("Analyze Sentiment")

    analyze_button.click(fn=advanced_sentiment_analysis, inputs=text_input, outputs=output)

demo.launch()
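
To sanity-check the scoring function without launching the Gradio UI, it can be called directly from a Python shell. The snippet below is a minimal sketch assuming the script above has already been run or imported; the sample message is purely illustrative, and the first call will download and cache the model weights from the Hugging Face Hub.

# Quick local test of the scoring function (sample input is illustrative).
sample = "Thanks for reaching out, we'll get this sorted for you today!"
print(advanced_sentiment_analysis(sample))
# Prints one line per label, e.g. "Negative: ...%", "Neutral: ...%", "Positive: ...%"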