from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline

# Load model and tokenizer
model_path = "./model"  # Load from local directory to avoid connection issues
model = AutoModelForSequenceClassification.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)

# Define sentiment analysis pipeline
sentiment_analyzer = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)


def chatbot_response(text):
    """Analyze sentiment using RoBERTa model."""
    if not text.strip():
        return "Invalid input. Please enter text."

    result = sentiment_analyzer(text)[0]
    label = result["label"]
    score = round(result["score"], 2)

    return f"{label} (Confidence: {score})"
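

# Example usage (a minimal sketch; assumes "./model" holds a sentiment-classification
# checkpoint such as a locally saved RoBERTa model. The exact label strings, e.g.
# "POSITIVE"/"NEGATIVE" vs. "LABEL_0"/"LABEL_1", depend on that checkpoint's config.)
if __name__ == "__main__":
    print(chatbot_response("I love this product!"))  # e.g. "POSITIVE (Confidence: 0.98)"
    print(chatbot_response("   "))                   # "Invalid input. Please enter text."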