# NOTE: this file was recovered from a Hugging Face Spaces web page; the
# original paste carried page artifacts here (status text, git blame hashes,
# and a line-number gutter). They were not part of the source and were removed.
import streamlit as st
import pandas as pd
from db import insert_data_if_empty, get_mongo_client
from chatbot import chatbot_response  # AI reply + sentiment analysis helper

# 1. Ensure data is inserted before display.
insert_data_if_empty()

# 2. MongoDB connection (collection handle used by the optional views below).
collection = get_mongo_client()

# 3. Streamlit app UI.
st.title("π AI Sentiment Analysis Chatbot")

# Optional: show first 5 rows from MongoDB (currently disabled).
#st.subheader("First 5 Rows from Database")
#data = list(collection.find({}, {"_id": 0}).limit(5))
#if data:
#    st.write(pd.DataFrame(data))
#else:
#    st.warning("β οΈ No data found. Try refreshing the app.")

# Optional: button to show the full MongoDB collection (currently disabled).
#if st.button("Show Complete Data"):
#    all_data = list(collection.find({}, {"_id": 0}))
#    st.write(pd.DataFrame(all_data))

# 4. AI chatbot with sentiment analysis.
st.subheader("π€ AI Chatbot with Sentiment Analysis")

# User input for the chatbot / sentiment analyzer.
user_prompt = st.text_area("Ask AI something or paste text for sentiment analysis:")

if st.button("Analyze Sentiment & Get AI Response"):
    ai_response, sentiment_label, confidence = chatbot_response(user_prompt)
    if ai_response:
        st.write("### AI Response:")
        st.write(ai_response)
        st.write("### Sentiment Analysis:")
        # BUGFIX: confidence is None when analyze_sentiment() failed (it then
        # returns an error string as the label and None as the score), and the
        # original f"{confidence:.2f}" crashed with a TypeError in that case.
        if confidence is not None:
            st.write(f"**Sentiment:** {sentiment_label} ({confidence:.2f} confidence)")
        else:
            st.write(f"**Sentiment:** {sentiment_label}")
    else:
        st.warning("β οΈ Please enter a question or text for sentiment analysis.")
#chatbot.py
import os
import streamlit as st
import google.generativeai as genai
from transformers import pipeline, AutoModelForSequenceClassification, AutoTokenizer

# Fetch the Gemini API key from the environment (set via Hugging Face Secrets).
GEMINI_API_KEY = os.getenv("gemini_api")
if GEMINI_API_KEY:
    genai.configure(api_key=GEMINI_API_KEY)
else:
    st.error("β οΈ Google API key is missing! Set it in Hugging Face Secrets.")

# Sentiment model: 3-class (negative / neutral / positive) Twitter RoBERTa.
MODEL_NAME = "cardiffnlp/twitter-roberta-base-sentiment"

# BUGFIX: pre-bind the pipeline so a failed load leaves it as None instead of
# undefined — analyze_sentiment() then reports a handled error rather than
# crashing with a NameError on first use.
sentiment_pipeline = None
try:
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    sentiment_pipeline = pipeline("sentiment-analysis", model=MODEL_NAME, tokenizer=tokenizer)
except Exception as e:
    st.error(f"β Error loading sentiment model: {e}")
# Function to analyze sentiment
def analyze_sentiment(text):
    """Classify *text* with the module-level sentiment pipeline.

    Returns a ``(label, score)`` tuple where ``label`` is one of
    ``"Negative"`` / ``"Neutral"`` / ``"Positive"`` (or ``"Unknown"`` for an
    unexpected model label) and ``score`` is the model's confidence.
    On any failure, returns ``(error-message string, None)`` instead of raising.
    """
    try:
        result = sentiment_pipeline(text)[0]
        # Translate the model's raw LABEL_* tags into readable names.
        readable_names = {
            "LABEL_0": "Negative",
            "LABEL_1": "Neutral",
            "LABEL_2": "Positive",
        }
        return readable_names.get(result["label"], "Unknown"), result["score"]
    except Exception as e:
        return f"Error analyzing sentiment: {e}", None
# Function to generate AI response & analyze sentiment
def chatbot_response(user_prompt):
    """Produce a Gemini reply plus a sentiment reading for *user_prompt*.

    Returns ``(ai_text, sentiment_label, confidence)``. All three are ``None``
    when the prompt is empty/falsy; on any other failure the first element is
    an error-message string and the other two are ``None``.
    """
    if not user_prompt:
        return None, None, None
    try:
        # Ask Gemini for a reply, then score the *prompt's* sentiment.
        gemini = genai.GenerativeModel("gemini-1.5-pro")
        reply = gemini.generate_content(user_prompt)
        label, score = analyze_sentiment(user_prompt)
        return reply.text, label, score
    except Exception as e:
        return f"β Error: {e}", None, None