KrSharangrav committed
Commit ad9bf8d · 1 Parent(s): 922ab20

Changes to the csv file and app.py

Files changed (3)
  1. app.py +3 -15
  2. backup.py +31 -14
  3. sentiment140.csv +2 -2
app.py CHANGED
@@ -3,7 +3,6 @@ import pandas as pd
 import requests
 import io
 from pymongo import MongoClient
-from transformers import pipeline
 
 #### **1. MongoDB Connection**
 def get_mongo_client():
@@ -25,24 +24,13 @@ except Exception as e:
     st.error(f"Error loading dataset: {e}")
     st.stop()
 
-#### **3. Sentiment Analysis using BERT-ROBERTA**
-st.info("Running Sentiment Analysis...")
-
-sentiment_pipeline = pipeline("sentiment-analysis", model="cardiffnlp/twitter-roberta-base-sentiment")
-
-# Function to analyze sentiment
-def analyze_sentiment(text):
-    return sentiment_pipeline(text)[0]['label']
-
-df["sentiment"] = df["text"].apply(analyze_sentiment)
-
-#### **4. Upload Data to MongoDB**
+#### **3. Upload Data to MongoDB**
 collection.delete_many({})  # Optional: Clear existing data before inserting
 collection.insert_many(df.to_dict("records"))
 st.success("Data Uploaded to MongoDB!")
 
-#### **5. Build Streamlit Dashboard**
-st.title("📊 Sentiment Analysis Dashboard")
+#### **4. Build Streamlit Dashboard**
+st.title("📊 MongoDB Data Insertion")
 
 # Show first 5 rows from MongoDB
 st.subheader("First 5 Rows from Database")
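With the transformers import and the sentiment step removed, app.py is left with the MongoDB upload and the dashboard preview. For reference, a minimal sketch of the read path the dashboard relies on, reusing the `sentiment_db` database, `tweets` collection, and the `find({}, {"_id": 0}).limit(5)` query from the diff; the helper name and connection URI below are placeholders, not the ones in the repo:

```python
# Minimal sketch (not part of this commit) of the dashboard's read path.
# Database/collection names and the preview query come from the diff;
# the helper name and URI are illustrative placeholders.
import pandas as pd
from pymongo import MongoClient

def get_collection():
    client = MongoClient("mongodb+srv://<user>:<password>@<cluster-host>/")  # placeholder URI
    return client["sentiment_db"]["tweets"]

collection = get_collection()

# Same query the dashboard uses: first five documents, internal _id dropped.
preview = list(collection.find({}, {"_id": 0}).limit(5))
print(pd.DataFrame(preview))
```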
backup.py CHANGED
@@ -1,21 +1,33 @@
+import streamlit as st
+import pandas as pd
+import requests
+import io
 from pymongo import MongoClient
+from transformers import pipeline
 
+#### **1. MongoDB Connection**
 def get_mongo_client():
-    client = MongoClient("mongodb+srv://GMP-21-03:groupa2025@cluster1.u1zed.mongodb.net/?retryWrites=true&w=majority&appName=Cluster1")
+    client = MongoClient("mongodb+srv://groupA:pythongroupA@sentimentcluster.4usfj.mongodb.net/?retryWrites=true&w=majority&appName=SentimentCluster")
     db = client["sentiment_db"]
     return db["tweets"]
 
-#### *3. Load and Process Dataset*
-import pandas as pd
+collection = get_mongo_client()
 
-# Load dataset
-df = pd.read_csv("https://huggingface.co/spaces/sharangrav24/SentimentAnalysis/resolve/main/sentiment140.csv")
+#### **2. Load Dataset from Hugging Face**
+csv_url = "https://huggingface.co/spaces/sharangrav24/SentimentAnalysis/resolve/main/sentiment140.csv"
 
-#### *4. Sentiment Analysis using BERT-ROBERTA*
+try:
+    response = requests.get(csv_url)
+    response.raise_for_status()  # Ensure the request was successful
+    df = pd.read_csv(io.StringIO(response.text), encoding="ISO-8859-1")
+    st.success("Dataset Loaded Successfully!")
+except Exception as e:
+    st.error(f"Error loading dataset: {e}")
+    st.stop()
 
-from transformers import pipeline
+#### **3. Sentiment Analysis using BERT-ROBERTA**
+st.info("Running Sentiment Analysis...")
 
-# Load Hugging Face model
 sentiment_pipeline = pipeline("sentiment-analysis", model="cardiffnlp/twitter-roberta-base-sentiment")
 
 # Function to analyze sentiment
@@ -24,16 +36,21 @@ def analyze_sentiment(text):
 
 df["sentiment"] = df["text"].apply(analyze_sentiment)
 
-# Save results to MongoDB
-collection = get_mongo_client()
+#### **4. Upload Data to MongoDB**
+collection.delete_many({})  # Optional: Clear existing data before inserting
 collection.insert_many(df.to_dict("records"))
+st.success("Data Uploaded to MongoDB!")
 
-#### *5. Build Streamlit Dashboard*
-import streamlit as st
+#### **5. Build Streamlit Dashboard**
+st.title("📊 Sentiment Analysis Dashboard")
 
-st.title("Sentiment Analysis Dashboard")
+# Show first 5 rows from MongoDB
+st.subheader("First 5 Rows from Database")
+data = list(collection.find({}, {"_id": 0}).limit(5))
+st.write(pd.DataFrame(data))
 
-if st.button("Show Data"):
+# Buttons to display more data
+if st.button("Show Complete Data"):
     st.write(df)
 
 if st.button("Show MongoDB Data"):
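backup.py still labels every tweet with `df["text"].apply(analyze_sentiment)`, i.e. one pipeline call per row. A hedged alternative, not part of this commit, is to hand the texts to the same pipeline as a list so transformers can batch them; the `batch_size` and `truncation` settings below are assumptions for illustration:

```python
# Hedged sketch: batch the texts through the same cardiffnlp model instead of
# calling the pipeline once per row. batch_size/truncation are assumptions,
# not settings used in the commit.
import pandas as pd
from transformers import pipeline

sentiment_pipeline = pipeline(
    "sentiment-analysis",
    model="cardiffnlp/twitter-roberta-base-sentiment",
)

df = pd.DataFrame({"text": ["I love this!", "This is terrible.", "It is okay, I guess."]})

# Pipelines accept a list of strings and return one result dict per input.
results = sentiment_pipeline(df["text"].tolist(), batch_size=32, truncation=True)
df["sentiment"] = [r["label"] for r in results]
print(df)
```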
sentiment140.csv CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:08cba6317a49528fcd074f9043aafcd5ad6c6be45ede159c4e36cec33af24afe
-size 238803811
+oid sha256:865c9a16106df762ab3cff2cdb713bc0d9bc103436365ba7bca3e136d653912a
+size 145627981
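The sentiment140.csv change only swaps the Git LFS pointer: a new oid and a smaller payload (145,627,981 bytes instead of 238,803,811). A small sketch, using only the URL and values shown in this diff, for checking that a downloaded copy matches the new pointer:

```python
# Sketch (not part of the commit): verify a downloaded sentiment140.csv against
# the new LFS pointer. URL, expected sha256, and size are taken from this diff.
import hashlib
import requests

CSV_URL = "https://huggingface.co/spaces/sharangrav24/SentimentAnalysis/resolve/main/sentiment140.csv"
EXPECTED_SHA256 = "865c9a16106df762ab3cff2cdb713bc0d9bc103436365ba7bca3e136d653912a"
EXPECTED_SIZE = 145627981

response = requests.get(CSV_URL)
response.raise_for_status()
data = response.content

print("size matches:", len(data) == EXPECTED_SIZE)
print("sha256 matches:", hashlib.sha256(data).hexdigest() == EXPECTED_SHA256)
```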