tarrasyed19472007 committed
Commit 8988fb4 (verified) · Parent: 34b1221

Update app.py

Files changed (1): app.py +5 -19
app.py CHANGED
@@ -1,16 +1,17 @@
-# Import necessary libraries
 import streamlit as st
 from transformers import pipeline
 import torch
 from datasets import load_dataset
 
+# Ensure proper usage of device (CPU or GPU)
+device = 0 if torch.cuda.is_available() else -1  # Use GPU if available, else use CPU
+
 # Load the T5-based Emotion Classifier model
 @st.cache_resource
 def load_model():
     try:
         st.write("Loading the emotion analysis model...")
-        # Initialize the emotion classifier using the T5 model fine-tuned for emotion classification
-        emotion_analyzer = pipeline("text-classification", model="suryakiran786/T5-emotion")
+        emotion_analyzer = pipeline("text-classification", model="suryakiran786/T5-emotion", device=device)
         st.write("Model loaded successfully!")
         return emotion_analyzer
     except Exception as e:
@@ -20,24 +21,9 @@ def load_model():
 # Initialize the model (with caching to prevent reloads)
 emotion_analyzer = load_model()
 
-# Load the dataset if needed for any additional logic (not used in emotion analysis directly, just for example)
-@st.cache_data
-def load_data():
-    try:
-        # For demonstration purposes, let's load a sentiment analysis dataset from Hugging Face
-        dataset = load_dataset("glue", "sst2")
-        st.write("Dataset loaded successfully!")
-        return dataset
-    except Exception as e:
-        st.write(f"Error loading dataset: {e}")
-        return None
-
-# Load data (just to show usage, not used in emotion analysis directly)
-dataset = load_data()
-
 # Function to predict emotion for a single response
 def predict_emotion_single(response):
-    if not emotion_analyzer:
+    if emotion_analyzer is None:
         return {"Error": "Emotion analyzer model not initialized. Please check model loading."}
     try:
         response = response.strip()
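
For context, a minimal self-contained sketch (not part of the commit) of how the updated pieces fit together: explicit device selection, the cached pipeline, and the is-None guard. The model id suryakiran786/T5-emotion and the Streamlit messages come from the diff above; the sample sentence and the result formatting are illustrative assumptions.

# Sketch only: assumes the model "suryakiran786/T5-emotion" is downloadable and that
# the text-classification pipeline returns [{"label": ..., "score": ...}] per input.
import streamlit as st
import torch
from transformers import pipeline

device = 0 if torch.cuda.is_available() else -1  # transformers convention: 0 = first GPU, -1 = CPU

@st.cache_resource
def load_model():
    try:
        return pipeline("text-classification", model="suryakiran786/T5-emotion", device=device)
    except Exception as e:
        st.write(f"Error loading model: {e}")
        return None

emotion_analyzer = load_model()

def predict_emotion_single(response):
    # Guard from this commit: an explicit None check rather than a truthiness test.
    if emotion_analyzer is None:
        return {"Error": "Emotion analyzer model not initialized. Please check model loading."}
    result = emotion_analyzer(response.strip())
    return {item["label"]: round(item["score"], 4) for item in result}

st.write(predict_emotion_single("I am thrilled with how this turned out!"))

Caching the pipeline with st.cache_resource keeps the model in memory across Streamlit reruns, and checking emotion_analyzer is None reports a load failure once instead of raising inside every prediction call.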