tarrasyed19472007 committed
Commit 986d689 · verified · 1 Parent(s): 8988fb4

Update app.py

Files changed (1)
  1. app.py +13 -5
app.py CHANGED
@@ -1,16 +1,15 @@
 import streamlit as st
 from transformers import pipeline
 import torch
-from datasets import load_dataset
 
-# Ensure proper usage of device (CPU or GPU)
+# Check for device (GPU or CPU) availability
 device = 0 if torch.cuda.is_available() else -1  # Use GPU if available, else use CPU
 
-# Load the T5-based Emotion Classifier model
+# Function to load the model with enhanced error handling
 @st.cache_resource
 def load_model():
     try:
-        st.write("Loading the emotion analysis model...")
+        st.write("Attempting to load the emotion analysis model...")
         emotion_analyzer = pipeline("text-classification", model="suryakiran786/T5-emotion", device=device)
         st.write("Model loaded successfully!")
         return emotion_analyzer
@@ -21,16 +20,25 @@ def load_model():
 # Initialize the model (with caching to prevent reloads)
 emotion_analyzer = load_model()
 
+# Check if the model is loaded successfully
+if emotion_analyzer is None:
+    st.warning("The emotion analysis model could not be loaded. Please try again.")
+else:
+    st.success("Emotion model is ready for predictions!")
+
 # Function to predict emotion for a single response
 def predict_emotion_single(response):
     if emotion_analyzer is None:
+        st.error("Model not loaded. Please try reloading the app.")
         return {"Error": "Emotion analyzer model not initialized. Please check model loading."}
+
     try:
         response = response.strip()
         result = emotion_analyzer([response])
         return {res["label"]: round(res["score"], 4) for res in result}
     except Exception as e:
-        return {"Error": str(e)}
+        st.error(f"Prediction failed: {e}")
+        return {"Error": f"Prediction failed: {e}"}
 
 # Streamlit App Layout
 st.title("Behavior Prediction App")
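The diff stops at st.title("Behavior Prediction App"), so the part of the layout that actually calls predict_emotion_single is not shown in this commit. A minimal sketch of how the updated function could be wired to an input widget follows; the widget labels and the st.json display are illustrative assumptions, not code from app.py.

# Hypothetical continuation of the layout (illustrative only, not from this commit):
# collect one free-text response and show the predicted emotion scores.
user_response = st.text_area("Enter a response to analyze:")

if st.button("Predict Emotion"):
    if not user_response.strip():
        st.warning("Please enter some text before predicting.")
    else:
        scores = predict_emotion_single(user_response)
        if "Error" in scores:
            # Surface the error message returned by the prediction helper
            st.error(scores["Error"])
        else:
            # Display each emotion label with its rounded confidence score
            st.json(scores)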