Ahmadkhan12 committed (verified)
Commit aa1d293 · 1 Parent(s): 1effd41

Update app.py

Files changed (1)
  1. app.py +19 -25
app.py CHANGED
@@ -1,5 +1,4 @@
 import streamlit as st
-import face_recognition
 import numpy as np
 import cv2
 from PIL import Image
@@ -12,16 +11,16 @@ st.title("Emotion Recognition App")
 # Upload an image
 uploaded_file = st.file_uploader("Upload an image", type=["jpg", "jpeg", "png"])
 
-# Define simple emotion mapping based on facial features (for demonstration purposes)
-def detect_emotion(face_landmarks):
+# Load OpenCV's pre-trained face detection model
+face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
+
+# Define a simple emotion detection function
+def detect_emotion(face):
     """
-    A simple mock-up function for detecting emotions based on landmarks.
-    Replace with a more sophisticated model as needed.
+    Mock function to assign a random emotion.
+    Replace with an actual emotion detection model.
     """
-    if face_landmarks:
-        # Example: Assign "Happy" if eyes are close together
-        return "Happy"
-    return "Neutral"
+    return "Happy"  # Replace with your logic
 
 # Resize image to reduce memory usage
 def resize_image(image, max_size=(800, 800)):
@@ -44,30 +43,25 @@ if uploaded_file is not None:
     # Convert image to numpy array
     image_np = np.array(image)
 
-    # Convert image to RGB (ensure compatibility with face_recognition)
-    if len(image_np.shape) == 3 and image_np.shape[2] == 4:  # RGBA to RGB
-        image_np = cv2.cvtColor(image_np, cv2.COLOR_RGBA2RGB)
-    elif len(image_np.shape) == 3 and image_np.shape[2] == 3:  # BGR to RGB
-        image_np = cv2.cvtColor(image_np, cv2.COLOR_BGR2RGB)
+    # Convert image to grayscale for face detection
+    gray_image = cv2.cvtColor(image_np, cv2.COLOR_RGB2GRAY)
 
-    # Detect faces and landmarks
-    face_locations = face_recognition.face_locations(image_np)
-    face_landmarks_list = face_recognition.face_landmarks(image_np)
+    # Detect faces
+    faces = face_cascade.detectMultiScale(gray_image, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))
 
-    if face_locations:
-        for face_location, face_landmarks in zip(face_locations, face_landmarks_list):
-            # Draw a rectangle around the face
-            top, right, bottom, left = face_location
-            cv2.rectangle(image_np, (left, top), (right, bottom), (0, 255, 0), 2)
+    if len(faces) > 0:
+        for (x, y, w, h) in faces:
+            # Draw rectangle around the face
+            cv2.rectangle(image_np, (x, y), (x+w, y+h), (0, 255, 0), 2)
 
-            # Detect emotion based on landmarks
-            emotion = detect_emotion(face_landmarks)
+            # Assign a dummy emotion
+            emotion = detect_emotion(None)
 
             # Display emotion above the face
             cv2.putText(
                 image_np,
                 emotion,
-                (left, top - 10),
+                (x, y - 10),
                 cv2.FONT_HERSHEY_SIMPLEX,
                 0.9,
                 (255, 0, 0),
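
For reference, here is a minimal standalone sketch of the Haar-cascade detection path this commit switches to, run outside Streamlit. The input path face.jpg and output path face_annotated.jpg are placeholder names, and detect_emotion mirrors the app's stub rather than a real emotion classifier.

# Standalone sketch of the OpenCV Haar-cascade face detection used above.
# Assumes OpenCV (cv2) is installed and a test image exists at "face.jpg" (placeholder path).
import cv2

# Load OpenCV's bundled frontal-face Haar cascade
face_cascade = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_default.xml"
)

def detect_emotion(face):
    """Placeholder mirroring the app's stub: always returns "Happy"."""
    return "Happy"

image = cv2.imread("face.jpg")  # cv2.imread returns a BGR array; placeholder path
if image is None:
    raise SystemExit("Could not read face.jpg")

# Haar cascades operate on a single-channel image
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

# Same parameters as in the commit
faces = face_cascade.detectMultiScale(
    gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30)
)

for (x, y, w, h) in faces:
    # Box each detected face and label it with the placeholder emotion
    cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2.putText(image, detect_emotion(image[y:y + h, x:x + w]),
                (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (255, 0, 0), 2)

cv2.imwrite("face_annotated.jpg", image)

The detectMultiScale parameters match the ones in the commit: the image pyramid shrinks by 10% per step (scaleFactor=1.1), a candidate box is kept only if it has at least 5 overlapping detections (minNeighbors=5), and detections smaller than 30×30 pixels are discarded (minSize).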