Inni-23 committed
Commit 0be3e0c · 1 Parent(s): 6b2138d

Rename emotion.py to app.py

Files changed (1)
  1. emotion.py → app.py +7 -16
emotion.py → app.py RENAMED
@@ -1,4 +1,6 @@
 import cv2
+import gradio as gr
+import numpy as np
 from deepface import DeepFace
 
 # Load the pre-trained emotion detection model
@@ -10,13 +12,7 @@ emotion_labels = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral']
 # Load face cascade classifier
 face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
 
-# Start capturing video
-cap = cv2.VideoCapture(0)
-
-while True:
-    # Capture frame-by-frame
-    ret, frame = cap.read()
-
+def predict_emotion(frame):
     # Convert frame to grayscale
     gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
 
@@ -45,13 +41,8 @@ while True:
         cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
         cv2.putText(frame, emotion, (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 0, 255), 2)
 
-    # Display the resulting frame
-    cv2.imshow('Real-time Emotion Detection', frame)
-
-    # Press 'q' to exit
-    if cv2.waitKey(1) & 0xFF == ord('q'):
-        break
+    return frame
 
-# Release the capture and close all windows
-cap.release()
-cv2.destroyAllWindows()
+# Gradio UI
+iface = gr.Interface(fn=predict_emotion, inputs="webcam", outputs="image")
+iface.launch()
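
The change drops the OpenCV capture loop and hands each webcam frame to predict_emotion through a Gradio Interface. The inputs="webcam" string shortcut used above comes from the older Gradio 3.x API; on current Gradio releases the same wiring would likely be declared with explicit Image components, roughly as in the sketch below. This is a sketch only, not part of the commit, and the component arguments are assumptions about how the webcam input would be configured.

import gradio as gr

# Sketch: equivalent launch step under the Gradio 4.x component API.
# predict_emotion is the function defined in app.py above; the sources/type
# arguments here are assumptions, not taken from this commit.
iface = gr.Interface(
    fn=predict_emotion,
    inputs=gr.Image(sources=["webcam"], type="numpy"),  # frames arrive as RGB numpy arrays
    outputs=gr.Image(type="numpy"),
)

if __name__ == "__main__":
    iface.launch()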