maxinethegreat committed on
Commit c2f62d3 · 1 Parent(s): 58eea87

test live detection

Files changed (1): app.py +14 -17
app.py CHANGED
@@ -13,23 +13,20 @@ face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_fronta
 emotions = ['Angry', 'Disgust', 'Fear', 'Happy', 'Sad', 'Surprise', 'Neutral']
 
 # Define the predict_emotion function
-def predict_emotion(vid):
-    cap = cv2.VideoCapture(vid)
-    # while True:
-    ret, frame = cap.read()
-    if ret:
-        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
-        faces = face_cascade.detectMultiScale(gray, 1.3, 5)
-        for (x, y, w, h) in faces:
-            face = gray[y:y+h, x:x+w]
-            face = cv2.resize(face, (48, 48))
-            face = np.expand_dims(face, axis=-1)
-            face = np.expand_dims(face, axis=0)
-            prediction = model.predict(face)
-            emotion = emotions[np.argmax(prediction)]
-            cv2.putText(frame, emotion, (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
-            cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 0, 0), 2)
-        return frame
+def predict_emotion(frame):
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+    faces = face_cascade.detectMultiScale(gray, 1.3, 5)
+    for (x, y, w, h) in faces:
+        face = gray[y:y+h, x:x+w]
+        face = cv2.resize(face, (48, 48))
+        face = np.expand_dims(face, axis=-1)
+        face = np.expand_dims(face, axis=0)
+        prediction = model.predict(face)
+        emotion = emotions[np.argmax(prediction)]
+        cv2.putText(frame, emotion, (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
+        cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 0, 0), 2)
+
+    return frame
 
 # Start the video capture and emotion detection
 # cap = cv2.VideoCapture(0)
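
For context, the change moves frame capture out of predict_emotion: the function now takes a single frame, annotates any detected faces, and returns it, so a caller has to feed it frames one at a time. Below is a minimal sketch (not part of this commit) of how such a per-frame function could be driven in a live loop. The `from app import predict_emotion` path, the window name, and the 'q'-to-quit handling are assumptions for illustration; `model`, `face_cascade`, and `emotions` are expected to be defined in app.py as shown in the hunk context above.

import cv2

from app import predict_emotion  # hypothetical import; adjust to the Space's actual layout

# Open the default webcam, mirroring the commented-out cv2.VideoCapture(0) line.
cap = cv2.VideoCapture(0)
try:
    while True:
        ret, frame = cap.read()
        if not ret:
            # Stop if the camera yields no frame.
            break
        # Draw the predicted emotion label and face rectangle onto the frame.
        annotated = predict_emotion(frame)
        cv2.imshow("live emotion detection", annotated)
        if cv2.waitKey(1) & 0xFF == ord("q"):
            # Press 'q' to quit the preview loop.
            break
finally:
    cap.release()
    cv2.destroyAllWindows()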