import gradio as gr
import tensorflow as tf
import cv2
import numpy as np
# Load the saved model
model = tf.keras.models.load_model('model/cnn_9_layer_model.h5')
# Define the face cascade and emotions
face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
emotions = ['Angry', 'Disgust', 'Fear', 'Happy', 'Sad', 'Surprise', 'Neutral']
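# Note: this label order must match the index order used when the model was trained
# (a FER2013-style ordering is assumed here).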
# Define the predict_emotion function
def predict_emotion(frame):
    # Convert the incoming frame to grayscale and detect faces with the Haar cascade
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.3, minNeighbors=5)
    for (x, y, w, h) in faces:
        # Crop the face and resize it to the 48x48 grayscale input the CNN expects
        face = gray[y:y+h, x:x+w]
        face = cv2.resize(face, (48, 48))
        # Add channel and batch dimensions: (48, 48) -> (1, 48, 48, 1)
        face = np.expand_dims(face, axis=-1)
        face = np.expand_dims(face, axis=0)
        # Predict the emotion and annotate the frame with the label and a bounding box
        prediction = model.predict(face)
        emotion = emotions[np.argmax(prediction)]
        cv2.putText(frame, emotion, (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 0, 0), 2)
    return frame
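
# A minimal local sanity check for predict_emotion outside of Gradio (a sketch;
# 'sample_face.jpg' is a hypothetical test image assumed to exist on disk):
#
#     test_frame = cv2.imread('sample_face.jpg')
#     annotated = predict_emotion(test_frame)
#     cv2.imwrite('sample_face_annotated.jpg', annotated)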
# Local OpenCV capture loop, kept for reference; the Gradio interface below
# streams webcam frames instead.
# cap = cv2.VideoCapture(0)
# while True:
#     ret, frame = cap.read()
#     if ret:
#         frame = predict_emotion(frame)
#         cv2.imshow('Live Facial Emotion Detection', frame)
#     if cv2.waitKey(1) == ord('q'):
#         break
# cap.release()
# cv2.destroyAllWindows()
input_image = gr.Image(source="webcam", streaming=True, label="Your Face")
# video = gr.inputs.Video(source="webcam")
output_image = gr.Image(type="numpy", label="Detected Emotion")
iface = gr.Interface(
    fn=predict_emotion,
    inputs=input_image,
    outputs=output_image,
    # interpretation="default",
    title="Mood Detectives",
    description="Real-Time Emotion Detection Using Facial Expressions:\nCan our model detect if you are angry, happy, sad, fearful, disgusted, surprised or neutral?",
    live=True,
)
# Enable queuing so multiple clients can stream frames concurrently
iface.queue(concurrency_count=1000)
iface.launch()
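
# To run this app locally (a sketch, assuming gradio, tensorflow and opencv-python are
# installed and model/cnn_9_layer_model.h5 is present): run `python app.py`, then open
# the local URL Gradio prints (http://127.0.0.1:7860 by default).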