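"""Gradio demo: emotion recognition on an uploaded video with DeepFace.

Frames are sampled roughly every two seconds, each sampled frame is run through
DeepFace's emotion analysis, and the averaged scores are returned as a bar chart
plus a Markdown summary.
"""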
import os
import cv2
import tempfile
import pandas as pd
import matplotlib.pyplot as plt
from deepface import DeepFace
import gradio as gr
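
# Assumed runtime dependencies: opencv-python, pandas, matplotlib, deepface, gradio.
# DeepFace downloads its model weights on first use, so the first analysis is slower.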

def analyze_video(video_path):
    cap = cv2.VideoCapture(video_path)
    fps = cap.get(cv2.CAP_PROP_FPS) or 30.0  # fall back if FPS metadata is missing
    step = max(int(fps * 2), 1)              # sample roughly one frame every 2 seconds
    frames = []
    count = 0

    while True:
        ret, frame = cap.read()
        if not ret:
            break
        if count % step == 0:
            frames.append(frame)
        count += 1
    cap.release()

    emotions_summary = []
    for i, frame in enumerate(frames):
        with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tmpfile:
            cv2.imwrite(tmpfile.name, frame)
            try:
                result = DeepFace.analyze(
                    img_path=tmpfile.name,
                    actions=['emotion'],
                    enforce_detection=False,  # don't error out on frames with no face
                    detector_backend='opencv'
                )
                if isinstance(result, list):
                    emotions_summary.append(result[0]['emotion'])
                else:
                    emotions_summary.append(result['emotion'])
            except Exception as e:
                print(f"Frame {i} skipped: {e}")
            finally:
                os.unlink(tmpfile.name)

    if not emotions_summary:
        return None, "**No emotions could be detected in the sampled frames.**"
    df = pd.DataFrame(emotions_summary)
    emotion_means = df.mean().sort_values(ascending=False)

    # Plot
    plt.figure(figsize=(10, 5))
    emotion_means.plot(kind='bar', color='skyblue')
    plt.title("Average Emotions in Video")
    plt.ylabel("Probability")
    plt.xticks(rotation=45)
    plt.tight_layout()
    plt.savefig("emotion_chart.png")
    plt.close()
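    # The saved chart path is returned below so the gr.Image output can display it.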

    summary = "**Video Analysis Complete**\n"
    summary += f"**Frames Analyzed:** {len(frames)}\n"
    summary += f"**Duration:** {round(len(frames) * 2.0, 1)} seconds\n\n"
    summary += "**Average Emotions:**\n"
    for emotion, value in emotion_means.items():
        summary += f"• {emotion.capitalize()}: {value:.1f}%\n"

    return "emotion_chart.png", summary

demo = gr.Interface(
    fn=analyze_video,
    inputs=gr.Video(label="Upload a Video"),
    outputs=[
        gr.Image(label="Emotion Chart"),
        gr.Markdown(label="Emotion Summary")
    ],
    title="Emotion Recognition from Video",
    description="Upload a short video. The app analyzes emotions every 2 seconds using DeepFace and PyTorch."
)

if __name__ == "__main__":
    demo.launch()
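
# To run locally: `python app.py`, then open the local URL Gradio prints
# (typically http://127.0.0.1:7860).
#
# Direct, non-UI usage sketch (hypothetical clip path):
#   chart_path, text_summary = analyze_video("sample.mp4")
#   print(text_summary)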