# app.py — Emotion Recognition from Video (Gradio + DeepFace)
# Provenance: HuggingFace Space by yunusajib, commit 60d50c6 (verified), 2.47 kB.
# (File-viewer chrome "raw / history / blame" removed so the file is valid Python.)
import os
import cv2
import tempfile
import pandas as pd
import matplotlib.pyplot as plt
from deepface import DeepFace
import gradio as gr
def analyze_video(video_path):
    """Sample frames from a video every ~2 seconds, run DeepFace emotion
    analysis on each, and return a bar chart plus a Markdown summary.

    Parameters
    ----------
    video_path : str
        Path to the uploaded video file (supplied by Gradio).

    Returns
    -------
    tuple[str | None, str]
        (path to the saved chart PNG, or None when nothing was analyzed,
         Markdown-formatted summary text).
    """
    cap = cv2.VideoCapture(video_path)
    fps = cap.get(cv2.CAP_PROP_FPS)
    # Guard against unreadable FPS metadata: cap.get() returns 0.0 for some
    # containers, and `count % int(fps * 2)` would raise ZeroDivisionError.
    # Fall back to every 30th frame (~1-2 s at typical frame rates).
    step = max(int(fps * 2), 1) if fps and fps > 0 else 30
    frames = []
    count = 0
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        if count % step == 0:  # sample roughly every 2 seconds
            frames.append(frame)
        count += 1
    cap.release()

    emotions_summary = []
    for i, frame in enumerate(frames):
        # DeepFace reads from a file path; write the frame to a temp JPEG and
        # close it before analysis (required on Windows, harmless elsewhere).
        with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tmpfile:
            cv2.imwrite(tmpfile.name, frame)
        try:
            # BUG FIX: the original passed prog_backend='pytorch', which is not
            # a DeepFace.analyze keyword — every call raised TypeError, was
            # swallowed by the except below, and ALL frames were "skipped".
            result = DeepFace.analyze(
                img_path=tmpfile.name,
                actions=['emotion'],
                enforce_detection=False,
                detector_backend='opencv',
            )
            # analyze() returns a list (one entry per detected face) in newer
            # DeepFace versions, a bare dict in older ones.
            if isinstance(result, list):
                emotions_summary.append(result[0]['emotion'])
            else:
                emotions_summary.append(result['emotion'])
        except Exception as e:
            # Best-effort: a single bad frame should not abort the whole video.
            print(f"Frame {i} skipped: {e}")
        finally:
            os.unlink(tmpfile.name)

    if not emotions_summary:
        # Empty-result guard: df.mean() on an empty DataFrame would yield an
        # empty series and break plotting — report cleanly instead.
        return None, "**No faces/emotions could be analyzed in this video.**"

    df = pd.DataFrame(emotions_summary)
    emotion_means = df.mean().sort_values(ascending=False)

    # Bar chart of average per-emotion scores across the sampled frames.
    plt.figure(figsize=(10, 5))
    emotion_means.plot(kind='bar', color='skyblue')
    plt.title("Average Emotions in Video")
    plt.ylabel("Probability")
    plt.xticks(rotation=45)
    plt.tight_layout()
    plt.savefig("emotion_chart.png")
    plt.close()

    summary = "**Video Analysis Complete**\n"
    summary += f"**Frames Analyzed:** {len(frames)}\n"
    # Duration is estimated from the 2-second sampling interval, not the
    # container metadata.
    summary += f"**Duration:** {round(len(frames) * 2.0, 1)} seconds\n\n"
    summary += "**Average Emotions:**\n"
    for emotion, value in emotion_means.items():
        summary += f"• {emotion.capitalize()}: {value:.1f}%\n"
    return "emotion_chart.png", summary
# Gradio front end: one video in; an emotion bar chart and a Markdown summary out.
_outputs = [
    gr.Image(label="Emotion Chart"),
    gr.Markdown(label="Emotion Summary"),
]

demo = gr.Interface(
    fn=analyze_video,
    inputs=gr.Video(label="Upload a Video"),
    outputs=_outputs,
    title="Emotion Recognition from Video",
    description="Upload a short video. The app analyzes emotions every 2 seconds using DeepFace and PyTorch.",
)

# Start the web server only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()