yunusajib committed on
Commit
938258b
·
verified ·
1 Parent(s): 239e083
Files changed (1) hide show
  1. app.py +46 -55
app.py CHANGED
@@ -1,79 +1,70 @@
1
- import os
2
  import cv2
3
- import tempfile
4
  import pandas as pd
5
  import matplotlib.pyplot as plt
6
  from deepface import DeepFace
7
- import gradio as gr
8
 
9
- def analyze_video(video_path):
10
  cap = cv2.VideoCapture(video_path)
11
- fps = cap.get(cv2.CAP_PROP_FPS)
12
- frames = []
13
- count = 0
 
 
14
 
15
  while True:
16
  ret, frame = cap.read()
17
  if not ret:
18
  break
19
- if count % int(fps * 2) == 0: # sample every 2 seconds
20
- frames.append(frame)
21
- count += 1
22
- cap.release()
23
-
24
- emotions_summary = []
25
- for i, frame in enumerate(frames):
26
- with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tmpfile:
27
- cv2.imwrite(tmpfile.name, frame)
28
  try:
29
- result = DeepFace.analyze(
30
- img_path=tmpfile.name,
31
- actions=['emotion'],
32
- enforce_detection=False,
33
- detector_backend='opencv',
34
- prog_backend='pytorch'
35
  )
36
- if isinstance(result, list):
37
- emotions_summary.append(result[0]['emotion'])
38
- else:
39
- emotions_summary.append(result['emotion'])
40
  except Exception as e:
41
- print(f"Frame {i} skipped: {e}")
42
- finally:
43
- os.unlink(tmpfile.name)
44
 
45
- df = pd.DataFrame(emotions_summary)
46
- emotion_means = df.mean().sort_values(ascending=False)
 
 
 
 
 
47
 
48
  # Plot
49
- plt.figure(figsize=(10, 5))
50
- emotion_means.plot(kind='bar', color='skyblue')
51
- plt.title("Average Emotions in Video")
52
- plt.ylabel("Probability")
53
- plt.xticks(rotation=45)
54
  plt.tight_layout()
55
- plt.savefig("emotion_chart.png")
56
- plt.close()
57
 
58
- summary = "**Video Analysis Complete**\n"
59
- summary += f"**Frames Analyzed:** {len(frames)}\n"
60
- summary += f"**Duration:** {round(len(frames) * 2.0, 1)} seconds\n\n"
61
- summary += "**Average Emotions:**\n"
62
- for emotion, value in emotion_means.items():
63
- summary += f"• {emotion.capitalize()}: {value:.1f}%\n"
 
 
 
 
 
 
64
 
65
- return "emotion_chart.png", summary
66
 
67
- demo = gr.Interface(
68
- fn=analyze_video,
69
  inputs=gr.Video(label="Upload a Video"),
70
- outputs=[
71
- gr.Image(label="Emotion Chart"),
72
- gr.Markdown(label="Emotion Summary")
73
- ],
74
- title="Emotion Recognition from Video",
75
- description="Upload a short video. The app analyzes emotions every 2 seconds using DeepFace and PyTorch."
76
  )
77
 
78
- if __name__ == "__main__":
79
- demo.launch()
 
1
+ import gradio as gr
2
  import cv2
 
3
  import pandas as pd
4
  import matplotlib.pyplot as plt
5
  from deepface import DeepFace
6
+ import tempfile
7
 
8
+ def analyze_emotions(video_path):
9
  cap = cv2.VideoCapture(video_path)
10
+ frame_rate = cap.get(cv2.CAP_PROP_FPS)
11
+ frame_interval = int(frame_rate * 2) # Analyze every 2 seconds
12
+
13
+ emotion_data = []
14
+ frame_count = 0
15
 
16
  while True:
17
  ret, frame = cap.read()
18
  if not ret:
19
  break
20
+ if frame_count % frame_interval == 0:
 
 
 
 
 
 
 
 
21
  try:
22
+ analysis = DeepFace.analyze(
23
+ frame,
24
+ actions=["emotion"],
25
+ detector_backend="opencv", # TensorFlow-free
26
+ enforce_detection=False
 
27
  )
28
+ emotion_data.append(analysis[0]["emotion"])
 
 
 
29
  except Exception as e:
30
+ print("Error analyzing frame:", e)
31
+ frame_count += 1
 
32
 
33
+ cap.release()
34
+
35
+ if not emotion_data:
36
+ return "No faces detected."
37
+
38
+ df = pd.DataFrame(emotion_data)
39
+ avg_emotions = df.mean().sort_values(ascending=False)
40
 
41
  # Plot
42
+ fig, ax = plt.subplots(figsize=(6, 3))
43
+ avg_emotions.plot(kind='bar', ax=ax, color='skyblue')
44
+ ax.set_title("Average Emotions Across Video")
45
+ ax.set_ylabel("Confidence (%)")
46
+ ax.set_ylim(0, 100)
47
  plt.tight_layout()
 
 
48
 
49
+ with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmpfile:
50
+ plt.savefig(tmpfile.name)
51
+ plot_path = tmpfile.name
52
+
53
+ summary = f"""
54
+ **Video Analysis Complete**
55
+ **Frames Analyzed:** {len(df)}
56
+ **Duration:** {round(frame_count / frame_rate, 1)} seconds
57
+
58
+ **Average Emotions:**\n""" + "\n".join([f"• {emotion}: {round(score, 1)}%" for emotion, score in avg_emotions.items()])
59
+
60
+ return summary, plot_path
61
 
 
62
 
63
+ iface = gr.Interface(
64
+ fn=analyze_emotions,
65
  inputs=gr.Video(label="Upload a Video"),
66
+ outputs=[gr.Markdown(), gr.Image(type="filepath", label="Emotion Summary Chart")],
67
+ title="Emotion Analysis from Video (No TensorFlow)"
 
 
 
 
68
  )
69
 
70
+ iface.launch()