import cv2 as cv
import numpy as np
import gradio as gr
from pathlib import Path
from collections import Counter, defaultdict
from huggingface_hub import hf_hub_download
from facial_fer_model import FacialExpressionRecog
from yunet import YuNet
# Download the ONNX models from the Hugging Face Hub
FD_MODEL_PATH = hf_hub_download(repo_id="opencv/face_detection_yunet", filename="face_detection_yunet_2023mar.onnx")
FER_MODEL_PATH = hf_hub_download(repo_id="opencv/facial_expression_recognition", filename="facial_expression_recognition_mobilefacenet_2022july.onnx")
backend_id = cv.dnn.DNN_BACKEND_OPENCV
target_id = cv.dnn.DNN_TARGET_CPU
fer_model = FacialExpressionRecog(modelPath=FER_MODEL_PATH, backendId=backend_id, targetId=target_id)
detect_model = YuNet(modelPath=FD_MODEL_PATH)
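# YuNet performs face detection; the FER model classifies the expression of each detected face.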
# EN -> NL mapping
EN_TO_NL = {
"neutral": "Neutraal",
"happy": "Blij",
"sad": "Verdrietig",
"surprise": "Verrast",
"angry": "Boos",
"anger": "Boos",
"disgust": "Walging",
"fear": "Bang",
"contempt": "Minachting",
"unknown": "Onbekend",
}
def to_dutch(label: str) -> str:
if not label:
return "Onbekend"
key = label.strip().lower()
return EN_TO_NL.get(key, label)
# In-memory statistics: Dutch emotion label -> detection count
emotion_stats = defaultdict(int)
def visualize(image, det_res, fer_res):
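    """Draw bounding boxes, Dutch expression labels, and facial landmarks on a copy of the input image."""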
output = image.copy()
landmark_color = [(255, 0, 0), (0, 0, 255), (0, 255, 0), (255, 0, 255), (0, 255, 255)]
for det, fer_type in zip(det_res, fer_res):
bbox = det[0:4].astype(np.int32)
fer_type_str_nl = to_dutch(FacialExpressionRecog.getDesc(fer_type))
cv.rectangle(output, (bbox[0], bbox[1]), (bbox[0]+bbox[2], bbox[1]+bbox[3]), (0, 255, 0), 2)
cv.putText(output, fer_type_str_nl, (bbox[0], max(0, bbox[1] - 10)),
cv.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2, cv.LINE_AA)
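        # det[4:14] holds the five YuNet landmark points (eyes, nose tip, mouth corners) as (x, y) pairs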
landmarks = det[4:14].astype(np.int32).reshape((5, 2))
for idx, landmark in enumerate(landmarks):
cv.circle(output, landmark, 2, landmark_color[idx], 2)
return output
def summarize_emotions(fer_res):
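    """Return a Markdown summary: the most frequent emotion as a heading plus per-emotion counts."""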
if not fer_res:
return "## **Geen gezicht gedetecteerd**"
names_nl = [to_dutch(FacialExpressionRecog.getDesc(x)) for x in fer_res]
counts = Counter(names_nl).most_common()
top = counts[0][0]
details = ", ".join([f"{name} ({n})" for name, n in counts])
return f"# **{top}**\n\n_Gedetecteerde emoties: {details}_"
# --- Draw the bar chart with OpenCV (no matplotlib needed) ---
def draw_bar_chart_cv(stats: dict, width=640, height=320):
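    """Render the emotion counts as a simple bar chart image (RGB) drawn with OpenCV primitives."""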
img = np.full((height, width, 3), 255, dtype=np.uint8)
cv.putText(img, "Live emotie-statistieken", (12, 28), cv.FONT_HERSHEY_SIMPLEX, 0.8, (0, 0, 0), 2, cv.LINE_AA)
if not stats:
cv.putText(img, "Nog geen statistieken", (12, height//2), cv.FONT_HERSHEY_SIMPLEX, 0.9, (128, 128, 128), 2, cv.LINE_AA)
return cv.cvtColor(img, cv.COLOR_BGR2RGB)
left, right, top, bottom = 60, 20, 50, 40
plot_w = width - left - right
plot_h = height - top - bottom
origin = (left, height - bottom)
cv.line(img, origin, (left + plot_w, height - bottom), (0, 0, 0), 2)
cv.line(img, origin, (left, height - bottom - plot_h), (0, 0, 0), 2)
labels = list(stats.keys())
values = [stats[k] for k in labels]
max_val = max(values) if max(values) > 0 else 1
n = len(labels)
gap = 12
bar_w = max(10, int((plot_w - gap * (n + 1)) / max(1, n)))
for i, (lab, val) in enumerate(zip(labels, values)):
x1 = left + gap + i * (bar_w + gap)
x2 = x1 + bar_w
h_px = int((val / max_val) * (plot_h - 10))
y1 = height - bottom - h_px
y2 = height - bottom - 1
cv.rectangle(img, (x1, y1), (x2, y2), (0, 170, 60), -1)
cv.putText(img, str(val), (x1 + 2, y1 - 6), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 90, 30), 1, cv.LINE_AA)
show_lab = lab if len(lab) <= 10 else lab[:9] + "…"
(tw, th), _ = cv.getTextSize(show_lab, cv.FONT_HERSHEY_SIMPLEX, 0.5, 1)
tx = x1 + (bar_w - tw) // 2
ty = height - bottom + th + 12
cv.putText(img, show_lab, (tx, ty), cv.FONT_HERSHEY_SIMPLEX, 0.5, (40, 40, 40), 1, cv.LINE_AA)
return cv.cvtColor(img, cv.COLOR_BGR2RGB)
def process_image(input_image):
"""Helper: run detectie en retourneer (output_img, fer_res as list of ints)."""
image = cv.cvtColor(input_image, cv.COLOR_RGB2BGR)
h, w, _ = image.shape
detect_model.setInputSize([w, h])
dets = detect_model.infer(image)
if dets is None:
return cv.cvtColor(image, cv.COLOR_BGR2RGB), []
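    # each detection row is [x, y, w, h, 10 landmark coordinates, score]; drop the trailing score before FER inference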
fer_res = [fer_model.infer(image, face_points[:-1])[0] for face_points in dets]
output = visualize(image, dets, fer_res)
return cv.cvtColor(output, cv.COLOR_BGR2RGB), fer_res
def detect_expression(input_image):
"""Versie die WÉL statistieken bijwerkt (gebruik voor 'Verstuur')."""
output_img, fer_res = process_image(input_image)
emotion_md = summarize_emotions(fer_res)
    # update the statistics only here:
names_nl = [to_dutch(FacialExpressionRecog.getDesc(x)) for x in fer_res]
for name in names_nl:
emotion_stats[name] += 1
stats_plot = draw_bar_chart_cv(emotion_stats)
return output_img, emotion_md, stats_plot
def detect_expression_no_stats(input_image):
"""Versie die GEEN statistieken bijwerkt (gebruik voor gr.Examples & caching)."""
output_img, fer_res = process_image(input_image)
emotion_md = summarize_emotions(fer_res)
    # no update of emotion_stats here
    stats_plot = draw_bar_chart_cv(emotion_stats)  # show the current totals (may be empty)
return output_img, emotion_md, stats_plot
# Automatically load example images
IMAGE_EXTS = {".jpg", ".jpeg", ".png", ".bmp", ".webp"}
EXAMPLES_DIR = Path("examples")
if EXAMPLES_DIR.exists() and EXAMPLES_DIR.is_dir():
example_paths = [str(p) for p in sorted(EXAMPLES_DIR.iterdir()) if Path(p).suffix.lower() in IMAGE_EXTS]
else:
example_paths = []
example_list = [[p] for p in example_paths]
CACHE_EXAMPLES = bool(example_list)
# CSS (green emotion result text)
custom_css = """
#emotie-uitslag { color: #16a34a; }
#emotie-uitslag h1, #emotie-uitslag h2, #emotie-uitslag h3 { margin: 0.25rem 0; }
"""
with gr.Blocks(css=custom_css) as demo:
gr.Markdown("## Herkenning van gezichtsuitdrukkingen ")
gr.Markdown("Detecteert gezichten en herkent gezichtsuitdrukkingen ")
with gr.Row():
with gr.Column():
input_image = gr.Image(type="numpy", label="Afbeelding uploaden")
with gr.Row():
submit_btn = gr.Button("Verstuur", variant="primary")
clear_btn = gr.Button("Wissen")
with gr.Column():
output_image = gr.Image(type="numpy", label="Resultaat gezichtsuitdrukking")
emotion_md = gr.Markdown("## **Nog geen resultaat**", elem_id="emotie-uitslag")
stats_image = gr.Image(label="Statistieken", type="numpy")
    # Clear helpers: reset the outputs but keep the accumulated statistics chart
def clear_all_on_new():
return None, "## **Nog geen resultaat**", draw_bar_chart_cv(emotion_stats)
def clear_all_button():
return None, None, "## **Nog geen resultaat**", draw_bar_chart_cv(emotion_stats)
input_image.change(fn=clear_all_on_new, outputs=[output_image, emotion_md, stats_image])
submit_btn.click(fn=detect_expression, inputs=input_image, outputs=[output_image, emotion_md, stats_image])
clear_btn.click(fn=clear_all_button, outputs=[input_image, output_image, emotion_md, stats_image])
gr.Markdown("Klik op een voorbeeld om te testen.")
    # IMPORTANT: use the 'no_stats' function for Examples so they do NOT count toward the statistics
gr.Examples(
examples=example_list,
inputs=input_image,
outputs=[output_image, emotion_md, stats_image],
        fn=detect_expression_no_stats,  # <- does NOT update the statistics
examples_per_page=20,
cache_examples=CACHE_EXAMPLES
)
if __name__ == "__main__":
demo.launch()