Added simple app.py
app.py (CHANGED)
@@ -1,124 +1,36 @@
-import numpy as np
-import time
-import cv2
 import av
-import
 import streamlit as st
-from deepface import DeepFace
-from streamlit_webrtc import webrtc_streamer, WebRtcMode, RTCConfiguration
-
-# ---------------------------------------------
-# Streamlit Page Config
-# ---------------------------------------------
-st.set_page_config(page_title="AI Facial Interview Monitor", layout="wide")
-st.title(":blue[MOCKVIEWER - Face Monitoring System]")
 
-
-# 📦 Load Haar Cascade Models
-# ---------------------------------------------
-face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
-eye_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_eye.xml")
 
-
-
-# ---------------------------------------------
-uploaded_image = st.file_uploader("Upload your reference face image", type=["jpg", "jpeg", "png"])
-user_ref_img = None
-if uploaded_image:
-    user_ref_img = cv2.imdecode(np.frombuffer(uploaded_image.read(), np.uint8), cv2.IMREAD_COLOR)
-    st.image(user_ref_img, caption="Reference Image", use_column_width=True)
 
-# ---------------------------------------------
-# ⏱️ Global State Variables
-# ---------------------------------------------
-face_detected_time = time.time()
-last_verified_time = 0
 
-
-
-
-
-
-
-# ---------------------------------------------
-# Confidence Heuristic
-# ---------------------------------------------
-def is_confident_pose(face_roi):
-    """Check if eyes are visible and head is upright."""
-    gray = cv2.cvtColor(face_roi, cv2.COLOR_BGR2GRAY)
-    eyes = eye_cascade.detectMultiScale(gray, 1.1, 4)
-    return len(eyes) >= 1
-
-# ---------------------------------------------
-# 🧬 Identity Verification
-# ---------------------------------------------
-def match_identity(live_face, ref_img):
-    try:
-        result = DeepFace.verify(
-            live_face, ref_img,
-            enforce_detection=False,
-            model_name='Facenet',
-            detector_backend='opencv'
-        )
-        return result["verified"]
-    except Exception as e:
-        print("Verification error:", e)
-        return False
-
-# ---------------------------------------------
-# 📹 Streamlit Webcam Video Processor
-# ---------------------------------------------
 class VideoProcessor:
-
-    def __init__(self):
-        self.last_verified = time.time()
-        self.face_missing = False
-
-    def recv(self, frame):
-        global face_detected_time, last_verified_time, user_ref_img
-        img = frame.to_ndarray(format="bgr24")
-        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
-
-        faces = face_cascade.detectMultiScale(gray, 1.1, 4)
 
-        if len(faces) == 0:
-            if time.time() - face_detected_time > FACE_TIMEOUT:
-                cv2.putText(img, "❌ Interview Cancelled: Face not visible!", (30, 30),
-                            cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 0, 255), 2)
-            else:
-                cv2.putText(img, "⚠️ Face not visible. You have 60 seconds.", (30, 30),
-                            cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 255, 255), 2)
-        else:
-            face_detected_time = time.time()
-            for (x, y, w, h) in faces:
-                face_roi = img[y:y+h, x:x+w]
 
-                confident = is_confident_pose(face_roi)
-                status = "✅ Confident Pose" if confident else "⚠️ Look Straight!"
-                color = (0, 255, 0) if confident else (0, 255, 255)
-                cv2.putText(img, status, (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.7, color, 2)
-                cv2.rectangle(img, (x, y), (x + w, y + h), color, 2)
 
-
-                if uploaded_image and (time.time() - last_verified_time > VERIFY_INTERVAL):
-                    matched = match_identity(face_roi, user_ref_img)
-                    last_verified_time = time.time()
-                    if matched:
-                        cv2.putText(img, "✅ Identity Verified", (x, y + h + 30),
-                                    cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 255, 0), 2)
-                    else:
-                        cv2.putText(img, "❌ Identity mismatch!", (x, y + h + 30),
-                                    cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 0, 255), 2)
 
-        return av.VideoFrame.from_ndarray(img, format="bgr24")
 
-
-# Activate Webcam Stream
-# ---------------------------------------------
-webrtc_streamer(
-    key="monitor",
-    video_processor_factory=VideoProcessor,
-    mode=WebRtcMode.RECVONLY,
-    rtc_configuration=RTCConfiguration(iceServers=[])
-)
+from streamlit_webrtc import webrtc_streamer, RTCConfiguration
 import av
+import cv2
+import time
 import streamlit as st
 
+cascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
 
+st.title(":violet[FACE DETECTION NOTIFIER]")
+a = st.button(":blue[PUSH NOTIFICATIONS]")
 
 
+if a:
+    st.toast("PUSH NOTIFICATION ENABLED")
+    time.sleep(5)
+    st.toast("WAKE UP!DON'T SLEEP")
+
+
 class VideoProcessor:
+    def recv(self, frame):
+        frm = frame.to_ndarray(format="bgr24")
 
+        faces = cascade.detectMultiScale(cv2.cvtColor(frm, cv2.COLOR_BGR2GRAY), 1.1, 3)
 
+        for x,y,w,h in faces:
+            cv2.rectangle(frm, (x,y), (x+w, y+h), (0,255,0), 3)
 
+        return av.VideoFrame.from_ndarray(frm, format='bgr24')
 
+webrtc_streamer(key="key", video_processor_factory=VideoProcessor,
+                rtc_configuration=RTCConfiguration(
+                    {"iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]}
+                )
+                )
 
+
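For anyone trying the new app.py outside the Space, the sketch below is a minimal, Streamlit-free way to sanity-check the face-detection step on a single image. It is not part of the commit: the committed code loads haarcascade_frontalface_default.xml by bare filename, which assumes that XML sits next to app.py, so this sketch falls back to the copy bundled with opencv-python via cv2.data.haarcascades when no local file is present. The file names sample.jpg and sample_annotated.jpg are placeholders.

# Minimal sketch (not part of the commit): exercise the same Haar cascade
# detection the new app.py uses, without Streamlit or WebRTC.
import os
import cv2

# app.py loads the cascade by bare filename; fall back to the copy that
# ships with opencv-python if no local XML is found (assumption for local runs).
CASCADE_FILE = "haarcascade_frontalface_default.xml"
cascade_path = CASCADE_FILE if os.path.exists(CASCADE_FILE) else (
    cv2.data.haarcascades + CASCADE_FILE
)
cascade = cv2.CascadeClassifier(cascade_path)
assert not cascade.empty(), "Haar cascade failed to load"

# "sample.jpg" is a placeholder test image, not a file from the repo.
frame = cv2.imread("sample.jpg")
if frame is None:
    raise SystemExit("Put any test image next to this script as sample.jpg")

# Same parameters as recv(): scaleFactor=1.1, minNeighbors=3.
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = cascade.detectMultiScale(gray, 1.1, 3)

# Draw the same green boxes the app draws on each video frame.
for (x, y, w, h) in faces:
    cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 3)

print(f"Detected {len(faces)} face(s)")
cv2.imwrite("sample_annotated.jpg", frame)

If the Space keeps the cascade XML in the repository root, the bare-filename load in the commit works as-is; the cv2.data fallback here is only a convenience for running the detection logic elsewhere.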