Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -8,21 +8,24 @@ import numpy as np
 from datetime import datetime
 from collections import Counter
 from services.video_service import get_next_video_frame, reset_video_index
-from services.
+from services.crack_detection_service import detect_cracks
 from services.overlay_service import overlay_boxes
 from services.metrics_service import update_metrics
+from services.map_service import generate_map
+from services.utils import simulate_gps_coordinates

 # Globals
 paused = False
 frame_rate = 1
 frame_count = 0
 log_entries = []
-
-
+crack_counts = []
+crack_severity_all = []
 last_frame = None
 last_metrics = {}
 last_timestamp = ""
 last_detected_images = []
+gps_coordinates = []

 # Constants
 TEMP_IMAGE_PATH = "temp.jpg"
@@ -31,23 +34,25 @@ os.makedirs(CAPTURED_FRAMES_DIR, exist_ok=True)

 # Core monitor function
 def monitor_feed():
-    global paused, frame_count, last_frame, last_metrics, last_timestamp
+    global paused, frame_count, last_frame, last_metrics, last_timestamp, gps_coordinates

     if paused and last_frame is not None:
         frame = last_frame.copy()
         metrics = last_metrics.copy()
     else:
         frame = get_next_video_frame()
-        detected_boxes =
+        detected_boxes = detect_cracks(frame)
         frame = overlay_boxes(frame, detected_boxes)
         cv2.imwrite(TEMP_IMAGE_PATH, frame, [int(cv2.IMWRITE_JPEG_QUALITY), 95])
         metrics = update_metrics(detected_boxes)

         frame_count += 1
         last_timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        gps_coord = simulate_gps_coordinates(frame_count)  # Simulate GPS
+        gps_coordinates.append(gps_coord)

         if detected_boxes:
-            captured_frame_path = os.path.join(CAPTURED_FRAMES_DIR, f"
+            captured_frame_path = os.path.join(CAPTURED_FRAMES_DIR, f"crack_{frame_count}.jpg")
             cv2.imwrite(captured_frame_path, frame)
             last_detected_images.append(captured_frame_path)
             if len(last_detected_images) > 5:
@@ -56,37 +61,37 @@ def monitor_feed():
         last_frame = frame.copy()
         last_metrics = metrics.copy()

-        # Update
-
-
-
-
-
-        ])
+        # Update logs and stats
+        crack_detected = len(last_metrics.get('cracks', []))
+        crack_severity_all.extend([
+            a['severity']
+            for a in last_metrics.get('cracks', [])
+            if isinstance(a, dict) and 'severity' in a
+        ])

-
-
-        anomaly_counts.append(anomaly_detected)
+        log_entries.append(f"{last_timestamp} - Frame {frame_count} - Cracks: {crack_detected} - GPS: {gps_coord}")
+        crack_counts.append(crack_detected)

         if len(log_entries) > 100:
             log_entries.pop(0)
-        if len(
-
-        if len(
-
+        if len(crack_counts) > 500:
+            crack_counts.pop(0)
+        if len(crack_severity_all) > 500:
+            crack_severity_all.pop(0)

     frame = cv2.resize(last_frame, (640, 480))
     cv2.putText(frame, f"Frame: {frame_count}", (10, 25), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
     cv2.putText(frame, f"{last_timestamp}", (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)

-
+    map_path = generate_map(gps_coordinates[-5:], last_metrics.get('cracks', []))

-
+    return frame[:, :, ::-1], last_metrics, "\n".join(log_entries[-10:]), generate_line_chart(), generate_pie_chart(), last_detected_images, map_path

+# Line chart
 def generate_line_chart():
     fig, ax = plt.subplots(figsize=(4, 2))
-    ax.plot(
-    ax.set_title("
+    ax.plot(crack_counts[-50:], marker='o')
+    ax.set_title("Cracks Over Time")
     ax.set_xlabel("Frame")
     ax.set_ylabel("Count")
     fig.tight_layout()
@@ -95,12 +100,12 @@ def generate_line_chart():
     plt.close(fig)
     return chart_path

-# Pie chart for
+# Pie chart for crack severity
 def generate_pie_chart():
-    if not
+    if not crack_severity_all:
         return None
     fig, ax = plt.subplots(figsize=(4, 2))
-    count = Counter(
+    count = Counter(crack_severity_all[-200:])
     labels, sizes = zip(*count.items())
     ax.pie(sizes, labels=labels, autopct='%1.1f%%', startangle=140)
     ax.axis('equal')
@@ -112,23 +117,24 @@ def generate_pie_chart():

 # Gradio UI
 with gr.Blocks(theme=gr.themes.Soft()) as app:
-    gr.Markdown("# 🛡️
+    gr.Markdown("# 🛡️ Drone Road Inspection Dashboard")

     status_text = gr.Markdown("**Status:** 🟢 Running")

     with gr.Row():
         with gr.Column(scale=3):
-            video_output = gr.Image(label="Live
+            video_output = gr.Image(label="Live Drone Feed", width=640, height=480)
         with gr.Column(scale=1):
-            metrics_output = gr.Textbox(label="
+            metrics_output = gr.Textbox(label="Crack Metrics", lines=4)

     with gr.Row():
         logs_output = gr.Textbox(label="Live Logs", lines=8)
-        chart_output = gr.Image(label="
-        pie_output = gr.Image(label="
+        chart_output = gr.Image(label="Crack Trend")
+        pie_output = gr.Image(label="Crack Severity")

     with gr.Row():
-
+        map_output = gr.Image(label="Crack Locations Map")
+        captured_images = gr.Gallery(label="Detected Cracks (Last 5)")

     with gr.Row():
         pause_btn = gr.Button("⏸️ Pause")
@@ -155,12 +161,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as app:

     def streaming_loop():
         while True:
-            frame, metrics, logs, chart, pie, captured = monitor_feed()
-
-            yield frame, str(metrics), logs, chart, pie, captured
+            frame, metrics, logs, chart, pie, captured, map_path = monitor_feed()
+            yield frame, str(metrics), logs, chart, pie, captured, map_path
             time.sleep(frame_rate)

-    app.load(streaming_loop, outputs=[video_output, metrics_output, logs_output, chart_output, pie_output, captured_images])
+    app.load(streaming_loop, outputs=[video_output, metrics_output, logs_output, chart_output, pie_output, captured_images, map_output])

 if __name__ == "__main__":
-    app.launch(share=True)
+    app.launch(share=True)
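
The commit imports services that are not part of this diff. From the way monitor_feed() consumes them, detect_cracks(frame) is expected to return a truthy list of detections that overlay_boxes() can draw, and update_metrics(detected_boxes) a dict whose 'cracks' entry holds per-detection dicts carrying a 'severity' key. The stubs below are only a sketch of those assumed interfaces (the bounding-box format, severity labels, and field names are guesses, not the actual service code):

# Hypothetical sketch of the service interfaces assumed by app.py.
# The real services/crack_detection_service.py and services/metrics_service.py
# are not shown in this diff; the shapes below are inferred from how app.py uses them.
from typing import Any, Dict, List
import numpy as np

def detect_cracks(frame: np.ndarray) -> List[Dict[str, Any]]:
    """Return one dict per detected crack; app.py only needs the list to be
    truthy when something is found and drawable by overlay_boxes()."""
    # Placeholder detection: a real detector (e.g. a trained model) goes here.
    return [{"bbox": (50, 60, 120, 200), "severity": "minor", "confidence": 0.82}]

def update_metrics(detected_boxes: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Aggregate detections into the metrics dict that monitor_feed() reads
    back via last_metrics.get('cracks', [])."""
    return {
        "cracks": detected_boxes,          # each item carries a 'severity' key
        "total_cracks": len(detected_boxes),
    }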
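
The two other new dependencies, simulate_gps_coordinates() and generate_map(), are likewise not in this commit. A minimal sketch consistent with how app.py calls them is shown below: the GPS helper only needs to return a (lat, lon) pair per frame, and the map helper must return an image file path that gr.Image can display. The base coordinates and the matplotlib rendering are assumptions; the real services/map_service.py might render tiles with folium or a similar library instead.

# Hypothetical sketch of services/utils.simulate_gps_coordinates and
# services/map_service.generate_map, matching only the call signatures in app.py.
import matplotlib
matplotlib.use("Agg")  # headless rendering inside the Space
import matplotlib.pyplot as plt

BASE_LAT, BASE_LON = 37.7749, -122.4194  # assumed start point for the simulated flight

def simulate_gps_coordinates(frame_count: int) -> tuple:
    """Drift slightly every frame so consecutive frames get distinct coordinates."""
    return (round(BASE_LAT + frame_count * 1e-4, 6),
            round(BASE_LON + frame_count * 1e-4, 6))

def generate_map(recent_coords: list, cracks: list, out_path: str = "map.png") -> str:
    """Plot the last few GPS fixes, flag the newest one when cracks were found,
    and return the saved image path for gr.Image."""
    fig, ax = plt.subplots(figsize=(4, 3))
    if recent_coords:
        lats, lons = zip(*recent_coords)
        ax.plot(lons, lats, marker="o", linestyle="-")
        if cracks:
            ax.scatter([lons[-1]], [lats[-1]], s=120, marker="x")  # latest detection
    ax.set_xlabel("Longitude")
    ax.set_ylabel("Latitude")
    ax.set_title("Recent drone positions")
    fig.tight_layout()
    fig.savefig(out_path)
    plt.close(fig)
    return out_path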
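
The streaming_loop wiring relies on Gradio's support for generator event handlers: a generator passed to Blocks.load() keeps yielding, and each yielded tuple is pushed into the listed output components in order, which is why monitor_feed() must now return seven values to match the seven outputs. A minimal, self-contained illustration of that pattern (component names here are invented for the demo, not taken from app.py):

import time
import gradio as gr

with gr.Blocks() as demo:
    counter_box = gr.Textbox(label="Frames processed")
    clock_box = gr.Textbox(label="Last update")

    def ticker():
        i = 0
        while True:
            i += 1
            # One yielded value per output component, in the same order as `outputs`.
            yield str(i), time.strftime("%H:%M:%S")
            time.sleep(1)

    demo.load(ticker, outputs=[counter_box, clock_box])

if __name__ == "__main__":
    demo.launch()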