Update app.py
app.py CHANGED
@@ -58,16 +58,17 @@ paused: bool = False
 frame_rate: float = 0.3
 frame_count: int = 0
 log_entries: List[str] = []
-crack_counts: List[int] = []
-crack_severity_all: List[str] = []
+detected_counts: List[int] = []
 last_frame: Optional[np.ndarray] = None
 last_metrics: Dict[str, Any] = {}
 last_timestamp: str = ""
+detected_plants: List[str] = []  # For plant detections
+detected_issues: List[str] = []  # For cracks, holes, and missing patches
 gps_coordinates: List[List[float]] = []
+media_loaded: bool = False
 active_service: Optional[str] = None
+is_video: bool = True
+static_image: Optional[np.ndarray] = None

 # Constants
 DEFAULT_VIDEO_PATH = "sample.mp4"
@@ -77,28 +78,47 @@ OUTPUT_DIR = "outputs"
 os.makedirs(CAPTURED_FRAMES_DIR, exist_ok=True)
 os.makedirs(OUTPUT_DIR, exist_ok=True)

+def initialize_media(media_file: Optional[Any] = None) -> str:
+    global media_loaded, is_video, static_image, log_entries
     release_video()
+    static_image = None
+    media_path = DEFAULT_VIDEO_PATH if media_file is None else media_file.name

+    if not os.path.exists(media_path):
+        status = f"Error: Media file '{media_path}' not found."
         log_entries.append(status)
         logging.error(status)
+        media_loaded = False
         return status

     try:
+        # Determine if the file is a video or image
+        if media_path.lower().endswith((".mp4", ".avi")):
+            is_video = True
+            preload_video(media_path)
+            media_loaded = True
+            status = f"Successfully loaded video: {media_path}"
+        elif media_path.lower().endswith((".jpg", ".jpeg", ".png")):
+            is_video = False
+            static_image = cv2.imread(media_path)
+            if static_image is None:
+                raise RuntimeError(f"Failed to load image: {media_path}")
+            static_image = cv2.resize(static_image, (320, 240))
+            media_loaded = True
+            status = f"Successfully loaded image: {media_path}"
+        else:
+            media_loaded = False
+            status = "Error: Unsupported file format. Use .mp4, .avi, .jpg, .jpeg, or .png."
+            log_entries.append(status)
+            logging.error(status)
+            return status
+
         log_entries.append(status)
         logging.info(status)
         return status
     except Exception as e:
+        media_loaded = False
+        status = f"Error loading media: {str(e)}"
         log_entries.append(status)
         logging.error(status)
         return status
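The branch above dispatches on file extension alone. As a standalone illustration of that dispatch, here is a minimal sketch; probe_media is a hypothetical helper invented for this example, not a function in app.py:

import os
from typing import Tuple

VIDEO_EXTS = (".mp4", ".avi")
IMAGE_EXTS = (".jpg", ".jpeg", ".png")

def probe_media(path: str) -> Tuple[bool, str]:
    """Classify a media file by extension; returns (is_video, status)."""
    if not os.path.exists(path):
        return False, f"Error: Media file '{path}' not found."
    lower = path.lower()
    if lower.endswith(VIDEO_EXTS):
        return True, f"Video: {path}"
    if lower.endswith(IMAGE_EXTS):
        return False, f"Image: {path}"
    return False, "Error: Unsupported file format."

print(probe_media("sample.mp4"))  # (True, 'Video: sample.mp4') if the file exists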
@@ -137,11 +157,11 @@ def set_active_service(
     return None, "No Service Category Enabled"

 def generate_line_chart() -> Optional[str]:
-    if not crack_counts:
+    if not detected_counts:
         return None
     fig, ax = plt.subplots(figsize=(4, 2))
+    ax.plot(detected_counts[-50:], marker='o', color='#4682B4')
+    ax.set_title("Detections Over Time")
     ax.set_xlabel("Frame")
     ax.set_ylabel("Count")
     ax.grid(True)
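generate_line_chart is typed Optional[str], so it hands Gradio a file path rather than a figure object. The save step falls outside this hunk; a plausible sketch of that pattern, with the Agg backend and the chart.png name as assumptions:

import matplotlib
matplotlib.use("Agg")  # headless backend for server-side rendering
import matplotlib.pyplot as plt

def render_counts(counts: list, out_path: str = "chart.png") -> str:
    """Plot the last 50 counts and save them to a PNG Gradio can display."""
    fig, ax = plt.subplots(figsize=(4, 2))
    ax.plot(counts[-50:], marker='o', color='#4682B4')
    ax.set_title("Detections Over Time")
    ax.set_xlabel("Frame")
    ax.set_ylabel("Count")
    ax.grid(True)
    fig.tight_layout()
    fig.savefig(out_path)
    plt.close(fig)  # free the figure to avoid leaking memory across calls
    return out_path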
@@ -161,17 +181,18 @@ def monitor_feed() -> Tuple[
     Optional[str]
 ]:
     global paused, frame_count, last_frame, last_metrics, last_timestamp
+    global gps_coordinates, detected_plants, detected_issues, media_loaded
+    global is_video, static_image

+    if not media_loaded:
+        log_entries.append("Cannot start processing: Media not loaded successfully.")
+        logging.error("Media not loaded successfully.")
         return (
             None,
+            json.dumps({"error": "Media not loaded. Please upload a video or image file."}, indent=2),
             "\n".join(log_entries[-10:]),
+            detected_plants,
+            detected_issues,
             None,
             None
         )
@@ -181,9 +202,12 @@ def monitor_feed() -> Tuple[
         metrics = last_metrics.copy()
     else:
         try:
+            if is_video:
+                frame = get_next_video_frame()
+                if frame is None:
+                    raise RuntimeError("Failed to retrieve frame from video.")
+            else:
+                frame = static_image.copy()
         except RuntimeError as e:
             log_entries.append(f"Error: {str(e)}")
             logging.error(f"Frame retrieval error: {str(e)}")
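get_next_video_frame, preload_video, and release_video are defined elsewhere in app.py and do not appear in this diff. The following is only a guess at their shape, consistent with how they are called here (a frame or None comes back, the capture is opened by preload_video); the real implementation may differ:

import cv2
import numpy as np
from typing import Optional

_cap: Optional[cv2.VideoCapture] = None

def preload_video(path: str) -> None:
    """Open the capture once, as initialize_media does after release_video()."""
    global _cap
    _cap = cv2.VideoCapture(path)
    if not _cap.isOpened():
        raise RuntimeError(f"Cannot open video: {path}")

def get_next_video_frame() -> Optional[np.ndarray]:
    """Return the next frame, rewinding to the start at end of file."""
    if _cap is None:
        return None
    ok, frame = _cap.read()
    if not ok:
        _cap.set(cv2.CAP_PROP_POS_FRAMES, 0)  # rewind and retry once
        ok, frame = _cap.read()
    return frame if ok else None

def release_video() -> None:
    global _cap
    if _cap is not None:
        _cap.release()
        _cap = None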
@@ -191,8 +215,8 @@ def monitor_feed() -> Tuple[
                 None,
                 json.dumps(last_metrics, indent=2),
                 "\n".join(log_entries[-10:]),
+                detected_plants,
+                detected_issues,
                 None,
                 None
             )
@@ -240,8 +264,28 @@ def monitor_feed() -> Tuple[
         all_detected_items.extend(thermal_dets)
         thermal_flag = bool(thermal_dets)

-    # Overlay detections
+    # Overlay detections with distinct colors
+    for item in all_detected_items:
+        box = item.get("box", [])
+        if not box:
+            continue
+        x_min, y_min, x_max, y_max = box
+        label = item.get("label", "")
+        dtype = item.get("type", "")
+        # Assign colors based on detection type (BGR order, since the frame comes from OpenCV)
+        if dtype == "plant":
+            color = (0, 255, 0)    # green for plants
+        elif dtype == "crack":
+            color = (0, 0, 255)    # red for cracks
+        elif dtype == "hole":
+            color = (255, 0, 0)    # blue for holes
+        elif dtype == "missing_patch":
+            color = (0, 165, 255)  # orange for missing patches
+        else:
+            color = (0, 255, 255)  # yellow for others
+        cv2.rectangle(frame, (x_min, y_min), (x_max, y_max), color, 2)
+        cv2.putText(frame, label, (x_min, y_min - 10),
+                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)

     # Save temporary image
     cv2.imwrite(TEMP_IMAGE_PATH, frame, [int(cv2.IMWRITE_JPEG_QUALITY), 95])
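OpenCV draws in BGR channel order, which is why red is (0, 0, 255) here even though the frame is flipped to RGB just before it reaches Gradio. A small self-contained check of that channel flip:

import numpy as np
import cv2

canvas = np.zeros((32, 32, 3), dtype=np.uint8)
cv2.rectangle(canvas, (4, 4), (28, 28), (0, 0, 255), 2)  # red in BGR
b, g, r = canvas[4, 4]
assert (b, g, r) == (0, 0, 255)
rgb = canvas[:, :, ::-1]  # same flip the dashboard applies before returning to Gradio
assert tuple(rgb[4, 4]) == (255, 0, 0)  # red once channels are reordered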
@@ -265,14 +309,15 @@ def monitor_feed() -> Tuple[
             captured_frame_path = os.path.join(CAPTURED_FRAMES_DIR, f"detected_{frame_count}.jpg")
             cv2.imwrite(captured_frame_path, frame)
             for item in all_detected_items:
+                dtype = item.get("type", "")
+                if dtype == "plant":
+                    detected_plants.append(captured_frame_path)
+                    if len(detected_plants) > 100:
+                        detected_plants.pop(0)
+                elif dtype in ["crack", "hole", "missing_patch"]:
+                    detected_issues.append(captured_frame_path)
+                    if len(detected_issues) > 100:
+                        detected_issues.pop(0)
         except Exception as e:
             log_entries.append(f"Error saving captured frame: {str(e)}")
             logging.error(f"Error saving captured frame: {str(e)}")
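The append-then-pop(0) pattern bounds each gallery list at 100 paths, but pop(0) on a Python list is O(n). A collections.deque with maxlen, shown here as an alternative rather than what the commit uses, gives the same cap with O(1) eviction:

from collections import deque

detected_plants = deque(maxlen=100)  # oldest path is dropped automatically
for i in range(150):
    detected_plants.append(f"captured_frames/detected_{i}.jpg")
assert len(detected_plants) == 100
assert detected_plants[0].endswith("detected_50.jpg")

A deque would only need wrapping in list(...) before being handed to gr.Gallery.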
@@ -309,29 +354,25 @@ def monitor_feed() -> Tuple[
     last_frame = frame.copy()
     last_metrics = metrics

+    # Track detections for metrics
+    plant_detected = len([item for item in all_detected_items if item.get("type") == "plant"])
     crack_detected = len([item for item in all_detected_items if item.get("type") == "crack"])
     hole_detected = len([item for item in all_detected_items if item.get("type") == "hole"])
-    crack_severity_all.extend([
-        item["severity"]
-        for item in all_detected_items
-        if item.get("type") in ["crack", "hole"] and "severity" in item
-    ])
-    crack_counts.append(crack_detected + hole_detected)
+    missing_detected = len([item for item in all_detected_items if item.get("type") == "missing_patch"])
+    detected_counts.append(plant_detected + crack_detected + hole_detected + missing_detected)

     # Log frame processing details
+    log_message = (f"{last_timestamp} - Frame {frame_count} - "
+                   f"Plants: {plant_detected} - Cracks: {crack_detected} - "
+                   f"Holes: {hole_detected} - Missing Patches: {missing_detected} - GPS: {gps_coord}")
     log_entries.append(log_message)
     logging.info(log_message)

+    # Limit the size of logs and detection data
     if len(log_entries) > 100:
         log_entries.pop(0)
-    if len(crack_counts) > 500:
-        crack_counts.pop(0)
-    if len(crack_severity_all) > 500:
-        crack_severity_all.pop(0)
+    if len(detected_counts) > 500:
+        detected_counts.pop(0)

     # Resize frame and add metadata for display
     frame = cv2.resize(last_frame, (640, 480))
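Each of the four counts above rescans all_detected_items. A single collections.Counter pass yields the same totals; a sketch under the same item schema, with illustrative sample items:

from collections import Counter

items = [
    {"type": "plant"}, {"type": "crack"}, {"type": "crack"},
    {"type": "hole"}, {"type": "missing_patch"},
]
by_type = Counter(item.get("type") for item in items)
plant_detected = by_type["plant"]            # 1
crack_detected = by_type["crack"]            # 2
hole_detected = by_type["hole"]              # 1
missing_detected = by_type["missing_patch"]  # 1
assert plant_detected + crack_detected + hole_detected + missing_detected == 5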
@@ -339,14 +380,15 @@ def monitor_feed() -> Tuple[
     cv2.putText(frame, f"{last_timestamp}", (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)

     # Generate map
+    map_items = [item for item in last_metrics.get("items", []) if item.get("type") in ["crack", "hole", "missing_patch"]]
+    map_path = generate_map(gps_coordinates[-5:], map_items)

     return (
         frame[:, :, ::-1],  # Convert BGR to RGB for Gradio
         json.dumps(last_metrics, indent=2),
         "\n".join(log_entries[-10:]),
+        detected_plants,
+        detected_issues,
         generate_line_chart(),
         map_path
     )
@@ -356,18 +398,18 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue", secondary_hue="green"))
     gr.Markdown(
         """
         # 🛡️ NHAI Drone Road Inspection Dashboard
-        Monitor highway conditions in real-time using drone footage. Select a service category to analyze specific aspects of the road.
+        Monitor highway conditions in real-time using drone footage or static images. Select a service category to analyze specific aspects of the road or plantation.
         """
     )

     with gr.Row():
         with gr.Column(scale=3):
+            media_input = gr.File(label="Upload Media File (e.g., sample.mp4, image.jpg)", file_types=[".mp4", ".avi", ".jpg", ".jpeg", ".png"])
+            load_button = gr.Button("Load Media", variant="primary")
         with gr.Column(scale=1):
-            value="Please upload a video file or ensure 'sample.mp4' exists in the root directory.",
+            media_status = gr.Textbox(
+                label="Media Load Status",
+                value="Please upload a video/image file or ensure 'sample.mp4' exists in the root directory.",
                 interactive=False
             )
@@ -385,29 +427,29 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue", secondary_hue="green"))
         pl_toggle = gr.Checkbox(label="Enable Plantation Services", value=False)
         pl_status = gr.Textbox(label="Plantation Status", value="Disabled", interactive=False)

+    status_text = gr.Markdown("**Status:** 🟢 Ready (Upload a media file to start)")

     with gr.Row():
         with gr.Column(scale=3):
+            media_output = gr.Image(label="Live Feed", width=640, height=480, elem_id="live-feed")
         with gr.Column(scale=1):
             metrics_output = gr.Textbox(
                 label="Detection Metrics",
                 lines=10,
                 interactive=False,
+                placeholder="Detection metrics, counts will appear here."
             )

     with gr.Row():
         with gr.Column(scale=2):
             logs_output = gr.Textbox(label="Live Logs", lines=8, interactive=False)
         with gr.Column(scale=1):
+            plant_images = gr.Gallery(label="Detected Plants (Last 100+)", columns=4, rows=13, height="auto")
+            issue_images = gr.Gallery(label="Detected Issues (Last 100+)", columns=4, rows=13, height="auto")

     with gr.Row():
+        chart_output = gr.Image(label="Detection Trend")
+        map_output = gr.Image(label="Issue Locations Map")

     with gr.Row():
         pause_btn = gr.Button("⏸️ Pause", variant="secondary")
@@ -443,12 +485,12 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue", secondary_hue="green"))
         global frame_rate
         frame_rate = val

+    media_status.value = initialize_media()

     load_button.click(
+        initialize_media,
+        inputs=[media_input],
+        outputs=[media_status]
     )
@@ -474,17 +516,20 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue", secondary_hue="green"))

     def streaming_loop():
         while True:
+            if not media_loaded:
+                yield None, json.dumps({"error": "Media not loaded. Please upload a video or image file."}, indent=2), "\n".join(log_entries[-10:]), detected_plants, detected_issues, None, None
             else:
+                frame, metrics, logs, plants, issues, chart, map_path = monitor_feed()
                 if frame is None:
+                    yield None, metrics, logs, plants, issues, chart, map_path
                 else:
+                    yield frame, metrics, logs, plants, issues, chart, map_path
+                if not is_video:
+                    # For static images, yield once and pause
+                    break
            time.sleep(frame_rate)

+    app.load(streaming_loop, outputs=[media_output, metrics_output, logs_output, plant_images, issue_images, chart_output, map_output])

 if __name__ == "__main__":
     app.launch(share=False)
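app.load with a generator is what drives the stream: each yield pushes a fresh set of values into the bound components until the generator returns. A minimal sketch of the same pattern, with a toy counter in place of monitor_feed and assuming a recent Gradio release:

import time
import gradio as gr

with gr.Blocks() as demo:
    ticker = gr.Textbox(label="Ticker")

    def tick():
        # Generator output: every yield updates the bound component.
        for i in range(5):
            yield f"frame {i}"
            time.sleep(0.3)

    demo.load(tick, outputs=[ticker])

if __name__ == "__main__":
    demo.launch()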