nagasurendra committed on
Commit d4f980a · verified · 1 Parent(s): a750b8c

Update app.py

Files changed (1)
  1. app.py +389 -284
app.py CHANGED
@@ -14,7 +14,6 @@ from typing import List, Dict, Any, Optional
  from ultralytics import YOLO
  import piexif
  import zipfile
- import subprocess
 
  os.environ["YOLO_CONFIG_DIR"] = "/tmp/Ultralytics"
  logging.basicConfig(filename="app.log", level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
@@ -47,57 +46,168 @@ def zip_all_outputs(report_path: str, video_path: str, chart_path: str, map_path
  zip_path = os.path.join(OUTPUT_DIR, f"drone_analysis_outputs_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip")
  try:
  with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
- # Add report file
- if report_path and os.path.exists(report_path):
  zipf.write(report_path, os.path.basename(report_path))
- log_entries.append(f"Added report to ZIP: {report_path}")
- else:
- log_entries.append(f"Warning: Report file not found for ZIP: {report_path}")
-
- # Add video file
- if video_path and os.path.exists(video_path):
  zipf.write(video_path, os.path.join("outputs", os.path.basename(video_path)))
- log_entries.append(f"Added video to ZIP: {video_path}")
- else:
- log_entries.append(f"Warning: Video file not found for ZIP: {video_path}")
-
- # Add chart file
- if chart_path and os.path.exists(chart_path):
  zipf.write(chart_path, os.path.join("outputs", os.path.basename(chart_path)))
- log_entries.append(f"Added chart to ZIP: {chart_path}")
- else:
- log_entries.append(f"Warning: Chart file not found for ZIP: {chart_path}")
-
- # Add map file
- if map_path and os.path.exists(map_path):
  zipf.write(map_path, os.path.join("outputs", os.path.basename(map_path)))
- log_entries.append(f"Added map to ZIP: {map_path}")
- else:
- log_entries.append(f"Warning: Map file not found for ZIP: {map_path}")
-
- # Add detected issue images
  for file in detected_issues:
  if os.path.exists(file):
  zipf.write(file, os.path.join("captured_frames", os.path.basename(file)))
- log_entries.append(f"Added image to ZIP: {file}")
- else:
- log_entries.append(f"Warning: Image file not found for ZIP: {file}")
-
- # Add flight logs
  for root, _, files in os.walk(FLIGHT_LOG_DIR):
  for file in files:
  file_path = os.path.join(root, file)
- if os.path.exists(file_path):
- zipf.write(file_path, os.path.join("flight_logs", file))
- log_entries.append(f"Added flight log to ZIP: {file_path}")
- else:
- log_entries.append(f"Warning: Flight log not found for ZIP: {file_path}")
 
  log_entries.append(f"Created ZIP: {zip_path}")
- if os.path.exists(zip_path):
- log_entries.append(f"Confirmed ZIP file exists: {zip_path}")
- else:
- log_entries.append(f"Error: ZIP file not created: {zip_path}")
  return zip_path
  except Exception as e:
  log_entries.append(f"Error: Failed to create ZIP: {str(e)}")
@@ -198,206 +308,10 @@ def generate_report(
198
  inference_times: List[float],
199
  io_times: List[float]
200
  ) -> str:
201
- # Generating LaTeX-based PDF report
202
  log_entries.append("Generating report...")
 
203
  timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
204
- report_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{timestamp}.pdf")
205
- tex_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{timestamp}.tex")
206
- md_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{timestamp}.md") # Fallback Markdown path
207
-
208
- # LaTeX document content
209
  report_content = [
210
- r"\documentclass[a4paper,12pt]{article}",
211
- r"\usepackage[utf8]{inputenc}",
212
- r"\usepackage[T1]{fontenc}",
213
- r"\usepackage{geometry}",
214
- r"\geometry{margin=1in}",
215
- r"\usepackage{graphicx}",
216
- r"\graphicspath{{./}}",
217
- r"\usepackage{booktabs}",
218
- r"\usepackage{longtable}",
219
- r"\usepackage{enumitem}",
220
- r"\usepackage{hyperref}",
221
- r"\hypersetup{colorlinks=true,linkcolor=blue,filecolor=blue,urlcolor=blue}",
222
- r"\usepackage{amsmath}",
223
- r"\usepackage{caption}",
224
- r"\usepackage{pdfpages}",
225
- r"\usepackage{times}",
226
- r"\begin{document}",
227
- r"\title{NHAI Drone Survey Analysis Report}",
228
- r"\author{Nagasurendra, Data Analyst}",
229
- r"\date{\today}",
230
- r"\maketitle",
231
- r"\tableofcontents",
232
- r"\newpage",
233
- r"\section{Project Details}",
234
- r"\begin{itemize}",
235
- r"\item \textbf{Project Name}: NH-44 Delhi-Hyderabad Section (Package XYZ)",
236
- r"\item \textbf{Highway Section}: Km 100 to Km 150",
237
- r"\item \textbf{State}: Telangana",
238
- r"\item \textbf{Region}: South",
239
- rf"\item \textbf{{Survey Date}}: {datetime.now().strftime('%Y-%m-%d')}",
240
- r"\item \textbf{Drone Service Provider}: ABC Drone Services Pvt. Ltd.",
241
- r"\item \textbf{Technology Service Provider}: XYZ AI Analytics Ltd.",
242
- rf"\item \textbf{{Work Order Reference}}: Data Lake WO-{datetime.now().strftime('%Y-%m-%d')}-XYZ",
243
- r"\item \textbf{Report Prepared By}: Nagasurendra, Data Analyst",
244
- rf"\item \textbf{{Report Date}}: {datetime.now().strftime('%Y-%m-%d')}",
245
- r"\end{itemize}",
246
- r"\section{Introduction}",
247
- r"This report consolidates drone survey results for NH-44 (Km 100--150) under Operations \& Maintenance, per NHAI Policy Circular No. 18.98/2024, detecting potholes and cracks using YOLOv8 for Monthly Progress Report integration.",
248
- r"\section{Drone Survey Metadata}",
249
- r"\begin{itemize}",
250
- r"\item \textbf{Drone Speed}: 5 m/s",
251
- r"\item \textbf{Drone Height}: 60 m",
252
- r"\item \textbf{Camera Sensor}: RGB, 12 MP",
253
- r"\item \textbf{Recording Type}: JPEG, 90$^\circ$ nadir",
254
- r"\item \textbf{Image Overlap}: 85\%",
255
- r"\item \textbf{Flight Pattern}: Single lap, ROW centered",
256
- r"\item \textbf{Geotagging}: Enabled",
257
- r"\item \textbf{Satellite Lock}: 12 satellites",
258
- r"\item \textbf terrain Follow Mode}: Enabled",
259
- r"\end{itemize}",
260
- r"\section{Quality Check Results}",
261
- r"\begin{itemize}",
262
- r"\item \textbf{Resolution}: 4000x3000 (12 MP)",
263
- r"\item \textbf{Overlap}: 85\%",
264
- r"\item \textbf{Camera Angle}: 90$^\circ$ nadir",
265
- r"\item \textbf{Drone Speed}: $\leq$ 5 m/s",
266
- r"\item \textbf{Geotagging}: 100\% compliant",
267
- r"\item \textbf{QC Status}: Passed",
268
- r"\end{itemize}",
269
- r"\section{AI/ML Analytics}",
270
- rf"Total Frames Processed: {frame_count}\par",
271
- rf"Detection Frames: {detection_frame_count} ({detection_frame_count/frame_count*100:.2f}\%)\par",
272
- rf"Total Detections: {metrics['total_detections']}\par",
273
- r"\textbf{Breakdown:}",
274
- r"\begin{itemize}"
275
- ]
276
-
277
- for item in metrics.get("items", []):
278
- percentage = (item["count"] / metrics["total_detections"] * 100) if metrics["total_detections"] > 0 else 0
279
- report_content.append(rf"\item {item['type']}: {item['count']} ({percentage:.2f}\%)")
280
- report_content.extend([
281
- r"\end{itemize}",
282
- rf"Processing Time: {total_time:.2f} seconds\par",
283
- rf"Average Frame Time: {sum(frame_times)/len(frame_times):.2f} ms" if frame_times else r"Average Frame Time: N/A\par",
284
- rf"Average Resize Time: {sum(resize_times)/len(resize_times):.2f} ms" if resize_times else r"Average Resize Time: N/A\par",
285
- rf"Average Inference Time: {sum(inference_times)/len(inference_times):.2f} ms" if inference_times else r"Average Inference Time: N/A\par",
286
- rf"Average I/O Time: {sum(io_times)/len(io_times):.2f} ms" if io_times else r"Average I/O Time: N/A\par",
287
- rf"Timestamp: {metrics.get('timestamp', 'N/A')}\par",
288
- r"Summary: Potholes and cracks detected in high-traffic segments.\par",
289
- r"\section{Output File Structure}",
290
- r"\begin{itemize}",
291
- rf"\item \texttt{{drone\_analysis\_report\_<{timestamp}>.pdf}}: This report",
292
- r"\item \texttt{outputs/processed\_output.mp4}: Processed video with annotations",
293
- rf"\item \texttt{{outputs/chart\_<{timestamp}>.png}}: Detection trend chart",
294
- rf"\item \texttt{{outputs/map\_<{timestamp}>.png}}: Issue locations map",
295
- r"\item \texttt{captured\_frames/detected\_<frame>.jpg}: Geotagged images for detected issues",
296
- r"\item \texttt{flight\_logs/flight\_log\_<frame>.csv}: Flight logs matching image frames",
297
- r"\end{itemize}",
298
- r"\textbf{Note}: Images and logs share frame numbers (e.g., \texttt{detected\_000001.jpg} corresponds to \texttt{flight\_log\_000001.csv}).",
299
- r"\section{Geotagged Images}",
300
- rf"Total Images: {len(detected_issues)}\par",
301
- rf"Storage: Data Lake \texttt{{/project\_xyz/images/{datetime.now().strftime('%Y-%m-%d')}}}\par",
302
- r"\begin{longtable}{|c|l|l|l|l|l|}",
303
- r"\hline",
304
- r"\textbf{Frame} & \textbf{Issue Type} & \textbf{GPS (Lat, Lon)} & \textbf{Timestamp} & \textbf{Confidence} & \textbf{Image Path} \\ \hline",
305
- r"\endhead"
306
- ])
307
-
308
- for detection in all_detections[:100]:
309
- report_content.append(
310
- rf"{detection['frame']:06d} & {detection['label']} & ({detection['gps'][0]:.6f}, {detection['gps'][1]:.6f}) & {detection['timestamp']} & {detection['conf']:.2f} & \texttt{{{os.path.basename(detection['path'])}}} \\ \hline"
311
- )
312
- report_content.append(r"\end{longtable}")
313
-
314
- # Embedding geotagged images
315
- report_content.append(r"\subsection{Geotagged Images Display}")
316
- for detection in all_detections[:100]:
317
- image_path = detection['path']
318
- if os.path.exists(image_path):
319
- report_content.append(rf"\begin{{figure}}[h]")
320
- report_content.append(rf"\centering")
321
- report_content.append(rf"\includegraphics[width=0.8\textwidth]{{{image_path}}}")
322
- report_content.append(rf"\caption{{Frame {detection['frame']:06d}: {detection['label']} at ({detection['gps'][0]:.6f}, {detection['gps'][1]:.6f}), Confidence: {detection['conf']:.2f}}}")
323
- report_content.append(rf"\end{{figure}}")
324
-
325
- report_content.extend([
326
- r"\section{Flight Logs}",
327
- rf"Total Logs: {len(detected_issues)}\par",
328
- rf"Storage: Data Lake \texttt{{/project\_xyz/flight\_logs/{datetime.now().strftime('%Y-%m-%d')}}}\par",
329
- r"\begin{longtable}{|c|l|l|l|l|l|l|l|}",
330
- r"\hline",
331
- r"\textbf{Frame} & \textbf{Timestamp} & \textbf{Latitude} & \textbf{Longitude} & \textbf{Speed (m/s)} & \textbf{Satellites} & \textbf{Altitude (m)} & \textbf{Log Path} \\ \hline",
332
- r"\endhead"
333
- ])
334
-
335
- for detection in all_detections[:100]:
336
- log_path = f"flight_logs/flight_log_{detection['frame']:06d}.csv"
337
- report_content.append(
338
- rf"{detection['frame']:06d} & {detection['timestamp']} & {detection['gps'][0]:.6f} & {detection['gps'][1]:.6f} & 5.0 & 12 & 60 & \texttt{{{os.path.basename(log_path)}}} \\ \hline"
339
- )
340
- report_content.extend([
341
- r"\end{longtable}",
342
- r"\section{Processed Video}",
343
- rf"Path: \texttt{{outputs/processed\_output.mp4}}\par",
344
- rf"Frames: {output_frames}\par",
345
- rf"FPS: {output_fps:.2f}\par",
346
- rf"Duration: {output_duration:.2f} seconds\par",
347
- r"\section{Visualizations}",
348
- rf"Detection Trend Chart: \texttt{{outputs/chart\_<{timestamp}>.png}}\par",
349
- rf"Issue Locations Map: \texttt{{outputs/map\_<{timestamp}>.png}}\par"
350
- ])
351
-
352
- if chart_path and os.path.exists(chart_path):
353
- report_content.extend([
354
- r"\begin{figure}[h]",
355
- r"\centering",
356
- rf"\includegraphics[width=0.8\textwidth]{{{chart_path}}}",
357
- r"\caption{Detection Trend Chart}",
358
- r"\end{figure}"
359
- ])
360
- if map_path and os.path.exists(map_path):
361
- report_content.extend([
362
- r"\begin{figure}[h]",
363
- r"\centering",
364
- rf"\includegraphics[width=0.8\textwidth]{{{map_path}}}",
365
- r"\caption{Issue Locations Map}",
366
- r"\end{figure}"
367
- ])
368
-
369
- report_content.extend([
370
- r"\section{Processing Timestamps}",
371
- rf"Total Processing Time: {total_time:.2f} seconds\par",
372
- r"\textbf{Log Entries (Last 10):}",
373
- r"\begin{itemize}"
374
- ])
375
-
376
- for entry in log_entries[-10:]:
377
- report_content.append(rf"\item {entry}")
378
- report_content.extend([
379
- r"\end{itemize}",
380
- r"\section{Stakeholder Validation}",
381
- r"\begin{itemize}",
382
- r"\item \textbf{AE/IE Comments}: Pending",
383
- r"\item \textbf{PD/RO Comments}: Pending",
384
- r"\end{itemize}",
385
- r"\section{Recommendations}",
386
- r"\begin{itemize}",
387
- r"\item Repair potholes in high-traffic segments.",
388
- r"\item Seal cracks to prevent degradation.",
389
- r"\item Schedule follow-up survey.",
390
- r"\end{itemize}",
391
- r"\section{Data Lake References}",
392
- rf"Images: \texttt{{/project\_xyz/images/{datetime.now().strftime('%Y-%m-%d')}}}\par",
393
- rf"Flight Logs: \texttt{{/project\_xyz/flight\_logs/{datetime.now().strftime('%Y-%m-%d')}}}\par",
394
- rf"Video: \texttt{{/project\_xyz/videos/processed\_output_{datetime.now().strftime('%Y%m%d')}.mp4}}\par",
395
- rf"DAMS Dashboard: \texttt{{/project\_xyz/dams/{datetime.now().strftime('%Y-%m-%d')}}}\par",
396
- r"\end{document}"
397
- ])
398
-
399
- # Fallback Markdown report content
400
- md_report_content = [
401
  "# NHAI Drone Survey Analysis Report",
402
  "",
403
  "## Project Details",
@@ -443,8 +357,8 @@ def generate_report(
 
  for item in metrics.get("items", []):
  percentage = (item["count"] / metrics["total_detections"] * 100) if metrics["total_detections"] > 0 else 0
- md_report_content.append(f" - {item['type']}: {item['count']} ({percentage:.2f}%)")
- md_report_content.extend([
  f"- Processing Time: {total_time:.2f} seconds",
  f"- Average Frame Time: {sum(frame_times)/len(frame_times):.2f} ms" if frame_times else "- Average Frame Time: N/A",
  f"- Average Resize Time: {sum(resize_times)/len(resize_times):.2f} ms" if resize_times else "- Average Resize Time: N/A",
@@ -455,10 +369,10 @@ def generate_report(
  "",
  "## 5. Output File Structure",
  "- ZIP file contains:",
- f" - `drone_analysis_report_{timestamp}.md`: This report (fallback due to PDF generation failure)",
  " - `outputs/processed_output.mp4`: Processed video with annotations",
- f" - `outputs/chart_{timestamp}.png`: Detection trend chart",
- f" - `outputs/map_{timestamp}.png`: Issue locations map",
  " - `captured_frames/detected_<frame>.jpg`: Geotagged images for detected issues",
  " - `flight_logs/flight_log_<frame>.csv`: Flight logs matching image frames",
  "- Note: Images and logs share frame numbers (e.g., `detected_000001.jpg` corresponds to `flight_log_000001.csv`).",
@@ -472,11 +386,11 @@ def generate_report(
  ])
 
  for detection in all_detections[:100]:
- md_report_content.append(
  f"| {detection['frame']:06d} | {detection['label']} | ({detection['gps'][0]:.6f}, {detection['gps'][1]:.6f}) | {detection['timestamp']} | {detection['conf']:.2f} | captured_frames/{os.path.basename(detection['path'])} |"
  )
 
- md_report_content.extend([
  "",
  "## 7. Flight Logs",
  f"- Total Logs: {len(detected_issues)}",
@@ -488,11 +402,11 @@ def generate_report(
 
  for detection in all_detections[:100]:
  log_path = f"flight_logs/flight_log_{detection['frame']:06d}.csv"
- md_report_content.append(
  f"| {detection['frame']:06d} | {detection['timestamp']} | {detection['gps'][0]:.6f} | {detection['gps'][1]:.6f} | 5.0 | 12 | 60 | {log_path} |"
  )
 
- md_report_content.extend([
  "",
  "## 8. Processed Video",
  f"- Path: outputs/processed_output.mp4",
@@ -510,9 +424,9 @@ def generate_report(
  ])
 
  for entry in log_entries[-10:]:
- md_report_content.append(f" - {entry}")
 
- md_report_content.extend([
  "",
  "## 11. Stakeholder Validation",
  "- AE/IE Comments: [Pending]",
@@ -531,44 +445,12 @@ def generate_report(
  ])
 
  try:
- # Writing LaTeX file
- with open(tex_path, 'w') as f:
  f.write("\n".join(report_content))
- log_entries.append(f"LaTeX file saved: {tex_path}")
-
- # Compiling LaTeX to PDF using latexmk
- result = subprocess.run(
- ["latexmk", "-pdf", "-interaction=nonstopmode", tex_path],
- cwd=OUTPUT_DIR,
- capture_output=True,
- text=True
- )
- if result.returncode == 0:
- log_entries.append(f"PDF report generated: {report_path}")
- if os.path.exists(report_path):
- log_entries.append(f"Confirmed report file exists: {report_path}")
- return report_path
- else:
- log_entries.append(f"Error: PDF report not found at {report_path}")
- else:
- log_entries.append(f"Error: LaTeX compilation failed: {result.stderr}")
  except Exception as e:
- log_entries.append(f"Error: Failed to generate PDF report: {str(e)}")
-
- # Fallback: Generate Markdown report if PDF fails
- log_entries.append("Falling back to Markdown report due to PDF generation failure")
- try:
- with open(md_path, 'w') as f:
- f.write("\n".join(md_report_content))
- log_entries.append(f"Markdown report saved: {md_path}")
- if os.path.exists(md_path):
- log_entries.append(f"Confirmed Markdown report file exists: {md_path}")
- return md_path
- else:
- log_entries.append(f"Error: Markdown report not found at {md_path}")
- return ""
- except Exception as e:
- log_entries.append(f"Error: Failed to save Markdown report: {str(e)}")
  return ""
 
  def process_video(video, resize_width=4000, resize_height=3000, frame_skip=5):
@@ -791,5 +673,228 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="orange")) as iface:
  ]
  )
 
  if __name__ == "__main__":
  iface.launch()
 
14
  from ultralytics import YOLO
15
  import piexif
16
  import zipfile
 
17
 
18
  os.environ["YOLO_CONFIG_DIR"] = "/tmp/Ultralytics"
19
  logging.basicConfig(filename="app.log", level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
 
46
  zip_path = os.path.join(OUTPUT_DIR, f"drone_analysis_outputs_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip")
47
  try:
48
  with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
49
+ if os.path.exists(report_path):
 
50
  zipf.write(report_path, os.path.basename(report_path))
51
+ if os.path.exists(video_path):
52
  zipf.write(video_path, os.path.join("outputs", os.path.basename(video_path)))
53
+ if os.path.exists(chart_path):
54
  zipf.write(chart_path, os.path.join("outputs", os.path.basename(chart_path)))
55
+ if os.path.exists(map_path):
56
  zipf.write(map_path, os.path.join("outputs", os.path.basename(map_path)))
57
  for file in detected_issues:
58
  if os.path.exists(file):
59
  zipf.write(file, os.path.join("captured_frames", os.path.basename(file)))
60
  for root, _, files in os.walk(FLIGHT_LOG_DIR):
61
  for file in files:
62
  file_path = os.path.join(root, file)
63
+ zipf.write(file_path, os.path.join("flight_logs", file))
64
+ log_entries.append(f"Created ZIP: {zip_path}")
65
+ return zip_path
66
+ except Exception as e:
67
+ log_entries.append(f"Error: Failed to create ZIP: {str(e)}")
68
+ return ""
69
+
70
+ def generate_map(gps_coords: List[List[float]], items: List[Dict[str, Any]]) -> str:
71
+ map_path = os.path.join(OUTPUT_DIR, f"map_{datetime.now().strftime('%Y%m%d_%H%M%S')}.png")
72
+ plt.figure(figsize=(4, 4))
73
+ plt.scatter([x[1] for x in gps_coords], [x[0] for x in gps_coords], c='blue', label='GPS Points')
74
+ plt.title("Issue Locations Map")
75
+ plt.xlabel("Longitude")
76
+ plt.ylabel("Latitude")
77
+ plt.legend()
78
+ plt.savefig(map_path)
79
+ plt.close()
80
+ return map_path
81
+
82
+ def write_geotag(image_path: str, gps_coord: List[float]) -> bool:
83
+ try:
84
+ lat = abs(gps_coord[0])
85
+ lon = abs(gps_coord[1])
86
+ lat_ref = "N" if gps_coord[0] >= 0 else "S"
87
+ lon_ref = "E" if gps_coord[1] >= 0 else "W"
88
+ exif_dict = piexif.load(image_path) if os.path.exists(image_path) else {"GPS": {}}
89
+ exif_dict["GPS"] = {
90
+ piexif.GPSIFD.GPSLatitudeRef: lat_ref,
91
+ piexif.GPSIFD.GPSLatitude: ((int(lat), 1), (0, 1), (0, 1)),
92
+ piexif.GPSIFD.GPSLongitudeRef: lon_ref,
93
+ piexif.GPSIFD.GPSLongitude: ((int(lon), 1), (0, 1), (0, 1))
94
+ }
95
+ piexif.insert(piexif.dump(exif_dict), image_path)
96
+ return True
97
+ except Exception as e:
98
+ log_entries.append(f"Error: Failed to geotag {image_path}: {str(e)}")
99
+ return False
100
+
101
+ def write_flight_log(frame_count: int, gps_coord: List[float], timestamp: str) -> str:
102
+ log_path = os.path.join(FLIGHT_LOG_DIR, f"flight_log_{frame_count:06d}.csv")
103
+ try:
104
+ with open(log_path, 'w', newline='') as csvfile:
105
+ writer = csv.writer(csvfile)
106
+ writer.writerow(["Frame", "Timestamp", "Latitude", "Longitude", "Speed_ms", "Satellites", "Altitude_m"])
107
+ writer.writerow([frame_count, timestamp, gps_coord[0], gps_coord[1], 5.0, 12, 60])
108
+ return log_path
109
+ except Exception as e:
110
+ log_entries.append(f"Error: Failed to write flight log {log_path}: {str(e)}")
111
+ return ""
112
+
113
+ def check_image_quality(frame: np.ndarray, input_resolution: int) -> bool:
114
+ height, width, _ = frame.shape
115
+ frame_resolution = width * height
116
+ if frame_resolution < 12_000_000:
117
+ log_entries.append(f"Frame {frame_count}: Resolution {width}x{height} below 12MP")
118
+ return False
119
+ if frame_resolution < input_resolution:
120
+ log_entries.append(f"Frame {frame_count}: Output resolution below input")
121
+ return False
122
+ return True
123
+
124
+ def update_metrics(detections: List[Dict[str, Any]]) -> Dict[str, Any]:
125
+ counts = Counter([det["label"] for det in detections])
126
+ return {
127
+ "items": [{"type": k, "count": v} for k, v in counts.items()],
128
+ "total_detections": len(detections),
129
+ "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
130
+ }
131
+
132
+ def generate_line_chart() -> Optional[str]:
133
+ if not detected_counts:
134
+ return None
135
+ plt.figure(figsize=(4, 2))
136
+ plt.plot(detected_counts[-50:], marker='o', color='#FF8C00')
137
+ plt.title("Detections Over Time")
138
+ plt.xlabel("Frame")
139
+ plt.ylabel("Count")
140
+ plt.grid(True)
141
+ plt.tight_layout()
142
+ chart_path = os.path.join(OUTPUT_DIR, f"chart_{datetime.now().strftime('%Y%m%d_%H%M%S')}.png")
143
+ plt.savefig(chart_path)
144
+ plt.close()
145
+ return chart_path
146
 
147
+ import cv2
148
+ import torch
149
+ import gradio as gr
150
+ import numpy as np
151
+ import os
152
+ import json
153
+ import logging
154
+ import matplotlib.pyplot as plt
155
+ import csv
156
+ import time
157
+ from datetime import datetime
158
+ from collections import Counter
159
+ from typing import List, Dict, Any, Optional
160
+ from ultralytics import YOLO
161
+ import piexif
162
+ import zipfile
163
+
164
+ os.environ["YOLO_CONFIG_DIR"] = "/tmp/Ultralytics"
165
+ logging.basicConfig(filename="app.log", level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
166
+
167
+ CAPTURED_FRAMES_DIR = "captured_frames"
168
+ OUTPUT_DIR = "outputs"
169
+ FLIGHT_LOG_DIR = "flight_logs"
170
+ os.makedirs(CAPTURED_FRAMES_DIR, exist_ok=True)
171
+ os.makedirs(OUTPUT_DIR, exist_ok=True)
172
+ os.makedirs(FLIGHT_LOG_DIR, exist_ok=True)
173
+ os.chmod(CAPTURED_FRAMES_DIR, 0o777)
174
+ os.chmod(OUTPUT_DIR, 0o777)
175
+ os.chmod(FLIGHT_LOG_DIR, 0o777)
176
+
177
+ log_entries: List[str] = []
178
+ detected_counts: List[int] = []
179
+ detected_issues: List[str] = []
180
+ gps_coordinates: List[List[float]] = []
181
+ last_metrics: Dict[str, Any] = {}
182
+ frame_count: int = 0
183
+ SAVE_IMAGE_INTERVAL = 1
184
+ DETECTION_CLASSES = ["Longitudinal", "Pothole", "Transverse"]
185
+
186
+ device = "cuda" if torch.cuda.is_available() else "cpu"
187
+ model = YOLO('./data/best.pt').to(device)
188
+ if device == "cuda":
189
+ model.half()
190
+
191
+ def zip_all_outputs(report_path: str, video_path: str, chart_path: str, map_path: str) -> str:
192
+ zip_path = os.path.join(OUTPUT_DIR, f"drone_analysis_outputs_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip")
193
+ try:
194
+ with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
195
+ if os.path.exists(report_path):
196
+ zipf.write(report_path, os.path.basename(report_path))
197
+ if os.path.exists(video_path):
198
+ zipf.write(video_path, os.path.join("outputs", os.path.basename(video_path)))
199
+ if os.path.exists(chart_path):
200
+ zipf.write(chart_path, os.path.join("outputs", os.path.basename(chart_path)))
201
+ if os.path.exists(map_path):
202
+ zipf.write(map_path, os.path.join("outputs", os.path.basename(map_path)))
203
+ for file in detected_issues:
204
+ if os.path.exists(file):
205
+ zipf.write(file, os.path.join("captured_frames", os.path.basename(file)))
206
+ for root, _, files in os.walk(FLIGHT_LOG_DIR):
207
+ for file in files:
208
+ file_path = os.path.join(root, file)
209
+ zipf.write(file_path, os.path.join("flight_logs", file))
210
  log_entries.append(f"Created ZIP: {zip_path}")
211
  return zip_path
212
  except Exception as e:
213
  log_entries.append(f"Error: Failed to create ZIP: {str(e)}")
 
308
  inference_times: List[float],
309
  io_times: List[float]
310
  ) -> str:
 
311
  log_entries.append("Generating report...")
312
+ report_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md")
313
  timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
314
  report_content = [
315
  "# NHAI Drone Survey Analysis Report",
316
  "",
317
  "## Project Details",
 
357
 
358
  for item in metrics.get("items", []):
359
  percentage = (item["count"] / metrics["total_detections"] * 100) if metrics["total_detections"] > 0 else 0
360
+ report_content.append(f" - {item['type']}: {item['count']} ({percentage:.2f}%)")
361
+ report_content.extend([
362
  f"- Processing Time: {total_time:.2f} seconds",
363
  f"- Average Frame Time: {sum(frame_times)/len(frame_times):.2f} ms" if frame_times else "- Average Frame Time: N/A",
364
  f"- Average Resize Time: {sum(resize_times)/len(resize_times):.2f} ms" if resize_times else "- Average Resize Time: N/A",
 
369
  "",
370
  "## 5. Output File Structure",
371
  "- ZIP file contains:",
372
+ " - `drone_analysis_report_<timestamp>.md`: This report",
373
  " - `outputs/processed_output.mp4`: Processed video with annotations",
374
+ " - `outputs/chart_<timestamp>.png`: Detection trend chart",
375
+ " - `outputs/map_<timestamp>.png`: Issue locations map",
376
  " - `captured_frames/detected_<frame>.jpg`: Geotagged images for detected issues",
377
  " - `flight_logs/flight_log_<frame>.csv`: Flight logs matching image frames",
378
  "- Note: Images and logs share frame numbers (e.g., `detected_000001.jpg` corresponds to `flight_log_000001.csv`).",
 
386
  ])
387
 
388
  for detection in all_detections[:100]:
389
+ report_content.append(
390
  f"| {detection['frame']:06d} | {detection['label']} | ({detection['gps'][0]:.6f}, {detection['gps'][1]:.6f}) | {detection['timestamp']} | {detection['conf']:.2f} | captured_frames/{os.path.basename(detection['path'])} |"
391
  )
392
 
393
+ report_content.extend([
394
  "",
395
  "## 7. Flight Logs",
396
  f"- Total Logs: {len(detected_issues)}",
 
402
 
403
  for detection in all_detections[:100]:
404
  log_path = f"flight_logs/flight_log_{detection['frame']:06d}.csv"
405
+ report_content.append(
406
  f"| {detection['frame']:06d} | {detection['timestamp']} | {detection['gps'][0]:.6f} | {detection['gps'][1]:.6f} | 5.0 | 12 | 60 | {log_path} |"
407
  )
408
 
409
+ report_content.extend([
410
  "",
411
  "## 8. Processed Video",
412
  f"- Path: outputs/processed_output.mp4",
 
424
  ])
425
 
426
  for entry in log_entries[-10:]:
427
+ report_content.append(f" - {entry}")
428
 
429
+ report_content.extend([
430
  "",
431
  "## 11. Stakeholder Validation",
432
  "- AE/IE Comments: [Pending]",
 
445
  ])
446
 
447
  try:
448
+ with open(report_path, 'w') as f:
 
449
  f.write("\n".join(report_content))
450
+ log_entries.append(f"Report saved: {report_path}")
451
+ return report_path
452
  except Exception as e:
453
+ log_entries.append(f"Error: Failed to save report: {str(e)}")
454
  return ""
455
 
456
  def process_video(video, resize_width=4000, resize_height=3000, frame_skip=5):
 
673
  ]
674
  )
675
 
676
+ if __name__ == "__main__":
677
+ iface.launch()
678
+
679
+ def process_video(video, resize_width=4000, resize_height=3000, frame_skip=5):
680
+ global frame_count, last_metrics, detected_counts, detected_issues, gps_coordinates, log_entries
681
+ frame_count = 0
682
+ detected_counts.clear()
683
+ detected_issues.clear()
684
+ gps_coordinates.clear()
685
+ log_entries.clear()
686
+ last_metrics = {}
687
+
688
+ if video is None:
689
+ log_entries.append("Error: No video uploaded")
690
+ return None, json.dumps({"error": "No video uploaded"}, indent=2), "\n".join(log_entries), [], None, None, None
691
+
692
+ log_entries.append("Starting video processing...")
693
+ start_time = time.time()
694
+ cap = cv2.VideoCapture(video)
695
+ if not cap.isOpened():
696
+ log_entries.append("Error: Could not open video file")
697
+ return None, json.dumps({"error": "Could not open video file"}, indent=2), "\n".join(log_entries), [], None, None, None
698
+
699
+ frame_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
700
+ frame_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
701
+ input_resolution = frame_width * frame_height
702
+ fps = cap.get(cv2.CAP_PROP_FPS)
703
+ total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
704
+ log_entries.append(f"Input video: {frame_width}x{frame_height}, {fps} FPS, {total_frames} frames")
705
+
706
+ out_width, out_height = resize_width, resize_height
707
+ output_path = os.path.join(OUTPUT_DIR, "processed_output.mp4")
708
+ out = cv2.VideoWriter(output_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (out_width, out_height))
709
+ if not out.isOpened():
710
+ log_entries.append("Error: Failed to initialize mp4v codec")
711
+ cap.release()
712
+ return None, json.dumps({"error": "mp4v codec failed"}, indent=2), "\n".join(log_entries), [], None, None, None
713
+
714
+ processed_frames = 0
715
+ all_detections = []
716
+ frame_times = []
717
+ inference_times = []
718
+ resize_times = []
719
+ io_times = []
720
+ detection_frame_count = 0
721
+ output_frame_count = 0
722
+ last_annotated_frame = None
723
+
724
+ while True:
725
+ ret, frame = cap.read()
726
+ if not ret:
727
+ break
728
+ frame_count += 1
729
+ if frame_count % frame_skip != 0:
730
+ continue
731
+ processed_frames += 1
732
+ frame_start = time.time()
733
+
734
+ frame = cv2.resize(frame, (out_width, out_height))
735
+ resize_times.append((time.time() - frame_start) * 1000)
736
+
737
+ if not check_image_quality(frame, input_resolution):
738
+ continue
739
+
740
+ inference_start = time.time()
741
+ results = model(frame, verbose=False, conf=0.5, iou=0.7)
742
+ annotated_frame = results[0].plot()
743
+ inference_times.append((time.time() - inference_start) * 1000)
744
+
745
+ frame_timestamp = frame_count / fps if fps > 0 else 0
746
+ timestamp_str = f"{int(frame_timestamp // 60)}:{int(frame_timestamp % 60):02d}"
747
+
748
+ gps_coord = [17.385044 + (frame_count * 0.0001), 78.486671 + (frame_count * 0.0001)]
749
+ gps_coordinates.append(gps_coord)
750
+
751
+ io_start = time.time()
752
+ frame_detections = []
753
+ for detection in results[0].boxes:
754
+ cls = int(detection.cls)
755
+ conf = float(detection.conf)
756
+ box = detection.xyxy[0].cpu().numpy().astype(int).tolist()
757
+ label = model.names[cls]
758
+ if label in DETECTION_CLASSES:
759
+ frame_detections.append({
760
+ "label": label,
761
+ "box": box,
762
+ "conf": conf,
763
+ "gps": gps_coord,
764
+ "timestamp": timestamp_str,
765
+ "frame": frame_count,
766
+ "path": os.path.join(CAPTURED_FRAMES_DIR, f"detected_{frame_count:06d}.jpg")
767
+ })
768
+ log_entries.append(f"Frame {frame_count} at {timestamp_str}: Detected {label} with confidence {conf:.2f}")
769
+
770
+ if frame_detections:
771
+ detection_frame_count += 1
772
+ if detection_frame_count % SAVE_IMAGE_INTERVAL == 0:
773
+ captured_frame_path = os.path.join(CAPTURED_FRAMES_DIR, f"detected_{frame_count:06d}.jpg")
774
+ if cv2.imwrite(captured_frame_path, annotated_frame):
775
+ if write_geotag(captured_frame_path, gps_coord):
776
+ detected_issues.append(captured_frame_path)
777
+ if len(detected_issues) > 1000: # Limit to 1000 images
778
+ detected_issues.pop(0)
779
+ else:
780
+ log_entries.append(f"Frame {frame_count}: Geotagging failed")
781
+ else:
782
+ log_entries.append(f"Error: Failed to save {captured_frame_path}")
783
+ flight_log_path = write_flight_log(frame_count, gps_coord, timestamp_str)
784
+
785
+ io_times.append((time.time() - io_start) * 1000)
786
+
787
+ out.write(annotated_frame)
788
+ output_frame_count += 1
789
+ last_annotated_frame = annotated_frame
790
+ if frame_skip > 1:
791
+ for _ in range(frame_skip - 1):
792
+ out.write(annotated_frame)
793
+ output_frame_count += 1
794
+
795
+ detected_counts.append(len(frame_detections))
796
+ all_detections.extend(frame_detections)
797
+
798
+ frame_times.append((time.time() - frame_start) * 1000)
799
+ if len(log_entries) > 50:
800
+ log_entries.pop(0)
801
+
802
+ if time.time() - start_time > 600:
803
+ log_entries.append("Error: Processing timeout after 600 seconds")
804
+ break
805
+
806
+ while output_frame_count < total_frames and last_annotated_frame is not None:
807
+ out.write(last_annotated_frame)
808
+ output_frame_count += 1
809
+
810
+ last_metrics = update_metrics(all_detections)
811
+
812
+ cap.release()
813
+ out.release()
814
+
815
+ cap = cv2.VideoCapture(output_path)
816
+ output_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
817
+ output_fps = cap.get(cv2.CAP_PROP_FPS)
818
+ output_duration = output_frames / output_fps if output_fps > 0 else 0
819
+ cap.release()
820
+
821
+ total_time = time.time() - start_time
822
+ log_entries.append(f"Output video: {output_frames} frames, {output_fps:.2f} FPS, {output_duration:.2f} seconds")
823
+
824
+ log_entries.append("Generating chart and map...")
825
+ chart_path = generate_line_chart()
826
+ map_path = generate_map(gps_coordinates[-5:], all_detections)
827
+
828
+ report_path = generate_report(
829
+ last_metrics,
830
+ detected_issues,
831
+ gps_coordinates,
832
+ all_detections,
833
+ frame_count,
834
+ total_time,
835
+ output_frames,
836
+ output_fps,
837
+ output_duration,
838
+ detection_frame_count,
839
+ chart_path,
840
+ map_path,
841
+ frame_times,
842
+ resize_times,
843
+ inference_times,
844
+ io_times
845
+ )
846
+
847
+ log_entries.append("Creating output ZIP...")
848
+ output_zip_path = zip_all_outputs(report_path, output_path, chart_path, map_path)
849
+
850
+ log_entries.append(f"Processing completed in {total_time:.2f} seconds")
851
+ return (
852
+ output_path,
853
+ json.dumps(last_metrics, indent=2),
854
+ "\n".join(log_entries[-10:]),
855
+ detected_issues,
856
+ chart_path,
857
+ map_path,
858
+ output_zip_path
859
+ )
860
+
861
+ with gr.Blocks(theme=gr.themes.Soft(primary_hue="orange")) as iface:
862
+ gr.Markdown("# NHAI Road Defect Detection Dashboard")
863
+ with gr.Row():
864
+ with gr.Column(scale=3):
865
+ video_input = gr.Video(label="Upload Video (12MP recommended)")
866
+ width_slider = gr.Slider(320, 4000, value=4000, label="Output Width", step=1)
867
+ height_slider = gr.Slider(240, 3000, value=3000, label="Output Height", step=1)
868
+ skip_slider = gr.Slider(1, 10, value=5, label="Frame Skip", step=1)
869
+ process_btn = gr.Button("Process Video", variant="primary")
870
+ with gr.Column(scale=1):
871
+ metrics_output = gr.Textbox(label="Detection Metrics", lines=5, interactive=False)
872
+ with gr.Row():
873
+ video_output = gr.Video(label="Processed Video")
874
+ issue_gallery = gr.Gallery(label="Detected Issues", columns=4, height="auto", object_fit="contain")
875
+ with gr.Row():
876
+ chart_output = gr.Image(label="Detection Trend")
877
+ map_output = gr.Image(label="Issue Locations Map")
878
+ with gr.Row():
879
+ logs_output = gr.Textbox(label="Logs", lines=5, interactive=False)
880
+ with gr.Row():
881
+ gr.Markdown("## Download Results")
882
+ with gr.Row():
883
+ output_zip_download = gr.File(label="Download All Outputs (ZIP)")
884
+
885
+ process_btn.click(
886
+ fn=process_video,
887
+ inputs=[video_input, width_slider, height_slider, skip_slider],
888
+ outputs=[
889
+ video_output,
890
+ metrics_output,
891
+ logs_output,
892
+ issue_gallery,
893
+ chart_output,
894
+ map_output,
895
+ output_zip_download
896
+ ]
897
+ )
898
+
899
  if __name__ == "__main__":
900
  iface.launch()