Update app.py
app.py CHANGED
@@ -14,6 +14,7 @@ from typing import List, Dict, Any, Optional
 from ultralytics import YOLO
 import piexif
 import zipfile
+import subprocess
 
 os.environ["YOLO_CONFIG_DIR"] = "/tmp/Ultralytics"
 logging.basicConfig(filename="app.log", level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
@@ -46,22 +47,57 @@ def zip_all_outputs(report_path: str, video_path: str, chart_path: str, map_path
     zip_path = os.path.join(OUTPUT_DIR, f"drone_analysis_outputs_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip")
     try:
         with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
-
+            # Add report file
+            if report_path and os.path.exists(report_path):
                 zipf.write(report_path, os.path.basename(report_path))
-
+                log_entries.append(f"Added report to ZIP: {report_path}")
+            else:
+                log_entries.append(f"Warning: Report file not found for ZIP: {report_path}")
+
+            # Add video file
+            if video_path and os.path.exists(video_path):
                 zipf.write(video_path, os.path.join("outputs", os.path.basename(video_path)))
-
+                log_entries.append(f"Added video to ZIP: {video_path}")
+            else:
+                log_entries.append(f"Warning: Video file not found for ZIP: {video_path}")
+
+            # Add chart file
+            if chart_path and os.path.exists(chart_path):
                 zipf.write(chart_path, os.path.join("outputs", os.path.basename(chart_path)))
-
+                log_entries.append(f"Added chart to ZIP: {chart_path}")
+            else:
+                log_entries.append(f"Warning: Chart file not found for ZIP: {chart_path}")
+
+            # Add map file
+            if map_path and os.path.exists(map_path):
                 zipf.write(map_path, os.path.join("outputs", os.path.basename(map_path)))
+                log_entries.append(f"Added map to ZIP: {map_path}")
+            else:
+                log_entries.append(f"Warning: Map file not found for ZIP: {map_path}")
+
+            # Add detected issue images
             for file in detected_issues:
                 if os.path.exists(file):
                     zipf.write(file, os.path.join("captured_frames", os.path.basename(file)))
+                    log_entries.append(f"Added image to ZIP: {file}")
+                else:
+                    log_entries.append(f"Warning: Image file not found for ZIP: {file}")
+
+            # Add flight logs
             for root, _, files in os.walk(FLIGHT_LOG_DIR):
                 for file in files:
                     file_path = os.path.join(root, file)
-
+                    if os.path.exists(file_path):
+                        zipf.write(file_path, os.path.join("flight_logs", file))
+                        log_entries.append(f"Added flight log to ZIP: {file_path}")
+                    else:
+                        log_entries.append(f"Warning: Flight log not found for ZIP: {file_path}")
+
         log_entries.append(f"Created ZIP: {zip_path}")
+        if os.path.exists(zip_path):
+            log_entries.append(f"Confirmed ZIP file exists: {zip_path}")
+        else:
+            log_entries.append(f"Error: ZIP file not created: {zip_path}")
         return zip_path
     except Exception as e:
         log_entries.append(f"Error: Failed to create ZIP: {str(e)}")
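The pattern this hunk adds around every zipf.write call is: check that the path exists, write it under a stable arcname, and append either an "Added ..." or a "Warning ..." entry to log_entries. Below is a minimal standalone sketch of that pattern (not part of the commit); OUTPUT_DIR and log_entries stand in for the module-level globals app.py already defines.

# Standalone sketch of the "check, write, log" ZIP pattern used in zip_all_outputs.
import os
import zipfile
from datetime import datetime

OUTPUT_DIR = "/tmp/outputs"   # assumption: app.py defines its own OUTPUT_DIR
log_entries = []              # assumption: app.py keeps this as a global list

def zip_optional_files(named_paths: dict) -> str:
    """Add each existing file to a timestamped ZIP; log what was added or skipped."""
    os.makedirs(OUTPUT_DIR, exist_ok=True)
    zip_path = os.path.join(OUTPUT_DIR, f"outputs_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip")
    try:
        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for label, path in named_paths.items():
                if path and os.path.exists(path):
                    # Mirror the diff's arcnames: store files under an "outputs/" prefix.
                    zipf.write(path, os.path.join("outputs", os.path.basename(path)))
                    log_entries.append(f"Added {label} to ZIP: {path}")
                else:
                    log_entries.append(f"Warning: {label} file not found for ZIP: {path}")
        log_entries.append(f"Created ZIP: {zip_path}")
        return zip_path
    except Exception as e:
        log_entries.append(f"Error: Failed to create ZIP: {str(e)}")
        return ""

As in the hunk, missing inputs only produce warnings; the ZIP is still created and its path returned, so downstream steps can work with whatever outputs exist.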
@@ -163,10 +199,11 @@ def generate_report(
     io_times: List[float]
 ) -> str:
     # Generating LaTeX-based PDF report
-    log_entries.append("Generating
-    report_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf")
-    tex_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.tex")
+    log_entries.append("Generating report...")
     timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+    report_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{timestamp}.pdf")
+    tex_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{timestamp}.tex")
+    md_path = os.path.join(OUTPUT_DIR, f"drone_analysis_report_{timestamp}.md")  # Fallback Markdown path
 
     # LaTeX document content
     report_content = [
@@ -218,7 +255,7 @@ def generate_report(
         r"\item \textbf{Flight Pattern}: Single lap, ROW centered",
         r"\item \textbf{Geotagging}: Enabled",
         r"\item \textbf{Satellite Lock}: 12 satellites",
-        r"\item \textbf
+        r"\item \textbf{Terrain Follow Mode}: Enabled",
         r"\end{itemize}",
         r"\section{Quality Check Results}",
         r"\begin{itemize}",
@@ -312,7 +349,6 @@ def generate_report(
         rf"Issue Locations Map: \texttt{{outputs/map\_<{timestamp}>.png}}\par"
     ])
 
-    # Including chart and map images
     if chart_path and os.path.exists(chart_path):
         report_content.extend([
             r"\begin{figure}[h]",
@@ -360,6 +396,140 @@ def generate_report(
         r"\end{document}"
     ])
 
+    # Fallback Markdown report content
+    md_report_content = [
+        "# NHAI Drone Survey Analysis Report",
+        "",
+        "## Project Details",
+        "- Project Name: NH-44 Delhi-Hyderabad Section (Package XYZ)",
+        "- Highway Section: Km 100 to Km 150",
+        "- State: Telangana",
+        "- Region: South",
+        f"- Survey Date: {datetime.now().strftime('%Y-%m-%d')}",
+        "- Drone Service Provider: ABC Drone Services Pvt. Ltd.",
+        "- Technology Service Provider: XYZ AI Analytics Ltd.",
+        f"- Work Order Reference: Data Lake WO-{datetime.now().strftime('%Y-%m-%d')}-XYZ",
+        "- Report Prepared By: Nagasurendra, Data Analyst",
+        f"- Report Date: {datetime.now().strftime('%Y-%m-%d')}",
+        "",
+        "## 1. Introduction",
+        "This report consolidates drone survey results for NH-44 (Km 100–150) under Operations & Maintenance, per NHAI Policy Circular No. 18.98/2024, detecting potholes and cracks using YOLOv8 for Monthly Progress Report integration.",
+        "",
+        "## 2. Drone Survey Metadata",
+        "- Drone Speed: 5 m/s",
+        "- Drone Height: 60 m",
+        "- Camera Sensor: RGB, 12 MP",
+        "- Recording Type: JPEG, 90° nadir",
+        "- Image Overlap: 85%",
+        "- Flight Pattern: Single lap, ROW centered",
+        "- Geotagging: Enabled",
+        "- Satellite Lock: 12 satellites",
+        "- Terrain Follow Mode: Enabled",
+        "",
+        "## 3. Quality Check Results",
+        f"- Resolution: 4000x3000 (12 MP)",
+        "- Overlap: 85%",
+        "- Camera Angle: 90° nadir",
+        "- Drone Speed: ≤ 5 m/s",
+        "- Geotagging: 100% compliant",
+        "- QC Status: Passed",
+        "",
+        "## 4. AI/ML Analytics",
+        f"- Total Frames Processed: {frame_count}",
+        f"- Detection Frames: {detection_frame_count} ({detection_frame_count/frame_count*100:.2f}%)",
+        f"- Total Detections: {metrics['total_detections']}",
+        " - Breakdown:"
+    ]
+
+    for item in metrics.get("items", []):
+        percentage = (item["count"] / metrics["total_detections"] * 100) if metrics["total_detections"] > 0 else 0
+        md_report_content.append(f" - {item['type']}: {item['count']} ({percentage:.2f}%)")
+    md_report_content.extend([
+        f"- Processing Time: {total_time:.2f} seconds",
+        f"- Average Frame Time: {sum(frame_times)/len(frame_times):.2f} ms" if frame_times else "- Average Frame Time: N/A",
+        f"- Average Resize Time: {sum(resize_times)/len(resize_times):.2f} ms" if resize_times else "- Average Resize Time: N/A",
+        f"- Average Inference Time: {sum(inference_times)/len(inference_times):.2f} ms" if inference_times else "- Average Inference Time: N/A",
+        f"- Average I/O Time: {sum(io_times)/len(io_times):.2f} ms" if io_times else "- Average I/O Time: N/A",
+        f"- Timestamp: {metrics.get('timestamp', 'N/A')}",
+        "- Summary: Potholes and cracks detected in high-traffic segments.",
+        "",
+        "## 5. Output File Structure",
+        "- ZIP file contains:",
+        f" - `drone_analysis_report_{timestamp}.md`: This report (fallback due to PDF generation failure)",
+        " - `outputs/processed_output.mp4`: Processed video with annotations",
+        f" - `outputs/chart_{timestamp}.png`: Detection trend chart",
+        f" - `outputs/map_{timestamp}.png`: Issue locations map",
+        " - `captured_frames/detected_<frame>.jpg`: Geotagged images for detected issues",
+        " - `flight_logs/flight_log_<frame>.csv`: Flight logs matching image frames",
+        "- Note: Images and logs share frame numbers (e.g., `detected_000001.jpg` corresponds to `flight_log_000001.csv`).",
+        "",
+        "## 6. Geotagged Images",
+        f"- Total Images: {len(detected_issues)}",
+        f"- Storage: Data Lake `/project_xyz/images/{datetime.now().strftime('%Y-%m-%d')}`",
+        "",
+        "| Frame | Issue Type | GPS (Lat, Lon) | Timestamp | Confidence | Image Path |",
+        "|-------|------------|----------------|-----------|------------|------------|"
+    ])
+
+    for detection in all_detections[:100]:
+        md_report_content.append(
+            f"| {detection['frame']:06d} | {detection['label']} | ({detection['gps'][0]:.6f}, {detection['gps'][1]:.6f}) | {detection['timestamp']} | {detection['conf']:.2f} | captured_frames/{os.path.basename(detection['path'])} |"
+        )
+
+    md_report_content.extend([
+        "",
+        "## 7. Flight Logs",
+        f"- Total Logs: {len(detected_issues)}",
+        f"- Storage: Data Lake `/project_xyz/flight_logs/{datetime.now().strftime('%Y-%m-%d')}`",
+        "",
+        "| Frame | Timestamp | Latitude | Longitude | Speed (m/s) | Satellites | Altitude (m) | Log Path |",
+        "|-------|-----------|----------|-----------|-------------|------------|--------------|----------|"
+    ])
+
+    for detection in all_detections[:100]:
+        log_path = f"flight_logs/flight_log_{detection['frame']:06d}.csv"
+        md_report_content.append(
+            f"| {detection['frame']:06d} | {detection['timestamp']} | {detection['gps'][0]:.6f} | {detection['gps'][1]:.6f} | 5.0 | 12 | 60 | {log_path} |"
+        )
+
+    md_report_content.extend([
+        "",
+        "## 8. Processed Video",
+        f"- Path: outputs/processed_output.mp4",
+        f"- Frames: {output_frames}",
+        f"- FPS: {output_fps:.2f}",
+        f"- Duration: {output_duration:.2f} seconds",
+        "",
+        "## 9. Visualizations",
+        f"- Detection Trend Chart: outputs/chart_{timestamp}.png",
+        f"- Issue Locations Map: outputs/map_{timestamp}.png",
+        "",
+        "## 10. Processing Timestamps",
+        f"- Total Processing Time: {total_time:.2f} seconds",
+        "- Log Entries (Last 10):"
+    ])
+
+    for entry in log_entries[-10:]:
+        md_report_content.append(f" - {entry}")
+
+    md_report_content.extend([
+        "",
+        "## 11. Stakeholder Validation",
+        "- AE/IE Comments: [Pending]",
+        "- PD/RO Comments: [Pending]",
+        "",
+        "## 12. Recommendations",
+        "- Repair potholes in high-traffic segments.",
+        "- Seal cracks to prevent degradation.",
+        "- Schedule follow-up survey.",
+        "",
+        "## 13. Data Lake References",
+        f"- Images: `/project_xyz/images/{datetime.now().strftime('%Y-%m-%d')}`",
+        f"- Flight Logs: `/project_xyz/flight_logs/{datetime.now().strftime('%Y-%m-%d')}`",
+        f"- Video: `/project_xyz/videos/processed_output_{datetime.now().strftime('%Y%m%d')}.mp4`",
+        f"- DAMS Dashboard: `/project_xyz/dams/{datetime.now().strftime('%Y-%m-%d')}`"
+    ])
+
     try:
         # Writing LaTeX file
         with open(tex_path, 'w') as f:
@@ -367,7 +537,6 @@ def generate_report(
         log_entries.append(f"LaTeX file saved: {tex_path}")
 
         # Compiling LaTeX to PDF using latexmk
-        import subprocess
         result = subprocess.run(
             ["latexmk", "-pdf", "-interaction=nonstopmode", tex_path],
             cwd=OUTPUT_DIR,
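The latexmk call this hunk touches (the only change is dropping the in-function import subprocess, now that it is imported at module level) can be tried in isolation. A minimal sketch, assuming latexmk is on PATH; the capture_output flag and the returncode check are assumptions, since the hunk's context ends at cwd=OUTPUT_DIR.

# Sketch: compile a .tex file with latexmk via subprocess (assumes latexmk is installed).
import os
import subprocess

def compile_tex(tex_path: str, out_dir: str) -> str:
    """Run latexmk in out_dir; return the expected PDF path on success, "" on failure."""
    result = subprocess.run(
        ["latexmk", "-pdf", "-interaction=nonstopmode", tex_path],
        cwd=out_dir,
        capture_output=True,  # assumption: stderr is captured so it can be logged (the diff reads result.stderr)
        text=True,
    )
    pdf_path = os.path.join(out_dir, os.path.basename(tex_path).replace(".tex", ".pdf"))
    if result.returncode == 0 and os.path.exists(pdf_path):
        return pdf_path
    return ""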
@@ -378,76 +547,30 @@ def generate_report(
             log_entries.append(f"PDF report generated: {report_path}")
             if os.path.exists(report_path):
                 log_entries.append(f"Confirmed report file exists: {report_path}")
+                return report_path
             else:
                 log_entries.append(f"Error: PDF report not found at {report_path}")
-                return ""
         else:
             log_entries.append(f"Error: LaTeX compilation failed: {result.stderr}")
-            return ""
-        return report_path
     except Exception as e:
-        log_entries.append(f"Error: Failed to generate report: {str(e)}")
-        return ""
+        log_entries.append(f"Error: Failed to generate PDF report: {str(e)}")
 
-
-
+    # Fallback: Generate Markdown report if PDF fails
+    log_entries.append("Falling back to Markdown report due to PDF generation failure")
     try:
-        with
-
-
-
-
-
-            log_entries.append(f"Warning: Report file not found for ZIP: {report_path}")
-
-            # Add video file
-            if video_path and os.path.exists(video_path):
-                zipf.write(video_path, os.path.join("outputs", os.path.basename(video_path)))
-                log_entries.append(f"Added video to ZIP: {video_path}")
-            else:
-                log_entries.append(f"Warning: Video file not found for ZIP: {video_path}")
-
-            # Add chart file
-            if chart_path and os.path.exists(chart_path):
-                zipf.write(chart_path, os.path.join("outputs", os.path.basename(chart_path)))
-                log_entries.append(f"Added chart to ZIP: {chart_path}")
-            else:
-                log_entries.append(f"Warning: Chart file not found for ZIP: {chart_path}")
-
-            # Add map file
-            if map_path and os.path.exists(map_path):
-                zipf.write(map_path, os.path.join("outputs", os.path.basename(map_path)))
-                log_entries.append(f"Added map to ZIP: {map_path}")
-            else:
-                log_entries.append(f"Warning: Map file not found for ZIP: {map_path}")
-
-            # Add detected issue images
-            for file in detected_issues:
-                if os.path.exists(file):
-                    zipf.write(file, os.path.join("captured_frames", os.path.basename(file)))
-                    log_entries.append(f"Added image to ZIP: {file}")
-                else:
-                    log_entries.append(f"Warning: Image file not found for ZIP: {file}")
-
-            # Add flight logs
-            for root, _, files in os.walk(FLIGHT_LOG_DIR):
-                for file in files:
-                    file_path = os.path.join(root, file)
-                    if os.path.exists(file_path):
-                        zipf.write(file_path, os.path.join("flight_logs", file))
-                        log_entries.append(f"Added flight log to ZIP: {file_path}")
-                    else:
-                        log_entries.append(f"Warning: Flight log not found for ZIP: {file_path}")
-
-        log_entries.append(f"Created ZIP: {zip_path}")
-        if os.path.exists(zip_path):
-            log_entries.append(f"Confirmed ZIP file exists: {zip_path}")
+        with open(md_path, 'w') as f:
+            f.write("\n".join(md_report_content))
+        log_entries.append(f"Markdown report saved: {md_path}")
+        if os.path.exists(md_path):
+            log_entries.append(f"Confirmed Markdown report file exists: {md_path}")
+            return md_path
         else:
-            log_entries.append(f"Error:
-
+            log_entries.append(f"Error: Markdown report not found at {md_path}")
+            return ""
     except Exception as e:
-        log_entries.append(f"Error: Failed to
+        log_entries.append(f"Error: Failed to save Markdown report: {str(e)}")
         return ""
+
 def process_video(video, resize_width=4000, resize_height=3000, frame_skip=5):
     global frame_count, last_metrics, detected_counts, detected_issues, gps_coordinates, log_entries
     frame_count = 0
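Taken together, the generate_report changes reduce to: try latexmk first and return the PDF path, and if anything in that branch fails, join md_report_content with newlines and write it to md_path instead. A compressed sketch of the fallback half of that flow, with a toy line list standing in for the full md_report_content assembled earlier in the diff and a hypothetical output path:

# Sketch of the Markdown fallback writer (toy content; the .md path is hypothetical).
import os

log_entries = []  # stand-in for app.py's global log list

def write_markdown_fallback(md_path: str, md_report_content: list) -> str:
    """Write the joined Markdown lines; return the path on success, "" on failure."""
    try:
        with open(md_path, 'w') as f:
            f.write("\n".join(md_report_content))
        if os.path.exists(md_path):
            log_entries.append(f"Markdown report saved: {md_path}")
            return md_path
        log_entries.append(f"Error: Markdown report not found at {md_path}")
        return ""
    except Exception as e:
        log_entries.append(f"Error: Failed to save Markdown report: {str(e)}")
        return ""

# Used only when the PDF branch did not return a path.
lines = ["# NHAI Drone Survey Analysis Report", "", "## Project Details", "- State: Telangana"]
print(write_markdown_fallback("/tmp/drone_analysis_report_fallback.md", lines))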