nagasurendra committed on
Commit
a128de1
·
verified ·
1 Parent(s): b094487

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -14
app.py CHANGED
@@ -183,9 +183,9 @@ def process_video(video, resize_width=4000, resize_height=3000, frame_skip=5):
183
  fps = cap.get(cv2.CAP_PROP_FPS)
184
  total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
185
  expected_duration = total_frames / fps if fps > 0 else 0
186
- log_entries.append(f"Input video: {frame_width}x{frame_height} ({input_resolution/1e6:.2f}MP), {fps} FPS, {total_frames} frames, {expected_duration:.2f} seconds")
187
- logging.info(f"Input video: {frame_width}x{frame_height} ({input_resolution/1e6:.2f}MP), {fps} FPS, {total_frames} frames, {expected_duration:.2f} seconds")
188
- print(f"Input video: {frame_width}x{frame_height} ({input_resolution/1e6:.2f}MP), {fps} FPS, {total_frames} frames, {expected_duration:.2f} seconds")
189
 
190
  out_width, out_height = resize_width, resize_height
191
  output_path = os.path.join(OUTPUT_DIR, "processed_output.mp4")
@@ -334,10 +334,10 @@ def process_video(video, resize_width=4000, resize_height=3000, frame_skip=5):
334
  avg_frame_time = sum(frame_times) / len(frame_times) if frame_times else 0
335
  log_entries.append(f"Output video: {output_frames} frames, {output_fps:.2f} FPS, {output_duration:.2f} seconds")
336
  logging.info(f"Output video: {output_frames} frames, {output_fps:.2f} FPS, {output_duration:.2f} seconds")
337
- log_entries.append(f"Total Processing time: {total_time:.2f} seconds, Avg frame time: {avg_frame_time:.2f} ms, Detection frames: {detection_frame_count}, Output frames: {output_frame_count}")
338
- logging.info(f"Total Processing time: {total_time:.2f} seconds, {avg_frame_time:.2f} Avg frame time: {detection_frame_count:.2f} ms, Detection frames: {output_frame_count:.2f}}, Output frames: {total_time}")
339
- print(f"Output video: {total_time:.2f} seconds")
340
- print(f"Total Processing time: {total_time:.2f} seconds, Avg frame time:: {avg_frame_time:.2f} ms, Detection frames: {detection_frame_count}, Output frames: {output_frame_count}")
341
 
342
  chart_path = generate_line_chart()
343
  map_path = generate_map(gps_coordinates[-5:], all_detections)
@@ -364,18 +364,18 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="orange")) as iface:
364
  with gr.Row():
365
  with gr.Column(scale=3):
366
  video_input = gr.Video(label="Upload Video (12MP recommended for NHAI compliance)")
367
- width_slider=gr.Slider(320, 4000, value=4000, label="Output Width", step=1)
368
- height_slider=gr.Slider(240, 3000, value=3000, label="Output Height", step=1)
369
- skip_slider = gr.Slider(1, 5, value=5, label="Frame Skip", step=1)
370
  process_btn = gr.Button("Process Video", variant="primary")
371
  with gr.Column(scale=1):
372
  metrics_output = gr.Textbox(label="Detection Metrics", lines=5, interactive=False)
373
  with gr.Row():
374
  video_output = gr.Video(label="Processed Video")
375
- issue_gallery = gr.Gallery(label="Detected Issues", columns=2, height="auto", object_fit="contain")
376
  with gr.Row():
377
  chart_output = gr.Image(label="Detection Trend")
378
- map_output = gr.Image(label="Issue Locations")
379
  with gr.Row():
380
  logs_output = gr.Textbox(label="Logs", lines=5, interactive=False)
381
  with gr.Row():
@@ -404,5 +404,4 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="orange")) as iface:
404
  )
405
 
406
  if __name__ == "__main__":
407
- iface.launch()
408
-
 
183
  fps = cap.get(cv2.CAP_PROP_FPS)
184
  total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
185
  expected_duration = total_frames / fps if fps > 0 else 0
186
+ log_entries.append(f"Input video: {frame_width}x{frame_height} ({input_resolution/1e6:.2f}MP), {fps} FPS, {total_frames} frames, {expected_duration:.2f} seconds, Frame skip: {frame_skip}")
187
+ logging.info(f"Input video: {frame_width}x{frame_height} ({input_resolution/1e6:.2f}MP), {fps} FPS, {total_frames} frames, {expected_duration:.2f} seconds, Frame skip: {frame_skip}")
188
+ print(f"Input video: {frame_width}x{frame_height} ({input_resolution/1e6:.2f}MP), {fps} FPS, {total_frames} frames, {expected_duration:.2f} seconds, Frame skip: {frame_skip}")
189
 
190
  out_width, out_height = resize_width, resize_height
191
  output_path = os.path.join(OUTPUT_DIR, "processed_output.mp4")
 
334
  avg_frame_time = sum(frame_times) / len(frame_times) if frame_times else 0
335
  log_entries.append(f"Output video: {output_frames} frames, {output_fps:.2f} FPS, {output_duration:.2f} seconds")
336
  logging.info(f"Output video: {output_frames} frames, {output_fps:.2f} FPS, {output_duration:.2f} seconds")
337
+ log_entries.append(f"Total processing time: {total_time:.2f} seconds, Avg frame time: {avg_frame_time:.2f} ms, Detection frames: {detection_frame_count}, Output frames: {output_frame_count}")
338
+ logging.info(f"Total processing time: {total_time:.2f} seconds, Avg frame time: {avg_frame_time:.2f} ms, Detection frames: {detection_frame_count}, Output frames: {output_frame_count}")
339
+ print(f"Output video: {output_frames} frames, {output_fps:.2f} FPS, {output_duration:.2f} seconds")
340
+ print(f"Total processing time: {total_time:.2f} seconds, Avg frame time: {avg_frame_time:.2f} ms, Detection frames: {detection_frame_count}, Output frames: {output_frame_count}")
341
 
342
  chart_path = generate_line_chart()
343
  map_path = generate_map(gps_coordinates[-5:], all_detections)
 
364
  with gr.Row():
365
  with gr.Column(scale=3):
366
  video_input = gr.Video(label="Upload Video (12MP recommended for NHAI compliance)")
367
+ width_slider = gr.Slider(320, 4000, value=4000, label="Output Width", step=1)
368
+ height_slider = gr.Slider(240, 3000, value=3000, label="Output Height", step=1)
369
+ skip_slider = gr.Slider(1, 10, value=5, label="Frame Skip", step=1)
370
  process_btn = gr.Button("Process Video", variant="primary")
371
  with gr.Column(scale=1):
372
  metrics_output = gr.Textbox(label="Detection Metrics", lines=5, interactive=False)
373
  with gr.Row():
374
  video_output = gr.Video(label="Processed Video")
375
+ issue_gallery = gr.Gallery(label="Detected Issues", columns=4, height="auto", object_fit="contain")
376
  with gr.Row():
377
  chart_output = gr.Image(label="Detection Trend")
378
+ map_output = gr.Image(label="Issue Locations Map")
379
  with gr.Row():
380
  logs_output = gr.Textbox(label="Logs", lines=5, interactive=False)
381
  with gr.Row():
 
404
  )
405
 
406
  if __name__ == "__main__":
407
+ iface.launch()