File size: 10,771 Bytes
41c03cf c3429f6 462fddf c3429f6 449d194 462fddf ba9faee c3429f6 7e29aa0 c3429f6 58199b3 1213ff3 b3f7afe 449d194 70fce4e c3429f6 462fddf 7e44cd3 42d2b87 462fddf 70fce4e 462fddf 7e29aa0 58199b3 42d2b87 449d194 db7efde b3f7afe 42d2b87 c3429f6 462fddf db7efde 462fddf db7efde c9d4715 7e44cd3 0a9dc78 1213ff3 462fddf db7efde 58199b3 1213ff3 449d194 d179a4e 1213ff3 d41a272 1213ff3 d41a272 449d194 885c61f 1213ff3 58199b3 1213ff3 70fce4e 1213ff3 58199b3 b3f7afe 449d194 1213ff3 0a9dc78 58199b3 c3429f6 449d194 462fddf db7efde 462fddf 449d194 573c1ef 58199b3 1213ff3 0a9dc78 1213ff3 0a9dc78 1213ff3 0a9dc78 1213ff3 db7efde 0a9dc78 449d194 0a9dc78 449d194 0a9dc78 70fce4e 1213ff3 0a9dc78 70fce4e 0a9dc78 70fce4e 1213ff3 70fce4e b3f7afe 70fce4e 1213ff3 462fddf 573c1ef 462fddf 1213ff3 70fce4e 449d194 70fce4e c9d4715 1213ff3 7e44cd3 1213ff3 d41a272 7e44cd3 d179a4e b3f7afe d179a4e b3f7afe d179a4e 70fce4e 7e44cd3 1213ff3 d179a4e c9d4715 1213ff3 462fddf d179a4e b3f7afe d41a272 1213ff3 462fddf d179a4e b3f7afe 70fce4e 573c1ef d179a4e d41a272 7e29aa0 a653421 689fb64 c9d4715 462fddf 7e44cd3 462fddf 1213ff3 c9d4715 462fddf 573c1ef c9d4715 1213ff3 61be320 462fddf 1213ff3 d179a4e 462fddf d179a4e 462fddf a295d73 c3429f6 449d194 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 |
import cv2
import numpy as np
import torch
from ultralytics import YOLO
import gradio as gr
from scipy.interpolate import interp1d
from scipy.ndimage import uniform_filter1d
import uuid
import os
# Load the trained YOLOv8n ball-detection model from the working directory.
model = YOLO("best.pt")
# Constants for LBW decision and video processing
STUMPS_WIDTH = 0.2286 # meters (real-world width of the stumps; scaled to pixels later)
FRAME_RATE = 20 # Input video frame rate (fps) — used to build the trajectory time axis
SLOW_MOTION_FACTOR = 2 # Each frame is written this many times; also divides output fps
CONF_THRESHOLD = 0.3 # YOLO confidence threshold; raised for better detection precision
PITCH_ZONE_Y = 0.8 # Fraction of frame height; ball below this y is treated as pitching
IMPACT_ZONE_Y = 0.7 # Fraction of frame height; candidate impact must be below this y
IMPACT_DELTA_Y = 20 # Min per-frame y jump (pixels) that signals pad impact
STUMPS_HEIGHT = 0.711 # meters (real-world height of the stumps; scaled to pixels later)
def process_video(video_path):
    """Run YOLO ball detection over every frame of the video.

    Args:
        video_path: Path to the input video file.

    Returns:
        Tuple of (frames, ball_positions, detection_frames, debug_log):
        - frames: list of BGR frames (annotated copy where a ball was found)
        - ball_positions: list of [cx, cy] ball centers (pixels)
        - detection_frames: index into `frames` for each entry of ball_positions
        - debug_log: newline-joined per-frame detection summary (str)
    """
    if not os.path.exists(video_path):
        return [], [], [], "Error: Video file not found"
    cap = cv2.VideoCapture(video_path)
    # FIX: a file can exist yet fail to open (bad codec/corrupt container).
    if not cap.isOpened():
        return [], [], [], "Error: Could not open video file"
    frames = []
    ball_positions = []
    detection_frames = []
    debug_log = []
    frame_count = 0
    # FIX: release the capture even if model.predict (or anything else) raises.
    try:
        while cap.isOpened():
            ret, frame = cap.read()
            if not ret:
                break
            # Keep the raw frame; it is replaced by the annotated one on detection.
            frames.append(frame.copy())
            # Preprocess frame for better detection (contrast/brightness boost)
            frame = cv2.convertScaleAbs(frame, alpha=1.2, beta=10)
            results = model.predict(frame, conf=CONF_THRESHOLD)
            # Class 0 is the ball; ignore any other detected classes.
            detections = [det for det in results[0].boxes if det.cls == 0]
            # Only trust frames with exactly one ball — multiple hits are ambiguous.
            if len(detections) == 1:
                x1, y1, x2, y2 = detections[0].xyxy[0].cpu().numpy()
                ball_positions.append([(x1 + x2) / 2, (y1 + y2) / 2])
                detection_frames.append(len(frames) - 1)
                cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (0, 255, 0), 2)
                frames[-1] = frame
            debug_log.append(f"Frame {frame_count}: {len(detections)} ball detections")
            frame_count += 1
    finally:
        cap.release()
    if not ball_positions:
        debug_log.append("No valid single-ball detections in any frame")
    else:
        debug_log.append(f"Total valid single-ball detections: {len(ball_positions)}")
    return frames, ball_positions, detection_frames, "\n".join(debug_log)
def estimate_trajectory(ball_positions, detection_frames, frames):
    """Estimate the ball trajectory, pitch point and impact point.

    Args:
        ball_positions: list of [x, y] ball centers from process_video.
        detection_frames: frame index for each ball position.
        frames: list of video frames (only frames[0].shape is used).

    Returns:
        (full_trajectory, vis_trajectory, pitch_point, pitch_frame,
         impact_point, impact_frame, debug_log); the first six are None
        on failure, with the reason in debug_log.
    """
    if len(ball_positions) < 2:
        return None, None, None, None, None, None, "Error: Fewer than 2 valid single-ball detections for trajectory"
    frame_height = frames[0].shape[0]
    # Smooth coordinates with a moving average to damp detection jitter.
    window_size = 3
    x_coords = uniform_filter1d([pos[0] for pos in ball_positions], size=window_size, mode='nearest')
    y_coords = uniform_filter1d([pos[1] for pos in ball_positions], size=window_size, mode='nearest')
    times = np.array([i / FRAME_RATE for i in range(len(ball_positions))])
    # Pitch point: first detection low enough in the frame (near the pitch).
    pitch_idx = 0
    for i, y in enumerate(y_coords):
        if y > frame_height * PITCH_ZONE_Y:
            pitch_idx = i
            break
    pitch_point = ball_positions[pitch_idx]
    pitch_frame = detection_frames[pitch_idx]
    # Impact point: first sharp vertical jump within the batsman zone.
    impact_idx = None
    for i in range(1, len(y_coords)):
        if (y_coords[i] > frame_height * IMPACT_ZONE_Y and
            abs(y_coords[i] - y_coords[i-1]) > IMPACT_DELTA_Y):
            impact_idx = i
            break
    if impact_idx is None:
        impact_idx = len(y_coords) - 1
    impact_point = ball_positions[impact_idx]
    impact_frame = detection_frames[impact_idx]
    # Only fit the trajectory up to (and including) the impact.
    x_coords = x_coords[:impact_idx + 1]
    y_coords = y_coords[:impact_idx + 1]
    times = times[:impact_idx + 1]
    # FIX: 'quadratic' interpolation needs at least 3 samples; with only 2
    # detections (or truncation to 2 points) it raised and the whole review
    # failed. Fall back to linear in that case.
    y_kind = 'quadratic' if len(times) >= 3 else 'linear'
    try:
        fx = interp1d(times, x_coords, kind='linear', fill_value="extrapolate")
        fy = interp1d(times, y_coords, kind=y_kind, fill_value="extrapolate")
    except Exception as e:
        return None, None, None, None, None, None, f"Error in trajectory interpolation: {str(e)}"
    # vis_trajectory: observed (smoothed) points only, for drawing.
    vis_trajectory = list(zip(x_coords, y_coords))
    # full_trajectory: extrapolated 0.5s past the last observation, for the
    # "would it hit the stumps" projection.
    t_full = np.linspace(times[0], times[-1] + 0.5, len(times) + 5)
    x_full = fx(t_full)
    y_full = fy(t_full)
    full_trajectory = list(zip(x_full, y_full))
    debug_log = (f"Trajectory estimated successfully\n"
                 f"Pitch point at frame {pitch_frame + 1}: ({pitch_point[0]:.1f}, {pitch_point[1]:.1f})\n"
                 f"Impact point at frame {impact_frame + 1}: ({impact_point[0]:.1f}, {impact_point[1]:.1f})")
    return full_trajectory, vis_trajectory, pitch_point, pitch_frame, impact_point, impact_frame, debug_log
def lbw_decision(ball_positions, full_trajectory, frames, pitch_point, impact_point):
    """Apply the LBW rules to the projected trajectory.

    Checks, in order: pitched in line, impact in line and below the
    batsman zone, and whether the projected path hits the stumps.

    Returns:
        (decision_text, full_trajectory, pitch_point, impact_point);
        the last three are None on the error paths.
    """
    if not frames:
        return "Error: No frames processed", None, None, None
    if not full_trajectory or len(ball_positions) < 2:
        return "Not enough data (insufficient valid single-ball detections)", None, None, None
    frame_height, frame_width = frames[0].shape[:2]
    # Stumps assumed centered horizontally, at 80% of frame height.
    stumps_x = frame_width / 2
    stumps_y = frame_height * 0.8  # Adjusted to align with pitch
    stumps_width_pixels = frame_width * (STUMPS_WIDTH / 3.0)
    batsman_area_y = frame_height * 0.7
    pitch_x, pitch_y = pitch_point
    impact_x, impact_y = impact_point
    in_line_threshold = stumps_width_pixels / 2
    # Rule 1: ball must pitch in line with the stumps.
    if pitch_x < stumps_x - in_line_threshold or pitch_x > stumps_x + in_line_threshold:
        return f"Not Out (Pitched outside line at x: {pitch_x:.1f}, y: {pitch_y:.1f})", full_trajectory, pitch_point, impact_point
    # Rule 2: impact must be in line and within the batsman (leg) zone.
    if impact_y < batsman_area_y or impact_x < stumps_x - in_line_threshold or impact_x > stumps_x + in_line_threshold:
        return f"Not Out (Impact outside line or above batsman at x: {impact_x:.1f}, y: {impact_y:.1f})", full_trajectory, pitch_point, impact_point
    # Rule 3: projected path must hit the stumps.
    # FIX: capture the hitting point explicitly instead of relying on the
    # loop variable leaking out of the for-loop after break.
    hit_point = None
    for x, y in full_trajectory:
        if (abs(x - stumps_x) < in_line_threshold and
            abs(y - stumps_y) < frame_height * 0.1):
            hit_point = (x, y)
            break
    if hit_point is not None:
        hit_x = hit_point[0]
        # FIX: "clipping" means the ball grazes the EDGE of the stumps, i.e.
        # |x - center| close to the threshold. The original condition
        # (< threshold * 0.1) fired when the ball hit dead center — the
        # plumb-Out case — and was inverted.
        if abs(hit_x - stumps_x) > in_line_threshold * 0.9:
            return f"Umpire's Call - Not Out (Ball clips stumps, Pitch at x: {pitch_x:.1f}, y: {pitch_y:.1f}, Impact at x: {impact_x:.1f}, y: {impact_y:.1f})", full_trajectory, pitch_point, impact_point
        return f"Out (Ball hits stumps, Pitch at x: {pitch_x:.1f}, y: {pitch_y:.1f}, Impact at x: {impact_x:.1f}, y: {impact_y:.1f})", full_trajectory, pitch_point, impact_point
    return f"Not Out (Missing stumps, Pitch at x: {pitch_x:.1f}, y: {pitch_y:.1f}, Impact at x: {impact_x:.1f}, y: {impact_y:.1f})", full_trajectory, pitch_point, impact_point
def generate_slow_motion(frames, vis_trajectory, pitch_point, pitch_frame, impact_point, impact_frame, detection_frames, output_path, decision):
    """Write an annotated slow-motion replay to output_path.

    Draws the stumps outline (white), crease (yellow), the growing ball
    trajectory (red polyline), pitch marker (green), impact marker (red)
    and a "Wickets" label when the decision contains "Out".

    Returns:
        output_path on success, None when there are no frames.
    """
    if not frames:
        return None
    frame_height, frame_width = frames[0].shape[:2]
    stumps_x = frame_width / 2
    stumps_y = frame_height * 0.8  # Align with pitch
    stumps_width_pixels = frame_width * (STUMPS_WIDTH / 3.0)
    stumps_height_pixels = frame_height * (STUMPS_HEIGHT / 3.0)
    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    out = cv2.VideoWriter(output_path, fourcc, FRAME_RATE / SLOW_MOTION_FACTOR, (frame_width, frame_height))
    # FIX: vis_trajectory is None when trajectory estimation failed upstream;
    # np.array(None).reshape(...) raised and the replay was never written.
    if vis_trajectory:
        trajectory_points = np.array(vis_trajectory, dtype=np.int32).reshape((-1, 1, 2))
    else:
        trajectory_points = np.empty((0, 1, 2), dtype=np.int32)
    for i, frame in enumerate(frames):
        # Draw stumps outline (top bar and both side posts)
        cv2.line(frame, (int(stumps_x - stumps_width_pixels / 2), int(stumps_y)),
                 (int(stumps_x + stumps_width_pixels / 2), int(stumps_y)), (255, 255, 255), 2)
        cv2.line(frame, (int(stumps_x - stumps_width_pixels / 2), int(stumps_y - stumps_height_pixels)),
                 (int(stumps_x - stumps_width_pixels / 2), int(stumps_y)), (255, 255, 255), 2)
        cv2.line(frame, (int(stumps_x + stumps_width_pixels / 2), int(stumps_y - stumps_height_pixels)),
                 (int(stumps_x + stumps_width_pixels / 2), int(stumps_y)), (255, 255, 255), 2)
        # Draw crease line at stumps
        cv2.line(frame, (int(stumps_x - stumps_width_pixels / 2), int(stumps_y)),
                 (int(stumps_x + stumps_width_pixels / 2), int(stumps_y)), (255, 255, 0), 2)
        # Draw the trajectory up to this frame's detection, so it grows over time.
        if i in detection_frames and trajectory_points.size > 0:
            idx = detection_frames.index(i) + 1
            if idx <= len(trajectory_points):
                cv2.polylines(frame, [trajectory_points[:idx]], False, (0, 0, 255), 2)
        if pitch_point and i == pitch_frame:
            x, y = pitch_point
            cv2.circle(frame, (int(x), int(y)), 8, (0, 255, 0), -1)  # Green for pitching
            cv2.putText(frame, "Pitching", (int(x) + 10, int(y) - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
        if impact_point and i == impact_frame:
            x, y = impact_point
            cv2.circle(frame, (int(x), int(y)), 8, (0, 0, 255), -1)  # Red for impact
            cv2.putText(frame, "Impact", (int(x) + 10, int(y) + 20),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
        if impact_point and i == impact_frame and "Out" in decision:
            cv2.putText(frame, "Wickets", (int(stumps_x) - 50, int(stumps_y) - 20),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 165, 255), 1)  # Orange for wickets
        # Duplicate each frame to produce the slow-motion effect.
        for _ in range(SLOW_MOTION_FACTOR):
            out.write(frame)
    out.release()
    return output_path
def drs_review(video):
    """Gradio entry point: run the full DRS pipeline on an uploaded video.

    Returns:
        (decision_and_debug_text, replay_video_path_or_None).
    """
    frames, ball_positions, detection_frames, debug_log = process_video(video)
    if not frames:
        return f"Error: Failed to process video\nDebug Log:\n{debug_log}", None
    full_trajectory, vis_trajectory, pitch_point, pitch_frame, impact_point, impact_frame, trajectory_log = estimate_trajectory(ball_positions, detection_frames, frames)
    decision, full_trajectory, pitch_point, impact_point = lbw_decision(ball_positions, full_trajectory, frames, pitch_point, impact_point)
    # FIX: when trajectory estimation fails, vis_trajectory is None and
    # generate_slow_motion crashed; skip the replay and still return the
    # decision plus the debug log explaining why.
    if vis_trajectory:
        output_path = f"output_{uuid.uuid4()}.mp4"
        slow_motion_path = generate_slow_motion(frames, vis_trajectory, pitch_point, pitch_frame, impact_point, impact_frame, detection_frames, output_path, decision)
    else:
        slow_motion_path = None
    debug_output = f"{debug_log}\n{trajectory_log}"
    return f"DRS Decision: {decision}\nDebug Log:\n{debug_output}", slow_motion_path
# Gradio interface: single video input, textbox + annotated replay output.
iface = gr.Interface(
    fn=drs_review,
    inputs=gr.Video(label="Upload Video Clip"),
    outputs=[
        gr.Textbox(label="DRS Decision and Debug Log"),
        gr.Video(label="Optimized Slow-Motion Replay with Pitching (Green), Impact (Red), Wickets (Orange), Stumps (White), Crease (Yellow)")
    ],
    title="AI-Powered DRS for LBW in Local Cricket",
    description="Upload a video clip of a cricket delivery to get an LBW decision and optimized slow-motion replay showing pitching (green circle), impact (red circle), wickets (orange text), stumps (white outline), and crease line (yellow line)."
)
# Launch the web UI only when run as a script (not on import).
if __name__ == "__main__":
    iface.launch()