MNGames committed on
Commit bf2aa22 · verified · 1 Parent(s): d04b7c9

Update app.py

Files changed (1)
  1. app.py +9 -15
app.py CHANGED
@@ -1,16 +1,16 @@
 import gradio as gr
 import tensorflow as tf
 import numpy as np
-import cv2
 from PIL import Image
 import io
+import moviepy.editor as mp
 
 # Load a pre-trained TensorFlow model (replace with your model path)
 model = tf.keras.applications.MobileNetV2(weights="imagenet")
 
 def preprocess_image(image):
     img = np.array(image)
-    img = cv2.resize(img, (224, 224))
+    img = tf.image.resize(img, (224, 224))
     img = tf.keras.applications.mobilenet_v2.preprocess_input(img)
     return np.expand_dims(img, axis=0)
 
@@ -20,27 +20,21 @@ def classify_frame(frame):
     decoded_predictions = tf.keras.applications.mobilenet_v2.decode_predictions(predictions, top=1)[0]
     return decoded_predictions[0][1]
 
-def process_video(video):
+def process_video(video_file):
     result = ""
-    cap = cv2.VideoCapture(video)
-    frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
-    frame_interval = frame_count // 10  # Analyze 10 frames evenly spaced throughout the video
-
-    for i in range(0, frame_count, frame_interval):
-        cap.set(cv2.CAP_PROP_POS_FRAMES, i)
-        ret, frame = cap.read()
-        if not ret:
-            break
+    video = mp.VideoFileClip(io.BytesIO(video_file.read()))
+    duration = int(video.duration)
+    frame_interval = duration // 10  # Analyze 10 frames evenly spaced throughout the video
 
-        frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
-        image = Image.fromarray(frame_rgb)
+    for i in range(0, duration, frame_interval):
+        frame = video.get_frame(i)
+        image = Image.fromarray(frame)
         label = classify_frame(image)
 
         if "baseball" in label.lower():
             result = "The runner is out"
             break
 
-    cap.release()
     if result == "":
         result = "The runner is safe"
 
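
For context, below is a minimal, self-contained sketch of the moviepy-based pipeline the new hunks describe. The diff only shows the changed hunks of app.py, so the Gradio wiring at the bottom is an assumption, as are the path-based video loading (VideoFileClip is handed a filesystem path here) and the max(1, ...) guard that keeps the sampling step positive for clips shorter than ten seconds; the model, preprocessing, and the "baseball" label check follow the diff.

import gradio as gr
import numpy as np
import tensorflow as tf
from PIL import Image
import moviepy.editor as mp

# Pre-trained ImageNet classifier, as in the commit.
model = tf.keras.applications.MobileNetV2(weights="imagenet")

def preprocess_image(image):
    # tf.image.resize returns a float32 tensor; convert back to a NumPy
    # array before adding the batch dimension.
    img = tf.image.resize(np.array(image), (224, 224)).numpy()
    img = tf.keras.applications.mobilenet_v2.preprocess_input(img)
    return np.expand_dims(img, axis=0)

def classify_frame(image):
    predictions = model.predict(preprocess_image(image))
    decoded = tf.keras.applications.mobilenet_v2.decode_predictions(predictions, top=1)[0]
    return decoded[0][1]  # human-readable class name of the top prediction

def process_video(video_path):
    # Assumption: the uploaded video arrives as a filesystem path, which
    # VideoFileClip can open directly.
    clip = mp.VideoFileClip(video_path)
    duration = int(clip.duration)
    # Sample roughly 10 evenly spaced frames; max(1, ...) keeps the step
    # positive for clips shorter than ten seconds (assumed guard).
    step = max(1, duration // 10)

    result = ""
    for t in range(0, duration, step):
        frame = clip.get_frame(t)  # RGB uint8 array at t seconds
        label = classify_frame(Image.fromarray(frame))
        if "baseball" in label.lower():
            result = "The runner is out"
            break

    clip.close()
    return result or "The runner is safe"

# Assumed Gradio wiring; the actual interface definition lies outside the
# changed hunks shown in this diff.
demo = gr.Interface(fn=process_video, inputs=gr.Video(), outputs="text")

if __name__ == "__main__":
    demo.launch()

Note that get_frame(t) already yields RGB frames, which is why the cv2.cvtColor BGR-to-RGB step from the old OpenCV version has no counterpart in the new code.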