kristyc committed on
Commit 1a44db9 · 1 Parent(s): f7bff5b

Rewrite demo using Gradio and video components

Files changed (4)
  1. .gitignore +1 -0
  2. README.md +2 -2
  3. app.py +46 -19
  4. requirements.txt +3 -2
.gitignore ADDED
@@ -0,0 +1 @@
+ __pycache__
README.md CHANGED
@@ -3,8 +3,8 @@ title: Hand & Finger Tracking
  emoji: 🙌
  colorFrom: pink
  colorTo: purple
- sdk: streamlit
- sdk_version: 1.10.0
+ sdk: gradio
+ sdk_version: 3.0.20
  app_file: app.py
  pinned: false
  ---
app.py CHANGED
@@ -1,9 +1,9 @@
- import streamlit as st
- from streamlit_webrtc import webrtc_streamer
- import av
+ import gradio as gr
+ from matplotlib.pyplot import draw
  import mediapipe as mp
  import numpy as np
- from PIL import Image
+ import tempfile
+ import mediapy as media
  
  mp_hands = mp.solutions.hands
  mp_hands_connections = mp.solutions.hands_connections
@@ -20,7 +20,7 @@ connections = {
      'HAND_PINKY_FINGER_CONNECTIONS': mp_hands_connections.HAND_PINKY_FINGER_CONNECTIONS,
  }
  
- def process_hands(img):
+ def process_hands(img, selected_connection, draw_background):
      results = hands.process(img)
      output_img = img if draw_background else np.zeros_like(img)
      if results.multi_hand_landmarks:
@@ -28,21 +28,48 @@ def process_hands(img):
          mp_draw.draw_landmarks(output_img, hand_landmarks, connections[selected_connection])
      return output_img
  
- st.title('Hand & Finger Tracking')
- st.markdown("This is a demo of hand and finger tracking using [Google's MediaPipe](https://google.github.io/mediapipe/solutions/hands.html).")
  
- col1, col2 = st.columns(2)
+ def process_video(video_path, selected_connection, draw_background):
+     with tempfile.NamedTemporaryFile() as f:
+         out_path = f"{f.name}.{video_path.split('.')[-1]}"
+         with media.VideoReader(video_path) as r:
+             with media.VideoWriter(
+                     out_path, shape=r.shape, fps=r.fps, bps=r.bps) as w:
+                 for image in r:
+                     w.add_image(process_hands(image, selected_connection, draw_background))
+     return out_path
  
- with col1:
-     picture = st.camera_input("Take a picture with one or both hands in the shot")
-     draw_background = st.checkbox("Draw background", value=True)
-     selected_connection = st.selectbox("Select connections to draw", list(connections.keys()))
  
- with col2:
-     if picture is not None:
-         img = Image.open(picture)
-         img_array = np.array(img)
-         processed_img = process_hands(img_array)
-         st.image(processed_img)
+ demo = gr.Blocks()
  
- st.image('https://visitor-badge.glitch.me/badge?page_id=kristyc.mediapipe-hands')
+ with demo:
+     gr.Markdown(
+         """
+         # Hand & Finger Tracking
+         This is a demo of hand and finger tracking using [Google's MediaPipe](https://google.github.io/mediapipe/solutions/hands.html).
+         """)
+ 
+     with gr.Column():
+         draw_background = gr.Checkbox(value=True, label="Draw background?")
+         connection_keys = list(connections.keys())
+         selected_connection = gr.Dropdown(
+             label="Select connections to draw",
+             choices=connection_keys,
+             value=connection_keys[0],
+         )
+     with gr.Tabs():
+         with gr.TabItem(label="Record a video"):
+             recorded_video = gr.Video(source="webcam", format="mp4")
+             submit_recorded_video = gr.Button(value="Process Video")
+         with gr.TabItem(label="Upload a video"):
+             uploaded_video = gr.Video(format="mp4")
+             submit_uploaded_video = gr.Button(value="Process Video")
+ 
+     with gr.Column():
+         processed_video = gr.Video()
+ 
+     gr.Markdown('<img id="visitor-badge" alt="visitor badge" src="https://visitor-badge.glitch.me/badge?page_id=kristyc.mediapipe-hands" />')
+     submit_recorded_video.click(fn=process_video, inputs=[recorded_video, selected_connection, draw_background], outputs=[processed_video])
+     submit_uploaded_video.click(fn=process_video, inputs=[uploaded_video, selected_connection, draw_background], outputs=[processed_video])
+ 
+ demo.launch()
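
Note for readers following the diff: `hands`, `mp_draw`, and the rest of the `connections` dict come from unchanged context lines of app.py that the diff does not show. A minimal sketch of what that setup presumably looks like, using the standard MediaPipe Hands API (the `max_num_hands` and confidence values below are assumptions, not taken from the commit):

import mediapipe as mp

mp_hands = mp.solutions.hands
mp_draw = mp.solutions.drawing_utils  # provides draw_landmarks(), used in process_hands()

# Assumed configuration; the actual values live in the unchanged lines of app.py.
hands = mp_hands.Hands(
    static_image_mode=True,        # treat every video frame as an independent image
    max_num_hands=2,               # assumption: track up to two hands
    min_detection_confidence=0.5,  # assumption: MediaPipe's documented default
)

With that in place, the new `process_video` path can be smoke-tested outside Gradio, e.g. `process_video("clip.mp4", "HAND_PINKY_FINGER_CONNECTIONS", True)` (the file name is hypothetical); it returns the path of a re-encoded copy of the clip with the selected landmark connections drawn on every frame.
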
requirements.txt CHANGED
@@ -1,3 +1,4 @@
  mediapipe==0.8.10.1
- streamlit-webrtc==0.41.0
- opencv-contrib-python==4.6.0.66
+ gradio==3.0.20
+ opencv-contrib-python==4.6.0.66
+ mediapy==1.0.3
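
The new video pipeline leans on mediapy, which wraps ffmpeg for reading and writing frames, so it is worth confirming the pinned packages resolve after `pip install -r requirements.txt`. A throwaway check (illustrative only, not part of the commit):

from importlib.metadata import version

# Print the installed versions of the pinned dependencies; they should match
# requirements.txt (gradio 3.0.20, mediapipe 0.8.10.1, mediapy 1.0.3, opencv 4.6.0.66).
for pkg in ("gradio", "mediapipe", "mediapy", "opencv-contrib-python"):
    print(pkg, version(pkg))
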