kristyc committed
Commit 621e425 · 1 Parent(s): 6933f69

Fall back to using the camera component due to an issue with real-time video streaming in the hosted app

Files changed (1)
app.py  +19 -13
app.py CHANGED
@@ -3,6 +3,7 @@ from streamlit_webrtc import webrtc_streamer
 import av
 import mediapipe as mp
 import numpy as np
+from PIL import Image
 
 mp_hands = mp.solutions.hands
 mp_hands_connections = mp.solutions.hands_connections
@@ -19,24 +20,29 @@ connections = {
     'HAND_PINKY_FINGER_CONNECTIONS': mp_hands_connections.HAND_PINKY_FINGER_CONNECTIONS,
 }
 
-st.title('Hand & Finger Tracking')
-st.markdown("This is a demo of hand and finger tracking using [Google's MediaPipe](https://google.github.io/mediapipe/solutions/hands.html).")
-draw_background = st.checkbox("Draw background", value=True)
-selected_connection = st.selectbox("Select connections to draw", list(connections.keys()))
-
-def process_hands(frame):
-    img = frame.to_ndarray(format="bgr24")
+def process_hands(img):
     results = hands.process(img)
     output_img = img if draw_background else np.zeros_like(img)
     if results.multi_hand_landmarks:
         for hand_landmarks in results.multi_hand_landmarks:
             mp_draw.draw_landmarks(output_img, hand_landmarks, connections[selected_connection])
-    return av.VideoFrame.from_ndarray(output_img, format="bgr24")
+    return output_img
+
+st.title('Hand & Finger Tracking')
+st.markdown("This is a demo of hand and finger tracking using [Google's MediaPipe](https://google.github.io/mediapipe/solutions/hands.html).")
+
+col1, col2 = st.columns(2)
+
+with col1:
+    picture = st.camera_input("Take a picture")
+    draw_background = st.checkbox("Draw background", value=True)
+    selected_connection = st.selectbox("Select connections to draw", list(connections.keys()))
 
-webrtc_streamer(
-    key="streamer",
-    video_frame_callback=process_hands,
-    media_stream_constraints={"video": True, "audio": False},
-)
+with col2:
+    if picture is not None:
+        img = Image.open(picture)
+        img_array = np.array(img)
+        processed_img = process_hands(img_array)
+        st.image(processed_img)
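For context, here is a minimal, self-contained sketch of the camera-input flow the new app.py relies on. It is a rough approximation rather than the exact Space code: it assumes streamlit, mediapipe, numpy, and Pillow are installed, and it configures MediaPipe Hands with static_image_mode since st.camera_input yields a single snapshot instead of a video stream.

    import mediapipe as mp
    import numpy as np
    import streamlit as st
    from PIL import Image

    # Assumption: the real app.py creates its hands/mp_draw objects near the top
    # of the file, outside the hunks shown above; this setup is illustrative.
    hands = mp.solutions.hands.Hands(static_image_mode=True, max_num_hands=2)
    mp_draw = mp.solutions.drawing_utils

    # st.camera_input returns a file-like object once a photo is taken, else None.
    picture = st.camera_input("Take a picture")
    if picture is not None:
        img = np.array(Image.open(picture))  # RGB ndarray, the format MediaPipe Hands expects
        results = hands.process(img)
        if results.multi_hand_landmarks:
            for hand_landmarks in results.multi_hand_landmarks:
                mp_draw.draw_landmarks(img, hand_landmarks, mp.solutions.hands.HAND_CONNECTIONS)
        st.image(img)

Unlike the webrtc_streamer callback it replaces, this path processes one still image per interaction, which is what the commit message refers to as falling back to the camera component.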