xmrt committed on
Commit
a7307cb
·
1 Parent(s): 1c513f3
Files changed (1) hide show
  1. main_noweb.py +85 -33
main_noweb.py CHANGED
@@ -183,38 +183,7 @@ def pose2dhand(video, kpt_threshold):
183
 
184
  return "".join(out_file), "".join(kpoints)
185
 
186
- block = gr.Blocks()
187
-
188
- with block:
189
- with gr.Column():
190
- with gr.Tab("Upload video"):
191
- with gr.Column():
192
- with gr.Row():
193
- with gr.Column():
194
- with gr.Row():
195
- video_input = gr.Video(source="upload", type="filepath", height=256, width=192)
196
- # Insert slider with kpt_thr
197
- with gr.Column():
198
- gr.Markdown("Drag the keypoint threshold to filter out lower probability keypoints:")
199
- file_kpthr = gr.Slider(0, 1, value=0.3, label='Keypoint threshold')
200
- with gr.Row():
201
- submit_pose_file = gr.Button("Make 2d pose estimation")
202
- submit_pose3d_file = gr.Button("Make 3d pose estimation")
203
- submit_hand_file = gr.Button("Make 2d hand estimation")
204
-
205
- with gr.Row():
206
- video_output1 = gr.PlayableVideo(label = "Estimate human 2d poses", show_label=True, height=256)
207
- video_output2 = gr.PlayableVideo(label = "Estimate human 3d poses", show_label=True, height=256)
208
- video_output3 = gr.PlayableVideo(label = "Estimate human hand poses", show_label=True, height=256)
209
-
210
- gr.Markdown("Download the .json file that contains the keypoint positions for each frame in the video.")
211
- jsonoutput = gr.File(file_types=[".json"])
212
- gr.Markdown("""There are multiple ways to interact with these keypoints.
213
- \n The example below shows how you can calulate the angle on the elbow for example.
214
- \n Copy the code into your own preferred interpreter and experiment with the keypoint file.
215
- \n If you choose to run the code, start by installing the packages json and numpy. The complete overview of the keypoint indices can be seen in the tab 'General information'. """)
216
- gr.Code(
217
- value="""
218
 
219
  # Importing packages needed
220
  import json
@@ -253,7 +222,72 @@ wrist_point = data[0]['instances'][0]['keypoints'][wrist_index]
253
  angle = calculate_angle(shoulder_point, elbow_point, wrist_point)
254
  print("Angle is: ", angle)
255
 
256
- """,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
257
  language="python",
258
  interactive=False,
259
  show_label=False,
@@ -322,6 +356,24 @@ print("Angle is: ", angle)
322
  outputs = [video_output3, jsonoutput],
323
  queue=True)
324
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
325
  if __name__ == "__main__":
326
  block.queue(max_size=20,
327
  #concurrency_count=40, # When you increase the concurrency_count parameter in queue(), max_threads() in launch() is automatically increased as well.
 
183
 
184
  return "".join(out_file), "".join(kpoints)
185
 
186
+ code_example = """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
187
 
188
  # Importing packages needed
189
  import json
 
222
  angle = calculate_angle(shoulder_point, elbow_point, wrist_point)
223
  print("Angle is: ", angle)
224
 
225
+ """
226
+
227
+ block = gr.Blocks()
228
+
229
+ with block:
230
+ with gr.Column():
231
+ with gr.Tab("Upload video"):
232
+ with gr.Column():
233
+ with gr.Row():
234
+ with gr.Column():
235
+ with gr.Row():
236
+ video_input_web = gr.Video(source="webcam", height=256, width=192)
237
+ # Insert slider with kpt_thr
238
+ with gr.Column():
239
+ gr.Markdown("Drag the keypoint threshold to filter out lower probability keypoints:")
240
+ file_kpthr_web = gr.Slider(0, 1, value=0.3, label='Keypoint threshold')
241
+ with gr.Row():
242
+ submit_pose_file_web = gr.Button("Make 2d pose estimation")
243
+ submit_pose3d_file_web = gr.Button("Make 3d pose estimation")
244
+ submit_hand_file_web = gr.Button("Make 2d hand estimation")
245
+
246
+ with gr.Row():
247
+ video_output1_web = gr.PlayableVideo(label = "Estimate human 2d poses", show_label=True, height=256)
248
+ video_output2_web = gr.PlayableVideo(label = "Estimate human 3d poses", show_label=True, height=256)
249
+ video_output3_web = gr.PlayableVideo(label = "Estimate human hand poses", show_label=True, height=256)
250
+
251
+ gr.Markdown("Download the .json file that contains the keypoint positions for each frame in the video.")
252
+ jsonoutput_web = gr.File(file_types=[".json"])
253
+ gr.Markdown("""There are multiple ways to interact with these keypoints.
254
+ \n The example below shows how you can calulate the angle on the elbow for example.
255
+ \n Copy the code into your own preferred interpreter and experiment with the keypoint file.
256
+ \n If you choose to run the code, start by installing the packages json and numpy. The complete overview of the keypoint indices can be seen in the tab 'General information'. """)
257
+ gr.Code(
258
+ value=code_example,
259
+ language="python",
260
+ interactive=False,
261
+ show_label=False,
262
+ )
263
+ with gr.Tab("Upload video"):
264
+ with gr.Column():
265
+ with gr.Row():
266
+ with gr.Column():
267
+ with gr.Row():
268
+ video_input = gr.Video(source="upload", type="filepath", height=256, width=192)
269
+ # Insert slider with kpt_thr
270
+ with gr.Column():
271
+ gr.Markdown("Drag the keypoint threshold to filter out lower probability keypoints:")
272
+ file_kpthr = gr.Slider(0, 1, value=0.3, label='Keypoint threshold')
273
+ with gr.Row():
274
+ submit_pose_file = gr.Button("Make 2d pose estimation")
275
+ submit_pose3d_file = gr.Button("Make 3d pose estimation")
276
+ submit_hand_file = gr.Button("Make 2d hand estimation")
277
+
278
+ with gr.Row():
279
+ video_output1 = gr.PlayableVideo(label = "Estimate human 2d poses", show_label=True, height=256)
280
+ video_output2 = gr.PlayableVideo(label = "Estimate human 3d poses", show_label=True, height=256)
281
+ video_output3 = gr.PlayableVideo(label = "Estimate human hand poses", show_label=True, height=256)
282
+
283
+ gr.Markdown("Download the .json file that contains the keypoint positions for each frame in the video.")
284
+ jsonoutput = gr.File(file_types=[".json"])
285
+ gr.Markdown("""There are multiple ways to interact with these keypoints.
286
+ \n The example below shows how you can calulate the angle on the elbow for example.
287
+ \n Copy the code into your own preferred interpreter and experiment with the keypoint file.
288
+ \n If you choose to run the code, start by installing the packages json and numpy. The complete overview of the keypoint indices can be seen in the tab 'General information'. """)
289
+ gr.Code(
290
+ value=code_example,
291
  language="python",
292
  interactive=False,
293
  show_label=False,
 
356
  outputs = [video_output3, jsonoutput],
357
  queue=True)
358
 
359
+ # From web
360
+ submit_pose_file_web.click(fn=pose2d,
361
+ inputs= [video_input_web, file_kpthr_web],
362
+ outputs = [video_output1_web, jsonoutput_web],
363
+ queue=True)
364
+
365
+ submit_pose3d_file_web.click(fn=pose3d,
366
+ inputs= [video_input_web, file_kpthr_web],
367
+ outputs = [video_output2_web, jsonoutput_web],
368
+ #batch=True,
369
+ #max_batch_size=16,
370
+ queue=True) # Sometimes it worked with queue false? But still slow
371
+
372
+ submit_hand_file_web.click(fn=pose2dhand,
373
+ inputs= [video_input_web, file_kpthr_web],
374
+ outputs = [video_output3_web, jsonoutput_web],
375
+ queue=True)
376
+
377
  if __name__ == "__main__":
378
  block.queue(max_size=20,
379
  #concurrency_count=40, # When you increase the concurrency_count parameter in queue(), max_threads() in launch() is automatically increased as well.