batch true and queue true again

main_noweb.py  CHANGED  (+36 -34)
@@ -103,6 +103,7 @@ def pose3d(video, kpt_threshold):

     os.makedirs(add_dir)
     print(check_fps(video))
+    #video = human3d.preprocess(video, batch_size=8)
     result_generator = human3d(video,
                                vis_out_dir = add_dir,
                                radius = 5,
@@ -221,43 +222,42 @@ def UI():
         gr.Code(
             value="""

-# We select the first identified person in the first frame (zero index) as an example
-# To calculate the angle of the right elbow we take the point before and after and according to the indices that will be 6 (right shoulder) and 9 (right wrist)
-predictions = data['predictions'][0] # Assuming batch_size is 1
+# Importing packages needed
+import json
+import numpy as np
+
+# First we load the data
+with open(file_path, 'r') as json_file:
+    data = json.load(json_file)
+
+# Then we define a function for calculating angles
+def calculate_angle(a, b, c):
+    a = np.array(a) # First point
+    b = np.array(b) # Middle point
+    c = np.array(c) # End point
+
+    radians = np.arctan2(c[1]-b[1], c[0]-b[0]) - np.arctan2(a[1]-b[1], a[0]-b[0])
+    angle = np.abs(radians*180.0/np.pi)
+
+    if angle > 180.0:
+        angle = 360 - angle
+
+    return angle
+
+# COCO keypoint indices
+shoulder_index = 6
+elbow_index = 8
+wrist_index = 9
+
+# We select the first identified person in the first frame (zero index) as an example
+# To calculate the angle of the right elbow we take the point before and after it, which according to these indices are 6 (right shoulder) and 9 (right wrist)
+shoulder_point = data[0]['instances'][0]['keypoints'][shoulder_index]
+elbow_point = data[0]['instances'][0]['keypoints'][elbow_index]
+wrist_point = data[0]['instances'][0]['keypoints'][wrist_index]
+
+angle = calculate_angle(shoulder_point, elbow_point, wrist_point)
+print("Angle is: ", angle)

             """,
             language="python",
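A note on the example added in the hunk above: the arctan2 expression in calculate_angle returns the angle at the middle point, in degrees, folded into the 0-180 range. A quick standalone sanity check (the helper is restated from the snippet above so it runs on its own; the coordinate triples are made-up 2D points, not real model output):

import numpy as np

def calculate_angle(a, b, c):
    # Same formula as in the gr.Code example: angle at b, between rays b->a and b->c
    a, b, c = np.array(a), np.array(b), np.array(c)
    radians = np.arctan2(c[1]-b[1], c[0]-b[0]) - np.arctan2(a[1]-b[1], a[0]-b[0])
    angle = np.abs(radians*180.0/np.pi)
    return 360 - angle if angle > 180.0 else angle

# Right angle: shoulder directly above the elbow, wrist straight out to the side
print(calculate_angle((0, 1), (0, 0), (1, 0)))  # 90.0
# Straight arm: all three joints on one line
print(calculate_angle((0, 0), (1, 0), (2, 0)))  # 180.0

Only indices [0] and [1] of each point are used, so if the stored keypoints are 3D the formula effectively measures the angle of their x-y projection.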
@@ -315,7 +315,9 @@ def UI():
         submit_pose3d_file.click(fn=pose3d,
                                  inputs= [video_input, file_kpthr],
                                  outputs = [video_output2, jsonoutput],
+                                 batch=True,
+                                 max_batch_size=16,
+                                 queue=True)

         submit_hand_file.click(fn=pose2dhand,
                                inputs= [video_input, file_kpthr],
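A Gradio side note on the last hunk: with batch=True, the event handler receives each input as a list of up to max_batch_size items and must return a list for each output, and batched events are processed through the queue, which is presumably why queue=True is set at the same time. If pose3d itself still handles a single video per call, one way to adapt it is a small list-aware wrapper. The sketch below is illustrative only: pose3d_batch is not a name from the repo, and it assumes pose3d returns one value per output component, as outputs=[video_output2, jsonoutput] suggests.

def pose3d_batch(videos, kpt_thresholds):
    # Gradio passes parallel lists when batch=True; run the existing
    # single-video pose3d on each (video, threshold) pair and regroup
    # the results into one list per output component.
    out_videos, out_jsons = [], []
    for video, thr in zip(videos, kpt_thresholds):
        vid, js = pose3d(video, thr)
        out_videos.append(vid)
        out_jsons.append(js)
    return out_videos, out_jsons

submit_pose3d_file.click(fn=pose3d_batch,
                         inputs=[video_input, file_kpthr],
                         outputs=[video_output2, jsonoutput],
                         batch=True,
                         max_batch_size=16,
                         queue=True)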