This repository was archived by the owner on Jun 12, 2024. It is now read-only.

Commit 83d78d8

Support the entire OpenPose pipeline
1 parent: 160d7d4

File tree

8 files changed: +170, -131 lines


aist_plusplus/utils.py

Lines changed: 60 additions & 0 deletions
@@ -97,3 +97,63 @@ def ffmpeg_video_to_images(video_path, image_dir, fps=None) -> None:
       stream, os.path.join(image_dir, '%08d.jpg'), start_number=0)
   stream = ffmpeg.overwrite_output(stream)
   ffmpeg.run(stream, quiet=True)
+
+
+def unify_joint_mappings(dataset='openpose25'):
+  """Unify different joint definitions.
+
+  Output unified definition:
+    ['Nose',
+     'RShoulder', 'RElbow', 'RWrist',
+     'LShoulder', 'LElbow', 'LWrist',
+     'RHip', 'RKnee', 'RAnkle',
+     'LHip', 'LKnee', 'LAnkle',
+     'REye', 'LEye',
+     'REar', 'LEar',
+     'LBigToe', 'LHeel',
+     'RBigToe', 'RHeel']
+
+  Args:
+    dataset: `openpose25`, `coco` (17 joints) or `smpl`.
+  Returns:
+    a list of indices that maps the joints to the unified definition.
+  """
+  if dataset == 'openpose25':
+    return np.array([
+        0,
+        2, 3, 4,
+        5, 6, 7,
+        9, 10, 11,
+        12, 13, 14,
+        15, 16,
+        17, 18,
+        19, 21,
+        22, 24,
+    ], dtype=np.int32)
+  elif dataset == 'smpl':
+    # note SMPL needs to be "left-right flipped" to be consistent
+    # with others
+    return np.array([
+        24,
+        16, 18, 20,
+        17, 19, 21,
+        1, 4, 7,
+        2, 5, 8,
+        26, 25,
+        28, 27,
+        32, 34,
+        29, 31,
+    ], dtype=np.int32)
+  elif dataset == 'coco':
+    return np.array([
+        0,
+        5, 7, 9,
+        6, 8, 10,
+        11, 13, 15,
+        12, 14, 16,
+        1, 2,
+        3, 4,
+    ], dtype=np.int32)
+  else:
+    raise ValueError(f'{dataset} is not supported')
+
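
The new `unify_joint_mappings` helper is what lets the rest of the pipeline compare skeletons that use different joint orderings. Below is a minimal usage sketch with placeholder arrays standing in for real detections; the shapes and dummy data are assumptions for illustration, and only the function itself comes from this commit.

```python
import numpy as np

from aist_plusplus.utils import unify_joint_mappings

# Hypothetical inputs: 3D joints in two different conventions.
# (frames, joints, 3): 25 joints for OpenPose BODY_25; the SMPL joint
# array just needs enough joints for the mapping's largest index (34).
openpose_joints = np.random.rand(10, 25, 3)  # placeholder data
smpl_joints = np.random.rand(10, 45, 3)      # placeholder data

# Re-index both skeletons into the unified 21-joint definition
# (nose, shoulders/elbows/wrists, hips/knees/ankles, eyes, ears, toes, heels).
openpose_unified = openpose_joints[:, unify_joint_mappings('openpose25'), :]
smpl_unified = smpl_joints[:, unify_joint_mappings('smpl'), :]

# The two arrays are now joint-for-joint comparable.
per_joint_error = np.linalg.norm(openpose_unified - smpl_unified, axis=-1)
print(per_joint_error.shape)  # (10, 21)
```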

demos/run_openpose_pipeline.sh

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+#! /bin/bash
+
+SEQUENCE_NAME=gBR_sBM_cAll_d04_mBR0_ch01
+
+CUDA_VISIBLE_DEVICES=0,1,2,3 python processing/run_openpose.py --sequence_names=${SEQUENCE_NAME}
+CUDA_VISIBLE_DEVICES=0,1,2,3 python processing/run_preprocessing.py --sequence_names=${SEQUENCE_NAME}
+CUDA_VISIBLE_DEVICES=0,1,2,3 python processing/run_estimate_keypoints.py --sequence_names=${SEQUENCE_NAME}
+CUDA_VISIBLE_DEVICES=0,1,2,3 python processing/run_estimate_smpl.py --sequence_names=${SEQUENCE_NAME}
+CUDA_VISIBLE_DEVICES=0,1,2,3 python processing/run_segmentation.py --sequence_names=${SEQUENCE_NAME}
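
The new `demos/run_openpose_pipeline.sh` chains the five processing stages for one sequence. If you would rather drive the same stages from Python, for example to loop over several sequences, a minimal sketch could look like this; the stage order and the `--sequence_names` flag come from the script above, while the sequence list and GPU selection are placeholder assumptions.

```python
import os
import subprocess

# Assumed inputs for illustration; any AIST++ sequence names would do.
SEQUENCES = ['gBR_sBM_cAll_d04_mBR0_ch01', 'gBR_sBM_cAll_d05_mBR1_ch02']

# Same stage order as demos/run_openpose_pipeline.sh.
STAGES = [
    'processing/run_openpose.py',
    'processing/run_preprocessing.py',
    'processing/run_estimate_keypoints.py',
    'processing/run_estimate_smpl.py',
    'processing/run_segmentation.py',
]

env = dict(os.environ, CUDA_VISIBLE_DEVICES='0,1,2,3')
for seq_name in SEQUENCES:
    for stage in STAGES:
        # Each stage accepts a comma-separated --sequence_names list.
        subprocess.run(
            ['python', stage, f'--sequence_names={seq_name}'],
            env=env, check=True)
```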

demos/run_vis.py

Lines changed: 2 additions & 2 deletions
@@ -25,7 +25,7 @@
 FLAGS = flags.FLAGS
 flags.DEFINE_string(
     'anno_dir',
-    '/home/ruilongli/data/AIST++_openpose/',
+    '/home/ruilongli/data/AIST++/',
     'input local dictionary for AIST++ annotations.')
 flags.DEFINE_string(
     'video_dir',
@@ -37,7 +37,7 @@
     'input local dictionary that stores SMPL data.')
 flags.DEFINE_string(
     'video_name',
-    'gKR_sFM_c01_d29_mKR5_ch14',
+    'gBR_sBM_c01_d04_mBR0_ch01',
     'input video name to be visualized.')
 flags.DEFINE_string(
     'save_dir',

processing/run_estimate_keypoints.py

Lines changed: 21 additions & 14 deletions
@@ -23,6 +23,11 @@
 import numpy as np
 
 FLAGS = flags.FLAGS
+
+flags.DEFINE_list(
+    'sequence_names',
+    None,
+    'list of sequence names to be processed. None means to process all.')
 flags.DEFINE_string(
     'anno_dir',
     '/home/ruilongli/data/AIST++_openpose/',
@@ -44,9 +49,10 @@
 def main(_):
   aist_dataset = AISTDataset(anno_dir=FLAGS.anno_dir)
 
-  seq_names = [
-      file_name.split(".")[0]
-      for file_name in os.listdir(aist_dataset.keypoint2d_dir)]
+  if FLAGS.sequence_names:
+    seq_names = FLAGS.sequence_names
+  else:
+    seq_names = aist_dataset.mapping_seq2env.keys()
 
   for seq_name in seq_names:
     logging.info('processing %s', seq_name)
@@ -87,18 +93,19 @@ def main(_):
       body_bones = np.array([
           (0, 15), (0, 16), (15, 17), (16, 18),
          (0, 1), (1, 2), (2, 3), (3, 4), (1, 5), (5, 6), (6, 7), (1, 8),
-          (8, 9), (9, 10), (10, 11), (11, 24), (11, 22), (22, 23),
-          (8, 12), (12, 13), (13, 14), (14, 21), (14, 19), (19, 20),
-      ])
-      hand_bones = np.array([
-          (0, 1), (1, 2), (2, 3), (3, 4),
-          (0, 5), (5, 6), (6, 7), (7, 8),
-          (0, 9), (9, 10), (10, 11), (11, 12),
-          (0, 13), (13, 14), (14, 15), (15, 16),
-          (0, 17), (17, 18), (18, 19), (19, 20)
+          (8, 9), (9, 10), (10, 11), (11, 24), (11, 22), (11, 23), (22, 23), (23, 24), (24, 22),
+          (8, 12), (12, 13), (13, 14), (14, 21), (14, 19), (14, 20), (19, 20), (20, 21), (21, 19)
       ])
-      bones = np.concatenate([
-          body_bones, hand_bones + 25, hand_bones + 25 + 21]).tolist()
+      bones = body_bones.tolist()
+      # hand_bones = np.array([
+      #     (0, 1), (1, 2), (2, 3), (3, 4),
+      #     (0, 5), (5, 6), (6, 7), (7, 8),
+      #     (0, 9), (9, 10), (10, 11), (11, 12),
+      #     (0, 13), (13, 14), (14, 15), (15, 16),
+      #     (0, 17), (17, 18), (18, 19), (19, 20)
+      # ])
+      # bones = np.concatenate([
+      #     body_bones, hand_bones + 25, hand_bones + 25 + 21]).tolist()
     else:
       raise ValueError(FLAGS.data_type)
     keypoints3d = cgroup.triangulate(
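
The reworked `body_bones` list keeps only the BODY_25 body skeleton (the hand bones are commented out) and closes the foot triangles. A bone list like this pairs naturally with the triangulated keypoints; the sketch below, using an assumed `(frames, 25, 3)` array of dummy data, shows one way to use it, e.g. checking bone-length stability across frames. The check itself is a suggestion, not something this script does.

```python
import numpy as np

# BODY_25 bone list as defined in the diff above (hands disabled).
body_bones = np.array([
    (0, 15), (0, 16), (15, 17), (16, 18),
    (0, 1), (1, 2), (2, 3), (3, 4), (1, 5), (5, 6), (6, 7), (1, 8),
    (8, 9), (9, 10), (10, 11), (11, 24), (11, 22), (11, 23), (22, 23), (23, 24), (24, 22),
    (8, 12), (12, 13), (13, 14), (14, 21), (14, 19), (14, 20), (19, 20), (20, 21), (21, 19),
])

# Hypothetical triangulated keypoints: (frames, 25 joints, xyz).
keypoints3d = np.random.rand(100, 25, 3)

# Per-frame bone lengths; a large frame-to-frame spread usually signals
# triangulation outliers worth inspecting before SMPL fitting.
starts = keypoints3d[:, body_bones[:, 0], :]
ends = keypoints3d[:, body_bones[:, 1], :]
bone_lengths = np.linalg.norm(ends - starts, axis=-1)  # (100, 30)
print(bone_lengths.std(axis=0))
```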

processing/run_estimate_smpl.py

Lines changed: 19 additions & 78 deletions
@@ -20,6 +20,7 @@
 from absl import flags
 from absl import logging
 from aist_plusplus.loader import AISTDataset
+from aist_plusplus.utils import unify_joint_mappings
 import numpy as np
 from smplx import SMPL
 import torch
@@ -31,93 +32,35 @@
 SUPPORT_VIS = False
 
 FLAGS = flags.FLAGS
+flags.DEFINE_list(
+    'sequence_names',
+    None,
+    'list of sequence names to be processed. None means to process all.')
 flags.DEFINE_string(
     'anno_dir',
-    '/usr/local/google/home/ruilongli/data/public/aist_plusplus_final/',
+    '/home/ruilongli/data/AIST++_openpose/',
     'input local dictionary for AIST++ annotations.')
 flags.DEFINE_string(
     'smpl_dir',
-    '/usr/local/google/home/ruilongli/data/SMPL/',
+    '/home/ruilongli/data/smpl_model/smpl/',
     'input local dictionary that stores SMPL data.')
 flags.DEFINE_string(
     'save_dir',
-    '/usr/local/google/home/ruilongli/data/public/aist_plusplus_final/motions/',
+    '/home/ruilongli/data/AIST++_openpose/motions/',
    'output local dictionary that stores AIST++ SMPL-format motion data.')
-flags.DEFINE_list(
-    'sequence_names',
-    None,
-    'list of sequence names to be processed. None means to process all.')
-flags.DEFINE_string(
-    'save_dir_gcs',
-    None,
-    'output GCS directory.')
 flags.DEFINE_bool(
     'visualize',
     False,
     'Wether to visualize the fitting process.')
+flags.DEFINE_enum(
+    'data_type',
+    'openpose',
+    ['internal', 'openpose'],
+    'Which openpose detector is being used.')
 np.random.seed(0)
 torch.manual_seed(0)
 
 
-def unify_joint_mappings(dataset='openpose25'):
-  """Unify different joint definitions.
-
-  Output unified definition:
-    ['Nose',
-     'RShoulder', 'RElbow', 'RWrist',
-     'LShoulder', 'LElbow', 'LWrist',
-     'RHip', 'RKnee', 'RAnkle',
-     'LHip', 'LKnee', 'LAnkle',
-     'REye', 'LEye',
-     'REar', 'LEar',
-     'LBigToe', 'LHeel',
-     'RBigToe', 'RHeel']
-
-  Args:
-    dataset: `openpose25`, `coco` (17 joints) or `smpl`.
-  Returns:
-    a list of indices that maps the joints to the unified definition.
-  """
-  if dataset == 'openpose25':
-    return np.array([
-        0,
-        2, 3, 4,
-        5, 6, 7,
-        9, 10, 11,
-        12, 13, 14,
-        15, 16,
-        17, 18,
-        19, 21,
-        22, 24,
-    ], dtype=np.int32)
-  elif dataset == 'smpl':
-    # note SMPL needs to be "left-right flipped" to be consistent
-    # with others
-    return np.array([
-        24,
-        16, 18, 20,
-        17, 19, 21,
-        1, 4, 7,
-        2, 5, 8,
-        26, 25,
-        28, 27,
-        32, 34,
-        29, 31,
-    ], dtype=np.int32)
-  elif dataset == 'coco':
-    return np.array([
-        0,
-        5, 7, 9,
-        6, 8, 10,
-        11, 13, 15,
-        12, 14, 16,
-        1, 2,
-        3, 4,
-    ], dtype=np.int32)
-  else:
-    raise ValueError(f'{dataset} is not supported')
-
-
 class SMPLRegressor:
   """SMPL fitting based on 3D keypoints."""
 
@@ -165,7 +108,6 @@ def get_optimizer(self, smpl, step, base_lr):
 
   def fit(self, keypoints3d, dtype='coco', verbose=True):
     """Run fitting to optimize the SMPL parameters."""
-    assert dtype == 'coco', 'only support coco format for now.'
     assert len(keypoints3d.shape) == 3, 'input shape should be [N, njoints, 3]'
     mapping_target = unify_joint_mappings(dataset=dtype)
     keypoints3d = keypoints3d[:, mapping_target, :]
@@ -224,7 +166,12 @@ def main(_):
         aist_dataset.keypoint3d_dir, seq_name, use_optim=True)
 
     # SMPL fitting
-    smpl, loss = smpl_regressor.fit(keypoints3d, dtype='coco', verbose=True)
+    if FLAGS.data_type == "internal":
+      smpl, loss = smpl_regressor.fit(keypoints3d, dtype='coco', verbose=True)
+    elif FLAGS.data_type == "openpose":
+      smpl, loss = smpl_regressor.fit(keypoints3d, dtype='openpose25', verbose=True)
+    else:
+      raise ValueError(FLAGS.data_type)
 
     # One last time forward
     with torch.no_grad():
@@ -245,12 +192,6 @@ def main(_):
         'smpl_loss': loss,
     }, f, protocol=pickle.HIGHEST_PROTOCOL)
 
-  # upload results to GCS
-  if FLAGS.save_dir_gcs:
-    import gcs_utils
-    gcs_utils.upload_files_to_gcs(
-        local_folder=FLAGS.save_dir,
-        gcs_path=FLAGS.save_dir_gcs)
 
 if __name__ == '__main__':
   app.run(main)
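
With the new `--data_type` flag, `main()` now picks the joint mapping that matches the detector before fitting SMPL, and `unify_joint_mappings` (moved to `aist_plusplus/utils.py`) handles the re-indexing inside `fit()`. The sketch below re-creates just that dispatch plus the re-indexing step; `SMPLRegressor` itself is not shown in this diff, so the helper name `select_fit_targets`, the input shapes, and the dummy data are assumptions for illustration.

```python
import numpy as np

from aist_plusplus.utils import unify_joint_mappings

def select_fit_targets(keypoints3d, data_type):
    """Mirror of the dispatch added in main(): pick the joint mapping
    that matches the detector that produced `keypoints3d`."""
    if data_type == 'internal':
        dtype = 'coco'        # 17-joint keypoints from the internal detector
    elif data_type == 'openpose':
        dtype = 'openpose25'  # BODY_25 keypoints from OpenPose
    else:
        raise ValueError(data_type)
    mapping = unify_joint_mappings(dataset=dtype)
    # Same reordering that SMPLRegressor.fit() applies to its targets.
    return keypoints3d[:, mapping, :]

# Hypothetical OpenPose-triangulated sequence: (frames, 25, 3).
keypoints3d = np.random.rand(8, 25, 3)
targets = select_fit_targets(keypoints3d, data_type='openpose')
print(targets.shape)  # (8, 21): comparable against SMPL joints indexed
                      # with unify_joint_mappings('smpl')
```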

processing/run_openpose.py

Lines changed: 34 additions & 24 deletions
@@ -22,10 +22,14 @@
 from aist_plusplus.utils import ffmpeg_video_to_images
 
 FLAGS = flags.FLAGS
+flags.DEFINE_list(
+    'sequence_names',
+    None,
+    'list of sequence names to be processed. None means to process all.')
 flags.DEFINE_string(
-    'seq_name',
-    'gKR_sFM_cAll_d29_mKR5_ch14',
-    'input video name to be visualized.')
+    'anno_dir',
+    '/home/ruilongli/data/AIST++_openpose/',
+    'input local dictionary for AIST++ annotations.')
 flags.DEFINE_string(
     'openpose_dir',
     '/home/ruilongli/workspace/openpose',
@@ -47,28 +51,34 @@
 def main(_):
   os.makedirs(FLAGS.image_save_dir, exist_ok=True)
   os.makedirs(FLAGS.openpose_save_dir, exist_ok=True)
-  for view in AISTDataset.VIEWS:
-    video_name = AISTDataset.get_video_name(FLAGS.seq_name, view)
-    video_file = os.path.join(FLAGS.video_dir, video_name + ".mp4")
-    if not os.path.exists(video_file):
-      continue
-    logging.info('processing %s', video_file)
-
-    # extract images
-    image_dir = os.path.join(FLAGS.image_save_dir, video_name)
-    ffmpeg_video_to_images(video_file, image_dir, fps=60)
-
-    # extract keypoints
-    save_dir = os.path.join(FLAGS.openpose_save_dir, video_name)
-    os.system(
-        "cd %s; " % FLAGS.openpose_dir +
-        "./build/examples/openpose/openpose.bin " +
-        "--image_dir %s " % image_dir +
-        "--write_json %s " % save_dir +
-        "--display 0 --hand --face --render_pose 0"
-    )
 
-  # break
+  if FLAGS.sequence_names:
+    seq_names = FLAGS.sequence_names
+  else:
+    aist_dataset = AISTDataset(FLAGS.anno_dir)
+    seq_names = aist_dataset.mapping_seq2env.keys()
+
+  for seq_name in seq_names:
+    for view in AISTDataset.VIEWS:
+      video_name = AISTDataset.get_video_name(seq_name, view)
+      video_file = os.path.join(FLAGS.video_dir, video_name + ".mp4")
+      if not os.path.exists(video_file):
+        continue
+      logging.info('processing %s', video_file)
+
+      # extract images
+      image_dir = os.path.join(FLAGS.image_save_dir, video_name)
+      ffmpeg_video_to_images(video_file, image_dir, fps=60)
+
+      # extract keypoints
+      save_dir = os.path.join(FLAGS.openpose_save_dir, video_name)
+      os.system(
+          "cd %s; " % FLAGS.openpose_dir +
+          "./build/examples/openpose/openpose.bin " +
+          "--image_dir %s " % image_dir +
+          "--write_json %s " % save_dir +
+          "--display 0 --hand --face --render_pose 0"
+      )
 
 if __name__ == '__main__':
   app.run(main)
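
`run_openpose.py` shells out to the OpenPose binary with `--write_json`, which produces one JSON file per extracted frame. Downstream steps then need those files back as arrays. Below is a minimal reader sketch, assuming the standard OpenPose JSON layout (`people` → `pose_keypoints_2d` as a flat x, y, confidence list) and keeping only the first detected person per frame; the repo's own preprocessing may handle multi-person frames differently, and the example path is hypothetical.

```python
import glob
import json
import os

import numpy as np

def load_openpose_keypoints(json_dir):
    """Read per-frame OpenPose JSON files into a (frames, 25, 3) array of
    (x, y, confidence), taking the first detected person in each frame."""
    frames = []
    for path in sorted(glob.glob(os.path.join(json_dir, '*.json'))):
        with open(path) as f:
            data = json.load(f)
        if data['people']:
            kpts = np.array(
                data['people'][0]['pose_keypoints_2d'],
                dtype=np.float32).reshape(-1, 3)  # 25 joints for BODY_25
        else:
            kpts = np.zeros((25, 3), dtype=np.float32)  # no detection
        frames.append(kpts)
    return np.stack(frames, axis=0)

# Hypothetical directory layout following run_openpose.py's per-video folders:
# keypoints2d = load_openpose_keypoints(
#     '/home/ruilongli/data/AIST++_openpose/openpose/gBR_sBM_c01_d04_mBR0_ch01')
```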
