
Commit c1db5da

Handle approx_time_maxspeed specified once (#160)
Handle the case where approx_time_maxspeed is specified only once for multiple cameras, log a warning when the synchronization frame range falls outside the pose frame range, and check that there are enough samples for filtering.
1 parent df4bced commit c1db5da

File tree

2 files changed, +36 −7 lines changed


.gitignore

Lines changed: 1 addition & 0 deletions

@@ -17,3 +17,4 @@ dist/
 **/pose/
 **/pose-sync/
 **/pose-associated/
+**/kinematics/

Pose2Sim/synchronization.py

Lines changed: 35 additions & 7 deletions
@@ -657,10 +657,23 @@ def synchronize_cams_all(config_dict):
 
     # Determine frames to consider for synchronization
     if isinstance(approx_time_maxspeed, list): # search around max speed
+        logging.info(f'Synchronization is calculated around the times {approx_time_maxspeed} +/- {time_range_around_maxspeed} s.')
+        if len(approx_time_maxspeed) == 1 and cam_nb > 1:
+            approx_time_maxspeed *= cam_nb
         approx_frame_maxspeed = [int(fps * t) for t in approx_time_maxspeed]
         nb_frames_per_cam = [len(fnmatch.filter(os.listdir(os.path.join(json_dir)), '*.json')) for json_dir in json_dirs]
-        search_around_frames = [[int(a-lag_range) if a-lag_range>0 else 0, int(a+lag_range) if a+lag_range<nb_frames_per_cam[i] else nb_frames_per_cam[i]+f_range[0]] for i,a in enumerate(approx_frame_maxspeed)]
-        logging.info(f'Synchronization is calculated around the times {approx_time_maxspeed} +/- {time_range_around_maxspeed} s.')
+
+        search_around_frames = []
+        for i, frame in enumerate(approx_frame_maxspeed):
+            start_frame = max(int(frame - lag_range), 0)
+            end_frame = min(int(frame + lag_range), nb_frames_per_cam[i] + f_range[0])
+
+            if start_frame != frame - lag_range:
+                logging.warning(f'Frame range start adjusted for camera {i}: {frame - lag_range} -> {start_frame}')
+            if end_frame != frame + lag_range:
+                logging.warning(f'Frame range end adjusted for camera {i}: {frame + lag_range} -> {end_frame}')
+
+            search_around_frames.append([start_frame, end_frame])
     elif approx_time_maxspeed == 'auto': # search on the whole sequence (slower if long sequence)
         search_around_frames = [[f_range[0], f_range[0]+nb_frames_per_cam[i]] for i in range(cam_nb)]
         logging.info('Synchronization is calculated on the whole sequence. This may take a while.')
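For illustration, here is a minimal standalone sketch of the search-window logic introduced above. The values for cam_nb, fps, time_range_around_maxspeed, lag_range, nb_frames_per_cam and f_range are made up for the example and are not repository defaults: a single approx_time_maxspeed entry is reused for every camera, and each search window is clamped to the available pose frames with a warning when it is cut.

import logging

logging.basicConfig(level=logging.INFO)

# Hypothetical values, only for this sketch (not repository defaults)
cam_nb = 3
fps = 60
time_range_around_maxspeed = 2.0                   # seconds searched on each side of the max-speed time
lag_range = int(time_range_around_maxspeed * fps)  # assumed definition of lag_range for this sketch
nb_frames_per_cam = [500, 120, 500]                # pose frames available per camera
f_range = [0]                                      # first frame of the pose frame range

approx_time_maxspeed = [2.5]                       # a single time given for several cameras
if len(approx_time_maxspeed) == 1 and cam_nb > 1:
    approx_time_maxspeed *= cam_nb                 # reuse the same time for every camera

approx_frame_maxspeed = [int(fps * t) for t in approx_time_maxspeed]

search_around_frames = []
for i, frame in enumerate(approx_frame_maxspeed):
    start_frame = max(int(frame - lag_range), 0)
    end_frame = min(int(frame + lag_range), nb_frames_per_cam[i] + f_range[0])
    if start_frame != frame - lag_range:
        logging.warning(f'Frame range start adjusted for camera {i}: {frame - lag_range} -> {start_frame}')
    if end_frame != frame + lag_range:
        logging.warning(f'Frame range end adjusted for camera {i}: {frame + lag_range} -> {end_frame}')
    search_around_frames.append([start_frame, end_frame])

print(search_around_frames)                        # [[30, 270], [30, 120], [30, 270]] with the values above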
@@ -681,19 +694,22 @@ def synchronize_cams_all(config_dict):
     # Extract, interpolate, and filter keypoint coordinates
     logging.info('Synchronizing...')
     df_coords = []
-    b, a = signal.butter(filter_order/2, filter_cutoff/(fps/2), 'low', analog = False)
+    b, a = signal.butter(int(filter_order/2), filter_cutoff/(fps/2), 'low', analog = False)
     json_files_names_range = [[j for j in json_files_cam if int(re.split(r'(\d+)',j)[-2]) in range(*frames_cam)] for (json_files_cam, frames_cam) in zip(json_files_names,search_around_frames)]
-    json_files_range = [[os.path.join(pose_dir, j_dir, j_file) for j_file in json_files_names_range[j]] for j, j_dir in enumerate(json_dirs_names)]
 
     if np.array([j==[] for j in json_files_names_range]).any():
         raise ValueError(f'No json files found within the specified frame range ({frame_range}) at the times {approx_time_maxspeed} +/- {time_range_around_maxspeed} s.')
 
+    json_files_range = [[os.path.join(pose_dir, j_dir, j_file) for j_file in json_files_names_range[j]] for j, j_dir in enumerate(json_dirs_names)]
+
     # Handle manual selection if multi person is True
     if multi_person:
         selected_id_list = select_person(vid_or_img_files, cam_names, json_files_names_range, search_around_frames, pose_dir, json_dirs_names)
     else:
         selected_id_list = [None] * cam_nb
 
+    padlen = 3 * (max(len(a), len(b)) - 1)
+
     for i in range(cam_nb):
        df_coords.append(convert_json2pandas(json_files_range[i], likelihood_threshold=likelihood_threshold, keypoints_ids=keypoints_ids, multi_person=multi_person, selected_id=selected_id_list[i]))
        df_coords[i] = drop_col(df_coords[i],3) # drop likelihood
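A quick aside on the padlen threshold defined above (a sketch, not repository code): scipy.signal.filtfilt pads the signal at both ends before filtering and raises a ValueError when the input is too short for that padding, which is the kind of failure the new sample-count check is meant to avoid. The values of fps, filter_cutoff and filter_order below are assumed for the example.

import numpy as np
from scipy import signal

# Assumed example values (not repository defaults)
fps, filter_cutoff, filter_order = 60, 6, 4

b, a = signal.butter(int(filter_order/2), filter_cutoff/(fps/2), 'low', analog=False)
padlen = 3 * (max(len(a), len(b)) - 1)             # same threshold as in the commit

too_short = np.random.rand(padlen, 2)              # too few samples for filtfilt's edge padding
try:
    signal.filtfilt(b, a, too_short, axis=0)
except ValueError as err:
    print('filtfilt rejected the short signal:', err)

long_enough = np.random.rand(padlen + 50, 2)
filtered = signal.filtfilt(b, a, long_enough, axis=0)
print(filtered.shape)                              # (56, 2): filtering succeeds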
@@ -713,7 +729,13 @@ def synchronize_cams_all(config_dict):
        df_coords[i] = df_coords[i][kpt_indices]
        df_coords[i] = df_coords[i].apply(interpolate_zeros_nans, axis=0, args = ['linear'])
        df_coords[i] = df_coords[i].bfill().ffill()
-        df_coords[i] = pd.DataFrame(signal.filtfilt(b, a, df_coords[i], axis=0))
+        if df_coords[i].shape[0] > padlen:
+            df_coords[i] = pd.DataFrame(signal.filtfilt(b, a, df_coords[i], axis=0))
+        else:
+            logging.warning(
+                f"Camera {i}: insufficient number of samples ({df_coords[i].shape[0]} < {padlen + 1}) to apply the Butterworth filter. "
+                "Data will remain unfiltered."
+            )
 
 
     # Compute sum of speeds
@@ -727,8 +749,14 @@ def synchronize_cams_all(config_dict):
 
        # # Replace 0 by random values, otherwise 0 padding may lead to unreliable correlations
        # sum_speeds[i].loc[sum_speeds[i] < 1] = sum_speeds[i].loc[sum_speeds[i] < 1].apply(lambda x: np.random.normal(0,1))
-
-        sum_speeds[i] = pd.DataFrame(signal.filtfilt(b, a, sum_speeds[i], axis=0)).squeeze()
+
+        if sum_speeds[i].shape[0] > padlen:
+            sum_speeds[i] = pd.DataFrame(signal.filtfilt(b, a, sum_speeds[i], axis=0)).squeeze()
+        else:
+            logging.warning(
+                f"Camera {i}: insufficient number of samples ({sum_speeds[i].shape[0]} < {padlen + 1}) to apply the Butterworth filter. "
+                "Data will remain unfiltered."
+            )
 
 
     # Compute offset for best synchronization:
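For completeness, the same filter-or-warn pattern in isolation, applied to a small pandas DataFrame of coordinates. This is a usage sketch with made-up data; the column layout and filter settings are assumptions, not repository values.

import logging
import numpy as np
import pandas as pd
from scipy import signal

logging.basicConfig(level=logging.INFO)

b, a = signal.butter(2, 6/(60/2), 'low', analog=False)    # assumed 2nd-order, 6 Hz cutoff at 60 fps
padlen = 3 * (max(len(a), len(b)) - 1)

coords = pd.DataFrame(np.random.rand(40, 4))               # 40 frames, two keypoints (x, y each)
if coords.shape[0] > padlen:
    coords = pd.DataFrame(signal.filtfilt(b, a, coords, axis=0))
else:
    logging.warning(f"Insufficient number of samples ({coords.shape[0]} < {padlen + 1}) "
                    "to apply the Butterworth filter. Data will remain unfiltered.")
print(coords.shape)                                        # (40, 4) either way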
