Fixed issue #127 with LIGHTGLUE matcher
luigifreda committed Dec 23, 2024
1 parent 917a2bf commit ac9ee8e
Showing 10 changed files with 136 additions and 17 deletions.
5 changes: 4 additions & 1 deletion .gitignore
@@ -5,6 +5,9 @@ __pycache__

.DS_Store

.vscode
!.vscode/settings.json

tmp
build
debug.txt
@@ -20,7 +23,6 @@ thirdparty/open3d
thirdparty/contextdesc/pretrained
matches.txt
map.png
.vscode
.project
data/videos/webcam

@@ -46,6 +48,7 @@ data/images/StLucia_small
data/images/SFU
data/orb_descriptors_kitti.npy
data/slam_state
data/kitti_trajectory.txt

thirdparty/pydbow3/ORBvoc.txt

105 changes: 105 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,105 @@
{
"files.associations": {
"cctype": "cpp",
"clocale": "cpp",
"cmath": "cpp",
"csignal": "cpp",
"cstdarg": "cpp",
"cstddef": "cpp",
"cstdio": "cpp",
"cstdlib": "cpp",
"cstring": "cpp",
"ctime": "cpp",
"cwchar": "cpp",
"cwctype": "cpp",
"array": "cpp",
"atomic": "cpp",
"strstream": "cpp",
"bit": "cpp",
"*.tcc": "cpp",
"bitset": "cpp",
"chrono": "cpp",
"cinttypes": "cpp",
"codecvt": "cpp",
"complex": "cpp",
"condition_variable": "cpp",
"cstdint": "cpp",
"deque": "cpp",
"forward_list": "cpp",
"list": "cpp",
"map": "cpp",
"set": "cpp",
"unordered_map": "cpp",
"unordered_set": "cpp",
"vector": "cpp",
"exception": "cpp",
"algorithm": "cpp",
"functional": "cpp",
"iterator": "cpp",
"memory": "cpp",
"memory_resource": "cpp",
"numeric": "cpp",
"optional": "cpp",
"random": "cpp",
"ratio": "cpp",
"string": "cpp",
"string_view": "cpp",
"system_error": "cpp",
"tuple": "cpp",
"type_traits": "cpp",
"utility": "cpp",
"fstream": "cpp",
"future": "cpp",
"initializer_list": "cpp",
"iomanip": "cpp",
"iosfwd": "cpp",
"iostream": "cpp",
"istream": "cpp",
"limits": "cpp",
"mutex": "cpp",
"new": "cpp",
"ostream": "cpp",
"shared_mutex": "cpp",
"sstream": "cpp",
"stdexcept": "cpp",
"streambuf": "cpp",
"thread": "cpp",
"cfenv": "cpp",
"typeindex": "cpp",
"typeinfo": "cpp",
"valarray": "cpp",
"variant": "cpp",
"*.ipp": "cpp",
"nonlinearoptimization": "cpp",
"any": "cpp",
"core": "cpp",
"numericaldiff": "cpp",
"matrixfunctions": "cpp"
},
"python.autoComplete.extraPaths": [
"${workspaceFolder}",
"${workspaceFolder}/local_features",
"${workspaceFolder}/io",
"${workspaceFolder}/utilities",
"${workspaceFolder}/depth_estimation",
"${workspaceFolder}/viz",
"${workspaceFolder}/slam",
"${workspaceFolder}/loop_closing",
"${workspaceFolder}/dense",
"${workspaceFolder}/thirdparty",
"${workspaceFolder}/cpp"
],
"python.analysis.extraPaths": [
"${workspaceFolder}",
"${workspaceFolder}/local_features",
"${workspaceFolder}/io",
"${workspaceFolder}/utilities",
"${workspaceFolder}/depth_estimation",
"${workspaceFolder}/viz",
"${workspaceFolder}/slam",
"${workspaceFolder}/loop_closing",
"${workspaceFolder}/dense",
"${workspaceFolder}/thirdparty",
"${workspaceFolder}/cpp"
]
}
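
Note: the `python.autoComplete.extraPaths` and `python.analysis.extraPaths` lists above only tell the editor's language server where to resolve pySLAM's flat imports; at runtime the corresponding folders must be on `sys.path` as well (presumably what calls like `config.cfg.set_lib('lightglue')` in the diffs below arrange for third-party modules). A hedged sketch of the runtime counterpart, with an illustrative helper name and a trimmed folder list:

```python
import os
import sys

def add_to_sys_path(root, subfolders):
    """Illustrative runtime counterpart of the extraPaths entries above:
    prepend each workspace subfolder to sys.path so flat imports such as
    `from feature_matcher import ...` resolve outside the editor too."""
    for sub in subfolders:
        path = os.path.join(root, sub)
        if os.path.isdir(path) and path not in sys.path:
            sys.path.insert(0, path)

# Example with a trimmed version of the folder list from settings.json.
add_to_sys_path(os.getcwd(), ['local_features', 'io', 'utilities', 'slam', 'thirdparty'])
```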
2 changes: 1 addition & 1 deletion README.md
@@ -255,7 +255,7 @@ Estimated trajectories can be saved in three different formats: *TUM* (The Open
SAVE_TRAJECTORY:
save_trajectory: True
format_type: tum
filename: kitti00_trajectory.txt
filename: data/kitti00_trajectory.txt
```

### SLAM GUI
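For context on the hunk above: the TUM format mentioned in the README stores one pose per line as `timestamp tx ty tz qx qy qz qw`, and the new default keeps the output under `data/`. A minimal sketch of such a writer; the function name and the pose-tuple layout are illustrative, not pySLAM's trajectory-saving API:

```python
import os

def save_tum_trajectory(filename, poses):
    """Sketch: write one pose per line in TUM format (timestamp tx ty tz qx qy qz qw).
    The `poses` layout is an assumption for illustration only."""
    os.makedirs(os.path.dirname(filename) or '.', exist_ok=True)
    with open(filename, 'w') as f:
        for t, (tx, ty, tz), (qx, qy, qz, qw) in poses:
            f.write(f'{t:.6f} {tx:.6f} {ty:.6f} {tz:.6f} '
                    f'{qx:.6f} {qy:.6f} {qz:.6f} {qw:.6f}\n')

# One identity pose at t = 0, saved under data/ as in the new default path above.
save_tum_trajectory('data/kitti00_trajectory.txt',
                    [(0.0, (0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0))])
```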
7 changes: 5 additions & 2 deletions local_features/feature_matcher.py
@@ -35,7 +35,6 @@
import kornia.feature as KF
import numpy as np

from frame import Frame
import config
config.cfg.set_lib('xfeat')
config.cfg.set_lib('lightglue')
@@ -332,13 +331,15 @@ def match(self, img1, img2, des1, des2, kps1=None, kps2=None, ratio_test=None,
oris1 = None
oris2 = None
if kps1 is None and kps2 is None:
return [], []
print('FeatureMatcher.match: kps1 and kps2 are None')
return result
else:
# convert from list of keypoints to an array of points if needed
if not isinstance(kps1, np.ndarray) or kps1.dtype != np.float32:
if self.detector_type == FeatureDetectorTypes.LIGHTGLUESIFT:
scales1 = np.array([x.size for x in kps1], dtype=np.float32)
oris1 = np.array([x.angle for x in kps1], dtype=np.float32)
print(f'kps1: {kps1}')
kps1 = np.array([x.pt for x in kps1], dtype=np.float32)
if kVerbose:
print('kps1.shape:',kps1.shape,' kps1.dtype:',kps1.dtype)
@@ -349,6 +350,8 @@ def match(self, img1, img2, des1, des2, kps1=None, kps2=None, ratio_test=None,
kps2 = np.array([x.pt for x in kps2], dtype=np.float32)
if kVerbose:
print('kps2.shape:',kps2.shape,' kps2.dtype:',kps2.dtype)
if kVerbose:
print(f'image1.shape: {img1.shape}, image2.shape: {img2.shape}')
img1_shape = img1.shape[0:2]
d0={
'keypoints': torch.tensor(kps1, device=self.torch_device).unsqueeze(0),
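The hunks above guard `FeatureMatcher.match` against missing keypoints and convert OpenCV keypoint lists into the float32 coordinate arrays (plus per-keypoint sizes and angles for LIGHTGLUESIFT) that the LightGlue path expects. A standalone sketch of that conversion, with an illustrative helper name:

```python
import cv2
import numpy as np

def keypoints_to_arrays(kps):
    """Sketch of the conversion done in FeatureMatcher.match: turn a list of
    cv2.KeyPoint into float32 arrays of points, sizes and angles."""
    if kps is None or len(kps) == 0:
        # Mirrors the new early exit: nothing to match without keypoints.
        return None, None, None
    if isinstance(kps, np.ndarray) and kps.dtype == np.float32:
        return kps, None, None  # already an (N, 2) array of coordinates
    pts = np.array([k.pt for k in kps], dtype=np.float32)        # (N, 2) x, y
    sizes = np.array([k.size for k in kps], dtype=np.float32)    # scales for LIGHTGLUESIFT
    angles = np.array([k.angle for k in kps], dtype=np.float32)  # orientations for LIGHTGLUESIFT
    return pts, sizes, angles

# Example with hand-built keypoints (x, y, size, angle):
kps = [cv2.KeyPoint(10.0, 20.0, 31.0, 45.0), cv2.KeyPoint(50.0, 60.0, 31.0, 90.0)]
pts, sizes, angles = keypoints_to_arrays(kps)   # pts.shape == (2, 2)
```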
4 changes: 2 additions & 2 deletions local_features/feature_superpoint.py
@@ -33,15 +33,15 @@
from utils_sys import Printer, is_opencv_version_greater_equal


kVerbose = True
kVerbose = False


class SuperPointOptions:
def __init__(self, do_cuda=True):
# default options from demo_superpoints
self.weights_path=config.cfg.root_folder + '/thirdparty/superpoint/superpoint_v1.pth'
print(f'SuperPoint weights: {self.weights_path}')
self.nms_dist=4
self.nms_dist=3
self.conf_thresh=0.015
self.nn_thresh=0.7

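The `nms_dist` option lowered above is the pixel radius used by SuperPoint's non-maximum suppression: keypoints closer than `nms_dist` pixels to a stronger detection are discarded, so a smaller value keeps denser keypoints. A small self-contained sketch of that kind of greedy NMS (not the original grid-based implementation from demo_superpoints):

```python
import numpy as np

def greedy_nms(points, scores, nms_dist=3):
    """Keep only the strongest keypoint within any nms_dist-pixel neighborhood.
    points: (N, 2) array of x, y coordinates; scores: (N,) confidences."""
    order = np.argsort(-scores)  # strongest first
    kept = []
    for i in order:
        # Reject point i if it lies within nms_dist of an already kept (stronger) point.
        if all(np.max(np.abs(points[j] - points[i])) > nms_dist for j in kept):
            kept.append(i)
    return np.array(kept, dtype=int)

pts = np.array([[10.0, 10.0], [12.0, 11.0], [40.0, 40.0]])
conf = np.array([0.9, 0.5, 0.8])
print(greedy_nms(pts, conf, nms_dist=3))  # [0 2]: the weaker neighbor at (12, 11) is suppressed
```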
8 changes: 4 additions & 4 deletions local_features/feature_tracker_configs.py
@@ -238,7 +238,7 @@ def get_config_from_name(config_name):
detector_type = FeatureDetectorTypes.SUPERPOINT,
descriptor_type = FeatureDescriptorTypes.SUPERPOINT,
sigma_level0 = Parameters.kSigmaLevel0,
match_ratio_test = kRatioTest,
match_ratio_test = 1.0,
tracker_type = FeatureTrackerTypes.LIGHTGLUE)

LIGHTGLUE_DISK = dict(num_features=kNumFeatures, # N.B.: here, keypoints are not oriented! (i.e. keypoint.angle=0 always)
@@ -247,7 +247,7 @@ def get_config_from_name(config_name):
detector_type = FeatureDetectorTypes.DISK,
descriptor_type = FeatureDescriptorTypes.DISK,
sigma_level0 = Parameters.kSigmaLevel0,
match_ratio_test = kRatioTest,
match_ratio_test = 1.0,
tracker_type = FeatureTrackerTypes.LIGHTGLUE)

LIGHTGLUE_ALIKED = dict(num_features=kNumFeatures, # N.B.: here, keypoints are not oriented! (i.e. keypoint.angle=0 always)
@@ -256,7 +256,7 @@ def get_config_from_name(config_name):
detector_type = FeatureDetectorTypes.ALIKED,
descriptor_type = FeatureDescriptorTypes.ALIKED,
sigma_level0 = Parameters.kSigmaLevel0,
match_ratio_test = kRatioTest,
match_ratio_test = 1.0,
tracker_type = FeatureTrackerTypes.LIGHTGLUE)

LIGHTGLUESIFT = dict(num_features=kNumFeatures,
@@ -265,7 +265,7 @@ def get_config_from_name(config_name):
detector_type = FeatureDetectorTypes.LIGHTGLUESIFT,
descriptor_type = FeatureDescriptorTypes.LIGHTGLUESIFT,
sigma_level0 = Parameters.kSigmaLevel0,
match_ratio_test = kRatioTest,
match_ratio_test = 1.0,
tracker_type = FeatureTrackerTypes.LIGHTGLUE)

DELF = dict(num_features=kNumFeatures,
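All four LightGlue configs above switch `match_ratio_test` from `kRatioTest` to `1.0`, which effectively turns Lowe's ratio test into a pass-through: LightGlue already returns filtered one-to-one matches, so an extra distance-ratio filter can needlessly discard them. A tiny sketch of why a ratio of 1.0 is a no-op (hypothetical helper, not pySLAM code):

```python
def passes_ratio_test(best_dist, second_best_dist, ratio=0.75):
    """Lowe's ratio test: accept only if the best match is clearly better than
    the runner-up. With ratio = 1.0 any best strictly below the second-best
    passes, so the filter is effectively disabled."""
    return best_dist < ratio * second_best_dist

print(passes_ratio_test(0.70, 0.80, ratio=0.75))  # False with the classic threshold
print(passes_ratio_test(0.70, 0.80, ratio=1.0))   # True: ratio 1.0 lets it through
```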
2 changes: 1 addition & 1 deletion local_features/feature_types.py
@@ -142,7 +142,7 @@ class FeatureInfo(object):
max_descriptor_distance[FeatureDescriptorTypes.FREAK] = 180 # FREAK
#
norm_type[FeatureDescriptorTypes.SUPERPOINT] = cv2.NORM_L2
max_descriptor_distance[FeatureDescriptorTypes.SUPERPOINT] = 1.30 # SUPERPOINT
max_descriptor_distance[FeatureDescriptorTypes.SUPERPOINT] = 2.878 # SUPERPOINT
#
norm_type[FeatureDescriptorTypes.XFEAT] = cv2.NORM_L2
max_descriptor_distance[FeatureDescriptorTypes.XFEAT] = 1.9877 # XFEAT
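The raised `max_descriptor_distance` for SuperPoint relaxes the L2 gate applied when checking candidate descriptor matches. A minimal sketch of how such a per-descriptor-type threshold is typically used (illustrative helper, not the FeatureInfo API):

```python
import numpy as np

def descriptor_match_ok(d1, d2, max_distance=2.878):
    """Accept a candidate match only if the L2 distance between the descriptors
    is below the per-descriptor-type threshold (2.878 is the new SuperPoint value)."""
    return float(np.linalg.norm(d1 - d2)) < max_distance

d1 = np.random.rand(256).astype(np.float32)                    # SuperPoint descriptors are 256-D
d2 = d1 + 0.01 * np.random.randn(256).astype(np.float32)
print(descriptor_match_ok(d1, d2))                             # True: nearly identical descriptors
```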
10 changes: 7 additions & 3 deletions slam/frame.py
@@ -38,6 +38,7 @@
from utils_sys import myjet, Printer

from feature_types import FeatureInfo
from feature_matcher import FeatureMatcherTypes
from concurrent.futures import ThreadPoolExecutor

from utils_draw import draw_feature_matches
@@ -307,6 +308,9 @@ def set_tracker(feature_tracker, force=False):
FrameShared.descriptor_distance = feature_tracker.feature_manager.descriptor_distance
FrameShared.descriptor_distances = feature_tracker.feature_manager.descriptor_distances
FrameShared.oriented_features = feature_tracker.feature_manager.oriented_features
if FrameShared.feature_matcher.matcher_type == FeatureMatcherTypes.LIGHTGLUE or \
FrameShared.feature_matcher.matcher_type == FeatureMatcherTypes.LOFTR:
FrameShared.is_store_imgs = True


# for parallel stereo processing
@@ -1022,7 +1026,7 @@ def are_map_points_visible(frame1: Frame, frame2: Frame, map_points1, sR21: np.n
# match frames f1 and f2
# out: a vector of match index pairs [idx1[i],idx2[i]] such that the keypoint f1.kps[idx1[i]] is matched with f2.kps[idx2[i]]
def match_frames(f1: Frame, f2: Frame, ratio_test=None):
matching_result = FrameShared.feature_matcher.match(f1.img, f2.img, f1.des, f2.des, ratio_test)
matching_result = FrameShared.feature_matcher.match(f1.img, f2.img, f1.des, f2.des, kps1=f1.kps, kps2=f2.kps, ratio_test=ratio_test)
return matching_result
# idxs1, idxs2 = matching_result.idxs1, matching_result.idxs2
# idxs1 = np.asarray(idxs1)
@@ -1036,7 +1040,7 @@ def compute_frame_matches_threading(target_frame: Frame, other_frames: list, \
timer = Timer()
def thread_match_function(kf_pair):
kf1,kf2 = kf_pair
matching_result = FrameShared.feature_matcher.match(kf1.img, kf2.img, kf1.des, kf2.des, ratio_test)
matching_result = FrameShared.feature_matcher.match(kf1.img, kf2.img, kf1.des, kf2.des, kps1=kf1.kps, kps2=kf2.kps, ratio_test=ratio_test)
idxs1, idxs2 = matching_result.idxs1, matching_result.idxs2
match_idxs[(kf1, kf2)] = (np.array(idxs1),np.array(idxs2))
kf_pairs = [(target_frame, kf) for kf in other_frames if kf is not target_frame and not kf.is_bad]
@@ -1056,7 +1060,7 @@ def compute_frame_matches(target_frame: Frame, other_frames: list, \
for kf in other_frames:
if kf is target_frame or kf.is_bad:
continue
matching_result = FrameShared.feature_matcher.match(target_frame.img, kf.img, target_frame.des, kf.des, ratio_test)
matching_result = FrameShared.feature_matcher.match(target_frame.img, kf.img, target_frame.des, kf.des, kps1=target_frame.kps, kps2=kf.kps, ratio_test=ratio_test)
idxs1, idxs2 = matching_result.idxs1, matching_result.idxs2
match_idxs[(target_frame, kf)] = (idxs1, idxs2)
else:
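The frame.py hunks do two related things: when the active matcher is LightGlue or LoFTR, frames now keep their images (`FrameShared.is_store_imgs = True`), and the `match_frames`/`compute_frame_matches` helpers forward the frames' keypoints to `FeatureMatcher.match`, since these learned matchers work on keypoint coordinates and images rather than descriptors alone. A hedged sketch of the updated call pattern; the wrapper name is illustrative, the frame attributes follow pySLAM's Frame:

```python
def match_two_frames(matcher, f1, f2, ratio_test=None):
    """Sketch of the updated call pattern: always pass keypoints alongside
    descriptors so coordinate-aware matchers (LightGlue, LoFTR) have what
    they need."""
    return matcher.match(f1.img, f2.img, f1.des, f2.des,
                         kps1=f1.kps, kps2=f2.kps, ratio_test=ratio_test)
```

Since `kps1`/`kps2` default to `None` in the signature shown above, descriptor-only matchers can presumably just ignore the extra arguments.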
8 changes: 6 additions & 2 deletions slam/initializer.py
@@ -191,8 +191,12 @@ def process_frames(self, f_cur: Frame, img_cur, f_ref: Frame):

# find keypoint matches
matching_result = match_frames(f_cur, f_ref, kInitializerFeatureMatchRatioTest)
idxs_cur, idxs_ref = np.asarray(matching_result.idxs1), np.asarray(matching_result.idxs2)
if FrameShared.oriented_features:
idxs_cur = np.asarray(matching_result.idxs1, dtype=int) if matching_result.idxs1 is not None else np.array([], dtype=int)
idxs_ref = np.asarray(matching_result.idxs2, dtype=int) if matching_result.idxs2 is not None else np.array([], dtype=int)
if len(idxs_cur) == 0 or len(idxs_ref) == 0:
print(f'Initializer: # keypoint matches: idxs_cur: {len(idxs_cur)}, idxs_ref: {len(idxs_ref)}')
return out, is_ok
if FrameShared.oriented_features and len(idxs_cur) > 0 and len(idxs_ref) > 0:
valid_match_idxs = filter_matches_with_histogram_orientation(idxs_cur, idxs_ref, f_cur, f_ref)
if len(valid_match_idxs)>0:
idxs_cur = idxs_cur[valid_match_idxs]
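The initializer change hardens the handling of an empty matching result: index lists that may be `None` are converted to integer arrays and the method bails out early when there are no matches. A small standalone sketch of that defensive conversion (the helper name is illustrative):

```python
import numpy as np

def to_index_array(idxs):
    """Convert a possibly-None list of match indices into an int array,
    mirroring the defensive conversion added in Initializer.process_frames."""
    return np.asarray(idxs, dtype=int) if idxs is not None else np.array([], dtype=int)

idxs_cur = to_index_array(None)        # -> array([], dtype=int)
idxs_ref = to_index_array([3, 7, 11])  # -> array([ 3,  7, 11])
if len(idxs_cur) == 0 or len(idxs_ref) == 0:
    print('no keypoint matches, skip initialization for this frame pair')
```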
2 changes: 1 addition & 1 deletion slam/tracking.py
@@ -89,7 +89,7 @@

kNumMinInliersPoseOptimizationTrackFrame = 10
kNumMinInliersPoseOptimizationTrackLocalMap = 20
kNumMinInliersTrackLocalMapForNotWaitingLocalMappingIdle = 40 # defines bad tracking condition
kNumMinInliersTrackLocalMapForNotWaitingLocalMappingIdle = 50 # defines bad/weak tracking condition


kUseMotionModel = Parameters.kUseMotionModel or Parameters.kUseSearchFrameByProjection
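The tracking.py change only raises the inlier count below which local-map tracking is considered weak (from 40 to 50). A hedged sketch of how such a threshold is typically consumed; the surrounding logic here is an assumption, not pySLAM's actual tracking loop:

```python
kNumMinInliersTrackLocalMapForNotWaitingLocalMappingIdle = 50  # new, stricter value

def is_tracking_weak(num_inliers_local_map,
                     threshold=kNumMinInliersTrackLocalMapForNotWaitingLocalMappingIdle):
    """Illustrative check: with fewer local-map inliers than the threshold,
    tracking is treated as weak and the front-end should be more conservative
    (e.g. wait for local mapping to become idle before pushing new keyframes)."""
    return num_inliers_local_map < threshold

print(is_tracking_weak(42))  # True under the new threshold of 50; was False under 40
```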
