function [isLoopClosed, mapPoints, vSetKeyFrames] = helperAddLoopConnections(...
mapPoints, vSetKeyFrames, loopCandidates, currKeyFrameId, currFeatures, ...
loopEdgeNumMatches)
%helperAddLoopConnections add connections between the current key frame and
% the valid loop candidate key frames. A loop candidate is valid if it has
% enough covisible map points with the current key frame.
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2023 The MathWorks, Inc.
%#codegen
loopClosureEdge = zeros(0, 2, 'uint32');
numCandidates = size(loopCandidates,1);
if isSimMode()
[index3d1, index2d1] = findWorldPointsInView(mapPoints, currKeyFrameId);
else
[index3d1Cg, index2d1Cg] = findWorldPointsInView(mapPoints, currKeyFrameId);
index2d1 = index2d1Cg{1};
index3d1 = index3d1Cg{1};
end
validFeatures1 = currFeatures.Features(index2d1, :);
for k = 1 : numCandidates
if isSimMode()
[index3d2, index2d2] = findWorldPointsInView(mapPoints, loopCandidates(k));
else
[index3d2Cg, index2d2Cg] = findWorldPointsInView(mapPoints, loopCandidates(k));
index2d2 = index2d2Cg{1};
index3d2 = index3d2Cg{1};
end
allFeatures2 = vSetKeyFrames.Views.Features{loopCandidates(k)};
validFeatures2 = allFeatures2(index2d2, :);
indexPairs = matchFeatures(binaryFeatures(validFeatures1), binaryFeatures(validFeatures2), ...
'Unique', true, 'MaxRatio', 0.9, 'MatchThreshold', 40);
% Check if the loop candidate key frame has a strong connection with the
% current key frame
if size(indexPairs, 1) < loopEdgeNumMatches
continue
end
% Estimate the relative pose of the current key frame with respect to the
% loop candidate keyframe with the highest similarity score
worldPoints1 = mapPoints.WorldPoints(index3d1(indexPairs(:, 1)), :);
worldPoints2 = mapPoints.WorldPoints(index3d2(indexPairs(:, 2)), :);
tform1 = pose2extr(vSetKeyFrames.Views.AbsolutePose(end));
tform2 = pose2extr(vSetKeyFrames.Views.AbsolutePose(loopCandidates(k)));
worldPoints1InCamera1 = transformPointsForward(tform1, worldPoints1);
worldPoints2InCamera2 = transformPointsForward(tform2, worldPoints2);
w = warning('off','all');
if isSimMode()
[tform, inlierIndex] = estgeotform3d(...
worldPoints1InCamera1, worldPoints2InCamera2, 'similarity', 'MaxDistance', 0.1);
else
[tform, inlierIndex] = estgeotform3d(...
worldPoints1InCamera1, worldPoints2InCamera2, 'rigid', 'MaxDistance', 0.1);
end
warning(w);
% Add connection between the current key frame and the loop key frame
inlierIndexVals = inlierIndex(:);
indexPairs1 = indexPairs(inlierIndexVals, 1);
indexPairs2 = indexPairs(inlierIndexVals, 2);
index2dPairs = index2d2(indexPairs2);
index2d1Pairs = index2d1(indexPairs1);
matches = uint32([index2dPairs, index2d1Pairs]);
vSetKeyFrames = addConnection(vSetKeyFrames, loopCandidates(k), currKeyFrameId, tform, 'Matches', matches);
if isSimMode()
disp(['Loop edge added between keyframe: ', num2str(loopCandidates(k)), ' and ', num2str(currKeyFrameId)]);
end
% Fuse co-visible map points
matchedIndex3d1 = index3d1(indexPairs1);
matchedIndex3d2 = index3d2(indexPairs2);
mapPoints = updateWorldPoints(mapPoints, matchedIndex3d1, mapPoints.WorldPoints(matchedIndex3d2, :));
loopClosureEdge = [loopClosureEdge; loopCandidates(k), currKeyFrameId];
end
isLoopClosed = ~isempty(loopClosureEdge);
end
function tf = isSimMode()
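% coder.target returns an empty value when the code runs in MATLAB
% (simulation) and a non-empty target name (e.g., 'MEX') during code generation.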
tf = isempty(coder.target);
end
function [mapPoints, vSetKeyFrames, recentPointIdx] = helperCreateNewMapPoints(...
mapPoints, vSetKeyFrames, currKeyFrameId, intrinsics, scaleFactor, minNumMatches, minParallax)
%helperCreateNewMapPoints creates new map points by triangulating matched
% feature points in the current key frame and the connected key frames.
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2023 The MathWorks, Inc.
%#codegen
% Get connected key frames
KcViews = connectedViews(vSetKeyFrames, currKeyFrameId, 'MinNumMatches', minNumMatches);
KcIDs = KcViews.ViewId;
% Retrieve data of the current key frame
views = vSetKeyFrames.Views;
currPose = views.AbsolutePose(currKeyFrameId);
currFeatures = views.Features{currKeyFrameId};
currPoints = views.Points{currKeyFrameId};
currLocations = currPoints.Location;
currScales = currPoints.Scale;
% Camera projection matrix
currCamMatrix = cameraProjection(intrinsics, pose2extr(currPose));
recentPointIdx = zeros(0, 1);
for i = 1:numel(KcIDs)
kfPose = views.AbsolutePose(KcIDs(i));
if isSimMode()
[kfIndex3d, kfIndex2d] = findWorldPointsInView(mapPoints, KcIDs(i));
else
[kfIndex3dCg, kfIndex2dCg] = findWorldPointsInView(mapPoints, KcIDs(i));
kfIndex3d = kfIndex3dCg{1};
kfIndex2d = kfIndex2dCg{1};
end
xyzPoints = mapPoints.WorldPoints(kfIndex3d,:);
medianDepth = median(vecnorm(xyzPoints - kfPose.Translation, 2, 2));
% Skip the key frame if the change of view is small
isViewClose = norm(kfPose.Translation - currPose.Translation)/medianDepth < 0.01;
if isViewClose
continue
end
% Retrieve data of the connected key frame
kfFeatures = views.Features{KcIDs(i)};
kfPoints = views.Points{KcIDs(i)};
kfLocations = kfPoints.Location;
kfScales = kfPoints.Scale;
% currIndex2d changes in each iteration as new map points are created
if isSimMode()
[~, currIndex2d] = findWorldPointsInView(mapPoints, currKeyFrameId);
else
[~, currIndex2dCg] = findWorldPointsInView(mapPoints, currKeyFrameId);
currIndex2d = currIndex2dCg{1};
end
% Only use unmatched feature points
uIndices1 = setdiff(uint32(1:size(kfFeatures,1))', kfIndex2d, 'stable');
uIndices2 = setdiff(uint32(1:size(currFeatures,1))', currIndex2d, 'stable');
uFeatures1 = kfFeatures(uIndices1, :);
uFeatures2 = currFeatures(uIndices2, :);
uLocations1 = kfLocations(uIndices1, :);
uLocations2 = currLocations(uIndices2, :);
uScales1 = kfScales(uIndices1);
uScales2 = currScales(uIndices2);
indexPairs = matchFeatures(binaryFeatures(uFeatures1), binaryFeatures(uFeatures2),...
'Unique', true, 'MaxRatio', 0.7, 'MatchThreshold', 40);
if isempty(indexPairs)
continue
end
matchedPoints1 = uLocations1(indexPairs(:,1), :);
matchedPoints2 = uLocations2(indexPairs(:,2), :);
% Epipole in the current key frame
epiPole = world2img(kfPose.Translation, pose2extr(currPose), intrinsics);
distToEpipole = vecnorm(matchedPoints2 - epiPole, 2, 2);
% Compute fundamental matrix
F = computeF(intrinsics, kfPose, currPose);
% Epipolar line in the second image
epiLine = epipolarLine(F, matchedPoints2);
distToLine = abs(sum(epiLine.* [matchedPoints1, ones(size(matchedPoints1,1), 1)], 2))./...
sqrt(sum(epiLine(:,1:2).^2, 2));
isValid = distToLine < 2*uScales2(indexPairs(:,2)) & ...
distToEpipole > 10*uScales2(indexPairs(:,2));
indexPairs = indexPairs(isValid, :);
matchedPoints1 = matchedPoints1(isValid, :);
matchedPoints2 = matchedPoints2(isValid, :);
% Parallax check
isLarge = isLargeParallax(matchedPoints1, matchedPoints2, kfPose, ...
currPose, intrinsics, minParallax);
matchedPoints1 = matchedPoints1(isLarge, :);
matchedPoints2 = matchedPoints2(isLarge, :);
indexPairs = indexPairs(isLarge, :);
kfCamMatrix = cameraProjection(intrinsics, pose2extr(kfPose));
% Triangulate two views to create new world points
[xyzPoints, reprojectionErrors, validIdx] = triangulate(matchedPoints1, ...
matchedPoints2, kfCamMatrix, currCamMatrix);
% Filtering by view direction and reprojection error
inlier = filterTriangulatedMapPoints(xyzPoints, kfPose, currPose, ...
uScales1(indexPairs(:,1)), uScales2(indexPairs(:,2)), ...
reprojectionErrors, scaleFactor, validIdx);
% Add new map points and update connections
if any(inlier)
xyzPoints = xyzPoints(inlier,:);
indexPairs = indexPairs(inlier, :);
mIndices1 = uIndices1(indexPairs(:, 1));
mIndices2 = uIndices2(indexPairs(:, 2));
[mapPoints, indices] = addWorldPoints(mapPoints, xyzPoints);
recentPointIdx = [recentPointIdx; indices]; %#ok<AGROW>
% Add new observations
mapPoints = addCorrespondences(mapPoints, KcIDs(i),indices, mIndices1);
mapPoints = addCorrespondences(mapPoints, currKeyFrameId, indices, mIndices2);
% Update connections with new feature matches
if isSimMode()
[~,ia] = intersect(vSetKeyFrames.Connections{:,1:2}, ...
[KcIDs(i), currKeyFrameId], 'row', 'stable');
oldMatches = vSetKeyFrames.Connections.Matches{ia};
else
connections = vSetKeyFrames.Connections;
[~,ia] = intersect([connections.ViewId1, connections.ViewId2], ...
[KcIDs(i), currKeyFrameId], 'row', 'stable');
oldMatches = connections.Matches{ia};
end
newMatches = [oldMatches; mIndices1, mIndices2];
vSetKeyFrames = updateConnection(vSetKeyFrames, KcIDs(i), currKeyFrameId, ...
'Matches', newMatches);
end
end
end
function F = computeF(intrinsics, pose1, pose2)
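% Fundamental matrix from the relative pose between the two views:
% F = K^(-T) * [t12]_x * R12 * K^(-1), where [t12]_x is the skew-symmetric
% matrix of the relative translation t12.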
R1 = pose1.R;
t1 = pose1.Translation';
R2 = pose2.R;
t2 = pose2.Translation';
R12 = R1'*R2;
t12 = R1'*(t2-t1);
% Skew symmetric matrix
t12x = [0, -t12(3), t12(2)
t12(3), 0, -t12(1)
-t12(2) t12(1), 0];
F = intrinsics.K'\ t12x * R12 / intrinsics.K;
end
function inlier = filterTriangulatedMapPoints(xyzPoints, pose1, pose2, ...
scales1, scales2, reprojectionErrors, scaleFactor, isInFront)
camToPoints1= xyzPoints - pose1.Translation;
camToPoints2= xyzPoints - pose2.Translation;
% Check scale consistency and reprojection errors
distances1 = vecnorm(camToPoints1, 2, 2);
distances2 = vecnorm(camToPoints2, 2, 2);
ratioDist = distances1./distances2;
ratioScale = scales2./scales1;
ratioFactor = 1.5 * scaleFactor;
isInScale = (ratioDist./ratioScale < ratioFactor | ...
ratioScale./ratioDist < ratioFactor);
maxError = sqrt(6);
isSmallError= reprojectionErrors < maxError*min(scales1, scales2);
inlier = isInScale & isSmallError & isInFront;
end
function isLarge = isLargeParallax(points1, points2, pose1, pose2, intrinsics, minParallax)
% Parallax check
ray1 = [points1, ones(size(points1,1),1)]/intrinsics.K' *pose1.R';
ray2 = [points2, ones(size(points2,1),1)]/intrinsics.K' *pose2.R';
cosParallax = sum(ray1 .* ray2, 2) ./(vecnorm(ray1, 2, 2) .* vecnorm(ray2, 2, 2));
isLarge = cosParallax < cosd(minParallax) & cosParallax > 0;
end
function tf = isSimMode()
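% coder.target returns an empty value when the code runs in MATLAB
% (simulation) and a non-empty target name (e.g., 'MEX') during code generation.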
tf = isempty(coder.target);
end
function camPoses = helperImportGroundTruth(fileName, imds)
%helperImportGroundTruth Import ground truth camera poses from a .txt file
%
% This is an example helper function that is subject to change or removal
% in future releases.
%
% Copyright 2023 The MathWorks, Inc.
% Set up the Import Options and import the data
opts = delimitedTextImportOptions("NumVariables", 9);
% Specify range and delimiter
opts.DataLines = [4, Inf];
opts.Delimiter = " ";
% Specify column names and types
opts.VariableNames = ["timestamp", "tx", "ty", "tz", "qx", "qy", "qz", "qw", "Var9"];
opts.SelectedVariableNames = ["timestamp", "tx", "ty", "tz", "qx", "qy", "qz", "qw"];
opts.VariableTypes = ["double", "double", "double", "double", "double", "double", "double", "double", "string"];
% Specify file level properties
opts.ExtraColumnsRule = "ignore";
opts.EmptyLineRule = "read";
opts.ConsecutiveDelimitersRule = "join";
opts.LeadingDelimitersRule = "ignore";
% Specify variable properties
opts = setvaropts(opts, "Var9", "WhitespaceRule", "preserve");
opts = setvaropts(opts, "Var9", "EmptyFieldRule", "auto");
% Import the data
groundtruth = readtable(fileName, opts);
% Initialize the output
camPoses = repmat(rigidtform3d, 1, numel(imds.Files));
firstT = [groundtruth(1,:).tx groundtruth(1,:).ty groundtruth(1,:).tz];
firstR = quat2rotm([groundtruth(1,:).qw groundtruth(1,:).qx groundtruth(1,:).qy groundtruth(1,:).qz]);
% Find the ground truth data corresponding to the image based on the timestamp
[~,imageTS] = fileparts(imds.Files);
imgTimestamp = str2double(imageTS);
index = interp1(groundtruth.timestamp, 1:numel(groundtruth.timestamp), imgTimestamp, 'nearest', 'extrap');
for i=1:numel(imds.Files)
groundtruth_raw = groundtruth(index(i),:);
% Transform all the camera poses to the coordinate system of the first camera
% Rotation
q = [groundtruth_raw.qw groundtruth_raw.qx groundtruth_raw.qy groundtruth_raw.qz];
camPoses(i).R = firstR'*quat2rotm(q);
% Translation
camPoses(i).Translation = ([groundtruth_raw.tx groundtruth_raw.ty groundtruth_raw.tz]-firstT) * firstR;
end
end
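% Example usage (a minimal sketch; assumes the TUM RGB-D layout, in which
% image file names are timestamps and poses are stored in groundtruth.txt):
%   imds = imageDatastore('rgbd_dataset_freiburg3_long_office_household/rgb');
%   gTruthPoses = helperImportGroundTruth( ...
%       'rgbd_dataset_freiburg3_long_office_household/groundtruth.txt', imds);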
function [xyzPoint, camPoses] = helperMonoVisualSLAM(image)
% Copyright 2023 The MathWorks, Inc.
%#codegen
persistent vslam xyzPointsInternal camPosesInternal
if isempty(vslam)
% Create a monovslam class to process the image data
focalLength = [535.4, 539.2]; % in units of pixels
principalPoint = [320.1, 247.6]; % in units of pixels
imageSize = [480, 640]; % in units of pixels
intrinsics = cameraIntrinsics(focalLength, principalPoint, imageSize);
vslam = monovslam(intrinsics);
end
% Process each image frame
addFrame(vslam, image);
% Get 3-D map points and camera poses
if isempty(xyzPointsInternal) || hasNewKeyFrame(vslam)
xyzPointsInternal = mapPoints(vslam);
camPosesInternal = poses(vslam);
end
xyzPoint = xyzPointsInternal;
% Convert camera poses to homogeneous transformation matrices
camPoses = cat(3, camPosesInternal.A);
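% Example usage (a minimal sketch; assumes 640x480 RGB frames that match the
% intrinsics above, read from an imageDatastore; imagesPath is hypothetical):
%   imds = imageDatastore(imagesPath);
%   while hasdata(imds)
%       [xyzPoints, camPoses] = helperMonoVisualSLAM(read(imds));
%   end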
classdef helperVisualizeMatchedFeatures < handle
%helperVisualizeMatchedFeatures show the matched features in a frame
%
% This is an example helper class that is subject to change or removal
% in future releases.
% Copyright 2019-2020 The MathWorks, Inc.
properties (Access = private)
Image
Feature
end
methods (Access = public)
function obj = helperVisualizeMatchedFeatures(I, featurePoints)
% Plot image
hFig = figure;
hAxes = newplot(hFig);
% Set figure visibility and position
hFig.Visible = 'on';
movegui(hFig, [300 220]);
% Show the image
obj.Image = imshow(I, 'Parent', hAxes, 'Border', 'tight');
title(hAxes, 'Matched Features in Current Frame');
hold(hAxes, 'on');
% Plot features
plot(featurePoints, hAxes, 'ShowOrientation',false, ...
'ShowScale',false);
obj.Feature = findobj(hAxes.Parent,'Type','Line');
end
function updatePlot(obj, I, featurePoints)
locations = featurePoints.Location;
obj.Image.CData = I;
obj.Feature.XData = locations(:,1);
obj.Feature.YData = locations(:,2);
drawnow limitrate
end
end
end
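% Example usage (a minimal sketch; the scale factor 1.2 and 8 pyramid levels
% are typical ORB settings assumed here, not values required by this class):
%   [features, points] = helperDetectAndExtractFeatures(I, 1.2, 8);
%   vis = helperVisualizeMatchedFeatures(I, points);
%   updatePlot(vis, Inext, pointsNext); % refresh with the next frame's data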
### Sub-directory initiative
The intent behind this subdirectory is to lay out the foundations for various Data Acquisition techniques.
The final product will allow users to select each of these foundational methods as a standalone system for
generating the data that we perform VSLAM on. There are two possible sources for this task:
1. Acquire frames to perform VSLAM from existing data-sets
2. Acquire data from DJI Tello drone
#### Performing VSLAM using online dataset
- Open the MATLAB console
- Install the necessary MathWorks libraries: Vision and Visual SLAM (you will be prompted to download any missing packages)
- In the MATLAB console, set imagesPath to 'rgbd_dataset_freiburg3_long_office_household/rgb'
- Run the vslam_implementation.m script with imagesPath as input
- Use the worldpointset output to determine which key features belong to which objects (see the sketch after this list)
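A minimal console sketch of this workflow (the output name is hypothetical, and the actual signature of `vslam_implementation.m` may differ):

```matlab
% Run VSLAM over the TUM RGB-D office sequence (path relative to the dataset root)
imagesPath = 'rgbd_dataset_freiburg3_long_office_household/rgb';
wpSet = vslam_implementation(imagesPath); % assumed to return a worldpointset

% Query which key frames observe a given map point, e.g. the first one
[viewIds, featureIndices] = findViewsOfWorldPoint(wpSet, 1);
```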
#### Acquiring data from TELLO Drone
- One option being explored is to acquire the image dataset using a Java plugin for the Tello drone. The source code for this task can be found in the src directory of the following library: https://github.com/stemrobotics/Tello-SDK/tree/master
- There is a MathWorks TELLO plugin, and its implementation is provided in the main.m file (see the sketch after this list)
- This second method, however, causes fatal crashes of the MATLAB IDE, so the Java method may be preferred moving forward
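For reference, below is a minimal sketch of frame acquisition with the MathWorks Ryze Tello support package; this is an assumption about what main.m does, not a tested workflow, given the crashes noted above:

```matlab
% Requires the MATLAB Support Package for Ryze Tello Drones
drone = ryze("Tello");  % connect to the drone over Wi-Fi
cam = camera(drone);    % forward-facing camera
frames = cell(100, 1);
for k = 1:100
    frames{k} = snapshot(cam); % capture one RGB frame for the VSLAM pipeline
end
clear cam drone
```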
function [isLoopClosed, mapPoints, vSetKeyFrames] = helperAddLoopConnections(...
mapPoints, vSetKeyFrames, loopCandidates, currKeyFrameId, currFeatures, ...
currPoints, intrinsics, scaleFactor, loopEdgeNumMatches)
%helperAddLoopConnections add connections between the current key frame and
% the valid loop candidate key frames. A loop candidate is valid if it has
% enough covisible map points with the current key frame.
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2020 The MathWorks, Inc.
imageSize = intrinsics.ImageSize;
numCandidates = size(loopCandidates,1);
[index3d1, index2d1] = findWorldPointsInView(mapPoints, currKeyFrameId);
validFeatures1 = currFeatures.Features(index2d1, :);
validPoints1 = currPoints(index2d1).Location;
for k = numCandidates : -1 : 1
[index3d2, index2d2] = findWorldPointsInView(mapPoints, loopCandidates(k));
allFeatures2 = vSetKeyFrames.Views.Features{loopCandidates(k)};
validFeatures2 = allFeatures2(index2d2, :);
allPoints2 = vSetKeyFrames.Views.Points{loopCandidates(k)};
validPoints2 = allPoints2(index2d2);
indexPairs = matchFeatures(binaryFeatures(validFeatures1), binaryFeatures(validFeatures2), ...
'Unique', true, 'MaxRatio', 0.9, 'MatchThreshold', 40);
% Check if all the candidate key frames have strong connection with the
% current keyframe
if size(indexPairs, 1) < loopEdgeNumMatches
isLoopClosed = false;
return
end
% Estimate the relative pose of the current key frame with respect to the
% loop candidate keyframe with the highest similarity score
if k == 1
worldPoints = mapPoints.WorldPoints(index3d2(indexPairs(:,2)),:);
matchedImagePoints = cast(validPoints1(indexPairs(:,1),:), 'like', worldPoints);
[worldOrientation, worldLocation] = estimateWorldCameraPose(matchedImagePoints, worldPoints, intrinsics, ...
'Confidence', 95, 'MaxReprojectionError', 3.5, 'MaxNumTrials', 1e4);
cameraPose = rigid3d(worldOrientation, worldLocation);
[R, t] = cameraPoseToExtrinsics(cameraPose.Rotation, cameraPose.Translation);
xyzPoints = mapPoints.WorldPoints(index3d2,:);
projectedPoints = worldToImage(intrinsics, R, t, xyzPoints);
isInImage = find(projectedPoints(:,1)<imageSize(2) & projectedPoints(:,1)>0 & ...
projectedPoints(:,2)< imageSize(1) & projectedPoints(:,2)>0);
minScales = validPoints2.Scale(isInImage)/scaleFactor;
maxScales = validPoints2.Scale(isInImage)*scaleFactor;
r = 4;
searchRadius = r*validPoints2.Scale(isInImage);
matchedIndexPairs = helperMatchFeaturesInRadius(validFeatures2(isInImage,:), currFeatures.Features, ...
projectedPoints(isInImage,:), currPoints, searchRadius, minScales, maxScales);
matchedIndexPairs(:,1) = isInImage(matchedIndexPairs(:,1));
visiblePointsIndex = index3d2(matchedIndexPairs(:,1));
validWorldPoints = mapPoints.WorldPoints(visiblePointsIndex, :);
matchedImagePoints = currPoints.Location(matchedIndexPairs(:,2),:);
% Refine the pose
cameraPose = bundleAdjustmentMotion(validWorldPoints, matchedImagePoints, ...
cameraPose, intrinsics, 'PointsUndistorted', true, 'AbsoluteTolerance', 1e-7,...
'RelativeTolerance', 1e-15, 'MaxIteration', 50);
% Fuse covisible map points
[matchedIndex2d1, ia1, ib1] = intersect(index2d1, matchedIndexPairs(:,2), 'stable');
matchedIndex3d1 = index3d1(ia1);
matchedIndex3d2 = index3d2(matchedIndexPairs(ib1,1));
matchedIndex2d2 = index2d2(matchedIndexPairs(ib1,1));
mapPoints = updateWorldPoints(mapPoints, matchedIndex3d1, mapPoints.WorldPoints(matchedIndex3d2, :));
% Add connection between the current key frame and the loop key frame
pose1 = vSetKeyFrames.Views.AbsolutePose(loopCandidates(k));
pose2 = cameraPose;
relPose = rigid3d(pose2.Rotation*pose1.Rotation', (pose2.Translation-pose1.Translation)*pose1.Rotation');
matches = [matchedIndex2d2, matchedIndex2d1];
vSetKeyFrames = addConnection(vSetKeyFrames, loopCandidates(k), currKeyFrameId, relPose, 'Matches', matches);
disp(['Loop edge added between keyframe: ', num2str(loopCandidates(k)), ' and ', num2str(currKeyFrameId)]);
% Add connections between the current key frame and the connected
% key frames of the loop key frame
neighborViews = connectedViews(vSetKeyFrames, loopCandidates(k));
for m = 1:numel(neighborViews.ViewId)
neighborViewId = neighborViews.ViewId(m);
[index3d3, index2d3] = findWorldPointsInView(mapPoints, neighborViewId);
[covPointsIndices, ia2, ib2] = intersect(index3d3, matchedIndex3d2, 'stable');
if numel(covPointsIndices) > loopEdgeNumMatches
pose1 = neighborViews.AbsolutePose(m);
pose2 = cameraPose;
relPose = rigid3d(pose2.Rotation*pose1.Rotation', (pose2.Translation-pose1.Translation)*pose1.Rotation');
matches = [index2d3(ia2), matchedIndex2d1(ib2)];
if ~hasConnection(vSetKeyFrames, neighborViewId, currKeyFrameId)
vSetKeyFrames = addConnection(vSetKeyFrames, neighborViewId, currKeyFrameId, relPose, 'Matches', matches);
end
disp(['Loop edge added between keyframe: ', num2str(neighborViewId), ' and ', num2str(currKeyFrameId)]);
end
end
isLoopClosed = true;
end
end
end
function [mapPoints, vSetKeyFrames] = helperAddNewKeyFrame(mapPoints, vSetKeyFrames,...
cameraPose, currFeatures, currPoints, mapPointsIndices, featureIndices, keyFramesIndices)
%helperAddNewKeyFrame add key frames to the key frame set
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2020 The MathWorks, Inc.
viewId = vSetKeyFrames.Views.ViewId(end)+1;
vSetKeyFrames = addView(vSetKeyFrames, viewId, cameraPose,...
'Features', currFeatures.Features, ...
'Points', currPoints);
viewsAbsPoses = vSetKeyFrames.Views.AbsolutePose;
for i = 1:numel(keyFramesIndices)
localKeyFrameId = keyFramesIndices(i);
[index3d, index2d] = findWorldPointsInView(mapPoints, localKeyFrameId);
[~, ia, ib] = intersect(index3d, mapPointsIndices, 'stable');
prePose = viewsAbsPoses(localKeyFrameId);
relPose = rigid3d(cameraPose.Rotation*prePose.Rotation', ...
(cameraPose.Translation-prePose.Translation)*prePose.Rotation');
if numel(ia) > 5
vSetKeyFrames = addConnection(vSetKeyFrames, localKeyFrameId, viewId, relPose, ...
'Matches', [index2d(ia),featureIndices(ib)]);
end
end
mapPoints = addCorrespondences(mapPoints, viewId, mapPointsIndices, ...
featureIndices);
end
function [isDetected, loopKeyFrameIds] = helperCheckLoopClosure(vSetKeyFrames, ...
currKeyframeId, imageDatabase, currImg, imageDatabaseViewIds, loopEdgeNumMatches)
%helperCheckLoopClosure detect loop candidate key frames by retrieving
% visually similar images from the feature database.
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2020 The MathWorks, Inc.
% Retrieve all the visually similar key frames
[candidateIds, similarityScores] = retrieveImages(currImg, imageDatabase);
% Compute similarity between the current key frame and its strongly-connected
% key frames. The minimum similarity score is used as a baseline to find
% loop candidate key frames, which are visually similar to but not connected
% to the current key frame
covisViews = connectedViews(vSetKeyFrames, currKeyframeId);
covisViewsIds = covisViews.ViewId;
isStrong = helperSelectStrongConnections(vSetKeyFrames.Connections, ...
covisViewsIds, currKeyframeId, loopEdgeNumMatches);
strongCovisViewIds = covisViewsIds(isStrong);
[~, viewIds] = intersect(imageDatabaseViewIds, strongCovisViewIds, 'stable');
% Retrieve the top 10 similar connected key frames
[~,~,scores] = evaluateImageRetrieval(currImg, imageDatabase, viewIds, 'NumResults', 10);
minScore = min(scores);
% Convert from ImageID in ImageDatabase to ViewId in imageviewset
candidateViewIds = imageDatabaseViewIds(candidateIds);
[loopKeyFrameIds,ia] = setdiff(candidateViewIds, covisViewsIds, 'stable');
% Scores of non-connected key frames
candidateScores = similarityScores(ia); % Descending
if ~isempty(ia)
bestScore = candidateScores(1);
% Score must be higher than the 75% of the best score
isValid = candidateScores > max(bestScore*0.75, minScore);
loopKeyFrameIds = loopKeyFrameIds(isValid);
else
loopKeyFrameIds = [];
end
% Loop candidates need to be consecutively detected
minNumCandidates = 3; % At least 3 candidates are found
if size(loopKeyFrameIds,1) >= minNumCandidates
groups = nchoosek(loopKeyFrameIds, minNumCandidates);
consecutiveGroups = groups(max(groups,[],2) - min(groups,[],2) < 5, :);
if ~isempty(consecutiveGroups) % Consecutive candidates are found
loopKeyFrameIds = consecutiveGroups(1,:);
end
isDetected = true;
else
isDetected = false;
end
end
function [F, score, inliersIndex] = helperComputeFundamentalMatrix(matchedPoints1, matchedPoints2)
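%helperComputeFundamentalMatrix estimate the fundamental matrix between two
% sets of matched points using RANSAC, and compute a score from the
% truncated symmetric epipolar distances of the inliers. The score can be
% compared with the one from helperComputeHomography to select a model.
%
% This is an example helper function that is subject to change or removal
% in future releases.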
[F, inliersLogicalIndex] = estimateFundamentalMatrix( ...
matchedPoints1, matchedPoints2, 'Method','RANSAC',...
'NumTrials', 1e3, 'DistanceThreshold', 0.01);
inlierPoints1 = matchedPoints1(inliersLogicalIndex);
inlierPoints2 = matchedPoints2(inliersLogicalIndex);
inliersIndex = find(inliersLogicalIndex);
locations1 = inlierPoints1.Location;
locations2 = inlierPoints2.Location;
% Distance from points to epipolar line
lineIn1 = epipolarLine(F', locations2);
error2in1 = (sum([locations1, ones(size(locations1, 1),1)].* lineIn1, 2)).^2 ...
./ sum(lineIn1(:,1:2).^2, 2);
lineIn2 = epipolarLine(F, locations1);
error1in2 = (sum([locations2, ones(size(locations2, 1),1)].* lineIn2, 2)).^2 ...
./ sum(lineIn2(:,1:2).^2, 2);
outlierThreshold = 4;
score = sum(max(outlierThreshold-error1in2, 0)) + ...
sum(max(outlierThreshold-error2in1, 0));
end
function [H, score, inliersIndex] = helperComputeHomography(matchedPoints1, matchedPoints2)
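%helperComputeHomography estimate a projective transformation between two
% sets of matched points using RANSAC, and compute a score from the
% truncated symmetric transfer errors of the inliers.
%
% This is an example helper function that is subject to change or removal
% in future releases.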
[H, inliersLogicalIndex] = estimateGeometricTransform2D( ...
matchedPoints1, matchedPoints2, 'projective', ...
'MaxNumTrials', 1e3, 'MaxDistance', 4, 'Confidence', 90);
inlierPoints1 = matchedPoints1(inliersLogicalIndex);
inlierPoints2 = matchedPoints2(inliersLogicalIndex);
inliersIndex = find(inliersLogicalIndex);
locations1 = inlierPoints1.Location;
locations2 = inlierPoints2.Location;
xy1In2 = transformPointsForward(H, locations1);
xy2In1 = transformPointsInverse(H, locations2);
error1in2 = sum((locations2 - xy1In2).^2, 2);
error2in1 = sum((locations1 - xy2In1).^2, 2);
outlierThreshold = 6;
score = sum(max(outlierThreshold-error1in2, 0)) + ...
sum(max(outlierThreshold-error2in1, 0));
end
function [mapPoints, vSetKeyFrames, recentPointIdx] = helperCreateNewMapPoints(...
mapPoints, vSetKeyFrames, currKeyFrameId, intrinsics, scaleFactor, minNumMatches, minParallax)
%helperCreateNewMapPoints creates new map points by triangulating matched
% feature points in the current key frame and the connected key frames.
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2020 The MathWorks, Inc.
% Get connected key frames
KcViews = connectedViews(vSetKeyFrames, currKeyFrameId);
KcIDs = KcViews.ViewId;
isStrong = helperSelectStrongConnections(vSetKeyFrames.Connections, KcIDs, ...
currKeyFrameId, minNumMatches);
KcIDs = KcIDs(isStrong);
% Retrieve data of the current key frame
currPose = vSetKeyFrames.Views.AbsolutePose(currKeyFrameId);
currFeatures = vSetKeyFrames.Views.Features{currKeyFrameId};
currPoints = vSetKeyFrames.Views.Points{currKeyFrameId};
currLocations = currPoints.Location;
currScales = currPoints.Scale;
% Camera projection matrix
[R1, t1] = cameraPoseToExtrinsics(currPose.Rotation, currPose.Translation);
currCamMatrix = cameraMatrix(intrinsics, R1, t1);
recentPointIdx = [];
for i = 1:numel(KcIDs)
kfPose = vSetKeyFrames.Views.AbsolutePose(KcIDs(i));
[kfIndex3d, kfIndex2d] = findWorldPointsInView(mapPoints, KcIDs(i));
xyzPoints = mapPoints.WorldPoints(kfIndex3d,:);
medianDepth = median(vecnorm(xyzPoints - kfPose.Translation, 2, 2));
% Skip the key frame if the change of view is small
isViewClose = norm(kfPose.Translation - currPose.Translation)/medianDepth < 0.01;
if isViewClose
continue
end
% Retrieve data of the connected key frame
kfFeatures = vSetKeyFrames.Views.Features{KcIDs(i)};
kfPoints = vSetKeyFrames.Views.Points{KcIDs(i)};
kfLocations = kfPoints.Location;
kfScales = kfPoints.Scale;
% currIndex2d changes in each iteration as new map points are created
currIndex2d = findWorldPointsInView(mapPoints, currKeyFrameId);
% Only use unmatched feature points
uIndices1 = setdiff(uint32(1:size(kfFeatures,1))', kfIndex2d);
uIndices2 = setdiff(uint32(1:size(currFeatures,1))', currIndex2d);
uFeatures1 = kfFeatures(uIndices1, :);
uFeatures2 = currFeatures(uIndices2, :);
uLocations1 = kfLocations(uIndices1, :);
uLocations2 = currLocations(uIndices2, :);
uScales1 = kfScales(uIndices1);
uScales2 = currScales(uIndices2);
indexPairs = matchFeatures(binaryFeatures(uFeatures1), binaryFeatures(uFeatures2),...
'Unique', true, 'MaxRatio', 0.7, 'MatchThreshold', 40);
if isempty(indexPairs)
continue
end
matchedPoints1 = uLocations1(indexPairs(:,1), :);
matchedPoints2 = uLocations2(indexPairs(:,2), :);
% Epipole in the current key frame
epiPole = worldToImage(intrinsics, currPose.Rotation, currPose.Translation, kfPose.Translation);
distToEpipole = vecnorm(matchedPoints2 - epiPole, 2, 2);
% Compute fundamental matrix
F = computeF(intrinsics, kfPose, currPose);
% Epipolar line in the second image
epiLine = epipolarLine(F, matchedPoints2);
distToLine = abs(sum(epiLine.* [matchedPoints1, ones(size(matchedPoints1,1), 1)], 2))./...
sqrt(sum(epiLine(:,1:2).^2, 2));
isValid = distToLine < 2*uScales2(indexPairs(:,2)) & ...
distToEpipole > 10*uScales2(indexPairs(:,2));
indexPairs = indexPairs(isValid, :);
matchedPoints1 = matchedPoints1(isValid, :);
matchedPoints2 = matchedPoints2(isValid, :);
% Parallax check
isLarge = isLargeParallax(matchedPoints1, matchedPoints2, kfPose, ...
currPose, intrinsics, minParallax);
matchedPoints1 = matchedPoints1(isLarge, :);
matchedPoints2 = matchedPoints2(isLarge, :);
indexPairs = indexPairs(isLarge, :);
[R2, t2] = cameraPoseToExtrinsics(kfPose.Rotation, kfPose.Translation);
kfCamMatrix = cameraMatrix(intrinsics, R2, t2);
% Triangulate two views to create new world points
[xyzPoints, reprojectionErrors, validIdx] = triangulate(matchedPoints1, ...
matchedPoints2, kfCamMatrix, currCamMatrix);
% Filtering by view direction and reprojection error
inlier = filterTriangulatedMapPoints(xyzPoints, kfPose, currPose, ...
uScales1(indexPairs(:,1)), uScales2(indexPairs(:,2)), ...
reprojectionErrors, scaleFactor, validIdx);
% Add new map points and update connections
if any(inlier)
xyzPoints = xyzPoints(inlier,:);
indexPairs = indexPairs(inlier, :);
mIndices1 = uIndices1(indexPairs(:, 1));
mIndices2 = uIndices2(indexPairs(:, 2));
[mapPoints, indices] = addWorldPoints(mapPoints, xyzPoints);
recentPointIdx = [recentPointIdx; indices]; %#ok<AGROW>
% Add new observations
mapPoints = addCorrespondences(mapPoints, KcIDs(i),indices, mIndices1);
mapPoints = addCorrespondences(mapPoints, currKeyFrameId, indices, mIndices2);
% Update connections with new feature matches
[~,ia] = intersect(vSetKeyFrames.Connections{:,1:2}, ...
[KcIDs(i), currKeyFrameId], 'row', 'stable');
oldMatches = vSetKeyFrames.Connections.Matches{ia};
newMatches = [oldMatches; mIndices1, mIndices2];
vSetKeyFrames = updateConnection(vSetKeyFrames, KcIDs(i), currKeyFrameId, ...
'Matches', newMatches);
end
end
end
function F = computeF(intrinsics, pose1, pose2)
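% Fundamental matrix from the relative pose: F = K^(-T) * [t12]_x * R12 * K^(-1),
% where K = IntrinsicMatrix' is the intrinsic matrix in premultiply convention.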
R1 = pose1.Rotation';
t1 = pose1.Translation';
R2 = pose2.Rotation';
t2 = pose2.Translation';
R12 = R1'*R2;
t12 = R1'*(t2-t1);
% Skew symmetric matrix
t12x = [0, -t12(3), t12(2)
t12(3), 0, -t12(1)
-t12(2) t12(1), 0];
K = intrinsics.IntrinsicMatrix';
F = K'\ t12x * R12 / K;
end
function inlier = filterTriangulatedMapPoints(xyzPoints, pose1, pose2, ...
scales1, scales2, reprojectionErrors, scaleFactor, isInFront)
camToPoints1= xyzPoints - pose1.Translation;
camToPoints2= xyzPoints - pose2.Translation;
% Check scale consistency and reprojection errors
distances1 = vecnorm(camToPoints1, 2, 2);
distances2 = vecnorm(camToPoints2, 2, 2);
ratioDist = distances1./distances2;
ratioScale = scales2./scales1;
ratioFactor = 1.5 * scaleFactor;
isInScale = (ratioDist./ratioScale < ratioFactor | ...
ratioScale./ratioDist < ratioFactor);
maxError = sqrt(6);
isSmallError= reprojectionErrors < maxError*min(scales1, scales2);
inlier = isInScale & isSmallError & isInFront;
end
function isLarge = isLargeParallax(points1, points2, pose1, pose2, intrinsics, minParallax)
% Parallax check
K = intrinsics.IntrinsicMatrix;
ray1 = [points1, ones(size(points1,1),1)]/K *pose1.Rotation;
ray2 = [points2, ones(size(points2,1),1)]/K *pose2.Rotation;
cosParallax = sum(ray1 .* ray2, 2) ./(vecnorm(ray1, 2, 2) .* vecnorm(ray2, 2, 2));
isLarge = cosParallax < cosd(minParallax) & cosParallax > 0;
end
function [mapPointSet, directionAndDepth, mapPointsIdx] = helperCullRecentMapPoints(mapPointSet, directionAndDepth, mapPointsIdx, newPointIdx)
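%helperCullRecentMapPoints remove recently created map points that are no
% longer tracked, update the directionAndDepth object, and shift the
% surviving indices to account for the removal.
%
% This is an example helper function that is subject to change or removal
% in future releases.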
outlierIdx = setdiff(newPointIdx, mapPointsIdx);
if ~isempty(outlierIdx)
mapPointSet = removeWorldPoints(mapPointSet, outlierIdx);
directionAndDepth = remove(directionAndDepth, outlierIdx);
mapPointsIdx = mapPointsIdx - arrayfun(@(x) nnz(x>outlierIdx), mapPointsIdx);
end
end
function [features, validPoints] = helperDetectAndExtractFeatures(Irgb, ...
scaleFactor, numLevels, varargin)
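%helperDetectAndExtractFeatures detect ORB feature points in an RGB image
% and extract their descriptors, keeping up to numPoints uniformly
% distributed points.
%
% This is an example helper function that is subject to change or removal
% in future releases.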
numPoints = 1000;
% In this example, the images are already undistorted. In a general
% workflow, uncomment the following code to undistort the images.
%
% if nargin > 3
% intrinsics = varargin{1};
% end
% Irgb = undistortImage(Irgb, intrinsics);
% Detect ORB features
Igray = rgb2gray(Irgb);
points = detectORBFeatures(Igray, 'ScaleFactor', scaleFactor, 'NumLevels', numLevels);
% Select a subset of features, uniformly distributed throughout the image
points = selectUniform(points, numPoints, size(Igray, 1:2));
% Extract features
[features, validPoints] = extractFeatures(Igray, points);
end
function rmse = helperEstimateTrajectoryError(gTruth, cameraPoses)
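%helperEstimateTrajectoryError compute the absolute RMSE between estimated
% key frame locations and ground truth, after resolving the unknown
% monocular scale with the ratio of the median distances to the origin.
%
% This is an example helper function that is subject to change or removal
% in future releases.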
locations = vertcat(cameraPoses.AbsolutePose.Translation);
gLocations = vertcat(gTruth.Translation);
scale = median(vecnorm(gLocations, 2, 2))/ median(vecnorm(locations, 2, 2));
scaledLocations = locations * scale;
rmse = sqrt(mean( sum((scaledLocations - gLocations).^2, 2) ));
disp(['Absolute RMSE for key frame trajectory (m): ', num2str(rmse)]);
end
function [isInImage, imagePoints] = helperFindProjectedPointsInImage(...
xyzPoints, cameraPose, intrinsics, imageSize)
%helperFindProjectedPointsInImage check if projected world points are within
% an image
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019 The MathWorks, Inc.
[R, t] = cameraPoseToExtrinsics(cameraPose.Rotation, cameraPose.Translation);
imagePoints = worldToImage(intrinsics, R, t, xyzPoints);
isInImage = imagePoints(:,1) < imageSize(2) & imagePoints(:,1) > 0 & ...
imagePoints(:,2) > 0 & imagePoints(:,2) < imageSize(1);
imagePoints = imagePoints(isInImage, :);
end
function scores = helperHammingDistance(features1, features2)
%helperHammingDistance compute hamming distance between two groups of
% binary feature vectors.
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019 The MathWorks, Inc.
persistent lookupTable; % lookup table for counting bits
N1 = size(features1, 1);
N2 = size(features2, 1);
scores = zeros(N1, N2);
if isempty(lookupTable)
lookupTable = zeros(256, 1);
for i = 0:255
lookupTable(i+1) = sum(dec2bin(i)-'0');
end
end
for r = 1:N1
for c = 1:N2
temp = bitxor(features1(r, :), features2(c, :));
idx = double(temp) + 1; % cast needed to avoid integer math
scores(r,c) = sum(lookupTable(idx));
end
end
end
function isKeyFrame = helperIsKeyFrame(mapPoints, ...
refKeyFrameId, lastKeyFrameIndex, currFrameIndex, mapPointsIndices)
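%helperIsKeyFrame decide whether the current frame should become a new key
% frame, based on the number of frames since the last key frame insertion
% and the number of tracked map points.
%
% This is an example helper function that is subject to change or removal
% in future releases.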
numPointsRefKeyFrame = numel(findWorldPointsInView(mapPoints, refKeyFrameId));
% More than 20 frames have passed from last key frame insertion
tooManyNonKeyFrames = currFrameIndex >= lastKeyFrameIndex + 20;
% Track less than 90 map points
tooFewMapPoints = numel(mapPointsIndices) < 90;
% Tracked map points are fewer than 90% of points tracked by
% the reference key frame
tooFewTrackedPoints = numel(mapPointsIndices) < 0.9 * numPointsRefKeyFrame;
isKeyFrame = (tooManyNonKeyFrames || tooFewMapPoints) && tooFewTrackedPoints;
end
function [mapPoints, directionAndDepth, vSetKeyFrames, newPointIdx] = helperLocalBundleAdjustment(mapPoints, ...
directionAndDepth, vSetKeyFrames, currKeyFrameId, intrinsics, newPointIdx)
%helperLocalBundleAdjustment refine the pose of the current key frame and
% the map of the surrounding scene.
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2020 The MathWorks, Inc.
% Connected key frames of the current key frame
covisViews = connectedViews(vSetKeyFrames, currKeyFrameId);
covisViewsIds = covisViews.ViewId;
% Identify the fixed key frames that are connected to the connected
% key frames of the current key frame
fixedViewIds = [];
for i = 1:numel(covisViewsIds)
if numel(fixedViewIds) > 10
break
end
tempViews = connectedViews(vSetKeyFrames, covisViewsIds(i));
tempId = tempViews.ViewId;
for j = 1:numel(tempId)
if ~ismember(tempId(j), [fixedViewIds; currKeyFrameId; covisViewsIds])
fixedViewIds = [fixedViewIds; tempId(j)]; %#ok<AGROW>
if numel(fixedViewIds) > 10
break
end
end
end
end
% Always fix the first two key frames
fixedViewIds = [fixedViewIds; intersect(covisViewsIds, [1 2])];
refinedKeyFrameIds = [unique([fixedViewIds; covisViewsIds]); currKeyFrameId];
% Indices of map points observed by local key frames
mapPointIdx = findWorldPointsInView(mapPoints, [covisViewsIds; currKeyFrameId]);
mapPointIdx = unique(vertcat(mapPointIdx{:}));
% Find point tracks across the local key frames
numPoints = numel(mapPointIdx);
tracks = repmat(pointTrack(0,[0 0]),1, numPoints);
[viewIdsAll, featureIdxAll] = findViewsOfWorldPoint(mapPoints, mapPointIdx);
locations = cellfun(@(x) x.Location, vSetKeyFrames.Views.Points, 'UniformOutput', false);
for k = 1:numPoints
% Use intersect to get the correct ViewIds in the tracks as the
% connections in vSetKeyFrames are not updated during map point culling
% or bundle adjustment
[viewIds, ia] = intersect(viewIdsAll{k},refinedKeyFrameIds, 'stable');
imagePoints = zeros(numel(ia), 2);
for i = 1:numel(ia)
imagePoints(i, :) = locations{viewIds(i)}(featureIdxAll{k}(ia(i)), :);
end
tracks(k) = pointTrack(viewIds, imagePoints);
end
xyzPoints = mapPoints.WorldPoints(mapPointIdx,:);
camPoses = poses(vSetKeyFrames, refinedKeyFrameIds);
% Refine local key frames and map points
[refinedPoints, refinedPoses, reprojectionErrors] = bundleAdjustment(...
xyzPoints, tracks, camPoses, intrinsics, 'FixedViewIDs', fixedViewIds, ...
'PointsUndistorted', true, 'AbsoluteTolerance', 1e-7,...
'RelativeTolerance', 1e-16, 'MaxIteration', 100);
maxError = 6;
isInlier = reprojectionErrors < maxError;
inlierIdx = mapPointIdx(isInlier);
outlierIdx = mapPointIdx(~isInlier);
mapPoints = updateWorldPoints(mapPoints, inlierIdx, refinedPoints(isInlier,:));
vSetKeyFrames = updateView(vSetKeyFrames, refinedPoses);
directionAndDepth = update(directionAndDepth, mapPoints, vSetKeyFrames.Views, inlierIdx, false);
newPointIdx = setdiff(newPointIdx, outlierIdx);
% Update map points and key frames
if ~isempty(outlierIdx)
mapPoints = removeWorldPoints(mapPoints, outlierIdx);
directionAndDepth = remove(directionAndDepth, outlierIdx);
end
newPointIdx = newPointIdx - arrayfun(@(x) nnz(x>outlierIdx), newPointIdx);
end
function indexPairs = helperMatchFeaturesInRadius(features1, features2, ...
projectedPoints1, points2, radius, minScales, maxScales)
%helperMatchFeaturesInRadius Match features within a radius
% indexPairs = helperMatchFeaturesInRadius(features1, features2,
% projectedPoints1, points2, radius, minScales, maxScales) returns a
% P-by-2 matrix, indexPairs, containing the indices to the features most
% likely to correspond between the two input feature matrices satisfying
% the distance and the scale constraints.
%
% This is an example helper function that is subject to change or removal
% in future releases.
%
% Inputs
% ------
% features1 - Feature matrices in the first image
% features2 - Feature matrices in the second image
% projectedPoints1 - Projections, in the second image, of the world
% points that correspond to features1
% points2 - Feature points corresponding to features2
% radius - Searching radius
% minScales - Minimum allowed scales of candidate points in points2
% maxScales - Maximum allowed scales of candidate points in points2
%
% Output
% ------
% indexPairs - Indices of corresponding features
% Copyright 2019-2020 The MathWorks, Inc.
matchThreshold = 100;
maxRatio = 0.8;
numPoints = size(projectedPoints1, 1);
indexPairs = zeros(numPoints, 2, 'uint32');
neighborScales = points2.Scale;
neighborPoints = points2.Location;
if isscalar(radius)
radius = radius * ones(numPoints, 1);
end
for i = 1: numPoints
% Find points within a radius subjected to the scale constraint
pointIdx = findPointsInRadius(neighborPoints, projectedPoints1(i,:), ...
radius(i), neighborScales, minScales(i), maxScales(i));
if ~isempty(pointIdx)
centerFeature = features1(i,:);
nearbyFeatures = features2(pointIdx,:);
if numel(pointIdx) == 1
bestIndex = pointIdx;
else
scores = helperHammingDistance(centerFeature, nearbyFeatures);
% Find the best two matches
[minScore, index] = mink(scores, 2);
if minScore(1) < matchThreshold
% Ratio test when the best two matches have the same scale
if minScore(1) > maxRatio * minScore(2)
continue
else
bestIndex = pointIdx(index(1));
end
else
continue
end
end
indexPairs(i,:) = uint32([i, bestIndex]);
end
end
isFound = indexPairs(:, 2) > 0;
indexPairs = indexPairs(isFound, :);
[~, ia] = unique(indexPairs(:, 2), 'stable');
indexPairs = indexPairs(ia, :);
end
function index = findPointsInRadius(neighborPoints, centerPoint, radius, ...
neighborScales, minScales, maxScales)
sqrDist = sum((neighborPoints - centerPoint).^2 , 2);
index = find(sqrDist < radius^2 & neighborScales >= minScales & neighborScales <= maxScales);
end