package object_detection;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
import object_detection.types.*;
import java.io.FileWriter;
import java.io.IOException;
import java.io.File;
/**
 * Unit tests for {@link CameraIntrinsics}: verifies that focal length,
 * principal point, image size, and the K matrix are parsed from a CSV
 * configuration file.
 */
class CameraIntrinsicsTests {

    @Test
    void testCameraIntrinsicsInitialization() throws IOException {
        // Create a temporary configuration file for testing
        String tempFilePath = "test_intrinsics.csv";
        try (FileWriter writer = new FileWriter(tempFilePath)) {
            writer.write("1000.0,2000.0\n"); // FocalLength
            writer.write("500.0,500.0\n");   // PrincipalPoint
            writer.write("1280.0,720.0\n");  // ImageSize
            writer.write("1.0,0.0,0.0\n");   // K matrix rows
            writer.write("0.0,1.0,0.0\n");
            writer.write("0.0,0.0,1.0\n");
        }
        try {
            // Create a CameraIntrinsics object and check its attributes
            CameraIntrinsics ci = new CameraIntrinsics(tempFilePath);
            assertArrayEquals(new float[]{1000.0f, 2000.0f}, ci.getFocalLength(), "FocalLength should be correctly initialized");
            assertArrayEquals(new float[]{500.0f, 500.0f}, ci.getPrincipalPoint(), "PrincipalPoint should be correctly initialized");
            assertArrayEquals(new float[]{1280.0f, 720.0f}, ci.getImageSize(), "ImageSize should be correctly initialized");
            double[][] expectedK = new double[][]{
                {1.0, 0.0, 0.0},
                {0.0, 1.0, 0.0},
                {0.0, 0.0, 1.0}
            };
            for (int i = 0; i < 3; i++) {
                for (int j = 0; j < 3; j++) {
                    assertEquals(expectedK[i][j], ci.getK().get(i, j), 0.01, "K matrix values should match");
                }
            }
        } finally {
            // Clean up the temporary file even when an assertion fails;
            // previously a failing assert skipped the delete and leaked the file.
            new File(tempFilePath).delete();
        }
    }
}
package object_detection;
import org.ejml.data.*;
import java.io.File;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
import object_detection.types.*;
import java.io.FileWriter;
import java.io.IOException;
/**
 * Unit tests for {@link CameraPose}: verifies that the translation vector
 * and rotation matrix are parsed from a CSV configuration file.
 */
class CameraPoseTests {

    @Test
    void testCameraPoseInitialization() throws IOException {
        // Create a temporary configuration file for testing
        String tempFilePath = "test_pose.csv";
        try (FileWriter writer = new FileWriter(tempFilePath)) {
            writer.write("1.0,2.0,3.0\n"); // Translation vector
            writer.write("1.0,0.0,0.0\n"); // R matrix rows
            writer.write("0.0,1.0,0.0\n");
            writer.write("0.0,0.0,1.0\n");
        }
        try {
            // Create a CameraPose object
            CameraPose cp = new CameraPose(tempFilePath);
            // Check the translation vector
            DMatrixRMaj expectedTranslation = new DMatrixRMaj(new double[]{1.0, 2.0, 3.0});
            assertArrayEquals(expectedTranslation.data, cp.getTranslation().data, "Translation vector should be correctly initialized");
            // Check the rotation matrix
            DMatrixRMaj expectedR = new DMatrixRMaj(new double[][]{
                {1.0, 0.0, 0.0},
                {0.0, 1.0, 0.0},
                {0.0, 0.0, 1.0}
            });
            for (int i = 0; i < 3; i++) {
                for (int j = 0; j < 3; j++) {
                    assertEquals(expectedR.get(i, j), cp.getR().get(i, j), 0.01, "R matrix values should match");
                }
            }
        } finally {
            // Clean up the temporary file even when an assertion fails;
            // previously a failing assert skipped the delete and leaked the file.
            new File(tempFilePath).delete();
        }
    }
}
package object_detection;
import org.junit.jupiter.api.BeforeEach;
import object_detection.types.*;
import java.io.FileWriter;
import java.io.IOException;
import java.io.FileNotFoundException;
/**
 * Fixture for {@link Frame} tests: builds a CameraPose and a YOLO-style
 * bounding-box CSV, then constructs a Frame from them.
 */
class FrameTests {

    private CameraPose cameraPose;
    private Frame frame;

    @BeforeEach
    void setUp() throws IOException, FileNotFoundException {
        // Create a CameraPose for the frame
        String tempPoseFile = "test_pose.csv";
        try (FileWriter writer = new FileWriter(tempPoseFile)) {
            writer.write("1.0,2.0,3.0\n"); // Translation vector
            writer.write("1.0,0.0,0.0\n"); // R matrix rows
            writer.write("0.0,1.0,0.0\n");
            writer.write("0.0,0.0,1.0\n");
        }
        cameraPose = new CameraPose(tempPoseFile);
        // Create a temporary bbox file for the frame
        String tempBboxFile = "test_bbox.csv";
        try (FileWriter writer = new FileWriter(tempBboxFile)) {
            writer.write("Class,x,y,w,h\n"); // Header
            writer.write("vehicle,10,10,20,20\n");
            writer.write("animal,50,50,30,30\n");
        }
        // Initialize the Frame object
        frame = new Frame(tempBboxFile, cameraPose);
        // Clean up: the constructors above have read the files, so delete them
        // here instead of leaking them into the working directory on every run.
        // (Assumes both constructors fully consume the files — consistent with
        // the other test classes in this project, which delete immediately
        // after construction.)
        new java.io.File(tempPoseFile).delete();
        new java.io.File(tempBboxFile).delete();
    }
}
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import object_detection.types.*;
import java.io.FileWriter;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.List;
import java.io.File;
import org.junit.jupiter.api.AfterEach;
class ObjectSetTests {
private CameraIntrinsics intrinsics;
private List<Point> pointCloud;
private ObjectSet os;
@BeforeEach
void setUp() {
os = new ObjectSet(); // Initialize a new ObjectSet for each test
void setUp() throws IOException, FileNotFoundException {
// Create a temporary configuration file for CameraIntrinsics
String tempIntrFile = "test_intr.csv";
try (FileWriter writer = new FileWriter(tempIntrFile)) {
writer.write("1000.0,2000.0\n"); // FocalLength
writer.write("500.0,500.0\n"); // PrincipalPoint
writer.write("1280.0,720.0\n"); // ImageSize
writer.write("1.0,0.0,0.0\n"); // K matrix rows
writer.write("0.0,1.0,0.0\n");
writer.write("0.0,0.0,1.0\n");
}
// Initialize CameraIntrinsics from the configuration file
intrinsics = new CameraIntrinsics(tempIntrFile);
// Create a temporary configuration file for CameraPose
String tempPoseFile = "test_pose.csv";
try (FileWriter writer = new FileWriter(tempPoseFile)) {
writer.write("1.0,2.0,3.0\n"); // Translation vector
writer.write("1.0,0.0,0.0\n"); // R matrix rows
writer.write("0.0,1.0,0.0\n");
writer.write("0.0,0.0,1.0\n");
}
// Initialize a point cloud
pointCloud = Arrays.asList(
new Point(1.0f, 2.0f, 3.0f),
new Point(4.0f, 5.0f, 6.0f),
new Point(7.0f, 8.0f, 9.0f)
);
// Initialize a new ObjectSet with intrinsics and a point cloud
os = new ObjectSet(intrinsics, pointCloud);
}
@Test
void testObjectCreation() {
// Create objects using multiple points
int objId1 = os.makeObject(new Point(1, 2, 3, 1), new Point(4, 5, 6, 2));
int objId2 = os.makeObject(new Point(7, 8, 9, 3), new Point(10, 11, 12, 4));
assertEquals(2, os.objects.size(), "There should be two objects in the set after addition.");
assertNotNull(os.objects.get(objId1), "The object with objId1 should not be null.");
assertNotNull(os.objects.get(objId2), "The object with objId2 should not be null.");
void testObjectSetCreation() throws IOException, FileNotFoundException {
assertNotNull(os, "ObjectSet should not be null after initialization");
assertNotNull(os.objects, "ObjectSet should have a list of objects");
assertTrue(os.objects.isEmpty(), "Initially, ObjectSet should be empty");
}
@Test
void testObjectComparisonAndCombination() {
// Create two objects that should be similar enough to be considered the same
int objId1 = os.makeObject(new Point(0, 0, 0, 1));
int objId2 = os.makeObject(new Point(0, 0, 0, 2));
void testReconcileCandidate() {
// Create a candidate PointSet
PointSet candidate = new PointSet(1, "object",
new Point(1.0f, 2.0f, 3.0f),
new Point(4.0f, 5.0f, 6.0f)
);
assertTrue(os.compareObjects(objId1, objId2), "Identical objects should be considered the same.");
os.reconcileCandidate(candidate, 0.5);
// Now combine these objects into one
os.combineObjects(objId1, objId2);
assertEquals(1, os.objects.size(), "There should be one object left after combining.");
assertEquals(1, os.objects.size(), "One object should be added or reconciled");
}
@Test
void testEmptyObjectSetCreation() {
assertTrue(os.objects.isEmpty(), "Newly created ObjectSet should be empty.");
void testToString() {
assertEquals("ObjectSet of : 0 objects:", os.toString(), "Initial string representation should match expected format");
// Add an object and check the string representation again
os.reconcileCandidate(new PointSet(1, "vehicle", new Point(1.0f, 2.0f, 3.0f)), 0.5);
assertTrue(os.toString().startsWith("ObjectSet of : 1 objects:"), "String representation should reflect the new object count");
}
@AfterEach
void tearDown() {
new File("test_intr.csv").delete(); // Clean up intrinsics file
new File("test_pose.csv").delete(); // Ensure to delete pose file if it wasn't cleaned up earlier
}
}
package object_detection;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
import object_detection.types.*;
/**
 * Unit tests for the {@link Point2D} value type: construction, accessors,
 * string representation, and independence of separate instances.
 */
class Point2DTests {

    @Test
    void testPoint2DCreation() {
        Point2D created = new Point2D(3.5f, 7.2f);
        assertEquals(3.5f, created.getX(), "X coordinate should match the constructor input");
        assertEquals(7.2f, created.getY(), "Y coordinate should match the constructor input");
    }

    @Test
    void testToString() {
        // The expected format is "{x, y}"
        String expectedString = "{3.5, 7.2}";
        assertEquals(expectedString, new Point2D(3.5f, 7.2f).toString(), "toString should correctly represent the Point2D");
    }

    @Test
    void testMultiplePoints() {
        // Two distinct instances must each retain their own coordinates
        Point2D first = new Point2D(1.0f, 2.0f);
        Point2D second = new Point2D(3.0f, 4.0f);
        assertEquals(1.0f, first.getX(), "Point1's X coordinate should match the constructor input");
        assertEquals(2.0f, first.getY(), "Point1's Y coordinate should match the constructor input");
        assertEquals(3.0f, second.getX(), "Point2's X coordinate should match the constructor input");
        assertEquals(4.0f, second.getY(), "Point2's Y coordinate should match the constructor input");
    }
}
package object_detection;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import object_detection.types.*;
/**
 * Unit tests for {@link PointSet}: construction, adding points, and index access.
 */
class PointSetTests {

    private PointSet ps;

    @BeforeEach
    void setUp() {
        // Sample set: index 1, prediction "vehicle", seeded with two points
        ps = new PointSet(1, "vehicle", new Point(1.0f, 2.0f, 3.0f), new Point(4.0f, 5.0f, 6.0f));
    }

    @Test
    void testPointSetCreation() {
        Point[] contents = ps.getPoints();
        assertEquals(2, contents.length, "PointSet should contain two points initially");
        Point first = contents[0];
        Point second = contents[1];
        assertEquals(1.0f, first.getX(), 0.001, "First point's X coordinate should match");
        assertEquals(2.0f, first.getY(), 0.001, "First point's Y coordinate should match");
        assertEquals(3.0f, first.getZ(), 0.001, "First point's Z coordinate should match");
        assertEquals(4.0f, second.getX(), 0.001, "Second point's X coordinate should match");
        assertEquals(5.0f, second.getY(), 0.001, "Second point's Y coordinate should match");
        assertEquals(6.0f, second.getZ(), 0.001, "Second point's Z coordinate should match");
        assertEquals("vehicle", ps.getPred(), "Prediction name should be 'vehicle'");
    }

    @Test
    void testAddPoint() {
        ps.addPoint(new Point(7.0f, 8.0f, 9.0f));
        Point[] contents = ps.getPoints();
        assertEquals(3, contents.length, "PointSet should contain three points after addition");
        Point added = contents[2];
        assertEquals(7.0f, added.getX(), 0.001, "Third point's X coordinate should match");
        assertEquals(8.0f, added.getY(), 0.001, "Third point's Y coordinate should match");
        assertEquals(9.0f, added.getZ(), 0.001, "Third point's Z coordinate should match");
    }

    @Test
    void testGetIDX() {
        // The index supplied at construction time is exposed unchanged
        assertEquals(1, ps.getIDX(), "IDX should be 1 as specified during creation");
    }
}
class PointTests {
/**
 * Verifies basic construction and coordinate accessors.
 * (Reconstructed: diff corruption left both the old 4-arg and new 3-arg
 * constructor calls in the body; the post-diff 3-arg form is kept.)
 */
@Test
void testPointCreation() {
    // Test the creation of a Point object
    Point point = new Point(1.0f, 2.0f, 3.0f);
    assertNotNull(point, "Point object should not be null");
    assertEquals(1.0f, point.getX(), "X coordinate should match the constructor input");
    assertEquals(2.0f, point.getY(), "Y coordinate should match the constructor input");
    assertEquals(3.0f, point.getZ(), "Z coordinate should match the constructor input");
}
/** Verifies construction with RGB color attributes alongside coordinates. */
@Test
void testPointCreationWithColor() {
    // Test the creation of a Point object with color attributes
    Point point = new Point(1.0f, 2.0f, 3.0f, 255, 0, 0);
    assertNotNull(point, "Point object should not be null");
    assertEquals(1.0f, point.getX(), "X coordinate should match the constructor input");
    assertEquals(2.0f, point.getY(), "Y coordinate should match the constructor input");
    assertEquals(3.0f, point.getZ(), "Z coordinate should match the constructor input");
    assertArrayEquals(new int[]{255, 0, 0}, point.getColor(), "Color should match the constructor input");
}
/**
 * Verifies tolerance-based equality at a loose and a tight threshold.
 * (Reconstructed: diff corruption interleaved the old three-point version
 * with the new two-point version; the post-diff two-point form is kept.)
 */
@Test
void testPointEquality() {
    // Test the equality method under two scenarios
    Point p1 = new Point(1.005f, 2.005f, 3.005f);
    Point p2 = new Point(1.006f, 2.006f, 3.006f);
    assertTrue(Point.equals(p1, p2, 0.01f), "Points p1 and p2 should be considered equal with a tolerance of 0.01");
    assertFalse(Point.equals(p1, p2, 0.001f), "Points p1 and p2 should not be considered equal with a tolerance of 0.001");
}
@Test
void testPointPrecisionEquality() {
void testPrecisionEquality() {
// Test precision issues and rounding errors
Point p1 = new Point(0.0000001f, 0.0000001f, 0.0000001f, 0);
Point p2 = new Point(0.0000002f, 0.0000002f, 0.0000002f, 0);
//assertFalse(Point.equals(p1, p2, 0.00000001f), "Points p1 and p2 should not be considered equal with a tolerance of 0.00000001");
Point p1 = new Point(0.0000001f, 0.0000001f, 0.0000001f);
Point p2 = new Point(0.0000002f, 0.0000002f, 0.0000002f);
assertFalse(Point.equals(p1, p2, 0.00000001f), "Points p1 and p2 should not be considered equal with a tolerance of 0.00000001");
}
/**
 * Verifies equality is not biased by sign. (Reconstructed: diff corruption
 * duplicated the point declarations; the post-diff 3-arg form is kept.)
 */
@Test
void testNegativeCoordinates() {
    // Test points with negative coordinates to ensure that equality checks are not biased by sign
    Point p1 = new Point(-1.005f, -2.005f, -3.005f);
    Point p2 = new Point(-1.005f, -2.005f, -3.005f);
    assertTrue(Point.equals(p1, p2, 0.01f), "Negative coordinate points should be considered equal");
}
/**
 * Verifies exact equality at the origin. (Reconstructed: diff corruption
 * duplicated the point declarations; the post-diff 3-arg form is kept.)
 */
@Test
void testZeroCoordinates() {
    // Test points with all coordinates set to zero
    Point p1 = new Point(0.0f, 0.0f, 0.0f);
    Point p2 = new Point(0.0f, 0.0f, 0.0f);
    assertTrue(Point.equals(p1, p2, 0.0001f), "Zero coordinate points should be exactly equal");
}
/**
 * Verifies clearly different points compare unequal. (Reconstructed: diff
 * corruption duplicated the point declarations; the post-diff form is kept.)
 */
@Test
void testDistinctPoints() {
    // Test distinct points that should not be equal
    Point p1 = new Point(1.000f, 1.000f, 1.000f);
    Point p2 = new Point(2.000f, 2.000f, 2.000f);
    assertFalse(Point.equals(p1, p2, 0.001f), "Distinct points should not be considered equal");
}
/**
 * Verifies the string representation. (The stray commented-out assert that
 * followed this method was diff residue and has been removed.)
 */
@Test
void testToString() {
    // Test the toString method of the Point class
    Point point = new Point(1.0f, 2.0f, 3.0f);
    assertEquals("Point(1.0 ,2.0 ,3.0)", point.toString(), "String representation should match expected format");
}
/** Verifies that coordinate-identical points hash identically. */
@Test
void testHashCode() {
    // Test the hash code generation for the Point class
    Point first = new Point(1.0f, 2.0f, 3.0f);
    Point second = new Point(1.0f, 2.0f, 3.0f);
    assertEquals(first.hashCode(), second.hashCode(), "Hash codes should be identical for points with identical coordinates");
}
}
# Entry into System
Things that are true:
1. A student should be able to build the system, then press run, then open a localhost and see the video and the point cloud of each object
2. A student should also be able to choose between different object and get information (need interactive display)
3. The entry needs to do the entire workflow
- get keyframes and featurepoints
- get objects from keyframes
- start object detection
- finish object detection and update database
- ping GUI server
- GUI server pulls information and displays point cloud to user
TODO: function to process each frame within the ObjectSet
Input to System:
- pointcloud
- keyframes:
- camera angles (x,y,z + angle ?)
- 2D points captured
- YOLO-captured bounding boxes
1) Take pointcloud, and downsample
2) For each frame:
a. project downsampled pointcloud onto frame
b. overlay 2D bounding boxes from YOLO
c. create candidate objects based on points falling within boxes
d. do overlap combinations based on thresholding voxels
3) Given final objectset, transmit corners to GUI
4) Display corners over original downsampled pointcloud to show objects
Output of System:
- pointcloud
- 3D bounding box of objects
Projecting 3D points onto 2D screen based on camera pose:
1) Calculate camera matrix = K * [R, t'] where R is rotation matrix, t is translation vector, and K is intrinsic of camera pose
2) Get projection by applying projPoints = [point, 1] * cameraMatrix'
3) Divide projPoints[1:2] by projPoints[3] (i.e. divide x and y coordinates by z)
4) Return projPoints if z > 0 (in front of camera), or x,y fall into size of image (0-ImageSize.x, 0-ImageSize.y)
function [isLoopClosed, mapPoints, vSetKeyFrames] = helperAddLoopConnections(...
mapPoints, vSetKeyFrames, loopCandidates, currKeyFrameId, currFeatures, ...
loopEdgeNumMatches)
%helperAddLoopConnections add connections between the current key frame and
% the valid loop candidate key frames. A loop candidate is valid if it has
% enough covisible map points with the current key frame.
%
% Inputs:
%   mapPoints          - set of 3-D world points
%   vSetKeyFrames      - view set holding key frames and their connections
%   loopCandidates     - IDs of candidate loop-closure key frames
%   currKeyFrameId     - ID of the current key frame
%   currFeatures       - feature descriptors of the current key frame
%   loopEdgeNumMatches - minimum feature matches required to accept a loop edge
%
% Outputs:
%   isLoopClosed  - true when at least one loop edge was added
%   mapPoints     - map points with covisible points fused across the loop
%   vSetKeyFrames - key-frame set with the new loop connections added
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2023 The MathWorks, Inc.
%#codegen
% Accumulates one [loopId, currId] row per accepted loop edge
loopClosureEdge = zeros(0, 2, 'uint32');
numCandidates = size(loopCandidates,1);
% In codegen mode findWorldPointsInView returns cell arrays; unwrap them
if isSimMode
[index3d1, index2d1] = findWorldPointsInView(mapPoints, currKeyFrameId);
else
[index3d1Cg, index2d1Cg] = findWorldPointsInView(mapPoints, currKeyFrameId);
index2d1 = index2d1Cg{1};
index3d1 = index3d1Cg{1};
end
% Descriptors of the current key frame's features that have map points
validFeatures1 = currFeatures.Features(index2d1, :);
for k = 1 : numCandidates
if isSimMode()
[index3d2, index2d2] = findWorldPointsInView(mapPoints, loopCandidates(k));
else
[index3d2Cg, index2d2Cg] = findWorldPointsInView(mapPoints, loopCandidates(k));
index2d2 = index2d2Cg{1};
index3d2 = index3d2Cg{1};
end
allFeatures2 = vSetKeyFrames.Views.Features{loopCandidates(k)};
validFeatures2 = allFeatures2(index2d2, :);
% Match binary descriptors between the current frame and the candidate
indexPairs = matchFeatures(binaryFeatures(validFeatures1), binaryFeatures(validFeatures2), ...
'Unique', true, 'MaxRatio', 0.9, 'MatchThreshold', 40);
% Check if all the candidate key frames have strong connection with the
% current keyframe
if size(indexPairs, 1) < loopEdgeNumMatches
continue
end
% Estimate the relative pose of the current key frame with respect to the
% loop candidate keyframe with the highest similarity score
worldPoints1 = mapPoints.WorldPoints(index3d1(indexPairs(:, 1)), :);
worldPoints2 = mapPoints.WorldPoints(index3d2(indexPairs(:, 2)), :);
% NOTE(review): AbsolutePose(end) assumes the current key frame is the
% last view in the set — confirm callers guarantee this.
tform1 = pose2extr(vSetKeyFrames.Views.AbsolutePose(end));
tform2 = pose2extr(vSetKeyFrames.Views.AbsolutePose(loopCandidates(k)));
worldPoints1InCamera1 = transformPointsForward(tform1, worldPoints1) ;
worldPoints2InCamera2 = transformPointsForward(tform2, worldPoints2) ;
% Suppress warnings from the robust estimator; restored below
w = warning('off','all');
if isSimMode()
[tform, inlierIndex] = estgeotform3d(...
worldPoints1InCamera1, worldPoints2InCamera2, 'similarity', 'MaxDistance', 0.1);
else
% Codegen path only supports rigid estimation
[tform, inlierIndex] = estgeotform3d(...
worldPoints1InCamera1, worldPoints2InCamera2, 'rigid', 'MaxDistance', 0.1);
end
warning(w);
% Add connection between the current key frame and the loop key frame
inlierIndexVals = inlierIndex(:);
indexPairs1 = indexPairs(inlierIndexVals, 1);
indexPairs2 = indexPairs(inlierIndexVals, 2);
index2dPairs = index2d2(indexPairs2);
index2d1Pairs = index2d1(indexPairs1);
matches = uint32([index2dPairs, index2d1Pairs]);
vSetKeyFrames = addConnection(vSetKeyFrames, loopCandidates(k), currKeyFrameId, tform, 'Matches', matches);
if isSimMode()
disp(['Loop edge added between keyframe: ', num2str(loopCandidates(k)), ' and ', num2str(currKeyFrameId)]);
end
% Fuse co-visible map points
matchedIndex3d1 = index3d1(indexPairs1);
matchedIndex3d2 = index3d2(indexPairs2);
mapPoints = updateWorldPoints(mapPoints, matchedIndex3d1, mapPoints.WorldPoints(matchedIndex3d2, :));
loopClosureEdge = [loopClosureEdge; loopCandidates(k), currKeyFrameId];
end
% A loop is closed when at least one edge was accepted
isLoopClosed = ~isempty(loopClosureEdge);
end
function tf = isSimMode()
% isSimMode returns true when running as MATLAB simulation (no codegen target).
target = coder.target;
tf = isempty(target);
end
function [mapPoints, vSetKeyFrames] = helperAddNewKeyFrame(mapPoints, vSetKeyFrames,...
cameraPose, currFeatures, currPoints, mapPointsIndices, featureIndices, keyFramesIndices)
%helperAddNewKeyFrame add key frames to the key frame set
%
% Inputs:
%   mapPoints        - set of 3-D world points
%   vSetKeyFrames    - view set of existing key frames
%   cameraPose       - absolute pose of the new key frame
%   currFeatures     - feature descriptors of the current frame
%   currPoints       - feature points of the current frame
%   mapPointsIndices - indices of map points observed in the current frame
%   featureIndices   - indices of the matched feature points
%   keyFramesIndices - IDs of covisible key frames to connect to
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2023 The MathWorks, Inc.
%#codegen
% New view gets the next sequential ID after the last existing view
viewId = vSetKeyFrames.Views.ViewId(end)+1;
vSetKeyFrames = addView(vSetKeyFrames, viewId, cameraPose,...
'Features', currFeatures.Features, ...
'Points', currPoints);
viewsAbsPoses = vSetKeyFrames.Views.AbsolutePose;
% Connect the new key frame to each covisible key frame
for i = 1:numel(keyFramesIndices)
localKeyFrameId = keyFramesIndices(i);
% In codegen mode findWorldPointsInView returns cell arrays; unwrap them
if isSimMode()
[index3d, index2d] = findWorldPointsInView(mapPoints, localKeyFrameId);
else
[index3dCg, index2dCg] = findWorldPointsInView(mapPoints, localKeyFrameId);
index3d = index3dCg{1};
index2d = index2dCg{1};
end
% Map points seen in both the local key frame and the current frame
[~, ia, ib] = intersect(index3d, mapPointsIndices, 'stable');
prePose = viewsAbsPoses(localKeyFrameId);
% Relative pose from the local key frame to the new key frame
relPose = rigidtform3d(prePose.R' * cameraPose.R, ...
(cameraPose.Translation-prePose.Translation)*prePose.R);
% Only connect frames that share more than 5 covisible map points
if numel(ia) > 5
if isSimMode()
vSetKeyFrames = addConnection(vSetKeyFrames, localKeyFrameId, viewId, relPose, ...
'Matches', [index2d(ia),featureIndices(ib)]);
else
% Codegen requires an explicitly variable-sized matches array
coder.varsize('matches', [inf 2], [1, 0]);
fIndices = featureIndices(ib(:));
matches = [index2d(ia), fIndices];
vSetKeyFrames = addConnection(vSetKeyFrames, localKeyFrameId, viewId, relPose, ...
'Matches', matches);
end
end
end
% Register the 2-D/3-D correspondences for the new view
mapPoints = addCorrespondences(mapPoints, viewId, mapPointsIndices, ...
featureIndices);
end
function tf = isSimMode()
% isSimMode returns true when running as MATLAB simulation (no codegen target).
target = coder.target;
tf = isempty(target);
end
function [isDetected, loopKeyFrameIds] = helperCheckLoopClosure(vSetKeyFrames, ...
currKeyframeId, imageDatabase, currImg, loopEdgeNumMatches)
%helperCheckLoopClosure detect loop candidates key frames by retrieving
% visually similar images from the feature database.
%
% Inputs:
%   vSetKeyFrames      - view set of key frames
%   currKeyframeId     - ID of the current key frame
%   imageDatabase      - inverted image index used for retrieval
%   currImg            - image of the current key frame
%   loopEdgeNumMatches - minimum matches defining a strong connection
%
% Outputs:
%   isDetected      - true when a consistent group of candidates is found
%   loopKeyFrameIds - IDs of the detected loop candidate key frames
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2023 The MathWorks, Inc.
%#codegen
% Retrieve all the visually similar key frames
[candidateViewIds, similarityscores] = retrieveImages(currImg, imageDatabase);
% Compute similarity between the current key frame and its strongly-connected
% key frames. The minimum similarity score is used as a baseline to find
% loop candidate key frames, which are visually similar to but not connected
% to the current key frame
covisViews = connectedViews(vSetKeyFrames, currKeyframeId);
covisViewsIds = covisViews.ViewId;
strongCovisViews = connectedViews(vSetKeyFrames, currKeyframeId, 'MinNumMatches', loopEdgeNumMatches);
strongCovisViewIds = strongCovisViews.ViewId;
% Retrieve the top 10 similar connected key frames
[~,~,scores] = evaluateImageRetrieval(currImg, imageDatabase, strongCovisViewIds, 'NumResults', 10);
minScore = min(scores);
% Keep only candidates that are NOT already connected to the current frame
[loopKeyFrameIds,ia] = setdiff(candidateViewIds, covisViewsIds, 'stable');
% Scores of non-connected key frames
candidateScores = similarityscores(ia); % Descending
if ~isempty(ia)
bestScore = candidateScores(1);
% Score must be higher than the 75% of the best score
isValid = candidateScores > max(bestScore*0.75, minScore);
loopKeyFrameIds = loopKeyFrameIds(isValid);
else
% Empty result with codegen-compatible sizing
loopKeyFrameIds = zeros(coder.ignoreConst(0), coder.ignoreConst(0), class(loopKeyFrameIds));
end
% Loop candidates need to be consecutively detected
minNumCandidates = 3; % At least 3 candidates are found
if size(loopKeyFrameIds,1) >= minNumCandidates
groups = nchoosek(loopKeyFrameIds, minNumCandidates);
% A group is "consecutive" when its IDs span fewer than 4 frames
consecutiveGroups = groups(max(groups,[],2) - min(groups,[],2) < 4, :);
if ~isempty(consecutiveGroups) % Consecutive candidates are found
loopKeyFrameIds = consecutiveGroups(1,:);
isDetected = true;
else
isDetected = false;
end
else
isDetected = false;
end
end
function [mapPoints, vSetKeyFrames, recentPointIdx] = helperCreateNewMapPoints(...
mapPoints, vSetKeyFrames, currKeyFrameId, intrinsics, scaleFactor, minNumMatches, minParallax)
%helperCreateNewMapPoints creates new map points by triangulating matched
% feature points in the current key frame and the connected key frames.
%
% Inputs:
%   mapPoints      - set of 3-D world points
%   vSetKeyFrames  - view set of key frames
%   currKeyFrameId - ID of the current key frame
%   intrinsics     - camera intrinsics
%   scaleFactor    - image pyramid scale factor
%   minNumMatches  - minimum matches to consider a key frame connected
%   minParallax    - minimum parallax angle (degrees) for triangulation
%
% Outputs:
%   mapPoints      - map points with the newly triangulated points added
%   vSetKeyFrames  - key-frame set with updated connections
%   recentPointIdx - indices of the newly created map points
%
% This is an example helper function that is subject to change or removal
% in future releases.
% Copyright 2019-2023 The MathWorks, Inc.
%#codegen
% Get connected key frames
KcViews = connectedViews(vSetKeyFrames, currKeyFrameId, 'MinNumMatches', minNumMatches);
KcIDs = KcViews.ViewId;
% Retrieve data of the current key frame
views = vSetKeyFrames.Views;
currPose = views.AbsolutePose(currKeyFrameId);
currFeatures = views.Features{currKeyFrameId};
currPoints = views.Points{currKeyFrameId};
currLocations = currPoints.Location;
currScales = currPoints.Scale;
% Camera projection matrix
currCamMatrix = cameraProjection(intrinsics, pose2extr(currPose));
recentPointIdx = zeros(0, 1);
for i = 1:numel(KcIDs)
kfPose = views.AbsolutePose(KcIDs(i));
% In codegen mode findWorldPointsInView returns cell arrays; unwrap them
if isSimMode()
[kfIndex3d, kfIndex2d] = findWorldPointsInView(mapPoints, KcIDs(i));
else
[kfIndex3dCg, kfIndex2dCg] = findWorldPointsInView(mapPoints, KcIDs(i));
kfIndex3d = kfIndex3dCg{1};
kfIndex2d = kfIndex2dCg{1};
end
xyzPoints = mapPoints.WorldPoints(kfIndex3d,:);
% Median scene depth as seen from the connected key frame
medianDepth = median(vecnorm(xyzPoints - kfPose.Translation, 2, 2));
% Skip the key frame if the change of view is small
isViewClose = norm(kfPose.Translation - currPose.Translation)/medianDepth < 0.01;
if isViewClose
continue
end
% Retrieve data of the connected key frame
kfFeatures = views.Features{KcIDs(i)};
kfPoints = views.Points{KcIDs(i)};
kfLocations = kfPoints.Location;
kfScales = kfPoints.Scale;
% currIndex2d changes in each iteration as new map points are created
if isSimMode()
[~, currIndex2d] = findWorldPointsInView(mapPoints, currKeyFrameId);
else
[~, currIndex2dCg] = findWorldPointsInView(mapPoints, currKeyFrameId);
currIndex2d = currIndex2dCg{1};
end
% Only use unmatched feature points
uIndices1 = setdiff(uint32(1:size(kfFeatures,1))', kfIndex2d, 'stable');
uIndices2 = setdiff(uint32(1:size(currFeatures,1))', currIndex2d, 'stable');
uFeatures1 = kfFeatures(uIndices1, :);
uFeatures2 = currFeatures(uIndices2, :);
uLocations1 = kfLocations(uIndices1, :);
uLocations2 = currLocations(uIndices2, :);
uScales1 = kfScales(uIndices1);
uScales2 = currScales(uIndices2);
indexPairs = matchFeatures(binaryFeatures(uFeatures1), binaryFeatures(uFeatures2),...
'Unique', true, 'MaxRatio', 0.7, 'MatchThreshold', 40);
if isempty(indexPairs)
continue
end
matchedPoints1 = uLocations1(indexPairs(:,1), :);
matchedPoints2 = uLocations2(indexPairs(:,2), :);
% Epipole in the current key frame
epiPole = world2img(kfPose.Translation, pose2extr(currPose), intrinsics);
distToEpipole = vecnorm(matchedPoints2 - epiPole, 2, 2);
% Compute fundamental matrix
F = computeF(intrinsics, kfPose, currPose);
% Epipolar line in the second image
epiLine = epipolarLine(F, matchedPoints2);
% Point-to-line distance of each match from its epipolar line
distToLine = abs(sum(epiLine.* [matchedPoints1, ones(size(matchedPoints1,1), 1)], 2))./...
sqrt(sum(epiLine(:,1:2).^2, 2));
% Keep matches close to the epipolar line but away from the epipole
isValid = distToLine < 2*uScales2(indexPairs(:,2)) & ...
distToEpipole > 10*uScales2(indexPairs(:,2));
indexPairs = indexPairs(isValid, :);
matchedPoints1 = matchedPoints1(isValid, :);
matchedPoints2 = matchedPoints2(isValid, :);
% Parallax check
isLarge = isLargeParalalx(matchedPoints1, matchedPoints2, kfPose, ...
currPose, intrinsics, minParallax);
matchedPoints1 = matchedPoints1(isLarge, :);
matchedPoints2 = matchedPoints2(isLarge, :);
indexPairs = indexPairs(isLarge, :);
kfCamMatrix = cameraProjection(intrinsics, pose2extr(kfPose));
% Triangulate two views to create new world points
[xyzPoints, reprojectionErrors, validIdx] = triangulate(matchedPoints1, ...
matchedPoints2, kfCamMatrix, currCamMatrix);
% Filtering by view direction and reprojection error
inlier = filterTriangulatedMapPoints(xyzPoints, kfPose, currPose, ...
uScales1(indexPairs(:,1)), uScales2(indexPairs(:,2)), ...
reprojectionErrors, scaleFactor, validIdx);
% Add new map points and update connections
if any(inlier)
xyzPoints = xyzPoints(inlier,:);
indexPairs = indexPairs(inlier, :);
mIndices1 = uIndices1(indexPairs(:, 1));
mIndices2 = uIndices2(indexPairs(:, 2));
[mapPoints, indices] = addWorldPoints(mapPoints, xyzPoints);
recentPointIdx = [recentPointIdx; indices]; %#ok<AGROW>
% Add new observations
mapPoints = addCorrespondences(mapPoints, KcIDs(i),indices, mIndices1);
mapPoints = addCorrespondences(mapPoints, currKeyFrameId, indices, mIndices2);
% Update connections with new feature matches
if isSimMode()
[~,ia] = intersect(vSetKeyFrames.Connections{:,1:2}, ...
[KcIDs(i), currKeyFrameId], 'row', 'stable');
oldMatches = vSetKeyFrames.Connections.Matches{ia};
else
% Codegen path avoids table brace indexing on the connections table
connections = vSetKeyFrames.Connections;
[~,ia] = intersect([connections.ViewId1, connections.ViewId2], ...
[KcIDs(i), currKeyFrameId], 'row', 'stable');
oldMatches = connections.Matches{ia};
end
newMatches = [oldMatches; mIndices1, mIndices2];
vSetKeyFrames = updateConnection(vSetKeyFrames, KcIDs(i), currKeyFrameId, ...
'Matches', newMatches);
end
end
end
function F = computeF(intrinsics, pose1, pose2)
% computeF computes the fundamental matrix relating two camera poses with
% shared intrinsics K: F = K^-T * [t12]_x * R12 * K^-1.
rot1 = pose1.R;
trans1 = pose1.Translation';
rot2 = pose2.R;
trans2 = pose2.Translation';
% Relative rotation and translation from camera 1 to camera 2
relRot = rot1' * rot2;
relTrans = rot1' * (trans2 - trans1);
% Skew-symmetric (cross-product) matrix of the relative translation
tx = [0,           -relTrans(3),  relTrans(2); ...
      relTrans(3),  0,           -relTrans(1); ...
     -relTrans(2),  relTrans(1),  0];
F = intrinsics.K' \ tx * relRot / intrinsics.K;
end
function inlier = filterTriangulatedMapPoints(xyzPoints, pose1, pose2, ...
scales1, scales2, reprojectionErrors, scaleFactor, isInFront)
% filterTriangulatedMapPoints keeps triangulated points that are
% scale-consistent between the two views, have small reprojection error,
% and lie in front of both cameras (isInFront from triangulate).
camToPoints1= xyzPoints - pose1.Translation;
camToPoints2= xyzPoints - pose2.Translation;
% Check scale consistency and reprojection errors
distances1 = vecnorm(camToPoints1, 2, 2);
distances2 = vecnorm(camToPoints2, 2, 2);
ratioDist = distances1./distances2;
ratioScale = scales2./scales1;
ratioFactor = 1.5 * scaleFactor;
% NOTE(review): the two ratio tests are OR-ed; confirm this matches the
% intended scale-consistency check (an AND would be stricter).
isInScale = (ratioDist./ratioScale < ratioFactor | ...
ratioScale./ratioDist < ratioFactor);
% Reprojection error threshold scales with the coarser feature scale
maxError = sqrt(6);
isSmallError= reprojectionErrors < maxError*min(scales1, scales2);
inlier = isInScale & isSmallError & isInFront;
end
function isLarge = isLargeParalalx(points1, points2, pose1, pose2, intrinsics, minParallax)
% isLargeParalalx returns true for matches whose two viewing rays subtend an
% angle greater than minParallax degrees (and whose cosine is positive).
% (Function name keeps the caller's existing spelling.)
% Parallax check
% Back-project pixel coordinates into viewing rays in the world frame
ray1 = [points1, ones(size(points1(:,1)))]/intrinsics.K' *pose1.R';
% NOTE(review): size(points1(:,2)) — presumably intended points2(:,1);
% the result is identical here since both matrices have equal row counts.
ray2 = [points2, ones(size(points1(:,2)))]/intrinsics.K' *pose2.R';
cosParallax = sum(ray1 .* ray2, 2) ./(vecnorm(ray1, 2, 2) .* vecnorm(ray2, 2, 2));
isLarge = cosParallax < cosd(minParallax) & cosParallax > 0;
end
function tf = isSimMode()
% isSimMode returns true when running as MATLAB simulation (no codegen target).
target = coder.target;
tf = isempty(target);
end