%% Visual SLAM
% Modified monocular visual ORB SLAM from the Matlab example
% https://www.mathworks.com/help/vision/ug/monocular-visual-simultaneous-localization-and-mapping.html
%
% Main modifications:
%
% Image acquisition and handling: uses the drone's camera to acquire new
% images and doesn't store them in an imageDatastore
%
% Camera intrinsics: changed the camera intrinsics to those of Tello's
% camera (including distortion)
%
% Figures: grabs handles to the feature-match image and map plots at every
% iteration and saves them to disk.
%
% Main loop: loops for a given number of cycles through the predetermined
% move sequence --> no loop closure against a prerecorded dataset
%
% Error handling: if lighting is low and too few ORB features are
% detected, catch the error in the map initialization and main loops and
% continue; if the connection to the drone is poor, catch the error from
% the move sequence and retry up to 10 times before throwing it.
%% vslam
% drone: drone object
% cam: drone's camera
% moveseq: cell array with rows containing a vector and an angle
% cycles: total number of cycles through the move sequence
% minMatches: minimum number of ORB feature matches for triangulation
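%
% Example call (illustrative values; assumes the MATLAB Support Package for
% Ryze Tello Drones, which provides ryze, camera, takeoff, move, and turn):
%   drone = ryze("Tello");
%   cam = camera(drone);
%   takeoff(drone);
%   % two moves per cycle: 0.5 m forward, then a quarter turn
%   moveseq = {[0.5 0 0], deg2rad(90); [0.5 0 0], deg2rad(90)};
%   vslam(drone, cam, moveseq, 4, 20);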
function vslam(drone, cam, moveseq, cycles, minMatches)
% Inspect the first image
currFrameIdx = 1;
currI = snapshot(cam);
himage = imshow(currI);
% Set random seed for reproducibility
rng('default');
% Create a cameraIntrinsics object to store the camera intrinsic parameters.
% Unlike the TUM RGB-D dataset used in the original MathWorks example, whose
% images are already undistorted, the Tello frames are distorted, so the
% radial and tangential distortion coefficients are specified below.
% Tello camera calibration parameters from: https://tellopilots.com/threads/camera-intrinsic-parameter.2620/
focalLength = [921.170702, 919.018377]; % in units of pixels
principalPoint = [459.904354, 351.238301]; % in units of pixels
imageSize = size(currI,[1 2]); % in units of pixels
rad_distort = [-0.033458, 0.105152];
tan_distort = [0.001256, -0.006647];
intrinsics = cameraIntrinsics(focalLength, principalPoint, imageSize, ...
'RadialDistortion', rad_distort, 'TangentialDistortion', tan_distort);
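% Note that the distortion coefficients are stored in the intrinsics but the
% frames themselves are never undistorted before feature detection; if
% desired, each snapshot could be undistorted explicitly, e.g.:
%   currI = undistortImage(currI, intrinsics);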
% Detect and extract ORB features
scaleFactor = 1.2;
numLevels = 8;
[preFeatures, prePoints] = helperDetectAndExtractFeatures(currI, scaleFactor, numLevels);
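% helperDetectAndExtractFeatures ships with the MathWorks example; roughly,
% it performs the following steps (a sketch of the idea, not the shipped code):
%   Igray = rgb2gray(currI);
%   points = detectORBFeatures(Igray,'ScaleFactor',scaleFactor,'NumLevels',numLevels);
%   points = selectUniform(points, 1000, size(Igray));   % cap the feature count
%   [features, validPoints] = extractFeatures(Igray, points);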
currFrameIdx = currFrameIdx + 1;
firstI = currI; % Preserve the first frame
isMapInitialized = false;
% Map initialization loop
while ~isMapInitialized && currFrameIdx < 10
currI = snapshot(cam);
% jiggle the drone around a bit to initialize map
if mod(currFrameIdx,2) == 0
movedown(drone,0.5)
else
moveup(drone,0.5)
end
% handle the error if the underlying detectORBFeatures call fails
try
[currFeatures, currPoints] = helperDetectAndExtractFeatures(currI, scaleFactor, numLevels);
catch
currFrameIdx = currFrameIdx + 1;
continue
end
currFrameIdx = currFrameIdx + 1;
% Find putative feature matches
indexPairs = matchFeatures(preFeatures, currFeatures, 'Unique', true, ...
'MaxRatio', 0.9, 'MatchThreshold', 40);
preMatchedPoints = prePoints(indexPairs(:,1),:);
currMatchedPoints = currPoints(indexPairs(:,2),:);
% If not enough matches are found, check the next frame
if size(indexPairs, 1) < minMatches
continue
end
% Compute homography and evaluate reconstruction
[tformH, scoreH, inliersIdxH] = helperComputeHomography(preMatchedPoints, currMatchedPoints);
% Compute fundamental matrix and evaluate reconstruction
[tformF, scoreF, inliersIdxF] = helperComputeFundamentalMatrix(preMatchedPoints, currMatchedPoints);
% Select the model based on a heuristic: a homography explains the matches
% better for planar scenes or low-parallax motion, while a fundamental
% matrix explains general motion with significant parallax
ratio = scoreH/(scoreH + scoreF);
ratioThreshold = 0.45;
if ratio > ratioThreshold
inlierTformIdx = inliersIdxH;
tform = tformH;
else
inlierTformIdx = inliersIdxF;
tform = tformF;
end
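% For example, scoreH = 60 and scoreF = 40 give ratio = 0.6 > 0.45, so the
% homography would be selected; scoreH = 30 and scoreF = 70 give ratio = 0.3,
% and the fundamental matrix would win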
% Compute the camera location up to scale. Use half of the
% points to reduce computation
inlierPrePoints = preMatchedPoints(inlierTformIdx);
inlierCurrPoints = currMatchedPoints(inlierTformIdx);
[relOrient, relLoc, validFraction] = relativeCameraPose(tform, intrinsics, ...
inlierPrePoints(1:2:end), inlierCurrPoints(1:2:end));
% If not enough inliers are found, move to the next frame
if validFraction < 0.9 || numel(size(relOrient))==3
continue
end
% Triangulate two views to obtain 3-D map points
relPose = rigid3d(relOrient, relLoc);
minParallax = 3; % In degrees
[isValid, xyzWorldPoints, inlierTriangulationIdx] = helperTriangulateTwoFrames(...
rigid3d, relPose, inlierPrePoints, inlierCurrPoints, intrinsics, minParallax);
if ~isValid
continue
end
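% In the MathWorks example, helperTriangulateTwoFrames discards triangulated
% points that lie behind either camera or reproject with large error, and
% isValid additionally requires the parallax between the two views to exceed
% minParallax (a summary of the idea, not the shipped code)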
% Get the original index of features in the two key frames
indexPairs = indexPairs(inlierTformIdx(inlierTriangulationIdx),:);
isMapInitialized = true;
disp(['Map initialized with frame 1 and frame ', num2str(currFrameIdx-1)])
end % End of map initialization loop
if isMapInitialized
close(himage.Parent.Parent); % Close the previous figure
% Show matched features
hfeature = showMatchedFeatures(firstI, currI, prePoints(indexPairs(:,1)), ...
currPoints(indexPairs(:, 2)), 'Montage');
saveas(gcf,'./imgs/slam/matches.png')
else
error('Unable to initialize map.')
end
% Create an empty imageviewset object to store key frames
vSetKeyFrames = imageviewset;
% Create an empty worldpointset object to store 3-D map points
mapPointSet = worldpointset;
% Create a helperViewDirectionAndDepth object to store view direction and depth
directionAndDepth = helperViewDirectionAndDepth(size(xyzWorldPoints, 1));
% Add the first key frame. Place the camera associated with the first
% key frame at the origin, oriented along the Z-axis
preViewId = 1;
vSetKeyFrames = addView(vSetKeyFrames, preViewId, rigid3d, 'Points', prePoints,...
'Features', preFeatures.Features);
% Add the second key frame
currViewId = 2;
vSetKeyFrames = addView(vSetKeyFrames, currViewId, relPose, 'Points', currPoints,...
'Features', currFeatures.Features);
% Add connection between the first and the second key frame
vSetKeyFrames = addConnection(vSetKeyFrames, preViewId, currViewId, relPose, 'Matches', indexPairs);
% Add 3-D map points
[mapPointSet, newPointIdx] = addWorldPoints(mapPointSet, xyzWorldPoints);
% Add observations of the map points
preLocations = prePoints.Location;
currLocations = currPoints.Location;
preScales = prePoints.Scale;
currScales = currPoints.Scale;
% Add image points corresponding to the map points in the first key frame
mapPointSet = addCorrespondences(mapPointSet, preViewId, newPointIdx, indexPairs(:,1));
% Add image points corresponding to the map points in the second key frame
mapPointSet = addCorrespondences(mapPointSet, currViewId, newPointIdx, indexPairs(:,2));
% Run full bundle adjustment on the first two key frames
tracks = findTracks(vSetKeyFrames);
cameraPoses = poses(vSetKeyFrames);
[refinedPoints, refinedAbsPoses] = bundleAdjustment(xyzWorldPoints, tracks, ...
cameraPoses, intrinsics, 'FixedViewIDs', 1, ...
'PointsUndistorted', true, 'AbsoluteTolerance', 1e-7,...
'RelativeTolerance', 1e-15, 'MaxIteration', 50);
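% 'FixedViewIDs', 1 anchors the first camera pose during the optimization;
% without it the problem has a gauge freedom, since absolute pose (and scale)
% is unobservable from a monocular camera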
% Scale the map and the camera pose using the median depth of map points
medianDepth = median(vecnorm(refinedPoints.'));
refinedPoints = refinedPoints / medianDepth;
refinedAbsPoses.AbsolutePose(currViewId).Translation = ...
refinedAbsPoses.AbsolutePose(currViewId).Translation / medianDepth;
relPose.Translation = relPose.Translation/medianDepth;
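% Monocular reconstruction is only defined up to scale; dividing by the
% median depth normalizes the map so the median scene depth is 1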
% Update key frames with the refined poses
vSetKeyFrames = updateView(vSetKeyFrames, refinedAbsPoses);
vSetKeyFrames = updateConnection(vSetKeyFrames, preViewId, currViewId, relPose);
% Update map points with the refined positions
mapPointSet = updateWorldPoints(mapPointSet, newPointIdx, refinedPoints);
% Update view direction and depth
directionAndDepth = update(directionAndDepth, mapPointSet, vSetKeyFrames.Views, newPointIdx, true);
% Visualize matched features in the current frame
close(hfeature.Parent.Parent);
featurePlot = helperVisualizeMatchedFeatures(currI, currPoints(indexPairs(:,2)));
% Visualize initial map points and camera trajectory
mapPlot = helperVisualizeMotionAndStructure(vSetKeyFrames, mapPointSet);
% Show legend
showLegend(mapPlot);
% ViewId of the current key frame
currKeyFrameId = currViewId;
% ViewId of the last key frame
lastKeyFrameId = currViewId;
% ViewId of the reference key frame that has the most co-visible
% map points with the current key frame
refKeyFrameId = currViewId;
% Index of the last key frame in the input image sequence
lastKeyFrameIdx = currFrameIdx - 1;
% Indices of all the key frames in the input image sequence
addedFramesIdx = [1; lastKeyFrameIdx];
% Grab handles to the open feature and map figures so their snapshots can
% be saved at each iteration
hs = findall(groot,'Type','Figure');
fplot = hs(1);
mplot = hs(2);
saveas(fplot,'./imgs/slam/fplot0.png')
saveas(mplot,'./imgs/slam/mplot0.png')
% Main loop
move_idx = 0;
total_moves = cycles*size(moveseq,1);
break_iter = 0;
while move_idx < total_moves
try
% cycle through movement vector
[vec, angle] = moveseq{mod(move_idx,size(moveseq,1)) + 1,:};
move(drone,vec) % may throw error
turn(drone,angle) % may throw error
% reached only if the movement commands succeeded
fprintf('Movement %d of %d\n',move_idx+1,total_moves)
fprintf("dX = %g\n",vec(1))
fprintf("dY = %g\n",vec(2))
fprintf("dZ = %g\n",vec(3))
move_idx = move_idx + 1;
break_iter = 0;
catch
if break_iter < 10
break_iter = break_iter + 1;
continue
else
error('Movement command not received by Tello on iteration %d.\n',move_idx)
end
end
currI = snapshot(cam);
try
[currFeatures, currPoints] = helperDetectAndExtractFeatures(currI, scaleFactor, numLevels);
% Track the last key frame
% mapPointsIdx: Indices of the map points observed in the current frame
% featureIdx: Indices of the corresponding feature points in the
% current frame
[currPose, mapPointsIdx, featureIdx] = helperTrackLastKeyFrame(mapPointSet, ...
vSetKeyFrames.Views, currFeatures, currPoints, lastKeyFrameId, intrinsics, scaleFactor);
% Track the local map
% refKeyFrameId: ViewId of the reference key frame that has the most
% co-visible map points with the current frame
% localKeyFrameIds: ViewId of the connected key frames of the current frame
[refKeyFrameId, localKeyFrameIds, currPose, mapPointsIdx, featureIdx] = ...
helperTrackLocalMap(mapPointSet, directionAndDepth, vSetKeyFrames, mapPointsIdx, ...
featureIdx, currPose, currFeatures, currPoints, intrinsics, scaleFactor, numLevels);
% Check if the current frame is a key frame.
% A frame is a key frame if both of the following conditions are satisfied:
%
% 1. At least 20 frames have passed since the last key frame or the
% current frame tracks fewer than 80 map points
% 2. The map points tracked by the current frame are fewer than 90% of
% points tracked by the reference key frame
isKeyFrame = helperIsKeyFrame(mapPointSet, refKeyFrameId, lastKeyFrameIdx, ...
currFrameIdx, mapPointsIdx);
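% A sketch of the key-frame test described above (the shipped helper may
% differ in detail):
%   numPointsRef = numel(findWorldPointsInView(mapPointSet, refKeyFrameId));
%   cond1 = currFrameIdx >= lastKeyFrameIdx + 20 || numel(mapPointsIdx) < 80;
%   cond2 = numel(mapPointsIdx) < 0.9 * numPointsRef;
%   isKeyFrame = cond1 && cond2;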
% Visualize matched features
updatePlot(featurePlot, currI, currPoints(featureIdx));
saveas(fplot,sprintf('./imgs/slam/fplot_%d.png',move_idx))
if ~isKeyFrame
currFrameIdx = currFrameIdx + 1;
continue
end
% Update current key frame ID
currKeyFrameId = currKeyFrameId + 1;
% Add the new key frame
[mapPointSet, vSetKeyFrames] = helperAddNewKeyFrame(mapPointSet, vSetKeyFrames, ...
currPose, currFeatures, currPoints, mapPointsIdx, featureIdx, localKeyFrameIds);
% Remove outlier map points that are observed in fewer than 3 key frames
[mapPointSet, directionAndDepth, mapPointsIdx] = helperCullRecentMapPoints(mapPointSet, directionAndDepth, mapPointsIdx, newPointIdx);
% Create new map points by triangulation
minNumMatches = minMatches;
minParallax = 3;
[mapPointSet, vSetKeyFrames, newPointIdx] = helperCreateNewMapPoints(mapPointSet, vSetKeyFrames, ...
currKeyFrameId, intrinsics, scaleFactor, minNumMatches, minParallax);
% Update view direction and depth
directionAndDepth = update(directionAndDepth, mapPointSet, vSetKeyFrames.Views, [mapPointsIdx; newPointIdx], true);
% Local bundle adjustment
[mapPointSet, directionAndDepth, vSetKeyFrames, newPointIdx] = helperLocalBundleAdjustment(mapPointSet, directionAndDepth, vSetKeyFrames, ...
currKeyFrameId, intrinsics, newPointIdx);
% Visualize 3D world points and camera trajectory
updatePlot(mapPlot, vSetKeyFrames, mapPointSet);
saveas(mplot,sprintf('./imgs/slam/mplot_%d.png',move_idx))
catch
currFrameIdx = currFrameIdx + 1;
continue
end
% Update IDs and indices
lastKeyFrameId = currKeyFrameId;
lastKeyFrameIdx = currFrameIdx;
addedFramesIdx = [addedFramesIdx; currFrameIdx]; %#ok<AGROW>
currFrameIdx = currFrameIdx + 1;
end % End of main loop
%% Only works if the pose graph is well-formed and continuous --> probably won't happen for the Tello but keeping it here just in case
% % Optimize the poses
% vSetKeyFramesOptim = optimizePoses(vSetKeyFrames, minNumMatches, 'Tolerance', 1e-16, 'Verbose', true);
%
% % Update map points after optimizing the poses
% mapPointSet = helperUpdateGlobalMap(mapPointSet, directionAndDepth, ...
% vSetKeyFrames, vSetKeyFramesOptim);
%
% updatePlot(mapPlot, vSetKeyFrames, mapPointSet);
%
% % Plot the optimized camera trajectory
% optimizedPoses = poses(vSetKeyFramesOptim);
% plotOptimizedTrajectory(mapPlot, optimizedPoses)
%
% % Update legend
% showLegend(mapPlot);
% saveas(mplot,'./imgs/final_mplot.png')
end