zed_oc_sync_example.cpp
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2021, STEREOLABS.
//
// All rights reserved.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
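//
// This example shows how to grab video frames and IMU data from a ZED camera
// with the open capture library, and how to check the temporal synchronization
// between the two data streams (video/sensor timestamp offset).
//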
// ----> Includes
#include "videocapture.hpp"
#include "sensorcapture.hpp"
#include <iostream>
#include <sstream>
#include <iomanip>
#include <thread>
#include <mutex>
#include <opencv2/opencv.hpp>
// <---- Includes
// ----> Functions
// Sensor acquisition runs at 400Hz, so it must be executed in a different thread
void getSensorThreadFunc(sl_oc::sensors::SensorCapture* sensCap);
// <---- Functions
// ----> Global variables
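// Data shared between the main thread and the sensor thread.
// imuMutex protects the IMU text strings and the sync timestamp.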
std::mutex imuMutex;
std::string imuTsStr;
std::string imuAccelStr;
std::string imuGyroStr;
bool sensThreadStop=false;
uint64_t mcu_sync_ts=0;
// <---- Global variables
// The main function
int main(int argc, char *argv[])
{
// ----> Silence unused warning
(void)argc;
(void)argv;
// <---- Silence unused warning
//sl_oc::sensors::SensorCapture::resetSensorModule();
//sl_oc::sensors::SensorCapture::resetVideoModule();
// Set the verbose level
sl_oc::VERBOSITY verbose = sl_oc::VERBOSITY::ERROR;
// ----> Set the video parameters
sl_oc::video::VideoParams params;
params.res = sl_oc::video::RESOLUTION::HD720;
params.fps = sl_oc::video::FPS::FPS_30;
params.verbose = verbose;
// <---- Video parameters
// ----> Create a Video Capture object
sl_oc::video::VideoCapture videoCap(params);
if( !videoCap.initializeVideo(-1) )
{
std::cerr << "Cannot open camera video capture" << std::endl;
std::cerr << "Try to enable verbose to get more info" << std::endl;
return EXIT_FAILURE;
}
// Serial number of the connected camera
int camSn = videoCap.getSerialNumber();
std::cout << "Video Capture connected to camera sn: " << camSn << std::endl;
// <---- Create a Video Capture object
// ----> Create a Sensors Capture object
sl_oc::sensors::SensorCapture sensCap(verbose);
if( !sensCap.initializeSensors(camSn) ) // Note: we use the serial number acquired by the VideoCapture object
{
std::cerr << "Cannot open sensors capture" << std::endl;
std::cerr << "Try to enable verbose to get more info" << std::endl;
return EXIT_FAILURE;
}
std::cout << "Sensors Capture connected to camera sn: " << sensCap.getSerialNumber() << std::endl;
// Start the sensor capture thread. Note: since sensor data can be retrieved at 400 Hz while the video frame rate is
// lower (100 Hz at most), we use a separate thread for sensors.
std::thread sensThread(getSensorThreadFunc,&sensCap);
// <---- Create Sensors Capture
// ----> Enable video/sensors synchronization
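// Pass the SensorCapture object to the VideoCapture object so the timestamps of
// the two data streams can be referred to a common reference and compared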
videoCap.enableSensorSync(&sensCap);
// <---- Enable video/sensors synchronization
// ----> Init OpenCV RGB frame
int w,h;
videoCap.getFrameSize(w,h);
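// Scale the preview window according to the capture resolution to keep it at a manageable size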
cv::Size display_resolution(1024, 576);
switch(params.res)
{
default:
case sl_oc::video::RESOLUTION::VGA:
display_resolution.width = w;
display_resolution.height = h;
break;
case sl_oc::video::RESOLUTION::HD720:
display_resolution.width = w*0.6;
display_resolution.height = h*0.6;
break;
case sl_oc::video::RESOLUTION::HD1080:
case sl_oc::video::RESOLUTION::HD2K:
display_resolution.width = w*0.4;
display_resolution.height = h*0.4;
break;
}
int h_data = 70;
cv::Mat frameDisplay(display_resolution.height + h_data, display_resolution.width,CV_8UC3, cv::Scalar(0,0,0));
cv::Mat frameData = frameDisplay(cv::Rect(0,0, display_resolution.width, h_data));
cv::Mat frameBGRDisplay = frameDisplay(cv::Rect(0,h_data, display_resolution.width, display_resolution.height));
cv::Mat frameBGR(h, w, CV_8UC3, cv::Scalar(0,0,0));
// <---- Init OpenCV RGB frame
uint64_t last_timestamp = 0;
float frame_fps=0;
// Infinite grabbing loop
while (1)
{
// ----> Get Video frame
// Get last available frame
const sl_oc::video::Frame frame = videoCap.getLastFrame(1);
// If the frame is valid, we can update it
std::stringstream videoTs;
if(frame.data!=nullptr && frame.timestamp!=last_timestamp)
{
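// Estimate the grab rate from the interval between consecutive frame timestamps (nanoseconds)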
frame_fps = 1e9/static_cast<float>(frame.timestamp-last_timestamp);
last_timestamp = frame.timestamp;
// ----> Conversion from YUV 4:2:2 to BGR for visualization
cv::Mat frameYUV( frame.height, frame.width, CV_8UC2, frame.data);
cv::cvtColor(frameYUV,frameBGR, cv::COLOR_YUV2BGR_YUYV);
// <---- Conversion from YUV 4:2:2 to BGR for visualization
}
// <---- Get Video frame
// ----> Video Debug information
videoTs << std::fixed << std::setprecision(9) << "Video timestamp: " << static_cast<double>(last_timestamp)/1e9<< " sec" ;
if( last_timestamp!=0 )
videoTs << std::fixed << std::setprecision(1) << " [" << frame_fps << " Hz]";
// <---- Video Debug information
// ----> Display frame with info
if(frame.data!=nullptr)
{
frameData.setTo(0);
// Video info
cv::putText( frameData, videoTs.str(), cv::Point(10,20),cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(241,240,236));
// IMU info
imuMutex.lock();
cv::putText( frameData, imuTsStr, cv::Point(10, 35),cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(241,240,236));
// Timestamp offset info
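// The offset is the difference between the current video frame timestamp and the
// timestamp of the last IMU sample flagged as synchronized with a video frame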
std::stringstream offsetStr;
double offset = (static_cast<double>(frame.timestamp)-static_cast<double>(mcu_sync_ts))/1e9;
offsetStr << std::fixed << std::setprecision(9) << std::showpos << "Timestamp offset: " << offset << " sec [video-sensors]";
cv::putText( frameData, offsetStr.str().c_str(), cv::Point(10, 50),cv::FONT_HERSHEY_SIMPLEX, 0.35, cv::Scalar(241,240,236));
// Average timestamp offset info (we wait at least 200 frames to be sure that offset is stable)
if( frame.frame_id>200 )
{
static double sum=0;
static int count=0;
sum += offset;
double avg_offset=sum/(++count);
std::stringstream avgOffsetStr;
avgOffsetStr << std::fixed << std::setprecision(9) << std::showpos << "Avg timestamp offset: " << avg_offset << " sec";
cv::putText( frameData, avgOffsetStr.str().c_str(), cv::Point(10,62),cv::FONT_HERSHEY_SIMPLEX,0.35, cv::Scalar(241, 240,236));
}
// IMU values
cv::putText( frameData, "Inertial sensor data:", cv::Point(display_resolution.width/2,20),cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(241, 240,236));
cv::putText( frameData, imuAccelStr, cv::Point(display_resolution.width/2+15,42),cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(241, 240,236));
cv::putText( frameData, imuGyroStr, cv::Point(display_resolution.width/2+15, 62),cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(241, 240,236));
imuMutex.unlock();
// Resize Image for display
cv::resize(frameBGR, frameBGRDisplay, display_resolution);
// Display image
cv::imshow( "Stream RGB", frameDisplay);
}
// <---- Display frame with info
// ----> Keyboard handling
int key = cv::waitKey(1);
if( key != -1 )
{
// Quit
if(key=='q' || key=='Q' || key==27)
{
sensThreadStop=true;
sensThread.join();
break;
}
}
// <---- Keyboard handling
}
return EXIT_SUCCESS;
}
// Sensor acquisition runs at 400Hz, so it must be executed in a different thread
void getSensorThreadFunc(sl_oc::sensors::SensorCapture* sensCap)
{
// Flag to stop the thread
sensThreadStop = false;
// Previous IMU timestamp to calculate frequency
uint64_t last_imu_ts = 0;
// Infinite data grabbing loop
while(!sensThreadStop)
{
// ----> Get IMU data
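// Wait for the most recent IMU sample (the argument is the acquisition timeout in microseconds)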
const sl_oc::sensors::data::Imu imuData = sensCap->getLastIMUData(2000);
// Process data only if valid
if(imuData.valid == sl_oc::sensors::data::Imu::NEW_VAL ) // Note: add a check on `imuData.sync` to process only data synchronized with the video frames
{
// ----> Data info to be displayed
std::stringstream timestamp;
std::stringstream accel;
std::stringstream gyro;
timestamp << std::fixed << std::setprecision(9) << "IMU timestamp: " << static_cast<double>(imuData.timestamp)/1e9<< " sec" ;
if(last_imu_ts!=0)
timestamp << std::fixed << std::setprecision(1) << " [" << 1e9/static_cast<float>(imuData.timestamp-last_imu_ts) << " Hz]";
last_imu_ts = imuData.timestamp;
accel << std::fixed << std::showpos << std::setprecision(4) << " * Accel: " << imuData.aX << " " << imuData.aY << " " << imuData.aZ << " [m/s^2]";
gyro << std::fixed << std::showpos << std::setprecision(4) << " * Gyro: " << imuData.gX << " " << imuData.gY << " " << imuData.gZ << " [deg/s]";
// <---- Data info to be displayed
// Lock the mutex to avoid overwriting the data while the main thread is displaying it
imuMutex.lock();
imuTsStr = timestamp.str();
imuAccelStr = accel.str();
imuGyroStr = gyro.str();
// ----> Timestamp of the synchronized data
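// `imuData.sync` is set for IMU samples associated with a video frame; the timestamp
// is stored so the main thread can compute the video/sensor offset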
if(imuData.sync)
{
mcu_sync_ts = imuData.timestamp;
}
// <---- Timestamp of the synchronized data
imuMutex.unlock();
}
// <---- Get IMU data
}
}