CameraDepthAI.cpp
/*
Copyright (c) 2010-2021, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
27 
29 #include <rtabmap/core/util2d.h>
30 #include <rtabmap/utilite/UTimer.h>
34 #include <rtabmap/utilite/UFile.h>
35 
36 
namespace rtabmap {

bool CameraDepthAI::available()
{
#ifdef RTABMAP_DEPTHAI
    return true;
#else
    return false;
#endif
}

CameraDepthAI::CameraDepthAI(
    const std::string & mxidOrName,
    int resolution,
    float imageRate,
    const Transform & localTransform) :
        Camera(imageRate, localTransform)
#ifdef RTABMAP_DEPTHAI
        ,
        mxidOrName_(mxidOrName),
        outputMode_(0),
        confThreshold_(200),
        lrcThreshold_(5),
        resolution_(resolution),
        extendedDisparity_(false),
        subpixelFractionalBits_(0),
        compandingWidth_(0),
        useSpecTranslation_(false),
        alphaScaling_(0.0),
        imuPublished_(true),
        publishInterIMU_(false),
        dotIntensity_(0.0),
        floodIntensity_(0.0),
        detectFeatures_(0),
        useHarrisDetector_(false),
        minDistance_(7.0),
        numTargetFeatures_(1000),
        threshold_(0.01),
        nms_(true),
        nmsRadius_(4)
#endif
{
#ifdef RTABMAP_DEPTHAI
    UASSERT(resolution_>=(int)dai::MonoCameraProperties::SensorResolution::THE_720_P && resolution_<=(int)dai::MonoCameraProperties::SensorResolution::THE_1200_P);
#endif
}

CameraDepthAI::~CameraDepthAI()
{
#ifdef RTABMAP_DEPTHAI
    if(device_.get())
    {
        device_->close();
    }
#endif
}

void CameraDepthAI::setOutputMode(int outputMode)
{
#ifdef RTABMAP_DEPTHAI
    outputMode_ = outputMode;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setDepthProfile(int confThreshold, int lrcThreshold)
{
#ifdef RTABMAP_DEPTHAI
    confThreshold_ = confThreshold;
    lrcThreshold_ = lrcThreshold;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setExtendedDisparity(bool extendedDisparity)
{
#ifdef RTABMAP_DEPTHAI
    extendedDisparity_ = extendedDisparity;
    if(extendedDisparity_)
    {
        if(subpixelFractionalBits_>0)
        {
            UWARN("Extended disparity is enabled while subpixel is also enabled, disabling subpixel...");
            subpixelFractionalBits_ = 0;
        }
        if(compandingWidth_>0)
        {
            UWARN("Extended disparity is enabled while companding is also enabled, disabling companding...");
            compandingWidth_ = 0;
        }
    }
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setSubpixelMode(bool enabled, int fractionalBits)
{
#ifdef RTABMAP_DEPTHAI
    UASSERT(fractionalBits>=3 && fractionalBits<=5);
    subpixelFractionalBits_ = enabled?fractionalBits:0;
    if(subpixelFractionalBits_ != 0 && extendedDisparity_)
    {
        UWARN("Subpixel is enabled while extended disparity is also enabled, disabling extended disparity...");
        extendedDisparity_ = false;
    }
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setCompanding(bool enabled, int width)
{
#ifdef RTABMAP_DEPTHAI
    UASSERT(width == 64 || width == 96);
    compandingWidth_ = enabled?width:0;
    if(compandingWidth_ != 0 && extendedDisparity_)
    {
        UWARN("Companding is enabled while extended disparity is also enabled, disabling extended disparity...");
        extendedDisparity_ = false;
    }
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setRectification(bool useSpecTranslation, float alphaScaling, bool enabled)
{
#ifdef RTABMAP_DEPTHAI
    useSpecTranslation_ = useSpecTranslation;
    alphaScaling_ = alphaScaling;
    imagesRectified_ = enabled;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setIMU(bool imuPublished, bool publishInterIMU)
{
#ifdef RTABMAP_DEPTHAI
    imuPublished_ = imuPublished;
    publishInterIMU_ = publishInterIMU;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setIrIntensity(float dotIntensity, float floodIntensity)
{
#ifdef RTABMAP_DEPTHAI
    dotIntensity_ = dotIntensity;
    floodIntensity_ = floodIntensity;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setDetectFeatures(int detectFeatures)
{
#ifdef RTABMAP_DEPTHAI
    detectFeatures_ = detectFeatures;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setBlobPath(const std::string & blobPath)
{
#ifdef RTABMAP_DEPTHAI
    blobPath_ = blobPath;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setGFTTDetector(bool useHarrisDetector, float minDistance, int numTargetFeatures)
{
#ifdef RTABMAP_DEPTHAI
    useHarrisDetector_ = useHarrisDetector;
    minDistance_ = minDistance;
    numTargetFeatures_ = numTargetFeatures;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

void CameraDepthAI::setSuperPointDetector(float threshold, bool nms, int nmsRadius)
{
#ifdef RTABMAP_DEPTHAI
    threshold_ = threshold;
    nms_ = nms;
    nmsRadius_ = nmsRadius;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
}

bool CameraDepthAI::init(const std::string & calibrationFolder, const std::string & cameraName)
{
    UDEBUG("");
#ifdef RTABMAP_DEPTHAI

    std::vector<dai::DeviceInfo> devices = dai::Device::getAllAvailableDevices();
    if(devices.empty() && mxidOrName_.empty())
    {
        UERROR("No DepthAI device found or specified");
        return false;
    }

    if(device_.get())
        device_->close();

    accBuffer_.clear();
    gyroBuffer_.clear();

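    // Device selection (a descriptive note matching the logic below): an empty
    // mxidOrName_ picks the first available device; a value that parses as an
    // MXID is looked up by id; anything else (e.g., an IP address or USB path)
    // is passed through dai::DeviceInfo and assumed reachable as given.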
    bool deviceFound = false;
    dai::DeviceInfo deviceToUse(mxidOrName_);
    if(mxidOrName_.empty())
        std::tie(deviceFound, deviceToUse) = dai::Device::getFirstAvailableDevice();
    else if(!deviceToUse.mxid.empty())
        std::tie(deviceFound, deviceToUse) = dai::Device::getDeviceByMxId(deviceToUse.mxid);
    else
        deviceFound = true;

    if(!deviceFound)
    {
        UERROR("Could not find DepthAI device with MXID or IP/USB name \"%s\", found devices:", mxidOrName_.c_str());
        for(auto& device : devices)
            UERROR("%s", device.toString().c_str());
        return false;
    }

    // look for calibration files
    stereoModel_ = StereoCameraModel();
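    // The resolution index follows dai::MonoCameraProperties::SensorResolution:
    // 0=THE_720_P (1280x720), 1=THE_800_P (1280x800), 2=THE_400_P (640x400),
    // 3=THE_480_P (640x480), 4=THE_1200_P (1920x1200), which is what the nested
    // ternaries below encode.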
    targetSize_ = cv::Size(resolution_<2?1280:resolution_==4?1920:640, resolution_==0?720:resolution_==1?800:resolution_==2?400:resolution_==3?480:1200);

    dai::Pipeline p;
    auto monoLeft = p.create<dai::node::MonoCamera>();
    auto monoRight = p.create<dai::node::MonoCamera>();
    std::shared_ptr<dai::node::StereoDepth> stereo;
    if(imagesRectified_)
        stereo = p.create<dai::node::StereoDepth>();
    std::shared_ptr<dai::node::Camera> colorCam;
    if(outputMode_==2)
    {
        colorCam = p.create<dai::node::Camera>();
        if(!imagesRectified_)
            colorCam->setMeshSource(dai::CameraProperties::WarpMeshSource::NONE);
        if(detectFeatures_)
        {
            UWARN("On-device feature detectors cannot be enabled on color camera input!");
            detectFeatures_ = 0;
        }
    }
    std::shared_ptr<dai::node::IMU> imu;
    if(imuPublished_)
        imu = p.create<dai::node::IMU>();
    std::shared_ptr<dai::node::FeatureTracker> gfttDetector;
    std::shared_ptr<dai::node::ImageManip> manip;
    std::shared_ptr<dai::node::NeuralNetwork> neuralNetwork;
    if(detectFeatures_ == 1)
    {
        gfttDetector = p.create<dai::node::FeatureTracker>();
    }
    else if(detectFeatures_ >= 2)
    {
        if(!blobPath_.empty())
        {
            manip = p.create<dai::node::ImageManip>();
            neuralNetwork = p.create<dai::node::NeuralNetwork>();
        }
        else
        {
            UWARN("Missing MyriadX blob file!");
            detectFeatures_ = 0;
        }
    }

    auto sync = p.create<dai::node::Sync>();
    auto xoutCamera = p.create<dai::node::XLinkOut>();
    std::shared_ptr<dai::node::XLinkOut> xoutIMU;
    if(imuPublished_)
        xoutIMU = p.create<dai::node::XLinkOut>();

    // XLinkOut
    xoutCamera->setStreamName("camera");
    if(imuPublished_)
        xoutIMU->setStreamName("imu");

    monoLeft->setResolution((dai::MonoCameraProperties::SensorResolution)resolution_);
    monoRight->setResolution((dai::MonoCameraProperties::SensorResolution)resolution_);
    monoLeft->setCamera("left");
    monoRight->setCamera("right");
    if(detectFeatures_ >= 2)
    {
        if(this->getImageRate() <= 0 || this->getImageRate() > 15)
        {
            UWARN("On-device SuperPoint or HF-Net enabled, image rate is limited to 15 FPS!");
            monoLeft->setFps(15);
            monoRight->setFps(15);
        }
    }
    else if(this->getImageRate() > 0)
    {
        monoLeft->setFps(this->getImageRate());
        monoRight->setFps(this->getImageRate());
    }

    // StereoDepth
    if(stereo.get())
    {
        if(outputMode_ == 2)
            stereo->setDepthAlign(dai::CameraBoardSocket::CAM_A);
        else
            stereo->setDepthAlign(dai::StereoDepthProperties::DepthAlign::RECTIFIED_LEFT);
        stereo->setExtendedDisparity(extendedDisparity_);
        stereo->setRectifyEdgeFillColor(0); // black, to better see the cutout
        stereo->enableDistortionCorrection(true);
        stereo->setDisparityToDepthUseSpecTranslation(useSpecTranslation_);
        stereo->setDepthAlignmentUseSpecTranslation(useSpecTranslation_);
        if(alphaScaling_ > -1.0f)
            stereo->setAlphaScaling(alphaScaling_);
        stereo->initialConfig.setConfidenceThreshold(confThreshold_);
        stereo->initialConfig.setLeftRightCheck(lrcThreshold_>=0);
        if(lrcThreshold_>=0)
            stereo->initialConfig.setLeftRightCheckThreshold(lrcThreshold_);
        stereo->initialConfig.setMedianFilter(dai::MedianFilter::KERNEL_7x7);
        auto config = stereo->initialConfig.get();
        config.censusTransform.kernelSize = dai::StereoDepthConfig::CensusTransform::KernelSize::KERNEL_7x9;
        config.censusTransform.kernelMask = 0x2AA00AA805540155;
        config.postProcessing.brightnessFilter.maxBrightness = 255;
        stereo->initialConfig.set(config);

        // Link plugins CAM -> STEREO -> XLINK
        monoLeft->out.link(stereo->left);
        monoRight->out.link(stereo->right);
    }

    if(outputMode_ == 2)
    {
        colorCam->setBoardSocket(dai::CameraBoardSocket::CAM_A);
        colorCam->setSize(targetSize_.width, targetSize_.height);
        if(this->getImageRate() > 0)
            colorCam->setFps(this->getImageRate());
        if(alphaScaling_ > -1.0f)
            colorCam->setCalibrationAlpha(alphaScaling_);
    }
    this->setImageRate(0);

    // Using VideoEncoder on PoE devices, Subpixel is not supported
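    // A descriptive note on the branch below: on PoE (TCP/IP) devices the
    // streams are MJPEG-encoded on device to fit Ethernet bandwidth. Since the
    // encoder only takes 8-bit frames, raw disparity is sent instead of 16-bit
    // depth and converted to depth on the host in captureImage().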
    if(deviceToUse.protocol == X_LINK_TCP_IP || mxidOrName_.find(".") != std::string::npos)
    {
        auto leftOrColorEnc = p.create<dai::node::VideoEncoder>();
        auto depthOrRightEnc = p.create<dai::node::VideoEncoder>();
        leftOrColorEnc->setDefaultProfilePreset(monoLeft->getFps(), dai::VideoEncoderProperties::Profile::MJPEG);
        depthOrRightEnc->setDefaultProfilePreset(monoRight->getFps(), dai::VideoEncoderProperties::Profile::MJPEG);
        if(outputMode_ < 2)
        {
            if(imagesRectified_) {
                stereo->rectifiedLeft.link(leftOrColorEnc->input);
            }
            else {
                monoLeft->out.link(leftOrColorEnc->input);
            }
            leftOrColorEnc->bitstream.link(sync->inputs["left"]);
        }
        else
        {
            colorCam->video.link(leftOrColorEnc->input);
            leftOrColorEnc->bitstream.link(sync->inputs["color"]);
        }
        if(imagesRectified_ && outputMode_)
        {
            depthOrRightEnc->setQuality(100);
            stereo->disparity.link(depthOrRightEnc->input);
            depthOrRightEnc->bitstream.link(sync->inputs["depth"]);
        }
        else
        {
            if(imagesRectified_) {
                stereo->rectifiedRight.link(depthOrRightEnc->input);
            }
            else {
                monoRight->out.link(depthOrRightEnc->input);
            }
            depthOrRightEnc->bitstream.link(sync->inputs["right"]);
        }
    }
    else
    {
        if(stereo.get()) {
            stereo->setSubpixel(subpixelFractionalBits_>=3 && subpixelFractionalBits_<=5);
            if(subpixelFractionalBits_>=3 && subpixelFractionalBits_<=5)
                stereo->setSubpixelFractionalBits(subpixelFractionalBits_);
            auto config = stereo->initialConfig.get();
            config.costMatching.enableCompanding = compandingWidth_>0;
            if(compandingWidth_>0)
                config.costMatching.disparityWidth = compandingWidth_==64?dai::StereoDepthConfig::CostMatching::DisparityWidth::DISPARITY_64:dai::StereoDepthConfig::CostMatching::DisparityWidth::DISPARITY_96;
            stereo->initialConfig.set(config);
        }
        if(outputMode_ < 2)
        {
            if(imagesRectified_)
                stereo->rectifiedLeft.link(sync->inputs["left"]);
            else
                monoLeft->out.link(sync->inputs["left"]);
        }
        else
        {
            monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
            monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
            colorCam->video.link(sync->inputs["color"]);
        }
        if(imagesRectified_) {
            if(outputMode_)
                stereo->depth.link(sync->inputs["depth"]);
            else
                stereo->rectifiedRight.link(sync->inputs["right"]);
        }
        else {
            monoRight->out.link(sync->inputs["right"]);
        }
    }

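    // Sync threshold of half the frame period (500/fps ms = (1000/fps)/2):
    // frames whose device timestamps differ by less than half a frame interval
    // are grouped into the same message.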
    sync->setSyncThreshold(std::chrono::milliseconds(int(500 / monoLeft->getFps())));
    sync->out.link(xoutCamera->input);

    if(imuPublished_)
    {
        // enable ACCELEROMETER_RAW and GYROSCOPE_RAW at 200 hz rate
        imu->enableIMUSensor({dai::IMUSensor::ACCELEROMETER_RAW, dai::IMUSensor::GYROSCOPE_RAW}, 200);
        // above this threshold packets will be sent in batch of X, if the host is not blocked and USB bandwidth is available
        imu->setBatchReportThreshold(1);
        // maximum number of IMU packets in a batch, if it's reached device will block sending until host can receive it
        // if lower or equal to batchReportThreshold then the sending is always blocking on device
        // useful to reduce device's CPU load and number of lost packets, if CPU load is high on device side due to multiple nodes
        imu->setMaxBatchReports(10);

        // Link plugins IMU -> XLINK
        imu->out.link(xoutIMU->input);
    }

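    // A descriptive note on the GFTT path below: FeatureTracker's
    // minimumDistanceBetweenFeatures appears to be expressed as a squared pixel
    // distance, hence minDistance_ * minDistance_.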
    if(detectFeatures_ == 1)
    {
        gfttDetector->setHardwareResources(1, 2);
        gfttDetector->initialConfig.setCornerDetector(
            useHarrisDetector_?dai::FeatureTrackerConfig::CornerDetector::Type::HARRIS:dai::FeatureTrackerConfig::CornerDetector::Type::SHI_THOMASI);
        gfttDetector->initialConfig.setNumTargetFeatures(numTargetFeatures_);
        gfttDetector->initialConfig.setMotionEstimator(false);
        auto cfg = gfttDetector->initialConfig.get();
        cfg.featureMaintainer.minimumDistanceBetweenFeatures = minDistance_ * minDistance_;
        gfttDetector->initialConfig.set(cfg);
        if(stereo.get())
            stereo->rectifiedLeft.link(gfttDetector->inputImage);
        else
            monoLeft->out.link(gfttDetector->inputImage);
        gfttDetector->outputFeatures.link(sync->inputs["feat"]);
    }
    else if(detectFeatures_ >= 2)
    {
        manip->setKeepAspectRatio(false);
        manip->setMaxOutputFrameSize(320 * 200);
        manip->initialConfig.setResize(320, 200);
        neuralNetwork->setBlobPath(blobPath_);
        neuralNetwork->setNumInferenceThreads(2);
        neuralNetwork->setNumNCEPerInferenceThread(1);
        neuralNetwork->input.setBlocking(false);
        if(stereo.get())
            stereo->rectifiedLeft.link(manip->inputImage);
        else
            monoLeft->out.link(manip->inputImage);
        manip->out.link(neuralNetwork->input);
        neuralNetwork->out.link(sync->inputs["feat"]);
    }

    device_.reset(new dai::Device(p, deviceToUse));

    UINFO("Device serial: %s", device_->getMxId().c_str());
    UINFO("Available camera sensors: ");
    for(auto& sensor : device_->getCameraSensorNames()) {
        UINFO("Socket: CAM_%c - %s", 'A'+(unsigned char)sensor.first, sensor.second.c_str());
    }

    UINFO("Loading eeprom calibration data");
    dai::CalibrationHandler calibHandler = device_->readCalibration();

    if(!calibrationFolder.empty() && !cameraName.empty() && imagesRectified_)
    {
        UINFO("Flashing camera...");
        if(outputMode_ == 2)
        {
            stereoModel_.setName(cameraName, "rgb", "depth");
        }
        if(stereoModel_.load(calibrationFolder, cameraName, false))
        {
            std::vector<std::vector<float> > intrinsicsLeft(3);
            std::vector<std::vector<float> > intrinsicsRight(3);
            for(int row = 0; row<3; ++row)
            {
                intrinsicsLeft[row].resize(3);
                intrinsicsRight[row].resize(3);
                for(int col = 0; col<3; ++col)
                {
                    intrinsicsLeft[row][col] = stereoModel_.left().K_raw().at<double>(row,col);
                    intrinsicsRight[row][col] = stereoModel_.right().K_raw().at<double>(row,col);
                }
            }

            std::vector<float> distortionsLeft = stereoModel_.left().D_raw();
            std::vector<float> distortionsRight = stereoModel_.right().D_raw();
            std::vector<std::vector<float> > rotationMatrix(3);
            for(int row = 0; row<3; ++row)
            {
                rotationMatrix[row].resize(3);
                for(int col = 0; col<3; ++col)
                {
                    rotationMatrix[row][col] = stereoModel_.stereoTransform()(row,col);
                }
            }

            std::vector<float> translation(3);
            translation[0] = stereoModel_.stereoTransform().x()*100.0f;
            translation[1] = stereoModel_.stereoTransform().y()*100.0f;
            translation[2] = stereoModel_.stereoTransform().z()*100.0f;

            if(outputMode_ == 2)
            {
                // Only set RGB intrinsics
                calibHandler.setCameraIntrinsics(dai::CameraBoardSocket::CAM_A, intrinsicsLeft, stereoModel_.left().imageWidth(), stereoModel_.left().imageHeight());
                calibHandler.setDistortionCoefficients(dai::CameraBoardSocket::CAM_A, distortionsLeft);
                std::vector<float> specTranslation = calibHandler.getCameraTranslationVector(dai::CameraBoardSocket::CAM_A, dai::CameraBoardSocket::CAM_C, true);
                calibHandler.setCameraExtrinsics(dai::CameraBoardSocket::CAM_A, dai::CameraBoardSocket::CAM_C, rotationMatrix, translation, specTranslation);
            }
            else
            {
                calibHandler.setCameraIntrinsics(dai::CameraBoardSocket::CAM_B, intrinsicsLeft, stereoModel_.left().imageWidth(), stereoModel_.left().imageHeight());
                calibHandler.setDistortionCoefficients(dai::CameraBoardSocket::CAM_B, distortionsLeft);
                calibHandler.setCameraIntrinsics(dai::CameraBoardSocket::CAM_C, intrinsicsRight, stereoModel_.right().imageWidth(), stereoModel_.right().imageHeight());
                calibHandler.setDistortionCoefficients(dai::CameraBoardSocket::CAM_C, distortionsRight);
                std::vector<float> specTranslation = calibHandler.getCameraTranslationVector(dai::CameraBoardSocket::CAM_B, dai::CameraBoardSocket::CAM_C, true);
                calibHandler.setCameraExtrinsics(dai::CameraBoardSocket::CAM_B, dai::CameraBoardSocket::CAM_C, rotationMatrix, translation, specTranslation);
            }

            try {
                UINFO("Flashing camera with calibration from %s with camera name %s", calibrationFolder.c_str(), cameraName.c_str());
                if(ULogger::level() <= ULogger::kInfo)
                {
                    std::cout << "K left: " << stereoModel_.left().K_raw() << std::endl;
                    std::cout << "K right: " << stereoModel_.right().K_raw() << std::endl;
                    std::cout << "D left: " << stereoModel_.left().D_raw() << std::endl;
                    std::cout << "D right: " << stereoModel_.right().D_raw() << std::endl;
                    std::cout << "Extrinsics: " << stereoModel_.stereoTransform() << std::endl;
                    std::cout << "Expected K with rectification_alpha=0: " << stereoModel_.left().K()*(double(targetSize_.width)/double(stereoModel_.left().imageWidth())) << std::endl;
                }
                device_->flashCalibration2(calibHandler);
                UINFO("Closing device...");
                device_->close();
                UINFO("Restarting pipeline...");
                device_.reset(new dai::Device(p, deviceToUse));
            }
            catch(const std::runtime_error & e) {
                UERROR("Failed flashing calibration: %s", e.what());
            }
        }
        else
        {
            UERROR("Failed loading calibration from %s with camera name %s", calibrationFolder.c_str(), cameraName.c_str());
        }

        // Reload calibration
        calibHandler = device_->readCalibration();
    }

    auto eeprom = calibHandler.getEepromData();
    UINFO("Product name: %s, board name: %s", eeprom.productName.c_str(), eeprom.boardName.c_str());

    auto cameraId = outputMode_<2?dai::CameraBoardSocket::CAM_B:dai::CameraBoardSocket::CAM_A;
    cv::Mat cameraMatrix, distCoeffs, newCameraMatrix;

    std::vector<std::vector<float> > matrix = calibHandler.getCameraIntrinsics(cameraId, targetSize_.width, targetSize_.height);
    cameraMatrix = (cv::Mat_<double>(3,3) <<
        matrix[0][0], matrix[0][1], matrix[0][2],
        matrix[1][0], matrix[1][1], matrix[1][2],
        matrix[2][0], matrix[2][1], matrix[2][2]);

    std::vector<float> coeffs = calibHandler.getDistortionCoefficients(cameraId);
    if(calibHandler.getDistortionModel(cameraId) == dai::CameraModel::Perspective)
        distCoeffs = (cv::Mat_<double>(1,8) << coeffs[0], coeffs[1], coeffs[2], coeffs[3], coeffs[4], coeffs[5], coeffs[6], coeffs[7]);

    if(alphaScaling_>-1.0f)
        newCameraMatrix = cv::getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, targetSize_, alphaScaling_);
    else
        newCameraMatrix = cameraMatrix;

    double fx = newCameraMatrix.at<double>(0, 0);
    double fy = newCameraMatrix.at<double>(1, 1);
    double cx = newCameraMatrix.at<double>(0, 2);
    double cy = newCameraMatrix.at<double>(1, 2);
    UINFO("fx=%f fy=%f cx=%f cy=%f (target size = %dx%d)", fx, fy, cx, cy, targetSize_.width, targetSize_.height);
    if(outputMode_ == 2) {
        stereoModel_ = StereoCameraModel(device_->getDeviceName(), fx, fy, cx, cy, 0, this->getLocalTransform(), targetSize_);
    }
    else {
        double baseline = calibHandler.getBaselineDistance(dai::CameraBoardSocket::CAM_C, dai::CameraBoardSocket::CAM_B, false)/100.0;
        UINFO("baseline=%f", baseline);
        stereoModel_ = StereoCameraModel(device_->getDeviceName(), fx, fy, cx, cy, outputMode_==0?baseline:0, this->getLocalTransform()*Transform(-calibHandler.getBaselineDistance(dai::CameraBoardSocket::CAM_A)/100.0, 0, 0), targetSize_);
    }

    if(imuPublished_)
    {
        // Cannot test the following, I get "IMU calibration data is not available on device yet." with my camera
        // Update: now (as of March 6, 2022) it crashes in "dai::CalibrationHandler::getImuToCameraExtrinsics(dai::CameraBoardSocket, bool)"
        //matrix = calibHandler.getImuToCameraExtrinsics(dai::CameraBoardSocket::CAM_B);
        //imuLocalTransform_ = Transform(
        //    matrix[0][0], matrix[0][1], matrix[0][2], matrix[0][3],
        //    matrix[1][0], matrix[1][1], matrix[1][2], matrix[1][3],
        //    matrix[2][0], matrix[2][1], matrix[2][2], matrix[2][3]);
        if(eeprom.boardName == "OAK-D" ||
           eeprom.boardName == "BW1098OBC")
        {
            imuLocalTransform_ = Transform(
                0, -1, 0, 0.0525,
                1, 0, 0, 0.013662,
                0, 0, 1, 0);
        }
        else if(eeprom.boardName == "DM9098")
        {
            imuLocalTransform_ = Transform(
                0, 1, 0, 0.037945,
                1, 0, 0, 0.00079,
                0, 0, -1, 0);
        }
        else if(eeprom.boardName == "NG2094")
        {
            imuLocalTransform_ = Transform(
                0, 1, 0, 0.0374,
                1, 0, 0, 0.00176,
                0, 0, -1, 0);
        }
        else if(eeprom.boardName == "NG9097")
        {
            imuLocalTransform_ = Transform(
                0, 1, 0, 0.04,
                1, 0, 0, 0.020265,
                0, 0, -1, 0);
        }
        else
        {
            UWARN("Unknown boardName (%s)! Disabling IMU!", eeprom.boardName.c_str());
            imuPublished_ = false;
        }
    }
    else
    {
        UINFO("IMU disabled");
    }

    cameraQueue_ = device_->getOutputQueue("camera", 8, false);
    if(imuPublished_)
    {
        imuLocalTransform_ = this->getLocalTransform() * imuLocalTransform_;
        UINFO("IMU local transform = %s", imuLocalTransform_.prettyPrint().c_str());
        device_->getOutputQueue("imu", 50, false)->addCallback([this](const std::shared_ptr<dai::ADatatype> data) {
            auto imuData = std::dynamic_pointer_cast<dai::IMUData>(data);
            auto imuPackets = imuData->packets;

            for(auto& imuPacket : imuPackets)
            {
                auto& acceleroValues = imuPacket.acceleroMeter;
                auto& gyroValues = imuPacket.gyroscope;
                double accStamp = std::chrono::duration<double>(acceleroValues.getTimestampDevice().time_since_epoch()).count();
                double gyroStamp = std::chrono::duration<double>(gyroValues.getTimestampDevice().time_since_epoch()).count();

                if(publishInterIMU_)
                {
                    IMU imu(cv::Vec3f(gyroValues.x, gyroValues.y, gyroValues.z), cv::Mat::eye(3,3,CV_64FC1),
                        cv::Vec3f(acceleroValues.x, acceleroValues.y, acceleroValues.z), cv::Mat::eye(3,3,CV_64FC1),
                        imuLocalTransform_);
                    UEventsManager::post(new IMUEvent(imu, (accStamp+gyroStamp)/2));
                }
                else
                {
                    UScopeMutex lock(imuMutex_);
                    accBuffer_.emplace_hint(accBuffer_.end(), accStamp, cv::Vec3f(acceleroValues.x, acceleroValues.y, acceleroValues.z));
                    gyroBuffer_.emplace_hint(gyroBuffer_.end(), gyroStamp, cv::Vec3f(gyroValues.x, gyroValues.y, gyroValues.z));
                }
            }
        });
    }

    if(!device_->getIrDrivers().empty())
    {
        UINFO("Setting IR intensity");
        device_->setIrLaserDotProjectorIntensity(dotIntensity_);
        device_->setIrFloodLightIntensity(floodIntensity_);
    }
    else if(dotIntensity_ > 0 || floodIntensity_ > 0)
    {
        UWARN("No IR drivers were detected! IR intensity cannot be set.");
    }

    uSleep(2000); // avoid bad frames on start

    return true;
#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
    return false;
}

bool CameraDepthAI::isCalibrated() const
{
#ifdef RTABMAP_DEPTHAI
    return outputMode_ == 0?stereoModel_.isValidForProjection():stereoModel_.left().isValidForProjection();
#else
    return false;
#endif
}

std::string CameraDepthAI::getSerial() const
{
#ifdef RTABMAP_DEPTHAI
    return device_->getMxId();
#endif
    return "";
}

SensorData CameraDepthAI::captureImage(SensorCaptureInfo * info)
{
    SensorData data;
#ifdef RTABMAP_DEPTHAI

    auto messageGroup = cameraQueue_->get<dai::MessageGroup>();
    auto rectifLeftOrColor = messageGroup->get<dai::ImgFrame>(outputMode_<2?"left":"color");
    auto rectifRightOrDepth = messageGroup->get<dai::ImgFrame>(imagesRectified_ && outputMode_?"depth":"right");

    cv::Mat leftOrColor, depthOrRight;
    if(device_->getDeviceInfo().protocol == X_LINK_TCP_IP || mxidOrName_.find(".") != std::string::npos)
    {
        leftOrColor = cv::imdecode(rectifLeftOrColor->getData(), cv::IMREAD_ANYCOLOR);
        depthOrRight = cv::imdecode(rectifRightOrDepth->getData(), cv::IMREAD_GRAYSCALE);
        if(imagesRectified_ && outputMode_)
        {
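            // Recover depth from the MJPEG-decoded 8-bit disparity (see the PoE
            // note in init()): depth = baseline * fx / disparity. StereoCameraModel
            // stores Tx() = -baseline*fx, so depth in mm is -Tx()*1000/disparity.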
            cv::Mat disp;
            depthOrRight.convertTo(disp, CV_16UC1);
            cv::divide(-stereoModel_.right().Tx() * 1000, disp, depthOrRight);
        }
    }
    else
    {
        leftOrColor = rectifLeftOrColor->getCvFrame();
        depthOrRight = rectifRightOrDepth->getCvFrame();
    }

    double stamp = std::chrono::duration<double>(rectifLeftOrColor->getTimestampDevice(dai::CameraExposureOffset::MIDDLE).time_since_epoch()).count();
    if(imagesRectified_ && outputMode_)
        data = SensorData(leftOrColor, depthOrRight, stereoModel_.left(), this->getNextSeqID(), stamp);
    else
        data = SensorData(leftOrColor, depthOrRight, stereoModel_, this->getNextSeqID(), stamp);

    if(imuPublished_ && !publishInterIMU_)
    {
        cv::Vec3d acc, gyro;
        std::map<double, cv::Vec3f>::const_iterator iterA, iterB;

        imuMutex_.lock();
        while(accBuffer_.empty() || gyroBuffer_.empty() || accBuffer_.rbegin()->first < stamp || gyroBuffer_.rbegin()->first < stamp)
        {
            imuMutex_.unlock();
            uSleep(1);
            imuMutex_.lock();
        }

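        // Linearly interpolate the IMU samples bracketing the image stamp:
        // given samples (tA, vA) and (tB, vB) with tA <= stamp <= tB,
        // t = (stamp - tA) / (tB - tA) and value = vA + t * (vB - vA).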
        // acc
        iterB = accBuffer_.lower_bound(stamp);
        iterA = iterB;
        if(iterA != accBuffer_.begin())
            iterA = --iterA;
        if(iterA == iterB || stamp == iterB->first)
        {
            acc = iterB->second;
        }
        else if(stamp > iterA->first && stamp < iterB->first)
        {
            float t = (stamp-iterA->first) / (iterB->first-iterA->first);
            acc = iterA->second + t*(iterB->second - iterA->second);
        }
        accBuffer_.erase(accBuffer_.begin(), iterB);

        // gyro
        iterB = gyroBuffer_.lower_bound(stamp);
        iterA = iterB;
        if(iterA != gyroBuffer_.begin())
            iterA = --iterA;
        if(iterA == iterB || stamp == iterB->first)
        {
            gyro = iterB->second;
        }
        else if(stamp > iterA->first && stamp < iterB->first)
        {
            float t = (stamp-iterA->first) / (iterB->first-iterA->first);
            gyro = iterA->second + t*(iterB->second - iterA->second);
        }
        gyroBuffer_.erase(gyroBuffer_.begin(), iterB);

        imuMutex_.unlock();
        data.setIMU(IMU(gyro, cv::Mat::eye(3, 3, CV_64FC1), acc, cv::Mat::eye(3, 3, CV_64FC1), imuLocalTransform_));
    }

    if(detectFeatures_ == 1)
    {
        auto features = messageGroup->get<dai::TrackedFeatures>("feat")->trackedFeatures;
        std::vector<cv::KeyPoint> keypoints;
        for(auto& feature : features)
            keypoints.emplace_back(cv::KeyPoint(feature.position.x, feature.position.y, 3));
        data.setFeatures(keypoints, std::vector<cv::Point3f>(), cv::Mat());
    }
    else if(detectFeatures_ >= 2)
    {
        auto features = messageGroup->get<dai::NNData>("feat");
        std::vector<float> scores_dense, local_descriptor_map, global_descriptor;
        if(detectFeatures_ == 2)
        {
            scores_dense = features->getLayerFp16("heatmap");
            local_descriptor_map = features->getLayerFp16("desc");
        }
        else if(detectFeatures_ == 3)
        {
            scores_dense = features->getLayerFp16("pred/local_head/detector/Squeeze");
            local_descriptor_map = features->getLayerFp16("pred/local_head/descriptor/transpose");
            global_descriptor = features->getLayerFp16("pred/global_head/l2_normalize_1");
        }

        cv::Mat scores(200, 320, CV_32FC1, scores_dense.data());
        cv::resize(scores, scores, targetSize_, 0, 0, cv::INTER_CUBIC);
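        // Non-maximum suppression on the upscaled score map (a descriptive
        // note): a pixel survives only if it is the maximum within an
        // nmsRadius_ window; the two refinement passes below re-admit maxima
        // that were suppressed only by a stronger neighbor that was itself
        // removed, similar to SuperPoint's iterative "simple NMS".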
        if(nms_)
        {
            cv::Mat dilated_scores(targetSize_, CV_32FC1);
            cv::dilate(scores, dilated_scores, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(nmsRadius_*2+1, nmsRadius_*2+1)));
            cv::Mat max_mask = scores == dilated_scores;
            cv::dilate(scores, dilated_scores, cv::Mat());
            cv::Mat max_mask_r1 = scores == dilated_scores;
            cv::Mat supp_mask(targetSize_, CV_8UC1);
            for(size_t i=0; i<2; i++)
            {
                cv::dilate(max_mask, supp_mask, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(nmsRadius_*2+1, nmsRadius_*2+1)));
                cv::Mat supp_scores = scores.clone();
                supp_scores.setTo(0, supp_mask);
                cv::dilate(supp_scores, dilated_scores, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(nmsRadius_*2+1, nmsRadius_*2+1)));
                cv::Mat new_max_mask = cv::Mat::zeros(targetSize_, CV_8UC1);
                cv::bitwise_not(supp_mask, supp_mask);
                cv::bitwise_and(supp_scores == dilated_scores, supp_mask, new_max_mask, max_mask_r1);
                cv::bitwise_or(max_mask, new_max_mask, max_mask);
            }
            cv::bitwise_not(max_mask, supp_mask);
            scores.setTo(0, supp_mask);
        }

        std::vector<cv::Point> kpts;
        cv::findNonZero(scores > threshold_, kpts);
        std::vector<cv::KeyPoint> keypoints;
        for(auto& kpt : kpts)
        {
            float response = scores.at<float>(kpt);
            keypoints.emplace_back(cv::KeyPoint(kpt, 8, -1, response));
        }

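        // Descriptor sampling (a descriptive note): the network outputs a
        // coarse 40x25 grid of 256-float descriptors (1/8 of the 320x200
        // input). Each keypoint is mapped into grid coordinates and its
        // descriptor is bilinearly interpolated with cv::remap, then
        // L2-normalized row by row.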
        cv::Mat coarse_desc(25, 40, CV_32FC(256), local_descriptor_map.data());
        if(detectFeatures_ == 2)
            coarse_desc.forEach<cv::Vec<float, 256>>([&](cv::Vec<float, 256>& descriptor, const int position[]) -> void {
                cv::normalize(descriptor, descriptor);
            });
        cv::Mat mapX(keypoints.size(), 1, CV_32FC1);
        cv::Mat mapY(keypoints.size(), 1, CV_32FC1);
        for(size_t i=0; i<keypoints.size(); ++i)
        {
            mapX.at<float>(i) = (keypoints[i].pt.x - (targetSize_.width-1)/2) * 40/targetSize_.width + (40-1)/2;
            mapY.at<float>(i) = (keypoints[i].pt.y - (targetSize_.height-1)/2) * 25/targetSize_.height + (25-1)/2;
        }
        cv::Mat map1, map2, descriptors;
        cv::convertMaps(mapX, mapY, map1, map2, CV_16SC2);
        cv::remap(coarse_desc, descriptors, map1, map2, cv::INTER_LINEAR);
        descriptors.forEach<cv::Vec<float, 256>>([&](cv::Vec<float, 256>& descriptor, const int position[]) -> void {
            cv::normalize(descriptor, descriptor);
        });
        descriptors = descriptors.reshape(1);

        data.setFeatures(keypoints, std::vector<cv::Point3f>(), descriptors);
        if(detectFeatures_ == 3)
            data.addGlobalDescriptor(GlobalDescriptor(1, cv::Mat(1, global_descriptor.size(), CV_32FC1, global_descriptor.data()).clone()));
    }

#else
    UERROR("CameraDepthAI: RTAB-Map is not built with depthai-core support!");
#endif
    return data;
}

} // namespace rtabmap
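
// Example usage (a minimal sketch, not part of this file's build; the mode
// values follow the logic above: 0=rectified stereo pair, 1=rectified
// left+depth, 2=color+depth):
//   rtabmap::CameraDepthAI camera("", 1 /*THE_800_P*/, 30.0f);
//   camera.setOutputMode(1);
//   camera.setIMU(true, false);
//   if(camera.init(".", "oakd"))
//   {
//       rtabmap::SensorData frame = camera.takeImage();
//   }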