CameraFreenect2.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include <rtabmap/core/camera/CameraFreenect2.h>
#include <rtabmap/utilite/UStl.h>
#include <rtabmap/utilite/UMath.h>
#include <rtabmap/utilite/UTimer.h>
#include <rtabmap/core/util2d.h>
#include <opencv2/imgproc/types_c.h>

#ifdef RTABMAP_FREENECT2
#include <libfreenect2/libfreenect2.hpp>
#include <libfreenect2/frame_listener_impl.h>
#include <libfreenect2/registration.h>
#include <libfreenect2/packet_pipeline.h>
#include <libfreenect2/config.h>
#endif

namespace rtabmap
{

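// Returns true only when RTAB-Map was compiled with libfreenect2 support (RTABMAP_FREENECT2).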
bool CameraFreenect2::available()
{
#ifdef RTABMAP_FREENECT2
	return true;
#else
	return false;
#endif
}

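// The constructor only allocates the libfreenect2 context and the synchronized frame
// listener; the device itself is opened in init(). The capture type selects which
// streams the listener subscribes to: color+IR (calibration), IR+depth, or color+depth
// for the registered modes.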
CameraFreenect2::CameraFreenect2(
		int deviceId,
		Type type,
		float imageRate,
		const Transform & localTransform,
		float minDepth,
		float maxDepth,
		bool bilateralFiltering,
		bool edgeAwareFiltering,
		bool noiseFiltering,
		const std::string & pipelineName) :
	Camera(imageRate, localTransform)
#ifdef RTABMAP_FREENECT2
	,
	deviceId_(deviceId),
	type_(type),
	freenect2_(0),
	dev_(0),
	listener_(0),
	reg_(0),
	minKinect2Depth_(minDepth),
	maxKinect2Depth_(maxDepth),
	bilateralFiltering_(bilateralFiltering),
	edgeAwareFiltering_(edgeAwareFiltering),
	noiseFiltering_(noiseFiltering),
	pipelineName_(pipelineName)
#endif
{
#ifdef RTABMAP_FREENECT2
	UASSERT(minKinect2Depth_ < maxKinect2Depth_ && minKinect2Depth_>0 && maxKinect2Depth_>0 && maxKinect2Depth_<=65.535f);
	freenect2_ = new libfreenect2::Freenect2();
	switch(type_)
	{
	case kTypeColorIR:
		listener_ = new libfreenect2::SyncMultiFrameListener(libfreenect2::Frame::Color | libfreenect2::Frame::Ir);
		break;
	case kTypeIRDepth:
		listener_ = new libfreenect2::SyncMultiFrameListener(libfreenect2::Frame::Ir | libfreenect2::Frame::Depth);
		break;
	case kTypeColor2DepthSD:
	case kTypeDepth2ColorHD:
	case kTypeDepth2ColorSD:
	default:
		listener_ = new libfreenect2::SyncMultiFrameListener(libfreenect2::Frame::Color | libfreenect2::Frame::Depth);
		break;
	}
#endif
}

CameraFreenect2::~CameraFreenect2()
{
#ifdef RTABMAP_FREENECT2
	UDEBUG("");
	if(dev_)
	{
		dev_->stop();
		dev_->close();
		//deleted in freenect2_ destructor (Freenect2Impl::clearDevices())
	}
	delete listener_;
	delete reg_;
	delete freenect2_;
	UDEBUG("");
#endif
}

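// Selects the libfreenect2 depth packet processing pipeline by name ("gl", "cuda",
// "cudakde", "cl", "clkde" or "cpu"). Only backends enabled when libfreenect2 was built
// are considered; if the requested name is not available, the first available backend
// (GL, then CUDA, then OpenCL, then CPU) is used.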
#ifdef RTABMAP_FREENECT2
libfreenect2::PacketPipeline *createPacketPipelineByName(const std::string & name)
{
	std::string availablePipelines;
#if defined(LIBFREENECT2_WITH_OPENGL_SUPPORT)
	availablePipelines += "gl ";
	if (name == "gl")
	{
		UINFO("Using 'gl' pipeline.");
		return new libfreenect2::OpenGLPacketPipeline();
	}
#endif
#if defined(LIBFREENECT2_WITH_CUDA_SUPPORT)
	availablePipelines += "cuda cudakde ";
	if (name == "cuda")
	{
		UINFO("Using 'cuda' pipeline.");
		return new libfreenect2::CudaPacketPipeline();
	}
	if (name == "cudakde")
	{
		UINFO("Using 'cudakde' pipeline.");
		return new libfreenect2::CudaKdePacketPipeline();
	}
#endif
#if defined(LIBFREENECT2_WITH_OPENCL_SUPPORT)
	availablePipelines += "cl clkde ";
	if (name == "cl")
	{
		UINFO("Using 'cl' pipeline.");
		return new libfreenect2::OpenCLPacketPipeline();
	}
	if (name == "clkde")
	{
		UINFO("Using 'clkde' pipeline.");
		return new libfreenect2::OpenCLKdePacketPipeline();
	}
#endif
	availablePipelines += "cpu";
	if (name == "cpu")
	{
		UINFO("Using 'cpu' pipeline.");
		return new libfreenect2::CpuPacketPipeline();
	}

	if (!name.empty())
	{
		UERROR("'%s' pipeline is not available. Available pipelines are: \"%s\". Default one is used instead (first one in the list).",
				name.c_str(), availablePipelines.c_str());
	}

	// create default pipeline
#if defined(LIBFREENECT2_WITH_OPENGL_SUPPORT)
	UINFO("Using 'gl' pipeline.");
	return new libfreenect2::OpenGLPacketPipeline();
#elif defined(LIBFREENECT2_WITH_CUDA_SUPPORT)
	UINFO("Using 'cuda' pipeline.");
	return new libfreenect2::CudaPacketPipeline();
#elif defined(LIBFREENECT2_WITH_OPENCL_SUPPORT)
	UINFO("Using 'cl' pipeline.");
	return new libfreenect2::OpenCLPacketPipeline();
#else
	UINFO("Using 'cpu' pipeline.");
	return new libfreenect2::CpuPacketPipeline();
#endif
}
#endif

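// Opens the Kinect v2, applies the depth processing configuration (bilateral and
// edge-aware filtering, min/max depth range), starts streaming and creates the factory
// registration object. If a custom stereo ("depth"/"rgb") calibration is found in
// calibrationFolder, it is loaded and used instead of the factory parameters.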
bool CameraFreenect2::init(const std::string & calibrationFolder, const std::string & cameraName)
{
#ifdef RTABMAP_FREENECT2
	if(dev_)
	{
		dev_->stop();
		dev_->close();
		dev_ = 0; //deleted in freenect2_ destructor (Freenect2Impl::clearDevices())
	}

	if(reg_)
	{
		delete reg_;
		reg_ = 0;
	}

	libfreenect2::PacketPipeline * pipeline = createPacketPipelineByName(pipelineName_);

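	// A deviceId <= 0 opens the first Kinect v2 found, otherwise the device at that
	// enumeration index is opened. Ownership of the packet pipeline is transferred to
	// the opened device in both cases.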
	if(deviceId_ <= 0)
	{
		UDEBUG("Opening default device...");
		dev_ = freenect2_->openDefaultDevice(pipeline);
		pipeline = 0;// pipeline deleted in dev_ (Freenect2DeviceImpl::~Freenect2DeviceImpl())
	}
	else
	{
		UDEBUG("Opening device ID=%d...", deviceId_);
		dev_ = freenect2_->openDevice(deviceId_, pipeline);
		pipeline = 0;// pipeline deleted in dev_ (Freenect2DeviceImpl::~Freenect2DeviceImpl())
	}

	if(dev_)
	{
		//default
		//MinDepth(0.5f),
		//MaxDepth(4.5f),
		//EnableBilateralFilter(true),
		//EnableEdgeAwareFilter(true)
		libfreenect2::Freenect2Device::Config config;
		config.EnableBilateralFilter = bilateralFiltering_;
		config.EnableEdgeAwareFilter = edgeAwareFiltering_;
		config.MinDepth = minKinect2Depth_;
		config.MaxDepth = maxKinect2Depth_;
		dev_->setConfiguration(config);

		dev_->setColorFrameListener(listener_);
		dev_->setIrAndDepthFrameListener(listener_);

		dev_->start();

		UINFO("CameraFreenect2: device serial: %s", dev_->getSerialNumber().c_str());
		UINFO("CameraFreenect2: device firmware: %s", dev_->getFirmwareVersion().c_str());

		//default registration params
		libfreenect2::Freenect2Device::IrCameraParams depthParams = dev_->getIrCameraParams();
		libfreenect2::Freenect2Device::ColorCameraParams colorParams = dev_->getColorCameraParams();
		reg_ = new libfreenect2::Registration(depthParams, colorParams);

		// look for calibration files
		stereoModel_ = StereoCameraModel();
		if(!calibrationFolder.empty())
		{
			std::string calibrationName = dev_->getSerialNumber();
			if(!cameraName.empty())
			{
				calibrationName = cameraName;
			}
			stereoModel_.setName(calibrationName, "depth", "rgb");
			if(!stereoModel_.load(calibrationFolder, calibrationName, false))
			{
				UWARN("Missing calibration files for camera \"%s\" in \"%s\" folder, default calibration "
					"is used. Note that from version 0.11.10, calibration suffixes for Freenect2 driver have "
					"changed from \"_left\"->\"_depth\" and \"_right\"->\"_rgb\". You can safely rename "
					"the calibration files to avoid recalibrating.",
					calibrationName.c_str(), calibrationFolder.c_str());
			}
			else
			{
				UINFO("Custom calibration files for \"%s\" were found in \"%s\" folder. To use "
					"factory calibration, remove the corresponding files from that directory.", calibrationName.c_str(), calibrationFolder.c_str());

				if(type_==kTypeColor2DepthSD)
				{
					UWARN("Freenect2: When using custom calibration file, type "
						"kTypeColor2DepthSD is not supported. kTypeDepth2ColorSD is used instead...");
					type_ = kTypeDepth2ColorSD;
				}

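				// With a custom calibration, the color (right) projection is halved for the SD
				// output and the depth (left) model is rescaled to the color image size, so that
				// both camera models describe the same geometry before building the rectification maps.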
				// downscale color image by 2
				cv::Mat colorP = stereoModel_.right().P();
				cv::Size colorSize = stereoModel_.right().imageSize();
				if(type_ == kTypeDepth2ColorSD)
				{
					colorP.at<double>(0,0)/=2.0f; //fx
					colorP.at<double>(1,1)/=2.0f; //fy
					colorP.at<double>(0,2)/=2.0f; //cx
					colorP.at<double>(1,2)/=2.0f; //cy
					colorSize.width/=2;
					colorSize.height/=2;
				}
				cv::Mat depthP = stereoModel_.left().P();
				cv::Size depthSize = stereoModel_.left().imageSize();
				float ratioY = float(colorSize.height)/float(depthSize.height);
				float ratioX = float(colorSize.width)/float(depthSize.width);
				depthP.at<double>(0,0)*=ratioX; //fx
				depthP.at<double>(1,1)*=ratioY; //fy
				depthP.at<double>(0,2)*=ratioX; //cx
				depthP.at<double>(1,2)*=ratioY; //cy
				depthSize.width*=ratioX;
				depthSize.height*=ratioY;
				const CameraModel & l = stereoModel_.left();
				const CameraModel & r = stereoModel_.right();
				stereoModel_ = StereoCameraModel(stereoModel_.name(),
						depthSize, l.K_raw(), l.D_raw(), l.R(), depthP,
						colorSize, r.K_raw(), r.D_raw(), r.R(), colorP,
						stereoModel_.R(), stereoModel_.T(), stereoModel_.E(), stereoModel_.F());
				stereoModel_.initRectificationMap();
			}
		}

		return true;
	}
	else
	{
		UERROR("CameraFreenect2: no device connected or failure opening the default one! Note that rtabmap should link on libusb of libfreenect2. "
			"Tip, before starting rtabmap: \"$ export LD_LIBRARY_PATH=~/libfreenect2/depends/libusb/lib:$LD_LIBRARY_PATH\"");
	}
#else
	UERROR("CameraFreenect2: RTAB-Map is not built with Freenect2 support!");
#endif
	return false;
}

bool CameraFreenect2::isCalibrated() const
{
	return true;
}

std::string CameraFreenect2::getSerial() const
{
#ifdef RTABMAP_FREENECT2
	if(dev_)
	{
		return dev_->getSerialNumber();
	}
#endif
	return "";
}

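// Waits for a synchronized set of frames and converts them into a SensorData: an RGB
// (or IR rendered as grayscale) image plus a 16-bit depth image in millimetres, with
// rectification, registration and filtering applied according to the capture type.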
SensorData CameraFreenect2::captureImage(CameraInfo * info)
{
	SensorData data;
#ifdef RTABMAP_FREENECT2
	if(dev_ && listener_)
	{
		libfreenect2::FrameMap frames;
#ifndef LIBFREENECT2_THREADING_STDLIB
		UDEBUG("Waiting for new frames... If it is stalled here, rtabmap should link on libusb of libfreenect2. "
			"Tip, before starting rtabmap: \"$ export LD_LIBRARY_PATH=~/libfreenect2/depends/libusb/lib:$LD_LIBRARY_PATH\"");
		listener_->waitForNewFrame(frames);
#else
		if(!listener_->waitForNewFrame(frames, 1000))
		{
			UWARN("CameraFreenect2: Failed to get frames! rtabmap should link on libusb of libfreenect2. "
				"Tip, before starting rtabmap: \"$ export LD_LIBRARY_PATH=~/libfreenect2/depends/libusb/lib:$LD_LIBRARY_PATH\"");
		}
		else
#endif
		{
			double stamp = UTimer::now();
			libfreenect2::Frame *rgbFrame = 0;
			libfreenect2::Frame *irFrame = 0;
			libfreenect2::Frame *depthFrame = 0;

			switch(type_)
			{
			case kTypeColorIR: //used for calibration
				rgbFrame = uValue(frames, libfreenect2::Frame::Color, (libfreenect2::Frame*)0);
				irFrame = uValue(frames, libfreenect2::Frame::Ir, (libfreenect2::Frame*)0);
				break;
			case kTypeIRDepth:
				irFrame = uValue(frames, libfreenect2::Frame::Ir, (libfreenect2::Frame*)0);
				depthFrame = uValue(frames, libfreenect2::Frame::Depth, (libfreenect2::Frame*)0);
				break;
			case kTypeColor2DepthSD:
			case kTypeDepth2ColorSD:
			case kTypeDepth2ColorHD:
			case kTypeDepth2ColorHD2:
			default:
				rgbFrame = uValue(frames, libfreenect2::Frame::Color, (libfreenect2::Frame*)0);
				depthFrame = uValue(frames, libfreenect2::Frame::Depth, (libfreenect2::Frame*)0);
				break;
			}

			cv::Mat rgb, depth;
			float fx=0,fy=0,cx=0,cy=0;
			if(irFrame && depthFrame)
			{
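				// Calibration / IR modes: the 16-bit IR intensities (clamped at 0x7FFF) are scaled
				// linearly to an 8-bit grayscale image used in place of the RGB image, and the float
				// depth frame (millimetres) is converted to 16-bit.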
				cv::Mat irMat((int)irFrame->height, (int)irFrame->width, CV_32FC1, irFrame->data);
				//convert to grayscale
				float maxIr_ = 0x7FFF;
				float minIr_ = 0x0;
				const float factor = 255.0f / float((maxIr_ - minIr_));
				rgb = cv::Mat(irMat.rows, irMat.cols, CV_8UC1);
				for(int i=0; i<irMat.rows; ++i)
				{
					for(int j=0; j<irMat.cols; ++j)
					{
						rgb.at<unsigned char>(i, j) = (unsigned char)std::min(float(std::max(irMat.at<float>(i,j) - minIr_, 0.0f)) * factor, 255.0f);
					}
				}

				cv::Mat((int)depthFrame->height, (int)depthFrame->width, CV_32FC1, depthFrame->data).convertTo(depth, CV_16U, 1);

				cv::flip(rgb, rgb, 1);
				cv::flip(depth, depth, 1);

				if(stereoModel_.isValidForRectification())
				{
					//rectify
					rgb = stereoModel_.left().rectifyImage(rgb);
					depth = stereoModel_.left().rectifyDepth(depth);
					fx = stereoModel_.left().fx();
					fy = stereoModel_.left().fy();
					cx = stereoModel_.left().cx();
					cy = stereoModel_.left().cy();
				}
				else
				{
					libfreenect2::Freenect2Device::IrCameraParams params = dev_->getIrCameraParams();
					fx = params.fx;
					fy = params.fy;
					cx = params.cx;
					cy = params.cy;
				}
			}
			else
			{
				//rgb + ir or rgb + depth
				if(stereoModel_.isValidForRectification())
				{
					cv::Mat rgbMatC4((int)rgbFrame->height, (int)rgbFrame->width, CV_8UC4, rgbFrame->data);
					cv::Mat rgbMat; // rtabmap uses 3 channels RGB
					#ifdef LIBFREENECT2_WITH_TEGRAJPEG_SUPPORT

					cv::cvtColor(rgbMatC4, rgbMat, CV_RGBA2BGR);

					#else

					cv::cvtColor(rgbMatC4, rgbMat, CV_BGRA2BGR);

					#endif
					cv::flip(rgbMat, rgb, 1);

					//rectify color
					rgb = stereoModel_.right().rectifyImage(rgb);
					if(irFrame)
					{
						//rectify IR
						cv::Mat((int)irFrame->height, (int)irFrame->width, CV_32FC1, irFrame->data).convertTo(depth, CV_16U, 1);
						cv::flip(depth, depth, 1);
						depth = stereoModel_.left().rectifyImage(depth);
					}
					else
					{
						//rectify depth
						cv::Mat((int)depthFrame->height, (int)depthFrame->width, CV_32FC1, depthFrame->data).convertTo(depth, CV_16U, 1);
						cv::flip(depth, depth, 1);

						//depth = stereoModel_.left().rectifyImage(depth, 0); // ~0.5/4 ms but is more noisy
						depth = stereoModel_.left().rectifyDepth(depth); // ~16/25 ms

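						// With a custom stereo calibration, the rectified depth is reprojected into the
						// rectified color camera using the calibrated extrinsics (stereoTransform), so the
						// intrinsics reported below are those of the color (right) model.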
						bool registered = true;
						if(registered)
						{
							depth = util2d::registerDepth(
									depth,
									stereoModel_.left().P().colRange(0,3).rowRange(0,3), //scaled depth K
									depth.size(),
									stereoModel_.right().P().colRange(0,3).rowRange(0,3), //scaled color K
									stereoModel_.stereoTransform());
							util2d::fillRegisteredDepthHoles(depth, true, false);
							fx = stereoModel_.right().fx();
							fy = stereoModel_.right().fy();
							cx = stereoModel_.right().cx();
							cy = stereoModel_.right().cy();
						}
						else
						{
							fx = stereoModel_.left().fx();
							fy = stereoModel_.left().fy();
							cx = stereoModel_.left().cx();
							cy = stereoModel_.left().cy();
						}
					}
				}
				else
				{
					//use data from libfreenect2
					if(irFrame)
					{
						cv::Mat rgbMatC4((int)rgbFrame->height, (int)rgbFrame->width, CV_8UC4, rgbFrame->data);
						cv::Mat rgbMat; // rtabmap uses 3 channels RGB
						#ifdef LIBFREENECT2_WITH_TEGRAJPEG_SUPPORT

						cv::cvtColor(rgbMatC4, rgbMat, CV_RGBA2BGR);

						#else

						cv::cvtColor(rgbMatC4, rgbMat, CV_BGRA2BGR);

						#endif
						cv::flip(rgbMat, rgb, 1);

						cv::Mat((int)irFrame->height, (int)irFrame->width, CV_32FC1, irFrame->data).convertTo(depth, CV_16U, 1);
						cv::flip(depth, depth, 1);
					}
					else
					{
						//registration of the depth
						UASSERT(reg_!=0);

						float maxDepth = maxKinect2Depth_*1000.0f;
						float minDepth = minKinect2Depth_*1000.0f;
						if(type_ == kTypeColor2DepthSD || type_ == kTypeDepth2ColorHD)
						{
							cv::Mat rgbMatBGRA;
							libfreenect2::Frame depthUndistorted(512, 424, 4);
							libfreenect2::Frame rgbRegistered(512, 424, 4);

							// do it before registration
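							// Depth noise filtering: a pixel inside the valid range is kept only if its
							// right, bottom and bottom-right neighbours are also valid and all four values
							// lie within 1% of their average; otherwise it is zeroed. Pixels on the last
							// row/column skip the neighbour test and are kept as long as they are in range.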
							if(noiseFiltering_)
							{
								cv::Mat depthMat = cv::Mat((int)depthFrame->height, (int)depthFrame->width, CV_32FC1, depthFrame->data);
								for(int dx=0; dx<depthMat.cols; ++dx)
								{
									bool onEdgeX = dx==depthMat.cols-1;
									for(int dy=0; dy<depthMat.rows; ++dy)
									{
										bool onEdge = onEdgeX || dy==depthMat.rows-1;
										float z = 0.0f;
										float & dz = depthMat.at<float>(dy,dx);
										if(dz>=minDepth && dz <= maxDepth)
										{
											z = dz;
											if(noiseFiltering_ && !onEdge)
											{
												z=0;
												const float & dz1 = depthMat.at<float>(dy,dx+1);
												const float & dz2 = depthMat.at<float>(dy+1,dx);
												const float & dz3 = depthMat.at<float>(dy+1,dx+1);
												if( dz1>=minDepth && dz1 <= maxDepth &&
													dz2>=minDepth && dz2 <= maxDepth &&
													dz3>=minDepth && dz3 <= maxDepth)
												{
													float avg = (dz + dz1 + dz2 + dz3) / 4.0f;
													float thres = 0.01f*avg;

													if( fabs(dz-avg) < thres &&
														fabs(dz1-avg) < thres &&
														fabs(dz2-avg) < thres &&
														fabs(dz3-avg) < thres)
													{
														z = dz;
													}
												}
											}
										}
										dz = z;
									}
								}
							}

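							// Registration::apply() undistorts the depth frame and maps the color frame onto
							// it (512x424). It also fills the 1920x1082 "big depth" frame registered to the
							// full-resolution color image; its one-row padding at the top and bottom is
							// cropped to 1080 rows below for the kTypeDepth2ColorHD case.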
							libfreenect2::Frame bidDepth(1920, 1082, 4); // HD
							reg_->apply(rgbFrame, depthFrame, &depthUndistorted, &rgbRegistered, true, &bidDepth);

							cv::Mat depthMat;
							if(type_ == kTypeColor2DepthSD)
							{
								rgbMatBGRA = cv::Mat((int)rgbRegistered.height, (int)rgbRegistered.width, CV_8UC4, rgbRegistered.data);
								depthMat = cv::Mat((int)depthUndistorted.height, (int)depthUndistorted.width, CV_32FC1, depthUndistorted.data);

								//use IR params
								libfreenect2::Freenect2Device::IrCameraParams params = dev_->getIrCameraParams();
								fx = params.fx;
								fy = params.fy;
								cx = params.cx;
								cy = params.cy;
							}
							else
							{
								rgbMatBGRA = cv::Mat((int)rgbFrame->height, (int)rgbFrame->width, CV_8UC4, rgbFrame->data);
								depthMat = cv::Mat((int)bidDepth.height, (int)bidDepth.width, CV_32FC1, bidDepth.data);
								depthMat = depthMat(cv::Range(1, 1081), cv::Range::all());

								//use color params
								libfreenect2::Freenect2Device::ColorCameraParams params = dev_->getColorCameraParams();
								fx = params.fx;
								fy = params.fy;
								cx = params.cx;
								cy = params.cy;
							}

							//filter max depth and flip
							depth = cv::Mat(depthMat.size(), CV_16UC1);
							for(int dx=0; dx<depthMat.cols; ++dx)
							{
								for(int dy=0; dy<depthMat.rows; ++dy)
								{
									unsigned short z = 0;
									const float & dz = depthMat.at<float>(dy,dx);
									if(dz>=minDepth && dz <= maxDepth)
									{
										z = (unsigned short)dz;
									}
									depth.at<unsigned short>(dy,(depthMat.cols-1)-dx) = z; //flip
								}
							}

							// rtabmap uses 3 channels RGB
							#ifdef LIBFREENECT2_WITH_TEGRAJPEG_SUPPORT

							cv::cvtColor(rgbMatBGRA, rgb, CV_RGBA2BGR);

							#else

							cv::cvtColor(rgbMatBGRA, rgb, CV_BGRA2BGR);

							#endif
							cv::flip(rgb, rgb, 1);
						}
						else //register depth to color (OLD WAY)
						{
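							// Legacy registration for kTypeDepth2ColorSD / kTypeDepth2ColorHD2: each valid
							// (optionally noise-filtered) depth pixel is projected into color image
							// coordinates with Registration::apply(dx, dy, dz, cx, cy), keeping the closest
							// depth per color pixel, then holes are filled and the result is mirrored like
							// the color image.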
							UASSERT(type_ == kTypeDepth2ColorSD || type_ == kTypeDepth2ColorHD2);
							cv::Mat rgbMatBGRA = cv::Mat((int)rgbFrame->height, (int)rgbFrame->width, CV_8UC4, rgbFrame->data);
							if(type_ == kTypeDepth2ColorSD)
							{
								cv::Mat tmp;
								cv::resize(rgbMatBGRA, tmp, cv::Size(), 0.5, 0.5, cv::INTER_AREA);
								rgbMatBGRA = tmp;
							}
							// rtabmap uses 3 channels RGB
							#ifdef LIBFREENECT2_WITH_TEGRAJPEG_SUPPORT

							cv::cvtColor(rgbMatBGRA, rgb, CV_RGBA2BGR);

							#else

							cv::cvtColor(rgbMatBGRA, rgb, CV_BGRA2BGR);

							#endif
							cv::flip(rgb, rgb, 1);

							cv::Mat depthFrameMat = cv::Mat((int)depthFrame->height, (int)depthFrame->width, CV_32FC1, depthFrame->data);
							depth = cv::Mat::zeros(rgbMatBGRA.rows, rgbMatBGRA.cols, CV_16U);
							for(int dx=0; dx<depthFrameMat.cols-1; ++dx)
							{
								for(int dy=0; dy<depthFrameMat.rows-1; ++dy)
								{
									float dz = depthFrameMat.at<float>(dy,dx);
									if(dz>=minDepth && dz<=maxDepth)
									{
										bool goodDepth = true;
										if(noiseFiltering_)
										{
											goodDepth = false;
											float dz1 = depthFrameMat.at<float>(dy,dx+1);
											float dz2 = depthFrameMat.at<float>(dy+1,dx);
											float dz3 = depthFrameMat.at<float>(dy+1,dx+1);
											if(dz1>=minDepth && dz1 <= maxDepth &&
												dz2>=minDepth && dz2 <= maxDepth &&
												dz3>=minDepth && dz3 <= maxDepth)
											{
												float avg = (dz + dz1 + dz2 + dz3) / 4.0f;
												float thres = 0.01 * avg;
												if( fabs(dz-avg) < thres &&
													fabs(dz1-avg) < thres &&
													fabs(dz2-avg) < thres &&
													fabs(dz3-avg) < thres)
												{
													goodDepth = true;
												}
											}
										}
										if(goodDepth)
										{
											float cx=-1,cy=-1;
											reg_->apply(dx, dy, dz, cx, cy);
											if(type_ == kTypeDepth2ColorSD)
											{
												cx/=2.0f;
												cy/=2.0f;
											}
											int rcx = cvRound(cx);
											int rcy = cvRound(cy);
											if(uIsInBounds(rcx, 0, depth.cols) && uIsInBounds(rcy, 0, depth.rows))
											{
												unsigned short & zReg = depth.at<unsigned short>(rcy, rcx);
												if(zReg == 0 || zReg > (unsigned short)dz)
												{
													zReg = (unsigned short)dz;
												}
											}
										}
									}
								}
							}
							util2d::fillRegisteredDepthHoles(depth, true, true, type_==kTypeDepth2ColorHD2);
							util2d::fillRegisteredDepthHoles(depth, type_==kTypeDepth2ColorSD, type_==kTypeDepth2ColorHD2); //second pass
							cv::flip(depth, depth, 1);
							libfreenect2::Freenect2Device::ColorCameraParams params = dev_->getColorCameraParams();
							fx = params.fx*(type_==kTypeDepth2ColorSD?0.5:1.0f);
							fy = params.fy*(type_==kTypeDepth2ColorSD?0.5:1.0f);
							cx = params.cx*(type_==kTypeDepth2ColorSD?0.5:1.0f);
							cy = params.cy*(type_==kTypeDepth2ColorSD?0.5:1.0f);
						}
					}
				}
			}

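			// The camera model combines the intrinsics selected above (custom calibration or
			// factory parameters) with the configured local transform; if no intrinsics were
			// determined, the frame is returned without a calibrated model.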
			CameraModel model;
			if(fx && fy)
			{
				model = CameraModel(
						fx, //fx
						fy, //fy
						cx, //cx
						cy, //cy
						this->getLocalTransform(),
						0,
						rgb.size());
			}
			data = SensorData(rgb, depth, model, this->getNextSeqID(), stamp);

			listener_->release(frames);
		}
	}
#else
	UERROR("CameraFreenect2: RTAB-Map is not built with Freenect2 support!");
#endif
	return data;
}

} // namespace rtabmap