#include <opencv2/imgproc/types_c.h>

#ifdef RTABMAP_REALSENSE
#include <librealsense/rs.hpp>
#ifdef RTABMAP_REALSENSE_SLAM
#include <librealsense/slam/slam.h>
#endif
#endif

#ifdef RTABMAP_REALSENSE

// CameraRealSense::CameraRealSense(): member initializer list
	Camera(imageRate, localTransform)
#ifdef RTABMAP_REALSENSE
	,
	presetRGB_(presetRGB),
	presetDepth_(presetDepth),
	computeOdometry_(computeOdometry),
	depthScaledToRGBSize_(false),
// CameraRealSense::~CameraRealSense()
#ifdef RTABMAP_REALSENSE
	try
	{
		dev_->stop(rs::source::all_sources);
	}
	catch(const rs::error & error)
	{
		UWARN("%s", error.what());
	}

#ifdef RTABMAP_REALSENSE_SLAM
	slam_->flush_resources();
#endif
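// setStreamConfigIntrin(): check that the SLAM module's supported configuration
// enables the given stream at the camera's resolution, then copy the size,
// frame rate and intrinsics into the actual module configuration and enable it.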
#ifdef RTABMAP_REALSENSE_SLAM
bool setStreamConfigIntrin(
		rs::core::stream_type stream,
		std::map< rs::core::stream_type, rs::core::intrinsics > intrinsics,
		rs::core::video_module_interface::supported_module_config & supported_config,
		rs::core::video_module_interface::actual_module_config & actual_config)
{
	auto & supported_stream_config = supported_config[stream];
	if (!supported_stream_config.is_enabled ||
		supported_stream_config.size.width != intrinsics[stream].width ||
		supported_stream_config.size.height != intrinsics[stream].height)
	{
		UERROR("size of stream is not supported by slam");
		UERROR(" supported: stream %d, width: %d height: %d", (uint32_t)stream, supported_stream_config.size.width, supported_stream_config.size.height);
		UERROR(" received: stream %d, width: %d height: %d", (uint32_t)stream, intrinsics[stream].width, intrinsics[stream].height);
		return false;
	}
	rs::core::video_module_interface::actual_image_stream_config & actual_stream_config = actual_config[stream];
	actual_config[stream].size.width = intrinsics[stream].width;
	actual_config[stream].size.height = intrinsics[stream].height;
	actual_stream_config.frame_rate = supported_stream_config.frame_rate;
	actual_stream_config.intrinsics = intrinsics[stream];
	actual_stream_config.is_enabled = true;
	return true;
}
#endif
// setDepthScaledToRGBSize()
#ifdef RTABMAP_REALSENSE
	depthScaledToRGBSize_ = enabled;
#endif

// setRGBSource()
#ifdef RTABMAP_REALSENSE

#ifdef RTABMAP_REALSENSE
// align_images(): for every depth pixel, deproject its half-pixel corners to 3D,
// transform them into the other stream and reproject them, then hand the depth
// value to transfer_pixel() for every covered pixel of the other image.
template<class GET_DEPTH, class TRANSFER_PIXEL>
void align_images(
		const rs_intrinsics & depth_intrin,
		const rs_extrinsics & depth_to_other,
		const rs_intrinsics & other_intrin,
		GET_DEPTH get_depth,
		TRANSFER_PIXEL transfer_pixel)
{
#pragma omp parallel for schedule(dynamic)
	for(int depth_y = 0; depth_y < depth_intrin.height; ++depth_y)
	{
		int depth_pixel_index = depth_y * depth_intrin.width;
		for(int depth_x = 0; depth_x < depth_intrin.width; ++depth_x, ++depth_pixel_index)
		{
			if(float depth = get_depth(depth_pixel_index))
			{
				float depth_pixel[2] = {depth_x-0.5f, depth_y-0.5f}, depth_point[3], other_point[3], other_pixel[2];
				rs_deproject_pixel_to_point(depth_point, &depth_intrin, depth_pixel, depth);
				rs_transform_point_to_point(other_point, &depth_to_other, depth_point);
				rs_project_point_to_pixel(other_pixel, &other_intrin, other_point);
				const int other_x0 = static_cast<int>(other_pixel[0] + 0.5f);
				const int other_y0 = static_cast<int>(other_pixel[1] + 0.5f);

				depth_pixel[0] = depth_x+0.5f; depth_pixel[1] = depth_y+0.5f;
				rs_deproject_pixel_to_point(depth_point, &depth_intrin, depth_pixel, depth);
				rs_transform_point_to_point(other_point, &depth_to_other, depth_point);
				rs_project_point_to_pixel(other_pixel, &other_intrin, other_point);
				const int other_x1 = static_cast<int>(other_pixel[0] + 0.5f);
				const int other_y1 = static_cast<int>(other_pixel[1] + 0.5f);

				if(other_x0 < 0 || other_y0 < 0 || other_x1 >= other_intrin.width || other_y1 >= other_intrin.height)
					continue;

				for(int y=other_y0; y<=other_y1; ++y)
					for(int x=other_x0; x<=other_x1; ++x)
						transfer_pixel(depth_pixel_index, y * other_intrin.width + x);
			}
		}
	}
}
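// The two wrappers below specialize align_images(): align_z_to_other() maps the
// Z16 depth stream and keeps the nearest value when several depth pixels land on
// the same target pixel, while align_disparity_to_other() converts each disparity
// to a depth (disparity_scale / d) for the geometric mapping but writes the
// original disparity value.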
void align_z_to_other(byte * z_aligned_to_other,
		const uint16_t * z_pixels,
		float z_scale,
		const rs_intrinsics & z_intrin,
		const rs_extrinsics & z_to_other,
		const rs_intrinsics & other_intrin)
{
	auto out_z = (uint16_t *)(z_aligned_to_other);
	align_images(z_intrin, z_to_other, other_intrin,
		[z_pixels, z_scale](int z_pixel_index) { return z_scale * z_pixels[z_pixel_index]; },
		[out_z, z_pixels](int z_pixel_index, int other_pixel_index) { out_z[other_pixel_index] = out_z[other_pixel_index] ? std::min(out_z[other_pixel_index], z_pixels[z_pixel_index]) : z_pixels[z_pixel_index]; });
}
void align_disparity_to_other(byte * disparity_aligned_to_other,
		const uint16_t * disparity_pixels,
		float disparity_scale,
		const rs_intrinsics & disparity_intrin,
		const rs_extrinsics & disparity_to_other,
		const rs_intrinsics & other_intrin)
{
	auto out_disparity = (uint16_t *)(disparity_aligned_to_other);
	align_images(disparity_intrin, disparity_to_other, other_intrin,
		[disparity_pixels, disparity_scale](int disparity_pixel_index) { return disparity_scale / disparity_pixels[disparity_pixel_index]; },
		[out_disparity, disparity_pixels](int disparity_pixel_index, int other_pixel_index) { out_disparity[other_pixel_index] = disparity_pixels[disparity_pixel_index]; });
}
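// bytes<N> is a trivially copyable N-byte pixel type; align_other_to_depth_bytes()
// runs the mapping in the opposite direction, copying, for each depth pixel, the
// "other" (e.g. color) pixel it projects onto.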
template<int N> struct bytes { char b[N]; };

template<int N, class GET_DEPTH>
void align_other_to_depth_bytes(byte * other_aligned_to_depth,
		GET_DEPTH get_depth,
		const rs_intrinsics & depth_intrin,
		const rs_extrinsics & depth_to_other,
		const rs_intrinsics & other_intrin,
		const byte * other_pixels)
{
	auto in_other = (const bytes<N> *)(other_pixels);
	auto out_other = (bytes<N> *)(other_aligned_to_depth);
	align_images(depth_intrin, depth_to_other, other_intrin, get_depth,
		[out_other, in_other](int depth_pixel_index, int other_pixel_index) { out_other[depth_pixel_index] = in_other[other_pixel_index]; });
}
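// compute_rectification_table(): build a per-pixel lookup from the rectified
// image to the unrectified one by running align_images() with a constant depth
// of 1, so each rectified pixel stores the index of the unrectified pixel to
// sample from.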
std::vector<int> compute_rectification_table(
		const rs_intrinsics & rect_intrin,
		const rs_extrinsics & rect_to_unrect,
		const rs_intrinsics & unrect_intrin)
{
	std::vector<int> rectification_table;
	rectification_table.resize(rect_intrin.width * rect_intrin.height);
	align_images(rect_intrin, rect_to_unrect, unrect_intrin,
		[](int) { return 1.0f; },
		[&rectification_table](int rect_pixel_index, int unrect_pixel_index) { rectification_table[rect_pixel_index] = unrect_pixel_index; });
	return rectification_table;
}
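// rectify_image_pixels() applies that lookup to a typed pixel buffer, producing
// a rectified image from unrectified input; rectify_image() dispatches on the
// stream format so 1-, 2-, 3- and 4-byte pixels all reuse the same template.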
template<class T>
void rectify_image_pixels(T * rect_pixels, const std::vector<int> & rectification_table, const T * unrect_pixels)
{
	for(auto entry : rectification_table)
		*rect_pixels++ = unrect_pixels[entry];
}

void rectify_image(uint8_t * rect_pixels, const std::vector<int> & rectification_table, const uint8_t * unrect_pixels, rs_format format)
{
	switch(format)
	{
	case RS_FORMAT_Y8:
		return rectify_image_pixels((bytes<1> *)rect_pixels, rectification_table, (const bytes<1> *)unrect_pixels);
	case RS_FORMAT_Y16:
	case RS_FORMAT_Z16:
		return rectify_image_pixels((bytes<2> *)rect_pixels, rectification_table, (const bytes<2> *)unrect_pixels);
	case RS_FORMAT_RGB8:
	case RS_FORMAT_BGR8:
		return rectify_image_pixels((bytes<3> *)rect_pixels, rectification_table, (const bytes<3> *)unrect_pixels);
	case RS_FORMAT_RGBA8:
	case RS_FORMAT_BGRA8:
		return rectify_image_pixels((bytes<4> *)rect_pixels, rectification_table, (const bytes<4> *)unrect_pixels);
	}
}
// CameraRealSense::init()
#ifdef RTABMAP_REALSENSE
	// Stop the device in case it was started by a previous init().
	try
	{
		dev_->stop(rs::source::all_sources);
	}
	catch(const rs::error & error)
	{
		UWARN("%s", error.what());
	}

	bufferedFrames_.clear();
	rsRectificationTable_.clear();

#ifdef RTABMAP_REALSENSE_SLAM
	motionSeq_[0] = motionSeq_[1] = 0;
	slam_->flush_resources();
#endif

	ctx_ = new rs::context();
	if (ctx_->get_device_count() == 0)
	{
		UERROR("No RealSense device detected!");
		return false;
	}

	dev_ = ctx_->get_device(deviceId_);
	if (dev_ == 0)
	{
		UERROR("Cannot connect to device %d", deviceId_);
		return false;
	}
	std::string name = dev_->get_name();
	UINFO("Using device %d, an %s", deviceId_, name.c_str());
	UINFO(" Serial number: %s", dev_->get_serial());
	UINFO(" Firmware version: %s", dev_->get_firmware_version());
	UINFO(" Preset RGB: %d", presetRGB_);
	UINFO(" Preset Depth: %d", presetDepth_);

#ifndef RTABMAP_REALSENSE_SLAM
	computeOdometry_ = false;
#endif

	// Odometry and the fisheye stream require a ZR300.
	if (name.find("ZR300") == std::string::npos)
	{
		computeOdometry_ = false;
		if(rgbSource_ == kFishEye)
		{
			UWARN("Fisheye cannot be used with %s camera, using color instead...", name.c_str());
			rgbSource_ = kColor;
		}
	}
	rs::intrinsics depth_intrin;
	rs::intrinsics fisheye_intrin;
	rs::intrinsics color_intrin;

	UINFO("Enabling streams...");
	try
	{
		// Fisheye stream (ZR300), required when computing odometry
		if(rgbSource_ == kFishEye || computeOdometry_)
		{
			dev_->enable_stream(rs::stream::fisheye, 640, 480, rs::format::raw8, 30);
			dev_->set_option(rs::option::fisheye_strobe, 1);
			dev_->set_option(rs::option::fisheye_external_trigger, 1);
			dev_->set_option(rs::option::fisheye_color_auto_exposure, 1);
			fisheye_intrin = dev_->get_stream_intrinsics(rs::stream::fisheye);
			UINFO(" Fisheye: %dx%d", fisheye_intrin.width, fisheye_intrin.height);
			color_intrin = fisheye_intrin;
		}

		// Color as RGB source: the rectified color intrinsics are used
		if(rgbSource_ == kColor)
		{
			dev_->enable_stream(rs::stream::color, (rs::preset)presetRGB_);
			color_intrin = dev_->get_stream_intrinsics(rs::stream::rectified_color);
			UINFO(" RGB: %dx%d", color_intrin.width, color_intrin.height);
		}
		else if(rgbSource_ == kInfrared)
		{
			// Left infrared as RGB source
			dev_->enable_stream(rs::stream::infrared, (rs::preset)presetRGB_);
			color_intrin = dev_->get_stream_intrinsics(rs::stream::infrared);
			UINFO(" IR left: %dx%d", color_intrin.width, color_intrin.height);
		}

		dev_->enable_stream(rs::stream::depth, (rs::preset)presetDepth_);
		depth_intrin = dev_->get_stream_intrinsics(rs::stream::depth);
		UINFO(" Depth: %dx%d", depth_intrin.width, depth_intrin.height);
	}
	catch(const rs::error & error)
	{
		UERROR("Failed starting the streams: %s", error.what());
		return false;
	}
#ifdef RTABMAP_REALSENSE_SLAM
	UDEBUG("Setup frame callback");
	std::function<void(rs::frame)> frameCallback = [this](rs::frame frame)
	{
		const auto timestampDomain = frame.get_frame_timestamp_domain();
		if (rs::timestamp_domain::microcontroller != timestampDomain)
		{
			UERROR("error: Junk time stamp in stream: %d\twith frame counter: %d",
					(int)(frame.get_stream_type()), frame.get_frame_number());
			return;
		}

		int width = frame.get_width();
		int height = frame.get_height();
		rs::core::correlated_sample_set sample_set = {};

		rs::core::image_info info =
		{
			width,
			height,
			rs::utils::convert_pixel_format(frame.get_format()),
			/* ... */
		};

		cv::Mat image;
		if(frame.get_format() == rs::format::raw8 || frame.get_format() == rs::format::y8)
		{
			image = cv::Mat(height, width, CV_8UC1, (unsigned char*)frame.get_data());
			if(frame.get_stream_type() == rs::stream::fisheye)
			{
				// The fisheye frame arrives right after the depth frame: pair it
				// with the most recently buffered depth image.
				if(bufferedFrames_.size())
				{
					bufferedFrames_.rbegin()->second.first = image.clone();
					bool notify = lastSyncFrames_.first.empty();
					lastSyncFrames_ = bufferedFrames_.rbegin()->second;
					if(notify)
					{
						dataReady_.release();
					}
					bufferedFrames_.clear();
				}
			}
			else if(frame.get_stream_type() == rs::stream::infrared)
			{
				// The IR frame shares its exact timestamp with the depth frame.
				if(bufferedFrames_.find(frame.get_timestamp()) != bufferedFrames_.end())
				{
					bufferedFrames_.find(frame.get_timestamp())->second.first = image.clone();
					bool notify = lastSyncFrames_.first.empty();
					lastSyncFrames_ = bufferedFrames_.find(frame.get_timestamp())->second;
					if(notify)
					{
						dataReady_.release();
					}
					bufferedFrames_.erase(frame.get_timestamp());
				}
				else
				{
					bufferedFrames_.insert(std::make_pair(frame.get_timestamp(), std::make_pair(image.clone(), cv::Mat())));
				}
				if(bufferedFrames_.size()>5)
				{
					UWARN("Frames cannot be synchronized!");
					bufferedFrames_.clear();
				}
			}
		}
		else if(frame.get_format() == rs::format::z16)
		{
			image = cv::Mat(height, width, CV_16UC1, (unsigned char*)frame.get_data());
			if(bufferedFrames_.find(frame.get_timestamp()) != bufferedFrames_.end())
			{
				bufferedFrames_.find(frame.get_timestamp())->second.second = image.clone();
				bool notify = lastSyncFrames_.first.empty();
				lastSyncFrames_ = bufferedFrames_.find(frame.get_timestamp())->second;
				if(notify)
				{
					dataReady_.release();
				}
				bufferedFrames_.erase(frame.get_timestamp());
			}
			else
			{
				bufferedFrames_.insert(std::make_pair(frame.get_timestamp(), std::make_pair(cv::Mat(), image.clone())));
			}
			if(bufferedFrames_.size()>5)
			{
				UWARN("Frames cannot be synchronized!");
				bufferedFrames_.clear();
			}
		}
		else if(frame.get_format() == rs::format::rgb8)
		{
			if(rsRectificationTable_.size())
			{
				// Rectify the raw color frame with the precomputed lookup table.
				image = cv::Mat(height, width, CV_8UC3);
				rectify_image(image.data, rsRectificationTable_, (unsigned char*)frame.get_data(), (rs_format)frame.get_format());
			}
			else
			{
				image = cv::Mat(height, width, CV_8UC3, (unsigned char*)frame.get_data());
			}
			if(bufferedFrames_.find(frame.get_timestamp()) != bufferedFrames_.end())
			{
				bufferedFrames_.find(frame.get_timestamp())->second.first = image.clone();
				bool notify = lastSyncFrames_.first.empty();
				lastSyncFrames_ = bufferedFrames_.find(frame.get_timestamp())->second;
				if(notify)
				{
					dataReady_.release();
				}
				bufferedFrames_.erase(frame.get_timestamp());
			}
			else
			{
				bufferedFrames_.insert(std::make_pair(frame.get_timestamp(), std::make_pair(image.clone(), cv::Mat())));
			}
			if(bufferedFrames_.size()>5)
			{
				UWARN("Frames cannot be synchronized!");
				bufferedFrames_.clear();
			}
		}

		// Forward the frame to the RealSense SLAM module.
		rs::core::stream_type stream = rs::utils::convert_stream_type(frame.get_stream_type());
		sample_set[stream] = rs::core::image_interface::create_instance_from_raw_data(
				/* ... */
				frame.get_timestamp(),
				/* ... */
				rs::core::timestamp_domain::microcontroller);
		if (slam_->process_sample_set(sample_set) < rs::core::status_no_error)
		{
			UERROR("error: failed to process sample");
		}
		sample_set[stream]->release();
	};
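	// Attach frameCallback to the streams enabled above: the fisheye stream when
	// odometry or the fisheye RGB source is used, otherwise the infrared or color
	// stream depending on rgbSource_, and always the depth stream.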
	if(computeOdometry_ || rgbSource_ == kFishEye)
	{
		dev_->set_frame_callback(rs::stream::fisheye, frameCallback);
	}
	if(rgbSource_ == kInfrared)
	{
		dev_->set_frame_callback(rs::stream::infrared, frameCallback);
	}
	else if(rgbSource_ == kColor)
	{
		dev_->set_frame_callback(rs::stream::color, frameCallback);
	}
	dev_->set_frame_callback(rs::stream::depth, frameCallback);
	if (computeOdometry_)
	{
		UDEBUG("Setup motion callback");
		std::function<void(rs::motion_data)> motion_callback;
		motion_callback = [this](rs::motion_data entry)
		{
			if ((entry.timestamp_data.source_id != RS_EVENT_IMU_GYRO) &&
				(entry.timestamp_data.source_id != RS_EVENT_IMU_ACCEL))
			{
				return;
			}

			rs_event_source motionType = entry.timestamp_data.source_id;

			rs::core::correlated_sample_set sample_set = {};
			if (motionType == RS_EVENT_IMU_ACCEL)
			{
				sample_set[rs::core::motion_type::accel].timestamp = entry.timestamp_data.timestamp;
				sample_set[rs::core::motion_type::accel].data[0] = (float)entry.axes[0];
				sample_set[rs::core::motion_type::accel].data[1] = (float)entry.axes[1];
				sample_set[rs::core::motion_type::accel].data[2] = (float)entry.axes[2];
				sample_set[rs::core::motion_type::accel].type = rs::core::motion_type::accel;
				motionSeq_[0]++;
				sample_set[rs::core::motion_type::accel].frame_number = motionSeq_[0];
			}
			else if (motionType == RS_EVENT_IMU_GYRO)
			{
				sample_set[rs::core::motion_type::gyro].timestamp = entry.timestamp_data.timestamp;
				sample_set[rs::core::motion_type::gyro].data[0] = (float)entry.axes[0];
				sample_set[rs::core::motion_type::gyro].data[1] = (float)entry.axes[1];
				sample_set[rs::core::motion_type::gyro].data[2] = (float)entry.axes[2];
				sample_set[rs::core::motion_type::gyro].type = rs::core::motion_type::gyro;
				motionSeq_[1]++;
				sample_set[rs::core::motion_type::gyro].frame_number = motionSeq_[1];
			}

			if (slam_->process_sample_set(sample_set) < rs::core::status_no_error)
			{
				UERROR("error: failed to process sample");
			}
		};

		std::function<void(rs::timestamp_data)> timestamp_callback;
		timestamp_callback = [](rs::timestamp_data entry) {};

		dev_->enable_motion_tracking(motion_callback, timestamp_callback);
		UINFO(" enabled accel and gyro stream");
		rs::motion_intrinsics imuIntrinsics;
		rs::extrinsics fisheye2ImuExtrinsics;
		rs::extrinsics fisheye2DepthExtrinsics;
		try
		{
			imuIntrinsics = dev_->get_motion_intrinsics();
			fisheye2ImuExtrinsics = dev_->get_motion_extrinsics_from(rs::stream::fisheye);
			fisheye2DepthExtrinsics = dev_->get_extrinsics(rs::stream::depth, rs::stream::fisheye);
		}
		catch (const rs::error & e)
		{
			UERROR("Exception: %s (try to unplug/plug the camera)", e.what());
			return false;
		}
		slam_ = new rs::slam::slam();
		slam_->set_auto_occupancy_map_building(false);
		slam_->force_relocalization_pose(false);

		rs::core::video_module_interface::supported_module_config supported_config = {};
		if (slam_->query_supported_module_config(0, supported_config) < rs::core::status_no_error)
		{
			UERROR("Failed to query the first supported module configuration");
			return false;
		}

		rs::core::video_module_interface::actual_module_config actual_config = {};
		std::map< rs::core::stream_type, rs::core::intrinsics > intrinsics;
		intrinsics[rs::core::stream_type::fisheye] = rs::utils::convert_intrinsics(fisheye_intrin);
		intrinsics[rs::core::stream_type::depth] = rs::utils::convert_intrinsics(depth_intrin);

		if(!setStreamConfigIntrin(rs::core::stream_type::fisheye, intrinsics, supported_config, actual_config))
		{
			return false;
		}
		if(!setStreamConfigIntrin(rs::core::stream_type::depth, intrinsics, supported_config, actual_config))
		{
			return false;
		}

		actual_config[rs::core::motion_type::accel].is_enabled = true;
		actual_config[rs::core::motion_type::gyro].is_enabled = true;
		actual_config[rs::core::motion_type::gyro].intrinsics = rs::utils::convert_motion_device_intrinsics(imuIntrinsics.gyro);
		actual_config[rs::core::motion_type::accel].intrinsics = rs::utils::convert_motion_device_intrinsics(imuIntrinsics.acc);

		actual_config[rs::core::stream_type::fisheye].extrinsics_motion = rs::utils::convert_extrinsics(fisheye2ImuExtrinsics);
		actual_config[rs::core::stream_type::fisheye].extrinsics = rs::utils::convert_extrinsics(fisheye2DepthExtrinsics);

		UDEBUG("Set SLAM config");
		if (slam_->set_module_config(actual_config) < rs::core::status_no_error)
		{
			UERROR("error : failed to set the enabled module configuration");
			return false;
		}
		rs::extrinsics fisheye2imu = dev_->get_motion_extrinsics_from(rs::stream::fisheye);
		imu2Camera = Transform(
				fisheye2imu.rotation[0], fisheye2imu.rotation[1], fisheye2imu.rotation[2], fisheye2imu.translation[0],
				fisheye2imu.rotation[3], fisheye2imu.rotation[4], fisheye2imu.rotation[5], fisheye2imu.translation[1],
				fisheye2imu.rotation[6], fisheye2imu.rotation[7], fisheye2imu.rotation[8], fisheye2imu.translation[2]).inverse();
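		// The rs::extrinsics above provide a 3x3 rotation and a translation; they
		// are packed into a Transform and inverted, then composed with the
		// fisheye-to-color transform below (imu2Camera *= fisheye2Color) so that
		// imu2Camera matches the selected RGB source.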
		if(rgbSource_ == kInfrared)
		{
			rs::extrinsics color2Fisheye = dev_->get_extrinsics(rs::stream::fisheye, rs::stream::infrared);
			Transform fisheye2Color = Transform(
					color2Fisheye.rotation[0], color2Fisheye.rotation[1], color2Fisheye.rotation[2], color2Fisheye.translation[0],
					color2Fisheye.rotation[3], color2Fisheye.rotation[4], color2Fisheye.rotation[5], color2Fisheye.translation[1],
					color2Fisheye.rotation[6], color2Fisheye.rotation[7], color2Fisheye.rotation[8], color2Fisheye.translation[2]).inverse();
			imu2Camera *= fisheye2Color;
		}
		else if(rgbSource_ == kColor)
		{
			rs::extrinsics color2Fisheye = dev_->get_extrinsics(rs::stream::fisheye, rs::stream::rectified_color);
			Transform fisheye2Color = Transform(
					color2Fisheye.rotation[0], color2Fisheye.rotation[1], color2Fisheye.rotation[2], color2Fisheye.translation[0],
					color2Fisheye.rotation[3], color2Fisheye.rotation[4], color2Fisheye.rotation[5], color2Fisheye.translation[1],
					color2Fisheye.rotation[6], color2Fisheye.rotation[7], color2Fisheye.rotation[8], color2Fisheye.translation[2]).inverse();
			imu2Camera *= fisheye2Color;
		}
		try
		{
			dev_->start(rs::source::all_sources);
		}
		catch(const rs::error & error)
		{
			UERROR("Failed starting the device: %s (try to unplug/plug the camera)", error.what());
		}

		try
		{
			/* ... */
		}
		catch(const rs::error & error)
		{
			UERROR("Failed starting the device: %s (try to unplug/plug the camera)", error.what());
		}
		try
		{
			dev_->wait_for_frames();
		}
		catch (const rs::error & e)
		{
			UERROR("Exception: %s (try to unplug/plug the camera)", e.what());
		}
	cv::Mat D;
	if(rgbSource_ == kFishEye)
	{
		// Fisheye distortion: 4 coefficients stored at indices 0,1,4,5 of a 1x6 vector.
		D = cv::Mat::zeros(1,6,CV_64FC1);
		D.at<double>(0,0) = color_intrin.coeffs[0];
		D.at<double>(0,1) = color_intrin.coeffs[1];
		D.at<double>(0,4) = color_intrin.coeffs[2];
		D.at<double>(0,5) = color_intrin.coeffs[3];
	}
	else
	{
		// Standard model: 5 distortion coefficients.
		D = cv::Mat::zeros(1,5,CV_64FC1);
		D.at<double>(0,0) = color_intrin.coeffs[0];
		D.at<double>(0,1) = color_intrin.coeffs[1];
		D.at<double>(0,2) = color_intrin.coeffs[2];
		D.at<double>(0,3) = color_intrin.coeffs[3];
		D.at<double>(0,4) = color_intrin.coeffs[4];
	}

	cv::Mat K = cv::Mat::eye(3, 3, CV_64FC1);
	K.at<double>(0,0) = color_intrin.fx;
	K.at<double>(1,1) = color_intrin.fy;
	K.at<double>(0,2) = color_intrin.ppx;
	K.at<double>(1,2) = color_intrin.ppy;

	cv::Mat R = cv::Mat::eye(3, 3, CV_64FC1);
	cv::Mat P = cv::Mat::eye(3, 4, CV_64FC1);
	K(cv::Range(0,2), cv::Range(0,3)).copyTo(P(cv::Range(0,2), cv::Range(0,3)));
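	// K, D, R and P assembled above describe the selected RGB stream: K holds the
	// RealSense focal lengths and principal point, R is left as identity, and P
	// starts as a 3x4 identity with the first two rows of K copied in.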
			cv::Size(color_intrin.width, color_intrin.height),

	// Build the lookup table used to rectify raw color frames in the frame callback.
	rs::extrinsics rect_to_unrect = dev_->get_extrinsics(rs::stream::rectified_color, rs::stream::color);
	rs::intrinsics unrect_intrin = dev_->get_stream_intrinsics(rs::stream::color);
	rsRectificationTable_ = compute_rectification_table(color_intrin, rect_to_unrect, unrect_intrin);
	UINFO("calibration folder=%s name=%s", calibrationFolder.c_str(), cameraName.c_str());
	if(!calibrationFolder.empty() && !cameraName.empty())
	{
		CameraModel model;
		if(!model.load(calibrationFolder, cameraName))
		{
			UWARN("Failed to load calibration \"%s\" in \"%s\" folder, you should calibrate the camera!",
					cameraName.c_str(), calibrationFolder.c_str());
		}
		else
		{
			UINFO("Camera parameters: fx=%f fy=%f cx=%f cy=%f",
					model.fx(), model.fy(), model.cx(), model.cy());
			cameraModel_ = model;
			cameraModel_.setName(cameraName);
			cameraModel_.initRectificationMap();
		}
	}

	UINFO("Enabling streams...done!");
#else
	UERROR("CameraRealSense: RTAB-Map is not built with RealSense support!");
#endif
// getSerial()
#ifdef RTABMAP_REALSENSE
	if(dev_)
	{
		return dev_->get_serial();
	}
#endif
	UERROR("Cannot get a serial before initialization. Call init() before.");
// odomProvided()
#ifdef RTABMAP_REALSENSE_SLAM

#ifdef RTABMAP_REALSENSE_SLAM
// Convert a RealSense SLAM PoseMatrix4f (first three rows of the 4x4 matrix)
// into an rtabmap::Transform.
Transform rsPoseToTransform(const rs::slam::PoseMatrix4f & pose)
{
	return Transform(
			pose.m_data[0], pose.m_data[1], pose.m_data[2], pose.m_data[3],
			pose.m_data[4], pose.m_data[5], pose.m_data[6], pose.m_data[7],
			pose.m_data[8], pose.m_data[9], pose.m_data[10], pose.m_data[11]);
}
#endif
// captureImage()
	SensorData data;
#ifdef RTABMAP_REALSENSE
	if (dev_)
	{
		rs::intrinsics depth_intrin = dev_->get_stream_intrinsics(rs::stream::depth);
		rs::extrinsics depth_to_color;
		rs::intrinsics color_intrin;
		if(rgbSource_ == kFishEye)
		{
			depth_to_color = dev_->get_extrinsics(rs::stream::depth, rs::stream::fisheye);
			color_intrin = dev_->get_stream_intrinsics(rs::stream::fisheye);
		}
		else if(rgbSource_ == kInfrared)
		{
			depth_to_color = dev_->get_extrinsics(rs::stream::depth, rs::stream::infrared);
			color_intrin = dev_->get_stream_intrinsics(rs::stream::infrared);
		}
		else // kColor
		{
			depth_to_color = dev_->get_extrinsics(rs::stream::depth, rs::stream::rectified_color);
			color_intrin = dev_->get_stream_intrinsics(rs::stream::rectified_color);
		}
		cv::Mat rgb;
		cv::Mat depthIn;

#ifdef RTABMAP_REALSENSE_SLAM
		if(!dataReady_.acquire(1, 5000))
		{
			UWARN("No new frames received for 5 seconds, end of stream reached!");
			return data;
		}
		rgb = lastSyncFrames_.first;
		depthIn = lastSyncFrames_.second;
		lastSyncFrames_.first = cv::Mat();
		lastSyncFrames_.second = cv::Mat();

		if(rgb.empty() || depthIn.empty())
		{
			return data;
		}
#else
		try
		{
			dev_->wait_for_frames();
		}
		catch (const rs::error & e)
		{
			UERROR("Exception: %s", e.what());
			return data;
		}

		depthIn = cv::Mat(depth_intrin.height, depth_intrin.width, CV_16UC1, (unsigned char*)dev_->get_frame_data(rs::stream::depth));
		if(rgbSource_ == kFishEye)
		{
			rgb = cv::Mat(color_intrin.height, color_intrin.width, CV_8UC1, (unsigned char*)dev_->get_frame_data(rs::stream::fisheye));
		}
		else if(rgbSource_ == kInfrared)
		{
			rgb = cv::Mat(color_intrin.height, color_intrin.width, CV_8UC1, (unsigned char*)dev_->get_frame_data(rs::stream::infrared));
		}
		else
		{
			rgb = cv::Mat(color_intrin.height, color_intrin.width, CV_8UC3, (unsigned char*)dev_->get_frame_data(rs::stream::color));
		}
#endif
		cv::Mat bgr;
		if(rgbSource_ == kColor)
		{
			cv::cvtColor(rgb, bgr, CV_RGB2BGR);
		}
		else
		{
			bgr = rgb; // fisheye/IR frames are already single-channel
		}

		bool rectified = false;
		if(rgbSource_ == kFishEye && cameraModel_.isRectificationMapInitialized())
		{
			bgr = cameraModel_.rectifyImage(bgr);
			rectified = true;
			color_intrin.fx = cameraModel_.fx();
			color_intrin.fy = cameraModel_.fy();
			color_intrin.ppx = cameraModel_.cx();
			color_intrin.ppy = cameraModel_.cy();
			UASSERT_MSG(color_intrin.width == cameraModel_.imageWidth() && color_intrin.height == cameraModel_.imageHeight(),
					uFormat("color_intrin=%dx%d cameraModel_=%dx%d",
							color_intrin.width, color_intrin.height, cameraModel_.imageWidth(), cameraModel_.imageHeight()).c_str());
			((rs_intrinsics*)&color_intrin)->model = RS_DISTORTION_NONE;
		}
#ifndef RTABMAP_REALSENSE_SLAM
		else if(rgbSource_ != kColor)
		{
			/* ... */
		}
#endif

		cv::Mat depth;
		if(rgbSource_ != kFishEye || rectified)
		{
			// Keep depth at its native resolution and scale the color intrinsics
			// down instead, unless depthScaledToRGBSize_ is set.
			if (color_intrin.width % depth_intrin.width == 0 && color_intrin.height % depth_intrin.height == 0 &&
				depth_intrin.width < color_intrin.width &&
				depth_intrin.height < color_intrin.height &&
				!depthScaledToRGBSize_)
			{
				depth = cv::Mat::zeros(cv::Size(depth_intrin.width, depth_intrin.height), CV_16UC1);
				float scaleX = float(depth_intrin.width) / float(color_intrin.width);
				float scaleY = float(depth_intrin.height) / float(color_intrin.height);
				color_intrin.fx *= scaleX;
				color_intrin.fy *= scaleY;
				color_intrin.ppx *= scaleX;
				color_intrin.ppy *= scaleY;
				color_intrin.height = depth_intrin.height;
				color_intrin.width = depth_intrin.width;
			}
			else
			{
				// Register the depth at the RGB resolution.
				depth = cv::Mat::zeros(bgr.size(), CV_16UC1);
			}

			float scale = dev_->get_depth_scale();
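			// Register each depth pixel into the color frame: deproject to 3D with
			// the depth intrinsics, transform with depth_to_color, reproject with
			// the color intrinsics, and store the value in millimeters; pixels with
			// zero depth or farther than 10 m are skipped.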
			for (int dy = 0; dy < depth_intrin.height; ++dy)
			{
				for (int dx = 0; dx < depth_intrin.width; ++dx)
				{
					uint16_t depth_value = depthIn.at<unsigned short>(dy,dx);
					float depth_in_meters = depth_value * scale;

					if (depth_value == 0 || depth_in_meters > 10.0f) continue;

					rs::float2 depth_pixel = { (float)dx, (float)dy };
					rs::float3 depth_point = depth_intrin.deproject(depth_pixel, depth_in_meters);
					rs::float3 color_point = depth_to_color.transform(depth_point);
					rs::float2 color_pixel = color_intrin.project(color_point);

					int pdx = color_pixel.x;
					int pdy = color_pixel.y;
					if (uIsInBounds(pdx, 0, color_intrin.width) && uIsInBounds(pdy, 0, color_intrin.height))
					{
						depth.at<unsigned short>(pdy, pdx) = (unsigned short)(depth_in_meters*1000.0f);
					}
				}
			}
			if (color_intrin.width > depth_intrin.width)
			{
				/* ... */
			}
		}

		if (!bgr.empty() && ((rgbSource_==kFishEye && !rectified) || !depth.empty()))
		{
			/* ... */
#ifdef RTABMAP_REALSENSE_SLAM
			// Rotate the SLAM pose from the camera optical frame into RTAB-Map's
			// base frame convention.
			rs::slam::PoseMatrix4f pose;
			if(slam_->get_camera_pose(pose) == rs::core::status_no_error)
			{
				Transform opticalRotation(0,0,1,0, -1,0,0,0, 0,-1,0,0);
				info->odomPose = opticalRotation * rsPoseToTransform(pose) * opticalRotation.inverse();
			}
			else
			{
				UERROR("Failed getting odometry pose");
			}
#endif
		}
	}
	else
	{
		ULOGGER_WARN("The camera must be initialized before requesting an image.");
	}
#else
	UERROR("CameraRealSense: RTAB-Map is not built with RealSense support!");
#endif
	return data;
}