#include <multisense_ros/RawCamConfig.h>
#include <multisense_ros/RawCamCal.h>
#include <multisense_ros/DeviceInfo.h>
#include <multisense_ros/Histogram.h>

#include <multisense_lib/MultiSenseChannel.hh>

#include <opencv2/opencv.hpp>
#include <arpa/inet.h>

#include <turbojpeg.h>

const uint32_t luma_cloud_step  = 16;
const uint32_t color_cloud_step = 16;
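//
// Each PointCloud2 element is packed into a fixed 16-byte stride: three
// 32-bit floats (x, y, z) followed by the color/luma field, padded out to
// the step size above.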
//
// Driver callback trampolines: each one simply forwards the incoming image
// header to the owning Camera instance carried in userDataP.

{ reinterpret_cast<Camera*>(userDataP)->monoCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->rectCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->depthCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->pointCloudCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->rawCamDataCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->colorImageCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->disparityImageCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->jpegImageCallback(header); }
{ reinterpret_cast<Camera*>(userDataP)->histogramCallback(header); }
bool isValidPoint(const cv::Vec3f& pt,
                  const float&     maxRange)
{
    //
    // The Euclidean distance of the point from the camera must be within
    // the configured maximum range.

    const float mag = std::sqrt((pt[0]*pt[0])+(pt[1]*pt[1])+(pt[2]*pt[2]));
boost::mutex point_cloud_mutex;
bool publishPointCloud(int64_t                       imageFrameId,
                       int64_t                       pointsFrameId,
                       int64_t&                      cloudFrameId,
                       sensor_msgs::PointCloud2&     cloud,
                       const uint32_t                width,
                       const uint32_t                height,
                       const uint32_t                timeSeconds,
                       const uint32_t                timeMicroSeconds,
                       const uint32_t                cloudStep,
                       const std::vector<cv::Vec3f>& points,
                       const uint8_t*                imageP,
                       const uint32_t                colorChannels,
                       const float                   maxRange,
                       bool                          writeColorPacked,
                       bool                          organized)
{
    boost::mutex::scoped_lock lock(point_cloud_mutex);

    if (imageFrameId != pointsFrameId ||
        cloudFrameId >= imageFrameId) {
        return false;
    }

    cloudFrameId = imageFrameId;
    const uint32_t imageSize = height * width;

    if (points.size() != imageSize)
        return false;

    cloud.data.resize(imageSize * cloudStep);

    uint8_t       *cloudP      = reinterpret_cast<uint8_t*>(&cloud.data[0]);
    const uint32_t pointSize   = 3 * sizeof(float);
    uint32_t       validPoints = 0;

    cv::Vec3f nanPoint(std::numeric_limits<float>::quiet_NaN(),
                       std::numeric_limits<float>::quiet_NaN(),
                       std::numeric_limits<float>::quiet_NaN());
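    //
    // Walk the reprojected point buffer once, appending each point to the
    // cloud as three floats with its color (or luma) packed immediately
    // after the coordinates; points that fail the range/validity check are
    // written as NaNs.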
    for (uint32_t i = 0; i < height; ++i)
        for (uint32_t j = 0; j < width; ++j) {

            const uint32_t index = i * width + j;

            const uint32_t *pointP       = reinterpret_cast<const uint32_t*>(&points[index]);
            uint32_t       *targetCloudP = reinterpret_cast<uint32_t*>(cloudP);

            if (false == isValidPoint(points[index], maxRange))
                pointP = reinterpret_cast<const uint32_t*>(&nanPoint[0]);

            //
            // Copy x, y, z bitwise, as 32-bit words.

            targetCloudP[0] = pointP[0];
            targetCloudP[1] = pointP[1];
            targetCloudP[2] = pointP[2];
            const uint8_t *sourceColorP = &(imageP[colorChannels * index]);
            uint8_t       *cloudColorP  = (cloudP + pointSize);

            if (writeColorPacked || colorChannels > 2) {

                switch(colorChannels) {
                case 4:
                    cloudColorP[3] = sourceColorP[3];
                case 3:
                    cloudColorP[2] = sourceColorP[2];
                case 2:
                    cloudColorP[1] = sourceColorP[1];
                case 1:
                    cloudColorP[0] = sourceColorP[0];
                }

            } else {

                union {
                    uint32_t value;
                    char     bytes[sizeof(uint32_t)];
                } color;

                color.value = 0;

                color.bytes[0] = sourceColorP[0];
                color.bytes[1] = sourceColorP[1] & ((colorChannels > 1) * 255);

                float* floatCloudColorP = reinterpret_cast<float*>(cloudColorP);
                floatCloudColorP[0]     = static_cast<float>(color.value);
            }
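            //
            // Note: in the unpacked case the one or two color bytes are
            // stored through a float-typed field, which is the layout most
            // PointCloud2 consumers expect for an rgb/intensity channel.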
    if (false == organized) {
        cloud.row_step = validPoints * cloudStep;
        cloud.width    = validPoints;
    } else {
        cloud.height   = height;
        cloud.row_step = width * cloudStep;
    }

    cloud.header.stamp = ros::Time(timeSeconds, 1000 * timeMicroSeconds);
    cloud.data.resize(validPoints * cloudStep);
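    //
    // An unorganized cloud is published as a single row containing only the
    // valid points, while an organized cloud keeps the full image width and
    // height so consumers can index points by pixel.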
bool savePgm(const std::string& fileName,
             uint32_t           width,
             uint32_t           height,
             uint32_t           bitsPerPixel,
             const void        *dataP)
{
    std::ofstream outputStream(fileName.c_str(), std::ios::binary | std::ios::out);

    if (false == outputStream.good()) {
        fprintf(stderr, "failed to open \"%s\"\n", fileName.c_str());
        return false;
    }

    const uint32_t imageSize = height * width;

    switch(bitsPerPixel) {
    case 8:
    {
        outputStream << "P5\n"
                     << width << " " << height << "\n"
                     << 0xFF << "\n";

        outputStream.write(reinterpret_cast<const char*>(dataP), imageSize);

        break;
    }
    case 16:
    {
        outputStream << "P5\n"
                     << width << " " << height << "\n"
                     << 0xFFFF << "\n";

        const uint16_t *imageP = reinterpret_cast<const uint16_t*>(dataP);

        for (uint32_t i=0; i<imageSize; ++i) {
            uint16_t o = htons(imageP[i]);
            outputStream.write(reinterpret_cast<const char*>(&o), sizeof(uint16_t));
        }

        break;
    }
    }

    outputStream.close();
    return true;
}
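//
// Note on the 16-bit case: binary PGM ("P5") stores multi-byte samples
// most-significant-byte first, hence the htons() swap before each sample is
// written.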
Camera::Camera(Channel* driver,
               const std::string& tf_prefix) :
    left_nh_(device_nh_, "left"),
    right_nh_(device_nh_, "right"),
    left_mono_transport_(left_nh_),
    right_mono_transport_(right_nh_),
    left_rect_transport_(left_nh_),
    right_rect_transport_(right_nh_),
    left_rgb_transport_(left_nh_),
    left_rgb_rect_transport_(left_nh_),
    depth_transport_(device_nh_),
    ni_depth_transport_(device_nh_),
    disparity_left_transport_(left_nh_),
    disparity_right_transport_(right_nh_),
    disparity_cost_transport_(left_nh_),
    left_mono_cam_info_(),
    right_mono_cam_info_(),
    left_rect_cam_info_(),
    right_rect_cam_info_(),
    left_rgb_rect_cam_info_(),
    left_disp_cam_info_(),
    right_disp_cam_info_(),
    left_cost_cam_info_(),
    left_rgb_cam_info_(),
    left_mono_cam_pub_(),
    right_mono_cam_pub_(),
    left_rect_cam_pub_(),
    right_rect_cam_pub_(),
    left_rgb_rect_cam_pub_(),
    left_mono_cam_info_pub_(),
    right_mono_cam_info_pub_(),
    left_rect_cam_info_pub_(),
    right_rect_cam_info_pub_(),
    left_disp_cam_info_pub_(),
    right_disp_cam_info_pub_(),
    left_cost_cam_info_pub_(),
    left_rgb_cam_info_pub_(),
    left_rgb_rect_cam_info_pub_(),
    depth_cam_info_pub_(),
    luma_point_cloud_pub_(),
    color_point_cloud_pub_(),
    luma_organized_point_cloud_pub_(),
    color_organized_point_cloud_pub_(),
    left_disparity_pub_(),
    right_disparity_pub_(),
    left_disparity_cost_pub_(),
    left_stereo_disparity_pub_(),
    right_stereo_disparity_pub_(),
    raw_cam_config_pub_(),
    color_point_cloud_(),
    luma_organized_point_cloud_(),
    color_organized_point_cloud_(),
    left_rgb_rect_image_(),
    left_disparity_image_(),
    left_disparity_cost_image_(),
    right_disparity_image_(),
    left_stereo_disparity_(),
    right_stereo_disparity_(),
    got_raw_cam_left_(false),
    got_left_luma_(false),
    left_luma_frame_id_(0),
    left_rect_frame_id_(0),
    left_rgb_rect_frame_id_(-1),
    luma_point_cloud_frame_id_(-1),
    luma_organized_point_cloud_frame_id_(-1),
    color_point_cloud_frame_id_(-1),
    color_organized_point_cloud_frame_id_(-1),
    image_calibration_(),
    calibration_map_left_1_(NULL),
    calibration_map_left_2_(NULL),
    points_buff_frame_id_(-1),
    q_matrix_(4, 4, 0.0),
    pc_max_range_(15.0f),
    pc_color_frame_sync_(true),
    luma_color_depth_(1),
    write_pc_color_packed_(false),
    border_clip_type_(0),
    border_clip_value_(0.0),
    //
    // Query version and device information from the sensor.

    Status status = driver_->getVersionInfo(version_info_);
    if (Status_Ok != status) {
        ROS_ERROR("Camera: failed to query version info: %s",
                  Channel::statusString(status));
        return;
    }

    status = driver_->getDeviceInfo(device_info_);
    if (Status_Ok != status) {
        ROS_ERROR("Camera: failed to query device info: %s",
                  Channel::statusString(status));
        return;
    }
    device_info_pub_    = calibration_nh.advertise<multisense_ros::DeviceInfo>("device_info", 1, true);
    raw_cam_cal_pub_    = calibration_nh.advertise<multisense_ros::RawCamCal>("raw_cam_cal", 1, true);
    raw_cam_config_pub_ = calibration_nh.advertise<multisense_ros::RawCamConfig>("raw_cam_config", 1, true);

    //
    // The final argument latches these topics: the most recent calibration
    // message is re-delivered to any subscriber that connects later.
    //
    // Data-source masks handed to the connect/disconnect stream callbacks
    // for the color point cloud topics: a color cloud needs disparity plus
    // the left luma and chroma planes.

                          Source_Disparity | Source_Luma_Left | Source_Chroma_Left),
                          Source_Disparity | Source_Luma_Left | Source_Chroma_Left));
                          Source_Disparity | Source_Luma_Left | Source_Chroma_Left),
                          Source_Disparity | Source_Luma_Left | Source_Chroma_Left));
    raw_cam_data_pub_ = calibration_nh.advertise<multisense_ros::RawCamData>("raw_cam_data", 5,
    multisense_ros::DeviceInfo msg;

    std::vector<system::PcbInfo>::const_iterator it = device_info_.pcbs.begin();
    for (; it != device_info_.pcbs.end(); ++it) {
        msg.pcbSerialNumbers.push_back((*it).revision);
        msg.pcbNames.push_back((*it).name);
    }
    if (Status_Ok != status)
        ROS_ERROR("Camera: failed to query image calibration: %s",
                  Channel::statusString(status));

    //
    // Flatten the left/right M (3x3), D (8), R (3x3) and P (3x4) calibration
    // matrices into the latched RawCamCal message; cP is repointed at each
    // source matrix in turn.

    multisense_ros::RawCamCal cal;

    for(uint32_t i=0; i<9; i++)  cal.left_M[i]  = cP[i];
    for(uint32_t i=0; i<8; i++)  cal.left_D[i]  = cP[i];
    for(uint32_t i=0; i<9; i++)  cal.left_R[i]  = cP[i];
    for(uint32_t i=0; i<12; i++) cal.left_P[i]  = cP[i];

    for(uint32_t i=0; i<9; i++)  cal.right_M[i] = cP[i];
    for(uint32_t i=0; i<8; i++)  cal.right_D[i] = cP[i];
    for(uint32_t i=0; i<9; i++)  cal.right_R[i] = cP[i];
    for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i];

    raw_cam_cal_pub_.publish(cal);
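    //
    // With frame sync enabled (the default), a color point cloud is only
    // assembled from a color image whose frame ID matches the
    // disparity-derived points (see the frame-ID check in publishPointCloud
    // above); the environment variable handled below relaxes that pairing.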
    const char *pcColorFrameSyncEnvStringP = getenv("MULTISENSE_ROS_PC_COLOR_FRAME_SYNC_OFF");

    if (NULL != pcColorFrameSyncEnvStringP) {
        pc_color_frame_sync_ = false;
        ROS_INFO("color point cloud frame sync is disabled");
    }
    //
    // Publish the sensor's image histogram for this frame (retrieved from
    // the driver with getImageHistogram()).

    multisense_ros::Histogram rh;

    if (Status_Ok == status) {
        rh.frame_count = header.frameId;
        rh.width       = header.width;
        rh.height      = header.height;

        switch(header.source) {
        case Source_Chroma_Left:
        case Source_Chroma_Right:
            rh.width  *= 2;
            rh.height *= 2;
        }

        rh.gain = header.gain;
    if (Source_Jpeg_Left != header.source)
        return;

    const uint32_t height    = header.height;
    const uint32_t width     = header.width;
    const uint32_t rgbLength = height * width * 3;

    tjhandle jpegDecompressor = tjInitDecompress();
    tjDecompress2(jpegDecompressor,
                  reinterpret_cast<unsigned char*>(const_cast<void*>(header.imageDataP)),
                  header.imageLength,
                  &(left_rgb_image_.data[0]),
                  width, 0, height, TJPF_RGB, 0);
    tjDestroy(jpegDecompressor);
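    //
    // libjpeg-turbo decodes the on-camera JPEG straight into the RGB image
    // buffer; a pitch of 0 tells tjDecompress2 to use tightly packed rows
    // (width * 3 bytes), which matches rgbLength above.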
    boost::mutex::scoped_lock lock(cal_lock_);

    if (NULL == calibration_map_left_1_ || NULL == calibration_map_left_2_) {
        ROS_ERROR("Camera: undistort maps not initialized");
        return;
    }

    const CvScalar outlierColor = cv::Scalar_<double>(0.0);

    IplImage *sourceImageP  = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 3);
    sourceImageP->imageData = reinterpret_cast<char*>(&(left_rgb_image_.data[0]));
    IplImage *destImageP    = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 3);
    destImageP->imageData   = reinterpret_cast<char*>(&(left_rgb_rect_image_.data[0]));

    cvRemap(sourceImageP, destImageP,
            calibration_map_left_1_,
            calibration_map_left_2_,
            CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS,
            outlierColor);

    cvReleaseImageHeader(&sourceImageP);
    cvReleaseImageHeader(&destImageP);
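    //
    // cvRemap applies the undistortion/rectification maps produced by the
    // cvInitUndistortRectifyMap call further down; pixels that fall outside
    // the source image are filled with outlierColor.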
    if (!((Source_Disparity == header.source &&
           left_disparity_pub_.getNumSubscribers() > 0) ||
          (Source_Disparity_Right == header.source &&
           right_disparity_pub_.getNumSubscribers() > 0) ||
          (Source_Disparity_Cost == header.source &&
           left_disparity_cost_pub_.getNumSubscribers() > 0) ||
          (Source_Disparity == header.source &&
           left_stereo_disparity_pub_.getNumSubscribers() > 0) ||
          (Source_Disparity_Right == header.source &&
           right_stereo_disparity_pub_.getNumSubscribers() > 0)))
        return;

    switch(header.source) {
    case Source_Disparity:
    case Source_Disparity_Right:
    {
        sensor_msgs::Image          *imageP                = NULL;
        sensor_msgs::CameraInfo     *camInfoP              = NULL;
        stereo_msgs::DisparityImage *stereoDisparityImageP = NULL;

        if (Source_Disparity == header.source) {
        imageP->data.resize(imageSize);
        memcpy(&imageP->data[0], header.imageDataP, imageSize);

        imageP->header.stamp = t;
        imageP->height       = header.height;
        imageP->width        = header.width;
        imageP->is_bigendian = false;

        switch(header.bitsPerPixel) {
        case 8:
            imageP->step = header.width;
            break;
        case 16:
            imageP->step = header.width * 2;
            break;
        }
        std::stringstream warning;
        warning << "Current camera configuration has non-square pixels (fx != fy). ";
        warning << "The stereo_msgs/DisparityImage does not account for";
        warning << " this. Be careful when reprojecting to a pointcloud.";
        ROS_WARN("%s", warning.str().c_str());
        uint32_t floatingPointImageSize = header.width * header.height * 4;
        stereoDisparityImageP->image.data.resize(floatingPointImageSize);

        stereoDisparityImageP->header.stamp          = t;
        stereoDisparityImageP->image.height          = header.height;
        stereoDisparityImageP->image.width           = header.width;
        stereoDisparityImageP->image.is_bigendian    = false;
        stereoDisparityImageP->image.header.stamp    = t;
        stereoDisparityImageP->image.header.frame_id = stereoDisparityImageP->header.frame_id;
        stereoDisparityImageP->image.encoding        = "32FC1";
        stereoDisparityImageP->image.step            = 4 * header.width;

        stereoDisparityImageP->min_disparity = 0;
        stereoDisparityImageP->delta_d       = 1./16.;
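        //
        // The sensor reports disparities in fixed point with 1/16 pixel
        // resolution, hence delta_d = 1/16 and the division by 16 when the
        // 16-bit disparity image is converted to 32FC1 below.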
        cv::Mat_<uint16_t> tmpImage(header.height,
                                    header.width,
                                    reinterpret_cast<uint16_t*>(
                                        const_cast<void*>(header.imageDataP)));

        cv::Mat_<float> floatingPointImage(header.height,
                                           header.width,
                                           reinterpret_cast<float*>(&stereoDisparityImageP->image.data[0]));

        floatingPointImage = tmpImage / 16.0;

        stereoDisparityPubP->publish(*stereoDisparityImageP);
        camInfoP->header       = imageP->header;
        camInfoP->header.stamp = t;
        camInfoPubP->publish(*camInfoP);

    case Source_Disparity_Cost:
    if (Source_Luma_Left  != header.source &&
        Source_Luma_Right != header.source) {

    switch(header.source) {
    case Source_Luma_Left:

    case Source_Luma_Right:
    if (Source_Luma_Rectified_Left  != header.source &&
        Source_Luma_Rectified_Right != header.source) {

    switch(header.source) {
    case Source_Luma_Rectified_Left:

    case Source_Luma_Rectified_Right:
    if (Source_Disparity != header.source) {

    if (0 == niDepthSubscribers &&
        0 == depthSubscribers)
        return;

    const float    bad_point   = std::numeric_limits<float>::quiet_NaN();
    const uint32_t depthSize   = header.height * header.width * sizeof(float);
    const uint32_t niDepthSize = header.height * header.width * sizeof(uint16_t);
    const uint32_t imageSize   = header.width * header.height;

    float    *depthImageP   = reinterpret_cast<float*>(&depth_image_.data[0]);
    uint16_t *niDepthImageP = reinterpret_cast<uint16_t*>(&ni_depth_image_.data[0]);
        //
        // 32-bit floating point disparity: convert directly to depth.

        const float *disparityImageP = reinterpret_cast<const float*>(header.imageDataP);

        for (uint32_t i = 0 ; i < imageSize ; ++i)
        {
            if (0.0 >= disparityImageP[i])
            {
                depthImageP[i]   = bad_point;
                niDepthImageP[i] = 0;
            }
            else
            {
                depthImageP[i]   = scale / disparityImageP[i];
                niDepthImageP[i] = static_cast<uint16_t>(depthImageP[i] * 1000);
            }
        }
        //
        // 16-bit fixed-point disparity.

        const uint16_t *disparityImageP = reinterpret_cast<const uint16_t*>(header.imageDataP);

        for (uint32_t i = 0 ; i < imageSize ; ++i)
        {
            if (0 == disparityImageP[i])
            {
                depthImageP[i]   = bad_point;
                niDepthImageP[i] = 0;
            }
            else
            {
                depthImageP[i]   = scale / disparityImageP[i];
                niDepthImageP[i] = static_cast<uint16_t>(depthImageP[i] * 1000);
            }
        }
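    //
    // Depth is recovered as scale / disparity; for a rectified stereo pair
    // that scale is the focal length (in pixels) times the baseline (in
    // meters), so the result is in meters.  The secondary uint16 image
    // stores the same depth in millimeters (hence the * 1000), the
    // convention used by OpenNI-style depth consumers.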
    //
    // Publish whichever depth flavors currently have subscribers.

    if (0 != niDepthSubscribers)

    if (0 != depthSubscribers)
    if (Source_Disparity != header.source) {

    const bool     handle_missing = true;
    const uint32_t imageSize      = header.height * header.width;

        //
        // 32-bit floating point disparity.

        cv::Mat_<float> disparity(header.height, header.width,
                                  const_cast<float*>(reinterpret_cast<const float*>(header.imageDataP)));

        cv::reprojectImageTo3D(disparity, points, q_matrix_, handle_missing);

        //
        // 16-bit fixed-point disparity, converted to pixels (1/16 steps)
        // before reprojection.

        cv::Mat_<uint16_t> disparityOrigP(header.height, header.width,
                                          const_cast<uint16_t*>(reinterpret_cast<const uint16_t*>(header.imageDataP)));

        disparity = disparityOrigP / 16.0f;

        cv::reprojectImageTo3D(disparity, points, q_matrix_, handle_missing);
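    //
    // cv::reprojectImageTo3D uses the 4x4 reprojection matrix Q (q_matrix_,
    // assembled from the rectified calibration) to turn the disparity at
    // each pixel into a 3D point; with handle_missing set, invalid
    // disparities are pushed out to a very large Z that the maxRange test
    // in isValidPoint() later rejects.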
    const uint32_t imageSize = header.width * header.height;

    //
    // Cache the rectified-left luma and the matching disparity image in the
    // RawCamData message.

    if (Source_Luma_Rectified_Left == header.source) {

                   imageSize * sizeof(uint8_t));

    } else if (Source_Disparity == header.source) {

        const uint32_t imageSize = header.width * header.height;

                   header.imageDataP, imageSize * sizeof(uint16_t));
    if (Source_Luma_Left == header.source) {

        const uint32_t imageSize = header.width * header.height;

    } else if (Source_Chroma_Left == header.source) {

        const uint32_t imageSize = 3 * height * width;
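        //
        // The luma plane arrives first and is cached in left_luma_image_;
        // once the matching chroma plane arrives, the two are combined
        // below.  Chroma is streamed interleaved (Cb, Cr) at half resolution
        // in each dimension, so this is a YCbCr 4:2:0 to BGR conversion
        // using the usual JPEG/BT.601 coefficients, with each channel
        // clamped to [0, 255].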
        const uint8_t *lumaP   = reinterpret_cast<const uint8_t*>(&(left_luma_image_.data[0]));
        const uint8_t *chromaP = reinterpret_cast<const uint8_t*>(header.imageDataP);
        uint8_t       *bgrP    = reinterpret_cast<uint8_t*>(&(left_rgb_image_.data[0]));

        const uint32_t rgbStride = width * 3;

        for(uint32_t y=0; y<height; y++) {
            for(uint32_t x=0; x<width; x++) {

                const uint32_t lumaOffset   = (y * width) + x;
                const uint32_t chromaOffset = 2 * (((y/2) * (width/2)) + (x/2));

                const float px_y  = static_cast<float>(lumaP[lumaOffset]);
                const float px_cb = static_cast<float>(chromaP[chromaOffset+0]) - 128.0f;
                const float px_cr = static_cast<float>(chromaP[chromaOffset+1]) - 128.0f;

                float px_r = px_y + 1.402f * px_cr;
                float px_g = px_y - 0.34414f * px_cb - 0.71414f * px_cr;
                float px_b = px_y + 1.772f * px_cb;

                if (px_r < 0.0f)        px_r = 0.0f;
                else if (px_r > 255.0f) px_r = 255.0f;
                if (px_g < 0.0f)        px_g = 0.0f;
                else if (px_g > 255.0f) px_g = 255.0f;
                if (px_b < 0.0f)        px_b = 0.0f;
                else if (px_b > 255.0f) px_b = 255.0f;

                const uint32_t rgbOffset = (y * rgbStride) + (3 * x);

                bgrP[rgbOffset + 0] = static_cast<uint8_t>(px_b);
                bgrP[rgbOffset + 1] = static_cast<uint8_t>(px_g);
                bgrP[rgbOffset + 2] = static_cast<uint8_t>(px_r);
            }
        }
    boost::mutex::scoped_lock lock(cal_lock_);

    if (NULL == calibration_map_left_1_ || NULL == calibration_map_left_2_) {
        ROS_ERROR("Camera: undistort maps not initialized");
        return;
    }

    const CvScalar outlierColor = cv::Scalar_<double>(0.0);

    IplImage *sourceImageP  = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 3);
    sourceImageP->imageData = reinterpret_cast<char*>(&(left_rgb_image_.data[0]));
    IplImage *destImageP    = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 3);
    destImageP->imageData   = reinterpret_cast<char*>(&(left_rgb_rect_image_.data[0]));

    cvRemap(sourceImageP, destImageP,
            calibration_map_left_1_,
            calibration_map_left_2_,
            CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS,
            outlierColor);

    cvReleaseImageHeader(&sourceImageP);
    cvReleaseImageHeader(&destImageP);
    boost::mutex::scoped_lock lock(cal_lock_);

    if (Status_Ok != status) {
        ROS_ERROR("Camera: failed to query sensor configuration: %s",
                  Channel::statusString(status));
        return;
    }
    //
    // x_scale / y_scale relate the full-resolution calibration to the
    // currently configured operating resolution (c.width() x c.height()).

                                   static_cast<float>(c.width())));
                                   static_cast<float>(c.height())));
    cal.left.M[0][0]  *= x_scale;  cal.left.M[1][1]  *= y_scale;
    cal.left.M[0][2]  *= x_scale;  cal.left.M[1][2]  *= y_scale;
    cal.right.M[0][0] *= x_scale;  cal.right.M[1][1] *= y_scale;
    cal.right.M[0][2] *= x_scale;  cal.right.M[1][2] *= y_scale;
    cal.left.P[0][0]  *= x_scale;  cal.left.P[1][1]  *= y_scale;
    cal.left.P[0][2]  *= x_scale;  cal.left.P[1][2]  *= y_scale;
    cal.left.P[0][3]  *= x_scale;  cal.left.P[1][3]  *= y_scale;

    CvMat M1 = cvMat(3, 3, CV_32F, &cal.left.M);
    CvMat D1 = cvMat(1, 8, CV_32F, &cal.left.D);
    CvMat R1 = cvMat(3, 3, CV_32F, &cal.left.R);
    CvMat P1 = cvMat(3, 4, CV_32F, &cal.left.P);

    cvInitUndistortRectifyMap(&M1, &D1, &R1, &P1,
                              calibration_map_left_1_,
                              calibration_map_left_2_);
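    //
    // The calibration is stored for the imager's full resolution, so the
    // focal lengths, principal point, and the translation column of P are
    // scaled to the current operating resolution before the left
    // rectification maps are rebuilt.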
    multisense_ros::RawCamConfig cfg;

    cfg.width             = c.width();
    cfg.frames_per_second = c.fps();
    cfg.gain              = c.gain();

    cfg.roll  = c.roll();
    cfg.pitch = c.pitch();
void Camera::updateCameraInfo(sensor_msgs::CameraInfo& cameraInfo,
                              const float M[3][3],
                              const float R[3][3],
                              const float P[3][4],
                              const float D[8],
                              double xScale,
                              double yScale)
{
    cameraInfo.P[0]  = P[0][0] * xScale;  cameraInfo.P[1]  = P[0][1];
    cameraInfo.P[2]  = P[0][2] * xScale;  cameraInfo.P[3]  = P[0][3] * xScale;
    cameraInfo.P[4]  = P[1][0];           cameraInfo.P[5]  = P[1][1] * yScale;
    cameraInfo.P[6]  = P[1][2] * yScale;  cameraInfo.P[7]  = P[1][3];
    cameraInfo.P[8]  = P[2][0];           cameraInfo.P[9]  = P[2][1];
    cameraInfo.P[10] = P[2][2];           cameraInfo.P[11] = P[2][3];

    cameraInfo.K[0]  = M[0][0] * xScale;  cameraInfo.K[1]  = M[0][1];
    cameraInfo.K[2]  = M[0][2] * xScale;  cameraInfo.K[3]  = M[1][0];
    cameraInfo.K[4]  = M[1][1] * yScale;  cameraInfo.K[5]  = M[1][2] * yScale;
    cameraInfo.K[6]  = M[2][0];           cameraInfo.K[7]  = M[2][1];
    cameraInfo.K[8]  = M[2][2];

    cameraInfo.R[0]  = R[0][0];           cameraInfo.R[1]  = R[0][1];
    cameraInfo.R[2]  = R[0][2];           cameraInfo.R[3]  = R[1][0];
    cameraInfo.R[4]  = R[1][1];           cameraInfo.R[5]  = R[1][2];
    cameraInfo.R[6]  = R[2][0];           cameraInfo.R[7]  = R[2][1];
    cameraInfo.R[8]  = R[2][2];

    cameraInfo.D.resize(8);
    for (uint8_t i=0 ; i < 8 ; ++i) {
        cameraInfo.D[i] = D[i];
    }

    if (D[7] == 0.0 && D[6] == 0.0 && D[5] == 0.0) {
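        //
        // Only the first five distortion coefficients are populated, so the
        // calibration matches ROS's "plumb_bob" model; otherwise the full
        // eight-coefficient "rational_polynomial" model is advertised.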
    border_clip_mask_ = cv::Mat_<uint8_t>(height, width, static_cast<uint8_t>(255));

    double halfWidth  = static_cast<double>(width)/2.0;
    double halfHeight = static_cast<double>(height)/2.0;

    double radius = sqrt( pow( halfWidth, 2) + pow( halfHeight, 2) );
    radius -= borderClipValue;

    for (uint32_t u = 0 ; u < width ; ++u)
    {
        for (uint32_t v = 0 ; v < height ; ++v)
        {
            switch (borderClipType)
            {
                //
                // Rectangular clip: keep pixels at least borderClipValue
                // away from every image edge.

                if ( u >= borderClipValue && u <= width - borderClipValue &&
                     v >= borderClipValue && v <= height - borderClipValue)

                //
                // Circular clip: keep pixels whose distance from the image
                // center is inside the clipped radius.

                double vector = sqrt( pow( halfWidth - u, 2) +
                                      pow( halfHeight - v, 2) );

                if ( vector < radius)

            default:
                ROS_WARN("Unknown border clip type.");

    //
    // The resulting 8-bit mask records, per pixel, whether the point
    // survives the border clip: a rectangular clip keeps pixels at least
    // borderClipValue from every edge, while a circular clip keeps pixels
    // inside a circle of radius half-diagonal minus borderClipValue.
    if (Status_Ok != status)
        ROS_ERROR("Camera: failed to stop all streams: %s",
                  Channel::statusString(status));
    for(uint32_t i=0; i<32; i++)
        if ((1<<i) & enableMask && 0 == stream_map_[(1<<i)]++)
            notStarted |= (1<<i);

    if (0 != notStarted) {

        Status status = driver_->startStreams(notStarted);
        if (Status_Ok != status)
            ROS_ERROR("Camera: failed to start streams 0x%x: %s",
                      notStarted, Channel::statusString(status));
    }
    for(uint32_t i=0; i<32; i++)
        if ((1<<i) & disableMask && 0 == --stream_map_[(1<<i)])
            notStopped |= (1<<i);

    if (0 != notStopped) {

        Status status = driver_->stopStreams(notStopped);
        if (Status_Ok != status)
            ROS_ERROR("Camera: failed to stop streams 0x%x: %s\n",
                      notStopped, Channel::statusString(status));
    }
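    //
    // connectStream/disconnectStream reference-count each DataSource bit in
    // stream_map_, so a stream is started on the first interested subscriber
    // and stopped again only when the last one disconnects.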