camera.cpp
34 #include <arpa/inet.h>
35 #include <fstream>
36 #include <turbojpeg.h>
37 
38 #include <Eigen/Geometry>
39 #include <opencv2/opencv.hpp>
40 
45 
46 #include <multisense_lib/MultiSenseChannel.hh>
47 #include <multisense_ros/camera.h>
48 #include <multisense_ros/RawCamConfig.h>
49 #include <multisense_ros/RawCamCal.h>
50 #include <multisense_ros/DeviceInfo.h>
51 #include <multisense_ros/Histogram.h>
52 #include <multisense_ros/RawCamData.h>
53 
54 using namespace crl::multisense;
55 
56 namespace multisense_ros {
57 
58 namespace { // anonymous
59 
60 tf2::Matrix3x3 toRotation(float R[3][3])
61 {
62  return tf2::Matrix3x3{R[0][0],
63  R[0][1],
64  R[0][2],
65  R[1][0],
66  R[1][1],
67  R[1][2],
68  R[2][0],
69  R[2][1],
70  R[2][2]};
71 }
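// Usage sketch (annotation, not part of the original source): the driver hands back
// row-major 3x3 extrinsic rotation matrices, which map directly onto tf2::Matrix3x3's
// row-major constructor, e.g.
//
//   float R[3][3] = {{1.f, 0.f, 0.f},
//                    {0.f, 1.f, 0.f},
//                    {0.f, 0.f, 1.f}};
//   const tf2::Matrix3x3 identity = toRotation(R);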
72 
73 //
74 // All of the data sources that we control here
75 
76 constexpr DataSource allImageSources = (Source_Luma_Left |
89 
90 //
91 // Shims for C-style driver callbacks
92 
93 void monoCB(const image::Header& header, void* userDataP)
94 { reinterpret_cast<Camera*>(userDataP)->monoCallback(header); }
95 void rectCB(const image::Header& header, void* userDataP)
96 { reinterpret_cast<Camera*>(userDataP)->rectCallback(header); }
97 void depthCB(const image::Header& header, void* userDataP)
98 { reinterpret_cast<Camera*>(userDataP)->depthCallback(header); }
99 void pointCB(const image::Header& header, void* userDataP)
100 { reinterpret_cast<Camera*>(userDataP)->pointCloudCallback(header); }
101 void rawCB(const image::Header& header, void* userDataP)
102 { reinterpret_cast<Camera*>(userDataP)->rawCamDataCallback(header); }
103 void colorCB(const image::Header& header, void* userDataP)
104 { reinterpret_cast<Camera*>(userDataP)->colorImageCallback(header); }
105 void dispCB(const image::Header& header, void* userDataP)
106 { reinterpret_cast<Camera*>(userDataP)->disparityImageCallback(header); }
107 void jpegCB(const image::Header& header, void* userDataP)
108 { reinterpret_cast<Camera*>(userDataP)->jpegImageCallback(header); }
109 void histCB(const image::Header& header, void* userDataP)
110 { reinterpret_cast<Camera*>(userDataP)->histogramCallback(header); }
111 void colorizeCB(const image::Header& header, void* userDataP)
112 { reinterpret_cast<Camera*>(userDataP)->colorizeCallback(header); }
113 
114 bool isValidReprojectedPoint(const Eigen::Vector3f& pt, double squared_max_range)
115 {
116  return pt[2] > 0.0f && std::isfinite(pt[2]) && pt.squaredNorm() < squared_max_range;
117 }
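// Illustrative check (annotation): a reprojected point is kept only if it lies in front
// of the camera, is finite, and falls inside the configured maximum range. For example,
// with pointcloud_max_range_ = 15 m (squared_max_range = 225):
//
//   isValidReprojectedPoint(Eigen::Vector3f{1.f, 0.f,  2.f}, 225.0);  // true
//   isValidReprojectedPoint(Eigen::Vector3f{0.f, 0.f, -1.f}, 225.0);  // false: behind the camera
//   isValidReprojectedPoint(Eigen::Vector3f{0.f, 0.f, 20.f}, 225.0);  // false: beyond max range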
118 
119 void writePoint(sensor_msgs::PointCloud2 &pointcloud, size_t index, const Eigen::Vector3f &point, uint32_t color)
120 {
121  float* cloudP = reinterpret_cast<float*>(&(pointcloud.data[index * pointcloud.point_step]));
122  cloudP[0] = point[0];
123  cloudP[1] = point[1];
124  cloudP[2] = point[2];
125 
126  uint32_t* colorP = reinterpret_cast<uint32_t*>(&(cloudP[3]));
127  colorP[0] = color;
128 }
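// Layout note (assumption inferred from the writes above): each point occupies
// point_step bytes starting with three packed floats followed by a 32-bit field that
// holds either packed color or widened luma intensity:
//
//   byte offset:  0..3   4..7   8..11   12..15
//   field:        x      y      z       rgb / intensity
//
// The actual point_step and field names come from initialize_pointcloud(), which is
// called in the Camera constructor.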
129 
130 void writePoint(sensor_msgs::PointCloud2 &pointcloud,
131  size_t pointcloud_index,
132  const Eigen::Vector3f &point,
133  size_t image_index,
134  const image::Header &image)
135 {
136  switch (image.bitsPerPixel)
137  {
138  case 8:
139  {
140  const uint32_t luma = static_cast<uint32_t>(reinterpret_cast<const uint8_t*>(image.imageDataP)[image_index]);
141  return writePoint(pointcloud, pointcloud_index, point, luma);
142  }
143  case 16:
144  {
145  const uint32_t luma = static_cast<uint32_t>(reinterpret_cast<const uint16_t*>(image.imageDataP)[image_index]);
146  return writePoint(pointcloud, pointcloud_index, point, luma);
147  }
148  case 32:
149  {
150  const uint32_t luma = reinterpret_cast<const uint32_t*>(image.imageDataP)[image_index];
151  return writePoint(pointcloud, pointcloud_index, point, luma);
152  }
153  }
154 }
155 
156 bool clipPoint(const BorderClip& borderClipType,
157  double borderClipValue,
158  size_t height,
159  size_t width,
160  size_t u,
161  size_t v)
162 {
163  switch (borderClipType)
164  {
165  case BorderClip::NONE:
166  {
167  return false;
168  }
169  case BorderClip::RECTANGULAR:
170  {
171  return !( u >= borderClipValue && u <= width - borderClipValue &&
172  v >= borderClipValue && v <= height - borderClipValue);
173  }
174  case BorderClip::CIRCULAR:
175  {
176  const double halfWidth = static_cast<double>(width)/2.0;
177  const double halfHeight = static_cast<double>(height)/2.0;
178 
179  const double radius = sqrt( halfWidth * halfWidth + halfHeight * halfHeight ) - borderClipValue;
180 
181  return !(Eigen::Vector2d{halfWidth - u, halfHeight - v}.norm() < radius);
182  }
183  default:
184  {
185  ROS_WARN("Camera: Unknown border clip type.");
186  break;
187  }
188  }
189 
190  return true;
191 }
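// Behavior sketch (annotation): clipPoint() returns true when a pixel should be
// discarded. With BorderClip::RECTANGULAR and borderClipValue = 10 on a
// 1024 (width) x 544 (height) image, only pixels with u in [10, 1014] and
// v in [10, 534] survive. BorderClip::CIRCULAR instead keeps pixels whose distance
// from the image center is less than the corner radius minus the clip value.
// Note the parameter order: height and width precede the (u, v) pixel coordinates.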
192 
193 cv::Vec3b u_interpolate_color(double u, double v, const cv::Mat &image)
194 {
195  const double width = image.cols;
196 
197  //
198  // Interpolate in just the u dimension
199  //
200  const size_t min_u = static_cast<size_t>(std::min(std::max(std::floor(u), 0.), width - 1.));
201  const size_t max_u = static_cast<size_t>(std::min(std::max(std::ceil(u), 0.), width - 1.));
202 
203  const cv::Vec3d element0 = image.at<cv::Vec3b>(width * v + min_u);
204  const cv::Vec3d element1 = image.at<cv::Vec3b>(width * v + max_u);
205 
206  const size_t delta_u = max_u - min_u;
207 
208  const double u_ratio = delta_u == 0 ? 1. : (static_cast<double>(max_u) - u) / static_cast<double>(delta_u);
209 
210  const cv::Vec3b result = (element0 * u_ratio + element1 * (1. - u_ratio));
211 
212  return result;
213 }
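// Rationale (annotation): because the rectified aux image is row-aligned with the
// rectified left image, colorizing a disparity pixel only requires a horizontal shift
// by the aux disparity, so sub-pixel interpolation is needed in u alone; v is used
// directly. See the color lookup in pointCloudCallback() below.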
214 
215 } // anonymous
216 
217 //
218 // Provide compiler with definition of the static members
219 
220 constexpr char Camera::LEFT[];
221 constexpr char Camera::RIGHT[];
222 constexpr char Camera::AUX[];
223 constexpr char Camera::CALIBRATION[];
224 
225 constexpr char Camera::LEFT_CAMERA_FRAME[];
226 constexpr char Camera::RIGHT_CAMERA_FRAME[];
227 constexpr char Camera::LEFT_RECTIFIED_FRAME[];
228 constexpr char Camera::RIGHT_RECTIFIED_FRAME[];
229 constexpr char Camera::AUX_CAMERA_FRAME[];
230 constexpr char Camera::AUX_RECTIFIED_FRAME[];
231 
232 constexpr char Camera::DEVICE_INFO_TOPIC[];
233 constexpr char Camera::RAW_CAM_CAL_TOPIC[];
234 constexpr char Camera::RAW_CAM_CONFIG_TOPIC[];
235 constexpr char Camera::RAW_CAM_DATA_TOPIC[];
236 constexpr char Camera::HISTOGRAM_TOPIC[];
237 constexpr char Camera::MONO_TOPIC[];
238 constexpr char Camera::RECT_TOPIC[];
239 constexpr char Camera::DISPARITY_TOPIC[];
240 constexpr char Camera::DISPARITY_IMAGE_TOPIC[];
241 constexpr char Camera::DEPTH_TOPIC[];
242 constexpr char Camera::OPENNI_DEPTH_TOPIC[];
243 constexpr char Camera::COST_TOPIC[];
244 constexpr char Camera::COLOR_TOPIC[];
245 constexpr char Camera::RECT_COLOR_TOPIC[];
246 constexpr char Camera::POINTCLOUD_TOPIC[];
247 constexpr char Camera::COLOR_POINTCLOUD_TOPIC[];
248 constexpr char Camera::ORGANIZED_POINTCLOUD_TOPIC[];
249 constexpr char Camera::COLOR_ORGANIZED_POINTCLOUD_TOPIC[];
250 constexpr char Camera::MONO_CAMERA_INFO_TOPIC[];
251 constexpr char Camera::RECT_CAMERA_INFO_TOPIC[];
252 constexpr char Camera::COLOR_CAMERA_INFO_TOPIC[];
253 constexpr char Camera::RECT_COLOR_CAMERA_INFO_TOPIC[];
254 constexpr char Camera::DEPTH_CAMERA_INFO_TOPIC[];
255 constexpr char Camera::DISPARITY_CAMERA_INFO_TOPIC[];
256 constexpr char Camera::COST_CAMERA_INFO_TOPIC[];
257 
258 Camera::Camera(Channel* driver, const std::string& tf_prefix) :
259  driver_(driver),
260  device_nh_(""),
261  left_nh_(device_nh_, LEFT),
262  right_nh_(device_nh_, RIGHT),
263  aux_nh_(device_nh_, AUX),
264  calibration_nh_(device_nh_, CALIBRATION),
265  left_mono_transport_(left_nh_),
266  right_mono_transport_(right_nh_),
267  left_rect_transport_(left_nh_),
268  right_rect_transport_(right_nh_),
269  left_rgb_transport_(left_nh_),
270  left_rgb_rect_transport_(left_nh_),
271  depth_transport_(device_nh_),
272  ni_depth_transport_(device_nh_),
273  disparity_left_transport_(left_nh_),
274  disparity_right_transport_(right_nh_),
275  disparity_cost_transport_(left_nh_),
276  aux_mono_transport_(aux_nh_),
277  aux_rgb_transport_(aux_nh_),
278  aux_rect_transport_(aux_nh_),
279  aux_rgb_rect_transport_(aux_nh_),
280  frame_id_left_(tf_prefix + LEFT_CAMERA_FRAME),
281  frame_id_right_(tf_prefix + RIGHT_CAMERA_FRAME),
282  frame_id_aux_(tf_prefix + AUX_CAMERA_FRAME),
283  frame_id_rectified_left_(tf_prefix + LEFT_RECTIFIED_FRAME),
284  frame_id_rectified_right_(tf_prefix + RIGHT_RECTIFIED_FRAME),
285  frame_id_rectified_aux_(tf_prefix + AUX_RECTIFIED_FRAME),
286  static_tf_broadcaster_(),
287  pointcloud_max_range_(15.0),
288  last_frame_id_(-1),
289  border_clip_type_(BorderClip::NONE),
290  border_clip_value_(0.0)
291 {
292  //
293  // Query device and version information from sensor
294 
295  Status status = driver_->getVersionInfo(version_info_);
296  if (Status_Ok != status) {
297  ROS_ERROR("Camera: failed to query version info: %s", Channel::statusString(status));
298  return;
299  }
300 
301  status = driver_->getDeviceInfo(device_info_);
302  if (Status_Ok != status) {
303  ROS_ERROR("Camera: failed to query device info: %s", Channel::statusString(status));
304  return;
305  }
306 
307  //
308  // Get the camera config
309 
310  image::Config image_config;
311  status = driver_->getImageConfig(image_config);
312  if (Status_Ok != status) {
313  ROS_ERROR("Camera: failed to query sensor configuration: %s", Channel::statusString(status));
314  return;
315  }
316 
317  //
318  // S27/S30 cameras have a 3rd aux color camera and no left color camera
319 
320  has_aux_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision ||
321  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision;
322 
323  //
324  // Topics published for all device types
325 
326  device_info_pub_ = calibration_nh_.advertise<multisense_ros::DeviceInfo>(DEVICE_INFO_TOPIC, 1, true);
327  raw_cam_cal_pub_ = calibration_nh_.advertise<multisense_ros::RawCamCal>(RAW_CAM_CAL_TOPIC, 1, true);
328  raw_cam_config_pub_ = calibration_nh_.advertise<multisense_ros::RawCamConfig>(RAW_CAM_CONFIG_TOPIC, 1, true);
329  histogram_pub_ = device_nh_.advertise<multisense_ros::Histogram>(HISTOGRAM_TOPIC, 5);
330 
331  //
332  // Create topic publishers (TODO: color topics should not be advertised if the device can't support it)
333 
334  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
335 
337  std::bind(&Camera::connectStream, this, Source_Luma_Left),
338  std::bind(&Camera::disconnectStream, this, Source_Luma_Left));
339 
341  std::bind(&Camera::connectStream, this, Source_Jpeg_Left),
342  std::bind(&Camera::disconnectStream, this, Source_Jpeg_Left));
343 
345  std::bind(&Camera::connectStream, this, Source_Jpeg_Left),
346  std::bind(&Camera::disconnectStream, this, Source_Jpeg_Left));
347 
348  left_mono_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
349  left_rgb_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(COLOR_CAMERA_INFO_TOPIC, 1, true);
350  left_rgb_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true);
351 
352 
353  } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision) {
354 
355  // monocular variation
356 
358  std::bind(&Camera::connectStream, this, Source_Luma_Left),
359  std::bind(&Camera::disconnectStream, this, Source_Luma_Left));
361  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left),
362  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left));
364  std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left),
365  std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left));
367  std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left),
368  std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left));
369 
370  left_mono_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
371  left_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_CAMERA_INFO_TOPIC, 1, true);
372  left_rgb_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(COLOR_CAMERA_INFO_TOPIC, 1, true);
373  left_rgb_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true);
374 
375  } else { // all other MultiSense-S* variations
376 
377 
379  std::bind(&Camera::connectStream, this, Source_Luma_Left),
380  std::bind(&Camera::disconnectStream, this, Source_Luma_Left));
382  std::bind(&Camera::connectStream, this, Source_Luma_Right),
383  std::bind(&Camera::disconnectStream, this, Source_Luma_Right));
385  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left),
386  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left));
388  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Right),
389  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Right));
391  std::bind(&Camera::connectStream, this, Source_Disparity),
392  std::bind(&Camera::disconnectStream, this, Source_Disparity));
394  std::bind(&Camera::connectStream, this, Source_Disparity),
395  std::bind(&Camera::disconnectStream, this, Source_Disparity));
396 
397  if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 != device_info_.hardwareRevision) {
398 
399  if (has_aux_camera_) {
400 
402  std::bind(&Camera::connectStream, this, Source_Luma_Aux),
403  std::bind(&Camera::disconnectStream, this, Source_Luma_Aux));
404 
405  aux_mono_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
406 
408  std::bind(&Camera::connectStream, this, Source_Luma_Aux | Source_Chroma_Aux),
409  std::bind(&Camera::disconnectStream, this, Source_Luma_Aux | Source_Chroma_Aux));
410 
411  aux_rgb_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(COLOR_CAMERA_INFO_TOPIC, 1, true);
412 
414  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux),
415  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux));
416 
417  aux_rect_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(RECT_CAMERA_INFO_TOPIC, 1, true);
418 
420  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux),
421  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux));
422 
423  aux_rgb_rect_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true);
424  }
425  else {
426 
428  std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left),
429  std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left));
430 
431  left_rgb_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(COLOR_CAMERA_INFO_TOPIC, 1, true);
432  left_rgb_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true);
433 
435  std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left),
436  std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left));
437 
438  }
439 
440  const auto point_cloud_color_topics = has_aux_camera_ ? Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux :
441  Source_Luma_Left | Source_Chroma_Left;
442 
443  color_point_cloud_pub_ = device_nh_.advertise<sensor_msgs::PointCloud2>(COLOR_POINTCLOUD_TOPIC, 5,
444  std::bind(&Camera::connectStream, this,
445  Source_Disparity | point_cloud_color_topics),
446  std::bind(&Camera::disconnectStream, this,
447  Source_Disparity | point_cloud_color_topics));
449  std::bind(&Camera::connectStream, this,
450  Source_Disparity | point_cloud_color_topics),
451  std::bind(&Camera::disconnectStream, this,
452  Source_Disparity | point_cloud_color_topics));
453 
454  }
455 
456  luma_point_cloud_pub_ = device_nh_.advertise<sensor_msgs::PointCloud2>(POINTCLOUD_TOPIC, 5,
457  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity),
458  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity));
459 
461  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity),
462  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity));
463 
464  raw_cam_data_pub_ = calibration_nh_.advertise<multisense_ros::RawCamData>(RAW_CAM_DATA_TOPIC, 5,
465  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity),
466  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity));
467 
469  std::bind(&Camera::connectStream, this, Source_Disparity),
470  std::bind(&Camera::disconnectStream, this, Source_Disparity));
471 
472  left_stereo_disparity_pub_ = left_nh_.advertise<stereo_msgs::DisparityImage>(DISPARITY_IMAGE_TOPIC, 5,
473  std::bind(&Camera::connectStream, this, Source_Disparity),
474  std::bind(&Camera::disconnectStream, this, Source_Disparity));
475 
476  if (version_info_.sensorFirmwareVersion >= 0x0300) {
477 
479  std::bind(&Camera::connectStream, this, Source_Disparity_Right),
480  std::bind(&Camera::disconnectStream, this, Source_Disparity_Right));
481 
482  right_stereo_disparity_pub_ = right_nh_.advertise<stereo_msgs::DisparityImage>(DISPARITY_IMAGE_TOPIC, 5,
483  std::bind(&Camera::connectStream, this, Source_Disparity_Right),
484  std::bind(&Camera::disconnectStream, this, Source_Disparity_Right));
485 
487  std::bind(&Camera::connectStream, this, Source_Disparity_Cost),
488  std::bind(&Camera::disconnectStream, this, Source_Disparity_Cost));
489 
490  right_disp_cam_info_pub_ = right_nh_.advertise<sensor_msgs::CameraInfo>(DISPARITY_CAMERA_INFO_TOPIC, 1, true);
491  left_cost_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(COST_CAMERA_INFO_TOPIC, 1, true);
492  }
493 
494  //
495  // Camera info topic publishers
496  left_mono_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
497  right_mono_cam_info_pub_ = right_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
498  left_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_CAMERA_INFO_TOPIC, 1, true);
499  right_rect_cam_info_pub_ = right_nh_.advertise<sensor_msgs::CameraInfo>(RECT_CAMERA_INFO_TOPIC, 1, true);
500  left_disp_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(DISPARITY_CAMERA_INFO_TOPIC, 1, true);
501  depth_cam_info_pub_ = device_nh_.advertise<sensor_msgs::CameraInfo>(DEPTH_CAMERA_INFO_TOPIC, 1, true);
502  }
503 
504  //
505  // All image streams off
506 
507  stop();
508 
509  //
510  // Publish device info
511 
512  multisense_ros::DeviceInfo msg;
513 
514  msg.deviceName = device_info_.name;
515  msg.buildDate = device_info_.buildDate;
516  msg.serialNumber = device_info_.serialNumber;
517  msg.deviceRevision = device_info_.hardwareRevision;
518 
519  msg.numberOfPcbs = device_info_.pcbs.size();
520  for(const auto &pcb : device_info_.pcbs) {
521  msg.pcbSerialNumbers.push_back(pcb.revision);
522  msg.pcbNames.push_back(pcb.name);
523  }
524 
525  msg.imagerName = device_info_.imagerName;
526  msg.imagerType = device_info_.imagerType;
527  msg.imagerWidth = device_info_.imagerWidth;
528  msg.imagerHeight = device_info_.imagerHeight;
529 
530  msg.lensName = device_info_.lensName;
531  msg.lensType = device_info_.lensType;
532  msg.nominalBaseline = device_info_.nominalBaseline;
533  msg.nominalFocalLength = device_info_.nominalFocalLength;
534  msg.nominalRelativeAperture = device_info_.nominalRelativeAperture;
535 
536  msg.lightingType = device_info_.lightingType;
537  msg.numberOfLights = device_info_.numberOfLights;
538 
539  msg.laserName = device_info_.laserName;
540  msg.laserType = device_info_.laserType;
541 
542  msg.motorName = device_info_.motorName;
543  msg.motorType = device_info_.motorType;
544  msg.motorGearReduction = device_info_.motorGearReduction;
545 
546  msg.apiBuildDate = version_info_.apiBuildDate;
547  msg.apiVersion = version_info_.apiVersion;
548  msg.firmwareBuildDate = version_info_.sensorFirmwareBuildDate;
549  msg.firmwareVersion = version_info_.sensorFirmwareVersion;
550  msg.bitstreamVersion = version_info_.sensorHardwareVersion;
551  msg.bitstreamMagic = version_info_.sensorHardwareMagic;
552  msg.fpgaDna = version_info_.sensorFpgaDna;
553 
554  device_info_pub_.publish(msg);
555 
556  //
557  // Publish image calibration
558 
559  image::Calibration image_calibration;
560  status = driver_->getImageCalibration(image_calibration);
561  if (Status_Ok != status) {
562  ROS_ERROR("Camera: failed to query image calibration: %s",
563  Channel::statusString(status));
564  }
565  else {
566 
567  multisense_ros::RawCamCal cal;
568  const float *cP;
569 
570  cP = reinterpret_cast<const float *>(&(image_calibration.left.M[0][0]));
571  for(uint32_t i=0; i<9; i++) cal.left_M[i] = cP[i];
572  cP = reinterpret_cast<const float *>(&(image_calibration.left.D[0]));
573  for(uint32_t i=0; i<8; i++) cal.left_D[i] = cP[i];
574  cP = reinterpret_cast<const float *>(&(image_calibration.left.R[0][0]));
575  for(uint32_t i=0; i<9; i++) cal.left_R[i] = cP[i];
576  cP = reinterpret_cast<const float *>(&(image_calibration.left.P[0][0]));
577  for(uint32_t i=0; i<12; i++) cal.left_P[i] = cP[i];
578 
579  cP = reinterpret_cast<const float *>(&(image_calibration.right.M[0][0]));
580  for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i];
581  cP = reinterpret_cast<const float *>(&(image_calibration.right.D[0]));
582  for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i];
583  cP = reinterpret_cast<const float *>(&(image_calibration.right.R[0][0]));
584  for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i];
585  cP = reinterpret_cast<const float *>(&(image_calibration.right.P[0][0]));
586  for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i];
587 
588  raw_cam_cal_pub_.publish(cal);
589  }
590 
591  stereo_calibration_manager_ = std::make_shared<StereoCalibrationManger>(image_config, image_calibration, device_info_);
592 
593  if (has_aux_camera_ && !stereo_calibration_manager_->validAux()) {
594  ROS_WARN("Camera: invalid aux camera calibration");
595  }
596 
597  //
598  // Publish the static transforms for our camera extrinsics for the left/right/aux frames. We will
599  // use the left camera frame as the reference coordinate frame
600 
601  const bool has_aux_extrinsics = has_aux_camera_ && stereo_calibration_manager_->validAux();
602 
603  std::vector<geometry_msgs::TransformStamped> stamped_transforms(3 + (has_aux_extrinsics ? 2 : 0));
604 
605  tf2::Transform rectified_left_T_left{toRotation(image_calibration.left.R), tf2::Vector3{0., 0., 0.}};
606  stamped_transforms[0].header.stamp = ros::Time::now();
607  stamped_transforms[0].header.frame_id = frame_id_rectified_left_;
608  stamped_transforms[0].child_frame_id = frame_id_left_;
609  stamped_transforms[0].transform = tf2::toMsg(rectified_left_T_left);
610 
611  tf2::Transform rectified_right_T_rectified_left{tf2::Matrix3x3::getIdentity(),
612  tf2::Vector3{stereo_calibration_manager_->T(), 0., 0.}};
613  stamped_transforms[1].header.stamp = ros::Time::now();
614  stamped_transforms[1].header.frame_id = frame_id_rectified_left_;
615  stamped_transforms[1].child_frame_id = frame_id_rectified_right_;
616  stamped_transforms[1].transform = tf2::toMsg(rectified_right_T_rectified_left.inverse());
617 
618  tf2::Transform rectified_right_T_right{toRotation(image_calibration.right.R), tf2::Vector3{0., 0., 0.}};
619  stamped_transforms[2].header.stamp = ros::Time::now();
620  stamped_transforms[2].header.frame_id = frame_id_rectified_right_;
621  stamped_transforms[2].child_frame_id = frame_id_right_;
622  stamped_transforms[2].transform = tf2::toMsg(rectified_right_T_right);
623 
624  if (has_aux_extrinsics)
625  {
626  tf2::Transform rectified_aux_T_rectified_left{tf2::Matrix3x3::getIdentity(),
627  tf2::Vector3{stereo_calibration_manager_->aux_T(), 0., 0.}};
628  stamped_transforms[3].header.stamp = ros::Time::now();
629  stamped_transforms[3].header.frame_id = frame_id_rectified_left_;
630  stamped_transforms[3].child_frame_id = frame_id_rectified_aux_;
631  stamped_transforms[3].transform = tf2::toMsg(rectified_aux_T_rectified_left.inverse());
632 
633  tf2::Transform rectified_aux_T_aux{toRotation(image_calibration.aux.R), tf2::Vector3{0., 0., 0.}};
634  stamped_transforms[4].header.stamp = ros::Time::now();
635  stamped_transforms[4].header.frame_id = frame_id_rectified_aux_;
636  stamped_transforms[4].child_frame_id = frame_id_aux_;
637  stamped_transforms[4].transform = tf2::toMsg(rectified_aux_T_aux);
638  }
639 
640  static_tf_broadcaster_.sendTransform(stamped_transforms);
641 
642  //
643  // Update our internal image config and publish initial camera info
644 
645  updateConfig(image_config);
646 
647  //
648  // Initialize point cloud data structures
649 
650  luma_point_cloud_ = initialize_pointcloud<float>(true, frame_id_rectified_left_, "intensity");
651  color_point_cloud_ = initialize_pointcloud<float>(true, frame_id_rectified_left_, "rgb");
652  luma_organized_point_cloud_ = initialize_pointcloud<float>(false, frame_id_rectified_left_, "intensity");
653  color_organized_point_cloud_ = initialize_pointcloud<float>(false, frame_id_rectified_left_, "rgb");
654 
655  //
656  // Add driver-level callbacks.
657  //
658  // -Driver creates individual background thread for each callback.
659  // -Images are queued (depth=5) per callback, with oldest silently dropped if not keeping up.
660  // -All images presented are backed by a referenced buffer system (no copying of image data is done.)
661 
662  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
663 
664  driver_->addIsolatedCallback(monoCB, Source_Luma_Left, this);
665  driver_->addIsolatedCallback(jpegCB, Source_Jpeg_Left, this);
666 
667  } else {
668 
669  driver_->addIsolatedCallback(colorizeCB, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux | Source_Luma_Aux |
670  Source_Luma_Left | Source_Chroma_Left | Source_Luma_Rectified_Left, this);
671  driver_->addIsolatedCallback(monoCB, Source_Luma_Left | Source_Luma_Right | Source_Luma_Aux, this);
672  driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left | Source_Luma_Rectified_Right | Source_Luma_Rectified_Aux, this);
673  driver_->addIsolatedCallback(depthCB, Source_Disparity, this);
674  driver_->addIsolatedCallback(pointCB, Source_Disparity, this);
675  driver_->addIsolatedCallback(rawCB, Source_Disparity | Source_Luma_Rectified_Left, this);
676  driver_->addIsolatedCallback(colorCB, Source_Chroma_Left | Source_Chroma_Rectified_Aux | Source_Chroma_Aux, this);
677  driver_->addIsolatedCallback(dispCB, Source_Disparity | Source_Disparity_Right | Source_Disparity_Cost, this);
678  }
679 
680  //
681  // A common callback to publish histograms
682 
683  driver_->addIsolatedCallback(histCB, allImageSources, this);
684 
685  //
686  // Disable color point cloud strict frame syncing, if desired
687 
688  const char *pcColorFrameSyncEnvStringP = getenv("MULTISENSE_ROS_PC_COLOR_FRAME_SYNC_OFF");
689  if (NULL != pcColorFrameSyncEnvStringP) {
690  ROS_INFO("color point cloud frame sync is disabled");
691  }
692 }
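// Construction sketch (illustrative only; the real node wiring lives elsewhere in the
// package, and the address and tf_prefix below are placeholders):
//
//   crl::multisense::Channel *channel = crl::multisense::Channel::Create("10.66.171.21");
//   if (nullptr != channel) {
//       multisense_ros::Camera camera(channel, "multisense");  // tf_prefix
//       ros::spin();
//       crl::multisense::Channel::Destroy(channel);
//   }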
693 
694 Camera::~Camera()
695 {
696  stop();
697 
698  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
699 
700  driver_->removeIsolatedCallback(monoCB);
701  driver_->removeIsolatedCallback(jpegCB);
702 
703  } else {
704 
705  driver_->removeIsolatedCallback(colorizeCB);
706  driver_->removeIsolatedCallback(monoCB);
707  driver_->removeIsolatedCallback(rectCB);
708  driver_->removeIsolatedCallback(depthCB);
709  driver_->removeIsolatedCallback(pointCB);
710  driver_->removeIsolatedCallback(rawCB);
711  driver_->removeIsolatedCallback(colorCB);
712  driver_->removeIsolatedCallback(dispCB);
713  }
714 }
715 
716 void Camera::borderClipChanged(const BorderClip &borderClipType, double borderClipValue)
717 {
718  //
719  // Avoid locking since updating this while a point cloud is currently being projected is a non-standard case
720 
721  border_clip_type_ = borderClipType;
722  border_clip_value_ = borderClipValue;
723 }
724 
725 void Camera::maxPointCloudRangeChanged(double range)
726 {
727  pointcloud_max_range_ = range;
728 }
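// Note (annotation): this range is applied in pointCloudCallback() as a squared
// comparison, i.e. squared_max_range = pointcloud_max_range_ * pointcloud_max_range_,
// so points are culled once pt.squaredNorm() exceeds range * range.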
729 
730 void Camera::histogramCallback(const image::Header& header)
731 {
732  if (last_frame_id_ >= header.frameId)
733  return;
734 
735  last_frame_id_ = header.frameId;
736 
737  if (histogram_pub_.getNumSubscribers() > 0) {
738  multisense_ros::Histogram rh;
739  image::Histogram mh;
740 
741  Status status = driver_->getImageHistogram(header.frameId, mh);
742  if (Status_Ok == status) {
743  rh.frame_count = header.frameId;
744  rh.time_stamp = ros::Time(header.timeSeconds,
745  1000 * header.timeMicroSeconds);
746  rh.width = header.width;
747  rh.height = header.height;
748  switch(header.source) {
749  case Source_Chroma_Left:
750  case Source_Chroma_Right:
751  rh.width *= 2;
752  rh.height *= 2;
753  }
754 
755  rh.exposure_time = header.exposure;
756  rh.gain = header.gain;
757  rh.fps = header.framesPerSecond;
758  rh.channels = mh.channels;
759  rh.bins = mh.bins;
760  rh.data = mh.data;
761  histogram_pub_.publish(rh);
762  }
763  }
764 }
765 
766 void Camera::jpegImageCallback(const image::Header& header)
767 {
768  if (Source_Jpeg_Left != header.source) {
769  return;
770  }
771 
772  const ros::Time t = ros::Time(header.timeSeconds, 1000 * header.timeMicroSeconds);
773 
774  const uint32_t height = header.height;
775  const uint32_t width = header.width;
776  const uint32_t rgbLength = height * width * 3;
777 
778  left_rgb_image_.header.frame_id = frame_id_left_;
779  left_rgb_image_.height = height;
780  left_rgb_image_.width = width;
782  left_rgb_image_.is_bigendian = (htonl(1) == 1);
783  left_rgb_image_.step = 3 * width;
784  left_rgb_image_.header.stamp = t;
785 
786  left_rgb_image_.data.resize(rgbLength);
787 
788  tjhandle jpegDecompressor = tjInitDecompress();
789  tjDecompress2(jpegDecompressor,
790  reinterpret_cast<unsigned char*>(const_cast<void*>(header.imageDataP)),
791  header.imageLength,
792  &(left_rgb_image_.data[0]),
793  width, 0/*pitch*/, height, TJPF_RGB, 0);
794  tjDestroy(jpegDecompressor);
795 
796  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, t);
797 
798  left_rgb_cam_pub_.publish(left_rgb_image_);
799  left_rgb_cam_info_pub_.publish(left_camera_info);
800 
801  if (left_rgb_rect_cam_pub_.getNumSubscribers() > 0) {
802 
803  const auto left_rectified_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
804 
805  left_rgb_rect_image_.data.resize(rgbLength);
806 
807  const cv::Mat rgb_image(height, width, CV_8UC3, &(left_rgb_image_.data[0]));
808  cv::Mat rect_rgb_image(height, width, CV_8UC3, &(left_rgb_rect_image_.data[0]));
809 
810  const auto left_remap = stereo_calibration_manager_->leftRemap();
811 
812  cv::remap(rgb_image, rect_rgb_image, left_remap->map1, left_remap->map2, cv::INTER_LINEAR);
813 
815  left_rgb_rect_image_.header.stamp = t;
816  left_rgb_rect_image_.height = height;
817  left_rgb_rect_image_.width = width;
819  left_rgb_rect_image_.is_bigendian = (htonl(1) == 1);
820  left_rgb_rect_image_.step = 3 * width;
821  left_rgb_rect_cam_pub_.publish(left_rgb_rect_image_, left_rectified_camera_info);
822  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
823  }
824 }
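// Robustness sketch (annotation, not in the original): tjDecompress2() returns 0 on
// success and -1 on failure; a more defensive variant of the decompression above would be
//
//   if (0 != tjDecompress2(jpegDecompressor,
//                          reinterpret_cast<unsigned char*>(const_cast<void*>(header.imageDataP)),
//                          header.imageLength,
//                          &(left_rgb_image_.data[0]),
//                          width, 0 /*pitch*/, height, TJPF_RGB, 0)) {
//       ROS_ERROR("Camera: JPEG decompression failed: %s", tjGetErrorStr());
//   }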
825 
826 void Camera::disparityImageCallback(const image::Header& header)
827 {
828  if (!((Source_Disparity == header.source &&
829  left_disparity_pub_.getNumSubscribers() > 0) ||
830  (Source_Disparity_Right == header.source &&
831  right_disparity_pub_.getNumSubscribers() > 0) ||
832  (Source_Disparity_Cost == header.source &&
833  left_disparity_cost_pub_.getNumSubscribers() > 0) ||
834  (Source_Disparity == header.source &&
835  left_stereo_disparity_pub_.getNumSubscribers() > 0) ||
836  (Source_Disparity_Right == header.source &&
837  right_stereo_disparity_pub_.getNumSubscribers() > 0)))
838  return;
839 
840  const uint32_t imageSize = (header.width * header.height * header.bitsPerPixel) / 8;
841 
842  const ros::Time t = ros::Time(header.timeSeconds, 1000 * header.timeMicroSeconds);
843 
844  switch(header.source) {
845  case Source_Disparity:
846  case Source_Disparity_Right:
847  {
848  sensor_msgs::Image *imageP = NULL;
849  image_transport::Publisher *pubP = NULL;
850  sensor_msgs::CameraInfo camInfo;
851  ros::Publisher *camInfoPubP = NULL;
852  ros::Publisher *stereoDisparityPubP = NULL;
853  stereo_msgs::DisparityImage *stereoDisparityImageP = NULL;
854 
855 
856  if (Source_Disparity == header.source) {
857  pubP = &left_disparity_pub_;
858  imageP = &left_disparity_image_;
859  imageP->header.frame_id = frame_id_rectified_left_;
860  camInfo = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
861  camInfoPubP = &left_disp_cam_info_pub_;
862  stereoDisparityPubP = &left_stereo_disparity_pub_;
863  stereoDisparityImageP = &left_stereo_disparity_;
864  stereoDisparityImageP->header.frame_id = frame_id_rectified_left_;
865  } else {
866  pubP = &right_disparity_pub_;
867  imageP = &right_disparity_image_;
868  imageP->header.frame_id = frame_id_rectified_right_;
869  camInfo = stereo_calibration_manager_->rightCameraInfo(frame_id_rectified_right_, t);
870  camInfoPubP = &right_disp_cam_info_pub_;
871  stereoDisparityPubP = &right_stereo_disparity_pub_;
872  stereoDisparityImageP = &right_stereo_disparity_;
873  stereoDisparityImageP->header.frame_id = frame_id_rectified_right_;
874  }
875 
876  if (pubP->getNumSubscribers() > 0)
877  {
878  imageP->data.resize(imageSize);
879  memcpy(&imageP->data[0], header.imageDataP, imageSize);
880 
881  imageP->header.stamp = t;
882  imageP->height = header.height;
883  imageP->width = header.width;
884  imageP->is_bigendian = (htonl(1) == 1);
885 
886  switch(header.bitsPerPixel) {
887  case 8:
888  imageP->encoding = sensor_msgs::image_encodings::MONO8;
889  imageP->step = header.width;
890  break;
891  case 16:
892  imageP->encoding = sensor_msgs::image_encodings::MONO16;
893  imageP->step = header.width * 2;
894  break;
895  }
896 
897  pubP->publish(*imageP);
898  }
899 
900  if (stereoDisparityPubP->getNumSubscribers() > 0)
901  {
902  //
903  // If our current image resolution is using non-square pixels, i.e.
904  // fx != fy then warn the user. This support is lacking in
905  // stereo_msgs::DisparityImage and stereo_image_proc
906 
907  if (camInfo.P[0] != camInfo.P[5])
908  {
909  std::stringstream warning;
910  warning << "Current camera configuration has non-square pixels (fx != fy). ";
911  warning << "The stereo_msgs/DisparityImage does not account for";
912  warning << " this. Be careful when reprojecting to a pointcloud.";
913  ROS_WARN("%s", warning.str().c_str());
914  }
915 
916  //
917  // Our final floating point image will be serialized into uint8_t
918  // meaning we need to allocate 4 bytes per pixel
919 
920  uint32_t floatingPointImageSize = header.width * header.height * 4;
921  stereoDisparityImageP->image.data.resize(floatingPointImageSize);
922 
923  stereoDisparityImageP->header.stamp = t;
924 
925  stereoDisparityImageP->image.height = header.height;
926  stereoDisparityImageP->image.width = header.width;
927  stereoDisparityImageP->image.is_bigendian = (htonl(1) == 1);
928  stereoDisparityImageP->image.header.stamp = t;
929  stereoDisparityImageP->image.header.frame_id = stereoDisparityImageP->header.frame_id;
930  stereoDisparityImageP->image.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
931  stereoDisparityImageP->image.step = 4 * header.width;
932 
933 
934  //
935  // Fx is the same for both the right and left cameras
936 
937  stereoDisparityImageP->f = camInfo.P[0];
938 
939  //
940  // Our Tx is negative. The DisparityImage message expects Tx to be
941  // positive
942 
943  stereoDisparityImageP->T = fabs(stereo_calibration_manager_->T());
944  stereoDisparityImageP->min_disparity = 0;
945  stereoDisparityImageP->max_disparity = stereo_calibration_manager_->config().disparities();
946  stereoDisparityImageP->delta_d = 1./16.;
947 
948  //
949  // The stereo_msgs::DisparityImage message expects the disparity
950  // image to be floating point. We will use OpenCV to perform our
951  // element-wise division
952 
953 
954  cv::Mat_<uint16_t> tmpImage(header.height,
955  header.width,
956  reinterpret_cast<uint16_t*>(
957  const_cast<void*>(header.imageDataP)));
958 
959  //
960  // We will copy our data directly into our output message
961 
962  cv::Mat_<float> floatingPointImage(header.height,
963  header.width,
964  reinterpret_cast<float*>(&stereoDisparityImageP->image.data[0]));
965 
966  //
967  // Convert our disparity to floating point by dividing by 16 and
968  // copy the result to the output message
969 
970  floatingPointImage = tmpImage / 16.0;
971 
972  stereoDisparityPubP->publish(*stereoDisparityImageP);
973  }
974 
975  camInfoPubP->publish(camInfo);
976 
977  break;
978  }
979  case Source_Disparity_Cost:
980 
981  left_disparity_cost_image_.data.resize(imageSize);
982  memcpy(&left_disparity_cost_image_.data[0], header.imageDataP, imageSize);
983 
985  left_disparity_cost_image_.header.stamp = t;
986  left_disparity_cost_image_.height = header.height;
987  left_disparity_cost_image_.width = header.width;
988 
990  left_disparity_cost_image_.is_bigendian = (htonl(1) == 1);
991  left_disparity_cost_image_.step = header.width;
992 
993  left_disparity_cost_pub_.publish(left_disparity_cost_image_);
994 
996 
997  break;
998  }
999 }
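// Consumer-side sketch (illustrative): a subscriber can convert the published
// stereo_msgs/DisparityImage back to depth with depth = f * T / disparity, using the
// message's own f (focal length, pixels) and T (baseline, meters). For example,
// f = 600 px, T = 0.21 m and a disparity of 42 px give roughly 600 * 0.21 / 42 = 3.0 m.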
1000 
1001 void Camera::monoCallback(const image::Header& header)
1002 {
1003  if (Source_Luma_Left != header.source &&
1004  Source_Luma_Right != header.source &&
1005  Source_Luma_Aux != header.source) {
1006 
1007  ROS_ERROR("Camera: unexpected mono image source: 0x%x", header.source);
1008  return;
1009  }
1010 
1011  ros::Time t = ros::Time(header.timeSeconds, 1000 * header.timeMicroSeconds);
1012 
1013  switch(header.source) {
1014  case Source_Luma_Left:
1015  {
1016 
1017  left_mono_image_.data.resize(header.imageLength);
1018  memcpy(&left_mono_image_.data[0], header.imageDataP, header.imageLength);
1019 
1020  left_mono_image_.header.frame_id = frame_id_left_;
1021  left_mono_image_.header.stamp = t;
1022  left_mono_image_.height = header.height;
1023  left_mono_image_.width = header.width;
1024 
1025  switch(header.bitsPerPixel) {
1026  case 8:
1028  left_mono_image_.step = header.width;
1029  break;
1030  case 16:
1032  left_mono_image_.step = header.width * 2;
1033  break;
1034  }
1035 
1036  left_mono_image_.is_bigendian = (htonl(1) == 1);
1037 
1039 
1040  //
1041  // Publish a specific camera info message for the left mono image
1043 
1044  break;
1045  }
1046  case Source_Luma_Right:
1047  {
1048  right_mono_image_.data.resize(header.imageLength);
1049  memcpy(&right_mono_image_.data[0], header.imageDataP, header.imageLength);
1050 
1051  right_mono_image_.header.frame_id = frame_id_right_;
1052  right_mono_image_.header.stamp = t;
1053  right_mono_image_.height = header.height;
1054  right_mono_image_.width = header.width;
1055 
1056  switch(header.bitsPerPixel) {
1057  case 8:
1059  right_mono_image_.step = header.width;
1060  break;
1061  case 16:
1063  right_mono_image_.step = header.width * 2;
1064  break;
1065  }
1066  right_mono_image_.is_bigendian = (htonl(1) == 1);
1067 
1069 
1070  //
1071  // Publish a specific camera info message for the right mono image
1073 
1074  break;
1075  }
1076  case Source_Luma_Aux:
1077  {
1078  aux_mono_image_.data.resize(header.imageLength);
1079  memcpy(&aux_mono_image_.data[0], header.imageDataP, header.imageLength);
1080 
1081  aux_mono_image_.header.frame_id = frame_id_aux_;
1082  aux_mono_image_.header.stamp = t;
1083  aux_mono_image_.height = header.height;
1084  aux_mono_image_.width = header.width;
1085 
1086  switch(header.bitsPerPixel) {
1087  case 8:
1089  aux_mono_image_.step = header.width;
1090  break;
1091  case 16:
1093  aux_mono_image_.step = header.width * 2;
1094  break;
1095  }
1096  aux_mono_image_.is_bigendian = (htonl(1) == 1);
1097 
1099 
1100  //
1101  // Publish a specific camera info message for the aux mono image
1103  break;
1104 
1105  }
1106  }
1107 }
1108 
1109 void Camera::rectCallback(const image::Header& header)
1110 {
1111  if (Source_Luma_Rectified_Left != header.source &&
1112  Source_Luma_Rectified_Right != header.source &&
1113  Source_Luma_Rectified_Aux != header.source) {
1114 
1115  ROS_ERROR("Camera: unexpected rectified image source: 0x%x", header.source);
1116  return;
1117  }
1118 
1119  ros::Time t = ros::Time(header.timeSeconds, 1000 * header.timeMicroSeconds);
1120 
1121  switch(header.source) {
1122  case Source_Luma_Rectified_Left:
1123  {
1124 
1125  left_rect_image_.data.resize(header.imageLength);
1126  memcpy(&left_rect_image_.data[0], header.imageDataP, header.imageLength);
1127 
1128  left_rect_image_.header.frame_id = frame_id_rectified_left_;
1129  left_rect_image_.header.stamp = t;
1130  left_rect_image_.height = header.height;
1131  left_rect_image_.width = header.width;
1132 
1133  switch(header.bitsPerPixel) {
1134  case 8:
1136  left_rect_image_.step = header.width;
1137 
1138  break;
1139  case 16:
1141  left_rect_image_.step = header.width * 2;
1142 
1143  break;
1144  }
1145 
1146  left_rect_image_.is_bigendian = (htonl(1) == 1);
1147 
1148  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
1149 
1150  //
1151  // Continue to publish the rect camera info on the
1152  // <namespace>/left/camera_info topic for backward compatibility with
1153  // older versions of the driver
1154  left_rect_cam_pub_.publish(left_rect_image_, left_camera_info);
1155 
1156  left_rect_cam_info_pub_.publish(left_camera_info);
1157 
1158  break;
1159  }
1160  case Source_Luma_Rectified_Right:
1161  {
1162 
1163  right_rect_image_.data.resize(header.imageLength);
1164  memcpy(&right_rect_image_.data[0], header.imageDataP, header.imageLength);
1165 
1166  right_rect_image_.header.frame_id = frame_id_rectified_right_;
1167  right_rect_image_.header.stamp = t;
1168  right_rect_image_.height = header.height;
1169  right_rect_image_.width = header.width;
1170 
1171  switch(header.bitsPerPixel) {
1172  case 8:
1174  right_rect_image_.step = header.width;
1175  break;
1176  case 16:
1178  right_rect_image_.step = header.width * 2;
1179  break;
1180  }
1181 
1182  right_rect_image_.is_bigendian = (htonl(1) == 1);
1183 
1184  const auto right_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_rectified_right_, t);
1185 
1186  //
1187  // Continue to publish the rect camera info on the
1188  // <namespace>/right/camera_info topic for backward compatibility with
1189  // older versions of the driver
1190  right_rect_cam_pub_.publish(right_rect_image_, right_camera_info);
1191 
1192  right_rect_cam_info_pub_.publish(right_camera_info);
1193 
1194  break;
1195  }
1196  case Source_Luma_Rectified_Aux:
1197  {
1198 
1199  aux_rect_image_.data.resize(header.imageLength);
1200  memcpy(&aux_rect_image_.data[0], header.imageDataP, header.imageLength);
1201 
1202  aux_rect_image_.header.frame_id = frame_id_rectified_aux_;
1203  aux_rect_image_.header.stamp = t;
1204  aux_rect_image_.height = header.height;
1205  aux_rect_image_.width = header.width;
1206 
1207  switch(header.bitsPerPixel) {
1208  case 8:
1210  aux_rect_image_.step = header.width;
1211  break;
1212  case 16:
1214  aux_rect_image_.step = header.width * 2;
1215  break;
1216  }
1217 
1218  aux_rect_image_.is_bigendian = (htonl(1) == 1);
1219 
1220  const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, t);
1221 
1222  //
1223  // Continue to publish the rect camera info on the
1224  // <namespace>/aux/camera_info topic for backward compatibility with
1225  // older versions of the driver
1226  aux_rect_cam_pub_.publish(aux_rect_image_, aux_camera_info);
1227 
1228  aux_rect_cam_info_pub_.publish(aux_camera_info);
1229 
1230  break;
1231  }
1232  }
1233 }
1234 
1235 void Camera::depthCallback(const image::Header& header)
1236 {
1237  if (Source_Disparity != header.source) {
1238 
1239  ROS_ERROR("Camera: unexpected depth image source: 0x%x", header.source);
1240  return;
1241  }
1242 
1243  uint32_t niDepthSubscribers = ni_depth_cam_pub_.getNumSubscribers();
1244  uint32_t depthSubscribers = depth_cam_pub_.getNumSubscribers();
1245 
1246  if (0 == niDepthSubscribers && 0 == depthSubscribers)
1247  {
1248  return;
1249  }
1250 
1251  const ros::Time t(header.timeSeconds, 1000 * header.timeMicroSeconds);
1252 
1253  const float bad_point = std::numeric_limits<float>::quiet_NaN();
1254  const uint32_t depthSize = header.height * header.width * sizeof(float);
1255  const uint32_t niDepthSize = header.height * header.width * sizeof(uint16_t);
1256  const uint32_t imageSize = header.width * header.height;
1257 
1258  depth_image_.header.stamp = t;
1259  depth_image_.header.frame_id = frame_id_rectified_left_;
1260  depth_image_.height = header.height;
1261  depth_image_.width = header.width;
1262  depth_image_.is_bigendian = (htonl(1) == 1);
1263 
1265 
1266  ni_depth_image_.encoding = sensor_msgs::image_encodings::MONO16;
1267  ni_depth_image_.step = header.width * 2;
1268 
1269  depth_image_.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
1270  depth_image_.step = header.width * 4;
1271 
1272  depth_image_.data.resize(depthSize);
1273  ni_depth_image_.data.resize(niDepthSize);
1274 
1275  float *depthImageP = reinterpret_cast<float*>(&depth_image_.data[0]);
1276  uint16_t *niDepthImageP = reinterpret_cast<uint16_t*>(&ni_depth_image_.data[0]);
1277 
1278  const uint16_t min_ni_depth = std::numeric_limits<uint16_t>::lowest();
1279  const uint16_t max_ni_depth = std::numeric_limits<uint16_t>::max();
1280 
1281  //
1282  // Disparity is in 32-bit floating point
1283 
1284  if (32 == header.bitsPerPixel) {
1285 
1286  //
1287  // Depth = focal_length*baseline/disparity
1288  // From the Q matrix used to reproject disparity images using non-isotropic
1289  // pixels we see that z = (fx*fy*Tx). Normalizing z so that
1290  // the scale factor on the homogeneous Cartesian coordinate is 1 results
1291  // in z = (fx*fy*Tx)/(-fy*d) or z = (fx*Tx)/(-d).
1292  // The 4th element of the right camera projection matrix is defined
1293  // as fx*Tx.
1294 
1295  const double scale = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, t).P[3];
1296 
1297  const float *disparityImageP = reinterpret_cast<const float*>(header.imageDataP);
1298 
1299  for (uint32_t i = 0 ; i < imageSize ; ++i)
1300  {
1301  if (0.0 >= disparityImageP[i])
1302  {
1303  depthImageP[i] = bad_point;
1304  niDepthImageP[i] = 0;
1305  }
1306  else
1307  {
1308  depthImageP[i] = scale / disparityImageP[i];
1309  niDepthImageP[i] = static_cast<uint16_t>(std::min(static_cast<float>(max_ni_depth),
1310  std::max(static_cast<float>(min_ni_depth),
1311  depthImageP[i] * 1000)));
1312  }
1313  }
1314 
1315  //
1316  // Disparity is in 1/16th pixel, unsigned integer
1317 
1318  } else if (16 == header.bitsPerPixel) {
1319 
1320  //
1321  // Depth = focal_length*baseline/disparity
1322  // From the Q matrix used to reproject disparity images using non-isotropic
1323  // pixels we see that z = (fx*fy*Tx). Normalizing z so that
1324  // the scale factor on the homogeneous Cartesian coordinate is 1 results
1325  // in z = (fx*fy*Tx)/(-fy*d) or z = (fx*Tx)/(-d). Because our disparity
1326  // image is 16 bits we must also divide by 16 making z = (fx*Tx*16)/(-d)
1327  // The 4th element of the right camera projection matrix is defined
1328  // as fx*Tx.
1329 
1330 
1331  const float scale = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, t).P[3] * -16.0f;
1332 
1333  const uint16_t *disparityImageP = reinterpret_cast<const uint16_t*>(header.imageDataP);
1334 
1335  for (uint32_t i = 0 ; i < imageSize ; ++i)
1336  {
1337  if (0 == disparityImageP[i])
1338  {
1339  depthImageP[i] = bad_point;
1340  niDepthImageP[i] = 0;
1341  }
1342  else
1343  {
1344  depthImageP[i] = scale / disparityImageP[i];
1345  niDepthImageP[i] = static_cast<uint16_t>(std::min(static_cast<float>(max_ni_depth),
1346  std::max(static_cast<float>(min_ni_depth),
1347  depthImageP[i] * 1000)));
1348  }
1349  }
1350 
1351  } else {
1352  ROS_ERROR("Camera: unsupported disparity bpp: %d", header.bitsPerPixel);
1353  return;
1354  }
1355 
1356  if (0 != niDepthSubscribers)
1357  {
1358  ni_depth_cam_pub_.publish(ni_depth_image_);
1359  }
1360 
1361  if (0 != depthSubscribers)
1362  {
1363  depth_cam_pub_.publish(depth_image_);
1364  }
1365 
1367 }
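// Unit note (annotation): the OpenNI-style image published above follows the common
// 16-bit millimeter convention, i.e. ni_depth = clamp(depth_m * 1000, 0, 65535), so a
// 2.5 m return is encoded as 2500 and invalid disparities map to 0.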
1368 
1369 void Camera::pointCloudCallback(const image::Header& header)
1370 {
1371  if (Source_Disparity != header.source) {
1372 
1373  ROS_ERROR("Camera: unexpected pointcloud image source: 0x%x", header.source);
1374  return;
1375  }
1376 
1377  //
1378  // Get the corresponding visual images so we can colorize properly
1379 
1380  std::shared_ptr<BufferWrapper<image::Header>> left_luma_rect = nullptr;
1381  std::shared_ptr<BufferWrapper<image::Header>> left_luma = nullptr;
1382  std::shared_ptr<BufferWrapper<image::Header>> left_chroma = nullptr;
1383  std::shared_ptr<BufferWrapper<image::Header>> aux_luma_rectified = nullptr;
1384  std::shared_ptr<BufferWrapper<image::Header>> aux_chroma_rectified = nullptr;
1385 
1386  const auto left_rect_image = image_buffers_.find(Source_Luma_Rectified_Left);
1387  if (left_rect_image != std::end(image_buffers_) && left_rect_image->second->data().frameId == header.frameId) {
1388  left_luma_rect = left_rect_image->second;
1389  }
1390 
1391  const auto left_luma_image = image_buffers_.find(Source_Luma_Left);
1392  if (left_luma_image != std::end(image_buffers_) && left_luma_image->second->data().frameId == header.frameId) {
1393  left_luma = left_luma_image->second;
1394  }
1395 
1396  const auto chroma_image = image_buffers_.find(Source_Chroma_Left);
1397  if (chroma_image != std::end(image_buffers_) && chroma_image->second->data().frameId == header.frameId) {
1398  left_chroma = chroma_image->second;
1399  }
1400 
1401  const auto aux_luma_rectified_image = image_buffers_.find(Source_Luma_Rectified_Aux);
1402  if (aux_luma_rectified_image != std::end(image_buffers_) && aux_luma_rectified_image->second->data().frameId == header.frameId) {
1403  aux_luma_rectified = aux_luma_rectified_image->second;
1404  }
1405 
1406  const auto aux_chroma_rectified_image = image_buffers_.find(Source_Chroma_Rectified_Aux);
1407  if (aux_chroma_rectified_image != std::end(image_buffers_) && aux_chroma_rectified_image->second->data().frameId == header.frameId) {
1408  aux_chroma_rectified = aux_chroma_rectified_image->second;
1409  }
1410 
1411  const bool color_data = (has_aux_camera_ && aux_luma_rectified && aux_chroma_rectified && stereo_calibration_manager_->validAux()) ||
1412  (!has_aux_camera_ && left_luma && left_chroma);
1413 
1414  const bool pub_pointcloud = luma_point_cloud_pub_.getNumSubscribers() > 0 && left_luma_rect;
1415  const bool pub_color_pointcloud = color_point_cloud_pub_.getNumSubscribers() > 0 && color_data;
1416  const bool pub_organized_pointcloud = luma_organized_point_cloud_pub_.getNumSubscribers() > 0 && left_luma_rect;
1417  const bool pub_color_organized_pointcloud = color_organized_point_cloud_pub_.getNumSubscribers() > 0 && color_data;
1418 
1419  if (!(pub_pointcloud || pub_color_pointcloud || pub_organized_pointcloud || pub_color_organized_pointcloud))
1420  {
1421  return;
1422  }
1423 
1424  const ros::Time t(header.timeSeconds, 1000 * header.timeMicroSeconds);
1425 
1426  //
1427  // Resize our corresponding pointclouds if we plan on publishing them
1428 
1429  if (pub_pointcloud)
1430  {
1431  luma_point_cloud_.header.stamp = t;
1432  luma_point_cloud_.data.resize(header.width * header.height * luma_point_cloud_.point_step);
1433  }
1434 
1435  if (pub_color_pointcloud)
1436  {
1437  color_point_cloud_.header.stamp = t;
1438  color_point_cloud_.data.resize(header.width * header.height * color_point_cloud_.point_step);
1439  }
1440 
1441  if (pub_organized_pointcloud)
1442  {
1443  luma_organized_point_cloud_.header.stamp = t;
1444  luma_organized_point_cloud_.data.resize(header.width * header.height * luma_organized_point_cloud_.point_step);
1445  luma_organized_point_cloud_.width = header.width;
1446  luma_organized_point_cloud_.height = header.height;
1447  luma_organized_point_cloud_.row_step = header.width * luma_organized_point_cloud_.point_step;
1448  }
1449 
1450  if (pub_color_organized_pointcloud)
1451  {
1452  color_organized_point_cloud_.header.stamp = t;
1453  color_organized_point_cloud_.data.resize(header.width * header.height * color_organized_point_cloud_.point_step);
1454  color_organized_point_cloud_.width = header.width;
1455  color_organized_point_cloud_.height = header.height;
1456  color_organized_point_cloud_.row_step = header.width * color_organized_point_cloud_.point_step;
1457  }
1458 
1459  const Eigen::Matrix4d Q = stereo_calibration_manager_->Q();
1460 
1461  const Eigen::Vector3f invalid_point(std::numeric_limits<float>::quiet_NaN(),
1462  std::numeric_limits<float>::quiet_NaN(),
1463  std::numeric_limits<float>::quiet_NaN());
1464 
1465  //
1466  // Create rectified color image upfront if we are planning to publish color pointclouds
1467 
1468  cv::Mat rectified_color;
1469  if (!has_aux_camera_ && (pub_color_pointcloud || pub_color_organized_pointcloud))
1470  {
1471  const auto &luma = left_luma->data();
1472 
1473  pointcloud_color_buffer_.resize(3 * luma.width * luma.height);
1474  pointcloud_rect_color_buffer_.resize(3 * luma.width * luma.height);
1475  ycbcrToBgr(luma, left_chroma->data(), &(pointcloud_color_buffer_[0]));
1476 
1477  cv::Mat rgb_image(luma.height, luma.width, CV_8UC3, &(pointcloud_color_buffer_[0]));
1478  cv::Mat rect_rgb_image(luma.height, luma.width, CV_8UC3, &(pointcloud_rect_color_buffer_[0]));
1479 
1480  const auto left_remap = stereo_calibration_manager_->leftRemap();
1481 
1482  cv::remap(rgb_image, rect_rgb_image, left_remap->map1, left_remap->map2, cv::INTER_LINEAR);
1483 
1484  rectified_color = std::move(rect_rgb_image);
1485  }
1486  else if(has_aux_camera_ && (pub_color_pointcloud || pub_color_organized_pointcloud))
1487  {
1488  const auto &luma = aux_luma_rectified->data();
1489 
1490  pointcloud_rect_color_buffer_.resize(3 * luma.width * luma.height);
1491 
1492  ycbcrToBgr(luma, aux_chroma_rectified->data(), reinterpret_cast<uint8_t*>(&(pointcloud_rect_color_buffer_[0])));
1493 
1494  cv::Mat rect_rgb_image(luma.height, luma.width, CV_8UC3, &(pointcloud_rect_color_buffer_[0]));
1495 
1496  rectified_color = std::move(rect_rgb_image);
1497  }
1498 
1499  //
1500  // Iterate through our disparity image once populating our pointcloud structures if we plan to publish them
1501 
1502  uint32_t packed_color = 0;
1503 
1504  const double squared_max_range = pointcloud_max_range_ * pointcloud_max_range_;
1505 
1506  const double aux_T = has_aux_camera_ ? stereo_calibration_manager_->aux_T() : stereo_calibration_manager_->T();
1507  const double T = stereo_calibration_manager_->T();
1508 
1509  size_t valid_points = 0;
1510  for (size_t y = 0 ; y < header.height ; ++y)
1511  {
1512  for (size_t x = 0 ; x < header.width ; ++x)
1513  {
1514  const size_t index = y * header.width + x;
1515 
1516  double disparity = 0.0f;
1517  switch(header.bitsPerPixel)
1518  {
1519  case 16:
1520  {
1521  disparity = static_cast<double>(reinterpret_cast<const uint16_t*>(header.imageDataP)[index]) / 16.0f;
1522  break;
1523  }
1524  case 32:
1525  {
1526  disparity = static_cast<double>(reinterpret_cast<const float*>(header.imageDataP)[index]);
1527  break;
1528  }
1529  default:
1530  {
1531  ROS_ERROR("Camera: unsupported disparity depth: %d", header.bitsPerPixel);
1532  return;
1533  }
1534  }
1535 
1536  //
1537  // We have a valid rectified color image meaning we plan to publish color pointcloud topics. Assemble the
1538  // color pixel here since it may be needed in our organized pointclouds
1539 
1540  if (!rectified_color.empty())
1541  {
1542  packed_color = 0;
1543 
1544  const double color_d = has_aux_camera_ ? (disparity * aux_T) / T : 0.0;
1545 
1546  const auto color_pixel = has_aux_camera_ ? u_interpolate_color(std::max(x - color_d, 0.), y, rectified_color) :
1547  rectified_color.at<cv::Vec3b>(y, x);
1548 
1549  packed_color |= color_pixel[2] << 16 | color_pixel[1] << 8 | color_pixel[0];
1550  }
1551 
1552  //
1553  // If our disparity is 0 pixels our corresponding 3D point is infinite. If we plan to publish organized
1554  // pointclouds we will need to add an invalid point to our pointcloud(s)
1555 
1556  if (disparity == 0.0f || clipPoint(border_clip_type_, border_clip_value_, header.width, header.height, x, y))
1557  {
1558  if (pub_organized_pointcloud)
1559  {
1560  writePoint(luma_organized_point_cloud_, index, invalid_point, index, left_luma_rect->data());
1561  }
1562 
1563  if (pub_color_organized_pointcloud)
1564  {
1565  writePoint(color_organized_point_cloud_, index, invalid_point, packed_color);
1566  }
1567 
1568  continue;
1569  }
1570 
1571  const Eigen::Vector3f point = ((Q * Eigen::Vector4d(static_cast<double>(x),
1572  static_cast<double>(y),
1573  disparity,
1574  1.0)).hnormalized()).cast<float>();
1575 
1576 
1577  const bool valid = isValidReprojectedPoint(point, squared_max_range);
1578 
1579  if (pub_pointcloud && valid)
1580  {
1581  writePoint(luma_point_cloud_, valid_points, point, index, left_luma_rect->data());
1582  }
1583 
1584  if(pub_color_pointcloud && valid)
1585  {
1586  writePoint(color_point_cloud_, valid_points, point, packed_color);
1587  }
1588 
1589  if (pub_organized_pointcloud)
1590  {
1591  writePoint(luma_organized_point_cloud_, index, valid ? point : invalid_point, index, left_luma_rect->data());
1592  }
1593 
1594  if (pub_color_organized_pointcloud)
1595  {
1596  writePoint(color_organized_point_cloud_, index, valid ? point : invalid_point, packed_color);
1597  }
1598 
1599  if (valid)
1600  {
1601  ++valid_points;
1602  }
1603  }
1604  }
1605 
1606  if (pub_pointcloud)
1607  {
1608  luma_point_cloud_.height = 1;
1609  luma_point_cloud_.row_step = valid_points * luma_point_cloud_.point_step;
1610  luma_point_cloud_.width = valid_points;
1611  luma_point_cloud_.data.resize(valid_points * luma_point_cloud_.point_step);
1612  luma_point_cloud_pub_.publish(luma_point_cloud_);
1613  }
1614 
1615  if(pub_color_pointcloud)
1616  {
1617  color_point_cloud_.height = 1;
1618  color_point_cloud_.row_step = valid_points * color_point_cloud_.point_step;
1619  color_point_cloud_.width = valid_points;
1620  color_point_cloud_.data.resize(valid_points * color_point_cloud_.point_step);
1621  color_point_cloud_pub_.publish(color_point_cloud_);
1622  }
1623 
1624  if (pub_organized_pointcloud)
1625  {
1626  luma_organized_point_cloud_pub_.publish(luma_organized_point_cloud_);
1627  }
1628 
1629  if (pub_color_organized_pointcloud)
1630  {
1631  color_organized_point_cloud_pub_.publish(color_organized_point_cloud_);
1632  }
1633 
1634 }
1635 
1636 void Camera::rawCamDataCallback(const image::Header& header)
1637 {
1638  if (0 == raw_cam_data_pub_.getNumSubscribers()) {
1639  return;
1640  }
1641 
1642  if(Source_Disparity == header.source)
1643  {
1644  const auto image = image_buffers_.find(Source_Luma_Rectified_Left);
1645  if (image != std::end(image_buffers_) && image->second->data().frameId == header.frameId)
1646  {
1647  const auto luma_ptr = image->second;
1648  const auto &left_luma_rect = luma_ptr->data();
1649 
1650  const uint32_t left_luma_image_size = left_luma_rect.width * left_luma_rect.height;
1651 
1652  raw_cam_data_.gray_scale_image.resize(left_luma_image_size);
1653  memcpy(&(raw_cam_data_.gray_scale_image[0]),
1654  left_luma_rect.imageDataP,
1655  left_luma_image_size * sizeof(uint8_t));
1656 
1657  raw_cam_data_.frames_per_second = left_luma_rect.framesPerSecond;
1658  raw_cam_data_.gain = left_luma_rect.gain;
1659  raw_cam_data_.exposure_time = left_luma_rect.exposure;
1660  raw_cam_data_.frame_count = left_luma_rect.frameId;
1661  raw_cam_data_.time_stamp = ros::Time(left_luma_rect.timeSeconds, 1000 * left_luma_rect.timeMicroSeconds);
1662  raw_cam_data_.width = left_luma_rect.width;
1663  raw_cam_data_.height = left_luma_rect.height;
1664 
1665  const uint32_t disparity_size = header.width * header.height;
1666 
1667  raw_cam_data_.disparity_image.resize(disparity_size);
1668  memcpy(&(raw_cam_data_.disparity_image[0]),
1669  header.imageDataP,
1670  disparity_size * (header.bitsPerPixel == 16 ? sizeof(uint16_t) : sizeof(uint32_t)));
1671 
1672  raw_cam_data_pub_.publish(raw_cam_data_);
1673  }
1674  }
1675 }
1676 
1677 void Camera::colorImageCallback(const image::Header& header)
1678 {
1679  //
1680  // The left-luma image is currently published before the matching chroma image, so this callback can trigger on the chroma image alone
1681 
1682  if (Source_Chroma_Left != header.source &&
1683  Source_Chroma_Rectified_Aux != header.source &&
1684  Source_Chroma_Aux != header.source)
1685  {
1686  ROS_WARN("Camera: unexpected color image source: 0x%x", header.source);
1687  return;
1688  }
1689 
1690  const ros::Time t(header.timeSeconds, 1000 * header.timeMicroSeconds);
1691 
1692  switch (header.source)
1693  {
1694  case Source_Chroma_Left:
1695  {
1696 
1697  const auto color_subscribers = left_rgb_cam_pub_.getNumSubscribers();
1698  const auto color_rect_subscribers = left_rgb_rect_cam_pub_.getNumSubscribers();
1699 
1700  if (color_subscribers == 0 && color_rect_subscribers == 0)
1701  {
1702  return;
1703  }
1704 
1705  const auto left_luma = image_buffers_.find(Source_Luma_Left);
1706  if (left_luma == std::end(image_buffers_)) {
1707  return;
1708  }
1709 
1710  const auto luma_ptr = left_luma->second;
1711 
1712  if (header.frameId == luma_ptr->data().frameId) {
1713 
1714  const uint32_t height = luma_ptr->data().height;
1715  const uint32_t width = luma_ptr->data().width;
1716  const uint32_t imageSize = 3 * height * width;
1717 
1718  left_rgb_image_.data.resize(imageSize);
1719 
1720  left_rgb_image_.header.frame_id = frame_id_left_;
1721  left_rgb_image_.header.stamp = t;
1722  left_rgb_image_.height = height;
1723  left_rgb_image_.width = width;
1724 
1725  left_rgb_image_.encoding = sensor_msgs::image_encodings::BGR8;
1726  left_rgb_image_.is_bigendian = (htonl(1) == 1);
1727  left_rgb_image_.step = 3 * width;
1728 
1729  //
1730  // Convert YCbCr 4:2:0 to BGR
1731 
1732  ycbcrToBgr(luma_ptr->data(), header, reinterpret_cast<uint8_t*>(&(left_rgb_image_.data[0])));
1733 
1734  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, t);
1735 
1736  if (color_subscribers != 0) {
1737  left_rgb_cam_pub_.publish(left_rgb_image_);
1738 
1739  left_rgb_cam_info_pub_.publish(left_camera_info);
1740  }
1741 
1742  if (color_rect_subscribers > 0) {
1743  left_rgb_rect_image_.data.resize(imageSize);
1744 
1745  const auto left_rectified_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
1746 
1747  const auto remaps = stereo_calibration_manager_->leftRemap();
1748 
1749  const cv::Mat rgb_image(height, width, CV_8UC3, &(left_rgb_image_.data[0]));
1750  cv::Mat rect_rgb_image(height, width, CV_8UC3, &(left_rgb_rect_image_.data[0]));
1751 
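 //
 // Rectify the color image with the precomputed left undistortion/rectification maps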
1752  cv::remap(rgb_image, rect_rgb_image, remaps->map1, remaps->map2, cv::INTER_LINEAR);
1753 
1754  left_rgb_rect_image_.header.frame_id = frame_id_rectified_left_;
1755  left_rgb_rect_image_.header.stamp = t;
1756  left_rgb_rect_image_.height = height;
1757  left_rgb_rect_image_.width = width;
1758 
1759  left_rgb_rect_image_.encoding = sensor_msgs::image_encodings::BGR8;
1760  left_rgb_rect_image_.is_bigendian = (htonl(1) == 1);
1761  left_rgb_rect_image_.step = 3 * width;
1762 
1763  left_rgb_rect_cam_pub_.publish(left_rgb_rect_image_, left_rectified_camera_info);
1764 
1765  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
1766  }
1767  }
1768 
1769  break;
1770  }
1771  case Source_Chroma_Rectified_Aux:
1772  {
1773  if (aux_rgb_rect_cam_pub_.getNumSubscribers() == 0) {
1774  return;
1775  }
1776 
1777  const auto aux_luma = image_buffers_.find(Source_Luma_Rectified_Aux);
1778  if (aux_luma == std::end(image_buffers_)) {
1779  return;
1780  }
1781 
1782  const auto luma_ptr = aux_luma->second;
1783 
1784  if (header.frameId == luma_ptr->data().frameId) {
1785 
1786  const uint32_t height = luma_ptr->data().height;
1787  const uint32_t width = luma_ptr->data().width;
1788  const uint32_t imageSize = 3 * height * width;
1789 
1790  aux_rgb_rect_image_.data.resize(imageSize);
1791 
1792  aux_rgb_rect_image_.header.frame_id = frame_id_rectified_aux_;
1793  aux_rgb_rect_image_.header.stamp = t;
1794  aux_rgb_rect_image_.height = height;
1795  aux_rgb_rect_image_.width = width;
1796 
1797  aux_rgb_rect_image_.encoding = sensor_msgs::image_encodings::BGR8;
1798  aux_rgb_rect_image_.is_bigendian = (htonl(1) == 1);
1799  aux_rgb_rect_image_.step = 3 * width;
1800 
1801  //
1802  // Convert YCbCr 4:2:0 to BGR
1803 
1804  ycbcrToBgr(luma_ptr->data(), header, reinterpret_cast<uint8_t*>(&(aux_rgb_rect_image_.data[0])));
1805 
1806  const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, t);
1807 
1808  aux_rgb_rect_cam_pub_.publish(aux_rgb_rect_image_, aux_camera_info);
1809 
1810  aux_rgb_rect_cam_info_pub_.publish(aux_camera_info);
1811  }
1812 
1813  break;
1814  }
1815  case Source_Chroma_Aux:
1816  {
1817  if (aux_rgb_cam_pub_.getNumSubscribers() == 0) {
1818  return;
1819  }
1820 
1821  const auto aux_luma = image_buffers_.find(Source_Luma_Aux);
1822  if (aux_luma == std::end(image_buffers_)) {
1823  return;
1824  }
1825 
1826  const auto luma_ptr = aux_luma->second;
1827 
1828  if (header.frameId == luma_ptr->data().frameId) {
1829  const uint32_t height = luma_ptr->data().height;
1830  const uint32_t width = luma_ptr->data().width;
1831  const uint32_t imageSize = 3 * height * width;
1832 
1833  aux_rgb_image_.data.resize(imageSize);
1834 
1835  aux_rgb_image_.header.frame_id = frame_id_aux_;
1836  aux_rgb_image_.header.stamp = t;
1837  aux_rgb_image_.height = height;
1838  aux_rgb_image_.width = width;
1839 
1840  aux_rgb_image_.encoding = sensor_msgs::image_encodings::BGR8;
1841  aux_rgb_image_.is_bigendian = (htonl(1) == 1);
1842  aux_rgb_image_.step = 3 * width;
1843 
1844  //
1845  // Convert YCbCr 4:2:0 to BGR
1846 
1847  ycbcrToBgr(luma_ptr->data(), header, reinterpret_cast<uint8_t*>(&(aux_rgb_image_.data[0])));
1848 
1849  const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, t);
1850 
1851  aux_rgb_cam_pub_.publish(aux_rgb_image_);
1852 
1853  aux_rgb_cam_info_pub_.publish(aux_camera_info);
1854 
1855  break;
1856  }
1857  }
1858  }
1859 }
1860 
1861 void Camera::colorizeCallback(const image::Header& header)
1862 {
1863  if (Source_Luma_Rectified_Aux != header.source &&
1864  Source_Chroma_Rectified_Aux != header.source &&
1865  Source_Luma_Aux != header.source &&
1866  Source_Luma_Left != header.source &&
1867  Source_Chroma_Left != header.source &&
1868  Source_Luma_Rectified_Left != header.source) {
1869  ROS_WARN("Camera: unexpected colorized image source: 0x%x", header.source);
1870  return;
1871  }
1872 
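 //
 // Hold a reference to this image buffer so a later callback can look it up by
 // source and pair it with matching data by frameId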
1873  image_buffers_[header.source] = std::make_shared<BufferWrapper<crl::multisense::image::Header>>(driver_, header);
1874 }
1875 
1876 
1877 void Camera::updateConfig(const image::Config& config)
1878 {
1879  stereo_calibration_manager_->updateConfig(config);
1880 
1881  //
1882  // Publish the "raw" config message
1883 
1884  multisense_ros::RawCamConfig cfg;
1885 
1886  cfg.width = config.width();
1887  cfg.height = config.height();
1888  cfg.frames_per_second = config.fps();
1889  cfg.gain = config.gain();
1890  cfg.exposure_time = config.exposure();
1891 
1892  cfg.fx = config.fx();
1893  cfg.fy = config.fy();
1894  cfg.cx = config.cx();
1895  cfg.cy = config.cy();
1896  cfg.tx = config.tx();
1897  cfg.ty = config.ty();
1898  cfg.tz = config.tz();
1899  cfg.roll = config.roll();
1900  cfg.pitch = config.pitch();
1901  cfg.yaw = config.yaw();
1902 
1903  raw_cam_config_pub_.publish(cfg);
1904 
1905  //
1906  // Republish our camera info topics since the resolution changed
1907 
1908  publishAllCameraInfo();
1909 }
1910 
1911 void Camera::publishAllCameraInfo()
1912 {
1913  const auto stamp = ros::Time::now();
1914 
1915  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, stamp);
1916  const auto right_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, stamp);
1917 
1918  const auto left_rectified_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, stamp);
1919  const auto right_rectified_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_rectified_right_, stamp);
1920 
1921  //
1922  // Republish camera info messages outside of image callbacks.
1923  // The camera info publishers are latching so the messages
1924  // will persist until a new message is published in one of the image
1925  // callbacks. This makes it easier for a user trying to access a camera_info
1926  // for a topic to which they are not subscribed
1927 
1928  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
1929 
1930  left_mono_cam_info_pub_.publish(left_camera_info);
1931  left_rgb_cam_info_pub_.publish(left_camera_info);
1932  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
1933 
1934  } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision) {
1935 
1936  left_mono_cam_info_pub_.publish(left_camera_info);
1937  left_rect_cam_info_pub_.publish(left_rectified_camera_info);
1938  left_rgb_cam_info_pub_.publish(left_camera_info);
1939  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
1940 
1941  } else { // all other MultiSense-S* variations
1942 
1943  if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 != device_info_.hardwareRevision) {
1944 
1945  left_rgb_cam_info_pub_.publish(left_camera_info);
1946  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
1947  }
1948 
1949  if (version_info_.sensorFirmwareVersion >= 0x0300) {
1950 
1951  right_disp_cam_info_pub_.publish(right_rectified_camera_info);
1952  left_cost_cam_info_pub_.publish(left_rectified_camera_info);
1953  }
1954 
1955  left_mono_cam_info_pub_.publish(left_camera_info);
1956  left_rect_cam_info_pub_.publish(left_rectified_camera_info);
1957  right_mono_cam_info_pub_.publish(right_camera_info);
1958  right_rect_cam_info_pub_.publish(right_rectified_camera_info);
1959  left_disp_cam_info_pub_.publish(left_rectified_camera_info);
1960  depth_cam_info_pub_.publish(left_rectified_camera_info);
1961 
1962  if (has_aux_camera_) {
1963 
1964  aux_mono_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, stamp));
1965  aux_rgb_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, stamp));
1966 
1967  aux_rect_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, stamp));
1968  aux_rgb_rect_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, stamp));
1969  }
1970  }
1971 }
1972 
1973 void Camera::stop()
1974 {
1975  std::lock_guard<std::mutex> lock(stream_lock_);
1976 
1977  stream_map_.clear();
1978 
1979  Status status = driver_->stopStreams(allImageSources);
1980  if (Status_Ok != status)
1981  ROS_ERROR("Camera: failed to stop all streams: %s",
1982  Channel::statusString(status));
1983 }
1984 
1985 void Camera::connectStream(DataSource enableMask)
1986 {
1987  std::lock_guard<std::mutex> lock(stream_lock_);
1988 
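 //
 // Reference count each requested source; only sources whose count transitions
 // from 0 to 1 need to be started on the sensor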
1989  DataSource notStarted = 0;
1990 
1991  for(uint32_t i=0; i<32; i++)
1992  if ((1<<i) & enableMask && 0 == stream_map_[(1<<i)]++)
1993  notStarted |= (1<<i);
1994 
1995  if (0 != notStarted) {
1996 
1997  Status status = driver_->startStreams(notStarted);
1998  if (Status_Ok != status)
1999  ROS_ERROR("Camera: failed to start streams 0x%x: %s",
2000  notStarted, Channel::statusString(status));
2001  }
2002 }
2003 
2004 void Camera::disconnectStream(DataSource disableMask)
2005 {
2006  std::lock_guard<std::mutex> lock(stream_lock_);
2007 
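 //
 // Decrement the reference count for each source; only sources whose count
 // drops back to 0 are actually stopped on the sensor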
2008  DataSource notStopped = 0;
2009 
2010  for(uint32_t i=0; i<32; i++)
2011  if ((1<<i) & disableMask && 0 == --stream_map_[(1<<i)])
2012  notStopped |= (1<<i);
2013 
2014  if (0 != notStopped) {
2015  Status status = driver_->stopStreams(notStopped);
2016  if (Status_Ok != status)
2017  ROS_ERROR("Camera: failed to stop streams 0x%x: %s",
2018  notStopped, Channel::statusString(status));
2019  }
2020 }
2021 
2022 } // namespace