camera.cpp
34 #include <arpa/inet.h>
35 #include <fstream>
36 #include <turbojpeg.h>
37 
38 #include <Eigen/Geometry>
39 #include <opencv2/opencv.hpp>
40 
41 #include <sensor_msgs/image_encodings.h>
42 #include <sensor_msgs/point_cloud2_iterator.h>
43 #include <stereo_msgs/DisparityImage.h>
44 #include <tf2_geometry_msgs/tf2_geometry_msgs.h>
45 
46 #include <multisense_lib/MultiSenseChannel.hh>
47 #include <multisense_ros/camera.h>
48 #include <multisense_ros/RawCamConfig.h>
49 #include <multisense_ros/RawCamCal.h>
50 #include <multisense_ros/DeviceInfo.h>
51 #include <multisense_ros/Histogram.h>
52 #include <multisense_ros/point_cloud_utilities.h>
53 
54 using namespace crl::multisense;
55 
56 namespace multisense_ros {
57 
58 namespace { // anonymous
59 
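//
// Convert a row-major 3x3 rotation from the driver's calibration structures into a
// tf2::Matrix3x3 so it can be packed into the static TF transforms published below.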
60 tf2::Matrix3x3 toRotation(float R[3][3])
61 {
62  return tf2::Matrix3x3{R[0][0],
63  R[0][1],
64  R[0][2],
65  R[1][0],
66  R[1][1],
67  R[1][2],
68  R[2][0],
69  R[2][1],
70  R[2][2]};
71 }
72 
73 //
74 // All of the data sources that we control here
75 
76 constexpr DataSource allImageSources = (Source_Luma_Left |
77  Source_Luma_Right |
78  Source_Luma_Rectified_Left |
79  Source_Luma_Rectified_Right |
80  Source_Luma_Aux |
81  Source_Luma_Rectified_Aux |
82  Source_Chroma_Left |
83  Source_Chroma_Aux |
84  Source_Chroma_Rectified_Aux |
85  Source_Disparity |
86  Source_Disparity_Right |
87  Source_Disparity_Cost |
88  Source_Jpeg_Left);
89 
90 //
91 // Shims for C-style driver callbacks
92 
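//
// Each shim recovers the owning Camera instance from the opaque user-data pointer that was
// handed to Channel::addIsolatedCallback() and forwards the header to the matching member
// function; the driver invokes these from its own background threads.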
93 void monoCB(const image::Header& header, void* userDataP)
94 { reinterpret_cast<Camera*>(userDataP)->monoCallback(header); }
95 void rectCB(const image::Header& header, void* userDataP)
96 { reinterpret_cast<Camera*>(userDataP)->rectCallback(header); }
97 void depthCB(const image::Header& header, void* userDataP)
98 { reinterpret_cast<Camera*>(userDataP)->depthCallback(header); }
99 void pointCB(const image::Header& header, void* userDataP)
100 { reinterpret_cast<Camera*>(userDataP)->pointCloudCallback(header); }
101 void rawCB(const image::Header& header, void* userDataP)
102 { reinterpret_cast<Camera*>(userDataP)->rawCamDataCallback(header); }
103 void colorCB(const image::Header& header, void* userDataP)
104 { reinterpret_cast<Camera*>(userDataP)->colorImageCallback(header); }
105 void dispCB(const image::Header& header, void* userDataP)
106 { reinterpret_cast<Camera*>(userDataP)->disparityImageCallback(header); }
107 void jpegCB(const image::Header& header, void* userDataP)
108 { reinterpret_cast<Camera*>(userDataP)->jpegImageCallback(header); }
109 void histCB(const image::Header& header, void* userDataP)
110 { reinterpret_cast<Camera*>(userDataP)->histogramCallback(header); }
111 void colorizeCB(const image::Header& header, void* userDataP)
112 { reinterpret_cast<Camera*>(userDataP)->colorizeCallback(header); }
113 void groundSurfaceCB(const image::Header& header, void* userDataP)
114 { reinterpret_cast<Camera*>(userDataP)->groundSurfaceCallback(header); }
115 void groundSurfaceSplineCB(const ground_surface::Header& header, void* userDataP)
116 { reinterpret_cast<Camera*>(userDataP)->groundSurfaceSplineCallback(header); }
117 
118 } // anonymous
119 
120 //
121 // Provide compiler with definition of the static members
122 
123 constexpr char Camera::LEFT[];
124 constexpr char Camera::RIGHT[];
125 constexpr char Camera::AUX[];
126 constexpr char Camera::CALIBRATION[];
127 constexpr char Camera::GROUND_SURFACE[];
128 
129 constexpr char Camera::ORIGIN_FRAME[];
130 constexpr char Camera::HEAD_FRAME[];
131 constexpr char Camera::LEFT_CAMERA_FRAME[];
132 constexpr char Camera::RIGHT_CAMERA_FRAME[];
133 constexpr char Camera::LEFT_RECTIFIED_FRAME[];
134 constexpr char Camera::RIGHT_RECTIFIED_FRAME[];
135 constexpr char Camera::AUX_CAMERA_FRAME[];
136 constexpr char Camera::AUX_RECTIFIED_FRAME[];
137 
138 constexpr char Camera::DEVICE_INFO_TOPIC[];
139 constexpr char Camera::RAW_CAM_CAL_TOPIC[];
140 constexpr char Camera::RAW_CAM_CONFIG_TOPIC[];
141 constexpr char Camera::RAW_CAM_DATA_TOPIC[];
142 constexpr char Camera::HISTOGRAM_TOPIC[];
143 constexpr char Camera::MONO_TOPIC[];
144 constexpr char Camera::RECT_TOPIC[];
145 constexpr char Camera::DISPARITY_TOPIC[];
146 constexpr char Camera::DISPARITY_IMAGE_TOPIC[];
147 constexpr char Camera::DEPTH_TOPIC[];
148 constexpr char Camera::OPENNI_DEPTH_TOPIC[];
149 constexpr char Camera::COST_TOPIC[];
150 constexpr char Camera::COLOR_TOPIC[];
151 constexpr char Camera::RECT_COLOR_TOPIC[];
152 constexpr char Camera::POINTCLOUD_TOPIC[];
153 constexpr char Camera::COLOR_POINTCLOUD_TOPIC[];
154 constexpr char Camera::ORGANIZED_POINTCLOUD_TOPIC[];
155 constexpr char Camera::COLOR_ORGANIZED_POINTCLOUD_TOPIC[];
156 constexpr char Camera::MONO_CAMERA_INFO_TOPIC[];
157 constexpr char Camera::RECT_CAMERA_INFO_TOPIC[];
158 constexpr char Camera::COLOR_CAMERA_INFO_TOPIC[];
159 constexpr char Camera::RECT_COLOR_CAMERA_INFO_TOPIC[];
160 constexpr char Camera::DEPTH_CAMERA_INFO_TOPIC[];
161 constexpr char Camera::DISPARITY_CAMERA_INFO_TOPIC[];
162 constexpr char Camera::COST_CAMERA_INFO_TOPIC[];
163 constexpr char Camera::GROUND_SURFACE_IMAGE_TOPIC[];
164 constexpr char Camera::GROUND_SURFACE_INFO_TOPIC[];
165 constexpr char Camera::GROUND_SURFACE_POINT_SPLINE_TOPIC[];
166 
167 Camera::Camera(Channel* driver, const std::string& tf_prefix) :
168  driver_(driver),
169  device_nh_(""),
170  left_nh_(device_nh_, LEFT),
171  right_nh_(device_nh_, RIGHT),
172  aux_nh_(device_nh_, AUX),
173  calibration_nh_(device_nh_, CALIBRATION),
174  ground_surface_nh_(device_nh_, GROUND_SURFACE),
175  left_mono_transport_(left_nh_),
176  right_mono_transport_(right_nh_),
177  left_rect_transport_(left_nh_),
178  right_rect_transport_(right_nh_),
179  left_rgb_transport_(left_nh_),
180  left_rgb_rect_transport_(left_nh_),
181  depth_transport_(device_nh_),
182  ni_depth_transport_(device_nh_),
183  disparity_left_transport_(left_nh_),
184  disparity_right_transport_(right_nh_),
185  disparity_cost_transport_(left_nh_),
186  aux_mono_transport_(aux_nh_),
187  aux_rgb_transport_(aux_nh_),
188  aux_rect_transport_(aux_nh_),
189  aux_rgb_rect_transport_(aux_nh_),
190  ground_surface_transport_(ground_surface_nh_),
191  stereo_calibration_manager_(nullptr),
192  frame_id_origin_(tf_prefix + ORIGIN_FRAME),
193  frame_id_head_(tf_prefix + HEAD_FRAME),
194  frame_id_left_(tf_prefix + LEFT_CAMERA_FRAME),
195  frame_id_right_(tf_prefix + RIGHT_CAMERA_FRAME),
196  frame_id_aux_(tf_prefix + AUX_CAMERA_FRAME),
197  frame_id_rectified_left_(tf_prefix + LEFT_RECTIFIED_FRAME),
198  frame_id_rectified_right_(tf_prefix + RIGHT_RECTIFIED_FRAME),
199  frame_id_rectified_aux_(tf_prefix + AUX_RECTIFIED_FRAME),
200  static_tf_broadcaster_(),
201  pointcloud_max_range_(15.0),
202  last_frame_id_(-1),
203  border_clip_type_(BorderClip::NONE),
204  border_clip_value_(0.0)
205 {
206  //
207  // Query device and version information from sensor
208 
209  Status status = driver_->getVersionInfo(version_info_);
210  if (Status_Ok != status) {
211  ROS_ERROR("Camera: failed to query version info: %s", Channel::statusString(status));
212  return;
213  }
214 
215  status = driver_->getDeviceInfo(device_info_);
216  if (Status_Ok != status) {
217  ROS_ERROR("Camera: failed to query device info: %s", Channel::statusString(status));
218  return;
219  }
220 
221  //
222  // Get the camera config
223 
224  image::Config image_config;
225  status = driver_->getImageConfig(image_config);
226  if (Status_Ok != status) {
227  ROS_ERROR("Camera: failed to query sensor configuration: %s", Channel::statusString(status));
228  return;
229  }
230 
231  //
232  // S27/S30 cameras have a 3rd aux color camera and no left color camera
233 
234  has_left_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_SL == device_info_.hardwareRevision ||
235  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7 == device_info_.hardwareRevision ||
236  system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision ||
237  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7S == device_info_.hardwareRevision ||
238  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S21 == device_info_.hardwareRevision ||
239  system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 == device_info_.hardwareRevision ||
240  system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST25 == device_info_.hardwareRevision ||
241  system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision ||
242  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision ||
243  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7AR == device_info_.hardwareRevision ||
244  system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == device_info_.hardwareRevision ||
245  system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21i == device_info_.hardwareRevision ||
246  system::DeviceInfo::HARDWARE_REV_MULTISENSE_MONOCAM == device_info_.hardwareRevision ||
247  system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == device_info_.hardwareRevision ||
248  system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == device_info_.hardwareRevision ||
249  system::DeviceInfo::HARDWARE_REV_MONO == device_info_.hardwareRevision;
250 
251  has_right_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_SL == device_info_.hardwareRevision ||
252  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7 == device_info_.hardwareRevision ||
253  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7S == device_info_.hardwareRevision ||
254  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S21 == device_info_.hardwareRevision ||
255  system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 == device_info_.hardwareRevision ||
256  system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST25 == device_info_.hardwareRevision ||
257  system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision ||
258  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision ||
259  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7AR == device_info_.hardwareRevision ||
260  system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == device_info_.hardwareRevision ||
261  system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21i == device_info_.hardwareRevision ||
262  system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == device_info_.hardwareRevision;
263 
264  has_aux_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision ||
265  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision ||
266  system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21i == device_info_.hardwareRevision;
267 
268  has_color_ = has_aux_camera_ ||
269  system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision ||
270  system::DeviceInfo::IMAGER_TYPE_CMV2000_COLOR == device_info_.imagerType ||
271  system::DeviceInfo::IMAGER_TYPE_CMV4000_COLOR == device_info_.imagerType ||
272  system::DeviceInfo::IMAGER_TYPE_AR0239_COLOR == device_info_.imagerType;
273 
274 
275  //
276  // Topics published for all device types
277 
278  device_info_pub_ = calibration_nh_.advertise<multisense_ros::DeviceInfo>(DEVICE_INFO_TOPIC, 1, true);
279  raw_cam_cal_pub_ = calibration_nh_.advertise<multisense_ros::RawCamCal>(RAW_CAM_CAL_TOPIC, 1, true);
280  raw_cam_config_pub_ = calibration_nh_.advertise<multisense_ros::RawCamConfig>(RAW_CAM_CONFIG_TOPIC, 1, true);
281  histogram_pub_ = device_nh_.advertise<multisense_ros::Histogram>(HISTOGRAM_TOPIC, 5);
282 
283  //
284  // Create spline-based ground surface publishers for S27/S30 cameras
285 
286  const bool can_support_ground_surface_ =
287  system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision ||
288  system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision ||
289  system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == device_info_.hardwareRevision ||
290  system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21i == device_info_.hardwareRevision ||
291  system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == device_info_.hardwareRevision;
292 
295  std::bind(&Camera::connectStream, this, Source_Ground_Surface_Class_Image),
296  std::bind(&Camera::disconnectStream, this, Source_Ground_Surface_Class_Image));
297 
299 
301  }
302 
303 
304  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
305 
306  // bcam has unique topics compared to all other S* variants
307 
309  std::bind(&Camera::connectStream, this, Source_Luma_Left),
310  std::bind(&Camera::disconnectStream, this, Source_Luma_Left));
311 
313  std::bind(&Camera::connectStream, this, Source_Jpeg_Left),
314  std::bind(&Camera::disconnectStream, this, Source_Jpeg_Left));
315 
317  std::bind(&Camera::connectStream, this, Source_Jpeg_Left),
318  std::bind(&Camera::disconnectStream, this, Source_Jpeg_Left));
319 
320  left_mono_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
321  left_rgb_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(COLOR_CAMERA_INFO_TOPIC, 1, true);
322  left_rgb_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true);
323 
324  } else {
325 
326  // all other MultiSense-S* variations
327 
328  if (has_left_camera_) {
329 
331  std::bind(&Camera::connectStream, this, Source_Luma_Left),
332  std::bind(&Camera::disconnectStream, this, Source_Luma_Left));
333 
334  left_mono_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
335 
337  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left),
338  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left));
339 
340  left_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_CAMERA_INFO_TOPIC, 1, true);
341  }
342 
343  if (has_right_camera_) {
344 
346  std::bind(&Camera::connectStream, this, Source_Luma_Right),
347  std::bind(&Camera::disconnectStream, this, Source_Luma_Right));
348 
349  right_mono_cam_info_pub_ = right_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
350 
352  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Right),
353  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Right));
354 
355  right_rect_cam_info_pub_ = right_nh_.advertise<sensor_msgs::CameraInfo>(RECT_CAMERA_INFO_TOPIC, 1, true);
356  }
357 
358  // only publish for stereo cameras
359  if (has_left_camera_ && has_right_camera_) {
360 
361  // run the stereo topics if left and right cameras both exist
362 
364  std::bind(&Camera::connectStream, this, Source_Disparity),
365  std::bind(&Camera::disconnectStream, this, Source_Disparity));
366 
368  std::bind(&Camera::connectStream, this, Source_Disparity),
369  std::bind(&Camera::disconnectStream, this, Source_Disparity));
370 
371  depth_cam_info_pub_ = device_nh_.advertise<sensor_msgs::CameraInfo>(DEPTH_CAMERA_INFO_TOPIC, 1, true);
372 
373  luma_point_cloud_pub_ = device_nh_.advertise<sensor_msgs::PointCloud2>(POINTCLOUD_TOPIC, 5,
374  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity),
375  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity));
376 
378  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity),
379  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity));
380 
381  raw_cam_data_pub_ = calibration_nh_.advertise<multisense_ros::RawCamData>(RAW_CAM_DATA_TOPIC, 5,
382  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity),
383  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity));
384 
386  std::bind(&Camera::connectStream, this, Source_Disparity),
387  std::bind(&Camera::disconnectStream, this, Source_Disparity));
388 
389  left_disp_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(DISPARITY_CAMERA_INFO_TOPIC, 1, true);
390 
391  left_stereo_disparity_pub_ = left_nh_.advertise<stereo_msgs::DisparityImage>(DISPARITY_IMAGE_TOPIC, 5,
392  std::bind(&Camera::connectStream, this, Source_Disparity),
393  std::bind(&Camera::disconnectStream, this, Source_Disparity));
394 
395  if (version_info_.sensorFirmwareVersion >= 0x0300) {
396 
398  std::bind(&Camera::connectStream, this, Source_Disparity_Right),
399  std::bind(&Camera::disconnectStream, this, Source_Disparity_Right));
400 
401  right_disp_cam_info_pub_ = right_nh_.advertise<sensor_msgs::CameraInfo>(DISPARITY_CAMERA_INFO_TOPIC, 1, true);
402 
403  right_stereo_disparity_pub_ = right_nh_.advertise<stereo_msgs::DisparityImage>(DISPARITY_IMAGE_TOPIC, 5,
404  std::bind(&Camera::connectStream, this, Source_Disparity_Right),
405  std::bind(&Camera::disconnectStream, this, Source_Disparity_Right));
406 
408  std::bind(&Camera::connectStream, this, Source_Disparity_Cost),
409  std::bind(&Camera::disconnectStream, this, Source_Disparity_Cost));
410 
411  left_cost_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(COST_CAMERA_INFO_TOPIC, 1, true);
412  }
413 
414  // publish colorized stereo topics
415  if (has_aux_camera_) {
416 
417  const auto point_cloud_color_topics = has_aux_camera_ ? Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux :
418  Source_Luma_Left | Source_Chroma_Left;
419 
420  color_point_cloud_pub_ = device_nh_.advertise<sensor_msgs::PointCloud2>(COLOR_POINTCLOUD_TOPIC, 5,
421  std::bind(&Camera::connectStream, this,
422  Source_Disparity | point_cloud_color_topics),
423  std::bind(&Camera::disconnectStream, this,
424  Source_Disparity | point_cloud_color_topics));
425 
427  std::bind(&Camera::connectStream, this,
428  Source_Disparity | point_cloud_color_topics),
429  std::bind(&Camera::disconnectStream, this,
430  Source_Disparity | point_cloud_color_topics));
431 
432  }
433 
434  }
435 
436  // handle color topic publishing if color data exists
437  if (has_aux_camera_) {
438 
440  std::bind(&Camera::connectStream, this, Source_Luma_Aux),
441  std::bind(&Camera::disconnectStream, this, Source_Luma_Aux));
442 
443  aux_mono_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(MONO_CAMERA_INFO_TOPIC, 1, true);
444 
446  std::bind(&Camera::connectStream, this, Source_Luma_Aux | Source_Chroma_Aux),
447  std::bind(&Camera::disconnectStream, this, Source_Luma_Aux | Source_Chroma_Aux));
448 
449  aux_rgb_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(COLOR_CAMERA_INFO_TOPIC, 1, true);
450 
452  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux),
453  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux));
454 
455  aux_rect_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(RECT_CAMERA_INFO_TOPIC, 1, true);
456 
458  std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux),
459  std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux));
460 
461  aux_rgb_rect_cam_info_pub_ = aux_nh_.advertise<sensor_msgs::CameraInfo>(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true);
462 
463  } else if (has_color_ && !has_aux_camera_) { // !has_aux_camera_ is redundant here, but kept to avoid confusion over edge cases
464 
466  std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left),
467  std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left));
468 
470  std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left),
471  std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left));
472 
473  left_rgb_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(COLOR_CAMERA_INFO_TOPIC, 1, true);
474  left_rgb_rect_cam_info_pub_ = left_nh_.advertise<sensor_msgs::CameraInfo>(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true);
475 
476  }
477 
478  }
479 
480  //
481  // All image streams off
482 
483  stop();
484 
485  //
486  // Publish device info
487 
488  multisense_ros::DeviceInfo msg;
489 
490  msg.deviceName = device_info_.name;
491  msg.buildDate = device_info_.buildDate;
492  msg.serialNumber = device_info_.serialNumber;
493  msg.deviceRevision = device_info_.hardwareRevision;
494 
495  msg.numberOfPcbs = device_info_.pcbs.size();
496  for(const auto &pcb : device_info_.pcbs) {
497  msg.pcbSerialNumbers.push_back(pcb.revision);
498  msg.pcbNames.push_back(pcb.name);
499  }
500 
501  msg.imagerName = device_info_.imagerName;
502  msg.imagerType = device_info_.imagerType;
503  msg.imagerWidth = device_info_.imagerWidth;
504  msg.imagerHeight = device_info_.imagerHeight;
505 
506  msg.lensName = device_info_.lensName;
507  msg.lensType = device_info_.lensType;
508  msg.nominalBaseline = device_info_.nominalBaseline;
509  msg.nominalFocalLength = device_info_.nominalFocalLength;
510  msg.nominalRelativeAperture = device_info_.nominalRelativeAperture;
511 
512  msg.lightingType = device_info_.lightingType;
513  msg.numberOfLights = device_info_.numberOfLights;
514 
515  msg.laserName = device_info_.laserName;
516  msg.laserType = device_info_.laserType;
517 
518  msg.motorName = device_info_.motorName;
519  msg.motorType = device_info_.motorType;
520  msg.motorGearReduction = device_info_.motorGearReduction;
521 
522  msg.apiBuildDate = version_info_.apiBuildDate;
523  msg.apiVersion = version_info_.apiVersion;
524  msg.firmwareBuildDate = version_info_.sensorFirmwareBuildDate;
525  msg.firmwareVersion = version_info_.sensorFirmwareVersion;
526  msg.bitstreamVersion = version_info_.sensorHardwareVersion;
527  msg.bitstreamMagic = version_info_.sensorHardwareMagic;
528  msg.fpgaDna = version_info_.sensorFpgaDna;
529 
530  device_info_pub_.publish(msg);
531 
532  //
533  // Publish image calibration
534 
536  {
537  image::Calibration image_calibration;
538  status = driver_->getImageCalibration(image_calibration);
539  if (Status_Ok != status) {
540  ROS_ERROR("Camera: failed to query image calibration: %s",
541  Channel::statusString(status));
542  }
543  else {
544  // currently no ROS cameras have only an aux camera
545  // HARDWARE_REV_MULTISENSE_MONOCAM uses the left cal for its calibration
546 
547  multisense_ros::RawCamCal cal;
548  const float *cP;
549 
550  cP = reinterpret_cast<const float *>(&(image_calibration.left.M[0][0]));
551  for(uint32_t i=0; i<9; i++) cal.left_M[i] = cP[i];
552  cP = reinterpret_cast<const float *>(&(image_calibration.left.D[0]));
553  for(uint32_t i=0; i<8; i++) cal.left_D[i] = cP[i];
554  cP = reinterpret_cast<const float *>(&(image_calibration.left.R[0][0]));
555  for(uint32_t i=0; i<9; i++) cal.left_R[i] = cP[i];
556  cP = reinterpret_cast<const float *>(&(image_calibration.left.P[0][0]));
557  for(uint32_t i=0; i<12; i++) cal.left_P[i] = cP[i];
558 
559  if (not has_right_camera_) {
560 
561  // publish the left calibration twice if there is no right camera
562  cP = reinterpret_cast<const float *>(&(image_calibration.left.M[0][0]));
563  for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i];
564  cP = reinterpret_cast<const float *>(&(image_calibration.left.D[0]));
565  for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i];
566  cP = reinterpret_cast<const float *>(&(image_calibration.left.R[0][0]));
567  for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i];
568  cP = reinterpret_cast<const float *>(&(image_calibration.left.P[0][0]));
569  for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i];
570 
571  } else {
572 
573  cP = reinterpret_cast<const float *>(&(image_calibration.right.M[0][0]));
574  for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i];
575  cP = reinterpret_cast<const float *>(&(image_calibration.right.D[0]));
576  for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i];
577  cP = reinterpret_cast<const float *>(&(image_calibration.right.R[0][0]));
578  for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i];
579  cP = reinterpret_cast<const float *>(&(image_calibration.right.P[0][0]));
580  for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i];
581 
582  }
583 
584  raw_cam_cal_pub_.publish(cal);
585  }
586 
587  stereo_calibration_manager_ = std::make_shared<StereoCalibrationManager>(image_config, image_calibration, device_info_);
588 
589  if (has_aux_camera_ && !stereo_calibration_manager_->validAux()) {
590  ROS_WARN("Camera: invalid aux camera calibration");
591  }
592 
593  //
594  // Publish the static transforms for our camera extrinsics for the left/right/aux frames. We will
595  // use the left camera frame as the reference coordinate frame
596 
597  const bool has_right_extrinsics = has_right_camera_ && stereo_calibration_manager_->validRight();
598  const bool has_aux_extrinsics = has_aux_camera_ && stereo_calibration_manager_->validAux();
599 
600  std::vector<geometry_msgs::TransformStamped> stamped_transforms{};
601 
602  stamped_transforms.emplace_back();
603  tf2::Transform rectified_left_T_left{toRotation(image_calibration.left.R), tf2::Vector3{0., 0., 0.}};
604  stamped_transforms.back().header.stamp = ros::Time::now();
605  stamped_transforms.back().header.frame_id = frame_id_rectified_left_;
606  stamped_transforms.back().child_frame_id = frame_id_left_;
607  stamped_transforms.back().transform = tf2::toMsg(rectified_left_T_left);
608 
609  if (has_right_extrinsics)
610  {
611  stamped_transforms.emplace_back();
612  tf2::Transform rectified_right_T_rectified_left{tf2::Matrix3x3::getIdentity(),
613  tf2::Vector3{stereo_calibration_manager_->T(), 0., 0.}};
614  stamped_transforms.back().header.stamp = ros::Time::now();
615  stamped_transforms.back().header.frame_id = frame_id_rectified_left_;
616  stamped_transforms.back().child_frame_id = frame_id_rectified_right_;
617  stamped_transforms.back().transform = tf2::toMsg(rectified_right_T_rectified_left.inverse());
618 
619  stamped_transforms.emplace_back();
620  tf2::Transform rectified_right_T_right{toRotation(image_calibration.right.R), tf2::Vector3{0., 0., 0.}};
621  stamped_transforms.back().header.stamp = ros::Time::now();
622  stamped_transforms.back().header.frame_id = frame_id_rectified_right_;
623  stamped_transforms.back().child_frame_id = frame_id_right_;
624  stamped_transforms.back().transform = tf2::toMsg(rectified_right_T_right);
625  }
626 
627  if (has_aux_extrinsics)
628  {
629  const Eigen::Vector3d aux_T = stereo_calibration_manager_->aux_T();
630 
631  stamped_transforms.emplace_back();
632  tf2::Transform rectified_aux_T_rectified_left{tf2::Matrix3x3::getIdentity(),
633  tf2::Vector3{aux_T(0), aux_T(1), aux_T(2)}};
634  stamped_transforms.back().header.stamp = ros::Time::now();
635  stamped_transforms.back().header.frame_id = frame_id_rectified_left_;
636  stamped_transforms.back().child_frame_id = frame_id_rectified_aux_;
637  stamped_transforms.back().transform = tf2::toMsg(rectified_aux_T_rectified_left.inverse());
638 
639  stamped_transforms.emplace_back();
640  tf2::Transform rectified_aux_T_aux{toRotation(image_calibration.aux.R), tf2::Vector3{0., 0., 0.}};
641  stamped_transforms.back().header.stamp = ros::Time::now();
642  stamped_transforms.back().header.frame_id = frame_id_rectified_aux_;
643  stamped_transforms.back().child_frame_id = frame_id_aux_;
644  stamped_transforms.back().transform = tf2::toMsg(rectified_aux_T_aux);
645  }
646 
647  static_tf_broadcaster_.sendTransform(stamped_transforms);
648 
649  //
650  // Update our internal image config and publish initial camera info
651 
652  updateConfig(image_config);
653  }
654 
655  //
656  // Add driver-level callbacks.
657  //
658  // -Driver creates individual background thread for each callback.
659  // -Images are queued (depth=5) per callback, with oldest silently dropped if not keeping up.
660  // -All images presented are backed by a referenced buffer system (no copying of image data is done.)
661 
662  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
663 
664  driver_->addIsolatedCallback(monoCB, Source_Luma_Left, this);
665  driver_->addIsolatedCallback(jpegCB, Source_Jpeg_Left, this);
666 
667  } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_VPB == device_info_.hardwareRevision) {
668  // no incoming image data from vpb
669  } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == device_info_.hardwareRevision) {
670 
671  driver_->addIsolatedCallback(monoCB, Source_Luma_Left, this);
672  driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left, this);
673 
674  } else {
675 
677  driver_->addIsolatedCallback(monoCB, Source_Luma_Left | Source_Luma_Right | Source_Luma_Aux, this);
678  driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left | Source_Luma_Rectified_Right | Source_Luma_Rectified_Aux, this);
679  }
680 
681  if (has_color_ && has_aux_camera_) {
682  driver_->addIsolatedCallback(colorizeCB, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux | Source_Luma_Aux |
683  Source_Luma_Left | Source_Luma_Rectified_Left, this);
684  driver_->addIsolatedCallback(colorCB, Source_Chroma_Rectified_Aux | Source_Chroma_Aux, this);
685  } else {
686  driver_->addIsolatedCallback(colorizeCB, Source_Luma_Left | Source_Chroma_Left | Source_Luma_Rectified_Left, this);
687  driver_->addIsolatedCallback(colorCB, Source_Chroma_Left, this);
688  }
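//
// The colorize callback buffers these luma/chroma images (keyed by frame ID) so that the
// point cloud callback defined later in this file can look up color data matching a given
// disparity frame.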
689 
690  if (has_right_camera_) {
691  driver_->addIsolatedCallback(depthCB, Source_Disparity, this);
692  driver_->addIsolatedCallback(pointCB, Source_Disparity, this);
693  driver_->addIsolatedCallback(rawCB, Source_Disparity | Source_Luma_Rectified_Left, this);
694  driver_->addIsolatedCallback(dispCB, Source_Disparity | Source_Disparity_Right | Source_Disparity_Cost, this);
695  }
696  }
697 
698  //
699  // A common callback to publish histograms
700 
701  driver_->addIsolatedCallback(histCB, allImageSources, this);
702 
703  //
704  // Add ground surface callbacks for S27/S30 cameras
705 
706  if (can_support_ground_surface_) {
707  driver_->addIsolatedCallback(groundSurfaceCB, Source_Ground_Surface_Class_Image, this);
708  driver_->addIsolatedCallback(groundSurfaceSplineCB, this);
709  }
710 
711  //
712  // Disable color point cloud strict frame syncing, if desired
713 
714  const char *pcColorFrameSyncEnvStringP = getenv("MULTISENSE_ROS_PC_COLOR_FRAME_SYNC_OFF");
715  if (NULL != pcColorFrameSyncEnvStringP) {
716  ROS_INFO("color point cloud frame sync is disabled");
717  }
718 
719  //
720  // Diagnostics
723  diagnostic_updater_.add("device_status", this, &Camera::deviceStatusDiagnostic);
726 }
727 
728 Camera::~Camera()
729 {
730  stop();
731 
732  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
733 
734  driver_->removeIsolatedCallback(monoCB);
735  driver_->removeIsolatedCallback(jpegCB);
736 
737  } else {
738 
742  }
743 
744  if (has_color_) {
745  driver_->removeIsolatedCallback(colorizeCB);
746  driver_->removeIsolatedCallback(colorCB);
747  }
748 
754  }
755  }
756 
758  {
759  driver_->removeIsolatedCallback(groundSurfaceCB);
760  driver_->removeIsolatedCallback(groundSurfaceSplineCB);
761  }
762 }
763 
764 void Camera::borderClipChanged(const BorderClip &borderClipType, double borderClipValue)
765 {
766  //
767  // Avoid locking since updating this while a point cloud is currently being projected is a non-standard case
768 
769  border_clip_type_ = borderClipType;
770  border_clip_value_ = borderClipValue;
771 }
772 
773 void Camera::maxPointCloudRangeChanged(double range)
774 {
775  pointcloud_max_range_ = range;
776 }
777 
778 void Camera::timeSyncChanged(bool ptp_enabled, int32_t ptp_time_offset_secs)
779 {
780  ptp_time_sync_ = ptp_enabled;
781  ptp_time_offset_secs_ = ptp_time_offset_secs;
782 }
783 
784 void Camera::extrinsicsChanged(crl::multisense::system::ExternalCalibration extrinsics)
785 {
786  // Generate extrinsics matrix
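 // The angles are composed as R = Rz(yaw) * Ry(pitch) * Rx(roll), i.e. a roll about X,
 // then a pitch about Y, then a yaw about Z, with respect to fixed (extrinsic) axes.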
787  Eigen::Matrix<float, 3, 3> eigen_rot =
788  (Eigen::AngleAxis<float>(extrinsics.yaw, Eigen::Matrix<float, 3, 1>(0, 0, 1))
789  * Eigen::AngleAxis<float>(extrinsics.pitch, Eigen::Matrix<float, 3, 1>(0, 1, 0))
790  * Eigen::AngleAxis<float>(extrinsics.roll, Eigen::Matrix<float, 3, 1>(1, 0, 0))).matrix();
791 
792  tf2::Matrix3x3 tf2_rot{
793  eigen_rot(0, 0),
794  eigen_rot(0, 1),
795  eigen_rot(0, 2),
796  eigen_rot(1, 0),
797  eigen_rot(1, 1),
798  eigen_rot(1, 2),
799  eigen_rot(2, 0),
800  eigen_rot(2, 1),
801  eigen_rot(2, 2)};
802 
803  std::vector<geometry_msgs::TransformStamped> extrinsic_transforms_(1);
804 
805  tf2::Transform multisense_head_T_origin{tf2_rot, tf2::Vector3{extrinsics.x, extrinsics.y, extrinsics.z}};
806  extrinsic_transforms_[0].header.stamp = ros::Time::now();
807  extrinsic_transforms_[0].header.frame_id = frame_id_origin_;
808  extrinsic_transforms_[0].child_frame_id = frame_id_rectified_left_;
809  extrinsic_transforms_[0].transform = tf2::toMsg(multisense_head_T_origin);
810 
811  static_tf_broadcaster_.sendTransform(extrinsic_transforms_);
812 }
813 
814 void Camera::groundSurfaceSplineDrawParametersChanged(
815  const ground_surface_utilities::SplineDrawParameters &spline_draw_params)
816 {
817  spline_draw_params_ = spline_draw_params;
818 }
819 
820 void Camera::histogramCallback(const image::Header& header)
821 {
822  if (last_frame_id_ >= header.frameId)
823  return;
824 
825  last_frame_id_ = header.frameId;
826 
827  if (histogram_pub_.getNumSubscribers() > 0) {
828  multisense_ros::Histogram rh;
829  image::Histogram mh;
830 
831  Status status = driver_->getImageHistogram(header.frameId, mh);
832  if (Status_Ok == status) {
833  rh.frame_count = header.frameId;
834  rh.time_stamp = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
835  rh.width = header.width;
836  rh.height = header.height;
837  switch(header.source) {
838  case Source_Chroma_Left:
839  case Source_Chroma_Right:
840  rh.width *= 2;
841  rh.height *= 2;
842  }
843 
844  rh.exposure_time = header.exposure;
845  rh.gain = header.gain;
846  rh.fps = header.framesPerSecond;
847  rh.channels = mh.channels;
848  rh.bins = mh.bins;
849  rh.data = mh.data;
850  histogram_pub_.publish(rh);
851  }
852  }
853 }
854 
855 void Camera::jpegImageCallback(const image::Header& header)
856 {
857  if (Source_Jpeg_Left != header.source) {
858  return;
859  }
860 
861  ros::Time t = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
862 
863  if (!stereo_calibration_manager_)
864  {
865  throw std::runtime_error("Uninitialized stereo calibration manager");
866  }
867 
868  const uint32_t height = header.height;
869  const uint32_t width = header.width;
870  const uint32_t rgbLength = height * width * 3;
871 
872  left_rgb_image_.header.frame_id = frame_id_left_;
873  left_rgb_image_.height = height;
874  left_rgb_image_.width = width;
875  left_rgb_image_.encoding = sensor_msgs::image_encodings::RGB8;
876  left_rgb_image_.is_bigendian = (htonl(1) == 1);
877  left_rgb_image_.step = 3 * width;
878  left_rgb_image_.header.stamp = t;
879 
880  left_rgb_image_.data.resize(rgbLength);
881 
882  tjhandle jpegDecompressor = tjInitDecompress();
883  tjDecompress2(jpegDecompressor,
884  reinterpret_cast<unsigned char*>(const_cast<void*>(header.imageDataP)),
885  header.imageLength,
886  &(left_rgb_image_.data[0]),
887  width, 0/*pitch*/, height, TJPF_RGB, 0);
888  tjDestroy(jpegDecompressor);
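 // Note: tjDecompress2() returns 0 on success and -1 on failure; the return value is not
 // checked here.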
889 
890  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, t);
891 
893  left_rgb_cam_info_pub_.publish(left_camera_info);
894 
895  if (left_rgb_rect_cam_pub_.getNumSubscribers() > 0) {
896 
897  const auto left_rectified_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
898 
899  left_rgb_rect_image_.data.resize(rgbLength);
900 
901  const cv::Mat rgb_image(height, width, CV_8UC3, &(left_rgb_image_.data[0]));
902  cv::Mat rect_rgb_image(height, width, CV_8UC3, &(left_rgb_rect_image_.data[0]));
903 
904  const auto left_remap = stereo_calibration_manager_->leftRemap();
905 
906  cv::remap(rgb_image, rect_rgb_image, left_remap->map1, left_remap->map2, cv::INTER_LINEAR);
907 
909  left_rgb_rect_image_.header.stamp = t;
910  left_rgb_rect_image_.height = height;
911  left_rgb_rect_image_.width = width;
912  left_rgb_rect_image_.encoding = sensor_msgs::image_encodings::RGB8;
913  left_rgb_rect_image_.is_bigendian = (htonl(1) == 1);
914  left_rgb_rect_image_.step = 3 * width;
915  left_rgb_rect_cam_pub_.publish(left_rgb_rect_image_, left_rectified_camera_info);
916  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
917  }
918 }
919 
920 void Camera::disparityImageCallback(const image::Header& header)
921 {
922  if (!((Source_Disparity == header.source &&
923  left_disparity_pub_.getNumSubscribers() > 0) ||
924  (Source_Disparity_Right == header.source &&
925  right_disparity_pub_.getNumSubscribers() > 0) ||
926  (Source_Disparity_Cost == header.source &&
927  left_disparity_cost_pub_.getNumSubscribers() > 0) ||
928  (Source_Disparity == header.source &&
929  left_stereo_disparity_pub_.getNumSubscribers() > 0) ||
930  (Source_Disparity_Right == header.source &&
931  right_stereo_disparity_pub_.getNumSubscribers() > 0)))
932  return;
933 
934  const uint32_t imageSize = (header.width * header.height * header.bitsPerPixel) / 8;
935 
936  ros::Time t = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
937 
938  if (!stereo_calibration_manager_)
939  {
940  throw std::runtime_error("Uninitialized stereo calibration manager");
941  }
942 
943  switch(header.source) {
944  case Source_Disparity:
945  case Source_Disparity_Right:
946  {
947  sensor_msgs::Image *imageP = NULL;
948  image_transport::Publisher *pubP = NULL;
949  sensor_msgs::CameraInfo camInfo;
950  ros::Publisher *camInfoPubP = NULL;
951  ros::Publisher *stereoDisparityPubP = NULL;
952  stereo_msgs::DisparityImage *stereoDisparityImageP = NULL;
953 
954 
955  if (Source_Disparity == header.source) {
956  pubP = &left_disparity_pub_;
957  imageP = &left_disparity_image_;
958  imageP->header.frame_id = frame_id_rectified_left_;
959  camInfo = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
960  camInfoPubP = &left_disp_cam_info_pub_;
961  stereoDisparityPubP = &left_stereo_disparity_pub_;
962  stereoDisparityImageP = &left_stereo_disparity_;
963  stereoDisparityImageP->header.frame_id = frame_id_rectified_left_;
964  } else {
965  pubP = &right_disparity_pub_;
966  imageP = &right_disparity_image_;
967  imageP->header.frame_id = frame_id_rectified_right_;
968  camInfo = stereo_calibration_manager_->rightCameraInfo(frame_id_rectified_right_, t);
969  camInfoPubP = &right_disp_cam_info_pub_;
970  stereoDisparityPubP = &right_stereo_disparity_pub_;
971  stereoDisparityImageP = &right_stereo_disparity_;
972  stereoDisparityImageP->header.frame_id = frame_id_rectified_right_;
973  }
974 
975  if (pubP->getNumSubscribers() > 0)
976  {
977  imageP->data.resize(imageSize);
978  memcpy(&imageP->data[0], header.imageDataP, imageSize);
979 
980  imageP->header.stamp = t;
981  imageP->height = header.height;
982  imageP->width = header.width;
983  imageP->is_bigendian = (htonl(1) == 1);
984 
985  switch(header.bitsPerPixel) {
986  case 8:
987  imageP->encoding = sensor_msgs::image_encodings::MONO8;
988  imageP->step = header.width;
989  break;
990  case 16:
991  imageP->encoding = sensor_msgs::image_encodings::MONO16;
992  imageP->step = header.width * 2;
993  break;
994  }
995 
996  pubP->publish(*imageP);
997  }
998 
999  if (stereoDisparityPubP->getNumSubscribers() > 0)
1000  {
1001  if (!stereo_calibration_manager_->validRight())
1002  {
1003  throw std::runtime_error("Stereo calibration manager missing right calibration");
1004  }
1005 
1006  //
1007  // If our current image resolution is using non-square pixels, i.e.
1008  // fx != fy then warn the user. This support is lacking in
1009  // stereo_msgs::DisparityImage and stereo_image_proc
1010 
1011  if (camInfo.P[0] != camInfo.P[5])
1012  {
1013  std::stringstream warning;
1014  warning << "Current camera configuration has non-square pixels (fx != fy). ";
1015  warning << "The stereo_msgs/DisparityImage does not account for";
1016  warning << " this. Be careful when reprojecting to a pointcloud.";
1017  ROS_WARN("%s", warning.str().c_str());
1018  }
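 // For example (illustrative): a mode that decimates image columns but not rows halves fx
 // relative to fy, and a consumer that assumes square pixels would reproject a stretched cloud.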
1019 
1020  //
1021  // Our final floating point image will be serialized into uint8_t
1022  // meaning we need to allocate 4 bytes per pixel
1023 
1024  uint32_t floatingPointImageSize = header.width * header.height * 4;
1025  stereoDisparityImageP->image.data.resize(floatingPointImageSize);
1026 
1027  stereoDisparityImageP->header.stamp = t;
1028 
1029  stereoDisparityImageP->image.height = header.height;
1030  stereoDisparityImageP->image.width = header.width;
1031  stereoDisparityImageP->image.is_bigendian = (htonl(1) == 1);
1032  stereoDisparityImageP->image.header.stamp = t;
1033  stereoDisparityImageP->image.header.frame_id = stereoDisparityImageP->header.frame_id;
1034  stereoDisparityImageP->image.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
1035  stereoDisparityImageP->image.step = 4 * header.width;
1036 
1037 
1038  //
1039  // Fx is the same for both the right and left cameras
1040 
1041  stereoDisparityImageP->f = camInfo.P[0];
1042 
1043  //
1044  // Our Tx is negative. The DisparityImage message expects Tx to be
1045  // positive
1046 
1047  stereoDisparityImageP->T = fabs(stereo_calibration_manager_->T());
1048  stereoDisparityImageP->min_disparity = 0;
1049  stereoDisparityImageP->max_disparity = stereo_calibration_manager_->config().disparities();
1050  stereoDisparityImageP->delta_d = 1./16.;
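 // Illustrative numbers (not from the calibration): with f = 1000 px and T = 0.21 m, a
 // disparity of 64 px corresponds to a depth of f*T/d ~= 3.3 m, and the 1/16-px quantization
 // (delta_d) amounts to a depth step of roughly 3 mm at that range.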
1051 
1052  //
1053  // The stereo_msgs::DisparityImage message expects the disparity
1054  // image to be floating point. We will use OpenCV to perform our
1055  // element-wise division
1056 
1057 
1058  cv::Mat_<uint16_t> tmpImage(header.height,
1059  header.width,
1060  reinterpret_cast<uint16_t*>(
1061  const_cast<void*>(header.imageDataP)));
1062 
1063  //
1064  // We will copy our data directly into our output message
1065 
1066  cv::Mat_<float> floatingPointImage(header.height,
1067  header.width,
1068  reinterpret_cast<float*>(&stereoDisparityImageP->image.data[0]));
1069 
1070  //
1071  // Convert our disparity to floating point by dividing by 16 and
1072  // copy the result to the output message
1073 
1074  floatingPointImage = tmpImage / 16.0;
1075 
1076  stereoDisparityPubP->publish(*stereoDisparityImageP);
1077  }
1078 
1079  camInfoPubP->publish(camInfo);
1080 
1081  break;
1082  }
1083  case Source_Disparity_Cost:
1084 
1085  left_disparity_cost_image_.data.resize(imageSize);
1086  memcpy(&left_disparity_cost_image_.data[0], header.imageDataP, imageSize);
1087 
1089  left_disparity_cost_image_.header.stamp = t;
1090  left_disparity_cost_image_.height = header.height;
1091  left_disparity_cost_image_.width = header.width;
1092 
1093  left_disparity_cost_image_.encoding = sensor_msgs::image_encodings::MONO8;
1094  left_disparity_cost_image_.is_bigendian = (htonl(1) == 1);
1095  left_disparity_cost_image_.step = header.width;
1096 
1098 
1100 
1101  break;
1102  }
1103 }
1104 
1105 void Camera::monoCallback(const image::Header& header)
1106 {
1107  if (Source_Luma_Left != header.source &&
1108  Source_Luma_Right != header.source &&
1109  Source_Luma_Aux != header.source) {
1110 
1111  ROS_ERROR("Camera: unexpected mono image source: 0x%lx", header.source);
1112  return;
1113  }
1114 
1115  ros::Time t = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
1116 
1117  if (!stereo_calibration_manager_)
1118  {
1119  throw std::runtime_error("Uninitialized stereo calibration manager");
1120  }
1121 
1122  switch(header.source) {
1123  case Source_Luma_Left:
1124  {
1125 
1126  left_mono_image_.data.resize(header.imageLength);
1127  memcpy(&left_mono_image_.data[0], header.imageDataP, header.imageLength);
1128 
1129  left_mono_image_.header.frame_id = frame_id_left_;
1130  left_mono_image_.header.stamp = t;
1131  left_mono_image_.height = header.height;
1132  left_mono_image_.width = header.width;
1133 
1134  switch(header.bitsPerPixel) {
1135  case 8:
1136  left_mono_image_.encoding = sensor_msgs::image_encodings::MONO8;
1137  left_mono_image_.step = header.width;
1138  break;
1139  case 16:
1140  left_mono_image_.encoding = sensor_msgs::image_encodings::MONO16;
1141  left_mono_image_.step = header.width * 2;
1142  break;
1143  }
1144 
1145  left_mono_image_.is_bigendian = (htonl(1) == 1);
1146 
1148 
1149  //
1150  // Publish a specific camera info message for the left mono image
1152 
1153  break;
1154  }
1155  case Source_Luma_Right:
1156  {
1157  right_mono_image_.data.resize(header.imageLength);
1158  memcpy(&right_mono_image_.data[0], header.imageDataP, header.imageLength);
1159 
1160  right_mono_image_.header.frame_id = frame_id_right_;
1161  right_mono_image_.header.stamp = t;
1162  right_mono_image_.height = header.height;
1163  right_mono_image_.width = header.width;
1164 
1165  switch(header.bitsPerPixel) {
1166  case 8:
1167  right_mono_image_.encoding = sensor_msgs::image_encodings::MONO8;
1168  right_mono_image_.step = header.width;
1169  break;
1170  case 16:
1171  right_mono_image_.encoding = sensor_msgs::image_encodings::MONO16;
1172  right_mono_image_.step = header.width * 2;
1173  break;
1174  }
1175  right_mono_image_.is_bigendian = (htonl(1) == 1);
1176 
1178 
1179  //
1180  // Publish a specific camera info message for the right mono image
1182 
1183  break;
1184  }
1185  case Source_Luma_Aux:
1186  {
1187  aux_mono_image_.data.resize(header.imageLength);
1188  memcpy(&aux_mono_image_.data[0], header.imageDataP, header.imageLength);
1189 
1190  aux_mono_image_.header.frame_id = frame_id_aux_;
1191  aux_mono_image_.header.stamp = t;
1192  aux_mono_image_.height = header.height;
1193  aux_mono_image_.width = header.width;
1194 
1195  switch(header.bitsPerPixel) {
1196  case 8:
1197  aux_mono_image_.encoding = sensor_msgs::image_encodings::MONO8;
1198  aux_mono_image_.step = header.width;
1199  break;
1200  case 16:
1201  aux_mono_image_.encoding = sensor_msgs::image_encodings::MONO16;
1202  aux_mono_image_.step = header.width * 2;
1203  break;
1204  }
1205  aux_mono_image_.is_bigendian = (htonl(1) == 1);
1206 
1208 
1209  //
1210  // Publish a specific camera info message for the aux mono image
1212  break;
1213  }
1214  }
1215 }
1216 
1217 void Camera::rectCallback(const image::Header& header)
1218 {
1219  if (Source_Luma_Rectified_Left != header.source &&
1220  Source_Luma_Rectified_Right != header.source &&
1221  Source_Luma_Rectified_Aux != header.source) {
1222 
1223  ROS_ERROR("Camera: unexpected rectified image source: 0x%lx", header.source);
1224  return;
1225  }
1226 
1227  ros::Time t = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
1228 
1229  if (!stereo_calibration_manager_)
1230  {
1231  throw std::runtime_error("Uninitialized stereo calibration manager");
1232  }
1233 
1234  switch(header.source) {
1235  case Source_Luma_Rectified_Left:
1236  {
1237 
1238  left_rect_image_.data.resize(header.imageLength);
1239  memcpy(&left_rect_image_.data[0], header.imageDataP, header.imageLength);
1240 
1241  left_rect_image_.header.frame_id = frame_id_rectified_left_;
1242  left_rect_image_.header.stamp = t;
1243  left_rect_image_.height = header.height;
1244  left_rect_image_.width = header.width;
1245 
1246  switch(header.bitsPerPixel) {
1247  case 8:
1248  left_rect_image_.encoding = sensor_msgs::image_encodings::MONO8;
1249  left_rect_image_.step = header.width;
1250 
1251  break;
1252  case 16:
1253  left_rect_image_.encoding = sensor_msgs::image_encodings::MONO16;
1254  left_rect_image_.step = header.width * 2;
1255 
1256  break;
1257  }
1258 
1259  left_rect_image_.is_bigendian = (htonl(1) == 1);
1260 
1261  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
1262 
1263  //
1264  // Continue to publish the rect camera info on the
1265  // <namespace>/left/camera_info topic for backward compatibility with
1266  // older versions of the driver
1267  left_rect_cam_pub_.publish(left_rect_image_, left_camera_info);
1268 
1269  left_rect_cam_info_pub_.publish(left_camera_info);
1270 
1271  break;
1272  }
1273  case Source_Luma_Rectified_Right:
1274  {
1275 
1276  right_rect_image_.data.resize(header.imageLength);
1277  memcpy(&right_rect_image_.data[0], header.imageDataP, header.imageLength);
1278 
1279  right_rect_image_.header.frame_id = frame_id_rectified_right_;
1280  right_rect_image_.header.stamp = t;
1281  right_rect_image_.height = header.height;
1282  right_rect_image_.width = header.width;
1283 
1284  switch(header.bitsPerPixel) {
1285  case 8:
1286  right_rect_image_.encoding = sensor_msgs::image_encodings::MONO8;
1287  right_rect_image_.step = header.width;
1288  break;
1289  case 16:
1290  right_rect_image_.encoding = sensor_msgs::image_encodings::MONO16;
1291  right_rect_image_.step = header.width * 2;
1292  break;
1293  }
1294 
1295  right_rect_image_.is_bigendian = (htonl(1) == 1);
1296 
1297  const auto right_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_rectified_right_, t);
1298 
1299  //
1300  // Continue to publish the rect camera info on the
1301  // <namespace>/right/camera_info topic for backward compatibility with
1302  // older versions of the driver
1303  right_rect_cam_pub_.publish(right_rect_image_, right_camera_info);
1304 
1305  right_rect_cam_info_pub_.publish(right_camera_info);
1306 
1307  break;
1308  }
1309  case Source_Luma_Rectified_Aux:
1310  {
1311 
1312  aux_rect_image_.data.resize(header.imageLength);
1313  memcpy(&aux_rect_image_.data[0], header.imageDataP, header.imageLength);
1314 
1315  aux_rect_image_.header.frame_id = frame_id_rectified_aux_;
1316  aux_rect_image_.header.stamp = t;
1317  aux_rect_image_.height = header.height;
1318  aux_rect_image_.width = header.width;
1319 
1320  switch(header.bitsPerPixel) {
1321  case 8:
1322  aux_rect_image_.encoding = sensor_msgs::image_encodings::MONO8;
1323  aux_rect_image_.step = header.width;
1324  break;
1325  case 16:
1326  aux_rect_image_.encoding = sensor_msgs::image_encodings::MONO16;
1327  aux_rect_image_.step = header.width * 2;
1328  break;
1329  }
1330 
1331  aux_rect_image_.is_bigendian = (htonl(1) == 1);
1332 
1333  const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, t, header.width, header.height);
1334 
1335  //
1336  // Continue to publish the rect camera info on the
1337  // <namespace>/aux/camera_info topic for backward compatibility with
1338  // older versions of the driver
1339  aux_rect_cam_pub_.publish(aux_rect_image_, aux_camera_info);
1340 
1341  aux_rect_cam_info_pub_.publish(aux_camera_info);
1342 
1343  break;
1344  }
1345  }
1346 }
1347 
1348 void Camera::depthCallback(const image::Header& header)
1349 {
1350  if (Source_Disparity != header.source) {
1351 
1352  ROS_ERROR("Camera: unexpected depth image source: 0x%lx", header.source);
1353  return;
1354  }
1355 
1356  uint32_t niDepthSubscribers = ni_depth_cam_pub_.getNumSubscribers();
1357  uint32_t depthSubscribers = depth_cam_pub_.getNumSubscribers();
1358 
1359  if (0 == niDepthSubscribers && 0 == depthSubscribers)
1360  {
1361  return;
1362  }
1363 
1364  ros::Time t = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
1365 
1366  if (!stereo_calibration_manager_)
1367  {
1368  throw std::runtime_error("Uninitialized stereo calibration manager");
1369  }
1370 
1371  const float bad_point = std::numeric_limits<float>::quiet_NaN();
1372  const uint32_t depthSize = header.height * header.width * sizeof(float);
1373  const uint32_t niDepthSize = header.height * header.width * sizeof(uint16_t);
1374  const uint32_t imageSize = header.width * header.height;
1375 
1376  depth_image_.header.stamp = t;
1377  depth_image_.header.frame_id = frame_id_rectified_left_;
1378  depth_image_.height = header.height;
1379  depth_image_.width = header.width;
1380  depth_image_.is_bigendian = (htonl(1) == 1);
1381 
1383 
1384  ni_depth_image_.encoding = sensor_msgs::image_encodings::MONO16;
1385  ni_depth_image_.step = header.width * 2;
1386 
1387  depth_image_.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
1388  depth_image_.step = header.width * 4;
1389 
1390  depth_image_.data.resize(depthSize);
1391  ni_depth_image_.data.resize(niDepthSize);
1392 
1393  float *depthImageP = reinterpret_cast<float*>(&depth_image_.data[0]);
1394  uint16_t *niDepthImageP = reinterpret_cast<uint16_t*>(&ni_depth_image_.data[0]);
1395 
1396  const uint16_t min_ni_depth = std::numeric_limits<uint16_t>::lowest();
1397  const uint16_t max_ni_depth = std::numeric_limits<uint16_t>::max();
1398 
1399  // disparity conversion requires valid right image
1400  if (!stereo_calibration_manager_->validRight())
1401  {
1402  throw std::runtime_error("Stereo calibration manager missing right calibration");
1403  }
1404 
1405  //
1406  // Disparity is in 32-bit floating point
1407 
1408  if (32 == header.bitsPerPixel) {
1409 
1410  //
1411  // Depth = focal_length*baseline/disparity
1412  // From the Q matrix used to reproject disparity images with non-isotropic
1413  // pixels, the homogeneous z component is (fx*fy*Tx). Normalizing so that
1414  // the scale factor on the homogeneous Cartesian coordinate is 1 results
1415  // in z = (fx*fy*Tx)/(-fy*d), or z = (fx*Tx)/(-d).
1416  // The 4th element of the right camera projection matrix is defined
1417  // as fx*Tx.
1418 
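 // For example (illustrative numbers): if |fx*Tx| = 210 px*m, a disparity of 70 px maps to a
 // depth of about 3 m.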
1419  const double scale = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, t).P[3];
1420 
1421  const float *disparityImageP = reinterpret_cast<const float*>(header.imageDataP);
1422 
1423  for (uint32_t i = 0 ; i < imageSize ; ++i)
1424  {
1425  if (0.0 >= disparityImageP[i])
1426  {
1427  depthImageP[i] = bad_point;
1428  niDepthImageP[i] = 0;
1429  }
1430  else
1431  {
1432  depthImageP[i] = scale / disparityImageP[i];
1433  niDepthImageP[i] = static_cast<uint16_t>(std::min(static_cast<float>(max_ni_depth),
1434  std::max(static_cast<float>(min_ni_depth),
1435  depthImageP[i] * 1000)));
1436  }
1437  }
1438 
1439  //
1440  // Disparity is in 1/16th pixel, unsigned integer
1441 
1442  } else if (16 == header.bitsPerPixel) {
1443 
1444  //
1445  // Depth = focal_length*baseline/disparity
1446  // From the Q matrix used to reproject disparity images with non-isotropic
1447  // pixels, the homogeneous z component is (fx*fy*Tx). Normalizing so that
1448  // the scale factor on the homogeneous Cartesian coordinate is 1 results
1449  // in z = (fx*fy*Tx)/(-fy*d), or z = (fx*Tx)/(-d). Because our disparity
1450  // image is in 1/16th-pixel units, the numerator must also be scaled by 16, making z = (fx*Tx*16)/(-d).
1451  // The 4th element of the right camera projection matrix is defined
1452  // as fx*Tx.
1453 
1454 
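 // For example (illustrative numbers): with |fx*Tx| = 210 px*m, a raw 16-bit value of 1600
 // (i.e. 100 px) yields a depth of (210 * 16) / 1600 = 2.1 m.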
1455  const float scale = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, t).P[3] * -16.0f;
1456 
1457  const uint16_t *disparityImageP = reinterpret_cast<const uint16_t*>(header.imageDataP);
1458 
1459  for (uint32_t i = 0 ; i < imageSize ; ++i)
1460  {
1461  if (0 == disparityImageP[i])
1462  {
1463  depthImageP[i] = bad_point;
1464  niDepthImageP[i] = 0;
1465  }
1466  else
1467  {
1468  depthImageP[i] = scale / disparityImageP[i];
1469  niDepthImageP[i] = static_cast<uint16_t>(std::min(static_cast<float>(max_ni_depth),
1470  std::max(static_cast<float>(min_ni_depth),
1471  depthImageP[i] * 1000)));
1472  }
1473  }
1474 
1475  } else {
1476  ROS_ERROR("Camera: unsupported disparity bpp: %d", header.bitsPerPixel);
1477  return;
1478  }
1479 
1480  if (0 != niDepthSubscribers)
1481  {
1482  ni_depth_cam_pub_.publish(ni_depth_image_);
1483  }
1484 
1485  if (0 != depthSubscribers)
1486  {
1487  depth_cam_pub_.publish(depth_image_);
1488  }
1489 
1491 }
1492 
1493 void Camera::pointCloudCallback(const image::Header& header)
1494 {
1495  if (Source_Disparity != header.source) {
1496 
1497  ROS_ERROR("Camera: unexpected pointcloud image source: 0x%lx", header.source);
1498  return;
1499  }
1500 
1501  if (!stereo_calibration_manager_)
1502  {
1503  throw std::runtime_error("Uninitialized stereo calibration manager");
1504  }
1505 
1506  //
1507  // Get the corresponding visual images so we can colorize properly
1508 
1509  std::shared_ptr<BufferWrapper<image::Header>> left_luma_rect = nullptr;
1510  std::shared_ptr<BufferWrapper<image::Header>> left_luma = nullptr;
1511  std::shared_ptr<BufferWrapper<image::Header>> left_chroma = nullptr;
1512  std::shared_ptr<BufferWrapper<image::Header>> aux_luma_rectified = nullptr;
1513  std::shared_ptr<BufferWrapper<image::Header>> aux_chroma_rectified = nullptr;
1514 
1515  const auto left_rect_image = image_buffers_.find(Source_Luma_Rectified_Left);
1516  if (left_rect_image != std::end(image_buffers_) && left_rect_image->second->data().frameId == header.frameId) {
1517  left_luma_rect = left_rect_image->second;
1518  }
1519 
1520  const auto left_luma_image = image_buffers_.find(Source_Luma_Left);
1521  if (left_luma_image != std::end(image_buffers_) && left_luma_image->second->data().frameId == header.frameId) {
1522  left_luma = left_luma_image->second;
1523  }
1524 
1525  const auto chroma_image = image_buffers_.find(Source_Chroma_Left);
1526  if (chroma_image != std::end(image_buffers_) && chroma_image->second->data().frameId == header.frameId) {
1527  left_chroma = chroma_image->second;
1528  }
1529 
1530  const auto aux_luma_rectified_image = image_buffers_.find(Source_Luma_Rectified_Aux);
1531  if (aux_luma_rectified_image != std::end(image_buffers_) && aux_luma_rectified_image->second->data().frameId == header.frameId) {
1532  aux_luma_rectified = aux_luma_rectified_image->second;
1533  }
1534 
1535  const auto aux_chroma_rectified_image = image_buffers_.find(Source_Chroma_Rectified_Aux);
1536  if (aux_chroma_rectified_image != std::end(image_buffers_) && aux_chroma_rectified_image->second->data().frameId == header.frameId) {
1537  aux_chroma_rectified = aux_chroma_rectified_image->second;
1538  }
1539 
1540  const bool color_data = (has_aux_camera_ && aux_luma_rectified && aux_chroma_rectified && stereo_calibration_manager_->validAux()) ||
1541  (!has_aux_camera_ && left_luma && left_chroma);
1542 
1543  const bool pub_pointcloud = luma_point_cloud_pub_.getNumSubscribers() > 0 && left_luma_rect;
1544  const bool pub_color_pointcloud = color_point_cloud_pub_.getNumSubscribers() > 0 && color_data;
1545  const bool pub_organized_pointcloud = luma_organized_point_cloud_pub_.getNumSubscribers() > 0 && left_luma_rect;
1546  const bool pub_color_organized_pointcloud = color_organized_point_cloud_pub_.getNumSubscribers() > 0 && color_data;
1547 
1548  if (!(pub_pointcloud || pub_color_pointcloud || pub_organized_pointcloud || pub_color_organized_pointcloud))
1549  {
1550  return;
1551  }
1552 
1553  ros::Time t = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
1554 
1555  //
1556  // Resize our corresponding pointclouds if we plan on publishing them
1557 
1558  if (pub_pointcloud)
1559  {
1560  switch(left_luma_rect->data().bitsPerPixel)
1561  {
1562  case 8:
1563  initializePointcloud<float, uint8_t>(luma_point_cloud_,
1564  t,
1565  header.width * header.height,
1566  1,
1567  true,
1569  "intensity");
1570  break;
1571  case 16:
1572  initializePointcloud<float, uint16_t>(luma_point_cloud_,
1573  t,
1574  header.width * header.height,
1575  1,
1576  true,
1578  "intensity");
1579  break;
1580  default:
1581  ROS_ERROR("Camera: unsupported luma rectified bit depth for pointcloud conversion: %d", left_luma_rect->data().bitsPerPixel);
1582  }
1583  }
1584 
1585  if (pub_color_pointcloud)
1586  {
1587  initializePointcloud<float, uint32_t>(color_point_cloud_,
1588  t,
1589  header.width * header.height,
1590  1,
1591  true,
1593  "rgb");
1594  }
1595 
1596  if (pub_organized_pointcloud)
1597  {
1598  switch(left_luma_rect->data().bitsPerPixel)
1599  {
1600  case 8:
1601  initializePointcloud<float, uint8_t>(luma_organized_point_cloud_,
1602  t,
1603  header.width,
1604  header.height,
1605  false,
1607  "intensity");
1608  break;
1609  case 16:
1610  initializePointcloud<float, uint16_t>(luma_organized_point_cloud_,
1611  t,
1612  header.width,
1613  header.height,
1614  false,
1616  "intensity");
1617  break;
1618  default:
1619  ROS_ERROR("Camera: unsupported luma rectified bit depth for pointcloud conversion: %d", left_luma_rect->data().bitsPerPixel);
1620  }
1621  }
1622 
1623  if (pub_color_organized_pointcloud)
1624  {
1625  initializePointcloud<float, uint32_t>(color_organized_point_cloud_,
1626  t,
1627  header.width,
1628  header.height,
1629  false,
1631  "rgb");
1632  }
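 //
 // The unorganized clouds are allocated for the full image here and trimmed down to the number of
 // valid points once the disparity image has been traversed below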
1633 
1634  const Eigen::Vector3f invalid_point(std::numeric_limits<float>::quiet_NaN(),
1635  std::numeric_limits<float>::quiet_NaN(),
1636  std::numeric_limits<float>::quiet_NaN());
1637 
1638  //
1639  // Create rectified color image upfront if we are planning to publish color pointclouds
1640 
1641  cv::Mat rectified_color;
1642  if (!has_aux_camera_ && (pub_color_pointcloud || pub_color_organized_pointcloud))
1643  {
1644  const auto &luma = left_luma->data();
1645 
1646  pointcloud_color_buffer_.resize(3 * luma.width * luma.height);
1647  pointcloud_rect_color_buffer_.resize(3 * luma.width * luma.height);
1648  ycbcrToBgr(luma, left_chroma->data(), &(pointcloud_color_buffer_[0]));
1649 
1650  cv::Mat rgb_image(luma.height, luma.width, CV_8UC3, &(pointcloud_color_buffer_[0]));
1651  cv::Mat rect_rgb_image(luma.height, luma.width, CV_8UC3, &(pointcloud_rect_color_buffer_[0]));
1652 
1653  const auto left_remap = stereo_calibration_manager_->leftRemap();
1654 
1655  cv::remap(rgb_image, rect_rgb_image, left_remap->map1, left_remap->map2, cv::INTER_LINEAR);
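 //
 // cv::remap applies the precomputed left rectification maps, so the color image lines up
 // pixel-for-pixel with the rectified luma / disparity grid used below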
1656 
1657  rectified_color = std::move(rect_rgb_image);
1658  }
1659  else if(has_aux_camera_ && (pub_color_pointcloud || pub_color_organized_pointcloud))
1660  {
1661  const auto &luma = aux_luma_rectified->data();
1662 
1663  pointcloud_rect_color_buffer_.resize(3 * luma.width * luma.height);
1664 
1665  ycbcrToBgr(luma, aux_chroma_rectified->data(), reinterpret_cast<uint8_t*>(&(pointcloud_rect_color_buffer_[0])));
1666 
1667  cv::Mat rect_rgb_image(luma.height, luma.width, CV_8UC3, &(pointcloud_rect_color_buffer_[0]));
1668 
1669  //
1670  // If we are running in full-aux mode and 1/4 res mode, resize the rectified aux image to match the
1671  // disparity resolution
1672 
1673  if (stereo_calibration_manager_->config().cameraProfile() == Full_Res_Aux_Cam &&
1674  (header.width != luma.width || header.height != luma.height))
1675  {
1676  cv::Mat resized_rect_rgb_image;
1677  cv::resize(rect_rgb_image,
1678  resized_rect_rgb_image,
1679  cv::Size{static_cast<int>(header.width), static_cast<int>(header.height)},
1680  0, 0, cv::INTER_AREA);
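 //
 // cv::INTER_AREA averages source pixels during the downscale, which avoids the aliasing a
 // nearest-neighbor or bilinear decimation would introduce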
1681 
1682  rect_rgb_image = std::move(resized_rect_rgb_image);
1683  }
1684 
1685  rectified_color = std::move(rect_rgb_image);
1686  }
1687 
1688  // disparity conversion requires valid right calibration
1689  if (!stereo_calibration_manager_->validRight())
1690  {
1691  throw std::runtime_error("Stereo calibration manager missing right calibration");
1692  }
1693 
1694  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, t);
1695  const auto right_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, t);
1696  const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, t,
1697  rectified_color.cols, rectified_color.rows);
1698 
1699  //
1700  // Iterate through our disparity image once populating our pointcloud structures if we plan to publish them
1701 
1702  uint32_t packed_color = 0;
1703 
1704  const float squared_max_range = pointcloud_max_range_ * pointcloud_max_range_;
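 //
 // Points are validated against the squared max range below so no per-point sqrt is required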
1705 
1706  size_t valid_points = 0;
1707  for (size_t y = 0 ; y < header.height ; ++y)
1708  {
1709  for (size_t x = 0 ; x < header.width ; ++x)
1710  {
1711  const size_t index = y * header.width + x;
1712 
1713  float disparity = 0.0f;
1714  switch(header.bitsPerPixel)
1715  {
1716  case 16:
1717  {
1718  disparity = static_cast<float>(reinterpret_cast<const uint16_t*>(header.imageDataP)[index]) / 16.0f;
1719  break;
1720  }
1721  case 32:
1722  {
1723  disparity = static_cast<float>(reinterpret_cast<const float*>(header.imageDataP)[index]);
1724  break;
1725  }
1726  default:
1727  {
1728  ROS_ERROR("Camera: unsupported disparity depth: %d", header.bitsPerPixel);
1729  return;
1730  }
1731  }
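 //
 // 16-bit disparities are fixed point with 4 fractional bits (1/16 pixel resolution, hence the
 // divide by 16); 32-bit disparities already arrive as floating point pixel values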
1732 
1733  const Eigen::Vector3f point = stereo_calibration_manager_->reproject(x, y, disparity,
1734  left_camera_info, right_camera_info);
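 //
 // The reprojection follows the usual rectified-stereo pinhole relation (see
 // StereoCalibrationManager::reproject for the exact implementation):
 //
 //   Z = fx * baseline / disparity
 //   X = (x - cx) * Z / fx
 //   Y = (y - cy) * Z / fy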
1735 
1736  //
1737  // We have a valid rectified color image meaning we plan to publish color pointcloud topics. Assemble the
1738  // color pixel here since it may be needed in our organized pointclouds
1739 
1740  if (!rectified_color.empty())
1741  {
1742  packed_color = 0;
1743 
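 //
 // With an aux camera the 3D point is projected into the rectified aux image and the color is
 // interpolated there; without one the rectified left color image is already aligned with the
 // disparity grid, so a direct (y, x) lookup suffices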
1744  const auto color_pixel = (has_aux_camera_ && disparity != 0.0) ?
1745  interpolateColor(stereo_calibration_manager_->rectifiedAuxProject(point, aux_camera_info),
1746  rectified_color) :
1747  rectified_color.at<cv::Vec3b>(y, x);
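 //
 // Pack the BGR pixel into a single 32-bit 0x00RRGGBB value, which is the layout the "rgb" field
 // of the color pointclouds initialized above expects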
1748 
1749 
1750  packed_color |= color_pixel[2] << 16 | color_pixel[1] << 8 | color_pixel[0];
1751  }
1752 
1753  //
1754  // If the disparity is 0 pixels the corresponding 3D point is at infinity. If we plan to publish organized
1755  // pointclouds we will need to add an invalid point to our pointcloud(s)
1756 
1757  if (disparity == 0.0 || clipPoint(border_clip_type_, border_clip_value_, header.width, header.height, x, y))
1758  {
1759  if (pub_organized_pointcloud)
1760  {
1761  writePoint(luma_organized_point_cloud_, index, invalid_point, index, left_luma_rect->data().bitsPerPixel, left_luma_rect->data().imageDataP);
1762  }
1763 
1764  if (pub_color_organized_pointcloud)
1765  {
1766  writePoint(color_organized_point_cloud_, index, invalid_point, packed_color);
1767  }
1768 
1769  continue;
1770  }
1771 
1772  const bool valid = stereo_calibration_manager_->isValidReprojectedPoint(point, squared_max_range);
1773 
1774  if (pub_pointcloud && valid)
1775  {
1776  writePoint(luma_point_cloud_, valid_points, point, index, left_luma_rect->data().bitsPerPixel, left_luma_rect->data().imageDataP);
1777  }
1778 
1779  if(pub_color_pointcloud && valid)
1780  {
1781  writePoint(color_point_cloud_, valid_points, point, packed_color);
1782  }
1783 
1784  if (pub_organized_pointcloud)
1785  {
1786  writePoint(luma_organized_point_cloud_, index, valid ? point : invalid_point, index, left_luma_rect->data().bitsPerPixel, left_luma_rect->data().imageDataP);
1787  }
1788 
1789  if (pub_color_organized_pointcloud)
1790  {
1791  writePoint(color_organized_point_cloud_, index, valid ? point : invalid_point, packed_color);
1792  }
1793 
1794  if (valid)
1795  {
1796  ++valid_points;
1797  }
1798  }
1799  }
1800 
1801  if (pub_pointcloud)
1802  {
1803  luma_point_cloud_.row_step = valid_points * luma_point_cloud_.point_step;
1804  luma_point_cloud_.width = valid_points;
1805  luma_point_cloud_.data.resize(valid_points * luma_point_cloud_.point_step);
1806 
1808  }
1809 
1810  if (pub_color_pointcloud)
1811  {
1812  color_point_cloud_.row_step = valid_points * color_point_cloud_.point_step;
1813  color_point_cloud_.width = valid_points;
1814  color_point_cloud_.data.resize(valid_points * color_point_cloud_.point_step);
1815 
1817  }
1818 
1819  if (pub_organized_pointcloud)
1820  {
1822  }
1823 
1824  if (pub_color_organized_pointcloud)
1825  {
1827  }
1828 
1829 }
1830 
1832 {
1833  if (0 == raw_cam_data_pub_.getNumSubscribers()) {
1834  return;
1835  }
1836 
1837  if(Source_Disparity == header.source)
1838  {
1839  const auto image = image_buffers_.find(Source_Luma_Rectified_Left);
1840  if (image != std::end(image_buffers_) && image->second->data().frameId == header.frameId)
1841  {
1842  const auto luma_ptr = image->second;
1843  const auto &left_luma_rect = luma_ptr->data();
1844 
1845  const uint32_t left_luma_image_size = left_luma_rect.width * left_luma_rect.height;
1846 
1847  raw_cam_data_.gray_scale_image.resize(left_luma_image_size);
1848  memcpy(&(raw_cam_data_.gray_scale_image[0]),
1849  left_luma_rect.imageDataP,
1850  left_luma_image_size * sizeof(uint8_t));
1851 
1852  raw_cam_data_.frames_per_second = left_luma_rect.framesPerSecond;
1853  raw_cam_data_.gain = left_luma_rect.gain;
1854  raw_cam_data_.exposure_time = left_luma_rect.exposure;
1855  raw_cam_data_.frame_count = left_luma_rect.frameId;
1856  raw_cam_data_.time_stamp = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
1857  raw_cam_data_.width = left_luma_rect.width;
1858  raw_cam_data_.height = left_luma_rect.height;
1859 
1860  const uint32_t disparity_size = header.width * header.height;
1861 
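  //
  // Disparity pixels occupy 2 bytes at 16 bits per pixel and 4 bytes otherwise, so size the copy
  // accordingly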
1862  raw_cam_data_.disparity_image.resize(disparity_size);
1863  memcpy(&(raw_cam_data_.disparity_image[0]),
1864  header.imageDataP,
1865  disparity_size * (header.bitsPerPixel == 16 ? sizeof(uint16_t) : sizeof(uint32_t)));
1866 
1868  }
1869  }
1870 }
1871 
1873 {
1874  //
1875  // The left-luma image is currently published before the matching chroma image, so this callback can simply trigger on the chroma image
1876 
1877  if (Source_Chroma_Left != header.source &&
1878  Source_Chroma_Rectified_Aux != header.source &&
1879  Source_Chroma_Aux != header.source)
1880  {
1881  ROS_WARN("Camera: unexpected color image source: 0x%lx", header.source);
1882  return;
1883  }
1884 
1885  ros::Time t = convertImageTimestamp(header.timeSeconds, header.timeMicroSeconds);
1886 
1888  {
1889  throw std::runtime_error("Uninitialized stereo calibration manager");
1890  }
1891 
1892  switch (header.source)
1893  {
1894  case Source_Chroma_Left:
1895  {
1896 
1897  const auto color_subscribers = left_rgb_cam_pub_.getNumSubscribers();
1898  const auto color_rect_subscribers = left_rgb_rect_cam_pub_.getNumSubscribers();
1899 
1900  if (color_subscribers == 0 && color_rect_subscribers == 0)
1901  {
1902  return;
1903  }
1904 
1905  const auto left_luma = image_buffers_.find(Source_Luma_Left);
1906  if (left_luma == std::end(image_buffers_)) {
1907  return;
1908  }
1909 
1910  const auto luma_ptr = left_luma->second;
1911 
1912  if (header.frameId == luma_ptr->data().frameId) {
1913 
1914  const uint32_t height = luma_ptr->data().height;
1915  const uint32_t width = luma_ptr->data().width;
1916  const uint32_t imageSize = 3 * height * width;
1917 
1918  left_rgb_image_.data.resize(imageSize);
1919 
1920  left_rgb_image_.header.frame_id = frame_id_left_;
1921  left_rgb_image_.header.stamp = t;
1922  left_rgb_image_.height = height;
1923  left_rgb_image_.width = width;
1924 
1926  left_rgb_image_.is_bigendian = (htonl(1) == 1);
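  // htonl(1) == 1 only on big-endian hosts, so this flags the host byte order of the image data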
1927  left_rgb_image_.step = 3 * width;
1928 
1929  //
1930  // Convert YCbCr 4:2:0 to RGB
1931 
1932  ycbcrToBgr(luma_ptr->data(), header, reinterpret_cast<uint8_t*>(&(left_rgb_image_.data[0])));
1933 
1934  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, t);
1935 
1936  if (color_subscribers != 0) {
1938 
1939  left_rgb_cam_info_pub_.publish(left_camera_info);
1940  }
1941 
1942  if (color_rect_subscribers > 0) {
1943  left_rgb_rect_image_.data.resize(imageSize);
1944 
1945  const auto left_rectified_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
1946 
1947  const auto remaps = stereo_calibration_manager_->leftRemap();
1948 
1949  const cv::Mat rgb_image(height, width, CV_8UC3, &(left_rgb_image_.data[0]));
1950  cv::Mat rect_rgb_image(height, width, CV_8UC3, &(left_rgb_rect_image_.data[0]));
1951 
1952  cv::remap(rgb_image, rect_rgb_image, remaps->map1, remaps->map2, cv::INTER_LINEAR);
1953 
1955  left_rgb_rect_image_.header.stamp = t;
1956  left_rgb_rect_image_.height = height;
1957  left_rgb_rect_image_.width = width;
1958 
1960  left_rgb_rect_image_.is_bigendian = (htonl(1) == 1);
1961  left_rgb_rect_image_.step = 3 * width;
1962 
1963  left_rgb_rect_cam_pub_.publish(left_rgb_rect_image_, left_rectified_camera_info);
1964 
1965  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
1966  }
1967  }
1968 
1969  break;
1970  }
1971  case Source_Chroma_Rectified_Aux:
1972  {
1974  return;
1975  }
1976 
1977  const auto aux_luma = image_buffers_.find(Source_Luma_Rectified_Aux);
1978  if (aux_luma == std::end(image_buffers_)) {
1979  return;
1980  }
1981 
1982  const auto luma_ptr = aux_luma->second;
1983 
1984  if (header.frameId == luma_ptr->data().frameId) {
1985 
1986  const uint32_t height = luma_ptr->data().height;
1987  const uint32_t width = luma_ptr->data().width;
1988  const uint32_t imageSize = 3 * height * width;
1989 
1990  aux_rgb_rect_image_.data.resize(imageSize);
1991 
1992  aux_rgb_rect_image_.header.frame_id = frame_id_rectified_aux_;
1993  aux_rgb_rect_image_.header.stamp = t;
1994  aux_rgb_rect_image_.height = height;
1995  aux_rgb_rect_image_.width = width;
1996 
1998  aux_rgb_rect_image_.is_bigendian = (htonl(1) == 1);
1999  aux_rgb_rect_image_.step = 3 * width;
2000 
2001  //
2002  // Convert YCbCr 4:2:0 to RGB
2003 
2004  ycbcrToBgr(luma_ptr->data(), header, reinterpret_cast<uint8_t*>(&(aux_rgb_rect_image_.data[0])));
2005 
2006  const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, t, width, height);
2007 
2009 
2010  aux_rgb_rect_cam_info_pub_.publish(aux_camera_info);
2011  }
2012 
2013  break;
2014  }
2015  case Source_Chroma_Aux:
2016  {
2017  if (aux_rgb_cam_pub_.getNumSubscribers() == 0) {
2018  return;
2019  }
2020 
2021  const auto aux_luma = image_buffers_.find(Source_Luma_Aux);
2022  if (aux_luma == std::end(image_buffers_)) {
2023  return;
2024  }
2025 
2026  const auto luma_ptr = aux_luma->second;
2027 
2028  if (header.frameId == luma_ptr->data().frameId) {
2029  const uint32_t height = luma_ptr->data().height;
2030  const uint32_t width = luma_ptr->data().width;
2031  const uint32_t imageSize = 3 * height * width;
2032 
2033  aux_rgb_image_.data.resize(imageSize);
2034 
2035  aux_rgb_image_.header.frame_id = frame_id_aux_;
2036  aux_rgb_image_.header.stamp = t;
2037  aux_rgb_image_.height = height;
2038  aux_rgb_image_.width = width;
2039 
2041  aux_rgb_image_.is_bigendian = (htonl(1) == 1);
2042  aux_rgb_image_.step = 3 * width;
2043 
2044  //
2045  // Convert YCbCr 4:2:0 to RGB
2046 
2047  ycbcrToBgr(luma_ptr->data(), header, reinterpret_cast<uint8_t*>(&(aux_rgb_image_.data[0])));
2048 
2049  const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, t, width, height);
2050 
2052 
2053  aux_rgb_cam_info_pub_.publish(aux_camera_info);
2054 
2055  break;
2056  }
2057  }
2058  }
2059 }
2060 
2062 {
2063  if (Source_Luma_Rectified_Aux != header.source &&
2064  Source_Chroma_Rectified_Aux != header.source &&
2065  Source_Luma_Aux != header.source &&
2066  Source_Luma_Left != header.source &&
2067  Source_Chroma_Left != header.source &&
2068  Source_Luma_Rectified_Left != header.source) {
2069  ROS_WARN("Camera: unexpected colorized image source: 0x%lx", header.source);
2070  return;
2071  }
2072 
2073  image_buffers_[header.source] = std::make_shared<BufferWrapper<crl::multisense::image::Header>>(driver_, header);
2074 }
2075 
2077 {
2078  if (Source_Ground_Surface_Class_Image != header.source)
2079  {
2080  ROS_WARN("Camera: unexpected image source: 0x%lx", header.source);
2081  return;
2082  }
2083 
2084  const ros::Time t(header.timeSeconds, 1000 * header.timeMicroSeconds);
2085 
2087  {
2088  throw std::runtime_error("Uninitialized stereo calibration manager");
2089  }
2090 
2091  switch (header.source)
2092  {
2093  case Source_Ground_Surface_Class_Image:
2094  {
2095  const auto ground_surface_subscribers = ground_surface_cam_pub_.getNumSubscribers();
2096 
2097  if (ground_surface_subscribers == 0)
2098  return;
2099 
2100  const uint32_t height = header.height;
2101  const uint32_t width = header.width;
2102  const uint32_t imageSize = 3 * height * width;
2103 
2104  ground_surface_image_.data.resize(imageSize);
2105 
2107  ground_surface_image_.header.stamp = t;
2108  ground_surface_image_.height = height;
2109  ground_surface_image_.width = width;
2110 
2112  ground_surface_image_.is_bigendian = (htonl(1) == 1);
2113  ground_surface_image_.step = 3 * width;
2114 
2115  // Get pointer to output image
2116  uint8_t* output = reinterpret_cast<uint8_t*>(&(ground_surface_image_.data[0]));
2117 
2118  // Colorize image with classes
2119  const size_t rgb_stride = ground_surface_image_.width * 3;
2120 
2121  for(uint32_t y = 0; y < ground_surface_image_.height; ++y)
2122  {
2123  const size_t row_offset = y * rgb_stride;
2124 
2125  for(uint32_t x = 0; x < ground_surface_image_.width; ++x)
2126  {
2127  const uint8_t *imageP = reinterpret_cast<const uint8_t*>(header.imageDataP);
2128 
2129  const size_t image_offset = (y * ground_surface_image_.width) + x;
2130  memcpy(output + row_offset + (3 * x), ground_surface_utilities::groundSurfaceClassToPixelColor(imageP[image_offset]).data(), 3);
2131  }
2132  }
2133 
2135 
2136  // Publish info
2137  const auto ground_surface_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, t);
2138  ground_surface_info_pub_.publish(ground_surface_info);
2139 
2140  break;
2141  }
2142  }
2143 }
2144 
2146 {
2147  if (header.controlPointsBitsPerPixel != 32)
2148  {
2149  ROS_WARN("Expecting floats for spline control points, got %u bits per pixel instead", header.controlPointsBitsPerPixel);
2150  return;
2151  }
2152 
2153  if (!header.success)
2154  {
2155  ROS_WARN("Ground surface modelling failed, consider modifying camera extrinsics and/or algorithm parameters");
2156  return;
2157  }
2158 
2160  {
2161  throw std::runtime_error("Uninitialized stereo calibration manager");
2162  }
2163 
2164  // Convert row spline control point data to eigen matrix
2165  Eigen::Map<const Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>> controlGrid(
2166  reinterpret_cast<const float*>(header.controlPointsImageDataP), header.controlPointsHeight, header.controlPointsWidth);
2167 
2168  // Calculate frustum azimuth angles
2169  const auto config = stereo_calibration_manager_->config();
2170 
2171  float minMaxAzimuthAngle[2];
2172  minMaxAzimuthAngle[0] = M_PI_2 - atan(config.cx() / config.fx());
2173  minMaxAzimuthAngle[1] = M_PI_2 + atan((config.width() - config.cx()) / config.fx());
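  // From the pinhole model, atan(cx / fx) is the angle between the optical axis and the left edge of
  // the rectified image and atan((width - cx) / fx) the angle to the right edge; offsetting both from
  // M_PI_2 expresses the horizontal frustum limits as azimuth angles for the spline visualization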
2174 
2175  // Generate pointcloud for visualization
2177  controlGrid,
2180  header.xzCellOrigin,
2181  header.xzCellSize,
2182  minMaxAzimuthAngle,
2183  header.extrinsics,
2184  header.quadraticParams,
2185  config.tx());
2186 
2187  // Send pointcloud message
2189 }
2190 
2191 ros::Time Camera::convertImageTimestamp(uint32_t time_secs, uint32_t time_microsecs)
2192 {
2193  auto time_stamp = ros::Time(time_secs, 1000 * time_microsecs);
2194  if (ptp_time_sync_)
2195  {
2196  time_stamp += ros::Duration(ptp_time_offset_secs_);
2197  }
2198 
2199  return time_stamp;
2200 }
2201 
2203 {
2205  {
2206  throw std::runtime_error("Uninitialized stereo calibration manager");
2207  }
2208 
2209  stereo_calibration_manager_->updateConfig(config);
2210 
2211  //
2212  // Publish the "raw" config message
2213 
2214  multisense_ros::RawCamConfig cfg;
2215 
2216  cfg.width = config.width();
2217  cfg.height = config.height();
2218  cfg.frames_per_second = config.fps();
2219  cfg.gain = config.gain();
2220  cfg.exposure_time = config.exposure();
2221 
2222  cfg.fx = config.fx();
2223  cfg.fy = config.fy();
2224  cfg.cx = config.cx();
2225  cfg.cy = config.cy();
2226  cfg.tx = config.tx();
2227  cfg.ty = config.ty();
2228  cfg.tz = config.tz();
2229  cfg.roll = config.roll();
2230  cfg.pitch = config.pitch();
2231  cfg.yaw = config.yaw();
2232 
2234 
2235  //
2236  // Republish our camera info topics since the resolution changed
2237 
2239 }
2240 
2242 {
2243  stat.add("device name", device_info_.name);
2244  stat.add("build date", device_info_.buildDate);
2245  stat.add("serial number", device_info_.serialNumber);
2246  stat.add("device revision", device_info_.hardwareRevision);
2247 
2248  for(const auto &pcb : device_info_.pcbs)
2249  {
2250  stat.add("pcb: " + pcb.name, pcb.revision);
2251  }
2252 
2253  stat.add("imager name", device_info_.imagerName);
2254  stat.add("imager type", device_info_.imagerType);
2255  stat.add("imager width", device_info_.imagerWidth);
2256  stat.add("imager height", device_info_.imagerHeight);
2257 
2258  stat.add("lens name", device_info_.lensName);
2259  stat.add("lens type", device_info_.lensType);
2260  stat.add("nominal baseline", device_info_.nominalBaseline);
2261  stat.add("nominal focal length", device_info_.nominalFocalLength);
2262  stat.add("nominal relative aperture", device_info_.nominalRelativeAperture);
2263 
2264  stat.add("lighting type", device_info_.lightingType);
2265  stat.add("number of lights", device_info_.numberOfLights);
2266 
2267  stat.add("laser name", device_info_.laserName);
2268  stat.add("laser type", device_info_.laserType);
2269 
2270  stat.add("motor name", device_info_.motorName);
2271  stat.add("motor type", device_info_.motorType);
2272  stat.add("motor gear reduction", device_info_.motorGearReduction);
2273 
2274  stat.add("api build date", version_info_.apiBuildDate);
2275  stat.add("api version", version_info_.apiVersion);
2276  stat.add("firmware build date", version_info_.sensorFirmwareBuildDate);
2277  stat.add("firmware version", version_info_.sensorFirmwareVersion);
2278  stat.add("bitstream version", version_info_.sensorHardwareVersion);
2279  stat.add("bitstream magic", version_info_.sensorHardwareMagic);
2280  stat.add("fpga dna", version_info_.sensorFpgaDna);
2281  stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "MultiSense Device Info");
2282 }
2283 
2285 {
2287 
2288  if (crl::multisense::Status_Ok == driver_->getDeviceStatus(statusMessage))
2289  {
2290  stat.add("uptime", ros::Time(statusMessage.uptime));
2291  stat.add("system", statusMessage.systemOk);
2292  stat.add("laser", statusMessage.laserOk);
2293  stat.add("laser motor", statusMessage.laserMotorOk);
2294  stat.add("cameras", statusMessage.camerasOk);
2295  stat.add("imu", statusMessage.imuOk);
2296  stat.add("external leds", statusMessage.externalLedsOk);
2297  stat.add("processing pipeline", statusMessage.processingPipelineOk);
2298  stat.add("power supply temp", statusMessage.powerSupplyTemperature);
2299  stat.add("fpga temp", statusMessage.fpgaTemperature);
2300  stat.add("left imager temp", statusMessage.leftImagerTemperature);
2301  stat.add("right imager temp", statusMessage.rightImagerTemperature);
2302  stat.add("input voltage", statusMessage.inputVoltage);
2303  stat.add("input current", statusMessage.inputCurrent);
2304  stat.add("fpga power", statusMessage.fpgaPower);
2305  stat.add("logic power", statusMessage.logicPower);
2306  stat.add("imager power", statusMessage.imagerPower);
2307  stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "MultiSense Status: OK");
2308  }
2309  else
2310  {
2311  stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, "MultiSense Status: ERROR - Unable to retrieve status");
2312  }
2313 }
2314 
2316 {
2317  crl::multisense::system::PtpStatus ptpStatusMessage;
2318 
2319  if (crl::multisense::Status_Ok == driver_->getPtpStatus(ptpStatusMessage)) {
2320  stat.add("ptp_enabled", ptp_time_sync_);
2321  stat.add("ptp_status", ptpStatusMessage.gm_present == 1);
2322  stat.add("grandmaster_offset_ns", ptpStatusMessage.gm_offset);
2323  stat.add("path_delay_ns", ptpStatusMessage.path_delay);
2324  stat.add("steps_removed", ptpStatusMessage.steps_removed);
2325 
2326  if (ptpStatusMessage.gm_present)
2327  {
2328  stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "PTP Status: OK");
2329  }
2330  else
2331  {
2332  stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, "PTP Status: Not Synchronized");
2333  }
2334  }
2335  else
2336  {
2337  stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, "PTP Status: ERROR - Unable to retrieve PTP status");
2338  }
2339 }
2340 
2342 {
2344 }
2345 
2347 {
2348  const auto stamp = ros::Time::now();
2349 
2351  {
2352  throw std::runtime_error("Uninitialized stereo calibration manager");
2353  }
2354 
2355  const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, stamp);
2356  const auto right_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, stamp);
2357 
2358  const auto left_rectified_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_rectified_left_, stamp);
2359  const auto right_rectified_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_rectified_right_, stamp);
2360 
2361  //
2362  // Republish camera info messages outside of image callbacks.
2363  // The camera info publishers are latching so the messages
2364  // will persist until a new message is published in one of the image
2365  // callbacks. This makes it easier when a user is trying to access a camera_info
2366  // for a topic which they are not subscribed to
2367 
2368  if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) {
2369 
2370  left_mono_cam_info_pub_.publish(left_camera_info);
2371  left_rgb_cam_info_pub_.publish(left_camera_info);
2372  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
2373 
2374  } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision) {
2375 
2376  left_mono_cam_info_pub_.publish(left_camera_info);
2377  left_rect_cam_info_pub_.publish(left_rectified_camera_info);
2378  left_rgb_cam_info_pub_.publish(left_camera_info);
2379  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
2380 
2381  } else { // all other MultiSense-S* variations
2382 
2383  if (has_left_camera_) {
2384  left_mono_cam_info_pub_.publish(left_camera_info);
2385  left_rect_cam_info_pub_.publish(left_rectified_camera_info);
2386  }
2387 
2388  if (has_right_camera_) {
2389  right_mono_cam_info_pub_.publish(right_camera_info);
2390  right_rect_cam_info_pub_.publish(right_rectified_camera_info);
2391  }
2392 
2393  // only publish for stereo cameras
2395 
2396  left_disp_cam_info_pub_.publish(left_rectified_camera_info);
2397  depth_cam_info_pub_.publish(left_rectified_camera_info);
2398 
2399  if (version_info_.sensorFirmwareVersion >= 0x0300) {
2400 
2401  right_disp_cam_info_pub_.publish(right_rectified_camera_info);
2402  left_cost_cam_info_pub_.publish(left_rectified_camera_info);
2403  }
2404  }
2405 
2406  // handle color topic publishing if color data exists
2407  if (has_aux_camera_) {
2408 
2409  //
2410  // The mono aux camera will operate at the native aux camera resolution. The rectified cameras will match
2411  // the main stereo pair
2412 
2413  const auto stereoResolution = stereo_calibration_manager_->operatingStereoResolution();
2414  const auto auxResolution = stereo_calibration_manager_->operatingStereoResolution();
2415 
2416  aux_mono_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, stamp, auxResolution));
2417  aux_rect_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, stamp, stereoResolution));
2418 
2419  aux_rgb_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, stamp, auxResolution));
2421 
2422  } else if (has_color_ && !has_aux_camera_) { // !has_aux_camera_ is redundant, but leaving to prevent confusion over edge cases
2423 
2424  left_rgb_cam_info_pub_.publish(left_camera_info);
2425  left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info);
2426 
2427  }
2428  }
2429 }
2430 
2432 {
2433  std::lock_guard<std::mutex> lock(stream_lock_);
2434 
2435  stream_map_.clear();
2436 
2437  Status status = driver_->stopStreams(allImageSources);
2438  if (Status_Ok != status)
2439  ROS_ERROR("Camera: failed to stop all streams: %s",
2440  Channel::statusString(status));
2441 }
2442 
2444 {
2445  std::lock_guard<std::mutex> lock(stream_lock_);
2446 
2447  DataSource notStarted = 0;
2448 
2449  for(uint32_t i=0; i<32; i++)
2450  if ((1<<i) & enableMask && 0 == stream_map_[(1<<i)]++)
2451  notStarted |= (1<<i);
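  //
  // stream_map_ reference counts each source bit; a stream only needs to be started on the
  // 0 -> 1 transition for that bit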
2452 
2453  if (0 != notStarted) {
2454 
2455  Status status = driver_->startStreams(notStarted);
2456  if (Status_Ok != status)
2457  ROS_ERROR("Camera: failed to start streams 0x%lx: %s",
2458  notStarted, Channel::statusString(status));
2459  }
2460 }
2461 
2463 {
2464  std::lock_guard<std::mutex> lock(stream_lock_);
2465 
2466  DataSource notStopped = 0;
2467 
2468  for(uint32_t i=0; i<32; i++)
2469  if ((1<<i) & disableMask && 0 == --stream_map_[(1<<i)])
2470  notStopped |= (1<<i);
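  //
  // Mirror of connectStream(): decrement the per-source reference count and only stop a stream once
  // its count returns to zero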
2471 
2472  if (0 != notStopped) {
2473  Status status = driver_->stopStreams(notStopped);
2474  if (Status_Ok != status)
2475  ROS_ERROR("Camera: failed to stop streams 0x%lx: %s\n",
2476  notStopped, Channel::statusString(status));
2477  }
2478 }
2479 
2480 } // namespace
crl::multisense::system::StatusMessage::rightImagerTemperature
float rightImagerTemperature
multisense_ros::Camera::diagnostic_updater_
diagnostic_updater::Updater diagnostic_updater_
Definition: camera.h:372
crl::multisense::system::StatusMessage::fpgaPower
float fpgaPower
multisense_ros::Camera::left_rect_image_
sensor_msgs::Image left_rect_image_
Definition: camera.h:256
multisense_ros::Camera::disparity_right_transport_
image_transport::ImageTransport disparity_right_transport_
Definition: camera.h:187
multisense_ros::interpolateColor
cv::Vec3b interpolateColor(const Eigen::Vector2f &pixel, const cv::Mat &image)
Given a floating point pixel location, determine its average color using all neighboring pixels.
Definition: camera_utilities.cpp:540
multisense_ros::Camera::left_rgb_image_
sensor_msgs::Image left_rgb_image_
Definition: camera.h:266
crl::multisense::system::VersionInfo::sensorFirmwareBuildDate
std::string sensorFirmwareBuildDate
crl::multisense::system::DeviceInfo::name
std::string name
multisense_ros::Camera::borderClipChanged
void borderClipChanged(const BorderClip &borderClipType, double borderClipValue)
Definition: camera.cpp:764
crl::multisense::system::PtpStatus::gm_present
uint8_t gm_present
tf2_ros::StaticTransformBroadcaster::sendTransform
void sendTransform(const geometry_msgs::TransformStamped &transform)
multisense_ros::Camera::left_disparity_cost_image_
sensor_msgs::Image left_disparity_cost_image_
Definition: camera.h:273
multisense_ros::Camera::DISPARITY_TOPIC
static constexpr char DISPARITY_TOPIC[]
Definition: camera.h:124
multisense_ros::Camera::left_rgb_transport_
image_transport::ImageTransport left_rgb_transport_
Definition: camera.h:182
multisense_ros::Camera::driver_
crl::multisense::Channel * driver_
Definition: camera.h:163
multisense_ros::Camera::right_rect_cam_pub_
image_transport::CameraPublisher right_rect_cam_pub_
Definition: camera.h:201
multisense_ros::Camera::RECT_CAMERA_INFO_TOPIC
static constexpr char RECT_CAMERA_INFO_TOPIC[]
Definition: camera.h:136
crl::multisense::Status_Ok
static CRL_CONSTEXPR Status Status_Ok
multisense_ros::Camera::frame_id_aux_
const std::string frame_id_aux_
Definition: camera.h:305
multisense_ros::Camera::raw_cam_cal_pub_
ros::Publisher raw_cam_cal_pub_
Definition: camera.h:247
crl::multisense::system::PtpStatus::gm_offset
int64_t gm_offset
multisense_ros::Camera::frame_id_rectified_aux_
const std::string frame_id_rectified_aux_
Definition: camera.h:308
multisense_ros::Camera::border_clip_type_
BorderClip border_clip_type_
Definition: camera.h:332
crl::multisense::system::DeviceInfo::laserType
uint32_t laserType
crl::multisense::image::Histogram::data
std::vector< uint32_t > data
crl::multisense::Channel::getDeviceInfo
virtual Status getDeviceInfo(system::DeviceInfo &info)=0
msg
msg
ros::Publisher
crl::multisense::system::StatusMessage::externalLedsOk
bool externalLedsOk
crl::multisense::system::ExternalCalibration::yaw
float yaw
image_encodings.h
crl::multisense::system::VersionInfo::sensorHardwareVersion
uint64_t sensorHardwareVersion
multisense_ros::Camera::left_disparity_pub_
image_transport::Publisher left_disparity_pub_
Definition: camera.h:235
multisense_ros::Camera::left_rect_transport_
image_transport::ImageTransport left_rect_transport_
Definition: camera.h:180
multisense_ros::Camera::pointcloud_color_buffer_
std::vector< uint8_t > pointcloud_color_buffer_
Definition: camera.h:283
crl::multisense::Source_Luma_Rectified_Right
static CRL_CONSTEXPR DataSource Source_Luma_Rectified_Right
crl::multisense::system::ExternalCalibration
multisense_ros::Camera::ni_depth_cam_pub_
image_transport::Publisher ni_depth_cam_pub_
Definition: camera.h:203
multisense_ros::Camera::publishAllCameraInfo
void publishAllCameraInfo()
Definition: camera.cpp:2346
multisense_ros::Camera::colorImageCallback
void colorImageCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:1872
multisense_ros::Camera::depth_cam_info_pub_
ros::Publisher depth_cam_info_pub_
Definition: camera.h:221
multisense_ros::Camera::right_disparity_image_
sensor_msgs::Image right_disparity_image_
Definition: camera.h:274
crl::multisense::image::Config::fps
float fps() const
crl::multisense::system::DeviceInfo::pcbs
std::vector< PcbInfo > pcbs
multisense_ros::Camera::right_stereo_disparity_pub_
ros::Publisher right_stereo_disparity_pub_
Definition: camera.h:240
multisense_ros::Camera::left_rect_cam_pub_
image_transport::CameraPublisher left_rect_cam_pub_
Definition: camera.h:200
crl::multisense::image::Config::tx
float tx() const
distortion_models.h
multisense_ros::Camera::last_frame_id_
int64_t last_frame_id_
Definition: camera.h:327
multisense_ros::Camera::diagnostic_trigger_
ros::Timer diagnostic_trigger_
Definition: camera.h:378
image_transport::CameraPublisher::getNumSubscribers
uint32_t getNumSubscribers() const
multisense_ros::Camera::aux_rgb_cam_pub_
image_transport::Publisher aux_rgb_cam_pub_
Definition: camera.h:206
diagnostic_updater::DiagnosticStatusWrapper::add
void add(const std::string &key, const bool &b)
crl::multisense::system::DeviceInfo::lensName
std::string lensName
crl::multisense::system::ExternalCalibration::y
float y
crl::multisense::system::StatusMessage::inputCurrent
float inputCurrent
multisense_ros::Camera::aux_rgb_rect_cam_pub_
image_transport::CameraPublisher aux_rgb_rect_cam_pub_
Definition: camera.h:209
multisense_ros::Camera::aux_rgb_image_
sensor_msgs::Image aux_rgb_image_
Definition: camera.h:267
crl::multisense::system::DeviceInfo::lightingType
uint32_t lightingType
multisense_ros::Camera::stop
void stop()
Definition: camera.cpp:2431
multisense_ros::Camera::aux_rgb_rect_image_
sensor_msgs::Image aux_rgb_rect_image_
Definition: camera.h:270
multisense_ros::Camera::COST_TOPIC
static constexpr char COST_TOPIC[]
Definition: camera.h:128
multisense_ros::Camera::raw_cam_data_
multisense_ros::RawCamData raw_cam_data_
Definition: camera.h:281
multisense_ros::Camera::ptpStatusDiagnostic
void ptpStatusDiagnostic(diagnostic_updater::DiagnosticStatusWrapper &stat)
Definition: camera.cpp:2315
crl::multisense::Source_Luma_Left
static CRL_CONSTEXPR DataSource Source_Luma_Left
NONE
NONE
crl::multisense::system::StatusMessage::fpgaTemperature
float fpgaTemperature
multisense_ros::Camera::convertImageTimestamp
ros::Time convertImageTimestamp(uint32_t time_secs, uint32_t time_microsecs)
Definition: camera.cpp:2191
multisense_ros::Camera::groundSurfaceSplineCallback
void groundSurfaceSplineCallback(const crl::multisense::ground_surface::Header &header)
Definition: camera.cpp:2145
image_transport::Publisher::getNumSubscribers
uint32_t getNumSubscribers() const
multisense_ros::Camera::can_support_ground_surface_
bool can_support_ground_surface_
Definition: camera.h:368
multisense_ros::Camera::ptp_time_offset_secs_
int32_t ptp_time_offset_secs_
Definition: camera.h:385
multisense_ros::Camera::timeSyncChanged
void timeSyncChanged(bool ptp_enabled, int32_t ptp_time_offset_sec)
Definition: camera.cpp:778
multisense_ros::Camera::rawCamDataCallback
void rawCamDataCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:1831
crl::multisense::Source_Disparity
static CRL_CONSTEXPR DataSource Source_Disparity
multisense_ros::Camera::has_left_camera_
bool has_left_camera_
Definition: camera.h:353
multisense_ros::Camera::right_mono_transport_
image_transport::ImageTransport right_mono_transport_
Definition: camera.h:179
multisense_ros::Camera::right_nh_
ros::NodeHandle right_nh_
Definition: camera.h:170
multisense_ros::Camera::deviceStatusDiagnostic
void deviceStatusDiagnostic(diagnostic_updater::DiagnosticStatusWrapper &stat)
Definition: camera.cpp:2284
multisense_ros::Camera::right_disparity_pub_
image_transport::Publisher right_disparity_pub_
Definition: camera.h:236
multisense_ros::Camera::aux_mono_cam_pub_
image_transport::Publisher aux_mono_cam_pub_
Definition: camera.h:207
image_transport::CameraPublisher::publish
void publish(const sensor_msgs::Image &image, const sensor_msgs::CameraInfo &info) const
multisense_ros::Camera::frame_id_origin_
const std::string frame_id_origin_
Definition: camera.h:301
multisense_ros::Camera::aux_rect_image_
sensor_msgs::Image aux_rect_image_
Definition: camera.h:269
crl::multisense::image::Calibration::Data::P
float P[3][4]
multisense_ros::Camera::left_rgb_rect_transport_
image_transport::ImageTransport left_rgb_rect_transport_
Definition: camera.h:183
multisense_ros::Camera::version_info_
crl::multisense::system::VersionInfo version_info_
Definition: camera.h:289
multisense_ros::Camera::luma_organized_point_cloud_pub_
ros::Publisher luma_organized_point_cloud_pub_
Definition: camera.h:232
crl::multisense::Channel::removeIsolatedCallback
virtual Status removeIsolatedCallback(apriltag::Callback callback)=0
multisense_ros::Camera::colorizeCallback
void colorizeCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:2061
crl::multisense::Channel::getImageCalibration
virtual Status getImageCalibration(image::Calibration &c)=0
multisense_ros::Camera::RAW_CAM_CAL_TOPIC
static constexpr char RAW_CAM_CAL_TOPIC[]
Definition: camera.h:118
multisense_ros::Camera::right_rect_image_
sensor_msgs::Image right_rect_image_
Definition: camera.h:257
multisense_ros::Camera::aux_mono_cam_info_pub_
ros::Publisher aux_mono_cam_info_pub_
Definition: camera.h:222
crl::multisense::system::DeviceInfo::nominalBaseline
float nominalBaseline
multisense_ros::Camera::ground_surface_transport_
image_transport::ImageTransport ground_surface_transport_
Definition: camera.h:193
multisense_ros::Camera::aux_rgb_transport_
image_transport::ImageTransport aux_rgb_transport_
Definition: camera.h:190
multisense_ros::Camera::border_clip_value_
double border_clip_value_
Definition: camera.h:333
multisense_ros::Camera::left_mono_cam_info_pub_
ros::Publisher left_mono_cam_info_pub_
Definition: camera.h:212
crl::multisense::image::Config::roll
float roll() const
multisense_ros::Camera::aux_mono_transport_
image_transport::ImageTransport aux_mono_transport_
Definition: camera.h:189
sensor_msgs::image_encodings::RGB8
const std::string RGB8
multisense_ros::Camera::right_rect_transport_
image_transport::ImageTransport right_rect_transport_
Definition: camera.h:181
crl::multisense::image::Config::exposure
uint32_t exposure() const
multisense_ros::Camera::ground_surface_info_pub_
ros::Publisher ground_surface_info_pub_
Definition: camera.h:226
multisense_ros::Camera::disparity_left_transport_
image_transport::ImageTransport disparity_left_transport_
Definition: camera.h:186
multisense_ros::Camera::aux_mono_image_
sensor_msgs::Image aux_mono_image_
Definition: camera.h:265
crl::multisense::image::Calibration::aux
Data aux
crl::multisense::image::Calibration::Data::M
float M[3][3]
multisense_ros::Camera::RECT_COLOR_CAMERA_INFO_TOPIC
static constexpr char RECT_COLOR_CAMERA_INFO_TOPIC[]
Definition: camera.h:138
tf2::Matrix3x3::getIdentity
static const Matrix3x3 & getIdentity()
crl::multisense::image::Calibration::left
Data left
image_transport::ImageTransport::advertiseCamera
CameraPublisher advertiseCamera(const std::string &base_topic, uint32_t queue_size, bool latch=false)
ros::Publisher::publish
void publish(const boost::shared_ptr< M > &message) const
multisense_ros::Camera::RAW_CAM_CONFIG_TOPIC
static constexpr char RAW_CAM_CONFIG_TOPIC[]
Definition: camera.h:119
multisense_ros::Camera::frame_id_rectified_left_
const std::string frame_id_rectified_left_
Definition: camera.h:306
crl::multisense::Source_Chroma_Aux
static CRL_CONSTEXPR DataSource Source_Chroma_Aux
ros::NodeHandle::advertise
Publisher advertise(AdvertiseOptions &ops)
crl::multisense::image::Config::width
uint32_t width() const
multisense_ros::Camera::histogram_pub_
ros::Publisher histogram_pub_
Definition: camera.h:249
data
data
multisense_ros::Camera::left_rgb_rect_cam_pub_
image_transport::CameraPublisher left_rgb_rect_cam_pub_
Definition: camera.h:205
multisense_ros::Camera::pointCloudCallback
void pointCloudCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:1493
ground_surface_utilities::convertSplineToPointcloud
std::vector< Eigen::Vector3f, Eigen::aligned_allocator< Eigen::Vector3f > > convertSplineToPointcloud(const Eigen::Matrix< float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor > &controlGrid, const SplineDrawParameters &splineDrawParams, const double pointcloudMaxRange, const float *xzCellOrigin, const float *xzCellSize, const float *minMaxAzimuthAngle, const float *extrinsics, const float *quadraticParams, const float baseline)
Generate a pointcloud representation of a b-spline ground surface model for visualization.
Definition: ground_surface_utilities.cpp:200
multisense_ros::Camera::ground_surface_spline_pub_
ros::Publisher ground_surface_spline_pub_
Definition: camera.h:230
crl::multisense::Channel::startStreams
virtual Status startStreams(DataSource mask)=0
image_transport::ImageTransport::advertise
Publisher advertise(const std::string &base_topic, uint32_t queue_size, bool latch=false)
multisense_ros::Camera::left_stereo_disparity_pub_
ros::Publisher left_stereo_disparity_pub_
Definition: camera.h:239
crl::multisense::Source_Disparity_Right
static CRL_CONSTEXPR DataSource Source_Disparity_Right
crl::multisense::Source_Chroma_Left
static CRL_CONSTEXPR DataSource Source_Chroma_Left
multisense_ros::Camera::POINTCLOUD_TOPIC
static constexpr char POINTCLOUD_TOPIC[]
Definition: camera.h:131
multisense_ros::Camera::DEPTH_TOPIC
static constexpr char DEPTH_TOPIC[]
Definition: camera.h:126
crl::multisense::system::PtpStatus::path_delay
int64_t path_delay
multisense_ros::Camera::left_rgb_rect_image_
sensor_msgs::Image left_rgb_rect_image_
Definition: camera.h:268
multisense_ros::Camera::left_rect_cam_info_pub_
ros::Publisher left_rect_cam_info_pub_
Definition: camera.h:214
DataSource
DataSource
crl::multisense::system::DeviceInfo::imagerWidth
uint32_t imagerWidth
multisense_ros::Camera::GROUND_SURFACE_POINT_SPLINE_TOPIC
static constexpr char GROUND_SURFACE_POINT_SPLINE_TOPIC[]
Definition: camera.h:144
crl::multisense::Channel::getDeviceStatus
virtual Status getDeviceStatus(system::StatusMessage &status)=0
multisense_ros::clipPoint
bool clipPoint(const BorderClip &borderClipType, double borderClipValue, size_t height, size_t width, size_t u, size_t v)
Determine if a pixel should be clipped given the provided parameters.
Definition: camera_utilities.cpp:503
crl::multisense::image::Histogram::bins
uint32_t bins
multisense_ros::Camera::disconnectStream
void disconnectStream(crl::multisense::DataSource disableMask)
Definition: camera.cpp:2462
multisense_ros::Status
Definition: status.h:43
crl::multisense::system::VersionInfo::sensorHardwareMagic
uint64_t sensorHardwareMagic
multisense_ros::Camera::deviceInfoDiagnostic
void deviceInfoDiagnostic(diagnostic_updater::DiagnosticStatusWrapper &stat)
Definition: camera.cpp:2241
crl::multisense::image::Config
multisense_ros::Camera::frame_id_rectified_right_
const std::string frame_id_rectified_right_
Definition: camera.h:307
diagnostic_updater::DiagnosticStatusWrapper::summary
void summary(const diagnostic_msgs::DiagnosticStatus &src)
crl::multisense::system::StatusMessage::processingPipelineOk
bool processingPipelineOk
multisense_ros::Camera::extrinsicsChanged
void extrinsicsChanged(crl::multisense::system::ExternalCalibration extrinsics)
Definition: camera.cpp:784
point_cloud_utilities.h
multisense_ros::Camera::device_info_pub_
ros::Publisher device_info_pub_
Definition: camera.h:248
multisense_ros::Camera::aux_rgb_rect_cam_info_pub_
ros::Publisher aux_rgb_rect_cam_info_pub_
Definition: camera.h:225
crl::multisense::system::VersionInfo::apiBuildDate
std::string apiBuildDate
crl::multisense::image::Config::height
uint32_t height() const
crl::multisense::image::Calibration::Data::D
float D[8]
multisense_ros
Definition: camera.h:58
crl::multisense::image::Histogram
multisense_ros::Camera::aux_rect_cam_info_pub_
ros::Publisher aux_rect_cam_info_pub_
Definition: camera.h:224
crl::multisense::system::StatusMessage::powerSupplyTemperature
float powerSupplyTemperature
crl::multisense::system::DeviceInfo::serialNumber
std::string serialNumber
crl::multisense::system::ExternalCalibration::roll
float roll
multisense_ros::Camera::pointcloud_max_range_
double pointcloud_max_range_
Definition: camera.h:322
multisense_ros::Camera::COLOR_TOPIC
static constexpr char COLOR_TOPIC[]
Definition: camera.h:129
multisense_ros::Camera::color_organized_point_cloud_pub_
ros::Publisher color_organized_point_cloud_pub_
Definition: camera.h:233
multisense_ros::Camera::left_mono_transport_
image_transport::ImageTransport left_mono_transport_
Definition: camera.h:178
crl::multisense::Channel::getImageHistogram
virtual Status getImageHistogram(int64_t frameId, image::Histogram &histogram)=0
multisense_ros::Camera::left_rgb_cam_pub_
image_transport::Publisher left_rgb_cam_pub_
Definition: camera.h:204
multisense_ros::Camera::ground_surface_image_
sensor_msgs::Image ground_surface_image_
Definition: camera.h:279
multisense_ros::Camera::COLOR_POINTCLOUD_TOPIC
static constexpr char COLOR_POINTCLOUD_TOPIC[]
Definition: camera.h:132
multisense_ros::Camera::ORGANIZED_POINTCLOUD_TOPIC
static constexpr char ORGANIZED_POINTCLOUD_TOPIC[]
Definition: camera.h:133
multisense_ros::Camera::right_rect_cam_info_pub_
ros::Publisher right_rect_cam_info_pub_
Definition: camera.h:215
multisense_ros::Camera::ground_surface_nh_
ros::NodeHandle ground_surface_nh_
Definition: camera.h:173
crl::multisense::system::DeviceInfo::hardwareRevision
uint32_t hardwareRevision
multisense_ros::Camera::stream_lock_
std::mutex stream_lock_
Definition: camera.h:316
crl::multisense::image::Calibration
multisense_ros::Camera::has_color_
bool has_color_
Definition: camera.h:363
multisense_ros::Camera::left_rgb_rect_cam_info_pub_
ros::Publisher left_rgb_rect_cam_info_pub_
Definition: camera.h:220
multisense_ros::Camera::stereo_calibration_manager_
std::shared_ptr< StereoCalibrationManager > stereo_calibration_manager_
Definition: camera.h:296
crl::multisense::Source_Luma_Right
static CRL_CONSTEXPR DataSource Source_Luma_Right
crl::multisense::image::Config::gain
float gain() const
multisense_ros::Camera::depth_cam_pub_
image_transport::Publisher depth_cam_pub_
Definition: camera.h:202
multisense_ros::Camera::~Camera
~Camera()
Definition: camera.cpp:728
tf2::Transform
crl::multisense::Source_Luma_Aux
static CRL_CONSTEXPR DataSource Source_Luma_Aux
crl::multisense::system::DeviceInfo::buildDate
std::string buildDate
multisense_ros::Camera::luma_point_cloud_pub_
ros::Publisher luma_point_cloud_pub_
Definition: camera.h:228
crl::multisense::system::DeviceInfo::laserName
std::string laserName
crl::multisense::system::StatusMessage::systemOk
bool systemOk
multisense_ros::Camera::ptp_time_sync_
std::atomic_bool ptp_time_sync_
Definition: camera.h:384
crl::multisense::system::VersionInfo::apiVersion
VersionType apiVersion
multisense_ros::Camera::aux_rect_transport_
image_transport::ImageTransport aux_rect_transport_
Definition: camera.h:191
multisense_ros::Camera::has_aux_camera_
bool has_aux_camera_
Definition: camera.h:358
ROS_WARN
#define ROS_WARN(...)
crl::multisense::image::Config::yaw
float yaw() const
multisense_ros::Camera::right_stereo_disparity_
stereo_msgs::DisparityImage right_stereo_disparity_
Definition: camera.h:277
crl::multisense::system::StatusMessage::imagerPower
float imagerPower
multisense_ros::Camera::has_right_camera_
bool has_right_camera_
Definition: camera.h:348
multisense_ros::Camera::HISTOGRAM_TOPIC
static constexpr char HISTOGRAM_TOPIC[]
Definition: camera.h:121
crl::multisense::system::VersionInfo::sensorFirmwareVersion
VersionType sensorFirmwareVersion
multisense_ros::Camera::updateConfig
void updateConfig(const crl::multisense::image::Config &config)
Definition: camera.cpp:2202
multisense_ros::BorderClip
BorderClip
Definition: camera_utilities.h:62
crl::multisense::Channel::getImageConfig
virtual Status getImageConfig(image::Config &c)=0
multisense_ros::Camera::color_point_cloud_
sensor_msgs::PointCloud2 color_point_cloud_
Definition: camera.h:261
multisense_ros::Camera::COST_CAMERA_INFO_TOPIC
static constexpr char COST_CAMERA_INFO_TOPIC[]
Definition: camera.h:141
ground_surface_utilities::SplineDrawParameters
Struct containing parameters for drawing a pointcloud representation of a B-Spline model.
Definition: ground_surface_utilities.h:68
crl::multisense::Source_Disparity_Cost
static CRL_CONSTEXPR DataSource Source_Disparity_Cost
crl::multisense::Channel::getVersionInfo
virtual Status getVersionInfo(system::VersionInfo &v)=0
ros::TimerEvent
crl::multisense::system::DeviceInfo::motorType
uint32_t motorType
multisense_ros::Camera::groundSurfaceSplineDrawParametersChanged
void groundSurfaceSplineDrawParametersChanged(const ground_surface_utilities::SplineDrawParameters &spline_draw_params)
Definition: camera.cpp:814
multisense_ros::Camera::histogramCallback
void histogramCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:820
multisense_ros::Camera::RECT_COLOR_TOPIC
static constexpr char RECT_COLOR_TOPIC[]
Definition: camera.h:130
image_transport::Publisher::publish
void publish(const sensor_msgs::Image &message) const
multisense_ros::Camera::left_rgb_cam_info_pub_
ros::Publisher left_rgb_cam_info_pub_
Definition: camera.h:219
crl::multisense::system::PtpStatus::steps_removed
uint16_t steps_removed
crl::multisense::Channel::addIsolatedCallback
virtual Status addIsolatedCallback(apriltag::Callback callback, void *userDataP=NULL)=0
diagnostic_updater::Updater::setHardwareID
void setHardwareID(const std::string &hwid)
crl::multisense::system::StatusMessage::camerasOk
bool camerasOk
crl::multisense::system::StatusMessage::laserOk
bool laserOk
crl::multisense::system::DeviceInfo::motorName
std::string motorName
multisense_ros::Camera::RAW_CAM_DATA_TOPIC
static constexpr char RAW_CAM_DATA_TOPIC[]
Definition: camera.h:120
crl::multisense::system::StatusMessage::leftImagerTemperature
float leftImagerTemperature
crl::multisense::Channel::getPtpStatus
virtual Status getPtpStatus(system::PtpStatus &ptpStatus)=0
camera.h
multisense_ros::Camera::depth_image_
sensor_msgs::Image depth_image_
Definition: camera.h:258
multisense_ros::Camera::color_organized_point_cloud_
sensor_msgs::PointCloud2 color_organized_point_cloud_
Definition: camera.h:263
crl::multisense::image::Config::cx
float cx() const
multisense_ros::Camera::aux_rgb_cam_info_pub_
ros::Publisher aux_rgb_cam_info_pub_
Definition: camera.h:223
multisense_ros::Camera::disparityImageCallback
void disparityImageCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:920
multisense_ros::ycbcrToBgr
constexpr Eigen::Matrix< T, 3, 1 > ycbcrToBgr(const crl::multisense::image::Header &luma, const crl::multisense::image::Header &chroma, size_t u, size_t v)
Definition: camera_utilities.h:104
multisense_ros::Camera::disparity_cost_transport_
image_transport::ImageTransport disparity_cost_transport_
Definition: camera.h:188
crl::multisense::system::ExternalCalibration::pitch
float pitch
multisense_ros::Camera::rectCallback
void rectCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:1217
multisense_ros::Camera::connectStream
void connectStream(crl::multisense::DataSource enableMask)
Definition: camera.cpp:2443
multisense_ros::Camera::left_mono_cam_pub_
image_transport::Publisher left_mono_cam_pub_
Definition: camera.h:198
multisense_ros::Camera::luma_point_cloud_
sensor_msgs::PointCloud2 luma_point_cloud_
Definition: camera.h:260
diagnostic_updater::Updater::update
void update()
crl::multisense::system::StatusMessage::imuOk
bool imuOk
crl::multisense::Source_Jpeg_Left
static CRL_CONSTEXPR DataSource Source_Jpeg_Left
multisense_ros::Camera::COLOR_ORGANIZED_POINTCLOUD_TOPIC
static constexpr char COLOR_ORGANIZED_POINTCLOUD_TOPIC[]
Definition: camera.h:134
multisense_ros::Camera::ground_surface_cam_pub_
image_transport::Publisher ground_surface_cam_pub_
Definition: camera.h:210
multisense_ros::Camera::aux_rect_cam_pub_
image_transport::CameraPublisher aux_rect_cam_pub_
Definition: camera.h:208
multisense_ros::Camera::left_cost_cam_info_pub_
ros::Publisher left_cost_cam_info_pub_
Definition: camera.h:218
multisense_ros::Camera::jpegImageCallback
void jpegImageCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:855
multisense_ros::writePoint
void writePoint(sensor_msgs::PointCloud2 &pointcloud, const size_t index, const Eigen::Vector3f &point)
Definition: point_cloud_utilities.cpp:92
crl::multisense::system::VersionInfo::sensorFpgaDna
uint64_t sensorFpgaDna
crl::multisense::Channel
multisense_ros::Camera::RECT_TOPIC
static constexpr char RECT_TOPIC[]
Definition: camera.h:123
multisense_ros::Camera::depth_transport_
image_transport::ImageTransport depth_transport_
Definition: camera.h:184
multisense_ros::Camera::monoCallback
void monoCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:1105
crl::multisense::system::DeviceInfo::lensType
uint32_t lensType
multisense_ros::Camera::left_stereo_disparity_
stereo_msgs::DisparityImage left_stereo_disparity_
Definition: camera.h:276
ros::Time
multisense_ros::Camera::luma_organized_point_cloud_
sensor_msgs::PointCloud2 luma_organized_point_cloud_
Definition: camera.h:262
multisense_ros::Camera::GROUND_SURFACE_INFO_TOPIC
static constexpr char GROUND_SURFACE_INFO_TOPIC[]
Definition: camera.h:143
multisense_ros::Camera::image_buffers_
std::unordered_map< crl::multisense::DataSource, std::shared_ptr< BufferWrapper< crl::multisense::image::Header > > > image_buffers_
Definition: camera.h:343
sensor_msgs::image_encodings::TYPE_32FC1
const std::string TYPE_32FC1
crl::multisense::image::Config::fx
float fx() const
multisense_ros::Camera::spline_draw_params_
ground_surface_utilities::SplineDrawParameters spline_draw_params_
Definition: camera.h:338
multisense_ros::Camera::OPENNI_DEPTH_TOPIC
static constexpr char OPENNI_DEPTH_TOPIC[]
Definition: camera.h:127
multisense_ros::Camera::left_nh_
ros::NodeHandle left_nh_
Definition: camera.h:169
multisense_ros::Camera::MONO_TOPIC
static constexpr char MONO_TOPIC[]
Definition: camera.h:122
crl::multisense::system::DeviceInfo::nominalFocalLength
float nominalFocalLength
crl::multisense::system::ExternalCalibration::x
float x
crl::multisense::image::Calibration::Data::R
float R[3][3]
image_transport::Publisher
sensor_msgs::image_encodings::MONO8
const std::string MONO8
multisense_ros::Camera::raw_cam_config_pub_
ros::Publisher raw_cam_config_pub_
Definition: camera.h:246
multisense_ros::Camera::stream_map_
StreamMapType stream_map_
Definition: camera.h:317
crl::multisense::image::Config::tz
float tz() const
multisense_ros::Camera::maxPointCloudRangeChanged
void maxPointCloudRangeChanged(double range)
Definition: camera.cpp:773
sensor_msgs::image_encodings::MONO16
const std::string MONO16
crl::multisense::system::DeviceInfo::numberOfLights
uint32_t numberOfLights
multisense_ros::Camera::left_disp_cam_info_pub_
ros::Publisher left_disp_cam_info_pub_
Definition: camera.h:216
ROS_ERROR
#define ROS_ERROR(...)
multisense_ros::Camera::depthCallback
void depthCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:1348
multisense_ros::Camera::diagnosticTimerCallback
void diagnosticTimerCallback(const ros::TimerEvent &)
Definition: camera.cpp:2341
multisense_ros::Camera::left_mono_image_
sensor_msgs::Image left_mono_image_
Definition: camera.h:254
crl::multisense
crl::multisense::system::DeviceInfo::imagerName
std::string imagerName
crl::multisense::system::StatusMessage::laserMotorOk
bool laserMotorOk
multisense_ros::Camera::DISPARITY_CAMERA_INFO_TOPIC
static constexpr char DISPARITY_CAMERA_INFO_TOPIC[]
Definition: camera.h:140
multisense_ros::Camera::calibration_nh_
ros::NodeHandle calibration_nh_
Definition: camera.h:172
multisense_ros::Camera::frame_id_left_
const std::string frame_id_left_
Definition: camera.h:303
multisense_ros::Camera::COLOR_CAMERA_INFO_TOPIC
static constexpr char COLOR_CAMERA_INFO_TOPIC[]
Definition: camera.h:137
crl::multisense::image::Config::ty
float ty() const
ground_surface_utilities::eigenToPointcloud
sensor_msgs::PointCloud2 eigenToPointcloud(const std::vector< Eigen::Vector3f, Eigen::aligned_allocator< Eigen::Vector3f >> &input, const std::string &frame_id)
Convert an eigen representation of a pointcloud to a ROS sensor_msgs::PointCloud2 format.
Definition: ground_surface_utilities.cpp:181
multisense_ros::Camera::DEVICE_INFO_TOPIC
static constexpr char DEVICE_INFO_TOPIC[]
Definition: camera.h:117
diagnostic_updater::DiagnosticStatusWrapper
tf2_geometry_msgs.h
crl::multisense::system::DeviceInfo::nominalRelativeAperture
float nominalRelativeAperture
tf2::toMsg
B toMsg(const A &a)
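tf2::toMsg is the generic tf2 conversion entry point. Below is a hedged sketch of wrapping a tf2::Transform into a geometry_msgs::TransformStamped; the overload used is assumed to come from tf2_geometry_msgs, and the frame names are placeholders rather than the ones this node actually publishes.

#include <ros/ros.h>
#include <tf2/LinearMath/Transform.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <geometry_msgs/TransformStamped.h>

geometry_msgs::TransformStamped toStamped(const tf2::Transform &xform)
{
    geometry_msgs::TransformStamped t;
    t.header.stamp    = ros::Time::now();
    t.header.frame_id = "multisense/head";               // placeholder frame
    t.child_frame_id  = "multisense/left_camera_frame";  // placeholder frame
    t.transform       = tf2::toMsg(xform);               // tf2_geometry_msgs overload
    return t;
}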
crl::multisense::system::PtpStatus
crl::multisense::system::StatusMessage::inputVoltage
float inputVoltage
crl::multisense::Channel::stopStreams
virtual Status stopStreams(DataSource mask)=0
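Channel::stopStreams takes a DataSource bitmask, so stopping a subset of streams is a single call. A minimal sketch under that assumption follows; the particular mask and the function name are illustrative, and Channel::statusString is used here only to report failures.

#include <ros/ros.h>
#include <multisense_lib/MultiSenseChannel.hh>

using namespace crl::multisense;

void stopRectifiedStreams(Channel *channelP)
{
    const DataSource mask = Source_Luma_Rectified_Left |
                            Source_Luma_Rectified_Aux  |
                            Source_Chroma_Rectified_Aux;

    const Status status = channelP->stopStreams(mask);
    if (Status_Ok != status)
        ROS_ERROR("Camera: failed to stop streams: %s", Channel::statusString(status));
}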
multisense_ros::Camera::groundSurfaceCallback
void groundSurfaceCallback(const crl::multisense::image::Header &header)
Definition: camera.cpp:2076
sensor_msgs::image_encodings::BGR8
const std::string BGR8
crl::multisense::system::DeviceInfo::imagerType
uint32_t imagerType
crl::multisense::system::StatusMessage::uptime
double uptime
crl::multisense::system::DeviceInfo::motorGearReduction
float motorGearReduction
multisense_ros::Camera::aux_rgb_rect_transport_
image_transport::ImageTransport aux_rgb_rect_transport_
Definition: camera.h:192
multisense_ros::Camera::GROUND_SURFACE_IMAGE_TOPIC
static constexpr char GROUND_SURFACE_IMAGE_TOPIC[]
Definition: camera.h:142
ros::Publisher::getNumSubscribers
uint32_t getNumSubscribers() const
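getNumSubscribers is typically used to skip work when nothing is subscribed, so expensive messages are only assembled on demand. A small, hedged sketch of that pattern, with an illustrative publisher and message type:

#include <ros/ros.h>
#include <multisense_ros/Histogram.h>

void maybePublishHistogram(ros::Publisher &histogram_pub)
{
    if (histogram_pub.getNumSubscribers() == 0)
        return;                        // nobody listening: skip the work

    multisense_ros::Histogram msg;
    // ... fill msg from the most recent crl::multisense::image::Histogram ...
    histogram_pub.publish(msg);
}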
crl::multisense::image::Config::fy
float fy() const
multisense_ros::Camera::static_tf_broadcaster_
tf2_ros::StaticTransformBroadcaster static_tf_broadcaster_
Definition: camera.h:310
multisense_ros::Camera::device_info_
crl::multisense::system::DeviceInfo device_info_
Definition: camera.h:290
multisense_ros::Camera::right_mono_cam_info_pub_
ros::Publisher right_mono_cam_info_pub_
Definition: camera.h:213
ground_surface_utilities::groundSurfaceClassToPixelColor
Eigen::Matrix< uint8_t, 3, 1 > groundSurfaceClassToPixelColor(const uint8_t value)
Look-up table to convert a ground surface class value into a color for visualization (out-of-bou...
Definition: ground_surface_utilities.cpp:168
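A hedged sketch of using the class-to-color look-up above to expand an 8-bit class image into interleaved 3-channel pixels; the include path and the assumption that classes arrive as a flat row-major buffer are mine, not from the source.

#include <cstdint>
#include <vector>
#include <Eigen/Core>
#include <multisense_ros/ground_surface_utilities.h>  // include path assumed

std::vector<uint8_t> colorizeClassImage(const std::vector<uint8_t> &classes)
{
    std::vector<uint8_t> color;
    color.reserve(classes.size() * 3);

    for (const uint8_t c : classes)
    {
        const Eigen::Matrix<uint8_t, 3, 1> px =
            ground_surface_utilities::groundSurfaceClassToPixelColor(c);
        color.push_back(px[0]);
        color.push_back(px[1]);
        color.push_back(px[2]);
    }
    return color;
}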
crl::multisense::system::ExternalCalibration::z
float z
multisense_ros::Camera::right_mono_cam_pub_
image_transport::Publisher right_mono_cam_pub_
Definition: camera.h:199
crl::multisense::image::Calibration::right
Data right
crl::multisense::Source_Chroma_Rectified_Aux
static CRL_CONSTEXPR DataSource Source_Chroma_Rectified_Aux
multisense_ros::Camera::aux_nh_
ros::NodeHandle aux_nh_
Definition: camera.h:171
multisense_ros::Camera::right_mono_image_
sensor_msgs::Image right_mono_image_
Definition: camera.h:255
tf2::Matrix3x3
crl::multisense::image::Config::cy
float cy() const
header
const std::string header
multisense_ros::Camera::device_nh_
ros::NodeHandle device_nh_
Definition: camera.h:168
ROS_INFO
#define ROS_INFO(...)
crl::multisense::ground_surface::Header
multisense_ros::Camera::ni_depth_image_
sensor_msgs::Image ni_depth_image_
Definition: camera.h:259
multisense_ros::Camera::ni_depth_transport_
image_transport::ImageTransport ni_depth_transport_
Definition: camera.h:185
diagnostic_updater::DiagnosticTaskVector::add
void add(const std::string &name, TaskFunction f)
multisense_ros::Camera::color_point_cloud_pub_
ros::Publisher color_point_cloud_pub_
Definition: camera.h:229
crl::multisense::system::DeviceInfo::imagerHeight
uint32_t imagerHeight
multisense_ros::Camera::MONO_CAMERA_INFO_TOPIC
static constexpr char MONO_CAMERA_INFO_TOPIC[]
Definition: camera.h:135
ros::NodeHandle::createTimer
Timer createTimer(Duration period, const TimerCallback &callback, bool oneshot=false, bool autostart=true) const
ros::Duration
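createTimer with a ros::Duration period is how a periodic diagnostic callback such as diagnosticTimerCallback is typically registered. The sketch below assumes a one-second period and uses invented class and member names.

#include <ros/ros.h>

class DiagnosticsExample
{
public:
    explicit DiagnosticsExample(ros::NodeHandle &nh)
    {
        // Member-function overload: void (DiagnosticsExample::*)(const ros::TimerEvent &)
        timer_ = nh.createTimer(ros::Duration(1.0), &DiagnosticsExample::onTimer, this);
    }

private:
    void onTimer(const ros::TimerEvent &)
    {
        ROS_INFO("periodic diagnostics tick");
    }

    ros::Timer timer_;
};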
multisense_ros::Camera::frame_id_right_
const std::string frame_id_right_
Definition: camera.h:304
crl::multisense::image::Histogram::channels
uint32_t channels
multisense_ros::Camera::DISPARITY_IMAGE_TOPIC
static constexpr char DISPARITY_IMAGE_TOPIC[]
Definition: camera.h:125
t
geometry_msgs::TransformStamped t
Transform.h
crl::multisense::image::Config::pitch
float pitch() const
multisense_ros::Camera::raw_cam_data_pub_
ros::Publisher raw_cam_data_pub_
Definition: camera.h:245
multisense_ros::Camera::DEPTH_CAMERA_INFO_TOPIC
static constexpr char DEPTH_CAMERA_INFO_TOPIC[]
Definition: camera.h:139
multisense_ros::Camera::pointcloud_rect_color_buffer_
std::vector< uint8_t > pointcloud_rect_color_buffer_
Definition: camera.h:284
multisense_ros::Camera::right_disp_cam_info_pub_
ros::Publisher right_disp_cam_info_pub_
Definition: camera.h:217
crl::multisense::system::StatusMessage::logicPower
float logicPower
crl::multisense::system::StatusMessage
crl::multisense::image::Header
multisense_ros::Camera::left_disparity_image_
sensor_msgs::Image left_disparity_image_
Definition: camera.h:272
crl::multisense::Source_Luma_Rectified_Left
static CRL_CONSTEXPR DataSource Source_Luma_Rectified_Left
ros::Time::now
static Time now()
multisense_ros::Camera::left_disparity_cost_pub_
image_transport::Publisher left_disparity_cost_pub_
Definition: camera.h:237
crl::multisense::Source_Luma_Rectified_Aux
static CRL_CONSTEXPR DataSource Source_Luma_Rectified_Aux


multisense_ros
Author(s):
autogenerated on Thu Apr 17 2025 02:49:24