base_nodelet.cpp
/******************************************************************************
 Copyright (c) 2017, Intel Corporation
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:

 1. Redistributions of source code must retain the above copyright notice, this
 list of conditions and the following disclaimer.

 2. Redistributions in binary form must reproduce the above copyright notice,
 this list of conditions and the following disclaimer in the documentation
 and/or other materials provided with the distribution.

 3. Neither the name of the copyright holder nor the names of its contributors
 may be used to endorse or promote products derived from this software without
 specific prior written permission.

 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *******************************************************************************/
#include <string>
#include <algorithm>
#include <vector>
#include <map>
#include <realsense_camera/base_nodelet.h>  // NOTE: assumed path for the BaseNodelet declaration header

using cv::Mat;
using cv::Scalar;
using std::string;
using std::vector;

namespace realsense_camera
{
  const std::map<std::string, std::string> CAMERA_NAME_TO_VALIDATED_FIRMWARE(MAP_START_VALUES,
      MAP_START_VALUES + MAP_START_VALUES_SIZE);

  /*
   * Nodelet Destructor.
   */
  BaseNodelet::~BaseNodelet()
  {
    try
    {
      if (enable_tf_ == true && enable_tf_dynamic_ == true)
      {
        transform_thread_->join();
      }

      stopCamera();

      if (rs_context_)
      {
        rs_delete_context(rs_context_, &rs_error_);
        rs_context_ = NULL;
        checkError();
      }

      // Kill all old system process groups
      while (!system_proc_groups_.empty())
      {
        killpg(system_proc_groups_.front(), SIGHUP);
        system_proc_groups_.pop();
      }

      ROS_INFO_STREAM(nodelet_name_ << " - Stopping...");
      if (!ros::isShuttingDown())
      {
        ros::shutdown();
      }
    }
    catch(const std::exception& ex)
    {
      ROS_ERROR_STREAM(ex.what());
    }
    catch(...)
    {
      ROS_ERROR_STREAM("Unknown exception has occurred!");
    }
  }

  /*
   * Initialize the nodelet.
   */
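  // onInit() reads the parameters first, then polls every 5 seconds until connectToCamera()
  // succeeds. Topics, services and the dynamic_reconfigure server are set up before the streams
  // are configured and the camera is started; transforms are published only when enable_tf is set,
  // either once (static) or from a dedicated thread (dynamic).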
  void BaseNodelet::onInit() try
  {
    getParameters();

    if (enable_[RS_STREAM_DEPTH] == false && enable_[RS_STREAM_COLOR] == false)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - None of the streams are enabled. Exiting!");
      ros::shutdown();
    }

    while (false == connectToCamera())  // Poll for camera and connect if found
    {
      ROS_INFO_STREAM(nodelet_name_ << " - Sleeping 5 seconds then retrying to connect");
      ros::Duration(5).sleep();
    }

    advertiseTopics();
    advertiseServices();
    std::vector<std::string> dynamic_params = setDynamicReconfServer();
    getCameraOptions();
    setStaticCameraOptions(dynamic_params);
    setStreams();
    startCamera();

    // Start transforms thread
    if (enable_tf_ == true)
    {
      getCameraExtrinsics();

      if (enable_tf_dynamic_ == true)
      {
        transform_thread_ =
            boost::shared_ptr<boost::thread>(new boost::thread(boost::bind(&BaseNodelet::prepareTransforms, this)));
      }
      else
      {
        publishStaticTransforms();
      }
    }

    // Start dynamic reconfigure callback
    startDynamicReconfCallback();
  }
  catch(const rs::error & e)
  {
    ROS_ERROR_STREAM(nodelet_name_ << " - " << "RealSense error calling "
        << e.get_failed_function() << "(" << e.get_failed_args() << "):\n "
        << e.what());
    ros::shutdown();
  }
  catch(const std::exception & e)
  {
    ROS_ERROR_STREAM(nodelet_name_ << " - " << e.what());
    ros::shutdown();
  }
  catch(...)
  {
    ROS_ERROR_STREAM(nodelet_name_ << " - Caught unknown exception...shutting down!");
    ros::shutdown();
  }

  /*
   * Get the nodelet parameters.
   */
  void BaseNodelet::getParameters()
  {
    nodelet_name_ = getName();
    nh_ = getNodeHandle();
    pnh_ = getPrivateNodeHandle();
    pnh_.getParam("serial_no", serial_no_);
    pnh_.getParam("usb_port_id", usb_port_id_);
    pnh_.getParam("camera_type", camera_type_);
    pnh_.param("mode", mode_, DEFAULT_MODE);
    pnh_.param("enable_depth", enable_[RS_STREAM_DEPTH], ENABLE_DEPTH);
    pnh_.param("enable_color", enable_[RS_STREAM_COLOR], ENABLE_COLOR);
    pnh_.param("enable_ir", enable_[RS_STREAM_INFRARED], ENABLE_IR);
    pnh_.param("enable_pointcloud", enable_pointcloud_, ENABLE_PC);
    pnh_.param("enable_tf", enable_tf_, ENABLE_TF);
    pnh_.param("enable_tf_dynamic", enable_tf_dynamic_, ENABLE_TF_DYNAMIC);
    pnh_.param("tf_publication_rate", tf_publication_rate_, TF_PUBLICATION_RATE);
    pnh_.param("depth_width", width_[RS_STREAM_DEPTH], DEPTH_WIDTH);
    pnh_.param("depth_height", height_[RS_STREAM_DEPTH], DEPTH_HEIGHT);
    pnh_.param("color_width", width_[RS_STREAM_COLOR], COLOR_WIDTH);
    pnh_.param("color_height", height_[RS_STREAM_COLOR], COLOR_HEIGHT);
    pnh_.param("depth_fps", fps_[RS_STREAM_DEPTH], DEPTH_FPS);
    pnh_.param("color_fps", fps_[RS_STREAM_COLOR], COLOR_FPS);
    pnh_.param("base_frame_id", base_frame_id_, DEFAULT_BASE_FRAME_ID);
    pnh_.param("depth_frame_id", frame_id_[RS_STREAM_DEPTH], DEFAULT_DEPTH_FRAME_ID);
    pnh_.param("color_frame_id", frame_id_[RS_STREAM_COLOR], DEFAULT_COLOR_FRAME_ID);
    pnh_.param("ir_frame_id", frame_id_[RS_STREAM_INFRARED], DEFAULT_IR_FRAME_ID);
    pnh_.param("depth_optical_frame_id", optical_frame_id_[RS_STREAM_DEPTH], DEFAULT_DEPTH_OPTICAL_FRAME_ID);
    pnh_.param("color_optical_frame_id", optical_frame_id_[RS_STREAM_COLOR], DEFAULT_COLOR_OPTICAL_FRAME_ID);
    pnh_.param("ir_optical_frame_id", optical_frame_id_[RS_STREAM_INFRARED], DEFAULT_IR_OPTICAL_FRAME_ID);

    // set IR stream to match depth
    width_[RS_STREAM_INFRARED] = width_[RS_STREAM_DEPTH];
    height_[RS_STREAM_INFRARED] = height_[RS_STREAM_DEPTH];
    fps_[RS_STREAM_INFRARED] = fps_[RS_STREAM_DEPTH];
  }

  /*
   * Connect to camera.
   */
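  // A librealsense context is created on every attempt; if no device of the requested camera_type
  // is present, or several matches exist without a serial_no / usb_port_id to disambiguate, the
  // context is released and false is returned so onInit() can retry after a delay.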
  bool BaseNodelet::connectToCamera()
  {
    rs_context_ = rs_create_context(RS_API_VERSION, &rs_error_);
    if (rs_error_)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - No cameras detected!");
    }
    checkError();

    int num_of_cameras = rs_get_device_count(rs_context_, &rs_error_);
    checkError();

    // Exit with error if no cameras are connected.
    if (num_of_cameras < 1)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - No cameras detected!");
      rs_delete_context(rs_context_, &rs_error_);
      rs_context_ = NULL;
      checkError();
      return false;
    }

    // Print list of all cameras found
    std::vector<int> camera_type_index = listCameras(num_of_cameras);

    // Exit with error if no cameras of correct type are connected.
    if (camera_type_index.size() < 1)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - No '" << camera_type_ << "' cameras detected!");
      rs_delete_context(rs_context_, &rs_error_);
      rs_context_ = NULL;
      checkError();
      return false;
    }

    // Exit with error if no serial_no or usb_port_id is specified and multiple cameras are detected.
    if (serial_no_.empty() && usb_port_id_.empty() && camera_type_index.size() > 1)
    {
      ROS_ERROR_STREAM(nodelet_name_ <<
          " - Multiple cameras of same type detected but no input serial_no or usb_port_id specified");
      rs_delete_context(rs_context_, &rs_error_);
      rs_context_ = NULL;
      checkError();
      return false;
    }

    // init rs_device_ before starting loop
    rs_device_ = nullptr;

    // find camera
    for (int i : camera_type_index)
    {
      rs_device* rs_detected_device = rs_get_device(rs_context_, i, &rs_error_);
      checkError();

      // check serial_no and usb_port_id
      if ((serial_no_.empty() || serial_no_ == rs_get_device_serial(rs_detected_device, &rs_error_)) &&
          (usb_port_id_.empty() || usb_port_id_ == rs_get_device_usb_port_id(rs_detected_device, &rs_error_)))
      {
        // device found
        rs_device_ = rs_detected_device;
        break;
      }
      // continue loop
    }

    if (rs_device_ == nullptr)
    {
      // camera not found
      string error_msg = " - Couldn't find camera to connect with ";
      error_msg += "serial_no = " + serial_no_ + ", ";
      error_msg += "usb_port_id = " + usb_port_id_;

      ROS_ERROR_STREAM(nodelet_name_ << error_msg);

      rs_delete_context(rs_context_, &rs_error_);
      rs_context_ = NULL;
      checkError();
      return false;
    }

    // print device info
    ROS_INFO_STREAM(nodelet_name_ << " - Connecting to camera with Serial No: " <<
        rs_get_device_serial(rs_device_, &rs_error_) <<
        ", USB Port ID: " << rs_get_device_usb_port_id(rs_device_, &rs_error_));
    checkError();
    return true;
  }

  /*
   * List details of the detected cameras.
   */
  std::vector<int> BaseNodelet::listCameras(int num_of_cameras)
  {
    // print list of detected cameras
    std::vector<int> camera_type_index;

    for (int i = 0; i < num_of_cameras; i++)
    {
      std::string detected_camera_msg = " - Detected the following camera:";
      std::string warning_msg = " - Detected unvalidated firmware:";
      // get device
      rs_device* rs_detected_device = rs_get_device(rs_context_, i, &rs_error_);

      // get camera serial number
      std::string camera_serial_number = rs_get_device_serial(rs_detected_device, &rs_error_);
      checkError();

      // get camera name
      std::string camera_name = rs_get_device_name(rs_detected_device, &rs_error_);
      checkError();

      // get camera firmware
      std::string camera_fw = rs_get_device_firmware_version(rs_detected_device, &rs_error_);
      checkError();

      if (camera_name.find(camera_type_) != std::string::npos)
      {
        camera_type_index.push_back(i);
      }
      // print camera details
      detected_camera_msg = detected_camera_msg +
          "\n\t\t\t\t- Serial No: " + camera_serial_number + ", USB Port ID: " +
          rs_get_device_usb_port_id(rs_detected_device, &rs_error_) +
          ", Name: " + camera_name +
          ", Camera FW: " + camera_fw;
      checkError();

      std::string camera_warning_msg = checkFirmwareValidation("camera", camera_fw, camera_name, camera_serial_number);

      if (!camera_warning_msg.empty())
      {
        warning_msg = warning_msg + "\n\t\t\t\t- " + camera_warning_msg;
      }

      if (rs_supports(rs_detected_device, RS_CAPABILITIES_ADAPTER_BOARD, &rs_error_))
      {
        const char * adapter_fw = rs_get_device_info(rs_detected_device,
            RS_CAMERA_INFO_ADAPTER_BOARD_FIRMWARE_VERSION, &rs_error_);
        checkError();
        detected_camera_msg = detected_camera_msg + ", Adapter FW: " + adapter_fw;
        std::string adapter_warning_msg = checkFirmwareValidation("adapter", adapter_fw, camera_name,
            camera_serial_number);
        if (!adapter_warning_msg.empty())
        {
          warning_msg = warning_msg + "\n\t\t\t\t- " + adapter_warning_msg;
        }
      }

      if (rs_supports(rs_detected_device, RS_CAPABILITIES_MOTION_EVENTS, &rs_error_))
      {
        const char * motion_module_fw = rs_get_device_info(rs_detected_device,
            RS_CAMERA_INFO_MOTION_MODULE_FIRMWARE_VERSION, &rs_error_);
        checkError();
        detected_camera_msg = detected_camera_msg + ", Motion Module FW: " + motion_module_fw;
        std::string motion_module_warning_msg = checkFirmwareValidation("motion_module", motion_module_fw, camera_name,
            camera_serial_number);
        if (!motion_module_warning_msg.empty())
        {
          warning_msg = warning_msg + "\n\t\t\t\t- " + motion_module_warning_msg;
        }
      }
      ROS_INFO_STREAM(nodelet_name_ + detected_camera_msg);
      if (warning_msg != " - Detected unvalidated firmware:")
      {
        ROS_WARN_STREAM(nodelet_name_ + warning_msg);
      }
    }

    return camera_type_index;
  }

  /*
   * Advertise topics.
   */
  void BaseNodelet::advertiseTopics()
  {
    ros::NodeHandle color_nh(nh_, COLOR_NAMESPACE);
    image_transport::ImageTransport color_image_transport(color_nh);
    camera_publisher_[RS_STREAM_COLOR] = color_image_transport.advertiseCamera(COLOR_TOPIC, 1);

    ros::NodeHandle depth_nh(nh_, DEPTH_NAMESPACE);
    image_transport::ImageTransport depth_image_transport(depth_nh);
    camera_publisher_[RS_STREAM_DEPTH] = depth_image_transport.advertiseCamera(DEPTH_TOPIC, 1);
    pointcloud_publisher_ = depth_nh.advertise<sensor_msgs::PointCloud2>(PC_TOPIC, 1);

    ros::NodeHandle ir_nh(nh_, IR_NAMESPACE);
    image_transport::ImageTransport ir_image_transport(ir_nh);
    camera_publisher_[RS_STREAM_INFRARED] = ir_image_transport.advertiseCamera(IR_TOPIC, 1);
  }

  /*
   * Advertise services.
   */
  void BaseNodelet::advertiseServices()
  {
    get_options_service_ = pnh_.advertiseService(SETTINGS_SERVICE,
        &BaseNodelet::getCameraOptionValues, this);
    set_power_service_ = pnh_.advertiseService(CAMERA_SET_POWER_SERVICE,
        &BaseNodelet::setPowerCameraService, this);
    force_power_service_ = pnh_.advertiseService(CAMERA_FORCE_POWER_SERVICE,
        &BaseNodelet::forcePowerCameraService, this);
    is_powered_service_ = pnh_.advertiseService(CAMERA_IS_POWERED_SERVICE,
        &BaseNodelet::isPoweredCameraService, this);
  }

  /*
   * Get the latest values of the camera options.
   */
  bool BaseNodelet::getCameraOptionValues(realsense_camera::CameraConfiguration::Request & req,
      realsense_camera::CameraConfiguration::Response & res)
  {
    std::string get_options_result_str;
    std::string opt_name, opt_value;

    for (CameraOptions o : camera_options_)
    {
      opt_name = rs_option_to_string(o.opt);
      std::transform(opt_name.begin(), opt_name.end(), opt_name.begin(), ::tolower);
      o.value = rs_get_device_option(rs_device_, o.opt, 0);
      opt_value = boost::lexical_cast<std::string>(o.value);
      get_options_result_str += opt_name + ":" + opt_value + ";";
    }

    res.configuration_str = get_options_result_str;
    return true;
  }

  /*
   * Check for Nodelet subscribers.
   */
  bool BaseNodelet::checkForSubscriber()
  {
    for (int index = 0; index < STREAM_COUNT; index++)
    {
      if (camera_publisher_[index].getNumSubscribers() > 0)
      {
        return true;
      }
    }
    if (pointcloud_publisher_.getNumSubscribers() > 0)
    {
      return true;
    }
    return false;
  }

  /*
   * Service to check if camera is powered on or not.
   */
  bool BaseNodelet::isPoweredCameraService(realsense_camera::IsPowered::Request & req,
      realsense_camera::IsPowered::Response & res)
  {
    if (rs_is_device_streaming(rs_device_, 0) == 1)
    {
      res.is_powered = true;
    }
    else
    {
      res.is_powered = false;
    }
    return true;
  }

  /*
   * Set Power Camera service.
   */
  bool BaseNodelet::setPowerCameraService(realsense_camera::SetPower::Request & req,
      realsense_camera::SetPower::Response & res)
  {
    res.success = true;

    if (req.power_on == true)
    {
      start_camera_ = true;
      start_stop_srv_called_ = true;
    }
    else
    {
      if (rs_is_device_streaming(rs_device_, 0) == 0)
      {
        ROS_INFO_STREAM(nodelet_name_ << " - Camera is already Stopped");
      }
      else
      {
        if (checkForSubscriber() == false)
        {
          start_camera_ = false;
          start_stop_srv_called_ = true;
        }
        else
        {
          ROS_INFO_STREAM(nodelet_name_ << " - Cannot stop the camera. Nodelet has subscriber.");
          res.success = false;
        }
      }
    }
    return res.success;
  }

  /*
   * Force Power Camera service.
   */
  bool BaseNodelet::forcePowerCameraService(realsense_camera::ForcePower::Request & req,
      realsense_camera::ForcePower::Response & res)
  {
    start_camera_ = req.power_on;
    start_stop_srv_called_ = true;
    return true;
  }

  /*
   * Get the options supported by the camera along with their min, max and step values.
   */
  void BaseNodelet::getCameraOptions()
  {
    for (int i = 0; i < RS_OPTION_COUNT; ++i)
    {
      CameraOptions o = { (rs_option) i };

      if (rs_device_supports_option(rs_device_, o.opt, &rs_error_))
      {
        rs_get_device_option_range(rs_device_, o.opt, &o.min, &o.max, &o.step, 0);

        // Skip the camera options where min and max values are the same.
        if (o.min != o.max)
        {
          o.value = rs_get_device_option(rs_device_, o.opt, 0);
          camera_options_.push_back(o);
        }
      }
    }
  }

  /*
   * Set the static camera options.
   */
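  // Any supported option that is not handled by dynamic_reconfigure is looked up on the private
  // parameter server; the value found there is clamped to the option's [min, max] range before it
  // is written to the device with rs_set_device_option().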
  void BaseNodelet::setStaticCameraOptions(std::vector<std::string> dynamic_params)
  {
    ROS_INFO_STREAM(nodelet_name_ << " - Setting static camera options");

    // Iterate through the supported camera options
    for (CameraOptions o : camera_options_)
    {
      std::string opt_name = rs_option_to_string(o.opt);
      bool found = false;

      for (std::string param_name : dynamic_params)
      {
        std::transform(opt_name.begin(), opt_name.end(), opt_name.begin(), ::tolower);
        if (opt_name.compare(param_name) == 0)
        {
          found = true;
          break;
        }
      }
      // Skip the dynamic options and set only the static camera options.
      if (found == false)
      {
        std::string key;
        double val;

        if (pnh_.searchParam(opt_name, key))
        {
          double opt_val;
          pnh_.getParam(key, val);

          // Validate and set the input values within the min-max range
          if (val < o.min)
          {
            opt_val = o.min;
          }
          else if (val > o.max)
          {
            opt_val = o.max;
          }
          else
          {
            opt_val = val;
          }
          ROS_INFO_STREAM(nodelet_name_ << " - Setting camera option " << opt_name << " = " << opt_val);
          rs_set_device_option(rs_device_, o.opt, opt_val, &rs_error_);
          checkError();
        }
      }
    }
  }

  /*
   * Set up the callbacks for the camera streams.
   */
  void BaseNodelet::setFrameCallbacks()
  {
    depth_frame_handler_ = [&](rs::frame frame)  // NOLINT(build/c++11)
    {
      publishTopic(RS_STREAM_DEPTH, frame);

      if (enable_pointcloud_ == true)
      {
        publishPCTopic();
      }
    };

    color_frame_handler_ = [&](rs::frame frame)  // NOLINT(build/c++11)
    {
      publishTopic(RS_STREAM_COLOR, frame);
    };

    ir_frame_handler_ = [&](rs::frame frame)  // NOLINT(build/c++11)
    {
      publishTopic(RS_STREAM_INFRARED, frame);
    };

    rs_set_frame_callback_cpp(rs_device_, RS_STREAM_DEPTH, new rs::frame_callback(depth_frame_handler_), &rs_error_);
    checkError();

    rs_set_frame_callback_cpp(rs_device_, RS_STREAM_COLOR, new rs::frame_callback(color_frame_handler_), &rs_error_);
    checkError();

    // Need to add this check due to a bug in librealsense which calls the IR callback
    // if the INFRARED stream is disabled AND the INFRARED2 stream is enabled
    // https://github.com/IntelRealSense/librealsense/issues/393
    if (enable_[RS_STREAM_INFRARED])
    {
      rs_set_frame_callback_cpp(rs_device_, RS_STREAM_INFRARED, new rs::frame_callback(ir_frame_handler_), &rs_error_);
      checkError();
    }
  }

  /*
   * Set the streams according to their corresponding flag values.
   */
  void BaseNodelet::setStreams()
  {
    // Enable streams
    for (int stream = 0; stream < STREAM_COUNT; stream++)
    {
      if (enable_[stream] == true)
      {
        enableStream(static_cast<rs_stream>(stream), width_[stream], height_[stream], format_[stream], fps_[stream]);
      }
      else if (enable_[stream] == false)
      {
        disableStream(static_cast<rs_stream>(stream));
      }
    }
  }

  /*
   * Enable individual streams.
   */
  void BaseNodelet::enableStream(rs_stream stream_index, int width, int height, rs_format format, int fps)
  {
    if (rs_is_stream_enabled(rs_device_, stream_index, 0) == 0)
    {
      if (mode_.compare("manual") == 0)
      {
        ROS_INFO_STREAM(nodelet_name_ << " - Enabling " << STREAM_DESC[stream_index] << " in manual mode");
        rs_enable_stream(rs_device_, stream_index, width, height, format, fps, &rs_error_);
        checkError();
      }
      else
      {
        ROS_INFO_STREAM(nodelet_name_ << " - Enabling " << STREAM_DESC[stream_index] << " in preset mode");
        rs_enable_stream_preset(rs_device_, stream_index, RS_PRESET_BEST_QUALITY, &rs_error_);
        checkError();
      }
    }
    if (camera_info_ptr_[stream_index] == NULL)
    {
      // Allocate image resources
      getStreamCalibData(stream_index);
      step_[stream_index] = camera_info_ptr_[stream_index]->width * unit_step_size_[stream_index];
      image_[stream_index] = cv::Mat(camera_info_ptr_[stream_index]->height,
          camera_info_ptr_[stream_index]->width, cv_type_[stream_index], cv::Scalar(0, 0, 0));
    }
    ts_[stream_index] = -1;
  }

  /*
   * Prepare camera_info for each enabled stream.
   */
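  // camera_info is filled row-major: K = [fx 0 cx; 0 fy cy; 0 0 1] from the stream intrinsics,
  // R is the identity, and P is the 3x4 projection [K | t]. For the depth stream, t is the
  // depth-to-color translation from the device extrinsics (Tx, Ty, Tz); for the other streams
  // t is zero. The five distortion coefficients are copied into D with the "plumb_bob" model.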
  void BaseNodelet::getStreamCalibData(rs_stream stream_index)
  {
    rs_intrinsics intrinsic;

    if (stream_index == RS_STREAM_COLOR)
    {
      rs_get_stream_intrinsics(rs_device_, RS_STREAM_RECTIFIED_COLOR, &intrinsic, &rs_error_);
    }
    else
    {
      rs_get_stream_intrinsics(rs_device_, stream_index, &intrinsic, &rs_error_);
    }

    if (rs_error_)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - Verify camera firmware version and/or calibration data!");
    }
    checkError();

    sensor_msgs::CameraInfo * camera_info = new sensor_msgs::CameraInfo();
    camera_info_ptr_[stream_index] = sensor_msgs::CameraInfoPtr(camera_info);

    camera_info->header.frame_id = optical_frame_id_[stream_index];
    camera_info->width = intrinsic.width;
    camera_info->height = intrinsic.height;

    camera_info->K.at(0) = intrinsic.fx;
    camera_info->K.at(2) = intrinsic.ppx;
    camera_info->K.at(4) = intrinsic.fy;
    camera_info->K.at(5) = intrinsic.ppy;
    camera_info->K.at(8) = 1;

    camera_info->P.at(0) = camera_info->K.at(0);
    camera_info->P.at(1) = 0;
    camera_info->P.at(2) = camera_info->K.at(2);
    camera_info->P.at(3) = 0;

    camera_info->P.at(4) = 0;
    camera_info->P.at(5) = camera_info->K.at(4);
    camera_info->P.at(6) = camera_info->K.at(5);
    camera_info->P.at(7) = 0;

    camera_info->P.at(8) = 0;
    camera_info->P.at(9) = 0;
    camera_info->P.at(10) = 1;
    camera_info->P.at(11) = 0;

    if (stream_index == RS_STREAM_DEPTH)
    {
      // set depth to color translation values in Projection matrix (P)
      rs_extrinsics z_extrinsic;
      rs_get_device_extrinsics(rs_device_, RS_STREAM_DEPTH, RS_STREAM_COLOR, &z_extrinsic, &rs_error_);
      if (rs_error_)
      {
        ROS_ERROR_STREAM(nodelet_name_ << " - Verify camera is calibrated!");
      }
      checkError();
      camera_info->P.at(3) = z_extrinsic.translation[0];   // Tx
      camera_info->P.at(7) = z_extrinsic.translation[1];   // Ty
      camera_info->P.at(11) = z_extrinsic.translation[2];  // Tz
    }

    camera_info->distortion_model = "plumb_bob";

    // set R (rotation matrix) values to identity matrix
    camera_info->R.at(0) = 1.0;
    camera_info->R.at(1) = 0.0;
    camera_info->R.at(2) = 0.0;
    camera_info->R.at(3) = 0.0;
    camera_info->R.at(4) = 1.0;
    camera_info->R.at(5) = 0.0;
    camera_info->R.at(6) = 0.0;
    camera_info->R.at(7) = 0.0;
    camera_info->R.at(8) = 1.0;

    for (int i = 0; i < 5; i++)
    {
      camera_info->D.push_back(intrinsic.coeffs[i]);
    }
  }

  /*
   * Disable individual streams.
   */
  void BaseNodelet::disableStream(rs_stream stream_index)
  {
    if (rs_is_stream_enabled(rs_device_, stream_index, 0) == 1)
    {
      ROS_INFO_STREAM(nodelet_name_ << " - Disabling " << STREAM_DESC[stream_index] << " stream");
      rs_disable_stream(rs_device_, stream_index, &rs_error_);
      checkError();
    }
  }

  /*
   * Start camera.
   */
  std::string BaseNodelet::startCamera()
  {
    if (rs_is_device_streaming(rs_device_, 0) == 0)
    {
      ROS_INFO_STREAM(nodelet_name_ << " - Starting camera");
      // Set up the callbacks for each stream
      setFrameCallbacks();
      try
      {
        rs_device_->start(rs_source_);
      }
      catch (std::runtime_error & e)
      {
        ROS_ERROR_STREAM(nodelet_name_ << " - Couldn't start camera -- " << e.what());
        ros::shutdown();
      }
      camera_start_ts_ = ros::Time::now();
      return "Camera Started Successfully";
    }
    return "Camera is already Started";
  }

  /*
   * Stop camera.
   */
  std::string BaseNodelet::stopCamera()
  {
    if (rs_is_device_streaming(rs_device_, 0) == 1)
    {
      ROS_INFO_STREAM(nodelet_name_ << " - Stopping camera");
      try
      {
        rs_device_->stop(rs_source_);
      }
      catch (std::runtime_error & e)
      {
        ROS_ERROR_STREAM(nodelet_name_ << " - Couldn't stop camera -- " << e.what());
        ros::shutdown();
      }
      return "Camera Stopped Successfully";
    }
    return "Camera is already Stopped";
  }

  /*
   * Copy frame data from realsense to member cv images.
   */
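  // For the depth stream the raw 16-bit buffer is used directly when the device depth scale is
  // already 1 mm (MILLIMETER_METERS); otherwise the buffer is wrapped in a cv::Mat and rescaled
  // into millimeters with convertTo(). Color and IR frames are shallow copies of the frame data.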
  void BaseNodelet::setImageData(rs_stream stream_index, rs::frame & frame)
  {
    if (stream_index == RS_STREAM_DEPTH)
    {
      // fill depth buffer
      image_depth16_ = reinterpret_cast<const uint16_t *>(frame.get_data());
      float depth_scale_meters = rs_get_device_depth_scale(rs_device_, &rs_error_);
      if (depth_scale_meters == MILLIMETER_METERS)
      {
        image_[stream_index].data = (unsigned char *) image_depth16_;
      }
      else
      {
        cvWrapper_ = cv::Mat(image_[stream_index].size(), cv_type_[stream_index],
            const_cast<void *>(reinterpret_cast<const void *>(image_depth16_)), step_[stream_index]);
        cvWrapper_.convertTo(image_[stream_index], cv_type_[stream_index],
            static_cast<double>(depth_scale_meters) / static_cast<double>(MILLIMETER_METERS));
      }
    }
    else
    {
      image_[stream_index].data = (unsigned char *) (frame.get_data());
    }
  }

  /*
   * Set depth enable.
   */
  void BaseNodelet::setDepthEnable(bool &enable_depth)
  {
    // Set flags
    if (enable_depth == false)
    {
      if (enable_[RS_STREAM_COLOR] == false)
      {
        ROS_INFO_STREAM(nodelet_name_ << " - Color stream is also disabled. Cannot disable depth stream");
        enable_depth = true;
      }
      else
      {
        enable_[RS_STREAM_DEPTH] = false;
      }
    }
    else
    {
      enable_[RS_STREAM_DEPTH] = true;
    }
  }

  /*
   * Determine the timestamp for the publish topic.
   */
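  // frame_ts is the librealsense frame timestamp in milliseconds; it is treated as an offset from
  // the ROS time recorded in startCamera() and converted to seconds before being added.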
  ros::Time BaseNodelet::getTimestamp(rs_stream stream_index, double frame_ts)
  {
    return ros::Time(camera_start_ts_) + ros::Duration(frame_ts * 0.001);
  }

  /*
   * Publish topic.
   */
  void BaseNodelet::publishTopic(rs_stream stream_index, rs::frame &frame) try
  {
    // mutex to ensure only one frame per stream is processed at a time
    std::unique_lock<std::mutex> lock(frame_mutex_[stream_index]);

    double frame_ts = frame.get_timestamp();
    if (ts_[stream_index] != frame_ts)  // Publish the frame only if it is not a duplicate
    {
      setImageData(stream_index, frame);
      // Publish stream only if there is at least one subscriber.
      if (camera_publisher_[stream_index].getNumSubscribers() > 0)
      {
        sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(),
            encoding_[stream_index],
            image_[stream_index]).toImageMsg();
        msg->header.frame_id = optical_frame_id_[stream_index];
        // Publish timestamp to synchronize frames.
        msg->header.stamp = getTimestamp(stream_index, frame_ts);
        msg->width = image_[stream_index].cols;
        msg->height = image_[stream_index].rows;
        msg->is_bigendian = false;
        msg->step = step_[stream_index];
        camera_info_ptr_[stream_index]->header.stamp = msg->header.stamp;
        camera_publisher_[stream_index].publish(msg, camera_info_ptr_[stream_index]);
      }
    }
    ts_[stream_index] = frame_ts;
  }
  catch(const rs::error & e)
  {
    ROS_ERROR_STREAM(nodelet_name_ << " - " << "RealSense error calling "
        << e.get_failed_function() << "(" << e.get_failed_args() << "):\n "
        << e.what());
    ros::shutdown();
  }
  catch(const std::exception & e)
  {
    ROS_ERROR_STREAM(nodelet_name_ << " - " << e.what());
    ros::shutdown();
  }
  catch(...)
  {
    ROS_ERROR_STREAM(nodelet_name_ << " - Caught unknown exception...shutting down!");
    ros::shutdown();
  }

  /*
   * Publish pointcloud topic.
   */
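  // Each depth pixel is deprojected into a 3D point using the depth intrinsics and the device
  // depth scale; points with z <= 0 or z > max_z_ are zeroed out. When the color stream is enabled
  // the point is transformed with the depth-to-color extrinsics and projected into the color image
  // to pick up its RGB value; points that fall outside the color image keep the librealsense
  // out-of-bounds shade of blue.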
  void BaseNodelet::publishPCTopic()
  {
    cv::Mat & image_color = image_[RS_STREAM_COLOR];
    // Publish pointcloud only if there is at least one subscriber.
    if (pointcloud_publisher_.getNumSubscribers() > 0 && rs_is_stream_enabled(rs_device_, RS_STREAM_DEPTH, 0) == 1)
    {
      rs_intrinsics color_intrinsic;
      rs_extrinsics z_extrinsic;

      rs_intrinsics z_intrinsic;
      rs_get_stream_intrinsics(rs_device_, RS_STREAM_DEPTH, &z_intrinsic, &rs_error_);
      checkError();

      if (enable_[RS_STREAM_COLOR] == true)
      {
        rs_get_stream_intrinsics(rs_device_, RS_STREAM_COLOR, &color_intrinsic, &rs_error_);
        checkError();
        rs_get_device_extrinsics(rs_device_, RS_STREAM_DEPTH, RS_STREAM_COLOR, &z_extrinsic, &rs_error_);
        checkError();
      }

      // Convert pointcloud from the camera to pointcloud object for ROS.
      sensor_msgs::PointCloud2 msg_pointcloud;
      msg_pointcloud.width = width_[RS_STREAM_DEPTH];
      msg_pointcloud.height = height_[RS_STREAM_DEPTH];
      msg_pointcloud.header.stamp = ros::Time::now();
      msg_pointcloud.is_dense = true;

      sensor_msgs::PointCloud2Modifier modifier(msg_pointcloud);

      modifier.setPointCloud2Fields(4, "x", 1,
          sensor_msgs::PointField::FLOAT32, "y", 1,
          sensor_msgs::PointField::FLOAT32, "z", 1,
          sensor_msgs::PointField::FLOAT32, "rgb", 1,
          sensor_msgs::PointField::FLOAT32);

      modifier.setPointCloud2FieldsByString(2, "xyz", "rgb");

      sensor_msgs::PointCloud2Iterator<float> iter_x(msg_pointcloud, "x");
      sensor_msgs::PointCloud2Iterator<float> iter_y(msg_pointcloud, "y");
      sensor_msgs::PointCloud2Iterator<float> iter_z(msg_pointcloud, "z");

      sensor_msgs::PointCloud2Iterator<uint8_t> iter_r(msg_pointcloud, "r");
      sensor_msgs::PointCloud2Iterator<uint8_t> iter_g(msg_pointcloud, "g");
      sensor_msgs::PointCloud2Iterator<uint8_t> iter_b(msg_pointcloud, "b");

      float depth_point[3], color_point[3], color_pixel[2], scaled_depth;
      unsigned char *color_data = image_color.data;

      float depth_scale_meters = rs_get_device_depth_scale(rs_device_, &rs_error_);
      checkError();  // Default value is 0.001

      // Fill the PointCloud2 fields.
      for (int y = 0; y < z_intrinsic.height; y++)
      {
        for (int x = 0; x < z_intrinsic.width; x++)
        {
          scaled_depth = static_cast<float>(*image_depth16_) * depth_scale_meters;
          float depth_pixel[2] = {static_cast<float>(x), static_cast<float>(y)};
          rs_deproject_pixel_to_point(depth_point, &z_intrinsic, depth_pixel, scaled_depth);

          if (depth_point[2] <= 0.0f || depth_point[2] > max_z_)
          {
            depth_point[0] = 0.0f;
            depth_point[1] = 0.0f;
            depth_point[2] = 0.0f;
          }

          *iter_x = depth_point[0];
          *iter_y = depth_point[1];
          *iter_z = depth_point[2];

          // Default to white color.
          *iter_r = static_cast<uint8_t>(255);
          *iter_g = static_cast<uint8_t>(255);
          *iter_b = static_cast<uint8_t>(255);

          if (enable_[RS_STREAM_COLOR] == true)
          {
            rs_transform_point_to_point(color_point, &z_extrinsic, depth_point);
            rs_project_point_to_pixel(color_pixel, &color_intrinsic, color_point);

            if (color_pixel[1] < 0.0f || color_pixel[1] >= image_color.rows
                || color_pixel[0] < 0.0f || color_pixel[0] >= image_color.cols)
            {
              // For out of bounds color data, default to a shade of blue in order to visually distinguish holes.
              // This color value is same as the librealsense out of bounds color value.
              *iter_r = static_cast<uint8_t>(96);
              *iter_g = static_cast<uint8_t>(157);
              *iter_b = static_cast<uint8_t>(198);
            }
            else
            {
              int i = static_cast<int>(color_pixel[0]);
              int j = static_cast<int>(color_pixel[1]);

              *iter_r = static_cast<uint8_t>(color_data[i * 3 + j * image_color.cols * 3]);
              *iter_g = static_cast<uint8_t>(color_data[i * 3 + j * image_color.cols * 3 + 1]);
              *iter_b = static_cast<uint8_t>(color_data[i * 3 + j * image_color.cols * 3 + 2]);
            }
          }

          image_depth16_++;
          ++iter_x; ++iter_y; ++iter_z; ++iter_r; ++iter_g; ++iter_b;
        }
      }

      msg_pointcloud.header.frame_id = optical_frame_id_[RS_STREAM_DEPTH];
      pointcloud_publisher_.publish(msg_pointcloud);
    }
  }

  /*
   * Get the camera extrinsics.
   */
  void BaseNodelet::getCameraExtrinsics()
  {
    // Get offset between base frame and depth frame
    rs_get_device_extrinsics(rs_device_, RS_STREAM_DEPTH, RS_STREAM_COLOR, &color2depth_extrinsic_, &rs_error_);
    if (rs_error_)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - Verify camera is calibrated!");
    }
    checkError();

    // Get offset between base frame and infrared frame
    rs_get_device_extrinsics(rs_device_, RS_STREAM_INFRARED, RS_STREAM_COLOR, &color2ir_extrinsic_, &rs_error_);
    if (rs_error_)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - Verify camera is calibrated!");
    }
    checkError();
  }

  /*
   * Publish Static transforms.
   */
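  // Frame conventions: the color frame is used as the base, so base->color is the identity. Each
  // <stream>_frame is related to its <stream>_optical_frame by the fixed rotation
  // RPY(-pi/2, 0, -pi/2), which maps the ROS body convention (x forward, y left, z up) onto the
  // optical convention (z forward, x right, y down). The depth and IR offsets come from the device
  // extrinsics with the axes remapped accordingly (x <- t[2], y <- -t[0], z <- -t[1]).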
  void BaseNodelet::publishStaticTransforms()
  {
    // Publish transforms for the cameras
    ROS_INFO_STREAM(nodelet_name_ << " - Publishing camera transforms (/tf_static)");

    tf::Quaternion q_c2co;
    tf::Quaternion q_d2do;
    tf::Quaternion q_i2io;
    geometry_msgs::TransformStamped b2d_msg;
    geometry_msgs::TransformStamped d2do_msg;
    geometry_msgs::TransformStamped b2c_msg;
    geometry_msgs::TransformStamped c2co_msg;
    geometry_msgs::TransformStamped b2i_msg;
    geometry_msgs::TransformStamped i2io_msg;

    // Get the current timestamp for all static transforms
    transform_ts_ = ros::Time::now();

    // The color frame is used as the base frame.
    // Hence no additional transformation is done from base frame to color frame.
    b2c_msg.header.stamp = transform_ts_;
    b2c_msg.header.frame_id = base_frame_id_;
    b2c_msg.child_frame_id = frame_id_[RS_STREAM_COLOR];
    b2c_msg.transform.translation.x = 0;
    b2c_msg.transform.translation.y = 0;
    b2c_msg.transform.translation.z = 0;
    b2c_msg.transform.rotation.x = 0;
    b2c_msg.transform.rotation.y = 0;
    b2c_msg.transform.rotation.z = 0;
    b2c_msg.transform.rotation.w = 1;
    static_tf_broadcaster_.sendTransform(b2c_msg);

    // Transform color frame to color optical frame
    q_c2co.setRPY(-M_PI/2, 0.0, -M_PI/2);
    c2co_msg.header.stamp = transform_ts_;
    c2co_msg.header.frame_id = frame_id_[RS_STREAM_COLOR];
    c2co_msg.child_frame_id = optical_frame_id_[RS_STREAM_COLOR];
    c2co_msg.transform.translation.x = 0;
    c2co_msg.transform.translation.y = 0;
    c2co_msg.transform.translation.z = 0;
    c2co_msg.transform.rotation.x = q_c2co.getX();
    c2co_msg.transform.rotation.y = q_c2co.getY();
    c2co_msg.transform.rotation.z = q_c2co.getZ();
    c2co_msg.transform.rotation.w = q_c2co.getW();
    static_tf_broadcaster_.sendTransform(c2co_msg);

    // Transform base frame to depth frame
    b2d_msg.header.stamp = transform_ts_;
    b2d_msg.header.frame_id = base_frame_id_;
    b2d_msg.child_frame_id = frame_id_[RS_STREAM_DEPTH];
    b2d_msg.transform.translation.x = color2depth_extrinsic_.translation[2];
    b2d_msg.transform.translation.y = -color2depth_extrinsic_.translation[0];
    b2d_msg.transform.translation.z = -color2depth_extrinsic_.translation[1];
    b2d_msg.transform.rotation.x = 0;
    b2d_msg.transform.rotation.y = 0;
    b2d_msg.transform.rotation.z = 0;
    b2d_msg.transform.rotation.w = 1;
    static_tf_broadcaster_.sendTransform(b2d_msg);

    // Transform depth frame to depth optical frame
    q_d2do.setRPY(-M_PI/2, 0.0, -M_PI/2);
    d2do_msg.header.stamp = transform_ts_;
    d2do_msg.header.frame_id = frame_id_[RS_STREAM_DEPTH];
    d2do_msg.child_frame_id = optical_frame_id_[RS_STREAM_DEPTH];
    d2do_msg.transform.translation.x = 0;
    d2do_msg.transform.translation.y = 0;
    d2do_msg.transform.translation.z = 0;
    d2do_msg.transform.rotation.x = q_d2do.getX();
    d2do_msg.transform.rotation.y = q_d2do.getY();
    d2do_msg.transform.rotation.z = q_d2do.getZ();
    d2do_msg.transform.rotation.w = q_d2do.getW();
    static_tf_broadcaster_.sendTransform(d2do_msg);

    // Transform base frame to infrared frame
    b2i_msg.header.stamp = transform_ts_;
    b2i_msg.header.frame_id = base_frame_id_;
    b2i_msg.child_frame_id = frame_id_[RS_STREAM_INFRARED];
    b2i_msg.transform.translation.x = color2ir_extrinsic_.translation[2];
    b2i_msg.transform.translation.y = -color2ir_extrinsic_.translation[0];
    b2i_msg.transform.translation.z = -color2ir_extrinsic_.translation[1];
    b2i_msg.transform.rotation.x = 0;
    b2i_msg.transform.rotation.y = 0;
    b2i_msg.transform.rotation.z = 0;
    b2i_msg.transform.rotation.w = 1;
    static_tf_broadcaster_.sendTransform(b2i_msg);

    // Transform infrared frame to infrared optical frame
    q_i2io.setRPY(-M_PI/2, 0.0, -M_PI/2);
    i2io_msg.header.stamp = transform_ts_;
    i2io_msg.header.frame_id = frame_id_[RS_STREAM_INFRARED];
    i2io_msg.child_frame_id = optical_frame_id_[RS_STREAM_INFRARED];
    i2io_msg.transform.translation.x = 0;
    i2io_msg.transform.translation.y = 0;
    i2io_msg.transform.translation.z = 0;
    i2io_msg.transform.rotation.x = q_i2io.getX();
    i2io_msg.transform.rotation.y = q_i2io.getY();
    i2io_msg.transform.rotation.z = q_i2io.getZ();
    i2io_msg.transform.rotation.w = q_i2io.getW();
    static_tf_broadcaster_.sendTransform(i2io_msg);
  }

  /*
   * Publish Dynamic transforms.
   */
  void BaseNodelet::publishDynamicTransforms()
  {
    tf::Transform tr;
    tf::Quaternion q;

    // The color frame is used as the base frame.
    // Hence no additional transformation is done from base frame to color frame.
    tr.setOrigin(tf::Vector3(0, 0, 0));
    tr.setRotation(tf::Quaternion(0, 0, 0, 1));
    dynamic_tf_broadcaster_.sendTransform(tf::StampedTransform(tr, transform_ts_,
        base_frame_id_, frame_id_[RS_STREAM_COLOR]));

    // Transform color frame to color optical frame
    tr.setOrigin(tf::Vector3(0, 0, 0));
    q.setRPY(-M_PI/2, 0.0, -M_PI/2);
    tr.setRotation(q);
    dynamic_tf_broadcaster_.sendTransform(tf::StampedTransform(tr, transform_ts_,
        frame_id_[RS_STREAM_COLOR], optical_frame_id_[RS_STREAM_COLOR]));

    // Transform base frame to depth frame
    tr.setOrigin(tf::Vector3(
        color2depth_extrinsic_.translation[2],
        -color2depth_extrinsic_.translation[0],
        -color2depth_extrinsic_.translation[1]));
    tr.setRotation(tf::Quaternion(0, 0, 0, 1));
    dynamic_tf_broadcaster_.sendTransform(tf::StampedTransform(tr, transform_ts_,
        base_frame_id_, frame_id_[RS_STREAM_DEPTH]));

    // Transform depth frame to depth optical frame
    tr.setOrigin(tf::Vector3(0, 0, 0));
    q.setRPY(-M_PI/2, 0.0, -M_PI/2);
    tr.setRotation(q);
    dynamic_tf_broadcaster_.sendTransform(tf::StampedTransform(tr, transform_ts_,
        frame_id_[RS_STREAM_DEPTH], optical_frame_id_[RS_STREAM_DEPTH]));

    // Transform base frame to infrared frame
    tr.setOrigin(tf::Vector3(
        color2ir_extrinsic_.translation[2],
        -color2ir_extrinsic_.translation[0],
        -color2ir_extrinsic_.translation[1]));
    tr.setRotation(tf::Quaternion(0, 0, 0, 1));
    dynamic_tf_broadcaster_.sendTransform(tf::StampedTransform(tr, transform_ts_,
        base_frame_id_, frame_id_[RS_STREAM_INFRARED]));

    // Transform infrared frame to infrared optical frame
    tr.setOrigin(tf::Vector3(0, 0, 0));
    q.setRPY(-M_PI/2, 0.0, -M_PI/2);
    tr.setRotation(q);
    dynamic_tf_broadcaster_.sendTransform(tf::StampedTransform(tr, transform_ts_,
        frame_id_[RS_STREAM_INFRARED], optical_frame_id_[RS_STREAM_INFRARED]));
  }

  /*
   * Prepare transforms.
   */
  void BaseNodelet::prepareTransforms()
  {
    // Publish transforms for the cameras
    ROS_INFO_STREAM(nodelet_name_ << " - Publishing camera transforms (/tf)");

    ros::Rate loop_rate(tf_publication_rate_);

    while (ros::ok())
    {
      // update the time stamp for publication
      transform_ts_ = ros::Time::now();

      publishDynamicTransforms();

      loop_rate.sleep();
    }
  }

  /*
   * Display error details and shutdown ROS.
   */
  void BaseNodelet::checkError()
  {
    if (rs_error_)
    {
      ROS_ERROR_STREAM(nodelet_name_ << " - Error calling " << rs_get_failed_function(rs_error_) << " ( "
          << rs_get_failed_args(rs_error_) << " ): \n" << rs_get_error_message(rs_error_) << " \n");
      rs_free_error(rs_error_);
      rs_error_ = NULL;
      ros::shutdown();
    }
  }

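  // wrappedSystem() forks a child, places it in its own process group and runs the command via
  // execvpe(); the parent only records the group id so the destructor (or the next call, once more
  // than 10 are pending) can terminate it with SIGHUP instead of blocking on wait().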
  void BaseNodelet::wrappedSystem(const std::vector<std::string>& string_argv)
  {
    pid_t pid;

    // Convert the args to char * const * for exec
    char * argv[string_argv.size() + 1];

    for (size_t i = 0; i < string_argv.size(); ++i)
    {
      argv[i] = const_cast<char*>(string_argv[i].c_str());
    }
    argv[string_argv.size()] = NULL;

    pid = fork();

    if (pid == -1)
    {  // failed to fork
      ROS_WARN_STREAM(nodelet_name_ <<
          " - Failed to fork system command: "
          << boost::algorithm::join(string_argv, " ")
          << strerror(errno));
    }
    else if (pid == 0)
    {  // child process
      // set to its own process group
      setpgid(getpid(), getpid());

      // sleep for 1 second to ensure previous calls are completed
      sleep(1);
      // environ is the current environment from <unistd.h>
      execvpe(argv[0], argv, environ);

      _exit(EXIT_FAILURE);  // exec never returns
    }
    else
    {  // parent process
      // Save the process pid (process group)
      system_proc_groups_.push(pid);

      // If more than 10, process the oldest now
      if (system_proc_groups_.size() > 10)
      {
        killpg(system_proc_groups_.front(), SIGHUP);
        system_proc_groups_.pop();
      }
      // do not wait as this is the main thread
    }
  }

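  // The validated-firmware lookup key is "<camera_name>_<fw_type>", where fw_type is "camera",
  // "adapter" or "motion_module" as passed from listCameras(). A non-empty return value is a
  // warning: either the camera is not in the validated list or its firmware differs from the
  // validated version.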
  std::string BaseNodelet::checkFirmwareValidation(const std::string& fw_type,
      const std::string& current_fw,
      const std::string& camera_name,
      const std::string& camera_serial_number)
  {
    for (auto& elem : CAMERA_NAME_TO_VALIDATED_FIRMWARE)
    {
      std::cout << elem.first << " ; " << elem.second << std::endl;
    }

    std::string warning_msg = "";
    std::string cam_name = camera_name + "_" + fw_type;
    auto it = CAMERA_NAME_TO_VALIDATED_FIRMWARE.find(cam_name);
    if (it == CAMERA_NAME_TO_VALIDATED_FIRMWARE.end())
    {
      warning_msg = "Camera " + cam_name + " not found!";
    }
    else
    {
      std::string validated_firmware = it->second;
      if (current_fw != validated_firmware)
      {
        warning_msg = camera_serial_number + "'s current " + fw_type + " firmware is " + current_fw +
            ", Validated " + fw_type + " firmware is " + validated_firmware;
      }
    }

    return warning_msg;
  }
}  // namespace realsense_camera