base_realsense_node.cpp
Go to the documentation of this file.
1 #include "../include/base_realsense_node.h"
2 #include "assert.h"
3 #include <boost/algorithm/string.hpp>
4 #include <algorithm>
5 #include <cctype>
6 #include <mutex>
7 
8 using namespace realsense2_camera;
9 using namespace ddynamic_reconfigure;
10 
11 // stream_index_pair sip{stream_type, stream_index};
12 #define STREAM_NAME(sip) (static_cast<std::ostringstream&&>(std::ostringstream() << _stream_name[sip.first] << ((sip.second>0) ? std::to_string(sip.second) : ""))).str()
13 #define FRAME_ID(sip) (static_cast<std::ostringstream&&>(std::ostringstream() << "camera_" << STREAM_NAME(sip) << "_frame")).str()
14 #define OPTICAL_FRAME_ID(sip) (static_cast<std::ostringstream&&>(std::ostringstream() << "camera_" << STREAM_NAME(sip) << "_optical_frame")).str()
15 #define ALIGNED_DEPTH_TO_FRAME_ID(sip) (static_cast<std::ostringstream&&>(std::ostringstream() << "camera_aligned_depth_to_" << STREAM_NAME(sip) << "_frame")).str()
16 
17 SyncedImuPublisher::SyncedImuPublisher(ros::Publisher imu_publisher, std::size_t waiting_list_size):
18  _publisher(imu_publisher), _pause_mode(false),
19  _waiting_list_size(waiting_list_size)
20  {}
21 
23 {
25 }
26 
28 {
29  std::lock_guard<std::mutex> lock_guard(_mutex);
30  if (_pause_mode)
31  {
33  {
34  throw std::runtime_error("SyncedImuPublisher inner list reached maximum size of " + std::to_string(_pending_messages.size()));
35  }
36  _pending_messages.push(imu_msg);
37  }
38  else
39  {
40  _publisher.publish(imu_msg);
41  // ROS_INFO_STREAM("iid1:" << imu_msg.header.seq << ", time: " << std::setprecision (20) << imu_msg.header.stamp.toSec());
42  }
43  return;
44 }
45 
47 {
48  if (!_is_enabled) return;
49  std::lock_guard<std::mutex> lock_guard(_mutex);
50  _pause_mode = true;
51 }
52 
54 {
55  std::lock_guard<std::mutex> lock_guard(_mutex);
57  _pause_mode = false;
58 }
59 
61 {
62  // ROS_INFO_STREAM("publish imu: " << _pending_messages.size());
63  while (!_pending_messages.empty())
64  {
65  const sensor_msgs::Imu &imu_msg = _pending_messages.front();
66  _publisher.publish(imu_msg);
67  // ROS_INFO_STREAM("iid2:" << imu_msg.header.seq << ", time: " << std::setprecision (20) << imu_msg.header.stamp.toSec());
68  _pending_messages.pop();
69  }
70 }
71 
73 {
74  auto ns = ros::this_node::getNamespace();
75  ns.erase(std::remove(ns.begin(), ns.end(), '/'), ns.end());
76  return ns;
77 }
78 
80  ros::NodeHandle& privateNodeHandle,
81  rs2::device dev,
82  const std::string& serial_no) :
83  _is_running(true), _base_frame_id(""), _node_handle(nodeHandle),
84  _pnh(privateNodeHandle), _dev(dev), _json_file_path(""),
85  _serial_no(serial_no),
86  _is_initialized_time_base(false),
87  _namespace(getNamespaceStr())
88 {
89  // Types for depth stream
91  _image_format[RS2_STREAM_DEPTH] = CV_16UC1; // CVBridge type
93  _unit_step_size[RS2_STREAM_DEPTH] = sizeof(uint16_t); // sensor_msgs::ImagePtr row step size
94  _stream_name[RS2_STREAM_DEPTH] = "depth";
96 
97  // Types for confidence stream
98  _image_format[RS2_STREAM_CONFIDENCE] = CV_8UC1; // CVBridge type
100  _unit_step_size[RS2_STREAM_CONFIDENCE] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size
101  _stream_name[RS2_STREAM_CONFIDENCE] = "confidence";
103 
104  // Infrared stream
106 
107  _image_format[RS2_STREAM_INFRARED] = CV_8UC1; // CVBridge type
109  _unit_step_size[RS2_STREAM_INFRARED] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size
112 
113  // Types for color stream
114  _image_format[RS2_STREAM_COLOR] = CV_8UC3; // CVBridge type
116  _unit_step_size[RS2_STREAM_COLOR] = 3; // sensor_msgs::ImagePtr row step size
117  _stream_name[RS2_STREAM_COLOR] = "color";
119 
120  // Types for fisheye stream
121  _image_format[RS2_STREAM_FISHEYE] = CV_8UC1; // CVBridge type
123  _unit_step_size[RS2_STREAM_FISHEYE] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size
124  _stream_name[RS2_STREAM_FISHEYE] = "fisheye";
126 
127  // Types for Motion-Module streams
128  _stream_name[RS2_STREAM_GYRO] = "gyro";
129 
130  _stream_name[RS2_STREAM_ACCEL] = "accel";
131 
132  _stream_name[RS2_STREAM_POSE] = "pose";
133 
135 }
136 
138 {
139  // Kill dynamic transform thread
140  if (_tf_t)
141  _tf_t->join();
142 
143  _is_running = false;
144  _cv.notify_one();
145  if (_monitoring_t && _monitoring_t->joinable())
146  {
147  _monitoring_t->join();
148  }
149 
150  std::set<std::string> module_names;
151  for (const std::pair<stream_index_pair, std::vector<rs2::stream_profile>>& profile : _enabled_profiles)
152  {
153  try
154  {
155  std::string module_name = _sensors[profile.first].get_info(RS2_CAMERA_INFO_NAME);
156  std::pair< std::set<std::string>::iterator, bool> res = module_names.insert(module_name);
157  if (res.second)
158  {
159  _sensors[profile.first].stop();
160  _sensors[profile.first].close();
161  }
162  }
163  catch (const rs2::error& e)
164  {
165  ROS_ERROR_STREAM("Exception: " << e.what());
166  }
167  }
168 }
169 
171 {
172  if(enabled)
173  {
174  std::map<std::string, std::vector<rs2::stream_profile> > profiles;
175  std::map<std::string, rs2::sensor> active_sensors;
176  for (const std::pair<stream_index_pair, std::vector<rs2::stream_profile>>& profile : _enabled_profiles)
177  {
178  std::string module_name = _sensors[profile.first].get_info(RS2_CAMERA_INFO_NAME);
179  ROS_INFO_STREAM("insert " << rs2_stream_to_string(profile.second.begin()->stream_type())
180  << " to " << module_name);
181  profiles[module_name].insert(profiles[module_name].begin(),
182  profile.second.begin(),
183  profile.second.end());
184  active_sensors[module_name] = _sensors[profile.first];
185  }
186 
187  for (const std::pair<std::string, std::vector<rs2::stream_profile> >& sensor_profile : profiles)
188  {
189  std::string module_name = sensor_profile.first;
190  rs2::sensor sensor = active_sensors[module_name];
191  sensor.open(sensor_profile.second);
192  sensor.start(_sensors_callback[module_name]);
193  if (sensor.is<rs2::depth_sensor>())
194  {
196  }
197  }
198  }
199  else
200  {
201  std::set<std::string> module_names;
202  for (const std::pair<stream_index_pair, std::vector<rs2::stream_profile>>& profile : _enabled_profiles)
203  {
204  std::string module_name = _sensors[profile.first].get_info(RS2_CAMERA_INFO_NAME);
205  std::pair< std::set<std::string>::iterator, bool> res = module_names.insert(module_name);
206  if (res.second)
207  {
208  _sensors[profile.first].stop();
209  _sensors[profile.first].close();
210  }
211  }
212  }
213 }
214 
216 {
217  for (auto&& s : _dev.query_sensors())
218  {
219  s.set_notifications_callback([&](const rs2::notification& n)
220  {
221  std::vector<std::string> error_strings({"RT IC2 Config error",
222  "Left IC2 Config error"});
224  {
225  ROS_WARN_STREAM("Hardware Notification:" << n.get_description() << "," << n.get_timestamp() << "," << n.get_severity() << "," << n.get_category());
226  }
227  if (error_strings.end() != find_if(error_strings.begin(), error_strings.end(), [&n] (std::string err)
228  {return (n.get_description().find(err) != std::string::npos); }))
229  {
230  ROS_ERROR_STREAM("Performing Hardware Reset.");
232  }
233  });
234  }
235 }
236 
238 {
239  getParameters();
240  setupDevice();
241  setupFilters();
244  enable_devices();
245  setupPublishers();
246  setupStreams();
247  SetBaseStream();
251  startMonitoring();
252  ROS_INFO_STREAM("RealSense Node Is Up!");
253 }
254 
256 {
257  if (_is_first_frame[stream_type])
258  {
259  ROS_DEBUG_STREAM("runFirstFrameInitialization: " << _video_functions_stack.size() << ", " << rs2_stream_to_string(stream_type));
260  _is_first_frame[stream_type] = false;
261  if (!_video_functions_stack[stream_type].empty())
262  {
263  std::thread t = std::thread([=]()
264  {
265  while (!_video_functions_stack[stream_type].empty())
266  {
267  _video_functions_stack[stream_type].back()();
268  _video_functions_stack[stream_type].pop_back();
269  }
270  });
271  t.detach();
272  }
273  }
274 }
275 
277 {
278  rs2::option_range op_range = sensor.get_option_range(option);
279  return op_range.max == 1.0f &&
280  op_range.min == 0.0f &&
281  op_range.step == 1.0f;
282 }
283 
285 {
286  static const int MAX_ENUM_OPTION_VALUES(100);
287  static const float EPSILON(0.05);
288 
289  rs2::option_range op_range = sensor.get_option_range(option);
290  if (abs((op_range.step - 1)) > EPSILON || (op_range.max > MAX_ENUM_OPTION_VALUES)) return false;
291  for (auto i = op_range.min; i <= op_range.max; i += op_range.step)
292  {
293  if (sensor.get_option_value_description(option, i) == nullptr)
294  continue;
295  return true;
296  }
297  return false;
298 }
299 
301 {
302  rs2::option_range op_range = sensor.get_option_range(option);
303  return (op_range.step == 1.0);
304 }
305 
306 std::map<std::string, int> get_enum_method(rs2::options sensor, rs2_option option)
307 {
308  std::map<std::string, int> dict; // An enum to set size
309  if (is_enum_option(sensor, option))
310  {
311  rs2::option_range op_range = sensor.get_option_range(option);
312  const auto op_range_min = int(op_range.min);
313  const auto op_range_max = int(op_range.max);
314  const auto op_range_step = int(op_range.step);
315  for (auto val = op_range_min; val <= op_range_max; val += op_range_step)
316  {
317  if (sensor.get_option_value_description(option, val) == nullptr)
318  continue;
319  dict[sensor.get_option_value_description(option, val)] = val;
320  }
321  }
322  return dict;
323 }
324 
namespace realsense2_camera
{

// Stream a std::map as "{ {key1: value1} {key2: value2} }"; used when logging
// enum-option dictionaries in warning messages.
template <typename K, typename V>
std::ostream& operator<<(std::ostream& os, const std::map<K, V>& m)
{
    os << '{';
    for (const auto& kv : m)
        os << " {" << kv.first << ": " << kv.second << '}';
    return os << " }";
}

}
341 
346 {
347  return std::isalnum(c) || c == '/' || c == '_';
348 }
349 
354 std::string create_graph_resource_name(const std::string &original_name)
355 {
356  std::string fixed_name = original_name;
357  std::transform(fixed_name.begin(), fixed_name.end(), fixed_name.begin(),
358  [](unsigned char c) { return std::tolower(c); });
359  std::replace_if(fixed_name.begin(), fixed_name.end(), [](const char c) { return !isValidCharInName(c); },
360  '_');
361  return fixed_name;
362 }
363 
364 void BaseRealSenseNode::set_auto_exposure_roi(const std::string option_name, rs2::sensor sensor, int new_value)
365 {
367  if (option_name == "left")
368  auto_exposure_roi.min_x = new_value;
369  else if (option_name == "right")
370  auto_exposure_roi.max_x = new_value;
371  else if (option_name == "top")
372  auto_exposure_roi.min_y = new_value;
373  else if (option_name == "bottom")
374  auto_exposure_roi.max_y = new_value;
375  else
376  {
377  ROS_WARN_STREAM("Invalid option_name: " << option_name << " while setting auto exposure ROI.");
378  return;
379  }
381 }
382 
384 {
385  const rs2::region_of_interest& auto_exposure_roi(_auto_exposure_roi[sensor.get_info(RS2_CAMERA_INFO_NAME)]);
386  try
387  {
388  sensor.as<rs2::roi_sensor>().set_region_of_interest(auto_exposure_roi);
389  }
390  catch(const std::runtime_error& e)
391  {
392  ROS_ERROR_STREAM(e.what());
393  }
394 }
395 
396 void BaseRealSenseNode::readAndSetDynamicParam(ros::NodeHandle& nh1, std::shared_ptr<ddynamic_reconfigure::DDynamicReconfigure> ddynrec,
397  const std::string option_name, const int min_val, const int max_val, rs2::sensor sensor,
398  int* option_value)
399 {
400  nh1.param(option_name, *option_value, *option_value); //param (const std::string &param_name, T &param_val, const T &default_val) const
401  if (*option_value < min_val) *option_value = min_val;
402  if (*option_value > max_val) *option_value = max_val;
403 
404  ddynrec->registerVariable<int>(
405  option_name, *option_value, [this, sensor, option_name](int new_value){set_auto_exposure_roi(option_name, sensor, new_value);},
406  "auto-exposure " + option_name + " coordinate", min_val, max_val);
407 }
408 
410 {
411  for (const std::pair<stream_index_pair, std::vector<rs2::stream_profile>>& profile : _enabled_profiles)
412  {
414  std::string module_base_name(sensor.get_info(RS2_CAMERA_INFO_NAME));
415  if (sensor.is<rs2::roi_sensor>() && _auto_exposure_roi.find(module_base_name) == _auto_exposure_roi.end())
416  {
417  int max_x(_width[profile.first]-1);
418  int max_y(_height[profile.first]-1);
419 
420  std::string module_name = create_graph_resource_name(module_base_name) +"/auto_exposure_roi";
421  ros::NodeHandle nh1(nh, module_name);
422  std::shared_ptr<ddynamic_reconfigure::DDynamicReconfigure> ddynrec = std::make_shared<ddynamic_reconfigure::DDynamicReconfigure>(nh1);
423 
424  _auto_exposure_roi[module_base_name] = {0, 0, max_x, max_y};
425  rs2::region_of_interest& auto_exposure_roi(_auto_exposure_roi[module_base_name]);
426  readAndSetDynamicParam(nh1, ddynrec, "left", 0, max_x, sensor, &(auto_exposure_roi.min_x));
427  readAndSetDynamicParam(nh1, ddynrec, "right", 0, max_x, sensor, &(auto_exposure_roi.max_x));
428  readAndSetDynamicParam(nh1, ddynrec, "top", 0, max_y, sensor, &(auto_exposure_roi.min_y));
429  readAndSetDynamicParam(nh1, ddynrec, "bottom", 0, max_y, sensor, &(auto_exposure_roi.max_y));
430 
431  ddynrec->publishServicesTopics();
432  _ddynrec.push_back(ddynrec);
433 
434  // Initiate the call to set_sensor_auto_exposure_roi, after the first frame arrive.
435  rs2_stream stream_type = profile.first.first;
436  _video_functions_stack[stream_type].push_back([this, sensor](){set_sensor_auto_exposure_roi(sensor);});
437  _is_first_frame[stream_type] = true;
438  }
439  }
440 }
441 
442 void BaseRealSenseNode::registerDynamicOption(ros::NodeHandle& nh, rs2::options sensor, std::string& module_name)
443 {
444  ros::NodeHandle nh1(nh, module_name);
445  std::shared_ptr<ddynamic_reconfigure::DDynamicReconfigure> ddynrec = std::make_shared<ddynamic_reconfigure::DDynamicReconfigure>(nh1);
446  for (auto i = 0; i < RS2_OPTION_COUNT; i++)
447  {
448  rs2_option option = static_cast<rs2_option>(i);
449  const std::string option_name(create_graph_resource_name(rs2_option_to_string(option)));
450  try
451  {
452  if (!sensor.supports(option) || sensor.is_option_read_only(option))
453  {
454  continue;
455  }
456  if (is_checkbox(sensor, option))
457  {
458  auto option_value = bool(sensor.get_option(option));
459  if (nh1.param(option_name, option_value, option_value))
460  {
461  sensor.set_option(option, option_value);
462  }
463  ddynrec->registerVariable<bool>(
464  option_name, option_value,
465  [option, sensor](bool new_value) { sensor.set_option(option, new_value); },
466  sensor.get_option_description(option));
467  continue;
468  }
469  const auto enum_dict = get_enum_method(sensor, option);
470  if (enum_dict.empty())
471  {
472  rs2::option_range op_range = sensor.get_option_range(option);
473  const auto sensor_option_value = sensor.get_option(option);
474  auto option_value = sensor_option_value;
475  if (nh1.param(option_name, option_value, option_value))
476  {
477  if (option_value < op_range.min || op_range.max < option_value)
478  {
479  ROS_WARN_STREAM("Param '" << nh1.resolveName(option_name) << "' has value " << option_value
480  << " outside the range [" << op_range.min << ", " << op_range.max
481  << "]. Using current sensor value " << sensor_option_value << " instead.");
482  option_value = sensor_option_value;
483  }
484  else
485  {
486  sensor.set_option(option, option_value);
487  }
488  }
489  if (option_value < op_range.min || op_range.max < option_value)
490  {
491  ROS_WARN_STREAM("Param '" << nh1.resolveName(option_name) << "' has value " << option_value
492  << " that is not in range [" << op_range.min << ", " << op_range.max << "]"
493  << ". Removing this parameter from dynamic reconfigure options.");
494  continue;
495  }
496  if (is_int_option(sensor, option))
497  {
498  ddynrec->registerVariable<int>(
499  option_name, int(option_value),
500  [option, sensor](int new_value) { sensor.set_option(option, new_value); },
501  sensor.get_option_description(option), int(op_range.min), int(op_range.max));
502  }
503  else
504  {
505  if (i == RS2_OPTION_DEPTH_UNITS)
506  {
507  if (ROS_DEPTH_SCALE >= op_range.min && ROS_DEPTH_SCALE <= op_range.max)
508  {
509  sensor.set_option(option, ROS_DEPTH_SCALE);
510  op_range.min = ROS_DEPTH_SCALE;
511  op_range.max = ROS_DEPTH_SCALE;
512 
514  }
515  }
516  else
517  {
518  ddynrec->registerVariable<double>(
519  option_name, option_value,
520  [option, sensor](double new_value) { sensor.set_option(option, new_value); },
521  sensor.get_option_description(option), double(op_range.min), double(op_range.max));
522  }
523  }
524  }
525  else
526  {
527  const auto sensor_option_value = sensor.get_option(option);
528  auto option_value = int(sensor_option_value);
529  if (nh1.param(option_name, option_value, option_value))
530  {
531  if (std::find_if(enum_dict.cbegin(), enum_dict.cend(),
532  [&option_value](const std::pair<std::string, int>& kv) {
533  return kv.second == option_value;
534  }) == enum_dict.cend())
535  {
536  ROS_WARN_STREAM("Param '" << nh1.resolveName(option_name) << "' has value " << option_value
537  << " that is not in the enum " << enum_dict
538  << ". Using current sensor value " << sensor_option_value << " instead.");
539  option_value = sensor_option_value;
540  }
541  else
542  {
543  sensor.set_option(option, option_value);
544  }
545  }
546  if (std::find_if(enum_dict.cbegin(), enum_dict.cend(),
547  [&option_value](const std::pair<std::string, int>& kv) {
548  return kv.second == option_value;
549  }) == enum_dict.cend())
550  {
551  ROS_WARN_STREAM("Param '" << nh1.resolveName(option_name) << "' has value " << option_value
552  << " that is not in the enum " << enum_dict
553  << ". Removing this parameter from dynamic reconfigure options.");
554  continue;
555  }
556  ddynrec->registerEnumVariable<int>(
557  option_name, option_value,
558  [option, sensor](int new_value) { sensor.set_option(option, new_value); },
559  sensor.get_option_description(option), enum_dict);
560  }
561  }
562  catch(const rs2::backend_error& e)
563  {
564  ROS_WARN_STREAM("Failed to set option: " << option_name << ": " << e.what());
565  }
566  catch(const std::exception& e)
567  {
568  std::cerr << e.what() << '\n';
569  }
570 
571  }
572  ddynrec->publishServicesTopics();
573  _ddynrec.push_back(ddynrec);
574 }
575 
577 {
578  ROS_INFO("Setting Dynamic reconfig parameters.");
579 
581  {
582  std::string module_name = create_graph_resource_name(sensor.get_info(RS2_CAMERA_INFO_NAME));
583  ROS_DEBUG_STREAM("module_name:" << module_name);
584  registerDynamicOption(nh, sensor, module_name);
585  }
586 
587  for (NamedFilter nfilter : _filters)
588  {
589  std::string module_name = nfilter._name;
590  auto sensor = *(nfilter._filter);
591  ROS_DEBUG_STREAM("module_name:" << module_name);
592  registerDynamicOption(nh, sensor, module_name);
593  }
594  ROS_INFO("Done Setting Dynamic reconfig parameters.");
595 }
596 
598 {
599  if (str == "RS2_STREAM_ANY")
600  return RS2_STREAM_ANY;
601  if (str == "RS2_STREAM_COLOR")
602  return RS2_STREAM_COLOR;
603  if (str == "RS2_STREAM_INFRARED")
604  return RS2_STREAM_INFRARED;
605  if (str == "RS2_STREAM_FISHEYE")
606  return RS2_STREAM_FISHEYE;
607  throw std::runtime_error("Unknown stream string " + str);
608 }
609 
611 {
612  ROS_INFO("getParameters...");
613 
614  // Setup system to use RGB image from the infra stream if configured by user
615  bool infra_rgb;
616  _pnh.param("infra_rgb", infra_rgb, false);
617  if (infra_rgb)
618  {
620  _image_format[RS2_STREAM_INFRARED] = CV_8UC3; // CVBridge type
622  _unit_step_size[RS2_STREAM_INFRARED] = 3 * sizeof(uint8_t); // sensor_msgs::ImagePtr row step size
623  ROS_INFO_STREAM("Infrared RGB stream enabled");
624  }
625 
626  _pnh.param("align_depth", _align_depth, ALIGN_DEPTH);
627  _pnh.param("enable_pointcloud", _pointcloud, POINTCLOUD);
628  std::string pc_texture_stream("");
629  int pc_texture_idx;
630  _pnh.param("pointcloud_texture_stream", pc_texture_stream, std::string("RS2_STREAM_COLOR"));
631  _pnh.param("pointcloud_texture_index", pc_texture_idx, 0);
632  _pointcloud_texture = stream_index_pair{rs2_string_to_stream(pc_texture_stream), pc_texture_idx};
633 
634  _pnh.param("filters", _filters_str, DEFAULT_FILTERS);
635  _pointcloud |= (_filters_str.find("pointcloud") != std::string::npos);
636 
637  _pnh.param("publish_tf", _publish_tf, PUBLISH_TF);
638  _pnh.param("tf_publish_rate", _tf_publish_rate, TF_PUBLISH_RATE);
639 
640  _pnh.param("enable_sync", _sync_frames, SYNC_FRAMES);
641  if (_pointcloud || _align_depth || _filters_str.size() > 0)
642  _sync_frames = true;
643 
644  _pnh.param("json_file_path", _json_file_path, std::string(""));
645 
646  for (auto& stream : IMAGE_STREAMS)
647  {
648  std::string param_name(_stream_name[stream.first] + "_width");
649  _pnh.param(param_name, _width[stream], IMAGE_WIDTH);
650  ROS_DEBUG_STREAM("parameter:" << param_name << " = " << _width[stream]);
651  param_name = _stream_name[stream.first] + "_height";
652  _pnh.param(param_name, _height[stream], IMAGE_HEIGHT);
653  ROS_DEBUG_STREAM("parameter:" << param_name << " = " << _height[stream]);
654  param_name = _stream_name[stream.first] + "_fps";
655  _pnh.param(param_name, _fps[stream], IMAGE_FPS);
656  ROS_DEBUG_STREAM("parameter:" << param_name << " = " << _fps[stream]);
657  param_name = "enable_" + STREAM_NAME(stream);
658  _pnh.param(param_name, _enable[stream], true);
659  ROS_DEBUG_STREAM("parameter:" << param_name << " = " << _enable[stream]);
660  }
661 
662  for (auto& stream : HID_STREAMS)
663  {
664  std::string param_name(_stream_name[stream.first] + "_fps");
665  ROS_DEBUG_STREAM("reading parameter:" << param_name);
666  _pnh.param(param_name, _fps[stream], IMU_FPS);
667  param_name = "enable_" + STREAM_NAME(stream);
668  _pnh.param(param_name, _enable[stream], ENABLE_IMU);
669  ROS_DEBUG_STREAM("_enable[" << _stream_name[stream.first] << "]:" << _enable[stream]);
670  }
671  _pnh.param("base_frame_id", _base_frame_id, DEFAULT_BASE_FRAME_ID);
672  _pnh.param("odom_frame_id", _odom_frame_id, DEFAULT_ODOM_FRAME_ID);
673 
674  std::vector<stream_index_pair> streams(IMAGE_STREAMS);
675  streams.insert(streams.end(), HID_STREAMS.begin(), HID_STREAMS.end());
676  for (auto& stream : streams)
677  {
678  std::string param_name(static_cast<std::ostringstream&&>(std::ostringstream() << STREAM_NAME(stream) << "_frame_id").str());
679  _pnh.param(param_name, _frame_id[stream], FRAME_ID(stream));
680  ROS_DEBUG_STREAM("frame_id: reading parameter:" << param_name << " : " << _frame_id[stream]);
681  param_name = static_cast<std::ostringstream&&>(std::ostringstream() << STREAM_NAME(stream) << "_optical_frame_id").str();
682  _pnh.param(param_name, _optical_frame_id[stream], OPTICAL_FRAME_ID(stream));
683  ROS_DEBUG_STREAM("optical: reading parameter:" << param_name << " : " << _optical_frame_id[stream]);
684  }
685 
686  std::string unite_imu_method_str("");
687  _pnh.param("unite_imu_method", unite_imu_method_str, DEFAULT_UNITE_IMU_METHOD);
688  if (unite_imu_method_str == "linear_interpolation")
689  _imu_sync_method = imu_sync_method::LINEAR_INTERPOLATION;
690  else if (unite_imu_method_str == "copy")
691  _imu_sync_method = imu_sync_method::COPY;
692  else
693  _imu_sync_method = imu_sync_method::NONE;
694 
695  if (_imu_sync_method > imu_sync_method::NONE)
696  {
697  _pnh.param("imu_optical_frame_id", _optical_frame_id[GYRO], DEFAULT_IMU_OPTICAL_FRAME_ID);
698  }
699 
700  for (auto& stream : IMAGE_STREAMS)
701  {
702  if (stream == DEPTH || stream == CONFIDENCE) continue;
703  if (stream.second > 1) continue;
704  std::string param_name(static_cast<std::ostringstream&&>(std::ostringstream() << "aligned_depth_to_" << STREAM_NAME(stream) << "_frame_id").str());
706  }
707 
708  _pnh.param("allow_no_texture_points", _allow_no_texture_points, ALLOW_NO_TEXTURE_POINTS);
709  _pnh.param("ordered_pc", _ordered_pc, ORDERED_POINTCLOUD);
710  _pnh.param("clip_distance", _clipping_distance, static_cast<float>(-1.0));
711  _pnh.param("linear_accel_cov", _linear_accel_cov, static_cast<double>(0.01));
712  _pnh.param("angular_velocity_cov", _angular_velocity_cov, static_cast<double>(0.01));
713  _pnh.param("hold_back_imu_for_frames", _hold_back_imu_for_frames, HOLD_BACK_IMU_FOR_FRAMES);
714  _pnh.param("publish_odom_tf", _publish_odom_tf, PUBLISH_ODOM_TF);
715 }
716 
718 {
719  ROS_INFO("setupDevice...");
720  try{
721  if (!_json_file_path.empty())
722  {
724  {
725  std::stringstream ss;
726  std::ifstream in(_json_file_path);
727  if (in.is_open())
728  {
729  ss << in.rdbuf();
730  std::string json_file_content = ss.str();
731 
732  auto adv = _dev.as<rs2::serializable_device>();
733  adv.load_json(json_file_content);
734  ROS_INFO_STREAM("JSON file is loaded! (" << _json_file_path << ")");
735  }
736  else
737  ROS_WARN_STREAM("JSON file provided doesn't exist! (" << _json_file_path << ")");
738  }
739  else
740  ROS_WARN("Device does not support advanced settings!");
741  }
742  else
743  ROS_INFO("JSON file is not provided");
744 
745  ROS_INFO_STREAM("ROS Node Namespace: " << _namespace);
746 
747  auto camera_name = _dev.get_info(RS2_CAMERA_INFO_NAME);
748  ROS_INFO_STREAM("Device Name: " << camera_name);
749 
750  ROS_INFO_STREAM("Device Serial No: " << _serial_no);
751 
752  auto camera_id = _dev.get_info(RS2_CAMERA_INFO_PHYSICAL_PORT);
753 
754  ROS_INFO_STREAM("Device physical port: " << camera_id);
755 
757  ROS_INFO_STREAM("Device FW version: " << fw_ver);
758 
760  ROS_INFO_STREAM("Device Product ID: 0x" << pid);
761 
762  ROS_INFO_STREAM("Enable PointCloud: " << ((_pointcloud)?"On":"Off"));
763  ROS_INFO_STREAM("Align Depth: " << ((_align_depth)?"On":"Off"));
764  ROS_INFO_STREAM("Sync Mode: " << ((_sync_frames)?"On":"Off"));
765 
767 
768 
769  std::function<void(rs2::frame)> frame_callback_function, imu_callback_function;
770  if (_sync_frames)
771  {
772  frame_callback_function = _syncer;
773 
774  auto frame_callback_inner = [this](rs2::frame frame){
775  frame_callback(frame);
776  };
777  _syncer.start(frame_callback_inner);
778  }
779  else
780  {
781  frame_callback_function = [this](rs2::frame frame){frame_callback(frame);};
782  }
783 
784  if (_imu_sync_method == imu_sync_method::NONE)
785  {
786  imu_callback_function = [this](rs2::frame frame){imu_callback(frame);};
787  }
788  else
789  {
790  imu_callback_function = [this](rs2::frame frame){imu_callback_sync(frame, _imu_sync_method);};
791  }
792  std::function<void(rs2::frame)> multiple_message_callback_function = [this](rs2::frame frame){multiple_message_callback(frame, _imu_sync_method);};
793 
794  ROS_INFO_STREAM("Device Sensors: ");
795  for(auto&& sensor : _dev_sensors)
796  {
797  for (auto& profile : sensor.get_stream_profiles())
798  {
799  auto video_profile = profile.as<rs2::video_stream_profile>();
800  stream_index_pair sip(video_profile.stream_type(), video_profile.stream_index());
801  if (_sensors.find( sip ) != _sensors.end())
802  continue;
803  _sensors[sip] = sensor;
804  }
805 
806  std::string module_name = sensor.get_info(RS2_CAMERA_INFO_NAME);
807  if (sensor.is<rs2::depth_sensor>())
808  {
809  _sensors_callback[module_name] = frame_callback_function;
810  }
811  else if (sensor.is<rs2::color_sensor>())
812  {
813  _sensors_callback[module_name] = frame_callback_function;
814  }
815  else if (sensor.is<rs2::fisheye_sensor>())
816  {
817  _sensors_callback[module_name] = frame_callback_function;
818  }
819  else if (sensor.is<rs2::motion_sensor>())
820  {
821  _sensors_callback[module_name] = imu_callback_function;
822  }
823  else if (sensor.is<rs2::pose_sensor>())
824  {
825  _sensors_callback[module_name] = multiple_message_callback_function;
826  }
827  else
828  {
829  ROS_ERROR_STREAM("Module Name \"" << module_name << "\" isn't supported by LibRealSense! Terminating RealSense Node...");
830  ros::shutdown();
831  exit(1);
832  }
833  ROS_INFO_STREAM(std::string(sensor.get_info(RS2_CAMERA_INFO_NAME)) << " was found.");
834  }
835 
836  // Update "enable" map
837  for (std::pair<stream_index_pair, bool> const& enable : _enable )
838  {
839  const stream_index_pair& stream_index(enable.first);
840  if (enable.second && _sensors.find(stream_index) == _sensors.end())
841  {
842  ROS_INFO_STREAM("(" << rs2_stream_to_string(stream_index.first) << ", " << stream_index.second << ") sensor isn't supported by current device! -- Skipping...");
843  _enable[enable.first] = false;
844  }
845  }
846  }
847  catch(const std::exception& ex)
848  {
849  ROS_ERROR_STREAM("An exception has been thrown: " << ex.what());
850  throw;
851  }
852  catch(...)
853  {
854  ROS_ERROR_STREAM("Unknown exception has occured!");
855  throw;
856  }
857 }
858 
860 {
861  ROS_INFO("setupPublishers...");
863 
864  for (auto& stream : IMAGE_STREAMS)
865  {
866  if (_enable[stream])
867  {
868  std::stringstream image_raw, camera_info;
869  bool rectified_image = false;
870  if (stream == DEPTH || stream == CONFIDENCE || stream == INFRA1 || stream == INFRA2)
871  rectified_image = true;
872 
873  std::string stream_name(STREAM_NAME(stream));
874  image_raw << stream_name << "/image_" << ((rectified_image)?"rect_":"") << "raw";
875  camera_info << stream_name << "/camera_info";
876 
877  std::shared_ptr<FrequencyDiagnostics> frequency_diagnostics(new FrequencyDiagnostics(_fps[stream], stream_name, _serial_no));
878  _image_publishers[stream] = {image_transport.advertise(image_raw.str(), 1), frequency_diagnostics};
879  _info_publisher[stream] = _node_handle.advertise<sensor_msgs::CameraInfo>(camera_info.str(), 1);
880 
881  if (_align_depth && (stream != DEPTH) && (stream != CONFIDENCE) && stream.second < 2)
882  {
883  std::stringstream aligned_image_raw, aligned_camera_info;
884  aligned_image_raw << "aligned_depth_to_" << stream_name << "/image_raw";
885  aligned_camera_info << "aligned_depth_to_" << stream_name << "/camera_info";
886 
887  std::string aligned_stream_name = "aligned_depth_to_" + stream_name;
888  std::shared_ptr<FrequencyDiagnostics> frequency_diagnostics(new FrequencyDiagnostics(_fps[stream], aligned_stream_name, _serial_no));
889  _depth_aligned_image_publishers[stream] = {image_transport.advertise(aligned_image_raw.str(), 1), frequency_diagnostics};
890  _depth_aligned_info_publisher[stream] = _node_handle.advertise<sensor_msgs::CameraInfo>(aligned_camera_info.str(), 1);
891  }
892 
893  if (stream == DEPTH && _pointcloud)
894  {
896  }
897  }
898  }
899 
900  _synced_imu_publisher = std::make_shared<SyncedImuPublisher>();
901  if (_imu_sync_method > imu_sync_method::NONE && _enable[GYRO] && _enable[ACCEL])
902  {
903  ROS_INFO("Start publisher IMU");
904  _synced_imu_publisher = std::make_shared<SyncedImuPublisher>(_node_handle.advertise<sensor_msgs::Imu>("imu", 5));
905  _synced_imu_publisher->Enable(_hold_back_imu_for_frames);
906  }
907  else
908  {
909  if (_enable[GYRO])
910  {
912  }
913 
914  if (_enable[ACCEL])
915  {
917  }
918  }
919  if (_enable[POSE])
920  {
921  _imu_publishers[POSE] = _node_handle.advertise<nav_msgs::Odometry>("odom/sample", 100);
922  }
923 
924 
925  if (_enable[FISHEYE] &&
926  _enable[DEPTH])
927  {
928  _depth_to_other_extrinsics_publishers[FISHEYE] = _node_handle.advertise<Extrinsics>("extrinsics/depth_to_fisheye", 1, true);
929  }
930 
931  if (_enable[COLOR] &&
932  _enable[DEPTH])
933  {
934  _depth_to_other_extrinsics_publishers[COLOR] = _node_handle.advertise<Extrinsics>("extrinsics/depth_to_color", 1, true);
935  }
936 
937  if (_enable[INFRA1] &&
938  _enable[DEPTH])
939  {
940  _depth_to_other_extrinsics_publishers[INFRA1] = _node_handle.advertise<Extrinsics>("extrinsics/depth_to_infra1", 1, true);
941  }
942 
943  if (_enable[INFRA2] &&
944  _enable[DEPTH])
945  {
946  _depth_to_other_extrinsics_publishers[INFRA2] = _node_handle.advertise<Extrinsics>("extrinsics/depth_to_infra2", 1, true);
947  }
948 }
949 
// BaseRealSenseNode::publishAlignedDepthToOthers body (signature is on a preceding line):
// for every enabled non-depth stream that has subscribers, register the depth image to
// that stream's viewpoint with rs2::align and publish the aligned (optionally colorized) frame.
{
    for (auto it = frames.begin(); it != frames.end(); ++it)
    {
        auto frame = (*it);
        auto stream_type = frame.get_profile().stream_type();

        // Depth is the alignment source; confidence has no aligned counterpart.
        if (RS2_STREAM_DEPTH == stream_type || RS2_STREAM_CONFIDENCE == stream_type)
            continue;

        auto stream_index = frame.get_profile().stream_index();
        // Only primary streams (index 0/1) have aligned-depth publishers.
        if (stream_index > 1)
        {
            continue;
        }
        stream_index_pair sip{stream_type, stream_index};
        auto& info_publisher = _depth_aligned_info_publisher.at(sip);
        auto& image_publisher = _depth_aligned_image_publishers.at(sip);

        // Skip the (expensive) alignment entirely when nobody is subscribed.
        if(0 != info_publisher.getNumSubscribers() ||
           0 != image_publisher.first.getNumSubscribers())
        {
            // Lazily create one rs2::align filter per target stream type and cache it.
            std::shared_ptr<rs2::align> align;
            try{
                align = _align.at(stream_type);
            }
            catch(const std::out_of_range& e)
            {
                ROS_DEBUG_STREAM("Allocate align filter for:" << rs2_stream_to_string(sip.first) << sip.second);
                align = (_align[stream_type] = std::make_shared<rs2::align>(stream_type));
            }
            rs2::frameset processed = frames.apply_filter(*align);
            std::vector<rs2::frame> frames_to_publish;
            frames_to_publish.push_back(processed.get_depth_frame()); // push_back(aligned_depth_frame)
            // If a colorizer filter is configured, publish the colorized aligned depth instead.
            for (std::vector<NamedFilter>::const_iterator filter_it = _filters.begin(); filter_it != _filters.end(); filter_it++)
            {
                if (filter_it->_name == "colorizer")
                {
                    frames_to_publish.push_back(filter_it->_filter->process(frames_to_publish.back())); //push_back(colorized)
                    break;
                }
            }

            publishFrame(frames_to_publish.back(), t, sip,
        }
    }
}
1002 
// BaseRealSenseNode::enable_devices body (signature is on a preceding line):
// for every enabled image and HID stream, pick a matching sensor profile
// (by width/height/fps/format where requested, 0 = wildcard), falling back to the
// device default profile, and disable the stream when nothing fits.
{
    for (auto& elem : IMAGE_STREAMS)
    {
        if (_enable[elem])
        {
            auto& sens = _sensors[elem];
            auto profiles = sens.get_stream_profiles();
            rs2::stream_profile default_profile, selected_profile;
            for (auto& profile : profiles)
            {
                auto video_profile = profile.as<rs2::video_stream_profile>();
                ROS_DEBUG_STREAM("Sensor profile: " <<
                                 "stream_type: " << rs2_stream_to_string(video_profile.stream_type()) << "(" << video_profile.stream_index() << ")" <<
                                 "Format: " << video_profile.format() <<
                                 ", Width: " << video_profile.width() <<
                                 ", Height: " << video_profile.height() <<
                                 ", FPS: " << video_profile.fps());

                if (profile.stream_type() == elem.first && profile.stream_index() == elem.second)
                {
                    if (profile.is_default())
                    {
                        default_profile = profile;
                    }
                    // 0 (or an absent format entry) acts as a wildcard for that attribute.
                    if ((_width[elem] == 0 || video_profile.width() == _width[elem]) &&
                        (_height[elem] == 0 || video_profile.height() == _height[elem]) &&
                        (_fps[elem] == 0 || video_profile.fps() == _fps[elem]) &&
                        (_format.find(elem.first) == _format.end() || video_profile.format() == _format[elem.first] ))
                    {
                        selected_profile = profile;
                        break;
                    }
                }

            }
            if (!selected_profile)
            {
                // -1 means "user did not constrain this attribute": suppress the warning then.
                ROS_WARN_STREAM_COND((_width[elem]!=-1 && _height[elem]!=-1 && _fps[elem]!=-1), "Given stream configuration is not supported by the device! " <<
                                     " Stream: " << rs2_stream_to_string(elem.first) <<
                                     ", Stream Index: " << elem.second <<
                                     ", Width: " << _width[elem] <<
                                     ", Height: " << _height[elem] <<
                                     ", FPS: " << _fps[elem] <<
                                     ", Format: " << ((_format.find(elem.first) == _format.end())? "None":rs2_format_to_string(rs2_format(_format[elem.first]))));
                if (default_profile)
                {
                    ROS_WARN_STREAM_COND((_width[elem]!=-1 && _height[elem]!=-1 && _fps[elem]!=-1), "Using default profile instead.");
                    selected_profile = default_profile;
                }
            }
            if (selected_profile)
            {
                // Record the actual (possibly defaulted) profile attributes back into the maps.
                auto video_profile = selected_profile.as<rs2::video_stream_profile>();
                _width[elem] = video_profile.width();
                _height[elem] = video_profile.height();
                _fps[elem] = video_profile.fps();
                _enabled_profiles[elem].push_back(selected_profile);
                // Pre-allocate the zeroed image buffer used when publishing this stream.
                _image[elem] = cv::Mat(_height[elem], _width[elem], _image_format[elem.first], cv::Scalar(0, 0, 0));
                ROS_INFO_STREAM(STREAM_NAME(elem) << " stream is enabled - width: " << _width[elem] << ", height: " << _height[elem] << ", fps: " << _fps[elem] << ", " << "Format: " << video_profile.format());
            }
            else
            {
                _enable[elem] = false;
            }
        }
    }
    if (_align_depth)
    {
        // Aligned-depth buffers always use the DEPTH stream's dimensions and format.
        for (auto& profiles : _enabled_profiles)
        {
            _depth_aligned_image[profiles.first] = cv::Mat(_height[DEPTH], _width[DEPTH], _image_format[DEPTH.first], cv::Scalar(0, 0, 0));
            _depth_scaled_image[profiles.first] = cv::Mat(_height[DEPTH], _width[DEPTH], _image_format[DEPTH.first], cv::Scalar(0, 0, 0));
        }
    }

    // Streaming HID
    for (auto& elem : HID_STREAMS)
    {
        if (_enable[elem])
        {
            auto& sens = _sensors[elem];
            auto profiles = sens.get_stream_profiles();
            rs2::stream_profile default_profile, selected_profile;
            ROS_DEBUG_STREAM("Available profiles:");
            {
                ROS_DEBUG_STREAM("type:" << rs2_stream_to_string(profile.stream_type()) <<
                                 " fps: " << profile.fps() << ". format: " << profile.format());
            }
            for (rs2::stream_profile& profile : profiles)
            {
                if (profile.stream_type() == elem.first)
                {
                    if (profile.is_default())
                        default_profile = profile;
                    // HID profiles are matched by fps only (0 = wildcard).
                    if (_fps[elem] == 0 || profile.fps() == _fps[elem])
                    {
                        selected_profile = profile;
                        break;
                    }
                }
            }
            if (!selected_profile)
            {
                std::string stream_name(STREAM_NAME(elem));
                // NOTE(review): "mathcing" typo in this log message (runtime string, left as-is here).
                ROS_WARN_STREAM_COND((_fps[elem]!=-1), "No mathcing profile found for " << stream_name << " with fps=" << _fps[elem]);
                if (default_profile)
                {
                    ROS_WARN_STREAM_COND((_fps[elem]!=-1), "Using default profile instead.");
                    selected_profile = default_profile;
                }
            }
            if(selected_profile)
            {
                _fps[elem] = selected_profile.fps();
                _enabled_profiles[elem].push_back(selected_profile);
                ROS_INFO_STREAM(STREAM_NAME(elem) << " stream is enabled - fps: " << _fps[elem]);
            }
            else
            {
                _enable[elem] = false;
            }
        }
    }
}
1129 
// BaseRealSenseNode::setupFilters body (signature is on a preceding line):
// parse the comma-separated _filters_str parameter and build the processing-block
// pipeline in _filters. Insertion order matters: decimation and disparity_start go
// to the front, colorizer and pointcloud to the back.
{
    std::vector<std::string> filters_str;
    boost::split(filters_str, _filters_str, [](char c){return c == ',';});
    bool use_disparity_filter(false);
    bool use_colorizer_filter(false);
    bool use_decimation_filter(false);
    for (std::vector<std::string>::const_iterator s_iter=filters_str.begin(); s_iter!=filters_str.end(); s_iter++)
    {
        if ((*s_iter) == "colorizer")
        {
            use_colorizer_filter = true;
        }
        else if ((*s_iter) == "disparity")
        {
            use_disparity_filter = true;
        }
        else if ((*s_iter) == "spatial")
        {
            ROS_INFO("Add Filter: spatial");
            _filters.push_back(NamedFilter("spatial", std::make_shared<rs2::spatial_filter>()));
        }
        else if ((*s_iter) == "temporal")
        {
            ROS_INFO("Add Filter: temporal");
            _filters.push_back(NamedFilter("temporal", std::make_shared<rs2::temporal_filter>()));
        }
        else if ((*s_iter) == "hole_filling")
        {
            ROS_INFO("Add Filter: hole_filling");
            _filters.push_back(NamedFilter("hole_filling", std::make_shared<rs2::hole_filling_filter>()));
        }
        else if ((*s_iter) == "decimation")
        {
            use_decimation_filter = true;
        }
        else if ((*s_iter) == "pointcloud")
        {
            assert(_pointcloud); // For now, it is set in getParameters()..
        }
        else if ((*s_iter).size() > 0)
        {
            ROS_ERROR_STREAM("Unknown Filter: " << (*s_iter));
            // NOTE(review): bare `throw;` with no active exception calls std::terminate -
            // consider throwing a concrete exception type instead.
            throw;
        }
    }
    if (use_disparity_filter)
    {
        // Disparity transform brackets the pipeline: depth->disparity first, disparity->depth last.
        ROS_INFO("Add Filter: disparity");
        _filters.insert(_filters.begin(), NamedFilter("disparity_start", std::make_shared<rs2::disparity_transform>()));
        _filters.push_back(NamedFilter("disparity_end", std::make_shared<rs2::disparity_transform>(false)));
        ROS_INFO("Done Add Filter: disparity");
    }
    if (use_decimation_filter)
    {
        ROS_INFO("Add Filter: decimation");
        _filters.insert(_filters.begin(),NamedFilter("decimation", std::make_shared<rs2::decimation_filter>()));
    }
    if (use_colorizer_filter)
    {
        ROS_INFO("Add Filter: colorizer");
        _filters.push_back(NamedFilter("colorizer", std::make_shared<rs2::colorizer>()));

        // Colorized depth is published as a color image: switch the depth stream's
        // image/encoding/step metadata over to the COLOR stream's values.
        // Types for depth stream
        _image_format[DEPTH.first] = _image_format[COLOR.first]; // CVBridge type
        _encoding[DEPTH.first] = _encoding[COLOR.first]; // ROS message type
        _unit_step_size[DEPTH.first] = _unit_step_size[COLOR.first]; // sensor_msgs::ImagePtr row step size
        _depth_aligned_encoding[RS2_STREAM_INFRARED] = _encoding[COLOR.first]; // ROS message type
        _depth_aligned_encoding[RS2_STREAM_COLOR] = _encoding[COLOR.first]; // ROS message type
        _depth_aligned_encoding[RS2_STREAM_FISHEYE] = _encoding[COLOR.first]; // ROS message type

        _width[DEPTH] = _width[COLOR];
        _height[DEPTH] = _height[COLOR];
        _image[DEPTH] = cv::Mat(std::max(0, _height[DEPTH]), std::max(0, _width[DEPTH]), _image_format[DEPTH.first], cv::Scalar(0, 0, 0));
    }
    if (_pointcloud)
    {
        ROS_INFO("Add Filter: pointcloud");
        _filters.push_back(NamedFilter("pointcloud", std::make_shared<rs2::pointcloud>(_pointcloud_texture.first, _pointcloud_texture.second)));
    }
    ROS_INFO("num_filters: %d", static_cast<int>(_filters.size()));
}
1212 
1213 cv::Mat& BaseRealSenseNode::fix_depth_scale(const cv::Mat& from_image, cv::Mat& to_image)
1214 {
1215  static const float meter_to_mm = 0.001f;
1216  if (fabs(_depth_scale_meters - meter_to_mm) < 1e-6)
1217  {
1218  to_image = from_image;
1219  return to_image;
1220  }
1221 
1222  if (to_image.size() != from_image.size())
1223  {
1224  to_image.create(from_image.rows, from_image.cols, from_image.type());
1225  }
1226 
1227  CV_Assert(from_image.depth() == _image_format[RS2_STREAM_DEPTH]);
1228 
1229  int nRows = from_image.rows;
1230  int nCols = from_image.cols;
1231 
1232  if (from_image.isContinuous())
1233  {
1234  nCols *= nRows;
1235  nRows = 1;
1236  }
1237 
1238  int i,j;
1239  const uint16_t* p_from;
1240  uint16_t* p_to;
1241  for( i = 0; i < nRows; ++i)
1242  {
1243  p_from = from_image.ptr<uint16_t>(i);
1244  p_to = to_image.ptr<uint16_t>(i);
1245  for ( j = 0; j < nCols; ++j)
1246  {
1247  p_to[j] = p_from[j] * _depth_scale_meters / meter_to_mm;
1248  }
1249  }
1250  return to_image;
1251 }
1252 
1253 void BaseRealSenseNode::clip_depth(rs2::depth_frame depth_frame, float clipping_dist)
1254 {
1255  uint16_t* p_depth_frame = reinterpret_cast<uint16_t*>(const_cast<void*>(depth_frame.get_data()));
1256  uint16_t clipping_value = static_cast<uint16_t>(clipping_dist / _depth_scale_meters);
1257 
1258  int width = depth_frame.get_width();
1259  int height = depth_frame.get_height();
1260 
1261  #ifdef _OPENMP
1262  #pragma omp parallel for schedule(dynamic) //Using OpenMP to try to parallelise the loop
1263  #endif
1264  for (int y = 0; y < height; y++)
1265  {
1266  auto depth_pixel_index = y * width;
1267  for (int x = 0; x < width; x++, ++depth_pixel_index)
1268  {
1269  // Check if the depth value is greater than the threashold
1270  if (p_depth_frame[depth_pixel_index] > clipping_value)
1271  {
1272  p_depth_frame[depth_pixel_index] = 0; //Set to invalid (<=0) value.
1273  }
1274  }
1275  }
1276 }
1277 
// BaseRealSenseNode::CreateUnitedMessage body (signature is on a preceding line):
// build one sensor_msgs::Imu that carries the gyro sample's angular velocity and
// the accel sample's linear acceleration, stamped with the gyro sample's time.
{
    sensor_msgs::Imu imu_msg;
    ros::Time t(gyro_data.m_time);
    imu_msg.header.seq = 0; // the caller assigns the real sequence number before publishing
    imu_msg.header.stamp = t;

    imu_msg.angular_velocity.x = gyro_data.m_data.x();
    imu_msg.angular_velocity.y = gyro_data.m_data.y();
    imu_msg.angular_velocity.z = gyro_data.m_data.z();

    imu_msg.linear_acceleration.x = accel_data.m_data.x();
    imu_msg.linear_acceleration.y = accel_data.m_data.y();
    imu_msg.linear_acceleration.z = accel_data.m_data.z();
    return imu_msg;
}
1294 
// Linear interpolation between a and b at parameter t (t=0 -> a, t=1 -> b).
// Works for any T supporting scalar multiplication and addition (e.g. Eigen vectors).
template <typename T> T lerp(const T &a, const T &b, const double t) {
    const double w = 1.0 - t;
    return a * w + b * t;
}
1298 
// Merge gyro and accel streams into united IMU messages at gyro rate: each gyro
// sample keeps its own timestamp and is paired with an accel value linearly
// interpolated between the two accel samples that bracket it in time.
// Messages are only emitted when an accel sample arrives; gyro-only calls just
// accumulate history. Gyro samples older than the current accel bracket are dropped.
// NOTE(review): the function-local static history is shared by all instances and has
// no internal locking - appears to rely on the caller (imu_callback_sync) serializing
// access under its mutex; confirm before calling from elsewhere.
void BaseRealSenseNode::FillImuData_LinearInterpolation(const CimuData imu_data, std::deque<sensor_msgs::Imu>& imu_msgs)
{
    static std::deque<CimuData> _imu_history;
    _imu_history.push_back(imu_data);
    stream_index_pair type(imu_data.m_type);
    imu_msgs.clear();

    // Interpolate only when an accel sample arrives and there is enough history
    // to bracket at least one gyro sample (two accels + one gyro).
    if ((type != ACCEL) || _imu_history.size() < 3)
        return;

    std::deque<CimuData> gyros_data;
    CimuData accel0, accel1, crnt_imu;

    while (_imu_history.size())
    {
        crnt_imu = _imu_history.front();
        _imu_history.pop_front();
        if (!accel0.is_set() && crnt_imu.m_type == ACCEL)
        {
            // First accel sample seen: becomes the lower bracket.
            accel0 = crnt_imu;
        }
        else if (accel0.is_set() && crnt_imu.m_type == ACCEL)
        {
            // Upper bracket found: interpolate accel for every buffered gyro sample.
            accel1 = crnt_imu;
            const double dt = accel1.m_time - accel0.m_time;

            while (gyros_data.size())
            {
                CimuData crnt_gyro = gyros_data.front();
                gyros_data.pop_front();
                // alpha in [0,1] positions the gyro time inside the accel bracket.
                const double alpha = (crnt_gyro.m_time - accel0.m_time) / dt;
                CimuData crnt_accel(ACCEL, lerp(accel0.m_data, accel1.m_data, alpha), crnt_gyro.m_time);
                imu_msgs.push_back(CreateUnitedMessage(crnt_accel, crnt_gyro));
            }
            accel0 = accel1; // slide the bracket window forward
        }
        else if (accel0.is_set() && crnt_imu.m_time >= accel0.m_time && crnt_imu.m_type == GYRO)
        {
            // Gyro sample inside the current bracket; older gyros fall through and are discarded.
            gyros_data.push_back(crnt_imu);
        }
    }
    // Keep the newest accel sample as the lower bracket for the next invocation.
    _imu_history.push_back(crnt_imu);
    return;
}
1343 
1344 void BaseRealSenseNode::FillImuData_Copy(const CimuData imu_data, std::deque<sensor_msgs::Imu>& imu_msgs)
1345 {
1346  stream_index_pair type(imu_data.m_type);
1347 
1348  static CimuData _accel_data(ACCEL, {0,0,0}, -1.0);
1349  if (ACCEL == type)
1350  {
1351  _accel_data = imu_data;
1352  return;
1353  }
1354  if (_accel_data.m_time < 0)
1355  return;
1356 
1357  imu_msgs.push_back(CreateUnitedMessage(_accel_data, imu_data));
1358 }
1359 
// BaseRealSenseNode::ImuMessage_AddDefaultValues body (signature is on a preceding line):
// fill the fields common to every published Imu message.
{
    imu_msg.orientation.x = 0.0;
    imu_msg.orientation.y = 0.0;
    imu_msg.orientation.z = 0.0;
    imu_msg.orientation.w = 0.0;

    // orientation_covariance[0] == -1 is the sensor_msgs convention for
    // "orientation not populated".
    imu_msg.orientation_covariance = { -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    imu_msg.linear_acceleration_covariance = { _linear_accel_cov, 0.0, 0.0, 0.0, _linear_accel_cov, 0.0, 0.0, 0.0, _linear_accel_cov};
}
1372 
// BaseRealSenseNode::imu_callback_sync body (signature is on a preceding line):
// handle a gyro/accel frame in united mode - merge it with the other IMU stream
// (COPY or LINEAR_INTERPOLATION) and publish on the combined "imu" topic.
{
    // Function-local statics serialize and sequence all gyro+accel callbacks.
    static std::mutex m_mutex;
    static int seq = 0;

    // NOTE(review): manual lock()/unlock() is not exception-safe;
    // a std::lock_guard would release on every exit path.
    m_mutex.lock();

    auto stream = frame.get_profile().stream_type();
    auto stream_index = (stream == GYRO.first)?GYRO:ACCEL;
    double frame_time = frame.get_timestamp();

    // First frame overall establishes the time base (compare_exchange ensures once-only).
    bool placeholder_false(false);
    if (_is_initialized_time_base.compare_exchange_strong(placeholder_false, true) )
    {
    }

    seq += 1;

    if (0 != _synced_imu_publisher->getNumSubscribers())
    {
        // Raw IMU payload is three packed floats (x, y, z).
        auto crnt_reading = *(reinterpret_cast<const float3*>(frame.get_data()));
        Eigen::Vector3d v(crnt_reading.x, crnt_reading.y, crnt_reading.z);
        CimuData imu_data(stream_index, v, frameSystemTimeSec(frame));
        std::deque<sensor_msgs::Imu> imu_msgs;
        switch (sync_method)
        {
            case NONE: //Cannot really be NONE. Just to avoid compilation warning.
            case COPY:
                FillImuData_Copy(imu_data, imu_msgs);
                break;
            case LINEAR_INTERPOLATION:
                FillImuData_LinearInterpolation(imu_data, imu_msgs);
                break;
        }
        // One input sample can yield zero or many united messages.
        while (imu_msgs.size())
        {
            sensor_msgs::Imu imu_msg = imu_msgs.front();
            imu_msg.header.seq = seq;
            ImuMessage_AddDefaultValues(imu_msg);
            _synced_imu_publisher->Publish(imu_msg);
            ROS_DEBUG("Publish united %s stream", rs2_stream_to_string(frame.get_profile().stream_type()));
            imu_msgs.pop_front();
        }
    }
    m_mutex.unlock();
};
1420 
// BaseRealSenseNode::imu_callback body (signature is on a preceding line):
// publish a single gyro or accel frame on its own per-stream topic (non-united mode).
{
    auto stream = frame.get_profile().stream_type();
    double frame_time = frame.get_timestamp();
    // First frame overall establishes the time base (once-only via compare_exchange).
    bool placeholder_false(false);
    if (_is_initialized_time_base.compare_exchange_strong(placeholder_false, true) )
    {
    }

    ROS_DEBUG("Frame arrived: stream: %s ; index: %d ; Timestamp Domain: %s",
              frame.get_profile().stream_index(),

    auto stream_index = (stream == GYRO.first)?GYRO:ACCEL;
    if (0 != _imu_publishers[stream_index].getNumSubscribers())
    {
        ros::Time t(frameSystemTimeSec(frame));

        auto imu_msg = sensor_msgs::Imu();
        ImuMessage_AddDefaultValues(imu_msg);
        imu_msg.header.frame_id = _optical_frame_id[stream_index];

        // Raw IMU payload is three packed floats (x, y, z); only the field
        // matching this stream type is filled in.
        auto crnt_reading = *(reinterpret_cast<const float3*>(frame.get_data()));
        if (GYRO == stream_index)
        {
            imu_msg.angular_velocity.x = crnt_reading.x;
            imu_msg.angular_velocity.y = crnt_reading.y;
            imu_msg.angular_velocity.z = crnt_reading.z;
        }
        else if (ACCEL == stream_index)
        {
            imu_msg.linear_acceleration.x = crnt_reading.x;
            imu_msg.linear_acceleration.y = crnt_reading.y;
            imu_msg.linear_acceleration.z = crnt_reading.z;
        }
        _seq[stream_index] += 1;
        imu_msg.header.seq = _seq[stream_index];
        imu_msg.header.stamp = t;
        _imu_publishers[stream_index].publish(imu_msg);
        ROS_DEBUG("Publish %s stream", rs2_stream_to_string(frame.get_profile().stream_type()));
    }
}
1465 
// BaseRealSenseNode::pose_callback body (signature is on a preceding line):
// convert a T265 pose frame from the camera's optical convention to ROS axes,
// broadcast it as a TF transform (optional) and publish it as odometry.
{
    double frame_time = frame.get_timestamp();
    // First frame overall establishes the time base (once-only via compare_exchange).
    bool placeholder_false(false);
    if (_is_initialized_time_base.compare_exchange_strong(placeholder_false, true) )
    {
    }

    ROS_DEBUG("Frame arrived: stream: %s ; index: %d ; Timestamp Domain: %s",
              frame.get_profile().stream_index(),
    const auto& stream_index(POSE);
    rs2_pose pose = frame.as<rs2::pose_frame>().get_pose_data();
    ros::Time t(frameSystemTimeSec(frame));

    // Axis remap camera->ROS: ROS x = -cam z, ROS y = -cam x, ROS z = cam y.
    geometry_msgs::PoseStamped pose_msg;
    pose_msg.pose.position.x = -pose.translation.z;
    pose_msg.pose.position.y = -pose.translation.x;
    pose_msg.pose.position.z = pose.translation.y;
    pose_msg.pose.orientation.x = -pose.rotation.z;
    pose_msg.pose.orientation.y = -pose.rotation.x;
    pose_msg.pose.orientation.z = pose.rotation.y;
    pose_msg.pose.orientation.w = pose.rotation.w;

    msg.header.stamp = t;
    msg.child_frame_id = _frame_id[POSE];
    msg.transform.translation.x = pose_msg.pose.position.x;
    msg.transform.translation.y = pose_msg.pose.position.y;
    msg.transform.translation.z = pose_msg.pose.position.z;
    msg.transform.rotation.x = pose_msg.pose.orientation.x;
    msg.transform.rotation.y = pose_msg.pose.orientation.y;
    msg.transform.rotation.z = pose_msg.pose.orientation.z;
    msg.transform.rotation.w = pose_msg.pose.orientation.w;

    if (_publish_odom_tf) br.sendTransform(msg);

    if (0 != _imu_publishers[stream_index].getNumSubscribers())
    {
        // Scale covariance by tracker confidence: each confidence step changes it 10x.
        double cov_pose(_linear_accel_cov * pow(10, 3-(int)pose.tracker_confidence));
        double cov_twist(_angular_velocity_cov * pow(10, 1-(int)pose.tracker_confidence));

        // Remap linear velocity to ROS axes, then rotate it into the odom frame.
        v_msg.vector.x = -pose.velocity.z;
        v_msg.vector.y = -pose.velocity.x;
        v_msg.vector.z = pose.velocity.y;
        tf::Vector3 tfv;
        tf::vector3MsgToTF(v_msg.vector,tfv);
        tfv=tf::quatRotate(q,tfv);
        tf::vector3TFToMsg(tfv,v_msg.vector);

        // Same remap + rotation for angular velocity.
        om_msg.vector.x = -pose.angular_velocity.z;
        om_msg.vector.y = -pose.angular_velocity.x;
        om_msg.vector.z = pose.angular_velocity.y;
        tf::vector3MsgToTF(om_msg.vector,tfv);
        tfv=tf::quatRotate(q,tfv);
        tf::vector3TFToMsg(tfv,om_msg.vector);


    nav_msgs::Odometry odom_msg;
        _seq[stream_index] += 1;

        odom_msg.header.frame_id = _odom_frame_id;
        odom_msg.child_frame_id = _frame_id[POSE];
        odom_msg.header.stamp = t;
        odom_msg.header.seq = _seq[stream_index];
        odom_msg.pose.pose = pose_msg.pose;
        odom_msg.pose.covariance = {cov_pose, 0, 0, 0, 0, 0,
                                    0, cov_pose, 0, 0, 0, 0,
                                    0, 0, cov_pose, 0, 0, 0,
                                    0, 0, 0, cov_twist, 0, 0,
                                    0, 0, 0, 0, cov_twist, 0,
                                    0, 0, 0, 0, 0, cov_twist};
        odom_msg.twist.twist.linear = v_msg.vector;
        odom_msg.twist.twist.angular = om_msg.vector;
        odom_msg.twist.covariance ={cov_pose, 0, 0, 0, 0, 0,
                                    0, cov_pose, 0, 0, 0, 0,
                                    0, 0, cov_pose, 0, 0, 0,
                                    0, 0, 0, cov_twist, 0, 0,
                                    0, 0, 0, 0, cov_twist, 0,
                                    0, 0, 0, 0, 0, cov_twist};
        _imu_publishers[stream_index].publish(odom_msg);
        ROS_DEBUG("Publish %s stream", rs2_stream_to_string(frame.get_profile().stream_type()));
    }
}
1557 
// BaseRealSenseNode::frame_callback body (signature is on a preceding line):
// main video/depth handler - clips depth, applies the configured filter chain,
// de-duplicates streams, publishes images, pointcloud and aligned depth.
// IMU publishing is paused for the duration so synced IMU messages can be held back.
{
    _synced_imu_publisher->Pause();

    try{
        double frame_time = frame.get_timestamp();

        // We compute a ROS timestamp which is based on an initial ROS time at point of first frame,
        // and the incremental timestamp from the camera.
        // In sync mode the timestamp is based on ROS time
        bool placeholder_false(false);
        if (_is_initialized_time_base.compare_exchange_strong(placeholder_false, true) )
        {
        }

        ros::Time t(frameSystemTimeSec(frame));
        if (frame.is<rs2::frameset>())
        {
            ROS_DEBUG("Frameset arrived.");
            bool is_depth_arrived = false;
            auto frameset = frame.as<rs2::frameset>();
            ROS_DEBUG("List of frameset before applying filters: size: %d", static_cast<int>(frameset.size()));
            for (auto it = frameset.begin(); it != frameset.end(); ++it)
            {
                auto f = (*it);
                auto stream_type = f.get_profile().stream_type();
                auto stream_index = f.get_profile().stream_index();
                auto stream_format = f.get_profile().format();
                auto stream_unique_id = f.get_profile().unique_id();

                ROS_DEBUG("Frameset contain (%s, %d, %s %d) frame. frame_number: %llu ; frame_TS: %f ; ros_TS(NSec): %lu",
                          rs2_stream_to_string(stream_type), stream_index, rs2_format_to_string(stream_format), stream_unique_id, frame.get_frame_number(), frame_time, t.toNSec());
                runFirstFrameInitialization(stream_type);
            }
            // Clip depth_frame for max range:
            rs2::depth_frame depth_frame = frameset.get_depth_frame();
            if (depth_frame && _clipping_distance > 0)
            {
                clip_depth(depth_frame, _clipping_distance);
            }

            // Run the frameset through the configured filter chain, in order.
            ROS_DEBUG("num_filters: %d", static_cast<int>(_filters.size()));
            for (std::vector<NamedFilter>::const_iterator filter_it = _filters.begin(); filter_it != _filters.end(); filter_it++)
            {
                ROS_DEBUG("Applying filter: %s", filter_it->_name.c_str());
                frameset = filter_it->_filter->process(frameset);
            }

            ROS_DEBUG("List of frameset after applying filters: size: %d", static_cast<int>(frameset.size()));
            for (auto it = frameset.begin(); it != frameset.end(); ++it)
            {
                auto f = (*it);
                auto stream_type = f.get_profile().stream_type();
                auto stream_index = f.get_profile().stream_index();
                auto stream_format = f.get_profile().format();
                auto stream_unique_id = f.get_profile().unique_id();

                ROS_DEBUG("Frameset contain (%s, %d, %s %d) frame. frame_number: %llu ; frame_TS: %f ; ros_TS(NSec): %lu",
                          rs2_stream_to_string(stream_type), stream_index, rs2_format_to_string(stream_format), stream_unique_id, frame.get_frame_number(), frame_time, t.toNSec());
            }
            ROS_DEBUG("END OF LIST");
            ROS_DEBUG_STREAM("Remove streams with same type and index:");
            // TODO - Fix the following issue:
            // Currently publishers are set using a map of stream type and index only.
            // It means that colorized depth image <DEPTH, 0, Z16> and colorized depth image <DEPTH, 0, RGB>
            // use the same publisher.
            // As a workaround we remove the earlier one, the original one, assuming that if colorizer filter is
            // set it means that that's what the client wants.
            //
            bool points_in_set(false);
            std::vector<rs2::frame> frames_to_publish;
            std::vector<stream_index_pair> is_in_set;
            for (auto it = frameset.begin(); it != frameset.end(); ++it)
            {
                auto f = (*it);
                auto stream_type = f.get_profile().stream_type();
                auto stream_index = f.get_profile().stream_index();
                auto stream_format = f.get_profile().format();
                if (f.is<rs2::points>())
                {
                    // At most one pointcloud frame is published per frameset.
                    if (!points_in_set)
                    {
                        points_in_set = true;
                        frames_to_publish.push_back(f);
                    }
                    continue;
                }
                stream_index_pair sip{stream_type,stream_index};
                // Keep only the first frame seen per (type, index) pair.
                if (std::find(is_in_set.begin(), is_in_set.end(), sip) == is_in_set.end())
                {
                    is_in_set.push_back(sip);
                    frames_to_publish.push_back(f);
                }
                if (_align_depth && stream_type == RS2_STREAM_DEPTH && stream_format == RS2_FORMAT_Z16)
                {
                    is_depth_arrived = true;
                }
            }

            for (auto it = frames_to_publish.begin(); it != frames_to_publish.end(); ++it)
            {
                auto f = (*it);
                auto stream_type = f.get_profile().stream_type();
                auto stream_index = f.get_profile().stream_index();
                auto stream_format = f.get_profile().format();

                ROS_DEBUG("Frameset contain (%s, %d, %s) frame. frame_number: %llu ; frame_TS: %f ; ros_TS(NSec): %lu",
                          rs2_stream_to_string(stream_type), stream_index, rs2_format_to_string(stream_format), frame.get_frame_number(), frame_time, t.toNSec());

                if (f.is<rs2::points>())
                {
                    {
                        ROS_DEBUG("Publish pointscloud");
                        publishPointCloud(f.as<rs2::points>(), t, frameset);
                    }
                    continue;
                }
                stream_index_pair sip{stream_type,stream_index};
                publishFrame(f, t,
                             sip,
                             _image,
                             _encoding);
            }

            // Aligned depth requires a raw Z16 depth frame to have survived filtering.
            if (_align_depth && is_depth_arrived)
            {
                ROS_DEBUG("publishAlignedDepthToOthers(...)");
                publishAlignedDepthToOthers(frameset, t);
            }
        }
        else if (frame.is<rs2::video_frame>())
        {
            // Single-frame path (sensor not delivering framesets).
            auto stream_type = frame.get_profile().stream_type();
            auto stream_index = frame.get_profile().stream_index();
            ROS_DEBUG("Single video frame arrived (%s, %d). frame_number: %llu ; frame_TS: %f ; ros_TS(NSec): %lu",
                      rs2_stream_to_string(stream_type), stream_index, frame.get_frame_number(), frame_time, t.toNSec());
            runFirstFrameInitialization(stream_type);

            stream_index_pair sip{stream_type,stream_index};
            if (frame.is<rs2::depth_frame>())
            {
                if (_clipping_distance > 0)
                {
                }
            }
            publishFrame(frame, t,
                         sip,
                         _image,
                         _encoding);
        }
    }
    catch(const std::exception& ex)
    {
        ROS_ERROR_STREAM("An error has occurred during frame callback: " << ex.what());
    }
    _synced_imu_publisher->Resume();
}; // frame_callback
1724 
// BaseRealSenseNode::multiple_message_callback body (signature is on a preceding line):
// dispatch an incoming frame to the matching handler based on its stream type.
{
    auto stream = frame.get_profile().stream_type();
    switch (stream)
    {
        case RS2_STREAM_GYRO:
        case RS2_STREAM_ACCEL:
            // Gyro/accel go to the united-IMU path when a sync method is configured,
            // otherwise to the plain per-stream publisher.
            if (sync_method > imu_sync_method::NONE) imu_callback_sync(frame, sync_method);
            else imu_callback(frame);
            break;
        case RS2_STREAM_POSE:
            pose_callback(frame);
            break;
        default:
            // Video/depth and everything else goes through the frameset handler.
            frame_callback(frame);
    }
}
1742 
// Record a new camera time base from the given frame timestamp when the camera
// runs on its own hardware clock. Returns true if the base was (re)established,
// false when the timestamp is already in a host-comparable domain.
bool BaseRealSenseNode::setBaseTime(double frame_time, rs2_timestamp_domain time_domain)
{
    // Warn once if the device reports host system time, i.e. no frame metadata.
    ROS_WARN_ONCE(time_domain == RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME ? "Frame metadata isn't available! (frame_timestamp_domain = RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME)" : "");
    if (time_domain == RS2_TIMESTAMP_DOMAIN_HARDWARE_CLOCK)
    {
        ROS_WARN("frame's time domain is HARDWARE_CLOCK. Timestamps may reset periodically.");
        // Camera-clock origin (ms); presumably paired with a ROS base time set
        // alongside it - TODO confirm against frameSystemTimeSec's use of _ros_time_base.
        _camera_time_base = frame_time;
        return true;
    }
    return false;
}
1755 
// BaseRealSenseNode::frameSystemTimeSec body (signature and the branch condition are
// on preceding lines): convert a frame's camera timestamp (milliseconds) to seconds
// on the ROS clock.
{
    {
        // Rebased path: offset the elapsed camera time from the recorded
        // ROS/camera base-time pair.
        double elapsed_camera_ms = (/*ms*/ frame.get_timestamp() - /*ms*/ _camera_time_base) / 1000.0;
        return (_ros_time_base.toSec() + elapsed_camera_ms);
    }
    else
    {
        // Timestamp is already host-comparable: just convert ms -> s.
        return (frame.get_timestamp() / 1000.0);
    }
}
1768 
// BaseRealSenseNode::setupStreams body (signature is on a preceding line):
// publish calibration info for every enabled profile, compute stereo extrinsics when
// both left and right infra profiles exist, then open and start each sensor with its
// selected profiles, grouped per hardware module.
{
    ROS_INFO("setupStreams...");
    try{
        std::shared_ptr<rs2::video_stream_profile> left_profile;
        std::shared_ptr<rs2::video_stream_profile> right_profile;

        // Publish image stream info
        for (auto& profiles : _enabled_profiles)
        {
            for (auto& profile : profiles.second)
            {
                {
                    auto video_profile = profile.as<rs2::video_stream_profile>();
                    updateStreamCalibData(video_profile);

                    // stream index: 1=left, 2=right
                    if (video_profile.stream_index() == 1) { left_profile = std::make_shared<rs2::video_stream_profile>(video_profile); }
                    if (video_profile.stream_index() == 2) { right_profile = std::make_shared<rs2::video_stream_profile>(video_profile); }
                }
            }
        }

        if (left_profile && right_profile) {
            updateExtrinsicsCalibData(*left_profile, *right_profile);
        }

        // Streaming IMAGES
        // Group all selected profiles by the sensor (module) that provides them,
        // because rs2::sensor::open() takes all of its profiles at once.
        std::map<std::string, std::vector<rs2::stream_profile> > profiles;
        std::map<std::string, rs2::sensor> active_sensors;
        for (const std::pair<stream_index_pair, std::vector<rs2::stream_profile>>& profile : _enabled_profiles)
        {
            std::string module_name = _sensors[profile.first].get_info(RS2_CAMERA_INFO_NAME);
            ROS_INFO_STREAM("insert " << rs2_stream_to_string(profile.second.begin()->stream_type())
                            << " to " << module_name);
            profiles[module_name].insert(profiles[module_name].begin(),
                                         profile.second.begin(),
                                         profile.second.end());
            active_sensors[module_name] = _sensors[profile.first];
        }

        for (const std::pair<std::string, std::vector<rs2::stream_profile> >& sensor_profile : profiles)
        {
            std::string module_name = sensor_profile.first;
            rs2::sensor sensor = active_sensors[module_name];
            sensor.open(sensor_profile.second);
            sensor.start(_sensors_callback[module_name]);
            if (sensor.is<rs2::depth_sensor>())
            {
            }
        }
    }
    catch(const std::exception& ex)
    {
        ROS_ERROR_STREAM("An exception has been thrown: " << ex.what());
        throw;
    }
    catch(...)
    {
        ROS_ERROR_STREAM("Unknown exception has occured!");
        throw;
    }
}
1834 
// BaseRealSenseNode::updateStreamCalibData body (signature is on a preceding line):
// fill the sensor_msgs::CameraInfo for one stream from the librealsense intrinsics:
// K (camera matrix), P (projection), R (identity rectification), D (distortion).
{
    stream_index_pair stream_index{video_profile.stream_type(), video_profile.stream_index()};
    auto intrinsic = video_profile.get_intrinsics();
    _stream_intrinsics[stream_index] = intrinsic;
    _camera_info[stream_index].width = intrinsic.width;
    _camera_info[stream_index].height = intrinsic.height;
    _camera_info[stream_index].header.frame_id = _optical_frame_id[stream_index];

    // K: row-major 3x3 camera matrix [fx 0 cx; 0 fy cy; 0 0 1].
    _camera_info[stream_index].K.at(0) = intrinsic.fx;
    _camera_info[stream_index].K.at(2) = intrinsic.ppx;
    _camera_info[stream_index].K.at(4) = intrinsic.fy;
    _camera_info[stream_index].K.at(5) = intrinsic.ppy;
    _camera_info[stream_index].K.at(8) = 1;

    // P: 3x4 projection matrix = [K | 0] (no stereo baseline term here).
    _camera_info[stream_index].P.at(0) = _camera_info[stream_index].K.at(0);
    _camera_info[stream_index].P.at(1) = 0;
    _camera_info[stream_index].P.at(2) = _camera_info[stream_index].K.at(2);
    _camera_info[stream_index].P.at(3) = 0;
    _camera_info[stream_index].P.at(4) = 0;
    _camera_info[stream_index].P.at(5) = _camera_info[stream_index].K.at(4);
    _camera_info[stream_index].P.at(6) = _camera_info[stream_index].K.at(5);
    _camera_info[stream_index].P.at(7) = 0;
    _camera_info[stream_index].P.at(8) = 0;
    _camera_info[stream_index].P.at(9) = 0;
    _camera_info[stream_index].P.at(10) = 1;
    _camera_info[stream_index].P.at(11) = 0;

    // Map the librealsense distortion model onto the ROS camera_info model name.
    if (intrinsic.model == RS2_DISTORTION_KANNALA_BRANDT4)
    {
        _camera_info[stream_index].distortion_model = "equidistant";
    } else {
        _camera_info[stream_index].distortion_model = "plumb_bob";
    }

    // set R (rotation matrix) values to identity matrix
    _camera_info[stream_index].R.at(0) = 1.0;
    _camera_info[stream_index].R.at(1) = 0.0;
    _camera_info[stream_index].R.at(2) = 0.0;
    _camera_info[stream_index].R.at(3) = 0.0;
    _camera_info[stream_index].R.at(4) = 1.0;
    _camera_info[stream_index].R.at(5) = 0.0;
    _camera_info[stream_index].R.at(6) = 0.0;
    _camera_info[stream_index].R.at(7) = 0.0;
    _camera_info[stream_index].R.at(8) = 1.0;

    // D: copy the 5 librealsense distortion coefficients verbatim.
    _camera_info[stream_index].D.resize(5);
    for (int i = 0; i < 5; i++)
    {
        _camera_info[stream_index].D.at(i) = intrinsic.coeffs[i];
    }

    if (stream_index == DEPTH && _enable[DEPTH] && _enable[COLOR])
    {
        _camera_info[stream_index].P.at(3) = 0; // Tx
        _camera_info[stream_index].P.at(7) = 0; // Ty
    }

    if (_align_depth)
    {
        // Aligned-depth topics reuse the target stream's camera info verbatim.
        for (auto& profiles : _enabled_profiles)
        {
            for (auto& profile : profiles.second)
            {
                auto video_profile = profile.as<rs2::video_stream_profile>();
                stream_index_pair stream_index{video_profile.stream_type(), video_profile.stream_index()};
                _depth_aligned_camera_info[stream_index] = _camera_info[stream_index];
            }
        }
    }
}
1906 
1908 {
1909  stream_index_pair left{left_video_profile.stream_type(), left_video_profile.stream_index()};
1910  stream_index_pair right{right_video_profile.stream_type(), right_video_profile.stream_index()};
1911 
1912  // Get the relative extrinsics between the left and right camera
1913  auto LEFT_T_RIGHT = right_video_profile.get_extrinsics_to(left_video_profile);
1914 
1915  auto R = Eigen::Map<Eigen::Matrix<float,3,3,Eigen::RowMajor>>(LEFT_T_RIGHT.rotation);
1916  auto T = Eigen::Map<Eigen::Matrix<float,3,1>>(LEFT_T_RIGHT.translation);
1917 
1918  // force y- and z-axis components to be 0 (but do we also need to force P(0,3) and P(1,3) to be 0?)
1919  T[1] = 0;
1920  T[2] = 0;
1921 
1922  Eigen::Matrix<float,3,4,Eigen::RowMajor> RT;
1923  RT << R, T;
1924 
1925  auto K_right = Eigen::Map<Eigen::Matrix<double,3,3,Eigen::RowMajor>>(_camera_info[right].K.data());
1926 
1927  // Compute Projection matrix for the right camera
1928  auto P_right = K_right.cast<float>() * RT;
1929 
1930  // Note that all matrices are stored in row-major format
1931  // 1. Leave the left rotation matrix as identity
1932  // 2. Set the right rotation matrix
1933  _camera_info[right].R.at(0) = LEFT_T_RIGHT.rotation[0];
1934  _camera_info[right].R.at(1) = LEFT_T_RIGHT.rotation[1];
1935  _camera_info[right].R.at(2) = LEFT_T_RIGHT.rotation[2];
1936  _camera_info[right].R.at(3) = LEFT_T_RIGHT.rotation[3];
1937  _camera_info[right].R.at(4) = LEFT_T_RIGHT.rotation[4];
1938  _camera_info[right].R.at(5) = LEFT_T_RIGHT.rotation[5];
1939  _camera_info[right].R.at(6) = LEFT_T_RIGHT.rotation[6];
1940  _camera_info[right].R.at(7) = LEFT_T_RIGHT.rotation[7];
1941  _camera_info[right].R.at(8) = LEFT_T_RIGHT.rotation[8];
1942 
1943  // 3. Leave the left projection matrix
1944  // 4. Set the right projection matrix
1945  _camera_info[right].P.at(0) = P_right(0,0);
1946  _camera_info[right].P.at(1) = P_right(0,1);
1947  _camera_info[right].P.at(2) = P_right(0,2);
1948  _camera_info[right].P.at(3) = P_right(0,3);
1949  _camera_info[right].P.at(4) = P_right(1,0);
1950  _camera_info[right].P.at(5) = P_right(1,1);
1951  _camera_info[right].P.at(6) = P_right(1,2);
1952  _camera_info[right].P.at(7) = P_right(1,3);
1953  _camera_info[right].P.at(8) = P_right(2,0);
1954  _camera_info[right].P.at(9) = P_right(2,1);
1955  _camera_info[right].P.at(10) = P_right(2,2);
1956  _camera_info[right].P.at(11) = P_right(2,3);
1957 
1958 }
1959 
1961 {
1962  Eigen::Matrix3f m;
1963  // We need to be careful about the order, as RS2 rotation matrix is
1964  // column-major, while Eigen::Matrix3f expects row-major.
1965  m << rotation[0], rotation[3], rotation[6],
1966  rotation[1], rotation[4], rotation[7],
1967  rotation[2], rotation[5], rotation[8];
1968  Eigen::Quaternionf q(m);
1969  return tf::Quaternion(q.x(), q.y(), q.z(), q.w());
1970 }
1971 
1973  const float3& trans,
1974  const tf::Quaternion& q,
1975  const std::string& from,
1976  const std::string& to)
1977 {
1979  msg.header.stamp = t;
1980  msg.header.frame_id = from;
1981  msg.child_frame_id = to;
1982  msg.transform.translation.x = trans.z;
1983  msg.transform.translation.y = -trans.x;
1984  msg.transform.translation.z = -trans.y;
1985  msg.transform.rotation.x = q.getX();
1986  msg.transform.rotation.y = q.getY();
1987  msg.transform.rotation.z = q.getZ();
1988  msg.transform.rotation.w = q.getW();
1989  _static_tf_msgs.push_back(msg);
1990 }
1991 
1993 {
1994  // Transform base to stream
1995  tf::Quaternion quaternion_optical;
1996  quaternion_optical.setRPY(-M_PI / 2, 0.0, -M_PI / 2);
1997  float3 zero_trans{0, 0, 0};
1998 
1999  ros::Time transform_ts_ = ros::Time::now();
2000 
2001  rs2_extrinsics ex;
2002  try
2003  {
2004  ex = getAProfile(stream).get_extrinsics_to(base_profile);
2005  }
2006  catch (std::exception& e)
2007  {
2008  if (!strcmp(e.what(), "Requested extrinsics are not available!"))
2009  {
2010  ROS_WARN_STREAM("(" << rs2_stream_to_string(stream.first) << ", " << stream.second << ") -> (" << rs2_stream_to_string(base_profile.stream_type()) << ", " << base_profile.stream_index() << "): " << e.what() << " : using unity as default.");
2011  ex = rs2_extrinsics({{1, 0, 0, 0, 1, 0, 0, 0, 1}, {0,0,0}});
2012  }
2013  else
2014  {
2015  throw e;
2016  }
2017  }
2018 
2020  Q = quaternion_optical * Q * quaternion_optical.inverse();
2021 
2022  float3 trans{ex.translation[0], ex.translation[1], ex.translation[2]};
2023  publish_static_tf(transform_ts_, trans, Q, _base_frame_id, _frame_id[stream]);
2024 
2025  // Transform stream frame to stream optical frame
2026  publish_static_tf(transform_ts_, zero_trans, quaternion_optical, _frame_id[stream], _optical_frame_id[stream]);
2027 
2028  if (_align_depth && _depth_aligned_frame_id.find(stream) != _depth_aligned_frame_id.end())
2029  {
2030  publish_static_tf(transform_ts_, trans, Q, _base_frame_id, _depth_aligned_frame_id[stream]);
2031  publish_static_tf(transform_ts_, zero_trans, quaternion_optical, _depth_aligned_frame_id[stream], _optical_frame_id[stream]);
2032  }
2033 }
2034 
2036 {
2037  const std::vector<stream_index_pair> base_stream_priority = {DEPTH, POSE};
2038 
2039  std::vector<stream_index_pair>::const_iterator base_stream(base_stream_priority.begin());
2040  while( (_sensors.find(*base_stream) == _sensors.end()) && (base_stream != base_stream_priority.end()))
2041  {
2042  base_stream++;
2043  }
2044  if (base_stream == base_stream_priority.end())
2045  {
2046  throw std::runtime_error("No known base_stream found for transformations.");
2047  }
2048  ROS_INFO_STREAM("SELECTED BASE:" << base_stream->first << ", " << base_stream->second);
2049 
2050  _base_stream = *base_stream;
2051 }
2052 
2054 {
 // Publish the static TF tree: base -> each enabled stream frame and its
 // optical frame. Then either spawn a thread that re-broadcasts the
 // transforms on /tf at _tf_publish_rate, or send them once over /tf_static.
 // NOTE(review): the extraction dropped the signature and the base_profile
 // acquisition line here; the remaining code is kept verbatim.
2056 
2057  // Publish static transforms
2058  if (_publish_tf)
2059  {
2060  for (std::pair<stream_index_pair, bool> ienable : _enable)
2061  {
2062  if (ienable.second)
2063  {
2064  calcAndPublishStaticTransform(ienable.first, base_profile);
2065  }
2066  }
2067  // Static transform for non-positive values
2068  if (_tf_publish_rate > 0)
2069  _tf_t = std::shared_ptr<std::thread>(new std::thread(boost::bind(&BaseRealSenseNode::publishDynamicTransforms, this)));
2070  else
 // (extraction dropped the one-shot static-broadcaster sendTransform call here)
2072  }
2073 
2074  // Publish Extrinsics Topics:
 // For each depth+X stream pair, compute the depth->X extrinsics and publish
 // them on a latched topic. The advertise/publish lines in each branch were
 // dropped by the extraction; only the frame_id and extrinsics setup remain.
2075  if (_enable[DEPTH] &&
2076  _enable[FISHEYE])
2077  {
2078  static const char* frame_id = "depth_to_fisheye_extrinsics";
2079  const auto& ex = base_profile.get_extrinsics_to(getAProfile(FISHEYE));
2080 
2083  }
2084 
2085  if (_enable[DEPTH] &&
2086  _enable[COLOR])
2087  {
2088  static const char* frame_id = "depth_to_color_extrinsics";
2089  const auto& ex = base_profile.get_extrinsics_to(getAProfile(COLOR));
2092  }
2093 
2094  if (_enable[DEPTH] &&
2095  _enable[INFRA1])
2096  {
2097  static const char* frame_id = "depth_to_infra1_extrinsics";
2098  const auto& ex = base_profile.get_extrinsics_to(getAProfile(INFRA1));
2101  }
2102 
2103  if (_enable[DEPTH] &&
2104  _enable[INFRA2])
2105  {
2106  static const char* frame_id = "depth_to_infra2_extrinsics";
2107  const auto& ex = base_profile.get_extrinsics_to(getAProfile(INFRA2));
2110  }
2111 
2112 }
2113 
2115 {
2116  // Publish transforms for the cameras
2117  ROS_WARN("Publishing dynamic camera transforms (/tf) at %g Hz", _tf_publish_rate);
2118 
2119  ros::Rate loop_rate(_tf_publish_rate);
2120 
2121  while (ros::ok())
2122  {
2123  // Update the time stamp for publication
2125  for(auto& msg : _static_tf_msgs)
2126  msg.header.stamp = t;
2127 
2128  _dynamic_tf_broadcaster.sendTransform(_static_tf_msgs);
2129 
2130  loop_rate.sleep();
2131  }
2132 }
2133 
2135 {
2136  if (_enable[GYRO])
2137  {
2138  _info_publisher[GYRO] = _node_handle.advertise<IMUInfo>("gyro/imu_info", 1, true);
2139  IMUInfo info_msg = getImuInfo(GYRO);
2140  _info_publisher[GYRO].publish(info_msg);
2141  }
2142 
2143  if (_enable[ACCEL])
2144  {
2145  _info_publisher[ACCEL] = _node_handle.advertise<IMUInfo>("accel/imu_info", 1, true);
2146  IMUInfo info_msg = getImuInfo(ACCEL);
2147  _info_publisher[ACCEL].publish(info_msg);
2148  }
2149 }
2150 
// Copy n bytes from src into dst in reversed order (dst[n-1-i] = src[i]).
// Used below to swap RGB texture bytes into the BGR order PointCloud2 expects.
void reverse_memcpy(unsigned char* dst, const unsigned char* src, size_t n)
{
    const unsigned char* end = src + n;
    unsigned char* out = dst + n;
    while (src != end)
    {
        *--out = *src++;
    }
}
2159 
2161 {
 // Convert an rs2::points frame into a sensor_msgs::PointCloud2 (_msg_pointcloud),
 // optionally painting each point from the configured texture stream.
 // NOTE(review): the extraction dropped several lines (the signature, the
 // PointCloud2Modifier/iterator declarations and the final publish call);
 // the surviving code is kept verbatim.
2162  ROS_INFO_STREAM_ONCE("publishing " << (_ordered_pc ? "" : "un") << "ordered pointcloud.");
 // NOTE(review): pc_filter is dereferenced without an end() check —
 // presumably this callback only runs while the "pointcloud" filter is
 // enabled; confirm against the caller.
2163  std::vector<NamedFilter>::iterator pc_filter = find_if(_filters.begin(), _filters.end(), [] (NamedFilter s) { return s._name == "pointcloud"; } );
2164  rs2_stream texture_source_id = static_cast<rs2_stream>((int)pc_filter->_filter->get_option(rs2_option::RS2_OPTION_STREAM_FILTER));
2165  bool use_texture = texture_source_id != RS2_STREAM_ANY;
2166  static int warn_count(0);
2167  static const int DISPLAY_WARN_NUMBER(5);
2168  rs2::frameset::iterator texture_frame_itr = frameset.end();
2169  if (use_texture)
2170  {
 // Look for a frame in the frameset matching the chosen texture source and
 // one of the supported pixel formats.
2171  std::set<rs2_format> available_formats{ rs2_format::RS2_FORMAT_RGB8, rs2_format::RS2_FORMAT_Y8 };
2172 
2173  texture_frame_itr = find_if(frameset.begin(), frameset.end(), [&texture_source_id, &available_formats] (rs2::frame f)
2174  {return (rs2_stream(f.get_profile().stream_type()) == texture_source_id) &&
2175  (available_formats.find(f.get_profile().format()) != available_formats.end()); });
2176  if (texture_frame_itr == frameset.end())
2177  {
 // Warn only once every DISPLAY_WARN_NUMBER misses to avoid log spam.
2178  warn_count++;
2179  std::string texture_source_name = pc_filter->_filter->get_option_value_description(rs2_option::RS2_OPTION_STREAM_FILTER, static_cast<float>(texture_source_id));
2180  ROS_WARN_STREAM_COND(warn_count == DISPLAY_WARN_NUMBER, "No stream match for pointcloud chosen texture " << texture_source_name);
2181  return;
2182  }
2183  warn_count = 0;
2184  }
2185 
2186  int texture_width(0), texture_height(0);
2187  int num_colors(0);
2188 
2189  const rs2::vertex* vertex = pc.get_vertices();
2190  const rs2::texture_coordinate* color_point = pc.get_texture_coordinates();
2191 
2192  rs2_intrinsics depth_intrin = pc.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
2193 
 // (extraction dropped the PointCloud2Modifier declaration here)
2195  modifier.setPointCloud2FieldsByString(1, "xyz");
2196  modifier.resize(pc.size());
2197  if (_ordered_pc)
2198  {
 // Ordered cloud: keep the full depth image grid; invalid points stay in place.
2199  _msg_pointcloud.width = depth_intrin.width;
2200  _msg_pointcloud.height = depth_intrin.height;
2201  _msg_pointcloud.is_dense = false;
2202  }
2203 
2204  vertex = pc.get_vertices();
2205  size_t valid_count(0);
2206  if (use_texture)
2207  {
 // Textured path: copy XYZ plus a color sampled from the texture frame.
2208  rs2::video_frame texture_frame = (*texture_frame_itr).as<rs2::video_frame>();
2209  texture_width = texture_frame.get_width();
2210  texture_height = texture_frame.get_height();
2211  num_colors = texture_frame.get_bytes_per_pixel();
2212  uint8_t* color_data = (uint8_t*)texture_frame.get_data();
2213  std::string format_str;
2214  switch(texture_frame.get_profile().format())
2215  {
2216  case RS2_FORMAT_RGB8:
2217  format_str = "rgb";
2218  break;
2219  case RS2_FORMAT_Y8:
2220  format_str = "intensity";
2221  break;
2222  default:
2223  throw std::runtime_error("Unhandled texture format passed in pointcloud " + std::to_string(texture_frame.get_profile().format()));
2224  }
 // (extraction dropped the field/iterator setup lines here)
2228 
2233  color_point = pc.get_texture_coordinates();
2234 
2235  float color_pixel[2];
2236  for (size_t point_idx=0; point_idx < pc.size(); point_idx++, vertex++, color_point++)
2237  {
 // u/v are normalized texture coordinates; a point is colorable only if
 // they fall inside [0,1].
2238  float i(color_point->u);
2239  float j(color_point->v);
2240  bool valid_color_pixel(i >= 0.f && i <=1.f && j >= 0.f && j <=1.f);
2241  bool valid_pixel(vertex->z > 0 && (valid_color_pixel || _allow_no_texture_points));
2242  if (valid_pixel || _ordered_pc)
2243  {
2244  *iter_x = vertex->x;
2245  *iter_y = vertex->y;
2246  *iter_z = vertex->z;
2247 
2248  if (valid_color_pixel)
2249  {
2250  color_pixel[0] = i * texture_width;
2251  color_pixel[1] = j * texture_height;
2252  int pixx = static_cast<int>(color_pixel[0]);
2253  int pixy = static_cast<int>(color_pixel[1]);
2254  int offset = (pixy * texture_width + pixx) * num_colors;
2255  reverse_memcpy(&(*iter_color), color_data+offset, num_colors); // PointCloud2 order of rgb is bgr.
2256  }
2257  ++iter_x; ++iter_y; ++iter_z;
2258  ++iter_color;
2259  ++valid_count;
2260  }
2261  }
2262  }
2263  else
2264  {
 // Untextured path: XYZ only.
2265  std::string format_str = "intensity";
 // (extraction dropped the iterator setup lines here)
2269 
2273 
2274  for (size_t point_idx=0; point_idx < pc.size(); point_idx++, vertex++)
2275  {
2276  bool valid_pixel(vertex->z > 0);
2277  if (valid_pixel || _ordered_pc)
2278  {
2279  *iter_x = vertex->x;
2280  *iter_y = vertex->y;
2281  *iter_z = vertex->z;
2282 
2283  ++iter_x; ++iter_y; ++iter_z;
2284  ++valid_count;
2285  }
2286  }
2287  }
 // Unordered cloud: shrink to the points actually written.
2290  if (!_ordered_pc)
2291  {
2292  _msg_pointcloud.width = valid_count;
2293  _msg_pointcloud.height = 1;
2294  _msg_pointcloud.is_dense = true;
2295  modifier.resize(valid_count);
2296  }
 // (extraction dropped the publisher call here)
2298 }
2299 
2300 
2301 Extrinsics BaseRealSenseNode::rsExtrinsicsToMsg(const rs2_extrinsics& extrinsics, const std::string& frame_id) const
2302 {
2303  Extrinsics extrinsicsMsg;
2304  for (int i = 0; i < 9; ++i)
2305  {
2306  extrinsicsMsg.rotation[i] = extrinsics.rotation[i];
2307  if (i < 3)
2308  extrinsicsMsg.translation[i] = extrinsics.translation[i];
2309  }
2310 
2311  extrinsicsMsg.header.frame_id = frame_id;
2312  return extrinsicsMsg;
2313 }
2314 
2316 {
2317  const std::vector<rs2::stream_profile> profiles = _sensors[stream].get_stream_profiles();
2318  return *(std::find_if(profiles.begin(), profiles.end(),
2319  [&stream] (const rs2::stream_profile& profile) {
2320  return ((profile.stream_type() == stream.first) && (profile.stream_index() == stream.second));
2321  }));
2322 }
2323 
2325 {
2326  IMUInfo info{};
2327  auto sp = _enabled_profiles[stream_index].front().as<rs2::motion_stream_profile>();
2328  rs2_motion_device_intrinsic imuIntrinsics;
2329  try
2330  {
2331  imuIntrinsics = sp.get_motion_intrinsics();
2332  }
2333  catch(const std::runtime_error &ex)
2334  {
2335  ROS_DEBUG_STREAM("No Motion Intrinsics available.");
2336  imuIntrinsics = {{{1,0,0,0},{0,1,0,0},{0,0,1,0}}, {0,0,0}, {0,0,0}};
2337  }
2338 
2339  auto index = 0;
2340  info.frame_id = _optical_frame_id[stream_index];
2341  for (int i = 0; i < 3; ++i)
2342  {
2343  for (int j = 0; j < 4; ++j)
2344  {
2345  info.data[index] = imuIntrinsics.data[i][j];
2346  ++index;
2347  }
2348  info.noise_variances[i] = imuIntrinsics.noise_variances[i];
2349  info.bias_variances[i] = imuIntrinsics.bias_variances[i];
2350  }
2351  return info;
2352 }
2353 
 // Publish a single video/depth frame: wrap the librealsense buffer in a
 // cv::Mat, convert to a ROS Image via cv_bridge, and publish it together
 // with its CameraInfo (only when someone is subscribed).
 // NOTE(review): the extraction dropped the signature's first line and three
 // interior lines (marked below); the remaining code is kept verbatim.
2355  const stream_index_pair& stream,
2356  std::map<stream_index_pair, cv::Mat>& images,
2357  const std::map<stream_index_pair, ros::Publisher>& info_publishers,
2358  const std::map<stream_index_pair, ImagePublisherWithFrequencyDiagnostics>& image_publishers,
2359  std::map<stream_index_pair, int>& seq,
2360  std::map<stream_index_pair, sensor_msgs::CameraInfo>& camera_info,
2361  const std::map<stream_index_pair, std::string>& optical_frame_id,
2362  const std::map<rs2_stream, std::string>& encoding,
2363  bool copy_data_from_frame)
2364 {
2365  ROS_DEBUG("publishFrame(...)");
2366  unsigned int width = 0;
2367  unsigned int height = 0;
2368  auto bpp = 1;
2369  if (f.is<rs2::video_frame>())
2370  {
2371  auto image = f.as<rs2::video_frame>();
2372  width = image.get_width();
2373  height = image.get_height();
2374  bpp = image.get_bytes_per_pixel();
2375  }
2376  auto& image = images[stream];
2377 
2378  if (copy_data_from_frame)
2379  {
 // Re-allocate the cached cv::Mat only when the resolution changed, then
 // point it at the frame buffer (no deep copy).
2380  if (images[stream].size() != cv::Size(width, height))
2381  {
2382  image.create(height, width, image.type());
2383  }
2384  image.data = (uint8_t*)f.get_data();
2385  }
2386  if (f.is<rs2::depth_frame>())
2387  {
 // (extraction dropped the depth-scale correction call here)
2389  }
2390 
2391  ++(seq[stream]);
2392  auto& info_publisher = info_publishers.at(stream);
2393  auto& image_publisher = image_publishers.at(stream);
2394 
 // Tick the frequency diagnostics even when nothing is published.
2395  image_publisher.second->tick();
2396  if(0 != info_publisher.getNumSubscribers() ||
2397  0 != image_publisher.first.getNumSubscribers())
2398  {
 // (extraction dropped the image-message declaration here)
2400  img = cv_bridge::CvImage(std_msgs::Header(), encoding.at(stream.first), image).toImageMsg();
2401  img->width = width;
2402  img->height = height;
2403  img->is_bigendian = false;
2404  img->step = width * bpp;
2405  img->header.frame_id = optical_frame_id.at(stream);
2406  img->header.stamp = t;
2407  img->header.seq = seq[stream];
2408 
2409  auto& cam_info = camera_info.at(stream);
2410  if (cam_info.width != width)
2411  {
 // (extraction dropped the calibration-refresh call here — presumably the
 // cached CameraInfo is rebuilt when the resolution changed; confirm)
2413  }
2414  cam_info.header.stamp = t;
2415  cam_info.header.seq = seq[stream];
2416  info_publisher.publish(cam_info);
2417 
2418  image_publisher.first.publish(img);
2419  // ROS_INFO_STREAM("fid: " << cam_info.header.seq << ", time: " << std::setprecision (20) << t.toSec());
2420  ROS_DEBUG("%s stream published", rs2_stream_to_string(f.get_profile().stream_type()));
2421  }
2422 }
2423 
2425  {
2426  // Assuming that all D400 SKUs have depth sensor
2427  auto profiles = _enabled_profiles[stream_index];
2428  auto it = std::find_if(profiles.begin(), profiles.end(),
2429  [&](const rs2::stream_profile& profile)
2430  { return (profile.stream_type() == stream_index.first); });
2431  if (it == profiles.end())
2432  return false;
2433 
2434  profile = *it;
2435  return true;
2436  }
2437 
2439 {
 // Create a temperature-diagnostics node per monitored option, then start a
 // background thread that polls and publishes diagnostics once a second.
 // NOTE(review): the extraction dropped the signature line and the loop
 // header over _monitor_options; the remaining code is kept verbatim.
2441  {
2442  _temperature_nodes.push_back({option, std::make_shared<TemperatureDiagnostics>(rs2_option_to_string(option), _serial_no )});
2443  }
2444 
2445  int time_interval(1000);
2446  std::function<void()> func = [this, time_interval](){
 // Local mutex used solely to drive the condition variable's wait; _cv lets
 // shutdown wake the thread early instead of sleeping the full interval.
2447  std::mutex mu;
2448  std::unique_lock<std::mutex> lock(mu);
2449  while(_is_running) {
2450  _cv.wait_for(lock, std::chrono::milliseconds(time_interval), [&]{return !_is_running;});
2451  if (_is_running)
2452  {
 // (extraction dropped the periodic publish calls here — temperature and
 // publish-frequency diagnostics updates)
2455  }
2456  }
2457  };
2458  _monitoring_t = std::make_shared<std::thread>(func);
2459 }
2460 
2462 {
 // Read every monitored temperature option from the sensor and push the
 // value into its diagnostics node.
 // NOTE(review): the extraction dropped the signature and the sensor
 // acquisition line here; the remaining code is kept verbatim.
2464  for (OptionTemperatureDiag option_diag : _temperature_nodes)
2465  {
2466  rs2_option option(option_diag.first);
2467  if (sensor.supports(option))
2468  {
2469  try
2470  {
2471  auto option_value = sensor.get_option(option);
2472  option_diag.second->update(option_value);
2473  }
2474  catch(const std::exception& e)
2475  {
 // Reading an option can fail transiently (e.g. during a device reset);
 // log at debug level and keep polling the remaining options.
2476  ROS_DEBUG_STREAM("Failed checking for temperature." << std::endl << e.what());
2477  }
2478  }
2479  }
2480 }
2481 
2483 {
2484  for (auto &image_publisher : _image_publishers)
2485  {
2486  image_publisher.second.second->update();
2487  }
2488 }
2489 
2490 TemperatureDiagnostics::TemperatureDiagnostics(std::string name, std::string serial_no)
2491  {
2492  _updater.add(name, this, &TemperatureDiagnostics::diagnostics);
2493  _updater.setHardwareID(serial_no);
2494  }
2495 
2497 {
2498  status.summary(0, "OK");
2499  status.add("Index", _crnt_temp);
2500 }
std::map< stream_index_pair, cv::Mat > _depth_aligned_image
void FillImuData_Copy(const CimuData imu_data, std::deque< sensor_msgs::Imu > &imu_msgs)
std::map< stream_index_pair, int > _seq
RS2_OPTION_COUNT
msg
RS2_LOG_SEVERITY_ERROR
frame apply_filter(filter_interface &filter)
align
RS2_OPTION_PROJECTOR_TEMPERATURE
std::map< rs2_stream, int > _image_format
R
BaseRealSenseNode(ros::NodeHandle &nodeHandle, ros::NodeHandle &privateNodeHandle, rs2::device dev, const std::string &serial_no)
const std::vector< stream_index_pair > IMAGE_STREAMS
GLboolean GLboolean GLboolean b
const std::string DEFAULT_BASE_FRAME_ID
Definition: constants.h:70
GLint y
void clip_depth(rs2::depth_frame depth_frame, float clipping_dist)
cv::Mat & fix_depth_scale(const cv::Mat &from_image, cv::Mat &to_image)
void registerAutoExposureROIOptions(ros::NodeHandle &nh)
TemperatureDiagnostics(std::string name, std::string serial_no)
tf2_ros::StaticTransformBroadcaster _static_tf_broadcaster
RS2_CAMERA_INFO_PHYSICAL_PORT
U1 type
std::vector< sensor > query_sensors() const
const bool ALIGN_DEPTH
Definition: constants.h:44
rs2_option
tf::Quaternion rotationMatrixToQuaternion(const float rotation[9]) const
_linear_acceleration_type linear_acceleration
const std::string DEFAULT_ODOM_FRAME_ID
Definition: constants.h:71
void publish(const boost::shared_ptr< M > &message) const
void ImuMessage_AddDefaultValues(sensor_msgs::Imu &imu_msg)
unsigned int tracker_confidence
GLdouble s
const std::string DEFAULT_IMU_OPTICAL_FRAME_ID
Definition: constants.h:86
RS2_TIMESTAMP_DOMAIN_HARDWARE_CLOCK
def info(args)
void publishPointCloud(rs2::points f, const ros::Time &t, const rs2::frameset &frameset)
const GLfloat * m
std::map< stream_index_pair, ros::Publisher > _info_publisher
float translation[3]
tf2_ros::TransformBroadcaster _dynamic_tf_broadcaster
void summary(unsigned char lvl, const std::string s)
std::map< stream_index_pair, rs2_intrinsics > _stream_intrinsics
const bool PUBLISH_ODOM_TF
Definition: constants.h:67
RS2_CAMERA_INFO_PRODUCT_ID
double dt
void reverse_memcpy(unsigned char *dst, const unsigned char *src, size_t n)
rs2_extrinsics get_extrinsics_to(const stream_profile &to) const
std::string resolveName(const std::string &name, bool remap=true) const
stream_profile get_profile() const
rs2_log_severity get_severity() const
RS2_STREAM_FISHEYE
RS2_FORMAT_Z16
rs2_vector translation
const std::string DEFAULT_UNITE_IMU_METHOD
Definition: constants.h:93
#define OPTICAL_FRAME_ID(sip)
_orientation_type orientation
int get_bytes_per_pixel() const
virtual void registerDynamicReconfigCb(ros::NodeHandle &nh) override
RS2_STREAM_POSE
void publishAlignedDepthToOthers(rs2::frameset frames, const ros::Time &t)
Publisher advertise(const std::string &base_topic, uint32_t queue_size, bool latch=false)
GLenum GLenum dst
RS2_DISTORTION_KANNALA_BRANDT4
RS2_OPTION_ASIC_TEMPERATURE
const void * get_data() const
_angular_velocity_covariance_type angular_velocity_covariance
struct Vertex vertex[VERTEXNUM]
unsigned short uint16_t
Extrinsics rsExtrinsicsToMsg(const rs2_extrinsics &extrinsics, const std::string &frame_id) const
const stream_index_pair INFRA2
K_right
RS2_CAMERA_INFO_NAME
std::string get_description() const
static void vector3TFToMsg(const Vector3 &bt_v, geometry_msgs::Vector3 &msg_v)
virtual void toggleSensors(bool enabled) override
_orientation_type orientation
GLuint GLuint stream
GLenum src
IMUInfo getImuInfo(const stream_index_pair &stream_index)
RS2_STREAM_GYRO
RS2_OPTION_DEPTH_UNITS
std::map< rs2_stream, bool > _is_first_frame
GLdouble n
unsigned char uint8_t
const char * rs2_stream_to_string(rs2_stream stream)
rs2_vector angular_velocity
std::map< stream_index_pair, ImagePublisherWithFrequencyDiagnostics > _image_publishers
TFSIMD_FORCE_INLINE const tfScalar & getW() const
e
const texture_coordinate * get_texture_coordinates() const
float rotation[9]
#define ROS_WARN(...)
void registerDynamicOption(ros::NodeHandle &nh, rs2::options sensor, std::string &module_name)
GLenum GLenum GLsizei void * image
void imu_callback_sync(rs2::frame frame, imu_sync_method sync_method=imu_sync_method::COPY)
std::queue< sensor_msgs::Imu > _pending_messages
std::pair< rs2_option, std::shared_ptr< TemperatureDiagnostics > > OptionTemperatureDiag
status
GLuint index
std::map< stream_index_pair, sensor_msgs::CameraInfo > _depth_aligned_camera_info
RS2_STREAM_COLOR
depth_frame get_depth_frame() const
GLdouble t
std::vector< OptionTemperatureDiag > _temperature_nodes
GLboolean GLboolean GLboolean GLboolean a
_position_type position
GLuint GLfloat * val
size_t size() const
RS2_FORMAT_RGB8
std::map< stream_index_pair, rs2::sensor > _sensors
not_this_one begin(...)
std::map< stream_index_pair, int > _width
bool is_checkbox(rs2::options sensor, rs2_option option)
GLdouble f
std::pair< rs2_stream, int > stream_index_pair
Definition: constants.h:98
bool is() const
void updateStreamCalibData(const rs2::video_stream_profile &video_profile)
std::map< std::string, int > get_enum_method(rs2::options sensor, rs2_option option)
GLfloat GLfloat GLfloat alpha
const stream_index_pair CONFIDENCE
const bool PUBLISH_TF
Definition: constants.h:50
sensor_msgs::PointCloud2 _msg_pointcloud
#define STREAM_NAME(sip)
rs2_vector velocity
dictionary streams
GLsizeiptr size
bool is_enum_option(rs2::options sensor, rs2_option option)
T lerp(const T &a, const T &b, const double t)
void load_json(const std::string &json_content) const
depth_frame
const char * rs2_option_to_string(rs2_option option)
D const & i
const GLubyte * c
bool is() const
std::map< stream_index_pair, rs2_extrinsics > _depth_to_other_extrinsics
const char * get_option_description(rs2_option option) const
GLdouble x
std::map< rs2_stream, std::string > _encoding
static void vector3MsgToTF(const geometry_msgs::Vector3 &msg_v, Vector3 &bt_v)
ROSCPP_DECL const std::string & getNamespace()
double get_timestamp() const
_point_step_type point_step
std::map< stream_index_pair, bool > _enable
const stream_index_pair GYRO
void hardware_reset()
T
std::map< stream_index_pair, cv::Mat > _image
RS2_STREAM_CONFIDENCE
const char * get_info(rs2_camera_info info) const
void setRPY(const tfScalar &roll, const tfScalar &pitch, const tfScalar &yaw)
std::shared_ptr< std::thread > _tf_t
bool is_option_read_only(rs2_option option) const
GLint GLsizei GLsizei height
#define ROS_WARN_ONCE(...)
std::map< rs2_stream, std::vector< std::function< void()> > > _video_functions_stack
#define ROS_INFO(...)
const stream_index_pair DEPTH
const vertex * get_vertices() const
_child_frame_id_type child_frame_id
rs2_quaternion rotation
T as() const
bool param(const std::string &param_name, T &param_val, const T &default_val) const
std::shared_ptr< SyncedImuPublisher > _synced_imu_publisher
profiles
const char * rs2_format_to_string(rs2_format format)
std::string create_graph_resource_name(const std::string &original_name)
GLenum GLenum GLsizei const GLuint GLboolean enabled
ROSCPP_DECL bool ok()
GLint j
const std::string TYPE_16UC1
GLint left
rs2_format
float get_depth_scale(rs2::device dev)
std::map< rs2_stream, std::string > _stream_name
#define ROS_WARN_STREAM_COND(cond, args)
RS2_CAMERA_INFO_FIRMWARE_VERSION
TFSIMD_FORCE_INLINE Vector3 quatRotate(const Quaternion &rotation, const Vector3 &v)
void start(S on_frame)
GLint void * img
int stream_index() const
iterator end() const
iterator begin() const
sensor
std::map< stream_index_pair, ImagePublisherWithFrequencyDiagnostics > _depth_aligned_image_publishers
std::map< stream_index_pair, cv::Mat > _depth_scaled_image
Publisher advertise(const std::string &topic, uint32_t queue_size, bool latch=false)
void multiple_message_callback(rs2::frame frame, imu_sync_method sync_method)
bool supports(rs2_option option) const
rs2_stream rs2_string_to_stream(std::string str)
rs2_intrinsics get_intrinsics() const
RS2_STREAM_DEPTH
GLenum func
const int IMU_FPS
Definition: constants.h:57
rs2_stream
#define ROS_WARN_STREAM(args)
void sendTransform(const geometry_msgs::TransformStamped &transform)
#define ROS_DEBUG_STREAM(args)
P_right
rs2_notification_category get_category() const
const bool ALLOW_NO_TEXTURE_POINTS
Definition: constants.h:46
bool sleep()
RS2_STREAM_INFRARED
void publishFrame(rs2::frame f, const ros::Time &t, const stream_index_pair &stream, std::map< stream_index_pair, cv::Mat > &images, const std::map< stream_index_pair, ros::Publisher > &info_publishers, const std::map< stream_index_pair, ImagePublisherWithFrequencyDiagnostics > &image_publishers, std::map< stream_index_pair, int > &seq, std::map< stream_index_pair, sensor_msgs::CameraInfo > &camera_info, const std::map< stream_index_pair, std::string > &optical_frame_id, const std::map< rs2_stream, std::string > &encoding, bool copy_data_from_frame=true)
Quaternion inverse() const
bool getEnabledProfile(const stream_index_pair &stream_index, rs2::stream_profile &profile)
RS2_STREAM_ACCEL
rs2_format format
const bool POINTCLOUD
Definition: constants.h:45
rs2_format format() const
_angular_velocity_type angular_velocity
std::map< stream_index_pair, std::vector< rs2::stream_profile > > _enabled_profiles
#define ROS_INFO_STREAM_ONCE(args)
const int IMAGE_FPS
Definition: constants.h:55
const int IMAGE_WIDTH
Definition: constants.h:53
bpp
GLdouble GLdouble GLdouble q
std::map< stream_index_pair, ros::Publisher > _imu_publishers
void updateExtrinsicsCalibData(const rs2::video_stream_profile &left_video_profile, const rs2::video_stream_profile &right_video_profile)
virtual void calcAndPublishStaticTransform(const stream_index_pair &stream, const rs2::stream_profile &base_profile)
uint32_t getNumSubscribers() const
GLboolean enable
void diagnostics(diagnostic_updater::DiagnosticStatusWrapper &status)
GLdouble right
#define ROS_INFO_STREAM(args)
RS2_STREAM_ANY
std::map< stream_index_pair, ros::Publisher > _depth_to_other_extrinsics_publishers
GLuint in
std::vector< rs2_option > _monitor_options
std::map< stream_index_pair, std::string > _frame_id
rs2_timestamp_domain get_frame_timestamp_domain() const
int get_height() const
std::map< stream_index_pair, int > _height
const stream_index_pair ACCEL
const stream_index_pair POSE
std::map< rs2_stream, std::shared_ptr< rs2::align > > _align
void set_sensor_auto_exposure_roi(rs2::sensor sensor)
std::map< stream_index_pair, sensor_msgs::CameraInfo > _camera_info
const bool SYNC_FRAMES
Definition: constants.h:48
_orientation_covariance_type orientation_covariance
void readAndSetDynamicParam(ros::NodeHandle &nh1, std::shared_ptr< ddynamic_reconfigure::DDynamicReconfigure > ddynrec, const std::string option_name, const int min_val, const int max_val, rs2::sensor sensor, int *option_value)
std::map< rs2_stream, int > _unit_step_size
_frame_id_type frame_id
const double TF_PUBLISH_RATE
Definition: constants.h:51
bool is() const
std::shared_ptr< std::thread > _monitoring_t
str(const bytes &b)
void publish_static_tf(const ros::Time &t, const float3 &trans, const tf::Quaternion &q, const std::string &from, const std::string &to)
void open(const stream_profile &profile) const
static Time now()
_linear_acceleration_covariance_type linear_acceleration_covariance
std::vector< geometry_msgs::TransformStamped > _static_tf_msgs
const bool ORDERED_POINTCLOUD
Definition: constants.h:47
double get_timestamp() const
std::map< rs2_stream, std::string > _depth_aligned_encoding
ROSCPP_DECL void shutdown()
const std::vector< stream_index_pair > HID_STREAMS
std::map< stream_index_pair, int > _fps
const bool HOLD_BACK_IMU_FOR_FRAMES
Definition: constants.h:66
_translation_type translation
std::map< std::string, std::function< void(rs2::frame)> > _sensors_callback
std::vector< rs2::sensor > _dev_sensors
void runFirstFrameInitialization(rs2_stream stream_type)
std::vector< std::shared_ptr< ddynamic_reconfigure::DDynamicReconfigure > > _ddynrec
GLuint res
void add(const std::string &key, const T &val)
std::map< stream_index_pair, int > _depth_aligned_seq
_stamp_type stamp
option_range get_option_range(rs2_option option) const
const stream_index_pair FISHEYE
const char * get_info(rs2_camera_info info) const
const stream_index_pair COLOR
#define ROS_ERROR_STREAM(args)
std::map< std::string, rs2::region_of_interest > _auto_exposure_roi
bool isValidCharInName(char c)
sensor_msgs::Imu CreateUnitedMessage(const CimuData accel_data, const CimuData gyro_data)
const stream_index_pair INFRA1
rs2_format format
void sendTransform(const geometry_msgs::TransformStamped &transform)
bool setBaseTime(double frame_time, rs2_timestamp_domain time_domain)
rs2::stream_profile getAProfile(const stream_index_pair &stream)
void setPointCloud2FieldsByString(int n_fields,...)
const char * rs2_timestamp_domain_to_string(rs2_timestamp_domain info)
unsigned long long get_frame_number() const
::sensor_msgs::Imu_< std::allocator< void > > Imu
rs2_stream stream_type() const
std::map< stream_index_pair, ros::Publisher > _depth_aligned_info_publisher
std::map< stream_index_pair, std::string > _optical_frame_id
void set_option(rs2_option option, float value) const
#define ALIGNED_DEPTH_TO_FRAME_ID(sip)
float get_option(rs2_option option) const
const float ROS_DEPTH_SCALE
Definition: constants.h:97
bool is_int_option(rs2::options sensor, rs2_option option)
const bool ENABLE_IMU
Definition: constants.h:65
GLdouble v
_header_type header
int get_width() const
int fps() const
const char * get_option_value_description(rs2_option option, float val) const
RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME
void set_auto_exposure_roi(const std::string option_name, rs2::sensor sensor, int new_value)
const int IMAGE_HEIGHT
Definition: constants.h:54
void start(T callback) const
const std::string DEFAULT_FILTERS
Definition: constants.h:94
std::map< stream_index_pair, std::string > _depth_aligned_frame_id
void FillImuData_LinearInterpolation(const CimuData imu_data, std::deque< sensor_msgs::Imu > &imu_msgs)
GLint GLsizei width
RS2_FORMAT_Y8
std::shared_ptr< ::sensor_msgs::Image > ImagePtr
Q
GLintptr offset
T as() const
T as() const
#define ROS_DEBUG(...)
std::map< rs2_stream, int > _format
std::string to_string(T value)
#define FRAME_ID(sip)
rs2_timestamp_domain


realsense2_camera
Author(s): Sergey Dorodnicov , Doron Hirshberg
autogenerated on Mon Mar 1 2021 03:34:04