camera_aravis_nodelet.cpp
Go to the documentation of this file.
1 /****************************************************************************
2  *
3  * camera_aravis
4  *
5  * Copyright © 2022 Fraunhofer IOSB and contributors
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Library General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Library General Public License for more details.
16  *
17  * You should have received a copy of the GNU Library General Public
18  * License along with this library; if not, write to the
19  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20  * Boston, MA 02110-1301, USA.
21  *
22  ****************************************************************************/
23 
25 
26 #include <thread>
27 
30 
31 namespace camera_aravis
32 {
33 
35 {
36 }
37 
39 {
40  for(int i=0; i < p_streams_.size(); i++) {
41  if(p_streams_[i]) {
42  arv_stream_set_emit_signals(p_streams_[i], FALSE);
43  }
44  }
45 
46  spawning_ = false;
47  if (spawn_stream_thread_.joinable())
48  {
49  spawn_stream_thread_.join();
50  }
51 
53  if (software_trigger_thread_.joinable())
54  {
56  }
57 
58  tf_thread_active_ = false;
59  if (tf_dyn_thread_.joinable())
60  {
61  tf_dyn_thread_.join();
62  }
63 
65  if (diagnostic_thread_.joinable())
66  {
67  diagnostic_thread_.join();
68  }
69 
70 
71  for(int i=0; i < p_streams_.size(); i++) {
72  guint64 n_completed_buffers = 0;
73  guint64 n_failures = 0;
74  guint64 n_underruns = 0;
75  arv_stream_get_statistics(p_streams_[i], &n_completed_buffers, &n_failures, &n_underruns);
76  ROS_INFO("Completed buffers = %Lu", (unsigned long long ) n_completed_buffers);
77  ROS_INFO("Failures = %Lu", (unsigned long long ) n_failures);
78  ROS_INFO("Underruns = %Lu", (unsigned long long ) n_underruns);
79  if (arv_camera_is_gv_device(p_camera_))
80  {
81  guint64 n_resent;
82  guint64 n_missing;
83  arv_gv_stream_get_statistics(reinterpret_cast<ArvGvStream*>(p_streams_[i]), &n_resent, &n_missing);
84  ROS_INFO("Resent buffers = %Lu", (unsigned long long ) n_resent);
85  ROS_INFO("Missing = %Lu", (unsigned long long ) n_missing);
86  }
87  }
88 
89 
90  if (p_device_)
91  {
92  arv_device_execute_command(p_device_, "AcquisitionStop");
93  }
94 
95  for(int i = 0; i < p_streams_.size(); i++) {
96  g_object_unref(p_streams_[i]);
97  }
98  g_object_unref(p_camera_);
99 }
100 
102 {
105 
106  // Retrieve ros parameters
107  verbose_ = pnh.param<bool>("verbose", verbose_);
108  guid_ = pnh.param<std::string>("guid", guid_); // Get the camera guid as a parameter or use the first device.
109 
110  use_ptp_stamp_ = pnh.param<bool>("use_ptp_timestamp", use_ptp_stamp_);
111  ptp_enable_feature_ = pnh.param<std::string>("ptp_enable_feature_name", ptp_enable_feature_);
112  ptp_status_feature_ = pnh.param<std::string>("ptp_status_feature_name", ptp_status_feature_);
113  ptp_set_cmd_feature_ = pnh.param<std::string>("ptp_set_cmd_feature_name", ptp_set_cmd_feature_);
114 
115  bool init_params_from_reconfig = pnh.param<bool>("init_params_from_dyn_reconfigure", true);
116 
117  std::string awb_mode = pnh.param<std::string>("BalanceWhiteAuto", "Continuous");
118  std::string wb_ratio_selector_feature = pnh.param<std::string>("wb_ratio_selector_feature", "");
119  std::string wb_ratio_feature = pnh.param<std::string>("wb_ratio_feature", "");
120 
121  std::string wb_ratio_selector_args = pnh.param<std::string>("wb_ratio_selectors", "");
122  std::vector<std::string> wb_ratio_selector_values;
123  parseStringArgs(wb_ratio_selector_args, wb_ratio_selector_values);
124 
125  std::string wb_ratio_args = pnh.param<std::string>("wb_ratios", "");
126  std::vector<std::string> wb_ratio_str_values;
127  parseStringArgs(wb_ratio_args, wb_ratio_str_values);
128 
129  if(wb_ratio_selector_values.size() != wb_ratio_str_values.size())
130  {
131  ROS_WARN("Lists of 'wb_ratio_selector' and 'wb_ratio' have different sizes. "
132  "Only setting values which exist in both lists.");
133  }
134 
135  pub_ext_camera_info_ = pnh.param<bool>("ExtendedCameraInfo", pub_ext_camera_info_); // publish an extended camera info message
136  pub_tf_optical_ = pnh.param<bool>("publish_tf", pub_tf_optical_); // should we publish tf transforms to camera optical frame?
137 
138  std::string stream_channel_args;
139  if (pnh.getParam("channel_names", stream_channel_args)) {
140  parseStringArgs(stream_channel_args, stream_names_);
141  } else {
142  stream_names_ = { "" };
143  }
144 
145  std::string pixel_format_args;
146  std::vector<std::string> pixel_formats;
147  pnh.param("PixelFormat", pixel_format_args, pixel_format_args);
148  parseStringArgs(pixel_format_args, pixel_formats);
149 
150  std::string calib_url_args;
151  std::vector<std::string> calib_urls;
152  pnh.param("camera_info_url", calib_url_args, calib_url_args);
153  parseStringArgs(calib_url_args, calib_urls);
154 
155  diagnostic_yaml_url_ = pnh.param<std::string>("diagnostic_yaml_url", diagnostic_yaml_url_);
157  std::max(0.0, pnh.param<double>("diagnostic_publish_rate", 0.1));
158 
160  this, true, false);
161  shutdown_delay_s_ = pnh.param<double>("shutdown_delay_s", 5.0);
162 
163  // Print out some useful info.
164  ROS_INFO("Attached cameras:");
165  arv_update_device_list();
166  uint n_interfaces = arv_get_n_interfaces();
167  ROS_INFO("# Interfaces: %d", n_interfaces);
168 
169  uint n_devices = arv_get_n_devices();
170  ROS_INFO("# Devices: %d", n_devices);
171  for (uint i = 0; i < n_devices; i++)
172  ROS_INFO("Device%d: %s", i, arv_get_device_id(i));
173 
174  if (n_devices == 0)
175  {
176  ROS_ERROR("No cameras detected.");
177  return;
178  }
179 
180  // Open the camera, and set it up.
181  while (!p_camera_)
182  {
183  if (guid_.empty())
184  {
185  ROS_INFO("Opening: (any)");
186  p_camera_ = arv_camera_new(NULL);
187  }
188  else
189  {
190  ROS_INFO_STREAM("Opening: " << guid_);
191  p_camera_ = arv_camera_new(guid_.c_str());
192  }
193  ros::Duration(1.0).sleep();
194  }
195 
196  p_device_ = arv_camera_get_device(p_camera_);
197  const char* vendor_name = arv_camera_get_vendor_name(p_camera_);
198  const char* model_name = arv_camera_get_model_name(p_camera_);
199  const char* device_id = arv_camera_get_device_id(p_camera_);
200  const char* device_sn = arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber");
201  ROS_INFO("Successfully Opened: %s-%s-%s", vendor_name, model_name,
202  (device_sn) ? device_sn : device_id);
203 
204  // Start the dynamic_reconfigure server.
205  reconfigure_server_.reset(new dynamic_reconfigure::Server<Config>(reconfigure_mutex_, pnh));
206  reconfigure_server_->getConfigDefault(config_);
207  reconfigure_server_->getConfigMin(config_min_);
208  reconfigure_server_->getConfigMax(config_max_);
209 
210  // See which features exist in this camera device
212 
213  // Check the number of streams for this camera
214  num_streams_ = arv_device_get_integer_feature_value(p_device_, "DeviceStreamChannelCount");
215  // if this return 0, try the deprecated GevStreamChannelCount in case this is an older camera
216  if (!num_streams_) {
217  num_streams_ = arv_device_get_integer_feature_value(p_device_, "GevStreamChannelCount");
218  }
219  // if this also returns 0, assume number of streams = 1
220  if (!num_streams_) {
221  ROS_WARN("Unable to detect number of supported stream channels.");
222  num_streams_ = 1;
223  }
224 
225  ROS_INFO("Number of supported stream channels %i.", (int) num_streams_);
226 
227  // check if every stream channel has been given a channel name
228  if (stream_names_.size() < num_streams_) {
229  num_streams_ = stream_names_.size();
230  }
231 
232  // initialize the sensor structs
233  for(int i = 0; i < num_streams_; i++) {
234  sensors_.push_back(new Sensor());
235  p_streams_.push_back(NULL);
237  p_camera_info_managers_.push_back(NULL);
238  p_camera_info_node_handles_.push_back(NULL);
239  camera_infos_.push_back(sensor_msgs::CameraInfoPtr());
242  }
243 
244  // Get parameter bounds.
245  arv_camera_get_exposure_time_bounds(p_camera_, &config_min_.ExposureTime, &config_max_.ExposureTime);
246  arv_camera_get_gain_bounds(p_camera_, &config_min_.Gain, &config_max_.Gain);
247  for(int i = 0; i < num_streams_; i++) {
248  arv_camera_gv_select_stream_channel(p_camera_,i);
249  arv_camera_get_sensor_size(p_camera_, &sensors_[i]->width, &sensors_[i]->height);
250  }
251  arv_camera_get_width_bounds(p_camera_, &roi_.width_min, &roi_.width_max);
252  arv_camera_get_height_bounds(p_camera_, &roi_.height_min, &roi_.height_max);
253  arv_camera_get_frame_rate_bounds(p_camera_, &config_min_.AcquisitionFrameRate, &config_max_.AcquisitionFrameRate);
254  if (implemented_features_["FocusPos"])
255  {
256  gint64 focus_min64, focus_max64;
257  arv_device_get_integer_feature_bounds(p_device_, "FocusPos", &focus_min64, &focus_max64);
258  config_min_.FocusPos = focus_min64;
259  config_max_.FocusPos = focus_max64;
260  }
261  else
262  {
263  config_min_.FocusPos = 0;
264  config_max_.FocusPos = 0;
265  }
266 
267  for(int i = 0; i < num_streams_; i++) {
268  arv_camera_gv_select_stream_channel(p_camera_,i);
269 
270  if(init_params_from_reconfig)
271  {
272  // Initial camera settings.
273  if (implemented_features_["ExposureTime"])
274  arv_camera_set_exposure_time(p_camera_, config_.ExposureTime);
275  else if (implemented_features_["ExposureTimeAbs"])
276  arv_device_set_float_feature_value(p_device_, "ExposureTimeAbs", config_.ExposureTime);
277  if (implemented_features_["Gain"])
278  arv_camera_set_gain(p_camera_, config_.Gain);
279  if (implemented_features_["AcquisitionFrameRateEnable"])
280  arv_device_set_integer_feature_value(p_device_, "AcquisitionFrameRateEnable", 1);
281  if (implemented_features_["AcquisitionFrameRate"])
282  arv_camera_set_frame_rate(p_camera_, config_.AcquisitionFrameRate);
283 
284  // init default to full sensor resolution
285  arv_camera_set_region (p_camera_, 0, 0, roi_.width_max, roi_.height_max);
286 
287  // Set up the triggering.
288  if (implemented_features_["TriggerMode"] && implemented_features_["TriggerSelector"])
289  {
290  arv_device_set_string_feature_value(p_device_, "TriggerSelector", "FrameStart");
291  arv_device_set_string_feature_value(p_device_, "TriggerMode", "Off");
292  }
293  }
294 
295  // possibly set or override from given parameter
297 
298  if(awb_mode == "Off"
299  && !wb_ratio_selector_feature.empty() && implemented_features_[wb_ratio_selector_feature]
300  && !wb_ratio_feature.empty() && implemented_features_[wb_ratio_feature])
301  {
302  for(int l = 0; l < std::min(wb_ratio_selector_values.size(), wb_ratio_str_values.size());
303  l++)
304  {
305  arv_device_set_string_feature_value(p_device_, wb_ratio_selector_feature.c_str(),
306  wb_ratio_selector_values[l].c_str());
307  arv_device_set_float_feature_value(p_device_, wb_ratio_feature.c_str(),
308  std::stof(wb_ratio_str_values[l]));
309  }
310  }
311  }
312 
313  ros::Duration(2.0).sleep();
314 
315  // get current state of camera for config_
316  arv_camera_get_region(p_camera_, &roi_.x, &roi_.y, &roi_.width, &roi_.height);
317  config_.AcquisitionMode =
318  implemented_features_["AcquisitionMode"] ? arv_device_get_string_feature_value(p_device_, "AcquisitionMode") :
319  "Continuous";
320  config_.AcquisitionFrameRate =
321  implemented_features_["AcquisitionFrameRate"] ? arv_camera_get_frame_rate(p_camera_) : 0.0;
322  config_.ExposureAuto =
323  implemented_features_["ExposureAuto"] ? arv_device_get_string_feature_value(p_device_, "ExposureAuto") : "Off";
324  config_.ExposureTime = implemented_features_["ExposureTime"] ? arv_camera_get_exposure_time(p_camera_) : 0.0;
325  config_.GainAuto =
326  implemented_features_["GainAuto"] ? arv_device_get_string_feature_value(p_device_, "GainAuto") : "Off";
327  config_.Gain = implemented_features_["Gain"] ? arv_camera_get_gain(p_camera_) : 0.0;
328  config_.TriggerMode =
329  implemented_features_["TriggerMode"] ? arv_device_get_string_feature_value(p_device_, "TriggerMode") : "Off";
330  config_.TriggerSource =
331  implemented_features_["TriggerSource"] ? arv_device_get_string_feature_value(p_device_, "TriggerSource") :
332  "Software";
333 
334  // get pixel format name and translate into corresponding ROS name
335  for(int i = 0; i < num_streams_; i++) {
336  arv_camera_gv_select_stream_channel(p_camera_,i);
337  std::string source_selector = "Source" + std::to_string(i);
338  arv_device_set_string_feature_value(p_device_, "SourceSelector", source_selector.c_str());
339  arv_device_set_string_feature_value(p_device_, "PixelFormat", pixel_formats[i].c_str());
340  sensors_[i]->pixel_format = std::string(arv_device_get_string_feature_value(p_device_, "PixelFormat"));
341  const auto sensor_iter = CONVERSIONS_DICTIONARY.find(sensors_[i]->pixel_format);
342  if (sensor_iter!=CONVERSIONS_DICTIONARY.end()) {
343  convert_formats.push_back(sensor_iter->second);
344  }
345  else {
346  ROS_WARN_STREAM("There is no known conversion from " << sensors_[i]->pixel_format << " to a usual ROS image encoding. Likely you need to implement one.");
347  }
348 
349  sensors_[i]->n_bits_pixel = ARV_PIXEL_FORMAT_BIT_PER_PIXEL(
350  arv_device_get_integer_feature_value(p_device_, "PixelFormat"));
351  config_.FocusPos =
352  implemented_features_["FocusPos"] ? arv_device_get_integer_feature_value(p_device_, "FocusPos") : 0;
353 
354  }
355 
356  // Get other (non GenIcam) parameter current values.
357  pnh.param<double>("softwaretriggerrate", config_.softwaretriggerrate, config_.softwaretriggerrate);
358  pnh.param<int>("mtu", config_.mtu, config_.mtu);
359  pnh.param<std::string>("frame_id", config_.frame_id, config_.frame_id);
360  pnh.param<bool>("auto_master", config_.AutoMaster, config_.AutoMaster);
361  pnh.param<bool>("auto_slave", config_.AutoSlave, config_.AutoSlave);
362  setAutoMaster(config_.AutoMaster);
363  setAutoSlave(config_.AutoSlave);
364 
365  if (pub_tf_optical_)
366  {
367  tf_optical_.header.frame_id = config_.frame_id;
368  tf_optical_.child_frame_id = config_.frame_id + "_optical";
369  tf_optical_.transform.translation.x = 0.0;
370  tf_optical_.transform.translation.y = 0.0;
371  tf_optical_.transform.translation.z = 0.0;
372  tf_optical_.transform.rotation.x = -0.5;
373  tf_optical_.transform.rotation.y = 0.5;
374  tf_optical_.transform.rotation.z = -0.5;
375  tf_optical_.transform.rotation.w = 0.5;
376 
377  double tf_publish_rate;
378  pnh.param<double>("tf_publish_rate", tf_publish_rate, 0);
379  if (tf_publish_rate > 0.)
380  {
381  // publish dynamic tf at given rate (recommended when running as a Nodelet, since latching has bugs-by-design)
383  tf_dyn_thread_ = std::thread(&CameraAravisNodelet::publishTfLoop, this, tf_publish_rate);
384  }
385  else
386  {
387  // publish static tf only once (latched)
389  tf_optical_.header.stamp = ros::Time::now();
390  p_stb_->sendTransform(tf_optical_);
391  }
392  }
393 
394  // read diagnostic features from yaml file and initialize publishing thread
395  if(!diagnostic_yaml_url_.empty() && diagnostic_publish_rate_ > 0.0)
396  {
397  try
398  {
399  diagnostic_features_ = YAML::LoadFile(diagnostic_yaml_url_);
400  }
401  catch(const YAML::BadFile& e)
402  {
403  ROS_WARN("YAML file cannot be loaded: %s", e.what());
404  ROS_WARN("Camera diagnostics will not be published.");
405  }
406  catch(const YAML::ParserException& e)
407  {
408  ROS_WARN("YAML file is malformed: %s", e.what());
409  ROS_WARN("Camera diagnostics will not be published.");
410  }
411 
412  if(diagnostic_features_.size() > 0)
413  {
414  diagnostic_pub_ = getNodeHandle().advertise<CameraDiagnostics>(
415  ros::names::remap(this->getName() + "/diagnostics"), 1, true);
416 
420  }
421  else
422  {
423  ROS_WARN("No diagnostic features specified.");
424  ROS_WARN("Camera diagnostics will not be published.");
425  }
426  }
427  else if(!diagnostic_yaml_url_.empty() && diagnostic_publish_rate_ <= 0.0)
428  {
429  ROS_WARN("diagnostic_publish_rate is <= 0.0");
430  ROS_WARN("Camera diagnostics will not be published.");
431  }
432 
433  // default calibration url is [DeviceSerialNumber/DeviceID].yaml
434  ArvGcNode *p_gc_node;
435  GError *error = NULL;
436 
437  p_gc_node = arv_device_get_feature(p_device_, "DeviceSerialNumber");
438 
439  if(calib_urls[0].empty()) {
440  if( arv_gc_feature_node_is_implemented( ARV_GC_FEATURE_NODE(p_gc_node), &error) ) {
441  GType device_serial_return_type = arv_gc_feature_node_get_value_type( ARV_GC_FEATURE_NODE(p_gc_node));
442  // If the feature DeviceSerialNumber is not string, it indicates that the camera is using an older version of the genicam SFNC.
443  // Older camera models do not have a DeviceSerialNumber as string, but as integer and often set to 0.
444  // In those cases use the outdated DeviceID (deprecated since genicam SFNC v2.0).
445  if (device_serial_return_type == G_TYPE_STRING) {
446  calib_urls[0] = arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber");
447  calib_urls[0] += ".yaml";
448  } else if (device_serial_return_type == G_TYPE_INT64) {
449  calib_urls[0] = arv_device_get_string_feature_value(p_device_, "DeviceID");
450  calib_urls[0] += ".yaml";
451  }
452  }
453  }
454 
455  for(int i = 0; i < num_streams_; i++) {
456  // Start the camerainfo manager.
457  std::string camera_info_frame_id = config_.frame_id;
458  if(!stream_names_[i].empty())
459  camera_info_frame_id = config_.frame_id + '/' + stream_names_[i];
460 
461  // Use separate node handles for CameraInfoManagers when using a Multisource Camera
462  if(!stream_names_[i].empty()) {
464  p_camera_info_managers_[i].reset(new camera_info_manager::CameraInfoManager(*p_camera_info_node_handles_[i], camera_info_frame_id, calib_urls[i]));
465  } else {
466  p_camera_info_managers_[i].reset(new camera_info_manager::CameraInfoManager(pnh, camera_info_frame_id, calib_urls[i]));
467  }
468 
469 
470  ROS_INFO("Reset %s Camera Info Manager", stream_names_[i].c_str());
471  ROS_INFO("%s Calib URL: %s", stream_names_[i].c_str(), calib_urls[i].c_str());
472 
473  // publish an ExtendedCameraInfo message
475  }
476 
477  // update the reconfigure config
478  reconfigure_server_->setConfigMin(config_min_);
479  reconfigure_server_->setConfigMax(config_max_);
480  reconfigure_server_->updateConfig(config_);
481  ros::Duration(2.0).sleep();
482  reconfigure_server_->setCallback(boost::bind(&CameraAravisNodelet::rosReconfigureCallback, this, _1, _2));
483 
484  // Print information.
485  ROS_INFO(" Using Camera Configuration:");
486  ROS_INFO(" ---------------------------");
487  ROS_INFO(" Vendor name = %s", arv_device_get_string_feature_value(p_device_, "DeviceVendorName"));
488  ROS_INFO(" Model name = %s", arv_device_get_string_feature_value(p_device_, "DeviceModelName"));
489  ROS_INFO(" Device id = %s", arv_device_get_string_feature_value(p_device_, "DeviceUserID"));
490  ROS_INFO(" Serial number = %s", arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber"));
491  ROS_INFO(
492  " Type = %s",
493  arv_camera_is_uv_device(p_camera_) ? "USB3Vision" :
494  (arv_camera_is_gv_device(p_camera_) ? "GigEVision" : "Other"));
495  ROS_INFO(" Sensor width = %d", sensors_[0]->width);
496  ROS_INFO(" Sensor height = %d", sensors_[0]->height);
497  ROS_INFO(" ROI x,y,w,h = %d, %d, %d, %d", roi_.x, roi_.y, roi_.width, roi_.height);
498  ROS_INFO(" Pixel format = %s", sensors_[0]->pixel_format.c_str());
499  ROS_INFO(" BitsPerPixel = %lu", sensors_[0]->n_bits_pixel);
500  ROS_INFO(" Acquisition Mode = %s",
501  implemented_features_["AcquisitionMode"] ? arv_device_get_string_feature_value(p_device_, "AcquisitionMode") :
502  "(not implemented in camera)");
503  ROS_INFO(" Trigger Mode = %s",
504  implemented_features_["TriggerMode"] ? arv_device_get_string_feature_value(p_device_, "TriggerMode") :
505  "(not implemented in camera)");
506  ROS_INFO(" Trigger Source = %s",
507  implemented_features_["TriggerSource"] ? arv_device_get_string_feature_value(p_device_, "TriggerSource") :
508  "(not implemented in camera)");
509  ROS_INFO(" Can set FrameRate: %s", implemented_features_["AcquisitionFrameRate"] ? "True" : "False");
510  if (implemented_features_["AcquisitionFrameRate"])
511  {
512  ROS_INFO(" AcquisitionFrameRate = %g hz", config_.AcquisitionFrameRate);
513  }
514 
515  if (implemented_features_["BalanceWhiteAuto"])
516  {
517  ROS_INFO(" BalanceWhiteAuto = %s", awb_mode.c_str());
518  if(awb_mode != "Continuous"
519  && !wb_ratio_selector_feature.empty() && implemented_features_[wb_ratio_selector_feature]
520  && !wb_ratio_feature.empty() && implemented_features_[wb_ratio_feature])
521  {
522  for(int l = 0; l < std::min(wb_ratio_selector_values.size(), wb_ratio_str_values.size());
523  l++)
524  {
525  arv_device_set_string_feature_value(p_device_, wb_ratio_selector_feature.c_str(),
526  wb_ratio_selector_values[l].c_str());
527  float wb_ratio_val = arv_device_get_float_feature_value(p_device_, wb_ratio_feature.c_str());
528  ROS_INFO(" BalanceRatio %s = %f", wb_ratio_selector_values[l].c_str(), wb_ratio_val);
529  }
530  }
531  }
532 
533  ROS_INFO(" Can set Exposure: %s", implemented_features_["ExposureTime"] ? "True" : "False");
534  if (implemented_features_["ExposureTime"])
535  {
536  ROS_INFO(" Can set ExposureAuto: %s", implemented_features_["ExposureAuto"] ? "True" : "False");
537  ROS_INFO(" Exposure = %g us in range [%g,%g]", config_.ExposureTime, config_min_.ExposureTime,
538  config_max_.ExposureTime);
539  }
540 
541  ROS_INFO(" Can set Gain: %s", implemented_features_["Gain"] ? "True" : "False");
542  if (implemented_features_["Gain"])
543  {
544  ROS_INFO(" Can set GainAuto: %s", implemented_features_["GainAuto"] ? "True" : "False");
545  ROS_INFO(" Gain = %f %% in range [%f,%f]", config_.Gain, config_min_.Gain, config_max_.Gain);
546  }
547 
548  ROS_INFO(" Can set FocusPos: %s", implemented_features_["FocusPos"] ? "True" : "False");
549 
550  if (implemented_features_["GevSCPSPacketSize"])
551  ROS_INFO(" Network mtu = %lu", arv_device_get_integer_feature_value(p_device_, "GevSCPSPacketSize"));
552 
553  ROS_INFO(" ---------------------------");
554 
555  // Reset PTP clock
556  if (use_ptp_stamp_)
557  {
558  resetPtpClock();
559 
560  if(!ptp_set_cmd_feature_.empty())
561  {
563  {
564  arv_device_execute_command(p_device_, ptp_set_cmd_feature_.c_str());
565  }
566  else
567  ROS_WARN("PTP Error: ptp_set_cmd_feature_ '%s' is not available on the device.",
568  ptp_set_cmd_feature_.c_str());
569  }
570  }
571 
572  // spawn camera stream in thread, so onInit() is not blocked
573  spawning_ = true;
575 }
576 
578 {
581 
582  for(int i = 0; i < num_streams_; i++) {
583  while (spawning_) {
584  arv_camera_gv_select_stream_channel(p_camera_, i);
585  p_streams_[i] = arv_camera_create_stream(p_camera_, NULL, NULL);
586 
587  if (p_streams_[i])
588  {
589  // Load up some buffers.
590  arv_camera_gv_select_stream_channel(p_camera_, i);
591  const gint n_bytes_payload_stream_ = arv_camera_get_payload(p_camera_);
592 
593  p_buffer_pools_[i].reset(new CameraBufferPool(p_streams_[i], n_bytes_payload_stream_, 10));
594 
595  if (arv_camera_is_gv_device(p_camera_))
596  {
597  tuneGvStream(reinterpret_cast<ArvGvStream*>(p_streams_[i]));
598  }
599  break;
600  }
601  else
602  {
603  ROS_WARN("Stream %i: Could not create image stream for %s. Retrying...", i, guid_.c_str());
604  ros::Duration(1.0).sleep();
605  ros::spinOnce();
606  }
607  }
608  }
609 
610  // Monitor whether anyone is subscribed to the camera stream
611  std::vector<image_transport::SubscriberStatusCallback> image_cbs_;
612  std::vector<ros::SubscriberStatusCallback> info_cbs_;
613 
615  { this->rosConnectCallback();};
617  { this->rosConnectCallback();};
618 
619  for(int i = 0; i < num_streams_; i++) {
620  image_transport::ImageTransport *p_transport;
621  // Set up image_raw
622  std::string topic_name = this->getName();
623  p_transport = new image_transport::ImageTransport(pnh);
624  if(num_streams_ != 1 || !stream_names_[i].empty()) {
625  topic_name += "/" + stream_names_[i];
626  }
627 
628  cam_pubs_[i] = p_transport->advertiseCamera(
629  ros::names::remap(topic_name + "/image_raw"),
630  1, image_cb, image_cb, info_cb, info_cb);
631  }
632 
633  // Connect signals with callbacks.
634  for(int i = 0; i < num_streams_; i++) {
635  StreamIdData* data = new StreamIdData();
636  data->can = this;
637  data->stream_id = i;
638  g_signal_connect(p_streams_[i], "new-buffer", (GCallback)CameraAravisNodelet::newBufferReadyCallback, data);
639  }
640  g_signal_connect(p_device_, "control-lost", (GCallback)CameraAravisNodelet::controlLostCallback, this);
641 
642  for(int i = 0; i < num_streams_; i++) {
643  arv_stream_set_emit_signals(p_streams_[i], TRUE);
644  }
645 
646  if (std::any_of(cam_pubs_.begin(), cam_pubs_.end(),
647  [](image_transport::CameraPublisher pub){ return pub.getNumSubscribers() > 0; })
648  ) {
649  arv_camera_start_acquisition(p_camera_);
650  }
651 
652  this->get_integer_service_ = pnh.advertiseService("get_integer_feature_value", &CameraAravisNodelet::getIntegerFeatureCallback, this);
653  this->get_float_service_ = pnh.advertiseService("get_float_feature_value", &CameraAravisNodelet::getFloatFeatureCallback, this);
654  this->get_string_service_ = pnh.advertiseService("get_string_feature_value", &CameraAravisNodelet::getStringFeatureCallback, this);
655  this->get_boolean_service_ = pnh.advertiseService("get_boolean_feature_value", &CameraAravisNodelet::getBooleanFeatureCallback, this);
656 
657  this->set_integer_service_ = pnh.advertiseService("set_integer_feature_value", &CameraAravisNodelet::setIntegerFeatureCallback, this);
658  this->set_float_service_ = pnh.advertiseService("set_float_feature_value", &CameraAravisNodelet::setFloatFeatureCallback, this);
659  this->set_string_service_ = pnh.advertiseService("set_string_feature_value", &CameraAravisNodelet::setStringFeatureCallback, this);
660  this->set_boolean_service_ = pnh.advertiseService("set_boolean_feature_value", &CameraAravisNodelet::setBooleanFeatureCallback, this);
661 
662  ROS_INFO("Done initializing camera_aravis.");
663 }
664 
665 bool CameraAravisNodelet::getIntegerFeatureCallback(camera_aravis::get_integer_feature_value::Request& request, camera_aravis::get_integer_feature_value::Response& response)
666 {
667  const char* feature_name = request.feature.c_str();
668  response.response = arv_device_get_integer_feature_value(this->p_device_, feature_name);
669  return true;
670 }
671 
672 bool CameraAravisNodelet::setIntegerFeatureCallback(camera_aravis::set_integer_feature_value::Request& request, camera_aravis::set_integer_feature_value::Response& response)
673 {
674  const char* feature_name = request.feature.c_str();
675  guint64 value = request.value;
676  arv_device_set_integer_feature_value(this->p_device_, feature_name, value);
677  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
678  response.ok = true;
679  } else {
680  response.ok = false;
681  }
682  return true;
683 }
684 
685 bool CameraAravisNodelet::getFloatFeatureCallback(camera_aravis::get_float_feature_value::Request& request, camera_aravis::get_float_feature_value::Response& response)
686 {
687  const char* feature_name = request.feature.c_str();
688  response.response = arv_device_get_float_feature_value(this->p_device_, feature_name);
689  return true;
690 }
691 
692 bool CameraAravisNodelet::setFloatFeatureCallback(camera_aravis::set_float_feature_value::Request& request, camera_aravis::set_float_feature_value::Response& response)
693 {
694  const char* feature_name = request.feature.c_str();
695  const double value = request.value;
696  arv_device_set_float_feature_value(this->p_device_, feature_name, value);
697  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
698  response.ok = true;
699  } else {
700  response.ok = false;
701  }
702  return true;
703 }
704 
705 bool CameraAravisNodelet::getStringFeatureCallback(camera_aravis::get_string_feature_value::Request& request, camera_aravis::get_string_feature_value::Response& response)
706 {
707  const char* feature_name = request.feature.c_str();
708  response.response = arv_device_get_string_feature_value(this->p_device_, feature_name);
709  return true;
710 }
711 
712 bool CameraAravisNodelet::setStringFeatureCallback(camera_aravis::set_string_feature_value::Request& request, camera_aravis::set_string_feature_value::Response& response)
713 {
714  const char* feature_name = request.feature.c_str();
715  const char* value = request.value.c_str();
716  arv_device_set_string_feature_value(this->p_device_, feature_name, value);
717  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
718  response.ok = true;
719  } else {
720  response.ok = false;
721  }
722  return true;
723 }
724 
725 bool CameraAravisNodelet::getBooleanFeatureCallback(camera_aravis::get_boolean_feature_value::Request& request, camera_aravis::get_boolean_feature_value::Response& response)
726 {
727  const char* feature_name = request.feature.c_str();
728  response.response = arv_device_get_boolean_feature_value(this->p_device_, feature_name);
729  return true;
730 }
731 
732 bool CameraAravisNodelet::setBooleanFeatureCallback(camera_aravis::set_boolean_feature_value::Request& request, camera_aravis::set_boolean_feature_value::Response& response)
733 {
734  const char* feature_name = request.feature.c_str();
735  const bool value = request.value;
736  arv_device_set_boolean_feature_value(this->p_device_, feature_name, value);
737  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
738  response.ok = true;
739  } else {
740  response.ok = false;
741  }
742  return true;
743 }
744 
746 {
747  if(ptp_status_feature_.empty() || ptp_enable_feature_.empty())
748  {
749  ROS_WARN("PTP Error: ptp_status_feature_name and/or ptp_enable_feature_name are empty.");
750  return;
751  }
752 
754  {
756  ROS_WARN("PTP Error: ptp_status_feature_name '%s' is not available on the device.",
757  ptp_status_feature_.c_str());
759  ROS_WARN("PTP Error: ptp_enable_feature_name '%s' is not available on the device.",
760  ptp_enable_feature_.c_str());
761  return;
762  }
763 
764  // a PTP slave can take the following states: Slave, Listening, Uncalibrated, Faulty, Disabled
765  std::string ptp_status_str =
766  arv_device_get_string_feature_value(p_device_, ptp_status_feature_.c_str());
767  bool ptp_is_enabled = arv_device_get_boolean_feature_value(p_device_, ptp_enable_feature_.c_str());
768  if (ptp_status_str == "Faulty" ||
769  ptp_status_str == "Disabled" ||
770  ptp_status_str == "Initializing" ||
771  !ptp_is_enabled)
772  {
773  ROS_INFO("Resetting ptp clock (was set to %s)", ptp_status_str.c_str());
774 
775  arv_device_set_boolean_feature_value(p_device_, ptp_enable_feature_.c_str(), false);
776  arv_device_set_boolean_feature_value(p_device_, ptp_enable_feature_.c_str(), true);
777  }
778 }
779 
780 void CameraAravisNodelet::cameraAutoInfoCallback(const CameraAutoInfoConstPtr &msg_ptr)
781 {
782  if (config_.AutoSlave && p_device_)
783  {
784 
785  if (auto_params_.exposure_time != msg_ptr->exposure_time && implemented_features_["ExposureTime"])
786  {
787  arv_device_set_float_feature_value(p_device_, "ExposureTime", msg_ptr->exposure_time);
788  }
789 
790  if (implemented_features_["Gain"])
791  {
792  if (auto_params_.gain != msg_ptr->gain)
793  {
794  if (implemented_features_["GainSelector"])
795  {
796  arv_device_set_string_feature_value(p_device_, "GainSelector", "All");
797  }
798  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain);
799  }
800 
801  if (implemented_features_["GainSelector"])
802  {
803  if (auto_params_.gain_red != msg_ptr->gain_red)
804  {
805  arv_device_set_string_feature_value(p_device_, "GainSelector", "Red");
806  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_red);
807  }
808 
809  if (auto_params_.gain_green != msg_ptr->gain_green)
810  {
811  arv_device_set_string_feature_value(p_device_, "GainSelector", "Green");
812  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_green);
813  }
814 
815  if (auto_params_.gain_blue != msg_ptr->gain_blue)
816  {
817  arv_device_set_string_feature_value(p_device_, "GainSelector", "Blue");
818  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_blue);
819  }
820  }
821  }
822 
823  if (implemented_features_["BlackLevel"])
824  {
825  if (auto_params_.black_level != msg_ptr->black_level)
826  {
827  if (implemented_features_["BlackLevelSelector"])
828  {
829  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
830  }
831  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->black_level);
832  }
833 
834  if (implemented_features_["BlackLevelSelector"])
835  {
836  if (auto_params_.bl_red != msg_ptr->bl_red)
837  {
838  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Red");
839  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_red);
840  }
841 
842  if (auto_params_.bl_green != msg_ptr->bl_green)
843  {
844  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Green");
845  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_green);
846  }
847 
848  if (auto_params_.bl_blue != msg_ptr->bl_blue)
849  {
850  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Blue");
851  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_blue);
852  }
853  }
854  }
855 
856  // White balance as TIS is providing
857  if (strcmp("The Imaging Source Europe GmbH", arv_camera_get_vendor_name(p_camera_)) == 0)
858  {
859  arv_device_set_integer_feature_value(p_device_, "WhiteBalanceRedRegister", (int)(auto_params_.wb_red * 255.));
860  arv_device_set_integer_feature_value(p_device_, "WhiteBalanceGreenRegister", (int)(auto_params_.wb_green * 255.));
861  arv_device_set_integer_feature_value(p_device_, "WhiteBalanceBlueRegister", (int)(auto_params_.wb_blue * 255.));
862  }
863  else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
864  {
865  if (auto_params_.wb_red != msg_ptr->wb_red)
866  {
867  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
868  arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_red);
869  }
870 
871  if (auto_params_.wb_green != msg_ptr->wb_green)
872  {
873  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
874  arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_green);
875  }
876 
877  if (auto_params_.wb_blue != msg_ptr->wb_blue)
878  {
879  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
880  arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_blue);
881  }
882  }
883 
884  auto_params_ = *msg_ptr;
885  }
886 }
887 
// NOTE(review): the signature line (orig. line 888) was lost in extraction; from the
// body this reads the camera's current auto-function values into auto_params_
// (presumably syncAutoParameters) — confirm against the original source.
889 {
  // Start from NaN so that values of unimplemented features are recognizably unset.
890  auto_params_.exposure_time = auto_params_.gain = auto_params_.gain_red = auto_params_.gain_green =
891  auto_params_.gain_blue = auto_params_.black_level = auto_params_.bl_red = auto_params_.bl_green =
892  auto_params_.bl_blue = auto_params_.wb_red = auto_params_.wb_green = auto_params_.wb_blue =
893  std::numeric_limits<double>::quiet_NaN();
894 
895  if (p_device_)
896  {
897  if (implemented_features_["ExposureTime"])
898  {
899  auto_params_.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTime");
900  }
901 
  // Gain: read the global value, then the per-channel values via GainSelector.
  // Note the selector is left on "Blue" afterwards (side effect on device state).
902  if (implemented_features_["Gain"])
903  {
904  if (implemented_features_["GainSelector"])
905  {
906  arv_device_set_string_feature_value(p_device_, "GainSelector", "All");
907  }
908  auto_params_.gain = arv_device_get_float_feature_value(p_device_, "Gain");
909  if (implemented_features_["GainSelector"])
910  {
911  arv_device_set_string_feature_value(p_device_, "GainSelector", "Red");
912  auto_params_.gain_red = arv_device_get_float_feature_value(p_device_, "Gain");
913  arv_device_set_string_feature_value(p_device_, "GainSelector", "Green");
914  auto_params_.gain_green = arv_device_get_float_feature_value(p_device_, "Gain");
915  arv_device_set_string_feature_value(p_device_, "GainSelector", "Blue");
916  auto_params_.gain_blue = arv_device_get_float_feature_value(p_device_, "Gain");
917  }
918  }
919 
  // Black level: same read pattern as gain.
920  if (implemented_features_["BlackLevel"])
921  {
922  if (implemented_features_["BlackLevelSelector"])
923  {
924  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
925  }
926  auto_params_.black_level = arv_device_get_float_feature_value(p_device_, "BlackLevel");
927  if (implemented_features_["BlackLevelSelector"])
928  {
929  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Red");
930  auto_params_.bl_red = arv_device_get_float_feature_value(p_device_, "BlackLevel");
931  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Green");
932  auto_params_.bl_green = arv_device_get_float_feature_value(p_device_, "BlackLevel");
933  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Blue");
934  auto_params_.bl_blue = arv_device_get_float_feature_value(p_device_, "BlackLevel");
935  }
936  }
937 
938  // White balance as TIS is providing
  // TIS exposes white balance as integer registers scaled by 255.
939  if (strcmp("The Imaging Source Europe GmbH", arv_camera_get_vendor_name(p_camera_)) == 0)
940  {
941  auto_params_.wb_red = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceRedRegister") / 255.;
942  auto_params_.wb_green = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceGreenRegister") / 255.;
943  auto_params_.wb_blue = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceBlueRegister") / 255.;
944  }
945  // the standard way
946  else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
947  {
948  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
949  auto_params_.wb_red = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
950  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
951  auto_params_.wb_green = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
952  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
953  auto_params_.wb_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
954  }
955  }
956 }
957 
// Enables/disables this camera as the auto-function "master": when enabled it
// latches a publisher for CameraAutoInfo so slave cameras can mirror its
// exposure/gain/white-balance settings.
958 void CameraAravisNodelet::setAutoMaster(bool value)
959 {
960  if (value)
961  {
  // NOTE(review): orig. line 962 was lost in extraction (likely syncAutoParameters()
  // to seed auto_params_ before publishing) — confirm against the original source.
963  auto_pub_ = getNodeHandle().advertise<CameraAutoInfo>(ros::names::remap("camera_auto_info"), 1, true);
964  }
965  else
966  {
  // NOTE(review): orig. line 967 was lost in extraction (presumably
  // auto_pub_.shutdown()) — confirm against the original source.
968  }
969 }
970 
// Enables/disables this camera as an auto-function "slave": switches off all
// on-camera auto functions (so the master's values are authoritative) and
// subscribes to the master's CameraAutoInfo topic.
971 void CameraAravisNodelet::setAutoSlave(bool value)
972 {
973  if (value)
974  {
975  // deactivate all auto functions
976  if (implemented_features_["ExposureAuto"])
977  {
978  arv_device_set_string_feature_value(p_device_, "ExposureAuto", "Off");
979  }
980  if (implemented_features_["GainAuto"])
981  {
982  arv_device_set_string_feature_value(p_device_, "GainAuto", "Off");
983  }
984  if (implemented_features_["GainAutoBalance"])
985  {
986  arv_device_set_string_feature_value(p_device_, "GainAutoBalance", "Off");
987  }
988  if (implemented_features_["BlackLevelAuto"])
989  {
990  arv_device_set_string_feature_value(p_device_, "BlackLevelAuto", "Off");
991  }
992  if (implemented_features_["BlackLevelAutoBalance"])
993  {
994  arv_device_set_string_feature_value(p_device_, "BlackLevelAutoBalance", "Off");
995  }
996  if (implemented_features_["BalanceWhiteAuto"])
997  {
998  arv_device_set_string_feature_value(p_device_, "BalanceWhiteAuto", "Off");
999  }
  // NOTE(review): orig. lines 1000 and 1002 were lost in extraction; line 1002 is
  // presumably the callback argument (&CameraAravisNodelet::cameraAutoInfoCallback,
  // this) of this subscribe() call — confirm against the original source.
1001  auto_sub_ = getNodeHandle().subscribe(ros::names::remap("camera_auto_info"), 1,
1003  }
1004  else
1005  {
1006  auto_sub_.shutdown();
1007  }
1008 }
1009 
// Creates (or shuts down) the per-stream ExtendedCameraInfo publisher. The topic
// name is "extended_camera_info", optionally prefixed by the stream's channel name.
// @param channel_name  per-stream channel name; empty for the default stream
// @param stream_id     index into extended_camera_info_pubs_
1010 void CameraAravisNodelet::setExtendedCameraInfo(std::string channel_name, size_t stream_id)
1011 {
  // NOTE(review): the guard condition (orig. line 1012, presumably
  // `if (pub_ext_camera_info_)`) was lost in extraction — confirm.
1013  {
1014  if (channel_name.empty()) {
1015  extended_camera_info_pubs_[stream_id] = getNodeHandle().advertise<ExtendedCameraInfo>(ros::names::remap("extended_camera_info"), 1, true);
1016  } else {
1017  extended_camera_info_pubs_[stream_id] = getNodeHandle().advertise<ExtendedCameraInfo>(ros::names::remap(channel_name + "/extended_camera_info"), 1, true);
1018  }
1019  }
1020  else
1021  {
1022  extended_camera_info_pubs_[stream_id].shutdown();
1023  }
1024 }
1025 
1026 // Extra stream options for GigEVision streams.
1027 void CameraAravisNodelet::tuneGvStream(ArvGvStream *p_stream)
1028 {
1029  gboolean b_auto_buffer = FALSE;
1030  gboolean b_packet_resend = TRUE;
1031  unsigned int timeout_packet = 40; // milliseconds
1032  unsigned int timeout_frame_retention = 200;
1033 
1034  if (p_stream)
1035  {
1036  if (!ARV_IS_GV_STREAM(p_stream))
1037  {
1038  ROS_WARN("Stream is not a GV_STREAM");
1039  return;
1040  }
1041 
1042  if (b_auto_buffer)
1043  g_object_set(p_stream, "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO, "socket-buffer-size", 0,
1044  NULL);
1045  if (!b_packet_resend)
1046  g_object_set(p_stream, "packet-resend",
1047  b_packet_resend ? ARV_GV_STREAM_PACKET_RESEND_ALWAYS : ARV_GV_STREAM_PACKET_RESEND_NEVER,
1048  NULL);
1049  g_object_set(p_stream, "packet-timeout", timeout_packet * 1000, "frame-retention", timeout_frame_retention * 1000,
1050  NULL);
1051  }
1052 }
1053 
// dynamic_reconfigure callback: validates the requested config, detects which
// settings actually changed relative to config_, and writes only those to the
// camera. Holds reconfigure_mutex_ for the whole duration; adopts the (possibly
// adjusted) config into config_ at the end.
// @param config requested configuration (modified in place to reflect clamping
//               and values read back from the device)
// @param level  dynamic_reconfigure change level bitmask (unused here)
1054 void CameraAravisNodelet::rosReconfigureCallback(Config &config, uint32_t level)
1055 {
1056  reconfigure_mutex_.lock();
1057  std::string tf_prefix = tf::getPrefixParam(getNodeHandle());
1058  ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);
1059 
  // Fall back to the nodelet name when no frame_id was configured.
1060  if (config.frame_id == "")
1061  config.frame_id = this->getName();
1062 
1063  // Limit params to legal values.
1064  config.AcquisitionFrameRate = CLAMP(config.AcquisitionFrameRate, config_min_.AcquisitionFrameRate,
1065  config_max_.AcquisitionFrameRate);
1066  config.ExposureTime = CLAMP(config.ExposureTime, config_min_.ExposureTime, config_max_.ExposureTime);
1067  config.Gain = CLAMP(config.Gain, config_min_.Gain, config_max_.Gain);
1068  config.FocusPos = CLAMP(config.FocusPos, config_min_.FocusPos, config_max_.FocusPos);
1069  config.frame_id = tf::resolve(tf_prefix, config.frame_id);
1070 
  // PTP cameras cannot recover from a Faulty state by themselves; re-arm here.
1071  if (use_ptp_stamp_)
1072  resetPtpClock();
1073 
1074  // stop auto functions if slave
1075  if (config.AutoSlave)
1076  {
1077  config.ExposureAuto = "Off";
1078  config.GainAuto = "Off";
1079  // todo: all other auto functions "Off"
1080  }
1081 
1082  // reset values controlled by auto functions
1083  if (config.ExposureAuto.compare("Off") != 0)
1084  {
1085  config.ExposureTime = config_.ExposureTime;
1086  ROS_WARN("ExposureAuto is active. Cannot manually set ExposureTime.");
1087  }
1088  if (config.GainAuto.compare("Off") != 0)
1089  {
1090  config.Gain = config_.Gain;
1091  ROS_WARN("GainAuto is active. Cannot manually set Gain.");
1092  }
1093 
1094  // reset FrameRate when triggered
1095  if (config.TriggerMode.compare("Off") != 0)
1096  {
1097  config.AcquisitionFrameRate = config_.AcquisitionFrameRate;
1098  ROS_WARN("TriggerMode is active (Trigger Source: %s). Cannot manually set AcquisitionFrameRate.", config_.TriggerSource.c_str());
1099  }
1100 
1101  // Find valid user changes we need to react to.
1102  const bool changed_auto_master = (config_.AutoMaster != config.AutoMaster);
1103  const bool changed_auto_slave = (config_.AutoSlave != config.AutoSlave);
1104  const bool changed_acquisition_frame_rate = (config_.AcquisitionFrameRate != config.AcquisitionFrameRate);
1105  const bool changed_exposure_auto = (config_.ExposureAuto != config.ExposureAuto);
1106  const bool changed_exposure_time = (config_.ExposureTime != config.ExposureTime);
1107  const bool changed_gain_auto = (config_.GainAuto != config.GainAuto);
1108  const bool changed_gain = (config_.Gain != config.Gain);
1109  const bool changed_acquisition_mode = (config_.AcquisitionMode != config.AcquisitionMode);
1110  const bool changed_trigger_mode = (config_.TriggerMode != config.TriggerMode);
  // A trigger-mode change implies the trigger source must be (re)applied too.
1111  const bool changed_trigger_source = (config_.TriggerSource != config.TriggerSource) || changed_trigger_mode;
1112  const bool changed_focus_pos = (config_.FocusPos != config.FocusPos);
1113  const bool changed_mtu = (config_.mtu != config.mtu);
1114 
1115  if (changed_auto_master)
1116  {
1117  setAutoMaster(config.AutoMaster);
1118  }
1119 
1120  if (changed_auto_slave)
1121  {
1122  setAutoSlave(config.AutoSlave);
1123  }
1124 
1125  // Set params into the camera.
1126  if (changed_exposure_time)
1127  {
1128  if (implemented_features_["ExposureTime"])
1129  {
1130  ROS_INFO("Set ExposureTime = %f us", config.ExposureTime);
1131  arv_camera_set_exposure_time(p_camera_, config.ExposureTime);
1132  }
1133  else
1134  ROS_INFO("Camera does not support ExposureTime.");
1135  }
1136 
1137  if (changed_gain)
1138  {
1139  if (implemented_features_["Gain"])
1140  {
1141  ROS_INFO("Set gain = %f", config.Gain);
1142  arv_camera_set_gain(p_camera_, config.Gain);
1143  }
1144  else
1145  ROS_INFO("Camera does not support Gain or GainRaw.");
1146  }
1147 
  // "Once" semantics: let the camera converge for 2 s, read the resulting value
  // back into config so the GUI reflects it, then report the auto mode as "Off".
1148  if (changed_exposure_auto)
1149  {
1150  if (implemented_features_["ExposureAuto"] && implemented_features_["ExposureTime"])
1151  {
1152  ROS_INFO("Set ExposureAuto = %s", config.ExposureAuto.c_str());
1153  arv_device_set_string_feature_value(p_device_, "ExposureAuto", config.ExposureAuto.c_str());
1154  if (config.ExposureAuto.compare("Once") == 0)
1155  {
1156  ros::Duration(2.0).sleep();
1157  config.ExposureTime = arv_camera_get_exposure_time(p_camera_);
1158  ROS_INFO("Get ExposureTime = %f us", config.ExposureTime);
1159  config.ExposureAuto = "Off";
1160  }
1161  }
1162  else
1163  ROS_INFO("Camera does not support ExposureAuto.");
1164  }
1165  if (changed_gain_auto)
1166  {
1167  if (implemented_features_["GainAuto"] && implemented_features_["Gain"])
1168  {
1169  ROS_INFO("Set GainAuto = %s", config.GainAuto.c_str());
1170  arv_device_set_string_feature_value(p_device_, "GainAuto", config.GainAuto.c_str());
1171  if (config.GainAuto.compare("Once") == 0)
1172  {
1173  ros::Duration(2.0).sleep();
1174  config.Gain = arv_camera_get_gain(p_camera_);
1175  ROS_INFO("Get Gain = %f", config.Gain);
1176  config.GainAuto = "Off";
1177  }
1178  }
1179  else
1180  ROS_INFO("Camera does not support GainAuto.");
1181  }
1182 
1183  if (changed_acquisition_frame_rate)
1184  {
1185  if (implemented_features_["AcquisitionFrameRate"])
1186  {
1187  ROS_INFO("Set frame rate = %f Hz", config.AcquisitionFrameRate);
1188  arv_camera_set_frame_rate(p_camera_, config.AcquisitionFrameRate);
1189  }
1190  else
1191  ROS_INFO("Camera does not support AcquisitionFrameRate.");
1192  }
1193 
1194  if (changed_trigger_mode)
1195  {
1196  if (implemented_features_["TriggerMode"])
1197  {
1198  ROS_INFO("Set TriggerMode = %s", config.TriggerMode.c_str());
1199  arv_device_set_string_feature_value(p_device_, "TriggerMode", config.TriggerMode.c_str());
1200  }
1201  else
1202  ROS_INFO("Camera does not support TriggerMode.");
1203  }
1204 
1205  if (changed_trigger_source)
1206  {
1207  // delete old software trigger thread if active
1208  software_trigger_active_ = false;
1209  if (software_trigger_thread_.joinable())
1210  {
1211  software_trigger_thread_.join();
1212  }
1213 
1214  if (implemented_features_["TriggerSource"])
1215  {
1216  ROS_INFO("Set TriggerSource = %s", config.TriggerSource.c_str());
1217  arv_device_set_string_feature_value(p_device_, "TriggerSource", config.TriggerSource.c_str());
1218  }
1219  else
1220  {
1221  ROS_INFO("Camera does not support TriggerSource.");
1222  }
1223 
1224  // activate on demand
1225  if (config.TriggerMode.compare("On") == 0 && config.TriggerSource.compare("Software") == 0)
1226  {
1227  if (implemented_features_["TriggerSoftware"])
1228  {
1229  config_.softwaretriggerrate = config.softwaretriggerrate;
1230  ROS_INFO("Set softwaretriggerrate = %f", 1000.0 / ceil(1000.0 / config.softwaretriggerrate));
1231 
1232  // Turn on software timer callback.
  // NOTE(review): orig. line 1233 was lost in extraction; presumably it spawns
  // software_trigger_thread_ running softwareTriggerLoop — confirm.
1234  }
1235  else
1236  {
1237  ROS_INFO("Camera does not support TriggerSoftware command.");
1238  }
1239  }
1240  }
1241 
  // FocusPos and mtu are written, then read back after a settle delay so config
  // reflects the value the device actually accepted.
1242  if (changed_focus_pos)
1243  {
1244  if (implemented_features_["FocusPos"])
1245  {
1246  ROS_INFO("Set FocusPos = %d", config.FocusPos);
1247  arv_device_set_integer_feature_value(p_device_, "FocusPos", config.FocusPos);
1248  ros::Duration(1.0).sleep();
1249  config.FocusPos = arv_device_get_integer_feature_value(p_device_, "FocusPos");
1250  ROS_INFO("Get FocusPos = %d", config.FocusPos);
1251  }
1252  else
1253  ROS_INFO("Camera does not support FocusPos.");
1254  }
1255 
1256  if (changed_mtu)
1257  {
1258  if (implemented_features_["GevSCPSPacketSize"])
1259  {
1260  ROS_INFO("Set mtu = %d", config.mtu);
1261  arv_device_set_integer_feature_value(p_device_, "GevSCPSPacketSize", config.mtu);
1262  ros::Duration(1.0).sleep();
1263  config.mtu = arv_device_get_integer_feature_value(p_device_, "GevSCPSPacketSize");
1264  ROS_INFO("Get mtu = %d", config.mtu);
1265  }
1266  else
1267  ROS_INFO("Camera does not support mtu (i.e. GevSCPSPacketSize).");
1268  }
1269 
  // Changing AcquisitionMode requires an acquisition restart to take effect.
1270  if (changed_acquisition_mode)
1271  {
1272  if (implemented_features_["AcquisitionMode"])
1273  {
1274  ROS_INFO("Set AcquisitionMode = %s", config.AcquisitionMode.c_str());
1275  arv_device_set_string_feature_value(p_device_, "AcquisitionMode", config.AcquisitionMode.c_str());
1276 
1277  ROS_INFO("AcquisitionStop");
1278  arv_device_execute_command(p_device_, "AcquisitionStop");
1279  ROS_INFO("AcquisitionStart");
1280  arv_device_execute_command(p_device_, "AcquisitionStart");
1281  }
1282  else
1283  ROS_INFO("Camera does not support AcquisitionMode.");
1284  }
1285 
1286  // adopt new config
1287  config_ = config;
1288  reconfigure_mutex_.unlock();
1289 }
1290 
// NOTE(review): the signature line (orig. line 1291) was lost in extraction; from
// the body this is the subscriber-connect callback (rosConnectCallback) that
// starts/stops acquisition depending on whether anyone is subscribed — confirm.
1292 {
1293  if (p_device_)
1294  {
  // NOTE(review): the lambda takes the CameraPublisher by value, copying one per
  // element; taking it by const& would avoid the copies.
1295  if (std::all_of(cam_pubs_.begin(), cam_pubs_.end(),
1296  [](image_transport::CameraPublisher pub){ return pub.getNumSubscribers() == 0; })
1297  ){
1298  arv_device_execute_command(p_device_, "AcquisitionStop"); // don't waste CPU if nobody is listening!
1299  }
1300  else
1301  {
1302  arv_device_execute_command(p_device_, "AcquisitionStart");
1303  }
1304  }
1305 }
1306 
// Static trampoline for the Aravis "new-buffer" signal: unpacks the per-stream
// context and forwards to the instance-level newBufferReady() with the stream's
// frame_id (base frame_id, optionally suffixed with the stream name).
1307 void CameraAravisNodelet::newBufferReadyCallback(ArvStream *p_stream, gpointer can_instance)
1308 {
1309 
1310  // workaround to get access to the instance from a static method
1311  StreamIdData *data = (StreamIdData*) can_instance;
  // NOTE(review): orig. line 1312 was lost in extraction; it presumably defines
  // p_can from the context (e.g. `CameraAravisNodelet *p_can = data->can;`) —
  // confirm against the original source, as p_can is used below.
1313  size_t stream_id = data->stream_id;
1314  image_transport::CameraPublisher *p_cam_pub = &p_can->cam_pubs_[stream_id];
1315 
1316  if( p_can->stream_names_[stream_id].empty() )
1317  {
1318  newBufferReady(p_stream, p_can, p_can->config_.frame_id, stream_id);
1319  } else {
  // Multi-stream devices publish each stream under "<frame_id>/<stream_name>".
1320  const std::string stream_frame_id = p_can->config_.frame_id + "/" + p_can->stream_names_[stream_id];
1321  newBufferReady(p_stream, p_can, stream_frame_id, stream_id);
1322  }
1323 
1324 }
1325 
// Pops the next completed buffer from the stream and, if valid and anyone is
// subscribed, wraps it into a sensor_msgs::Image (converting the pixel format
// if needed), pairs it with the current CameraInfo, and publishes both.
// Failed buffers are logged and pushed back into the stream's buffer pool.
// @param p_stream   stream that emitted the new-buffer signal
// @param p_can      nodelet instance (static-callback workaround)
// @param frame_id   tf frame id to stamp into the message headers
// @param stream_id  index of the stream within the nodelet's containers
1326 void CameraAravisNodelet::newBufferReady(ArvStream *p_stream, CameraAravisNodelet *p_can, std::string frame_id, size_t stream_id)
1327 {
1328  ArvBuffer *p_buffer = arv_stream_try_pop_buffer(p_stream);
1329 
1330  // check if we risk to drop the next image because of not enough buffers left
1331  gint n_available_buffers;
1332  arv_stream_get_n_buffers(p_stream, &n_available_buffers, NULL);
1333  if (n_available_buffers == 0)
1334  {
1335  p_can->p_buffer_pools_[stream_id]->allocateBuffers(1);
1336  }
1337 
1338  if (p_buffer != NULL)
1339  {
  // Only do the full publish path for good buffers with subscribers present.
1340  if (arv_buffer_get_status(p_buffer) == ARV_BUFFER_STATUS_SUCCESS && p_can->p_buffer_pools_[stream_id]
1341  && p_can->cam_pubs_[stream_id].getNumSubscribers())
1342  {
1343  (p_can->n_buffers_)++;
1344  // get the image message which wraps around this buffer
1345  sensor_msgs::ImagePtr msg_ptr = (*(p_can->p_buffer_pools_[stream_id]))[p_buffer];
1346  // fill the meta information of image message
1347  // get acquisition time
1348  guint64 t;
1349  if (p_can->use_ptp_stamp_)
1350  t = arv_buffer_get_timestamp(p_buffer);
1351  else
1352  t = arv_buffer_get_system_timestamp(p_buffer);
1353  msg_ptr->header.stamp.fromNSec(t);
1354  // get frame sequence number
1355  msg_ptr->header.seq = arv_buffer_get_frame_id(p_buffer);
1356  // fill other stream properties
1357  msg_ptr->header.frame_id = frame_id;
1358  msg_ptr->width = p_can->roi_.width;
1359  msg_ptr->height = p_can->roi_.height;
1360  msg_ptr->encoding = p_can->sensors_[stream_id]->pixel_format;
1361  msg_ptr->step = (msg_ptr->width * p_can->sensors_[stream_id]->n_bits_pixel)/8;
1362 
1363  // do the magic of conversion into a ROS format
1364  if (p_can->convert_formats[stream_id]) {
1365  sensor_msgs::ImagePtr cvt_msg_ptr = p_can->p_buffer_pools_[stream_id]->getRecyclableImg();
1366  p_can->convert_formats[stream_id](msg_ptr, cvt_msg_ptr);
1367  msg_ptr = cvt_msg_ptr;
1368  }
1369 
1370  // get current CameraInfo data
1371  if (!p_can->camera_infos_[stream_id]) {
1372  p_can->camera_infos_[stream_id].reset(new sensor_msgs::CameraInfo);
1373  }
1374  (*p_can->camera_infos_[stream_id]) = p_can->p_camera_info_managers_[stream_id]->getCameraInfo();
1375  p_can->camera_infos_[stream_id]->header = msg_ptr->header;
1376  if (p_can->camera_infos_[stream_id]->width == 0 || p_can->camera_infos_[stream_id]->height == 0) {
  // NOTE(review): orig. line 1377 was lost in extraction; it presumably opens the
  // warning macro call (e.g. ROS_WARN_STREAM_ONCE(...) / ROS_WARN_ONCE(...)) for
  // the message text below — confirm against the original source.
1378  "The fields image_width and image_height seem not to be set in "
1379  "the YAML specified by 'camera_info_url' parameter. Please set "
1380  "them there, because actual image size and specified image size "
1381  "can be different due to the region of interest (ROI) feature. In "
1382  "the YAML the image size should be the one on which the camera was "
1383  "calibrated. See CameraInfo.msg specification!");
1384  p_can->camera_infos_[stream_id]->width = p_can->roi_.width;
1385  p_can->camera_infos_[stream_id]->height = p_can->roi_.height;
1386  }
1387 
1388 
1389  p_can->cam_pubs_[stream_id].publish(msg_ptr, p_can->camera_infos_[stream_id]);
1390 
  // Optionally publish the extended camera info (exposure, gain, temperature, ...).
1391  if (p_can->pub_ext_camera_info_) {
1392  ExtendedCameraInfo extended_camera_info_msg;
1393  p_can->extended_camera_info_mutex_.lock();
1394  arv_camera_gv_select_stream_channel(p_can->p_camera_, stream_id);
1395  extended_camera_info_msg.camera_info = *(p_can->camera_infos_[stream_id]);
1396  p_can->fillExtendedCameraInfoMessage(extended_camera_info_msg);
1397  p_can->extended_camera_info_mutex_.unlock();
1398  p_can->extended_camera_info_pubs_[stream_id].publish(extended_camera_info_msg);
1399  }
1400 
1401  // check PTP status, camera cannot recover from "Faulty" by itself
1402  if (p_can->use_ptp_stamp_)
1403  p_can->resetPtpClock();
1404  }
1405  else
1406  {
1407  if (arv_buffer_get_status(p_buffer) != ARV_BUFFER_STATUS_SUCCESS) {
1408  ROS_WARN("(%s) Frame error: %s", frame_id.c_str(), szBufferStatusFromInt[arv_buffer_get_status(p_buffer)]);
1409  }
  // Return the (unused or failed) buffer to the stream for reuse.
1410  arv_stream_push_buffer(p_stream, p_buffer);
1411  }
1412  }
1413 
1414  // publish current lighting settings if this camera is configured as master
1415  if (p_can->config_.AutoMaster)
1416  {
1417  p_can->syncAutoParameters();
1418  p_can->auto_pub_.publish(p_can->auto_params_);
1419  }
1420 }
1421 
1422 void CameraAravisNodelet::fillExtendedCameraInfoMessage(ExtendedCameraInfo &msg)
1423 {
1424  const char *vendor_name = arv_camera_get_vendor_name(p_camera_);
1425 
1426  if (strcmp("Basler", vendor_name) == 0) {
1427  msg.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTimeAbs");
1428  }
1429  else if (implemented_features_["ExposureTime"])
1430  {
1431  msg.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTime");
1432  }
1433 
1434  if (strcmp("Basler", vendor_name) == 0) {
1435  msg.gain = static_cast<float>(arv_device_get_integer_feature_value(p_device_, "GainRaw"));
1436  }
1437  else if (implemented_features_["Gain"])
1438  {
1439  msg.gain = arv_device_get_float_feature_value(p_device_, "Gain");
1440  }
1441  if (strcmp("Basler", vendor_name) == 0) {
1442  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
1443  msg.black_level = static_cast<float>(arv_device_get_integer_feature_value(p_device_, "BlackLevelRaw"));
1444  } else if (strcmp("JAI Corporation", vendor_name) == 0) {
1445  // Reading the black level register for both streams of the JAI FS 3500D takes too long.
1446  // The frame rate the drops below 10 fps.
1447  msg.black_level = 0;
1448  } else {
1449  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
1450  msg.black_level = arv_device_get_float_feature_value(p_device_, "BlackLevel");
1451  }
1452 
1453  // White balance as TIS is providing
1454  if (strcmp("The Imaging Source Europe GmbH", vendor_name) == 0)
1455  {
1456  msg.white_balance_red = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceRedRegister") / 255.;
1457  msg.white_balance_green = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceGreenRegister") / 255.;
1458  msg.white_balance_blue = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceBlueRegister") / 255.;
1459  }
1460  // the JAI cameras become too slow when reading out the DigitalRed and DigitalBlue values
1461  // the white balance is adjusted by adjusting the Gain values for Red and Blue pixels
1462  else if (strcmp("JAI Corporation", vendor_name) == 0)
1463  {
1464  msg.white_balance_red = 1.0;
1465  msg.white_balance_green = 1.0;
1466  msg.white_balance_blue = 1.0;
1467  }
1468  // the Basler cameras use the 'BalanceRatioAbs' keyword instead
1469  else if (strcmp("Basler", vendor_name) == 0)
1470  {
1471  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
1472  msg.white_balance_red = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
1473  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
1474  msg.white_balance_green = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
1475  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
1476  msg.white_balance_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
1477  }
1478  // the standard way
1479  else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
1480  {
1481  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
1482  msg.white_balance_red = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
1483  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
1484  msg.white_balance_green = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
1485  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
1486  msg.white_balance_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
1487  }
1488 
1489  if (strcmp("Basler", vendor_name) == 0) {
1490  msg.temperature = static_cast<float>(arv_device_get_float_feature_value(p_device_, "TemperatureAbs"));
1491  }
1492  else if (implemented_features_["DeviceTemperature"])
1493  {
1494  msg.temperature = arv_device_get_float_feature_value(p_device_, "DeviceTemperature");
1495  }
1496 
1497 }
1498 
1499 void CameraAravisNodelet::controlLostCallback(ArvDevice *p_gv_device, gpointer can_instance)
1500 {
1501  CameraAravisNodelet *p_can = (CameraAravisNodelet*)can_instance;
1502  ROS_ERROR("Control to aravis device lost.\n\t> Nodelet name: %s."
1503  "\n\t> Shutting down. Allowing for respawn.",
1504  p_can->getName().c_str());
1505 
1506  p_can->shutdown_trigger_.start();
1507 }
1508 
// NOTE(review): the signature line (orig. line 1509) was lost in extraction; from
// the body this is the software trigger loop (softwareTriggerLoop), run on
// software_trigger_thread_, firing "TriggerSoftware" at config_.softwaretriggerrate
// Hz while subscribers exist — confirm against the original source.
1510 {
1511  software_trigger_active_ = true;
1512  ROS_INFO("Software trigger started.");
1513  std::chrono::system_clock::time_point next_time = std::chrono::system_clock::now();
1514  while (ros::ok() && software_trigger_active_)
1515  {
  // Advance the deadline by one period (rounded to whole milliseconds).
1516  next_time += std::chrono::milliseconds(size_t(std::round(1000.0 / config_.softwaretriggerrate)));
  // Only trigger if at least one stream has a subscriber.
1517  if (std::any_of(cam_pubs_.begin(), cam_pubs_.end(),
1518  [](image_transport::CameraPublisher pub){ return pub.getNumSubscribers() > 0; })
1519  )
1520  {
1521  arv_device_execute_command(p_device_, "TriggerSoftware");
1522  }
  // Sleep until the next deadline; if we overran it, re-anchor to now.
1523  if (next_time > std::chrono::system_clock::now())
1524  {
1525  std::this_thread::sleep_until(next_time);
1526  }
1527  else
1528  {
1529  ROS_WARN("Camera Aravis: Missed a software trigger event.");
1530  next_time = std::chrono::system_clock::now();
1531  }
1532  }
1533  ROS_INFO("Software trigger stopped.");
1534 }
1535 
1536 void CameraAravisNodelet::publishTfLoop(double rate)
1537 {
1538  // Publish optical transform for the camera
1539  ROS_WARN("Publishing dynamic camera transforms (/tf) at %g Hz", rate);
1540 
1541  tf_thread_active_ = true;
1542 
1543  ros::Rate loop_rate(rate);
1544 
1545  while (ros::ok() && tf_thread_active_)
1546  {
1547  // Update the header for publication
1548  tf_optical_.header.stamp = ros::Time::now();
1549  ++tf_optical_.header.seq;
1550  p_tb_->sendTransform(tf_optical_);
1551 
1552  loop_rate.sleep();
1553  }
1554 }
1555 
1558  ROS_INFO("Publishing camera diagnostics at %g Hz", rate);
1559 
1560  ros::Rate loop_rate(rate);
1561 
1562  CameraDiagnostics camDiagnosticMsg;
1563  camDiagnosticMsg.header.frame_id = config_.frame_id;
1564 
1565  // function to get feature value at given name and of given type as string
1566  auto getFeatureValueAsStrFn = [&](const std::string &name, const std::string &type)
1567  -> std::string
1568  {
1569  std::string valueStr = "";
1570 
1571  if(type == "float")
1572  valueStr = std::to_string(arv_device_get_float_feature_value(p_device_, name.c_str()));
1573  else if (type == "int")
1574  valueStr = std::to_string(arv_device_get_integer_feature_value(p_device_, name.c_str()));
1575  else if (type == "bool")
1576  valueStr = arv_device_get_boolean_feature_value(p_device_, name.c_str()) ? "true" : "false";
1577  else
1578  valueStr = arv_device_get_string_feature_value(p_device_, name.c_str());
1579 
1580  return valueStr;
1581  };
1582 
1583  // function to set feature value at given name and of given type from string
1584  auto setFeatureValueFromStrFn = [&](const std::string &name, const std::string &type,
1585  const std::string &valueStr)
1586  {
1587  if(type == "float")
1588  arv_device_set_float_feature_value(p_device_, name.c_str(), std::stod(valueStr));
1589  else if (type == "int")
1590  arv_device_set_integer_feature_value(p_device_, name.c_str(), std::stoi(valueStr));
1591  else if (type == "bool")
1592  arv_device_set_boolean_feature_value(p_device_, name.c_str(),
1593  (valueStr == "true" || valueStr == "True" || valueStr == "TRUE"));
1594  else
1595  arv_device_set_string_feature_value(p_device_, name.c_str(), valueStr.c_str());
1596  };
1597 
1598  while (ros::ok() && diagnostic_thread_active_)
1599  {
1600  camDiagnosticMsg.header.stamp = ros::Time::now();
1601  ++camDiagnosticMsg.header.seq;
1602 
1603  camDiagnosticMsg.data.clear();
1604 
1605  int featureIdx = 1;
1606  for (auto featureDict : diagnostic_features_) {
1607  std::string featureName = featureDict["FeatureName"].IsDefined()
1608  ? featureDict["FeatureName"].as<std::string>()
1609  : "";
1610  std::string featureType = featureDict["Type"].IsDefined()
1611  ? featureDict["Type"].as<std::string>()
1612  : "";
1613 
1614  if(featureName.empty() || featureType.empty())
1615  {
1616  ROS_WARN_ONCE(
1617  "Diagnostic feature at index %i does not have a field 'FeatureName' or 'Type'.",
1618  featureIdx);
1619  ROS_WARN_ONCE("Diagnostic feature will be skipped.");
1620  }
1621  else
1622  {
1623  // convert type string to lower case
1624  std::transform(featureType.begin(), featureType.end(), featureType.begin(),
1625  [](unsigned char c){ return std::tolower(c); });
1626 
1627  // if feature name is found in implemented_feature list and if enabled
1628  if (implemented_features_.find(featureName) != implemented_features_.end() &&
1629  implemented_features_.at(featureName))
1630  {
1631  diagnostic_msgs::KeyValue kvPair;
1632 
1633  // if 'selectors' which correspond to the featureName are defined
1634  if(featureDict["Selectors"].IsDefined() && featureDict["Selectors"].size() > 0)
1635  {
1636  int selectorIdx = 1;
1637  for(auto selectorDict : featureDict["Selectors"])
1638  {
1639  std::string selectorFeatureName = selectorDict["FeatureName"].IsDefined()
1640  ? selectorDict["FeatureName"].as<std::string>()
1641  : "";
1642  std::string selectorType = selectorDict["Type"].IsDefined()
1643  ? selectorDict["Type"].as<std::string>()
1644  : "";
1645  std::string selectorValue = selectorDict["Value"].IsDefined()
1646  ? selectorDict["Value"].as<std::string>()
1647  : "";
1648 
1649  if(selectorFeatureName.empty() || selectorType.empty() || selectorValue.empty())
1650  {
1651  ROS_WARN_ONCE(
1652  "Diagnostic feature selector at index %i of feature at index %i does not have a "
1653  "field 'FeatureName', 'Type' or 'Value'.",
1654  selectorIdx, featureIdx);
1655  ROS_WARN_ONCE("Diagnostic feature will be skipped.");
1656  }
1657  else
1658  {
1659  if (implemented_features_.find(selectorFeatureName) != implemented_features_.end() &&
1660  implemented_features_.at(selectorFeatureName))
1661  {
1662  setFeatureValueFromStrFn(selectorFeatureName, selectorType, selectorValue);
1663 
1664  kvPair.key = featureName + "-" + selectorValue;
1665  kvPair.value = getFeatureValueAsStrFn(featureName, featureType);
1666 
1667  camDiagnosticMsg.data.push_back(diagnostic_msgs::KeyValue(kvPair));
1668  }
1669  else
1670  {
1671  ROS_WARN_ONCE("Diagnostic feature selector with name '%s' is not implemented.",
1672  selectorFeatureName.c_str());
1673  }
1674 
1675  selectorIdx++;
1676  }
1677  }
1678  }
1679  else
1680  {
1681  kvPair.key = featureName;
1682  kvPair.value = getFeatureValueAsStrFn(featureName, featureType);
1683 
1684  camDiagnosticMsg.data.push_back(diagnostic_msgs::KeyValue(kvPair));
1685  }
1686  }
1687  else
1688  {
1689  ROS_WARN_ONCE("Diagnostic feature with name '%s' is not implemented.",
1690  featureName.c_str());
1691  }
1692  }
1693 
1694  featureIdx++;
1695  }
1696 
1697  diagnostic_pub_.publish(camDiagnosticMsg);
1698 
1699  loop_rate.sleep();
1700  }
1701 
1702 }
1703 
1705 {
1706  implemented_features_.clear();
1707  if (!p_device_)
1708  return;
1709 
1710  // get the root node of genicam description
1711  ArvGc *gc = arv_device_get_genicam(p_device_);
1712  if (!gc)
1713  return;
1714 
1715  std::list<ArvDomNode*> todo;
1716  todo.push_front((ArvDomNode*)arv_gc_get_node(gc, "Root"));
1717 
1718  while (!todo.empty())
1719  {
1720  // get next entry
1721  ArvDomNode *node = todo.front();
1722  todo.pop_front();
1723  const std::string name(arv_dom_node_get_node_name(node));
1724 
1725  // Do the indirection
1726  if (name[0] == 'p')
1727  {
1728  if (name.compare("pInvalidator") == 0)
1729  {
1730  continue;
1731  }
1732  ArvDomNode *inode = (ArvDomNode*)arv_gc_get_node(gc,
1733  arv_dom_node_get_node_value(arv_dom_node_get_first_child(node)));
1734  if (inode)
1735  todo.push_front(inode);
1736  continue;
1737  }
1738 
1739  // check for implemented feature
1740  if (ARV_IS_GC_FEATURE_NODE(node))
1741  {
1742  //if (!(ARV_IS_GC_CATEGORY(node) || ARV_IS_GC_ENUM_ENTRY(node) /*|| ARV_IS_GC_PORT(node)*/)) {
1743  ArvGcFeatureNode *fnode = ARV_GC_FEATURE_NODE(node);
1744  const std::string fname(arv_gc_feature_node_get_name(fnode));
1745  const bool usable = arv_gc_feature_node_is_available(fnode, NULL)
1746  && arv_gc_feature_node_is_implemented(fnode, NULL);
1747 
1748  ROS_INFO_STREAM_COND(verbose_, "Feature " << fname << " is " << (usable ? "usable" : "not usable"));
1749  implemented_features_.emplace(fname, usable);
1750  //}
1751  }
1752 
1753 // if (ARV_IS_GC_PROPERTY_NODE(node)) {
1754 // ArvGcPropertyNode* pnode = ARV_GC_PROPERTY_NODE(node);
1755 // const std::string pname(arv_gc_property_node_get_name(pnode));
1756 // ROS_INFO_STREAM("Property " << pname << " found");
1757 // }
1758 
1759  // add children in todo-list
1760  ArvDomNodeList *children = arv_dom_node_get_child_nodes(node);
1761  const uint l = arv_dom_node_list_get_length(children);
1762  for (uint i = 0; i < l; ++i)
1763  {
1764  todo.push_front(arv_dom_node_list_get_item(children, i));
1765  }
1766  }
1767 }
1768 
1769 void CameraAravisNodelet::parseStringArgs(std::string in_arg_string, std::vector<std::string> &out_args) {
1770  size_t array_start = 0;
1771  size_t array_end = in_arg_string.length();
1772  if(array_start != std::string::npos && array_end != std::string::npos) {
1773  // parse the string into an array of parameters
1774  std::stringstream ss(in_arg_string.substr(array_start, array_end - array_start));
1775  while (ss.good()) {
1776  std::string temp;
1777  getline( ss, temp, ',');
1778  boost::trim_left(temp);
1779  boost::trim_right(temp);
1780  out_args.push_back(temp);
1781  }
1782  } else {
1783  // add just the one argument onto the vector
1784  out_args.push_back(in_arg_string);
1785  }
1786 }
1787 
1788 // WriteCameraFeaturesFromRosparam()
1789 // Read ROS parameters from this node's namespace, and see if each parameter has a similarly named & typed feature in the camera. Then set the
1790 // camera feature to that value. For example, if the parameter camnode/Gain is set to 123.0, then we'll write 123.0 to the Gain feature
1791 // in the camera.
1792 //
1793 // Note that the datatype of the parameter *must* match the datatype of the camera feature, and this can be determined by
1794 // looking at the camera's XML file. Camera enum's are string parameters, camera bools are false/true parameters (not 0/1),
1795 // integers are integers, doubles are doubles, etc.
1796 //
1798 {
1799  XmlRpc::XmlRpcValue xml_rpc_params;
1801  ArvGcNode *p_gc_node;
1802  GError *error = NULL;
1803 
1804  getPrivateNodeHandle().getParam(this->getName(), xml_rpc_params);
1805 
1806  if (xml_rpc_params.getType() == XmlRpc::XmlRpcValue::TypeStruct)
1807  {
1808  for (iter = xml_rpc_params.begin(); iter != xml_rpc_params.end(); iter++)
1809  {
1810  std::string key = iter->first;
1811 
1812  p_gc_node = arv_device_get_feature(p_device_, key.c_str());
1813  if (p_gc_node && arv_gc_feature_node_is_implemented(ARV_GC_FEATURE_NODE(p_gc_node), &error))
1814  {
1815  // unsigned long typeValue = arv_gc_feature_node_get_value_type((ArvGcFeatureNode *)pGcNode);
1816  // ROS_INFO("%s cameratype=%lu, rosparamtype=%d", key.c_str(), typeValue, static_cast<int>(iter->second.getType()));
1817 
1818  // We'd like to check the value types too, but typeValue is often given as G_TYPE_INVALID, so ignore it.
1819  switch (iter->second.getType())
1820  {
1821  case XmlRpc::XmlRpcValue::TypeBoolean: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeBoolean))// && (typeValue==G_TYPE_INT64))
1822  {
1823  int value = (bool)iter->second;
1824  arv_device_set_integer_feature_value(p_device_, key.c_str(), value);
1825  ROS_INFO("Read parameter (bool) %s: %d", key.c_str(), value);
1826  }
1827  break;
1828 
1829  case XmlRpc::XmlRpcValue::TypeInt: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeInt))// && (typeValue==G_TYPE_INT64))
1830  {
1831  int value = (int)iter->second;
1832  arv_device_set_integer_feature_value(p_device_, key.c_str(), value);
1833  ROS_INFO("Read parameter (int) %s: %d", key.c_str(), value);
1834  }
1835  break;
1836 
1837  case XmlRpc::XmlRpcValue::TypeDouble: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeDouble))// && (typeValue==G_TYPE_DOUBLE))
1838  {
1839  double value = (double)iter->second;
1840  arv_device_set_float_feature_value(p_device_, key.c_str(), value);
1841  ROS_INFO("Read parameter (float) %s: %f", key.c_str(), value);
1842  }
1843  break;
1844 
1845  case XmlRpc::XmlRpcValue::TypeString: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeString))// && (typeValue==G_TYPE_STRING))
1846  {
1847  std::string value = (std::string)iter->second;
1848  arv_device_set_string_feature_value(p_device_, key.c_str(), value.c_str());
1849  ROS_INFO("Read parameter (string) %s: %s", key.c_str(), value.c_str());
1850  }
1851  break;
1852 
1858  default:
1859  ROS_WARN("Unhandled rosparam type in writeCameraFeaturesFromRosparam()");
1860  }
1861  }
1862  }
1863  }
1864 }
1865 
// Timer callback: requests this nodelet be unloaded via its nodelet manager's
// unload service, waits briefly, then shuts down ROS for this process.
void CameraAravisNodelet::onShutdownTriggered(const ros::TimerEvent &)
{
  nodelet::NodeletUnload unload_service;
  unload_service.request.name = this->getName();
  // fix: the service-call result was silently ignored; report failures
  if (!ros::service::call(this->getName() + "/unload_nodelet", unload_service))
    ROS_WARN("Nodelet unload service call failed.");
  ROS_INFO("Nodelet unloaded.");

  // NOTE(review): give the unload time to complete before tearing down ROS.
  // This line was dropped by the documentation extraction and is restored
  // from the shutdown_delay_s_ member — confirm against upstream source.
  ros::Duration(shutdown_delay_s_).sleep();

  ros::shutdown();
  ROS_INFO("Shut down successful.");
}
1878 
1879 } // end namespace camera_aravis
camera_aravis::CameraAravisNodelet::ptp_status_feature_
std::string ptp_status_feature_
Definition: camera_aravis_nodelet.h:119
camera_aravis::CameraAravisNodelet::tf_thread_active_
std::atomic_bool tf_thread_active_
Definition: camera_aravis_nodelet.h:230
response
const std::string response
camera_aravis::CameraAravisNodelet::auto_sub_
ros::Subscriber auto_sub_
Definition: camera_aravis_nodelet.h:241
camera_aravis::CameraAravisNodelet::setStringFeatureCallback
bool setStringFeatureCallback(camera_aravis::set_string_feature_value::Request &request, camera_aravis::set_string_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:733
camera_aravis::CameraAravisNodelet::tf_dyn_thread_
std::thread tf_dyn_thread_
Definition: camera_aravis_nodelet.h:229
camera_aravis::CameraAravisNodelet::rosConnectCallback
void rosConnectCallback()
Definition: camera_aravis_nodelet.cpp:1312
camera_aravis::CameraAravisNodelet::shutdown_delay_s_
double shutdown_delay_s_
Definition: camera_aravis_nodelet.h:260
camera_aravis::CameraAravisNodelet::diagnostic_publish_rate_
double diagnostic_publish_rate_
Definition: camera_aravis_nodelet.h:233
camera_aravis::CameraAravisNodelet::newBufferReady
static void newBufferReady(ArvStream *p_stream, CameraAravisNodelet *p_can, std::string frame_id, size_t stream_id)
Definition: camera_aravis_nodelet.cpp:1347
camera_aravis::CameraAravisNodelet::width
int32_t width
Definition: camera_aravis_nodelet.h:266
ros::service::call
bool call(const std::string &service_name, MReq &req, MRes &res)
camera_aravis::CameraAravisNodelet::diagnostic_thread_
std::thread diagnostic_thread_
Definition: camera_aravis_nodelet.h:236
ros::Publisher
camera_aravis::CameraAravisNodelet::fillExtendedCameraInfoMessage
void fillExtendedCameraInfoMessage(ExtendedCameraInfo &msg)
Definition: camera_aravis_nodelet.cpp:1443
nodelet::Nodelet::getNodeHandle
ros::NodeHandle & getNodeHandle() const
camera_aravis::CameraAravisNodelet::setExtendedCameraInfo
void setExtendedCameraInfo(std::string channel_name, size_t stream_id)
Definition: camera_aravis_nodelet.cpp:1031
image_transport::ImageTransport
camera_aravis::CameraAravisNodelet::p_tb_
std::unique_ptr< tf2_ros::TransformBroadcaster > p_tb_
Definition: camera_aravis_nodelet.h:227
camera_aravis::CameraAravisNodelet::get_integer_service_
ros::ServiceServer get_integer_service_
Definition: camera_aravis_nodelet.h:171
XmlRpc::XmlRpcValue::TypeBase64
TypeBase64
camera_aravis::CameraAravisNodelet::tf_optical_
geometry_msgs::TransformStamped tf_optical_
Definition: camera_aravis_nodelet.h:228
camera_aravis::CameraAravisNodelet::config_min_
Config config_min_
Definition: camera_aravis_nodelet.h:247
camera_aravis::CameraAravisNodelet::set_integer_service_
ros::ServiceServer set_integer_service_
Definition: camera_aravis_nodelet.h:183
ros::NodeHandle::getParam
bool getParam(const std::string &key, bool &b) const
ROS_INFO_STREAM_COND
#define ROS_INFO_STREAM_COND(cond, args)
camera_aravis::CameraAravisNodelet::~CameraAravisNodelet
virtual ~CameraAravisNodelet()
Definition: camera_aravis_nodelet.cpp:59
camera_aravis::szBufferStatusFromInt
static const char * szBufferStatusFromInt[]
Definition: conversion_utils.h:57
camera_aravis::CameraAravisNodelet::extended_camera_info_pubs_
std::vector< ros::Publisher > extended_camera_info_pubs_
Definition: camera_aravis_nodelet.h:244
ros::spinOnce
ROSCPP_DECL void spinOnce()
XmlRpc::XmlRpcValue::TypeInt
TypeInt
XmlRpc::XmlRpcValue::TypeInvalid
TypeInvalid
camera_aravis::CameraAravisNodelet::software_trigger_thread_
std::thread software_trigger_thread_
Definition: camera_aravis_nodelet.h:253
ros::shutdown
ROSCPP_DECL void shutdown()
tf::resolve
std::string resolve(const std::string &prefix, const std::string &frame_name)
ros::NodeHandle::advertiseService
ServiceServer advertiseService(AdvertiseServiceOptions &ops)
camera_aravis::CameraAravisNodelet::convert_formats
std::vector< ConversionFunction > convert_formats
Definition: camera_aravis_nodelet.h:133
camera_aravis::CameraAravisNodelet::syncAutoParameters
void syncAutoParameters()
Definition: camera_aravis_nodelet.cpp:909
camera_aravis::Config
CameraAravisConfig Config
Definition: camera_aravis_nodelet.h:105
camera_aravis
Definition: camera_aravis_nodelet.h:81
ros::Subscriber::shutdown
void shutdown()
ROS_WARN_ONCE
#define ROS_WARN_ONCE(...)
camera_aravis::CameraAravisNodelet::get_boolean_service_
ros::ServiceServer get_boolean_service_
Definition: camera_aravis_nodelet.h:180
camera_aravis::CameraAravisNodelet::guid_
std::string guid_
Definition: camera_aravis_nodelet.h:115
camera_aravis::CameraAravisNodelet::p_streams_
std::vector< ArvStream * > p_streams_
Definition: camera_aravis_nodelet.h:129
camera_aravis::CameraAravisNodelet::pub_tf_optical_
bool pub_tf_optical_
Definition: camera_aravis_nodelet.h:123
camera_aravis::CameraAravisNodelet::getIntegerFeatureCallback
bool getIntegerFeatureCallback(camera_aravis::get_integer_feature_value::Request &request, camera_aravis::get_integer_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:686
camera_aravis::CameraAravisNodelet::p_camera_
ArvCamera * p_camera_
Definition: camera_aravis_nodelet.h:125
ros::SubscriberStatusCallback
boost::function< void(const SingleSubscriberPublisher &)> SubscriberStatusCallback
camera_aravis::CameraAravisNodelet::diagnostic_pub_
ros::Publisher diagnostic_pub_
Definition: camera_aravis_nodelet.h:235
camera_aravis::CameraAravisNodelet::ptp_enable_feature_
std::string ptp_enable_feature_
Definition: camera_aravis_nodelet.h:118
ROS_DEBUG_STREAM
#define ROS_DEBUG_STREAM(args)
image_transport::ImageTransport::advertiseCamera
CameraPublisher advertiseCamera(const std::string &base_topic, uint32_t queue_size, bool latch=false)
nodelet::Nodelet::getPrivateNodeHandle
ros::NodeHandle & getPrivateNodeHandle() const
ros::Publisher::publish
void publish(const boost::shared_ptr< M > &message) const
camera_aravis::CameraAravisNodelet::CameraAravisNodelet
CameraAravisNodelet()
Definition: camera_aravis_nodelet.cpp:55
camera_aravis::CameraAravisNodelet::spawnStream
void spawnStream()
Definition: camera_aravis_nodelet.cpp:598
camera_aravis::CameraAravisNodelet
Definition: camera_aravis_nodelet.h:107
ros::NodeHandle::advertise
Publisher advertise(AdvertiseOptions &ops)
ros::ok
ROSCPP_DECL bool ok()
ROS_WARN_STREAM_ONCE
#define ROS_WARN_STREAM_ONCE(args)
camera_aravis::CameraAravisNodelet::getStringFeatureCallback
bool getStringFeatureCallback(camera_aravis::get_string_feature_value::Request &request, camera_aravis::get_string_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:726
XmlRpc::XmlRpcValue::TypeStruct
TypeStruct
camera_aravis::CameraAravisNodelet::getBooleanFeatureCallback
bool getBooleanFeatureCallback(camera_aravis::get_boolean_feature_value::Request &request, camera_aravis::get_boolean_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:746
camera_aravis::CameraAravisNodelet::publishTfLoop
void publishTfLoop(double rate)
Definition: camera_aravis_nodelet.cpp:1557
camera_aravis::CameraAravisNodelet::diagnostic_yaml_url_
std::string diagnostic_yaml_url_
Definition: camera_aravis_nodelet.h:232
class_list_macros.h
camera_aravis::CameraAravisNodelet::shutdown_trigger_
ros::Timer shutdown_trigger_
Definition: camera_aravis_nodelet.h:259
tf2_ros::StaticTransformBroadcaster
camera_aravis::CameraAravisNodelet::sensors_
std::vector< Sensor * > sensors_
Definition: camera_aravis_nodelet.h:282
ros::SingleSubscriberPublisher
ROS_WARN_STREAM
#define ROS_WARN_STREAM(args)
camera_aravis::CameraAravisNodelet::camera_infos_
std::vector< sensor_msgs::CameraInfoPtr > camera_infos_
Definition: camera_aravis_nodelet.h:224
camera_aravis::CameraAravisNodelet::get_string_service_
ros::ServiceServer get_string_service_
Definition: camera_aravis_nodelet.h:177
XmlRpc::XmlRpcValue::TypeDouble
TypeDouble
camera_aravis::CameraBufferPool::Ptr
boost::shared_ptr< CameraBufferPool > Ptr
Definition: camera_buffer_pool.h:91
camera_aravis::CameraAravisNodelet::config_max_
Config config_max_
Definition: camera_aravis_nodelet.h:248
PLUGINLIB_EXPORT_CLASS
#define PLUGINLIB_EXPORT_CLASS(class_type, base_class_type)
camera_aravis::CameraAravisNodelet::p_device_
ArvDevice * p_device_
Definition: camera_aravis_nodelet.h:126
ros::Publisher::shutdown
void shutdown()
camera_aravis::CameraAravisNodelet::spawn_stream_thread_
std::thread spawn_stream_thread_
Definition: camera_aravis_nodelet.h:251
camera_aravis::CameraAravisNodelet::config_
Config config_
Definition: camera_aravis_nodelet.h:246
camera_aravis::CameraAravisNodelet::p_camera_info_node_handles_
std::vector< std::unique_ptr< ros::NodeHandle > > p_camera_info_node_handles_
Definition: camera_aravis_nodelet.h:223
tf::getPrefixParam
std::string getPrefixParam(ros::NodeHandle &nh)
camera_aravis::CameraAravisNodelet::setIntegerFeatureCallback
bool setIntegerFeatureCallback(camera_aravis::set_integer_feature_value::Request &request, camera_aravis::set_integer_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:693
camera_aravis::CameraAravisNodelet::reconfigure_server_
std::unique_ptr< dynamic_reconfigure::Server< Config > > reconfigure_server_
Definition: camera_aravis_nodelet.h:218
camera_aravis::CameraAravisNodelet::tuneGvStream
void tuneGvStream(ArvGvStream *p_stream)
Definition: camera_aravis_nodelet.cpp:1048
XmlRpc::XmlRpcValue::TypeString
TypeString
camera_aravis::CameraAravisNodelet::onShutdownTriggered
void onShutdownTriggered(const ros::TimerEvent &)
Definition: camera_aravis_nodelet.cpp:1887
camera_aravis::CameraAravisNodelet::set_boolean_service_
ros::ServiceServer set_boolean_service_
Definition: camera_aravis_nodelet.h:192
camera_aravis::CameraAravisNodelet::writeCameraFeaturesFromRosparam
void writeCameraFeaturesFromRosparam()
Definition: camera_aravis_nodelet.cpp:1818
camera_aravis::CameraAravisNodelet::diagnostic_features_
YAML::Node diagnostic_features_
Definition: camera_aravis_nodelet.h:234
camera_aravis::CameraAravisNodelet::newBufferReadyCallback
static void newBufferReadyCallback(ArvStream *p_stream, gpointer can_instance)
Definition: camera_aravis_nodelet.cpp:1328
camera_info_manager::CameraInfoManager
camera_aravis::CameraAravisNodelet::stream_names_
std::vector< std::string > stream_names_
Definition: camera_aravis_nodelet.h:130
image_transport::CameraPublisher
ROS_WARN
#define ROS_WARN(...)
camera_aravis::CameraAravisNodelet::p_camera_info_managers_
std::vector< std::unique_ptr< camera_info_manager::CameraInfoManager > > p_camera_info_managers_
Definition: camera_aravis_nodelet.h:222
camera_aravis::CameraAravisNodelet::set_float_service_
ros::ServiceServer set_float_service_
Definition: camera_aravis_nodelet.h:186
camera_aravis::CameraAravisNodelet::discoverFeatures
void discoverFeatures()
Definition: camera_aravis_nodelet.cpp:1725
camera_aravis_nodelet.h
ros::NodeHandle::subscribe
Subscriber subscribe(const std::string &topic, uint32_t queue_size, const boost::function< void(C)> &callback, const VoidConstPtr &tracked_object=VoidConstPtr(), const TransportHints &transport_hints=TransportHints())
ros::TimerEvent
ROS_INFO_STREAM
#define ROS_INFO_STREAM(args)
camera_aravis::CameraAravisNodelet::set_string_service_
ros::ServiceServer set_string_service_
Definition: camera_aravis_nodelet.h:189
ros::names::remap
ROSCPP_DECL std::string remap(const std::string &name)
camera_aravis::CameraAravisNodelet::spawning_
std::atomic< bool > spawning_
Definition: camera_aravis_nodelet.h:250
camera_aravis::CameraAravisNodelet::cam_pubs_
std::vector< image_transport::CameraPublisher > cam_pubs_
Definition: camera_aravis_nodelet.h:221
XmlRpc::XmlRpcValue::iterator
ValueStruct::iterator iterator
camera_aravis::CameraAravisNodelet::roi_
struct camera_aravis::CameraAravisNodelet::@0 roi_
XmlRpc::XmlRpcValue::end
iterator end()
camera_aravis::CameraAravisNodelet::setBooleanFeatureCallback
bool setBooleanFeatureCallback(camera_aravis::set_boolean_feature_value::Request &request, camera_aravis::set_boolean_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:753
ros::Rate::sleep
bool sleep()
XmlRpc::XmlRpcValue::getType
const Type & getType() const
image_transport::SingleSubscriberPublisher
camera_aravis::CameraAravisNodelet::auto_params_
CameraAutoInfo auto_params_
Definition: camera_aravis_nodelet.h:239
XmlRpc::XmlRpcValue::TypeArray
TypeArray
camera_aravis::CameraAravisNodelet::rosReconfigureCallback
void rosReconfigureCallback(Config &config, uint32_t level)
Definition: camera_aravis_nodelet.cpp:1075
camera_aravis::CameraAravisNodelet::pub_ext_camera_info_
bool pub_ext_camera_info_
Definition: camera_aravis_nodelet.h:122
camera_aravis::CameraAravisNodelet::verbose_
bool verbose_
Definition: camera_aravis_nodelet.h:114
camera_aravis::CameraAravisNodelet::parseStringArgs
static void parseStringArgs(std::string in_arg_string, std::vector< std::string > &out_args)
Definition: camera_aravis_nodelet.cpp:1790
nodelet::Nodelet
camera_aravis::CameraAravisNodelet::auto_pub_
ros::Publisher auto_pub_
Definition: camera_aravis_nodelet.h:240
camera_aravis::CameraAravisNodelet::onInit
virtual void onInit() override
Definition: camera_aravis_nodelet.cpp:122
camera_aravis::CameraAravisNodelet::reconfigure_mutex_
boost::recursive_mutex reconfigure_mutex_
Definition: camera_aravis_nodelet.h:219
camera_aravis::CameraAravisNodelet::diagnostic_thread_active_
std::atomic_bool diagnostic_thread_active_
Definition: camera_aravis_nodelet.h:237
camera_aravis::CameraAravisNodelet::setFloatFeatureCallback
bool setFloatFeatureCallback(camera_aravis::set_float_feature_value::Request &request, camera_aravis::set_float_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:713
camera_aravis::CameraAravisNodelet::setAutoMaster
void setAutoMaster(bool value)
Definition: camera_aravis_nodelet.cpp:979
ROS_ERROR
#define ROS_ERROR(...)
camera_aravis::CameraAravisNodelet::num_streams_
gint num_streams_
Definition: camera_aravis_nodelet.h:128
tf2_ros::TransformBroadcaster
camera_aravis::CameraAravisNodelet::softwareTriggerLoop
void softwareTriggerLoop()
Definition: camera_aravis_nodelet.cpp:1530
camera_aravis::CameraAravisNodelet::controlLostCallback
static void controlLostCallback(ArvDevice *p_gv_device, gpointer can_instance)
Definition: camera_aravis_nodelet.cpp:1520
camera_aravis::CameraAravisNodelet::get_float_service_
ros::ServiceServer get_float_service_
Definition: camera_aravis_nodelet.h:174
nodelet::Nodelet::getName
const std::string & getName() const
ros::NodeHandle::param
T param(const std::string &param_name, const T &default_val) const
camera_aravis::CameraAravisNodelet::software_trigger_active_
std::atomic_bool software_trigger_active_
Definition: camera_aravis_nodelet.h:254
camera_aravis::CameraAravisNodelet::ptp_set_cmd_feature_
std::string ptp_set_cmd_feature_
Definition: camera_aravis_nodelet.h:120
image_transport::SubscriberStatusCallback
boost::function< void(const SingleSubscriberPublisher &)> SubscriberStatusCallback
ros::Rate
camera_aravis::CameraAravisNodelet::getFloatFeatureCallback
bool getFloatFeatureCallback(camera_aravis::get_float_feature_value::Request &request, camera_aravis::get_float_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:706
ros::Duration::sleep
bool sleep() const
camera_aravis::CameraAravisNodelet::p_stb_
std::unique_ptr< tf2_ros::StaticTransformBroadcaster > p_stb_
Definition: camera_aravis_nodelet.h:226
camera_aravis::CameraAravisNodelet::p_buffer_pools_
std::vector< CameraBufferPool::Ptr > p_buffer_pools_
Definition: camera_aravis_nodelet.h:131
XmlRpc::XmlRpcValue::begin
iterator begin()
XmlRpc::XmlRpcValue::TypeBoolean
TypeBoolean
camera_aravis::CameraAravisNodelet::implemented_features_
std::unordered_map< std::string, const bool > implemented_features_
Definition: camera_aravis_nodelet.h:257
XmlRpc::XmlRpcValue::TypeDateTime
TypeDateTime
camera_aravis::CameraAravisNodelet::resetPtpClock
void resetPtpClock()
Definition: camera_aravis_nodelet.cpp:766
ROS_INFO
#define ROS_INFO(...)
ros::NodeHandle::createTimer
Timer createTimer(Duration period, const TimerCallback &callback, bool oneshot=false, bool autostart=true) const
camera_aravis::CameraAravisNodelet::setAutoSlave
void setAutoSlave(bool value)
Definition: camera_aravis_nodelet.cpp:992
ros::Duration
camera_aravis::CameraAravisNodelet::height
int32_t height
Definition: camera_aravis_nodelet.h:269
camera_aravis::CONVERSIONS_DICTIONARY
const std::map< std::string, ConversionFunction > CONVERSIONS_DICTIONARY
Definition: conversion_utils.h:76
camera_aravis::CameraAravisNodelet::use_ptp_stamp_
bool use_ptp_stamp_
Definition: camera_aravis_nodelet.h:117
XmlRpc::XmlRpcValue
ros::NodeHandle
camera_aravis::CameraAravisNodelet::readAndPublishCameraDiagnostics
void readAndPublishCameraDiagnostics(double rate) const
Definition: camera_aravis_nodelet.cpp:1577
ros::Time::now
static Time now()
camera_aravis::CameraAravisNodelet::cameraAutoInfoCallback
void cameraAutoInfoCallback(const CameraAutoInfoConstPtr &msg_ptr)
Definition: camera_aravis_nodelet.cpp:801


camera_aravis
Author(s): Boitumelo Ruf, Fraunhofer IOSB , Dominik Kleiser, Fraunhofer IOSB , Dominik A. Klein, Fraunhofer FKIE , Steve Safarik, Straw Lab , Andrew Straw, Straw Lab , Floris van Breugel, van Breugel Lab
autogenerated on Wed Mar 13 2024 02:23:00