camera_aravis_nodelet.cpp
Go to the documentation of this file.
1 /****************************************************************************
2  *
3  * camera_aravis
4  *
5  * Copyright © 2022 Fraunhofer IOSB and contributors
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Library General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Library General Public License for more details.
16  *
17  * You should have received a copy of the GNU Library General Public
18  * License along with this library; if not, write to the
19  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20  * Boston, MA 02110-1301, USA.
21  *
22  ****************************************************************************/
23 
25 
26 #include <thread>
27 
30 
31 namespace camera_aravis
32 {
33 
35 {
36 }
37 
39 {
40  for (int i = 0; i < p_streams_.size(); i++)
41  {
42  if (p_streams_[i])
43  {
44  arv_stream_set_emit_signals(p_streams_[i], FALSE);
45  }
46  }
47 
48  spawning_ = false;
49  if (spawn_stream_thread_.joinable())
50  {
51  spawn_stream_thread_.join();
52  }
53 
55  if (software_trigger_thread_.joinable())
56  {
58  }
59 
60  tf_thread_active_ = false;
61  if (tf_dyn_thread_.joinable())
62  {
63  tf_dyn_thread_.join();
64  }
65 
67  if (diagnostic_thread_.joinable())
68  {
69  diagnostic_thread_.join();
70  }
71 
72  for (int i = 0; i < p_streams_.size(); i++)
73  {
74  guint64 n_completed_buffers = 0;
75  guint64 n_failures = 0;
76  guint64 n_underruns = 0;
77  arv_stream_get_statistics(p_streams_[i], &n_completed_buffers, &n_failures, &n_underruns);
78  ROS_INFO("Completed buffers = %Lu", (unsigned long long)n_completed_buffers);
79  ROS_INFO("Failures = %Lu", (unsigned long long)n_failures);
80  ROS_INFO("Underruns = %Lu", (unsigned long long)n_underruns);
81  if (arv_camera_is_gv_device(p_camera_))
82  {
83  guint64 n_resent;
84  guint64 n_missing;
85  arv_gv_stream_get_statistics(reinterpret_cast<ArvGvStream*>(p_streams_[i]), &n_resent, &n_missing);
86  ROS_INFO("Resent buffers = %Lu", (unsigned long long)n_resent);
87  ROS_INFO("Missing = %Lu", (unsigned long long)n_missing);
88  }
89  }
90 
91  if (p_device_)
92  {
93  arv_device_execute_command(p_device_, "AcquisitionStop");
94  }
95 
96  for (int i = 0; i < p_streams_.size(); i++)
97  {
98  g_object_unref(p_streams_[i]);
99  }
100  g_object_unref(p_camera_);
101 }
102 
104 {
107 
108  // Retrieve ros parameters
109  verbose_ = pnh.param<bool>("verbose", verbose_);
110  guid_ = pnh.param<std::string>("guid", guid_); // Get the camera guid as a parameter or use the first device.
111 
112  use_ptp_stamp_ = pnh.param<bool>("use_ptp_timestamp", use_ptp_stamp_);
113  ptp_enable_feature_ = pnh.param<std::string>("ptp_enable_feature_name", ptp_enable_feature_);
114  ptp_status_feature_ = pnh.param<std::string>("ptp_status_feature_name", ptp_status_feature_);
115  ptp_set_cmd_feature_ = pnh.param<std::string>("ptp_set_cmd_feature_name", ptp_set_cmd_feature_);
116 
117  bool init_params_from_reconfig = pnh.param<bool>("init_params_from_dyn_reconfigure", true);
118 
119  std::string awb_mode = pnh.param<std::string>("BalanceWhiteAuto", "Continuous");
120  std::string wb_ratio_selector_feature = pnh.param<std::string>("wb_ratio_selector_feature", "");
121  std::string wb_ratio_feature = pnh.param<std::string>("wb_ratio_feature", "");
122 
123  std::string wb_ratio_selector_args = pnh.param<std::string>("wb_ratio_selectors", "");
124  std::vector<std::string> wb_ratio_selector_values;
125  parseStringArgs(wb_ratio_selector_args, wb_ratio_selector_values);
126 
127  std::string wb_ratio_args = pnh.param<std::string>("wb_ratios", "");
128  std::vector<std::string> wb_ratio_str_values;
129  parseStringArgs(wb_ratio_args, wb_ratio_str_values);
130 
131  if (wb_ratio_selector_values.size() != wb_ratio_str_values.size())
132  {
133  ROS_WARN("Lists of 'wb_ratio_selector' and 'wb_ratio' have different sizes. "
134  "Only setting values which exist in both lists.");
135  }
136 
137  pub_ext_camera_info_ = pnh.param<bool>("ExtendedCameraInfo", pub_ext_camera_info_); // publish an extended camera info message
138  pub_tf_optical_ = pnh.param<bool>("publish_tf", pub_tf_optical_); // should we publish tf transforms to camera optical frame?
139 
140  std::string stream_channel_args;
141  if (pnh.getParam("channel_names", stream_channel_args))
142  {
143  parseStringArgs(stream_channel_args, stream_names_);
144  }
145  else
146  {
147  stream_names_ = {""};
148  }
149 
150  std::string pixel_format_args;
151  std::vector<std::string> pixel_formats;
152  pnh.param("PixelFormat", pixel_format_args, pixel_format_args);
153  parseStringArgs(pixel_format_args, pixel_formats);
154 
155  std::string calib_url_args;
156  std::vector<std::string> calib_urls;
157  pnh.param("camera_info_url", calib_url_args, calib_url_args);
158  parseStringArgs(calib_url_args, calib_urls);
159 
160  diagnostic_yaml_url_ = pnh.param<std::string>("diagnostic_yaml_url", diagnostic_yaml_url_);
162  std::max(0.0, pnh.param<double>("diagnostic_publish_rate", 0.1));
163 
165  this, true, false);
166  shutdown_delay_s_ = pnh.param<double>("shutdown_delay_s", 5.0);
167 
168  // Print out some useful info.
169  ROS_INFO("Attached cameras:");
170  arv_update_device_list();
171  uint n_interfaces = arv_get_n_interfaces();
172  ROS_INFO("# Interfaces: %d", n_interfaces);
173 
174  uint n_devices = arv_get_n_devices();
175  ROS_INFO("# Devices: %d", n_devices);
176  for (uint i = 0; i < n_devices; i++)
177  ROS_INFO("Device%d: %s", i, arv_get_device_id(i));
178 
179  if (n_devices == 0)
180  {
181  ROS_ERROR("No cameras detected.");
182  return;
183  }
184 
185  // Open the camera, and set it up.
186  while (!p_camera_)
187  {
188  if (guid_.empty())
189  {
190  ROS_INFO("Opening: (any)");
191  p_camera_ = arv_camera_new(NULL);
192  }
193  else
194  {
195  ROS_INFO_STREAM("Opening: " << guid_);
196  p_camera_ = arv_camera_new(guid_.c_str());
197  }
198  ros::Duration(1.0).sleep();
199  }
200 
201  p_device_ = arv_camera_get_device(p_camera_);
202  const char* vendor_name = arv_camera_get_vendor_name(p_camera_);
203  const char* model_name = arv_camera_get_model_name(p_camera_);
204  const char* device_id = arv_camera_get_device_id(p_camera_);
205  const char* device_sn = arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber");
206  ROS_INFO("Successfully Opened: %s-%s-%s", vendor_name, model_name,
207  (device_sn) ? device_sn : device_id);
208 
209  // Start the dynamic_reconfigure server.
210  reconfigure_server_.reset(new dynamic_reconfigure::Server<Config>(reconfigure_mutex_, pnh));
211  reconfigure_server_->getConfigDefault(config_);
212  reconfigure_server_->getConfigMin(config_min_);
213  reconfigure_server_->getConfigMax(config_max_);
214 
215  // See which features exist in this camera device
217 
218  // Check the number of streams for this camera
219  num_streams_ = arv_device_get_integer_feature_value(p_device_, "DeviceStreamChannelCount");
220  // if this return 0, try the deprecated GevStreamChannelCount in case this is an older camera
221  if (!num_streams_)
222  {
223  num_streams_ = arv_device_get_integer_feature_value(p_device_, "GevStreamChannelCount");
224  }
225  // if this also returns 0, assume number of streams = 1
226  if (!num_streams_)
227  {
228  ROS_WARN("Unable to detect number of supported stream channels.");
229  num_streams_ = 1;
230  }
231 
232  ROS_INFO("Number of supported stream channels %i.", (int)num_streams_);
233 
234  // check if every stream channel has been given a channel name
235  if (stream_names_.size() < num_streams_)
236  {
237  num_streams_ = stream_names_.size();
238  }
239 
240  // initialize the sensor structs
241  for (int i = 0; i < num_streams_; i++)
242  {
243  sensors_.push_back(new Sensor());
244  p_streams_.push_back(NULL);
246  p_camera_info_managers_.push_back(NULL);
247  p_camera_info_node_handles_.push_back(NULL);
248  camera_infos_.push_back(sensor_msgs::CameraInfoPtr());
251  }
252 
253  // Get parameter bounds.
254  arv_camera_get_exposure_time_bounds(p_camera_, &config_min_.ExposureTime, &config_max_.ExposureTime);
255  arv_camera_get_gain_bounds(p_camera_, &config_min_.Gain, &config_max_.Gain);
256  for (int i = 0; i < num_streams_; i++)
257  {
258  arv_camera_gv_select_stream_channel(p_camera_, i);
259  arv_camera_get_sensor_size(p_camera_, &sensors_[i]->width, &sensors_[i]->height);
260  }
261  arv_camera_get_width_bounds(p_camera_, &roi_.width_min, &roi_.width_max);
262  arv_camera_get_height_bounds(p_camera_, &roi_.height_min, &roi_.height_max);
263  arv_camera_get_frame_rate_bounds(p_camera_, &config_min_.AcquisitionFrameRate, &config_max_.AcquisitionFrameRate);
264  if (implemented_features_["FocusPos"])
265  {
266  gint64 focus_min64, focus_max64;
267  arv_device_get_integer_feature_bounds(p_device_, "FocusPos", &focus_min64, &focus_max64);
268  config_min_.FocusPos = focus_min64;
269  config_max_.FocusPos = focus_max64;
270  }
271  else
272  {
273  config_min_.FocusPos = 0;
274  config_max_.FocusPos = 0;
275  }
276 
277  for (int i = 0; i < num_streams_; i++)
278  {
279  arv_camera_gv_select_stream_channel(p_camera_, i);
280 
281  if (init_params_from_reconfig)
282  {
283  // Initial camera settings.
284  if (implemented_features_["ExposureTime"])
285  arv_camera_set_exposure_time(p_camera_, config_.ExposureTime);
286  else if (implemented_features_["ExposureTimeAbs"])
287  arv_device_set_float_feature_value(p_device_, "ExposureTimeAbs", config_.ExposureTime);
288  if (implemented_features_["Gain"])
289  arv_camera_set_gain(p_camera_, config_.Gain);
290  if (implemented_features_["AcquisitionFrameRateEnable"])
291  arv_device_set_integer_feature_value(p_device_, "AcquisitionFrameRateEnable", 1);
292  if (implemented_features_["AcquisitionFrameRate"])
293  arv_camera_set_frame_rate(p_camera_, config_.AcquisitionFrameRate);
294 
295  // init default to full sensor resolution
296  arv_camera_set_region(p_camera_, 0, 0, roi_.width_max, roi_.height_max);
297 
298  // Set up the triggering.
299  if (implemented_features_["TriggerMode"] && implemented_features_["TriggerSelector"])
300  {
301  arv_device_set_string_feature_value(p_device_, "TriggerSelector", "FrameStart");
302  arv_device_set_string_feature_value(p_device_, "TriggerMode", "Off");
303  }
304  }
305 
306  // possibly set or override from given parameter
308 
309  if (awb_mode == "Off" && !wb_ratio_selector_feature.empty() && implemented_features_[wb_ratio_selector_feature] && !wb_ratio_feature.empty() && implemented_features_[wb_ratio_feature])
310  {
311  for (int l = 0; l < std::min(wb_ratio_selector_values.size(), wb_ratio_str_values.size());
312  l++)
313  {
314  arv_device_set_string_feature_value(p_device_, wb_ratio_selector_feature.c_str(),
315  wb_ratio_selector_values[l].c_str());
316  arv_device_set_float_feature_value(p_device_, wb_ratio_feature.c_str(),
317  std::stof(wb_ratio_str_values[l]));
318  }
319  }
320  }
321 
322  ros::Duration(2.0).sleep();
323 
324  // get current state of camera for config_
325  arv_camera_get_region(p_camera_, &roi_.x, &roi_.y, &roi_.width, &roi_.height);
326  config_.AcquisitionMode =
327  implemented_features_["AcquisitionMode"] ? arv_device_get_string_feature_value(p_device_, "AcquisitionMode") : "Continuous";
328  config_.AcquisitionFrameRate =
329  implemented_features_["AcquisitionFrameRate"] ? arv_camera_get_frame_rate(p_camera_) : 0.0;
330  config_.ExposureAuto =
331  implemented_features_["ExposureAuto"] ? arv_device_get_string_feature_value(p_device_, "ExposureAuto") : "Off";
332  config_.ExposureTime = implemented_features_["ExposureTime"] ? arv_camera_get_exposure_time(p_camera_) : 0.0;
333  config_.GainAuto =
334  implemented_features_["GainAuto"] ? arv_device_get_string_feature_value(p_device_, "GainAuto") : "Off";
335  config_.Gain = implemented_features_["Gain"] ? arv_camera_get_gain(p_camera_) : 0.0;
336  config_.TriggerMode =
337  implemented_features_["TriggerMode"] ? arv_device_get_string_feature_value(p_device_, "TriggerMode") : "Off";
338  config_.TriggerSource =
339  implemented_features_["TriggerSource"] ? arv_device_get_string_feature_value(p_device_, "TriggerSource") : "Software";
340 
341  // get pixel format name and translate into corresponding ROS name
342  for (int i = 0; i < num_streams_; i++)
343  {
344  arv_camera_gv_select_stream_channel(p_camera_, i);
345  std::string source_selector = "Source" + std::to_string(i);
346  arv_device_set_string_feature_value(p_device_, "SourceSelector", source_selector.c_str());
347  arv_device_set_string_feature_value(p_device_, "PixelFormat", pixel_formats[i].c_str());
348  sensors_[i]->pixel_format = std::string(arv_device_get_string_feature_value(p_device_, "PixelFormat"));
349  const auto sensor_iter = CONVERSIONS_DICTIONARY.find(sensors_[i]->pixel_format);
350  if (sensor_iter != CONVERSIONS_DICTIONARY.end())
351  {
352  convert_formats.push_back(sensor_iter->second);
353  }
354  else
355  {
356  ROS_WARN_STREAM("There is no known conversion from " << sensors_[i]->pixel_format << " to a usual ROS image encoding. Likely you need to implement one.");
357  }
358 
359  sensors_[i]->n_bits_pixel = ARV_PIXEL_FORMAT_BIT_PER_PIXEL(
360  arv_device_get_integer_feature_value(p_device_, "PixelFormat"));
361  config_.FocusPos =
362  implemented_features_["FocusPos"] ? arv_device_get_integer_feature_value(p_device_, "FocusPos") : 0;
363  }
364 
365  // Get other (non GenIcam) parameter current values.
366  pnh.param<double>("softwaretriggerrate", config_.softwaretriggerrate, config_.softwaretriggerrate);
367  pnh.param<int>("mtu", config_.mtu, config_.mtu);
368  pnh.param<std::string>("frame_id", config_.frame_id, config_.frame_id);
369  pnh.param<bool>("auto_master", config_.AutoMaster, config_.AutoMaster);
370  pnh.param<bool>("auto_slave", config_.AutoSlave, config_.AutoSlave);
371  setAutoMaster(config_.AutoMaster);
372  setAutoSlave(config_.AutoSlave);
373 
374  if (pub_tf_optical_)
375  {
376  tf_optical_.header.frame_id = config_.frame_id;
377  tf_optical_.child_frame_id = config_.frame_id + "_optical";
378  tf_optical_.transform.translation.x = 0.0;
379  tf_optical_.transform.translation.y = 0.0;
380  tf_optical_.transform.translation.z = 0.0;
381  tf_optical_.transform.rotation.x = -0.5;
382  tf_optical_.transform.rotation.y = 0.5;
383  tf_optical_.transform.rotation.z = -0.5;
384  tf_optical_.transform.rotation.w = 0.5;
385 
386  double tf_publish_rate;
387  pnh.param<double>("tf_publish_rate", tf_publish_rate, 0);
388  if (tf_publish_rate > 0.)
389  {
390  // publish dynamic tf at given rate (recommended when running as a Nodelet, since latching has bugs-by-design)
392  tf_dyn_thread_ = std::thread(&CameraAravisNodelet::publishTfLoop, this, tf_publish_rate);
393  }
394  else
395  {
396  // publish static tf only once (latched)
398  tf_optical_.header.stamp = ros::Time::now();
399  p_stb_->sendTransform(tf_optical_);
400  }
401  }
402 
403  // read diagnostic features from yaml file and initialize publishing thread
404  if (!diagnostic_yaml_url_.empty() && diagnostic_publish_rate_ > 0.0)
405  {
406  try
407  {
408  diagnostic_features_ = YAML::LoadFile(diagnostic_yaml_url_);
409  }
410  catch (const YAML::BadFile& e)
411  {
412  ROS_WARN("YAML file cannot be loaded: %s", e.what());
413  ROS_WARN("Camera diagnostics will not be published.");
414  }
415  catch (const YAML::ParserException& e)
416  {
417  ROS_WARN("YAML file is malformed: %s", e.what());
418  ROS_WARN("Camera diagnostics will not be published.");
419  }
420 
421  if (diagnostic_features_.size() > 0)
422  {
423  diagnostic_pub_ = getNodeHandle().advertise<CameraDiagnostics>(
424  ros::names::remap(this->getName() + "/diagnostics"), 1, true);
425 
429  }
430  else
431  {
432  ROS_WARN("No diagnostic features specified.");
433  ROS_WARN("Camera diagnostics will not be published.");
434  }
435  }
436  else if (!diagnostic_yaml_url_.empty() && diagnostic_publish_rate_ <= 0.0)
437  {
438  ROS_WARN("diagnostic_publish_rate is <= 0.0");
439  ROS_WARN("Camera diagnostics will not be published.");
440  }
441 
442  // default calibration url is [DeviceSerialNumber/DeviceID].yaml
443  ArvGcNode* p_gc_node;
444  GError* error = NULL;
445 
446  p_gc_node = arv_device_get_feature(p_device_, "DeviceSerialNumber");
447 
448  if (calib_urls[0].empty())
449  {
450  if (arv_gc_feature_node_is_implemented(ARV_GC_FEATURE_NODE(p_gc_node), &error))
451  {
452  GType device_serial_return_type = arv_gc_feature_node_get_value_type(ARV_GC_FEATURE_NODE(p_gc_node));
453  // If the feature DeviceSerialNumber is not string, it indicates that the camera is using an older version of the genicam SFNC.
454  // Older camera models do not have a DeviceSerialNumber as string, but as integer and often set to 0.
455  // In those cases use the outdated DeviceID (deprecated since genicam SFNC v2.0).
456  if (device_serial_return_type == G_TYPE_STRING)
457  {
458  calib_urls[0] = arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber");
459  calib_urls[0] += ".yaml";
460  }
461  else if (device_serial_return_type == G_TYPE_INT64)
462  {
463  calib_urls[0] = arv_device_get_string_feature_value(p_device_, "DeviceID");
464  calib_urls[0] += ".yaml";
465  }
466  }
467  }
468 
469  for (int i = 0; i < num_streams_; i++)
470  {
471  // Start the camerainfo manager.
472  std::string camera_info_frame_id = config_.frame_id;
473  if (!stream_names_[i].empty())
474  camera_info_frame_id = config_.frame_id + '/' + stream_names_[i];
475 
476  // Use separate node handles for CameraInfoManagers when using a Multisource Camera
477  if (!stream_names_[i].empty())
478  {
480  p_camera_info_managers_[i].reset(new camera_info_manager::CameraInfoManager(*p_camera_info_node_handles_[i], camera_info_frame_id, calib_urls[i]));
481  }
482  else
483  {
484  p_camera_info_managers_[i].reset(new camera_info_manager::CameraInfoManager(pnh, camera_info_frame_id, calib_urls[i]));
485  }
486 
487  ROS_INFO("Reset %s Camera Info Manager", stream_names_[i].c_str());
488  ROS_INFO("%s Calib URL: %s", stream_names_[i].c_str(), calib_urls[i].c_str());
489 
490  // publish an ExtendedCameraInfo message
492  }
493 
494  // update the reconfigure config
495  reconfigure_server_->setConfigMin(config_min_);
496  reconfigure_server_->setConfigMax(config_max_);
497  reconfigure_server_->updateConfig(config_);
498  ros::Duration(2.0).sleep();
499  reconfigure_server_->setCallback(boost::bind(&CameraAravisNodelet::rosReconfigureCallback, this, _1, _2));
500 
501  // Print information.
502  ROS_INFO(" Using Camera Configuration:");
503  ROS_INFO(" ---------------------------");
504  ROS_INFO(" Vendor name = %s", arv_device_get_string_feature_value(p_device_, "DeviceVendorName"));
505  ROS_INFO(" Model name = %s", arv_device_get_string_feature_value(p_device_, "DeviceModelName"));
506  ROS_INFO(" Device id = %s", arv_device_get_string_feature_value(p_device_, "DeviceUserID"));
507  ROS_INFO(" Serial number = %s", arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber"));
508  ROS_INFO(
509  " Type = %s",
510  arv_camera_is_uv_device(p_camera_) ? "USB3Vision" : (arv_camera_is_gv_device(p_camera_) ? "GigEVision" : "Other"));
511  ROS_INFO(" Sensor width = %d", sensors_[0]->width);
512  ROS_INFO(" Sensor height = %d", sensors_[0]->height);
513  ROS_INFO(" ROI x,y,w,h = %d, %d, %d, %d", roi_.x, roi_.y, roi_.width, roi_.height);
514  ROS_INFO(" Pixel format = %s", sensors_[0]->pixel_format.c_str());
515  ROS_INFO(" BitsPerPixel = %lu", sensors_[0]->n_bits_pixel);
516  ROS_INFO(" Acquisition Mode = %s",
517  implemented_features_["AcquisitionMode"] ? arv_device_get_string_feature_value(p_device_, "AcquisitionMode") : "(not implemented in camera)");
518  ROS_INFO(" Trigger Mode = %s",
519  implemented_features_["TriggerMode"] ? arv_device_get_string_feature_value(p_device_, "TriggerMode") : "(not implemented in camera)");
520  ROS_INFO(" Trigger Source = %s",
521  implemented_features_["TriggerSource"] ? arv_device_get_string_feature_value(p_device_, "TriggerSource") : "(not implemented in camera)");
522  ROS_INFO(" Can set FrameRate: %s", implemented_features_["AcquisitionFrameRate"] ? "True" : "False");
523  if (implemented_features_["AcquisitionFrameRate"])
524  {
525  ROS_INFO(" AcquisitionFrameRate = %g hz", config_.AcquisitionFrameRate);
526  }
527 
528  if (implemented_features_["BalanceWhiteAuto"])
529  {
530  ROS_INFO(" BalanceWhiteAuto = %s", awb_mode.c_str());
531  if (awb_mode != "Continuous" && !wb_ratio_selector_feature.empty() && implemented_features_[wb_ratio_selector_feature] && !wb_ratio_feature.empty() && implemented_features_[wb_ratio_feature])
532  {
533  for (int l = 0; l < std::min(wb_ratio_selector_values.size(), wb_ratio_str_values.size());
534  l++)
535  {
536  arv_device_set_string_feature_value(p_device_, wb_ratio_selector_feature.c_str(),
537  wb_ratio_selector_values[l].c_str());
538  float wb_ratio_val = arv_device_get_float_feature_value(p_device_, wb_ratio_feature.c_str());
539  ROS_INFO(" BalanceRatio %s = %f", wb_ratio_selector_values[l].c_str(), wb_ratio_val);
540  }
541  }
542  }
543 
544  ROS_INFO(" Can set Exposure: %s", implemented_features_["ExposureTime"] ? "True" : "False");
545  if (implemented_features_["ExposureTime"])
546  {
547  ROS_INFO(" Can set ExposureAuto: %s", implemented_features_["ExposureAuto"] ? "True" : "False");
548  ROS_INFO(" Exposure = %g us in range [%g,%g]", config_.ExposureTime, config_min_.ExposureTime,
549  config_max_.ExposureTime);
550  }
551 
552  ROS_INFO(" Can set Gain: %s", implemented_features_["Gain"] ? "True" : "False");
553  if (implemented_features_["Gain"])
554  {
555  ROS_INFO(" Can set GainAuto: %s", implemented_features_["GainAuto"] ? "True" : "False");
556  ROS_INFO(" Gain = %f %% in range [%f,%f]", config_.Gain, config_min_.Gain, config_max_.Gain);
557  }
558 
559  ROS_INFO(" Can set FocusPos: %s", implemented_features_["FocusPos"] ? "True" : "False");
560 
561  if (implemented_features_["GevSCPSPacketSize"])
562  ROS_INFO(" Network mtu = %lu", arv_device_get_integer_feature_value(p_device_, "GevSCPSPacketSize"));
563 
564  ROS_INFO(" ---------------------------");
565 
566  // Reset PTP clock
567  if (use_ptp_stamp_)
568  {
569  resetPtpClock();
570 
571  if (!ptp_set_cmd_feature_.empty())
572  {
574  {
575  arv_device_execute_command(p_device_, ptp_set_cmd_feature_.c_str());
576  }
577  else
578  ROS_WARN("PTP Error: ptp_set_cmd_feature_ '%s' is not available on the device.",
579  ptp_set_cmd_feature_.c_str());
580  }
581  }
582 
583  // spawn camera stream in thread, so onInit() is not blocked
584  spawning_ = true;
586 }
587 
589 {
592 
593  for (int i = 0; i < num_streams_; i++)
594  {
595  while (spawning_)
596  {
597  arv_camera_gv_select_stream_channel(p_camera_, i);
598  p_streams_[i] = arv_camera_create_stream(p_camera_, NULL, NULL);
599 
600  if (p_streams_[i])
601  {
602  // Load up some buffers.
603  arv_camera_gv_select_stream_channel(p_camera_, i);
604  const gint n_bytes_payload_stream_ = arv_camera_get_payload(p_camera_);
605 
606  p_buffer_pools_[i].reset(new CameraBufferPool(p_streams_[i], n_bytes_payload_stream_, 10));
607 
608  if (arv_camera_is_gv_device(p_camera_))
609  {
610  tuneGvStream(reinterpret_cast<ArvGvStream*>(p_streams_[i]));
611  }
612  break;
613  }
614  else
615  {
616  ROS_WARN("Stream %i: Could not create image stream for %s. Retrying...", i, guid_.c_str());
617  ros::Duration(1.0).sleep();
618  ros::spinOnce();
619  }
620  }
621  }
622 
623  // Monitor whether anyone is subscribed to the camera stream
624  std::vector<image_transport::SubscriberStatusCallback> image_cbs_;
625  std::vector<ros::SubscriberStatusCallback> info_cbs_;
626 
628  { this->rosConnectCallback(); };
630  { this->rosConnectCallback(); };
631 
632  for (int i = 0; i < num_streams_; i++)
633  {
634  image_transport::ImageTransport* p_transport;
635  // Set up image_raw
636  std::string topic_name = this->getName();
637  p_transport = new image_transport::ImageTransport(pnh);
638  if (num_streams_ != 1 || !stream_names_[i].empty())
639  {
640  topic_name += "/" + stream_names_[i];
641  }
642 
643  cam_pubs_[i] = p_transport->advertiseCamera(
644  ros::names::remap(topic_name + "/image_raw"),
645  1, image_cb, image_cb, info_cb, info_cb);
646  }
647 
648  // Connect signals with callbacks.
649  for (int i = 0; i < num_streams_; i++)
650  {
651  StreamIdData* data = new StreamIdData();
652  data->can = this;
653  data->stream_id = i;
654  g_signal_connect(p_streams_[i], "new-buffer", (GCallback)CameraAravisNodelet::newBufferReadyCallback, data);
655  }
656  g_signal_connect(p_device_, "control-lost", (GCallback)CameraAravisNodelet::controlLostCallback, this);
657 
658  for (int i = 0; i < num_streams_; i++)
659  {
660  arv_stream_set_emit_signals(p_streams_[i], TRUE);
661  }
662 
663  if (std::any_of(cam_pubs_.begin(), cam_pubs_.end(),
665  { return pub.getNumSubscribers() > 0; }))
666  {
667  arv_camera_start_acquisition(p_camera_);
668  }
669 
670  this->get_integer_service_ = pnh.advertiseService("get_integer_feature_value", &CameraAravisNodelet::getIntegerFeatureCallback, this);
671  this->get_float_service_ = pnh.advertiseService("get_float_feature_value", &CameraAravisNodelet::getFloatFeatureCallback, this);
672  this->get_string_service_ = pnh.advertiseService("get_string_feature_value", &CameraAravisNodelet::getStringFeatureCallback, this);
673  this->get_boolean_service_ = pnh.advertiseService("get_boolean_feature_value", &CameraAravisNodelet::getBooleanFeatureCallback, this);
674 
675  this->set_integer_service_ = pnh.advertiseService("set_integer_feature_value", &CameraAravisNodelet::setIntegerFeatureCallback, this);
676  this->set_float_service_ = pnh.advertiseService("set_float_feature_value", &CameraAravisNodelet::setFloatFeatureCallback, this);
677  this->set_string_service_ = pnh.advertiseService("set_string_feature_value", &CameraAravisNodelet::setStringFeatureCallback, this);
678  this->set_boolean_service_ = pnh.advertiseService("set_boolean_feature_value", &CameraAravisNodelet::setBooleanFeatureCallback, this);
679 
680  this->one_shot_white_balance_service_ = pnh.advertiseService("trigger_one_shot_white_balance", &CameraAravisNodelet::oneShotWhiteBalanceCallback, this);
681 
682  ROS_INFO("Done initializing camera_aravis.");
683 }
684 
685 bool CameraAravisNodelet::getIntegerFeatureCallback(camera_aravis::get_integer_feature_value::Request& request, camera_aravis::get_integer_feature_value::Response& response)
686 {
687  const char* feature_name = request.feature.c_str();
688  response.response = arv_device_get_integer_feature_value(this->p_device_, feature_name);
689  return true;
690 }
691 
692 bool CameraAravisNodelet::setIntegerFeatureCallback(camera_aravis::set_integer_feature_value::Request& request, camera_aravis::set_integer_feature_value::Response& response)
693 {
694  const char* feature_name = request.feature.c_str();
695  guint64 value = request.value;
696  arv_device_set_integer_feature_value(this->p_device_, feature_name, value);
697  if (arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS)
698  {
699  response.ok = true;
700  }
701  else
702  {
703  response.ok = false;
704  }
705  return true;
706 }
707 
708 bool CameraAravisNodelet::getFloatFeatureCallback(camera_aravis::get_float_feature_value::Request& request, camera_aravis::get_float_feature_value::Response& response)
709 {
710  const char* feature_name = request.feature.c_str();
711  response.response = arv_device_get_float_feature_value(this->p_device_, feature_name);
712  return true;
713 }
714 
715 bool CameraAravisNodelet::setFloatFeatureCallback(camera_aravis::set_float_feature_value::Request& request, camera_aravis::set_float_feature_value::Response& response)
716 {
717  const char* feature_name = request.feature.c_str();
718  const double value = request.value;
719  arv_device_set_float_feature_value(this->p_device_, feature_name, value);
720  if (arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS)
721  {
722  response.ok = true;
723  }
724  else
725  {
726  response.ok = false;
727  }
728  return true;
729 }
730 
731 bool CameraAravisNodelet::getStringFeatureCallback(camera_aravis::get_string_feature_value::Request& request, camera_aravis::get_string_feature_value::Response& response)
732 {
733  const char* feature_name = request.feature.c_str();
734  response.response = arv_device_get_string_feature_value(this->p_device_, feature_name);
735  return true;
736 }
737 
738 bool CameraAravisNodelet::setStringFeatureCallback(camera_aravis::set_string_feature_value::Request& request, camera_aravis::set_string_feature_value::Response& response)
739 {
740  const char* feature_name = request.feature.c_str();
741  const char* value = request.value.c_str();
742  arv_device_set_string_feature_value(this->p_device_, feature_name, value);
743  if (arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS)
744  {
745  response.ok = true;
746  }
747  else
748  {
749  response.ok = false;
750  }
751  return true;
752 }
753 
754 bool CameraAravisNodelet::getBooleanFeatureCallback(camera_aravis::get_boolean_feature_value::Request& request, camera_aravis::get_boolean_feature_value::Response& response)
755 {
756  const char* feature_name = request.feature.c_str();
757  response.response = arv_device_get_boolean_feature_value(this->p_device_, feature_name);
758  return true;
759 }
760 
761 bool CameraAravisNodelet::setBooleanFeatureCallback(camera_aravis::set_boolean_feature_value::Request& request, camera_aravis::set_boolean_feature_value::Response& response)
762 {
763  const char* feature_name = request.feature.c_str();
764  const bool value = request.value;
765  arv_device_set_boolean_feature_value(this->p_device_, feature_name, value);
766  if (arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS)
767  {
768  response.ok = true;
769  }
770  else
771  {
772  response.ok = false;
773  }
774  return true;
775 }
776 
777 bool CameraAravisNodelet::oneShotWhiteBalanceCallback(camera_aravis::one_shot_white_balance::Request& request, camera_aravis::one_shot_white_balance::Response& response)
778 {
779  //--- set return values to default
780  response.is_successful = false;
781  response.balance_ratio_red = 0.f;
782  response.balance_ratio_green = 0.f;
783  response.balance_ratio_blue = 0.f;
784 
785  //--- check for correct state of camera
786  if(!this->p_device_ || !implemented_features_["BalanceWhiteAuto"])
787  return false;
788 
789  arv_device_set_string_feature_value(p_device_, "BalanceWhiteAuto", "Once");
790  if (arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS)
791  {
792  const char* tmpCharPtr = arv_device_get_string_feature_value(p_device_, "BalanceWhiteAuto");
793  std::string modeStr = "n/a";
794  if(tmpCharPtr)
795  modeStr = std::string(tmpCharPtr);
796 
797  if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
798  {
799  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
800  response.balance_ratio_red = static_cast<float>(
801  arv_device_get_float_feature_value(p_device_, "BalanceRatio"));
802 
803  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
804  response.balance_ratio_green = static_cast<float>(
805  arv_device_get_float_feature_value(p_device_, "BalanceRatio"));
806 
807  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
808  response.balance_ratio_blue = static_cast<float>(
809  arv_device_get_float_feature_value(p_device_, "BalanceRatio"));
810  }
811 
812  ROS_INFO("Setting Auto White Balance successful!");
813  ROS_INFO("> BalanceWhiteAuto Mode: %s", modeStr.c_str());
814  ROS_INFO("> BalanceRatio Red: %f", response.balance_ratio_red);
815  ROS_INFO("> BalanceRatio Green: %f", response.balance_ratio_green);
816  ROS_INFO("> BalanceRatio Blue: %f", response.balance_ratio_blue);
817 
818 
819  response.is_successful = true;
820  return true;
821  }
822  else
823  {
824  ROS_ERROR("Trying to set Auto White Balance once failed!");
825 
826  return false;
827  }
828 }
829 
{
  // Re-arm the camera's PTP (IEEE 1588) clock when it is faulty, disabled or
  // still initializing. Both feature names must be configured first.
  if (ptp_status_feature_.empty() || ptp_enable_feature_.empty())
  {
    ROS_WARN("PTP Error: ptp_status_feature_name and/or ptp_enable_feature_name are empty.");
    return;
  }

  // NOTE(review): the guarding if-condition(s) for this scope are not visible
  // in this listing (presumably checks of implemented_features_ for the two
  // configured feature names) -- confirm against the full source.
  {
    ROS_WARN("PTP Error: ptp_status_feature_name '%s' is not available on the device.",
             ptp_status_feature_.c_str());
    ROS_WARN("PTP Error: ptp_enable_feature_name '%s' is not available on the device.",
             ptp_enable_feature_.c_str());
    return;
  }

  // a PTP slave can take the following states: Slave, Listening, Uncalibrated, Faulty, Disabled
  const char* ptpS_satus_char_ptr =
      arv_device_get_string_feature_value(p_device_, ptp_status_feature_.c_str());
  // a null status string is treated as "Faulty" so the reset branch is taken
  std::string ptp_status_str = (ptpS_satus_char_ptr) ? std::string(ptpS_satus_char_ptr) : "Faulty";
  bool ptp_is_enabled = arv_device_get_boolean_feature_value(p_device_, ptp_enable_feature_.c_str());
  if (ptp_status_str == "Faulty" ||
      ptp_status_str == "Disabled" ||
      ptp_status_str == "Initializing" ||
      !ptp_is_enabled)
  {
    ROS_INFO("Resetting ptp clock (was set to %s)", ptp_status_str.c_str());

    // toggle the enable feature off and on to force re-synchronization
    arv_device_set_boolean_feature_value(p_device_, ptp_enable_feature_.c_str(), false);
    arv_device_set_boolean_feature_value(p_device_, ptp_enable_feature_.c_str(), true);
  }
}
865 
866 void CameraAravisNodelet::cameraAutoInfoCallback(const CameraAutoInfoConstPtr& msg_ptr)
867 {
868  if (config_.AutoSlave && p_device_)
869  {
870 
871  if (auto_params_.exposure_time != msg_ptr->exposure_time && implemented_features_["ExposureTime"])
872  {
873  arv_device_set_float_feature_value(p_device_, "ExposureTime", msg_ptr->exposure_time);
874  }
875 
876  if (implemented_features_["Gain"])
877  {
878  if (auto_params_.gain != msg_ptr->gain)
879  {
880  if (implemented_features_["GainSelector"])
881  {
882  arv_device_set_string_feature_value(p_device_, "GainSelector", "All");
883  }
884  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain);
885  }
886 
887  if (implemented_features_["GainSelector"])
888  {
889  if (auto_params_.gain_red != msg_ptr->gain_red)
890  {
891  arv_device_set_string_feature_value(p_device_, "GainSelector", "Red");
892  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_red);
893  }
894 
895  if (auto_params_.gain_green != msg_ptr->gain_green)
896  {
897  arv_device_set_string_feature_value(p_device_, "GainSelector", "Green");
898  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_green);
899  }
900 
901  if (auto_params_.gain_blue != msg_ptr->gain_blue)
902  {
903  arv_device_set_string_feature_value(p_device_, "GainSelector", "Blue");
904  arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_blue);
905  }
906  }
907  }
908 
909  if (implemented_features_["BlackLevel"])
910  {
911  if (auto_params_.black_level != msg_ptr->black_level)
912  {
913  if (implemented_features_["BlackLevelSelector"])
914  {
915  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
916  }
917  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->black_level);
918  }
919 
920  if (implemented_features_["BlackLevelSelector"])
921  {
922  if (auto_params_.bl_red != msg_ptr->bl_red)
923  {
924  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Red");
925  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_red);
926  }
927 
928  if (auto_params_.bl_green != msg_ptr->bl_green)
929  {
930  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Green");
931  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_green);
932  }
933 
934  if (auto_params_.bl_blue != msg_ptr->bl_blue)
935  {
936  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Blue");
937  arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_blue);
938  }
939  }
940  }
941 
942  // White balance as TIS is providing
943  if (strcmp("The Imaging Source Europe GmbH", arv_camera_get_vendor_name(p_camera_)) == 0)
944  {
945  arv_device_set_integer_feature_value(p_device_, "WhiteBalanceRedRegister", (int)(auto_params_.wb_red * 255.));
946  arv_device_set_integer_feature_value(p_device_, "WhiteBalanceGreenRegister", (int)(auto_params_.wb_green * 255.));
947  arv_device_set_integer_feature_value(p_device_, "WhiteBalanceBlueRegister", (int)(auto_params_.wb_blue * 255.));
948  }
949  else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
950  {
951  if (auto_params_.wb_red != msg_ptr->wb_red)
952  {
953  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
954  arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_red);
955  }
956 
957  if (auto_params_.wb_green != msg_ptr->wb_green)
958  {
959  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
960  arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_green);
961  }
962 
963  if (auto_params_.wb_blue != msg_ptr->wb_blue)
964  {
965  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
966  arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_blue);
967  }
968  }
969 
970  auto_params_ = *msg_ptr;
971  }
972 }
973 
{
  // Read the parameters governed by auto functions (exposure time, gain,
  // black level, white balance) back from the camera into auto_params_, so a
  // master camera can publish them to its slaves. All values start as NaN and
  // stay NaN for features the device does not implement.
  auto_params_.exposure_time = auto_params_.gain = auto_params_.gain_red = auto_params_.gain_green =
      auto_params_.gain_blue = auto_params_.black_level = auto_params_.bl_red = auto_params_.bl_green =
          auto_params_.bl_blue = auto_params_.wb_red = auto_params_.wb_green = auto_params_.wb_blue =
              std::numeric_limits<double>::quiet_NaN();

  if (p_device_)
  {
    if (implemented_features_["ExposureTime"])
    {
      auto_params_.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTime");
    }

    if (implemented_features_["Gain"])
    {
      // read common gain with the selector reset to "All" ...
      if (implemented_features_["GainSelector"])
      {
        arv_device_set_string_feature_value(p_device_, "GainSelector", "All");
      }
      auto_params_.gain = arv_device_get_float_feature_value(p_device_, "Gain");
      // ... then the per-channel gains via the selector
      if (implemented_features_["GainSelector"])
      {
        arv_device_set_string_feature_value(p_device_, "GainSelector", "Red");
        auto_params_.gain_red = arv_device_get_float_feature_value(p_device_, "Gain");
        arv_device_set_string_feature_value(p_device_, "GainSelector", "Green");
        auto_params_.gain_green = arv_device_get_float_feature_value(p_device_, "Gain");
        arv_device_set_string_feature_value(p_device_, "GainSelector", "Blue");
        auto_params_.gain_blue = arv_device_get_float_feature_value(p_device_, "Gain");
      }
    }

    if (implemented_features_["BlackLevel"])
    {
      // common black level first, then the per-channel values
      if (implemented_features_["BlackLevelSelector"])
      {
        arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
      }
      auto_params_.black_level = arv_device_get_float_feature_value(p_device_, "BlackLevel");
      if (implemented_features_["BlackLevelSelector"])
      {
        arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Red");
        auto_params_.bl_red = arv_device_get_float_feature_value(p_device_, "BlackLevel");
        arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Green");
        auto_params_.bl_green = arv_device_get_float_feature_value(p_device_, "BlackLevel");
        arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Blue");
        auto_params_.bl_blue = arv_device_get_float_feature_value(p_device_, "BlackLevel");
      }
    }

    // White balance as TIS is providing
    if (strcmp("The Imaging Source Europe GmbH", arv_camera_get_vendor_name(p_camera_)) == 0)
    {
      // TIS exposes white balance as integer registers scaled by 255
      auto_params_.wb_red = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceRedRegister") / 255.;
      auto_params_.wb_green = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceGreenRegister") / 255.;
      auto_params_.wb_blue = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceBlueRegister") / 255.;
    }
    // the standard way
    else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
    {
      arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
      auto_params_.wb_red = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
      arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
      auto_params_.wb_green = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
      arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
      auto_params_.wb_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
    }
  }
}
1043 
void CameraAravisNodelet::setAutoMaster(bool value)
{
  // Toggle master mode: as master, this node publishes its current
  // auto-function results on the (latched) "camera_auto_info" topic so that
  // slave cameras can mirror them.
  if (value)
  {
    // NOTE(review): one statement is not visible in this listing here
    // (presumably an initial sync of the auto parameters) -- confirm against
    // the full source.
    auto_pub_ = getNodeHandle().advertise<CameraAutoInfo>(ros::names::remap("camera_auto_info"), 1, true);
  }
  else
  {
    auto_pub_.shutdown();
  }
}
1056 
void CameraAravisNodelet::setAutoSlave(bool value)
{
  // Toggle slave mode: as slave, all on-camera auto functions are switched
  // off and the node instead follows the values published by the master on
  // "camera_auto_info".
  if (value)
  {
    // deactivate all auto functions
    if (implemented_features_["ExposureAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "ExposureAuto", "Off");
    }
    if (implemented_features_["GainAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "GainAuto", "Off");
    }
    if (implemented_features_["GainAutoBalance"])
    {
      arv_device_set_string_feature_value(p_device_, "GainAutoBalance", "Off");
    }
    if (implemented_features_["BlackLevelAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "BlackLevelAuto", "Off");
    }
    if (implemented_features_["BlackLevelAutoBalance"])
    {
      arv_device_set_string_feature_value(p_device_, "BlackLevelAutoBalance", "Off");
    }
    if (implemented_features_["BalanceWhiteAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "BalanceWhiteAuto", "Off");
    }
    auto_sub_ = getNodeHandle().subscribe(ros::names::remap("camera_auto_info"), 1,
    // NOTE(review): the callback argument(s) of this subscribe() call are not
    // visible in this listing (presumably cameraAutoInfoCallback bound to
    // this instance) -- confirm against the full source.
  }
  else
  {
    auto_sub_.shutdown();
  }
}
1095 
void CameraAravisNodelet::setExtendedCameraInfo(std::string channel_name, size_t stream_id)
{
  // (Un)advertise the latched extended camera info topic for the given
  // stream; named channels get the channel name as a topic prefix.
  // NOTE(review): the if-condition guarding this scope is not visible in this
  // listing (presumably it checks pub_ext_camera_info_) -- confirm against
  // the full source.
  {
    if (channel_name.empty())
    {
      extended_camera_info_pubs_[stream_id] = getNodeHandle().advertise<ExtendedCameraInfo>(ros::names::remap("extended_camera_info"), 1, true);
    }
    else
    {
      extended_camera_info_pubs_[stream_id] = getNodeHandle().advertise<ExtendedCameraInfo>(ros::names::remap(channel_name + "/extended_camera_info"), 1, true);
    }
  }
  else
  {
    extended_camera_info_pubs_[stream_id].shutdown();
  }
}
1114 
1115 // Extra stream options for GigEVision streams.
1116 void CameraAravisNodelet::tuneGvStream(ArvGvStream* p_stream)
1118  gboolean b_auto_buffer = FALSE;
1119  gboolean b_packet_resend = TRUE;
1120  unsigned int timeout_packet = 40; // milliseconds
1121  unsigned int timeout_frame_retention = 200;
1122 
1123  if (p_stream)
1124  {
1125  if (!ARV_IS_GV_STREAM(p_stream))
1126  {
1127  ROS_WARN("Stream is not a GV_STREAM");
1128  return;
1129  }
1130 
1131  if (b_auto_buffer)
1132  g_object_set(p_stream, "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO, "socket-buffer-size", 0,
1133  NULL);
1134  if (!b_packet_resend)
1135  g_object_set(p_stream, "packet-resend",
1136  b_packet_resend ? ARV_GV_STREAM_PACKET_RESEND_ALWAYS : ARV_GV_STREAM_PACKET_RESEND_NEVER,
1137  NULL);
1138  g_object_set(p_stream, "packet-timeout", timeout_packet * 1000, "frame-retention", timeout_frame_retention * 1000,
1139  NULL);
1140  }
1141 }
1142 
void CameraAravisNodelet::rosReconfigureCallback(Config& config, uint32_t level)
{
  // Dynamic-reconfigure handler: clamp the requested parameters to their
  // legal ranges, detect which values actually changed with respect to the
  // currently adopted config_, and write only those to the camera.
  // NOTE(review): consider a std::lock_guard here; an exception between
  // lock() and unlock() would leave reconfigure_mutex_ held.
  reconfigure_mutex_.lock();
  std::string tf_prefix = tf::getPrefixParam(getNodeHandle());
  ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);

  // fall back to the nodelet name if no frame_id was provided
  if (config.frame_id == "")
    config.frame_id = this->getName();

  // Limit params to legal values.
  config.AcquisitionFrameRate = CLAMP(config.AcquisitionFrameRate, config_min_.AcquisitionFrameRate,
                                      config_max_.AcquisitionFrameRate);
  config.ExposureTime = CLAMP(config.ExposureTime, config_min_.ExposureTime, config_max_.ExposureTime);
  config.Gain = CLAMP(config.Gain, config_min_.Gain, config_max_.Gain);
  config.FocusPos = CLAMP(config.FocusPos, config_min_.FocusPos, config_max_.FocusPos);
  config.frame_id = tf::resolve(tf_prefix, config.frame_id);

  // re-arm the PTP clock if PTP timestamps are in use
  if (use_ptp_stamp_)
    resetPtpClock();

  // stop auto functions if slave
  if (config.AutoSlave)
  {
    config.ExposureAuto = "Off";
    config.GainAuto = "Off";
    // todo: all other auto functions "Off"
  }

  // reset values controlled by auto functions
  if (config.ExposureAuto.compare("Off") != 0)
  {
    config.ExposureTime = config_.ExposureTime;
    ROS_WARN("ExposureAuto is active. Cannot manually set ExposureTime.");
  }
  if (config.GainAuto.compare("Off") != 0)
  {
    config.Gain = config_.Gain;
    ROS_WARN("GainAuto is active. Cannot manually set Gain.");
  }

  // reset FrameRate when triggered
  if (config.TriggerMode.compare("Off") != 0)
  {
    config.AcquisitionFrameRate = config_.AcquisitionFrameRate;
    ROS_WARN("TriggerMode is active (Trigger Source: %s). Cannot manually set AcquisitionFrameRate.", config_.TriggerSource.c_str());
  }

  // Find valid user changes we need to react to.
  const bool changed_auto_master = (config_.AutoMaster != config.AutoMaster);
  const bool changed_auto_slave = (config_.AutoSlave != config.AutoSlave);
  const bool changed_acquisition_frame_rate = (config_.AcquisitionFrameRate != config.AcquisitionFrameRate);
  const bool changed_exposure_auto = (config_.ExposureAuto != config.ExposureAuto);
  const bool changed_exposure_time = (config_.ExposureTime != config.ExposureTime);
  const bool changed_gain_auto = (config_.GainAuto != config.GainAuto);
  const bool changed_gain = (config_.Gain != config.Gain);
  const bool changed_acquisition_mode = (config_.AcquisitionMode != config.AcquisitionMode);
  const bool changed_trigger_mode = (config_.TriggerMode != config.TriggerMode);
  const bool changed_trigger_source = (config_.TriggerSource != config.TriggerSource) || changed_trigger_mode;
  const bool changed_focus_pos = (config_.FocusPos != config.FocusPos);
  const bool changed_mtu = (config_.mtu != config.mtu);

  if (changed_auto_master)
  {
    setAutoMaster(config.AutoMaster);
  }

  if (changed_auto_slave)
  {
    setAutoSlave(config.AutoSlave);
  }

  // Set params into the camera.
  if (changed_exposure_time)
  {
    if (implemented_features_["ExposureTime"])
    {
      ROS_INFO("Set ExposureTime = %f us", config.ExposureTime);
      arv_camera_set_exposure_time(p_camera_, config.ExposureTime);
    }
    else
      ROS_INFO("Camera does not support ExposureTime.");
  }

  if (changed_gain)
  {
    if (implemented_features_["Gain"])
    {
      ROS_INFO("Set gain = %f", config.Gain);
      arv_camera_set_gain(p_camera_, config.Gain);
    }
    else
      ROS_INFO("Camera does not support Gain or GainRaw.");
  }

  if (changed_exposure_auto)
  {
    if (implemented_features_["ExposureAuto"] && implemented_features_["ExposureTime"])
    {
      ROS_INFO("Set ExposureAuto = %s", config.ExposureAuto.c_str());
      arv_device_set_string_feature_value(p_device_, "ExposureAuto", config.ExposureAuto.c_str());
      // "Once": let the camera settle, read the result back, then disable
      if (config.ExposureAuto.compare("Once") == 0)
      {
        ros::Duration(2.0).sleep();
        config.ExposureTime = arv_camera_get_exposure_time(p_camera_);
        ROS_INFO("Get ExposureTime = %f us", config.ExposureTime);
        config.ExposureAuto = "Off";
      }
    }
    else
      ROS_INFO("Camera does not support ExposureAuto.");
  }
  if (changed_gain_auto)
  {
    if (implemented_features_["GainAuto"] && implemented_features_["Gain"])
    {
      ROS_INFO("Set GainAuto = %s", config.GainAuto.c_str());
      arv_device_set_string_feature_value(p_device_, "GainAuto", config.GainAuto.c_str());
      // "Once": let the camera settle, read the result back, then disable
      if (config.GainAuto.compare("Once") == 0)
      {
        ros::Duration(2.0).sleep();
        config.Gain = arv_camera_get_gain(p_camera_);
        ROS_INFO("Get Gain = %f", config.Gain);
        config.GainAuto = "Off";
      }
    }
    else
      ROS_INFO("Camera does not support GainAuto.");
  }

  if (changed_acquisition_frame_rate)
  {
    if (implemented_features_["AcquisitionFrameRate"])
    {
      ROS_INFO("Set frame rate = %f Hz", config.AcquisitionFrameRate);
      arv_camera_set_frame_rate(p_camera_, config.AcquisitionFrameRate);
    }
    else
      ROS_INFO("Camera does not support AcquisitionFrameRate.");
  }

  if (changed_trigger_mode)
  {
    if (implemented_features_["TriggerMode"])
    {
      ROS_INFO("Set TriggerMode = %s", config.TriggerMode.c_str());
      arv_device_set_string_feature_value(p_device_, "TriggerMode", config.TriggerMode.c_str());
    }
    else
      ROS_INFO("Camera does not support TriggerMode.");
  }

  if (changed_trigger_source)
  {
    // delete old software trigger thread if active
    software_trigger_active_ = false;
    if (software_trigger_thread_.joinable())
    {
      software_trigger_thread_.join();
    }

    if (implemented_features_["TriggerSource"])
    {
      ROS_INFO("Set TriggerSource = %s", config.TriggerSource.c_str());
      arv_device_set_string_feature_value(p_device_, "TriggerSource", config.TriggerSource.c_str());
    }
    else
    {
      ROS_INFO("Camera does not support TriggerSource.");
    }

    // activate on demand
    if (config.TriggerMode.compare("On") == 0 && config.TriggerSource.compare("Software") == 0)
    {
      if (implemented_features_["TriggerSoftware"])
      {
        config_.softwaretriggerrate = config.softwaretriggerrate;
        ROS_INFO("Set softwaretriggerrate = %f", 1000.0 / ceil(1000.0 / config.softwaretriggerrate));

        // Turn on software timer callback.
        // NOTE(review): the statement starting the software trigger thread is
        // not visible in this listing (presumably it spawns
        // softwareTriggerLoop on software_trigger_thread_) -- confirm against
        // the full source.
      }
      else
      {
        ROS_INFO("Camera does not support TriggerSoftware command.");
      }
    }
  }

  if (changed_focus_pos)
  {
    if (implemented_features_["FocusPos"])
    {
      ROS_INFO("Set FocusPos = %d", config.FocusPos);
      arv_device_set_integer_feature_value(p_device_, "FocusPos", config.FocusPos);
      // give the lens time to move, then report the achieved position
      ros::Duration(1.0).sleep();
      config.FocusPos = arv_device_get_integer_feature_value(p_device_, "FocusPos");
      ROS_INFO("Get FocusPos = %d", config.FocusPos);
    }
    else
      ROS_INFO("Camera does not support FocusPos.");
  }

  if (changed_mtu)
  {
    if (implemented_features_["GevSCPSPacketSize"])
    {
      ROS_INFO("Set mtu = %d", config.mtu);
      arv_device_set_integer_feature_value(p_device_, "GevSCPSPacketSize", config.mtu);
      // read back the value the device actually accepted
      ros::Duration(1.0).sleep();
      config.mtu = arv_device_get_integer_feature_value(p_device_, "GevSCPSPacketSize");
      ROS_INFO("Get mtu = %d", config.mtu);
    }
    else
      ROS_INFO("Camera does not support mtu (i.e. GevSCPSPacketSize).");
  }

  if (changed_acquisition_mode)
  {
    if (implemented_features_["AcquisitionMode"])
    {
      ROS_INFO("Set AcquisitionMode = %s", config.AcquisitionMode.c_str());
      arv_device_set_string_feature_value(p_device_, "AcquisitionMode", config.AcquisitionMode.c_str());

      // restart acquisition so the new mode takes effect
      ROS_INFO("AcquisitionStop");
      arv_device_execute_command(p_device_, "AcquisitionStop");
      ROS_INFO("AcquisitionStart");
      arv_device_execute_command(p_device_, "AcquisitionStart");
    }
    else
      ROS_INFO("Camera does not support AcquisitionMode.");
  }

  // adopt new config
  config_ = config;
  reconfigure_mutex_.unlock();
}
1379 
{
  // Start or stop image acquisition depending on whether any of the image
  // topics currently has subscribers (saves CPU and bandwidth when idle).
  if (p_device_)
  {
    // NOTE(review): the lambda parameter list of the predicate below is not
    // visible in this listing (presumably a CameraPublisher argument named
    // 'pub') -- confirm against the full source.
    if (std::all_of(cam_pubs_.begin(), cam_pubs_.end(),
        { return pub.getNumSubscribers() == 0; }))
    {
      arv_device_execute_command(p_device_, "AcquisitionStop"); // don't waste CPU if nobody is listening!
    }
    else
    {
      arv_device_execute_command(p_device_, "AcquisitionStart");
    }
  }
}
1396 
1397 void CameraAravisNodelet::newBufferReadyCallback(ArvStream* p_stream, gpointer can_instance)
1398 {
1399 
1400  // workaround to get access to the instance from a static method
1401  StreamIdData* data = (StreamIdData*)can_instance;
1402  CameraAravisNodelet* p_can = (CameraAravisNodelet*)data->can;
1403  size_t stream_id = data->stream_id;
1404  image_transport::CameraPublisher* p_cam_pub = &p_can->cam_pubs_[stream_id];
1405 
1406  if (p_can->stream_names_[stream_id].empty())
1407  {
1408  newBufferReady(p_stream, p_can, p_can->config_.frame_id, stream_id);
1409  }
1410  else
1411  {
1412  const std::string stream_frame_id = p_can->config_.frame_id + "/" + p_can->stream_names_[stream_id];
1413  newBufferReady(p_stream, p_can, stream_frame_id, stream_id);
1414  }
1415 }
1416 
void CameraAravisNodelet::newBufferReady(ArvStream* p_stream, CameraAravisNodelet* p_can, std::string frame_id, size_t stream_id)
// NOTE(review): the opening brace of this function body is not visible in
// this listing.
  // Pop the filled buffer from the stream, wrap it into a ROS image message,
  // attach the CameraInfo and publish; buffer memory is owned by the pool.
  ArvBuffer* p_buffer = arv_stream_try_pop_buffer(p_stream);

  // check if we risk to drop the next image because of not enough buffers left
  gint n_available_buffers;
  arv_stream_get_n_buffers(p_stream, &n_available_buffers, NULL);
  if (n_available_buffers == 0)
  {
    p_can->p_buffer_pools_[stream_id]->allocateBuffers(1);
  }

  if (p_buffer != NULL)
  {
    // publish only complete frames, and only when someone is listening
    if (arv_buffer_get_status(p_buffer) == ARV_BUFFER_STATUS_SUCCESS && p_can->p_buffer_pools_[stream_id] && p_can->cam_pubs_[stream_id].getNumSubscribers())
    {
      (p_can->n_buffers_)++;
      // get the image message which wraps around this buffer
      sensor_msgs::ImagePtr msg_ptr = (*(p_can->p_buffer_pools_[stream_id]))[p_buffer];
      // fill the meta information of image message
      // get acquisition time
      guint64 t;
      if (p_can->use_ptp_stamp_)
        t = arv_buffer_get_timestamp(p_buffer);
      else
        t = arv_buffer_get_system_timestamp(p_buffer);
      msg_ptr->header.stamp.fromNSec(t);
      // get frame sequence number
      msg_ptr->header.seq = arv_buffer_get_frame_id(p_buffer);
      // fill other stream properties
      msg_ptr->header.frame_id = frame_id;
      msg_ptr->width = p_can->roi_.width;
      msg_ptr->height = p_can->roi_.height;
      msg_ptr->encoding = p_can->sensors_[stream_id]->pixel_format;
      msg_ptr->step = (msg_ptr->width * p_can->sensors_[stream_id]->n_bits_pixel) / 8;

      // do the magic of conversion into a ROS format
      if (p_can->convert_formats[stream_id])
      {
        sensor_msgs::ImagePtr cvt_msg_ptr = p_can->p_buffer_pools_[stream_id]->getRecyclableImg();
        p_can->convert_formats[stream_id](msg_ptr, cvt_msg_ptr);
        msg_ptr = cvt_msg_ptr;
      }

      // get current CameraInfo data
      if (!p_can->camera_infos_[stream_id])
      {
        p_can->camera_infos_[stream_id].reset(new sensor_msgs::CameraInfo);
      }
      (*p_can->camera_infos_[stream_id]) = p_can->p_camera_info_managers_[stream_id]->getCameraInfo();
      p_can->camera_infos_[stream_id]->header = msg_ptr->header;
      if (p_can->camera_infos_[stream_id]->width == 0 || p_can->camera_infos_[stream_id]->height == 0)
      {
        // NOTE(review): the warning-macro invocation opening this message is
        // not visible in this listing -- confirm against the full source.
        "The fields image_width and image_height seem not to be set in "
        "the YAML specified by 'camera_info_url' parameter. Please set "
        "them there, because actual image size and specified image size "
        "can be different due to the region of interest (ROI) feature. In "
        "the YAML the image size should be the one on which the camera was "
        "calibrated. See CameraInfo.msg specification!");
        // fall back to the actual ROI size
        p_can->camera_infos_[stream_id]->width = p_can->roi_.width;
        p_can->camera_infos_[stream_id]->height = p_can->roi_.height;
      }

      p_can->cam_pubs_[stream_id].publish(msg_ptr, p_can->camera_infos_[stream_id]);

      // optionally publish the extended camera info for this stream channel
      if (p_can->pub_ext_camera_info_)
      {
        ExtendedCameraInfo extended_camera_info_msg;
        p_can->extended_camera_info_mutex_.lock();
        arv_camera_gv_select_stream_channel(p_can->p_camera_, stream_id);
        extended_camera_info_msg.camera_info = *(p_can->camera_infos_[stream_id]);
        p_can->fillExtendedCameraInfoMessage(extended_camera_info_msg);
        p_can->extended_camera_info_mutex_.unlock();
        p_can->extended_camera_info_pubs_[stream_id].publish(extended_camera_info_msg);
      }

      // check PTP status, camera cannot recover from "Faulty" by itself
      if (p_can->use_ptp_stamp_)
        p_can->resetPtpClock();
    }
    else
    {
      if (arv_buffer_get_status(p_buffer) != ARV_BUFFER_STATUS_SUCCESS)
      {
        ROS_WARN("(%s) Frame error: %s", frame_id.c_str(), szBufferStatusFromInt[arv_buffer_get_status(p_buffer)]);
      }
      // hand the unpublished buffer straight back to the stream for reuse
      arv_stream_push_buffer(p_stream, p_buffer);
    }
  }

  // publish current lighting settings if this camera is configured as master
  if (p_can->config_.AutoMaster)
  {
    p_can->syncAutoParameters();
    p_can->auto_pub_.publish(p_can->auto_params_);
  }
}
1515 
1516 void CameraAravisNodelet::fillExtendedCameraInfoMessage(ExtendedCameraInfo& msg)
1517 {
1518  const char* vendor_name = arv_camera_get_vendor_name(p_camera_);
1519 
1520  if (strcmp("Basler", vendor_name) == 0)
1521  {
1522  msg.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTimeAbs");
1523  }
1524  else if (implemented_features_["ExposureTime"])
1525  {
1526  msg.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTime");
1527  }
1528 
1529  if (strcmp("Basler", vendor_name) == 0)
1530  {
1531  msg.gain = static_cast<float>(arv_device_get_integer_feature_value(p_device_, "GainRaw"));
1532  }
1533  else if (implemented_features_["Gain"])
1534  {
1535  msg.gain = arv_device_get_float_feature_value(p_device_, "Gain");
1536  }
1537  if (strcmp("Basler", vendor_name) == 0)
1538  {
1539  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
1540  msg.black_level = static_cast<float>(arv_device_get_integer_feature_value(p_device_, "BlackLevelRaw"));
1541  }
1542  else if (strcmp("JAI Corporation", vendor_name) == 0)
1543  {
1544  // Reading the black level register for both streams of the JAI FS 3500D takes too long.
1545  // The frame rate the drops below 10 fps.
1546  msg.black_level = 0;
1547  }
1548  else
1549  {
1550  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
1551  msg.black_level = arv_device_get_float_feature_value(p_device_, "BlackLevel");
1552  }
1553 
1554  // White balance as TIS is providing
1555  if (strcmp("The Imaging Source Europe GmbH", vendor_name) == 0)
1556  {
1557  msg.white_balance_red = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceRedRegister") / 255.;
1558  msg.white_balance_green = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceGreenRegister") / 255.;
1559  msg.white_balance_blue = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceBlueRegister") / 255.;
1560  }
1561  // the JAI cameras become too slow when reading out the DigitalRed and DigitalBlue values
1562  // the white balance is adjusted by adjusting the Gain values for Red and Blue pixels
1563  else if (strcmp("JAI Corporation", vendor_name) == 0)
1564  {
1565  msg.white_balance_red = 1.0;
1566  msg.white_balance_green = 1.0;
1567  msg.white_balance_blue = 1.0;
1568  }
1569  // the Basler cameras use the 'BalanceRatioAbs' keyword instead
1570  else if (strcmp("Basler", vendor_name) == 0)
1571  {
1572  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
1573  msg.white_balance_red = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
1574  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
1575  msg.white_balance_green = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
1576  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
1577  msg.white_balance_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
1578  }
1579  // the standard way
1580  else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
1581  {
1582  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
1583  msg.white_balance_red = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
1584  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
1585  msg.white_balance_green = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
1586  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
1587  msg.white_balance_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
1588  }
1589 
1590  if (strcmp("Basler", vendor_name) == 0)
1591  {
1592  msg.temperature = static_cast<float>(arv_device_get_float_feature_value(p_device_, "TemperatureAbs"));
1593  }
1594  else if (implemented_features_["DeviceTemperature"])
1595  {
1596  msg.temperature = arv_device_get_float_feature_value(p_device_, "DeviceTemperature");
1597  }
1598 }
1599 
1600 void CameraAravisNodelet::controlLostCallback(ArvDevice* p_gv_device, gpointer can_instance)
1601 {
1602  // CameraAravisNodelet *p_can = (CameraAravisNodelet*)can_instance;
1603  // ROS_ERROR("Control to aravis device lost.\n\t> Nodelet name: %s."
1604  // "\n\t> Shutting down. Allowing for respawn.",
1605  // p_can->getName().c_str());
1606 
1607  // p_can->shutdown_trigger_.start();
1608 }
1609 
{
  // Worker loop: periodically issue the "TriggerSoftware" command on the
  // camera at config_.softwaretriggerrate Hz while at least one image topic
  // has subscribers. Runs until software_trigger_active_ is cleared or ROS
  // shuts down.
  software_trigger_active_ = true;
  ROS_INFO("Software trigger started.");
  std::chrono::system_clock::time_point next_time = std::chrono::system_clock::now();
  while (ros::ok() && software_trigger_active_)
  {
    // schedule the next trigger instant from the configured rate
    next_time += std::chrono::milliseconds(size_t(std::round(1000.0 / config_.softwaretriggerrate)));
    // NOTE(review): the lambda parameter list of the predicate below is not
    // visible in this listing (presumably a CameraPublisher argument named
    // 'pub') -- confirm against the full source.
    if (std::any_of(cam_pubs_.begin(), cam_pubs_.end(),
        { return pub.getNumSubscribers() > 0; }))
    {
      arv_device_execute_command(p_device_, "TriggerSoftware");
    }
    if (next_time > std::chrono::system_clock::now())
    {
      std::this_thread::sleep_until(next_time);
    }
    else
    {
      // we fell behind schedule; resynchronize to "now"
      ROS_WARN("Camera Aravis: Missed a software trigger event.");
      next_time = std::chrono::system_clock::now();
    }
  }
  ROS_INFO("Software trigger stopped.");
}
1636 
1637 void CameraAravisNodelet::publishTfLoop(double rate)
1638 {
1639  // Publish optical transform for the camera
1640  ROS_WARN("Publishing dynamic camera transforms (/tf) at %g Hz", rate);
1641 
1642  tf_thread_active_ = true;
1643 
1644  ros::Rate loop_rate(rate);
1645 
1646  while (ros::ok() && tf_thread_active_)
1647  {
1648  // Update the header for publication
1649  tf_optical_.header.stamp = ros::Time::now();
1650  ++tf_optical_.header.seq;
1651  p_tb_->sendTransform(tf_optical_);
1652 
1653  loop_rate.sleep();
1654  }
1655 }
1656 
// Periodically reads the GenICam features configured in diagnostic_features_
// (a YAML list of {FeatureName, Type, [Selectors]} entries) and publishes them
// as key/value pairs on diagnostic_pub_, until diagnostic_thread_active_ is
// cleared or ROS shuts down.
// @param rate  publish frequency in Hz.
// NOTE(review): signature reconstructed from the documentation index (it is
// elided in this listing) — confirm against the original source.
void CameraAravisNodelet::readAndPublishCameraDiagnostics(double rate) const
{
  ROS_INFO("Publishing camera diagnostics at %g Hz", rate);

  ros::Rate loop_rate(rate);

  CameraDiagnostics camDiagnosticMsg;
  camDiagnosticMsg.header.frame_id = config_.frame_id;

  // function to get feature value at given name and of given type as string
  auto getFeatureValueAsStrFn = [&](const std::string& name, const std::string& type)
      -> std::string
  {
    std::string valueStr = "";

    if (type == "float")
    {
      valueStr = std::to_string(arv_device_get_float_feature_value(p_device_, name.c_str()));
    }
    else if (type == "int")
    {
      valueStr = std::to_string(arv_device_get_integer_feature_value(p_device_, name.c_str()));
    }
    else if (type == "bool")
    {
      valueStr = arv_device_get_boolean_feature_value(p_device_, name.c_str()) ? "true" : "false";
    }
    else
    {
      // Any other type string is treated as a GenICam string feature; guard
      // against a NULL return before constructing the std::string.
      const char* retFeatVal = arv_device_get_string_feature_value(p_device_, name.c_str());
      valueStr = (retFeatVal) ? std::string(retFeatVal) : "";
    }

    return valueStr;
  };

  // function to set feature value at given name and of given type from string
  auto setFeatureValueFromStrFn = [&](const std::string& name, const std::string& type,
                                      const std::string& valueStr)
  {
    if (type == "float")
      arv_device_set_float_feature_value(p_device_, name.c_str(), std::stod(valueStr));
    else if (type == "int")
      arv_device_set_integer_feature_value(p_device_, name.c_str(), std::stoi(valueStr));
    else if (type == "bool")
      arv_device_set_boolean_feature_value(p_device_, name.c_str(),
                                           (valueStr == "true" || valueStr == "True" || valueStr == "TRUE"));
    else
      arv_device_set_string_feature_value(p_device_, name.c_str(), valueStr.c_str());
  };

  while (ros::ok() && diagnostic_thread_active_)
  {
    camDiagnosticMsg.header.stamp = ros::Time::now();
    ++camDiagnosticMsg.header.seq;

    // Rebuild the key/value list from scratch on every cycle.
    camDiagnosticMsg.data.clear();

    int featureIdx = 1;
    for (auto featureDict : diagnostic_features_)
    {
      std::string featureName = featureDict["FeatureName"].IsDefined()
                                    ? featureDict["FeatureName"].as<std::string>()
                                    : "";
      std::string featureType = featureDict["Type"].IsDefined()
                                    ? featureDict["Type"].as<std::string>()
                                    : "";

      if (featureName.empty() || featureType.empty())
      {
        ROS_WARN_ONCE(
            "Diagnostic feature at index %i does not have a field 'FeatureName' or 'Type'.",
            featureIdx);
        ROS_WARN_ONCE("Diagnostic feature will be skipped.");
      }
      else
      {
        // convert type string to lower case
        std::transform(featureType.begin(), featureType.end(), featureType.begin(),
                       [](unsigned char c)
                       { return std::tolower(c); });

        // if feature name is found in implemented_feature list and if enabled
        if (implemented_features_.find(featureName) != implemented_features_.end() &&
            implemented_features_.at(featureName))
        {
          diagnostic_msgs::KeyValue kvPair;

          // if 'selectors' which correspond to the featureName are defined
          if (featureDict["Selectors"].IsDefined() && featureDict["Selectors"].size() > 0)
          {
            int selectorIdx = 1;
            for (auto selectorDict : featureDict["Selectors"])
            {
              std::string selectorFeatureName = selectorDict["FeatureName"].IsDefined()
                                                    ? selectorDict["FeatureName"].as<std::string>()
                                                    : "";
              std::string selectorType = selectorDict["Type"].IsDefined()
                                             ? selectorDict["Type"].as<std::string>()
                                             : "";
              std::string selectorValue = selectorDict["Value"].IsDefined()
                                              ? selectorDict["Value"].as<std::string>()
                                              : "";

              if (selectorFeatureName.empty() || selectorType.empty() || selectorValue.empty())
              {
                ROS_WARN_ONCE(
                    "Diagnostic feature selector at index %i of feature at index %i does not have a "
                    "field 'FeatureName', 'Type' or 'Value'.",
                    selectorIdx, featureIdx);
                ROS_WARN_ONCE("Diagnostic feature will be skipped.");
              }
              else
              {
                if (implemented_features_.find(selectorFeatureName) != implemented_features_.end() &&
                    implemented_features_.at(selectorFeatureName))
                {
                  // First select the sub-feature, then read the main feature:
                  // the key is suffixed with the selector value to keep the
                  // per-selector readings distinguishable.
                  setFeatureValueFromStrFn(selectorFeatureName, selectorType, selectorValue);

                  kvPair.key = featureName + "-" + selectorValue;
                  kvPair.value = getFeatureValueAsStrFn(featureName, featureType);

                  camDiagnosticMsg.data.push_back(diagnostic_msgs::KeyValue(kvPair));
                }
                else
                {
                  ROS_WARN_ONCE("Diagnostic feature selector with name '%s' is not implemented.",
                                selectorFeatureName.c_str());
                }

                // NOTE(review): selectorIdx is only incremented on this branch,
                // so malformed selectors do not advance the index reported in
                // the warning above — confirm whether that is intended.
                selectorIdx++;
              }
            }
          }
          else
          {
            // No selectors: publish the feature under its plain name.
            kvPair.key = featureName;
            kvPair.value = getFeatureValueAsStrFn(featureName, featureType);

            camDiagnosticMsg.data.push_back(diagnostic_msgs::KeyValue(kvPair));
          }
        }
        else
        {
          ROS_WARN_ONCE("Diagnostic feature with name '%s' is not implemented.",
                        featureName.c_str());
        }
      }

      featureIdx++;
    }

    diagnostic_pub_.publish(camDiagnosticMsg);

    loop_rate.sleep();
  }
}
1814 
// Walks the camera's GenICam node tree and records, for every feature node
// found, whether it is both implemented and currently available on this
// device. Results are stored in implemented_features_ (name -> usable).
// NOTE(review): signature reconstructed from the documentation index.
void CameraAravisNodelet::discoverFeatures()
{
  implemented_features_.clear();
  if (!p_device_)
    return;

  // get the root node of genicam description
  ArvGc* gc = arv_device_get_genicam(p_device_);
  if (!gc)
    return;

  // Depth-first traversal of the DOM tree, starting at the "Root" node.
  std::list<ArvDomNode*> todo;
  todo.push_front((ArvDomNode*)arv_gc_get_node(gc, "Root"));

  while (!todo.empty())
  {
    // get next entry
    ArvDomNode* node = todo.front();
    todo.pop_front();
    const std::string name(arv_dom_node_get_node_name(node));

    // Do the indirection: node names starting with 'p' reference another node
    // by name (the referenced node's name is the text of the first child);
    // follow the reference instead of descending into this node.
    if (name[0] == 'p')
    {
      if (name.compare("pInvalidator") == 0)
      {
        continue;
      }
      ArvDomNode* inode = (ArvDomNode*)arv_gc_get_node(gc,
                                                       arv_dom_node_get_node_value(arv_dom_node_get_first_child(node)));
      if (inode)
        todo.push_front(inode);
      continue;
    }

    // check for implemented feature
    if (ARV_IS_GC_FEATURE_NODE(node))
    {
      // if (!(ARV_IS_GC_CATEGORY(node) || ARV_IS_GC_ENUM_ENTRY(node) /*|| ARV_IS_GC_PORT(node)*/)) {
      ArvGcFeatureNode* fnode = ARV_GC_FEATURE_NODE(node);
      const std::string fname(arv_gc_feature_node_get_name(fnode));
      // A feature is usable only if the device reports it as both available
      // and implemented; GError out-params are deliberately ignored here.
      const bool usable = arv_gc_feature_node_is_available(fnode, NULL) && arv_gc_feature_node_is_implemented(fnode, NULL);

      ROS_INFO_STREAM_COND(verbose_, "Feature " << fname << " is " << (usable ? "usable" : "not usable"));
      implemented_features_.emplace(fname, usable);
      //}
    }

    // if (ARV_IS_GC_PROPERTY_NODE(node)) {
    //   ArvGcPropertyNode* pnode = ARV_GC_PROPERTY_NODE(node);
    //   const std::string pname(arv_gc_property_node_get_name(pnode));
    //   ROS_INFO_STREAM("Property " << pname << " found");
    // }

    // add children in todo-list
    ArvDomNodeList* children = arv_dom_node_get_child_nodes(node);
    const uint l = arv_dom_node_list_get_length(children);
    for (uint i = 0; i < l; ++i)
    {
      todo.push_front(arv_dom_node_list_get_item(children, i));
    }
  }
}
1878 
1879 void CameraAravisNodelet::parseStringArgs(std::string in_arg_string, std::vector<std::string>& out_args)
1880 {
1881  size_t array_start = 0;
1882  size_t array_end = in_arg_string.length();
1883  if (array_start != std::string::npos && array_end != std::string::npos)
1884  {
1885  // parse the string into an array of parameters
1886  std::stringstream ss(in_arg_string.substr(array_start, array_end - array_start));
1887  while (ss.good())
1888  {
1889  std::string temp;
1890  getline(ss, temp, ',');
1891  boost::trim_left(temp);
1892  boost::trim_right(temp);
1893  out_args.push_back(temp);
1894  }
1895  }
1896  else
1897  {
1898  // add just the one argument onto the vector
1899  out_args.push_back(in_arg_string);
1900  }
1901 }
1902 
1903 // WriteCameraFeaturesFromRosparam()
1904 // Read ROS parameters from this node's namespace, and see if each parameter has a similarly named & typed feature in the camera. Then set the
1905 // camera feature to that value. For example, if the parameter camnode/Gain is set to 123.0, then we'll write 123.0 to the Gain feature
1906 // in the camera.
1907 //
1908 // Note that the datatype of the parameter *must* match the datatype of the camera feature, and this can be determined by
1909 // looking at the camera's XML file. Camera enum's are string parameters, camera bools are false/true parameters (not 0/1),
1910 // integers are integers, doubles are doubles, etc.
1911 //
// Reads every ROS parameter in this nodelet's private namespace and, when the
// camera exposes an implemented GenICam feature of the same name, writes the
// parameter value to that feature. The rosparam type must match the feature
// type (bools and ints -> integer feature, doubles -> float feature, strings
// -> string feature / enum).
// NOTE(review): the signature and the 'iter' declaration are reconstructed
// from the documentation index (elided in this listing) — confirm.
void CameraAravisNodelet::writeCameraFeaturesFromRosparam()
{
  XmlRpc::XmlRpcValue xml_rpc_params;
  XmlRpc::XmlRpcValue::iterator iter;
  ArvGcNode* p_gc_node;
  // NOTE(review): 'error' is passed to arv_gc_feature_node_is_implemented but
  // is never checked or freed afterwards; a set GError would leak.
  GError* error = NULL;

  getPrivateNodeHandle().getParam(this->getName(), xml_rpc_params);

  if (xml_rpc_params.getType() == XmlRpc::XmlRpcValue::TypeStruct)
  {
    for (iter = xml_rpc_params.begin(); iter != xml_rpc_params.end(); iter++)
    {
      std::string key = iter->first;

      p_gc_node = arv_device_get_feature(p_device_, key.c_str());
      if (p_gc_node && arv_gc_feature_node_is_implemented(ARV_GC_FEATURE_NODE(p_gc_node), &error))
      {
        // unsigned long typeValue = arv_gc_feature_node_get_value_type((ArvGcFeatureNode *)pGcNode);
        // ROS_INFO("%s cameratype=%lu, rosparamtype=%d", key.c_str(), typeValue, static_cast<int>(iter->second.getType()));

        // We'd like to check the value types too, but typeValue is often given as G_TYPE_INVALID, so ignore it.
        switch (iter->second.getType())
        {
          case XmlRpc::XmlRpcValue::TypeBoolean: // if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeBoolean))// && (typeValue==G_TYPE_INT64))
          {
            // Booleans are written as 0/1 to an integer feature.
            int value = (bool)iter->second;
            arv_device_set_integer_feature_value(p_device_, key.c_str(), value);
            ROS_INFO("Read parameter (bool) %s: %d", key.c_str(), value);
          }
          break;

          case XmlRpc::XmlRpcValue::TypeInt: // if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeInt))// && (typeValue==G_TYPE_INT64))
          {
            int value = (int)iter->second;
            arv_device_set_integer_feature_value(p_device_, key.c_str(), value);
            ROS_INFO("Read parameter (int) %s: %d", key.c_str(), value);
          }
          break;

          case XmlRpc::XmlRpcValue::TypeDouble: // if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeDouble))// && (typeValue==G_TYPE_DOUBLE))
          {
            double value = (double)iter->second;
            arv_device_set_float_feature_value(p_device_, key.c_str(), value);
            ROS_INFO("Read parameter (float) %s: %f", key.c_str(), value);
          }
          break;

          case XmlRpc::XmlRpcValue::TypeString: // if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeString))// && (typeValue==G_TYPE_STRING))
          {
            std::string value = (std::string)iter->second;
            arv_device_set_string_feature_value(p_device_, key.c_str(), value.c_str());
            ROS_INFO("Read parameter (string) %s: %s", key.c_str(), value.c_str());
          }
          break;

          // NOTE(review): the following fall-through case labels are
          // reconstructed from the documentation index — confirm against the
          // original source.
          case XmlRpc::XmlRpcValue::TypeInvalid:
          case XmlRpc::XmlRpcValue::TypeDateTime:
          case XmlRpc::XmlRpcValue::TypeBase64:
          case XmlRpc::XmlRpcValue::TypeArray:
          default:
            ROS_WARN("Unhandled rosparam type in writeCameraFeaturesFromRosparam()");
        }
      }
    }
  }
}
1980 
// Timer callback (fired via shutdown_trigger_): unloads this nodelet through
// its manager's unload service, then shuts the ROS node down.
// NOTE(review): signature reconstructed from the documentation index.
void CameraAravisNodelet::onShutdownTriggered(const ros::TimerEvent&)
{
  nodelet::NodeletUnload unload_service;
  unload_service.request.name = this->getName();
  ros::service::call(this->getName() + "/unload_nodelet", unload_service);
  ROS_INFO("Nodelet unloaded.");

  // NOTE(review): one line is elided in this listing; judging by the index
  // entries (shutdown_delay_s_, ros::Duration::sleep) it is presumably the
  // delay below — confirm against the original source.
  ros::Duration(shutdown_delay_s_).sleep();

  ros::shutdown();
  ROS_INFO("Shut down successful.");
}
1993 
1994 } // end namespace camera_aravis
camera_aravis::CameraAravisNodelet::ptp_status_feature_
std::string ptp_status_feature_
Definition: camera_aravis_nodelet.h:120
camera_aravis::CameraAravisNodelet::tf_thread_active_
std::atomic_bool tf_thread_active_
Definition: camera_aravis_nodelet.h:234
response
const std::string response
camera_aravis::CameraAravisNodelet::auto_sub_
ros::Subscriber auto_sub_
Definition: camera_aravis_nodelet.h:245
camera_aravis::CameraAravisNodelet::setStringFeatureCallback
bool setStringFeatureCallback(camera_aravis::set_string_feature_value::Request &request, camera_aravis::set_string_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:759
camera_aravis::CameraAravisNodelet::tf_dyn_thread_
std::thread tf_dyn_thread_
Definition: camera_aravis_nodelet.h:233
camera_aravis::CameraAravisNodelet::rosConnectCallback
void rosConnectCallback()
Definition: camera_aravis_nodelet.cpp:1401
camera_aravis::CameraAravisNodelet::shutdown_delay_s_
double shutdown_delay_s_
Definition: camera_aravis_nodelet.h:264
camera_aravis::CameraAravisNodelet::diagnostic_publish_rate_
double diagnostic_publish_rate_
Definition: camera_aravis_nodelet.h:237
camera_aravis::CameraAravisNodelet::newBufferReady
static void newBufferReady(ArvStream *p_stream, CameraAravisNodelet *p_can, std::string frame_id, size_t stream_id)
Definition: camera_aravis_nodelet.cpp:1438
camera_aravis::CameraAravisNodelet::width
int32_t width
Definition: camera_aravis_nodelet.h:270
camera_aravis::CameraAravisNodelet::oneShotWhiteBalanceCallback
bool oneShotWhiteBalanceCallback(camera_aravis::one_shot_white_balance::Request &request, camera_aravis::one_shot_white_balance::Response &response)
Definition: camera_aravis_nodelet.cpp:798
ros::service::call
bool call(const std::string &service_name, MReq &req, MRes &res)
camera_aravis::CameraAravisNodelet::diagnostic_thread_
std::thread diagnostic_thread_
Definition: camera_aravis_nodelet.h:240
ros::Publisher
camera_aravis::CameraAravisNodelet::fillExtendedCameraInfoMessage
void fillExtendedCameraInfoMessage(ExtendedCameraInfo &msg)
Definition: camera_aravis_nodelet.cpp:1537
nodelet::Nodelet::getNodeHandle
ros::NodeHandle & getNodeHandle() const
camera_aravis::CameraAravisNodelet::setExtendedCameraInfo
void setExtendedCameraInfo(std::string channel_name, size_t stream_id)
Definition: camera_aravis_nodelet.cpp:1117
image_transport::ImageTransport
camera_aravis::CameraAravisNodelet::p_tb_
std::unique_ptr< tf2_ros::TransformBroadcaster > p_tb_
Definition: camera_aravis_nodelet.h:231
camera_aravis::CameraAravisNodelet::get_integer_service_
ros::ServiceServer get_integer_service_
Definition: camera_aravis_nodelet.h:172
XmlRpc::XmlRpcValue::TypeBase64
TypeBase64
camera_aravis::CameraAravisNodelet::tf_optical_
geometry_msgs::TransformStamped tf_optical_
Definition: camera_aravis_nodelet.h:232
camera_aravis::CameraAravisNodelet::StreamIdData::can
CameraAravisNodelet * can
Definition: camera_aravis_nodelet.h:290
camera_aravis::CameraAravisNodelet::config_min_
Config config_min_
Definition: camera_aravis_nodelet.h:251
camera_aravis::CameraAravisNodelet::set_integer_service_
ros::ServiceServer set_integer_service_
Definition: camera_aravis_nodelet.h:184
ros::NodeHandle::getParam
bool getParam(const std::string &key, bool &b) const
ROS_INFO_STREAM_COND
#define ROS_INFO_STREAM_COND(cond, args)
camera_aravis::CameraAravisNodelet::~CameraAravisNodelet
virtual ~CameraAravisNodelet()
Definition: camera_aravis_nodelet.cpp:59
camera_aravis::szBufferStatusFromInt
static const char * szBufferStatusFromInt[]
Definition: conversion_utils.h:57
camera_aravis::CameraAravisNodelet::extended_camera_info_pubs_
std::vector< ros::Publisher > extended_camera_info_pubs_
Definition: camera_aravis_nodelet.h:248
ros::spinOnce
ROSCPP_DECL void spinOnce()
XmlRpc::XmlRpcValue::TypeInt
TypeInt
XmlRpc::XmlRpcValue::TypeInvalid
TypeInvalid
camera_aravis::CameraAravisNodelet::software_trigger_thread_
std::thread software_trigger_thread_
Definition: camera_aravis_nodelet.h:257
camera_aravis::CameraAravisNodelet::StreamIdData::stream_id
size_t stream_id
Definition: camera_aravis_nodelet.h:291
ros::shutdown
ROSCPP_DECL void shutdown()
tf::resolve
std::string resolve(const std::string &prefix, const std::string &frame_name)
ros::NodeHandle::advertiseService
ServiceServer advertiseService(AdvertiseServiceOptions &ops)
camera_aravis::CameraAravisNodelet::convert_formats
std::vector< ConversionFunction > convert_formats
Definition: camera_aravis_nodelet.h:134
camera_aravis::CameraAravisNodelet::syncAutoParameters
void syncAutoParameters()
Definition: camera_aravis_nodelet.cpp:995
camera_aravis::Config
CameraAravisConfig Config
Definition: camera_aravis_nodelet.h:106
camera_aravis
Definition: camera_aravis_nodelet.h:82
ros::Subscriber::shutdown
void shutdown()
camera_aravis::CameraAravisNodelet::one_shot_white_balance_service_
ros::ServiceServer one_shot_white_balance_service_
Definition: camera_aravis_nodelet.h:196
ROS_WARN_ONCE
#define ROS_WARN_ONCE(...)
camera_aravis::CameraAravisNodelet::get_boolean_service_
ros::ServiceServer get_boolean_service_
Definition: camera_aravis_nodelet.h:181
camera_aravis::CameraAravisNodelet::guid_
std::string guid_
Definition: camera_aravis_nodelet.h:116
camera_aravis::CameraAravisNodelet::p_streams_
std::vector< ArvStream * > p_streams_
Definition: camera_aravis_nodelet.h:130
camera_aravis::CameraAravisNodelet::pub_tf_optical_
bool pub_tf_optical_
Definition: camera_aravis_nodelet.h:124
camera_aravis::CameraAravisNodelet::getIntegerFeatureCallback
bool getIntegerFeatureCallback(camera_aravis::get_integer_feature_value::Request &request, camera_aravis::get_integer_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:706
camera_aravis::CameraAravisNodelet::p_camera_
ArvCamera * p_camera_
Definition: camera_aravis_nodelet.h:126
ros::SubscriberStatusCallback
boost::function< void(const SingleSubscriberPublisher &)> SubscriberStatusCallback
camera_aravis::CameraAravisNodelet::diagnostic_pub_
ros::Publisher diagnostic_pub_
Definition: camera_aravis_nodelet.h:239
camera_aravis::CameraAravisNodelet::ptp_enable_feature_
std::string ptp_enable_feature_
Definition: camera_aravis_nodelet.h:119
ROS_DEBUG_STREAM
#define ROS_DEBUG_STREAM(args)
image_transport::ImageTransport::advertiseCamera
CameraPublisher advertiseCamera(const std::string &base_topic, uint32_t queue_size, bool latch=false)
nodelet::Nodelet::getPrivateNodeHandle
ros::NodeHandle & getPrivateNodeHandle() const
ros::Publisher::publish
void publish(const boost::shared_ptr< M > &message) const
camera_aravis::CameraAravisNodelet::CameraAravisNodelet
CameraAravisNodelet()
Definition: camera_aravis_nodelet.cpp:55
camera_aravis::CameraAravisNodelet::spawnStream
void spawnStream()
Definition: camera_aravis_nodelet.cpp:609
camera_aravis::CameraAravisNodelet
Definition: camera_aravis_nodelet.h:108
ros::NodeHandle::advertise
Publisher advertise(AdvertiseOptions &ops)
ros::ok
ROSCPP_DECL bool ok()
ROS_WARN_STREAM_ONCE
#define ROS_WARN_STREAM_ONCE(args)
camera_aravis::CameraAravisNodelet::getStringFeatureCallback
bool getStringFeatureCallback(camera_aravis::get_string_feature_value::Request &request, camera_aravis::get_string_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:752
XmlRpc::XmlRpcValue::TypeStruct
TypeStruct
camera_aravis::CameraAravisNodelet::getBooleanFeatureCallback
bool getBooleanFeatureCallback(camera_aravis::get_boolean_feature_value::Request &request, camera_aravis::get_boolean_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:775
camera_aravis::CameraAravisNodelet::publishTfLoop
void publishTfLoop(double rate)
Definition: camera_aravis_nodelet.cpp:1658
camera_aravis::CameraAravisNodelet::diagnostic_yaml_url_
std::string diagnostic_yaml_url_
Definition: camera_aravis_nodelet.h:236
class_list_macros.h
camera_aravis::CameraAravisNodelet::shutdown_trigger_
ros::Timer shutdown_trigger_
Definition: camera_aravis_nodelet.h:263
tf2_ros::StaticTransformBroadcaster
camera_aravis::CameraAravisNodelet::sensors_
std::vector< Sensor * > sensors_
Definition: camera_aravis_nodelet.h:286
ros::SingleSubscriberPublisher
ROS_WARN_STREAM
#define ROS_WARN_STREAM(args)
camera_aravis::CameraAravisNodelet::camera_infos_
std::vector< sensor_msgs::CameraInfoPtr > camera_infos_
Definition: camera_aravis_nodelet.h:228
camera_aravis::CameraAravisNodelet::get_string_service_
ros::ServiceServer get_string_service_
Definition: camera_aravis_nodelet.h:178
XmlRpc::XmlRpcValue::TypeDouble
TypeDouble
camera_aravis::CameraBufferPool::Ptr
boost::shared_ptr< CameraBufferPool > Ptr
Definition: camera_buffer_pool.h:91
camera_aravis::CameraAravisNodelet::config_max_
Config config_max_
Definition: camera_aravis_nodelet.h:252
PLUGINLIB_EXPORT_CLASS
#define PLUGINLIB_EXPORT_CLASS(class_type, base_class_type)
camera_aravis::CameraAravisNodelet::p_device_
ArvDevice * p_device_
Definition: camera_aravis_nodelet.h:127
ros::Publisher::shutdown
void shutdown()
camera_aravis::CameraAravisNodelet::spawn_stream_thread_
std::thread spawn_stream_thread_
Definition: camera_aravis_nodelet.h:255
camera_aravis::CameraAravisNodelet::config_
Config config_
Definition: camera_aravis_nodelet.h:250
camera_aravis::CameraAravisNodelet::p_camera_info_node_handles_
std::vector< std::unique_ptr< ros::NodeHandle > > p_camera_info_node_handles_
Definition: camera_aravis_nodelet.h:227
tf::getPrefixParam
std::string getPrefixParam(ros::NodeHandle &nh)
camera_aravis::CameraAravisNodelet::setIntegerFeatureCallback
bool setIntegerFeatureCallback(camera_aravis::set_integer_feature_value::Request &request, camera_aravis::set_integer_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:713
camera_aravis::CameraAravisNodelet::reconfigure_server_
std::unique_ptr< dynamic_reconfigure::Server< Config > > reconfigure_server_
Definition: camera_aravis_nodelet.h:222
camera_aravis::CameraAravisNodelet::tuneGvStream
void tuneGvStream(ArvGvStream *p_stream)
Definition: camera_aravis_nodelet.cpp:1137
XmlRpc::XmlRpcValue::TypeString
TypeString
camera_aravis::CameraAravisNodelet::onShutdownTriggered
void onShutdownTriggered(const ros::TimerEvent &)
Definition: camera_aravis_nodelet.cpp:2002
camera_aravis::CameraAravisNodelet::set_boolean_service_
ros::ServiceServer set_boolean_service_
Definition: camera_aravis_nodelet.h:193
camera_aravis::CameraAravisNodelet::writeCameraFeaturesFromRosparam
void writeCameraFeaturesFromRosparam()
Definition: camera_aravis_nodelet.cpp:1933
camera_aravis::CameraAravisNodelet::diagnostic_features_
YAML::Node diagnostic_features_
Definition: camera_aravis_nodelet.h:238
camera_aravis::CameraAravisNodelet::newBufferReadyCallback
static void newBufferReadyCallback(ArvStream *p_stream, gpointer can_instance)
Definition: camera_aravis_nodelet.cpp:1418
camera_info_manager::CameraInfoManager
camera_aravis::CameraAravisNodelet::stream_names_
std::vector< std::string > stream_names_
Definition: camera_aravis_nodelet.h:131
image_transport::CameraPublisher
ROS_WARN
#define ROS_WARN(...)
camera_aravis::CameraAravisNodelet::p_camera_info_managers_
std::vector< std::unique_ptr< camera_info_manager::CameraInfoManager > > p_camera_info_managers_
Definition: camera_aravis_nodelet.h:226
camera_aravis::CameraAravisNodelet::set_float_service_
ros::ServiceServer set_float_service_
Definition: camera_aravis_nodelet.h:187
camera_aravis::CameraAravisNodelet::discoverFeatures
void discoverFeatures()
Definition: camera_aravis_nodelet.cpp:1836
camera_aravis_nodelet.h
ros::NodeHandle::subscribe
Subscriber subscribe(const std::string &topic, uint32_t queue_size, const boost::function< void(C)> &callback, const VoidConstPtr &tracked_object=VoidConstPtr(), const TransportHints &transport_hints=TransportHints())
ros::TimerEvent
ROS_INFO_STREAM
#define ROS_INFO_STREAM(args)
camera_aravis::CameraAravisNodelet::set_string_service_
ros::ServiceServer set_string_service_
Definition: camera_aravis_nodelet.h:190
ros::names::remap
ROSCPP_DECL std::string remap(const std::string &name)
camera_aravis::CameraAravisNodelet::spawning_
std::atomic< bool > spawning_
Definition: camera_aravis_nodelet.h:254
camera_aravis::CameraAravisNodelet::cam_pubs_
std::vector< image_transport::CameraPublisher > cam_pubs_
Definition: camera_aravis_nodelet.h:225
XmlRpc::XmlRpcValue::iterator
ValueStruct::iterator iterator
camera_aravis::CameraAravisNodelet::roi_
struct camera_aravis::CameraAravisNodelet::@0 roi_
XmlRpc::XmlRpcValue::end
iterator end()
camera_aravis::CameraAravisNodelet::setBooleanFeatureCallback
bool setBooleanFeatureCallback(camera_aravis::set_boolean_feature_value::Request &request, camera_aravis::set_boolean_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:782
ros::Rate::sleep
bool sleep()
XmlRpc::XmlRpcValue::getType
const Type & getType() const
image_transport::SingleSubscriberPublisher
camera_aravis::CameraAravisNodelet::auto_params_
CameraAutoInfo auto_params_
Definition: camera_aravis_nodelet.h:243
XmlRpc::XmlRpcValue::TypeArray
TypeArray
camera_aravis::CameraAravisNodelet::rosReconfigureCallback
void rosReconfigureCallback(Config &config, uint32_t level)
Definition: camera_aravis_nodelet.cpp:1164
camera_aravis::CameraAravisNodelet::pub_ext_camera_info_
bool pub_ext_camera_info_
Definition: camera_aravis_nodelet.h:123
camera_aravis::CameraAravisNodelet::verbose_
bool verbose_
Definition: camera_aravis_nodelet.h:115
camera_aravis::CameraAravisNodelet::StreamIdData
Definition: camera_aravis_nodelet.h:288
camera_aravis::CameraAravisNodelet::parseStringArgs
static void parseStringArgs(std::string in_arg_string, std::vector< std::string > &out_args)
Definition: camera_aravis_nodelet.cpp:1900
nodelet::Nodelet
camera_aravis::CameraAravisNodelet::auto_pub_
ros::Publisher auto_pub_
Definition: camera_aravis_nodelet.h:244
camera_aravis::CameraAravisNodelet::onInit
virtual void onInit() override
Definition: camera_aravis_nodelet.cpp:124
camera_aravis::CameraAravisNodelet::reconfigure_mutex_
boost::recursive_mutex reconfigure_mutex_
Definition: camera_aravis_nodelet.h:223
camera_aravis::CameraAravisNodelet::diagnostic_thread_active_
std::atomic_bool diagnostic_thread_active_
Definition: camera_aravis_nodelet.h:241
camera_aravis::CameraAravisNodelet::setFloatFeatureCallback
bool setFloatFeatureCallback(camera_aravis::set_float_feature_value::Request &request, camera_aravis::set_float_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:736
camera_aravis::CameraAravisNodelet::setAutoMaster
void setAutoMaster(bool value)
Definition: camera_aravis_nodelet.cpp:1065
ROS_ERROR
#define ROS_ERROR(...)
camera_aravis::CameraAravisNodelet::num_streams_
gint num_streams_
Definition: camera_aravis_nodelet.h:129
tf2_ros::TransformBroadcaster
camera_aravis::CameraAravisNodelet::softwareTriggerLoop
void softwareTriggerLoop()
Definition: camera_aravis_nodelet.cpp:1631
camera_aravis::CameraAravisNodelet::controlLostCallback
static void controlLostCallback(ArvDevice *p_gv_device, gpointer can_instance)
Definition: camera_aravis_nodelet.cpp:1621
camera_aravis::CameraAravisNodelet::get_float_service_
ros::ServiceServer get_float_service_
Definition: camera_aravis_nodelet.h:175
nodelet::Nodelet::getName
const std::string & getName() const
ros::NodeHandle::param
T param(const std::string &param_name, const T &default_val) const
camera_aravis::CameraAravisNodelet::software_trigger_active_
std::atomic_bool software_trigger_active_
Definition: camera_aravis_nodelet.h:258
camera_aravis::CameraAravisNodelet::ptp_set_cmd_feature_
std::string ptp_set_cmd_feature_
Definition: camera_aravis_nodelet.h:121
image_transport::SubscriberStatusCallback
boost::function< void(const SingleSubscriberPublisher &)> SubscriberStatusCallback
ros::Rate
camera_aravis::CameraAravisNodelet::getFloatFeatureCallback
bool getFloatFeatureCallback(camera_aravis::get_float_feature_value::Request &request, camera_aravis::get_float_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:729
ros::Duration::sleep
bool sleep() const
camera_aravis::CameraAravisNodelet::p_stb_
std::unique_ptr< tf2_ros::StaticTransformBroadcaster > p_stb_
Definition: camera_aravis_nodelet.h:230
camera_aravis::CameraAravisNodelet::p_buffer_pools_
std::vector< CameraBufferPool::Ptr > p_buffer_pools_
Definition: camera_aravis_nodelet.h:132
XmlRpc::XmlRpcValue::begin
iterator begin()
XmlRpc::XmlRpcValue::TypeBoolean
TypeBoolean
camera_aravis::CameraAravisNodelet::implemented_features_
std::unordered_map< std::string, const bool > implemented_features_
Definition: camera_aravis_nodelet.h:261
XmlRpc::XmlRpcValue::TypeDateTime
TypeDateTime
camera_aravis::CameraAravisNodelet::resetPtpClock
void resetPtpClock()
Definition: camera_aravis_nodelet.cpp:851
ROS_INFO
#define ROS_INFO(...)
ros::NodeHandle::createTimer
Timer createTimer(Duration period, const TimerCallback &callback, bool oneshot=false, bool autostart=true) const
camera_aravis::CameraAravisNodelet::setAutoSlave
void setAutoSlave(bool value)
Definition: camera_aravis_nodelet.cpp:1078
ros::Duration
camera_aravis::CameraAravisNodelet::height
int32_t height
Definition: camera_aravis_nodelet.h:273
camera_aravis::CONVERSIONS_DICTIONARY
const std::map< std::string, ConversionFunction > CONVERSIONS_DICTIONARY
Definition: conversion_utils.h:76
camera_aravis::CameraAravisNodelet::use_ptp_stamp_
bool use_ptp_stamp_
Definition: camera_aravis_nodelet.h:118
XmlRpc::XmlRpcValue
ros::NodeHandle
camera_aravis::CameraAravisNodelet::readAndPublishCameraDiagnostics
void readAndPublishCameraDiagnostics(double rate) const
Definition: camera_aravis_nodelet.cpp:1678
ros::Time::now
static Time now()
camera_aravis::CameraAravisNodelet::cameraAutoInfoCallback
void cameraAutoInfoCallback(const CameraAutoInfoConstPtr &msg_ptr)
Definition: camera_aravis_nodelet.cpp:887


camera_aravis
Author(s): Boitumelo Ruf, Fraunhofer IOSB , Dominik Kleiser, Fraunhofer IOSB , Dominik A. Klein, Fraunhofer FKIE , Steve Safarik, Straw Lab , Andrew Straw, Straw Lab , Floris van Breugel, van Breugel Lab
autogenerated on Wed Sep 25 2024 02:23:21