camera_aravis_nodelet.cpp
Go to the documentation of this file.
1 /****************************************************************************
2  *
3  * camera_aravis
4  *
5  * Copyright © 2022 Fraunhofer IOSB and contributors
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Library General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Library General Public License for more details.
16  *
17  * You should have received a copy of the GNU Library General Public
18  * License along with this library; if not, write to the
19  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20  * Boston, MA 02110-1301, USA.
21  *
22  ****************************************************************************/
23 
25 
28 
29 namespace camera_aravis
30 {
31 
33 {
34 }
35 
37 {
38  for(int i=0; i < p_streams_.size(); i++) {
39  if(p_streams_[i]) {
40  arv_stream_set_emit_signals(p_streams_[i], FALSE);
41  }
42  }
43 
44  spawning_ = false;
45  if (spawn_stream_thread_.joinable())
46  {
47  spawn_stream_thread_.join();
48  }
49 
51  if (software_trigger_thread_.joinable())
52  {
54  }
55 
56  tf_thread_active_ = false;
57  if (tf_dyn_thread_.joinable())
58  {
59  tf_dyn_thread_.join();
60  }
61 
62 
63  for(int i=0; i < p_streams_.size(); i++) {
64  guint64 n_completed_buffers = 0;
65  guint64 n_failures = 0;
66  guint64 n_underruns = 0;
67  arv_stream_get_statistics(p_streams_[i], &n_completed_buffers, &n_failures, &n_underruns);
68  ROS_INFO("Completed buffers = %Lu", (unsigned long long ) n_completed_buffers);
69  ROS_INFO("Failures = %Lu", (unsigned long long ) n_failures);
70  ROS_INFO("Underruns = %Lu", (unsigned long long ) n_underruns);
71  if (arv_camera_is_gv_device(p_camera_))
72  {
73  guint64 n_resent;
74  guint64 n_missing;
75  arv_gv_stream_get_statistics(reinterpret_cast<ArvGvStream*>(p_streams_[i]), &n_resent, &n_missing);
76  ROS_INFO("Resent buffers = %Lu", (unsigned long long ) n_resent);
77  ROS_INFO("Missing = %Lu", (unsigned long long ) n_missing);
78  }
79  }
80 
81 
82  if (p_device_)
83  {
84  arv_device_execute_command(p_device_, "AcquisitionStop");
85  }
86 
87  for(int i = 0; i < p_streams_.size(); i++) {
88  g_object_unref(p_streams_[i]);
89  }
90  g_object_unref(p_camera_);
91 }
92 
94 {
96 
97  // Retrieve ros parameters
98  verbose_ = pnh.param<bool>("verbose", verbose_);
99  guid_ = pnh.param<std::string>("guid", guid_); // Get the camera guid as a parameter or use the first device.
100  use_ptp_stamp_ = pnh.param<bool>("use_ptp_timestamp", use_ptp_stamp_);
101  pub_ext_camera_info_ = pnh.param<bool>("ExtendedCameraInfo", pub_ext_camera_info_); // publish an extended camera info message
102  pub_tf_optical_ = pnh.param<bool>("publish_tf", pub_tf_optical_); // should we publish tf transforms to camera optical frame?
103 
104  std::string stream_channel_args;
105  if (pnh.getParam("channel_names", stream_channel_args)) {
106  parseStringArgs(stream_channel_args, stream_names_);
107  } else {
108  stream_names_ = { "" };
109  }
110 
111  std::string pixel_format_args;
112  std::vector<std::string> pixel_formats;
113  pnh.param("PixelFormat", pixel_format_args, pixel_format_args);
114  parseStringArgs(pixel_format_args, pixel_formats);
115 
116  std::string calib_url_args;
117  std::vector<std::string> calib_urls;
118  pnh.param("camera_info_url", calib_url_args, calib_url_args);
119  parseStringArgs(calib_url_args, calib_urls);
120 
121  // Print out some useful info.
122  ROS_INFO("Attached cameras:");
123  arv_update_device_list();
124  uint n_interfaces = arv_get_n_interfaces();
125  ROS_INFO("# Interfaces: %d", n_interfaces);
126 
127  uint n_devices = arv_get_n_devices();
128  ROS_INFO("# Devices: %d", n_devices);
129  for (uint i = 0; i < n_devices; i++)
130  ROS_INFO("Device%d: %s", i, arv_get_device_id(i));
131 
132  if (n_devices == 0)
133  {
134  ROS_ERROR("No cameras detected.");
135  return;
136  }
137 
138  // Open the camera, and set it up.
139  while (!p_camera_)
140  {
141  if (guid_.empty())
142  {
143  ROS_INFO("Opening: (any)");
144  p_camera_ = arv_camera_new(NULL);
145  }
146  else
147  {
148  ROS_INFO_STREAM("Opening: " << guid_);
149  p_camera_ = arv_camera_new(guid_.c_str());
150  }
151  ros::Duration(1.0).sleep();
152  }
153 
154  p_device_ = arv_camera_get_device(p_camera_);
155  ROS_INFO("Opened: %s-%s", arv_camera_get_vendor_name(p_camera_),
156  arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber"));
157 
158  // Start the dynamic_reconfigure server.
159  reconfigure_server_.reset(new dynamic_reconfigure::Server<Config>(reconfigure_mutex_, pnh));
160  reconfigure_server_->getConfigDefault(config_);
161  reconfigure_server_->getConfigMin(config_min_);
162  reconfigure_server_->getConfigMax(config_max_);
163 
164  // See which features exist in this camera device
166 
167  // Check the number of streams for this camera
168  num_streams_ = arv_device_get_integer_feature_value(p_device_, "DeviceStreamChannelCount");
169  // if this return 0, try the deprecated GevStreamChannelCount in case this is an older camera
170  if (!num_streams_) {
171  num_streams_ = arv_device_get_integer_feature_value(p_device_, "GevStreamChannelCount");
172  }
173  // if this also returns 0, assume number of streams = 1
174  if (!num_streams_) {
175  ROS_WARN("Unable to detect number of supported stream channels.");
176  num_streams_ = 1;
177  }
178 
179  ROS_INFO("Number of supported stream channels %i.", (int) num_streams_);
180 
181  // check if every stream channel has been given a channel name
182  if (stream_names_.size() < num_streams_) {
183  num_streams_ = stream_names_.size();
184  }
185 
186  // initialize the sensor structs
187  for(int i = 0; i < num_streams_; i++) {
188  sensors_.push_back(new Sensor());
189  p_streams_.push_back(NULL);
191  p_camera_info_managers_.push_back(NULL);
192  p_camera_info_node_handles_.push_back(NULL);
193  camera_infos_.push_back(sensor_msgs::CameraInfoPtr());
196  }
197 
198  // Get parameter bounds.
199  arv_camera_get_exposure_time_bounds(p_camera_, &config_min_.ExposureTime, &config_max_.ExposureTime);
200  arv_camera_get_gain_bounds(p_camera_, &config_min_.Gain, &config_max_.Gain);
201  for(int i = 0; i < num_streams_; i++) {
202  arv_camera_gv_select_stream_channel(p_camera_,i);
203  arv_camera_get_sensor_size(p_camera_, &sensors_[i]->width, &sensors_[i]->height);
204  }
205  arv_camera_get_width_bounds(p_camera_, &roi_.width_min, &roi_.width_max);
206  arv_camera_get_height_bounds(p_camera_, &roi_.height_min, &roi_.height_max);
207  arv_camera_get_frame_rate_bounds(p_camera_, &config_min_.AcquisitionFrameRate, &config_max_.AcquisitionFrameRate);
208  if (implemented_features_["FocusPos"])
209  {
210  gint64 focus_min64, focus_max64;
211  arv_device_get_integer_feature_bounds(p_device_, "FocusPos", &focus_min64, &focus_max64);
212  config_min_.FocusPos = focus_min64;
213  config_max_.FocusPos = focus_max64;
214  }
215  else
216  {
217  config_min_.FocusPos = 0;
218  config_max_.FocusPos = 0;
219  }
220 
221  for(int i = 0; i < num_streams_; i++) {
222  arv_camera_gv_select_stream_channel(p_camera_,i);
223 
224  // Initial camera settings.
225  if (implemented_features_["ExposureTime"])
226  arv_camera_set_exposure_time(p_camera_, config_.ExposureTime);
227  else if (implemented_features_["ExposureTimeAbs"])
228  arv_device_set_float_feature_value(p_device_, "ExposureTimeAbs", config_.ExposureTime);
229  if (implemented_features_["Gain"])
230  arv_camera_set_gain(p_camera_, config_.Gain);
231  if (implemented_features_["AcquisitionFrameRateEnable"])
232  arv_device_set_integer_feature_value(p_device_, "AcquisitionFrameRateEnable", 1);
233  if (implemented_features_["AcquisitionFrameRate"])
234  arv_camera_set_frame_rate(p_camera_, config_.AcquisitionFrameRate);
235 
236  // init default to full sensor resolution
237  arv_camera_set_region (p_camera_, 0, 0, roi_.width_max, roi_.height_max);
238 
239  // Set up the triggering.
240  if (implemented_features_["TriggerMode"] && implemented_features_["TriggerSelector"])
241  {
242  arv_device_set_string_feature_value(p_device_, "TriggerSelector", "FrameStart");
243  arv_device_set_string_feature_value(p_device_, "TriggerMode", "Off");
244  }
245 
246  // possibly set or override from given parameter
248  }
249 
250  // get current state of camera for config_
251  arv_camera_get_region(p_camera_, &roi_.x, &roi_.y, &roi_.width, &roi_.height);
252  config_.AcquisitionMode =
253  implemented_features_["AcquisitionMode"] ? arv_device_get_string_feature_value(p_device_, "AcquisitionMode") :
254  "Continuous";
255  config_.AcquisitionFrameRate =
256  implemented_features_["AcquisitionFrameRate"] ? arv_camera_get_frame_rate(p_camera_) : 0.0;
257  config_.ExposureAuto =
258  implemented_features_["ExposureAuto"] ? arv_device_get_string_feature_value(p_device_, "ExposureAuto") : "Off";
259  config_.ExposureTime = implemented_features_["ExposureTime"] ? arv_camera_get_exposure_time(p_camera_) : 0.0;
260  config_.GainAuto =
261  implemented_features_["GainAuto"] ? arv_device_get_string_feature_value(p_device_, "GainAuto") : "Off";
262  config_.Gain = implemented_features_["Gain"] ? arv_camera_get_gain(p_camera_) : 0.0;
263  config_.TriggerMode =
264  implemented_features_["TriggerMode"] ? arv_device_get_string_feature_value(p_device_, "TriggerMode") : "Off";
265  config_.TriggerSource =
266  implemented_features_["TriggerSource"] ? arv_device_get_string_feature_value(p_device_, "TriggerSource") :
267  "Software";
268 
269  // get pixel format name and translate into corresponding ROS name
270  for(int i = 0; i < num_streams_; i++) {
271  arv_camera_gv_select_stream_channel(p_camera_,i);
272  std::string source_selector = "Source" + std::to_string(i);
273  arv_device_set_string_feature_value(p_device_, "SourceSelector", source_selector.c_str());
274  arv_device_set_string_feature_value(p_device_, "PixelFormat", pixel_formats[i].c_str());
275  sensors_[i]->pixel_format = std::string(arv_device_get_string_feature_value(p_device_, "PixelFormat"));
276  const auto sensor_iter = CONVERSIONS_DICTIONARY.find(sensors_[i]->pixel_format);
277  if (sensor_iter!=CONVERSIONS_DICTIONARY.end()) {
278  convert_formats.push_back(sensor_iter->second);
279  }
280  else {
281  ROS_WARN_STREAM("There is no known conversion from " << sensors_[i]->pixel_format << " to a usual ROS image encoding. Likely you need to implement one.");
282  }
283 
284  sensors_[i]->n_bits_pixel = ARV_PIXEL_FORMAT_BIT_PER_PIXEL(
285  arv_device_get_integer_feature_value(p_device_, "PixelFormat"));
286  config_.FocusPos =
287  implemented_features_["FocusPos"] ? arv_device_get_integer_feature_value(p_device_, "FocusPos") : 0;
288 
289  }
290 
291  // Get other (non GenIcam) parameter current values.
292  pnh.param<double>("softwaretriggerrate", config_.softwaretriggerrate, config_.softwaretriggerrate);
293  pnh.param<int>("mtu", config_.mtu, config_.mtu);
294  pnh.param<std::string>("frame_id", config_.frame_id, config_.frame_id);
295  pnh.param<bool>("auto_master", config_.AutoMaster, config_.AutoMaster);
296  pnh.param<bool>("auto_slave", config_.AutoSlave, config_.AutoSlave);
297  setAutoMaster(config_.AutoMaster);
298  setAutoSlave(config_.AutoSlave);
299 
300  if (pub_tf_optical_)
301  {
302  tf_optical_.header.frame_id = config_.frame_id;
303  tf_optical_.child_frame_id = config_.frame_id + "_optical";
304  tf_optical_.transform.translation.x = 0.0;
305  tf_optical_.transform.translation.y = 0.0;
306  tf_optical_.transform.translation.z = 0.0;
307  tf_optical_.transform.rotation.x = -0.5;
308  tf_optical_.transform.rotation.y = 0.5;
309  tf_optical_.transform.rotation.z = -0.5;
310  tf_optical_.transform.rotation.w = 0.5;
311 
312  double tf_publish_rate;
313  pnh.param<double>("tf_publish_rate", tf_publish_rate, 0);
314  if (tf_publish_rate > 0.)
315  {
316  // publish dynamic tf at given rate (recommended when running as a Nodelet, since latching has bugs-by-design)
318  tf_dyn_thread_ = std::thread(&CameraAravisNodelet::publishTfLoop, this, tf_publish_rate);
319  }
320  else
321  {
322  // publish static tf only once (latched)
324  tf_optical_.header.stamp = ros::Time::now();
325  p_stb_->sendTransform(tf_optical_);
326  }
327  }
328 
329  // default calibration url is [DeviceSerialNumber/DeviceID].yaml
330  ArvGcNode *p_gc_node;
331  GError *error = NULL;
332 
333  p_gc_node = arv_device_get_feature(p_device_, "DeviceSerialNumber");
334 
335  if(calib_urls[0].empty()) {
336  if( arv_gc_feature_node_is_implemented( ARV_GC_FEATURE_NODE(p_gc_node), &error) ) {
337  GType device_serial_return_type = arv_gc_feature_node_get_value_type( ARV_GC_FEATURE_NODE(p_gc_node));
338  // If the feature DeviceSerialNumber is not string, it indicates that the camera is using an older version of the genicam SFNC.
339  // Older camera models do not have a DeviceSerialNumber as string, but as integer and often set to 0.
340  // In those cases use the outdated DeviceID (deprecated since genicam SFNC v2.0).
341  if (device_serial_return_type == G_TYPE_STRING) {
342  calib_urls[0] = arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber");
343  calib_urls[0] += ".yaml";
344  } else if (device_serial_return_type == G_TYPE_INT64) {
345  calib_urls[0] = arv_device_get_string_feature_value(p_device_, "DeviceID");
346  calib_urls[0] += ".yaml";
347  }
348  }
349  }
350 
351  for(int i = 0; i < num_streams_; i++) {
352  // Start the camerainfo manager.
353  std::string camera_info_frame_id = config_.frame_id;
354  if(!stream_names_[i].empty())
355  camera_info_frame_id = config_.frame_id + '/' + stream_names_[i];
356 
357  // Use separate node handles for CameraInfoManagers when using a Multisource Camera
358  if(!stream_names_[i].empty()) {
360  p_camera_info_managers_[i].reset(new camera_info_manager::CameraInfoManager(*p_camera_info_node_handles_[i], camera_info_frame_id, calib_urls[i]));
361  } else {
362  p_camera_info_managers_[i].reset(new camera_info_manager::CameraInfoManager(pnh, camera_info_frame_id, calib_urls[i]));
363  }
364 
365 
366  ROS_INFO("Reset %s Camera Info Manager", stream_names_[i].c_str());
367  ROS_INFO("%s Calib URL: %s", stream_names_[i].c_str(), calib_urls[i].c_str());
368 
369  // publish an ExtendedCameraInfo message
371  }
372 
373  // update the reconfigure config
374  reconfigure_server_->setConfigMin(config_min_);
375  reconfigure_server_->setConfigMax(config_max_);
376  reconfigure_server_->updateConfig(config_);
377  ros::Duration(2.0).sleep();
378  reconfigure_server_->setCallback(boost::bind(&CameraAravisNodelet::rosReconfigureCallback, this, _1, _2));
379 
380  // Print information.
381  ROS_INFO(" Using Camera Configuration:");
382  ROS_INFO(" ---------------------------");
383  ROS_INFO(" Vendor name = %s", arv_device_get_string_feature_value(p_device_, "DeviceVendorName"));
384  ROS_INFO(" Model name = %s", arv_device_get_string_feature_value(p_device_, "DeviceModelName"));
385  ROS_INFO(" Device id = %s", arv_device_get_string_feature_value(p_device_, "DeviceUserID"));
386  ROS_INFO(" Serial number = %s", arv_device_get_string_feature_value(p_device_, "DeviceSerialNumber"));
387  ROS_INFO(
388  " Type = %s",
389  arv_camera_is_uv_device(p_camera_) ? "USB3Vision" :
390  (arv_camera_is_gv_device(p_camera_) ? "GigEVision" : "Other"));
391  ROS_INFO(" Sensor width = %d", sensors_[0]->width);
392  ROS_INFO(" Sensor height = %d", sensors_[0]->height);
393  ROS_INFO(" ROI x,y,w,h = %d, %d, %d, %d", roi_.x, roi_.y, roi_.width, roi_.height);
394  ROS_INFO(" Pixel format = %s", sensors_[0]->pixel_format.c_str());
395  ROS_INFO(" BitsPerPixel = %lu", sensors_[0]->n_bits_pixel);
396  ROS_INFO(
397  " Acquisition Mode = %s",
398  implemented_features_["AcquisitionMode"] ? arv_device_get_string_feature_value(p_device_, "AcquisitionMode") :
399  "(not implemented in camera)");
400  ROS_INFO(
401  " Trigger Mode = %s",
402  implemented_features_["TriggerMode"] ? arv_device_get_string_feature_value(p_device_, "TriggerMode") :
403  "(not implemented in camera)");
404  ROS_INFO(
405  " Trigger Source = %s",
406  implemented_features_["TriggerSource"] ? arv_device_get_string_feature_value(p_device_, "TriggerSource") :
407  "(not implemented in camera)");
408  ROS_INFO(" Can set FrameRate: %s", implemented_features_["AcquisitionFrameRate"] ? "True" : "False");
409  if (implemented_features_["AcquisitionFrameRate"])
410  {
411  ROS_INFO(" AcquisitionFrameRate = %g hz", config_.AcquisitionFrameRate);
412  }
413 
414  ROS_INFO(" Can set Exposure: %s", implemented_features_["ExposureTime"] ? "True" : "False");
415  if (implemented_features_["ExposureTime"])
416  {
417  ROS_INFO(" Can set ExposureAuto: %s", implemented_features_["ExposureAuto"] ? "True" : "False");
418  ROS_INFO(" Exposure = %g us in range [%g,%g]", config_.ExposureTime, config_min_.ExposureTime,
419  config_max_.ExposureTime);
420  }
421 
422  ROS_INFO(" Can set Gain: %s", implemented_features_["Gain"] ? "True" : "False");
423  if (implemented_features_["Gain"])
424  {
425  ROS_INFO(" Can set GainAuto: %s", implemented_features_["GainAuto"] ? "True" : "False");
426  ROS_INFO(" Gain = %f %% in range [%f,%f]", config_.Gain, config_min_.Gain, config_max_.Gain);
427  }
428 
429  ROS_INFO(" Can set FocusPos: %s", implemented_features_["FocusPos"] ? "True" : "False");
430 
431  if (implemented_features_["GevSCPSPacketSize"])
432  ROS_INFO(" Network mtu = %lu", arv_device_get_integer_feature_value(p_device_, "GevSCPSPacketSize"));
433 
434  ROS_INFO(" ---------------------------");
435 
436  // Reset PTP clock
437  if (use_ptp_stamp_)
438  resetPtpClock();
439 
440  // spawn camera stream in thread, so onInit() is not blocked
441  spawning_ = true;
443 }
444 
446 {
449 
450  for(int i = 0; i < num_streams_; i++) {
451  while (spawning_) {
452  arv_camera_gv_select_stream_channel(p_camera_, i);
453  p_streams_[i] = arv_camera_create_stream(p_camera_, NULL, NULL);
454 
455  if (p_streams_[i])
456  {
457  // Load up some buffers.
458  arv_camera_gv_select_stream_channel(p_camera_, i);
459  const gint n_bytes_payload_stream_ = arv_camera_get_payload(p_camera_);
460 
461  p_buffer_pools_[i].reset(new CameraBufferPool(p_streams_[i], n_bytes_payload_stream_, 10));
462 
463  if (arv_camera_is_gv_device(p_camera_))
464  {
465  tuneGvStream(reinterpret_cast<ArvGvStream*>(p_streams_[i]));
466  }
467  break;
468  }
469  else
470  {
471  ROS_WARN("Stream %i: Could not create image stream for %s. Retrying...", i, guid_.c_str());
472  ros::Duration(1.0).sleep();
473  ros::spinOnce();
474  }
475  }
476  }
477 
478  // Monitor whether anyone is subscribed to the camera stream
479  std::vector<image_transport::SubscriberStatusCallback> image_cbs_;
480  std::vector<ros::SubscriberStatusCallback> info_cbs_;
481 
483  { this->rosConnectCallback();};
485  { this->rosConnectCallback();};
486 
487  for(int i = 0; i < num_streams_; i++) {
488  image_transport::ImageTransport *p_transport;
489  // Set up image_raw
490  std::string topic_name = this->getName();
491  p_transport = new image_transport::ImageTransport(pnh);
492  if(num_streams_ != 1 || !stream_names_[i].empty()) {
493  topic_name += "/" + stream_names_[i];
494  }
495 
496  cam_pubs_[i] = p_transport->advertiseCamera(
497  ros::names::remap(topic_name + "/image_raw"),
498  1, image_cb, image_cb, info_cb, info_cb);
499  }
500 
501  // Connect signals with callbacks.
502  for(int i = 0; i < num_streams_; i++) {
503  StreamIdData* data = new StreamIdData();
504  data->can = this;
505  data->stream_id = i;
506  g_signal_connect(p_streams_[i], "new-buffer", (GCallback)CameraAravisNodelet::newBufferReadyCallback, data);
507  }
508  g_signal_connect(p_device_, "control-lost", (GCallback)CameraAravisNodelet::controlLostCallback, this);
509 
510  for(int i = 0; i < num_streams_; i++) {
511  arv_stream_set_emit_signals(p_streams_[i], TRUE);
512  }
513 
514  if (std::any_of(cam_pubs_.begin(), cam_pubs_.end(),
515  [](image_transport::CameraPublisher pub){ return pub.getNumSubscribers() > 0; })
516  ) {
517  arv_camera_start_acquisition(p_camera_);
518  }
519 
520  this->get_integer_service_ = pnh.advertiseService("get_integer_feature_value", &CameraAravisNodelet::getIntegerFeatureCallback, this);
521  this->get_float_service_ = pnh.advertiseService("get_float_feature_value", &CameraAravisNodelet::getFloatFeatureCallback, this);
522  this->get_string_service_ = pnh.advertiseService("get_string_feature_value", &CameraAravisNodelet::getStringFeatureCallback, this);
523  this->get_boolean_service_ = pnh.advertiseService("get_boolean_feature_value", &CameraAravisNodelet::getBooleanFeatureCallback, this);
524 
525  this->set_integer_service_ = pnh.advertiseService("set_integer_feature_value", &CameraAravisNodelet::setIntegerFeatureCallback, this);
526  this->set_float_service_ = pnh.advertiseService("set_float_feature_value", &CameraAravisNodelet::setFloatFeatureCallback, this);
527  this->set_string_service_ = pnh.advertiseService("set_string_feature_value", &CameraAravisNodelet::setStringFeatureCallback, this);
528  this->set_boolean_service_ = pnh.advertiseService("set_boolean_feature_value", &CameraAravisNodelet::setBooleanFeatureCallback, this);
529 
530  ROS_INFO("Done initializing camera_aravis.");
531 }
532 
533 bool CameraAravisNodelet::getIntegerFeatureCallback(camera_aravis::get_integer_feature_value::Request& request, camera_aravis::get_integer_feature_value::Response& response)
534 {
535  const char* feature_name = request.feature.c_str();
536  response.response = arv_device_get_integer_feature_value(this->p_device_, feature_name);
537  return true;
538 }
539 
540 bool CameraAravisNodelet::setIntegerFeatureCallback(camera_aravis::set_integer_feature_value::Request& request, camera_aravis::set_integer_feature_value::Response& response)
541 {
542  const char* feature_name = request.feature.c_str();
543  guint64 value = request.value;
544  arv_device_set_integer_feature_value(this->p_device_, feature_name, value);
545  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
546  response.ok = true;
547  } else {
548  response.ok = false;
549  }
550  return true;
551 }
552 
553 bool CameraAravisNodelet::getFloatFeatureCallback(camera_aravis::get_float_feature_value::Request& request, camera_aravis::get_float_feature_value::Response& response)
554 {
555  const char* feature_name = request.feature.c_str();
556  response.response = arv_device_get_float_feature_value(this->p_device_, feature_name);
557  return true;
558 }
559 
560 bool CameraAravisNodelet::setFloatFeatureCallback(camera_aravis::set_float_feature_value::Request& request, camera_aravis::set_float_feature_value::Response& response)
561 {
562  const char* feature_name = request.feature.c_str();
563  const double value = request.value;
564  arv_device_set_float_feature_value(this->p_device_, feature_name, value);
565  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
566  response.ok = true;
567  } else {
568  response.ok = false;
569  }
570  return true;
571 }
572 
573 bool CameraAravisNodelet::getStringFeatureCallback(camera_aravis::get_string_feature_value::Request& request, camera_aravis::get_string_feature_value::Response& response)
574 {
575  const char* feature_name = request.feature.c_str();
576  response.response = arv_device_get_string_feature_value(this->p_device_, feature_name);
577  return true;
578 }
579 
580 bool CameraAravisNodelet::setStringFeatureCallback(camera_aravis::set_string_feature_value::Request& request, camera_aravis::set_string_feature_value::Response& response)
581 {
582  const char* feature_name = request.feature.c_str();
583  const char* value = request.value.c_str();
584  arv_device_set_string_feature_value(this->p_device_, feature_name, value);
585  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
586  response.ok = true;
587  } else {
588  response.ok = false;
589  }
590  return true;
591 }
592 
593 bool CameraAravisNodelet::getBooleanFeatureCallback(camera_aravis::get_boolean_feature_value::Request& request, camera_aravis::get_boolean_feature_value::Response& response)
594 {
595  const char* feature_name = request.feature.c_str();
596  response.response = arv_device_get_boolean_feature_value(this->p_device_, feature_name);
597  return true;
598 }
599 
600 bool CameraAravisNodelet::setBooleanFeatureCallback(camera_aravis::set_boolean_feature_value::Request& request, camera_aravis::set_boolean_feature_value::Response& response)
601 {
602  const char* feature_name = request.feature.c_str();
603  const bool value = request.value;
604  arv_device_set_boolean_feature_value(this->p_device_, feature_name, value);
605  if(arv_device_get_status(this->p_device_) == ARV_DEVICE_STATUS_SUCCESS) {
606  response.ok = true;
607  } else {
608  response.ok = false;
609  }
610  return true;
611 }
612 
614 {
615  // a PTP slave can take the following states: Slave, Listening, Uncalibrated, Faulty, Disabled
616  std::string ptp_status(arv_device_get_string_feature_value(p_device_, "GevIEEE1588Status"));
617  if (ptp_status == std::string("Faulty") || ptp_status == std::string("Disabled"))
618  {
619  ROS_INFO("camera_aravis: Reset ptp clock (was set to %s)", ptp_status.c_str());
620  arv_device_set_boolean_feature_value(p_device_, "GevIEEE1588", false);
621  arv_device_set_boolean_feature_value(p_device_, "GevIEEE1588", true);
622  }
623 
624 }
625 
void CameraAravisNodelet::cameraAutoInfoCallback(const CameraAutoInfoConstPtr &msg_ptr)
{
  // Auto-slave synchronization: mirror the exposure / gain / black-level /
  // white-balance values published by a master camera onto this device.
  // Only runs when this node is configured as an auto-slave and a device is
  // open. Each value is written only if (a) the device reports the feature as
  // implemented and (b) the incoming value differs from the last applied
  // state cached in auto_params_, avoiding redundant GenICam register writes.
  if (config_.AutoSlave && p_device_)
  {

    if (auto_params_.exposure_time != msg_ptr->exposure_time && implemented_features_["ExposureTime"])
    {
      arv_device_set_float_feature_value(p_device_, "ExposureTime", msg_ptr->exposure_time);
    }

    if (implemented_features_["Gain"])
    {
      if (auto_params_.gain != msg_ptr->gain)
      {
        // Select the "All" channel first so the common gain applies globally.
        if (implemented_features_["GainSelector"])
        {
          arv_device_set_string_feature_value(p_device_, "GainSelector", "All");
        }
        arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain);
      }

      // Per-color gains: the selector must be switched before each write,
      // since "Gain" is a selected-channel register.
      if (implemented_features_["GainSelector"])
      {
        if (auto_params_.gain_red != msg_ptr->gain_red)
        {
          arv_device_set_string_feature_value(p_device_, "GainSelector", "Red");
          arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_red);
        }

        if (auto_params_.gain_green != msg_ptr->gain_green)
        {
          arv_device_set_string_feature_value(p_device_, "GainSelector", "Green");
          arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_green);
        }

        if (auto_params_.gain_blue != msg_ptr->gain_blue)
        {
          arv_device_set_string_feature_value(p_device_, "GainSelector", "Blue");
          arv_device_set_float_feature_value(p_device_, "Gain", msg_ptr->gain_blue);
        }
      }
    }

    if (implemented_features_["BlackLevel"])
    {
      if (auto_params_.black_level != msg_ptr->black_level)
      {
        if (implemented_features_["BlackLevelSelector"])
        {
          arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
        }
        arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->black_level);
      }

      // Per-color black levels, same selector-then-write pattern as gain.
      if (implemented_features_["BlackLevelSelector"])
      {
        if (auto_params_.bl_red != msg_ptr->bl_red)
        {
          arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Red");
          arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_red);
        }

        if (auto_params_.bl_green != msg_ptr->bl_green)
        {
          arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Green");
          arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_green);
        }

        if (auto_params_.bl_blue != msg_ptr->bl_blue)
        {
          arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Blue");
          arv_device_set_float_feature_value(p_device_, "BlackLevel", msg_ptr->bl_blue);
        }
      }
    }

    // White balance as TIS is providing
    // The Imaging Source cameras use vendor-specific integer registers
    // (value scaled by 255) instead of the standard BalanceRatio feature.
    // NOTE(review): this branch writes from auto_params_ (the cached state),
    // not msg_ptr, unlike every other branch — confirm this is intentional.
    if (strcmp("The Imaging Source Europe GmbH", arv_camera_get_vendor_name(p_camera_)) == 0)
    {
      arv_device_set_integer_feature_value(p_device_, "WhiteBalanceRedRegister", (int)(auto_params_.wb_red * 255.));
      arv_device_set_integer_feature_value(p_device_, "WhiteBalanceGreenRegister", (int)(auto_params_.wb_green * 255.));
      arv_device_set_integer_feature_value(p_device_, "WhiteBalanceBlueRegister", (int)(auto_params_.wb_blue * 255.));
    }
    else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
    {
      if (auto_params_.wb_red != msg_ptr->wb_red)
      {
        arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
        arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_red);
      }

      if (auto_params_.wb_green != msg_ptr->wb_green)
      {
        arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
        arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_green);
      }

      if (auto_params_.wb_blue != msg_ptr->wb_blue)
      {
        arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
        arv_device_set_float_feature_value(p_device_, "BalanceRatio", msg_ptr->wb_blue);
      }
    }

    // Remember the applied state so the next message only triggers writes
    // for values that actually changed.
    auto_params_ = *msg_ptr;
  }
}
733 
735 {
736  auto_params_.exposure_time = auto_params_.gain = auto_params_.gain_red = auto_params_.gain_green =
737  auto_params_.gain_blue = auto_params_.black_level = auto_params_.bl_red = auto_params_.bl_green =
738  auto_params_.bl_blue = auto_params_.wb_red = auto_params_.wb_green = auto_params_.wb_blue =
739  std::numeric_limits<double>::quiet_NaN();
740 
741  if (p_device_)
742  {
743  if (implemented_features_["ExposureTime"])
744  {
745  auto_params_.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTime");
746  }
747 
748  if (implemented_features_["Gain"])
749  {
750  if (implemented_features_["GainSelector"])
751  {
752  arv_device_set_string_feature_value(p_device_, "GainSelector", "All");
753  }
754  auto_params_.gain = arv_device_get_float_feature_value(p_device_, "Gain");
755  if (implemented_features_["GainSelector"])
756  {
757  arv_device_set_string_feature_value(p_device_, "GainSelector", "Red");
758  auto_params_.gain_red = arv_device_get_float_feature_value(p_device_, "Gain");
759  arv_device_set_string_feature_value(p_device_, "GainSelector", "Green");
760  auto_params_.gain_green = arv_device_get_float_feature_value(p_device_, "Gain");
761  arv_device_set_string_feature_value(p_device_, "GainSelector", "Blue");
762  auto_params_.gain_blue = arv_device_get_float_feature_value(p_device_, "Gain");
763  }
764  }
765 
766  if (implemented_features_["BlackLevel"])
767  {
768  if (implemented_features_["BlackLevelSelector"])
769  {
770  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
771  }
772  auto_params_.black_level = arv_device_get_float_feature_value(p_device_, "BlackLevel");
773  if (implemented_features_["BlackLevelSelector"])
774  {
775  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Red");
776  auto_params_.bl_red = arv_device_get_float_feature_value(p_device_, "BlackLevel");
777  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Green");
778  auto_params_.bl_green = arv_device_get_float_feature_value(p_device_, "BlackLevel");
779  arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "Blue");
780  auto_params_.bl_blue = arv_device_get_float_feature_value(p_device_, "BlackLevel");
781  }
782  }
783 
784  // White balance as TIS is providing
785  if (strcmp("The Imaging Source Europe GmbH", arv_camera_get_vendor_name(p_camera_)) == 0)
786  {
787  auto_params_.wb_red = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceRedRegister") / 255.;
788  auto_params_.wb_green = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceGreenRegister") / 255.;
789  auto_params_.wb_blue = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceBlueRegister") / 255.;
790  }
791  // the standard way
792  else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
793  {
794  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
795  auto_params_.wb_red = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
796  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
797  auto_params_.wb_green = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
798  arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
799  auto_params_.wb_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
800  }
801  }
802 }
803 
// Enable/disable this camera as the "auto master": when enabled, it latches a
// CameraAutoInfo publisher so that slave cameras can mirror this camera's
// auto-function results (see newBufferReady(), which publishes auto_params_
// whenever config_.AutoMaster is set).
void CameraAravisNodelet::setAutoMaster(bool value)
{
  if (value)
  {
    // NOTE(review): one statement was elided in this listing (line 808 of the
    // original file) -- presumably an initial sync of auto_params_ via
    // syncAutoParameters(); confirm against the full source.
    auto_pub_ = getNodeHandle().advertise<CameraAutoInfo>(ros::names::remap("camera_auto_info"), 1, true);
  }
  else
  {
    // NOTE(review): statement elided in this listing (line 813 of the
    // original file) -- presumably auto_pub_.shutdown(); confirm against the
    // full source.
  }
}
816 
// Enable/disable "auto slave" mode: a slave switches all of its own on-camera
// auto functions off and instead subscribes to camera_auto_info, adopting the
// master's exposure/gain/black-level/white-balance values.
void CameraAravisNodelet::setAutoSlave(bool value)
{
  if (value)
  {
    // deactivate all auto functions
    if (implemented_features_["ExposureAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "ExposureAuto", "Off");
    }
    if (implemented_features_["GainAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "GainAuto", "Off");
    }
    if (implemented_features_["GainAutoBalance"])
    {
      arv_device_set_string_feature_value(p_device_, "GainAutoBalance", "Off");
    }
    if (implemented_features_["BlackLevelAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "BlackLevelAuto", "Off");
    }
    if (implemented_features_["BlackLevelAutoBalance"])
    {
      arv_device_set_string_feature_value(p_device_, "BlackLevelAutoBalance", "Off");
    }
    if (implemented_features_["BalanceWhiteAuto"])
    {
      arv_device_set_string_feature_value(p_device_, "BalanceWhiteAuto", "Off");
    }
    // NOTE(review): the subscriber's callback argument was elided in this
    // listing (line 848 of the original file); confirm the callback/instance
    // pair against the full source.
    auto_sub_ = getNodeHandle().subscribe(ros::names::remap("camera_auto_info"), 1,
  }
  else
  {
    // NOTE(review): statement elided in this listing (line 852 of the
    // original file) -- presumably auto_sub_.shutdown(); confirm against the
    // full source.
  }
}
855 
// (Un)advertise the per-stream ExtendedCameraInfo topic. With an empty
// channel name the topic is "extended_camera_info"; otherwise it is nested
// under the channel as "<channel_name>/extended_camera_info".
void CameraAravisNodelet::setExtendedCameraInfo(std::string channel_name, size_t stream_id)
{
  // NOTE(review): the condition of this if-statement was elided in this
  // listing (line 858 of the original file) -- presumably
  // `if (pub_ext_camera_info_)`; confirm against the full source.
  {
    if (channel_name.empty()) {
      extended_camera_info_pubs_[stream_id] = getNodeHandle().advertise<ExtendedCameraInfo>(ros::names::remap("extended_camera_info"), 1, true);
    } else {
      extended_camera_info_pubs_[stream_id] = getNodeHandle().advertise<ExtendedCameraInfo>(ros::names::remap(channel_name + "/extended_camera_info"), 1, true);
    }
  }
  else
  {
    extended_camera_info_pubs_[stream_id].shutdown();
  }
}
871 
872 // Extra stream options for GigEVision streams.
873 void CameraAravisNodelet::tuneGvStream(ArvGvStream *p_stream)
874 {
875  gboolean b_auto_buffer = FALSE;
876  gboolean b_packet_resend = TRUE;
877  unsigned int timeout_packet = 40; // milliseconds
878  unsigned int timeout_frame_retention = 200;
879 
880  if (p_stream)
881  {
882  if (!ARV_IS_GV_STREAM(p_stream))
883  {
884  ROS_WARN("Stream is not a GV_STREAM");
885  return;
886  }
887 
888  if (b_auto_buffer)
889  g_object_set(p_stream, "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO, "socket-buffer-size", 0,
890  NULL);
891  if (!b_packet_resend)
892  g_object_set(p_stream, "packet-resend",
893  b_packet_resend ? ARV_GV_STREAM_PACKET_RESEND_ALWAYS : ARV_GV_STREAM_PACKET_RESEND_NEVER,
894  NULL);
895  g_object_set(p_stream, "packet-timeout", timeout_packet * 1000, "frame-retention", timeout_frame_retention * 1000,
896  NULL);
897  }
898 }
899 
// Dynamic-reconfigure callback. Clamps the requested parameters to their
// legal ranges, diffs them against the active config_, pushes only the
// changed settings to the camera, then adopts the new config.
// Holds reconfigure_mutex_ for the whole call; note that the "Once" auto
// modes and the FocusPos/mtu branches sleep 1-2 s while the lock is held.
void CameraAravisNodelet::rosReconfigureCallback(Config &config, uint32_t level)
{
  reconfigure_mutex_.lock();
  std::string tf_prefix = tf::getPrefixParam(getNodeHandle());
  ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);

  // Default the frame id to the nodelet's name before applying the tf prefix.
  if (config.frame_id == "")
    config.frame_id = this->getName();

  // Limit params to legal values.
  config.AcquisitionFrameRate = CLAMP(config.AcquisitionFrameRate, config_min_.AcquisitionFrameRate,
                                      config_max_.AcquisitionFrameRate);
  config.ExposureTime = CLAMP(config.ExposureTime, config_min_.ExposureTime, config_max_.ExposureTime);
  config.Gain = CLAMP(config.Gain, config_min_.Gain, config_max_.Gain);
  config.FocusPos = CLAMP(config.FocusPos, config_min_.FocusPos, config_max_.FocusPos);
  config.frame_id = tf::resolve(tf_prefix, config.frame_id);

  if (use_ptp_stamp_)
    resetPtpClock();

  // stop auto functions if slave
  if (config.AutoSlave)
  {
    config.ExposureAuto = "Off";
    config.GainAuto = "Off";
    // todo: all other auto functions "Off"
  }

  // reset values controlled by auto functions
  if (config.ExposureAuto.compare("Off") != 0)
  {
    config.ExposureTime = config_.ExposureTime;
    ROS_WARN("ExposureAuto is active. Cannot manually set ExposureTime.");
  }
  if (config.GainAuto.compare("Off") != 0)
  {
    config.Gain = config_.Gain;
    ROS_WARN("GainAuto is active. Cannot manually set Gain.");
  }

  // reset FrameRate when triggered
  if (config.TriggerMode.compare("Off") != 0)
  {
    config.AcquisitionFrameRate = config_.AcquisitionFrameRate;
    ROS_WARN("TriggerMode is active (Trigger Source: %s). Cannot manually set AcquisitionFrameRate.", config_.TriggerSource.c_str());
  }

  // Find valid user changes we need to react to.
  const bool changed_auto_master = (config_.AutoMaster != config.AutoMaster);
  const bool changed_auto_slave = (config_.AutoSlave != config.AutoSlave);
  const bool changed_acquisition_frame_rate = (config_.AcquisitionFrameRate != config.AcquisitionFrameRate);
  const bool changed_exposure_auto = (config_.ExposureAuto != config.ExposureAuto);
  const bool changed_exposure_time = (config_.ExposureTime != config.ExposureTime);
  const bool changed_gain_auto = (config_.GainAuto != config.GainAuto);
  const bool changed_gain = (config_.Gain != config.Gain);
  const bool changed_acquisition_mode = (config_.AcquisitionMode != config.AcquisitionMode);
  const bool changed_trigger_mode = (config_.TriggerMode != config.TriggerMode);
  // a new trigger mode also forces the trigger source to be (re)applied
  const bool changed_trigger_source = (config_.TriggerSource != config.TriggerSource) || changed_trigger_mode;
  const bool changed_focus_pos = (config_.FocusPos != config.FocusPos);
  const bool changed_mtu = (config_.mtu != config.mtu);

  if (changed_auto_master)
  {
    setAutoMaster(config.AutoMaster);
  }

  if (changed_auto_slave)
  {
    setAutoSlave(config.AutoSlave);
  }

  // Set params into the camera.
  if (changed_exposure_time)
  {
    if (implemented_features_["ExposureTime"])
    {
      ROS_INFO("Set ExposureTime = %f us", config.ExposureTime);
      arv_camera_set_exposure_time(p_camera_, config.ExposureTime);
    }
    else
      ROS_INFO("Camera does not support ExposureTime.");
  }

  if (changed_gain)
  {
    if (implemented_features_["Gain"])
    {
      ROS_INFO("Set gain = %f", config.Gain);
      arv_camera_set_gain(p_camera_, config.Gain);
    }
    else
      ROS_INFO("Camera does not support Gain or GainRaw.");
  }

  if (changed_exposure_auto)
  {
    if (implemented_features_["ExposureAuto"] && implemented_features_["ExposureTime"])
    {
      ROS_INFO("Set ExposureAuto = %s", config.ExposureAuto.c_str());
      arv_device_set_string_feature_value(p_device_, "ExposureAuto", config.ExposureAuto.c_str());
      // "Once": let the camera converge, read the result back, then report
      // the mode as Off again.
      if (config.ExposureAuto.compare("Once") == 0)
      {
        ros::Duration(2.0).sleep();
        config.ExposureTime = arv_camera_get_exposure_time(p_camera_);
        ROS_INFO("Get ExposureTime = %f us", config.ExposureTime);
        config.ExposureAuto = "Off";
      }
    }
    else
      ROS_INFO("Camera does not support ExposureAuto.");
  }
  if (changed_gain_auto)
  {
    if (implemented_features_["GainAuto"] && implemented_features_["Gain"])
    {
      ROS_INFO("Set GainAuto = %s", config.GainAuto.c_str());
      arv_device_set_string_feature_value(p_device_, "GainAuto", config.GainAuto.c_str());
      // same "Once" handling as for ExposureAuto above
      if (config.GainAuto.compare("Once") == 0)
      {
        ros::Duration(2.0).sleep();
        config.Gain = arv_camera_get_gain(p_camera_);
        ROS_INFO("Get Gain = %f", config.Gain);
        config.GainAuto = "Off";
      }
    }
    else
      ROS_INFO("Camera does not support GainAuto.");
  }

  if (changed_acquisition_frame_rate)
  {
    if (implemented_features_["AcquisitionFrameRate"])
    {
      ROS_INFO("Set frame rate = %f Hz", config.AcquisitionFrameRate);
      arv_camera_set_frame_rate(p_camera_, config.AcquisitionFrameRate);
    }
    else
      ROS_INFO("Camera does not support AcquisitionFrameRate.");
  }

  if (changed_trigger_mode)
  {
    if (implemented_features_["TriggerMode"])
    {
      ROS_INFO("Set TriggerMode = %s", config.TriggerMode.c_str());
      arv_device_set_string_feature_value(p_device_, "TriggerMode", config.TriggerMode.c_str());
    }
    else
      ROS_INFO("Camera does not support TriggerMode.");
  }

  if (changed_trigger_source)
  {
    // delete old software trigger thread if active
    software_trigger_active_ = false;
    if (software_trigger_thread_.joinable())
    {
      software_trigger_thread_.join();
    }

    if (implemented_features_["TriggerSource"])
    {
      ROS_INFO("Set TriggerSource = %s", config.TriggerSource.c_str());
      arv_device_set_string_feature_value(p_device_, "TriggerSource", config.TriggerSource.c_str());
    }
    else
    {
      ROS_INFO("Camera does not support TriggerSource.");
    }

    // activate on demand
    if (config.TriggerMode.compare("On") == 0 && config.TriggerSource.compare("Software") == 0)
    {
      if (implemented_features_["TriggerSoftware"])
      {
        config_.softwaretriggerrate = config.softwaretriggerrate;
        ROS_INFO("Set softwaretriggerrate = %f", 1000.0 / ceil(1000.0 / config.softwaretriggerrate));

        // Turn on software timer callback.
        // NOTE(review): a statement was elided in this listing (line 1079 of
        // the original file) -- presumably it spawns software_trigger_thread_
        // running softwareTriggerLoop(); confirm against the full source.
      }
      else
      {
        ROS_INFO("Camera does not support TriggerSoftware command.");
      }
    }
  }

  if (changed_focus_pos)
  {
    if (implemented_features_["FocusPos"])
    {
      ROS_INFO("Set FocusPos = %d", config.FocusPos);
      arv_device_set_integer_feature_value(p_device_, "FocusPos", config.FocusPos);
      // read back after the lens had time to move
      ros::Duration(1.0).sleep();
      config.FocusPos = arv_device_get_integer_feature_value(p_device_, "FocusPos");
      ROS_INFO("Get FocusPos = %d", config.FocusPos);
    }
    else
      ROS_INFO("Camera does not support FocusPos.");
  }

  if (changed_mtu)
  {
    if (implemented_features_["GevSCPSPacketSize"])
    {
      ROS_INFO("Set mtu = %d", config.mtu);
      arv_device_set_integer_feature_value(p_device_, "GevSCPSPacketSize", config.mtu);
      // read back the packet size the device actually accepted
      ros::Duration(1.0).sleep();
      config.mtu = arv_device_get_integer_feature_value(p_device_, "GevSCPSPacketSize");
      ROS_INFO("Get mtu = %d", config.mtu);
    }
    else
      ROS_INFO("Camera does not support mtu (i.e. GevSCPSPacketSize).");
  }

  if (changed_acquisition_mode)
  {
    if (implemented_features_["AcquisitionMode"])
    {
      ROS_INFO("Set AcquisitionMode = %s", config.AcquisitionMode.c_str());
      arv_device_set_string_feature_value(p_device_, "AcquisitionMode", config.AcquisitionMode.c_str());

      // restart acquisition so the new mode takes effect
      ROS_INFO("AcquisitionStop");
      arv_device_execute_command(p_device_, "AcquisitionStop");
      ROS_INFO("AcquisitionStart");
      arv_device_execute_command(p_device_, "AcquisitionStart");
    }
    else
      ROS_INFO("Camera does not support AcquisitionMode.");
  }

  // adopt new config
  config_ = config;
  reconfigure_mutex_.unlock();
}
1136 
1138 {
1139  if (p_device_)
1140  {
1141  if (std::all_of(cam_pubs_.begin(), cam_pubs_.end(),
1142  [](image_transport::CameraPublisher pub){ return pub.getNumSubscribers() == 0; })
1143  ){
1144  arv_device_execute_command(p_device_, "AcquisitionStop"); // don't waste CPU if nobody is listening!
1145  }
1146  else
1147  {
1148  arv_device_execute_command(p_device_, "AcquisitionStart");
1149  }
1150  }
1151 }
1152 
// Aravis "new-buffer" signal trampoline: recovers the nodelet instance and
// stream index from the user-data pointer and forwards to the instance-aware
// newBufferReady() with the stream's frame id.
void CameraAravisNodelet::newBufferReadyCallback(ArvStream *p_stream, gpointer can_instance)
{

  // workaround to get access to the instance from a static method
  StreamIdData *data = (StreamIdData*) can_instance;
  // NOTE(review): a statement was elided in this listing (line 1158 of the
  // original file) -- presumably `CameraAravisNodelet *p_can = data->can;`,
  // which the code below relies on; confirm against the full source.
  size_t stream_id = data->stream_id;
  image_transport::CameraPublisher *p_cam_pub = &p_can->cam_pubs_[stream_id];  // NOTE(review): local appears unused

  if( p_can->stream_names_[stream_id].empty() )
  {
    newBufferReady(p_stream, p_can, p_can->config_.frame_id, stream_id);
  } else {
    // sub-streams publish under "<frame_id>/<stream_name>"
    const std::string stream_frame_id = p_can->config_.frame_id + "/" + p_can->stream_names_[stream_id];
    newBufferReady(p_stream, p_can, stream_frame_id, stream_id);
  }

}
1171 
// Per-frame work for one stream: wraps the Aravis buffer into a ROS image
// message, fills in the metadata (stamp, seq, frame id, geometry, encoding),
// converts the pixel format if a converter is configured, and publishes the
// image plus CameraInfo (and optionally ExtendedCameraInfo).
void CameraAravisNodelet::newBufferReady(ArvStream *p_stream, CameraAravisNodelet *p_can, std::string frame_id, size_t stream_id)
{
  ArvBuffer *p_buffer = arv_stream_try_pop_buffer(p_stream);

  // check if we risk to drop the next image because of not enough buffers left
  gint n_available_buffers;
  arv_stream_get_n_buffers(p_stream, &n_available_buffers, NULL);
  if (n_available_buffers == 0)
  {
    p_can->p_buffer_pools_[stream_id]->allocateBuffers(1);
  }

  if (p_buffer != NULL)
  {
    // only process successfully filled buffers, and only if someone listens
    if (arv_buffer_get_status(p_buffer) == ARV_BUFFER_STATUS_SUCCESS && p_can->p_buffer_pools_[stream_id]
        && p_can->cam_pubs_[stream_id].getNumSubscribers())
    {
      (p_can->n_buffers_)++;
      // get the image message which wraps around this buffer
      sensor_msgs::ImagePtr msg_ptr = (*(p_can->p_buffer_pools_[stream_id]))[p_buffer];
      // fill the meta information of image message
      // get acquisition time
      guint64 t;
      if (p_can->use_ptp_stamp_)
        t = arv_buffer_get_timestamp(p_buffer);
      else
        t = arv_buffer_get_system_timestamp(p_buffer);
      msg_ptr->header.stamp.fromNSec(t);
      // get frame sequence number
      msg_ptr->header.seq = arv_buffer_get_frame_id(p_buffer);
      // fill other stream properties
      msg_ptr->header.frame_id = frame_id;
      msg_ptr->width = p_can->roi_.width;
      msg_ptr->height = p_can->roi_.height;
      msg_ptr->encoding = p_can->sensors_[stream_id]->pixel_format;
      msg_ptr->step = (msg_ptr->width * p_can->sensors_[stream_id]->n_bits_pixel)/8;

      // do the magic of conversion into a ROS format
      if (p_can->convert_formats[stream_id]) {
        sensor_msgs::ImagePtr cvt_msg_ptr = p_can->p_buffer_pools_[stream_id]->getRecyclableImg();
        p_can->convert_formats[stream_id](msg_ptr, cvt_msg_ptr);
        msg_ptr = cvt_msg_ptr;
      }

      // get current CameraInfo data
      if (!p_can->camera_infos_[stream_id]) {
        p_can->camera_infos_[stream_id].reset(new sensor_msgs::CameraInfo);
      }
      (*p_can->camera_infos_[stream_id]) = p_can->p_camera_info_managers_[stream_id]->getCameraInfo();
      p_can->camera_infos_[stream_id]->header = msg_ptr->header;
      if (p_can->camera_infos_[stream_id]->width == 0 || p_can->camera_infos_[stream_id]->height == 0) {
        // NOTE(review): the warning-macro invocation was elided in this
        // listing (line 1223 of the original file); the string literals below
        // are its message arguments. Confirm against the full source.
        "The fields image_width and image_height seem not to be set in "
        "the YAML specified by 'camera_info_url' parameter. Please set "
        "them there, because actual image size and specified image size "
        "can be different due to the region of interest (ROI) feature. In "
        "the YAML the image size should be the one on which the camera was "
        "calibrated. See CameraInfo.msg specification!");
        p_can->camera_infos_[stream_id]->width = p_can->roi_.width;
        p_can->camera_infos_[stream_id]->height = p_can->roi_.height;
      }


      p_can->cam_pubs_[stream_id].publish(msg_ptr, p_can->camera_infos_[stream_id]);

      // optionally publish extended camera info (exposure/gain/WB/temperature)
      if (p_can->pub_ext_camera_info_) {
        ExtendedCameraInfo extended_camera_info_msg;
        p_can->extended_camera_info_mutex_.lock();
        arv_camera_gv_select_stream_channel(p_can->p_camera_, stream_id);
        extended_camera_info_msg.camera_info = *(p_can->camera_infos_[stream_id]);
        p_can->fillExtendedCameraInfoMessage(extended_camera_info_msg);
        p_can->extended_camera_info_mutex_.unlock();
        p_can->extended_camera_info_pubs_[stream_id].publish(extended_camera_info_msg);
      }

      // check PTP status, camera cannot recover from "Faulty" by itself
      if (p_can->use_ptp_stamp_)
        p_can->resetPtpClock();
    }
    else
    {
      if (arv_buffer_get_status(p_buffer) != ARV_BUFFER_STATUS_SUCCESS) {
        ROS_WARN("(%s) Frame error: %s", frame_id.c_str(), szBufferStatusFromInt[arv_buffer_get_status(p_buffer)]);
      }
      // hand the (bad or unconsumed) buffer back to the stream for reuse
      arv_stream_push_buffer(p_stream, p_buffer);
    }
  }

  // publish current lighting settings if this camera is configured as master
  if (p_can->config_.AutoMaster)
  {
    p_can->syncAutoParameters();
    p_can->auto_pub_.publish(p_can->auto_params_);
  }
}
1267 
// Fills an ExtendedCameraInfo message with the camera's current exposure,
// gain, black level, white balance and temperature. Several vendors need
// special handling (different feature names, or registers too slow to read
// per frame), hence the strcmp branches below.
// NOTE(review): the selector writes below change camera state as a side
// effect; the caller (newBufferReady) holds extended_camera_info_mutex_
// around this call.
void CameraAravisNodelet::fillExtendedCameraInfoMessage(ExtendedCameraInfo &msg)
{
  const char *vendor_name = arv_camera_get_vendor_name(p_camera_);

  // exposure time: Basler exposes "ExposureTimeAbs" instead of "ExposureTime"
  if (strcmp("Basler", vendor_name) == 0) {
    msg.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTimeAbs");
  }
  else if (implemented_features_["ExposureTime"])
  {
    msg.exposure_time = arv_device_get_float_feature_value(p_device_, "ExposureTime");
  }

  // gain: Basler exposes the integer "GainRaw" feature
  if (strcmp("Basler", vendor_name) == 0) {
    msg.gain = static_cast<float>(arv_device_get_integer_feature_value(p_device_, "GainRaw"));
  }
  else if (implemented_features_["Gain"])
  {
    msg.gain = arv_device_get_float_feature_value(p_device_, "Gain");
  }
  if (strcmp("Basler", vendor_name) == 0) {
    arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
    msg.black_level = static_cast<float>(arv_device_get_integer_feature_value(p_device_, "BlackLevelRaw"));
  } else if (strcmp("JAI Corporation", vendor_name) == 0) {
    // Reading the black level register for both streams of the JAI FS 3500D takes too long.
    // The frame rate then drops below 10 fps.
    msg.black_level = 0;
  } else {
    arv_device_set_string_feature_value(p_device_, "BlackLevelSelector", "All");
    msg.black_level = arv_device_get_float_feature_value(p_device_, "BlackLevel");
  }

  // White balance as TIS is providing
  if (strcmp("The Imaging Source Europe GmbH", vendor_name) == 0)
  {
    msg.white_balance_red = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceRedRegister") / 255.;
    msg.white_balance_green = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceGreenRegister") / 255.;
    msg.white_balance_blue = arv_device_get_integer_feature_value(p_device_, "WhiteBalanceBlueRegister") / 255.;
  }
  // the JAI cameras become too slow when reading out the DigitalRed and DigitalBlue values
  // the white balance is adjusted by adjusting the Gain values for Red and Blue pixels
  else if (strcmp("JAI Corporation", vendor_name) == 0)
  {
    msg.white_balance_red = 1.0;
    msg.white_balance_green = 1.0;
    msg.white_balance_blue = 1.0;
  }
  // the Basler cameras use the 'BalanceRatioAbs' keyword instead
  else if (strcmp("Basler", vendor_name) == 0)
  {
    arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
    msg.white_balance_red = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
    arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
    msg.white_balance_green = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
    arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
    msg.white_balance_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatioAbs");
  }
  // the standard way
  else if (implemented_features_["BalanceRatio"] && implemented_features_["BalanceRatioSelector"])
  {
    arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Red");
    msg.white_balance_red = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
    arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Green");
    msg.white_balance_green = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
    arv_device_set_string_feature_value(p_device_, "BalanceRatioSelector", "Blue");
    msg.white_balance_blue = arv_device_get_float_feature_value(p_device_, "BalanceRatio");
  }

  // device temperature: Basler uses "TemperatureAbs"
  if (strcmp("Basler", vendor_name) == 0) {
    msg.temperature = static_cast<float>(arv_device_get_float_feature_value(p_device_, "TemperatureAbs"));
  }
  else if (implemented_features_["DeviceTemperature"])
  {
    msg.temperature = arv_device_get_float_feature_value(p_device_, "DeviceTemperature");
  }

}
1344 
1345 void CameraAravisNodelet::controlLostCallback(ArvDevice *p_gv_device, gpointer can_instance)
1346 {
1347  CameraAravisNodelet *p_can = (CameraAravisNodelet*)can_instance;
1348  ROS_ERROR("Control to aravis device lost.");
1349  nodelet::NodeletUnload unload_service;
1350  unload_service.request.name = p_can->getName();
1351  if (false == ros::service::call(ros::this_node::getName() + "/unload_nodelet", unload_service))
1352  {
1353  ros::shutdown();
1354  }
1355 }
1356 
1358 {
1359  software_trigger_active_ = true;
1360  ROS_INFO("Software trigger started.");
1361  std::chrono::system_clock::time_point next_time = std::chrono::system_clock::now();
1362  while (ros::ok() && software_trigger_active_)
1363  {
1364  next_time += std::chrono::milliseconds(size_t(std::round(1000.0 / config_.softwaretriggerrate)));
1365  if (std::any_of(cam_pubs_.begin(), cam_pubs_.end(),
1366  [](image_transport::CameraPublisher pub){ return pub.getNumSubscribers() > 0; })
1367  )
1368  {
1369  arv_device_execute_command(p_device_, "TriggerSoftware");
1370  }
1371  if (next_time > std::chrono::system_clock::now())
1372  {
1373  std::this_thread::sleep_until(next_time);
1374  }
1375  else
1376  {
1377  ROS_WARN("Camera Aravis: Missed a software trigger event.");
1378  next_time = std::chrono::system_clock::now();
1379  }
1380  }
1381  ROS_INFO("Software trigger stopped.");
1382 }
1383 
1384 void CameraAravisNodelet::publishTfLoop(double rate)
1385 {
1386  // Publish optical transform for the camera
1387  ROS_WARN("Publishing dynamic camera transforms (/tf) at %g Hz", rate);
1388 
1389  tf_thread_active_ = true;
1390 
1391  ros::Rate loop_rate(rate);
1392 
1393  while (ros::ok() && tf_thread_active_)
1394  {
1395  // Update the header for publication
1396  tf_optical_.header.stamp = ros::Time::now();
1397  ++tf_optical_.header.seq;
1398  p_tb_->sendTransform(tf_optical_);
1399 
1400  loop_rate.sleep();
1401  }
1402 }
1403 
  // (discoverFeatures body; the signature and opening brace are elided in
  // this listing -- per the Doxygen index this is
  // void CameraAravisNodelet::discoverFeatures().)
  // Walks the camera's GenICam description and records, for every feature
  // node, whether it is implemented and available on this device. Results
  // land in implemented_features_, which is queried throughout this file
  // before touching a feature.
  implemented_features_.clear();
  if (!p_device_)
    return;

  // get the root node of genicam description
  ArvGc *gc = arv_device_get_genicam(p_device_);
  if (!gc)
    return;

  // iterative depth-first traversal of the GenICam DOM
  std::list<ArvDomNode*> todo;
  todo.push_front((ArvDomNode*)arv_gc_get_node(gc, "Root"));

  while (!todo.empty())
  {
    // get next entry
    ArvDomNode *node = todo.front();
    todo.pop_front();
    const std::string name(arv_dom_node_get_node_name(node));

    // Do the indirection
    // ("p..."-prefixed nodes reference other nodes; follow the reference
    // instead of descending into children, except for pInvalidator)
    if (name[0] == 'p')
    {
      if (name.compare("pInvalidator") == 0)
      {
        continue;
      }
      ArvDomNode *inode = (ArvDomNode*)arv_gc_get_node(gc,
                                                       arv_dom_node_get_node_value(arv_dom_node_get_first_child(node)));
      if (inode)
        todo.push_front(inode);
      continue;
    }

    // check for implemented feature
    if (ARV_IS_GC_FEATURE_NODE(node))
    {
      //if (!(ARV_IS_GC_CATEGORY(node) || ARV_IS_GC_ENUM_ENTRY(node) /*|| ARV_IS_GC_PORT(node)*/)) {
      ArvGcFeatureNode *fnode = ARV_GC_FEATURE_NODE(node);
      const std::string fname(arv_gc_feature_node_get_name(fnode));
      const bool usable = arv_gc_feature_node_is_available(fnode, NULL)
          && arv_gc_feature_node_is_implemented(fnode, NULL);

      ROS_INFO_STREAM_COND(verbose_, "Feature " << fname << " is " << (usable ? "usable" : "not usable"));
      implemented_features_.emplace(fname, usable);
      //}
    }

//    if (ARV_IS_GC_PROPERTY_NODE(node)) {
//      ArvGcPropertyNode* pnode = ARV_GC_PROPERTY_NODE(node);
//      const std::string pname(arv_gc_property_node_get_name(pnode));
//      ROS_INFO_STREAM("Property " << pname << " found");
//    }

    // add children in todo-list
    ArvDomNodeList *children = arv_dom_node_get_child_nodes(node);
    const uint l = arv_dom_node_list_get_length(children);
    for (uint i = 0; i < l; ++i)
    {
      todo.push_front(arv_dom_node_list_get_item(children, i));
    }
  }
}
1468 
1469 void CameraAravisNodelet::parseStringArgs(std::string in_arg_string, std::vector<std::string> &out_args) {
1470  size_t array_start = 0;
1471  size_t array_end = in_arg_string.length();
1472  if(array_start != std::string::npos && array_end != std::string::npos) {
1473  // parse the string into an array of parameters
1474  std::stringstream ss(in_arg_string.substr(array_start, array_end - array_start));
1475  while (ss.good()) {
1476  std::string temp;
1477  getline( ss, temp, ',');
1478  boost::trim_left(temp);
1479  boost::trim_right(temp);
1480  out_args.push_back(temp);
1481  }
1482  } else {
1483  // add just the one argument onto the vector
1484  out_args.push_back(in_arg_string);
1485  }
1486 }
1487 
1488 // WriteCameraFeaturesFromRosparam()
1489 // Read ROS parameters from this node's namespace, and see if each parameter has a similarly named & typed feature in the camera. Then set the
1490 // camera feature to that value. For example, if the parameter camnode/Gain is set to 123.0, then we'll write 123.0 to the Gain feature
1491 // in the camera.
1492 //
1493 // Note that the datatype of the parameter *must* match the datatype of the camera feature, and this can be determined by
1494 // looking at the camera's XML file. Camera enum's are string parameters, camera bools are false/true parameters (not 0/1),
1495 // integers are integers, doubles are doubles, etc.
1496 //
// (writeCameraFeaturesFromRosparam body; the signature line is elided in this
// listing -- per the comment block above this is
// void CameraAravisNodelet::writeCameraFeaturesFromRosparam().)
{
  XmlRpc::XmlRpcValue xml_rpc_params;
  // NOTE(review): a declaration was elided in this listing (line 1500 of the
  // original file) -- presumably `XmlRpc::XmlRpcValue::iterator iter;` used
  // by the loop below; confirm against the full source.
  ArvGcNode *p_gc_node;
  GError *error = NULL;

  // fetch every parameter set under this nodelet's private namespace
  getPrivateNodeHandle().getParam(this->getName(), xml_rpc_params);

  if (xml_rpc_params.getType() == XmlRpc::XmlRpcValue::TypeStruct)
  {
    // one camera feature per top-level key; skip keys with no matching
    // implemented feature on the device
    for (iter = xml_rpc_params.begin(); iter != xml_rpc_params.end(); iter++)
    {
      std::string key = iter->first;

      p_gc_node = arv_device_get_feature(p_device_, key.c_str());
      if (p_gc_node && arv_gc_feature_node_is_implemented(ARV_GC_FEATURE_NODE(p_gc_node), &error))
      {
        // unsigned long typeValue = arv_gc_feature_node_get_value_type((ArvGcFeatureNode *)pGcNode);
        // ROS_INFO("%s cameratype=%lu, rosparamtype=%d", key.c_str(), typeValue, static_cast<int>(iter->second.getType()));

        // We'd like to check the value types too, but typeValue is often given as G_TYPE_INVALID, so ignore it.
        switch (iter->second.getType())
        {
          case XmlRpc::XmlRpcValue::TypeBoolean: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeBoolean))// && (typeValue==G_TYPE_INT64))
          {
            // camera bools are integer features with 0/1 values
            int value = (bool)iter->second;
            arv_device_set_integer_feature_value(p_device_, key.c_str(), value);
            ROS_INFO("Read parameter (bool) %s: %d", key.c_str(), value);
          }
            break;

          case XmlRpc::XmlRpcValue::TypeInt: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeInt))// && (typeValue==G_TYPE_INT64))
          {
            int value = (int)iter->second;
            arv_device_set_integer_feature_value(p_device_, key.c_str(), value);
            ROS_INFO("Read parameter (int) %s: %d", key.c_str(), value);
          }
            break;

          case XmlRpc::XmlRpcValue::TypeDouble: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeDouble))// && (typeValue==G_TYPE_DOUBLE))
          {
            double value = (double)iter->second;
            arv_device_set_float_feature_value(p_device_, key.c_str(), value);
            ROS_INFO("Read parameter (float) %s: %f", key.c_str(), value);
          }
            break;

          case XmlRpc::XmlRpcValue::TypeString: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeString))// && (typeValue==G_TYPE_STRING))
          {
            // camera enums are written as plain strings
            std::string value = (std::string)iter->second;
            arv_device_set_string_feature_value(p_device_, key.c_str(), value.c_str());
            ROS_INFO("Read parameter (string) %s: %s", key.c_str(), value.c_str());
          }
            break;

          // NOTE(review): several case labels were elided in this listing
          // (lines 1553-1557 of the original file) -- presumably the
          // remaining XmlRpcValue types falling through to default; confirm
          // against the full source.
          default:
            ROS_WARN("Unhandled rosparam type in writeCameraFeaturesFromRosparam()");
        }
      }
    }
  }
}
1565 
1566 } // end namespace camera_aravis
camera_aravis::CameraAravisNodelet::tf_thread_active_
std::atomic_bool tf_thread_active_
Definition: camera_aravis_nodelet.h:218
response
const std::string response
camera_aravis::CameraAravisNodelet::auto_sub_
ros::Subscriber auto_sub_
Definition: camera_aravis_nodelet.h:222
camera_aravis::CameraAravisNodelet::setStringFeatureCallback
bool setStringFeatureCallback(camera_aravis::set_string_feature_value::Request &request, camera_aravis::set_string_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:601
camera_aravis::CameraAravisNodelet::tf_dyn_thread_
std::thread tf_dyn_thread_
Definition: camera_aravis_nodelet.h:217
camera_aravis::CameraAravisNodelet::rosConnectCallback
void rosConnectCallback()
Definition: camera_aravis_nodelet.cpp:1158
camera_aravis::CameraAravisNodelet::newBufferReady
static void newBufferReady(ArvStream *p_stream, CameraAravisNodelet *p_can, std::string frame_id, size_t stream_id)
Definition: camera_aravis_nodelet.cpp:1193
camera_aravis::CameraAravisNodelet::width
int32_t width
Definition: camera_aravis_nodelet.h:244
ros::service::call
bool call(const std::string &service_name, MReq &req, MRes &res)
ros::Publisher
camera_aravis::CameraAravisNodelet::fillExtendedCameraInfoMessage
void fillExtendedCameraInfoMessage(ExtendedCameraInfo &msg)
Definition: camera_aravis_nodelet.cpp:1289
nodelet::Nodelet::getNodeHandle
ros::NodeHandle & getNodeHandle() const
camera_aravis::CameraAravisNodelet::setExtendedCameraInfo
void setExtendedCameraInfo(std::string channel_name, size_t stream_id)
Definition: camera_aravis_nodelet.cpp:877
image_transport::ImageTransport
camera_aravis::CameraAravisNodelet::p_tb_
std::unique_ptr< tf2_ros::TransformBroadcaster > p_tb_
Definition: camera_aravis_nodelet.h:215
camera_aravis::CameraAravisNodelet::get_integer_service_
ros::ServiceServer get_integer_service_
Definition: camera_aravis_nodelet.h:163
XmlRpc::XmlRpcValue::TypeBase64
TypeBase64
camera_aravis::CameraAravisNodelet::tf_optical_
geometry_msgs::TransformStamped tf_optical_
Definition: camera_aravis_nodelet.h:216
camera_aravis::CameraAravisNodelet::config_min_
Config config_min_
Definition: camera_aravis_nodelet.h:228
camera_aravis::CameraAravisNodelet::set_integer_service_
ros::ServiceServer set_integer_service_
Definition: camera_aravis_nodelet.h:175
ros::NodeHandle::getParam
bool getParam(const std::string &key, bool &b) const
ROS_INFO_STREAM_COND
#define ROS_INFO_STREAM_COND(cond, args)
camera_aravis::CameraAravisNodelet::~CameraAravisNodelet
virtual ~CameraAravisNodelet()
Definition: camera_aravis_nodelet.cpp:57
camera_aravis::szBufferStatusFromInt
static const char * szBufferStatusFromInt[]
Definition: conversion_utils.h:57
camera_aravis::CameraAravisNodelet::extended_camera_info_pubs_
std::vector< ros::Publisher > extended_camera_info_pubs_
Definition: camera_aravis_nodelet.h:225
ros::spinOnce
ROSCPP_DECL void spinOnce()
XmlRpc::XmlRpcValue::TypeInt
TypeInt
XmlRpc::XmlRpcValue::TypeInvalid
TypeInvalid
camera_aravis::CameraAravisNodelet::software_trigger_thread_
std::thread software_trigger_thread_
Definition: camera_aravis_nodelet.h:234
ros::shutdown
ROSCPP_DECL void shutdown()
tf::resolve
std::string resolve(const std::string &prefix, const std::string &frame_name)
ros::NodeHandle::advertiseService
ServiceServer advertiseService(AdvertiseServiceOptions &ops)
camera_aravis::CameraAravisNodelet::convert_formats
std::vector< ConversionFunction > convert_formats
Definition: camera_aravis_nodelet.h:125
camera_aravis::CameraAravisNodelet::syncAutoParameters
void syncAutoParameters()
Definition: camera_aravis_nodelet.cpp:755
camera_aravis::Config
CameraAravisConfig Config
Definition: camera_aravis_nodelet.h:102
camera_aravis
Definition: camera_aravis_nodelet.h:78
ros::Subscriber::shutdown
void shutdown()
camera_aravis::CameraAravisNodelet::get_boolean_service_
ros::ServiceServer get_boolean_service_
Definition: camera_aravis_nodelet.h:172
camera_aravis::CameraAravisNodelet::guid_
std::string guid_
Definition: camera_aravis_nodelet.h:112
camera_aravis::CameraAravisNodelet::p_streams_
std::vector< ArvStream * > p_streams_
Definition: camera_aravis_nodelet.h:121
camera_aravis::CameraAravisNodelet::pub_tf_optical_
bool pub_tf_optical_
Definition: camera_aravis_nodelet.h:115
camera_aravis::CameraAravisNodelet::getIntegerFeatureCallback
bool getIntegerFeatureCallback(camera_aravis::get_integer_feature_value::Request &request, camera_aravis::get_integer_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:554
camera_aravis::CameraAravisNodelet::p_camera_
ArvCamera * p_camera_
Definition: camera_aravis_nodelet.h:117
ros::SubscriberStatusCallback
boost::function< void(const SingleSubscriberPublisher &)> SubscriberStatusCallback
ROS_DEBUG_STREAM
#define ROS_DEBUG_STREAM(args)
image_transport::ImageTransport::advertiseCamera
CameraPublisher advertiseCamera(const std::string &base_topic, uint32_t queue_size, bool latch=false)
nodelet::Nodelet::getPrivateNodeHandle
ros::NodeHandle & getPrivateNodeHandle() const
camera_aravis::CameraAravisNodelet::CameraAravisNodelet
CameraAravisNodelet()
Definition: camera_aravis_nodelet.cpp:53
camera_aravis::CameraAravisNodelet::spawnStream
void spawnStream()
Definition: camera_aravis_nodelet.cpp:466
camera_aravis::CameraAravisNodelet
Definition: camera_aravis_nodelet.h:104
ros::NodeHandle::advertise
Publisher advertise(AdvertiseOptions &ops)
ros::ok
ROSCPP_DECL bool ok()
ROS_WARN_STREAM_ONCE
#define ROS_WARN_STREAM_ONCE(args)
camera_aravis::CameraAravisNodelet::getStringFeatureCallback
bool getStringFeatureCallback(camera_aravis::get_string_feature_value::Request &request, camera_aravis::get_string_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:594
XmlRpc::XmlRpcValue::TypeStruct
TypeStruct
camera_aravis::CameraAravisNodelet::getBooleanFeatureCallback
bool getBooleanFeatureCallback(camera_aravis::get_boolean_feature_value::Request &request, camera_aravis::get_boolean_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:614
camera_aravis::CameraAravisNodelet::publishTfLoop
void publishTfLoop(double rate)
Definition: camera_aravis_nodelet.cpp:1405
class_list_macros.h
tf2_ros::StaticTransformBroadcaster
camera_aravis::CameraAravisNodelet::sensors_
std::vector< Sensor * > sensors_
Definition: camera_aravis_nodelet.h:260
ros::SingleSubscriberPublisher
ROS_WARN_STREAM
#define ROS_WARN_STREAM(args)
camera_aravis::CameraAravisNodelet::camera_infos_
std::vector< sensor_msgs::CameraInfoPtr > camera_infos_
Definition: camera_aravis_nodelet.h:212
camera_aravis::CameraAravisNodelet::get_string_service_
ros::ServiceServer get_string_service_
Definition: camera_aravis_nodelet.h:169
XmlRpc::XmlRpcValue::TypeDouble
TypeDouble
camera_aravis::CameraBufferPool::Ptr
boost::shared_ptr< CameraBufferPool > Ptr
Definition: camera_buffer_pool.h:91
camera_aravis::CameraAravisNodelet::config_max_
Config config_max_
Definition: camera_aravis_nodelet.h:229
PLUGINLIB_EXPORT_CLASS
#define PLUGINLIB_EXPORT_CLASS(class_type, base_class_type)
camera_aravis::CameraAravisNodelet::p_device_
ArvDevice * p_device_
Definition: camera_aravis_nodelet.h:118
ros::Publisher::shutdown
void shutdown()
camera_aravis::CameraAravisNodelet::spawn_stream_thread_
std::thread spawn_stream_thread_
Definition: camera_aravis_nodelet.h:232
camera_aravis::CameraAravisNodelet::config_
Config config_
Definition: camera_aravis_nodelet.h:227
camera_aravis::CameraAravisNodelet::p_camera_info_node_handles_
std::vector< std::unique_ptr< ros::NodeHandle > > p_camera_info_node_handles_
Definition: camera_aravis_nodelet.h:211
tf::getPrefixParam
std::string getPrefixParam(ros::NodeHandle &nh)
camera_aravis::CameraAravisNodelet::setIntegerFeatureCallback
bool setIntegerFeatureCallback(camera_aravis::set_integer_feature_value::Request &request, camera_aravis::set_integer_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:561
camera_aravis::CameraAravisNodelet::reconfigure_server_
std::unique_ptr< dynamic_reconfigure::Server< Config > > reconfigure_server_
Definition: camera_aravis_nodelet.h:206
camera_aravis::CameraAravisNodelet::tuneGvStream
void tuneGvStream(ArvGvStream *p_stream)
Definition: camera_aravis_nodelet.cpp:894
XmlRpc::XmlRpcValue::TypeString
TypeString
camera_aravis::CameraAravisNodelet::set_boolean_service_
ros::ServiceServer set_boolean_service_
Definition: camera_aravis_nodelet.h:184
camera_aravis::CameraAravisNodelet::writeCameraFeaturesFromRosparam
void writeCameraFeaturesFromRosparam()
Definition: camera_aravis_nodelet.cpp:1518
camera_aravis::CameraAravisNodelet::newBufferReadyCallback
static void newBufferReadyCallback(ArvStream *p_stream, gpointer can_instance)
Definition: camera_aravis_nodelet.cpp:1174
camera_info_manager::CameraInfoManager
camera_aravis::CameraAravisNodelet::stream_names_
std::vector< std::string > stream_names_
Definition: camera_aravis_nodelet.h:122
image_transport::CameraPublisher
ROS_WARN
#define ROS_WARN(...)
camera_aravis::CameraAravisNodelet::p_camera_info_managers_
std::vector< std::unique_ptr< camera_info_manager::CameraInfoManager > > p_camera_info_managers_
Definition: camera_aravis_nodelet.h:210
camera_aravis::CameraAravisNodelet::set_float_service_
ros::ServiceServer set_float_service_
Definition: camera_aravis_nodelet.h:178
camera_aravis::CameraAravisNodelet::discoverFeatures
void discoverFeatures()
Definition: camera_aravis_nodelet.cpp:1425
camera_aravis_nodelet.h
ros::NodeHandle::subscribe
Subscriber subscribe(const std::string &topic, uint32_t queue_size, const boost::function< void(C)> &callback, const VoidConstPtr &tracked_object=VoidConstPtr(), const TransportHints &transport_hints=TransportHints())
ROS_INFO_STREAM
#define ROS_INFO_STREAM(args)
camera_aravis::CameraAravisNodelet::set_string_service_
ros::ServiceServer set_string_service_
Definition: camera_aravis_nodelet.h:181
ros::names::remap
ROSCPP_DECL std::string remap(const std::string &name)
camera_aravis::CameraAravisNodelet::spawning_
std::atomic< bool > spawning_
Definition: camera_aravis_nodelet.h:231
camera_aravis::CameraAravisNodelet::cam_pubs_
std::vector< image_transport::CameraPublisher > cam_pubs_
Definition: camera_aravis_nodelet.h:209
XmlRpc::XmlRpcValue::iterator
ValueStruct::iterator iterator
camera_aravis::CameraAravisNodelet::roi_
struct camera_aravis::CameraAravisNodelet::@0 roi_
XmlRpc::XmlRpcValue::end
iterator end()
camera_aravis::CameraAravisNodelet::setBooleanFeatureCallback
bool setBooleanFeatureCallback(camera_aravis::set_boolean_feature_value::Request &request, camera_aravis::set_boolean_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:621
XmlRpc::XmlRpcValue::getType
const Type & getType() const
image_transport::SingleSubscriberPublisher
camera_aravis::CameraAravisNodelet::auto_params_
CameraAutoInfo auto_params_
Definition: camera_aravis_nodelet.h:220
XmlRpc::XmlRpcValue::TypeArray
TypeArray
ros::this_node::getName
const ROSCPP_DECL std::string & getName()
camera_aravis::CameraAravisNodelet::rosReconfigureCallback
void rosReconfigureCallback(Config &config, uint32_t level)
Definition: camera_aravis_nodelet.cpp:921
camera_aravis::CameraAravisNodelet::pub_ext_camera_info_
bool pub_ext_camera_info_
Definition: camera_aravis_nodelet.h:114
camera_aravis::CameraAravisNodelet::verbose_
bool verbose_
Definition: camera_aravis_nodelet.h:111
camera_aravis::CameraAravisNodelet::parseStringArgs
static void parseStringArgs(std::string in_arg_string, std::vector< std::string > &out_args)
Definition: camera_aravis_nodelet.cpp:1490
nodelet::Nodelet
camera_aravis::CameraAravisNodelet::auto_pub_
ros::Publisher auto_pub_
Definition: camera_aravis_nodelet.h:221
camera_aravis::CameraAravisNodelet::onInit
virtual void onInit() override
Definition: camera_aravis_nodelet.cpp:114
camera_aravis::CameraAravisNodelet::reconfigure_mutex_
boost::recursive_mutex reconfigure_mutex_
Definition: camera_aravis_nodelet.h:207
camera_aravis::CameraAravisNodelet::setFloatFeatureCallback
bool setFloatFeatureCallback(camera_aravis::set_float_feature_value::Request &request, camera_aravis::set_float_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:581
camera_aravis::CameraAravisNodelet::setAutoMaster
void setAutoMaster(bool value)
Definition: camera_aravis_nodelet.cpp:825
ROS_ERROR
#define ROS_ERROR(...)
camera_aravis::CameraAravisNodelet::num_streams_
gint num_streams_
Definition: camera_aravis_nodelet.h:120
tf2_ros::TransformBroadcaster
camera_aravis::CameraAravisNodelet::softwareTriggerLoop
void softwareTriggerLoop()
Definition: camera_aravis_nodelet.cpp:1378
camera_aravis::CameraAravisNodelet::controlLostCallback
static void controlLostCallback(ArvDevice *p_gv_device, gpointer can_instance)
Definition: camera_aravis_nodelet.cpp:1366
camera_aravis::CameraAravisNodelet::get_float_service_
ros::ServiceServer get_float_service_
Definition: camera_aravis_nodelet.h:166
nodelet::Nodelet::getName
const std::string & getName() const
ros::NodeHandle::param
T param(const std::string &param_name, const T &default_val) const
camera_aravis::CameraAravisNodelet::software_trigger_active_
std::atomic_bool software_trigger_active_
Definition: camera_aravis_nodelet.h:235
image_transport::SubscriberStatusCallback
boost::function< void(const SingleSubscriberPublisher &)> SubscriberStatusCallback
ros::Rate
camera_aravis::CameraAravisNodelet::getFloatFeatureCallback
bool getFloatFeatureCallback(camera_aravis::get_float_feature_value::Request &request, camera_aravis::get_float_feature_value::Response &response)
Definition: camera_aravis_nodelet.cpp:574
ros::Duration::sleep
bool sleep() const
camera_aravis::CameraAravisNodelet::p_stb_
std::unique_ptr< tf2_ros::StaticTransformBroadcaster > p_stb_
Definition: camera_aravis_nodelet.h:214
camera_aravis::CameraAravisNodelet::p_buffer_pools_
std::vector< CameraBufferPool::Ptr > p_buffer_pools_
Definition: camera_aravis_nodelet.h:123
XmlRpc::XmlRpcValue::begin
iterator begin()
XmlRpc::XmlRpcValue::TypeBoolean
TypeBoolean
camera_aravis::CameraAravisNodelet::implemented_features_
std::unordered_map< std::string, const bool > implemented_features_
Definition: camera_aravis_nodelet.h:238
XmlRpc::XmlRpcValue::TypeDateTime
TypeDateTime
camera_aravis::CameraAravisNodelet::resetPtpClock
void resetPtpClock()
Definition: camera_aravis_nodelet.cpp:634
ROS_INFO
#define ROS_INFO(...)
camera_aravis::CameraAravisNodelet::setAutoSlave
void setAutoSlave(bool value)
Definition: camera_aravis_nodelet.cpp:838
ros::Duration
camera_aravis::CameraAravisNodelet::height
int32_t height
Definition: camera_aravis_nodelet.h:247
camera_aravis::CONVERSIONS_DICTIONARY
const std::map< std::string, ConversionFunction > CONVERSIONS_DICTIONARY
Definition: conversion_utils.h:76
camera_aravis::CameraAravisNodelet::use_ptp_stamp_
bool use_ptp_stamp_
Definition: camera_aravis_nodelet.h:113
XmlRpc::XmlRpcValue
ros::NodeHandle
ros::Time::now
static Time now()
camera_aravis::CameraAravisNodelet::cameraAutoInfoCallback
void cameraAutoInfoCallback(const CameraAutoInfoConstPtr &msg_ptr)
Definition: camera_aravis_nodelet.cpp:647


camera_aravis
Author(s): Boitumelo Ruf, Fraunhofer IOSB , Dominik Kleiser, Fraunhofer IOSB , Dominik A. Klein, Fraunhofer FKIE , Steve Safarik, Straw Lab , Andrew Straw, Straw Lab , Floris van Breugel, van Breugel Lab
autogenerated on Fri Jul 28 2023 02:26:19