stereo_camera.cpp
#include "ensenso_camera/stereo_camera.h"

#include <diagnostic_msgs/DiagnosticArray.h>
#include <sensor_msgs/distortion_models.h>
#include <pcl_ros/point_cloud.h>

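// Creates the stereo camera: allocates the CameraInfo messages, registers an
// action server for each offered action and advertises the image and point
// cloud topics.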
StereoCamera::StereoCamera(ros::NodeHandle nh, std::string serial, std::string fileCameraPath, bool fixed,
                           std::string cameraFrame, std::string targetFrame, std::string robotFrame,
                           std::string wristFrame, std::string linkFrame)
  : Camera(nh, std::move(serial), std::move(fileCameraPath), fixed, std::move(cameraFrame), std::move(targetFrame),
           std::move(linkFrame))
  , robotFrame(std::move(robotFrame))
  , wristFrame(std::move(wristFrame))
{
  leftCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
  rightCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
  leftRectifiedCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
  rightRectifiedCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();

  fitPrimitiveServer =
      make_unique<FitPrimitiveServer>(nh, "fit_primitive", boost::bind(&StereoCamera::onFitPrimitive, this, _1));
  setParameterServer =
      make_unique<SetParameterServer>(nh, "set_parameter", boost::bind(&StereoCamera::onSetParameter, this, _1));

  requestDataServer =
      make_unique<RequestDataServer>(nh, "request_data", boost::bind(&StereoCamera::onRequestData, this, _1));
  locatePatternServer =
      make_unique<LocatePatternServer>(nh, "locate_pattern", boost::bind(&StereoCamera::onLocatePattern, this, _1));
  projectPatternServer =
      make_unique<ProjectPatternServer>(nh, "project_pattern", boost::bind(&StereoCamera::onProjectPattern, this, _1));
  calibrateHandEyeServer = make_unique<CalibrateHandEyeServer>(
      nh, "calibrate_hand_eye", boost::bind(&StereoCamera::onCalibrateHandEye, this, _1));
  calibrateWorkspaceServer = make_unique<CalibrateWorkspaceServer>(
      nh, "calibrate_workspace", boost::bind(&StereoCamera::onCalibrateWorkspace, this, _1));
  telecentricProjectionServer = make_unique<TelecentricProjectionServer>(
      nh, "project_telecentric", boost::bind(&StereoCamera::onTelecentricProjection, this, _1));
  texturedPointCloudServer = make_unique<TexturedPointCloudServer>(
      nh, "texture_point_cloud", boost::bind(&StereoCamera::onTexturedPointCloud, this, _1));

  image_transport::ImageTransport imageTransport(nh);
  leftRawImagePublisher = imageTransport.advertiseCamera("raw/left/image", 1);
  rightRawImagePublisher = imageTransport.advertiseCamera("raw/right/image", 1);
  leftRectifiedImagePublisher = imageTransport.advertiseCamera("rectified/left/image", 1);
  rightRectifiedImagePublisher = imageTransport.advertiseCamera("rectified/right/image", 1);
  disparityMapPublisher = imageTransport.advertise("disparity_map", 1);
  depthImagePublisher = imageTransport.advertiseCamera("depth/image", 1);
  projectedImagePublisher = imageTransport.advertise("depth/projected_depth_map", 1);

  pointCloudPublisher = nh.advertise<pcl::PointCloud<pcl::PointXYZ>>("point_cloud", 1);
  pointCloudPublisherColor = nh.advertise<pcl::PointCloud<pcl::PointXYZRGB>>("point_cloud_color", 1);
  projectedPointCloudPublisher = nh.advertise<pcl::PointCloud<pcl::PointXYZ>>("projected_point_cloud", 1);
}

bool StereoCamera::open()
{
  Camera::open();

  updateCameraInfo();

  return true;
}

void StereoCamera::startServers() const
{
  Camera::startServers();
  requestDataServer->start();
  fitPrimitiveServer->start();
  calibrateHandEyeServer->start();
  calibrateWorkspaceServer->start();
  locatePatternServer->start();
  projectPatternServer->start();
  texturedPointCloudServer->start();
  telecentricProjectionServer->start();
}

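// Fits primitives (spheres, cylinders and planes) into the current point
// cloud with the NxLib's FitPrimitive command. Metric values from the goal
// are scaled with ensenso_conversion::conversionFactor, because the NxLib
// uses different units than ROS.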
void StereoCamera::onFitPrimitive(ensenso_camera_msgs::FitPrimitiveGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(FitPrimitive, fitPrimitiveServer)

  NxLibCommand fitPrimitives(cmdFitPrimitive, serial);
  NxLibItem const& primitives = fitPrimitives.parameters()[itmPrimitive];

  int primitivesCount = 0;
  for (auto const& primitive : goal->primitives)
  {
    if (primitive.type == ensenso_camera_msgs::Primitive::SPHERE)
    {
      primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmRadius][itmMax] = primitive.max_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmCount] = primitive.count;
    }
    else if (primitive.type == ensenso_camera_msgs::Primitive::CYLINDER)
    {
      primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmRadius][itmMax] = primitive.max_radius * ensenso_conversion::conversionFactor;
    }
    else if (primitive.type == ensenso_camera_msgs::Primitive::PLANE)
    {
      primitives[primitivesCount][itmCount] = primitive.count;
    }
    primitives[primitivesCount][itmType] = primitive.type;
    primitives[primitivesCount][itmInlierThreshold] = primitive.inlier_threshold * ensenso_conversion::conversionFactor;
    primitives[primitivesCount][itmInlierFraction] = primitive.inlier_fraction_in;

    primitivesCount++;
  }

  NxLibItem const& commandParameters = fitPrimitives.parameters();
  if (goal->normal_radius)
  {
    commandParameters[itmNormal][itmRadius] = goal->normal_radius;
  }

  // A region of interest that is all zero marks it as unset.
  float const zeroThreshold = 10e-6;
  if (!(std::abs(goal->region_of_interest.lower.x) < zeroThreshold &&
        std::abs(goal->region_of_interest.lower.y) < zeroThreshold &&
        std::abs(goal->region_of_interest.lower.z) < zeroThreshold))
  {
    commandParameters[itmBoundingBox][itmMin] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.lower);
  }
  if (!(std::abs(goal->region_of_interest.upper.x) < zeroThreshold &&
        std::abs(goal->region_of_interest.upper.y) < zeroThreshold &&
        std::abs(goal->region_of_interest.upper.z) < zeroThreshold))
  {
    commandParameters[itmBoundingBox][itmMax] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.upper);
  }

  if (goal->failure_probability != 0)
  {
    commandParameters[itmFailureProbability] = goal->failure_probability;
  }

  if (std::abs(goal->inlier_threshold) > zeroThreshold)
  {
    commandParameters[itmInlierThreshold] = goal->inlier_threshold * ensenso_conversion::conversionFactor;
  }
  if (std::abs(goal->inlier_fraction) > zeroThreshold)
  {
    commandParameters[itmInlierFraction] = goal->inlier_fraction;
  }
  if (std::abs(goal->scaling) > zeroThreshold)
  {
    commandParameters[itmScaling] = goal->scaling;
  }
  if (goal->ransac_iterations != 0)
  {
    commandParameters[itmIterations] = goal->ransac_iterations;
  }

  fitPrimitives.execute();

  ensenso_camera_msgs::FitPrimitiveResult result;

  NxLibItem const primitiveResults = fitPrimitives.result()[itmPrimitive];
  if (primitiveResults.isArray())
  {
    result.primitives.reserve(primitiveResults.count());
    for (int primitiveCount = 0; primitiveCount < primitiveResults.count(); primitiveCount++)
    {
      NxLibItem const& currentPrimitive = primitiveResults[primitiveCount];
      ensenso_camera_msgs::Primitive primitive;

      primitive.type = currentPrimitive[itmType].asString();
      primitive.ransac_iterations = currentPrimitive[itmIterations].asInt();
      primitive.inlier_count = currentPrimitive[itmInlierCount].asInt();
      primitive.inlier_fraction_out = currentPrimitive[itmInlierFraction].asDouble();
      primitive.score = currentPrimitive[itmScore].asDouble();
      primitive.center = ensenso_conversion::toRosPoint(currentPrimitive[itmCenter]);

      if (primitive.type == ensenso_camera_msgs::Primitive::PLANE)
      {
        primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][0]));
        primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][1]));
        primitive.normal = ensenso_conversion::toRosPoint(currentPrimitive[itmNormal], false);
      }
      else if (primitive.type == ensenso_camera_msgs::Primitive::SPHERE)
      {
        primitive.radius = currentPrimitive[itmRadius].asDouble() / ensenso_conversion::conversionFactor;
      }
      else if (primitive.type == ensenso_camera_msgs::Primitive::CYLINDER)
      {
        primitive.axis = ensenso_conversion::toRosPoint(currentPrimitive[itmAxis]);
      }
      result.primitives.emplace_back(primitive);
    }
  }

  fitPrimitiveServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(FitPrimitive)
}

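// Captures a new image pair and assembles the requested data: raw and
// rectified images, the disparity map, a point cloud, a depth image and
// normals. Depending on the goal flags, the results are published on the
// corresponding topics, returned in the action response, or both.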
void StereoCamera::onRequestData(ensenso_camera_msgs::RequestDataGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(RequestData, requestDataServer)

  ensenso_camera_msgs::RequestDataResult result;
  ensenso_camera_msgs::RequestDataFeedback feedback;

  // When the goal neither requests publishing nor including the results in
  // the response, we publish them by default.
  bool publishResults = goal->publish_results;
  if (!goal->publish_results && !goal->include_results_in_response)
  {
    publishResults = true;
  }

  // When no data is requested at all, we default to the point cloud.
  bool requestPointCloud = goal->request_point_cloud || goal->request_depth_image;
  if (!goal->request_raw_images && !goal->request_rectified_images && !goal->request_point_cloud &&
      !goal->request_normals && !goal->request_depth_image)
  {
    requestPointCloud = true;
  }

  bool computePointCloud = requestPointCloud || goal->request_normals;
  bool computeDisparityMap = goal->request_disparity_map || computePointCloud;

  loadParameterSet(goal->parameter_set, computeDisparityMap ? projectorOn : projectorOff);
  ros::Time imageTimestamp = capture();

  PREEMPT_ACTION_IF_REQUESTED

  feedback.images_acquired = true;
  requestDataServer->publishFeedback(feedback);

  if (goal->request_raw_images)
  {
    auto rawImages = imagePairsFromNxLibNode(cameraNode[itmImages][itmRaw], cameraFrame);

    leftCameraInfo->header.stamp = rawImages[0].first->header.stamp;
    leftCameraInfo->header.frame_id = cameraFrame;
    rightCameraInfo->header.stamp = leftCameraInfo->header.stamp;
    rightCameraInfo->header.frame_id = cameraFrame;

    if (goal->include_results_in_response)
    {
      for (auto const& imagePair : rawImages)
      {
        result.left_raw_images.push_back(*imagePair.first);
        result.right_raw_images.push_back(*imagePair.second);
      }
      result.left_camera_info = *leftCameraInfo;
      result.right_camera_info = *rightCameraInfo;
    }
    if (publishResults)
    {
      // We only publish one of the images on the topic, even if FlexView is
      // enabled.
      leftRawImagePublisher.publish(rawImages[0].first, leftCameraInfo);
      rightRawImagePublisher.publish(rawImages[0].second, rightCameraInfo);
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  // If we need the disparity map, we do the rectification implicitly in
  // cmdComputeDisparityMap. This is more efficient when using CUDA.
  if (goal->request_rectified_images && !computeDisparityMap)
  {
    NxLibCommand rectify(cmdRectifyImages, serial);
    rectify.parameters()[itmCameras] = serial;
    rectify.execute();
  }
  else if (computeDisparityMap)
  {
    NxLibCommand disparityMapCommand(cmdComputeDisparityMap, serial);
    disparityMapCommand.parameters()[itmCameras] = serial;
    disparityMapCommand.execute();
  }

  if (goal->request_rectified_images)
  {
    auto rectifiedImages = imagePairsFromNxLibNode(cameraNode[itmImages][itmRectified], cameraFrame);

    leftRectifiedCameraInfo->header.stamp = rectifiedImages[0].first->header.stamp;
    leftRectifiedCameraInfo->header.frame_id = cameraFrame;
    rightRectifiedCameraInfo->header.stamp = leftRectifiedCameraInfo->header.stamp;
    rightRectifiedCameraInfo->header.frame_id = cameraFrame;

    if (goal->include_results_in_response)
    {
      for (auto const& imagePair : rectifiedImages)
      {
        result.left_rectified_images.push_back(*imagePair.first);
        result.right_rectified_images.push_back(*imagePair.second);
      }
      result.left_rectified_camera_info = *leftRectifiedCameraInfo;
      result.right_rectified_camera_info = *rightRectifiedCameraInfo;
    }
    if (publishResults)
    {
      // We only publish one of the images on the topic, even if FlexView is
      // enabled.
      leftRectifiedImagePublisher.publish(rectifiedImages[0].first, leftRectifiedCameraInfo);
      rightRectifiedImagePublisher.publish(rectifiedImages[0].second, rightRectifiedCameraInfo);
    }
  }

  if (goal->request_disparity_map)
  {
    auto disparityMap = imageFromNxLibNode(cameraNode[itmImages][itmDisparityMap], cameraFrame);

    if (goal->include_results_in_response)
    {
      result.disparity_map = *disparityMap;
    }
    if (publishResults)
    {
      disparityMapPublisher.publish(disparityMap);
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  PointCloudROI const* pointCloudROI = nullptr;
  if (parameterSets.at(currentParameterSet).useROI)
  {
    pointCloudROI = &parameterSets.at(currentParameterSet).roi;
  }

  if (computePointCloud)
  {
    updateGlobalLink(imageTimestamp, "", goal->use_cached_transformation);

    NxLibCommand computePointMap(cmdComputePointMap, serial);
    computePointMap.parameters()[itmCameras] = serial;
    computePointMap.execute();

    if (requestPointCloud && !goal->request_normals)
    {
      auto pointCloud = pointCloudFromNxLib(cameraNode[itmImages][itmPointMap], targetFrame, pointCloudROI);

      if (goal->include_results_in_response)
      {
        pcl::toROSMsg(*pointCloud, result.point_cloud);
      }
      if (publishResults)
      {
        pointCloudPublisher.publish(pointCloud);
      }
    }

    if (goal->request_depth_image)
    {
      if (cameraFrame == targetFrame)
      {
        auto depthImage = depthImageFromNxLibNode(cameraNode[itmImages][itmPointMap], targetFrame);

        if (goal->include_results_in_response)
        {
          result.depth_image = *depthImage;
        }

        if (publishResults)
        {
          depthImagePublisher.publish(depthImage, leftRectifiedCameraInfo);
        }
      }
      else
      {
        ROS_WARN_ONCE("Depth images are not yet supported for linked and multi camera usage. Only request depth "
                      "images when using a single stereo camera.");
      }
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  if (goal->request_normals)
  {
    NxLibCommand(cmdComputeNormals, serial).execute();

    auto pointCloud = pointCloudWithNormalsFromNxLib(cameraNode[itmImages][itmPointMap],
                                                     cameraNode[itmImages][itmNormals], targetFrame, pointCloudROI);

    if (goal->include_results_in_response)
    {
      pcl::toROSMsg(*pointCloud, result.point_cloud);
    }
    if (publishResults)
    {
      pointCloudPublisher.publish(pointCloud);
    }
  }

  requestDataServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(RequestData)
}

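// Applies a parameter file and/or a list of single parameters to the given
// parameter set, synchronizes with the NxLib and reads the resulting values
// back into the action result.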
void StereoCamera::onSetParameter(ensenso_camera_msgs::SetParameterGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(SetParameter, setParameterServer)

  ensenso_camera_msgs::SetParameterResult result;

  loadParameterSet(goal->parameter_set);

  result.parameter_file_applied = false;
  if (!goal->parameter_file.empty())
  {
    result.parameter_file_applied = loadSettings(goal->parameter_file);
    if (!result.parameter_file_applied)
    {
      server->setAborted(result);
      return;
    }
  }

  bool projectorChanged = false;
  for (auto const& parameter : goal->parameters)
  {
    writeParameter(parameter);

    if (parameter.key == parameter.PROJECTOR || parameter.key == parameter.FRONT_LIGHT)
    {
      projectorChanged = true;
    }
  }

  // Synchronize to make sure that we read back the correct values.
  NxLibCommand synchronize(cmdSynchronize, serial);
  synchronize.parameters()[itmCameras] = serial;
  synchronize.execute();

  saveParameterSet(goal->parameter_set, projectorChanged);

  // The new parameters might change the camera calibration.
  updateCameraInfo();

  // Read back the actual values.
  for (auto const& parameter : goal->parameters)
  {
    result.results.push_back(*readParameter(parameter.key));
  }

  setParameterServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(SetParameter)
}

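// Collects calibration patterns over the requested number of shots and
// estimates their poses. Multiple shots are only averaged when exactly one
// pattern is visible.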
void StereoCamera::onLocatePattern(ensenso_camera_msgs::LocatePatternGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(LocatePattern, locatePatternServer)

  ensenso_camera_msgs::LocatePatternResult result;
  ensenso_camera_msgs::LocatePatternFeedback feedback;

  loadParameterSet(goal->parameter_set, projectorOff);

  int numberOfShots = goal->number_of_shots;
  if (numberOfShots < 1)
  {
    numberOfShots = 1;
  }

  std::vector<StereoCalibrationPattern> patterns;
  ros::Time imageTimestamp;
  for (int i = 0; i < numberOfShots; i++)
  {
    PREEMPT_ACTION_IF_REQUESTED

    ros::Time timestamp = capture();
    if (i == 0)
      imageTimestamp = timestamp;

    PREEMPT_ACTION_IF_REQUESTED

    if (i == (numberOfShots - 1))
    {
      // The last image has been captured.
      feedback.images_acquired = true;
      locatePatternServer->publishFeedback(feedback);
    }

    bool clearBuffer = (i == 0);
    patterns = collectPattern(clearBuffer);
    if (patterns.empty())
    {
      result.found_pattern = false;
      locatePatternServer->setSucceeded(result);
      return;
    }

    if (patterns.size() > 1)
    {
      // Cannot average multiple shots of multiple patterns. We will cancel the
      // capturing and estimate the pose of each pattern individually.
      break;
    }
  }

  result.found_pattern = true;
  result.patterns.resize(patterns.size());
  for (size_t i = 0; i < patterns.size(); i++)
  {
    patterns[i].writeToMessage(result.patterns[i]);
  }

  updateGlobalLink(imageTimestamp);

  std::string patternFrame = targetFrame;
  if (!goal->target_frame.empty())
  {
    patternFrame = goal->target_frame;
  }

  result.frame = patternFrame;

  if (patterns.size() > 1)
  {
    // Estimate the pose of all the patterns we found.
    auto patternPoses = estimatePatternPoses(imageTimestamp, patternFrame);

    result.pattern_poses.resize(patternPoses.size());
    for (size_t i = 0; i < patternPoses.size(); i++)
    {
      result.pattern_poses[i] = stampedPoseFromTransform(patternPoses[i]);
    }
  }
  else
  {
    // Estimate the pose of a single pattern, averaging over the different
    // shots.
    auto patternPose = estimatePatternPose(imageTimestamp, patternFrame);

    result.pattern_poses.resize(1);
    result.pattern_poses[0] = stampedPoseFromTransform(patternPose);
  }

  if (!goal->tf_frame.empty())
  {
    if (patterns.size() == 1)
    {
      auto transform = transformFromPose(result.pattern_poses[0], goal->tf_frame);
      transformBroadcaster->sendTransform(transform);
    }
    else
    {
      ROS_WARN("Cannot publish the pattern pose in TF, because there are multiple patterns!");
    }
  }

  // The target frame has changed.
  publishCurrentLinks();
  locatePatternServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(LocatePattern)
}

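// Projects a calibration pattern with the given pose into both sensors and
// reports whether it would be completely visible within the image borders.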
void StereoCamera::onProjectPattern(ensenso_camera_msgs::ProjectPatternGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(ProjectPattern, projectPatternServer)

  ensenso_camera_msgs::ProjectPatternResult result;

  tf2::Transform targetFrameTransformation = fromMsg(goal->target_frame_transformation);
  if (isValid(targetFrameTransformation))
  {
    updateTransformations(targetFrameTransformation);
  }
  else
  {
    updateGlobalLink();
  }

  PREEMPT_ACTION_IF_REQUESTED

  if (!checkNxLibVersion(2, 1))
  {
    // In old NxLib versions, the project pattern command does not take the grid
    // spacing as a parameter.
    NxLibItem()[itmParameters][itmPattern][itmGridSpacing] = goal->grid_spacing * 1000;
  }

  tf2::Transform patternPose = fromMsg(goal->pattern_pose);

  // The NxLib expects the grid spacing in millimeters.
  NxLibCommand projectPattern(cmdProjectPattern, serial);
  projectPattern.parameters()[itmCameras] = serial;
  projectPattern.parameters()[itmGridSpacing] = goal->grid_spacing * 1000;
  projectPattern.parameters()[itmGridSize][0] = goal->grid_size_x;
  projectPattern.parameters()[itmGridSize][1] = goal->grid_size_y;
  writePoseToNxLib(patternPose, projectPattern.parameters()[itmPatternPose]);
  projectPattern.execute();

  PREEMPT_ACTION_IF_REQUESTED

  int sensorWidth = cameraNode[itmSensor][itmSize][0].asInt();
  int sensorHeight = cameraNode[itmSensor][itmSize][1].asInt();

  NxLibItem projectedPoints = projectPattern.result()[itmStereo][0][itmPoints];
  NxLibItem leftPoints = projectedPoints[0];
  NxLibItem rightPoints = projectedPoints[1];

  result.pattern_is_visible = true;
  result.left_points.resize(leftPoints.count());
  result.right_points.resize(leftPoints.count());
  for (int i = 0; i < leftPoints.count(); i++)
  {
    double leftX = leftPoints[i][0].asDouble();
    double leftY = leftPoints[i][1].asDouble();
    double rightX = rightPoints[i][0].asDouble();
    double rightY = rightPoints[i][1].asDouble();

    result.left_points[i].x = leftX;
    result.left_points[i].y = leftY;
    result.right_points[i].x = rightX;
    result.right_points[i].y = rightY;

    if (leftX < 0 || leftX > sensorWidth || leftY < 0 || leftY > sensorHeight || rightX < 0 || rightX > sensorWidth ||
        rightY < 0 || rightY > sensorHeight)
    {
      result.pattern_is_visible = false;
    }
  }

  projectPatternServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(ProjectPattern)
}

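// Implements the three phases of the hand-eye calibration: RESET discards
// the collected data, CAPTURE_PATTERN stores a pattern observation together
// with the current robot pose, and START_CALIBRATION runs the NxLib
// optimization on everything collected so far.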
void StereoCamera::onCalibrateHandEye(ensenso_camera_msgs::CalibrateHandEyeGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(CalibrateHandEye, calibrateHandEyeServer)

  ensenso_camera_msgs::CalibrateHandEyeResult result;
  result.command = goal->command;

  if (goal->command == goal->RESET)
  {
    handEyeCalibrationPatternBuffer.clear();
    handEyeCalibrationRobotPoses.clear();
  }
  else if (goal->command == goal->CAPTURE_PATTERN)
  {
    if (robotFrame.empty() || wristFrame.empty())
    {
      result.error_message = "You need to specify a robot base and wrist frame to do a hand eye calibration!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    loadParameterSet(goal->parameter_set, projectorOff);
    ros::Time imageTimestamp = capture();

    PREEMPT_ACTION_IF_REQUESTED

    // Load the pattern buffer that we remembered from the previous
    // calibration steps.
    if (!handEyeCalibrationPatternBuffer.empty())
    {
      NxLibCommand setPatternBuffer(cmdSetPatternBuffer, serial);
      setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
      setPatternBuffer.execute();
    }
    else
    {
      NxLibCommand(cmdDiscardPatterns, serial).execute();
    }

    std::vector<StereoCalibrationPattern> patterns = collectPattern();
    if (patterns.empty())
    {
      result.found_pattern = false;
      calibrateHandEyeServer->setSucceeded(result);
      return;
    }
    if (patterns.size() > 1)
    {
      result.error_message = "Detected multiple calibration patterns during a hand eye calibration!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    PREEMPT_ACTION_IF_REQUESTED

    result.found_pattern = true;
    patterns[0].writeToMessage(result.pattern);

    auto patternPose = estimatePatternPose(imageTimestamp, cameraFrame, true);
    result.pattern_pose = stampedPoseFromTransform(patternPose).pose;

    PREEMPT_ACTION_IF_REQUESTED

    geometry_msgs::TransformStamped robotPose;
    try
    {
      robotPose = tfBuffer.lookupTransform(robotFrame, wristFrame, ros::Time(0));
    }
    catch (tf2::TransformException& e)
    {
      result.error_message = std::string("Could not look up the robot pose due to the TF error: ") + e.what();
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    // Remember the newly collected data for the next step.
    NxLibCommand getPatternBuffer(cmdGetPatternBuffer, serial);
    getPatternBuffer.execute();
    handEyeCalibrationPatternBuffer = getPatternBuffer.result()[itmPatterns].asJson();

    handEyeCalibrationRobotPoses.push_back(fromStampedMessage(robotPose));

    result.robot_pose = stampedPoseFromTransform(robotPose).pose;
  }
  else if (goal->command == goal->START_CALIBRATION)
  {
    if (handEyeCalibrationRobotPoses.size() < 5)
    {
      result.error_message = "You need to collect at least 5 patterns before starting a hand eye calibration!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    // Load the pattern observations.
    size_t numberOfPatterns = 0;
    NxLibCommand setPatternBuffer(cmdSetPatternBuffer, serial);
    if (!goal->pattern_observations.empty())
    {
      for (size_t i = 0; i < goal->pattern_observations.size(); i++)
      {
        StereoCalibrationPattern pattern(goal->pattern_observations[i]);

        NxLibItem patternNode = setPatternBuffer.parameters()[itmPatterns][i][serial];
        pattern.writeToNxLib(patternNode[itmLeft][0]);
        pattern.writeToNxLib(patternNode[itmRight][0], true);
      }
    }
    else
    {
      setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
    }
    numberOfPatterns = setPatternBuffer.parameters()[itmPatterns].count();
    setPatternBuffer.execute();

    // Load the corresponding robot poses.
    auto robotPoses = handEyeCalibrationRobotPoses;
    if (!goal->robot_poses.empty())
    {
      robotPoses.clear();
      for (auto const& pose : goal->robot_poses)
      {
        tf2::Transform tfPose = fromMsg(pose);
        robotPoses.push_back(tfPose);
      }
    }

    if (robotPoses.size() != numberOfPatterns)
    {
      result.error_message = "The number of pattern observations does not match the number of robot poses!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    // Load the initial guesses from the action goal.
    tf2::Transform link, patternPose;
    link = fromMsg(goal->link);
    patternPose = fromMsg(goal->pattern_pose);

    NxLibCommand calibrateHandEye(cmdCalibrateHandEye, serial);
    calibrateHandEye.parameters()[itmSetup] = fixed ? valFixed : valMoving;
    // The target node will be reset anyway before the next calibration run.
    calibrateHandEye.parameters()[itmTarget] = TARGET_FRAME_LINK + "_" + serial;
    if (isValid(link))
    {
      writePoseToNxLib(link.inverse(), calibrateHandEye.parameters()[itmLink]);
    }
    if (isValid(patternPose))
    {
      writePoseToNxLib(patternPose, calibrateHandEye.parameters()[itmPatternPose]);
    }
    for (size_t i = 0; i < robotPoses.size(); i++)
    {
      writePoseToNxLib(robotPoses[i], calibrateHandEye.parameters()[itmTransformations][i]);
    }

    calibrateHandEye.execute(false);

    auto getCalibrationResidual = [](NxLibItem const& node) {  // NOLINT
      if (node[itmResidual].exists())
      {
        return node[itmResidual].asDouble();
      }
      // Compatibility with the SDK 2.0.
      return node[itmReprojectionError].asDouble();
    };

    // Poll the progress of the running calibration and forward it as action
    // feedback.
    ros::Rate waitingRate(5);
    while (!calibrateHandEye.finished())
    {
      ensenso_camera_msgs::CalibrateHandEyeFeedback feedback;
      feedback.number_of_iterations = calibrateHandEye.result()[itmProgress][itmIterations].asInt();
      feedback.residual = getCalibrationResidual(calibrateHandEye.result()[itmProgress]);
      calibrateHandEyeServer->publishFeedback(feedback);

      if (calibrateHandEyeServer->isPreemptRequested())
      {
        NxLibCommand(cmdBreak, serial).execute();

        calibrateHandEyeServer->setPreempted();
        return;
      }

      waitingRate.sleep();
    }

    // NxLib time is in milliseconds, ROS expects time to be in seconds.
    result.calibration_time = calibrateHandEye.result()[itmTime].asDouble() / 1000;
    result.number_of_iterations = calibrateHandEye.result()[itmIterations].asInt();
    result.residual = getCalibrationResidual(calibrateHandEye.result());

    result.link = poseFromTransform(poseFromNxLib(cameraNode[itmLink]).inverse());
    result.pattern_pose = poseFromTransform(poseFromNxLib(calibrateHandEye.result()[itmPatternPose]));

    if (goal->write_calibration_to_eeprom)
    {
      // Save the new calibration link to the camera's EEPROM.
      NxLibCommand storeCalibration(cmdStoreCalibration, serial);
      storeCalibration.parameters()[itmCameras] = serial;
      storeCalibration.parameters()[itmLink] = true;
      storeCalibration.execute();
    }
  }

  // The target frame has changed.
  publishCurrentLinks();
  calibrateHandEyeServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(CalibrateHandEye)
}

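// Calibrates the camera against a workspace by observing a pattern with a
// known pose and writing the resulting link into the NxLib (and optionally
// into the camera's EEPROM).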
void StereoCamera::onCalibrateWorkspace(const ensenso_camera_msgs::CalibrateWorkspaceGoalConstPtr& goal)
{
  START_NXLIB_ACTION(CalibrateWorkspace, calibrateWorkspaceServer)

  if (!fixed)
  {
    ROS_WARN("You are performing a workspace calibration for a moving camera. "
             "Are you sure that this is what you want to do?");
  }

  ensenso_camera_msgs::CalibrateWorkspaceResult result;
  result.successful = true;

  loadParameterSet(goal->parameter_set, projectorOff);

  int numberOfShots = goal->number_of_shots;
  if (numberOfShots < 1)
  {
    numberOfShots = 1;
  }

  ros::Time imageTimestamp;
  for (int i = 0; i < numberOfShots; i++)
  {
    PREEMPT_ACTION_IF_REQUESTED

    ros::Time timestamp = capture();
    if (i == 0)
      imageTimestamp = timestamp;

    PREEMPT_ACTION_IF_REQUESTED

    bool clearBuffer = (i == 0);
    std::vector<StereoCalibrationPattern> patterns = collectPattern(clearBuffer);
    if (patterns.size() != 1)
    {
      result.successful = false;
      calibrateWorkspaceServer->setSucceeded(result);
      return;
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  tf2::Transform patternTransformation = fromStampedMessage(estimatePatternPose(imageTimestamp));

  PREEMPT_ACTION_IF_REQUESTED

  tf2::Transform definedPatternPose = fromMsg(goal->defined_pattern_pose);

  NxLibCommand calibrateWorkspace(cmdCalibrateWorkspace, serial);
  calibrateWorkspace.parameters()[itmCameras] = serial;
  writePoseToNxLib(patternTransformation, calibrateWorkspace.parameters()[itmPatternPose]);
  writePoseToNxLib(definedPatternPose, calibrateWorkspace.parameters()[itmDefinedPose]);
  calibrateWorkspace.parameters()[itmTarget] = TARGET_FRAME_LINK + "_" + serial;
  calibrateWorkspace.execute();

  if (goal->write_calibration_to_eeprom)
  {
    // Save the new calibration link to the camera's EEPROM.
    NxLibCommand storeCalibration(cmdStoreCalibration, serial);
    storeCalibration.parameters()[itmCameras] = serial;
    storeCalibration.parameters()[itmLink] = true;
    storeCalibration.execute();
  }

  publishCurrentLinks();
  calibrateWorkspaceServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(CalibrateWorkspace)
}

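// Renders the scene from a telecentric (orthographic) view that is given
// either as an explicit view pose or as a TF frame, and returns the result
// as a point cloud and/or a depth image.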
void StereoCamera::onTelecentricProjection(ensenso_camera_msgs::TelecentricProjectionGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(TelecentricProjection, telecentricProjectionServer)

  ensenso_camera_msgs::TelecentricProjectionResult result;

  bool useViewPose = isValid(goal->view_pose);
  bool frameGiven = !goal->frame.empty();

  if (!frameGiven)
  {
    std::string error = "You have to define a valid frame to which the projection will be published. Aborting.";
    ROS_ERROR("%s", error.c_str());
    result.error.message = error;
    telecentricProjectionServer->setAborted(result);
    return;
  }

  tf2::Transform transform;
  std::string publishingFrame = useViewPose ? targetFrame : goal->frame;

  if (useViewPose)
  {
    transform = fromMsg(goal->view_pose);
  }
  else
  {
    transform = getLatestTransform(tfBuffer, targetFrame, goal->frame);
  }

  // Check the goal parameters (message generation defaults them to 0 or an
  // empty string).
  int pixelScale = goal->pixel_scale != 0. ? goal->pixel_scale : 1;
  double scaling = goal->scaling != 0. ? goal->scaling : 1.0;
  int sizeWidth = goal->size_width != 0 ? goal->size_width : 1024;
  int sizeHeight = goal->size_height != 0 ? goal->size_height : 768;
  bool useOpenGL = goal->use_opengl == 1;

  NxLibCommand renderPointMap(cmdRenderPointMap, serial);
  writePoseToNxLib(transform, renderPointMap.parameters()[itmViewPose]);
  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
  {
    renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
  }
  renderPointMap.parameters()[itmPixelSize] = pixelScale;
  renderPointMap.parameters()[itmScaling] = scaling;
  renderPointMap.parameters()[itmSize][0] = sizeWidth;
  renderPointMap.parameters()[itmSize][1] = sizeHeight;
  renderPointMap.parameters()[itmUseOpenGL] = useOpenGL;

  renderPointMap.execute();

  if (!renderPointMap.result()[itmImages][itmRenderPointMap].exists())
  {
    std::string error{ "The RenderPointMap command failed. Aborting action." };
    result.error.message = error;
    ROS_ERROR("%s", error.c_str());
    telecentricProjectionServer->setAborted(result);
    return;
  }

  if (goal->publish_results || goal->include_results_in_response)
  {
    // When neither a point cloud nor a depth image is requested explicitly,
    // we default to the point cloud.
    if (goal->request_point_cloud || (!goal->request_point_cloud && !goal->request_depth_image))
    {
      auto pointCloud = pointCloudFromNxLib(renderPointMap.result()[itmImages][itmRenderPointMap], goal->frame);
      if (goal->publish_results)
      {
        projectedPointCloudPublisher.publish(pointCloud);
      }
      if (goal->include_results_in_response)
      {
        pcl::toROSMsg(*pointCloud, result.projected_point_cloud);
        telecentricProjectionServer->setSucceeded(result);
      }
      else
      {
        telecentricProjectionServer->setSucceeded();
      }
    }
    if (goal->request_depth_image)
    {
      auto renderedView = depthImageFromNxLibNode(renderPointMap.result()[itmImages][itmRenderPointMap], goal->frame);
      if (goal->publish_results)
      {
        projectedImagePublisher.publish(renderedView);
      }
      if (goal->include_results_in_response)
      {
        result.projected_depth_map = *renderedView;
        telecentricProjectionServer->setSucceeded(result);
      }
      else
      {
        telecentricProjectionServer->setSucceeded();
      }
    }
  }
  else
  {
    telecentricProjectionServer->setSucceeded();
  }

  FINISH_NXLIB_ACTION(TelecentricProjection)
}

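// Renders a point cloud that is textured with the image of a mono camera,
// using the NxLib's RenderPointMap command.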
void StereoCamera::onTexturedPointCloud(ensenso_camera_msgs::TexturedPointCloudGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(TexturedPointCloud, texturedPointCloudServer)

  ensenso_camera_msgs::TexturedPointCloudResult result;

  if (goal->mono_serial.empty())
  {
    std::string error = "In order to use this action, you have to specify one mono serial.";
    ROS_ERROR("%s", error.c_str());
    result.error.message = error;
    texturedPointCloudServer->setAborted(result);
    return;
  }

  double far = goal->far_plane ? goal->far_plane : 10000.;
  double near = goal->near_plane ? goal->near_plane : -10000.;

  NxLibCommand renderPointMap(cmdRenderPointMap, serial);

  // If goal->serials is left empty, the command defaults to all open stereo
  // cameras.
  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
  {
    renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
  }
  renderPointMap.parameters()[itmCamera] = goal->mono_serial;
  renderPointMap.parameters()[itmUseOpenGL] = goal->use_openGL;
  renderPointMap.parameters()[itmNear] = near;
  renderPointMap.parameters()[itmTexture] = true;
  renderPointMap.parameters()[itmFar] = far;

  renderPointMap.execute();

  if (goal->publish_results || goal->include_results_in_response)
  {
    NxLibItem const& cmdResults = renderPointMap.result();
    auto cloudColored = pointCloudTexturedFromNxLib(cmdResults[itmImages][itmRenderPointMapTexture],
                                                    cmdResults[itmImages][itmRenderPointMap], targetFrame);
    if (goal->publish_results)
    {
      pointCloudPublisherColor.publish(cloudColored);
    }
    if (goal->include_results_in_response)
    {
      pcl::toROSMsg(*cloudColored, result.point_cloud);
      texturedPointCloudServer->setSucceeded(result);
    }
    else
    {
      texturedPointCloudServer->setSucceeded();
    }
  }
  else
  {
    texturedPointCloudServer->setSucceeded();
  }

  FINISH_NXLIB_ACTION(TexturedPointCloud)
}

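// Stores the current NxLib parameters under the given parameter set name.
// When the projector or front light was set manually, the automatic
// projector control is disabled for this set.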
void StereoCamera::saveParameterSet(std::string name, bool projectorWasSet)
{
  if (name.empty())
  {
    name = DEFAULT_PARAMETER_SET;
  }

  Camera::saveParameterSet(name);

  ParameterSet& parameterSet = parameterSets.at(name);
  if (projectorWasSet)
  {
    parameterSet.autoProjector = false;
  }
}

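// Loads a parameter set and, if the set still uses automatic projector
// control, switches the projector and front light according to the requested
// projector state.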
void StereoCamera::loadParameterSet(std::string name, ProjectorState projector)
{
  Camera::loadParameterSet(std::move(name));

  bool changedParameters = false;
  ParameterSet const& parameterSet = parameterSets.at(currentParameterSet);

  if (parameterSet.autoProjector && projector != projectorDontCare)
  {
    try
    {
      if (projector == projectorOn)
      {
        cameraNode[itmParameters][itmCapture][itmProjector] = true;
        cameraNode[itmParameters][itmCapture][itmFrontLight] = false;
      }
      else
      {
        cameraNode[itmParameters][itmCapture][itmProjector] = false;
        cameraNode[itmParameters][itmCapture][itmFrontLight] = true;
      }
      changedParameters = true;
    }
    catch (...)
    {
      // Setting the projector and front light fails on file cameras.
    }
  }

  if (changedParameters)
  {
    NxLibCommand synchronize(cmdSynchronize, serial);
    synchronize.parameters()[itmCameras] = serial;
    synchronize.execute();
  }
}

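// Captures a single image pair and returns the timestamp of the left raw
// image. File cameras return the current time instead, because their stored
// timestamps lie too far in the past for TF lookups.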
ros::Time StereoCamera::capture() const
{
  ROS_DEBUG("Capturing an image...");

  NxLibCommand capture(cmdCapture, serial);
  capture.parameters()[itmCameras] = serial;
  capture.execute();

  NxLibItem imageNode = cameraNode[itmImages][itmRaw];
  imageNode = imageNode[itmLeft];

  if (isFileCamera)
  {
    // This workaround is needed, because the timestamp of captures from file cameras will not change over time. When
    // looking up the current tf tree, this will result in errors, because the time of the original timestamp is
    // requested, which lies in the past (and most often longer than the tfBuffer will store the transform!).
    return ros::Time::now();
  }

  return timestampFromNxLibNode(imageNode);
}

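// Collects calibration patterns with the NxLib's CollectPattern command and
// converts them into StereoCalibrationPattern objects. Returns an empty
// vector when no pattern could be found or decoded in both cameras.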
std::vector<StereoCalibrationPattern> StereoCamera::collectPattern(bool clearBuffer) const
{
  if (clearBuffer)
  {
    NxLibCommand(cmdDiscardPatterns, serial).execute();
  }

  NxLibCommand collectPattern(cmdCollectPattern, serial);
  collectPattern.parameters()[itmCameras] = serial;
  collectPattern.parameters()[itmDecodeData] = true;
  collectPattern.parameters()[itmFilter][itmCameras] = serial;
  bool useModel = true;
  collectPattern.parameters()[itmFilter][itmUseModel] = useModel;
  collectPattern.parameters()[itmFilter][itmType] = valStatic;
  collectPattern.parameters()[itmFilter][itmValue] = true;
  try
  {
    collectPattern.execute();
  }
  catch (NxLibException& e)
  {
    if (e.getErrorCode() == NxLibExecutionFailed)
    {
      if (collectPattern.result()[itmErrorSymbol] == errPatternNotFound ||
          collectPattern.result()[itmErrorSymbol] == errPatternNotDecodable)
      {
        return {};
      }
    }
    throw;
  }

  if (!collectPattern.result()[itmStereo].exists())
  {
    // We did find patterns, but only in one of the cameras.
    return {};
  }

  std::vector<StereoCalibrationPattern> result;

  for (int i = 0; i < collectPattern.result()[itmStereo].count(); i++)
  {
    result.emplace_back(collectPattern.result()[itmStereo][i]);
  }

  // Extract the pattern's image points from the result.
  NxLibItem pattern = collectPattern.result()[itmPatterns][0][serial];
  if (pattern[itmLeft].count() > 1 || pattern[itmRight].count() > 1)
  {
    // We cannot tell which of the patterns in the two cameras belong together,
    // because that would need a comparison with the stereo model.
    return result;
  }

  for (size_t i = 0; i < result.size(); i++)
  {
    for (int j = 0; j < pattern[itmLeft][i][itmPoints].count(); j++)
    {
      NxLibItem pointNode = pattern[itmLeft][i][itmPoints][j];

      ensenso_camera_msgs::ImagePoint point;
      point.x = pointNode[0].asDouble();
      point.y = pointNode[1].asDouble();
      result.at(i).leftPoints.push_back(point);
    }
    for (int j = 0; j < pattern[itmRight][i][itmPoints].count(); j++)
    {
      NxLibItem pointNode = pattern[itmRight][i][itmPoints][j];

      ensenso_camera_msgs::ImagePoint point;
      point.x = pointNode[0].asDouble();
      point.y = pointNode[1].asDouble();
      result.at(i).rightPoints.push_back(point);
    }
  }

  return result;
}

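// Fills a ROS CameraInfo message from the NxLib calibration. The NxLib
// stores its matrices in column-major order, while the CameraInfo fields K,
// R and P are row-major, so the indices are swapped while copying.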
void StereoCamera::fillCameraInfoFromNxLib(sensor_msgs::CameraInfoPtr const& info, bool right, bool rectified) const
{
  fillBasicCameraInfoFromNxLib(info);

  NxLibItem monoCalibrationNode = cameraNode[itmCalibration][itmMonocular][right ? itmRight : itmLeft];
  NxLibItem stereoCalibrationNode = cameraNode[itmCalibration][itmStereo][right ? itmRight : itmLeft];

  if (rectified)
  {
    // For the rectified images all transformations are the identity (because
    // all of the distortions were already removed), except for the stereo
    // camera matrix.

    info->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
    info->D.clear();
    info->D.resize(5, 0);

    info->K.fill(0);
    info->P.fill(0);
    info->R.fill(0);
    for (int row = 0; row < 3; row++)
    {
      for (int column = 0; column < 3; column++)
      {
        info->P[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();

        if (row == column)
        {
          info->K[3 * row + column] = 1;
          info->R[3 * row + column] = 1;
        }
      }
    }
  }
  else
  {
    info->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
    info->D.clear();
    fillDistortionParamsFromNxLib(monoCalibrationNode[itmDistortion], info);

    info->K.fill(0);  // The mono camera matrix.
    info->P.fill(0);  // The stereo camera matrix.
    info->R.fill(0);
    for (int row = 0; row < 3; row++)
    {
      for (int column = 0; column < 3; column++)
      {
        info->K[3 * row + column] = monoCalibrationNode[itmCamera][column][row].asDouble();
        info->P[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();

        info->R[3 * row + column] = stereoCalibrationNode[itmRotation][column][row].asDouble();
      }
    }
  }

  if (right)
  {
    // Add the offset of the right camera relative to the left one to the
    // projection matrix.
    double fx = stereoCalibrationNode[itmCamera][0][0].asDouble();
    double baseline = cameraNode[itmCalibration][itmStereo][itmBaseline].asDouble() / 1000.0;
    info->P[3] = -fx * baseline;
  }

  int leftTopX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][0].asInt();
  int leftTopY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][1].asInt();
  int rightBottomX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][0].asInt();
  int rightBottomY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][1].asInt();
  info->roi.x_offset = leftTopX;
  info->roi.y_offset = leftTopY;
  info->roi.width = rightBottomX - leftTopX;
  info->roi.height = rightBottomY - leftTopY;
  if (rectified)
  {
    info->roi.do_rectify = true;
  }
}

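// Refreshes all four cached CameraInfo messages from the current NxLib
// calibration.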
void StereoCamera::updateCameraInfo()
{
  fillCameraInfoFromNxLib(leftCameraInfo, false);
  fillCameraInfoFromNxLib(leftRectifiedCameraInfo, false, true);
  fillCameraInfoFromNxLib(rightCameraInfo, true);
  fillCameraInfoFromNxLib(rightRectifiedCameraInfo, true, true);
}

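// Reads the parameters that only exist for stereo cameras (the region of
// interest and FlexView); all other keys are delegated to the base class.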
ensenso_camera_msgs::ParameterPtr StereoCamera::readParameter(std::string const& key) const
{
  auto message = boost::make_shared<ensenso_camera_msgs::Parameter>();
  message->key = key;

  if (key == ensenso_camera_msgs::Parameter::REGION_OF_INTEREST)
  {
    PointCloudROI const& roi = parameterSets.at(currentParameterSet).roi;

    message->region_of_interest_value.lower.x = roi.minX;
    message->region_of_interest_value.lower.y = roi.minY;
    message->region_of_interest_value.lower.z = roi.minZ;
    message->region_of_interest_value.upper.x = roi.maxX;
    message->region_of_interest_value.upper.y = roi.maxY;
    message->region_of_interest_value.upper.z = roi.maxZ;
  }
  else if (key == ensenso_camera_msgs::Parameter::FLEX_VIEW)
  {
    NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];

    if (!flexViewNode.exists())
    {
      message->float_value = -1;
    }
    else if (flexViewNode.isNumber())
    {
      message->float_value = flexViewNode.asInt();
    }
    else
    {
      message->float_value = 0;
    }
  }
  else
  {
    message = Camera::readParameter(key);
  }

  return message;
}

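// Writes the parameters that only exist for stereo cameras (the region of
// interest and FlexView); all other keys are delegated to the base class.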
void StereoCamera::writeParameter(ensenso_camera_msgs::Parameter const& parameter)
{
  if (parameter.key == ensenso_camera_msgs::Parameter::REGION_OF_INTEREST)
  {
    PointCloudROI& roi = parameterSets.at(currentParameterSet).roi;

    roi.minX = parameter.region_of_interest_value.lower.x;
    roi.minY = parameter.region_of_interest_value.lower.y;
    roi.minZ = parameter.region_of_interest_value.lower.z;
    roi.maxX = parameter.region_of_interest_value.upper.x;
    roi.maxY = parameter.region_of_interest_value.upper.y;
    roi.maxZ = parameter.region_of_interest_value.upper.z;

    parameterSets.at(currentParameterSet).useROI = !roi.isEmpty();
  }
  else if (parameter.key == ensenso_camera_msgs::Parameter::FLEX_VIEW)
  {
    NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];

    if (!flexViewNode.exists())
    {
      ROS_WARN("Writing the parameter FlexView, but the camera does not support it!");
      return;
    }

    int n = static_cast<int>(std::round(parameter.float_value));
    if (n <= 1)
    {
      flexViewNode = false;
    }
    else
    {
      flexViewNode = n;
    }
  }
  else
  {
    Camera::writeParameter(parameter);
  }
}