stereo_camera.cpp

#include "ensenso_camera/stereo_camera.h"

#include <diagnostic_msgs/DiagnosticArray.h>
#include <sensor_msgs/distortion_models.h>
#include <pcl_ros/point_cloud.h>

StereoCamera::StereoCamera(ros::NodeHandle nh, std::string serial, std::string fileCameraPath, bool fixed,
                           std::string cameraFrame, std::string targetFrame, std::string robotFrame,
                           std::string wristFrame, std::string linkFrame, int captureTimeout,
                           std::unique_ptr<ensenso_camera::VirtualObjectHandler> virtualObjectHandler)
  : Camera(nh, std::move(serial), std::move(fileCameraPath), fixed, std::move(cameraFrame), std::move(targetFrame),
           std::move(linkFrame))
  , robotFrame(std::move(robotFrame))
  , wristFrame(std::move(wristFrame))
  , captureTimeout(captureTimeout)
  , virtualObjectHandler(std::move(virtualObjectHandler))
{
  leftCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
  rightCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
  leftRectifiedCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
  rightRectifiedCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();

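  // Each of the following actionlib servers wraps a single NxLib command. For
  // illustration, a hedged sketch of how a client node might call one of them
  // (assuming the standard actionlib client API and the action names
  // advertised below):
  //
  //   actionlib::SimpleActionClient<ensenso_camera_msgs::RequestDataAction> client("request_data", true);
  //   client.waitForServer();
  //   ensenso_camera_msgs::RequestDataGoal goal;
  //   goal.request_point_cloud = true;
  //   client.sendGoal(goal);
  //   client.waitForResult();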
  fitPrimitiveServer =
      ::make_unique<FitPrimitiveServer>(nh, "fit_primitive", boost::bind(&StereoCamera::onFitPrimitive, this, _1));
  setParameterServer =
      ::make_unique<SetParameterServer>(nh, "set_parameter", boost::bind(&StereoCamera::onSetParameter, this, _1));

  requestDataServer =
      ::make_unique<RequestDataServer>(nh, "request_data", boost::bind(&StereoCamera::onRequestData, this, _1));
  locatePatternServer =
      ::make_unique<LocatePatternServer>(nh, "locate_pattern", boost::bind(&StereoCamera::onLocatePattern, this, _1));
  projectPatternServer =
      ::make_unique<ProjectPatternServer>(nh, "project_pattern", boost::bind(&StereoCamera::onProjectPattern, this, _1));
  calibrateHandEyeServer = ::make_unique<CalibrateHandEyeServer>(
      nh, "calibrate_hand_eye", boost::bind(&StereoCamera::onCalibrateHandEye, this, _1));
  calibrateWorkspaceServer = ::make_unique<CalibrateWorkspaceServer>(
      nh, "calibrate_workspace", boost::bind(&StereoCamera::onCalibrateWorkspace, this, _1));
  telecentricProjectionServer = ::make_unique<TelecentricProjectionServer>(
      nh, "project_telecentric", boost::bind(&StereoCamera::onTelecentricProjection, this, _1));
  texturedPointCloudServer = ::make_unique<TexturedPointCloudServer>(
      nh, "texture_point_cloud", boost::bind(&StereoCamera::onTexturedPointCloud, this, _1));

  image_transport::ImageTransport imageTransport(nh);
  leftRawImagePublisher = imageTransport.advertiseCamera("raw/left/image", 1);
  rightRawImagePublisher = imageTransport.advertiseCamera("raw/right/image", 1);
  leftRectifiedImagePublisher = imageTransport.advertiseCamera("rectified/left/image", 1);
  rightRectifiedImagePublisher = imageTransport.advertiseCamera("rectified/right/image", 1);
  disparityMapPublisher = imageTransport.advertise("disparity_map", 1);
  depthImagePublisher = imageTransport.advertiseCamera("depth/image", 1);
  projectedImagePublisher = imageTransport.advertise("depth/projected_depth_map", 1);

  pointCloudPublisher = nh.advertise<pcl::PointCloud<pcl::PointXYZ>>("point_cloud", 1);
  pointCloudPublisherColor = nh.advertise<pcl::PointCloud<pcl::PointXYZRGB>>("point_cloud_color", 1);
  projectedPointCloudPublisher = nh.advertise<pcl::PointCloud<pcl::PointXYZ>>("projected_point_cloud", 1);
}

bool StereoCamera::open()
{
  Camera::open();

  updateCameraInfo();

  return true;
}

void StereoCamera::startServers() const
{
  Camera::startServers();
  requestDataServer->start();
  fitPrimitiveServer->start();
  calibrateHandEyeServer->start();
  calibrateWorkspaceServer->start();
  locatePatternServer->start();
  projectPatternServer->start();
  texturedPointCloudServer->start();
  telecentricProjectionServer->start();
}

void StereoCamera::onFitPrimitive(ensenso_camera_msgs::FitPrimitiveGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(FitPrimitive, fitPrimitiveServer)

  NxLibCommand fitPrimitives(cmdFitPrimitive, serial);
  NxLibItem const& primitives = fitPrimitives.parameters()[itmPrimitive];

  int primitivesCount = 0;
  for (auto const& primitive : goal->primitives)
  {
    if (primitive.type == ensenso_camera_msgs::Primitive::SPHERE)
    {
      primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmRadius][itmMax] = primitive.max_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmCount] = primitive.count;
    }
    else if (primitive.type == ensenso_camera_msgs::Primitive::CYLINDER)
    {
      primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmRadius][itmMax] = primitive.max_radius * ensenso_conversion::conversionFactor;
    }
    else if (primitive.type == ensenso_camera_msgs::Primitive::PLANE)
    {
      primitives[primitivesCount][itmCount] = primitive.count;
    }
    primitives[primitivesCount][itmType] = primitive.type;
    primitives[primitivesCount][itmInlierThreshold] = primitive.inlier_threshold * ensenso_conversion::conversionFactor;
    primitives[primitivesCount][itmInlierFraction] = primitive.inlier_fraction_in;

    primitivesCount++;
  }
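  // For illustration, the loop above builds an NxLib parameter tree that (for
  // a single sphere primitive, values assumed) would serialize roughly as:
  //
  //   "Primitive": [{
  //     "Type": "Sphere",
  //     "Radius": { "Min": 20.0, "Max": 50.0 },  // millimeters
  //     "Count": 1,
  //     "InlierThreshold": 2.0,
  //     "InlierFraction": 0.1
  //   }]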

  NxLibItem const& commandParameters = fitPrimitives.parameters();
  if (goal->normal_radius)
  {
    commandParameters[itmNormal][itmRadius] = goal->normal_radius;
  }

  float const zeroThreshold = 10e-6;
  if (!(std::abs(goal->region_of_interest.lower.x) < zeroThreshold &&
        std::abs(goal->region_of_interest.lower.y) < zeroThreshold &&
        std::abs(goal->region_of_interest.lower.z) < zeroThreshold))
  {
    commandParameters[itmBoundingBox][itmMin] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.lower);
  }
  if (!(std::abs(goal->region_of_interest.upper.x) < zeroThreshold &&
        std::abs(goal->region_of_interest.upper.y) < zeroThreshold &&
        std::abs(goal->region_of_interest.upper.z) < zeroThreshold))
  {
    commandParameters[itmBoundingBox][itmMax] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.upper);
  }
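  // Note: an all-zero corner is treated as "not set", so a bounding box is
  // only passed to the NxLib when the goal actually specifies one.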

  if (goal->failure_probability != 0)
  {
    commandParameters[itmFailureProbability] = goal->failure_probability;
  }

  if (std::abs(goal->inlier_threshold) > zeroThreshold)
  {
    commandParameters[itmInlierThreshold] = goal->inlier_threshold * ensenso_conversion::conversionFactor;
  }
  if (std::abs(goal->inlier_fraction) > zeroThreshold)
  {
    commandParameters[itmInlierFraction] = goal->inlier_fraction;
  }
  if (std::abs(goal->scaling) > zeroThreshold)
  {
    commandParameters[itmScaling] = goal->scaling;
  }
  if (goal->ransac_iterations != 0)
  {
    commandParameters[itmIterations] = goal->ransac_iterations;
  }

  fitPrimitives.execute();

  ensenso_camera_msgs::FitPrimitiveResult result;

  NxLibItem const primitiveResults = fitPrimitives.result()[itmPrimitive];
  if (primitiveResults.isArray())
  {
    result.primitives.reserve(primitiveResults.count());
    for (int primitiveCount = 0; primitiveCount < primitiveResults.count(); primitiveCount++)
    {
      NxLibItem const& currentPrimitive = primitiveResults[primitiveCount];
      ensenso_camera_msgs::Primitive primitive;

      primitive.type = currentPrimitive[itmType].asString();
      primitive.ransac_iterations = currentPrimitive[itmIterations].asInt();
      primitive.inlier_count = currentPrimitive[itmInlierCount].asInt();
      primitive.inlier_fraction_out = currentPrimitive[itmInlierFraction].asDouble();
      primitive.score = currentPrimitive[itmScore].asDouble();
      primitive.center = ensenso_conversion::toRosPoint(currentPrimitive[itmCenter]);

      if (primitive.type == ensenso_camera_msgs::Primitive::PLANE)
      {
        primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][0]));
        primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][1]));
        primitive.normal = ensenso_conversion::toRosPoint(currentPrimitive[itmNormal], false);
      }
      else if (primitive.type == ensenso_camera_msgs::Primitive::SPHERE)
      {
        primitive.radius = currentPrimitive[itmRadius].asDouble() / ensenso_conversion::conversionFactor;
      }
      else if (primitive.type == ensenso_camera_msgs::Primitive::CYLINDER)
      {
        primitive.axis = ensenso_conversion::toRosPoint(currentPrimitive[itmAxis]);
      }
      result.primitives.emplace_back(primitive);
    }
  }

  fitPrimitiveServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(FitPrimitive)
}

void StereoCamera::onRequestData(ensenso_camera_msgs::RequestDataGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(RequestData, requestDataServer)

  ensenso_camera_msgs::RequestDataResult result;
  ensenso_camera_msgs::RequestDataFeedback feedback;

  bool publishResults = goal->publish_results;
  if (!goal->publish_results && !goal->include_results_in_response)
  {
    publishResults = true;
  }

  bool requestPointCloud = goal->request_point_cloud || goal->request_depth_image;
  if (!goal->request_raw_images && !goal->request_rectified_images && !goal->request_point_cloud &&
      !goal->request_normals && !goal->request_depth_image)
  {
    requestPointCloud = true;
  }
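  // Defaulting rules: a goal that requests nothing yields a point cloud, and
  // a goal that neither publishes nor includes results in the response falls
  // back to publishing, so an empty goal still produces visible output.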

  bool computePointCloud = requestPointCloud || goal->request_normals;
  bool computeDisparityMap = goal->request_disparity_map || computePointCloud;

  loadParameterSet(goal->parameter_set, computeDisparityMap ? projectorOn : projectorOff);
  ros::Time imageTimestamp = capture();

  PREEMPT_ACTION_IF_REQUESTED

  feedback.images_acquired = true;
  requestDataServer->publishFeedback(feedback);

  if (goal->request_raw_images)
  {
    auto rawImages = imagePairsFromNxLibNode(cameraNode[itmImages][itmRaw], cameraFrame);

    leftCameraInfo->header.stamp = rawImages[0].first->header.stamp;
    leftCameraInfo->header.frame_id = cameraFrame;
    rightCameraInfo->header.stamp = leftCameraInfo->header.stamp;
    rightCameraInfo->header.frame_id = cameraFrame;

    if (goal->include_results_in_response)
    {
      for (auto const& imagePair : rawImages)
      {
        result.left_raw_images.push_back(*imagePair.first);
        result.right_raw_images.push_back(*imagePair.second);
      }
      result.left_camera_info = *leftCameraInfo;
      result.right_camera_info = *rightCameraInfo;
    }
    if (publishResults)
    {
      // We only publish one of the images on the topic, even if FlexView is
      // enabled.
      leftRawImagePublisher.publish(rawImages[0].first, leftCameraInfo);
      rightRawImagePublisher.publish(rawImages[0].second, rightCameraInfo);
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  // If we need the disparity map, we do the rectification implicitly in
  // cmdComputeDisparityMap. This is more efficient when using CUDA.
  if (goal->request_rectified_images && !computeDisparityMap)
  {
    NxLibCommand rectify(cmdRectifyImages, serial);
    rectify.parameters()[itmCameras] = serial;
    rectify.execute();
  }
  else if (computeDisparityMap)
  {
    NxLibCommand disparityMapCommand(cmdComputeDisparityMap, serial);
    disparityMapCommand.parameters()[itmCameras] = serial;
    disparityMapCommand.execute();
  }

  if (goal->request_rectified_images)
  {
    auto rectifiedImages = imagePairsFromNxLibNode(cameraNode[itmImages][itmRectified], cameraFrame);

    leftRectifiedCameraInfo->header.stamp = rectifiedImages[0].first->header.stamp;
    leftRectifiedCameraInfo->header.frame_id = cameraFrame;
    rightRectifiedCameraInfo->header.stamp = leftRectifiedCameraInfo->header.stamp;
    rightRectifiedCameraInfo->header.frame_id = cameraFrame;

    if (goal->include_results_in_response)
    {
      for (auto const& imagePair : rectifiedImages)
      {
        result.left_rectified_images.push_back(*imagePair.first);
        result.right_rectified_images.push_back(*imagePair.second);
      }
      result.left_rectified_camera_info = *leftRectifiedCameraInfo;
      result.right_rectified_camera_info = *rightRectifiedCameraInfo;
    }
    if (publishResults)
    {
      // We only publish one of the images on the topic, even if FlexView is
      // enabled.
      leftRectifiedImagePublisher.publish(rectifiedImages[0].first, leftRectifiedCameraInfo);
      rightRectifiedImagePublisher.publish(rectifiedImages[0].second, rightRectifiedCameraInfo);
    }
  }

  if (goal->request_disparity_map)
  {
    auto disparityMap = imageFromNxLibNode(cameraNode[itmImages][itmDisparityMap], cameraFrame);

    if (goal->include_results_in_response)
    {
      result.disparity_map = *disparityMap;
    }
    if (publishResults)
    {
      disparityMapPublisher.publish(disparityMap);
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  PointCloudROI const* pointCloudROI = nullptr;
  if (parameterSets.at(currentParameterSet).useROI)
  {
    pointCloudROI = &parameterSets.at(currentParameterSet).roi;
  }
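  // If the current parameter set defines a region of interest, the pointer
  // set above is handed to the point cloud conversions below, which drop all
  // points outside of that box.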

  if (computePointCloud)
  {
    updateGlobalLink(imageTimestamp, "", goal->use_cached_transformation);

    NxLibCommand computePointMap(cmdComputePointMap, serial);
    computePointMap.parameters()[itmCameras] = serial;
    computePointMap.execute();

    if (requestPointCloud && !goal->request_normals)
    {
      auto pointCloud = pointCloudFromNxLib(cameraNode[itmImages][itmPointMap], targetFrame, pointCloudROI);

      if (goal->include_results_in_response)
      {
        pcl::toROSMsg(*pointCloud, result.point_cloud);
      }
      if (publishResults)
      {
        pointCloudPublisher.publish(pointCloud);
      }
    }

    if (goal->request_depth_image)
    {
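      // A depth image is only well-defined in the camera frame: once the
      // camera is extrinsically calibrated, the NxLib transforms the point
      // map into the target frame and it no longer maps one-to-one onto the
      // sensor pixels, hence the check below.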
      if (cameraFrame == targetFrame)
      {
        auto depthImage = depthImageFromNxLibNode(cameraNode[itmImages][itmPointMap], targetFrame);
        leftRectifiedCameraInfo->header.stamp = depthImage->header.stamp;

        if (goal->include_results_in_response)
        {
          result.depth_image = *depthImage;
        }

        if (publishResults)
        {
          depthImagePublisher.publish(depthImage, leftRectifiedCameraInfo);
        }
      }
      else
      {
        ROS_WARN_ONCE("Currently it is not possible to generate a depth image once the stereo camera is extrinsically "
                      "calibrated, because the point cloud is then transformed internally. If you want a depth "
                      "image, the camera must not be extrinsically calibrated (workspace/hand-eye calibration) and "
                      "must not have a link to another camera.");
      }
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  if (goal->request_normals)
  {
    NxLibCommand computeNormals(cmdComputeNormals, serial);
    computeNormals.execute();

    auto pointCloud = pointCloudWithNormalsFromNxLib(cameraNode[itmImages][itmPointMap],
                                                     cameraNode[itmImages][itmNormals], targetFrame, pointCloudROI);

    if (goal->include_results_in_response)
    {
      pcl::toROSMsg(*pointCloud, result.point_cloud);
    }
    if (publishResults)
    {
      pointCloudPublisher.publish(pointCloud);
    }
  }

  requestDataServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(RequestData)
}

void StereoCamera::onSetParameter(ensenso_camera_msgs::SetParameterGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(SetParameter, setParameterServer)

  ensenso_camera_msgs::SetParameterResult result;

  loadParameterSet(goal->parameter_set);

  result.parameter_file_applied = false;
  if (!goal->parameter_file.empty())
  {
    result.parameter_file_applied = loadSettings(goal->parameter_file);
    if (!result.parameter_file_applied)
    {
      server->setAborted(result);
      return;
    }
  }

  bool projectorChanged = false;
  for (auto const& parameter : goal->parameters)
  {
    writeParameter(parameter);

    if (parameter.key == parameter.PROJECTOR || parameter.key == parameter.FRONT_LIGHT)
    {
      projectorChanged = true;
    }
  }

  // Synchronize to make sure that we read back the correct values.
  NxLibCommand synchronize(cmdSynchronize, serial);
  synchronize.parameters()[itmCameras] = serial;
  synchronize.execute();

  saveParameterSet(goal->parameter_set, projectorChanged);

  // The new parameters might change the camera calibration.
  updateCameraInfo();

  // Read back the actual values.
  for (auto const& parameter : goal->parameters)
  {
    result.results.push_back(*readParameter(parameter.key));
  }

  setParameterServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(SetParameter)
}

void StereoCamera::onLocatePattern(ensenso_camera_msgs::LocatePatternGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(LocatePattern, locatePatternServer)

  ensenso_camera_msgs::LocatePatternResult result;
  ensenso_camera_msgs::LocatePatternFeedback feedback;

  loadParameterSet(goal->parameter_set, projectorOff);

  int numberOfShots = goal->number_of_shots;
  if (numberOfShots < 1)
  {
    numberOfShots = 1;
  }

  std::vector<StereoCalibrationPattern> patterns;
  ros::Time imageTimestamp;
  for (int i = 0; i < numberOfShots; i++)
  {
    PREEMPT_ACTION_IF_REQUESTED

    ros::Time timestamp = capture();
    if (i == 0)
      imageTimestamp = timestamp;

    PREEMPT_ACTION_IF_REQUESTED

    if (i == (numberOfShots - 1))
    {
      // The last image has been captured.
      feedback.images_acquired = true;
      locatePatternServer->publishFeedback(feedback);
    }

    bool clearBuffer = (i == 0);
    patterns = collectPattern(clearBuffer);
    if (patterns.empty())
    {
      result.found_pattern = false;
      locatePatternServer->setSucceeded(result);
      return;
    }

    if (patterns.size() > 1)
    {
      // Multiple shots of multiple patterns cannot be averaged. Stop
      // capturing and estimate the pose of each pattern individually.
      break;
    }
  }

  result.found_pattern = true;
  result.patterns.resize(patterns.size());
  for (size_t i = 0; i < patterns.size(); i++)
  {
    patterns[i].writeToMessage(result.patterns[i]);
  }

  PREEMPT_ACTION_IF_REQUESTED

  std::string patternFrame = targetFrame;
  if (!goal->target_frame.empty())
  {
    patternFrame = goal->target_frame;
  }

  result.frame = patternFrame;

  if (patterns.size() > 1)
  {
    // Estimate the pose of all the patterns we found.
    auto patternPoses = estimatePatternPoses(imageTimestamp, patternFrame);

    result.pattern_poses.resize(patternPoses.size());
    for (size_t i = 0; i < patternPoses.size(); i++)
    {
      result.pattern_poses[i] = stampedPoseFromTransform(patternPoses[i]);
    }
  }
  else
  {
    // Estimate the pose of a single pattern, averaging over the different
    // shots.
    auto patternPose = estimatePatternPose(imageTimestamp, patternFrame);

    result.pattern_poses.resize(1);
    result.pattern_poses[0] = stampedPoseFromTransform(patternPose);
  }

  if (!goal->tf_frame.empty())
  {
    if (patterns.size() == 1)
    {
      auto transform = transformFromPose(result.pattern_poses[0], goal->tf_frame);
      transformBroadcaster->sendTransform(transform);
    }
    else
    {
      ROS_WARN("Cannot publish the pattern pose in TF, because there are multiple patterns!");
    }
  }

  // The target frame has changed.
  publishCurrentLinks();
  locatePatternServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(LocatePattern)
}

void StereoCamera::onProjectPattern(ensenso_camera_msgs::ProjectPatternGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(ProjectPattern, projectPatternServer)

  ensenso_camera_msgs::ProjectPatternResult result;

  tf2::Transform targetFrameTransformation = fromMsg(goal->target_frame_transformation);
  if (isValid(targetFrameTransformation))
  {
    updateTransformations(targetFrameTransformation);
  }
  else
  {
    updateGlobalLink();
  }

  PREEMPT_ACTION_IF_REQUESTED

  if (!checkNxLibVersion(2, 1))
  {
    // In old NxLib versions, the project pattern command does not take the
    // grid spacing as a parameter.
    NxLibItem()[itmParameters][itmPattern][itmGridSpacing] = goal->grid_spacing * 1000;
  }
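  // The goal's grid spacing is given in meters (ROS convention), while the
  // NxLib expects millimeters, hence the factor of 1000 when writing it to
  // the NxLib above and below.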

  tf2::Transform patternPose = fromMsg(goal->pattern_pose);

  NxLibCommand projectPattern(cmdProjectPattern, serial);
  projectPattern.parameters()[itmCameras] = serial;
  projectPattern.parameters()[itmGridSpacing] = goal->grid_spacing * 1000;
  projectPattern.parameters()[itmGridSize][0] = goal->grid_size_x;
  projectPattern.parameters()[itmGridSize][1] = goal->grid_size_y;
  writePoseToNxLib(patternPose, projectPattern.parameters()[itmPatternPose]);
  projectPattern.execute();

  PREEMPT_ACTION_IF_REQUESTED

  int sensorWidth = cameraNode[itmSensor][itmSize][0].asInt();
  int sensorHeight = cameraNode[itmSensor][itmSize][1].asInt();

  NxLibItem projectedPoints = projectPattern.result()[itmStereo][0][itmPoints];
  NxLibItem leftPoints = projectedPoints[0];
  NxLibItem rightPoints = projectedPoints[1];

  result.pattern_is_visible = true;
  result.left_points.resize(leftPoints.count());
  result.right_points.resize(leftPoints.count());
  for (int i = 0; i < leftPoints.count(); i++)
  {
    double leftX = leftPoints[i][0].asDouble();
    double leftY = leftPoints[i][1].asDouble();
    double rightX = rightPoints[i][0].asDouble();
    double rightY = rightPoints[i][1].asDouble();

    result.left_points[i].x = leftX;
    result.left_points[i].y = leftY;
    result.right_points[i].x = rightX;
    result.right_points[i].y = rightY;

    if (leftX < 0 || leftX > sensorWidth || leftY < 0 || leftY > sensorHeight || rightX < 0 || rightX > sensorWidth ||
        rightY < 0 || rightY > sensorHeight)
    {
      result.pattern_is_visible = false;
    }
  }

  projectPatternServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(ProjectPattern)
}

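// The hand-eye calibration is driven through a small state machine via the
// goal's command field: RESET clears the collected data, CAPTURE_PATTERN is
// called once per robot pose to record a pattern observation together with
// the current robot pose, and START_CALIBRATION runs the actual optimization.
// A hedged sketch of a client-side loop (the robot motion helper and pose
// list are assumptions for illustration):
//
//   ensenso_camera_msgs::CalibrateHandEyeGoal goal;
//   goal.command = goal.RESET;
//   client.sendGoalAndWait(goal);
//   for (auto const& pose : robotPoses)  // assumed list of target poses
//   {
//     moveRobotTo(pose);                 // assumed motion helper
//     goal.command = goal.CAPTURE_PATTERN;
//     client.sendGoalAndWait(goal);
//   }
//   goal.command = goal.START_CALIBRATION;
//   client.sendGoalAndWait(goal);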
void StereoCamera::onCalibrateHandEye(ensenso_camera_msgs::CalibrateHandEyeGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(CalibrateHandEye, calibrateHandEyeServer)

  ensenso_camera_msgs::CalibrateHandEyeResult result;
  result.command = goal->command;

  if (goal->command == goal->RESET)
  {
    handEyeCalibrationPatternBuffer.clear();
    handEyeCalibrationRobotPoses.clear();
  }
  else if (goal->command == goal->CAPTURE_PATTERN)
  {
    if (robotFrame.empty() || wristFrame.empty())
    {
      result.error_message = "You need to specify a robot base and wrist frame to do a hand-eye calibration!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    loadParameterSet(goal->parameter_set, projectorOff);
    ros::Time imageTimestamp = capture();

    PREEMPT_ACTION_IF_REQUESTED

    // Load the pattern buffer that we remembered from the previous
    // calibration steps.
    if (!handEyeCalibrationPatternBuffer.empty())
    {
      NxLibCommand setPatternBuffer(cmdSetPatternBuffer, serial);
      setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
      setPatternBuffer.execute();
    }
    else
    {
      NxLibCommand(cmdDiscardPatterns, serial).execute();
    }

    std::vector<StereoCalibrationPattern> patterns = collectPattern();
    if (patterns.empty())
    {
      result.found_pattern = false;
      calibrateHandEyeServer->setSucceeded(result);
      return;
    }
    if (patterns.size() > 1)
    {
      result.error_message = "Detected multiple calibration patterns during a hand-eye calibration!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    PREEMPT_ACTION_IF_REQUESTED

    result.found_pattern = true;
    patterns[0].writeToMessage(result.pattern);

    auto patternPose = estimatePatternPose(imageTimestamp, cameraFrame, true);
    result.pattern_pose = stampedPoseFromTransform(patternPose).pose;

    PREEMPT_ACTION_IF_REQUESTED

    geometry_msgs::TransformStamped robotPose;
    try
    {
      // NOTE: the timeout argument of this lookup was lost in extraction; one
      // second is an assumed, representative value.
      robotPose = tfBuffer.lookupTransform(robotFrame, wristFrame, imageTimestamp, ros::Duration(1.));
    }
    catch (tf2::TransformException& e)
    {
      result.error_message = std::string("Could not look up the robot pose due to the TF error: ") + e.what();
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    // Remember the newly collected data for the next step.
    NxLibCommand getPatternBuffer(cmdGetPatternBuffer, serial);
    getPatternBuffer.execute();
    handEyeCalibrationPatternBuffer = getPatternBuffer.result()[itmPatterns].asJson();

    handEyeCalibrationRobotPoses.push_back(fromStampedMessage(robotPose));

    result.robot_pose = stampedPoseFromTransform(robotPose).pose;
  }
  else if (goal->command == goal->START_CALIBRATION)
  {
    if (handEyeCalibrationRobotPoses.size() < 5)
    {
      result.error_message = "You need to collect at least 5 patterns before starting a hand-eye calibration!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    // Load the pattern observations.
    size_t numberOfPatterns = 0;
    NxLibCommand setPatternBuffer(cmdSetPatternBuffer, serial);
    if (!goal->pattern_observations.empty())
    {
      for (size_t i = 0; i < goal->pattern_observations.size(); i++)
      {
        StereoCalibrationPattern pattern(goal->pattern_observations[i]);

        NxLibItem patternNode = setPatternBuffer.parameters()[itmPatterns][i][serial];
        pattern.writeToNxLib(patternNode[itmLeft][0]);
        pattern.writeToNxLib(patternNode[itmRight][0], true);
      }
    }
    else
    {
      setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
    }
    numberOfPatterns = setPatternBuffer.parameters()[itmPatterns].count();
    setPatternBuffer.execute();

    // Load the corresponding robot poses.
    auto robotPoses = handEyeCalibrationRobotPoses;
    if (!goal->robot_poses.empty())
    {
      robotPoses.clear();
      for (auto const& pose : goal->robot_poses)
      {
        tf2::Transform tfPose = fromMsg(pose);
        robotPoses.push_back(tfPose);
      }
    }

    if (robotPoses.size() != numberOfPatterns)
    {
      result.error_message = "The number of pattern observations does not match the number of robot poses!";
      ROS_ERROR("%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(result);
      return;
    }

    // Load the initial guesses from the action goal.
    tf2::Transform link, patternPose;
    link = fromMsg(goal->link);
    patternPose = fromMsg(goal->pattern_pose);

    NxLibCommand calibrateHandEye(cmdCalibrateHandEye, serial);
    calibrateHandEye.parameters()[itmSetup] = fixed ? valFixed : valMoving;
    // The target node will be reset anyway before we calculate data for the next time.
    calibrateHandEye.parameters()[itmTarget] = TARGET_FRAME_LINK + "_" + serial;
    if (isValid(link))
    {
      writePoseToNxLib(link.inverse(), calibrateHandEye.parameters()[itmLink]);
    }
    if (isValid(patternPose))
    {
      writePoseToNxLib(patternPose, calibrateHandEye.parameters()[itmPatternPose]);
    }
    for (size_t i = 0; i < robotPoses.size(); i++)
    {
      writePoseToNxLib(robotPoses[i], calibrateHandEye.parameters()[itmTransformations][i]);
    }

    calibrateHandEye.execute(false);

    auto getCalibrationResidual = [](NxLibItem const& node) {  // NOLINT
      if (node[itmResidual].exists())
      {
        return node[itmResidual].asDouble();
      }
      // Compatibility with the SDK 2.0.
      return node[itmReprojectionError].asDouble();
    };

    ros::Rate waitingRate(5);
    while (!calibrateHandEye.finished())
    {
      if (calibrateHandEye.result()[itmProgress].exists())
      {
        ensenso_camera_msgs::CalibrateHandEyeFeedback feedback;
        feedback.number_of_iterations = calibrateHandEye.result()[itmProgress][itmIterations].asInt();
        feedback.residual = getCalibrationResidual(calibrateHandEye.result()[itmProgress]);
        calibrateHandEyeServer->publishFeedback(feedback);
      }

      if (calibrateHandEyeServer->isPreemptRequested())
      {
        NxLibCommand(cmdBreak, serial).execute();

        calibrateHandEyeServer->setPreempted();
        return;
      }

      waitingRate.sleep();
    }

    result.calibration_time =
        calibrateHandEye.result()[itmTime].asDouble() / 1000;  // NxLib time is in milliseconds, ROS
                                                               // expects time to be in seconds.
    result.number_of_iterations = calibrateHandEye.result()[itmIterations].asInt();
    result.residual = getCalibrationResidual(calibrateHandEye.result());

    result.link = poseFromTransform(poseFromNxLib(cameraNode[itmLink]).inverse());
    result.pattern_pose = poseFromTransform(poseFromNxLib(calibrateHandEye.result()[itmPatternPose]));

    if (goal->write_calibration_to_eeprom)
    {
      // Save the new calibration link to the camera's EEPROM.
      NxLibCommand storeCalibration(cmdStoreCalibration, serial);
      storeCalibration.parameters()[itmCameras] = serial;
      storeCalibration.parameters()[itmLink] = true;
      storeCalibration.execute();
    }
  }

  // The target frame has changed.
  publishCurrentLinks();
  calibrateHandEyeServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(CalibrateHandEye)
}

void StereoCamera::onCalibrateWorkspace(const ensenso_camera_msgs::CalibrateWorkspaceGoalConstPtr& goal)
{
  START_NXLIB_ACTION(CalibrateWorkspace, calibrateWorkspaceServer)

  if (!fixed)
  {
    ROS_WARN("You are performing a workspace calibration for a moving camera. "
             "Are you sure that this is what you want to do?");
  }

  ensenso_camera_msgs::CalibrateWorkspaceResult result;
  result.successful = true;

  loadParameterSet(goal->parameter_set, projectorOff);

  int numberOfShots = goal->number_of_shots;
  if (numberOfShots < 1)
  {
    numberOfShots = 1;
  }

  ros::Time imageTimestamp;
  for (int i = 0; i < numberOfShots; i++)
  {
    PREEMPT_ACTION_IF_REQUESTED

    ros::Time timestamp = capture();
    if (i == 0)
      imageTimestamp = timestamp;

    PREEMPT_ACTION_IF_REQUESTED

    bool clearBuffer = (i == 0);
    std::vector<StereoCalibrationPattern> patterns = collectPattern(clearBuffer);
    if (patterns.size() != 1)
    {
      result.successful = false;
      calibrateWorkspaceServer->setSucceeded(result);
      return;
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

  tf2::Transform patternTransformation = fromStampedMessage(estimatePatternPose(imageTimestamp));

  PREEMPT_ACTION_IF_REQUESTED

  tf2::Transform definedPatternPose = fromMsg(goal->defined_pattern_pose);

  NxLibCommand calibrateWorkspace(cmdCalibrateWorkspace, serial);
  calibrateWorkspace.parameters()[itmCameras] = serial;
  writePoseToNxLib(patternTransformation, calibrateWorkspace.parameters()[itmPatternPose]);
  writePoseToNxLib(definedPatternPose, calibrateWorkspace.parameters()[itmDefinedPose]);
  calibrateWorkspace.parameters()[itmTarget] = TARGET_FRAME_LINK + "_" + serial;
  calibrateWorkspace.execute();

  if (goal->write_calibration_to_eeprom)
  {
    // Save the new calibration link to the camera's EEPROM.
    NxLibCommand storeCalibration(cmdStoreCalibration, serial);
    storeCalibration.parameters()[itmCameras] = serial;
    storeCalibration.parameters()[itmLink] = true;
    storeCalibration.execute();
  }

  publishCurrentLinks();
  calibrateWorkspaceServer->setSucceeded(result);

  FINISH_NXLIB_ACTION(CalibrateWorkspace)
}
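
// The telecentric projection renders the current point cloud from a virtual
// orthographic (telecentric) view, either at an explicit view pose or at the
// latest transform between the target frame and a user-supplied frame. A
// hedged sketch of a minimal goal (the frame name is an assumption for
// illustration):
//
//   ensenso_camera_msgs::TelecentricProjectionGoal goal;
//   goal.frame = "workspace";          // assumed frame name
//   goal.serials.push_back(serial);    // cameras whose data is rendered
//   goal.request_depth_image = true;
//   goal.publish_results = true;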

void StereoCamera::onTelecentricProjection(ensenso_camera_msgs::TelecentricProjectionGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(TelecentricProjection, telecentricProjectionServer)

  ensenso_camera_msgs::TelecentricProjectionResult result;

  bool useViewPose = isValid(goal->view_pose);
  bool frameGiven = !goal->frame.empty();

  if (!frameGiven)
  {
    std::string error = "You have to define a valid frame, in which the projection will be published. Aborting.";
    ROS_ERROR("%s", error.c_str());
    result.error.message = error;
    telecentricProjectionServer->setAborted(result);
    return;
  }

  tf2::Transform transform;
  std::string publishingFrame = useViewPose ? targetFrame : goal->frame;

  if (useViewPose)
  {
    transform = fromMsg(goal->view_pose);
  }
  else
  {
    transform = getLatestTransform(tfBuffer, targetFrame, goal->frame);
  }

  int pixelScale = goal->pixel_scale != 0. ? goal->pixel_scale : 1;
  double scaling = goal->scaling != 0. ? goal->scaling : 1.0;
  int sizeWidth = goal->size_width != 0 ? goal->size_width : 1024;
  int sizeHeight = goal->size_height != 0. ? goal->size_height : 768;
  bool useOpenGL = goal->use_opengl == 1;
  RenderPointMapParamsTelecentric params(useOpenGL, pixelScale, scaling, sizeWidth, sizeHeight, transform);

  NxLibCommand renderPointMap(cmdRenderPointMap, serial);

  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
  {
    renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
  }
  setRenderParams(renderPointMap.parameters(), nxLibVersion, &params);

  renderPointMap.execute();

  NxLibItem resultPath = retrieveResultPath(renderPointMap.result(), nxLibVersion);

  if (!resultPath.exists())
  {
    std::string error = "Rendered point map does not exist in path: " + resultPath.path;
    result.error.message = error;
    ROS_ERROR("%s", error.c_str());
    telecentricProjectionServer->setAborted(result);
    return;
  }

  if (goal->publish_results || goal->include_results_in_response)
  {
    if (goal->request_point_cloud || (!goal->request_point_cloud && !goal->request_depth_image))
    {
      auto pointCloud = retrieveRenderedPointCloud(renderPointMap.result(), nxLibVersion, goal->frame);

      if (goal->publish_results)
      {
        projectedPointCloudPublisher.publish(pointCloud);
      }
      if (goal->include_results_in_response)
      {
        pcl::toROSMsg(*pointCloud, result.projected_point_cloud);
        telecentricProjectionServer->setSucceeded(result);
      }
      else
      {
        telecentricProjectionServer->setSucceeded();
      }
    }

    if (goal->request_depth_image)
    {
      auto renderedImage = retrieveRenderedDepthMap(renderPointMap.result(), nxLibVersion, goal->frame);

      if (goal->publish_results)
      {
        projectedImagePublisher.publish(renderedImage);
      }
      if (goal->include_results_in_response)
      {
        result.projected_depth_map = *renderedImage;
        telecentricProjectionServer->setSucceeded(result);
      }
      else
      {
        telecentricProjectionServer->setSucceeded();
      }
    }
  }
  else
  {
    telecentricProjectionServer->setSucceeded();
  }

  FINISH_NXLIB_ACTION(TelecentricProjection)
}

void StereoCamera::onTexturedPointCloud(ensenso_camera_msgs::TexturedPointCloudGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(TexturedPointCloud, texturedPointCloudServer)

  ensenso_camera_msgs::TexturedPointCloudResult result;

  if (goal->mono_serial.empty())
  {
    std::string error = "In order to use this action, you have to specify the serial of a mono camera.";
    ROS_ERROR("%s", error.c_str());
    result.error.message = error;
    texturedPointCloudServer->setAborted(result);
    return;
  }

  double farPlane = goal->far_plane ? goal->far_plane : 10000.;
  double nearPlane = goal->near_plane ? goal->near_plane : -10000.;
  bool useOpenGL = goal->use_opengl == 1;
  bool withTexture = true;

  RenderPointMapParamsProjection params(useOpenGL, farPlane, nearPlane, withTexture);
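  // The near and far clipping planes above are presumably given in
  // millimeters (the NxLib's native unit), i.e. the defaults span roughly
  // +/- 10 m around the camera.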

  NxLibCommand renderPointMap(cmdRenderPointMap, serial);
  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
  {
    renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
  }
  renderPointMap.parameters()[itmCamera] = goal->mono_serial;
  setRenderParams(renderPointMap.parameters(), nxLibVersion, &params);

  renderPointMap.execute();

  if (goal->publish_results || goal->include_results_in_response)
  {
    auto cloudColored = retrieveTexturedPointCloud(renderPointMap.result(), nxLibVersion, targetFrame);
    if (goal->publish_results)
    {
      pointCloudPublisherColor.publish(cloudColored);
    }
    if (goal->include_results_in_response)
    {
      pcl::toROSMsg(*cloudColored, result.point_cloud);
      texturedPointCloudServer->setSucceeded(result);
    }
    else
    {
      texturedPointCloudServer->setSucceeded();
    }
  }
  else
  {
    texturedPointCloudServer->setSucceeded();
  }

  FINISH_NXLIB_ACTION(TexturedPointCloud)
}

void StereoCamera::saveParameterSet(std::string name, bool projectorWasSet)
{
  if (name.empty())
  {
    name = DEFAULT_PARAMETER_SET;
  }

  Camera::saveParameterSet(name);

  ParameterSet& parameterSet = parameterSets.at(name);
  if (projectorWasSet)
  {
    parameterSet.autoProjector = false;
  }
}

void StereoCamera::loadParameterSet(std::string name, ProjectorState projector)
{
  Camera::loadParameterSet(std::move(name));

  bool changedParameters = false;
  ParameterSet const& parameterSet = parameterSets.at(currentParameterSet);

  if (parameterSet.autoProjector && projector != projectorDontCare)
  {
    try
    {
      if (projector == projectorOn)
      {
        cameraNode[itmParameters][itmCapture][itmProjector] = true;
        cameraNode[itmParameters][itmCapture][itmFrontLight] = false;
      }
      else
      {
        cameraNode[itmParameters][itmCapture][itmProjector] = false;
        cameraNode[itmParameters][itmCapture][itmFrontLight] = true;
      }
      changedParameters = true;
    }
    catch (...)
    {
      // Setting the projector and front light fails on file cameras.
    }
  }

  if (changedParameters)
  {
    NxLibCommand synchronize(cmdSynchronize, serial);
    synchronize.parameters()[itmCameras] = serial;
    synchronize.execute();
  }
}

ros::Time StereoCamera::capture() const
{
  // Update virtual objects. Ignore failures with a simple warning.
  if (virtualObjectHandler)
  {
    try
    {
      virtualObjectHandler->updateObjectLinks();
    }
    catch (const std::exception& e)
    {
      ROS_WARN("Unable to update virtual objects. Error: %s", e.what());
    }
  }

  ROS_DEBUG("Capturing an image...");

  NxLibCommand capture(cmdCapture, serial);
  capture.parameters()[itmCameras] = serial;
  if (captureTimeout > 0)
  {
    capture.parameters()[itmTimeout] = captureTimeout;
  }
  capture.execute();

  NxLibItem imageNode = cameraNode[itmImages][itmRaw];
  imageNode = imageNode[itmLeft];

  if (isFileCamera)
  {
    // This workaround is needed because the timestamps of file camera captures do not change over time. Looking up
    // the current tf tree with such a timestamp fails, because it refers to a point in the past (usually further
    // back than the tfBuffer stores transforms).
    return ros::Time::now();
  }

  return timestampFromNxLibNode(imageNode);
}

std::vector<StereoCalibrationPattern> StereoCamera::collectPattern(bool clearBuffer) const
{
  if (clearBuffer)
  {
    NxLibCommand(cmdDiscardPatterns, serial).execute();
  }

  NxLibCommand collectPattern(cmdCollectPattern, serial);
  collectPattern.parameters()[itmCameras] = serial;
  collectPattern.parameters()[itmDecodeData] = true;
  collectPattern.parameters()[itmFilter][itmCameras] = serial;
  bool useModel = true;
  collectPattern.parameters()[itmFilter][itmUseModel] = useModel;
  collectPattern.parameters()[itmFilter][itmType] = valStatic;
  collectPattern.parameters()[itmFilter][itmValue] = true;
  try
  {
    collectPattern.execute();
  }
  catch (NxLibException& e)
  {
    if (e.getErrorCode() == NxLibExecutionFailed)
    {
      if (collectPattern.result()[itmErrorSymbol] == errPatternNotFound ||
          collectPattern.result()[itmErrorSymbol] == errPatternNotDecodable)
      {
        return {};
      }
    }
    throw;
  }

  if (!collectPattern.result()[itmStereo].exists())
  {
    // We did find patterns, but only in one of the cameras.
    return {};
  }

  std::vector<StereoCalibrationPattern> result;

  for (int i = 0; i < collectPattern.result()[itmStereo].count(); i++)
  {
    result.emplace_back(collectPattern.result()[itmStereo][i]);
  }

  // Extract the pattern's image points from the result.
  NxLibItem pattern = collectPattern.result()[itmPatterns][0][serial];
  if (pattern[itmLeft].count() > 1 || pattern[itmRight].count() > 1)
  {
    // We cannot tell which of the patterns in the two cameras belong together,
    // because that would need a comparison with the stereo model.
    return result;
  }

  for (size_t i = 0; i < result.size(); i++)
  {
    for (int j = 0; j < pattern[itmLeft][i][itmPoints].count(); j++)
    {
      NxLibItem pointNode = pattern[itmLeft][i][itmPoints][j];

      ensenso_camera_msgs::ImagePoint point;
      point.x = pointNode[0].asDouble();
      point.y = pointNode[1].asDouble();
      result.at(i).leftPoints.push_back(point);
    }
    for (int j = 0; j < pattern[itmRight][i][itmPoints].count(); j++)
    {
      NxLibItem pointNode = pattern[itmRight][i][itmPoints][j];

      ensenso_camera_msgs::ImagePoint point;
      point.x = pointNode[0].asDouble();
      point.y = pointNode[1].asDouble();
      result.at(i).rightPoints.push_back(point);
    }
  }

  return result;
}

void StereoCamera::fillCameraInfoFromNxLib(sensor_msgs::CameraInfoPtr const& info, bool right, bool rectified) const
{
  fillBasicCameraInfoFromNxLib(info);

  NxLibItem monoCalibrationNode = cameraNode[itmCalibration][itmMonocular][right ? itmRight : itmLeft];
  NxLibItem stereoCalibrationNode = cameraNode[itmCalibration][itmStereo][right ? itmRight : itmLeft];

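  // Note on the indexing below: the NxLib stores matrices in column-major
  // order, so an access like node[itmCamera][column][row] fills the row-major
  // K, R and P arrays of the ROS CameraInfo message.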
  if (rectified)
  {
    // For the rectified images all transformations are the identity (because
    // all of the distortions were already removed), except for the stereo
    // camera matrix.

    info->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
    info->D.clear();
    info->D.resize(5, 0);

    info->K.fill(0);
    info->P.fill(0);
    info->R.fill(0);
    for (int row = 0; row < 3; row++)
    {
      for (int column = 0; column < 3; column++)
      {
        info->P[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();

        if (row == column)
        {
          info->K[3 * row + column] = 1;
          info->R[3 * row + column] = 1;
        }
      }
    }
  }
  else
  {
    info->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
    info->D.clear();
    fillDistortionParamsFromNxLib(monoCalibrationNode[itmDistortion], info);

    info->K.fill(0);  // The mono camera matrix.
    info->P.fill(0);  // The stereo camera matrix.
    info->R.fill(0);
    for (int row = 0; row < 3; row++)
    {
      for (int column = 0; column < 3; column++)
      {
        info->K[3 * row + column] = monoCalibrationNode[itmCamera][column][row].asDouble();
        info->P[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();

        info->R[3 * row + column] = stereoCalibrationNode[itmRotation][column][row].asDouble();
      }
    }
  }

  if (right)
  {
    // Add the offset of the right camera relative to the left one to the
    // projection matrix.
    double fx = stereoCalibrationNode[itmCamera][0][0].asDouble();
    double baseline = cameraNode[itmCalibration][itmStereo][itmBaseline].asDouble() / 1000.0;
    info->P[3] = -fx * baseline;
  }
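  // This follows the ROS stereo convention: the right camera's projection
  // matrix carries the baseline as Tx = -fx * B in P[0][3] (P[3] in the flat
  // array), with the baseline converted from millimeters to meters above.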

  int leftTopX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][0].asInt();
  int leftTopY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][1].asInt();
  int rightBottomX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][0].asInt();
  int rightBottomY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][1].asInt();
  info->roi.x_offset = leftTopX;
  info->roi.y_offset = leftTopY;
  info->roi.width = rightBottomX - leftTopX;
  info->roi.height = rightBottomY - leftTopY;
  if (rectified)
  {
    info->roi.do_rectify = true;
  }
}

void StereoCamera::updateCameraInfo()
{
  fillCameraInfoFromNxLib(leftCameraInfo, false);
  fillCameraInfoFromNxLib(leftRectifiedCameraInfo, false, true);
  fillCameraInfoFromNxLib(rightCameraInfo, true);
  fillCameraInfoFromNxLib(rightRectifiedCameraInfo, true, true);
}

ensenso_camera_msgs::ParameterPtr StereoCamera::readParameter(std::string const& key) const
{
  auto message = boost::make_shared<ensenso_camera_msgs::Parameter>();
  message->key = key;

  if (key == ensenso_camera_msgs::Parameter::REGION_OF_INTEREST)
  {
    PointCloudROI const& roi = parameterSets.at(currentParameterSet).roi;

    message->region_of_interest_value.lower.x = roi.minX;
    message->region_of_interest_value.lower.y = roi.minY;
    message->region_of_interest_value.lower.z = roi.minZ;
    message->region_of_interest_value.upper.x = roi.maxX;
    message->region_of_interest_value.upper.y = roi.maxY;
    message->region_of_interest_value.upper.z = roi.maxZ;
  }
  else if (key == ensenso_camera_msgs::Parameter::FLEX_VIEW)
  {
    NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];

    if (!flexViewNode.exists())
    {
      message->float_value = -1;
    }
    else if (flexViewNode.isNumber())
    {
      message->float_value = flexViewNode.asInt();
    }
    else
    {
      message->float_value = 0;
    }
  }
  else
  {
    message = Camera::readParameter(key);
  }

  return message;
}

void StereoCamera::writeParameter(ensenso_camera_msgs::Parameter const& parameter)
{
  if (parameter.key == ensenso_camera_msgs::Parameter::REGION_OF_INTEREST)
  {
    PointCloudROI& roi = parameterSets.at(currentParameterSet).roi;

    roi.minX = parameter.region_of_interest_value.lower.x;
    roi.minY = parameter.region_of_interest_value.lower.y;
    roi.minZ = parameter.region_of_interest_value.lower.z;
    roi.maxX = parameter.region_of_interest_value.upper.x;
    roi.maxY = parameter.region_of_interest_value.upper.y;
    roi.maxZ = parameter.region_of_interest_value.upper.z;

    parameterSets.at(currentParameterSet).useROI = !roi.isEmpty();
  }
  else if (parameter.key == ensenso_camera_msgs::Parameter::FLEX_VIEW)
  {
    NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];

    if (!flexViewNode.exists())
    {
      ROS_WARN("Writing the parameter FlexView, but the camera does not support it!");
      return;
    }

    int n = static_cast<int>(std::round(parameter.float_value));
    if (n <= 1)
    {
      flexViewNode = false;
    }
    else
    {
      flexViewNode = n;
    }
  }
  else
  {
    Camera::writeParameter(parameter);
  }
}