// stereo_camera.cpp

// ...
{
  leftCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
  rightCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
  leftRectifiedCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
  rightRectifiedCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();

  fitPrimitiveServer = MAKE_SERVER(StereoCamera, FitPrimitive, "fit_primitive");
  setParameterServer = MAKE_SERVER(StereoCamera, SetParameter, "set_parameter");
  requestDataServer = MAKE_SERVER(StereoCamera, RequestData, "request_data");
  locatePatternServer = MAKE_SERVER(StereoCamera, LocatePattern, "locate_pattern");
  projectPatternServer = MAKE_SERVER(StereoCamera, ProjectPattern, "project_pattern");
  calibrateHandEyeServer = MAKE_SERVER(StereoCamera, CalibrateHandEye, "calibrate_hand_eye");
  calibrateWorkspaceServer = MAKE_SERVER(StereoCamera, CalibrateWorkspace, "calibrate_workspace");
  telecentricProjectionServer = MAKE_SERVER(StereoCamera, TelecentricProjection, "project_telecentric");
  texturedPointCloudServer = MAKE_SERVER(StereoCamera, TexturedPointCloud, "texture_point_cloud");
}

void StereoCamera::updateCameraTypeSpecifics()
{
  if (!hasRightCamera())
  {
    rightCameraInfo = nullptr;
    rightRectifiedCameraInfo = nullptr;
  }
}

// ...
{
  // ...
  if (hasRightCamera())
  {
    // ...
  }
  if (hasDisparityMap())
  {
    // ...
  }
  // ...

  pointCloudPublisher = ensenso::pcl::create_publisher<ensenso::pcl::PointCloud>(nh, "point_cloud", 1);
  pointCloudNormalsPublisher = ensenso::pcl::create_publisher<ensenso::pcl::PointCloudNormals>(nh, "point_cloud", 1);
  pointCloudColoredPublisher =
      ensenso::pcl::create_publisher<ensenso::pcl::PointCloudColored>(nh, "point_cloud_color", 1);
  pointCloudProjectedPublisher =
      ensenso::pcl::create_publisher<ensenso::pcl::PointCloud>(nh, "projected_point_cloud", 1);
}

// ...
{
  // ...
  startServers();
  // ...
}

void StereoCamera::startServers()
{
  // ...
  requestDataServer->start();
  fitPrimitiveServer->start();
  calibrateHandEyeServer->start();
  calibrateWorkspaceServer->start();
  locatePatternServer->start();
  projectPatternServer->start();
  texturedPointCloudServer->start();
  // ...
}

void StereoCamera::onFitPrimitive(ensenso::action::FitPrimitiveGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(FitPrimitive, fitPrimitiveServer)

  NxLibCommand fitPrimitives(cmdFitPrimitive, params.serial);
  NxLibItem const& primitives = fitPrimitives.parameters()[itmPrimitive];

  int primitivesCount = 0;
  for (auto const& primitive : goal->primitives)
  {
    if (primitive.type == ensenso::msg::Primitive::SPHERE)
    {
      primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmRadius][itmMax] = primitive.max_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmCount] = primitive.count;
    }
    else if (primitive.type == ensenso::msg::Primitive::CYLINDER)
    {
      primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
      primitives[primitivesCount][itmRadius][itmMax] = primitive.max_radius * ensenso_conversion::conversionFactor;
    }
    else if (primitive.type == ensenso::msg::Primitive::PLANE)
    {
      primitives[primitivesCount][itmCount] = primitive.count;
    }
    primitives[primitivesCount][itmType] = primitive.type;
    primitives[primitivesCount][itmInlierThreshold] = primitive.inlier_threshold * ensenso_conversion::conversionFactor;
    primitives[primitivesCount][itmInlierFraction] = primitive.inlier_fraction_in;

    primitivesCount++;
  }

  NxLibItem const& commandParameters = fitPrimitives.parameters();
  if (goal->normal_radius)
  {
    commandParameters[itmNormal][itmRadius] = goal->normal_radius;
  }

  float const zeroThreshold = 10e-6;
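  // A region-of-interest corner that is all zero is treated as "not set"; the corresponding
  // bounding box limit is only written below when the corner differs from the origin.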
  if (!(std::abs(goal->region_of_interest.lower.x) < zeroThreshold &&
        std::abs(goal->region_of_interest.lower.y) < zeroThreshold &&
        std::abs(goal->region_of_interest.lower.z) < zeroThreshold))
  {
    commandParameters[itmBoundingBox][itmMin] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.lower);
  }
  if (!(std::abs(goal->region_of_interest.upper.x) < zeroThreshold &&
        std::abs(goal->region_of_interest.upper.y) < zeroThreshold &&
        std::abs(goal->region_of_interest.upper.z) < zeroThreshold))
  {
    commandParameters[itmBoundingBox][itmMax] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.upper);
  }

  if (goal->failure_probability != 0)
  {
    commandParameters[itmFailureProbability] = goal->failure_probability;
  }

  if (std::abs(goal->inlier_threshold) > zeroThreshold)
  {
    commandParameters[itmInlierThreshold] = goal->inlier_threshold * ensenso_conversion::conversionFactor;
  }
  if (std::abs(goal->inlier_fraction) > zeroThreshold)
  {
    commandParameters[itmInlierFraction] = goal->inlier_fraction;
  }
  if (std::abs(goal->scaling) > zeroThreshold)
  {
    commandParameters[itmScaling] = goal->scaling;
  }
  if (goal->ransac_iterations != 0)
  {
    commandParameters[itmIterations] = goal->ransac_iterations;
  }

  fitPrimitives.execute();

  ensenso::action::FitPrimitiveResult result;

  NxLibItem const primitiveResults = fitPrimitives.result()[itmPrimitive];
  if (primitiveResults.isArray())
  {
    result.primitives.reserve(primitiveResults.count());
    for (int primitiveCount = 0; primitiveCount < primitiveResults.count(); primitiveCount++)
    {
      NxLibItem const& currentPrimitive = primitiveResults[primitiveCount];
      ensenso::msg::Primitive primitive;

      primitive.type = currentPrimitive[itmType].asString();
      primitive.ransac_iterations = currentPrimitive[itmIterations].asInt();
      primitive.inlier_count = currentPrimitive[itmInlierCount].asInt();
      primitive.inlier_fraction_out = currentPrimitive[itmInlierFraction].asDouble();
      primitive.score = currentPrimitive[itmScore].asDouble();
      primitive.center = ensenso_conversion::toRosPoint(currentPrimitive[itmCenter]);

      if (primitive.type == ensenso::msg::Primitive::PLANE)
      {
        primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][0]));
        primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][1]));
        primitive.normal = ensenso_conversion::toRosPoint(currentPrimitive[itmNormal], false);
      }
      else if (primitive.type == ensenso::msg::Primitive::SPHERE)
      {
        primitive.radius = currentPrimitive[itmRadius].asDouble() / ensenso_conversion::conversionFactor;
      }
      else if (primitive.type == ensenso::msg::Primitive::CYLINDER)
      {
        primitive.axis = ensenso_conversion::toRosPoint(currentPrimitive[itmAxis]);
      }
      result.primitives.emplace_back(primitive);
    }
  }

  fitPrimitiveServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(FitPrimitive)
}
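
// A minimal client-side sketch (illustration only, not part of the driver) of calling the
// fit_primitive action served above. It assumes the ROS 1 actionlib API and the
// ensenso_camera_msgs package; the exact package and type names may differ in your setup.
//
//   #include <actionlib/client/simple_action_client.h>
//   #include <ensenso_camera_msgs/FitPrimitiveAction.h>
//
//   actionlib::SimpleActionClient<ensenso_camera_msgs::FitPrimitiveAction> client("fit_primitive", true);
//   client.waitForServer();
//
//   ensenso_camera_msgs::FitPrimitiveGoal goal;
//   ensenso_camera_msgs::Primitive sphere;
//   sphere.type = ensenso_camera_msgs::Primitive::SPHERE;
//   sphere.min_radius = 0.04;  // ROS units (meters); the server converts to NxLib units.
//   sphere.max_radius = 0.06;
//   sphere.count = 1;
//   sphere.inlier_threshold = 0.005;
//   goal.primitives.push_back(sphere);
//
//   client.sendGoal(goal);
//   client.waitForResult();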

void StereoCamera::onRequestData(ensenso::action::RequestDataGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(RequestData, requestDataServer)

  ensenso::action::RequestDataResult result;
  ensenso::action::RequestDataFeedback feedback;

  // Automatically enable publishing if neither publish_results nor include_results_in_response is enabled.
  bool publishResults = goal->publish_results;
  if (!goal->publish_results && !goal->include_results_in_response)
  {
    publishResults = true;
  }

  // Automatically disable requesting raw images if the camera does not have any.
  bool requestRawImages = goal->request_raw_images;
  if (isXrSeries() && !hasRawImages() && requestRawImages)
  {
    ENSENSO_WARN(nh, "XR: Capture mode \"Rectified\", skipping raw images request.");
    requestRawImages = false;
  }

  // Automatically disable requesting rectified images if the camera does not have any.
  bool requestRectifiedImages = goal->request_rectified_images;
  if (isXrSeries() && hasRawImages() && requestRectifiedImages)
  {
    ENSENSO_WARN(nh, "XR: Capture mode \"Raw\", skipping rectified images request.");
    requestRectifiedImages = false;
  }

  // Automatically disable requesting raw/rectified images if the camera has not downloaded any.
  if (isXrSeries() && !hasDownloadedImages() && (requestRawImages || requestRectifiedImages))
  {
    ENSENSO_WARN(nh, "XR: Downloading raw/rectified images is disabled, skipping the raw/rectified images request.");
    requestRawImages = false;
    requestRectifiedImages = false;
  }

  bool requestDisparityMap = goal->request_disparity_map;

  bool requestDepthImage = goal->request_depth_image;

  // Automatically request the point cloud if no data set is explicitly selected.
  bool requestPointCloud = goal->request_point_cloud || goal->request_depth_image;
  if (!goal->request_raw_images && !goal->request_rectified_images && !goal->request_disparity_map &&
      !goal->request_point_cloud && !goal->request_depth_image && !goal->request_normals)
  {
    requestPointCloud = true;
  }

  bool requestNormals = goal->request_normals;

  // Normals require computePointCloud, and computePointCloud requires computeDisparityMap to be called first.
  bool computePointCloud = requestPointCloud || requestNormals;
  bool computeDisparityMap = requestDisparityMap || computePointCloud;

  // Automatically disable requesting the disparity map if the camera does not have one.
  if (!hasDisparityMap())
  {
    requestDisparityMap = false;
  }

  // Automatically disable requesting 3D data if the camera is an XR and only has raw images.
  if (isXrSeries() && hasRawImages() && (computeDisparityMap || computePointCloud))
  {
    ENSENSO_WARN(nh,
                 "XR: Capture mode \"Raw\", skipping all 3D data requests! Only raw images are captured and they can "
                 "only be used for calibration actions. Rectifying these images afterwards is not possible and they "
                 "cannot be used to compute 3D data. If you want to retrieve 3D data, set capture mode to "
                 "\"Rectified\".");
    requestDisparityMap = false;
    requestDepthImage = false;
    requestPointCloud = false;
    requestNormals = false;
    computePointCloud = false;
    computeDisparityMap = false;
  }

  loadParameterSet(goal->parameter_set, computeDisparityMap ? projectorOn : projectorOff);
  ensenso::ros::Time imageTimestamp = capture();

  feedback.images_acquired = true;
  requestDataServer->publishFeedback(feedback);

  if (requestRawImages)
  {
    auto rawImages = imagePairsFromNxLibNode(cameraNode[itmImages][itmRaw], params.cameraFrame, params.isFileCamera);

    leftCameraInfo->header.stamp = rawImages[0].first->header.stamp;
    if (hasRightCamera())
    {
      rightCameraInfo->header.stamp = leftCameraInfo->header.stamp;
    }

    if (goal->include_results_in_response)
    {
      for (auto const& imagePair : rawImages)
      {
        result.left_raw_images.push_back(*imagePair.first);
        if (hasRightCamera())
        {
          result.right_raw_images.push_back(*imagePair.second);
        }
      }
      result.left_camera_info = *leftCameraInfo;
      if (hasRightCamera())
      {
        result.right_camera_info = *rightCameraInfo;
      }
    }
    if (publishResults)
    {
      // We only publish one of the images on the topic, even if FlexView is enabled.
      leftRawImagePublisher.publish(rawImages[0].first, leftCameraInfo);
      if (hasRightCamera())
      {
        rightRawImagePublisher.publish(rawImages[0].second, rightCameraInfo);
      }
    }
  }

  // Only call cmdRectifyImages if just the rectified images are requested. Otherwise the rectification is implicitly
  // invoked by cmdComputeDisparityMap, which is more efficient when using CUDA.
  if (requestRectifiedImages && !computeDisparityMap)
  {
    NxLibCommand rectify(cmdRectifyImages, params.serial);
    rectify.parameters()[itmCameras] = params.serial;
    rectify.execute();
  }
  else if (computeDisparityMap)
  {
    // Since the S-series currently uses cmdComputeDisparityMap instead of cmdComputePointMap to get the point map, the
    // global link has to be updated here before cmdComputeDisparityMap is executed.
    updateGlobalLink(imageTimestamp, "", goal->use_cached_transformation);

    NxLibCommand disparityMapCommand(cmdComputeDisparityMap, params.serial);
    disparityMapCommand.parameters()[itmCameras] = params.serial;
    disparityMapCommand.execute();
  }

  if (requestRectifiedImages)
  {
    auto rectifiedImages =
        imagePairsFromNxLibNode(cameraNode[itmImages][itmRectified], params.cameraFrame, params.isFileCamera);

    leftRectifiedCameraInfo->header.stamp = rectifiedImages[0].first->header.stamp;
    if (hasRightCamera())
    {
      rightRectifiedCameraInfo->header.stamp = leftRectifiedCameraInfo->header.stamp;
    }

    if (goal->include_results_in_response)
    {
      for (auto const& imagePair : rectifiedImages)
      {
        result.left_rectified_images.push_back(*imagePair.first);
        if (hasRightCamera())
        {
          result.right_rectified_images.push_back(*imagePair.second);
        }
      }
      result.left_rectified_camera_info = *leftRectifiedCameraInfo;
      if (hasRightCamera())
      {
        result.right_rectified_camera_info = *rightRectifiedCameraInfo;
      }
    }
    if (publishResults)
    {
      // We only publish one of the images on the topic, even if FlexView is enabled.
      leftRectifiedImagePublisher.publish(rectifiedImages[0].first, leftRectifiedCameraInfo);
      if (hasRightCamera())
      {
        rightRectifiedImagePublisher.publish(rectifiedImages[0].second, rightRectifiedCameraInfo);
      }
    }
  }

  auto depthImageCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>(*leftRectifiedCameraInfo);

  addDisparityMapOffset(depthImageCameraInfo);

  if (requestDisparityMap)
  {
    auto disparityMap =
        imageFromNxLibNode(cameraNode[itmImages][itmDisparityMap], params.cameraFrame, params.isFileCamera);

    depthImageCameraInfo->header.stamp = disparityMap->header.stamp;

    if (goal->include_results_in_response)
    {
      result.disparity_map = *disparityMap;
      result.disparity_map_info = *depthImageCameraInfo;
    }
    if (publishResults)
    {
      disparityMapPublisher.publish(disparityMap, depthImageCameraInfo);
    }
  }

  if (computePointCloud)
  {
    NxLibCommand computePointMap(cmdComputePointMap, params.serial);
    computePointMap.parameters()[itmCameras] = params.serial;
    computePointMap.execute();

    PointCloudROI const* pointCloudROI = nullptr;
    if (parameterSets.at(currentParameterSet).useROI)
    {
      pointCloudROI = &parameterSets.at(currentParameterSet).roi;
    }

    if (requestPointCloud && !requestNormals)
    {
      auto pointCloud = pointCloudFromNxLib(cameraNode[itmImages][itmPointMap], params.targetFrame, params.isFileCamera,
                                            pointCloudROI);

      if (goal->include_results_in_response)
      {
        pcl::toROSMsg(*pointCloud, result.point_cloud);
      }
      if (publishResults)
      {
        publishPointCloud(pointCloudPublisher, std::move(pointCloud));
      }
    }
    else
    {
      NxLibCommand computeNormals(cmdComputeNormals, params.serial);
      computeNormals.execute();

      auto pointCloud =
          pointCloudWithNormalsFromNxLib(cameraNode[itmImages][itmPointMap], cameraNode[itmImages][itmNormals],
                                         params.targetFrame, params.isFileCamera, pointCloudROI);

      if (goal->include_results_in_response)
      {
        pcl::toROSMsg(*pointCloud, result.point_cloud);
      }
      if (publishResults)
      {
        publishPointCloud(pointCloudNormalsPublisher, std::move(pointCloud));
      }
    }
  }

  if (requestDepthImage)
  {
    // In case camera and target frame are different, the previously computed point cloud has been automatically
    // transformed to world coordinates by the NxLib. Recompute it with the relative (to camera) flag so that the depth
    // image is in camera coordinates.
    if (params.cameraFrame != params.targetFrame)
    {
      NxLibCommand computePointMap(cmdComputePointMap, params.serial);
      computePointMap.parameters()[itmCameras] = params.serial;
      computePointMap.parameters()[itmRelative] = true;
      computePointMap.execute();
    }

    auto depthImage =
        depthImageFromNxLibNode(cameraNode[itmImages][itmPointMap], params.cameraFrame, params.isFileCamera);

    depthImageCameraInfo->header.stamp = depthImage->header.stamp;

    if (goal->include_results_in_response)
    {
      result.depth_image = *depthImage;
      result.depth_image_info = *depthImageCameraInfo;
    }
    if (publishResults)
    {
      depthImagePublisher.publish(depthImage, depthImageCameraInfo);
    }
  }

  requestDataServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(RequestData)
}
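
// A minimal client-side sketch (illustration only, not part of the driver) for the request_data
// action, again assuming the ROS 1 actionlib API and the ensenso_camera_msgs package.
//
//   actionlib::SimpleActionClient<ensenso_camera_msgs::RequestDataAction> client("request_data", true);
//   client.waitForServer();
//
//   ensenso_camera_msgs::RequestDataGoal goal;
//   goal.request_point_cloud = true;
//   goal.include_results_in_response = true;  // Return the data in the action result instead of publishing it.
//   client.sendGoal(goal);
//   client.waitForResult();
//   sensor_msgs::PointCloud2 const& cloud = client.getResult()->point_cloud;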

void StereoCamera::onSetParameter(ensenso::action::SetParameterGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(SetParameter, setParameterServer)

  ensenso::action::SetParameterResult result;

  loadParameterSet(goal->parameter_set);

  result.parameter_file_applied = false;
  if (!goal->parameter_file.empty())
  {
    result.parameter_file_applied = loadSettings(goal->parameter_file);
    if (!result.parameter_file_applied)
    {
      server->setAborted(std::move(result));
      return;
    }
  }

  bool projectorChanged = false;
  for (auto const& parameter : goal->parameters)
  {
    writeParameter(parameter);

    if (parameter.key == parameter.PROJECTOR || parameter.key == parameter.FRONT_LIGHT)
    {
      projectorChanged = true;
    }
  }

  // Synchronize to make sure that we read back the correct values.
  NxLibCommand synchronize(cmdSynchronize, params.serial);
  synchronize.parameters()[itmCameras] = params.serial;
  synchronize.execute();

  saveParameterSet(goal->parameter_set, projectorChanged);

  // The new parameters might change the camera calibration.
  updateCameraInfo();

  // Read back the actual values.
  for (auto const& parameter : goal->parameters)
  {
    result.results.push_back(*readParameter(parameter.key));
  }

  setParameterServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(SetParameter)
}

void StereoCamera::onLocatePattern(ensenso::action::LocatePatternGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(LocatePattern, locatePatternServer)

  ensenso::action::LocatePatternResult result;
  ensenso::action::LocatePatternFeedback feedback;

  loadParameterSet(goal->parameter_set, projectorOff);

  int numberOfShots = goal->number_of_shots;
  if (numberOfShots < 1)
  {
    numberOfShots = 1;
  }

  std::vector<StereoCalibrationPattern> patterns;
  ensenso::ros::Time imageTimestamp;
  for (int i = 0; i < numberOfShots; i++)
  {
    ensenso::ros::Time timestamp = capture();
    if (i == 0)
    {
      imageTimestamp = timestamp;
    }

    if (i == (numberOfShots - 1))
    {
      // The last image has been captured.
      feedback.images_acquired = true;
      locatePatternServer->publishFeedback(feedback);
    }

    bool clearBuffer = (i == 0);
    patterns = collectPattern(clearBuffer);
    if (patterns.empty())
    {
      result.found_pattern = false;
      locatePatternServer->setSucceeded(std::move(result));
      return;
    }

    if (patterns.size() > 1)
    {
      // Cannot average multiple shots of multiple patterns. We will cancel the capturing and estimate the pose of each
      // pattern individually.
      break;
    }
  }

  result.found_pattern = true;
  result.patterns.resize(patterns.size());
  for (size_t i = 0; i < patterns.size(); i++)
  {
    patterns[i].writeToMessage(result.patterns[i]);
  }

  std::string patternFrame = params.targetFrame;
  if (!goal->target_frame.empty())
  {
    patternFrame = goal->target_frame;
  }

  result.frame = patternFrame;

  if (patterns.size() > 1)
  {
    // Estimate the pose of all the patterns we found.
    auto patternPoses = estimatePatternPoses(imageTimestamp, patternFrame);

    result.pattern_poses.resize(patternPoses.size());
    for (size_t i = 0; i < patternPoses.size(); i++)
    {
      result.pattern_poses[i] = patternPoses[i];
    }
  }
  else
  {
    // Estimate the pose of a single pattern, averaging over the different shots.
    auto patternPose = estimatePatternPose(imageTimestamp, patternFrame);

    result.pattern_poses.resize(1);
    result.pattern_poses[0] = patternPose;
  }

  if (!goal->tf_frame.empty())
  {
    if (patterns.size() == 1)
    {
      auto transform = transformFromPose(result.pattern_poses[0], goal->tf_frame);
      transformBroadcaster->sendTransform(transform);
    }
    else
    {
      ENSENSO_WARN(nh, "Cannot publish the pattern pose in tf, because there are multiple patterns!");
    }
  }

  // The target frame has changed.
  // ...
  locatePatternServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(LocatePattern)
}

void StereoCamera::onProjectPattern(ensenso::action::ProjectPatternGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(ProjectPattern, projectPatternServer)

  ensenso::action::ProjectPatternResult result;

  tf2::Transform targetFrameTransformation = fromMsg(goal->target_frame_transformation);
  if (isValid(targetFrameTransformation))
  {
    updateTransformations(targetFrameTransformation);
  }
  else
  {
    // Here we need to pass the current timestamp.
    // ...
  }

  tf2::Transform patternTransform = fromMsg(goal->pattern_pose);

  NxLibCommand projectPattern(cmdProjectPattern, params.serial);
  projectPattern.parameters()[itmCameras] = params.serial;
  projectPattern.parameters()[itmGridSpacing] = goal->grid_spacing * 1000;
  projectPattern.parameters()[itmGridSize][0] = goal->grid_size_x;
  projectPattern.parameters()[itmGridSize][1] = goal->grid_size_y;

  writeTransformToNxLib(patternTransform, projectPattern.parameters()[itmPatternPose]);

  projectPattern.execute();

  int sensorWidth = cameraNode[itmSensor][itmSize][0].asInt();
  int sensorHeight = cameraNode[itmSensor][itmSize][1].asInt();

  NxLibItem projectedPoints = projectPattern.result()[itmStereo][0][itmPoints];
  NxLibItem leftPoints = projectedPoints[0];
  NxLibItem rightPoints = projectedPoints[1];

  result.pattern_is_visible = true;
  result.left_points.resize(leftPoints.count());
  result.right_points.resize(leftPoints.count());
  for (int i = 0; i < leftPoints.count(); i++)
  {
    double leftX = leftPoints[i][0].asDouble();
    double leftY = leftPoints[i][1].asDouble();
    double rightX = rightPoints[i][0].asDouble();
    double rightY = rightPoints[i][1].asDouble();

    result.left_points[i].x = leftX;
    result.left_points[i].y = leftY;
    result.right_points[i].x = rightX;
    result.right_points[i].y = rightY;

    if (leftX < 0 || leftX > sensorWidth || leftY < 0 || leftY > sensorHeight || rightX < 0 || rightX > sensorWidth ||
        rightY < 0 || rightY > sensorHeight)
    {
      result.pattern_is_visible = false;
    }
  }

  projectPatternServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(ProjectPattern)
}

void StereoCamera::onCalibrateHandEye(ensenso::action::CalibrateHandEyeGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(CalibrateHandEye, calibrateHandEyeServer)

  ensenso::action::CalibrateHandEyeResult result;
  result.command = goal->command;

  if (goal->command == goal->RESET)
  {
    handEyeCalibrationPatternBuffer.clear();
    handEyeCalibrationRobotTransforms.clear();
  }
  else if (goal->command == goal->CAPTURE_PATTERN)
  {
    if (params.robotFrame.empty() || params.wristFrame.empty())
    {
      result.error_message = "You need to specify a robot base and wrist frame to do a hand-eye calibration!";
      ENSENSO_ERROR(nh, "%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(std::move(result));
      return;
    }

    loadParameterSet(goal->parameter_set, projectorOff);
    ensenso::ros::Time imageTimestamp = capture();

    // Load the pattern buffer that we remembered from the previous calibration steps.
    if (!handEyeCalibrationPatternBuffer.empty())
    {
      NxLibCommand setPatternBuffer(cmdSetPatternBuffer, params.serial);
      setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
      setPatternBuffer.execute();
    }
    else
    {
      NxLibCommand(cmdDiscardPatterns, params.serial).execute();
    }

    std::vector<StereoCalibrationPattern> patterns = collectPattern();
    if (patterns.empty())
    {
      result.found_pattern = false;
      calibrateHandEyeServer->setSucceeded(std::move(result));
      return;
    }
    if (patterns.size() > 1)
    {
      result.error_message = "Detected multiple calibration patterns during a hand-eye calibration!";
      ENSENSO_ERROR(nh, "%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(std::move(result));
      return;
    }

    result.found_pattern = true;
    patterns[0].writeToMessage(result.pattern);

    auto patternPose = estimatePatternPose(imageTimestamp, params.cameraFrame, true);
    result.pattern_pose = patternPose.pose;

    geometry_msgs::msg::TransformStamped robotPose;
    try
    {
      robotPose = tfBuffer->lookupTransform(params.robotFrame, params.wristFrame, ensenso::ros::Time(0));
    }
    catch (tf2::TransformException& e)
    {
      result.error_message = std::string("Could not look up the robot pose due to the tf error: ") + e.what();
      ENSENSO_ERROR(nh, "%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(std::move(result));
      return;
    }

    // Remember the newly collected data for the next step.
    NxLibCommand getPatternBuffer(cmdGetPatternBuffer, params.serial);
    getPatternBuffer.execute();
    handEyeCalibrationPatternBuffer = getPatternBuffer.result()[itmPatterns].asJson();

    handEyeCalibrationRobotTransforms.push_back(fromMsg(robotPose));

    result.robot_pose = poseFromTransform(robotPose).pose;
  }
  else if (goal->command == goal->START_CALIBRATION)
  {
    if (handEyeCalibrationRobotTransforms.size() < 5)
    {
      result.error_message = "You need to collect at least 5 patterns before starting a hand-eye calibration!";
      ENSENSO_ERROR(nh, "%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(std::move(result));
      return;
    }

    // Load the pattern observations.
    size_t numberOfPatterns = 0;
    NxLibCommand setPatternBuffer(cmdSetPatternBuffer, params.serial);
    if (!goal->pattern_observations.empty())
    {
      for (size_t i = 0; i < goal->pattern_observations.size(); i++)
      {
        StereoCalibrationPattern pattern(goal->pattern_observations[i]);

        NxLibItem patternNode = setPatternBuffer.parameters()[itmPatterns][i][params.serial];
        pattern.writeToNxLib(patternNode[itmLeft][0]);
        pattern.writeToNxLib(patternNode[itmRight][0], true);
      }
    }
    else
    {
      setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
    }
    numberOfPatterns = setPatternBuffer.parameters()[itmPatterns].count();
    setPatternBuffer.execute();

    // Load the corresponding robot transforms.
    auto robotTransforms = handEyeCalibrationRobotTransforms;
    if (!goal->robot_poses.empty())
    {
      robotTransforms.clear();
      for (auto const& pose : goal->robot_poses)
      {
        tf2::Transform transform = fromMsg(pose);
        robotTransforms.push_back(transform);
      }
    }

    if (robotTransforms.size() != numberOfPatterns)
    {
      result.error_message = "The number of pattern observations does not match the number of robot poses!";
      ENSENSO_ERROR(nh, "%s", result.error_message.c_str());
      calibrateHandEyeServer->setAborted(std::move(result));
      return;
    }

    // Load the initial guesses from the action goal.
    tf2::Transform linkTransform, patternTransform;
    linkTransform = fromMsg(goal->link);
    patternTransform = fromMsg(goal->pattern_pose);

    NxLibCommand calibrateHandEye(cmdCalibrateHandEye, params.serial);
    calibrateHandEye.parameters()[itmSetup] = params.fixed ? valFixed : valMoving;

    // Check the robot geometry and set the fixed axes accordingly.
    if (goal->robot_geometry == goal->DOF4_FIX_CAMERA_POSE_Z_COMPONENT)
    {
      calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][0] = false;
      calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][1] = false;
      calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][2] = true;
    }
    else if (goal->robot_geometry == goal->DOF4_FIX_PATTERN_POSE_Z_COMPONENT)
    {
      calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][0] = false;
      calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][1] = false;
      calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][2] = true;
    }
    else if (goal->robot_geometry == goal->DOF3_FIX_CAMERA_POSE)
    {
      calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][0] = true;
      calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][1] = true;
      calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][2] = true;
    }
    else if (goal->robot_geometry == goal->DOF3_FIX_PATTERN_POSE)
    {
      calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][0] = true;
      calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][1] = true;
      calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][2] = true;
    }

    // The target node will be reset anyway before the next calibration.
    calibrateHandEye.parameters()[itmTarget] = getNxLibTargetFrameName();
    if (isValid(linkTransform))
    {
      writeTransformToNxLib(linkTransform.inverse(), calibrateHandEye.parameters()[itmLink]);
    }
    if (isValid(patternTransform))
    {
      writeTransformToNxLib(patternTransform, calibrateHandEye.parameters()[itmPatternPose]);
    }
    for (size_t i = 0; i < robotTransforms.size(); i++)
    {
      writeTransformToNxLib(robotTransforms[i], calibrateHandEye.parameters()[itmTransformations][i]);
    }

    calibrateHandEye.execute(false);

    auto getCalibrationResidual = [](NxLibItem const& node) {  // NOLINT
      if (node[itmResidual].exists())
      {
        return node[itmResidual].asDouble();
      }
      // Compatibility with the SDK 2.0.
      return node[itmReprojectionError].asDouble();
    };

    ensenso::ros::Rate waitingRate(5);
    while (!calibrateHandEye.finished())
    {
      if (calibrateHandEye.result()[itmProgress].exists())
      {
        ensenso::action::CalibrateHandEyeFeedback feedback;
        feedback.number_of_iterations = calibrateHandEye.result()[itmProgress][itmIterations].asInt();
        feedback.residual = getCalibrationResidual(calibrateHandEye.result()[itmProgress]);
        calibrateHandEyeServer->publishFeedback(feedback);
      }

      if (calibrateHandEyeServer->isPreemptRequested())
      {
        NxLibCommand(cmdBreak, params.serial).execute();

        calibrateHandEyeServer->setPreempted();
        return;
      }

      waitingRate.sleep();
    }

    // NxLib time is in milliseconds, ROS expects time to be in seconds.
    if (calibrateHandEye.result()[itmTime].exists())
    {
      // Fallback for NxLib < 4.0.
      result.calibration_time = calibrateHandEye.result()[itmTime].asDouble() / 1000;
    }
    else
    {
      result.calibration_time = calibrateHandEye.slot()[itmStatus][itmTime].asDouble() / 1000;
    }

    result.number_of_iterations = calibrateHandEye.result()[itmIterations].asInt();
    result.residual = getCalibrationResidual(calibrateHandEye.result());

    result.link = poseFromTransform(transformFromNxLib(cameraNode[itmLink]).inverse());
    result.pattern_pose = poseFromTransform(transformFromNxLib(calibrateHandEye.result()[itmPatternPose]));

    if (goal->write_calibration_to_eeprom)
    {
      // Save the new calibration link to the camera's EEPROM.
      NxLibCommand storeCalibration(cmdStoreCalibration, params.serial);
      storeCalibration.parameters()[itmCameras] = params.serial;
      storeCalibration.parameters()[itmLink] = true;
      storeCalibration.execute();
    }
  }

  // The target frame has changed.
  // ...
  calibrateHandEyeServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(CalibrateHandEye)
}
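
// The hand-eye calibration above is a multi-step protocol driven by goal->command:
//   1. RESET discards the pattern buffer and the recorded robot poses.
//   2. CAPTURE_PATTERN, called once per robot pose, captures an image, collects the calibration
//      pattern and records the current robot pose from tf (robot frame -> wrist frame).
//   3. START_CALIBRATION runs cmdCalibrateHandEye on the collected data; at least 5 pattern/pose
//      pairs are required.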

void StereoCamera::onCalibrateWorkspace(ensenso::action::CalibrateWorkspaceGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(CalibrateWorkspace, calibrateWorkspaceServer)

  if (!params.fixed)
  {
    ENSENSO_WARN(nh,
                 "You are performing a workspace calibration for a moving camera. Are you sure this is what you want?");
  }

  ensenso::action::CalibrateWorkspaceResult result;
  result.successful = true;

  loadParameterSet(goal->parameter_set, projectorOff);

  int numberOfShots = goal->number_of_shots;
  if (numberOfShots < 1)
  {
    numberOfShots = 1;
  }

  ensenso::ros::Time imageTimestamp;
  for (int i = 0; i < numberOfShots; i++)
  {
    ensenso::ros::Time timestamp = capture();
    if (i == 0)
    {
      imageTimestamp = timestamp;
    }

    bool clearBuffer = (i == 0);
    std::vector<StereoCalibrationPattern> patterns = collectPattern(clearBuffer);
    if (patterns.size() != 1)
    {
      result.successful = false;
      calibrateWorkspaceServer->setSucceeded(std::move(result));
      return;
    }
  }

  auto patternPose = estimatePatternPose(imageTimestamp);
  tf2::Transform patternTransform = fromMsg(patternPose);

  tf2::Transform definedPatternTransform = fromMsg(goal->defined_pattern_pose);

  NxLibCommand calibrateWorkspace(cmdCalibrateWorkspace, params.serial);
  calibrateWorkspace.parameters()[itmCameras] = params.serial;
  writeTransformToNxLib(patternTransform, calibrateWorkspace.parameters()[itmPatternPose]);
  writeTransformToNxLib(definedPatternTransform, calibrateWorkspace.parameters()[itmDefinedPose]);
  calibrateWorkspace.parameters()[itmTarget] = getNxLibTargetFrameName();
  calibrateWorkspace.execute();

  if (goal->write_calibration_to_eeprom)
  {
    // Save the new calibration link to the camera's EEPROM.
    NxLibCommand storeCalibration(cmdStoreCalibration, params.serial);
    storeCalibration.parameters()[itmCameras] = params.serial;
    storeCalibration.parameters()[itmLink] = true;
    storeCalibration.execute();
  }

  // ...
  calibrateWorkspaceServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(CalibrateWorkspace)
}

void StereoCamera::onTelecentricProjection(ensenso::action::TelecentricProjectionGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(TelecentricProjection, telecentricProjectionServer)

  ensenso::action::TelecentricProjectionResult result;

  bool useViewPose = isValid(goal->view_pose);
  bool frameGiven = !goal->frame.empty();

  if (!frameGiven)
  {
    result.error.message = "You have to define a valid frame to which the projection will be published. Aborting";
    ENSENSO_ERROR(nh, "%s", result.error.message.c_str());
    telecentricProjectionServer->setAborted(std::move(result));
    return;
  }

  tf2::Transform transform;
  std::string publishingFrame = useViewPose ? params.targetFrame : goal->frame;

  if (useViewPose)
  {
    transform = fromMsg(goal->view_pose);
  }
  else
  {
    transform = getLatestTransform(*tfBuffer, params.targetFrame, goal->frame);
  }

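  // Goal fields left at their zero default fall back to: pixel scale 1, scaling 1.0 and an
  // output size of 1024x768 pixels.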
  int pixelScale = goal->pixel_scale != 0. ? goal->pixel_scale : 1;
  double scaling = goal->scaling != 0. ? goal->scaling : 1.0;
  int sizeWidth = goal->size_width != 0 ? goal->size_width : 1024;
  int sizeHeight = goal->size_height != 0 ? goal->size_height : 768;
  bool useOpenGL = goal->use_opengl == 1;
  RenderPointMapParamsTelecentric renderParams(useOpenGL, pixelScale, scaling, sizeWidth, sizeHeight,
                                               std::move(transform));

  NxLibCommand renderPointMap(cmdRenderPointMap, params.serial);

  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
  {
    renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
  }
  setRenderParams(renderPointMap.parameters(), &renderParams);

  renderPointMap.execute();

  NxLibItem resultPath = renderPointMap.result()[itmImages][itmRenderPointMap];

  if (!resultPath.exists())
  {
    result.error.message = "Rendered point map does not exist in path: " + resultPath.path;
    ENSENSO_ERROR(nh, "%s", result.error.message.c_str());
    telecentricProjectionServer->setAborted(std::move(result));
    return;
  }

  if (goal->publish_results || goal->include_results_in_response)
  {
    if (goal->request_point_cloud || (!goal->request_point_cloud && !goal->request_depth_image))
    {
      auto pointCloud = retrieveRenderedPointCloud(renderPointMap.result(), goal->frame, params.isFileCamera);

      if (goal->include_results_in_response)
      {
        pcl::toROSMsg(*pointCloud, result.projected_point_cloud);
        telecentricProjectionServer->setSucceeded(std::move(result));
      }
      else
      {
        telecentricProjectionServer->setSucceeded();
      }
      if (goal->publish_results)
      {
        publishPointCloud(pointCloudProjectedPublisher, std::move(pointCloud));
      }
    }

    if (goal->request_depth_image)
    {
      auto renderedImage = retrieveRenderedDepthMap(renderPointMap.result(), goal->frame, params.isFileCamera);

      if (goal->publish_results)
      {
        projectedImagePublisher.publish(renderedImage);
      }
      if (goal->include_results_in_response)
      {
        result.projected_depth_map = *renderedImage;
        telecentricProjectionServer->setSucceeded(std::move(result));
      }
      else
      {
        telecentricProjectionServer->setSucceeded();
      }
    }
  }
  else
  {
    telecentricProjectionServer->setSucceeded();
  }

  FINISH_NXLIB_ACTION(TelecentricProjection)
}

void StereoCamera::onTexturedPointCloud(ensenso::action::TexturedPointCloudGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(TexturedPointCloud, texturedPointCloudServer)

  ensenso::action::TexturedPointCloudResult result;

  if (goal->mono_serial.empty())
  {
    result.error.message = "In order to use this action, you have to specify a mono camera serial";
    ENSENSO_ERROR(nh, "%s", result.error.message.c_str());
    texturedPointCloudServer->setAborted(std::move(result));
    return;
  }

  double farPlane = goal->far_plane ? goal->far_plane : 10000.;
  double nearPlane = goal->near_plane ? goal->near_plane : -10000.;
  bool useOpenGL = goal->use_opengl == 1;
  bool withTexture = true;

  RenderPointMapParamsProjection renderParams(useOpenGL, farPlane, nearPlane, withTexture);

  NxLibCommand renderPointMap(cmdRenderPointMap, params.serial);
  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
  {
    renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
  }
  renderPointMap.parameters()[itmCamera] = goal->mono_serial;
  setRenderParams(renderPointMap.parameters(), &renderParams);

  renderPointMap.execute();

  if (goal->publish_results || goal->include_results_in_response)
  {
    auto pointCloud = retrieveTexturedPointCloud(renderPointMap.result(), params.targetFrame, params.isFileCamera);
    if (goal->include_results_in_response)
    {
      pcl::toROSMsg(*pointCloud, result.point_cloud);
      texturedPointCloudServer->setSucceeded(std::move(result));
    }
    else
    {
      texturedPointCloudServer->setSucceeded();
    }
    if (goal->publish_results)
    {
      publishPointCloud(pointCloudColoredPublisher, std::move(pointCloud));
    }
  }
  else
  {
    texturedPointCloudServer->setSucceeded();
  }

  FINISH_NXLIB_ACTION(TexturedPointCloud)
}

void StereoCamera::saveParameterSet(std::string name, bool projectorWasSet)
{
  if (name.empty())
  {
    name = DEFAULT_PARAMETER_SET;
  }

  Camera::saveParameterSet(name);

  ParameterSet& parameterSet = parameterSets.at(name);
  if (projectorWasSet)
  {
    parameterSet.autoProjector = false;
  }
}

void StereoCamera::loadParameterSet(std::string name, ProjectorState projector)
{
  Camera::loadParameterSet(std::move(name));

  bool changedParameters = false;
  ParameterSet const& parameterSet = parameterSets.at(currentParameterSet);

  if (parameterSet.autoProjector && projector != projectorDontCare)
  {
    try
    {
      if (projector == projectorOn)
      {
        cameraNode[itmParameters][itmCapture][itmProjector] = true;
        cameraNode[itmParameters][itmCapture][itmFrontLight] = false;
      }
      else
      {
        cameraNode[itmParameters][itmCapture][itmProjector] = false;
        cameraNode[itmParameters][itmCapture][itmFrontLight] = true;
      }
      changedParameters = true;
    }
    catch (...)
    {
      // Setting the projector and front light fails on file cameras.
    }
  }

  if (changedParameters)
  {
    NxLibCommand synchronize(cmdSynchronize, params.serial);
    synchronize.parameters()[itmCameras] = params.serial;
    synchronize.execute();
  }
}

ensenso::ros::Time StereoCamera::timestampOfCapturedImage() const
{
  // File cameras need this workaround, because the timestamps of their captures do not change over time. Looking up
  // the current tf tree with such a timestamp results in errors, because it lies in the past (and usually further
  // back than the tfBuffer stores transforms).
  if (params.isFileCamera)
  {
    return ensenso::ros::now(nh);
  }
  // For stereo cameras with only one sensor (S-Series) the image is stored in the raw node.
  else if (isSSeries())
  {
    return timestampFromNxLibNode(cameraNode[itmImages][itmRaw]);
  }
  // For XR cameras the image node depends on the capture mode. In Raw mode they behave like a normal stereo camera.
  else if (isXrSeries() && !hasRawImages())
  {
    ENSENSO_WARN_ONCE("XR: Using timestamp of first left rectified image in capture mode \"Rectified\".");
    return timestampFromNxLibNode(cameraNode[itmImages][itmRectified][itmLeft]);
  }
  // For stereo cameras with a left and right sensor the timestamp of the left sensor is taken as the reference.
  else
  {
    return timestampFromNxLibNode(cameraNode[itmImages][itmRaw][itmLeft]);
  }
}

ensenso::ros::Time StereoCamera::capture() const
{
  // Update virtual objects. Ignore failures with a simple warning.
  if (params.virtualObjectHandler)
  {
    try
    {
      params.virtualObjectHandler->updateObjectLinks();
    }
    catch (const std::exception& e)
    {
      ENSENSO_WARN(nh, "Unable to update virtual objects. Error: %s", e.what());
    }
  }

  ENSENSO_DEBUG(nh, "Capturing an image...");

  NxLibCommand capture(cmdCapture, params.serial);
  capture.parameters()[itmCameras] = params.serial;
  if (params.captureTimeout > 0)
  {
    capture.parameters()[itmTimeout] = params.captureTimeout;
  }
  capture.execute();

  return timestampOfCapturedImage();
}

std::vector<StereoCalibrationPattern> StereoCamera::collectPattern(bool clearBuffer) const
{
  if (clearBuffer)
  {
    NxLibCommand(cmdDiscardPatterns, params.serial).execute();
  }

  NxLibCommand collectPattern(cmdCollectPattern, params.serial);
  collectPattern.parameters()[itmCameras] = params.serial;
  collectPattern.parameters()[itmDecodeData] = true;
  collectPattern.parameters()[itmFilter][itmCameras] = params.serial;
  bool useModel = true;
  collectPattern.parameters()[itmFilter][itmUseModel] = useModel;
  collectPattern.parameters()[itmFilter][itmType] = valStatic;
  collectPattern.parameters()[itmFilter][itmValue] = true;
  try
  {
    collectPattern.execute();
  }
  catch (NxLibException& e)
  {
    if (e.getErrorCode() == NxLibExecutionFailed)
    {
      if (collectPattern.result()[itmErrorSymbol] == errPatternNotFound ||
          collectPattern.result()[itmErrorSymbol] == errPatternNotDecodable)
      {
        return {};
      }
    }
    throw;
  }

  if (!collectPattern.result()[itmStereo].exists())
  {
    // We did find patterns, but only in one of the cameras.
    return {};
  }

  std::vector<StereoCalibrationPattern> result;

  for (int i = 0; i < collectPattern.result()[itmStereo].count(); i++)
  {
    result.emplace_back(collectPattern.result()[itmStereo][i]);
  }

  // Extract the pattern's image points from the result.
  NxLibItem pattern = collectPattern.result()[itmPatterns][0][params.serial];
  if (pattern[itmLeft].count() > 1 || pattern[itmRight].count() > 1)
  {
    // We cannot tell which of the patterns in the two cameras belong together, because that would need a comparison
    // with the stereo model.
    return result;
  }

  for (size_t i = 0; i < result.size(); i++)
  {
    for (int j = 0; j < pattern[itmLeft][i][itmPoints].count(); j++)
    {
      NxLibItem pointNode = pattern[itmLeft][i][itmPoints][j];

      ensenso::msg::ImagePoint point;
      point.x = pointNode[0].asDouble();
      point.y = pointNode[1].asDouble();
      result.at(i).leftPoints.push_back(point);
    }
    for (int j = 0; j < pattern[itmRight][i][itmPoints].count(); j++)
    {
      NxLibItem pointNode = pattern[itmRight][i][itmPoints][j];

      ensenso::msg::ImagePoint point;
      point.x = pointNode[0].asDouble();
      point.y = pointNode[1].asDouble();
      result.at(i).rightPoints.push_back(point);
    }
  }

  return result;
}

geometry_msgs::msg::PoseStamped StereoCamera::estimatePatternPose(ensenso::ros::Time imageTimestamp,
                                                                  std::string const& targetFrame,
                                                                  bool latestPatternOnly) const
{
  updateGlobalLink(imageTimestamp, targetFrame);

  NxLibCommand estimatePatternPose(cmdEstimatePatternPose, params.serial);
  if (latestPatternOnly)
  {
    estimatePatternPose.parameters()[itmAverage] = false;

    int patternCount = NxLibItem()[itmParameters][itmPatternCount].asInt();
    estimatePatternPose.parameters()[itmFilter][itmOr][0][itmAnd][0][itmType] = valIndex;
    estimatePatternPose.parameters()[itmFilter][itmOr][0][itmAnd][0][itmValue] = patternCount - 1;
  }
  else
  {
    estimatePatternPose.parameters()[itmAverage] = true;
  }
  estimatePatternPose.execute();

  auto patterns = estimatePatternPose.result()[itmPatterns];
  ENSENSO_ASSERT(patterns.count() == 1);

  return stampedPoseFromNxLib(patterns[0][itmPatternPose], targetFrame, imageTimestamp);
}

std::vector<geometry_msgs::msg::PoseStamped> StereoCamera::estimatePatternPoses(ensenso::ros::Time imageTimestamp,
                                                                                std::string const& targetFrame) const
{
  updateGlobalLink(imageTimestamp, targetFrame);

  NxLibCommand estimatePatternPose(cmdEstimatePatternPose, params.serial);
  estimatePatternPose.parameters()[itmAverage] = false;
  estimatePatternPose.parameters()[itmFilter][itmCameras] = params.serial;
  estimatePatternPose.parameters()[itmFilter][itmUseModel] = true;
  estimatePatternPose.parameters()[itmFilter][itmType] = valStatic;
  estimatePatternPose.parameters()[itmFilter][itmValue] = true;
  estimatePatternPose.execute();

  auto patterns = estimatePatternPose.result()[itmPatterns];

  std::vector<geometry_msgs::msg::PoseStamped> result;
  result.reserve(patterns.count());

  for (int i = 0; i < patterns.count(); i++)
  {
    result.push_back(stampedPoseFromNxLib(patterns[i][itmPatternPose], targetFrame, imageTimestamp));
  }

  return result;
}

void StereoCamera::fillCameraInfoFromNxLib(sensor_msgs::msg::CameraInfoPtr const& info, bool right,
                                           bool rectified) const
{
  fillBasicCameraInfoFromNxLib(info);

  NxLibItem monoCalibrationNode = cameraNode[itmCalibration][itmMonocular][right ? itmRight : itmLeft];
  NxLibItem stereoCalibrationNode = cameraNode[itmCalibration][itmStereo][right ? itmRight : itmLeft];

  if (rectified)
  {
    // For the rectified images all transformations are the identity (because all of the distortions were already
    // removed), except for the stereo camera matrix.
    GET_D_MATRIX(info).resize(5, 0);

    // Scaling the disparity map also scales the rectified images. The range is 0.1 to 1. The factor has to be applied
    // to the projection/camera matrix P for correct 3D data.
    auto scalingFactor = cameraNode[itmParameters][itmDisparityMap][itmScaling].asDouble();
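    // For example, with a disparity map scaling of 0.5, a focal length of 1000 px at full
    // resolution enters P as fx = 500 px, matching the downscaled rectified images.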

    for (int row = 0; row < 3; row++)
    {
      for (int column = 0; column < 3; column++)
      {
        GET_P_MATRIX(info)[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble() * scalingFactor;

        if (row == column)
        {
          GET_K_MATRIX(info)[3 * row + column] = 1;
          GET_R_MATRIX(info)[3 * row + column] = 1;
        }
      }
    }
  }
  else
  {
    fillDistortionParamsFromNxLib(monoCalibrationNode[itmDistortion], info);

    for (int row = 0; row < 3; row++)
    {
      for (int column = 0; column < 3; column++)
      {
        GET_K_MATRIX(info)[3 * row + column] = monoCalibrationNode[itmCamera][column][row].asDouble();
        GET_P_MATRIX(info)[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();
        GET_R_MATRIX(info)[3 * row + column] = stereoCalibrationNode[itmRotation][column][row].asDouble();
      }
    }
  }

  if (right)
  {
    // Add the offset of the right camera relative to the left one to the projection matrix.
    double fx = stereoCalibrationNode[itmCamera][0][0].asDouble();
    double baseline = cameraNode[itmCalibration][itmStereo][itmBaseline].asDouble() / 1000.0;
    GET_P_MATRIX(info)[3] = -fx * baseline;
  }

  int leftTopX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][0].asInt();
  int leftTopY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][1].asInt();
  int rightBottomX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][0].asInt();
  int rightBottomY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][1].asInt();
  info->roi.x_offset = leftTopX;
  info->roi.y_offset = leftTopY;
  info->roi.width = rightBottomX - leftTopX;
  info->roi.height = rightBottomY - leftTopY;
  if (rectified)
  {
    info->roi.do_rectify = true;
  }
}
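
// For reference (standard sensor_msgs::msg::CameraInfo semantics, not specific to this driver):
// P is a row-major 3x4 matrix of the form
//
//         [fx'  0   cx'  Tx]
//     P = [ 0   fy' cy'   0]
//         [ 0   0    1    0]
//
// and for the right camera of a horizontal stereo pair Tx = -fx' * B, where B is the baseline
// in meters. This is exactly the term written into GET_P_MATRIX(info)[3] above.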

void StereoCamera::updateCameraInfo()
{
  fillCameraInfoFromNxLib(leftCameraInfo, false);
  fillCameraInfoFromNxLib(leftRectifiedCameraInfo, false, true);
  if (hasRightCamera())
  {
    fillCameraInfoFromNxLib(rightCameraInfo, true);
    fillCameraInfoFromNxLib(rightRectifiedCameraInfo, true, true);
  }
}

ensenso::msg::ParameterPtr StereoCamera::readParameter(std::string const& key) const
{
  auto message = ensenso::std::make_shared<ensenso::msg::Parameter>();
  message->key = key;

  if (key == ensenso::msg::Parameter::REGION_OF_INTEREST)
  {
    PointCloudROI const& roi = parameterSets.at(currentParameterSet).roi;

    message->region_of_interest_value.lower.x = roi.minX;
    message->region_of_interest_value.lower.y = roi.minY;
    message->region_of_interest_value.lower.z = roi.minZ;
    message->region_of_interest_value.upper.x = roi.maxX;
    message->region_of_interest_value.upper.y = roi.maxY;
    message->region_of_interest_value.upper.z = roi.maxZ;
  }
  else if (key == ensenso::msg::Parameter::FLEX_VIEW)
  {
    NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];

    if (!flexViewNode.exists())
    {
      message->float_value = -1;
    }
    else if (flexViewNode.isNumber())
    {
      message->float_value = flexViewNode.asInt();
    }
    else
    {
      message->float_value = 0;
    }
  }
  else
  {
    return Camera::readParameter(key);
  }

  return message;
}

void StereoCamera::writeParameter(ensenso::msg::Parameter const& parameter)
{
  if (parameter.key == ensenso::msg::Parameter::REGION_OF_INTEREST)
  {
    PointCloudROI& roi = parameterSets.at(currentParameterSet).roi;

    roi.minX = parameter.region_of_interest_value.lower.x;
    roi.minY = parameter.region_of_interest_value.lower.y;
    roi.minZ = parameter.region_of_interest_value.lower.z;
    roi.maxX = parameter.region_of_interest_value.upper.x;
    roi.maxY = parameter.region_of_interest_value.upper.y;
    roi.maxZ = parameter.region_of_interest_value.upper.z;

    parameterSets.at(currentParameterSet).useROI = !roi.isEmpty();
  }
  else if (parameter.key == ensenso::msg::Parameter::FLEX_VIEW)
  {
    NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];

    if (!flexViewNode.exists())
    {
      ENSENSO_WARN(nh, "Writing the parameter FlexView, but the camera does not support it!");
      return;
    }

    int n = static_cast<int>(std::round(parameter.float_value));
    if (n <= 1)
    {
      flexViewNode = false;
    }
    else
    {
      flexViewNode = n;
    }
  }
  else
  {
    Camera::writeParameter(parameter);
  }
}

bool StereoCamera::isSSeries() const
{
  return cameraNode[itmType].asString() == "StructuredLight";
}

bool StereoCamera::isXrSeries() const
{
  std::string modelName = cameraNode[itmModelName].asString();
  return startswith(modelName, "XR");
}

bool StereoCamera::hasRightCamera() const
{
  return (!isSSeries());
}

bool StereoCamera::hasRawImages() const
{
  std::string captureMode = cameraNode[itmParameters][itmCapture][itmMode].asString();
  return captureMode == valRaw;
}

bool StereoCamera::hasDownloadedImages() const
{
  return isXrSeries() ? cameraNode[itmParameters][itmCapture][itmDownloadImages].asBool() : true;
}

bool StereoCamera::hasDisparityMap() const
{
  return (!isSSeries());
}

void StereoCamera::addDisparityMapOffset(sensor_msgs::msg::CameraInfoPtr const& info) const
{
  double disparityMapOffset = 0.0;
  if (cameraNode[itmCalibration][itmDynamic][itmStereo][itmDisparityMapOffset].exists())
  {
    disparityMapOffset = cameraNode[itmCalibration][itmDynamic][itmStereo][itmDisparityMapOffset].asDouble();
  }

  // Adjust the width, which differs from the sensor image width if the disparity map offset is non-zero.
  info->width -= disparityMapOffset;

  // Adjust the origin cx.
  GET_K_MATRIX(info)[2] -= disparityMapOffset;
  GET_P_MATRIX(info)[2] -= disparityMapOffset;
}
Definition: stereo_camera.h:37
DEFAULT_PARAMETER_SET
const std::string DEFAULT_PARAMETER_SET
Definition: camera.h:62
StereoCamera::depthImagePublisher
image_transport::CameraPublisher depthImagePublisher
Definition: stereo_camera.h:49
image_transport::CameraPublisher::publish
void publish(const sensor_msgs::Image &image, const sensor_msgs::CameraInfo &info) const
FINISH_NXLIB_ACTION
#define FINISH_NXLIB_ACTION(ACTION_NAME)
Definition: camera.h:104
StereoCamera::onTexturedPointCloud
void onTexturedPointCloud(ensenso::action::TexturedPointCloudGoalConstPtr const &goal)
Definition: stereo_camera.cpp:1145
writeTransformToNxLib
void writeTransformToNxLib(Transform const &transform, NxLibItem const &node)
Definition: pose_utilities.cpp:70
StereoCamera::calibrateHandEyeServer
std::unique_ptr< CalibrateHandEyeServer > calibrateHandEyeServer
Definition: stereo_camera.h:35
StereoCamera::isXrSeries
bool isXrSeries() const
Definition: stereo_camera.cpp:1608
CameraParameters::virtualObjectHandler
std::unique_ptr< ensenso_camera::VirtualObjectHandler > virtualObjectHandler
Definition: camera.h:250
StereoCamera::startServers
void startServers() const override
Definition: stereo_camera.cpp:68
StereoCamera::onCalibrateWorkspace
void onCalibrateWorkspace(ensenso::action::CalibrateWorkspaceGoalConstPtr const &goal)
Definition: stereo_camera.cpp:967
Camera::loadParameterSet
void loadParameterSet(std::string name)
Definition: camera.cpp:615
Camera::writeParameter
virtual void writeParameter(ensenso::msg::Parameter const &parameter)
Definition: camera.cpp:445
PointCloudROI::maxZ
float maxZ
Definition: point_cloud_utilities.h:22
PointCloudROI::minY
float minY
Definition: point_cloud_utilities.h:18
CameraParameters::isFileCamera
bool isFileCamera
Definition: camera.h:186
StereoCamera::locatePatternServer
std::unique_ptr< LocatePatternServer > locatePatternServer
Definition: stereo_camera.h:38
Camera::saveParameterSet
void saveParameterSet(std::string name)
Definition: camera.cpp:602
StereoCamera::fillCameraInfoFromNxLib
void fillCameraInfoFromNxLib(sensor_msgs::msg::CameraInfoPtr const &info, bool right, bool rectified=false) const
Definition: stereo_camera.cpp:1441
StereoCamera::isSSeries
bool isSSeries() const
Definition: stereo_camera.cpp:1603
ENSENSO_DEBUG
void ENSENSO_DEBUG(T... args)
Definition: logging.h:74
StereoCamera::pointCloudPublisher
PointCloudPublisher< ensenso::pcl::PointCloud > pointCloudPublisher
Definition: stereo_camera.h:52
StereoCamera::projectPatternServer
std::unique_ptr< ProjectPatternServer > projectPatternServer
Definition: stereo_camera.h:39
message
def message(msg, *args, **kwargs)
poseFromTransform
StampedPoseMsg poseFromTransform(StampedTransformMsg const &transform)
Definition: pose_utilities.cpp:182
StereoCamera::advertiseTopics
void advertiseTopics()
Definition: stereo_camera.cpp:36
Camera::startServers
virtual void startServers() const
Definition: camera.cpp:120
CameraParameters::serial
std::string serial
Definition: camera.h:180
PointCloudROI
Definition: point_cloud_utilities.h:15
Camera::currentParameterSet
std::string currentParameterSet
Definition: camera.h:294
Camera::transformBroadcaster
std::unique_ptr< tf2_ros::TransformBroadcaster > transformBroadcaster
Definition: camera.h:280
ensenso::image_transport::create_camera_publisher
inline ::image_transport::CameraPublisher create_camera_publisher(ensenso::ros::NodeHandle const &nh, ::std::string const &topic_name)
Definition: image_transport.h:64
depthImageFromNxLibNode
ImagePtr depthImageFromNxLibNode(NxLibItem const &node, std::string const &frame, bool isFileCamera)
Definition: image_utilities.cpp:182
StereoCamera::init
void init() override
Definition: stereo_camera.cpp:60
StereoCamera::disparityMapPublisher
image_transport::CameraPublisher disparityMapPublisher
Definition: stereo_camera.h:48
StereoCamera::hasDownloadedImages
bool hasDownloadedImages() const
Definition: stereo_camera.cpp:1625
MAKE_SERVER
#define MAKE_SERVER(CallbackClass, ActionName, TopicName)
Definition: action_server.h:35
GET_D_MATRIX
#define GET_D_MATRIX(info)
Definition: namespace.h:54
CameraParameters::wristFrame
std::string wristFrame
Definition: camera.h:240
StereoCalibrationPattern
Definition: calibration_pattern.h:47
StereoCamera::timestampOfCapturedImage
ensenso::ros::Time timestampOfCapturedImage() const
Definition: stereo_camera.cpp:1254
CameraParameters::fixed
bool fixed
Definition: camera.h:196
StereoCamera
Definition: stereo_camera.h:27
Camera::loadSettings
bool loadSettings(std::string const &jsonFile, bool saveAsDefaultParameters=false)
Definition: camera.cpp:129
PointCloudROI::isEmpty
bool isEmpty() const
Definition: point_cloud_utilities.h:29
tf2::Transform
ensenso::ros::NodeHandle
::ros::NodeHandle NodeHandle
Definition: node.h:215
retrieveRenderedDepthMap
sensor_msgs::msg::ImagePtr retrieveRenderedDepthMap(NxLibItem const &cmdResult, std::string const &frame, bool isFileCamera)
Definition: stereo_camera_helpers.h:335
StereoCamera::texturedPointCloudServer
std::unique_ptr< TexturedPointCloudServer > texturedPointCloudServer
Definition: stereo_camera.h:41
pose_utilities.h
StereoCamera::pointCloudProjectedPublisher
PointCloudPublisher< ensenso::pcl::PointCloud > pointCloudProjectedPublisher
Definition: stereo_camera.h:55
ParameterSet
Definition: camera.h:146
ensenso::ros::Time
::ros::Time Time
Definition: time.h:67
Camera::initStatusTimer
virtual void initStatusTimer()
Definition: camera.cpp:704
Camera::initTfPublishTimer
virtual void initTfPublishTimer()
Definition: camera.cpp:699
publishPointCloud
void publishPointCloud(ensenso::ros::Publisher< T > const &publisher, std::unique_ptr< T > pointCloud)
Definition: pcl.h:95
CameraParameters::captureTimeout
int captureTimeout
Definition: camera.h:245
image_transport::Publisher::publish
void publish(const sensor_msgs::Image &message) const
PointCloudROI::maxY
float maxY
Definition: point_cloud_utilities.h:21
StereoCamera::onSetParameter
void onSetParameter(ensenso::action::SetParameterGoalConstPtr const &goal) override
Definition: stereo_camera.cpp:486
conversion.h
StereoCamera::collectPattern
std::vector< StereoCalibrationPattern > collectPattern(bool clearBuffer=false) const
Definition: stereo_camera.cpp:1309
imagePairsFromNxLibNode
std::vector< ImagePtrPair > imagePairsFromNxLibNode(NxLibItem const &node, std::string const &frame, bool isFileCamera)
Definition: image_utilities.cpp:136
parameters.h
StereoCamera::onRequestData
void onRequestData(ensenso::action::RequestDataGoalConstPtr const &goal)
Definition: stereo_camera.cpp:198
StereoCamera::leftCameraInfo
sensor_msgs::msg::CameraInfoPtr leftCameraInfo
Definition: stereo_camera.h:30
StereoCamera::hasRawImages
bool hasRawImages() const
Definition: stereo_camera.cpp:1619
RenderPointMapParamsTelecentric
Definition: stereo_camera_helpers.h:70
ENSENSO_ASSERT
#define ENSENSO_ASSERT(cond)
Definition: core.h:166
ProjectorState
ProjectorState
Definition: stereo_camera.h:20
RenderPointMapParamsProjection
Definition: stereo_camera_helpers.h:117
ensenso::ros::now
inline ::ros::Time now(ensenso::ros::NodeHandle const &nh)
Definition: time.h:76
StereoCamera::onTelecentricProjection
void onTelecentricProjection(ensenso::action::TelecentricProjectionGoalConstPtr const &goal)
Definition: stereo_camera.cpp:1040
PointCloudROI::minX
float minX
Definition: point_cloud_utilities.h:17
StereoCamera::rightCameraInfo
sensor_msgs::msg::CameraInfoPtr rightCameraInfo
Definition: stereo_camera.h:31
StereoCamera::leftRawImagePublisher
image_transport::CameraPublisher leftRawImagePublisher
Definition: stereo_camera.h:44
StereoCamera::updateCameraInfo
void updateCameraInfo() override
Definition: stereo_camera.cpp:1510
StereoCamera::writeParameter
void writeParameter(ensenso::msg::Parameter const &parameter) override
Definition: stereo_camera.cpp:1562
StereoCamera::onLocatePattern
void onLocatePattern(ensenso::action::LocatePatternGoalConstPtr const &goal)
Definition: stereo_camera.cpp:537
Camera::readParameter
virtual ensenso::msg::ParameterPtr readParameter(std::string const &key) const
Definition: camera.cpp:406
setRenderParams
void setRenderParams(NxLibItem const &cmdParams, RenderPointMapParams const *params)
Definition: stereo_camera_helpers.h:143
PointCloudROI::minZ
float minZ
Definition: point_cloud_utilities.h:19
StereoCamera::telecentricProjectionServer
std::unique_ptr< TelecentricProjectionServer > telecentricProjectionServer
Definition: stereo_camera.h:42
ensenso::ros::Rate
::ros::Rate Rate
Definition: time.h:66
std
StereoCamera::capture
ensenso::ros::Time capture() const override
Definition: stereo_camera.cpp:1281
Camera::cameraNode
NxLibItem cameraNode
Definition: camera.h:266
transformFromPose
StampedTransformMsg transformFromPose(StampedPoseMsg const &pose, std::string const &childFrame)
Definition: pose_utilities.cpp:173
ensenso::image_transport::create_publisher
inline ::image_transport::Publisher create_publisher(ensenso::ros::NodeHandle const &nh, ::std::string const &topic_name)
Definition: image_transport.h:71
imageFromNxLibNode
ImagePtr imageFromNxLibNode(NxLibItem const &node, std::string const &frame, bool isFileCamera)
Definition: image_utilities.cpp:93
StereoCamera::loadParameterSet
void loadParameterSet(std::string name, ProjectorState projector=projectorDontCare)
Definition: stereo_camera.cpp:1217
StereoCamera::handEyeCalibrationRobotTransforms
std::vector< tf2::Transform > handEyeCalibrationRobotTransforms
Definition: stereo_camera.h:61
pcl::toROSMsg
void toROSMsg(const pcl::PointCloud< T > &cloud, sensor_msgs::Image &msg)
GET_K_MATRIX
#define GET_K_MATRIX(info)
Definition: namespace.h:55
ensenso_conversion::conversionFactor
const int conversionFactor
Definition: conversion.h:16
CameraParameters::targetFrame
std::string targetFrame
Definition: camera.h:227
ENSENSO_WARN_ONCE
void ENSENSO_WARN_ONCE(T... args)
Definition: logging.h:229
inverse
TFSIMD_FORCE_INLINE Quaternion inverse(const Quaternion &q)
ENSENSO_ERROR
void ENSENSO_ERROR(T... args)
Definition: logging.h:278
ensenso_conversion::toRosPoint
geometry_msgs::msg::Point32 toRosPoint(NxLibItem const &itemArray, bool convertUnits=true)
Definition: conversion.cpp:51
StereoCamera::StereoCamera
StereoCamera(ensenso::ros::NodeHandle &nh, CameraParameters params)
Definition: stereo_camera.cpp:9
retrieveTexturedPointCloud
std::unique_ptr< ensenso::pcl::PointCloudColored > retrieveTexturedPointCloud(NxLibItem const &cmdResult, std::string const &targetFrame, bool isFileCamera=false)
Definition: stereo_camera_helpers.h:327
startswith
bool startswith(std::string const &lhs, std::string const &rhs)
Definition: string_helper.h:5
StereoCalibrationPattern::writeToNxLib
void writeToNxLib(NxLibItem const &node, bool right=false)
Definition: calibration_pattern.cpp:120
tf2::TransformException
Camera::setParameterServer
std::unique_ptr< SetParameterServer > setParameterServer
Definition: camera.h:285
image_utilities.h
StereoCamera::requestDataServer
std::unique_ptr< RequestDataServer > requestDataServer
Definition: stereo_camera.h:40
Camera::tfBuffer
std::unique_ptr< tf2_ros::Buffer > tfBuffer
Definition: camera.h:278
StereoCamera::leftRectifiedCameraInfo
sensor_msgs::msg::CameraInfoPtr leftRectifiedCameraInfo
Definition: stereo_camera.h:32
StereoCamera::pointCloudNormalsPublisher
PointCloudPublisher< ensenso::pcl::PointCloudNormals > pointCloudNormalsPublisher
Definition: stereo_camera.h:53
projectorDontCare
@ projectorDontCare
Definition: stereo_camera.h:22
StereoCamera::hasRightCamera
bool hasRightCamera() const
Definition: stereo_camera.cpp:1614
Camera::nh
ensenso::ros::NodeHandle nh
Definition: camera.h:271
stereo_camera_helpers.h
StereoCamera::addDisparityMapOffset
void addDisparityMapOffset(sensor_msgs::msg::CameraInfoPtr const &info) const
Definition: stereo_camera.cpp:1635
stereo_camera.h
CameraParameters::cameraFrame
std::string cameraFrame
Definition: camera.h:206
Camera::updateGlobalLink
void updateGlobalLink(ensenso::ros::Time time, std::string targetFrame="", bool useCachedTransformation=false) const
Definition: camera.cpp:362
ensenso_conversion::toEnsensoPoint
NxLibItem toEnsensoPoint(geometry_msgs::msg::Point32 const &point, bool convertUnits=true)
Definition: conversion.cpp:70
retrieveRenderedPointCloud
std::unique_ptr< ensenso::pcl::PointCloud > retrieveRenderedPointCloud(NxLibItem const &cmdResult, std::string const &frame, bool isFileCamera=false)
Definition: stereo_camera_helpers.h:320
StereoCamera::saveParameterSet
void saveParameterSet(std::string name, bool projectorWasSet)
Definition: stereo_camera.cpp:1201
StereoCamera::leftRectifiedImagePublisher
image_transport::CameraPublisher leftRectifiedImagePublisher
Definition: stereo_camera.h:46
Camera::publishCurrentLinks
void publishCurrentLinks(TIMER_CALLBACK_DECLARATION_ARGS)
Definition: camera.cpp:650
move
void move(std::vector< T > &a, std::vector< T > &b)


ensenso_camera
Author(s): Ensenso
autogenerated on Wed Apr 2 2025 02:37:46