// stereo_camera.cpp -- StereoCamera implementation for the ensenso_camera ROS driver.
// (Recovered from a doxygen listing; embedded listing line numbers removed.)
10 #include <diagnostic_msgs/DiagnosticArray.h>
13 #include <pcl_ros/point_cloud.h>
14 
15 StereoCamera::StereoCamera(ros::NodeHandle nh, std::string serial, std::string fileCameraPath, bool fixed,
16  std::string cameraFrame, std::string targetFrame, std::string robotFrame,
17  std::string wristFrame, std::string linkFrame, int captureTimeout)
18  : Camera(nh, std::move(serial), std::move(fileCameraPath), fixed, std::move(cameraFrame), std::move(targetFrame),
19  std::move(linkFrame))
20  , robotFrame(std::move(robotFrame))
21  , wristFrame(std::move(wristFrame))
22  , captureTimeout(captureTimeout)
23 {
24  leftCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
25  rightCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
26  leftRectifiedCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
27  rightRectifiedCameraInfo = boost::make_shared<sensor_msgs::CameraInfo>();
28 
30  ::make_unique<FitPrimitiveServer>(nh, "fit_primitive", boost::bind(&StereoCamera::onFitPrimitive, this, _1));
32  ::make_unique<SetParameterServer>(nh, "set_parameter", boost::bind(&StereoCamera::onSetParameter, this, _1));
33 
35  ::make_unique<RequestDataServer>(nh, "request_data", boost::bind(&StereoCamera::onRequestData, this, _1));
37  ::make_unique<LocatePatternServer>(nh, "locate_pattern", boost::bind(&StereoCamera::onLocatePattern, this, _1));
39  ::make_unique<ProjectPatternServer>(nh, "project_pattern", boost::bind(&StereoCamera::onProjectPattern, this, _1));
40  calibrateHandEyeServer = ::make_unique<CalibrateHandEyeServer>(
41  nh, "calibrate_hand_eye", boost::bind(&StereoCamera::onCalibrateHandEye, this, _1));
42  calibrateWorkspaceServer = ::make_unique<CalibrateWorkspaceServer>(
43  nh, "calibrate_workspace", boost::bind(&StereoCamera::onCalibrateWorkspace, this, _1));
44  telecentricProjectionServer = ::make_unique<TelecentricProjectionServer>(
45  nh, "project_telecentric", boost::bind(&StereoCamera::onTelecentricProjection, this, _1));
46  texturedPointCloudServer = ::make_unique<TexturedPointCloudServer>(
47  nh, "texture_point_cloud", boost::bind(&StereoCamera::onTexturedPointCloud, this, _1));
48 
49  image_transport::ImageTransport imageTransport(nh);
50  leftRawImagePublisher = imageTransport.advertiseCamera("raw/left/image", 1);
51  rightRawImagePublisher = imageTransport.advertiseCamera("raw/right/image", 1);
52  leftRectifiedImagePublisher = imageTransport.advertiseCamera("rectified/left/image", 1);
53  rightRectifiedImagePublisher = imageTransport.advertiseCamera("rectified/right/image", 1);
54  disparityMapPublisher = imageTransport.advertise("disparity_map", 1);
55  depthImagePublisher = imageTransport.advertiseCamera("depth/image", 1);
56  projectedImagePublisher = imageTransport.advertise("depth/projected_depth_map", 1);
57 
58  pointCloudPublisher = nh.advertise<pcl::PointCloud<pcl::PointXYZ>>("point_cloud", 1);
59  pointCloudPublisherColor = nh.advertise<pcl::PointCloud<pcl::PointXYZRGB>>("point_cloud_color", 1);
60  projectedPointCloudPublisher = nh.advertise<pcl::PointCloud<pcl::PointXYZ>>("projected_point_cloud", 1);
61 }
62 
64 {
65  Camera::open();
66 
68 
69  return true;
70 }
71 
73 {
75  requestDataServer->start();
76  fitPrimitiveServer->start();
77  calibrateHandEyeServer->start();
78  calibrateWorkspaceServer->start();
79  locatePatternServer->start();
80  projectPatternServer->start();
81  texturedPointCloudServer->start();
83 }
84 
85 void StereoCamera::onFitPrimitive(ensenso_camera_msgs::FitPrimitiveGoalConstPtr const& goal)
86 {
88 
89  NxLibCommand fitPrimitives(cmdFitPrimitive, serial);
90  NxLibItem const& primitives = fitPrimitives.parameters()[itmPrimitive];
91 
92  int primitivesCount = 0;
93  for (auto const& primitive : goal->primitives)
94  {
95  if (primitive.type == ensenso_camera_msgs::Primitive::SPHERE)
96  {
97  primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
98  primitives[primitivesCount][itmRadius][itmMax] = primitive.max_radius * ensenso_conversion::conversionFactor;
99  primitives[primitivesCount][itmCount] = primitive.count;
100  }
101  else if (primitive.type == ensenso_camera_msgs::Primitive::CYLINDER)
102  {
103  primitives[primitivesCount][itmRadius][itmMin] = primitive.min_radius * ensenso_conversion::conversionFactor;
104  primitives[primitivesCount][itmRadius][itmMax] = primitive.min_radius * ensenso_conversion::conversionFactor;
105  }
106  else if (primitive.type == ensenso_camera_msgs::Primitive::PLANE)
107  {
108  primitives[primitivesCount][itmCount] = primitive.count;
109  }
110  primitives[primitivesCount][itmType] = primitive.type;
111  primitives[primitivesCount][itmInlierThreshold] = primitive.inlier_threshold * ensenso_conversion::conversionFactor;
112  primitives[primitivesCount][itmInlierFraction] = primitive.inlier_fraction_in;
113 
114  primitivesCount++;
115  }
116 
117  NxLibItem const& commandParameters = fitPrimitives.parameters();
118  if (goal->normal_radius)
119  {
120  commandParameters[itmNormal][itmRadius] = goal->normal_radius;
121  }
122 
123  float const zeroThreshold = 10e-6;
124  if (!(std::abs(goal->region_of_interest.lower.x) < zeroThreshold &&
125  std::abs(goal->region_of_interest.lower.y) < zeroThreshold &&
126  std::abs(goal->region_of_interest.lower.z) < zeroThreshold))
127  {
128  commandParameters[itmBoundingBox][itmMin] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.lower);
129  }
130  if (!(std::abs(goal->region_of_interest.upper.x) < zeroThreshold &&
131  std::abs(goal->region_of_interest.upper.y) < zeroThreshold &&
132  std::abs(goal->region_of_interest.upper.z) < zeroThreshold))
133  {
134  commandParameters[itmBoundingBox][itmMax] << ensenso_conversion::toEnsensoPoint(goal->region_of_interest.upper);
135  }
136 
137  if (goal->failure_probability != 0)
138  {
139  commandParameters[itmFailureProbability] = goal->failure_probability;
140  }
141 
142  if (std::abs(goal->inlier_threshold) > zeroThreshold)
143  {
144  commandParameters[itmInlierThreshold] = goal->inlier_threshold * ensenso_conversion::conversionFactor;
145  }
146  if (std::abs(goal->inlier_fraction) > zeroThreshold)
147  {
148  commandParameters[itmInlierFraction] = goal->inlier_fraction;
149  }
150  if (std::abs(goal->scaling) > zeroThreshold)
151  {
152  commandParameters[itmScaling] = goal->scaling;
153  }
154  if (goal->ransac_iterations != 0)
155  {
156  commandParameters[itmIterations] = goal->ransac_iterations;
157  }
158 
159  fitPrimitives.execute();
160 
161  ensenso_camera_msgs::FitPrimitiveResult result;
162 
163  NxLibItem const primitiveResults = fitPrimitives.result()[itmPrimitive];
164  if (primitiveResults.isArray())
165  {
166  result.primitives.reserve(primitiveResults.count());
167  for (int primitiveCount = 0; primitiveCount < primitiveResults.count(); primitiveCount++)
168  {
169  NxLibItem const& currentPrimitive = primitiveResults[primitiveCount];
170  ensenso_camera_msgs::Primitive primitive;
171 
172  primitive.type = currentPrimitive[itmType].asString();
173  primitive.ransac_iterations = currentPrimitive[itmIterations].asInt();
174  primitive.inlier_count = currentPrimitive[itmInlierCount].asInt();
175  primitive.inlier_fraction_out = currentPrimitive[itmInlierFraction].asDouble();
176  primitive.score = currentPrimitive[itmScore].asDouble();
177  primitive.center = ensenso_conversion::toRosPoint(currentPrimitive[itmCenter]);
178 
179  if (primitive.type == ensenso_camera_msgs::Primitive::PLANE)
180  {
181  primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][0]));
182  primitive.axes.push_back(ensenso_conversion::toRosPoint(currentPrimitive[itmAxis][1]));
183  primitive.normal = ensenso_conversion::toRosPoint(currentPrimitive[itmNormal], false);
184  }
185  else if (primitive.type == ensenso_camera_msgs::Primitive::SPHERE)
186  {
187  primitive.radius = currentPrimitive[itmRadius].asDouble() / ensenso_conversion::conversionFactor;
188  }
189  else if (primitive.type == ensenso_camera_msgs::Primitive::CYLINDER)
190  {
191  primitive.axis = ensenso_conversion::toRosPoint(currentPrimitive[itmAxis]);
192  }
193  result.primitives.emplace_back(primitive);
194  }
195  }
196 
197  fitPrimitiveServer->setSucceeded(result);
198 
199  FINISH_NXLIB_ACTION(FitPrimitive)
200 }
201 
202 void StereoCamera::onRequestData(ensenso_camera_msgs::RequestDataGoalConstPtr const& goal)
203 {
205 
206  ensenso_camera_msgs::RequestDataResult result;
207  ensenso_camera_msgs::RequestDataFeedback feedback;
208 
209  bool publishResults = goal->publish_results;
210  if (!goal->publish_results && !goal->include_results_in_response)
211  {
212  publishResults = true;
213  }
214 
215  bool requestPointCloud = goal->request_point_cloud || goal->request_depth_image;
216  if (!goal->request_raw_images && !goal->request_rectified_images && !goal->request_point_cloud &&
217  !goal->request_normals && !goal->request_depth_image)
218  {
219  requestPointCloud = true;
220  }
221 
222  bool computePointCloud = requestPointCloud || goal->request_normals;
223  bool computeDisparityMap = goal->request_disparity_map || computePointCloud;
224 
225  loadParameterSet(goal->parameter_set, computeDisparityMap ? projectorOn : projectorOff);
226  ros::Time imageTimestamp = capture();
227 
229 
230  feedback.images_acquired = true;
231  requestDataServer->publishFeedback(feedback);
232 
233  if (goal->request_raw_images)
234  {
235  auto rawImages = imagePairsFromNxLibNode(cameraNode[itmImages][itmRaw], cameraFrame);
236 
237  leftCameraInfo->header.stamp = rawImages[0].first->header.stamp;
238  leftCameraInfo->header.frame_id = cameraFrame;
239  rightCameraInfo->header.stamp = leftCameraInfo->header.stamp;
240  rightCameraInfo->header.frame_id = cameraFrame;
241 
242  if (goal->include_results_in_response)
243  {
244  for (auto const& imagePair : rawImages)
245  {
246  result.left_raw_images.push_back(*imagePair.first);
247  result.right_raw_images.push_back(*imagePair.second);
248  }
249  result.left_camera_info = *leftCameraInfo;
250  result.right_camera_info = *rightCameraInfo;
251  }
252  if (publishResults)
253  {
254  // We only publish one of the images on the topic, even if FlexView is
255  // enabled.
256  leftRawImagePublisher.publish(rawImages[0].first, leftCameraInfo);
257  rightRawImagePublisher.publish(rawImages[0].second, rightCameraInfo);
258  }
259  }
260 
262 
263  // If we need the disparity map, we do the rectification implicitly in
264  // cmdComputeDisparityMap. This is more efficient when using CUDA.
265  if (goal->request_rectified_images && !computeDisparityMap)
266  {
267  NxLibCommand rectify(cmdRectifyImages, serial);
268  rectify.parameters()[itmCameras] = serial;
269  rectify.execute();
270  }
271  else if (computeDisparityMap)
272  {
273  NxLibCommand disparityMapCommand(cmdComputeDisparityMap, serial);
274  disparityMapCommand.parameters()[itmCameras] = serial;
275  disparityMapCommand.execute();
276  }
277 
278  if (goal->request_rectified_images)
279  {
280  auto rectifiedImages = imagePairsFromNxLibNode(cameraNode[itmImages][itmRectified], cameraFrame);
281 
282  leftRectifiedCameraInfo->header.stamp = rectifiedImages[0].first->header.stamp;
283  leftRectifiedCameraInfo->header.frame_id = cameraFrame;
284  rightRectifiedCameraInfo->header.stamp = leftRectifiedCameraInfo->header.stamp;
285  rightRectifiedCameraInfo->header.frame_id = cameraFrame;
286 
287  if (goal->include_results_in_response)
288  {
289  for (auto const& imagePair : rectifiedImages)
290  {
291  result.left_rectified_images.push_back(*imagePair.first);
292  result.right_rectified_images.push_back(*imagePair.second);
293  }
294  result.left_rectified_camera_info = *leftRectifiedCameraInfo;
295  result.right_rectified_camera_info = *rightRectifiedCameraInfo;
296  }
297  if (publishResults)
298  {
299  // We only publish one of the images on the topic, even if FlexView is
300  // enabled.
303  }
304  }
305 
306  if (goal->request_disparity_map)
307  {
308  auto disparityMap = imageFromNxLibNode(cameraNode[itmImages][itmDisparityMap], cameraFrame);
309 
310  if (goal->include_results_in_response)
311  {
312  result.disparity_map = *disparityMap;
313  }
314  if (publishResults)
315  {
316  disparityMapPublisher.publish(disparityMap);
317  }
318  }
319 
321 
322  PointCloudROI const* pointCloudROI = 0;
323  if (parameterSets.at(currentParameterSet).useROI)
324  {
325  pointCloudROI = &parameterSets.at(currentParameterSet).roi;
326  }
327 
328  if (computePointCloud)
329  {
330  updateGlobalLink(imageTimestamp, "", goal->use_cached_transformation);
331 
332  NxLibCommand computePointMap(cmdComputePointMap, serial);
333  computePointMap.parameters()[itmCameras] = serial;
334  computePointMap.execute();
335 
336  if (requestPointCloud && !goal->request_normals)
337  {
338  auto pointCloud = pointCloudFromNxLib(cameraNode[itmImages][itmPointMap], targetFrame, pointCloudROI);
339 
340  if (goal->include_results_in_response)
341  {
342  pcl::toROSMsg(*pointCloud, result.point_cloud);
343  }
344  if (publishResults)
345  {
346  pointCloudPublisher.publish(pointCloud);
347  }
348  }
349 
350  if (goal->request_depth_image)
351  {
352  if (cameraFrame == targetFrame)
353  {
354  auto depthImage = depthImageFromNxLibNode(cameraNode[itmImages][itmPointMap], targetFrame);
355 
356  if (goal->include_results_in_response)
357  {
358  result.depth_image = *depthImage;
359  }
360 
361  if (publishResults)
362  {
364  }
365  }
366  else
367  {
368  ROS_WARN_ONCE("Currently it is not possible to determine the depth map once the stereo camera is extrinsically "
369  "calibrated because the point cloud is then transformed internally. If you want to have a depth "
370  "map, the camera must not be extrinsically calibrated (workspace-/ hand-eye calibration), or "
371  "have a link to another camera.");
372  }
373  }
374  }
375 
377 
378  if (goal->request_normals)
379  {
380  NxLibCommand computeNormals(cmdComputeNormals, serial);
381  computeNormals.execute();
382 
383  auto pointCloud = pointCloudWithNormalsFromNxLib(cameraNode[itmImages][itmPointMap],
384  cameraNode[itmImages][itmNormals], targetFrame, pointCloudROI);
385 
386  if (goal->include_results_in_response)
387  {
388  pcl::toROSMsg(*pointCloud, result.point_cloud);
389  }
390  if (publishResults)
391  {
392  pointCloudPublisher.publish(pointCloud);
393  }
394  }
395 
396  requestDataServer->setSucceeded(result);
397 
398  FINISH_NXLIB_ACTION(RequestData)
399 }
400 
401 void StereoCamera::onSetParameter(ensenso_camera_msgs::SetParameterGoalConstPtr const& goal)
402 {
404 
405  ensenso_camera_msgs::SetParameterResult result;
406 
407  loadParameterSet(goal->parameter_set);
408 
409  result.parameter_file_applied = false;
410  if (!goal->parameter_file.empty())
411  {
412  result.parameter_file_applied = loadSettings(goal->parameter_file);
413  if (!result.parameter_file_applied)
414  {
415  server->setAborted(result);
416  return;
417  }
418  }
419 
420  bool projectorChanged = false;
421  for (auto const& parameter : goal->parameters)
422  {
423  writeParameter(parameter);
424 
425  if (parameter.key == parameter.PROJECTOR || parameter.key == parameter.FRONT_LIGHT)
426  {
427  projectorChanged = true;
428  }
429  }
430 
431  // Synchronize to make sure that we read back the correct values.
432  NxLibCommand synchronize(cmdSynchronize, serial);
433  synchronize.parameters()[itmCameras] = serial;
434  synchronize.execute();
435 
436  saveParameterSet(goal->parameter_set, projectorChanged);
437 
438  // The new parameters might change the camera calibration.
440 
441  // Read back the actual values.
442  for (auto const& parameter : goal->parameters)
443  {
444  result.results.push_back(*readParameter(parameter.key));
445  }
446 
447  setParameterServer->setSucceeded(result);
448 
449  FINISH_NXLIB_ACTION(SetParameter)
450 }
451 
452 void StereoCamera::onLocatePattern(ensenso_camera_msgs::LocatePatternGoalConstPtr const& goal)
453 {
455 
456  ensenso_camera_msgs::LocatePatternResult result;
457  ensenso_camera_msgs::LocatePatternFeedback feedback;
458 
459  loadParameterSet(goal->parameter_set, projectorOff);
460 
461  int numberOfShots = goal->number_of_shots;
462  if (numberOfShots < 1)
463  {
464  numberOfShots = 1;
465  }
466 
467  std::vector<StereoCalibrationPattern> patterns;
468  ros::Time imageTimestamp;
469  for (int i = 0; i < numberOfShots; i++)
470  {
472 
473  ros::Time timestamp = capture();
474  if (i == 0)
475  imageTimestamp = timestamp;
476 
478 
479  if (i == (numberOfShots - 1))
480  {
481  // The last image has been captured.
482  feedback.images_acquired = true;
483  locatePatternServer->publishFeedback(feedback);
484  }
485 
486  bool clearBuffer = (i == 0);
487  patterns = collectPattern(clearBuffer);
488  if (patterns.empty())
489  {
490  result.found_pattern = false;
491  locatePatternServer->setSucceeded(result);
492  return;
493  }
494 
495  if (patterns.size() > 1)
496  {
497  // Cannot average multiple shots of multiple patterns. We will cancel the
498  // capturing and estimate the pose of each pattern individually.
499  break;
500  }
501  }
502 
503  result.found_pattern = true;
504  result.patterns.resize(patterns.size());
505  for (size_t i = 0; i < patterns.size(); i++)
506  {
507  patterns[i].writeToMessage(result.patterns[i]);
508  }
509 
511 
512  std::string patternFrame = targetFrame;
513  if (!goal->target_frame.empty())
514  {
515  patternFrame = goal->target_frame;
516  }
517 
518  result.frame = patternFrame;
519 
520  if (patterns.size() > 1)
521  {
522  // Estimate the pose of all the patterns we found.
523  auto patternPoses = estimatePatternPoses(imageTimestamp, patternFrame);
524 
525  result.pattern_poses.resize(patternPoses.size());
526  for (size_t i = 0; i < patternPoses.size(); i++)
527  {
528  result.pattern_poses[i] = stampedPoseFromTransform(patternPoses[i]);
529  }
530  }
531  else
532  {
533  // Estimate the pose of a single pattern, averaging over the different
534  // shots.
535  auto patternPose = estimatePatternPose(imageTimestamp, patternFrame);
536 
537  result.pattern_poses.resize(1);
538  result.pattern_poses[0] = stampedPoseFromTransform(patternPose);
539  }
540 
541  if (!goal->tf_frame.empty())
542  {
543  if (patterns.size() == 1)
544  {
545  auto transform = transformFromPose(result.pattern_poses[0], goal->tf_frame);
546  transformBroadcaster->sendTransform(transform);
547  }
548  else
549  {
550  ROS_WARN("Cannot publish the pattern pose in TF, because there are "
551  "multiple patterns!");
552  }
553  }
554 
555  // The target frame has changed.
557  locatePatternServer->setSucceeded(result);
558 
559  FINISH_NXLIB_ACTION(LocatePattern)
560 }
561 
562 void StereoCamera::onProjectPattern(ensenso_camera_msgs::ProjectPatternGoalConstPtr const& goal)
563 {
565 
566  ensenso_camera_msgs::ProjectPatternResult result;
567 
568  tf2::Transform targetFrameTransformation = fromMsg(goal->target_frame_transformation);
569  if (isValid(targetFrameTransformation))
570  {
571  updateTransformations(targetFrameTransformation);
572  }
573  else
574  {
576  }
577 
579 
580  if (!checkNxLibVersion(2, 1))
581  {
582  // In old NxLib versions, the project pattern command does not take the grid
583  // spacing as a parameter.
584  NxLibItem()[itmParameters][itmPattern][itmGridSpacing] = goal->grid_spacing * 1000;
585  }
586 
587  tf2::Transform patternPose = fromMsg(goal->pattern_pose);
588 
589  NxLibCommand projectPattern(cmdProjectPattern, serial);
590  projectPattern.parameters()[itmCameras] = serial;
591  projectPattern.parameters()[itmGridSpacing] = goal->grid_spacing * 1000;
592  projectPattern.parameters()[itmGridSize][0] = goal->grid_size_x;
593  projectPattern.parameters()[itmGridSize][1] = goal->grid_size_y;
594  writePoseToNxLib(patternPose, projectPattern.parameters()[itmPatternPose]);
595  projectPattern.execute();
596 
598 
599  int sensorWidth = cameraNode[itmSensor][itmSize][0].asInt();
600  int sensorHeight = cameraNode[itmSensor][itmSize][1].asInt();
601 
602  NxLibItem projectedPoints = projectPattern.result()[itmStereo][0][itmPoints];
603  NxLibItem leftPoints = projectedPoints[0];
604  NxLibItem rightPoints = projectedPoints[1];
605 
606  result.pattern_is_visible = true;
607  result.left_points.resize(leftPoints.count());
608  result.right_points.resize(leftPoints.count());
609  for (int i = 0; i < leftPoints.count(); i++)
610  {
611  double leftX = leftPoints[i][0].asDouble();
612  double leftY = leftPoints[i][1].asDouble();
613  double rightX = rightPoints[i][0].asDouble();
614  double rightY = rightPoints[i][1].asDouble();
615 
616  result.left_points[i].x = leftX;
617  result.left_points[i].y = leftY;
618  result.right_points[i].x = rightX;
619  result.right_points[i].y = rightY;
620 
621  if (leftX < 0 || leftX > sensorWidth || leftY < 0 || leftY > sensorHeight || rightX < 0 || rightX > sensorWidth ||
622  rightY < 0 || rightY > sensorHeight)
623  {
624  result.pattern_is_visible = false;
625  }
626  }
627 
628  projectPatternServer->setSucceeded(result);
629 
630  FINISH_NXLIB_ACTION(ProjectPattern)
631 }
632 
633 void StereoCamera::onCalibrateHandEye(ensenso_camera_msgs::CalibrateHandEyeGoalConstPtr const& goal)
634 {
635  START_NXLIB_ACTION(CalibrateHandEye, calibrateHandEyeServer)
636 
637  ensenso_camera_msgs::CalibrateHandEyeResult result;
638  result.command = goal->command;
639 
640  if (goal->command == goal->RESET)
641  {
644  }
645  else if (goal->command == goal->CAPTURE_PATTERN)
646  {
647  if (robotFrame.empty() || wristFrame.empty())
648  {
649  result.error_message = "You need to specify a robot base and wrist frame "
650  "to do a hand eye calibration!";
651  ROS_ERROR("%s", result.error_message.c_str());
652  calibrateHandEyeServer->setAborted(result);
653  return;
654  }
655 
656  loadParameterSet(goal->parameter_set, projectorOff);
657  ros::Time imageTimestamp = capture();
658 
660 
661  // Load the pattern buffer that we remembered from the previous
662  // calibration steps.
663  if (!handEyeCalibrationPatternBuffer.empty())
664  {
665  NxLibCommand setPatternBuffer(cmdSetPatternBuffer, serial);
666  setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
667  setPatternBuffer.execute();
668  }
669  else
670  {
671  NxLibCommand(cmdDiscardPatterns, serial).execute();
672  }
673 
674  std::vector<StereoCalibrationPattern> patterns = collectPattern();
675  if (patterns.empty())
676  {
677  result.found_pattern = false;
678  calibrateHandEyeServer->setSucceeded(result);
679  return;
680  }
681  if (patterns.size() > 1)
682  {
683  result.error_message = "Detected multiple calibration patterns during a "
684  "hand eye calibration!";
685  ROS_ERROR("%s", result.error_message.c_str());
686  calibrateHandEyeServer->setAborted(result);
687  return;
688  }
689 
691 
692  result.found_pattern = true;
693  patterns[0].writeToMessage(result.pattern);
694 
695  auto patternPose = estimatePatternPose(imageTimestamp, cameraFrame, true);
696  result.pattern_pose = stampedPoseFromTransform(patternPose).pose;
697 
699 
700  geometry_msgs::TransformStamped robotPose;
701  try
702  {
704  }
705  catch (tf2::TransformException& e)
706  {
707  result.error_message = std::string("Could not look up the robot pose due to the TF error: ") + e.what();
708  ROS_ERROR("%s", result.error_message.c_str());
709  calibrateHandEyeServer->setAborted(result);
710  return;
711  }
712 
713  // Remember the newly collected data for the next step.
714  NxLibCommand getPatternBuffer(cmdGetPatternBuffer, serial);
715  getPatternBuffer.execute();
716  handEyeCalibrationPatternBuffer = getPatternBuffer.result()[itmPatterns].asJson();
717 
718  handEyeCalibrationRobotPoses.push_back(fromStampedMessage(robotPose));
719 
720  result.robot_pose = stampedPoseFromTransform(robotPose).pose;
721  }
722  else if (goal->command == goal->START_CALIBRATION)
723  {
724  if (handEyeCalibrationRobotPoses.size() < 5)
725  {
726  result.error_message = "You need collect at least 5 patterns before "
727  "starting a hand eye calibration!";
728  ROS_ERROR("%s", result.error_message.c_str());
729  calibrateHandEyeServer->setAborted(result);
730  return;
731  }
732 
733  // Load the pattern observations.
734  size_t numberOfPatterns = 0;
735  NxLibCommand setPatternBuffer(cmdSetPatternBuffer, serial);
736  if (!goal->pattern_observations.empty())
737  {
738  for (size_t i = 0; i < goal->pattern_observations.size(); i++)
739  {
740  StereoCalibrationPattern pattern(goal->pattern_observations[i]);
741 
742  NxLibItem patternNode = setPatternBuffer.parameters()[itmPatterns][i][serial];
743  pattern.writeToNxLib(patternNode[itmLeft][0]);
744  pattern.writeToNxLib(patternNode[itmRight][0], true);
745  }
746  }
747  else
748  {
749  setPatternBuffer.parameters()[itmPatterns] << handEyeCalibrationPatternBuffer;
750  }
751  numberOfPatterns = setPatternBuffer.parameters()[itmPatterns].count();
752  setPatternBuffer.execute();
753 
754  // Load the corresponding robot poses.
755  auto robotPoses = handEyeCalibrationRobotPoses;
756  if (!goal->robot_poses.empty())
757  {
758  robotPoses.clear();
759  for (auto const& pose : goal->robot_poses)
760  {
761  tf2::Transform tfPose = fromMsg(pose);
762  robotPoses.push_back(tfPose);
763  }
764  }
765 
766  if (robotPoses.size() != numberOfPatterns)
767  {
768  result.error_message = "The number of pattern observations does not "
769  "match the number of robot poses!";
770  ROS_ERROR("%s", result.error_message.c_str());
771  calibrateHandEyeServer->setAborted(result);
772  return;
773  }
774 
775  // Load the initial guesses from the action goal.
776  tf2::Transform link, patternPose;
777  link = fromMsg(goal->link);
778  patternPose = fromMsg(goal->pattern_pose);
779 
780  NxLibCommand calibrateHandEye(cmdCalibrateHandEye, serial);
781  calibrateHandEye.parameters()[itmSetup] = fixed ? valFixed : valMoving;
782  // The target node will be reset anyway before we calculate data for the next time.
783  calibrateHandEye.parameters()[itmTarget] = TARGET_FRAME_LINK + "_" + serial;
784  if (isValid(link))
785  {
786  writePoseToNxLib(link.inverse(), calibrateHandEye.parameters()[itmLink]);
787  }
788  if (isValid(patternPose))
789  {
790  writePoseToNxLib(patternPose, calibrateHandEye.parameters()[itmPatternPose]);
791  }
792  for (size_t i = 0; i < robotPoses.size(); i++)
793  {
794  writePoseToNxLib(robotPoses[i], calibrateHandEye.parameters()[itmTransformations][i]);
795  }
796 
797  calibrateHandEye.execute(false);
798 
799  auto getCalibrationResidual = [](NxLibItem const& node) { // NOLINT
800  if (node[itmResidual].exists())
801  {
802  return node[itmResidual].asDouble();
803  }
804  // Compatibility with the SDK 2.0.
805  return node[itmReprojectionError].asDouble();
806  };
807 
808  ros::Rate waitingRate(5);
809  while (!calibrateHandEye.finished())
810  {
811  if (calibrateHandEye.result()[itmProgress].exists())
812  {
813  ensenso_camera_msgs::CalibrateHandEyeFeedback feedback;
814  feedback.number_of_iterations = calibrateHandEye.result()[itmProgress][itmIterations].asInt();
815  feedback.residual = getCalibrationResidual(calibrateHandEye.result()[itmProgress]);
816  calibrateHandEyeServer->publishFeedback(feedback);
817  }
818 
819  if (calibrateHandEyeServer->isPreemptRequested())
820  {
821  NxLibCommand(cmdBreak, serial).execute();
822 
823  calibrateHandEyeServer->setPreempted();
824  return;
825  }
826 
827  waitingRate.sleep();
828  }
829 
830  result.calibration_time =
831  calibrateHandEye.result()[itmTime].asDouble() / 1000; // NxLib time is in milliseconds, ROS
832  // expects time to be in seconds.
833  result.number_of_iterations = calibrateHandEye.result()[itmIterations].asInt();
834  result.residual = getCalibrationResidual(calibrateHandEye.result());
835 
836  result.link = poseFromTransform(poseFromNxLib(cameraNode[itmLink]).inverse());
837  result.pattern_pose = poseFromTransform(poseFromNxLib(calibrateHandEye.result()[itmPatternPose]));
838 
839  if (goal->write_calibration_to_eeprom)
840  {
841  // Save the new calibration link to the camera's EEPROM.
842  NxLibCommand storeCalibration(cmdStoreCalibration, serial);
843  storeCalibration.parameters()[itmCameras] = serial;
844  storeCalibration.parameters()[itmLink] = true;
845  storeCalibration.execute();
846  }
847  }
848 
849  // The target frame has changed.
851  calibrateHandEyeServer->setSucceeded(result);
852 
853  FINISH_NXLIB_ACTION(CalibrateHandEye)
854 }
855 
856 void StereoCamera::onCalibrateWorkspace(const ensenso_camera_msgs::CalibrateWorkspaceGoalConstPtr& goal)
857 {
858  START_NXLIB_ACTION(CalibrateWorkspace, calibrateWorkspaceServer)
859 
860  if (!fixed)
861  {
862  ROS_WARN("You are performing a workspace calibration for a moving camera. "
863  "Are you sure that this is what you want to do?");
864  }
865 
866  ensenso_camera_msgs::CalibrateWorkspaceResult result;
867  result.successful = true;
868 
869  loadParameterSet(goal->parameter_set, projectorOff);
870 
871  int numberOfShots = goal->number_of_shots;
872  if (numberOfShots < 1)
873  {
874  numberOfShots = 1;
875  }
876 
877  ros::Time imageTimestamp;
878  for (int i = 0; i < numberOfShots; i++)
879  {
881 
882  ros::Time timestamp = capture();
883  if (i == 0)
884  imageTimestamp = timestamp;
885 
887 
888  bool clearBuffer = (i == 0);
889  std::vector<StereoCalibrationPattern> patterns = collectPattern(clearBuffer);
890  if (patterns.size() != 1)
891  {
892  result.successful = false;
893  calibrateWorkspaceServer->setSucceeded(result);
894  return;
895  }
896  }
897 
899 
900  tf2::Transform patternTransformation = fromStampedMessage(estimatePatternPose(imageTimestamp));
901 
903 
904  tf2::Transform definedPatternPose = fromMsg(goal->defined_pattern_pose);
905 
906  NxLibCommand calibrateWorkspace(cmdCalibrateWorkspace, serial);
907  calibrateWorkspace.parameters()[itmCameras] = serial;
908  writePoseToNxLib(patternTransformation, calibrateWorkspace.parameters()[itmPatternPose]);
909  writePoseToNxLib(definedPatternPose, calibrateWorkspace.parameters()[itmDefinedPose]);
910  calibrateWorkspace.parameters()[itmTarget] = TARGET_FRAME_LINK + "_" + serial;
911  calibrateWorkspace.execute();
912 
913  if (goal->write_calibration_to_eeprom)
914  {
915  // Save the new calibration link to the camera's EEPROM.
916  NxLibCommand storeCalibration(cmdStoreCalibration, serial);
917  storeCalibration.parameters()[itmCameras] = serial;
918  storeCalibration.parameters()[itmLink] = true;
919  storeCalibration.execute();
920  }
921 
923  calibrateWorkspaceServer->setSucceeded(result);
924 
925  FINISH_NXLIB_ACTION(CalibrateWorkspace)
926 }
927 
928 void StereoCamera::onTelecentricProjection(ensenso_camera_msgs::TelecentricProjectionGoalConstPtr const& goal)
929 {
930  START_NXLIB_ACTION(TelecentricProjection, telecentricProjectionServer)
931 
932  ensenso_camera_msgs::TelecentricProjectionResult result;
933 
934  bool useViewPose = isValid(goal->view_pose);
935  bool frameGiven = !goal->frame.empty();
936 
937  if (!frameGiven)
938  {
939  std::string error = "You have to define a valid frame, to which to projection will be published. Aborting";
940  ROS_ERROR("%s", error.c_str());
941  result.error.message = error;
942  telecentricProjectionServer->setAborted(result);
943  return;
944  }
945 
946  tf2::Transform transform;
947  std::string publishingFrame = useViewPose ? targetFrame : goal->frame;
948 
949  if (useViewPose)
950  {
951  transform = fromMsg(goal->view_pose);
952  }
953  else
954  {
955  transform = getLatestTransform(tfBuffer, targetFrame, goal->frame);
956  }
957 
958  int pixelScale = goal->pixel_scale != 0. ? goal->pixel_scale : 1;
959  double scaling = goal->scaling != 0. ? goal->scaling : 1.0;
960  int sizeWidth = goal->size_width != 0 ? goal->size_width : 1024;
961  int sizeHeight = goal->size_height != 0. ? goal->size_height : 768;
962  bool useOpenGL = goal->use_opengl == 1;
963  RenderPointMapParamsTelecentric params(useOpenGL, pixelScale, scaling, sizeWidth, sizeHeight, transform);
964 
965  NxLibCommand renderPointMap(cmdRenderPointMap, serial);
966 
967  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
968  {
969  renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
970  }
971  setRenderParams(renderPointMap.parameters(), nxLibVersion, &params);
972 
973  renderPointMap.execute();
974 
975  NxLibItem resultPath = retrieveResultPath(renderPointMap.result(), nxLibVersion);
976 
977  if (!resultPath.exists())
978  {
979  std::string error = "Rendered Point Map does not exist in path: " + resultPath.path;
980  result.error.message = error;
981  ROS_ERROR("%s", error.c_str());
982  telecentricProjectionServer->setAborted(result);
983  return;
984  }
985 
986  if (goal->publish_results || goal->include_results_in_response)
987  {
988  if (goal->request_point_cloud || (!goal->request_point_cloud && !goal->request_depth_image))
989  {
990  auto pointCloud = retrieveRenderedPointCloud(renderPointMap.result(), nxLibVersion, goal->frame);
991 
992  if (goal->publish_results)
993  {
995  }
996  if (goal->include_results_in_response)
997  {
998  pcl::toROSMsg(*pointCloud, result.projected_point_cloud);
999  telecentricProjectionServer->setSucceeded(result);
1000  }
1001  else
1002  {
1003  telecentricProjectionServer->setSucceeded();
1004  }
1005  }
1006 
1007  if (goal->request_depth_image)
1008  {
1009  auto renderedImage = retrieveRenderedDepthMap(renderPointMap.result(), nxLibVersion, goal->frame);
1010 
1011  if (goal->publish_results)
1012  {
1013  projectedImagePublisher.publish(renderedImage);
1014  }
1015  if (goal->include_results_in_response)
1016  {
1017  result.projected_depth_map = *renderedImage;
1018  telecentricProjectionServer->setSucceeded(result);
1019  }
1020  else
1021  {
1022  telecentricProjectionServer->setSucceeded();
1023  }
1024  }
1025  }
1026  else
1027  {
1028  telecentricProjectionServer->setSucceeded();
1029  }
1030 
1031  FINISH_NXLIB_ACTION(TelecentricProjection)
1032 }
1033 
1034 void StereoCamera::onTexturedPointCloud(ensenso_camera_msgs::TexturedPointCloudGoalConstPtr const& goal)
1035 {
1037 
1038  ensenso_camera_msgs::TexturedPointCloudResult result;
1039 
1040  if (goal->mono_serial.empty())
1041  {
1042  std::string error = "In Order to use this action, you have to specify one mono serial";
1043  ROS_ERROR("%s", error.c_str());
1044  result.error.message = error;
1045  texturedPointCloudServer->setAborted(result);
1046  return;
1047  }
1048 
1049  double farPlane = goal->far_plane ? goal->far_plane : 10000.;
1050  double nearPlane = goal->near_plane ? goal->near_plane : -10000.;
1051  bool useOpenGL = goal->use_opengl == 1;
1052  bool withTexture = true;
1053 
1054  RenderPointMapParamsProjection params(useOpenGL, farPlane, nearPlane, withTexture);
1055 
1056  NxLibCommand renderPointMap(cmdRenderPointMap, serial);
1057  for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
1058  {
1059  renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
1060  }
1061  renderPointMap.parameters()[itmCamera] = goal->mono_serial;
1062  setRenderParams(renderPointMap.parameters(), nxLibVersion, &params);
1063 
1064  renderPointMap.execute();
1065 
1066  if (goal->publish_results || goal->include_results_in_response)
1067  {
1068  auto cloudColored = retrieveTexturedPointCloud(renderPointMap.result(), nxLibVersion, targetFrame);
1069  if (goal->publish_results)
1070  {
1071  pointCloudPublisherColor.publish(cloudColored);
1072  }
1073  if (goal->include_results_in_response)
1074  {
1075  pcl::toROSMsg(*cloudColored, result.point_cloud);
1076  texturedPointCloudServer->setSucceeded(result);
1077  }
1078  else
1079  {
1080  texturedPointCloudServer->setSucceeded();
1081  }
1082  }
1083  else
1084  {
1085  texturedPointCloudServer->setSucceeded();
1086  }
1087 
1089 }
1090 
1091 void StereoCamera::saveParameterSet(std::string name, bool projectorWasSet)
1092 {
1093  if (name.empty())
1094  {
1095  name = DEFAULT_PARAMETER_SET;
1096  }
1097 
1099 
1100  ParameterSet& parameterSet = parameterSets.at(name);
1101  if (projectorWasSet)
1102  {
1103  parameterSet.autoProjector = false;
1104  }
1105 }
1106 
1107 void StereoCamera::loadParameterSet(std::string name, ProjectorState projector)
1108 {
1109  Camera::loadParameterSet(std::move(name));
1110 
1111  bool changedParameters = false;
1112  ParameterSet const& parameterSet = parameterSets.at(currentParameterSet);
1113 
1114  if (parameterSet.autoProjector && projector != projectorDontCare)
1115  {
1116  try
1117  {
1118  if (projector == projectorOn)
1119  {
1120  cameraNode[itmParameters][itmCapture][itmProjector] = true;
1121  cameraNode[itmParameters][itmCapture][itmFrontLight] = false;
1122  }
1123  else
1124  {
1125  cameraNode[itmParameters][itmCapture][itmProjector] = false;
1126  cameraNode[itmParameters][itmCapture][itmFrontLight] = true;
1127  }
1128  changedParameters = true;
1129  }
1130  catch (...)
1131  {
1132  // Setting the projector and front light fails on file cameras.
1133  }
1134  }
1135 
1136  if (changedParameters)
1137  {
1138  NxLibCommand synchronize(cmdSynchronize, serial);
1139  synchronize.parameters()[itmCameras] = serial;
1140  synchronize.execute();
1141  }
1142 }
1143 
1145 {
1146  ROS_DEBUG("Capturing an image...");
1147 
1148  NxLibCommand capture(cmdCapture, serial);
1149  capture.parameters()[itmCameras] = serial;
1150  if (captureTimeout > 0)
1151  {
1152  capture.parameters()[itmTimeout] = captureTimeout;
1153  }
1154  capture.execute();
1155 
1156  NxLibItem imageNode = cameraNode[itmImages][itmRaw];
1157  imageNode = imageNode[itmLeft];
1158 
1159  if (isFileCamera)
1160  {
1161  // This workaround is needed, because the timestamp of captures from file cameras will not change over time. When
1162  // looking up the current tf tree, this will result in errors, because the time of the original timestamp is
1163  // requested, which lies in the past (and most often longer than the tfBuffer will store the transform!)
1164  return ros::Time::now();
1165  }
1166 
1167  return timestampFromNxLibNode(imageNode);
1168 }
1169 
1170 std::vector<StereoCalibrationPattern> StereoCamera::collectPattern(bool clearBuffer) const
1171 {
1172  if (clearBuffer)
1173  {
1174  NxLibCommand(cmdDiscardPatterns, serial).execute();
1175  }
1176 
1177  NxLibCommand collectPattern(cmdCollectPattern, serial);
1178  collectPattern.parameters()[itmCameras] = serial;
1179  collectPattern.parameters()[itmDecodeData] = true;
1180  collectPattern.parameters()[itmFilter][itmCameras] = serial;
1181  bool useModel = true;
1182  collectPattern.parameters()[itmFilter][itmUseModel] = useModel;
1183  collectPattern.parameters()[itmFilter][itmType] = valStatic;
1184  collectPattern.parameters()[itmFilter][itmValue] = true;
1185  try
1186  {
1187  collectPattern.execute();
1188  }
1189  catch (NxLibException& e)
1190  {
1191  if (e.getErrorCode() == NxLibExecutionFailed)
1192  {
1193  if (collectPattern.result()[itmErrorSymbol] == errPatternNotFound ||
1194  collectPattern.result()[itmErrorSymbol] == errPatternNotDecodable)
1195  {
1196  return {};
1197  }
1198  }
1199  throw;
1200  }
1201 
1202  if (!collectPattern.result()[itmStereo].exists())
1203  {
1204  // We did find patterns, but only in one of the cameras.
1205  return {};
1206  }
1207 
1208  std::vector<StereoCalibrationPattern> result;
1209 
1210  for (int i = 0; i < collectPattern.result()[itmStereo].count(); i++)
1211  {
1212  result.emplace_back(collectPattern.result()[itmStereo][i]);
1213  }
1214 
1215  // Extract the pattern's image points from the result.
1216  NxLibItem pattern = collectPattern.result()[itmPatterns][0][serial];
1217  if (pattern[itmLeft].count() > 1 || pattern[itmRight].count() > 1)
1218  {
1219  // We cannot tell which of the patterns in the two cameras belong together,
1220  // because that would need a comparison with the stereo model.
1221  return result;
1222  }
1223 
1224  for (size_t i = 0; i < result.size(); i++)
1225  {
1226  for (int j = 0; j < pattern[itmLeft][i][itmPoints].count(); j++)
1227  {
1228  NxLibItem pointNode = pattern[itmLeft][i][itmPoints][j];
1229 
1230  ensenso_camera_msgs::ImagePoint point;
1231  point.x = pointNode[0].asDouble();
1232  point.y = pointNode[1].asDouble();
1233  result.at(i).leftPoints.push_back(point);
1234  }
1235  for (int j = 0; j < pattern[itmRight][i][itmPoints].count(); j++)
1236  {
1237  NxLibItem pointNode = pattern[itmRight][i][itmPoints][j];
1238 
1239  ensenso_camera_msgs::ImagePoint point;
1240  point.x = pointNode[0].asDouble();
1241  point.y = pointNode[1].asDouble();
1242  result.at(i).rightPoints.push_back(point);
1243  }
1244  }
1245 
1246  return result;
1247 }
1248 
1249 void StereoCamera::fillCameraInfoFromNxLib(sensor_msgs::CameraInfoPtr const& info, bool right, bool rectified) const
1250 {
1252 
1253  NxLibItem monoCalibrationNode = cameraNode[itmCalibration][itmMonocular][right ? itmRight : itmLeft];
1254  NxLibItem stereoCalibrationNode = cameraNode[itmCalibration][itmStereo][right ? itmRight : itmLeft];
1255 
1256  if (rectified)
1257  {
1258  // For the rectified images all transformations are the identity (because
1259  // all of the distortions were already removed), except for the stereo
1260  // camera matrix.
1261 
1262  info->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
1263  info->D.clear();
1264  info->D.resize(5, 0);
1265 
1266  info->K.fill(0);
1267  info->P.fill(0);
1268  info->R.fill(0);
1269  for (int row = 0; row < 3; row++)
1270  {
1271  for (int column = 0; column < 3; column++)
1272  {
1273  info->P[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();
1274 
1275  if (row == column)
1276  {
1277  info->K[3 * row + column] = 1;
1278  info->R[3 * row + column] = 1;
1279  }
1280  }
1281  }
1282  }
1283  else
1284  {
1285  info->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
1286  info->D.clear();
1287  fillDistortionParamsFromNxLib(monoCalibrationNode[itmDistortion], info);
1288 
1289  info->K.fill(0); // The mono camera matrix.
1290  info->P.fill(0); // The stereo camera matrix.
1291  info->R.fill(0);
1292  for (int row = 0; row < 3; row++)
1293  {
1294  for (int column = 0; column < 3; column++)
1295  {
1296  info->K[3 * row + column] = monoCalibrationNode[itmCamera][column][row].asDouble();
1297  info->P[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();
1298 
1299  info->R[3 * row + column] = stereoCalibrationNode[itmRotation][column][row].asDouble();
1300  }
1301  }
1302  }
1303 
1304  if (right)
1305  {
1306  // Add the offset of the right camera relative to the left one to the
1307  // projection matrix.
1308  double fx = stereoCalibrationNode[itmCamera][0][0].asDouble();
1309  double baseline = cameraNode[itmCalibration][itmStereo][itmBaseline].asDouble() / 1000.0;
1310  info->P[3] = -fx * baseline;
1311  }
1312 
1313  int leftTopX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][0].asInt();
1314  int leftTopY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][1].asInt();
1315  int rightBottomX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][0].asInt();
1316  int rightBottomY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][1].asInt();
1317  info->roi.x_offset = leftTopX;
1318  info->roi.y_offset = leftTopY;
1319  info->roi.width = rightBottomX - leftTopX;
1320  info->roi.height = rightBottomY - leftTopY;
1321  if (rectified)
1322  {
1323  info->roi.do_rectify = true;
1324  }
1325 }
1326 
1328 {
1333 }
1334 
1335 ensenso_camera_msgs::ParameterPtr StereoCamera::readParameter(std::string const& key) const
1336 {
1337  auto message = boost::make_shared<ensenso_camera_msgs::Parameter>();
1338  message->key = key;
1339 
1340  if (key == ensenso_camera_msgs::Parameter::REGION_OF_INTEREST)
1341  {
1342  PointCloudROI const& roi = parameterSets.at(currentParameterSet).roi;
1343 
1344  message->region_of_interest_value.lower.x = roi.minX;
1345  message->region_of_interest_value.lower.y = roi.minY;
1346  message->region_of_interest_value.lower.z = roi.minZ;
1347  message->region_of_interest_value.upper.x = roi.maxX;
1348  message->region_of_interest_value.upper.y = roi.maxY;
1349  message->region_of_interest_value.upper.z = roi.maxZ;
1350  }
1351  else if (key == ensenso_camera_msgs::Parameter::FLEX_VIEW)
1352  {
1353  NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];
1354 
1355  if (!flexViewNode.exists())
1356  {
1357  message->float_value = -1;
1358  }
1359  else if (flexViewNode.isNumber())
1360  {
1361  message->float_value = flexViewNode.asInt();
1362  }
1363  else
1364  {
1365  message->float_value = 0;
1366  }
1367  }
1368  else
1369  {
1370  message = Camera::readParameter(key);
1371  }
1372 
1373  return message;
1374 }
1375 
1376 void StereoCamera::writeParameter(ensenso_camera_msgs::Parameter const& parameter)
1377 {
1378  if (parameter.key == ensenso_camera_msgs::Parameter::REGION_OF_INTEREST)
1379  {
1381 
1382  roi.minX = parameter.region_of_interest_value.lower.x;
1383  roi.minY = parameter.region_of_interest_value.lower.y;
1384  roi.minZ = parameter.region_of_interest_value.lower.z;
1385  roi.maxX = parameter.region_of_interest_value.upper.x;
1386  roi.maxY = parameter.region_of_interest_value.upper.y;
1387  roi.maxZ = parameter.region_of_interest_value.upper.z;
1388 
1389  parameterSets.at(currentParameterSet).useROI = !roi.isEmpty();
1390  }
1391  else if (parameter.key == ensenso_camera_msgs::Parameter::FLEX_VIEW)
1392  {
1393  NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];
1394 
1395  if (!flexViewNode.exists())
1396  {
1397  ROS_WARN("Writing the parameter FlexView, but the camera does not "
1398  "support it!");
1399  return;
1400  }
1401 
1402  int n = static_cast<int>(std::round(parameter.float_value));
1403  if (n <= 1)
1404  {
1405  flexViewNode = false;
1406  }
1407  else
1408  {
1409  flexViewNode = n;
1410  }
1411  }
1412  else
1413  {
1414  Camera::writeParameter(parameter);
1415  }
1416 }
std::vector< tf2::Transform > handEyeCalibrationRobotPoses
Definition: stereo_camera.h:67
sensor_msgs::ImagePtr imageFromNxLibNode(NxLibItem const &node, std::string const &frame)
std::string const DEFAULT_PARAMETER_SET
Definition: camera.h:57
std::unique_ptr< FitPrimitiveServer > fitPrimitiveServer
Definition: stereo_camera.h:45
void writeParameter(ensenso_camera_msgs::Parameter const &parameter) override
void onFitPrimitive(ensenso_camera_msgs::FitPrimitiveGoalConstPtr const &goal)
std::map< std::string, ParameterSet > parameterSets
Definition: camera.h:225
void fillDistortionParamsFromNxLib(NxLibItem const &distortionItem, sensor_msgs::CameraInfoPtr const &info)
void writeToNxLib(NxLibItem const &node, bool right=false)
void onProjectPattern(ensenso_camera_msgs::ProjectPatternGoalConstPtr const &goal)
void publish(const sensor_msgs::Image &image, const sensor_msgs::CameraInfo &info) const
bool fixed
Definition: camera.h:197
std::string targetFrame
Definition: camera.h:201
StereoCamera(ros::NodeHandle nh, std::string serial, std::string fileCameraPath, bool fixed, std::string cameraFrame, std::string targetFrame, std::string robotFrame, std::string wristFrame, std::string linkFrame, int captureTimeout)
void publish(const boost::shared_ptr< M > &message) const
NxLibItem toEnsensoPoint(geometry_msgs::Point32 const &point, bool convertUnits=true)
Definition: conversion.cpp:24
ros::Time timestampFromNxLibNode(NxLibItem const &node)
sensor_msgs::ImagePtr retrieveRenderedDepthMap(NxLibItem const &cmdResult, VersionInfo const &nxLibVersion, std::string const &frame)
Definition: nxLibHelpers.h:226
void updateTransformations(tf2::Transform const &targetFrameTransformation) const
Definition: camera.cpp:253
image_transport::CameraPublisher leftRectifiedImagePublisher
Definition: stereo_camera.h:53
ros::NodeHandle nh
Definition: camera.h:203
void onCalibrateWorkspace(ensenso_camera_msgs::CalibrateWorkspaceGoalConstPtr const &goal)
void onTexturedPointCloud(ensenso_camera_msgs::TexturedPointCloudGoalConstPtr const &goal)
void updateGlobalLink(ros::Time time=ros::Time::now(), std::string frame="", bool useCachedTransformation=false) const
Definition: camera.cpp:259
pcl::PointCloud< pcl::PointNormal >::Ptr pointCloudWithNormalsFromNxLib(NxLibItem const &pointMapNode, NxLibItem const &normalNode, std::string const &frame, PointCloudROI const *roi=nullptr)
pcl::PointCloud< pcl::PointXYZ >::Ptr pointCloudFromNxLib(NxLibItem const &node, std::string const &frame, PointCloudROI const *roi=nullptr)
std::string const TARGET_FRAME_LINK
Definition: camera.h:62
image_transport::Publisher disparityMapPublisher
Definition: stereo_camera.h:55
tf2::Transform getLatestTransform(tf2_ros::Buffer const &tfBuffer, std::string const &cameraFrame, std::string const &targetFrame)
virtual geometry_msgs::TransformStamped lookupTransform(const std::string &target_frame, const std::string &source_frame, const ros::Time &time, const ros::Duration timeout) const
tf2::Transform fromMsg(geometry_msgs::Transform const &transform)
std::unique_ptr< ProjectPatternServer > projectPatternServer
Definition: stereo_camera.h:47
bool autoProjector
Definition: camera.h:174
virtual bool open()
Definition: camera.cpp:427
std::string cameraFrame
Definition: camera.h:199
image_transport::Publisher projectedImagePublisher
Definition: stereo_camera.h:57
ProjectorState
Definition: stereo_camera.h:24
std::string handEyeCalibrationPatternBuffer
Definition: stereo_camera.h:66
#define ROS_WARN(...)
virtual std::vector< geometry_msgs::TransformStamped > estimatePatternPoses(ros::Time imageTimestamp=ros::Time::now(), std::string const &targetFrame="") const
Definition: camera.cpp:303
std::unique_ptr< CalibrateWorkspaceServer > calibrateWorkspaceServer
Definition: stereo_camera.h:44
void writePoseToNxLib(tf2::Transform const &pose, NxLibItem const &node)
geometry_msgs::Point32 toRosPoint(NxLibItem const &itemArray, bool convertUnits=true)
Definition: conversion.cpp:5
pcl::PointCloud< pcl::PointXYZRGB > TexturedPointCloud
std::unique_ptr< SetParameterServer > setParameterServer
Definition: camera.h:217
bool isEmpty() const
sensor_msgs::CameraInfoPtr leftCameraInfo
Definition: stereo_camera.h:37
ros::Publisher projectedPointCloudPublisher
Definition: stereo_camera.h:59
ros::Publisher pointCloudPublisherColor
Definition: stereo_camera.h:59
image_transport::CameraPublisher depthImagePublisher
Definition: stereo_camera.h:56
std::string serial
Definition: camera.h:190
std::unique_ptr< CalibrateHandEyeServer > calibrateHandEyeServer
Definition: stereo_camera.h:43
std::vector< StereoCalibrationPattern > collectPattern(bool clearBuffer=false) const
bool checkNxLibVersion(int major, int minor)
Definition: nxLibHelpers.h:14
geometry_msgs::Pose poseFromTransform(tf2::Transform const &transform)
void publish(const sensor_msgs::Image &message) const
void startServers() const override
#define ROS_WARN_ONCE(...)
std::unique_ptr< LocatePatternServer > locatePatternServer
Definition: stereo_camera.h:46
TFSIMD_FORCE_INLINE Quaternion inverse(const Quaternion &q)
void publishCurrentLinks(ros::TimerEvent const &timerEvent=ros::TimerEvent())
Definition: camera.cpp:560
sensor_msgs::CameraInfoPtr leftRectifiedCameraInfo
Definition: stereo_camera.h:39
Transform inverse() const
virtual geometry_msgs::TransformStamped estimatePatternPose(ros::Time imageTimestamp=ros::Time::now(), std::string const &targetFrame="", bool latestPatternOnly=false) const
Definition: camera.cpp:330
void loadParameterSet(std::string name, ProjectorState projector=projectorDontCare)
ros::Time capture() const override
virtual void writeParameter(ensenso_camera_msgs::Parameter const &parameter)
Definition: camera.cpp:394
bool isValid(tf2::Transform const &pose)
geometry_msgs::TransformStamped transformFromPose(geometry_msgs::PoseStamped const &pose, std::string const &childFrame)
pcl::PointCloud< pcl::PointXYZ >::Ptr retrieveRenderedPointCloud(NxLibItem const &cmdResult, VersionInfo const &nxLibVersion, std::string const &frame)
Definition: nxLibHelpers.h:184
bool sleep()
ros::Publisher pointCloudPublisher
Definition: stereo_camera.h:59
void onSetParameter(ensenso_camera_msgs::SetParameterGoalConstPtr const &goal) override
bool isFileCamera
Definition: camera.h:182
NxLibItem cameraNode
Definition: camera.h:191
std::unique_ptr< RequestDataServer > requestDataServer
Definition: stereo_camera.h:42
std::unique_ptr< TexturedPointCloudServer > texturedPointCloudServer
Definition: stereo_camera.h:48
virtual void startServers() const
Definition: camera.cpp:48
VersionInfo nxLibVersion
Definition: camera.h:188
sensor_msgs::ImagePtr depthImageFromNxLibNode(NxLibItem const &node, std::string const &frame)
void fillCameraInfoFromNxLib(sensor_msgs::CameraInfoPtr const &info, bool right, bool rectified=false) const
NxLibItem retrieveResultPath(NxLibItem const &cmdResult, VersionInfo const &nxLibVersion)
Definition: nxLibHelpers.h:216
tf2_ros::Buffer tfBuffer
Definition: camera.h:210
sensor_msgs::CameraInfoPtr rightRectifiedCameraInfo
Definition: stereo_camera.h:40
void loadParameterSet(std::string name)
Definition: camera.cpp:525
void saveParameterSet(std::string name, bool projectorWasSet)
const int conversionFactor
Definition: conversion.h:11
geometry_msgs::PoseStamped stampedPoseFromTransform(geometry_msgs::TransformStamped const &transform)
std::string robotFrame
Definition: stereo_camera.h:34
tf2::Transform poseFromNxLib(NxLibItem const &node)
void onRequestData(ensenso_camera_msgs::RequestDataGoalConstPtr const &goal)
void setRenderParams(NxLibItem const &paramItem, RenderPointMapParams const *params)
Definition: nxLibHelpers.h:149
void toROSMsg(const sensor_msgs::PointCloud2 &cloud, sensor_msgs::Image &image)
static Time now()
#define START_NXLIB_ACTION(ACTION_NAME, ACTION_SERVER)
Definition: camera.h:91
void onCalibrateHandEye(ensenso_camera_msgs::CalibrateHandEyeGoalConstPtr const &goal)
#define PREEMPT_ACTION_IF_REQUESTED
Definition: camera.h:134
tf2::Transform fromStampedMessage(geometry_msgs::TransformStamped const &transform)
ROSCPP_DECL bool exists(const std::string &service_name, bool print_failure_reason)
void onLocatePattern(ensenso_camera_msgs::LocatePatternGoalConstPtr const &goal)
void saveParameterSet(std::string name)
Definition: camera.cpp:512
image_transport::CameraPublisher leftRawImagePublisher
Definition: stereo_camera.h:51
void fillBasicCameraInfoFromNxLib(sensor_msgs::CameraInfoPtr const &info) const
Definition: camera.cpp:242
void updateCameraInfo() override
image_transport::CameraPublisher rightRawImagePublisher
Definition: stereo_camera.h:52
bool open() override
ensenso_camera_msgs::ParameterPtr readParameter(std::string const &key) const override
image_transport::CameraPublisher rightRectifiedImagePublisher
Definition: stereo_camera.h:54
Definition: camera.h:179
virtual ensenso_camera_msgs::ParameterPtr readParameter(std::string const &key) const
Definition: camera.cpp:355
#define FINISH_NXLIB_ACTION(ACTION_NAME)
Definition: camera.h:102
std::vector< std::pair< sensor_msgs::ImagePtr, sensor_msgs::ImagePtr > > imagePairsFromNxLibNode(NxLibItem const &node, std::string const &frame)
std::string currentParameterSet
Definition: camera.h:226
#define ROS_ERROR(...)
sensor_msgs::CameraInfoPtr rightCameraInfo
Definition: stereo_camera.h:38
void onTelecentricProjection(ensenso_camera_msgs::TelecentricProjectionGoalConstPtr const &goal)
pcl::PointCloud< pcl::PointXYZRGB >::Ptr retrieveTexturedPointCloud(NxLibItem const &cmdResult, VersionInfo const &nxLibVersion, std::string const &targetFrame)
Definition: nxLibHelpers.h:200
std::unique_ptr< TelecentricProjectionServer > telecentricProjectionServer
Definition: stereo_camera.h:49
bool loadSettings(std::string const &jsonFile, bool saveAsDefaultParameters=false)
Definition: camera.cpp:57
#define ROS_DEBUG(...)
std::string wristFrame
Definition: stereo_camera.h:35
std::unique_ptr< tf2_ros::TransformBroadcaster > transformBroadcaster
Definition: camera.h:212


ensenso_camera
Author(s): Ensenso
autogenerated on Thu Feb 6 2020 03:48:46