// Excerpts from the Ensenso driver's StereoCamera implementation; source lines
// missing from the original listing are marked with "// ...".

// StereoCamera constructor (excerpts):
leftCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
rightCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
// ...
// (publisher member names inferred from the class declaration)
pointCloudColoredPublisher =
    ensenso::pcl::create_publisher<ensenso::pcl::PointCloudColored>(nh, "point_cloud_color", 1);
pointCloudProjectedPublisher =
    ensenso::pcl::create_publisher<ensenso::pcl::PointCloud>(nh, "projected_point_cloud", 1);
// StereoCamera::onFitPrimitive (excerpt):
NxLibCommand fitPrimitives(cmdFitPrimitive, params.serial);
NxLibItem const& primitives = fitPrimitives.parameters()[itmPrimitive];

int primitivesCount = 0;
for (auto const& primitive : goal->primitives)
{
  if (primitive.type == ensenso::msg::Primitive::SPHERE)
  {
    // ...
    primitives[primitivesCount][itmCount] = primitive.count;
  }
  else if (primitive.type == ensenso::msg::Primitive::CYLINDER)
  {
    // ...
  }
  else if (primitive.type == ensenso::msg::Primitive::PLANE)
  {
    primitives[primitivesCount][itmCount] = primitive.count;
  }
  primitives[primitivesCount][itmType] = primitive.type;
  // ...
  primitives[primitivesCount][itmInlierFraction] = primitive.inlier_fraction_in;
  // ...
}

NxLibItem const& commandParameters = fitPrimitives.parameters();
if (goal->normal_radius)
{
  commandParameters[itmNormal][itmRadius] = goal->normal_radius;
}
// ...
float const zeroThreshold = 10e-6;
if (!(std::abs(goal->region_of_interest.lower.x) < zeroThreshold &&
      std::abs(goal->region_of_interest.lower.y) < zeroThreshold &&
      std::abs(goal->region_of_interest.lower.z) < zeroThreshold))
{
  // ...
}
if (!(std::abs(goal->region_of_interest.upper.x) < zeroThreshold &&
      std::abs(goal->region_of_interest.upper.y) < zeroThreshold &&
      std::abs(goal->region_of_interest.upper.z) < zeroThreshold))
{
  // ...
}
if (goal->failure_probability != 0)
{
  commandParameters[itmFailureProbability] = goal->failure_probability;
}
if (std::abs(goal->inlier_threshold) > zeroThreshold)
{
  // ...
}
if (std::abs(goal->inlier_fraction) > zeroThreshold)
{
  commandParameters[itmInlierFraction] = goal->inlier_fraction;
}
if (std::abs(goal->scaling) > zeroThreshold)
{
  commandParameters[itmScaling] = goal->scaling;
}
if (goal->ransac_iterations != 0)
{
  commandParameters[itmIterations] = goal->ransac_iterations;
}
// ...
fitPrimitives.execute();

ensenso::action::FitPrimitiveResult result;

NxLibItem const primitiveResults = fitPrimitives.result()[itmPrimitive];
if (primitiveResults.isArray())
{
  result.primitives.reserve(primitiveResults.count());
  for (int primitiveCount = 0; primitiveCount < primitiveResults.count(); primitiveCount++)
  {
    NxLibItem const& currentPrimitive = primitiveResults[primitiveCount];
    ensenso::msg::Primitive primitive;

    primitive.type = currentPrimitive[itmType].asString();
    primitive.ransac_iterations = currentPrimitive[itmIterations].asInt();
    primitive.inlier_count = currentPrimitive[itmInlierCount].asInt();
    primitive.inlier_fraction_out = currentPrimitive[itmInlierFraction].asDouble();
    primitive.score = currentPrimitive[itmScore].asDouble();

    if (primitive.type == ensenso::msg::Primitive::PLANE)
    {
      // ...
    }
    else if (primitive.type == ensenso::msg::Primitive::SPHERE)
    {
      // ...
    }
    else if (primitive.type == ensenso::msg::Primitive::CYLINDER)
    {
      // ...
    }
    result.primitives.emplace_back(primitive);
  }
}
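// Aside (not part of the original source): the handler above follows the NxLib
// command pattern used throughout this file -- construct an NxLibCommand, fill
// its parameters() subtree, execute(), then read result(). A minimal sketch of
// that pattern; the serial "12345" and the threshold value are placeholder
// assumptions, and a real call needs a point map computed beforehand:
#include "nxLib.h"
#include <iostream>

void fitSingleSphereExample()
{
  NxLibCommand fitPrimitive(cmdFitPrimitive, "12345");
  NxLibItem const& primitive = fitPrimitive.parameters()[itmPrimitive][0];
  primitive[itmType] = "Sphere";        // same string value as ensenso::msg::Primitive::SPHERE
  primitive[itmInlierThreshold] = 2.0;  // NxLib works in millimeters
  primitive[itmCount] = 1;
  fitPrimitive.execute();               // throws NxLibException on failure
  std::cout << "score: " << fitPrimitive.result()[itmPrimitive][0][itmScore].asDouble() << std::endl;
}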
// StereoCamera::onRequestData (excerpt):
ensenso::action::RequestDataResult result;
ensenso::action::RequestDataFeedback feedback;

bool publishResults = goal->publish_results;
if (!goal->publish_results && !goal->include_results_in_response)
{
  publishResults = true;
}

bool requestRawImages = goal->request_raw_images;
// ...
ENSENSO_WARN(nh, "XR: Capture mode \"Rectified\", skipping raw images request.");
requestRawImages = false;
// ...

bool requestRectifiedImages = goal->request_rectified_images;
// ...
ENSENSO_WARN(nh, "XR: Capture mode \"Raw\", skipping rectified images request.");
requestRectifiedImages = false;
// ...

ENSENSO_WARN(nh, "XR: Downloading raw/rectified images is disabled, skipping the raw/rectified images request.");
requestRawImages = false;
requestRectifiedImages = false;
// ...

bool requestDisparityMap = goal->request_disparity_map;

bool requestDepthImage = goal->request_depth_image;

bool requestPointCloud = goal->request_point_cloud || goal->request_depth_image;
if (!goal->request_raw_images && !goal->request_rectified_images && !goal->request_disparity_map &&
    !goal->request_point_cloud && !goal->request_depth_image && !goal->request_normals)
{
  requestPointCloud = true;
}

bool requestNormals = goal->request_normals;

bool computePointCloud = requestPointCloud || requestNormals;
bool computeDisparityMap = requestDisparityMap || computePointCloud;
// ...
requestDisparityMap = false;
// ...
ENSENSO_WARN(nh,
             "XR: Capture mode \"Raw\", skipping all 3D data requests! Only raw images are captured and they can "
             "only be used for calibration actions. Rectifying these images afterwards is not possible and they "
             "cannot be used to compute 3D data. If you want to retrieve 3D data, set capture mode to "
             /* ... */);
requestDisparityMap = false;
requestDepthImage = false;
requestPointCloud = false;
requestNormals = false;
computePointCloud = false;
computeDisparityMap = false;
// ...

feedback.images_acquired = true;
// ...

if (requestRawImages)
{
  // ...
  if (goal->include_results_in_response)
  {
    for (auto const& imagePair : rawImages)
    {
      result.left_raw_images.push_back(*imagePair.first);
      // ...
      result.right_raw_images.push_back(*imagePair.second);
    }
    // ...
  }
  // ...
}
// ...
if (requestRectifiedImages && !computeDisparityMap)
{
  // ...
}
else if (computeDisparityMap)
{
  // ...
  NxLibCommand disparityMapCommand(cmdComputeDisparityMap, params.serial);
  disparityMapCommand.parameters()[itmCameras] = params.serial;
  disparityMapCommand.execute();
}
// ...
if (requestRectifiedImages)
{
  auto rectifiedImages = /* ... */;
  // ...
  if (goal->include_results_in_response)
  {
    for (auto const& imagePair : rectifiedImages)
    {
      result.left_rectified_images.push_back(*imagePair.first);
      // ...
      result.right_rectified_images.push_back(*imagePair.second);
    }
  }
  // ...
}
// ...
auto depthImageCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>(*leftRectifiedCameraInfo);
// ...
if (requestDisparityMap)
{
  // ...
  depthImageCameraInfo->header.stamp = disparityMap->header.stamp;
  // ...
  if (goal->include_results_in_response)
  {
    result.disparity_map = *disparityMap;
    result.disparity_map_info = *depthImageCameraInfo;
  }
  // ...
}
// ...
if (computePointCloud)
{
  // ...
  NxLibCommand computePointMap(cmdComputePointMap, params.serial);
  computePointMap.parameters()[itmCameras] = params.serial;
  computePointMap.execute();
  // ...
  if (requestPointCloud && !requestNormals)
  {
    // ...
    if (goal->include_results_in_response)
    {
      // ...
    }
    // ...
  }
  // ...
  NxLibCommand computeNormals(cmdComputeNormals, params.serial);
  computeNormals.execute();
  // ...
  if (goal->include_results_in_response)
  {
    // ...
  }
  // ...
}
// ...
if (requestDepthImage)
{
  // ...
  NxLibCommand computePointMap(cmdComputePointMap, params.serial);
  computePointMap.parameters()[itmCameras] = params.serial;
  computePointMap.parameters()[itmRelative] = true;
  computePointMap.execute();
  // ...
  depthImageCameraInfo->header.stamp = depthImage->header.stamp;
  // ...
  if (goal->include_results_in_response)
  {
    result.depth_image = *depthImage;
    result.depth_image_info = *depthImageCameraInfo;
  }
  // ...
}
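// Aside (not part of the original source): ComputePointMap with Relative = true,
// as used for the depth image above, yields per-pixel camera-frame coordinates
// whose Z channel is the depth. A rough sketch of reading that Z channel through
// the raw NxLib binary-data API; the float32 / 3-channel layout of the point map
// is an assumption stated here, not taken from this file:
#include "nxLib.h"
#include <vector>

std::vector<float> readDepthChannel(NxLibItem const& pointMapNode)
{
  int width = 0, height = 0, channels = 0, bytesPerElement = 0;
  bool isFloat = false;
  double timestamp = 0;
  pointMapNode.getBinaryDataInfo(&width, &height, &channels, &bytesPerElement, &isFloat, &timestamp);

  std::vector<float> pointMap;
  pointMapNode.getBinaryData(pointMap, nullptr);  // X, Y, Z interleaved, in millimeters

  std::vector<float> depth(static_cast<size_t>(width) * height);
  for (size_t i = 0; i < depth.size(); i++)
  {
    depth[i] = pointMap[3 * i + 2];  // keep only the Z component
  }
  return depth;
}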
// StereoCamera::onSetParameter (excerpt):
ensenso::action::SetParameterResult result;
// ...
result.parameter_file_applied = false;
if (!goal->parameter_file.empty())
{
  result.parameter_file_applied = loadSettings(goal->parameter_file);
  if (!result.parameter_file_applied)
  {
    // ...
    server->setAborted(std::move(result));
    // ...
  }
}
// ...
bool projectorChanged = false;
for (auto const& parameter : goal->parameters)
{
  // ...
  if (parameter.key == parameter.PROJECTOR || parameter.key == parameter.FRONT_LIGHT)
  {
    projectorChanged = true;
  }
}
// ...
NxLibCommand synchronize(cmdSynchronize, params.serial);
// ...
synchronize.execute();
// ...
for (auto const& parameter : goal->parameters)
{
  // ...
}
// StereoCamera::onLocatePattern (excerpt):
ensenso::action::LocatePatternResult result;
ensenso::action::LocatePatternFeedback feedback;

int numberOfShots = goal->number_of_shots;
if (numberOfShots < 1)
{
  // ...
}

std::vector<StereoCalibrationPattern> patterns;
// ...
for (int i = 0; i < numberOfShots; i++)
{
  // ...
  imageTimestamp = timestamp;
  // ...
  if (i == (numberOfShots - 1))
  {
    // ...
    feedback.images_acquired = true;
    // ...
  }

  bool clearBuffer = (i == 0);
  // ...
  if (patterns.empty())
  {
    result.found_pattern = false;
    // ...
  }
  // ...
  if (patterns.size() > 1)
  {
    // ...
  }
}
// ...
result.found_pattern = true;
result.patterns.resize(patterns.size());
for (size_t i = 0; i < patterns.size(); i++)
{
  patterns[i].writeToMessage(result.patterns[i]);
}
// ...
if (!goal->target_frame.empty())
{
  patternFrame = goal->target_frame;
}
// ...
result.frame = patternFrame;
// ...
if (patterns.size() > 1)
{
  // ...
  result.pattern_poses.resize(patternPoses.size());
  for (size_t i = 0; i < patternPoses.size(); i++)
  {
    result.pattern_poses[i] = patternPoses[i];
  }
}
else
{
  // ...
  result.pattern_poses.resize(1);
  result.pattern_poses[0] = patternPose;
}
// ...
if (!goal->tf_frame.empty())
{
  if (patterns.size() == 1)
  {
    // ...
  }
  else
  {
    ENSENSO_WARN(nh, "Cannot publish the pattern pose in tf, because there are multiple patterns!");
  }
}
// StereoCamera::onProjectPattern (excerpt):
ensenso::action::ProjectPatternResult result;
// ...
if (isValid(targetFrameTransformation))
{
  // ...
}
// ...
NxLibCommand projectPattern(cmdProjectPattern, params.serial);
projectPattern.parameters()[itmCameras] = params.serial;
projectPattern.parameters()[itmGridSpacing] = goal->grid_spacing * 1000;  // NxLib expects millimeters
projectPattern.parameters()[itmGridSize][0] = goal->grid_size_x;
projectPattern.parameters()[itmGridSize][1] = goal->grid_size_y;
// ...
projectPattern.execute();
// ...
int sensorWidth = cameraNode[itmSensor][itmSize][0].asInt();
int sensorHeight = cameraNode[itmSensor][itmSize][1].asInt();

NxLibItem projectedPoints = projectPattern.result()[itmStereo][0][itmPoints];
NxLibItem leftPoints = projectedPoints[0];
NxLibItem rightPoints = projectedPoints[1];

result.pattern_is_visible = true;
result.left_points.resize(leftPoints.count());
result.right_points.resize(leftPoints.count());
for (int i = 0; i < leftPoints.count(); i++)
{
  double leftX = leftPoints[i][0].asDouble();
  double leftY = leftPoints[i][1].asDouble();
  double rightX = rightPoints[i][0].asDouble();
  double rightY = rightPoints[i][1].asDouble();

  result.left_points[i].x = leftX;
  result.left_points[i].y = leftY;
  result.right_points[i].x = rightX;
  result.right_points[i].y = rightY;

  if (leftX < 0 || leftX > sensorWidth || leftY < 0 || leftY > sensorHeight || rightX < 0 || rightX > sensorWidth ||
      rightY < 0 || rightY > sensorHeight)
  {
    result.pattern_is_visible = false;
    // ...
  }
}
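// Aside (not part of the original source): the visibility loop above simply
// tests every projected point against both sensor rectangles. The same check,
// factored into a hypothetical helper for clarity:
bool insideSensor(double x, double y, int sensorWidth, int sensorHeight)
{
  return x >= 0 && x <= sensorWidth && y >= 0 && y <= sensorHeight;
}
// pattern_is_visible stays true only if insideSensor(...) holds for the left
// and right projection of every pattern point.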
// StereoCamera::onCalibrateHandEye (excerpt):
ensenso::action::CalibrateHandEyeResult result;
result.command = goal->command;

if (goal->command == goal->RESET)
{
  // ...
}
else if (goal->command == goal->CAPTURE_PATTERN)
{
  // ...
  result.error_message = "You need to specify a robot base and wrist frame to do a hand-eye calibration!";
  // ...
  NxLibCommand setPatternBuffer(cmdSetPatternBuffer, params.serial);
  // ...
  setPatternBuffer.execute();
  // ...
  NxLibCommand(cmdDiscardPatterns, params.serial).execute();
  // ...
  std::vector<StereoCalibrationPattern> patterns = collectPattern();
  if (patterns.empty())
  {
    result.found_pattern = false;
    // ...
  }
  // ...
  if (patterns.size() > 1)
  {
    result.error_message = "Detected multiple calibration patterns during a hand-eye calibration!";
    // ...
  }
  // ...
  result.found_pattern = true;
  patterns[0].writeToMessage(result.pattern);
  // ...
  result.pattern_pose = patternPose.pose;
  // ...
  geometry_msgs::msg::TransformStamped robotPose;
  // ...
  result.error_message = std::string("Could not look up the robot pose due to the tf error: ") + e.what();
  // ...
  NxLibCommand getPatternBuffer(cmdGetPatternBuffer, params.serial);
  getPatternBuffer.execute();
  // ...
}
else if (goal->command == goal->START_CALIBRATION)
{
  // ...
  result.error_message = "You need to collect at least 5 patterns before starting a hand-eye calibration!";
  // ...
  size_t numberOfPatterns = 0;
  NxLibCommand setPatternBuffer(cmdSetPatternBuffer, params.serial);
  if (!goal->pattern_observations.empty())
  {
    for (size_t i = 0; i < goal->pattern_observations.size(); i++)
    {
      // ...
      NxLibItem patternNode = setPatternBuffer.parameters()[itmPatterns][i][params.serial];
      // ...
    }
  }
  // ...
  numberOfPatterns = setPatternBuffer.parameters()[itmPatterns].count();
  setPatternBuffer.execute();
  // ...
  if (!goal->robot_poses.empty())
  {
    robotTransforms.clear();
    for (auto const& pose : goal->robot_poses)
    {
      // ...
      robotTransforms.push_back(transform);
    }
  }
  // ...
  if (robotTransforms.size() != numberOfPatterns)
  {
    result.error_message = "The number of pattern observations does not match the number of robot poses!";
    // ...
  }
  // ...
  linkTransform = fromMsg(goal->link);
  patternTransform = fromMsg(goal->pattern_pose);
  // ...
  NxLibCommand calibrateHandEye(cmdCalibrateHandEye, params.serial);
  calibrateHandEye.parameters()[itmSetup] = params.fixed ? valFixed : valMoving;
  // ...
  if (goal->robot_geometry == goal->DOF4_FIX_CAMERA_POSE_Z_COMPONENT)
  {
    calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][0] = false;
    calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][1] = false;
    calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][2] = true;
  }
  else if (goal->robot_geometry == goal->DOF4_FIX_PATTERN_POSE_Z_COMPONENT)
  {
    calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][0] = false;
    calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][1] = false;
    calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][2] = true;
  }
  else if (goal->robot_geometry == goal->DOF3_FIX_CAMERA_POSE)
  {
    calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][0] = true;
    calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][1] = true;
    calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][2] = true;
  }
  else if (goal->robot_geometry == goal->DOF3_FIX_PATTERN_POSE)
  {
    calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][0] = true;
    calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][1] = true;
    calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][2] = true;
  }
  // ...
  for (size_t i = 0; i < robotTransforms.size(); i++)
  {
    // ...
  }
  // ...
  calibrateHandEye.execute(false);

  auto getCalibrationResidual = [](NxLibItem const& node) {
    if (node[itmResidual].exists())
    {
      return node[itmResidual].asDouble();
    }
    // ...
    return node[itmReprojectionError].asDouble();
  };
  // ...
  while (!calibrateHandEye.finished())
  {
    if (calibrateHandEye.result()[itmProgress].exists())
    {
      ensenso::action::CalibrateHandEyeFeedback feedback;
      feedback.number_of_iterations = calibrateHandEye.result()[itmProgress][itmIterations].asInt();
      feedback.residual = getCalibrationResidual(calibrateHandEye.result()[itmProgress]);
      // ...
    }
    // ...
  }
  // ...
  result.calibration_time = calibrateHandEye.result()[itmTime].asDouble() / 1000;  // NxLib reports milliseconds
  // ...
  result.number_of_iterations = calibrateHandEye.result()[itmIterations].asInt();
  result.residual = getCalibrationResidual(calibrateHandEye.result());
  // ...
  if (goal->write_calibration_to_eeprom)
  {
    // ...
    NxLibCommand storeCalibration(cmdStoreCalibration, params.serial);
    storeCalibration.parameters()[itmCameras] = params.serial;
    storeCalibration.parameters()[itmLink] = true;
    storeCalibration.execute();
  }
}
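// Aside (not part of the original source): calibrateHandEye.execute(false)
// above starts the NxLib command without blocking, and the loop polls
// finished() while reading progress from result(). A minimal sketch of that
// asynchronous pattern; the poll interval is an assumption:
#include "nxLib.h"
#include <chrono>
#include <thread>

void runCommandAsync(NxLibCommand& command)
{
  command.execute(false);  // returns immediately
  while (!command.finished())
  {
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
  }
  command.assertSuccessful();  // throws NxLibException if the command failed
}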
// StereoCamera::onCalibrateWorkspace (excerpt):
// ...
ENSENSO_WARN(nh, "You are performing a workspace calibration for a moving camera. Are you sure this is what you want?");
// ...
ensenso::action::CalibrateWorkspaceResult result;
result.successful = true;
// ...
int numberOfShots = goal->number_of_shots;
if (numberOfShots < 1)
{
  // ...
}
// ...
for (int i = 0; i < numberOfShots; i++)
{
  // ...
  imageTimestamp = timestamp;
  // ...
  bool clearBuffer = (i == 0);
  std::vector<StereoCalibrationPattern> patterns = collectPattern(clearBuffer);
  if (patterns.size() != 1)
  {
    result.successful = false;
    // ...
  }
  // ...
}
// ...
NxLibCommand calibrateWorkspace(cmdCalibrateWorkspace, params.serial);
calibrateWorkspace.parameters()[itmCameras] = params.serial;
// ...
calibrateWorkspace.execute();
// ...
if (goal->write_calibration_to_eeprom)
{
  // ...
  NxLibCommand storeCalibration(cmdStoreCalibration, params.serial);
  storeCalibration.parameters()[itmCameras] = params.serial;
  storeCalibration.parameters()[itmLink] = true;
  storeCalibration.execute();
}
// StereoCamera::onTelecentricProjection (excerpt):
ensenso::action::TelecentricProjectionResult result;

bool useViewPose = isValid(goal->view_pose);
bool frameGiven = !goal->frame.empty();
// ...
result.error.message = "You have to define a valid frame to which the projection will be published. Aborting";
// ...
transform = fromMsg(goal->view_pose);
// ...
int pixelScale = goal->pixel_scale != 0. ? goal->pixel_scale : 1;
double scaling = goal->scaling != 0. ? goal->scaling : 1.0;
int sizeWidth = goal->size_width != 0 ? goal->size_width : 1024;
int sizeHeight = goal->size_height != 0. ? goal->size_height : 768;
bool useOpenGL = goal->use_opengl == 1;
// ...
/* ... */ std::move(transform));
// ...
NxLibCommand renderPointMap(cmdRenderPointMap, params.serial);
// ...
for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
{
  renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
}
// ...
renderPointMap.execute();
// ...
NxLibItem resultPath = renderPointMap.result()[itmImages][itmRenderPointMap];
// ...
if (!resultPath.exists())
{
  result.error.message = "Rendered Point Map does not exist in path: " + resultPath.path;
  // ...
}
// ...
if (goal->publish_results || goal->include_results_in_response)
{
  if (goal->request_point_cloud || (!goal->request_point_cloud && !goal->request_depth_image))
  {
    // ...
    if (goal->include_results_in_response)
    {
      // ...
    }
    // ...
    if (goal->publish_results)
    {
      // ...
    }
  }
  // ...
  if (goal->request_depth_image)
  {
    // ...
    if (goal->publish_results)
    {
      // ...
    }
    // ...
    if (goal->include_results_in_response)
    {
      result.projected_depth_map = *renderedImage;
      // ...
    }
  }
}
// StereoCamera::onTexturedPointCloud (excerpt):
ensenso::action::TexturedPointCloudResult result;

if (goal->mono_serial.empty())
{
  result.error.message = "In order to use this action, you have to specify one mono serial";
  // ...
}
// ...
double farPlane = goal->far_plane ? goal->far_plane : 10000.;
double nearPlane = goal->near_plane ? goal->near_plane : -10000.;
bool useOpenGL = goal->use_opengl == 1;
bool withTexture = true;
// ...
NxLibCommand renderPointMap(cmdRenderPointMap, params.serial);
for (int i = 0; i < static_cast<int>(goal->serials.size()); i++)
{
  renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
}
renderPointMap.parameters()[itmCamera] = goal->mono_serial;
// ...
renderPointMap.execute();
// ...
if (goal->publish_results || goal->include_results_in_response)
{
  // ...
  if (goal->include_results_in_response)
  {
    // ...
  }
  // ...
  if (goal->publish_results)
  {
    // ...
  }
}
// StereoCamera::saveParameterSet (excerpt):
if (projectorWasSet)
{
  // ...
}

// StereoCamera::loadParameterSet (excerpt):
// ...
bool changedParameters = false;
// ...
cameraNode[itmParameters][itmCapture][itmProjector] = true;
cameraNode[itmParameters][itmCapture][itmFrontLight] = false;
// ...
cameraNode[itmParameters][itmCapture][itmProjector] = false;
cameraNode[itmParameters][itmCapture][itmFrontLight] = true;
// ...
changedParameters = true;
// ...
if (changedParameters)
{
  // ...
  NxLibCommand synchronize(cmdSynchronize, params.serial);
  // ...
  synchronize.execute();
}
ENSENSO_WARN_ONCE("XR: Using timestamp of first left rectified image in capture mode \"Rectified\".");
// ...
try
{
  // ...
}
catch (const std::exception& e)
{
  ENSENSO_WARN(nh, "Unable to update virtual objects. Error: %s", e.what());
}
// StereoCamera::collectPattern (excerpt):
NxLibCommand(cmdDiscardPatterns, params.serial).execute();
// ...
NxLibCommand collectPattern(cmdCollectPattern, params.serial);  // declaration reconstructed from usage below
collectPattern.parameters()[itmCameras] = params.serial;
collectPattern.parameters()[itmDecodeData] = true;
collectPattern.parameters()[itmFilter][itmCameras] = params.serial;
bool useModel = true;
collectPattern.parameters()[itmFilter][itmUseModel] = useModel;
collectPattern.parameters()[itmFilter][itmType] = valStatic;
collectPattern.parameters()[itmFilter][itmValue] = true;
try
{
  collectPattern.execute();
}
catch (NxLibException& e)
{
  if (e.getErrorCode() == NxLibExecutionFailed)
  {
    if (collectPattern.result()[itmErrorSymbol] == errPatternNotFound ||
        collectPattern.result()[itmErrorSymbol] == errPatternNotDecodable)
    {
      // ...
    }
  }
  // ...
}
// ...
if (!collectPattern.result()[itmStereo].exists())
{
  // ...
}
// ...
std::vector<StereoCalibrationPattern> result;

for (int i = 0; i < collectPattern.result()[itmStereo].count(); i++)
{
  result.emplace_back(collectPattern.result()[itmStereo][i]);
}
// ...
NxLibItem pattern = collectPattern.result()[itmPatterns][0][params.serial];
if (pattern[itmLeft].count() > 1 || pattern[itmRight].count() > 1)
{
  // ...
}
// ...
for (size_t i = 0; i < result.size(); i++)
{
  for (int j = 0; j < pattern[itmLeft][i][itmPoints].count(); j++)
  {
    NxLibItem pointNode = pattern[itmLeft][i][itmPoints][j];

    ensenso::msg::ImagePoint point;
    point.x = pointNode[0].asDouble();
    point.y = pointNode[1].asDouble();
    result.at(i).leftPoints.push_back(point);
  }
  for (int j = 0; j < pattern[itmRight][i][itmPoints].count(); j++)
  {
    NxLibItem pointNode = pattern[itmRight][i][itmPoints][j];

    ensenso::msg::ImagePoint point;
    point.x = pointNode[0].asDouble();
    point.y = pointNode[1].asDouble();
    result.at(i).rightPoints.push_back(point);
  }
}
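// Aside (not part of the original source): the catch block above illustrates
// the two NxLib failure modes -- API errors carry a numeric code on the
// exception, while command failures use NxLibExecutionFailed plus an error
// symbol in the result subtree. Generic sketch of that error-handling pattern:
#include "nxLib.h"
#include <iostream>

bool executeChecked(NxLibCommand& command)
{
  try
  {
    command.execute();
    return true;
  }
  catch (NxLibException& e)
  {
    if (e.getErrorCode() == NxLibExecutionFailed)
    {
      std::cerr << "Command failed: " << command.result()[itmErrorSymbol].asString() << std::endl;
    }
    else
    {
      std::cerr << "NxLib error " << e.getErrorCode() << " at " << e.getItemPath() << std::endl;
    }
    return false;
  }
}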
geometry_msgs::msg::PoseStamped StereoCamera::estimatePatternPose(ensenso::ros::Time imageTimestamp,
                                                                  std::string const& targetFrame,
                                                                  bool latestPatternOnly) const
{
  // ...
  NxLibCommand estimatePatternPose(cmdEstimatePatternPose, params.serial);  // declaration reconstructed from usage below
  if (latestPatternOnly)
  {
    estimatePatternPose.parameters()[itmAverage] = false;

    int patternCount = NxLibItem()[itmParameters][itmPatternCount].asInt();
    estimatePatternPose.parameters()[itmFilter][itmOr][0][itmAnd][0][itmType] = valIndex;
    estimatePatternPose.parameters()[itmFilter][itmOr][0][itmAnd][0][itmValue] = patternCount - 1;
  }
  else
  {
    estimatePatternPose.parameters()[itmAverage] = true;
  }
  // ...
  estimatePatternPose.execute();

  auto patterns = estimatePatternPose.result()[itmPatterns];
  // ...
}
std::vector<geometry_msgs::msg::PoseStamped> StereoCamera::estimatePatternPoses(ensenso::ros::Time imageTimestamp,
                                                                                std::string const& targetFrame) const
{
  // ...
  NxLibCommand estimatePatternPose(cmdEstimatePatternPose, params.serial);  // declaration reconstructed from usage below
  estimatePatternPose.parameters()[itmAverage] = false;
  estimatePatternPose.parameters()[itmFilter][itmCameras] = params.serial;
  estimatePatternPose.parameters()[itmFilter][itmUseModel] = true;
  estimatePatternPose.parameters()[itmFilter][itmType] = valStatic;
  estimatePatternPose.parameters()[itmFilter][itmValue] = true;
  estimatePatternPose.execute();

  auto patterns = estimatePatternPose.result()[itmPatterns];
  // ...
  std::vector<geometry_msgs::msg::PoseStamped> result;
  result.reserve(patterns.count());

  for (int i = 0; i < patterns.count(); i++)
  {
    result.push_back(stampedPoseFromNxLib(patterns[i][itmPatternPose], targetFrame, imageTimestamp));
  }
  // ...
  return result;
}
void StereoCamera::fillCameraInfoFromNxLib(sensor_msgs::msg::CameraInfoPtr const& info, bool right,
                                           bool rectified) const
{
  // ...
  NxLibItem monoCalibrationNode = cameraNode[itmCalibration][itmMonocular][right ? itmRight : itmLeft];
  NxLibItem stereoCalibrationNode = cameraNode[itmCalibration][itmStereo][right ? itmRight : itmLeft];
  // ...
  for (int row = 0; row < 3; row++)
  {
    for (int column = 0; column < 3; column++)
    {
      GET_P_MATRIX(info)[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();
    }
  }
  // ...
  for (int row = 0; row < 3; row++)
  {
    for (int column = 0; column < 3; column++)
    {
      GET_K_MATRIX(info)[3 * row + column] = monoCalibrationNode[itmCamera][column][row].asDouble();
      GET_P_MATRIX(info)[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();
      GET_R_MATRIX(info)[3 * row + column] = stereoCalibrationNode[itmRotation][column][row].asDouble();
    }
  }
  // ...
  double fx = stereoCalibrationNode[itmCamera][0][0].asDouble();
  double baseline = cameraNode[itmCalibration][itmStereo][itmBaseline].asDouble() / 1000.0;  // mm -> m
  // ...
  int leftTopX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][0].asInt();
  int leftTopY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][1].asInt();
  int rightBottomX = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][0].asInt();
  int rightBottomY = cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][1].asInt();
  info->roi.x_offset = leftTopX;
  info->roi.y_offset = leftTopY;
  info->roi.width = rightBottomX - leftTopX;
  info->roi.height = rightBottomY - leftTopY;
  // ...
  info->roi.do_rectify = true;
  // ...
}
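// Aside (not part of the original source): fx and baseline above are what ROS
// uses to encode the stereo baseline in the right camera's 3x4 projection
// matrix, Tx = -fx * B (see the sensor_msgs/CameraInfo documentation and
// REP 104). A hypothetical helper showing that convention with the ROS 2
// message type:
#include <sensor_msgs/msg/camera_info.hpp>

void setStereoBaseline(sensor_msgs::msg::CameraInfo& info, double fx, double baselineMeters)
{
  // p is row-major 3x4; index 3 is the first row's fourth column (Tx).
  info.p[3] = -fx * baselineMeters;
}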
// StereoCamera::readParameter (excerpt):
auto message = ensenso::std::make_shared<ensenso::msg::Parameter>();
// ...
if (key == ensenso::msg::Parameter::REGION_OF_INTEREST)
{
  // ...
  message->region_of_interest_value.lower.x = roi.minX;
  message->region_of_interest_value.lower.y = roi.minY;
  message->region_of_interest_value.lower.z = roi.minZ;
  message->region_of_interest_value.upper.x = roi.maxX;
  message->region_of_interest_value.upper.y = roi.maxY;
  message->region_of_interest_value.upper.z = roi.maxZ;
}
else if (key == ensenso::msg::Parameter::FLEX_VIEW)
{
  NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];
  // ...
  if (!flexViewNode.exists())
  {
    // ...
  }
  else if (flexViewNode.isNumber())
  {
    message->float_value = flexViewNode.asInt();
  }
  // ...
}
// StereoCamera::writeParameter (excerpt):
if (parameter.key == ensenso::msg::Parameter::REGION_OF_INTEREST)
{
  // ...
  roi.minX = parameter.region_of_interest_value.lower.x;
  roi.minY = parameter.region_of_interest_value.lower.y;
  roi.minZ = parameter.region_of_interest_value.lower.z;
  roi.maxX = parameter.region_of_interest_value.upper.x;
  roi.maxY = parameter.region_of_interest_value.upper.y;
  roi.maxZ = parameter.region_of_interest_value.upper.z;
  // ...
}
else if (parameter.key == ensenso::msg::Parameter::FLEX_VIEW)
{
  NxLibItem flexViewNode = cameraNode[itmParameters][itmCapture][itmFlexView];
  // ...
  if (!flexViewNode.exists())
  {
    ENSENSO_WARN(nh, "Writing the parameter FlexView, but the camera does not support it!");
    // ...
  }
  // ...
  int n = static_cast<int>(std::round(parameter.float_value));
  // ...
  flexViewNode = false;
  // ...
}
return cameraNode[itmType].asString() == "StructuredLight";
// ...
std::string modelName = cameraNode[itmModelName].asString();
// ...
std::string captureMode = cameraNode[itmParameters][itmCapture][itmMode].asString();
return captureMode == valRaw;
// ...

// StereoCamera::hasDownloadedImages (excerpt):
return isXrSeries() ? cameraNode[itmParameters][itmCapture][itmDownloadImages].asBool() : true;
// StereoCamera::addDisparityMapOffset (excerpt):
double disparityMapOffset = 0.0;
if (cameraNode[itmCalibration][itmDynamic][itmStereo][itmDisparityMapOffset].exists())
{
  disparityMapOffset = cameraNode[itmCalibration][itmDynamic][itmStereo][itmDisparityMapOffset].asDouble();
}
// ...
info->width -= disparityMapOffset;