11 leftCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
12 rightCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
55 ensenso::pcl::create_publisher<ensenso::pcl::PointCloudColored>(
nh,
"point_cloud_color", 1);
57 ensenso::pcl::create_publisher<ensenso::pcl::PointCloud>(
nh,
"projected_point_cloud", 1);
// Build the NxLib FitPrimitive command for this camera (selected by its serial).
// NOTE(review): this is a non-contiguous fragment — the leading numbers embedded in
// each line are the original file's line numbers and they skip values, so braces and
// statements between the visible lines are missing and are not reconstructed here.
85 NxLibCommand fitPrimitives(cmdFitPrimitive,
params.
serial);
// Parameter node that receives one entry per primitive requested in the action goal.
86 NxLibItem
const& primitives = fitPrimitives.parameters()[itmPrimitive];
88 int primitivesCount = 0;
// Translate each primitive description from the goal into NxLib command parameters.
89 for (
auto const& primitive : goal->primitives)
91 if (primitive.type == ensenso::msg::Primitive::SPHERE)
95 primitives[primitivesCount][itmCount] = primitive.count;
97 else if (primitive.type == ensenso::msg::Primitive::CYLINDER)
102 else if (primitive.type == ensenso::msg::Primitive::PLANE)
104 primitives[primitivesCount][itmCount] = primitive.count;
// Fields written for every primitive type, regardless of the branch above.
106 primitives[primitivesCount][itmType] = primitive.type;
108 primitives[primitivesCount][itmInlierFraction] = primitive.inlier_fraction_in;
// Forward optional goal fields to the FitPrimitive command; each field is only
// written when the goal supplies a non-zero value, otherwise the NxLib default
// for that parameter is left in place.
113 NxLibItem
const& commandParameters = fitPrimitives.parameters();
114 if (goal->normal_radius)
116 commandParameters[itmNormal][itmRadius] = goal->normal_radius;
// Threshold for treating a float goal field as "unset".
// NOTE(review): 10e-6 equals 1e-5, not 1e-6 — confirm the intended magnitude.
119 float const zeroThreshold = 10e-6;
// A region of interest is considered given when either corner is non-zero.
120 if (!(std::abs(goal->region_of_interest.lower.x) < zeroThreshold &&
121 std::abs(goal->region_of_interest.lower.y) < zeroThreshold &&
122 std::abs(goal->region_of_interest.lower.z) < zeroThreshold))
126 if (!(std::abs(goal->region_of_interest.upper.x) < zeroThreshold &&
127 std::abs(goal->region_of_interest.upper.y) < zeroThreshold &&
128 std::abs(goal->region_of_interest.upper.z) < zeroThreshold))
133 if (goal->failure_probability != 0)
135 commandParameters[itmFailureProbability] = goal->failure_probability;
138 if (std::abs(goal->inlier_threshold) > zeroThreshold)
142 if (std::abs(goal->inlier_fraction) > zeroThreshold)
144 commandParameters[itmInlierFraction] = goal->inlier_fraction;
146 if (std::abs(goal->scaling) > zeroThreshold)
148 commandParameters[itmScaling] = goal->scaling;
150 if (goal->ransac_iterations != 0)
152 commandParameters[itmIterations] = goal->ransac_iterations;
// Run the command, then copy each fitted primitive from the NxLib result tree
// into the action result message.
155 fitPrimitives.execute();
157 ensenso::action::FitPrimitiveResult result;
159 NxLibItem
const primitiveResults = fitPrimitives.result()[itmPrimitive];
// The result node is an array only when at least one primitive was fitted.
160 if (primitiveResults.isArray())
162 result.primitives.reserve(primitiveResults.count());
163 for (
int primitiveCount = 0; primitiveCount < primitiveResults.count(); primitiveCount++)
165 NxLibItem
const& currentPrimitive = primitiveResults[primitiveCount];
166 ensenso::msg::Primitive primitive;
// Fields common to every primitive type.
168 primitive.type = currentPrimitive[itmType].asString();
169 primitive.ransac_iterations = currentPrimitive[itmIterations].asInt();
170 primitive.inlier_count = currentPrimitive[itmInlierCount].asInt();
171 primitive.inlier_fraction_out = currentPrimitive[itmInlierFraction].asDouble();
172 primitive.score = currentPrimitive[itmScore].asDouble();
// Type-specific fields (branch bodies are in the omitted original lines).
175 if (primitive.type == ensenso::msg::Primitive::PLANE)
181 else if (primitive.type == ensenso::msg::Primitive::SPHERE)
185 else if (primitive.type == ensenso::msg::Primitive::CYLINDER)
189 result.primitives.emplace_back(primitive);
202 ensenso::action::RequestDataResult result;
203 ensenso::action::RequestDataFeedback feedback;
206 bool publishResults = goal->publish_results;
207 if (!goal->publish_results && !goal->include_results_in_response)
209 publishResults =
true;
213 bool requestRawImages = goal->request_raw_images;
216 ENSENSO_WARN(
nh,
"XR: Capture mode \"Rectified\", skipping raw images request.");
217 requestRawImages =
false;
221 bool requestRectifiedImages = goal->request_rectified_images;
224 ENSENSO_WARN(
nh,
"XR: Capture mode \"Raw\", skipping rectified images request.");
225 requestRectifiedImages =
false;
231 ENSENSO_WARN(
nh,
"XR: Downloading raw/rectified images is disabled, skipping the raw/rectified images request.");
232 requestRawImages =
false;
233 requestRectifiedImages =
false;
236 bool requestDisparityMap = goal->request_disparity_map;
238 bool requestDepthImage = goal->request_depth_image;
241 bool requestPointCloud = goal->request_point_cloud || goal->request_depth_image;
242 if (!goal->request_raw_images && !goal->request_rectified_images && !goal->request_disparity_map &&
243 !goal->request_point_cloud && !goal->request_depth_image && !goal->request_normals)
245 requestPointCloud =
true;
248 bool requestNormals = goal->request_normals;
251 bool computePointCloud = requestPointCloud || requestNormals;
252 bool computeDisparityMap = requestDisparityMap || computePointCloud;
257 requestDisparityMap =
false;
264 "XR: Capture mode \"Raw\", skipping all 3D data requests! Only raw images are captured and they can "
265 "only be used for calibration actions. Rectifying these images afterwards is not possible and they "
266 "cannot be used to compute 3D data. If you want to retrieve 3D data, set capture mode to "
268 requestDisparityMap =
false;
269 requestDepthImage =
false;
270 requestPointCloud =
false;
271 requestNormals =
false;
272 computePointCloud =
false;
273 computeDisparityMap =
false;
281 feedback.images_acquired =
true;
284 if (requestRawImages)
294 if (goal->include_results_in_response)
296 for (
auto const& imagePair : rawImages)
298 result.left_raw_images.push_back(*imagePair.first);
301 result.right_raw_images.push_back(*imagePair.second);
325 if (requestRectifiedImages && !computeDisparityMap)
331 else if (computeDisparityMap)
337 NxLibCommand disparityMapCommand(cmdComputeDisparityMap,
params.
serial);
338 disparityMapCommand.parameters()[itmCameras] =
params.
serial;
339 disparityMapCommand.execute();
342 if (requestRectifiedImages)
344 auto rectifiedImages =
353 if (goal->include_results_in_response)
355 for (
auto const& imagePair : rectifiedImages)
357 result.left_rectified_images.push_back(*imagePair.first);
360 result.right_rectified_images.push_back(*imagePair.second);
384 if (requestDisparityMap)
389 depthImageCameraInfo->header.stamp = disparityMap->header.stamp;
391 if (goal->include_results_in_response)
393 result.disparity_map = *disparityMap;
394 result.disparity_map_info = *depthImageCameraInfo;
404 if (computePointCloud)
406 NxLibCommand computePointMap(cmdComputePointMap,
params.
serial);
407 computePointMap.parameters()[itmCameras] =
params.
serial;
408 computePointMap.execute();
416 if (requestPointCloud && !requestNormals)
421 if (goal->include_results_in_response)
432 NxLibCommand computeNormals(cmdComputeNormals,
params.
serial);
433 computeNormals.execute();
439 if (goal->include_results_in_response)
452 if (requestDepthImage)
459 NxLibCommand computePointMap(cmdComputePointMap,
params.
serial);
460 computePointMap.parameters()[itmCameras] =
params.
serial;
461 computePointMap.parameters()[itmRelative] =
true;
462 computePointMap.execute();
468 depthImageCameraInfo->header.stamp = depthImage->header.stamp;
470 if (goal->include_results_in_response)
472 result.depth_image = *depthImage;
473 result.depth_image_info = *depthImageCameraInfo;
490 ensenso::action::SetParameterResult result;
494 result.parameter_file_applied =
false;
495 if (!goal->parameter_file.empty())
497 result.parameter_file_applied =
loadSettings(goal->parameter_file);
498 if (!result.parameter_file_applied)
500 server->setAborted(std::move(result));
505 bool projectorChanged =
false;
506 for (
auto const& parameter : goal->parameters)
510 if (parameter.key == parameter.PROJECTOR || parameter.key == parameter.FRONT_LIGHT)
512 projectorChanged =
true;
517 NxLibCommand synchronize(cmdSynchronize,
params.
serial);
519 synchronize.execute();
527 for (
auto const& parameter : goal->parameters)
541 ensenso::action::LocatePatternResult result;
542 ensenso::action::LocatePatternFeedback feedback;
546 int numberOfShots = goal->number_of_shots;
547 if (numberOfShots < 1)
552 std::vector<StereoCalibrationPattern> patterns;
554 for (
int i = 0; i < numberOfShots; i++)
561 imageTimestamp = timestamp;
566 if (i == (numberOfShots - 1))
569 feedback.images_acquired =
true;
573 bool clearBuffer = (i == 0);
575 if (patterns.empty())
577 result.found_pattern =
false;
582 if (patterns.size() > 1)
590 result.found_pattern =
true;
591 result.patterns.resize(patterns.size());
592 for (
size_t i = 0; i < patterns.size(); i++)
594 patterns[i].writeToMessage(result.patterns[i]);
600 if (!goal->target_frame.empty())
602 patternFrame = goal->target_frame;
605 result.frame = patternFrame;
607 if (patterns.size() > 1)
612 result.pattern_poses.resize(patternPoses.size());
613 for (
size_t i = 0; i < patternPoses.size(); i++)
615 result.pattern_poses[i] = patternPoses[i];
623 result.pattern_poses.resize(1);
624 result.pattern_poses[0] = patternPose;
627 if (!goal->tf_frame.empty())
629 if (patterns.size() == 1)
636 ENSENSO_WARN(
nh,
"Cannot publish the pattern pose in tf, because there are multiple patterns!");
651 ensenso::action::ProjectPatternResult result;
654 if (
isValid(targetFrameTransformation))
668 NxLibCommand projectPattern(cmdProjectPattern,
params.
serial);
669 projectPattern.parameters()[itmCameras] =
params.
serial;
670 projectPattern.parameters()[itmGridSpacing] = goal->grid_spacing * 1000;
671 projectPattern.parameters()[itmGridSize][0] = goal->grid_size_x;
672 projectPattern.parameters()[itmGridSize][1] = goal->grid_size_y;
676 projectPattern.execute();
// Read the sensor dimensions so projected pattern points can be checked
// against the image bounds below.
680 int sensorWidth =
cameraNode[itmSensor][itmSize][0].asInt();
681 int sensorHeight =
cameraNode[itmSensor][itmSize][1].asInt();
// Projected pattern points for the stereo pair: index 0 = left, 1 = right.
683 NxLibItem projectedPoints = projectPattern.result()[itmStereo][0][itmPoints];
684 NxLibItem leftPoints = projectedPoints[0];
685 NxLibItem rightPoints = projectedPoints[1];
// Assume visible until any point falls outside either sensor.
687 result.pattern_is_visible =
true;
688 result.left_points.resize(leftPoints.count());
// NOTE(review): right_points is sized from leftPoints.count() — presumably both
// arrays always have the same length; confirm against the NxLib result schema.
689 result.right_points.resize(leftPoints.count());
690 for (
int i = 0; i < leftPoints.count(); i++)
692 double leftX = leftPoints[i][0].asDouble();
693 double leftY = leftPoints[i][1].asDouble();
694 double rightX = rightPoints[i][0].asDouble();
695 double rightY = rightPoints[i][1].asDouble();
697 result.left_points[i].x = leftX;
698 result.left_points[i].y = leftY;
699 result.right_points[i].x = rightX;
700 result.right_points[i].y = rightY;
// A single out-of-bounds point in either image marks the pattern as not visible.
702 if (leftX < 0 || leftX > sensorWidth || leftY < 0 || leftY > sensorHeight || rightX < 0 || rightX > sensorWidth ||
703 rightY < 0 || rightY > sensorHeight)
705 result.pattern_is_visible =
false;
718 ensenso::action::CalibrateHandEyeResult result;
719 result.command = goal->command;
721 if (goal->command == goal->RESET)
726 else if (goal->command == goal->CAPTURE_PATTERN)
730 result.error_message =
"You need to specify a robot base and wrist frame to do a hand-eye calibration!";
744 NxLibCommand setPatternBuffer(cmdSetPatternBuffer,
params.
serial);
746 setPatternBuffer.execute();
750 NxLibCommand(cmdDiscardPatterns,
params.
serial).execute();
753 std::vector<StereoCalibrationPattern> patterns =
collectPattern();
754 if (patterns.empty())
756 result.found_pattern =
false;
760 if (patterns.size() > 1)
762 result.error_message =
"Detected multiple calibration patterns during a hand-eye calibration!";
770 result.found_pattern =
true;
771 patterns[0].writeToMessage(result.pattern);
774 result.pattern_pose = patternPose.pose;
778 geometry_msgs::msg::TransformStamped robotPose;
785 result.error_message = std::string(
"Could not look up the robot pose due to the tf error: ") + e.what();
792 NxLibCommand getPatternBuffer(cmdGetPatternBuffer,
params.
serial);
793 getPatternBuffer.execute();
800 else if (goal->command == goal->START_CALIBRATION)
804 result.error_message =
"You need to collect at least 5 patterns before starting a hand-eye calibration!";
811 size_t numberOfPatterns = 0;
812 NxLibCommand setPatternBuffer(cmdSetPatternBuffer,
params.
serial);
813 if (!goal->pattern_observations.empty())
815 for (
size_t i = 0; i < goal->pattern_observations.size(); i++)
819 NxLibItem patternNode = setPatternBuffer.parameters()[itmPatterns][i][
params.
serial];
828 numberOfPatterns = setPatternBuffer.parameters()[itmPatterns].count();
829 setPatternBuffer.execute();
833 if (!goal->robot_poses.empty())
835 robotTransforms.clear();
836 for (
auto const& pose : goal->robot_poses)
839 robotTransforms.push_back(transform);
843 if (robotTransforms.size() != numberOfPatterns)
845 result.error_message =
"The number of pattern observations does not match the number of robot poses!";
853 linkTransform =
fromMsg(goal->link);
854 patternTransform =
fromMsg(goal->pattern_pose);
856 NxLibCommand calibrateHandEye(cmdCalibrateHandEye,
params.
serial);
857 calibrateHandEye.parameters()[itmSetup] =
params.
fixed ? valFixed : valMoving;
860 if (goal->robot_geometry == goal->DOF4_FIX_CAMERA_POSE_Z_COMPONENT)
862 calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][0] =
false;
863 calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][1] =
false;
864 calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][2] =
true;
866 else if (goal->robot_geometry == goal->DOF4_FIX_PATTERN_POSE_Z_COMPONENT)
868 calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][0] =
false;
869 calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][1] =
false;
870 calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][2] =
true;
872 else if (goal->robot_geometry == goal->DOF3_FIX_CAMERA_POSE)
874 calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][0] =
true;
875 calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][1] =
true;
876 calibrateHandEye.parameters()[itmFixed][itmLink][itmTranslation][2] =
true;
878 else if (goal->robot_geometry == goal->DOF3_FIX_PATTERN_POSE)
880 calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][0] =
true;
881 calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][1] =
true;
882 calibrateHandEye.parameters()[itmFixed][itmPatternPose][itmTranslation][2] =
true;
895 for (
size_t i = 0; i < robotTransforms.size(); i++)
900 calibrateHandEye.execute(
false);
902 auto getCalibrationResidual = [](NxLibItem
const& node) {
903 if (node[itmResidual].
exists())
905 return node[itmResidual].asDouble();
908 return node[itmReprojectionError].asDouble();
912 while (!calibrateHandEye.finished())
914 if (calibrateHandEye.result()[itmProgress].exists())
916 ensenso::action::CalibrateHandEyeFeedback feedback;
917 feedback.number_of_iterations = calibrateHandEye.result()[itmProgress][itmIterations].asInt();
918 feedback.residual = getCalibrationResidual(calibrateHandEye.result()[itmProgress]);
934 if (calibrateHandEye.result()[itmTime].exists())
937 result.calibration_time = calibrateHandEye.result()[itmTime].asDouble() / 1000;
941 result.calibration_time = calibrateHandEye.slot()[itmStatus][itmTime].asDouble() / 1000;
944 result.number_of_iterations = calibrateHandEye.result()[itmIterations].asInt();
945 result.residual = getCalibrationResidual(calibrateHandEye.result());
950 if (goal->write_calibration_to_eeprom)
953 NxLibCommand storeCalibration(cmdStoreCalibration,
params.
serial);
954 storeCalibration.parameters()[itmCameras] =
params.
serial;
955 storeCalibration.parameters()[itmLink] =
true;
956 storeCalibration.execute();
974 "You are performing a workspace calibration for a moving camera. Are you sure this is what you want?");
977 ensenso::action::CalibrateWorkspaceResult result;
978 result.successful =
true;
982 int numberOfShots = goal->number_of_shots;
983 if (numberOfShots < 1)
989 for (
int i = 0; i < numberOfShots; i++)
995 imageTimestamp = timestamp;
999 bool clearBuffer = (i == 0);
1000 std::vector<StereoCalibrationPattern> patterns =
collectPattern(clearBuffer);
1001 if (patterns.size() != 1)
1003 result.successful =
false;
1018 NxLibCommand calibrateWorkspace(cmdCalibrateWorkspace,
params.
serial);
1019 calibrateWorkspace.parameters()[itmCameras] =
params.
serial;
1023 calibrateWorkspace.execute();
1025 if (goal->write_calibration_to_eeprom)
1028 NxLibCommand storeCalibration(cmdStoreCalibration,
params.
serial);
1029 storeCalibration.parameters()[itmCameras] =
params.
serial;
1030 storeCalibration.parameters()[itmLink] =
true;
1031 storeCalibration.execute();
1044 ensenso::action::TelecentricProjectionResult result;
1046 bool useViewPose =
isValid(goal->view_pose);
1047 bool frameGiven = !goal->frame.empty();
1051 result.error.message =
"You have to define a valid frame, to which to projection will be published. Aborting";
1062 transform =
fromMsg(goal->view_pose);
// Apply defaults for unset (zero) rendering parameters from the goal.
// NOTE(review): pixel_scale is compared against 0. and stored in an int — if the
// goal field is floating point, fractional scales are truncated; confirm intended.
1069 int pixelScale = goal->pixel_scale != 0. ? goal->pixel_scale : 1;
1070 double scaling = goal->scaling != 0. ? goal->scaling : 1.0;
// Default render target size: 1024x768.
1071 int sizeWidth = goal->size_width != 0 ? goal->size_width : 1024;
1072 int sizeHeight = goal->size_height != 0. ? goal->size_height : 768;
1073 bool useOpenGL = goal->use_opengl == 1;
1075 std::move(transform));
1077 NxLibCommand renderPointMap(cmdRenderPointMap,
params.
serial);
1079 for (
int i = 0; i < static_cast<int>(goal->serials.size()); i++)
1081 renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
1085 renderPointMap.execute();
1087 NxLibItem resultPath = renderPointMap.result()[itmImages][itmRenderPointMap];
1089 if (!resultPath.exists())
1091 result.error.message =
"Rendered Point Map does not exist in path: " + resultPath.path;
1097 if (goal->publish_results || goal->include_results_in_response)
1099 if (goal->request_point_cloud || (!goal->request_point_cloud && !goal->request_depth_image))
1103 if (goal->include_results_in_response)
1112 if (goal->publish_results)
1118 if (goal->request_depth_image)
1122 if (goal->publish_results)
1126 if (goal->include_results_in_response)
1128 result.projected_depth_map = *renderedImage;
1149 ensenso::action::TexturedPointCloudResult result;
1151 if (goal->mono_serial.empty())
1153 result.error.message =
"In Order to use this action, you have to specify one mono serial";
1159 double farPlane = goal->far_plane ? goal->far_plane : 10000.;
1160 double nearPlane = goal->near_plane ? goal->near_plane : -10000.;
1161 bool useOpenGL = goal->use_opengl == 1;
1162 bool withTexture =
true;
1166 NxLibCommand renderPointMap(cmdRenderPointMap,
params.
serial);
1167 for (
int i = 0; i < static_cast<int>(goal->serials.size()); i++)
1169 renderPointMap.parameters()[itmCameras][i] = goal->serials[i];
1171 renderPointMap.parameters()[itmCamera] = goal->mono_serial;
1174 renderPointMap.execute();
1176 if (goal->publish_results || goal->include_results_in_response)
1179 if (goal->include_results_in_response)
1188 if (goal->publish_results)
1211 if (projectorWasSet)
1221 bool changedParameters =
false;
1230 cameraNode[itmParameters][itmCapture][itmProjector] =
true;
1231 cameraNode[itmParameters][itmCapture][itmFrontLight] =
false;
1235 cameraNode[itmParameters][itmCapture][itmProjector] =
false;
1236 cameraNode[itmParameters][itmCapture][itmFrontLight] =
true;
1238 changedParameters =
true;
1246 if (changedParameters)
1248 NxLibCommand synchronize(cmdSynchronize,
params.
serial);
1250 synchronize.execute();
1271 ENSENSO_WARN_ONCE(
"XR: Using timestamp of first left rectified image in capture mode \"Rectified\".");
1290 catch (
const std::exception& e)
1292 ENSENSO_WARN(
nh,
"Unable to update virtual objects. Error: %s", e.what());
1313 NxLibCommand(cmdDiscardPatterns,
params.
serial).execute();
1320 bool useModel =
true;
1328 catch (NxLibException& e)
1330 if (e.getErrorCode() == NxLibExecutionFailed)
1332 if (
collectPattern.result()[itmErrorSymbol] == errPatternNotFound ||
1333 collectPattern.result()[itmErrorSymbol] == errPatternNotDecodable)
1347 std::vector<StereoCalibrationPattern> result;
1349 for (
int i = 0; i <
collectPattern.result()[itmStereo].count(); i++)
1356 if (pattern[itmLeft].count() > 1 || pattern[itmRight].count() > 1)
1363 for (
size_t i = 0; i < result.size(); i++)
1365 for (
int j = 0; j < pattern[itmLeft][i][itmPoints].count(); j++)
1367 NxLibItem pointNode = pattern[itmLeft][i][itmPoints][j];
1369 ensenso::msg::ImagePoint point;
1370 point.x = pointNode[0].asDouble();
1371 point.y = pointNode[1].asDouble();
1372 result.at(i).leftPoints.push_back(point);
1374 for (
int j = 0; j < pattern[itmRight][i][itmPoints].count(); j++)
1376 NxLibItem pointNode = pattern[itmRight][i][itmPoints][j];
1378 ensenso::msg::ImagePoint point;
1379 point.x = pointNode[0].asDouble();
1380 point.y = pointNode[1].asDouble();
1381 result.at(i).rightPoints.push_back(point);
1389 std::string
const& targetFrame,
1390 bool latestPatternOnly)
const
1395 if (latestPatternOnly)
1399 int patternCount = NxLibItem()[itmParameters][itmPatternCount].asInt();
1401 estimatePatternPose.parameters()[itmFilter][itmOr][0][itmAnd][0][itmValue] = patternCount - 1;
1416 std::string
const& targetFrame)
const
1430 std::vector<geometry_msgs::msg::PoseStamped> result;
1431 result.reserve(patterns.count());
1433 for (
int i = 0; i < patterns.count(); i++)
1435 result.push_back(
stampedPoseFromNxLib(patterns[i][itmPatternPose], targetFrame, imageTimestamp));
1442 bool rectified)
const
1446 NxLibItem monoCalibrationNode =
cameraNode[itmCalibration][itmMonocular][right ? itmRight : itmLeft];
1447 NxLibItem stereoCalibrationNode =
cameraNode[itmCalibration][itmStereo][right ? itmRight : itmLeft];
1457 auto scalingFactor =
cameraNode[itmParameters][itmDisparityMap][itmScaling].asDouble();
1459 for (
int row = 0; row < 3; row++)
1461 for (
int column = 0; column < 3; column++)
1463 GET_P_MATRIX(info)[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble() * scalingFactor;
1477 for (
int row = 0; row < 3; row++)
1479 for (
int column = 0; column < 3; column++)
1481 GET_K_MATRIX(info)[3 * row + column] = monoCalibrationNode[itmCamera][column][row].asDouble();
1482 GET_P_MATRIX(info)[4 * row + column] = stereoCalibrationNode[itmCamera][column][row].asDouble();
1483 GET_R_MATRIX(info)[3 * row + column] = stereoCalibrationNode[itmRotation][column][row].asDouble();
// Stereo geometry for the CameraInfo message: focal length from the calibrated
// camera matrix and the baseline converted from millimeters to meters.
1491 double fx = stereoCalibrationNode[itmCamera][0][0].asDouble();
1492 double baseline =
cameraNode[itmCalibration][itmStereo][itmBaseline].asDouble() / 1000.0;
// Region of interest taken from the disparity map's configured area of interest.
1496 int leftTopX =
cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][0].asInt();
1497 int leftTopY =
cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmLeftTop][1].asInt();
1498 int rightBottomX =
cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][0].asInt();
1499 int rightBottomY =
cameraNode[itmParameters][itmDisparityMap][itmAreaOfInterest][itmRightBottom][1].asInt();
1500 info->roi.x_offset = leftTopX;
1501 info->roi.y_offset = leftTopY;
1502 info->roi.width = rightBottomX - leftTopX;
1503 info->roi.height = rightBottomY - leftTopY;
1506 info->roi.do_rectify =
true;
1523 auto message = ensenso::std::make_shared<ensenso::msg::Parameter>();
1526 if (key == ensenso::msg::Parameter::REGION_OF_INTEREST)
1530 message->region_of_interest_value.lower.x = roi.
minX;
1531 message->region_of_interest_value.lower.y = roi.
minY;
1532 message->region_of_interest_value.lower.z = roi.
minZ;
1533 message->region_of_interest_value.upper.x = roi.
maxX;
1534 message->region_of_interest_value.upper.y = roi.
maxY;
1535 message->region_of_interest_value.upper.z = roi.
maxZ;
1537 else if (key == ensenso::msg::Parameter::FLEX_VIEW)
1539 NxLibItem flexViewNode =
cameraNode[itmParameters][itmCapture][itmFlexView];
1541 if (!flexViewNode.exists())
1545 else if (flexViewNode.isNumber())
1547 message->float_value = flexViewNode.asInt();
1564 if (parameter.key == ensenso::msg::Parameter::REGION_OF_INTEREST)
1568 roi.
minX = parameter.region_of_interest_value.lower.x;
1569 roi.
minY = parameter.region_of_interest_value.lower.y;
1570 roi.
minZ = parameter.region_of_interest_value.lower.z;
1571 roi.
maxX = parameter.region_of_interest_value.upper.x;
1572 roi.
maxY = parameter.region_of_interest_value.upper.y;
1573 roi.
maxZ = parameter.region_of_interest_value.upper.z;
1577 else if (parameter.key == ensenso::msg::Parameter::FLEX_VIEW)
1579 NxLibItem flexViewNode =
cameraNode[itmParameters][itmCapture][itmFlexView];
1581 if (!flexViewNode.exists())
1583 ENSENSO_WARN(
nh,
"Writing the parameter FlexView, but the camera does not support it!");
1587 int n =
static_cast<int>(std::round(parameter.float_value));
1590 flexViewNode =
false;
1605 return cameraNode[itmType].asString() ==
"StructuredLight";
1610 std::string modelName =
cameraNode[itmModelName].asString();
1621 std::string captureMode =
cameraNode[itmParameters][itmCapture][itmMode].asString();
1622 return captureMode == valRaw;
1627 return isXrSeries() ?
cameraNode[itmParameters][itmCapture][itmDownloadImages].asBool() :
true;
// Read the dynamic disparity-map offset from the calibration tree, if the
// camera provides one, and shrink the reported image width by it.
1637 double disparityMapOffset = 0.0;
1638 if (
cameraNode[itmCalibration][itmDynamic][itmStereo][itmDisparityMapOffset].
exists())
1640 disparityMapOffset =
cameraNode[itmCalibration][itmDynamic][itmStereo][itmDisparityMapOffset].asDouble();
// NOTE(review): info->width is an integer message field while the offset is a
// double — the fractional part is discarded here; confirm this is intended.
1644 info->width -= disparityMapOffset;