#define BOOST_TEST_STATIC_LINK

#include <boost/test/included/unit_test.hpp>
#include "asr_world_model/EmptyViewportList.h"
#include <robot_model_services/robot_model/impl/MILDRobotModel.hpp>
#include <robot_model_services/robot_model/impl/MILDRobotModelWithExactIK.hpp>
#include <robot_model_services/robot_model/impl/MILDRobotState.hpp>

// ...

mNodeHandle.param("output_path", mOutputPath, std::string(""));
mNodeHandle.param("configurable_point_clouds/test", doTest, true);

// ...

ROS_INFO("Running configurable point cloud test");
std::vector<std::string> objectTypeNames1, objectTypeNames2;
if (!this->getParameter("configurable_point_clouds/object_types_1", objectTypeNames1))
    return;
if (!this->getParameter("configurable_point_clouds/object_types_2", objectTypeNames2))
    return;

int elements1, elements2;
if (!this->getParameter("configurable_point_clouds/elements_1", elements1))
    return;
if (!this->getParameter("configurable_point_clouds/elements_2", elements2))
    return;

if (!this->getParameter("configurable_point_clouds/distance", distance))
    return;

bool continueTest = true;
std::map<std::string, std::vector<double>> typeToOrientation;
for (unsigned int i = 0; i < objectTypeNames1.size(); i++)
{
    std::string objectType = objectTypeNames1[i];
    std::string parameter = "configurable_point_clouds/" + objectType + "_orientation";
    typeToOrientation[objectType] = std::vector<double>();
    if (!this->getParameter(parameter, typeToOrientation[objectType]))
        return;
}
for (unsigned int i = 0; i < objectTypeNames2.size(); i++)
{
    std::string objectType = objectTypeNames2[i];
    if (std::find(objectTypeNames1.begin(), objectTypeNames1.end(), objectType) == objectTypeNames1.end()) {
        std::string parameter = "configurable_point_clouds/" + objectType + "_orientation";
        typeToOrientation[objectType] = std::vector<double>();
        if (!this->getParameter(parameter, typeToOrientation[objectType]))
            return;
    }
}
std::vector<double> posDeviation1List, posDeviation2List;
if (!this->getParameter("configurable_point_clouds/position_deviation_1", posDeviation1List))
    return;
if (!this->getParameter("configurable_point_clouds/position_deviation_2", posDeviation2List))
    return;
SimpleVector3 posDeviation1(posDeviation1List[0], posDeviation1List[1], posDeviation1List[2]);
SimpleVector3 posDeviation2(posDeviation2List[0], posDeviation2List[1], posDeviation2List[2]);

float orientationDeviation1, orientationDeviation2;
if (!this->getParameter("configurable_point_clouds/orientation_deviation_1", orientationDeviation1))
    return;
if (!this->getParameter("configurable_point_clouds/orientation_deviation_2", orientationDeviation2))
    return;
asr_world_model::EmptyViewportList empty;

ros::ServiceClient emptyViewportsClient =
        mGlobalNodeHandle.serviceClient<asr_world_model::EmptyViewportList>("/env/asr_world_model/empty_viewport_list");
// ...

orientations[0] = this->getOrientation(orientations[1], 0, 0, 180);

SetAttributedPointCloud apc;

for (int i = 0; i < elements1; i++) {
    asr_msgs::AsrAttributedPoint element = this->getAttributedPoint(hp[0], posDeviation1, orientations[0], orientationDeviation1,
                                                                    objectTypeNames1, typeToOrientation, i);
    apc.request.point_cloud.elements.push_back(element);
}

for (int i = 0; i < elements2; i++) {
    asr_msgs::AsrAttributedPoint element = this->getAttributedPoint(hp[1], posDeviation2, orientations[1], orientationDeviation2,
                                                                    objectTypeNames2, typeToOrientation, i);
    apc.request.point_cloud.elements.push_back(element);
}
ROS_INFO("Setting initial pose");

position[0] = hp[0][0] + distance * (hp[1][0] - hp[0][0]) + 0.2;
position[1] = hp[0][1] + distance * (hp[1][1] - hp[0][1]) + 0.3;

SimpleQuaternion orientation = this->getOrientation(orientations[1], 0, 0, -90);

geometry_msgs::Pose initialPose;

initialPose.position.x = position[0];
initialPose.position.y = position[1];
initialPose.position.z = position[2];

initialPose.orientation.w = orientation.w();
initialPose.orientation.x = orientation.x();
initialPose.orientation.y = orientation.y();
initialPose.orientation.z = orientation.z();
asr_next_best_view::GetNextBestView getNBV;

this->setInitialPose(initialPose, NBV);

emptyViewportsClient.call(empty);

NBV->processSetPointCloudServiceCall(apc.request, apc.response);

getNBV.request.current_pose = initialPose;
NBV->processGetNextBestViewServiceCall(getNBV.request, getNBV.response);

asr_next_best_view::TriggerFrustumVisualization tfv;
tfv.request.current_pose = getNBV.response.viewport.pose;
NBV->processTriggerFrustumVisualization(tfv.request, tfv.response);
float robotX = getNBV.response.viewport.pose.position.x;
float robotY = getNBV.response.viewport.pose.position.y;

float distanceToPC = sqrt(pow(robotX - hp[1](0), 2) + pow(robotY - hp[1](1), 2));
if (sqrt(pow(robotX - hp[0](0), 2) + pow(robotY - hp[0](1), 2)) < sqrt(pow(robotX - hp[1](0), 2) + pow(robotY - hp[1](1), 2))) {
    ROS_INFO("Robot chooses 1st object point cloud.");
    distanceToPC = sqrt(pow(robotX - hp[0](0), 2) + pow(robotY - hp[0](1), 2));
} else {
    ROS_INFO("Robot chooses 2nd object point cloud.");
}
ROS_INFO("Distance to point cloud: %f", distanceToPC);

ROS_INFO("To run another test, enter a distance factor between 0 and 1 or press ENTER to run the test again with the same distance factor.");
ROS_INFO("Type <quit> or whatever to exit.");

std::getline(std::cin, input);
if (input.compare("\n") != 0) {
    try {
        tempDistance = boost::lexical_cast<double>(input);
        if (tempDistance >= 0.0 && tempDistance <= 1.0)
        {
            distance = tempDistance;
        }
        else
        {
            ROS_ERROR("Distance must be between 0.0 and 1.0!");
            continueTest = false;
        }
    }
    catch (boost::bad_lexical_cast &) {
        continueTest = false;
    }
}
// ...

mNodeHandle.param("positions/test", doTest, true);
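// Positions test: a fixed point cloud of three objects is combined with three different
// initial robot poses; for each pose the point cloud is re-sent, a next-best-view is
// requested and the resulting frustum is visualized.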
std::vector<std::string> elementTypes;
if (!this->getParameter("positions/element_types", elementTypes))
    return;

std::vector<double> elementOrientationList;
if (!this->getParameter("positions/element_orientation", elementOrientationList))
    return;

asr_world_model::EmptyViewportList empty;

ros::ServiceClient emptyViewportsClient =
        mGlobalNodeHandle.serviceClient<asr_world_model::EmptyViewportList>("/env/asr_world_model/empty_viewport_list");
unsigned int pcSize = 3;

if (pcSize != elementTypes.size()) {
    // ...
}

// ...

unsigned int robotPoses = 3;

// ...

robotOrientations[2] = this->getOrientation(pcOrientations[0], 0, 0, 180);
robotOrientations[1] = this->getOrientation(pcOrientations[1], 0, 0, 180);
robotOrientations[0] = this->getOrientation(pcOrientations[2], 0, 0, 180);
for (unsigned int i = 0; i < robotPoses; i++) {
    SetAttributedPointCloud apc;

    // ...

    for (unsigned int j = 0; j < pcSize; j++) {
        SimpleQuaternion q = this->getOrientation(pcOrientations[j], elementOrientationList[0],
                                                  elementOrientationList[1], elementOrientationList[2]);

        asr_msgs::AsrAttributedPoint element;

        geometry_msgs::Pose pose;
        pose.position.x = hp[j][0];
        pose.position.y = hp[j][1];
        pose.position.z = hp[j][2];
        pose.orientation.w = q.w();
        pose.orientation.x = q.x();
        pose.orientation.y = q.y();
        pose.orientation.z = q.z();

        element.type = elementTypes[j];
        element.pose = pose;

        apc.request.point_cloud.elements.push_back(element);
    }
    geometry_msgs::Pose initialPose;

    initialPose.position.x = rp[i][0];
    initialPose.position.y = rp[i][1];
    initialPose.position.z = rp[i][2];

    initialPose.orientation.w = robotOrientations[i].w();
    initialPose.orientation.x = robotOrientations[i].x();
    initialPose.orientation.y = robotOrientations[i].y();
    initialPose.orientation.z = robotOrientations[i].z();

    this->setInitialPose(initialPose, NBV);

    asr_next_best_view::GetNextBestView getNBV;

    emptyViewportsClient.call(empty);

    NBV->processSetPointCloudServiceCall(apc.request, apc.response);

    getNBV.request.current_pose = initialPose;
    NBV->processGetNextBestViewServiceCall(getNBV.request, getNBV.response);

    if (!getNBV.response.found) {
        // ...
    }

    asr_next_best_view::TriggerFrustumVisualization tfv;
    tfv.request.current_pose = getNBV.response.viewport.pose;
    NBV->processTriggerFrustumVisualization(tfv.request, tfv.response);

    // ...
}
// ...

mNodeHandle.param("side_objects/test", doTest, true);
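// Side object test: a single object type is first placed face-to-face to the robot and then
// translated sideways around it in angular steps; for every step the difference between the
// initial robot state and the next-best-view result (pan, rotation, x, y) is written to
// testSideObject.dat.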
ROS_INFO("Running side object test");

asr_next_best_view::GetNextBestView getNBV;
asr_world_model::EmptyViewportList empty;
ros::ServiceClient emptyViewportsClient =
        mGlobalNodeHandle.serviceClient<asr_world_model::EmptyViewportList>("/env/asr_world_model/empty_viewport_list");

std::ofstream outputFile(mOutputPath + "testSideObject.dat");
if (outputFile.is_open()) {
    outputFile << "#At first, object is located face-to-face to the robot and is then translated sideways around the robot\n";
    outputFile << "#Translation angle\tDifference pan\t\tDifference rotation\tDifference x\tDifference y\n";
}
std::string elementType;
if (!this->getParameter("side_objects/element_type", elementType))
    return;

typeSet->insert(elementType);

if (!this->getParameter("side_objects/cloud_size", cloudSize))
    return;

std::vector<double> initialPoseList;
if (!this->getParameter("side_objects/initial_pose", initialPoseList))
    return;

std::vector<double> elementOrientationList;
if (!this->getParameter("side_objects/element_orientation", elementOrientationList))
    return;

std::vector<double> elementDeviationList;
if (!this->getParameter("side_objects/element_deviation", elementDeviationList))
    return;
SimpleVector3 elementDeviation(elementDeviationList[0], elementDeviationList[1], elementDeviationList[2]);

if (!this->getParameter("side_objects/step_size", stepSize))
    return;

if (!this->getParameter("side_objects/step_number", stepNumber))
    return;

if (!this->getParameter("ncp", ncp))
    return;

if (!this->getParameter("fcp", fcp))
    return;
geometry_msgs::Pose initialPose;

initialPose.position.x = initialPoseList[0];
initialPose.position.y = initialPoseList[1];
initialPose.position.z = initialPoseList[2];
initialPose.orientation.x = initialPoseList[3];
initialPose.orientation.y = initialPoseList[4];
initialPose.orientation.z = initialPoseList[5];
initialPose.orientation.w = initialPoseList[6];

this->setInitialPose(initialPose, NBV);
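// For every step the object cloud is rebuilt at an increasing angular offset around the robot,
// the robot state matching the initial pose is computed with the exact-IK robot model, and the
// next-best-view result is compared against it.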
for (int i = 0; i < stepNumber; i++)
{
    int stepOffset = i * stepSize;

    SetAttributedPointCloud apc;

    robot_model_services::MILDRobotModelWithExactIK robot;
    SimpleVector3 position(initialPose.position.x, initialPose.position.y, initialPose.position.z);
    SimpleQuaternion orientation(initialPose.orientation.w, initialPose.orientation.x, initialPose.orientation.y, initialPose.orientation.z);
    robot_model_services::MILDRobotStatePtr robotStatePtr(new robot_model_services::MILDRobotState());
    robotStatePtr = boost::static_pointer_cast<robot_model_services::MILDRobotState>(robot.calculateRobotState(robotStatePtr, position, orientation));

    ROS_INFO_STREAM("Set point cloud consisting of one object type with " << cloudSize << " occurrences.");
    for (int j = 0; j < cloudSize; j++)
    {
        Eigen::Matrix<Precision, 3, 3> stepRotation;
        stepRotation = Eigen::AngleAxis<Precision>(0.0, SimpleVector3::UnitX())
                       * Eigen::AngleAxis<Precision>(0.0, SimpleVector3::UnitY())
                       * Eigen::AngleAxis<Precision>(stepOffset * M_PI / 180, SimpleVector3::UnitZ());
        SimpleVector3 elementPosition = position + (ncp + fcp) / 2 * stepRotation * MathHelper::getVisualAxis(orientation);

        if (elementDeviation[0] == 0 && elementDeviation[1] == 0 && elementDeviation[2] == 0)
        {
            randomVector = elementPosition;
        }
        else
        {
            int8_t occupancyValue;
            // ...
            randomVector = MathHelper::getRandomVector(elementPosition, elementDeviation);
            // ...
        }

        Eigen::Matrix<Precision, 3, 3> elementRotation;
        elementRotation = Eigen::AngleAxis<Precision>(elementOrientationList[0] * M_PI / 180, SimpleVector3::UnitX())
                          * Eigen::AngleAxis<Precision>(elementOrientationList[1] * M_PI / 180, SimpleVector3::UnitY())
                          * Eigen::AngleAxis<Precision>(elementOrientationList[2] * M_PI / 180, SimpleVector3::UnitZ());
        SimpleQuaternion elementOrientation(orientation.toRotationMatrix() * stepRotation * elementRotation);

        geometry_msgs::Pose pose;
        pose.position.x = randomVector[0];
        pose.position.y = randomVector[1];
        pose.position.z = randomVector[2];
        pose.orientation.x = elementOrientation.x();
        pose.orientation.y = elementOrientation.y();
        pose.orientation.z = elementOrientation.z();
        pose.orientation.w = elementOrientation.w();

        asr_msgs::AsrAttributedPoint element;
        element.type = elementType;
        element.pose = pose;

        apc.request.point_cloud.elements.push_back(element);
        // ...

        objectPoint.color.r = 0;
        objectPoint.color.g = 255;
        objectPoint.color.b = 0;
        objectPoint.type = elementType;

        objectPointCloudPtr->push_back(objectPoint);
    }
    emptyViewportsClient.call(empty);

    NBV->processSetPointCloudServiceCall(apc.request, apc.response);

    getNBV.request.current_pose = initialPose;

    ROS_INFO("Asking for next best view");
    NBV->processGetNextBestViewServiceCall(getNBV.request, getNBV.response);

    if (!getNBV.response.found)
    {
        // ...
    }

    asr_next_best_view::TriggerFrustumVisualization tfv;
    tfv.request.current_pose = getNBV.response.viewport.pose;
    NBV->processTriggerFrustumVisualization(tfv.request, tfv.response);

    SimpleVector3 pos(initialPose.position.x, initialPose.position.y, initialPose.position.z);
    SimpleQuaternion q(initialPose.orientation.w, initialPose.orientation.x, initialPose.orientation.y, initialPose.orientation.z);
    // ...

    ROS_INFO_STREAM("Original state: pan " << robotStatePtr->pan << " tilt " << robotStatePtr->tilt
                    << " rotation " << robotStatePtr->rotation
                    << " x " << robotStatePtr->x << " y " << robotStatePtr->y);

    if (nbvCalc.getRatingModule()->setSingleScoreContainer(viewport, viewport))
        ROS_INFO_STREAM("Original viewport: utility " << viewport.score->getWeightedNormalizedUtility()
                        << " inverseCosts " << viewport.score->getWeightedInverseCosts()
                        << " rating " << viewport.score->getWeightedNormalizedUtility() * viewport.score->getWeightedInverseCosts());
    else
        ROS_INFO("Original viewport utility <= 0");

    asr_next_best_view::TriggerFrustumVisualization triggerFrustumVis;
    triggerFrustumVis.request.current_pose = viewport.getPose();
    NBV->processTriggerOldFrustumVisualization(triggerFrustumVis.request, triggerFrustumVis.response);
    ROS_INFO_STREAM("Changes: pan " << getNBV.response.robot_state.pan << " tilt " << getNBV.response.robot_state.tilt
                    << " rotation " << getNBV.response.robot_state.rotation
                    << " x " << getNBV.response.robot_state.x << " y " << getNBV.response.robot_state.y);
    ROS_INFO_STREAM("Resulting viewport: utility " << getNBV.response.utility << " inverseCosts " << getNBV.response.inverse_costs
                    << " rating " << getNBV.response.utility * getNBV.response.inverse_costs);

    double panDiff = std::abs(robotStatePtr->pan - getNBV.response.robot_state.pan);
    double rotDiff = std::abs(robotStatePtr->rotation - getNBV.response.robot_state.rotation);
    double xDiff = std::abs(robotStatePtr->x - getNBV.response.robot_state.x);
    double yDiff = std::abs(robotStatePtr->y - getNBV.response.robot_state.y);

    outputFile << stepOffset << "\t\t\t\t" << panDiff << "\t\t" << rotDiff << "\t\t" << xDiff << "\t\t" << yDiff << " \n";

    // ...
}
template<typename T>
bool getParameter(const std::string &key, T &parameter)
{
    if (!mNodeHandle.getParam(key, parameter))
    {
        // ...
        return false;
    }
    return true;
}
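// Applies the given Euler-angle offsets (in degrees, X-Y-Z order) to an orientation and
// returns the resulting quaternion.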
SimpleQuaternion getOrientation(SimpleQuaternion originalOrientation, double alpha, double beta, double gamma)
{
    Eigen::Matrix<Precision, 3, 3> rotation;
    rotation = Eigen::AngleAxis<Precision>(alpha * M_PI / 180, SimpleVector3::UnitX())
               * Eigen::AngleAxis<Precision>(beta * M_PI / 180, SimpleVector3::UnitY())
               * Eigen::AngleAxis<Precision>(gamma * M_PI / 180, SimpleVector3::UnitZ());

    SimpleQuaternion result;
    result = originalOrientation.toRotationMatrix() * rotation;
    return result;
}
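// Builds an AsrAttributedPoint for the configurable point cloud test: the position is randomized
// around the given hotspot, the orientation combines a random deviation around the z-axis with the
// per-type orientation offsets, and the object type cycles through objectTypeNames.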
asr_msgs::AsrAttributedPoint getAttributedPoint(SimpleVector3 position, SimpleVector3 positionDeviation,
                                                SimpleQuaternion orientation, float orientationDeviation,
                                                std::vector<std::string> objectTypeNames,
                                                std::map<std::string, std::vector<double>> typeToOrientation,
                                                int iteration)
{
    int8_t occupancyValue;
    // ...
    randomVector = MathHelper::getRandomVector(position, positionDeviation);
    // ...

    std::vector<double> typeOrientation = typeToOrientation[objectTypeNames[iteration % objectTypeNames.size()]];
    float randomDeviation = MathHelper::getRandomNumber(0, orientationDeviation);
    SimpleQuaternion randomOrientation = this->getOrientation(orientation, 0.0, 0.0, randomDeviation);
    SimpleQuaternion quaternion = this->getOrientation(randomOrientation, typeOrientation[0], typeOrientation[1], typeOrientation[2]);

    asr_msgs::AsrAttributedPoint element;

    geometry_msgs::Pose pose;
    pose.orientation.w = quaternion.w();
    pose.orientation.x = quaternion.x();
    pose.orientation.y = quaternion.y();
    pose.orientation.z = quaternion.z();
    pose.position.x = randomVector[0];
    pose.position.y = randomVector[1];
    pose.position.z = randomVector[2];

    element.type = objectTypeNames[iteration % objectTypeNames.size()];
    element.pose = pose;

    return element;
}
test_suite* evaluation = BOOST_TEST_SUITE("Evaluation NBV with different scenes");

// ...

framework::master_test_suite().add(evaluation);