ParametersTest.cpp
#define BOOST_TEST_STATIC_LINK

#include <boost/test/included/unit_test.hpp>
#include "asr_world_model/EmptyViewportList.h"
#include <robot_model_services/robot_model/impl/MILDRobotModel.hpp>
#include <robot_model_services/robot_model/impl/MILDRobotModelWithExactIK.hpp>
#include <robot_model_services/robot_model/impl/MILDRobotState.hpp>
#include "typedef.hpp"

using namespace asr_next_best_view;
using namespace boost::unit_test;

/**
 * The ParametersTest class is a test class to check the NBV parameters.
 */
class ParametersTest : public BaseTest
{
private:
    ros::NodeHandle mNodeHandle;

    std::string mOutputPath;

public:
    ParametersTest() : BaseTest(false, false), mNodeHandle("~") {
        mNodeHandle.param("output_path", mOutputPath, std::string(""));
        ROS_INFO_STREAM("output_path: " << mOutputPath);
    }

    virtual ~ParametersTest() {}

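    /**
     * Builds two configurable object point clouds, places the robot between them according to a
     * distance factor, and reports which of the two clouds the computed next best view targets.
     */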
    void configurablePointCloudsTest() {
        bool doTest = true;
        mNodeHandle.param("configurable_point_clouds/test", doTest, true);

        if (!doTest)
            return;

        ROS_INFO("Running configurable point cloud test");

        // get parameters
        std::vector<std::string> objectTypeNames1, objectTypeNames2;
        if (!this->getParameter("configurable_point_clouds/object_types_1", objectTypeNames1))
            return;
        if (!this->getParameter("configurable_point_clouds/object_types_2", objectTypeNames2))
            return;

        int elements1, elements2;
        if (!this->getParameter("configurable_point_clouds/elements_1", elements1))
            return;
        if (!this->getParameter("configurable_point_clouds/elements_2", elements2))
            return;

        // between 0.0 (at first point cloud) and 1.0 (at second point cloud)
        double distance;
        if (!this->getParameter("configurable_point_clouds/distance", distance))
            return;

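        // The test loops interactively: after each run the user can enter a new distance factor
        // (or press ENTER to reuse the current one) and the scenario is evaluated again.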
        bool continueTest = true;
        while (continueTest)
        {
            ROS_INFO_STREAM("Running test with distance at " << distance);
            std::map<std::string, std::vector<double>> typeToOrientation;
            for (unsigned int i = 0; i < objectTypeNames1.size(); i++)
            {
                std::string objectType = objectTypeNames1[i];
                std::string parameter = "configurable_point_clouds/" + objectType + "_orientation";
                typeToOrientation[objectType] = std::vector<double>();
                if (!this->getParameter(parameter, typeToOrientation[objectType]))
                    return;
            }
            for (unsigned int i = 0; i < objectTypeNames2.size(); i++)
            {
                std::string objectType = objectTypeNames2[i];
                if (std::find(objectTypeNames1.begin(), objectTypeNames1.end(), objectType) == objectTypeNames1.end()) {
                    std::string parameter = "configurable_point_clouds/" + objectType + "_orientation";
                    typeToOrientation[objectType] = std::vector<double>();
                    if (!this->getParameter(parameter, typeToOrientation[objectType]))
                        return;
                }
            }

            std::vector<double> posDeviation1List, posDeviation2List;
            if (!this->getParameter("configurable_point_clouds/position_deviation_1", posDeviation1List))
                return;
            if (!this->getParameter("configurable_point_clouds/position_deviation_2", posDeviation2List))
                return;
            SimpleVector3 posDeviation1(posDeviation1List[0], posDeviation1List[1], posDeviation1List[2]);
            SimpleVector3 posDeviation2(posDeviation2List[0], posDeviation2List[1], posDeviation2List[2]);

            float orientationDeviation1, orientationDeviation2;
            if (!this->getParameter("configurable_point_clouds/orientation_deviation_1", orientationDeviation1))
                return;
            if (!this->getParameter("configurable_point_clouds/orientation_deviation_2", orientationDeviation2))
                return;

            asr_world_model::EmptyViewportList empty;
            ros::service::waitForService("/env/asr_world_model/empty_viewport_list", -1);
            ros::ServiceClient emptyViewportsClient = mGlobalNodeHandle.serviceClient<asr_world_model::EmptyViewportList>("/env/asr_world_model/empty_viewport_list");

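            // The two point clouds are centred at fixed map positions and face opposite directions:
            // the first orientation is the second one rotated by 180 degrees about the z-axis.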
            ROS_INFO("Generating point cloud");

            int hpSize = 2;
            SimpleVector3* hp = new SimpleVector3[hpSize];
            hp[0] = SimpleVector3(30.5, 5.8, 1.32);
            hp[1] = SimpleVector3(32.6, 4.5, 1.32);

            SimpleQuaternion* orientations = new SimpleQuaternion[hpSize];
            orientations[1] = SimpleQuaternion(0.967, 0.000, 0.000, -0.256);
            orientations[0] = this->getOrientation(orientations[1], 0, 0, 180);

            SetAttributedPointCloud apc;

            // create first point cloud
            for (int i = 0; i < elements1; i++) {
                asr_msgs::AsrAttributedPoint element = this->getAttributedPoint(hp[0], posDeviation1, orientations[0], orientationDeviation1,
                                                                                objectTypeNames1, typeToOrientation, i);

                apc.request.point_cloud.elements.push_back(element);
            }

            // create second point cloud
            for (int i = 0; i < elements2; i++) {
                asr_msgs::AsrAttributedPoint element = this->getAttributedPoint(hp[1], posDeviation2, orientations[1], orientationDeviation2,
                                                                                objectTypeNames2, typeToOrientation, i);

                apc.request.point_cloud.elements.push_back(element);
            }

            ROS_INFO("Setting initial pose");
            // interpolate the robot position between the two point cloud centres
            SimpleVector3 position;
            position[0] = hp[0][0] + distance * (hp[1][0] - hp[0][0]) + 0.2;
            position[1] = hp[0][1] + distance * (hp[1][1] - hp[0][1]) + 0.3;
            position[2] = 1.32;

            SimpleQuaternion orientation = this->getOrientation(orientations[1], 0, 0, -90);

            geometry_msgs::Pose initialPose;

            // pose between the two point cloud positions
            initialPose.position.x = position[0];
            initialPose.position.y = position[1];
            initialPose.position.z = position[2];

            initialPose.orientation.w = orientation.w();
            initialPose.orientation.x = orientation.x();
            initialPose.orientation.y = orientation.y();
            initialPose.orientation.z = orientation.z();

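            // Reset the world model viewports, hand the generated cloud to the NBV node and
            // request the next best view from the interpolated start pose.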
            asr_next_best_view::GetNextBestView getNBV;

            this->setInitialPose(initialPose, NBV);

            emptyViewportsClient.call(empty);

            NBV->processSetPointCloudServiceCall(apc.request, apc.response);

            getNBV.request.current_pose = initialPose;
            NBV->processGetNextBestViewServiceCall(getNBV.request, getNBV.response);

            asr_next_best_view::TriggerFrustumVisualization tfv;
            tfv.request.current_pose = getNBV.response.viewport.pose;
            NBV->processTriggerFrustumVisualization(tfv.request, tfv.response);

            float robotX = getNBV.response.viewport.pose.position.x;
            float robotY = getNBV.response.viewport.pose.position.y;

            float distanceToPC1 = sqrt(pow(robotX - hp[0](0), 2) + pow(robotY - hp[0](1), 2));
            float distanceToPC2 = sqrt(pow(robotX - hp[1](0), 2) + pow(robotY - hp[1](1), 2));
            if (distanceToPC1 < distanceToPC2) {
                ROS_INFO("Robot chooses 1st object point cloud.");
            }
            else {
                ROS_INFO("Robot chooses 2nd object point cloud.");
            }
            ROS_INFO("Distance to the 2nd point cloud: %f", distanceToPC2);

            ROS_INFO("To run another test, enter a distance factor between 0 and 1 or press ENTER to run the test again with the same distance factor.");
            ROS_INFO("Enter anything else to exit.");
            std::string input;
            std::getline(std::cin, input);
            // std::getline strips the trailing newline, so a plain ENTER yields an empty string
            if (!input.empty()) {
                try {
                    double tempDistance = boost::lexical_cast<double>(input);
                    if (tempDistance >= 0.0 && tempDistance <= 1.0)
                    {
                        distance = tempDistance;
                    }
                    else
                    {
                        ROS_ERROR("Distance must be between 0.0 and 1.0!");
                        continueTest = false;
                    }
                } catch (boost::bad_lexical_cast &) {
                    ROS_ERROR("Invalid number!");
                    continueTest = false;
                }
            }
        }
    }

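    /**
     * Builds a fixed three-object point cloud and computes the next best view from three
     * different robot start poses in turn, visualizing the resulting frustum for each of them.
     */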
    void positionsTest() {
        bool doTest = true;
        mNodeHandle.param("positions/test", doTest, true);

        if (!doTest)
            return;

        ROS_INFO("Running positions test");

        ROS_INFO("Getting parameters");
        std::vector<std::string> elementTypes;
        if (!this->getParameter("positions/element_types", elementTypes))
            return;

        std::vector<double> elementOrientationList;
        if (!this->getParameter("positions/element_orientation", elementOrientationList))
            return;

        asr_world_model::EmptyViewportList empty;
        ros::service::waitForService("/env/asr_world_model/empty_viewport_list", -1);
        ros::ServiceClient emptyViewportsClient = mGlobalNodeHandle.serviceClient<asr_world_model::EmptyViewportList>("/env/asr_world_model/empty_viewport_list");

        // object poses
        unsigned int pcSize = 3;

        if (pcSize != elementTypes.size()) {
            ROS_ERROR_STREAM(pcSize << " element types needed");
            return;
        }

        SimpleVector3* hp = new SimpleVector3[pcSize];
        hp[0] = SimpleVector3(25.742, 8.607, 1.32);
        hp[1] = SimpleVector3(24.041, 10.947, 1.32);
        hp[2] = SimpleVector3(25.902, 11.025, 1.32);

        SimpleQuaternion* pcOrientations = new SimpleQuaternion[pcSize];
        pcOrientations[0] = SimpleQuaternion(0.256, 0.000, 0.000, 0.967);
        pcOrientations[1] = SimpleQuaternion(0.960, 0.000, 0.000, -0.281);
        pcOrientations[2] = SimpleQuaternion(-0.483, 0.000, 0.000, 0.875);

        // robot poses
        unsigned int robotPoses = 3;

        SimpleVector3* rp = new SimpleVector3[robotPoses];
        rp[2] = SimpleVector3(24.288, 9.430, 1.32);
        rp[1] = SimpleVector3(24.991, 10.441, 1.32);
        rp[0] = SimpleVector3(25.294, 9.906, 1.32);

        SimpleQuaternion* robotOrientations = new SimpleQuaternion[robotPoses];
        robotOrientations[2] = this->getOrientation(pcOrientations[0], 0, 0, 180);
        robotOrientations[1] = this->getOrientation(pcOrientations[1], 0, 0, 180);
        robotOrientations[0] = this->getOrientation(pcOrientations[2], 0, 0, 180);

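        // One evaluation per robot start pose: rebuild the point cloud, set the pose,
        // request the next best view and visualize its frustum.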
        for (unsigned int i = 0; i < robotPoses; i++) {
            SetAttributedPointCloud apc;

            // create point cloud
            ROS_INFO("Generate point cloud");
            for (unsigned int j = 0; j < pcSize; j++) {
                SimpleQuaternion q = this->getOrientation(pcOrientations[j], elementOrientationList[0],
                                                          elementOrientationList[1], elementOrientationList[2]);

                asr_msgs::AsrAttributedPoint element;

                geometry_msgs::Pose pose;
                pose.position.x = hp[j][0];
                pose.position.y = hp[j][1];
                pose.position.z = hp[j][2];
                pose.orientation.w = q.w();
                pose.orientation.x = q.x();
                pose.orientation.y = q.y();
                pose.orientation.z = q.z();

                element.type = elementTypes[j];
                element.pose = pose;

                apc.request.point_cloud.elements.push_back(element);
            }

            // set robot pose
            ROS_INFO("Set robot pose");
            geometry_msgs::Pose initialPose;

            // pose halfway between CeylonTea and CoffeeFilters
            initialPose.position.x = rp[i][0];
            initialPose.position.y = rp[i][1];
            initialPose.position.z = rp[i][2];

            initialPose.orientation.w = robotOrientations[i].w();
            initialPose.orientation.x = robotOrientations[i].x();
            initialPose.orientation.y = robotOrientations[i].y();
            initialPose.orientation.z = robotOrientations[i].z();

            this->setInitialPose(initialPose, NBV);

            // calculate NBV
            asr_next_best_view::GetNextBestView getNBV;

            emptyViewportsClient.call(empty);

            NBV->processSetPointCloudServiceCall(apc.request, apc.response);

            getNBV.request.current_pose = initialPose;
            NBV->processGetNextBestViewServiceCall(getNBV.request, getNBV.response);

            if (!getNBV.response.found) {
                ROS_ERROR("No NBV found!");
            } else {
                asr_next_best_view::TriggerFrustumVisualization tfv;
                tfv.request.current_pose = getNBV.response.viewport.pose;
                NBV->processTriggerFrustumVisualization(tfv.request, tfv.response);
            }

            ros::spinOnce();
            waitForEnter();
            ros::Duration(2).sleep();
        }
    }

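    /**
     * Moves a small object cluster stepwise sideways around the robot and, for each step,
     * compares the robot state computed for the unchanged start pose with the robot state
     * returned by the next-best-view calculation, writing the differences to a .dat file.
     */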
    void sideObjectTest()
    {
        bool doTest = true;
        mNodeHandle.param("side_objects/test", doTest, true);

        if (!doTest)
            return;

        ROS_INFO("Running side object test");

        asr_next_best_view::GetNextBestView getNBV;
        asr_world_model::EmptyViewportList empty;
        ros::ServiceClient emptyViewportsClient = mGlobalNodeHandle.serviceClient<asr_world_model::EmptyViewportList>("/env/asr_world_model/empty_viewport_list");

        std::ofstream outputFile(mOutputPath + "testSideObject.dat");
        if (outputFile.is_open()) {
            outputFile << "#At first, object is located face-to-face to the robot and is then translated sideways around the robot\n";
            outputFile << "#Translation angle\tDifference pan\t\tDifference rotation\tDifference x\tDifference y\n";
        }

        ROS_INFO("Getting parameters");
        std::string elementType;
        if (!this->getParameter("side_objects/element_type", elementType))
            return;
        ObjectTypeSetPtr typeSet(new ObjectTypeSet());
        typeSet->insert(elementType);

        int cloudSize;
        if (!this->getParameter("side_objects/cloud_size", cloudSize))
            return;

        std::vector<double> initialPoseList;
        if (!this->getParameter("side_objects/initial_pose", initialPoseList))
            return;

        std::vector<double> elementOrientationList;
        if (!this->getParameter("side_objects/element_orientation", elementOrientationList))
            return;

        std::vector<double> elementDeviationList;
        if (!this->getParameter("side_objects/element_deviation", elementDeviationList))
            return;
        SimpleVector3 elementDeviation(elementDeviationList[0], elementDeviationList[1], elementDeviationList[2]);

        int stepSize;
        if (!this->getParameter("side_objects/step_size", stepSize))
            return;

        int stepNumber;
        if (!this->getParameter("side_objects/step_number", stepNumber))
            return;

        double ncp;
        if (!this->getParameter("ncp", ncp))
            return;

        double fcp;
        if (!this->getParameter("fcp", fcp))
            return;

        ROS_INFO("Set initial pose");

        geometry_msgs::Pose initialPose;

        initialPose.position.x = initialPoseList[0];
        initialPose.position.y = initialPoseList[1];
        initialPose.position.z = initialPoseList[2];
        initialPose.orientation.x = initialPoseList[3];
        initialPose.orientation.y = initialPoseList[4];
        initialPose.orientation.z = initialPoseList[5];
        initialPose.orientation.w = initialPoseList[6];

        this->setInitialPose(initialPose, NBV);

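        // Each step rotates the object cluster by a further stepSize degrees around the robot,
        // placing it at (ncp + fcp) / 2 along the rotated view axis, and re-runs the NBV calculation.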
        for (int i = 0; i < stepNumber; i++)
        {
            ROS_INFO_STREAM("Step " << i << " of side object test");

            int stepOffset = i * stepSize;

            SetAttributedPointCloud apc;

            robot_model_services::MILDRobotModelWithExactIK robot;
            SimpleVector3 position(initialPose.position.x, initialPose.position.y, initialPose.position.z);
            SimpleQuaternion orientation(initialPose.orientation.w, initialPose.orientation.x, initialPose.orientation.y, initialPose.orientation.z);
            robot_model_services::MILDRobotStatePtr robotStatePtr(new robot_model_services::MILDRobotState());
            robotStatePtr = boost::static_pointer_cast<robot_model_services::MILDRobotState>(robot.calculateRobotState(robotStatePtr, position, orientation));

            ROS_INFO_STREAM("Set point cloud consisting of one object type with " << cloudSize << " occurrences.");

            ObjectPointCloudPtr objectPointCloudPtr = ObjectPointCloudPtr(new ObjectPointCloud());

            for (int j = 0; j < cloudSize; j++)
            {
                // set element position
                Eigen::Matrix<Precision, 3, 3> stepRotation;
                stepRotation = Eigen::AngleAxis<Precision>(0.0, SimpleVector3::UnitX())
                             * Eigen::AngleAxis<Precision>(0.0, SimpleVector3::UnitY())
                             * Eigen::AngleAxis<Precision>(stepOffset * M_PI / 180, SimpleVector3::UnitZ());
                SimpleVector3 elementPosition = position + (ncp + fcp) / 2 * stepRotation * MathHelper::getVisualAxis(orientation);

                SimpleVector3 randomVector;
                if (elementDeviation[0] == 0 && elementDeviation[1] == 0 && elementDeviation[2] == 0)
                {
                    randomVector = elementPosition;
                }
                else
                {
                    int8_t occupancyValue;
                    do {
                        randomVector = MathHelper::getRandomVector(elementPosition, elementDeviation);
                        occupancyValue = mMapHelper.getRaytracingMapOccupancyValue(randomVector);
                    } while (!mMapHelper.isOccupancyValueAcceptable(occupancyValue));
                }

                // set element orientation
                Eigen::Matrix<Precision, 3, 3> elementRotation;
                elementRotation = Eigen::AngleAxis<Precision>(elementOrientationList[0] * M_PI / 180, SimpleVector3::UnitX())
                                * Eigen::AngleAxis<Precision>(elementOrientationList[1] * M_PI / 180, SimpleVector3::UnitY())
                                * Eigen::AngleAxis<Precision>(elementOrientationList[2] * M_PI / 180, SimpleVector3::UnitZ());
                SimpleQuaternion elementOrientation(orientation.toRotationMatrix() * stepRotation * elementRotation);

                geometry_msgs::Pose pose;
                pose.position.x = randomVector[0];
                pose.position.y = randomVector[1];
                pose.position.z = randomVector[2];
                pose.orientation.x = elementOrientation.x();
                pose.orientation.y = elementOrientation.y();
                pose.orientation.z = elementOrientation.z();
                pose.orientation.w = elementOrientation.w();

                asr_msgs::AsrAttributedPoint element;
                element.type = elementType;
                element.pose = pose;

                apc.request.point_cloud.elements.push_back(element);

                ObjectPoint objectPoint(pose);
                objectPoint.color.r = 0;
                objectPoint.color.g = 255;
                objectPoint.color.b = 0;
                objectPoint.type = elementType;

                objectPointCloudPtr->push_back(objectPoint);
            }

            emptyViewportsClient.call(empty);

            NBV->processSetPointCloudServiceCall(apc.request, apc.response);

            getNBV.request.current_pose = initialPose;

            ROS_INFO("Asking for next best view");
            NBV->processGetNextBestViewServiceCall(getNBV.request, getNBV.response);

            if (!getNBV.response.found)
            {
                ROS_ERROR("No next best view found!");
                continue;
            }

            ROS_INFO("Got next best view");

            asr_next_best_view::TriggerFrustumVisualization tfv;
            tfv.request.current_pose = getNBV.response.viewport.pose;
            NBV->processTriggerFrustumVisualization(tfv.request, tfv.response);
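
            // Rate the viewport at the unchanged start pose as a reference: frustum-cull the
            // active point cloud from there and run the rating module on the resulting viewport.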
            SimpleVector3 pos(initialPose.position.x, initialPose.position.y, initialPose.position.z);
            SimpleQuaternion q(initialPose.orientation.w, initialPose.orientation.x, initialPose.orientation.y, initialPose.orientation.z);

            NextBestViewCalculator nbvCalc = NBV->getCalculator();
            ViewportPoint viewport;
            nbvCalc.doFrustumCulling(pos, q, nbvCalc.getActiveIndices(), viewport);

            viewport.object_type_set = typeSet;

            ROS_INFO_STREAM("Original state: pan " << robotStatePtr->pan << " tilt " << robotStatePtr->tilt
                            << " rotation " << robotStatePtr->rotation
                            << " x " << robotStatePtr->x << " y " << robotStatePtr->y);
            if (nbvCalc.getRatingModule()->setSingleScoreContainer(viewport, viewport))
                ROS_INFO_STREAM("Original viewport: utility " << viewport.score->getWeightedNormalizedUtility()
                                << " inverseCosts " << viewport.score->getWeightedInverseCosts()
                                << " rating " << viewport.score->getWeightedNormalizedUtility() * viewport.score->getWeightedInverseCosts());
            else
                ROS_INFO("Original viewport utility <= 0");

            asr_next_best_view::TriggerFrustumVisualization triggerFrustumVis;
            triggerFrustumVis.request.current_pose = viewport.getPose();
            NBV->processTriggerOldFrustumVisualization(triggerFrustumVis.request, triggerFrustumVis.response);

            ROS_INFO_STREAM("Changes: pan " << getNBV.response.robot_state.pan << " tilt " << getNBV.response.robot_state.tilt
                            << " rotation " << getNBV.response.robot_state.rotation
                            << " x " << getNBV.response.robot_state.x << " y " << getNBV.response.robot_state.y);
            ROS_INFO_STREAM("Resulting viewport: utility " << getNBV.response.utility << " inverseCosts " << getNBV.response.inverse_costs
                            << " rating " << getNBV.response.utility * getNBV.response.inverse_costs);

            double panDiff = std::abs(robotStatePtr->pan - getNBV.response.robot_state.pan);
            double rotDiff = std::abs(robotStatePtr->rotation - getNBV.response.robot_state.rotation);
            double xDiff = std::abs(robotStatePtr->x - getNBV.response.robot_state.x);
            double yDiff = std::abs(robotStatePtr->y - getNBV.response.robot_state.y);

            outputFile << stepOffset << "\t\t\t\t" << panDiff << "\t\t" << rotDiff << "\t\t" << xDiff << "\t\t" << yDiff << " \n";
        }

        outputFile.close();
    }

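    /**
     * Reads a parameter from the private node handle; logs an error and returns false if it is not set.
     */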
    template<typename T> bool getParameter(const std::string &key, T &parameter)
    {
        if (!mNodeHandle.getParam(key, parameter))
        {
            ROS_ERROR_STREAM(key << " parameter not set!");
            return false;
        }
        else
        {
            return true;
        }
    }

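    /**
     * Applies an additional rotation of alpha, beta and gamma degrees (about the x-, y- and
     * z-axis respectively) to originalOrientation and returns the resulting quaternion.
     */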
    SimpleQuaternion getOrientation(SimpleQuaternion originalOrientation, double alpha, double beta, double gamma)
    {
        Eigen::Matrix<Precision, 3, 3> rotation;
        rotation = Eigen::AngleAxis<Precision>(alpha * M_PI / 180, SimpleVector3::UnitX())
                 * Eigen::AngleAxis<Precision>(beta * M_PI / 180, SimpleVector3::UnitY())
                 * Eigen::AngleAxis<Precision>(gamma * M_PI / 180, SimpleVector3::UnitZ());
        SimpleQuaternion result;
        result = originalOrientation.toRotationMatrix() * rotation;
        return result;
    }

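    /**
     * Creates an attributed point for the point cloud: the position is sampled around the given
     * centre (rejecting samples whose map occupancy value is not acceptable), the orientation is
     * the given one with a random yaw offset plus the per-type orientation offset, and the object
     * type cycles through objectTypeNames with the iteration index.
     */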
    asr_msgs::AsrAttributedPoint getAttributedPoint(SimpleVector3 position, SimpleVector3 positionDeviation,
                                                    SimpleQuaternion orientation, float orientationDeviation,
                                                    std::vector<std::string> objectTypeNames,
                                                    std::map<std::string, std::vector<double>> typeToOrientation,
                                                    int iteration) {
        SimpleVector3 randomVector;
        int8_t occupancyValue;
        do {
            randomVector = MathHelper::getRandomVector(position, positionDeviation);
            occupancyValue = mMapHelper.getRaytracingMapOccupancyValue(randomVector);
        } while (!mMapHelper.isOccupancyValueAcceptable(occupancyValue));

        std::vector<double> typeOrientation = typeToOrientation[objectTypeNames[iteration % objectTypeNames.size()]];
        float randomDeviation = MathHelper::getRandomNumber(0, orientationDeviation);
        SimpleQuaternion randomOrientation = this->getOrientation(orientation, 0.0, 0.0, randomDeviation);
        SimpleQuaternion quaternion = this->getOrientation(randomOrientation, typeOrientation[0], typeOrientation[1], typeOrientation[2]);

        asr_msgs::AsrAttributedPoint element;

        geometry_msgs::Pose pose;
        pose.orientation.w = quaternion.w();
        pose.orientation.x = quaternion.x();
        pose.orientation.y = quaternion.y();
        pose.orientation.z = quaternion.z();
        pose.position.x = randomVector[0];
        pose.position.y = randomVector[1];
        pose.position.z = randomVector[2];

        element.type = objectTypeNames[iteration % objectTypeNames.size()];
        element.pose = pose;

        return element;
    }
};

test_suite* init_unit_test_suite(int argc, char* argv[])
{
    ros::init(argc, argv, "nbv");
    ros::start();

    test_suite* evaluation = BOOST_TEST_SUITE("Evaluation NBV with different scenes");

    boost::shared_ptr<ParametersTest> testPtr(new ParametersTest());

    evaluation->add(BOOST_CLASS_TEST_CASE(&ParametersTest::configurablePointCloudsTest, testPtr));
    evaluation->add(BOOST_CLASS_TEST_CASE(&ParametersTest::positionsTest, testPtr));
    evaluation->add(BOOST_CLASS_TEST_CASE(&ParametersTest::sideObjectTest, testPtr));

    framework::master_test_suite().add(evaluation);

    return 0;
}