#include <pcl/common/random.h>
#include <pcl/recognition/ransac_based/obj_rec_ransac.h>

using namespace std;
using namespace pcl::common;

pcl::recognition::ObjRecRANSAC::ObjRecRANSAC (float pair_width, float voxel_size)
: pair_width_ (pair_width),
  voxel_size_ (voxel_size),
  position_discretization_ (5.0f*voxel_size_),
  rotation_discretization_ (5.0f*AUX_DEG_TO_RADIANS),
  abs_zdist_thresh_ (1.5f*voxel_size_),
  relative_obj_size_ (0.05f),
  visibility_ (0.2f),
  relative_num_of_illegal_pts_ (0.02f),
  intersection_fraction_ (0.03f),
  max_coplanarity_angle_ (3.0f*AUX_DEG_TO_RADIANS),
  scene_bounds_enlargement_factor_ (0.25f),
  ignore_coplanar_opps_ (true),
  frac_of_points_for_icp_refinement_ (0.3f),
  do_icp_hypotheses_refinement_ (true),
  model_library_ (pair_width, voxel_size, max_coplanarity_angle_),
  rec_mode_ (ObjRecRANSAC::FULL_RECOGNITION)
{
}
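// Top-level recognition routine: builds the scene octree and its z-projection, samples oriented
// point pairs, generates and groups hypotheses, and resolves the conflicts between the surviving
// hypotheses before reporting the recognized objects.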
void
pcl::recognition::ObjRecRANSAC::recognize (const PointCloudIn& scene, const PointCloudN& normals, list<ObjRecRANSAC::Output>& recognized_objects, double success_probability)
{
  this->clearTestData ();
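  // Build an octree over the scene points; the scene normals are stored in the full leaves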
  scene_octree_.build (scene, voxel_size_, &normals);
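  // Project the scene octree along the z-axis; the resulting pixel grid is used later to check
  // how well a transformed model agrees with the scene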
  scene_octree_proj_.build (scene_octree_, abs_zdist_thresh_, abs_zdist_thresh_);
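  // Collect the points of all full octree leaves; the trimmed ICP refiner is initialized with them below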
  scene_octree_points_ = PointCloudIn::Ptr (new PointCloudIn ());
  scene_octree_.getFullLeavesPoints (*scene_octree_points_);

  if ( do_icp_hypotheses_refinement_ )
  {
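    // Initialize the trimmed ICP refiner with the down-sampled scene points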
    trimmed_icp_.init (scene_octree_points_);
    trimmed_icp_.setNewToOldEnergyRatio (0.99f);
  }

  if ( success_probability >= 1.0 )
    success_probability = 0.99;
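  // The number of RANSAC iterations follows from the desired success probability and is capped
  // by the number of full leaves in the scene octree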
  vector<ORROctree::Node*>& full_leaves = scene_octree_.getFullLeaves ();
  int num_iterations = this->computeNumberOfIterations (success_probability), num_full_leaves = static_cast<int> (full_leaves.size ());

  if ( num_iterations > num_full_leaves )
    num_iterations = num_full_leaves;

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("ObjRecRANSAC::%s(): recognizing objects [%i iteration(s)]\n", __func__, num_iterations);
#endif
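  // Sample oriented point pairs (opps) from the scene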
  this->sampleOrientedPointPairs (num_iterations, full_leaves, sampled_oriented_point_pairs_);

  if ( rec_mode_ == ObjRecRANSAC::SAMPLE_OPP )
    return;
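  // Match the sampled opps against the model library to generate object hypotheses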
  list<HypothesisBase> pre_hypotheses;
  int num_hypotheses = this->generateHypotheses (sampled_oriented_point_pairs_, pre_hypotheses);
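  // Cluster the hypotheses in the space of rigid transforms and keep only the best
  // representative of each cluster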
  HypothesisOctree grouped_hypotheses;
  num_hypotheses = this->groupHypotheses (pre_hypotheses, num_hypotheses, transform_space_, grouped_hypotheses);
  pre_hypotheses.clear ();
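  // Spatially close hypotheses compete with each other: build a graph over them and keep a
  // maximal set of non-conflicting ones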
  ORRGraph<Hypothesis> graph_of_close_hypotheses;
  this->buildGraphOfCloseHypotheses (grouped_hypotheses, graph_of_close_hypotheses);
  this->filterGraphOfCloseHypotheses (graph_of_close_hypotheses, accepted_hypotheses_);
  graph_of_close_hypotheses.clear ();

  if ( rec_mode_ == ObjRecRANSAC::TEST_HYPOTHESES )
    return;
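  // Wrap each accepted hypothesis in a bounded object so that it can be inserted into a
  // bounding volume hierarchy (BVH)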
  vector<BVHH::BoundedObject*> bounded_objects (accepted_hypotheses_.size ());
  int i = 0;

  for ( vector<Hypothesis>::iterator hypo = accepted_hypotheses_.begin () ; hypo != accepted_hypotheses_.end () ; ++hypo, ++i )
  {
    BVHH::BoundedObject *bounded_object = new BVHH::BoundedObject (&(*hypo));
    hypo->computeCenterOfMass (bounded_object->getCentroid ());
    hypo->computeBounds (bounded_object->getBounds ());
    bounded_objects[i] = bounded_object;
  }
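  // Use the BVH to find intersecting hypotheses and resolve the remaining conflicts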
  BVHH bvh;
  bvh.build (bounded_objects);

  ORRGraph<Hypothesis*> conflict_graph;
  this->buildGraphOfConflictingHypotheses (bvh, conflict_graph);
  this->filterGraphOfConflictingHypotheses (conflict_graph, recognized_objects);

  this->clearTestData ();

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("ObjRecRANSAC::%s(): done [%i object(s)]\n", __func__, static_cast<int> (recognized_objects.size ()));
#endif
}

void
pcl::recognition::ObjRecRANSAC::sampleOrientedPointPairs (int num_iterations, const vector<ORROctree::Node*>& full_scene_leaves,
                                                          list<OrientedPointPair>& output) const
{
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("ObjRecRANSAC::%s(): sampling oriented point pairs (opps) ... ", __func__); fflush (stdout);
  int num_of_opps = 0;
#endif

  int num_full_leaves = static_cast<int> (full_scene_leaves.size ());

  if ( !num_full_leaves )
  {
#ifdef OBJ_REC_RANSAC_VERBOSE
    cout << "done [" << num_of_opps << " opps].\n";
#endif
    return;
  }
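  // Random generator over the indices of the full scene leaves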
  UniformGenerator<int> randgen (0, num_full_leaves - 1, static_cast<uint32_t> (time (NULL)));

  vector<int> ids (num_full_leaves);
  for ( int i = 0 ; i < num_full_leaves ; ++i )
    ids[i] = i;
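  // In each iteration draw a leaf (without replacement), pair it with a second full leaf at
  // distance pair_width_, and keep the pair unless it is (nearly) coplanar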
  for ( int i = 0 ; i < num_iterations ; ++i )
  {
    randgen.setParameters (0, static_cast<int> (ids.size ()) - 1);
    int rand_pos = randgen.run ();

    ORROctree::Node *leaf1 = full_scene_leaves[ids[rand_pos]];

    ids.erase (ids.begin () + rand_pos);

    const float *p1 = leaf1->getData ()->getPoint ();
    const float *n1 = leaf1->getData ()->getNormal ();
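    // Randomly select a second full leaf on the sphere of radius pair_width_ centered at p1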
    ORROctree::Node *leaf2 = scene_octree_.getRandomFullLeafOnSphere (p1, pair_width_);
    if ( !leaf2 )
      continue;

    const float *p2 = leaf2->getData ()->getPoint ();
    const float *n2 = leaf2->getData ()->getNormal ();

    if ( ignore_coplanar_opps_ )
    {
      if ( aux::pointsAreCoplanar (p1, n1, p2, n2, max_coplanarity_angle_) )
        continue;
    }

    output.push_back (OrientedPointPair (p1, n1, p2, n2));

#ifdef OBJ_REC_RANSAC_VERBOSE
    ++num_of_opps;
#endif
  }

#ifdef OBJ_REC_RANSAC_VERBOSE
  cout << "done [" << num_of_opps << " opps].\n";
#endif
}

int
pcl::recognition::ObjRecRANSAC::generateHypotheses (const list<OrientedPointPair>& pairs, list<HypothesisBase>& out) const
{
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("ObjRecRANSAC::%s(): generating hypotheses ... ", __func__); fflush (stdout);
#endif
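  // Each scene pair is hashed into the model library's hash table; every model pair stored in
  // the (up to 27) neighboring cells yields one rigid transform, i.e. one hypothesis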
  ModelLibrary::HashTableCell *neigh_cells[27];
  float hash_table_key[3];
  int num_hypotheses = 0;

  for ( list<OrientedPointPair>::const_iterator pair = pairs.begin () ; pair != pairs.end () ; ++pair )
  {
    const float *scene_p1 = (*pair).p1_;
    const float *scene_n1 = (*pair).n1_;
    const float *scene_p2 = (*pair).p2_;
    const float *scene_n2 = (*pair).n2_;
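    // Compute the signature of the scene pair and use it as a key into the model hash table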
    this->compute_oriented_point_pair_signature (scene_p1, scene_n1, scene_p2, scene_n2, hash_table_key);

    int num_neigh_cells = model_library_.getHashTable ().getNeighbors (hash_table_key, neigh_cells);

    for ( int i = 0 ; i < num_neigh_cells ; ++i )
    {
      for ( ModelLibrary::HashTableCell::iterator cell_it = neigh_cells[i]->begin () ; cell_it != neigh_cells[i]->end () ; ++cell_it )
      {
        const ModelLibrary::Model *obj_model = (*cell_it).first;
        ModelLibrary::node_data_pair_list& model_pairs = (*cell_it).second;

        for ( ModelLibrary::node_data_pair_list::iterator model_pair_it = model_pairs.begin () ; model_pair_it != model_pairs.end () ; ++model_pair_it )
        {
          const float *model_p1 = (*model_pair_it).first->getPoint ();
          const float *model_n1 = (*model_pair_it).first->getNormal ();
          const float *model_p2 = (*model_pair_it).second->getPoint ();
          const float *model_n2 = (*model_pair_it).second->getNormal ();
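          // The rigid transform mapping the model pair onto the scene pair defines one hypothesis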
          HypothesisBase hypothesis (obj_model);

          this->computeRigidTransform (model_p1, model_n1, model_p2, model_n2, scene_p1, scene_n1, scene_p2, scene_n2, hypothesis.rigid_transform_);

          out.push_back (hypothesis);
          ++num_hypotheses;
        }
      }
    }
  }
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("%i hypotheses\n", num_hypotheses);
#endif

  return (num_hypotheses);
}

int
pcl::recognition::ObjRecRANSAC::groupHypotheses (list<HypothesisBase>& hypotheses, int num_hypotheses,
                                                 RigidTransformSpace& transform_space, HypothesisOctree& grouped_hypotheses) const
{
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("ObjRecRANSAC::%s():\n grouping %i hypotheses ... ", __func__, num_hypotheses); fflush (stdout);
#endif
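  // Compute the scene bounds, enlarged by a fraction of the largest extent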
  float b[6]; scene_octree_.getBounds (b);
  float enlr = scene_bounds_enlargement_factor_*std::max (std::max (b[1]-b[0], b[3]-b[2]), b[5]-b[4]);
  b[0] -= enlr; b[1] += enlr;
  b[2] -= enlr; b[3] += enlr;
  b[4] -= enlr; b[5] += enlr;
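  // Build an octree over the enlarged bounds; it will hold the best hypothesis per leaf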
  HypothesisCreator hypothesis_creator;
  grouped_hypotheses.build (b, position_discretization_, &hypothesis_creator);
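  // Discretize the space of rigid transforms (positions and rotations)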
  transform_space.build (b, position_discretization_, rotation_discretization_);
  float transformed_point[3];
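  // Each hypothesis votes in the transform space with the transformed center of mass of its model octree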
  for ( list<HypothesisBase>::iterator hypo_it = hypotheses.begin () ; hypo_it != hypotheses.end () ; ++hypo_it )
  {
    aux::transform (hypo_it->rigid_transform_, hypo_it->obj_model_->getOctreeCenterOfMass (), transformed_point);

    transform_space.addRigidTransform (hypo_it->obj_model_, transformed_point, hypo_it->rigid_transform_);
  }

  list<RotationSpace*>& rotation_spaces = transform_space.getRotationSpaces ();
  int num_accepted = 0;

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("ObjRecRANSAC::%s(): done\n testing the cluster representatives ...\n", __func__); fflush (stdout);

  float progress_factor = 100.0f/static_cast<float> (transform_space.getNumberOfOccupiedRotationSpaces ());
  int num_done = 0;
#endif
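  // For every occupied rotation space take, per model, the transform with the most votes and
  // test it against the scene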
  for ( list<RotationSpace*>::iterator rs_it = rotation_spaces.begin () ; rs_it != rotation_spaces.end () ; ++rs_it )
  {
    const map<string, ModelLibrary::Model*>& models = model_library_.getModels ();
    Hypothesis best_hypothesis;
    best_hypothesis.match_confidence_ = 0.0f;

    for ( map<string, ModelLibrary::Model*>::const_iterator model = models.begin () ; model != models.end () ; ++model )
    {
      Hypothesis hypothesis (model->second);

      if ( !(*rs_it)->getTransformWithMostVotes (model->second, hypothesis.rigid_transform_) )
        continue;

      int int_match;
      int penalty;
      this->testHypothesis (&hypothesis, int_match, penalty);

      float num_full_leaves = static_cast<float> (hypothesis.obj_model_->getOctree ().getFullLeaves ().size ());
      float match_thresh = num_full_leaves*visibility_;
      int penalty_thresh = static_cast<int> (num_full_leaves*relative_num_of_illegal_pts_ + 0.5f);
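      // Accept the hypothesis only if it explains enough of the model (visibility) and does
      // not place too many model points in front of the scene surface (penalty)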
      if ( int_match >= match_thresh && penalty <= penalty_thresh )
      {
        if ( do_icp_hypotheses_refinement_ && int_match > 3 )
        {
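          // Refine the rigid transform with trimmed ICP and re-test the hypothesis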
          Eigen::Matrix<float, 4, 4> mat;
          aux::array12ToMatrix4x4 (hypothesis.rigid_transform_, mat);

          trimmed_icp_.align (
            hypothesis.obj_model_->getPointsForRegistration (),
            static_cast<int> (static_cast<float> (int_match)*frac_of_points_for_icp_refinement_),
            mat);
          aux::matrix4x4ToArray12 (mat, hypothesis.rigid_transform_);

          this->testHypothesis (&hypothesis, int_match, penalty);
        }

        if ( hypothesis.match_confidence_ > best_hypothesis.match_confidence_ )
          best_hypothesis = hypothesis;
      }
    }

    if ( best_hypothesis.match_confidence_ > 0.0f )
    {
      const float *c = (*rs_it)->getCenter ();
      HypothesisOctree::Node* node = grouped_hypotheses.createLeaf (c[0], c[1], c[2]);

      node->setData (best_hypothesis);
      ++num_accepted;
    }

#ifdef OBJ_REC_RANSAC_VERBOSE
    printf ("\r %.1f%% ", (static_cast<float> (++num_done))*progress_factor); fflush (stdout);
#endif
  }

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("done\n %i accepted.\n", num_accepted);
#endif

  return (num_accepted);
}

void
pcl::recognition::ObjRecRANSAC::buildGraphOfCloseHypotheses (HypothesisOctree& hypotheses, ORRGraph<Hypothesis>& graph) const
{
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("ObjRecRANSAC::%s(): building the graph ... ", __func__); fflush (stdout);
#endif

  vector<HypothesisOctree::Node*> hypo_leaves = hypotheses.getFullLeaves ();
  int i = 0;

  graph.resize (static_cast<int> (hypo_leaves.size ()));

  for ( vector<HypothesisOctree::Node*>::iterator hypo = hypo_leaves.begin () ; hypo != hypo_leaves.end () ; ++hypo, ++i )
    (*hypo)->getData ().setLinearId (i);

  i = 0;
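  // Each hypothesis becomes a graph node; neighboring octree leaves are connected by directed edges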
  for ( vector<HypothesisOctree::Node*>::const_iterator hypo = hypo_leaves.begin () ; hypo != hypo_leaves.end () ; ++hypo, ++i )
  {
    graph.getNodes ()[i]->setFitness (static_cast<int> ((*hypo)->getData ().explained_pixels_.size ()));
    graph.getNodes ()[i]->setData ((*hypo)->getData ());

    const set<HypothesisOctree::Node*>& neighbors = (*hypo)->getNeighbors ();

    for ( set<HypothesisOctree::Node*>::const_iterator n = neighbors.begin() ; n != neighbors.end() ; ++n )
      graph.insertDirectedEdge ((*hypo)->getData ().getLinearId (), (*n)->getData ().getLinearId ());
  }

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("done\n");
#endif
}

void
pcl::recognition::ObjRecRANSAC::filterGraphOfCloseHypotheses (ORRGraph<Hypothesis>& graph, std::vector<Hypothesis>& out) const
{
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("ObjRecRANSAC::%s(): filtering the graph of close hypotheses ... ", __func__); fflush (stdout);
#endif
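  // Keep the hypotheses that end up in the "on" set of a maximal on/off partition of the graph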
  list<ORRGraph<Hypothesis>::Node*> on_nodes, off_nodes;
  graph.computeMaximalOnOffPartition (on_nodes, off_nodes);

  for ( list<ORRGraph<Hypothesis>::Node*>::iterator it = on_nodes.begin () ; it != on_nodes.end () ; ++it )
    out.push_back ((*it)->getData ());

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("done [%i remaining hypotheses]\n", static_cast<int> (out.size ()));
#endif
}

void
pcl::recognition::ObjRecRANSAC::buildGraphOfConflictingHypotheses (const BVHH& bvh, ORRGraph<Hypothesis*>& graph) const
{
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("ObjRecRANSAC::%s(): building the conflict graph ... ", __func__); fflush (stdout);
#endif

  const vector<BVHH::BoundedObject*>* bounded_objects = bvh.getInputObjects ();

  if ( !bounded_objects )
  {
#ifdef OBJ_REC_RANSAC_VERBOSE
    printf("done\n");
#endif
    return;
  }

  int lin_id = 0;

  graph.resize (static_cast<int> (bounded_objects->size ()));

  for ( vector<BVHH::BoundedObject*>::const_iterator obj = bounded_objects->begin () ; obj != bounded_objects->end () ; ++obj, ++lin_id )
  {
    (*obj)->getData ()->setLinearId (lin_id);
    graph.getNodes ()[lin_id]->setData ((*obj)->getData ());
  }

  typedef pair<int,int> ordered_int_pair;

  set<ordered_int_pair, bool(*)(const ordered_int_pair&, const ordered_int_pair&)> ordered_hypotheses_ids (aux::compareOrderedPairs<int>);
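  // For every pair of hypotheses with intersecting bounds check how many explained scene
  // pixels they have in common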
  for ( vector<BVHH::BoundedObject*>::const_iterator obj = bounded_objects->begin () ; obj != bounded_objects->end () ; ++obj )
  {
    Hypothesis *hypo1 = (*obj)->getData ();

    float bounds[6];
    hypo1->computeBounds (bounds);

    list<BVHH::BoundedObject*> intersected_objects;
    bvh.intersect (bounds, intersected_objects);

    for ( list<BVHH::BoundedObject*>::iterator it = intersected_objects.begin () ; it != intersected_objects.end () ; ++it )
    {
      Hypothesis *hypo2 = (*it)->getData ();

      pair<int,int> id_pair;
      if ( hypo1->getLinearId () < hypo2->getLinearId () )
      {
        id_pair.first = hypo1->getLinearId ();
        id_pair.second = hypo2->getLinearId ();
      }
      else
      {
        id_pair.first = hypo2->getLinearId ();
        id_pair.second = hypo1->getLinearId ();
      }

      pair<set<ordered_int_pair, bool(*)(const ordered_int_pair&, const ordered_int_pair&)>::iterator, bool> res = ordered_hypotheses_ids.insert (id_pair);

      if ( res.second == false )
        continue;
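      // Compute the intersection of the pixel sets explained by the two hypotheses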
      set<int> id_intersection;

      std::set_intersection (hypo1->explained_pixels_.begin (),
                             hypo1->explained_pixels_.end (),
                             hypo2->explained_pixels_.begin (),
                             hypo2->explained_pixels_.end (),
                             std::inserter (id_intersection, id_intersection.begin ()));

      float frac_1 = static_cast<float> (id_intersection.size ())/static_cast<float> (hypo1->explained_pixels_.size ());
      float frac_2 = static_cast<float> (id_intersection.size ())/static_cast<float> (hypo2->explained_pixels_.size ());
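      // The two hypotheses conflict if either one shares too large a fraction of its explained pixels with the other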
      if ( frac_1 > intersection_fraction_ || frac_2 > intersection_fraction_ )
        graph.insertUndirectedEdge (hypo1->getLinearId (), hypo2->getLinearId ());
    }
  }

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf("done\n");
#endif
}

void
pcl::recognition::ObjRecRANSAC::filterGraphOfConflictingHypotheses (ORRGraph<Hypothesis*>& graph, list<ObjRecRANSAC::Output>& recognized_objects) const
{
#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("ObjRecRANSAC::%s(): filtering the conflict graph ... ", __func__); fflush (stdout);
#endif

  vector<ORRGraph<Hypothesis*>::Node*> &nodes = graph.getNodes ();
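  // The fitness of a node is the number of pixels it explains minus the number of pixels
  // explained by its neighbors (the hypotheses it conflicts with)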
  for ( vector<ORRGraph<Hypothesis*>::Node*>::iterator it = nodes.begin () ; it != nodes.end () ; ++it )
  {
    size_t num_of_explained = 0;

    for ( set<ORRGraph<Hypothesis*>::Node*>::const_iterator neigh = (*it)->getNeighbors ().begin () ; neigh != (*it)->getNeighbors ().end () ; ++neigh )
      num_of_explained += (*neigh)->getData ()->explained_pixels_.size ();

    (*it)->setFitness (static_cast<int> ((*it)->getData ()->explained_pixels_.size ()) - static_cast<int> (num_of_explained));
  }

  list<ORRGraph<Hypothesis*>::Node*> on_nodes, off_nodes;
  graph.computeMaximalOnOffPartition (on_nodes, off_nodes);

  for ( list<ORRGraph<Hypothesis*>::Node*>::iterator it = on_nodes.begin () ; it != on_nodes.end () ; ++it )
  {
    recognized_objects.push_back (
      ObjRecRANSAC::Output ((*it)->getData ()->obj_model_->getObjectName (),
                            (*it)->getData ()->rigid_transform_,
                            (*it)->getData ()->match_confidence_,
                            (*it)->getData ()->obj_model_->getUserData ())
    );
  }

#ifdef OBJ_REC_RANSAC_VERBOSE
  printf ("done\n");
#endif
}

inline void
pcl::recognition::ObjRecRANSAC::testHypothesis (Hypothesis* hypothesis, int& match, int& penalty) const
{
  match = 0;
  penalty = 0;

  const std::vector<ORROctree::Node*>& full_model_leaves = hypothesis->obj_model_->getOctree ().getFullLeaves ();
  const float* rigid_transform = hypothesis->rigid_transform_;
  const ORROctreeZProjection::Pixel* pixel;
  float transformed_point[3];
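  // Transform every full model leaf into the scene and compare its z-value with the z-range
  // of the scene pixel it projects to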
  for ( std::vector<ORROctree::Node*>::const_iterator leaf_it = full_model_leaves.begin () ; leaf_it != full_model_leaves.end () ; ++leaf_it )
  {
    aux::transform (rigid_transform, (*leaf_it)->getData ()->getPoint (), transformed_point);

    pixel = scene_octree_proj_.getPixel (transformed_point);

    if ( pixel == NULL )
      continue;
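    // A point in front of the scene surface counts as a penalty; a point within the pixel's
    // [z1, z2] range counts as a match and explains that pixel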
    if ( transformed_point[2] < pixel->z1 () )
      ++penalty;
    else if ( transformed_point[2] <= pixel->z2 () )
    {
      ++match;
      hypothesis->explained_pixels_.insert (pixel->getId ());
    }
  }

  hypothesis->match_confidence_ = static_cast<float> (match)/static_cast<float> (hypothesis->obj_model_->getOctree ().getFullLeaves ().size ());
}

inline void
pcl::recognition::ObjRecRANSAC::testHypothesisNormalBased (Hypothesis* hypothesis, float& match) const
{
  match = 0.0f;

  const std::vector<ORROctree::Node*>& full_model_leaves = hypothesis->obj_model_->getOctree ().getFullLeaves ();
  const float* rigid_transform = hypothesis->rigid_transform_;
  float transformed_point[3];
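  // Like testHypothesis(), but the contribution of each matching model point is weighted by
  // the agreement between its rotated normal and the normal of the closest scene point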
  for ( std::vector<ORROctree::Node*>::const_iterator leaf_it = full_model_leaves.begin () ; leaf_it != full_model_leaves.end () ; ++leaf_it )
  {
    aux::transform (rigid_transform, (*leaf_it)->getData ()->getPoint (), transformed_point);

    const ORROctreeZProjection::Pixel* pixel = scene_octree_proj_.getPixel (transformed_point);

    if ( pixel == NULL )
      continue;

    if ( pixel->z1 () <= transformed_point[2] && transformed_point[2] <= pixel->z2 () )
    {
      hypothesis->explained_pixels_.insert (pixel->getId ());
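      // Find the scene octree node under this pixel that is closest to the transformed model point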
      const set<ORROctree::Node*, bool(*)(ORROctree::Node*,ORROctree::Node*)>* nodes = scene_octree_proj_.getOctreeNodes (transformed_point);

      set<ORROctree::Node*, bool(*)(ORROctree::Node*,ORROctree::Node*)>::const_iterator n = nodes->begin ();
      ORROctree::Node *closest_node = *n;
      float sqr_dist, min_sqr_dist = aux::sqrDistance3 (closest_node->getData ()->getPoint (), transformed_point);

      for ( ++n ; n != nodes->end () ; ++n )
      {
        sqr_dist = aux::sqrDistance3 ((*n)->getData ()->getPoint (), transformed_point);
        if ( sqr_dist < min_sqr_dist )
        {
          closest_node = *n;
          min_sqr_dist = sqr_dist;
        }
      }

      float rotated_normal[3];
      aux::mult3x3 (rigid_transform, closest_node->getData ()->getNormal (), rotated_normal);

      match += aux::dot3 (rotated_normal, (*leaf_it)->getData ()->getNormal ());
    }
  }

  hypothesis->match_confidence_ = match/static_cast<float> (hypothesis->obj_model_->getOctree ().getFullLeaves ().size ());
}