00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00032
00033
00034
00035
00036
00037
00038
00039
00040
00041 #include <iostream>
00042 using namespace std;
00043
00044 #include <narf_recognition/object_recognition_helper.h>
00045 #include <boost/thread/thread.hpp>
00046 #include "pcl/console/parse.h"
00047
00048 #include "pcl/visualization/range_image_visualizer.h"
00049 #include "pcl/visualization/pcl_visualizer.h"
00050
00051 #include "Eigen/Geometry"
00052 #include "pcl/point_cloud.h"
00053 #include "pcl/point_types.h"
00054
00055 #include "pcl/io/pcd_io.h"
00056 #include "pcl/range_image/range_image.h"
00057 #include "pcl/features/narf.h"
00058
00059 #include "pcl/common/point_correspondence.h"
00060 #include "pcl/common/poses_from_matches.h"
00061 #include "pcl/features/range_image_border_extractor.h"
00062 #include "pcl/keypoints/narf_keypoint.h"
00063 #include "pcl/range_image/range_image_planar.h"
00064 #include "sensor_msgs/Image.h"
00065 #include "pcl/common/file_io.h"
00066
00067
00068
00069 #include "pcl/common/common_headers.h"
00070
00071 using namespace pcl;
00072 using namespace pcl::visualization;
00073
00074 ObjectRecognitionHelper object_recognition_helper;
00075 ObjectRecognitionHelper::Parameters& orh_params = object_recognition_helper.getParameters ();
00076 FalsePositivesFilter& false_positives_filter = object_recognition_helper.getFalsePositivesFilter ();
00077 FalsePositivesFilter::Parameters& fpf_params = false_positives_filter.getParameters ();
00078
00079 int& max_no_of_threads = orh_params.max_no_of_threads;
00080 bool& use_rotation_invariance = orh_params.use_rotation_invariance;
00081 float& angular_resolution = orh_params.angular_resolution;
00082 RangeImage::CoordinateFrame& coordinate_frame = orh_params.coordinate_frame;
00083 float& max_descriptor_distance = orh_params.max_descriptor_distance;
00084 bool& set_unseen_to_max_range = orh_params.set_unseen_to_max_range;
00085 bool single_view_model = orh_params.single_view_model;
00086
00087 float noise_level_factor = 0.1;
00088 float support_size_factor = 0.5f;
00089
00090 bool show_model_views = 1;
00091 bool show_view_simulation_results = 0;
00092 bool show_validation_points = 0;
00093 bool show_found_objects = 1;
00094 bool show_feature_match_positions = 0;
00095 bool show_interest_points = 0;
00096 bool print_timings = true;
00097
00098 void printUsage (const char* progName)
00099 {
00100 cout << "\n\nUsage: "<<progName<<" [options] [scene.pcd] <model.pcl> [model_2.pcl] ... [model_n.pcl]\n\n"
00101 << "Options:\n"
00102 << "-------------------------------------------\n"
00103
00104
00105
00106
00107 << "-i <bool> do ICP (default "<< (int)fpf_params.do_icp<<").\n"
00108 << "-r <float> angular resolution in degrees (default "<<rad2deg (angular_resolution)<<")\n"
00109 << "-v <float> min validation point score (default "<<fpf_params.min_validation_point_score<<")\n"
00110 << "-c <int> coordinate frame (default "<<coordinate_frame<<")\n"
00111 << "-b activate single view model, which tells the system that the given model point cloud\n"
00112 << " is not of the complete object, but only one view of it - no view sampling will be performed.\n"
00113 << "-o use orientation invariant version\n"
00114 << "-f <float> do view simulation filtering with this minimum score.\n"
00115 << "-x <float> use maximum range for found objects.\n"
00116 << "-m set unseen pixels to max range\n"
00117 << "-d <float> set the maximum descriptor distance (default: "<<max_descriptor_distance<<")\n"
00118 << "-t <int> set the maximum number of threads for parallelization (default: "<<max_no_of_threads<<")\n"
00119 << "-h this help\n"
00120 << "\n\n";
00121 }
00122
00123
00124
// Incoming depth image message. NOTE(review): never assigned or read in the
// visible code -- presumably a leftover from (or hook for) a live sensor
// input path; confirm before removing.
sensor_msgs::ImageConstPtr depth_image_msg;

// NOTE(review): mutex is not locked anywhere in the visible code; it looks
// intended to guard got_new_data for a multi-threaded data source -- confirm.
boost::mutex m;
// Set once a new scene is ready; main()'s visualization loop consumes it.
bool got_new_data=false;
00130
00131
00132
00133
00134
00135
00136
00137
00138
00139
00140
00141
00142
00143
00144
00145
00146
00147
00148
00149
00150
00151
00152
00153
00154
00155
00156
00157
00158
00159
00160
00161
00162
00163
00164
00165
00166
00167
00168
00169
00170 int main (int argc, char** argv)
00171 {
00172
00173
00174
00175 int range_image_source = -1;
00176 orh_params.view_sampling_first_layer = 0;
00177 orh_params.view_sampling_last_layer = 0;
00178
00179
00180 for (char c; (c = getopt (argc, argv, "bmx:i:f:hr:c:v:od:t:")) != -1; ) {
00181 switch (c) {
00182 case 't':
00183 max_no_of_threads = strtol (optarg, NULL, 0);
00184 cout << PVARN (max_no_of_threads);
00185 break;
00186 case 'i':
00187 fpf_params.do_icp = (bool)strtol (optarg, NULL, 0);
00188 cout << "Will "<< (fpf_params.do_icp?"":"not ")<<"use ICP.\n";
00189 break;
00190
00191
00192
00193
00194 case 'b':
00195 {
00196 single_view_model = true;
00197 cout << "Setting single_view_model to true.\n";
00198 break;
00199 }
00200 case 'o':
00201 {
00202 use_rotation_invariance = true;
00203 cout << "Using rotational invariant version of the features.\n";
00204 break;
00205 }
00206 case 'r':
00207 {
00208 angular_resolution = deg2rad (strtod (optarg, NULL));
00209 if (angular_resolution <= 0.0)
00210 cerr << "Angular resolution "<<optarg<<" invalid.\n";
00211 else
00212 cout << "Setting angular resolution to "<<rad2deg (angular_resolution)<<".\n";
00213 break;
00214 }
00215 case 'v':
00216 {
00217 fpf_params.do_validation_point_filtering = true;
00218 fpf_params.min_validation_point_score = strtod (optarg, NULL);
00219 cout << PVARN (fpf_params.min_validation_point_score);
00220 break;
00221 }
00222 case 'x':
00223 {
00224 fpf_params.maximum_range_for_found_objects = strtod (optarg, NULL);
00225 cout << PVARN (fpf_params.maximum_range_for_found_objects);
00226 break;
00227 }
00228 case 'd':
00229 {
00230 max_descriptor_distance = strtod (optarg, NULL);
00231 cout << PVARN (max_descriptor_distance);
00232 break;
00233 }
00234 case 'f':
00235 {
00236 fpf_params.do_view_simulation_filtering = true;
00237 fpf_params.min_view_simulation_score = strtod (optarg, NULL);
00238 cout << PVARN (fpf_params.min_view_simulation_score);
00239 break;
00240 }
00241
00242 case 'c':
00243 {
00244 coordinate_frame = (RangeImage::CoordinateFrame)strtol (optarg, NULL, 0);
00245 cout << "Using coordinate frame "<<coordinate_frame<<".\n";
00246 break;
00247 }
00248 case 'm':
00249 {
00250 set_unseen_to_max_range = true;
00251 cout << "Unseen pixels will be considered max ranges.\n";
00252 break;
00253 }
00254 case 'h':
00255 default:
00256 printUsage (argv[0]);
00257 exit (0);
00258 }
00259 }
00260
00261 vector<RangeImage> view_simulation_results;
00262 if (show_view_simulation_results)
00263 false_positives_filter.setViewSimulationImagesPtr (&view_simulation_results);
00264
00265
00266
00267
00268
00269 int model_filename_idx = optind+1;
00270 if (model_filename_idx >= argc)
00271 {
00272 cout << "\nNo *.pcd file for model given.\n\n";
00273 exit (0);
00274 }
00275
00276 vector<string> model_filenames;
00277 for (; model_filename_idx<argc; ++model_filename_idx)
00278 model_filenames.push_back (argv[model_filename_idx]);
00279 ObjectModelList& object_models = object_recognition_helper.getIntermediateElements ().object_models;
00280 if (!object_recognition_helper.addObjectModelsFromPcdFiles (model_filenames))
00281 {
00282 printUsage (argv[0]);
00283 exit (1);
00284 }
00285
00286 float average_model_radius = object_models.getAverageModelRadius ();
00287 float support_size = support_size_factor * average_model_radius;
00288 float noise_level = noise_level_factor * average_model_radius;
00289 cout << "\n-----\n"
00290 << "The average object model radius is "<<average_model_radius<<"m.\n"
00291 << " => using support size "<<support_size<<"m.\n"
00292 << " => using noise level "<<noise_level<<"m.\n"
00293 << "-----\n\n";
00294 orh_params.support_size = support_size;
00295 orh_params.noise_level = noise_level;
00296
00297 object_recognition_helper.extractModelFeatures ();
00298 std::vector<Narf*>& database_features = object_recognition_helper.getIntermediateElements ().database_features;
00299 std::vector<ObjectModelList::FeatureSource>& database_feature_sources =
00300 object_recognition_helper.getIntermediateElements ().database_feature_sources;
00301
00302
00303
00304 if (optind >= argc)
00305 {
00306 cout << "\nNo *.pcd file for scene given.\n\n";
00307 printUsage (argv[0]);
00308 exit (0);
00309 }
00310 std::string scene_filename = argv[optind];
00311
00312 if (!object_recognition_helper.createRangeImageFromPcdFile (scene_filename))
00313 {
00314 cerr << "\n-----\nRange image creation failed.\n\n";
00315 exit (1);
00316 }
00317 got_new_data = true;
00318
00319 vector<string> point_clouds_in_viewer;
00320 RangeImageVisualizer scene_range_image_widget ("scene range image");
00321 vector<RangeImageVisualizer*> view_simulation_result_widgets;
00322 vector<RangeImageVisualizer*> interest_points_widgets;
00323 vector<RangeImageVisualizer*> range_image_widgets;
00324
00325 PCLVisualizer viewer ("3D Viewer");
00326 viewer.setBackgroundColor (1, 1, 1);
00327
00328 while (!viewer.wasStopped ())
00329 {
00330 viewer.spinOnce (100);
00331 scene_range_image_widget.spinOnce ();
00332 for (size_t i=0; i<view_simulation_result_widgets.size (); ++i)
00333 view_simulation_result_widgets[i]->spinOnce ();
00334 for (size_t i=0; i<interest_points_widgets.size (); ++i)
00335 interest_points_widgets[i]->spinOnce ();
00336 for (size_t i=0; i<range_image_widgets.size (); ++i)
00337 range_image_widgets[i]->spinOnce ();
00338
00339
00340 pcl_sleep (0.01);
00341
00342 if (!got_new_data)
00343 continue;
00344 got_new_data = false;
00345 cout << "Got new data.\n";
00346
00347 object_recognition_helper.extractSceneFeatures ();
00348 object_recognition_helper.extractPoseEstimates ();
00349 std::vector<PosesFromMatches::PoseEstimatesVector>& pose_estimates_per_model =
00350 object_recognition_helper.filterFalsePositives ();
00351
00352 object_recognition_helper.printTimings ();
00353
00354
00356
00357 NarfKeypoint& interest_point_detector_scene =
00358 object_recognition_helper.getIntermediateElements ().interest_point_detector_scene;
00359
00360 boost::shared_ptr<RangeImage>& scene_range_image_ptr =
00361 object_recognition_helper.getIntermediateElements ().scene_range_image_ptr;
00362 RangeImage& scene_range_image = *scene_range_image_ptr;
00363
00364
00365
00366 while (!view_simulation_result_widgets.empty ())
00367 {
00368 delete view_simulation_result_widgets.back ();
00369 view_simulation_result_widgets.pop_back ();
00370 }
00371 for (unsigned int i=0; i<view_simulation_results.size (); i+=4)
00372 {
00373 float min_range, max_range;
00374 view_simulation_results[i].getMinMaxRanges (min_range, max_range);
00375
00376 stringstream ss; ss << setfill ('0') << setw (3) << (i/4)+1;
00377 view_simulation_result_widgets.push_back (RangeImageVisualizer::getRangeImageWidget (view_simulation_results[i+1],
00378 min_range, max_range, true, ss.str ()+".1: real view"));
00379 view_simulation_result_widgets.push_back (RangeImageVisualizer::getRangeImageWidget (view_simulation_results[i],
00380 min_range, max_range, true, ss.str ()+".2: simulated view"));
00381 view_simulation_result_widgets.push_back (RangeImageVisualizer::getRangeImageWidget (view_simulation_results[i+2],
00382 0.0f, 1.0f, true, ss.str ()+".3: scores"));
00383 if (!view_simulation_results[i+3].points.empty ())
00384 view_simulation_result_widgets.push_back (RangeImageVisualizer::getRangeImageWidget (view_simulation_results[i+3],
00385 0.0f, 1.0f, true, ss.str ()+".4: scores_normals"));
00386 }
00387
00388 vector<boost::shared_ptr<ObjectModel::PointCloudType> > objects_in_scene;
00389 for (unsigned int model_idx=0; model_idx<object_models.size (); ++model_idx)
00390 {
00391 PosesFromMatches::PoseEstimatesVector& pose_estimates = pose_estimates_per_model[model_idx];
00392 ObjectModel& object_model = *object_models[model_idx];
00393 for (unsigned int pose_estimate_idx=0; pose_estimate_idx<pose_estimates.size () && show_found_objects;
00394 ++pose_estimate_idx)
00395 {
00396 const PosesFromMatches::PoseEstimate& pose_estimate = pose_estimates[pose_estimate_idx];
00397 objects_in_scene.push_back (boost::shared_ptr<ObjectModel::PointCloudType> (new ObjectModel::PointCloudType));
00398 object_model.getTransformedPointCloud (pose_estimate.transformation, *objects_in_scene.back ());
00399 }
00400 }
00401
00402 std::vector<std::vector<PointCorrespondences6DVector > >& feature_matches =
00403 object_recognition_helper.getIntermediateElements ().feature_matches;
00404 boost::shared_ptr<PointCloud<PointXYZ> > feature_match_positions_ptr (new PointCloud<PointXYZ>);
00405 PointCloud<PointXYZ>& feature_match_positions = *feature_match_positions_ptr;
00406 if (show_feature_match_positions)
00407 {
00408 feature_match_positions.points.clear ();
00409 for (unsigned int model_idx=0; model_idx<feature_matches.size (); ++model_idx)
00410 {
00411 vector<PointCorrespondences6DVector>& model_feature_matches = feature_matches[model_idx];
00412 for (unsigned int view_idx=0; view_idx<model_feature_matches.size (); ++view_idx)
00413 {
00414 const PointCorrespondences6DVector& view_feature_matches = model_feature_matches[view_idx];
00415 for (unsigned int match_idx=0; match_idx<view_feature_matches.size (); ++match_idx)
00416 {
00417 const PointCorrespondence6D& match = view_feature_matches[match_idx];
00418 PointXYZ match_position;
00419 match_position.x=match.point1[0];
00420 match_position.y=match.point1[1];
00421 match_position.z=match.point1[2];
00422 feature_match_positions.points.push_back (match_position);
00423 }
00424 }
00425 }
00426 }
00427
00428 ObjectModel& first_object_model = *object_models[0];
00429 while (!interest_points_widgets.empty ())
00430 {
00431 delete interest_points_widgets.back ();
00432 interest_points_widgets.pop_back ();
00433 }
00434 if (show_interest_points)
00435 {
00436 interest_points_widgets.push_back (RangeImageVisualizer::getInterestPointsWidget (scene_range_image,
00437 interest_point_detector_scene.getInterestImage (), 0.0f, 1.0f,
00438 interest_point_detector_scene.getInterestPoints (), "Scene interest points"));
00439 const std::vector<RangeImage, Eigen::aligned_allocator<RangeImage> >& model_views = first_object_model.getViews ();
00440 NarfKeypoint& interest_point_detector_model =
00441 object_recognition_helper.getIntermediateElements ().interest_point_detector_model;
00442 interest_point_detector_model.getParameters ().support_size = support_size;
00443 for (unsigned int model_view_idx=0; model_view_idx<model_views.size (); ++model_view_idx)
00444 {
00445 const RangeImage& model_view = model_views[model_view_idx];
00446 interest_point_detector_model.setRangeImage (&model_view);
00447
00448
00449
00450
00451
00452
00453 interest_points_widgets.push_back (RangeImageVisualizer::getInterestPointsWidget (model_view,
00454 interest_point_detector_model.getInterestImage (), 0.0f, 1.0f,
00455 interest_point_detector_model.getInterestPoints (), "Model interest points"));
00456
00457 interest_point_detector_model.setRangeImage (NULL);
00458 }
00459 }
00460
00461
00462
00463
00464
00465 scene_range_image_widget.showRangeImage (scene_range_image);
00466
00467 const std::vector<RangeImage, Eigen::aligned_allocator<RangeImage> >& model_views = first_object_model.getViews ();
00468 for (unsigned int i=0; i<model_views.size () && show_model_views; ++i)
00469 {
00470 const RangeImage& model_view = model_views[i];
00471 range_image_widgets.push_back (RangeImageVisualizer::getRangeImageWidget (model_view, -INFINITY, INFINITY, true, "Model view"));
00472 if (show_validation_points)
00473 {
00474 const ObjectModel::ValidationPoints& validation_points = first_object_model.getValidationPoints ()[i];
00475 for (unsigned int j=0; j<validation_points.surface.size (); ++j)
00476 {
00477 float image_x, image_y;
00478 model_view.getImagePoint (validation_points.surface[j], image_x, image_y);
00479 range_image_widgets.back ()->markPoint (image_x, image_y, pcl::visualization::Vector3ub (0,255,0));
00480 }
00481 for (unsigned int j=0; j<validation_points.border.size (); ++j)
00482 {
00483 float image_x, image_y;
00484 model_view.getImagePoint (validation_points.border[j], image_x, image_y);
00485 range_image_widgets.back ()->markPoint (image_x, image_y, pcl::visualization::Vector3ub (255,0,0));
00486 }
00487 }
00488 }
00489
00490
00491 for (unsigned int i=0; i<point_clouds_in_viewer.size (); ++i)
00492 viewer.removePointCloud (point_clouds_in_viewer[i]);
00493 point_clouds_in_viewer.clear ();
00494
00495
00496 PointCloudColorHandlerCustom<pcl::PointWithRange> color_handler_cloud (scene_range_image_ptr, 0, 0, 0);
00497 point_clouds_in_viewer.push_back ("scene_range_image");
00498 viewer.addPointCloud<PointWithRange> (scene_range_image_ptr, color_handler_cloud, point_clouds_in_viewer.back ());
00499 point_clouds_in_viewer.push_back ("feature_match_positions");
00500 viewer.addPointCloud (feature_match_positions_ptr, point_clouds_in_viewer.back ());
00501 viewer.setPointCloudRenderingProperties (PCL_VISUALIZER_POINT_SIZE, 7, point_clouds_in_viewer.back ());
00502 for (unsigned int i=0; i<objects_in_scene.size (); ++i)
00503 {
00504 stringstream ss;
00505 ss<<"Found object "<<i;
00506 point_clouds_in_viewer.push_back (ss.str ());
00507 PointCloudColorHandlerRandom<ObjectModel::PointCloudType::PointType> color_handler (objects_in_scene[i]);
00508 viewer.addPointCloud<ObjectModel::PointCloudType::PointType> (objects_in_scene[i], color_handler, point_clouds_in_viewer.back ());
00509 }
00510 }
00511 }