ObjectRecognitionServer.cpp
Go to the documentation of this file.
00001 #include <narf_recognition/object_recognition_helper.h>
00002 #include <narf_recognition/ObjectRecognitionServer.h>
00003 
00004 //ROS
00005 #include <ros/ros.h>
00006 #include <sensor_msgs/Image.h>
00007 
00008 //Advertised Service
00009 #include <tidyup_msgs/DetectGraspableObjects.h>
00010 
00011 //C++
00012 #include <iostream>
00013 #include <exception>
00014 #include <cstdlib>
00015 #include <iostream>
00016 
00017 #include <boost/thread/thread.hpp>
00018 #include <boost/foreach.hpp>
00019 
00020 #include <Eigen/Geometry>
00021 
00022 #include <pcl_ros/point_cloud.h>
00023 #include <pcl_ros/impl/transforms.hpp>
00024 
00025 //PCL
00026 #include <pcl/point_types.h>
00027 #include <pcl/point_cloud.h>
00028 #include <pcl/common/file_io.h>
00029 #include <pcl/common/common_headers.h>
00030 #include <pcl/console/parse.h>
00031 #include <pcl/features/narf.h>
00032 #include <pcl/io/pcd_io.h>
00033 #include <pcl/keypoints/narf_keypoint.h>
00034 #include <pcl/visualization/pcl_visualizer.h>
00035 #include <pcl/visualization/cloud_viewer.h>
00036 #include "pcl/range_image/range_image.h"
00037 
00038 #include "pcl/io/pcd_io.h"
00039 #include "pcl/range_image/range_image.h"
00040 #include "pcl/features/narf.h"
00041 #include "pcl/common/point_correspondence.h"
00042 #include "pcl/common/poses_from_matches.h"
00043 #include "pcl/features/range_image_border_extractor.h"
00044 #include "pcl/keypoints/narf_keypoint.h"
00045 #include "pcl/range_image/range_image_planar.h"
00046 #include "pcl/common/file_io.h"
00047 #include "pcl/common/common_headers.h"
00048 #include "pcl/visualization/range_image_visualizer.h"
00049 #include "pcl/visualization/pcl_visualizer.h"
00050 #include <ros/package.h>
00051 
00052 #include <visualization_msgs/Marker.h>
00053 
00054 ObjectRecognitionServer::ObjectRecognitionServer(ros::NodeHandle& n) :
00055                 nh_(n), noise_level_factor_(0.1), support_size_factor_(0.5), show_found_objects_(
00056                                 true), cloud_received_(false), cloud_thread_shutdown_(false),visualize_result_(false) {
00057 
00058         std::string filepath = ros::package::getPath("narf_recognition");
00059 
00060         nh_.getParam("locate_objects_server/visualize_result_", visualize_result_);
00061         if (visualize_result_) {
00062                 viewer_ = new pcl::visualization::PCLVisualizer("3D Viewer");
00063                 viewer_->initCameraParameters();
00064                 viewer_->setBackgroundColor(255, 255, 255);
00065                 viewer_->setCameraPosition(0., 0., -1., -1., 1., 1.);
00066         }
00067         pcl::ObjectRecognitionHelper::Parameters& orh_params =
00068                         object_recognition_helper_.getParameters();
00069         pcl::FalsePositivesFilter& false_positives_filter =
00070                         object_recognition_helper_.getFalsePositivesFilter();
00071         pcl::FalsePositivesFilter::Parameters& fpf_params =
00072                         false_positives_filter.getParameters();
00073 
00074         orh_params.view_sampling_first_layer = 0;
00075         orh_params.view_sampling_last_layer = 2;
00076         orh_params.set_unseen_to_max_range = true;
00077         orh_params.max_no_of_threads = 1;
00078         orh_params.angular_resolution = pcl::deg2rad(0.3);
00079         orh_params.use_rotation_invariance = true;
00080         orh_params.single_view_model = 0;
00081         orh_params.max_descriptor_distance = 0.09;
00082         fpf_params.do_icp = false;
00083         fpf_params.maximum_range_for_found_objects = 2.0;
00084         fpf_params.min_validation_point_score = 0.55; //0.25;
00085 
00086         //read in object models from given filenames
00087         XmlRpc::XmlRpcValue file_list;
00088         std::string key;
00089         key.assign("locate_objects_server/filenames");
00090         nh_.getParam(key, file_list);
00091         for (unsigned int i = 0; i < file_list.size(); i++) {
00092                 modelnames_.push_back(filepath);
00093                 modelnames_[i].append(file_list[i]);
00094                 ROS_INFO_STREAM("model file " << modelnames_[i]);
00095         }
00096         ROS_INFO_STREAM("Read filenames");
00097         getModels(modelnames_);
00098 
00099         cloud_nh_.setCallbackQueue(&cloud_callback_queue_);
00100 
00101         sub_ = cloud_nh_.subscribe<sensor_msgs::PointCloud2>("/camera/depth/points",
00102                         1, &ObjectRecognitionServer::cloud_cb_, this);
00103 
00104         serv_ = nh_.advertiseService("/tidyup/request_graspable_objects",
00105                         &ObjectRecognitionServer::locate, this);
00106 
00107         marker_pub_ = nh_.advertise<visualization_msgs::Marker>("narf_object", 10);
00108         ROS_INFO("Ready to recognize objects.");
00109 }
00110 
00111 ObjectRecognitionServer::~ObjectRecognitionServer() {
00112         if (visualize_result_) {
00113                 delete viewer_;
00114         }
00115 }
00116 
00117 void ObjectRecognitionServer::runThreadCallbackLoop() {
00118         ros::Rate r(15);
00119         while (ros::ok() && !cloud_thread_shutdown_) {
00120                 ROS_INFO("Cloud thread ");
00121                 cloud_callback_queue_.callAvailable(ros::WallDuration()); // call all available callbacks
00122                 r.sleep();
00123         }
00124 }
00125 
00126 void ObjectRecognitionServer::getModels(
00127                 std::vector<std::string> model_filenames) {
00128         if (model_filenames.size() == 0) {
00129                 ROS_ERROR("No*.pcd file for model given. Exiting.");
00130                 exit(0);
00131         }
00132 
00133         pcl::ObjectModelList& object_models =
00134                         object_recognition_helper_.getIntermediateElements().object_models;
00135         if (!object_recognition_helper_.addObjectModelsFromPcdFiles(
00136                         model_filenames)) {
00137                 ROS_ERROR("Could not load object models from files: Exiting.");
00138                 exit(1);
00139         }
00140 
00141         float average_model_radius = object_models.getAverageModelRadius();
00142         float support_size = support_size_factor_ * average_model_radius;
00143         float noise_level = noise_level_factor_ * average_model_radius;
00144         ROS_INFO_STREAM(
00145                         "The average object model radius is " << average_model_radius << "\n" << "  => using support size " << support_size << "\n" << "  => using noise level " << noise_level << noise_level);
00146         pcl::ObjectRecognitionHelper::Parameters& orh_params =
00147                         object_recognition_helper_.getParameters();
00148         orh_params.support_size = support_size;
00149         orh_params.noise_level = noise_level;
00150         initial_max_plane_error_ = 0.2f * average_model_radius;
00151         minimum_plane_size_ = 1.5f
00152                         * object_models.getMaximumPlaneSize(initial_max_plane_error_);
00153         ROS_INFO_STREAM("Minimum plane size " << minimum_plane_size_);
00154 }
00155 
// Point-cloud subscriber callback. Runs on the private callback queue that
// runThreadCallbackLoop() pumps while locate() is waiting for data.
// Converts the incoming ROS message into point_cloud_, extracts the
// far-range measurements, rebuilds the helper's scene range image, and
// raises cloud_received_ so locate() can proceed.
// NOTE(review): the message is taken by value — a ConstPtr/const& would
// avoid a copy; confirm against the class header before changing it.
void ObjectRecognitionServer::cloud_cb_(const sensor_msgs::PointCloud2 msg) {

	// Guards point_cloud_ and the helper's range image against concurrent
	// access from locate().
	mutex_.lock();
	pcl::PointCloud<pcl::PointWithViewpoint> far_ranges;
	fromROSMsg(msg, point_cloud_);
	pcl::RangeImage::extractFarRanges(msg, far_ranges);

	if (!object_recognition_helper_.createRangeImageFromPointCloud(point_cloud_,
			&far_ranges)) {
		ROS_ERROR("Failed to create Range Image From Point Cloud.");
	}
	// Set even when range-image creation failed, so locate() does not wait
	// forever — it will work on whatever state the helper is left in.
	cloud_received_ = true;
	mutex_.unlock();
}
00170 
// Renders the current detection result into the given PCL viewer: the scene
// range image in black plus one randomly-colored transformed model cloud per
// accepted pose estimate.
//
// @param viewer                  Viewer to draw into (typically *viewer_).
// @param pose_estimates_per_model One pose-estimate vector per loaded model,
//                                 indexed like the helper's object model list.
// @param point_clouds_in_viewer  In/out bookkeeping of the cloud IDs this
//                                function added, so a later call can remove
//                                them before redrawing.
void ObjectRecognitionServer::visualize(
		pcl::visualization::PCLVisualizer& viewer,
		std::vector<pcl::PosesFromMatches::PoseEstimatesVector>& pose_estimates_per_model,
		std::vector<std::string>& point_clouds_in_viewer) {

	viewer.removeAllPointClouds(0);
	boost::shared_ptr<pcl::RangeImage>& scene_range_image_ptr =
			object_recognition_helper_.getIntermediateElements().scene_range_image_ptr;
	pcl::ObjectModelList& object_models =
			object_recognition_helper_.getIntermediateElements().object_models;
	std::vector<boost::shared_ptr<pcl::ObjectModel::PointCloudType> > objects_in_scene; // For visualization
	// Build one transformed copy of the model cloud for every pose estimate
	// (skipped entirely when show_found_objects_ is false).
	for (unsigned int model_idx = 0; model_idx < object_models.size();
			++model_idx) {

		pcl::PosesFromMatches::PoseEstimatesVector& pose_estimates =
				pose_estimates_per_model[model_idx];
		pcl::ObjectModel& object_model = *object_models[model_idx];

		for (unsigned int pose_estimate_idx = 0;
				pose_estimate_idx < pose_estimates.size() && show_found_objects_;
				++pose_estimate_idx) {
			const pcl::PosesFromMatches::PoseEstimate& pose_estimate =
					pose_estimates[pose_estimate_idx];
			objects_in_scene.push_back(
					boost::shared_ptr<pcl::ObjectModel::PointCloudType>(
							new pcl::ObjectModel::PointCloudType));
			object_model.getTransformedPointCloud(pose_estimate.transformation,
					*objects_in_scene.back());
		}
	}

	//              ObjectModel& first_object_model = *object_models[0];
	// Only redraw when something was found; otherwise the previous frame's
	// clouds stay (removeAllPointClouds above already cleared the canvas).
	if (objects_in_scene.size() > 0) {
		for (unsigned int i = 0; i < point_clouds_in_viewer.size(); ++i)
			viewer.removePointCloud(point_clouds_in_viewer[i]);

		point_clouds_in_viewer.clear();

		// Scene range image drawn in black on the white background.
		pcl::visualization::PointCloudColorHandlerCustom<pcl::PointWithRange> color_handler_cloud(
				scene_range_image_ptr, 0, 0, 0);
		point_clouds_in_viewer.push_back("scene_range_image");
		viewer.addPointCloud<pcl::PointWithRange>(scene_range_image_ptr,
				color_handler_cloud, point_clouds_in_viewer.back());
		// NOTE(review): "feature_match_positions" is never added as a cloud
		// here, so this rendering property likely has no effect — confirm.
		point_clouds_in_viewer.push_back("feature_match_positions");
		viewer.setPointCloudRenderingProperties(
				pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 7,
				point_clouds_in_viewer.back());
		// One uniquely-named, randomly-colored cloud per detected object.
		for (unsigned int i = 0; i < objects_in_scene.size(); ++i) {
			std::stringstream ss;
			ss << "Found object " << i;
			point_clouds_in_viewer.push_back(ss.str());
			pcl::visualization::PointCloudColorHandlerRandom<
					pcl::ObjectModel::PointCloudType::PointType> color_handler(
					objects_in_scene[i]);
			viewer.addPointCloud<pcl::ObjectModel::PointCloudType::PointType>(
					objects_in_scene[i], color_handler,
					point_clouds_in_viewer.back());
			//viewer.addCoordinateSystem()
		}
	}

}
00233 
00234 bool ObjectRecognitionServer::locate(
00235                 tidyup_msgs::DetectGraspableObjects::Request &req,
00236                 tidyup_msgs::DetectGraspableObjects::Response &res) {
00237 
00238         ROS_INFO("Received recognition request.");
00239         std::vector<std::string> point_clouds_in_viewer;
00240 
00241         //wait for cloud to arrive
00242         cloud_received_ = false;
00243         cloud_thread_shutdown_ = false;
00244         cloud_thread_ = new boost::thread(
00245                         &ObjectRecognitionServer::runThreadCallbackLoop, this);
00246         while (!cloud_received_ && ros::ok()) {
00247                 ros::Rate r(5);
00248                 ROS_INFO("Waiting for point cloud.\n");
00249                 r.sleep();
00250         }
00251         ROS_INFO("Received new pointcloud data.");
00252         cloud_thread_shutdown_ = true;
00253         delete (cloud_thread_);
00254 
00255         //match models
00256         object_recognition_helper_.extractModelFeatures();
00257         object_recognition_helper_.getIntermediateElements().database_feature_sources;
00258 
00259         bool do_plane_extraction = true;
00260         boost::shared_ptr<pcl::RangeImage>& scene_range_image_ptr =
00261                         object_recognition_helper_.getIntermediateElements().scene_range_image_ptr;
00262         pcl::RangeImage& scene_range_image = *scene_range_image_ptr;
00263         if (do_plane_extraction) {
00264                 pcl::PointWithRange farRangePoint;
00265                 farRangePoint.x = farRangePoint.y = farRangePoint.z =
00266                                 std::numeric_limits<float>::quiet_NaN();
00267                 farRangePoint.range = std::numeric_limits<float>::infinity();
00268                 std::vector<pcl::RangeImage::ExtractedPlane> planes;
00269                 scene_range_image.extractPlanes(initial_max_plane_error_, planes);
00270                 for (int plane_idx = 0; plane_idx < int(planes.size()); ++plane_idx) {
00271                         const pcl::RangeImage::ExtractedPlane& plane = planes[plane_idx];
00272                         if (plane.maximum_extensions[2] <= minimum_plane_size_)
00273                                 continue;
00274                         ROS_DEBUG_STREAM(
00275                                         "Removing plane of size " << plane.maximum_extensions[2] << "m (" << plane.point_indices.size() << " points).");
00276                         for (int plane_point_idx = 0;
00277                                         plane_point_idx < int(plane.point_indices.size());
00278                                         ++plane_point_idx)
00279                                 scene_range_image.points[plane.point_indices[plane_point_idx]] =
00280                                                 farRangePoint;
00281                 }
00282         }
00283 
00284         object_recognition_helper_.extractSceneFeatures();
00285         object_recognition_helper_.extractPoseEstimates();
00286 
00287         std::vector<pcl::PosesFromMatches::PoseEstimatesVector>& pose_estimates_per_model =
00288                         object_recognition_helper_.filterFalsePositives();
00289         object_recognition_helper_.printTimings();
00290 
00291         std::vector<pcl::PosesFromMatches::PoseEstimatesVector> poses =
00292                         object_recognition_helper_.getIntermediateElements().pose_estimates_per_model;
00293 
00294         tidyup_msgs::GraspableObject graspable;
00295         unsigned int counter = 0;
00296         for (unsigned int i = 0; i < modelnames_.size(); i++) {
00297                 for (unsigned int j = 0; j < poses[i].size(); j++) {
00298 
00299                         Eigen::Affine3f eigen_pose = poses[i][j].transformation;
00300 
00301                         Eigen::Quaternionf rot(eigen_pose.rotation());
00302                         Eigen::Vector3f origin(eigen_pose.translation());
00303 
00304                         graspable.pose.pose.position.x = origin[0];
00305                         graspable.pose.pose.position.y = origin[1];
00306                         graspable.pose.pose.position.z = origin[2];
00307 
00308                         graspable.pose.pose.orientation.w = rot.w();
00309                         graspable.pose.pose.orientation.x = rot.x();
00310                         graspable.pose.pose.orientation.y = rot.y();
00311                         graspable.pose.pose.orientation.z = rot.z();
00312 
00313                         graspable.reachable_left_arm = true;
00314                         graspable.reachable_right_arm = true;
00315                         graspable.name = modelnames_[i];
00316                         graspable.pose.header.frame_id = point_cloud_.header.frame_id;
00317                         graspable.pose.header.stamp = point_cloud_.header.stamp;
00318                         res.objects.push_back(graspable);
00319 
00320                         //publish a marker for each object
00321                         visualization_msgs::Marker arrow;
00322                         arrow.header.frame_id = point_cloud_.header.frame_id;
00323                         arrow.header.stamp = ros::Time::now();
00324                         arrow.ns = "tidyup";
00325                         arrow.action = visualization_msgs::Marker::ADD;
00326 
00327                         tf::Pose p;
00328                         tf::poseMsgToTF(graspable.pose.pose, p);
00329 
00330                         tf::Quaternion q;
00331                         q.setRPY(0, -M_PI / 2.0, 0);
00332                         p.setRotation(p.getRotation() * q);
00333                         arrow.pose = graspable.pose.pose;
00334                         tf::poseTFToMsg(p, arrow.pose);
00335                         arrow.color.r = 1;
00336                         arrow.color.g = 0;
00337                         arrow.color.b = 0;
00338                         arrow.color.a = 1;
00339                         arrow.id = counter;
00340                         arrow.scale.x = 0.3;
00341                         arrow.scale.y = 0.3;
00342                         arrow.scale.z = 0.3;
00343                         marker_pub_.publish(arrow);
00344                         counter++;
00345                 }
00346         }
00347 
00348         //visualize
00349         if (visualize_result_) {
00350                 visualize(*viewer_, pose_estimates_per_model, point_clouds_in_viewer);
00351                 viewer_->spinOnce(100);
00352         }
00353 
00354         ROS_INFO("Returning service call");
00355         return true;
00356 }
00357 
00358 int main(int argc, char **argv) {
00359         ros::init(argc, argv, "locate_objects_server");
00360         ros::NodeHandle n;
00361 
00362         ObjectRecognitionServer locator(n);
00363 
00364         if (locator.visualize_result_) {
00365                 while (ros::ok() && !locator.viewer_->wasStopped()) {
00366                         locator.viewer_->spinOnce(100);
00367                         ros::spinOnce();
00368                 }
00369         } else {
00370                 ros::spin();
00371         }
00372 
00373         return 0;
00374 }
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Properties Friends Defines


narf_recognition
Author(s): Juergen Hess
autogenerated on Wed Dec 26 2012 16:37:09