ShapeReconstructionNode.cpp
#include "shape_reconstruction/ShapeReconstructionNode.h"

#include <pcl/conversions.h>
#include <pcl/io/pcd_io.h>
#include <pcl/filters/filter_indices.h>
#include <sensor_msgs/Image.h>

#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/tracking.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <pcl_conversions/pcl_conversions.h>

#include <ros/package.h>

#include <cmath>

#include <ros/console.h>

#include "shape_reconstruction/RangeImagePlanar.hpp"

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/filters/passthrough.h>

#include <boost/filesystem.hpp>
#include <ctime>

#include <std_msgs/Bool.h>

using namespace omip;

ShapeReconstructionNode::ShapeReconstructionNode():
    _active(true),
    _frame_ctr(0),
    _max_rb(100),
    _detect_static_environment(false),
    _record_result_bag(false),
    _result_bag_open(false),
    _manually_advance_after_processing(false),
    _sv_estimated(false),
    _refine_shapes_in_each_frame(false),
    _processing_interval(0.0),
    _estimate_sv(false)
{
    this->_rbshapes_combined_publisher = this->_node_handle.advertise<sensor_msgs::PointCloud2>("rbshapes_comb",10);
    this->_rgbd_pc_subscriber.subscribe(this->_node_handle, "/camera/depth_registered/points",1);

    this->_clustered_feats_subscriber.subscribe(this->_node_handle, "/rb_tracker/clustered_tracked_feats", 1);

    this->_node_quit_subscriber = this->_node_handle.subscribe("/omip/shutdown", 1,
                                                               &ShapeReconstructionNode::TrackerQuitCallback, this);

    this->_poses_and_vels_subscriber.subscribe(this->_node_handle, "/rb_tracker/state",1);

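    // Synchronize the three input streams (RGB-D point cloud, rigid-body poses/velocities and
    // clustered tracked features) so that they are delivered together in a single callback.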
    this->_synchronizer_with_clustered_feats =
            new message_filters::Synchronizer<SRSyncPolicyWithClusteredFeatures>(SRSyncPolicyWithClusteredFeatures(10),
                                                                                 this->_rgbd_pc_subscriber,
                                                                                 this->_poses_and_vels_subscriber,
                                                                                 this->_clustered_feats_subscriber);
    this->_synchronizer_with_clustered_feats->registerCallback(boost::bind(&ShapeReconstructionNode::measurementCallbackWithClusteredFeatures, this, _1, _2, _3));

    this->_previous_measurement_time = ros::Time(0.0);

    this->_mesh_generator_srv = this->_node_handle.advertiseService("/shape_reconstruction/mesh_generator_srv", &ShapeReconstructionNode::generateMeshes, this);
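    // The meshes can be requested at runtime through this service, e.g. (assuming the
    // generate_meshes request has no required fields):
    //   rosservice call /shape_reconstruction/mesh_generator_srv "{}"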

    this->_node_handle.getParam("/shape_reconstruction/max_rb", _max_rb);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","Maximal number of rigid bodies tracked: " << _max_rb);

    this->_node_handle.getParam("/shape_reconstruction/detect_static_environment", this->_detect_static_environment);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","detect_static_environment = " << this->_detect_static_environment);

    this->_node_handle.getParam("/shape_reconstruction/publish_shapes_in_each_frame", this->_publish_shapes_in_each_frame);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","publish_shapes_in_each_frame = " << this->_publish_shapes_in_each_frame);

    this->_node_handle.getParam("/shape_reconstruction/refine_shapes_in_each_frame", this->_refine_shapes_in_each_frame);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","refine_shapes_in_each_frame = " << this->_refine_shapes_in_each_frame);

    this->_node_handle.getParam("/shape_reconstruction/manually_advance_after_processing", this->_manually_advance_after_processing);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","manually_advance_after_processing = " << this->_manually_advance_after_processing);

    this->_node_handle.getParam("/shape_reconstruction/processing_interval", this->_processing_interval);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","processing_interval = " << this->_processing_interval);

    this->_node_handle.getParam("/shape_reconstruction/estimate_sv", this->_estimate_sv);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","_estimate_sv = " << this->_estimate_sv);


    this->_current_pc.reset(new SRPointCloud());
    this->_current_pc_down.reset(new SRPointCloud());
    this->_current_pc_without_nans.reset(new SRPointCloud());

    this->_clustered_feats.reset(new pcl::PointCloud<pcl::PointXYZL>);

    this->_static_environment_ext_d_and_c_pub = this->_node_handle.advertise<sensor_msgs::PointCloud2>("/shape_recons/shape_static_environment", 1);

    this->_shape_models_ext_d_and_c_pub = this->_node_handle.advertise<omip_msgs::ShapeModels>("/shape_recons/state", 1);

    this->_advance_feature_tracker_pub = this->_node_handle.advertise<std_msgs::Bool>("segmentation_info_msg", 1);
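    // Publishing an (empty) Bool on this topic tells the feature tracker that this node has
    // finished processing the current frame and that it can advance to the next one
    // (see _AdvanceFeatureTracker below).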

    this->_se_polygon_data_ptr = vtkSmartPointer<vtkPolyData>::New ();
    this->_se_polygon_writer_ptr = vtkSmartPointer<vtkSTLWriter>::New ();
    this->_se_polygon_writer_ptr->SetFileTypeToBinary();
    this->_se_normal_estimator_ptr.reset(new pcl::NormalEstimationOMP<omip::SRPoint, pcl::Normal>());
    this->_se_normal_estimator_ptr->setKSearch (50);
    this->_se_estimated_normals_pc_ptr.reset(new pcl::PointCloud<pcl::Normal>());
    this->_se_tree_for_normal_estimation_ptr.reset(new pcl::search::KdTree<omip::SRPoint>());
    this->_se_position_color_and_normals_pc_ptr.reset(new pcl::PointCloud<pcl::PointXYZRGBNormal>());
    this->_se_tree_for_triangulation_ptr.reset(new pcl::search::KdTree<pcl::PointXYZRGBNormal>());
    this->_se_greedy_projection_triangulator_ptr.reset(new pcl::GreedyProjectionTriangulation<pcl::PointXYZRGBNormal>());
    // Set typical values for the parameters
    this->_se_greedy_projection_triangulator_ptr->setMu (2.5);
    this->_se_greedy_projection_triangulator_ptr->setMaximumNearestNeighbors (100);
    this->_se_greedy_projection_triangulator_ptr->setMaximumSurfaceAngle(M_PI); // 180 degrees
    this->_se_greedy_projection_triangulator_ptr->setMinimumAngle(M_PI/36); // 5 degrees
    this->_se_greedy_projection_triangulator_ptr->setMaximumAngle(M_PI/2); // 90 degrees
    this->_se_greedy_projection_triangulator_ptr->setNormalConsistency(true);
    this->_se_greedy_projection_triangulator_ptr->setConsistentVertexOrdering(true);
    // Set the maximum distance between connected points (maximum edge length)
    this->_se_greedy_projection_triangulator_ptr->setSearchRadius (0.3);
    this->_se_triangulated_mesh_ptr.reset(new pcl::PolygonMesh());

    this->_node_handle.getParam("/shape_reconstruction/record_result_bag", _record_result_bag);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","record_result_bag = " << _record_result_bag);

    if (_record_result_bag) {
        if (!this->_node_handle.getParam("/shape_reconstruction/result_bag_path", _result_bag_path)) {
            _result_bag_path = "/tmp";
        }
        ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","storing result bag in = " << _result_bag_path);
    }

    this->_voxel_resolution = 0.008f;
    this->_seed_resolution = 0.2f;
    this->_color_importance = 0.4f;
    this->_spatial_importance = 0.4f;
    this->_normal_importance = 0.8f;
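    // Default supervoxel clustering parameters; they are overridden by the
    // /shape_reconstruction/sv_* ROS parameters read below if those are set.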


    this->_node_handle.getParam(std::string("/shape_reconstruction/sv_voxel_resolution"), this->_voxel_resolution);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","sv_voxel_resolution = " << this->_voxel_resolution);

    this->_node_handle.getParam(std::string("/shape_reconstruction/sv_seed_resolution"), this->_seed_resolution);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","sv_seed_resolution = " << this->_seed_resolution);

    this->_node_handle.getParam(std::string("/shape_reconstruction/sv_color_importance"), this->_color_importance);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","sv_color_importance = " << this->_color_importance);

    this->_node_handle.getParam(std::string("/shape_reconstruction/sv_spatial_importance"), this->_spatial_importance);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","sv_spatial_importance = " << this->_spatial_importance);

    this->_node_handle.getParam(std::string("/shape_reconstruction/sv_normal_importance"), this->_normal_importance);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","sv_normal_importance = " << this->_normal_importance);

    this->_node_handle.getParam(std::string("/shape_reconstruction/sv_visualization"), this->_visualization_sv);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode","sv_visualization = " << this->_visualization_sv);

    std::string ci_topic;
    this->getROSParameter<std::string>(std::string("/feature_tracker/camera_info_topic"),ci_topic);
    this->_ci_sub = this->_node_handle.subscribe(ci_topic, 1,
                                                 &ShapeReconstructionNode::CameraInfoCallback, this);
}

ShapeReconstructionNode::~ShapeReconstructionNode()
{
    if(_result_bag_open) {
        this->_result_bag.close();
        this->_video_bag.close();
    }
}

void ShapeReconstructionNode::CameraInfoCallback(const sensor_msgs::CameraInfoConstPtr &ci_msg)
{
    // Sometimes the first camera_info message that arrives is all zeros, so we wait for a valid one
    if(ci_msg->height != 0)
    {
        this->_ci = sensor_msgs::CameraInfo(*ci_msg);
        this->_ci_sub.shutdown();
        for(std::map<omip::RB_id_t,omip::ShapeReconstructionPtr >::iterator it = this->_rb_shapes.begin(); it!=this->_rb_shapes.end(); it++)
        {
            it->second->setCameraInfo(this->_ci);
        }
    }
}

omip::ShapeReconstructionPtr ShapeReconstructionNode::_createNewShapeReconstruction()
{
    // We create a new ShapeReconstruction object for this RB and we add it to the map
    ROS_WARN_STREAM_NAMED("ShapeReconstructionNode::measurementCallback","Create a new ShapeReconstructor for the newly detected RB.");

    bool to_initial=false;
    this->_node_handle.getParam("/shape_reconstruction/to_initial", to_initial);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","to_initial = " << to_initial);

    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","detect_static_environment = " << this->_detect_static_environment);

    double color_changes_erode_size = 0.0;
    this->_node_handle.getParam("/shape_reconstruction/color_changes_erode_size", color_changes_erode_size);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","color_changes_erode_size = " << color_changes_erode_size);

    bool depth_filling=false;
    this->_node_handle.getParam("/shape_reconstruction/depth_filling", depth_filling);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","depth_filling = " << depth_filling);

    bool remove_inconsistent_points=false;
    this->_node_handle.getParam("/shape_reconstruction/remove_inconsistent_points", remove_inconsistent_points);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","remove_inconsistent_points = " << remove_inconsistent_points);

    bool accumulate_change_candidates=false;
    this->_node_handle.getParam("/shape_reconstruction/accumulate_change_candidates", accumulate_change_candidates);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","accumulate_change_candidates = " << accumulate_change_candidates);

    double min_depth_change;
    this->_node_handle.getParam("/shape_reconstruction/min_depth_change", min_depth_change);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","min_depth_change = " << min_depth_change);

    int min_color_change;
    this->_node_handle.getParam("/shape_reconstruction/min_color_change", min_color_change);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","min_color_change = " << min_color_change);

    double knn_min_radius;
    this->_node_handle.getParam("/shape_reconstruction/knn_min_radius", knn_min_radius);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","knn_min_radius = " << knn_min_radius);

    int min_num_model_pixels_in_sv;
    this->_node_handle.getParam("/shape_reconstruction/sv_min_num_model_pixels", min_num_model_pixels_in_sv);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","sv_min_num_model_pixels = " << min_num_model_pixels_in_sv);

    bool record_videos;
    this->_node_handle.getParam("/shape_reconstruction/record_videos", record_videos);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","record_videos = " << record_videos);

    bool extend_model_to_neighbor_sv;
    this->_node_handle.getParam("/shape_reconstruction/extend_model_to_neighbor_sv", extend_model_to_neighbor_sv);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","extend_model_to_neighbor_sv = " << extend_model_to_neighbor_sv);

    double similarity_in_h;
    this->_node_handle.getParam("/shape_reconstruction/similarity_in_h", similarity_in_h);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","similarity_in_h = " << similarity_in_h);

    double similarity_in_normal;
    this->_node_handle.getParam("/shape_reconstruction/similarity_in_normal", similarity_in_normal);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","similarity_in_normal = " << similarity_in_normal);

    bool use_clustered_feats;
    this->_node_handle.getParam("/shape_reconstruction/use_clustered_feats", use_clustered_feats);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","use_clustered_feats = " << use_clustered_feats);

    bool data_from_bag;
    this->_node_handle.getParam("/feature_tracker/data_from_bag", data_from_bag);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","data_from_bag = " << data_from_bag);

    double avg_leaf_size;
    this->_node_handle.getParam("/shape_reconstruction/avg_leaf_size", avg_leaf_size);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","avg_leaf_size = " << avg_leaf_size);

    double ror_radius_search;
    this->_node_handle.getParam("/shape_reconstruction/ror_radius_search", ror_radius_search);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","ror_radius_search = " << ror_radius_search);

    int ror_min_neighbors;
    this->_node_handle.getParam("/shape_reconstruction/ror_min_neighbors", ror_min_neighbors);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","ror_min_neighbors = " << ror_min_neighbors);

    // TODO external parameters for depth_filling and detect_changes_in_color
    omip::ShapeReconstructionPtr shape_ptr = omip::ShapeReconstructionPtr(new omip::ShapeReconstruction());

    shape_ptr->setEstimateSV(_estimate_sv);
    shape_ptr->setSuperVoxelizerPtr(this->_supervoxelizer);
    shape_ptr->setSuperVoxelClustersPtr(this->_supervoxel_clusters);
    shape_ptr->setDetectStaticEnvironment(this->_detect_static_environment);
    shape_ptr->setAccumulateChangeCandidates(accumulate_change_candidates);
    shape_ptr->setMinDepthChange(min_depth_change);
    shape_ptr->setMinColorChange(min_color_change);
    shape_ptr->setRemoveInconsistentPoints(remove_inconsistent_points);
    shape_ptr->setKNNMinRadius(knn_min_radius);
    shape_ptr->setSVMinNumberModelPixels(min_num_model_pixels_in_sv);
    shape_ptr->setExtendToNeighborSV(extend_model_to_neighbor_sv);
    shape_ptr->setSimilarityInH(similarity_in_h);
    shape_ptr->setSimilarityInNormal(similarity_in_normal);
    shape_ptr->setCameraInfo(this->_ci);
    //shape_ptr->setSupportingFeatures(features_of_this_rb);
    shape_ptr->useClusteredFeatures(use_clustered_feats);
    shape_ptr->setDepthFilling(depth_filling);
    shape_ptr->setColorChangesErodeSize(color_changes_erode_size);
    shape_ptr->setToInitial(to_initial);
    shape_ptr->setRecordVideos(record_videos);
    shape_ptr->setLiveStream(!data_from_bag);
    shape_ptr->setApproxVoxelGridLeafSize(avg_leaf_size);
    shape_ptr->setRadiusOutRemovalSearch(ror_radius_search);
    shape_ptr->setRadiusOutRemovalMinNeighbors(ror_min_neighbors);

    return shape_ptr;
}

void ShapeReconstructionNode::measurementCallbackWithClusteredFeatures(const sensor_msgs::PointCloud2ConstPtr &pc_msg,
                                                                       const boost::shared_ptr<omip::rbt_state_t const> &poses_and_vels,
                                                                       const sensor_msgs::PointCloud2ConstPtr &features_pc)
{
    ros::Time ta = ros::Time::now();

    this->_current_pc_down->points.clear();
    this->_current_pc_down->height = IMG_HEIGHT;
    this->_current_pc_down->width = IMG_WIDTH;
    this->_current_pc_down->is_dense = true;


    this->_sv_estimated = false;
    _InitResultRosbag();

    // Extract the time of the incoming message and estimate the elapsed time since the last message was processed
    _current_measurement_time = pc_msg->header.stamp;
    ros::Duration time_difference = _current_measurement_time - this->_previous_measurement_time;

    if(this->_previous_measurement_time.toSec() != 0.0 )
    {
        ROS_ERROR_STREAM("Time between segmentations: " << time_difference.toSec() << " s");
    }

    // Convert ROS PC message into a pcl point cloud
    pcl::fromROSMsg(*pc_msg, *this->_current_pc);

#ifdef DOWNSAMPLING
    // Downsample the input pcl point cloud
    for(int u=0; u<this->_current_pc->width; u+=2)
    {
        for(int v=0; v<this->_current_pc->height; v+=2)
        {
            this->_current_pc_down->points.push_back(this->_current_pc->at(u, v));
        }
    }
#else
    this->_current_pc_down = this->_current_pc;
#endif

    // Convert clustered features message into a pcl point cloud
    pcl::fromROSMsg(*features_pc, *this->_clustered_feats);

    // Clean the input pcl point cloud of nans
    pcl::removeNaNFromPointCloud<SRPoint>(*this->_current_pc_down,*this->_current_pc_without_nans, this->_not_nan_indices);
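    // _not_nan_indices stores, for each point of the NaN-free cloud, its index in the input
    // cloud, so filtered results can be mapped back to the full cloud later on.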

    // Iterate over the received rb poses and check if a ShapeReconstructor for each tracked RB already exists
    // If not, create a new ShapeReconstructor and pass the current point cloud as initial point cloud
    // NOTE: This is done no matter how much time passed since the last processed message
    omip::RB_id_t rb_id_temp;
    for(int rb_poses_idx = 1; rb_poses_idx < poses_and_vels->rb_poses_and_vels.size(); rb_poses_idx++)
    {
        rb_id_temp = poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).rb_id;

        if(this->_rb_shapes.find(rb_id_temp) == this->_rb_shapes.end())
        {// If this RB (identified by its id) was not previously tracked, we create a new ShapeReconstruction object for it
            // Unless we reached the max number of rb to reconstruct
            if (_rb_shapes.size() >= _max_rb)
            {
                ROS_WARN_ONCE_NAMED("ShapeReconstructionNode.measurementCallback","Maximum limit of RBs for ShapeReconstructor reached");
                continue;
            }

            omip::ShapeReconstructionPtr shape_ptr = this->_createNewShapeReconstruction();

            shape_ptr->setRBId(rb_id_temp);
            shape_ptr->setInitialFullRGBDPCAndRBT(this->_current_pc_down,
                                                  poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).pose_wc);
            shape_ptr->initialize();

            this->_rb_shapes[rb_id_temp] = shape_ptr;
        }
        else
        {// If this RB (identified by its id) was previously tracked, we already have a ShapeReconstruction object for it
            if(_refine_shapes_in_each_frame)
            {
                this->_rb_shapes[rb_id_temp]->RemoveInconsistentPoints(this->_current_pc_down,
                                                                       poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).pose_wc);
            }

            if(_publish_shapes_in_each_frame)
            {
                this->_rb_shapes[rb_id_temp]->PublishMovedModelAndSegment(_current_measurement_time,
                                                                          poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).pose_wc,
                                                                          this->_video_bag,
                                                                          this->_result_bag_open);
            }
        }

        // Extract all the features that support the rigid body
        pcl::PassThrough<pcl::PointXYZL>::Ptr pt_filter(new pcl::PassThrough<pcl::PointXYZL>());
        pt_filter->setInputCloud(this->_clustered_feats);
        pt_filter->setFilterFieldName("label");
        pcl::PointCloud<pcl::PointXYZL>::Ptr features_of_this_rb(new pcl::PointCloud<pcl::PointXYZL>);

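        // The RB id is stored in the uint32 "label" field of the clustered-feature cloud, while
        // pcl::PassThrough expects float limits; copying the id's raw bit pattern into the float
        // limits makes the filter pass exactly the points labeled with this RB id.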
        uint32_t lower_bound = rb_id_temp;
        float lower_bound_float = 0;
        memcpy (&lower_bound_float, &lower_bound, sizeof (float));
        uint32_t upper_bound = rb_id_temp;
        float upper_bound_float = 0;
        memcpy (&upper_bound_float, &upper_bound, sizeof (float));
        pt_filter->setFilterLimits(lower_bound_float, upper_bound_float);
        //std::vector<int> passing_indices;
        pcl::IndicesPtr passing_indices(new std::vector<int>);
        pt_filter->filter(*passing_indices);
        ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.measurementCallbackWithClusteredFeatures",
                              "[RB" << rb_id_temp << "]: " << passing_indices->size() << " of " << this->_clustered_feats->points.size() << " features support this RB.");

        // Extract the features from the clustered features point cloud
        this->_extractor_cf.setInputCloud(this->_clustered_feats);
        this->_extractor_cf.setIndices(passing_indices);
        this->_extractor_cf.filter(*features_of_this_rb);

        // Set the supporting features of this rigid body
        this->_rb_shapes[rb_id_temp]->setSupportingFeatures(features_of_this_rb);
    }

    // Check if the elapsed time since the last processed message is over the processing time interval
    // If not, return and ignore the measurement
    // If yes, process this measurement
    if(time_difference.toSec() < this->_processing_interval && this->_previous_measurement_time.toSec() != 0.0)
    {
        ROS_INFO_STREAM_NAMED("ShapeReconstructionNode::measurementCallback","Not processing this measurement.");
        _AdvanceFeatureTracker();
        return;
    }else{
        ROS_ERROR_STREAM_NAMED("ShapeReconstructionNode::measurementCallback","Processing this measurement.");
    }

    ros::Time te = ros::Time::now();

    // write current point cloud, depth and rgb image
    if (_result_bag_open) {
        this->_result_bag.write(_rgbd_pc_subscriber.getTopic(), _current_measurement_time, *pc_msg);

        // Create a depth map from the current organized depth map
        cv::Mat depth_image_raw(this->_current_pc->height, this->_current_pc->width, CV_32FC1);
        OrganizedPC2DepthMap(this->_current_pc, depth_image_raw);
        //        cv::Mat depth_image(depth_image_raw.rows, depth_image_raw.cols, CV_8UC1);
        //        cv::convertScaleAbs(depth_image_raw, depth_image, 100, 0);
        //        sensor_msgs::ImagePtr depth_msg = cv_bridge::CvImage(std_msgs::Header(), "mono8", depth_image).toImageMsg();
        sensor_msgs::ImagePtr depth_msg;
        DepthImage2CvImage(depth_image_raw, depth_msg);

        this->_result_bag.write("/camera/depth_registered/image_rect", _current_measurement_time, *depth_msg);

        cv::Mat color_image(this->_current_pc->height, this->_current_pc->width, CV_8UC3);
        OrganizedPC2ColorMap(this->_current_pc, color_image);
        sensor_msgs::ImagePtr color_msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", color_image).toImageMsg();

        this->_result_bag.write("/camera/rgb/image_rect_color", _current_measurement_time, *color_msg);

    }

    ros::Time te2;
    ros::Time te3;
    // If we process the measurement we pass it to each ShapeReconstructor
    // We build a temporary map so that RBs that are no longer tracked get dropped
    std::map<omip::RB_id_t,omip::ShapeReconstructionPtr > rb_shapes_updated;
    for(int rb_poses_idx = 1; rb_poses_idx < poses_and_vels->rb_poses_and_vels.size(); rb_poses_idx++)
    {
        rb_id_temp = poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).rb_id;

        // If this RB (identified by its id) was previously tracked, we already have a ShapeReconstruction object for it
        if(this->_rb_shapes.find(rb_id_temp) != this->_rb_shapes.end())
        {
            rb_shapes_updated[rb_id_temp] = this->_rb_shapes[rb_id_temp];

            if(!this->_sv_estimated && _estimate_sv)
            {
                te2= ros::Time::now();
                this->_EstimateSupervoxels();
                te3= ros::Time::now();
            }

            rb_shapes_updated[rb_id_temp]->setFullRGBDPCandRBT(this->_current_pc_down,
                                                               poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).pose_wc);
            rb_shapes_updated[rb_id_temp]->PublishMovedModelAndSegment(_current_measurement_time,
                                                                       poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).pose_wc,
                                                                       this->_video_bag,
                                                                       this->_result_bag_open);
        }else{
            ROS_WARN_STREAM_NAMED("ShapeReconstructionNode.measurementCallback","There is no ShapeReconstructor associated to this RB id - it was probably first detected after the max_rb limit was reached");
        }
    }
    // Swap the maps (in this way, the entries of the map that were not updated because the RB was lost, get deleted)
    this->_rb_shapes = rb_shapes_updated;

    ros::Time tf = ros::Time::now();

    this->_CollectAndPublishShapeModels();

    // Write the results into bag
    if (_result_bag_open) {
        int rb_poses_idx=1;
        BOOST_FOREACH(shape_reconstructors_map_t::value_type rb_shape_it, this->_rb_shapes)
        {
            rb_shape_it.second->PublishMovedModelAndSegment(_current_measurement_time, poses_and_vels->rb_poses_and_vels.at(rb_poses_idx).pose_wc, _result_bag, _result_bag_open);
            rb_poses_idx++;
        }
    }

    // We have processed the measurement; if the flag is set, wait for the user before advancing the feature tracker
    if (this->_manually_advance_after_processing) {
        std::cout << " Hit enter to advance feature tracker" << std::endl;
        getchar();
    }

    _AdvanceFeatureTracker();

    ros::Time th = ros::Time::now();

//    std::cout << "SV estimation: " << (te3-te2).toSec() << std::endl;
//    std::cout << "Process RGBD frame and publish results (including SV estimation): " << (tf-te).toSec() << std::endl;
//    std::cout << "Total: " << (th-ta).toSec() << std::endl;

    // Update previous time
    this->_previous_measurement_time = _current_measurement_time;
}

void ShapeReconstructionNode::ReadRosBag()
{
}

void ShapeReconstructionNode::_CollectAndPublishShapeModels()
{
    omip_msgs::ShapeModelsPtr shape_models(new omip_msgs::ShapeModels());
    shape_models->header.stamp = this->_current_measurement_time;

    static ros::Publisher* current_pc_repub = NULL;
    if (current_pc_repub == NULL) {
        current_pc_repub = new ros::Publisher;
    }
    *current_pc_repub = this->_node_handle.advertise<sensor_msgs::PointCloud2>("/camera/depth_registered/points", 1);
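    // This publisher re-advertises the input point cloud topic; it is only used below, when
    // manually_advance_after_processing is set, to republish the current cloud and keep
    // external ground-truth tools in sync with the manually paced processing.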

    // First we publish each shape moved with the latest motion
    BOOST_FOREACH(shape_reconstructors_map_t::value_type rb_shape_it, this->_rb_shapes)
    {
        rb_shape_it.second->getShapeModel(shape_models);

        if (_manually_advance_after_processing) {
            std::cout << "Republishing depth registered points (in order to sync ground truth) [to switch off set manually_advance_after_processing=false]" << std::endl;

            SRPointCloud::Ptr current_pc = rb_shape_it.second->getCurrentPointCloud();
            sensor_msgs::PointCloud2 current_pc_msg;
            pcl::toROSMsg(*current_pc, current_pc_msg);
            current_pc_repub->publish(current_pc_msg);
        }
    }

    _shape_models_ext_d_and_c_pub.publish(*shape_models);

    // Then we query all the shapes in the current frame (transformed using the latest motion) and subtract them from the current point cloud to obtain the static environment
    if(this->_detect_static_environment)
    {

        pcl::PointIndices::Ptr indices_of_moving_bodies_ptr = pcl::PointIndices::Ptr(new pcl::PointIndices());
        ROS_INFO_STREAM_NAMED("ShapeReconstructionNode._PublishShapes",
                              "Subtracting reconstructed shapes (obtained from the extension of both models to supervoxels) from current point cloud to estimate the static environment.");

        this->_set_knn_ptr.reset(new pcl::search::KdTree<omip::SRPoint>());
        this->_set_knn_ptr->setInputCloud(this->_current_pc_without_nans);

        indices_of_moving_bodies_ptr.reset(new pcl::PointIndices());
        BOOST_FOREACH(shape_reconstructors_map_t::value_type rb_shape_it, this->_rb_shapes)
        {
            std::vector<int> empty;
            std::vector<std::vector<int> > indices_of_moving_body;
            std::vector<std::vector<float> > distances_of_moving_body;
            SRPointCloud::Ptr rb_shape_ext_moved_ptr = rb_shape_it.second->getMovedRigidBodyShapeExtDandC();
            this->_set_knn_ptr->radiusSearch(*rb_shape_ext_moved_ptr, empty, 0.02, indices_of_moving_body, distances_of_moving_body, 0);

            for(int idx = 0; idx < indices_of_moving_body.size(); idx++)
            {
                if(indices_of_moving_body.at(idx).size())
                {
                    for(int i = 0; i<indices_of_moving_body.at(idx).size(); i++)
                    {
                        indices_of_moving_bodies_ptr->indices.push_back(indices_of_moving_body.at(idx).at(i));
                    }
                }
            }
        }
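        // The neighbor indices returned per query point can overlap between rigid bodies,
        // so deduplicate them through a std::set before splitting the cloud.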
        std::set<int> tmp_d_and_c(indices_of_moving_bodies_ptr->indices.begin(), indices_of_moving_bodies_ptr->indices.end());
        indices_of_moving_bodies_ptr->indices.clear();
        std::copy(tmp_d_and_c.begin(), tmp_d_and_c.end(), std::back_inserter(indices_of_moving_bodies_ptr->indices));

        pcl::ExtractIndices<SRPoint> extract_d_and_c;
        this->_static_env_ext_d_and_c_in_current_frame = omip::SRPointCloud::Ptr(new omip::SRPointCloud());
        // Remove the points that belong to the moving bodies and keep the rest
        extract_d_and_c.setInputCloud (this->_current_pc_without_nans);
        extract_d_and_c.setIndices (indices_of_moving_bodies_ptr);
        extract_d_and_c.setNegative (true);
        extract_d_and_c.filter (*this->_static_env_ext_d_and_c_in_current_frame);

        ROS_INFO_STREAM_NAMED("ShapeReconstructionNode._PublishShapes",
                              "The static environment once subtracted the reconstructed shapes (obtained from the extension of both models to supervoxels) contains "
                              << this->_static_env_ext_d_and_c_in_current_frame->points.size()
                              << " points.");

        sensor_msgs::PointCloud2 static_env_ext_d_and_c_in_current_frame_ros;
        pcl::toROSMsg(*this->_static_env_ext_d_and_c_in_current_frame, static_env_ext_d_and_c_in_current_frame_ros);

        this->_static_environment_ext_d_and_c_pub.publish(static_env_ext_d_and_c_in_current_frame_ros);
    }
}

typedef pcl::PointXYZRGB PointT;
typedef pcl::PointCloud<PointT> PointCloudT;
typedef pcl::PointXYZRGBA PointT2;
typedef pcl::PointCloud<PointT2> PointCloudT2;
typedef pcl::PointNormal PointNT;
typedef pcl::PointCloud<PointNT> PointNCloudT;
typedef pcl::PointXYZL PointLT;
typedef pcl::PointCloud<PointLT> PointLCloudT;

void ShapeReconstructionNode::_EstimateSupervoxels()
{
    this->_sv_estimated = true;

    this->_supervoxelizer.reset(new pcl::SupervoxelClustering<SRPoint>(this->_voxel_resolution,this->_seed_resolution, true));
    this->_supervoxelizer->setColorImportance (this->_color_importance);
    this->_supervoxelizer->setSpatialImportance (this->_spatial_importance);
    this->_supervoxelizer->setNormalImportance (this->_normal_importance);
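    // The three importance weights balance how much color similarity, spatial distance and
    // normal agreement contribute to the supervoxel clustering distance measure.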

    this->_supervoxelizer->setInputCloud (this->_current_pc_down);

    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode._SegmentPointCloud()", "Estimating supervoxels");
    this->_supervoxelizer->extract (this->_supervoxel_clusters);
    ROS_INFO_STREAM_NAMED("ShapeReconstructionNode._SegmentPointCloud()", "Found " << this->_supervoxel_clusters.size () << " supervoxels.");

    if(this->_visualization_sv)
    {
        boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("3D Viewer"));
        viewer->setBackgroundColor (0, 0, 0);

//        PointCloudT::Ptr voxel_centroid_cloud = this->_supervoxelizer->getVoxelCentroidCloud ();
//        viewer->addPointCloud (voxel_centroid_cloud, "voxel centroids");
//        viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,2.0, "voxel centroids");
//        viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY,0.95, "voxel centroids");

        PointCloudT2::Ptr colored_voxel_cloud = this->_supervoxelizer->getColoredVoxelCloud ();
        viewer->addPointCloud (colored_voxel_cloud, "colored voxels");
        viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY,0.8, "colored voxels");

        PointNCloudT::Ptr sv_normal_cloud = this->_supervoxelizer->makeSupervoxelNormalCloud (this->_supervoxel_clusters);
        // Overlay the estimated supervoxel normals on the colored voxel cloud
        viewer->addPointCloudNormals<PointNT> (sv_normal_cloud,1,0.05f, "supervoxel_normals");


        while (!viewer->wasStopped ())
        {
            viewer->spinOnce (100);
        }
    }
}

bool ShapeReconstructionNode::generateMeshes(shape_reconstruction::generate_meshes::Request& request, shape_reconstruction::generate_meshes::Response& response)
{
    // First we generate a triangular mesh for each shape and save them in stl files (done by each ShapeReconstruction object independently)
    BOOST_FOREACH(shape_reconstructors_map_t::value_type rb_shape_it, this->_rb_shapes)
    {
        rb_shape_it.second->generateMesh();
    }

    // Then we generate a mesh of the static environment
    if(this->_detect_static_environment)
    {
        ROS_INFO_STREAM_NAMED("ShapeReconstructionNode.generateMeshes",
                              "Called the generation of 3D triangular mesh from the model of the static environment (based on extension of both models to supervoxels).");
        std::string shape_ext_d_and_c_file_prefix("static_env_ext_d_and_c");
        this->_GenerateMesh(this->_static_env_ext_d_and_c_in_current_frame, shape_ext_d_and_c_file_prefix);
    }
    ROS_INFO_STREAM_NAMED("ShapeReconstruction.generateMesh", "Finished the generation of 3D triangular mesh from the point cloud of the static environment.");

    ROS_INFO_STREAM_NAMED("ShapeReconstruction.generateMesh", "Finished the generation of 3D triangular meshes");

    return true;
}

void ShapeReconstructionNode::_GenerateMesh(const omip::SRPointCloud::Ptr& pc_source, std::string shape_file_prefix)
{
    if(pc_source->points.size())
    {
        // Normal estimation*
        this->_se_tree_for_normal_estimation_ptr->setInputCloud(pc_source);
        this->_se_normal_estimator_ptr->setInputCloud(pc_source);
        this->_se_normal_estimator_ptr->setSearchMethod (this->_se_tree_for_normal_estimation_ptr);
        this->_se_normal_estimator_ptr->compute(*this->_se_estimated_normals_pc_ptr);

        // Concatenate the XYZ and normal fields*
        pcl::concatenateFields (*pc_source, *this->_se_estimated_normals_pc_ptr, *this->_se_position_color_and_normals_pc_ptr);
        this->_se_tree_for_triangulation_ptr->setInputCloud (this->_se_position_color_and_normals_pc_ptr);

        // Get result
        this->_se_greedy_projection_triangulator_ptr->setInputCloud(this->_se_position_color_and_normals_pc_ptr);
        this->_se_greedy_projection_triangulator_ptr->setSearchMethod(this->_se_tree_for_triangulation_ptr);
        this->_se_greedy_projection_triangulator_ptr->reconstruct(*this->_se_triangulated_mesh_ptr);

        // Additional vertex information
        //std::vector<int> parts = gp3.getPartIDs();
        //std::vector<int> states = gp3.getPointStates();

        std::string mesh_path = ros::package::getPath("shape_reconstruction") + std::string("/meshes/");
        std::stringstream se_name_ss;
        se_name_ss << shape_file_prefix << ".stl";

        std::string se_mesh_full_file_name = mesh_path + se_name_ss.str();

        pcl::io::mesh2vtk(*this->_se_triangulated_mesh_ptr, this->_se_polygon_data_ptr);

        this->_se_polygon_writer_ptr->SetInput (this->_se_polygon_data_ptr);
        this->_se_polygon_writer_ptr->SetFileName (se_mesh_full_file_name.c_str ());
        this->_se_polygon_writer_ptr->Write ();

        ROS_INFO_STREAM_NAMED("ShapeReconstruction.generateMesh", "Resulting triangular mesh written to " << se_mesh_full_file_name);
    }else{
        ROS_WARN_STREAM_NAMED("ShapeReconstruction.generateMesh", "Impossible to generate a triangular mesh for this model, it doesn't contain any points!");
    }
}

void ShapeReconstructionNode::_InitResultRosbag() {
    if (!_record_result_bag)
        return;

    if (_result_bag_open)
        return;

    using namespace boost::filesystem;

    // prepare path
    path result_bag_out(_result_bag_path);
    if (!boost::filesystem::exists(result_bag_out)) {
        if (!boost::filesystem::create_directories(result_bag_out)) {
            ROS_ERROR_NAMED("ShapeReconstruction.InitResultRosbag", "Output directory for result bag does not exist and cannot be created!");
            return;
        }
    }

    // get input rosbag name
    std::string input_bag_name;
    if (!_node_handle.getParam("/feature_tracker/bag_file", input_bag_name)){
        ROS_ERROR_NAMED("ShapeReconstruction.InitResultRosbag", "Cannot infer name of input bag file from feature tracker");
        return;
    }
    path input_bag_path(input_bag_name);

    // construct output rosbag name
    std::stringstream result_bag_name;
    time_t t = time(0);   // get time now
    struct tm * now = localtime( & t );

    result_bag_name << input_bag_path.stem().string() << "_";
    result_bag_name << (now->tm_mon + 1) << "-" << now->tm_mday << "_" << now->tm_hour << "-" << now->tm_min << "-" << now->tm_sec;

    result_bag_out /= path(result_bag_name.str() + ".bag");

    ROS_INFO_STREAM_NAMED("ShapeReconstruction.InitResultRosbag", "Opening result rosbag " << result_bag_out.string());
    this->_result_bag.open(result_bag_out.string(), rosbag::bagmode::Write);

    this->_result_bag_open=true;

    // construct full video rosbag
    // construct output rosbag name
    std::stringstream video_bag_name;

    video_bag_name << input_bag_path.stem().string() << "_";
    video_bag_name << (now->tm_mon + 1) << "-" << now->tm_mday << "_" << now->tm_hour << "-" << now->tm_min << "-" << now->tm_sec;
    video_bag_name << "_VIDEO.bag";

    path video_bag_out(_result_bag_path);
    video_bag_out /= path(video_bag_name.str());

    this->_video_bag.open(video_bag_out.string(), rosbag::bagmode::Write);

    std::string cfg_src_path = ros::package::getPath("shape_reconstruction") + std::string("/cfg/shape_reconstruction_cfg.yaml");
    // copy shape reconstruction cfg
    path cfg_dest_path = _result_bag_path;
    cfg_dest_path /= result_bag_name.str() + ".yaml";

    std::ifstream ifs(cfg_src_path.c_str());
    std::ofstream ofs(cfg_dest_path.string().c_str());
    ofs << ifs.rdbuf();

}

void ShapeReconstructionNode::_AdvanceFeatureTracker() {
//    ros::Duration d(0.09);
//    d.sleep(); // sleep otherwise ROS might lose this message

    ROS_DEBUG_STREAM_NAMED("ShapeReconstruction._AdvanceFeatureTracker", "Advancing Feature Tracker");
    this->_advance_feature_tracker_pub.publish(std_msgs::Bool());
}

void ShapeReconstructionNode::TrackerQuitCallback(const std_msgs::EmptyConstPtr &empty)
{
    ROS_INFO_STREAM_NAMED("ShapeReconstruction.TrackerQuitCallback", "Shape Reconstruction received the quit signal and is stopping!");
    _active = false;
}

// Main program
int main(int argc, char** argv)
{
    ros::init(argc, argv, "ShapeReconstructionNode");
    ShapeReconstructionNode sr_node;

    ros::Rate r(10); // 10 hz
    while (ros::ok() && sr_node.isActive())
    {
        ros::spinOnce();
        r.sleep();
    }

    ROS_ERROR("Shutting down ShapeReconstructionNode" );

    return (0);
}

