#include <cstdio>
#include <fstream>
#include <ros/ros.h>
#include <pcl/point_cloud.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/feature.h>
#include <pcl_conversions/pcl_conversions.h>

#include <sensor_msgs/PointCloud2.h>

#include <Eigen/Eigen>

#include <image_transport/image_transport.h>
#include <image_transport/subscriber_filter.h>
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <tf/message_filter.h>
#include <tf/transform_broadcaster.h>
#include <tf/transform_listener.h>
#include <tf_conversions/tf_eigen.h>

#include <ndt_map/depth_camera.h>

#include <ndt_feature_reg/ndt_frame.h>
#include <ndt_feature_reg/ndt_frame_proc.h>
#include <ndt_feature_reg/ndt_frame_viewer.h>

#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <boost/shared_ptr.hpp>
#include <boost/thread/mutex.hpp>

#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/image_encodings.h>

using namespace lslgeneric;
using namespace ndt_feature_reg;

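// Builds a per-pixel lookup table (CV_64FC3) from an organized point cloud.
// The table is handed to the DepthCamera model when the node runs on
// intensity + points2 input.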
cv::Mat generateLookupTable(const pcl::PointCloud<pcl::PointXYZ> &pointcloud)
{
    cv::Mat lookup_table(cv::Size(pointcloud.width, pointcloud.height), CV_64FC3);
    if (pointcloud.is_dense)
    {
        // NOTE: the dense case is left unimplemented here; the table is
        // returned uninitialized.
    }
    else
    {
        // Clouds containing invalid (NaN/Inf) points are not supported.
        assert(false);
    }
    return lookup_table;
}

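// Copies every step-th point (in both image dimensions) of an organized cloud
// into a smaller cloud, used before publishing the registered point cloud.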
void subsamplePointCloud(const pcl::PointCloud<pcl::PointXYZ> &orig, pcl::PointCloud<pcl::PointXYZ> &sub, int step)
{
    for (uint32_t x = 0; x < orig.width; x += step)
    {
        for (uint32_t y = 0; y < orig.height; y += step)
        {
            sub.push_back(orig.at(x, y));
        }
    }
}

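// ROS node that incrementally registers consecutive RGB-D (or intensity +
// point cloud) frames using NDT feature registration and broadcasts the
// accumulated camera pose on tf.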
class NDTFeatureRegNode
{
protected:
    ros::NodeHandle nh_;

    message_filters::Subscriber<sensor_msgs::Image> rgb_sub_;
    message_filters::Subscriber<sensor_msgs::Image> depth_sub_;
    ros::Subscriber depth_param_sub_;
    message_filters::Subscriber<sensor_msgs::Image> intensity_sub_;
    message_filters::Subscriber<sensor_msgs::PointCloud2> points2_sub_;

    message_filters::Synchronizer<message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> > sync_;
    message_filters::Synchronizer<message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::PointCloud2> > sync2_;

    bool visualize_;

    tf::TransformBroadcaster tf_;
    tf::TransformListener tf_listener_;

    ros::Publisher pub_points2_;

    boost::mutex m_;
    lslgeneric::DepthCamera<pcl::PointXYZ> cameraparams_;
    bool camerasetup_;
    bool skip_matching_;
    bool estimate_di_;
    bool match_full_;
    bool match_no_association_;
    int support_size_;
    double max_var_;
    double current_res_;
    int max_nb_frames_;

    int camera_nb_;
    bool set_initial_pose_;
    bool publish_cloud_;
    int subsample_step_;
    std::string world_str;
    std::string gt_frame_;

    ros::Time prev_timestamp_;
    NDTFrameProc* proc;
    Eigen::Transform<double, 3, Eigen::Affine, Eigen::ColMajor> global_transform_;

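    // Initializes the DepthCamera model once from the first CameraInfo
    // message received on the "depth_param" topic.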
    void setupCamera(const sensor_msgs::CameraInfoConstPtr &camera_info)
    {
        m_.lock();
        if (!camerasetup_)
        {
            std::cout << "setting up camera params\n";
            double fx, fy, cx, cy, ds = 1;
            std::vector<double> dist(5);
            fx = camera_info->K[0];
            fy = camera_info->K[4];
            cx = camera_info->K[2];
            cy = camera_info->K[5];
            if (camera_info->D.size() == 5)
            {
                dist = camera_info->D;
            }
            else
            {
                dist = std::vector<double>(5, 0);
            }
            std::cout << "Params: " << fx << " " << fy << " " << cx << " " << cy << " " << dist.size() << std::endl;
            cameraparams_ = lslgeneric::DepthCamera<pcl::PointXYZ>(fx, fy, cx, cy, dist, ds, true);

            camerasetup_ = true;
        }
        m_.unlock();
    }

public:

    NDTFeatureRegNode(ros::NodeHandle param_nh) : sync_(2), sync2_(2)
    {
        bool xyzi_data;
        param_nh.param("xyzi_data", xyzi_data, false);

        if (!xyzi_data)
        {
            rgb_sub_.subscribe(nh_, "rgb", 2);
            depth_sub_.subscribe(nh_, "depth", 2);
            depth_param_sub_ = nh_.subscribe("depth_param", 1, &NDTFeatureRegNode::setupCamera, this);

            sync_.connectInput(rgb_sub_, depth_sub_);
            sync_.registerCallback(boost::bind(&NDTFeatureRegNode::rgbDepthCb, this, _1, _2));
        }
        else
        {
            intensity_sub_.subscribe(nh_, "intensity", 1);
            points2_sub_.subscribe(nh_, "points2", 2);
            sync2_.connectInput(intensity_sub_, points2_sub_);
            sync2_.registerCallback(boost::bind(&NDTFeatureRegNode::intensityPoints2Cb, this, _1, _2));
        }

        pub_points2_ = nh_.advertise<sensor_msgs::PointCloud2>("reg_points2", 15);

        param_nh.param("visualize", visualize_, false);

        param_nh.param("skip_matching", skip_matching_, false);
        param_nh.param("estimate_di", estimate_di_, false);
        param_nh.param("match_full", match_full_, false);
        param_nh.param("match_no_association", match_no_association_, false);
        param_nh.param("support_size", support_size_, 10);
        param_nh.param("max_var", max_var_, 0.3);
        param_nh.param("current_res", current_res_, 0.2);
        param_nh.param("max_nb_frames", max_nb_frames_, 10);

        param_nh.param("camera_nb", camera_nb_, 1);

        double max_inldist_xy, max_inldist_z;
        int nb_ransac;

        param_nh.param("max_inldist_xy", max_inldist_xy, 0.02);
        param_nh.param("max_inldist_z", max_inldist_z, 0.05);
        param_nh.param("nb_ransac", nb_ransac, 1000);

        proc = new NDTFrameProc(nb_ransac, max_inldist_xy, max_inldist_z);

        // Note: detector_thresh is read here but currently not applied to the
        // detector created below.
        double detector_thresh;
        param_nh.param("detector_thresh", detector_thresh, 400.);

        proc->detector = cv::FeatureDetector::create("SURF");

        param_nh.param("img_scale", proc->img_scale, 0.25);
        param_nh.param("trim_factor", proc->trim_factor, 1.);
        param_nh.param("non_mean", proc->non_mean, false);
        param_nh.param("windowed_matching2", proc->pe.windowed, false);
        param_nh.param("max_kp_dist", proc->pe.maxDist, 10.);
        param_nh.param("min_kp_dist", proc->pe.minDist, 0.);
        param_nh.param("set_initial_pose", set_initial_pose_, false);
        std::string default_world = "world";
        param_nh.param("world_frame", world_str, default_world);
        param_nh.param("gt_frame", gt_frame_, std::string("/camera1_rgb_optical_frame"));

        param_nh.param("publish_cloud", publish_cloud_, false);
        param_nh.param("subsample_step", subsample_step_, 5);

        camerasetup_ = false;
        global_transform_.setIdentity();
    }

    ~NDTFeatureRegNode()
    {
        delete proc;
    }

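    // Registers a new RGB-D frame against the previous ones, updates the
    // accumulated pose, broadcasts it on tf and optionally publishes a
    // subsampled point cloud of the current depth image.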
    void process(const cv::Mat &rgb_img, const cv::Mat &depth_img, const ros::Time &current_timestamp)
    {
        NDTFrame *frame = new NDTFrame();

        rgb_img.copyTo(frame->img);
        depth_img.copyTo(frame->depth_img);
        frame->supportSize = support_size_;
        frame->maxVar = max_var_;
        frame->current_res = current_res_;
        frame->cameraParams = cameraparams_;
        proc->addFrameIncremental(frame, skip_matching_, estimate_di_, match_full_, match_no_association_);

        if (visualize_)
            viewKeypointMatches(proc, 10);

        proc->trimNbFrames(max_nb_frames_);

        global_transform_ = global_transform_ * proc->transformVector.back();
        tf::Transform transform;          // incremental frame-to-frame transform (not broadcast)
        tf::Transform global_transform;   // accumulated pose in the world frame

        tf::transformEigenToTF(proc->transformVector.back(), transform);
        tf::transformEigenToTF(global_transform_, global_transform);

        tf_.sendTransform(tf::StampedTransform(global_transform, current_timestamp, world_str, "global_ndt_feature_reg"));

        if (publish_cloud_)
        {
            pcl::PointCloud<pcl::PointXYZ> depthcloud, sub_depthcloud;
            cameraparams_.convertDepthImageToPointCloud(frame->depth_img, depthcloud);

            subsamplePointCloud(depthcloud, sub_depthcloud, subsample_step_);

            sensor_msgs::PointCloud2 pcloud;
            pcl::toROSMsg(sub_depthcloud, pcloud);
            pcloud.header.stamp = ros::Time::now();
            pcloud.header.frame_id = "global_ndt_feature_reg";
            pub_points2_.publish(pcloud);
        }

        try
        {
            if (set_initial_pose_)
            {
                // Initialize the accumulated pose from the ground-truth frame.
                tf::StampedTransform glb_gt_transform;
                tf_listener_.lookupTransform(world_str, gt_frame_, current_timestamp, glb_gt_transform);
                tf::transformTFToEigen(glb_gt_transform, global_transform_);
                set_initial_pose_ = false;
            }

            prev_timestamp_ = current_timestamp;
        }
        catch (tf::TransformException &ex)
        {
            ROS_ERROR("%s", ex.what());
        }
    }

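    // Callback for intensity image + point cloud input (xyzi_data == true);
    // currently only used to build the camera lookup table from the first cloud.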
    void intensityPoints2Cb(const sensor_msgs::ImageConstPtr &msg_img, const sensor_msgs::PointCloud2::ConstPtr &msg_pts)
    {
        // Scoped lock so the mutex is also released on the early return below.
        boost::mutex::scoped_lock lock(m_);

        ROS_INFO("Got intensity and points2 pair...");
        cv_bridge::CvImagePtr cv_ptr;
        try
        {
            cv_ptr = cv_bridge::toCvCopy(msg_img, "mono8");
        }
        catch (cv_bridge::Exception &e)
        {
            ROS_ERROR("cv_bridge exception: %s", e.what());
            return;
        }

        cv::Mat intensity = (cv_ptr->image);
        pcl::PointCloud<pcl::PointXYZ> pc;
        pcl::fromROSMsg(*msg_pts, pc);

        if (!camerasetup_)
        {
            cameraparams_.setLookupTable(generateLookupTable(pc));
            camerasetup_ = true;
        }
    }

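    // Callback for synchronized RGB and depth images; runs the registration
    // once the camera parameters are available.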
    void rgbDepthCb(const sensor_msgs::ImageConstPtr &msg_rgb, const sensor_msgs::ImageConstPtr &msg_depth)
    {
        // Scoped lock so the mutex is also released on the early return below.
        boost::mutex::scoped_lock lock(m_);

        cv_bridge::CvImageConstPtr cv_ptr_rgb;
        cv_bridge::CvImageConstPtr cv_ptr_depth;
        try
        {
            cv_ptr_rgb = cv_bridge::toCvShare(msg_rgb);
            cv_ptr_depth = cv_bridge::toCvShare(msg_depth);
        }
        catch (cv_bridge::Exception &e)
        {
            ROS_ERROR("cv_bridge exception: %s", e.what());
            return;
        }

        if (camerasetup_)
        {
            process(cv_ptr_rgb->image, cv_ptr_depth->image, msg_rgb->header.stamp);
        }
    }

    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
};

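// Reads parameters from the private namespace and spins the registration node.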
int main(int argc, char **argv)
{
    ros::init(argc, argv, "ndt_feature_reg_node");

    ros::NodeHandle param_nh("~");

    NDTFeatureRegNode n(param_nh);
    ros::spin();

    return 0;
}