#include <ros/ros.h>

#include <std_msgs/Header.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/CameraInfo.h>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <cv_bridge/CvBridge.h>

#include <tf/transform_listener.h>

#include <pcl16/point_cloud.h>
#include <pcl16/point_types.h>
#include <pcl16/common/common.h>
#include <pcl16/common/eigen.h>
#include <pcl16/common/centroid.h>
#include <pcl16/io/io.h>
#include <pcl16/io/pcd_io.h>
#include <pcl16_ros/transforms.h>
#include <pcl16/ros/conversions.h>
#include <pcl16/ModelCoefficients.h>
#include <pcl16/sample_consensus/method_types.h>
#include <pcl16/sample_consensus/model_types.h>
#include <pcl16/segmentation/sac_segmentation.h>
#include <pcl16/filters/extract_indices.h>

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <boost/shared_ptr.hpp>

#include <tabletop_pushing/point_cloud_segmentation.h>

#include <vector>
#include <set>
#include <string>
#include <sstream>
#include <iostream>
#include <fstream>
#include <utility>
#include <float.h>
#include <math.h>
#include <time.h>
#include <cstdlib>

using boost::shared_ptr;
typedef pcl16::PointCloud<pcl16::PointXYZ> XYZPointCloud;
typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image,
                                                        sensor_msgs::Image,
                                                        sensor_msgs::PointCloud2> MySyncPolicy;
using tabletop_pushing::PointCloudSegmentation;
using tabletop_pushing::ProtoObject;
using tabletop_pushing::ProtoObjects;

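// Data collection node: subscribes to synchronized color, depth, and point
// cloud topics, optionally segments tabletop objects, and writes images,
// clouds, and the camera-to-workspace transform to disk on request.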
class DataCollectNode
{
 public:
  DataCollectNode(ros::NodeHandle &n) :
      n_(n), n_private_("~"),
      image_sub_(n, "color_image_topic", 1),
      depth_sub_(n, "depth_image_topic", 1),
      cloud_sub_(n, "point_cloud_topic", 1),
      sync_(MySyncPolicy(15), image_sub_, depth_sub_, cloud_sub_),
      camera_initialized_(false), save_count_(0), cluster_(false)
  {
    tf_ = shared_ptr<tf::TransformListener>(new tf::TransformListener());
    pcl_segmenter_ = shared_ptr<PointCloudSegmentation>(new PointCloudSegmentation(tf_));

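    // Display and saving behavior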
00112     n_private_.param("display_wait_ms", display_wait_ms_, 3);
00113     n_private_.param("save_all", save_all_, false);
00114 
00115 
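    // Workspace bounds and segmentation parameters passed to the point cloud segmenter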
00116     n_private_.param("min_workspace_x", pcl_segmenter_->min_workspace_x_, 0.0);
00117     n_private_.param("min_workspace_z", pcl_segmenter_->min_workspace_z_, 0.0);
00118     n_private_.param("max_workspace_x", pcl_segmenter_->max_workspace_x_, 0.0);
00119     n_private_.param("max_workspace_z", pcl_segmenter_->max_workspace_z_, 0.0);
00120     n_private_.param("min_table_z", pcl_segmenter_->min_table_z_, -0.5);
00121     n_private_.param("max_table_z", pcl_segmenter_->max_table_z_, 1.5);
00122 
00123     n_private_.param("mps_min_inliers", pcl_segmenter_->mps_min_inliers_, 10000);
00124     n_private_.param("mps_min_angle_thresh", pcl_segmenter_->mps_min_angle_thresh_, 2.0);
00125     n_private_.param("mps_min_dist_thresh", pcl_segmenter_->mps_min_dist_thresh_, 0.02);
00126     n_private_.param("table_ransac_thresh", pcl_segmenter_->table_ransac_thresh_,
00127                      0.01);
00128     n_private_.param("table_ransac_angle_thresh",
00129                      pcl_segmenter_->table_ransac_angle_thresh_, 30.0);
00130     n_private_.param("pcl_cluster_tolerance", pcl_segmenter_->cluster_tolerance_,
00131                      0.25);
00132     n_private_.param("pcl_difference_thresh", pcl_segmenter_->cloud_diff_thresh_,
00133                      0.01);
00134     n_private_.param("pcl_min_cluster_size", pcl_segmenter_->min_cluster_size_,
00135                      100);
00136     n_private_.param("pcl_max_cluster_size", pcl_segmenter_->max_cluster_size_,
00137                      2500);
00138     n_private_.param("pcl_voxel_downsample_res", pcl_segmenter_->voxel_down_res_,
00139                      0.005);
00140     n_private_.param("pcl_cloud_intersect_thresh",
00141                      pcl_segmenter_->cloud_intersect_thresh_, 0.005);
00142     n_private_.param("pcl_concave_hull_alpha", pcl_segmenter_->hull_alpha_,
00143                      0.1);
00144     n_private_.param("use_pcl_voxel_downsample",
00145                      pcl_segmenter_->use_voxel_down_, true);
00146 
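    // Output directory, camera info topic, workspace frame, and depth scaling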
    std::string output_path_def = "~";
    n_private_.param("img_output_path", base_output_path_, output_path_def);

    std::string cam_info_topic_def = "/kinect_head/rgb/camera_info";
    n_private_.param("cam_info_topic", cam_info_topic_,
                     cam_info_topic_def);
    std::string default_workspace_frame = "/torso_lift_link";
    n_private_.param("workspace_frame", workspace_frame_,
                     default_workspace_frame);
    n_private_.param("max_depth", max_depth_, 4.0);

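    // Fire sensorCallback whenever a synchronized (color, depth, cloud) triple arrives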
    sync_.registerCallback(&DataCollectNode::sensorCallback, this);
  }

  void sensorCallback(const sensor_msgs::ImageConstPtr& img_msg,
                      const sensor_msgs::ImageConstPtr& depth_msg,
                      const sensor_msgs::PointCloud2ConstPtr& cloud_msg)
  {
    // Lazily fetch the camera intrinsics the first time data arrives
    if (!camera_initialized_)
    {
      ROS_INFO_STREAM("Initializing camera.");
      cam_info_ = *ros::topic::waitForMessage<sensor_msgs::CameraInfo>(
          cam_info_topic_, n_, ros::Duration(5.0));
      camera_initialized_ = true;
      pcl_segmenter_->cam_info_ = cam_info_;
    }

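    // Convert the ROS image messages to OpenCV images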
    cv::Mat color_frame(bridge_.imgMsgToCv(img_msg));
    cv::Mat depth_frame(bridge_.imgMsgToCv(depth_msg));

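    // Zero out NaN depth readings so the depth image can be converted and saved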
    for (int r = 0; r < depth_frame.rows; ++r)
    {
      float* depth_row = depth_frame.ptr<float>(r);
      for (int c = 0; c < depth_frame.cols; ++c)
      {
        float cur_d = depth_row[c];
        if (isnan(cur_d))
        {
          depth_row[c] = 0.0;
        }
      }
    }

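    // Rescale the metric depth image into a 16-bit image for display and saving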
    cv::Mat depth_save_img(depth_frame.size(), CV_16UC1);
    depth_frame.convertTo(depth_save_img, CV_16UC1, 65535/max_depth_);
    cv::imshow("color", color_frame);
    cv::imshow("depth", depth_save_img);

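    // Convert the cloud message and wait for its transform into the workspace frame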
    XYZPointCloud cloud;
    pcl16::fromROSMsg(*cloud_msg, cloud);
    tf_->waitForTransform(workspace_frame_, cloud.header.frame_id,
                          cloud.header.stamp, ros::Duration(0.5));

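    // Record the camera-to-workspace transform and re-express the cloud in the workspace frame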
    tf::StampedTransform transform;
    tf_->lookupTransform(workspace_frame_, cloud.header.frame_id, ros::Time(0), transform);
    pcl16_ros::transformPointCloud(workspace_frame_, cloud, cloud, *tf_);

    XYZPointCloud object_cloud;
    cv::Mat object_img;
    ProtoObjects objs;
    if (cluster_)
    {
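      // Segment tabletop objects from the cloud and display their projection into the image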
      cur_camera_header_ = img_msg->header;
      pcl_segmenter_->cur_camera_header_ = cur_camera_header_;
      pcl_segmenter_->findTabletopObjects(cloud, objs, object_cloud, false);
      object_img = pcl_segmenter_->projectProtoObjectsIntoImage(
          objs, color_frame.size(), cur_camera_header_.frame_id);
      pcl_segmenter_->displayObjectImage(object_img, "Objects", true);
    }

    // 'c' toggles object clustering; 's' (or save_all) writes the current frame to disk
    char c = cv::waitKey(display_wait_ms_);
    if (c == 'c')
    {
      cluster_ = !cluster_;
    }

    if (c == 's' || save_all_)
    {
      ROS_INFO_STREAM("Writing image number " << save_count_);
      std::stringstream color_out;
      std::stringstream depth_out;
      std::stringstream cloud_out;
      std::stringstream object_img_out;
      std::stringstream object_cloud_out;
      std::stringstream transform_out;
      color_out << base_output_path_ << "/color" << save_count_ << ".png";
      depth_out << base_output_path_ << "/depth" << save_count_ << ".png";
      transform_out << base_output_path_ << "/transform" << save_count_ << ".txt";
      cloud_out << base_output_path_ << "/cloud" << save_count_ << ".pcd";
      object_cloud_out << base_output_path_ << "/object_cloud" << save_count_ << ".pcd";
      object_img_out << base_output_path_ << "/object_img" << save_count_ << ".png";

      cv::imwrite(color_out.str(), color_frame);
      cv::imwrite(depth_out.str(), depth_save_img);

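      // Save the raw point cloud message as a PCD file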
      pcl16::io::savePCDFile(cloud_out.str(), *cloud_msg);

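      // Save the segmented object image and clouds when clustering is enabled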
      if (cluster_ && object_cloud.size() > 0)
      {
        cv::imwrite(object_img_out.str(), object_img);
        pcl16::io::savePCDFile(object_cloud_out.str(), object_cloud);
        for (unsigned int i = 0; i < objs.size(); ++i)
        {
          std::stringstream cluster_cloud_out;
          cluster_cloud_out << base_output_path_ << "/object_" << i << "_cloud" <<
              save_count_ << ".pcd";
          // Save each object's own cluster cloud rather than the combined object cloud
          pcl16::io::savePCDFile(cluster_cloud_out.str(), objs[i].cloud);
        }
      }

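      // Write the recorded camera-to-workspace transform: rotation quaternion, then translation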
      tf::Vector3 trans = transform.getOrigin();
      tf::Quaternion rot = transform.getRotation();
      std::ofstream transform_file(transform_out.str().c_str());
      transform_file << "[" << rot.getX() << ", " << rot.getY() << ", " << rot.getZ() << ", "
                     << rot.getW() << "]\n";
      transform_file << "[" << trans.getX() << ", " << trans.getY() << ", " << trans.getZ()
                     << "]\n";
      ROS_INFO_STREAM("Wrote image number " << save_count_);
      save_count_++;
    }
  }

  void spin()
  {
    while(n_.ok())
    {
      ros::spinOnce();
    }
  }

 protected:
  ros::NodeHandle n_;
  ros::NodeHandle n_private_;
  message_filters::Subscriber<sensor_msgs::Image> image_sub_;
  message_filters::Subscriber<sensor_msgs::Image> depth_sub_;
  message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub_;
  message_filters::Synchronizer<MySyncPolicy> sync_;
  sensor_msgs::CameraInfo cam_info_;
  sensor_msgs::CvBridge bridge_;
  shared_ptr<tf::TransformListener> tf_;
  shared_ptr<PointCloudSegmentation> pcl_segmenter_;
  int display_wait_ms_;
  bool save_all_;
  std::string base_output_path_;
  std::string cam_info_topic_;
  std::string workspace_frame_;
  bool camera_initialized_;
  int save_count_;
  double max_depth_;
  std_msgs::Header cur_camera_header_;
  std_msgs::Header prev_camera_header_;
  bool cluster_;
};

int main(int argc, char ** argv)
{
  int seed = time(NULL);
  srand(seed);
  ros::init(argc, argv, "data_node");
  ros::NodeHandle n;
  DataCollectNode data_node(n);
  data_node.spin();
  return 0;
}