tabletop_pushing_perception_node.cpp
/*********************************************************************
 * Software License Agreement (BSD License)
 *
 *  Copyright (c) 2012, Georgia Institute of Technology
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of the Georgia Institute of Technology nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 *********************************************************************/
// ROS
#include <ros/ros.h>
#include <ros/package.h>

#include <std_msgs/Header.h>
#include <geometry_msgs/PointStamped.h>
#include <geometry_msgs/Pose2D.h>
#include <geometry_msgs/PoseStamped.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/CameraInfo.h>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <actionlib/server/simple_action_server.h>

#include <pr2_manipulation_controllers/JTTaskControllerState.h>
#include <pr2_manipulation_controllers/JinvTeleopControllerState.h>

// TF
#include <tf/transform_listener.h>

// PCL
#include <pcl16/point_cloud.h>
#include <pcl16/point_types.h>
#include <pcl16/common/common.h>
#include <pcl16/common/eigen.h>
#include <pcl16/common/centroid.h>
#include <pcl16/io/io.h>
#include <pcl16/io/pcd_io.h>
#include <pcl16_ros/transforms.h>
#include <pcl16/ros/conversions.h>
#include <pcl16/ModelCoefficients.h>
#include <pcl16/registration/transformation_estimation_svd.h>
#include <pcl16/sample_consensus/method_types.h>
#include <pcl16/sample_consensus/model_types.h>
#include <pcl16/segmentation/sac_segmentation.h>
#include <pcl16/filters/extract_indices.h>

// OpenCV
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

// Boost
#include <boost/shared_ptr.hpp>

// cpl_visual_features
#include <cpl_visual_features/helpers.h>
#include <cpl_visual_features/comp_geometry.h>
#include <cpl_visual_features/features/shape_context.h>

// tabletop_pushing
#include <tabletop_pushing/LearnPush.h>
#include <tabletop_pushing/LocateTable.h>
#include <tabletop_pushing/VisFeedbackPushTrackingAction.h>
#include <tabletop_pushing/point_cloud_segmentation.h>
#include <tabletop_pushing/shape_features.h>
#include <tabletop_pushing/object_tracker_25d.h>
#include <tabletop_pushing/push_primitives.h>
#include <tabletop_pushing/arm_obj_segmentation.h>
#include <tabletop_pushing/extern/Timer.hpp>

// libSVM
#include <libsvm/svm.h>
// STL
#include <vector>
#include <queue>
#include <set>
#include <string>
#include <sstream>
#include <iostream>
#include <utility>
#include <limits> // for std::numeric_limits (NaN self-filter points below)
#include <float.h>
#include <math.h>
#include <cmath>
#include <time.h> // for srand(time(NULL))
#include <cstdlib> // for RAND_MAX

// Debugging IFDEFS
// #define DISPLAY_INPUT_COLOR 1
// #define DISPLAY_INPUT_DEPTH 1
#define DISPLAY_WAIT 1
// #define PROFILE_CB_TIME 1
// #define DEBUG_POSE_ESTIMATION 1

using boost::shared_ptr;

using tabletop_pushing::LearnPush;
using tabletop_pushing::LocateTable;
using tabletop_pushing::PushVector;
using tabletop_pushing::PointCloudSegmentation;
using tabletop_pushing::ProtoObject;
using tabletop_pushing::ProtoObjects;
using tabletop_pushing::ShapeLocation;
using tabletop_pushing::ShapeLocations;
using tabletop_pushing::ObjectTracker25D;
using tabletop_pushing::ArmObjSegmentation;

using geometry_msgs::PoseStamped;
using geometry_msgs::PointStamped;
using geometry_msgs::Pose2D;
using geometry_msgs::Twist;
typedef pcl16::PointCloud<pcl16::PointXYZ> XYZPointCloud;
typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image,
                                                        sensor_msgs::Image,
                                                        sensor_msgs::Image,
                                                        sensor_msgs::PointCloud2> MySyncPolicy;
using cpl_visual_features::upSample;
using cpl_visual_features::downSample;
using cpl_visual_features::subPIAngle;
using cpl_visual_features::lineSegmentIntersection2D;
using cpl_visual_features::ShapeDescriptors;
using cpl_visual_features::ShapeDescriptor;

typedef tabletop_pushing::VisFeedbackPushTrackingFeedback PushTrackerState;
typedef tabletop_pushing::VisFeedbackPushTrackingGoal PushTrackerGoal;
typedef tabletop_pushing::VisFeedbackPushTrackingResult PushTrackerResult;
typedef tabletop_pushing::VisFeedbackPushTrackingAction PushTrackerAction;

#define FOOTPRINT_XY_RES 0.001

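// Convert a metric coordinate to a discrete index on the object footprint
// grid, relative to min_val, at FOOTPRINT_XY_RES resolution.
// NOTE: max_val is accepted but currently unused.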
inline int objLocToIdx(double val, double min_val, double max_val)
{
  return round((val-min_val)/FOOTPRINT_XY_RES);
}

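// A boundary-location index paired with its predicted pushing score, plus a
// comparator so ScoredIdx elements can be ordered (e.g. in a
// std::priority_queue) by ascending or descending score.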
struct ScoredIdx
{
  double score;
  int idx;
};

class ScoredIdxComparison
{
 public:
  ScoredIdxComparison(const bool& descend=false) : descend_(descend) {}
  bool operator() (const ScoredIdx& lhs, const ScoredIdx& rhs) const
  {
    if (descend_)
    {
      return (lhs.score < rhs.score);
    }
    else
    {
      return (lhs.score > rhs.score);
    }
  }
 protected:
  bool descend_;
};

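/**
 * Perception node for visual-feedback pushing: synchronizes color, depth,
 * self-mask, and point cloud input; tracks the pushed object in 2.5D; serves
 * push-planning service requests; and reports tracking feedback through an
 * actionlib server.
 */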
class TabletopPushingPerceptionNode
{
 public:
  TabletopPushingPerceptionNode(ros::NodeHandle &n) :
      n_(n), n_private_("~"),
      image_sub_(n, "color_image_topic", 1),
      depth_sub_(n, "depth_image_topic", 1),
      mask_sub_(n, "mask_image_topic", 1),
      cloud_sub_(n, "point_cloud_topic", 1),
      sync_(MySyncPolicy(15), image_sub_, depth_sub_, mask_sub_, cloud_sub_),
      as_(n, "push_tracker", false),
      have_depth_data_(false),
      camera_initialized_(false), recording_input_(false), record_count_(0),
      learn_callback_count_(0), goal_out_count_(0), goal_heading_count_(0),
      frame_callback_count_(0), frame_set_count_(0),
      just_spun_(false), major_axis_spin_pos_scale_(0.75), object_not_moving_thresh_(0),
      object_not_moving_count_(0), object_not_moving_count_limit_(10),
      gripper_not_moving_thresh_(0), gripper_not_moving_count_(0),
      gripper_not_moving_count_limit_(10), current_file_id_(""), force_swap_(false),
      num_position_failures_(0), footprint_count_(0)
  {
    tf_ = shared_ptr<tf::TransformListener>(new tf::TransformListener());
    pcl_segmenter_ = shared_ptr<PointCloudSegmentation>(
        new PointCloudSegmentation(tf_));
    // Get parameters from the server
    n_private_.param("display_wait_ms", display_wait_ms_, 3);
    n_private_.param("use_displays", use_displays_, false);
    n_private_.param("write_input_to_disk", write_input_to_disk_, false);
    n_private_.param("write_to_disk", write_to_disk_, false);

    n_private_.param("min_workspace_x", pcl_segmenter_->min_workspace_x_, 0.0);
    n_private_.param("min_workspace_z", pcl_segmenter_->min_workspace_z_, 0.0);
    n_private_.param("max_workspace_x", pcl_segmenter_->max_workspace_x_, 0.0);
    n_private_.param("max_workspace_z", pcl_segmenter_->max_workspace_z_, 0.0);
    n_private_.param("min_table_z", pcl_segmenter_->min_table_z_, -0.5);
    n_private_.param("max_table_z", pcl_segmenter_->max_table_z_, 1.5);

    // TODO: Tie these parameters with those in the tabletop_executive... (need to align goal and workspace names)
    n_private_.param("min_goal_x", min_goal_x_, 0.425);
    n_private_.param("max_goal_x", max_goal_x_, 0.8);
    n_private_.param("min_goal_y", min_goal_y_, -0.3);
    n_private_.param("max_goal_y", max_goal_y_, 0.3);

    n_private_.param("mps_min_inliers", pcl_segmenter_->mps_min_inliers_, 10000);
    n_private_.param("mps_min_angle_thresh", pcl_segmenter_->mps_min_angle_thresh_, 2.0);
    n_private_.param("mps_min_dist_thresh", pcl_segmenter_->mps_min_dist_thresh_, 0.02);

    std::string default_workspace_frame = "torso_lift_link";
    n_private_.param("workspace_frame", workspace_frame_,
                     default_workspace_frame);
    std::string default_camera_frame = "head_mount_kinect_rgb_optical_frame";
    n_private_.param("camera_frame", camera_frame_, default_camera_frame);

    std::string output_path_def = "~";
    n_private_.param("img_output_path", base_output_path_, output_path_def);

    n_private_.param("start_tracking_on_push_call", start_tracking_on_push_call_, false);

    n_private_.param("num_downsamples", num_downsamples_, 2);
    pcl_segmenter_->num_downsamples_ = num_downsamples_;

    std::string cam_info_topic_def = "/kinect_head/rgb/camera_info";
    n_private_.param("cam_info_topic", cam_info_topic_, cam_info_topic_def);

    // PCL Segmentation parameters
    n_private_.param("table_ransac_thresh", pcl_segmenter_->table_ransac_thresh_, 0.01);
    n_private_.param("table_ransac_angle_thresh", pcl_segmenter_->table_ransac_angle_thresh_, 30.0);
    n_private_.param("cylinder_ransac_thresh", pcl_segmenter_->cylinder_ransac_thresh_, 0.03);
    n_private_.param("cylinder_ransac_angle_thresh", pcl_segmenter_->cylinder_ransac_angle_thresh_, 1.5);
    n_private_.param("optimize_cylinder_coefficients", pcl_segmenter_->optimize_cylinder_coefficients_, false);
    n_private_.param("sphere_ransac_thresh", pcl_segmenter_->sphere_ransac_thresh_, 0.01);
    n_private_.param("pcl_cluster_tolerance", pcl_segmenter_->cluster_tolerance_, 0.25);
    n_private_.param("pcl_difference_thresh", pcl_segmenter_->cloud_diff_thresh_, 0.01);
    n_private_.param("pcl_min_cluster_size", pcl_segmenter_->min_cluster_size_, 100);
    n_private_.param("pcl_max_cluster_size", pcl_segmenter_->max_cluster_size_, 2500);
    n_private_.param("pcl_voxel_downsample_res", pcl_segmenter_->voxel_down_res_, 0.005);
    n_private_.param("pcl_cloud_intersect_thresh", pcl_segmenter_->cloud_intersect_thresh_, 0.005);
    n_private_.param("pcl_table_hull_alpha", pcl_segmenter_->hull_alpha_, 0.1);
    n_private_.param("use_pcl_voxel_downsample", pcl_segmenter_->use_voxel_down_, true);
    n_private_.param("icp_max_iters", pcl_segmenter_->icp_max_iters_, 100);
    n_private_.param("icp_transform_eps", pcl_segmenter_->icp_transform_eps_, 0.0);
    n_private_.param("icp_max_cor_dist", pcl_segmenter_->icp_max_cor_dist_, 1.0);
    n_private_.param("icp_ransac_thresh", pcl_segmenter_->icp_ransac_thresh_, 0.015);

    n_private_.param("push_tracker_dist_thresh", tracker_dist_thresh_, 0.05);
    n_private_.param("push_tracker_angle_thresh", tracker_angle_thresh_, 0.01);
    n_private_.param("major_axis_spin_pos_scale", major_axis_spin_pos_scale_, 0.75);
    bool use_cv_ellipse;
    n_private_.param("use_cv_ellipse", use_cv_ellipse, false);
    n_private_.param("use_mps_segmentation", use_mps_segmentation_, false);

    n_private_.param("max_object_gripper_dist", max_object_gripper_dist_, 0.10);
    n_private_.param("gripper_not_moving_thresh", gripper_not_moving_thresh_, 0.005);
    n_private_.param("object_not_moving_thresh", object_not_moving_thresh_, 0.005);
    n_private_.param("gripper_not_moving_count_limit", gripper_not_moving_count_limit_, 100);
    n_private_.param("object_not_moving_count_limit", object_not_moving_count_limit_, 100);
    n_private_.param("object_not_detected_count_limit", object_not_detected_count_limit_, 5);
    n_private_.param("object_too_far_count_limit", object_too_far_count_limit_, 5);
    n_private_.param("object_not_between_count_limit", object_not_between_count_limit_, 5);
    n_private_.param("object_not_between_epsilon", object_not_between_epsilon_, 0.01);
    n_private_.param("start_loc_push_time_limit", start_loc_push_time_, 5.0);
    n_private_.param("start_loc_push_dist", start_loc_push_dist_, 0.30);
    n_private_.param("use_center_pointing_shape_context", use_center_pointing_shape_context_, true);
    n_private_.param("self_mask_dilate_size", mask_dilate_size_, 5);
    n_private_.param("point_cloud_hist_res", point_cloud_hist_res_, 0.005);
    n_private_.param("boundary_hull_alpha", hull_alpha_, 0.01);
    n_private_.param("hull_gripper_spread", gripper_spread_, 0.05);

    n_.param("start_loc_use_fixed_goal", start_loc_use_fixed_goal_, false);
    n_.param("use_graphcut_arm_seg", use_graphcut_arm_seg_, false);


    std::string arm_color_model_name;
    n_private_.param("arm_color_model_name", arm_color_model_name, std::string(""));

#ifdef DEBUG_POSE_ESTIMATION
    pose_est_stream_.open("/u/thermans/data/new/pose_ests.txt");
#endif // DEBUG_POSE_ESTIMATION

    // Initialize classes requiring parameters
    arm_obj_segmenter_ = shared_ptr<ArmObjSegmentation>(new ArmObjSegmentation());
    if (arm_color_model_name.length() > 0)
    {
      std::stringstream arm_color_model_path;
      arm_color_model_path << ros::package::getPath("tabletop_pushing") << "/cfg/" << arm_color_model_name;
      arm_obj_segmenter_->loadArmColorModel(arm_color_model_path.str());
    }
    obj_tracker_ = shared_ptr<ObjectTracker25D>(
        new ObjectTracker25D(pcl_segmenter_, arm_obj_segmenter_, num_downsamples_, use_displays_,
                             write_to_disk_, base_output_path_, camera_frame_, use_cv_ellipse,
                             use_mps_segmentation_, use_graphcut_arm_seg_, hull_alpha_));

    // Setup ros node connections
    sync_.registerCallback(&TabletopPushingPerceptionNode::sensorCallback,
                           this);
    push_pose_server_ = n_.advertiseService(
        "get_learning_push_vector",
        &TabletopPushingPerceptionNode::learnPushCallback, this);
    table_location_server_ = n_.advertiseService(
        "get_table_location", &TabletopPushingPerceptionNode::getTableLocation,
        this);

    // Setup arm controller state callbacks
    jtteleop_l_arm_subscriber_ = n_.subscribe("/l_cart_transpose_push/state", 1,
                                              &TabletopPushingPerceptionNode::lArmStateCartCB,
                                              this);
    jtteleop_r_arm_subscriber_ = n_.subscribe("/r_cart_transpose_push/state", 1,
                                              &TabletopPushingPerceptionNode::rArmStateCartCB,
                                              this);
    jinv_l_arm_subscriber_ = n_.subscribe("/l_cart_jinv_push/state", 1,
                                          &TabletopPushingPerceptionNode::lArmStateVelCB,
                                          this);
    jinv_r_arm_subscriber_ = n_.subscribe("/r_cart_jinv_push/state", 1,
                                          &TabletopPushingPerceptionNode::rArmStateVelCB,
                                          this);
    // Setup push tracking action server
    as_.registerGoalCallback(
        boost::bind(&TabletopPushingPerceptionNode::pushTrackerGoalCB, this));
    as_.registerPreemptCallback(
        boost::bind(&TabletopPushingPerceptionNode::pushTrackerPreemptCB, this));
    as_.start();
  }

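  /**
   * Synchronized sensor callback. Converts the incoming images to OpenCV,
   * transforms the point cloud into the workspace frame, masks out the robot
   * arm, downsamples everything, then runs one update of the object tracker
   * and publishes actionlib feedback when a push goal is active.
   */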
  void sensorCallback(const sensor_msgs::ImageConstPtr& img_msg,
                      const sensor_msgs::ImageConstPtr& depth_msg,
                      const sensor_msgs::ImageConstPtr& mask_msg,
                      const sensor_msgs::PointCloud2ConstPtr& cloud_msg)
  {
#ifdef PROFILE_CB_TIME
    long long cb_start_time = Timer::nanoTime();
#endif

    if (!camera_initialized_)
    {
      cam_info_ = *ros::topic::waitForMessage<sensor_msgs::CameraInfo>(
          cam_info_topic_, n_, ros::Duration(5.0));
      camera_initialized_ = true;
      pcl_segmenter_->cam_info_ = cam_info_;
      ROS_DEBUG_STREAM("Cam info: " << cam_info_);
    }
    // Convert images to OpenCV format
    cv::Mat color_frame;
    cv::Mat depth_frame;
    cv::Mat self_mask;
    cv_bridge::CvImagePtr color_cv_ptr = cv_bridge::toCvCopy(img_msg);
    cv_bridge::CvImagePtr depth_cv_ptr = cv_bridge::toCvCopy(depth_msg);
    cv_bridge::CvImagePtr mask_cv_ptr = cv_bridge::toCvCopy(mask_msg);
    color_frame = color_cv_ptr->image;
    depth_frame = depth_cv_ptr->image;
    self_mask = mask_cv_ptr->image;

    // cv::cvtColor(color_frame, color_frame, CV_RGB2BGR);

#ifdef PROFILE_CB_TIME
    long long grow_mask_start_time = Timer::nanoTime();
#endif
    // Grow the arm mask if requested (eroding the mask expands the zero-valued arm region)
    if (mask_dilate_size_ > 0)
    {
      cv::Mat morph_element(mask_dilate_size_, mask_dilate_size_, CV_8UC1, cv::Scalar(255));
      cv::erode(self_mask, self_mask, morph_element);
    }
#ifdef PROFILE_CB_TIME
    double grow_mask_elapsed_time = (((double)(Timer::nanoTime() - grow_mask_start_time)) /
                                     Timer::NANOSECONDS_PER_SECOND);
    long long transform_start_time = Timer::nanoTime();
#endif
    // Transform point cloud into the correct frame and convert to PCL struct
    XYZPointCloud cloud;
    pcl16::fromROSMsg(*cloud_msg, cloud);
    tf_->waitForTransform(workspace_frame_, cloud.header.frame_id, cloud.header.stamp, ros::Duration(0.5));
    pcl16_ros::transformPointCloud(workspace_frame_, cloud, cloud, *tf_);
#ifdef PROFILE_CB_TIME
    double transform_elapsed_time = (((double)(Timer::nanoTime() - transform_start_time)) /
                                   Timer::NANOSECONDS_PER_SECOND);
    long long filter_start_time = Timer::nanoTime();
#endif

    // TODO: Add a switch
    // TODO: Smooth / fill in depth map instead of zeroing (mode filter)
    // Convert nans to zeros
    for (int r = 0; r < depth_frame.rows; ++r)
    {
      float* depth_row = depth_frame.ptr<float>(r);
      for (int c = 0; c < depth_frame.cols; ++c)
      {
        float cur_d = depth_row[c];
        if (isnan(cur_d))
        {
          depth_row[c] = 0.0;
        }
      }
    }

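    // Build a self-filtered copy of the cloud: any point that falls on the
    // robot arm in the self mask is replaced with a NaN point so downstream
    // segmentation ignores it.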
    XYZPointCloud cloud_self_filtered(cloud);
    pcl16::PointXYZ nan_point;
    nan_point.x = std::numeric_limits<float>::quiet_NaN();
    nan_point.y = std::numeric_limits<float>::quiet_NaN();
    nan_point.z = std::numeric_limits<float>::quiet_NaN();
    for (unsigned int x = 0; x < cloud.width; ++x)
    {
      for (unsigned int y = 0; y < cloud.height; ++y)
      {
        if (self_mask.at<uchar>(y,x) == 0)
        {
          cloud_self_filtered.at(x,y) = nan_point;
        }
      }
    }
#ifdef PROFILE_CB_TIME
    double filter_elapsed_time = (((double)(Timer::nanoTime() - filter_start_time)) /
                                   Timer::NANOSECONDS_PER_SECOND);
    long long downsample_start_time = Timer::nanoTime();
#endif

    // Downsample everything first
    cv::Mat color_frame_down = downSample(color_frame, num_downsamples_);
    cv::Mat depth_frame_down = downSample(depth_frame, num_downsamples_);
    cv::Mat self_mask_down = downSample(self_mask, num_downsamples_);
    cv::Mat arm_mask_crop;
    color_frame_down.copyTo(arm_mask_crop, self_mask_down);

#ifdef PROFILE_CB_TIME
    double downsample_elapsed_time = (((double)(Timer::nanoTime() - downsample_start_time)) /
                                   Timer::NANOSECONDS_PER_SECOND);
    long long copy_start_time = Timer::nanoTime();
#endif

    // Save internally for use in the service callback
    // prev_color_frame_ = cur_color_frame_.clone();
    // prev_depth_frame_ = cur_depth_frame_.clone();
    // prev_self_mask_ = cur_self_mask_.clone();
    // prev_camera_header_ = cur_camera_header_;

    // Update the current versions
    cur_color_frame_ = color_frame_down.clone();
    cur_depth_frame_ = depth_frame_down.clone();
    cur_self_mask_ = self_mask_down.clone();
    cur_point_cloud_ = cloud;
    cur_self_filtered_cloud_ = cloud_self_filtered;
    have_depth_data_ = true;
    cur_camera_header_ = img_msg->header;
    pcl_segmenter_->cur_camera_header_ = cur_camera_header_;

#ifdef PROFILE_CB_TIME
    double copy_elapsed_time = (((double)(Timer::nanoTime() - copy_start_time)) /
                              Timer::NANOSECONDS_PER_SECOND);
    double update_tracks_elapsed_time = 0.0;
    double copy_tracks_elapsed_time = 0.0;
    double display_tracks_elapsed_time = 0.0;
    double publish_feedback_elapsed_time = 0.0;
    double evaluate_goal_elapsed_time = 0.0;
    long long tracker_start_time = Timer::nanoTime();
#endif

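    // Tracker active: update the object pose estimate from the new frame and
    // evaluate goal/abort conditions. When the tracker is paused we only
    // redraw the goal overlays on the current frame.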
    if (obj_tracker_->isInitialized() && !obj_tracker_->isPaused())
    {
      PoseStamped arm_pose;
      if (pushing_arm_ == "l")
      {
        arm_pose = l_arm_pose_;
      }
      else
      {
        arm_pose = r_arm_pose_;
      }
#ifdef PROFILE_CB_TIME
      long long update_tracks_start_time = Timer::nanoTime();
#endif

      PushTrackerState tracker_state;
      if (use_graphcut_arm_seg_)
      {
        obj_tracker_->updateTracks(cur_color_frame_, cur_depth_frame_, cur_self_mask_, cur_point_cloud_,
                                   proxy_name_, tracker_state);
      }
      else
      {
        obj_tracker_->updateTracks(cur_color_frame_, cur_depth_frame_, cur_self_mask_, cur_self_filtered_cloud_,
                                   proxy_name_, tracker_state);
      }

#ifdef DEBUG_POSE_ESTIMATION
      pose_est_stream_ << tracker_state.x.x << " " << tracker_state.x.y << " " << tracker_state.z << " "
                       << tracker_state.x.theta << "\n";
#endif // DEBUG_POSE_ESTIMATION

#ifdef PROFILE_CB_TIME
      update_tracks_elapsed_time = (((double)(Timer::nanoTime() - update_tracks_start_time)) /
                                    Timer::NANOSECONDS_PER_SECOND);
      long long copy_tracks_start_time = Timer::nanoTime();
#endif
      ProtoObject cur_obj = obj_tracker_->getMostRecentObject();
      // TODO: Move this into the tracker or somewhere better
      // XYZPointCloud hull_cloud = tabletop_pushing::getObjectBoundarySamples(cur_obj, hull_alpha_);

      // // Visualize hull_cloud;
      // // NOTE: Get this point with tf for offline use
      // geometry_msgs::PointStamped hand_pt_ros;
      // geometry_msgs::PointStamped base_point;
      // base_point.point.x = 0.0;
      // base_point.point.y = 0.0;
      // base_point.point.z = 0.0;
      // // base_point.header.frame_id = (pushing_arm_ == "l") ? "l_gripper_tool_frame" : "r_gripper_tool_frame";
      // base_point.header.frame_id = "l_gripper_tool_frame";
      // // base_point.header.stamp = ros::Time::now();
      // tf_->transformPoint(workspace_frame_, base_point, hand_pt_ros);
      // pcl16::PointXYZ hand_pt;
      // hand_pt.x = hand_pt_ros.point.x;
      // hand_pt.y = hand_pt_ros.point.y;
      // hand_pt.z = hand_pt_ros.point.z;
      // geometry_msgs::PointStamped forward_pt_ros;
      // geometry_msgs::PointStamped base_point_forward;
      // base_point_forward.point.x = 0.01;
      // base_point_forward.point.y = 0.0;
      // base_point_forward.point.z = 0.0;
      // // base_point_forward.header.frame_id = (pushing_arm_ == "l") ? "l_gripper_tool_frame" : "r_gripper_tool_frame";
      // base_point_forward.header.frame_id = "l_gripper_tool_frame";
      // tf_->transformPoint(workspace_frame_, base_point_forward, forward_pt_ros);
      // pcl16::PointXYZ forward_pt;
      // forward_pt.x = forward_pt_ros.point.x;
      // forward_pt.y = forward_pt_ros.point.y;
      // forward_pt.z = forward_pt_ros.point.z;
      // cv::Mat hull_cloud_viz = tabletop_pushing::visualizeObjectContactLocation(hull_cloud, tracker_state,
      //                                                                           hand_pt, forward_pt);
      // cv::imshow("obj footprint", hull_cloud_viz);
      // std::stringstream input_out_name;
      // input_out_name << base_output_path_ << "input" << footprint_count_ << ".png";
      // cv::imwrite(input_out_name.str(), cur_color_frame_);
      // std::stringstream footprint_out_name;
      // footprint_out_name << base_output_path_ << "footprint" << footprint_count_++ << ".png";
      // cv::imwrite(footprint_out_name.str(), hull_cloud_viz);

      tracker_state.proxy_name = proxy_name_;
      tracker_state.controller_name = controller_name_;
      tracker_state.behavior_primitive = behavior_primitive_;

      PointStamped start_point;
      PointStamped end_point;
      start_point.header.frame_id = workspace_frame_;
      end_point.header.frame_id = workspace_frame_;
      start_point.point.x = tracker_state.x.x;
      start_point.point.y = tracker_state.x.y;
      start_point.point.z = tracker_state.z;
      end_point.point.x = tracker_goal_pose_.x;
      end_point.point.y = tracker_goal_pose_.y;
      end_point.point.z = start_point.point.z;
#ifdef PROFILE_CB_TIME
      copy_tracks_elapsed_time = (((double)(Timer::nanoTime() - copy_tracks_start_time)) /
                                  Timer::NANOSECONDS_PER_SECOND);
      long long display_tracks_start_time = Timer::nanoTime();
#endif

      displayPushVector(cur_color_frame_, start_point, end_point);
      // displayRobotGripperPoses(cur_color_frame_);
      displayGoalHeading(cur_color_frame_, start_point, tracker_state.x.theta,
                         tracker_goal_pose_.theta);
#ifdef PROFILE_CB_TIME
      display_tracks_elapsed_time = (((double)(Timer::nanoTime() - display_tracks_start_time)) /
                                     Timer::NANOSECONDS_PER_SECOND);
      long long evaluate_goals_start_time = Timer::nanoTime();
#endif

      // make sure that the action hasn't been canceled
      if (as_.isActive())
      {
#ifdef PROFILE_CB_TIME
        long long publish_feedback_start_time = Timer::nanoTime();
#endif
        as_.publishFeedback(tracker_state);
#ifdef PROFILE_CB_TIME
        publish_feedback_elapsed_time = (((double)(Timer::nanoTime() - publish_feedback_start_time)) /
                                         Timer::NANOSECONDS_PER_SECOND);
        long long evaluate_goal_start_time = Timer::nanoTime();
#endif
        evaluateGoalAndAbortConditions(tracker_state);
#ifdef PROFILE_CB_TIME
        evaluate_goal_elapsed_time = (((double)(Timer::nanoTime() - evaluate_goal_start_time)) /
                                      Timer::NANOSECONDS_PER_SECOND);
#endif
      }
    }
    else if (obj_tracker_->isInitialized() && obj_tracker_->isPaused())
    {
      obj_tracker_->pausedUpdate(cur_color_frame_);
      PointStamped start_point;
      PointStamped end_point;
      PushTrackerState tracker_state = obj_tracker_->getMostRecentState();
      start_point.header.frame_id = workspace_frame_;
      end_point.header.frame_id = workspace_frame_;
      start_point.point.x = tracker_state.x.x;
      start_point.point.y = tracker_state.x.y;
      start_point.point.z = tracker_state.z;
      end_point.point.x = tracker_goal_pose_.x;
      end_point.point.y = tracker_goal_pose_.y;
      end_point.point.z = start_point.point.z;
      displayPushVector(cur_color_frame_, start_point, end_point, "goal_vector", true);
      // displayRobotGripperPoses(cur_color_frame_);
      displayGoalHeading(cur_color_frame_, start_point, tracker_state.x.theta,
                         tracker_goal_pose_.theta, true);
    }
#ifdef PROFILE_CB_TIME
    double tracker_elapsed_time = (((double)(Timer::nanoTime() - tracker_start_time)) /
                                 Timer::NANOSECONDS_PER_SECOND);
#endif


    // Display junk
#ifdef DISPLAY_INPUT_COLOR
    if (use_displays_)
    {
      cv::imshow("color", cur_color_frame_);
      // cv::imshow("self_mask", cur_self_mask_);
      cv::imshow("arm_mask_crop", arm_mask_crop);
    }
    // Way too much disk writing!
    if (write_input_to_disk_ && recording_input_)
    {
      std::stringstream out_name;
      if (current_file_id_.size() > 0)
      {
        std::stringstream cloud_out_name;
        out_name << base_output_path_ << current_file_id_ << "_input_" << record_count_ << ".png";
        // cloud_out_name << base_output_path_ << current_file_id_ << "_object_" << record_count_ << ".pcd";
        // ProtoObject cur_obj = obj_tracker_->getMostRecentObject();
        // pcl16::io::savePCDFile(cloud_out_name.str(), cur_obj.cloud);
      }
      else
      {
        out_name << base_output_path_ << "input" << record_count_ << ".png";
      }
      cv::imwrite(out_name.str(), cur_color_frame_);
      // std::stringstream self_out_name;
      // self_out_name << base_output_path_ << "self" << record_count_ << ".png";
      // cv::imwrite(self_out_name.str(), cur_self_mask_);
      record_count_++;
    }
#endif // DISPLAY_INPUT_COLOR
#ifdef DISPLAY_INPUT_DEPTH
    if (use_displays_)
    {
      double depth_max = 1.0;
      cv::minMaxLoc(cur_depth_frame_, NULL, &depth_max);
      cv::Mat depth_display = cur_depth_frame_.clone();
      depth_display /= depth_max;
      cv::imshow("input_depth", depth_display);
    }
#endif // DISPLAY_INPUT_DEPTH
#ifdef DISPLAY_WAIT
    if (use_displays_)
    {
      cv::waitKey(display_wait_ms_);
    }
#endif // DISPLAY_WAIT
    ++frame_callback_count_;
#ifdef PROFILE_CB_TIME
    double cb_elapsed_time = (((double)(Timer::nanoTime() - cb_start_time)) /
                            Timer::NANOSECONDS_PER_SECOND);
    if (obj_tracker_->isInitialized() && !obj_tracker_->isPaused())
    {
      ROS_INFO_STREAM("cb_elapsed_time " << cb_elapsed_time);
      ROS_INFO_STREAM("\t grow_mask_elapsed_time " << grow_mask_elapsed_time);
      ROS_INFO_STREAM("\t transform_elapsed_time " << transform_elapsed_time);
      ROS_INFO_STREAM("\t filter_elapsed_time " << filter_elapsed_time);
      ROS_INFO_STREAM("\t downsample_elapsed_time " << downsample_elapsed_time);
      ROS_INFO_STREAM("\t copy_elapsed_time " << copy_elapsed_time);
      ROS_INFO_STREAM("\t tracker_elapsed_time " << tracker_elapsed_time);
      ROS_INFO_STREAM("\t\t update_tracks_elapsed_time " << update_tracks_elapsed_time);
      ROS_INFO_STREAM("\t\t copy_tracks_elapsed_time " << copy_tracks_elapsed_time);
      ROS_INFO_STREAM("\t\t display_tracks_elapsed_time " << display_tracks_elapsed_time);
      ROS_INFO_STREAM("\t\t publish_feedback_elapsed_time " << publish_feedback_elapsed_time);
      ROS_INFO_STREAM("\t\t evaluate_goal_elapsed_time " << evaluate_goal_elapsed_time);
    }
#endif
  }

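  /**
   * Check whether the active push goal has been reached and whether any abort
   * condition (object or gripper stalled, object lost, object too far from
   * the gripper, or object no longer between gripper and goal) has fired;
   * succeeds or aborts the action server accordingly.
   */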
  void evaluateGoalAndAbortConditions(PushTrackerState& tracker_state)
  {
    // Check for goal conditions
    float x_error = tracker_goal_pose_.x - tracker_state.x.x;
    float y_error = tracker_goal_pose_.y - tracker_state.x.y;
    float theta_error = subPIAngle(tracker_goal_pose_.theta - tracker_state.x.theta);

    float x_dist = fabs(x_error);
    float y_dist = fabs(y_error);
    float theta_dist = fabs(theta_error);

    if (timing_push_)
    {
      if (pushingTimeUp())
      {
        abortPushingGoal("Pushing time up");
      }
      return;
    }

    if (controller_name_ == "rotate_to_heading")
    {
      if (theta_dist < tracker_angle_thresh_)
      {
        ROS_INFO_STREAM("Cur state: (" << tracker_state.x.x << ", " <<
                        tracker_state.x.y << ", " << tracker_state.x.theta << ")");
        ROS_INFO_STREAM("Desired goal: (" << tracker_goal_pose_.x << ", " <<
                        tracker_goal_pose_.y << ", " << tracker_goal_pose_.theta << ")");
        ROS_INFO_STREAM("Goal error: (" << x_dist << ", " << y_dist << ", "
                        << theta_dist << ")");
        PushTrackerResult res;
        res.aborted = false;
        as_.setSucceeded(res);
        obj_tracker_->pause();
      }
      return;
    }

    if (x_dist < tracker_dist_thresh_ && y_dist < tracker_dist_thresh_)
    {
      ROS_INFO_STREAM("Cur state: (" << tracker_state.x.x << ", " <<
                      tracker_state.x.y << ", " << tracker_state.x.theta << ")");
      ROS_INFO_STREAM("Desired goal: (" << tracker_goal_pose_.x << ", " <<
                      tracker_goal_pose_.y << ", " << tracker_goal_pose_.theta << ")");
      ROS_INFO_STREAM("Goal error: (" << x_dist << ", " << y_dist << ", "
                      << theta_dist << ")");
      PushTrackerResult res;
      res.aborted = false;
      as_.setSucceeded(res);
      obj_tracker_->pause();
      return;
    }

    if (objectNotMoving(tracker_state))
    {
      abortPushingGoal("Object is not moving");
    }
    else if (gripperNotMoving())
    {
      abortPushingGoal("Gripper is not moving");
    }
    else if (objectDisappeared(tracker_state))
    {
      abortPushingGoal("Object disappeared");
    }
    else
    {
      if (objectTooFarFromGripper(tracker_state.x))
      {
        abortPushingGoal("Object is too far from gripper.");
      }
      // Pulling keeps the object on the gripper side, so only pushing
      // primitives need the object to stay between gripper and goal
      // (predicate implied by the object_not_between_* parameters above;
      // the listing dropped it, which would abort every non-pull push).
      else if (behavior_primitive_ != "gripper_pull" &&
               objectNotBetweenGoalAndGripper(tracker_state.x))
      {
        abortPushingGoal("Object is not between gripper and goal.");
      }
    }
  }

  void abortPushingGoal(std::string msg)
  {
    ROS_WARN_STREAM(msg << " Aborting.");
    PushTrackerResult res;
    res.aborted = true;
    as_.setAborted(res);
    obj_tracker_->pause();
  }

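  /**
   * Service callback for get_learning_push_vector. Depending on the request
   * this (re)initializes a trial, reports the current object pose, or
   * computes a new push start pose and vector, toggling input recording as
   * appropriate.
   */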
  bool learnPushCallback(LearnPush::Request& req, LearnPush::Response& res)
  {
    if ( have_depth_data_ )
    {
      if (!req.analyze_previous)
      {
        controller_name_ = req.controller_name;
        proxy_name_ = req.proxy_name;
        behavior_primitive_ = req.behavior_primitive;
      }

      if (req.initialize)
      {
        num_position_failures_ = 0;
        ROS_INFO_STREAM("Initializing");
        record_count_ = 0;
        learn_callback_count_ = 0;
        res.no_push = true;
        ROS_INFO_STREAM("Stopping input recording");
        recording_input_ = false;
        obj_tracker_->stopTracking();
      }
      else if (req.analyze_previous || req.get_pose_only)
      {
        ROS_INFO_STREAM("Getting current object pose");
        getObjectPose(res);
        res.no_push = true;
        ROS_INFO_STREAM("Stopping input recording");
        recording_input_ = false;
        ProtoObject cur_obj = obj_tracker_->getMostRecentObject();
        if (cur_obj.cloud.header.frame_id.size() == 0)
        {
          ROS_INFO_STREAM("cur_obj.cloud.header.frame_id is blank, setting to workspace_frame_");
          cur_obj.cloud.header.frame_id = workspace_frame_;
        }
        PushTrackerState cur_state;
        cur_state.x.x = res.centroid.x;
        cur_state.x.y = res.centroid.y;
        cur_state.x.theta = res.theta;
        cur_state.z = res.centroid.z;
        if (req.analyze_previous)
        {
          // Display different color to signal swap available
          obj_tracker_->trackerDisplay(cur_color_frame_, cur_state, cur_obj, true);
          ROS_INFO_STREAM("Current theta: " << cur_state.x.theta);
          if (use_displays_)
          {
00849             ROS_INFO_STREAM("Presss 's' to swap orientation: ");
            char key_press = cv::waitKey(2000);
            if (key_press == 's')
            {
              // TODO: Is this correct?
              force_swap_ = !obj_tracker_->getSwapState();
              startTracking(cur_state, force_swap_);
              ROS_INFO_STREAM("Swapped theta: " << cur_state.x.theta);
              res.theta = cur_state.x.theta;
              obj_tracker_->stopTracking();
              obj_tracker_->trackerDisplay(cur_color_frame_, cur_state, cur_obj);
              // NOTE: Try and force redraw
              cv::waitKey(3);
            }
          }
        }
        else
        {
          ROS_INFO_STREAM("Calling tracker display");
          obj_tracker_->trackerDisplay(cur_color_frame_, cur_state, cur_obj);
          obj_tracker_->stopTracking();
        }
        ROS_INFO_STREAM("Done getting current pose\n");
      }
      else // NOTE: Assume pushing as default
      {
        ROS_INFO_STREAM("Determining push start pose");
        res = getPushStartPose(req);
        recording_input_ = !res.no_objects;
        if (recording_input_)
        {
          ROS_INFO_STREAM("Starting input recording");
          ROS_INFO_STREAM("current_file_id: " << current_file_id_);
        }
        else
        {
          ROS_INFO_STREAM("Stopping input recording");
        }
        res.no_push = res.no_objects;
      }
    }
    else
    {
      ROS_ERROR_STREAM("Calling getStartPose prior to receiving sensor data.");
      recording_input_ = false;
      res.no_push = true;
      return false;
    }
    return true;
  }

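  /**
   * Compute the response for a new push: (re)start tracking, optionally let
   * the user swap the estimated orientation, choose a push start location
   * (learned, fixed-goal, or clustered), and fill in the goal pose, push
   * angle, and push distance.
   */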
  LearnPush::Response getPushStartPose(LearnPush::Request& req)
  {
    LearnPush::Response res;
    PushTrackerState cur_state;
    startTracking(cur_state, force_swap_);
    ProtoObject cur_obj = obj_tracker_->getMostRecentObject();
    if (req.learn_start_loc)
    {
      obj_tracker_->trackerDisplay(cur_color_frame_, cur_state, cur_obj, true);
      if (use_displays_)
      {
        ROS_INFO_STREAM("Current theta: " << cur_state.x.theta);
00912         ROS_INFO_STREAM("Presss 's' to swap orientation: ");
        char key_press = cv::waitKey(2000);
        if (key_press == 's')
        {
          force_swap_ = !force_swap_;
          obj_tracker_->toggleSwap();
          startTracking(cur_state, force_swap_);
          obj_tracker_->trackerDisplay(cur_color_frame_, cur_state, cur_obj);
          // NOTE: Try and force redraw
          cv::waitKey(3);
          ROS_INFO_STREAM("Swapped theta: " << cur_state.x.theta);
        }
      }
    }

    if (!start_tracking_on_push_call_)
    {
      obj_tracker_->pause();
    }

    if (cur_state.no_detection)
    {
      ROS_WARN_STREAM("No objects found");
      res.centroid.x = 0.0;
      res.centroid.y = 0.0;
      res.centroid.z = 0.0;
      res.theta = 0.0;
      res.no_objects = true;
      return res;
    }
    res.no_objects = false;
    res.centroid.x = cur_obj.centroid[0];
    res.centroid.y = cur_obj.centroid[1];
    res.centroid.z = cur_obj.centroid[2];
    res.theta = cur_state.x.theta;

    // Set basic push information
    PushVector p;
    p.header.frame_id = workspace_frame_;
    bool pull_start = (req.behavior_primitive == "gripper_pull");
    bool rotate_push = (req.controller_name == "rotate_to_heading");

    // Choose a pushing location to test if we are learning good pushing locations
    if (req.learn_start_loc)
    {
      // Get the pushing location
      ShapeLocation chosen_loc;
      float predicted_score = -1;
      if (req.start_loc_param_path.length() > 0) // Choose start location using the learned classifier
      {
        ROS_INFO_STREAM("Finding learned push start loc");
        ROS_INFO_STREAM("Using param path " << req.start_loc_param_path);

        // HACK: We set the name to "rand" if we are testing with rand
        if (req.start_loc_param_path.compare("rand") == 0)
        {
          chosen_loc = chooseRandomPushStartLoc(cur_obj, cur_state, rotate_push);
        }
        else
        {
          chosen_loc = chooseLearnedPushStartLoc(cur_obj, cur_state, req.start_loc_param_path, predicted_score,
                                                 req.previous_position_worked, rotate_push);
        }
      }
      else if (start_loc_use_fixed_goal_)
      {
        chosen_loc = chooseFixedGoalPushStartLoc(cur_obj, cur_state, req.new_object,
                                                 req.num_start_loc_pushes_per_sample, req.num_start_loc_sample_locs,
                                                 req.trial_id);
      }
      else
      {
        chosen_loc = choosePushStartLoc(cur_obj, cur_state, req.new_object, req.num_start_loc_clusters);
      }
      ROS_INFO_STREAM("Chosen loc is: (" << chosen_loc.boundary_loc_.x << ", " << chosen_loc.boundary_loc_.y << ")");
      res.shape_descriptor.assign(chosen_loc.descriptor_.begin(), chosen_loc.descriptor_.end());
      res.predicted_score = predicted_score;
      float new_push_angle;
      if (rotate_push)
      {
        // Set goal for rotate pushing angle and goal state; then get start location as usual below
        new_push_angle = getRotatePushHeading(cur_state, chosen_loc, cur_obj);
        res.goal_pose.x = cur_state.x.x;
        res.goal_pose.y = cur_state.x.y;
        // NOTE: Uncomment for visualization purposes
        // res.goal_pose.x = res.centroid.x+cos(new_push_angle)*start_loc_push_dist_;
        // res.goal_pose.y = res.centroid.y+sin(new_push_angle)*start_loc_push_dist_;
        res.goal_pose.theta = subPIAngle(cur_state.x.theta+M_PI);
      }
      else
      {
        // Set goal for pushing and then get start location as usual below
        new_push_angle = atan2(res.centroid.y - chosen_loc.boundary_loc_.y,
                               res.centroid.x - chosen_loc.boundary_loc_.x);
        res.goal_pose.x = res.centroid.x+cos(new_push_angle)*start_loc_push_dist_;
        res.goal_pose.y = res.centroid.y+sin(new_push_angle)*start_loc_push_dist_;
      }

      p.start_point.x = chosen_loc.boundary_loc_.x;
      p.start_point.y = chosen_loc.boundary_loc_.y;
      p.start_point.z = chosen_loc.boundary_loc_.z;
      p.push_angle = new_push_angle;
      p.push_dist = start_loc_push_dist_;

      // Push for a fixed amount of time
      timing_push_ = true;

      // NOTE: Write object point cloud to disk, images too for use in offline learning if we want to
      // change features in the future
      if (write_to_disk_)
      {
        std::stringstream cloud_file_name;
        cloud_file_name << base_output_path_ << req.trial_id << "_obj_cloud.pcd";
        std::stringstream color_file_name;
        color_file_name << base_output_path_ << req.trial_id << "_color.png";
        current_file_id_ = req.trial_id;
        pcl16::io::savePCDFile(cloud_file_name.str(), cur_obj.cloud);
        cv::imwrite(color_file_name.str(), cur_color_frame_);
      }
    }
    else
    {
      res.goal_pose.x = req.goal_pose.x;
      res.goal_pose.y = req.goal_pose.y;

      // Get straight line from current location to goal pose as start
      if (pull_start)
      {
        // NOTE: Want the opposite direction for pulling as pushing
        p.push_angle = atan2(res.centroid.y - res.goal_pose.y, res.centroid.x - res.goal_pose.x);
      }
      else if (rotate_push)
      {
        // TODO: Figure out something here
      }
      else
      {
        p.push_angle = atan2(res.goal_pose.y - res.centroid.y, res.goal_pose.x - res.centroid.x);
      }
      // Get vector through centroid and determine start point and distance
      Eigen::Vector3f push_unit_vec(std::cos(p.push_angle), std::sin(p.push_angle), 0.0f);
      std::vector<pcl16::PointXYZ> end_points;
      pcl_segmenter_->lineCloudIntersectionEndPoints(cur_obj.cloud, push_unit_vec, cur_obj.centroid, end_points);
      p.start_point.x = end_points[0].x;
      p.start_point.y = end_points[0].y;
      p.start_point.z = end_points[0].z;
      // Get push distance
      p.push_dist = hypot(res.centroid.x - res.goal_pose.x, res.centroid.y - res.goal_pose.y);
      timing_push_ = false;
    }
    // Visualize push vector
    PointStamped obj_centroid;
    obj_centroid.header.frame_id = workspace_frame_;
    obj_centroid.point = res.centroid;
    if (rotate_push)
    {
      displayGoalHeading(cur_color_frame_, obj_centroid, cur_state.x.theta, res.goal_pose.theta);
    }
    else
    {
      PointStamped start_point;
      start_point.header.frame_id = workspace_frame_;
      start_point.point = p.start_point;
      PointStamped end_point;
      end_point.header.frame_id = workspace_frame_;
      end_point.point.x = res.goal_pose.x;
      end_point.point.y = res.goal_pose.y;
      end_point.point.z = start_point.point.z;
      displayPushVector(cur_color_frame_, start_point, end_point);
      if (use_displays_)
      {
        displayInitialPushVector(cur_color_frame_, start_point, end_point, obj_centroid);
      }
    }

    // Cleanup and return
    ROS_INFO_STREAM("Chosen push start point: (" << p.start_point.x << ", " << p.start_point.y << ", " <<
                    p.start_point.z << ")");
    ROS_INFO_STREAM("Push dist: " << p.push_dist);
    ROS_INFO_STREAM("Push angle: " << p.push_angle << "\n");
    learn_callback_count_++;
    res.push = p;
    tracker_goal_pose_ = res.goal_pose;
    return res;
  }

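  /**
   * Choose a push start location by clustering shape-context features along
   * the object boundary and sampling a boundary point from a cluster not yet
   * tried on this object; the chosen location is stored in the object frame
   * in the trial history.
   */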
  ShapeLocation choosePushStartLoc(ProtoObject& cur_obj, PushTrackerState& cur_state, bool new_object, int num_clusters)
  {
    if (new_object)
    {
      start_loc_history_.clear();
    }
    // Get shape features and associated locations
    ShapeLocations locs = tabletop_pushing::extractObjectShapeContext(cur_obj, use_center_pointing_shape_context_);
    // tabletop_pushing::extractObjectShapeContext(cur_obj, !use_center_pointing_shape_context_);

    // Choose location index from features and history
    int loc_idx = 0;
    if (start_loc_history_.size() == 0)
    {
      // TODO: Improve this to find a more unique / prototypical point?
      loc_idx = rand() % locs.size();
    }
    else
    {
      // Cluster locs based on shape similarity
      std::vector<int> cluster_ids;
      ShapeDescriptors centers;
      double min_err_change = 0.001;
      int max_iter = 1000;
      tabletop_pushing::clusterShapeFeatures(locs, num_clusters, cluster_ids, centers, min_err_change, max_iter);

      // Display the boundary locations colored by their cluster IDs
      cv::Mat boundary_disp_img(cur_color_frame_.size(), CV_32FC3, cv::Scalar(0,0,0));
      for (unsigned int i = 0; i < locs.size(); ++i)
      {
        const cv::Point2f img_idx = pcl_segmenter_->projectPointIntoImage(
            locs[i].boundary_loc_, cur_obj.cloud.header.frame_id, camera_frame_);
        boundary_disp_img.at<cv::Vec3f>(img_idx.y, img_idx.x) = pcl_segmenter_->colors_[cluster_ids[i]];
      }
      cv::imshow("Cluster colors", boundary_disp_img);

      // TODO: Easier to just keep picking random locs and choose first one with unused cluster center?
      // Find which clusters the previous choices map to, pick one other than those randomly
      std::vector<int> used_clusters;
      for (int i = 0; i < start_loc_history_.size(); ++i)
      {
        double cluster_dist = 0;
        int closest = tabletop_pushing::closestShapeFeatureCluster(start_loc_history_[i].descriptor_, centers,
                                                                   cluster_dist);
        used_clusters.push_back(closest);
      }
      bool done = false;
      int rand_cluster = -1;
      while (!done)
      {
        rand_cluster = rand() % num_clusters;
        done = true;
        for (int i = 0; i < used_clusters.size(); ++i)
        {
          if (used_clusters[i] == rand_cluster)
          {
            done = false;
          }
        }
      }
      ROS_INFO_STREAM("Chose cluster " << rand_cluster);
      // Pick random loc that has cluster id rand_cluster
      std::vector<int> loc_choices;
      for (int l = 0; l < locs.size(); ++l)
      {
        if (cluster_ids[l] == rand_cluster)
        {
          loc_choices.push_back(l);
        }
      }
      int choice_idx = rand() % loc_choices.size();
      loc_idx = loc_choices[choice_idx];
    }
    // Transform location into object frame for storage in history
    ShapeLocation s(worldPointInObjectFrame(locs[loc_idx].boundary_loc_, cur_state),
                    locs[loc_idx].descriptor_);
    start_loc_history_.push_back(s);
    return locs[loc_idx];
  }

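  /**
   * Choose a push start location for fixed-goal learning trials: anchor the
   * boundary walk at the dominant orientation for a new object, otherwise
   * step a fixed arc-length percentage around the object's hull between
   * sample locations, and extract shape features at the chosen point.
   */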
01200   ShapeLocation chooseFixedGoalPushStartLoc(ProtoObject& cur_obj, PushTrackerState& cur_state, bool new_object,
01201                                             int num_start_loc_pushes_per_sample, int num_start_loc_sample_locs,
01202                                             std::string trial_id)
01203   {
01204     XYZPointCloud hull_cloud = tabletop_pushing::getObjectBoundarySamples(cur_obj, hull_alpha_);
01205 
01206     int rot_idx = -1;
01207     if (new_object)
01208     {
01209       // Set new object model
01210       start_loc_obj_ = cur_obj;
01211       // Reset boundary traversal data
01212       start_loc_arc_length_percent_ = 0.0;
01213       start_loc_push_sample_count_ = 0;
01214       start_loc_history_.clear();
01215 
01216       // NOTE: Initial start location is the dominant orientation
01217       ROS_INFO_STREAM("Current state theta is: " << cur_state.x.theta);
01218       double min_angle_dist = FLT_MAX;
01219       for (int i = 0; i < hull_cloud.size(); ++i)
01220       {
01221         double theta_i = atan2(hull_cloud.at(i).y - cur_state.x.y, hull_cloud.at(i).x - cur_state.x.x);
01222         double angle_dist_i = fabs(subPIAngle(theta_i - cur_state.x.theta));
01223         if (angle_dist_i < min_angle_dist)
01224         {
01225           min_angle_dist = angle_dist_i;
01226           rot_idx = i;
01227         }
01228       }
01229     }
01230     else
01231     {
01232       // Increment boundary location if necessary
01233       if (start_loc_history_.size() % num_start_loc_pushes_per_sample == 0)
01234       {
01235         start_loc_arc_length_percent_ += 1.0/num_start_loc_sample_locs;
01236         ROS_INFO_STREAM("Incrementing arc length percent based on: " << num_start_loc_pushes_per_sample);
01237       }
01238 
01239       // Get initial object boundary location in the current world frame
01240       ROS_INFO_STREAM("init_obj_point: " << start_loc_history_[0].boundary_loc_);
01241       pcl16::PointXYZ init_loc_point = objectPointInWorldFrame(start_loc_history_[0].boundary_loc_, cur_state);
01242       ROS_INFO_STREAM("init_loc_point: " << init_loc_point);
01243 
01244       // Find index of closest point on current boundary to the initial pushing location
01245       double min_dist = FLT_MAX;
01246       for (int i = 0; i < hull_cloud.size(); ++i)
01247       {
01248         double dist_i = pcl_segmenter_->sqrDist(init_loc_point, hull_cloud.at(i));
01249         if (dist_i < min_dist)
01250         {
01251           min_dist = dist_i;
01252           rot_idx = i;
01253         }
01254       }
01255     }
01256     // Test hull_cloud orientation, reverse iteration if it is negative
01257     double pt0_theta = atan2(hull_cloud[rot_idx].y - cur_state.x.y, hull_cloud[rot_idx].x - cur_state.x.x);
01258     int pt1_idx = (rot_idx+1) % hull_cloud.size();
01259     double pt1_theta = atan2(hull_cloud[pt1_idx].y - cur_state.x.y, hull_cloud[pt1_idx].x - cur_state.x.x);
01260     bool reverse_data = false;
01261     if (subPIAngle(pt1_theta - pt0_theta) < 0)
01262     {
01263       reverse_data = true;
01264       ROS_INFO_STREAM("Reversing data for boundaries");
01265     }
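    // NOTE (added): pt0_theta and pt1_theta are polar angles, about the object
    // center, of consecutive hull points; a negative signed difference means
    // the hull is ordered clockwise, so traversal is reversed below to keep
    // arc length increasing in a consistent direction.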
01266 
01267     // Compute cumulative distance around the boundary at each point
01268     std::vector<double> boundary_dists(hull_cloud.size(), 0.0);
01269     double boundary_length = 0.0;
01270     ROS_INFO_STREAM("rot_idx is " << rot_idx);
01271     for (int i = 1; i <= hull_cloud.size(); ++i)
01272     {
01273       int idx0 = (rot_idx+i-1) % hull_cloud.size();
01274       int idx1 = (rot_idx+i) % hull_cloud.size();
01275       if (reverse_data)
01276       {
01277         idx0 = (hull_cloud.size()+rot_idx-i+1) % hull_cloud.size();
01278         idx1 = (hull_cloud.size()+rot_idx-i) % hull_cloud.size();
01279       }
01280       // NOTE: This makes boundary_dists[rot_idx] = 0.0, and we have no location at 100% the boundary_length
01281       boundary_dists[idx0] = boundary_length;
01282       double loc_dist = pcl_segmenter_->dist(hull_cloud[idx0], hull_cloud[idx1]);
01283       boundary_length += loc_dist;
01284     }
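    // Worked example (added): for a square hull of side s sampled only at its
    // four corners, with traversal starting at rot_idx, boundary_dists would
    // be {0, s, 2s, 3s} and boundary_length = 4s, so
    // start_loc_arc_length_percent_ = 0.25 selects the corner one side away.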
01285 
01286     // Find location at start_loc_arc_length_percent_ around the boundary
01287     double desired_boundary_dist = start_loc_arc_length_percent_*boundary_length;
01288     ROS_INFO_STREAM("Finding location at dist " << desired_boundary_dist << " ~= " << start_loc_arc_length_percent_*100 <<
01289                     "% of " << boundary_length);
01290     int boundary_loc_idx = 0;
01291     double min_boundary_dist_diff = FLT_MAX;
01292     for (int i = 0; i < hull_cloud.size(); ++i)
01293     {
01294       double boundary_dist_diff_i = fabs(desired_boundary_dist - boundary_dists[i]);
01295       if (boundary_dist_diff_i < min_boundary_dist_diff)
01296       {
01297         min_boundary_dist_diff = boundary_dist_diff_i;
01298         boundary_loc_idx = i;
01299       }
01300     }
01301     ROS_INFO_STREAM("Chose location at idx: " << boundary_loc_idx << " with diff " << min_boundary_dist_diff);
01302     // Get descriptor at the chosen location
01303     // ShapeLocations locs = tabletop_pushing::extractShapeContextFromSamples(hull_cloud, cur_obj,
01304     //                                                                         use_center_pointing_shape_context_);
01305     pcl16::PointXYZ boundary_loc = hull_cloud[boundary_loc_idx];
01306     ShapeDescriptor sd = tabletop_pushing::extractLocalAndGlobalShapeFeatures(hull_cloud, cur_obj, boundary_loc,
01307                                                                               boundary_loc_idx, gripper_spread_,
01308                                                                               hull_alpha_, point_cloud_hist_res_);
01309     // Add into pushing history in object frame
01310     // ShapeLocation s(worldPointInObjectFrame(locs[boundary_loc_idx].boundary_loc_, cur_state),
01311     //                 locs[boundary_loc_idx].descriptor_);
01312     // start_loc_history_.push_back(s);
01313     ShapeLocation s_obj(worldPointInObjectFrame(boundary_loc, cur_state), sd);
01314     start_loc_history_.push_back(s_obj);
01315 
01316     // TODO: Project desired outline to show where to place object before pushing?
01317     cv::Mat hull_img(cur_color_frame_.size(), CV_8UC1, cv::Scalar(0));
01318     pcl_segmenter_->projectPointCloudIntoImage(hull_cloud, hull_img);
01319     hull_img*=255;
01320     cv::Mat hull_disp_img(hull_img.size(), CV_8UC3, cv::Scalar(0,0,0));
01321     cv::cvtColor(hull_img, hull_disp_img, CV_GRAY2BGR);
01322     cv::Point2f img_rot_idx = pcl_segmenter_->projectPointIntoImage(hull_cloud[rot_idx], hull_cloud.header.frame_id,
01323                                                                     camera_frame_);
01324     cv::circle(hull_disp_img, img_rot_idx, 4, cv::Scalar(0,255,0), 3);
01325     cv::Point2f img_loc_idx = pcl_segmenter_->projectPointIntoImage(boundary_loc,
01326                                                                     hull_cloud.header.frame_id, camera_frame_);
01327     cv::circle(hull_disp_img, img_loc_idx, 4, cv::Scalar(0,0, 255));
01328     cv::imshow("object hull", hull_disp_img);
01329     if (write_to_disk_)
01330     {
01331       std::stringstream hull_file_name;
01332       hull_file_name << base_output_path_ << trial_id << "_obj_hull.png";
01333       std::stringstream hull_disp_file_name;
01334       hull_disp_file_name << base_output_path_ << trial_id << "_obj_hull_disp.png";
01335       cv::imwrite(hull_file_name.str(), hull_img);
01336       cv::imwrite(hull_disp_file_name.str(), hull_disp_img);
01337     }
01338     ShapeLocation s_world(boundary_loc, sd);
01339     return s_world;
01340     // return locs[boundary_loc_idx];
01341   }
01342 
01343   ShapeLocation chooseLearnedPushStartLoc(ProtoObject& cur_obj, PushTrackerState& cur_state, std::string param_path,
01344                                           float& chosen_score, bool previous_position_worked,
01345                                           bool rotate_push)
01346   {
01347     // Get features for all of the boundary locations
01348     // TODO: Set these values somewhere else
01349     int local_length = 36;
01350     int global_length = 60;
01351     XYZPointCloud hull_cloud = tabletop_pushing::getObjectBoundarySamples(cur_obj, hull_alpha_);
01352     ShapeDescriptors sds = tabletop_pushing::extractLocalAndGlobalShapeFeatures(hull_cloud, cur_obj,
01353                                                                                 gripper_spread_, hull_alpha_,
01354                                                                                 point_cloud_hist_res_);
01355     // Read in model SVs and coefficients
01356     svm_model* push_model;
01357     push_model = svm_load_model(param_path.c_str());
01358     // Remove trailing .model
01359     param_path.erase(param_path.size()-6, 6);
01360     std::stringstream train_feat_path;
01361     train_feat_path << param_path << "-feats.txt";
01362 
01363     // TODO: Get these parameters from disk
01364     double gamma_local = 2.5;
01365     double gamma_global = 2.0;
01366     double mixture_weight = 0.7;
01367     if (rotate_push)
01368     {
01369       gamma_local = 0.05;
01370       gamma_global = 2.5;
01371       mixture_weight = 0.7;
01372     }
01373     cv::Mat K = tabletop_pushing::computeChi2Kernel(sds, train_feat_path.str(), local_length, global_length,
01374                                                     gamma_local, gamma_global, mixture_weight);
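    // Sketch (added, assumptions flagged): computeChi2Kernel() is assumed to
    // evaluate an exponential chi-squared kernel between each new descriptor x
    // and each training descriptor y,
    //   k(x, y) = exp(-gamma * sum_i (x_i - y_i)^2 / (x_i + y_i)),
    // computed separately over the local (length 36) and global (length 60)
    // descriptor segments and blended as
    //   mixture_weight * k_local + (1 - mixture_weight) * k_global.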
01375 
01376     std::vector<double> pred_push_scores;
01377     std::priority_queue<ScoredIdx, std::vector<ScoredIdx>, ScoredIdxComparison> pq(
01378         (ScoredIdxComparison(rotate_push)) );
01379     XYZPointCloud hull_cloud_obj;
01380     hull_cloud_obj.width = hull_cloud.size();
01381     hull_cloud_obj.height = 1;
01382     hull_cloud_obj.is_dense = false;
01383     hull_cloud_obj.resize(hull_cloud_obj.width*hull_cloud_obj.height);
01384 
01385     // Perform prediction at all sample locations
01386     for (int i = 0; i < K.cols; ++i)
01387     {
01388       svm_node* x = new svm_node[K.rows+1];
01389       x[0].value = 0;
01390       x[0].index = 0;
01391       for (int j = 0; j < K.rows; ++j)
01392       {
01393         x[j+1].value = K.at<double>(j, i);
01394         x[j+1].index = 0; // unused
01395       }
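      // NOTE (added, assumption): stock libsvm expects a precomputed-kernel
      // sample to carry its 1-based serial number in x[0].value and the node
      // array to end with a terminator node whose index is -1; the zero
      // indices written above suggest a locally modified libsvm build.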
01396       // Perform prediction and convert into appropriate space
01397       double raw_pred_score = svm_predict(push_model, x);
01398       // NOTE: Rotate and linear pushes currently share the same score mapping
01399       double pred_score = exp(raw_pred_score);
01407       if (isnan(pred_score) || isinf(pred_score))
01408       {
01409         ROS_WARN_STREAM("Sample " << i <<  " has pred score: " << pred_score << "\traw pred score: " << raw_pred_score);
01410       }
01411       if (isinf(pred_score))
01412       {
01413         pred_score = raw_pred_score;
01414       }
01415       ScoredIdx scored_idx;
01416       scored_idx.score = pred_score;
01417       scored_idx.idx = i;
01418       pq.push(scored_idx);
01419       pred_push_scores.push_back(pred_score);
01420       // Center cloud at (0,0) but leave the orientation
01421       hull_cloud_obj[i].x = hull_cloud[i].x - cur_state.x.x;
01422       hull_cloud_obj[i].y = hull_cloud[i].y - cur_state.x.y;
01423       delete [] x; // NOTE: matches the new[] allocation above
01424     }
01425 
01426     // Free SVM struct
01427     svm_free_and_destroy_model(&push_model);
01428 
01429     // Write SVM scores & descriptors to disk?
01430     writePredictedScoreToDisk(hull_cloud, sds, pred_push_scores);
01431 
01432     ScoredIdx best_scored = pq.top();
01433     // if (!previous_position_worked)
01434     // {
01435     //   // TODO: Replace this with location history not simple count
01436     //   num_position_failures_++;
01437     //   ROS_INFO_STREAM("Ignoring top: " << num_position_failures_ << " positions");
01438     //   for (int p = 0; p < num_position_failures_; ++p)
01439     //   {
01440     //     pq.pop();
01441     //   }
01442     // }
01443     // else
01444     // {
01445     //   num_position_failures_ = 0;
01446     // }
01447 
01448     // Ensure goal pose is on the table
01449     while (pq.size() > 0)
01450     {
01451       ScoredIdx chosen = pq.top();
01452       pq.pop();
01453 
01454       // Return the location of the best score
01455       ShapeLocation loc;
01456       loc.boundary_loc_ = hull_cloud[chosen.idx];
01457       loc.descriptor_ = sds[chosen.idx];
01458       float new_push_angle;
01459       Pose2D goal_pose =  generateStartLocLearningGoalPose(cur_state, cur_obj, loc, new_push_angle, rotate_push);
01460       if (rotate_push || goalPoseValid(goal_pose))
01461       {
01462         ROS_INFO_STREAM("Chose push location " << chosen.idx << " with score " << chosen.score);
01463         pcl16::PointXYZ selected(hull_cloud_obj[chosen.idx].x, hull_cloud_obj[chosen.idx].y, 0.0);
01464         displayLearnedPushLocScores(pred_push_scores, hull_cloud_obj, selected, rotate_push);
01465         chosen_score = chosen.score;
01466         return loc;
01467       }
01468     }
01469     // No points worked
01470     ShapeLocation best_loc;
01471     best_loc.boundary_loc_ = hull_cloud[best_scored.idx];
01472     best_loc.descriptor_ = sds[best_scored.idx];
01473     ROS_INFO_STREAM("Chose default push location " << best_scored.idx << " with score " << best_scored.score);
01474     chosen_score = best_scored.score;
01475     return best_loc;
01476   }
01477 
01478   ShapeLocation chooseRandomPushStartLoc(ProtoObject& cur_obj, PushTrackerState& cur_state, bool rotate_push)
01479   {
01480     // Get features for all of the boundary locations
01481     // TODO: Set these values somewhere else
01482     XYZPointCloud hull_cloud = tabletop_pushing::getObjectBoundarySamples(cur_obj, hull_alpha_);
01483     ShapeDescriptors sds = tabletop_pushing::extractLocalAndGlobalShapeFeatures(hull_cloud, cur_obj,
01484                                                                                 gripper_spread_, hull_alpha_,
01485                                                                                 point_cloud_hist_res_);
01486     std::vector<int> available_indices;
01487     for (int i = 0; i < hull_cloud.size(); ++i)
01488     {
01489       available_indices.push_back(i);
01490     }
01491     // Ensure goal pose is on the table
01492     while (available_indices.size() > 0)
01493     {
01494       int rand_idx = rand()%available_indices.size();
01495       int chosen_idx = available_indices[rand_idx];
01496 
01497       // Return the location of the best score
01498       ShapeLocation loc;
01499       loc.boundary_loc_ = hull_cloud[chosen_idx];
01500       loc.descriptor_ = sds[chosen_idx];
01501       float new_push_angle;
01502       Pose2D goal_pose =  generateStartLocLearningGoalPose(cur_state, cur_obj, loc, new_push_angle, rotate_push);
01503       if (rotate_push || goalPoseValid(goal_pose))
01504       {
01505         // TODO: Display boundary with 0 scores
01506         ROS_INFO_STREAM("Choosing random idx: " << chosen_idx);
01507         return loc;
01508       }
01509       available_indices.erase(available_indices.begin()+rand_idx);
01510     }
01511     // No points worked
01512     int chosen_idx = rand()%hull_cloud.size();
01513     ShapeLocation chosen_loc;
01514     chosen_loc.boundary_loc_ = hull_cloud[chosen_idx];
01515     chosen_loc.descriptor_ = sds[chosen_idx];
01516     return chosen_loc;
01517   }
01518 
01519   std::vector<pcl16::PointXYZ> findAxisAlignedBoundingBox(PushTrackerState& cur_state, ProtoObject& cur_obj)
01520   {
01521     // Get cloud in object frame
01522     XYZPointCloud object_frame_cloud;
01523     worldPointsInObjectFrame(cur_obj.cloud, cur_state, object_frame_cloud);
01524     // Find min max x and y
01525     double min_x = FLT_MAX;
01526     double max_x = -FLT_MAX;
01527     double min_y = FLT_MAX;
01528     double max_y = -FLT_MAX;
01529     for (int i = 0; i < object_frame_cloud.size(); ++i)
01530     {
01531       if (object_frame_cloud[i].x < min_x)
01532       {
01533         min_x = object_frame_cloud[i].x;
01534       }
01535       if (object_frame_cloud[i].x > max_x)
01536       {
01537         max_x = object_frame_cloud[i].x;
01538       }
01539       if (object_frame_cloud[i].y < min_y)
01540       {
01541         min_y = object_frame_cloud[i].y;
01542       }
01543       if (object_frame_cloud[i].y > max_y)
01544       {
01545         max_y = object_frame_cloud[i].y;
01546       }
01547     }
01548     std::vector<pcl16::PointXYZ> vertices;
01549     vertices.push_back(objectPointInWorldFrame(pcl16::PointXYZ(max_x, max_y, 0.0), cur_state));
01550     vertices.push_back(objectPointInWorldFrame(pcl16::PointXYZ(max_x, min_y, 0.0), cur_state));
01551     vertices.push_back(objectPointInWorldFrame(pcl16::PointXYZ(min_x, min_y, 0.0), cur_state));
01552     vertices.push_back(objectPointInWorldFrame(pcl16::PointXYZ(min_x, max_y, 0.0), cur_state));
01553     return vertices;
01554   }
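  // NOTE (added): the box is axis-aligned in the object frame only; after
  // objectPointInWorldFrame() the returned vertices form an oriented
  // rectangle in the world frame, ordered (max,max), (max,min), (min,min),
  // (min,max).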
01555 
01556   float getRotatePushHeading(PushTrackerState& cur_state, ShapeLocation& chosen_loc, ProtoObject& cur_obj)
01557   {
01558     // cv::RotatedRect bounding_box = obj_tracker_->findFootprintBox(cur_obj);
01559     // cv::Point2f vertices[4];
01560     // bounding_box.points(vertices);
01561 
01562     std::vector<pcl16::PointXYZ> vertices = findAxisAlignedBoundingBox(cur_state, cur_obj);
01563     double min_dist = FLT_MAX;
01564     int chosen_idx = 0;
01565     float push_angle_world_frame = 0.0;
01566 
01567     cv::Point2f box_center;
01568     box_center.x = (vertices[0].x+vertices[2].x)*0.5;
01569     box_center.y = (vertices[0].y+vertices[2].y)*0.5;
01570     for (int i = 0; i < 4; ++i)
01571     {
01572       int j = (i+1)%4;
01573       double line_dist = cpl_visual_features::pointLineDistance2D(chosen_loc.boundary_loc_, vertices[i], vertices[j]);
01574       if (line_dist < min_dist)
01575       {
01576         min_dist = line_dist;
01577         chosen_idx = i;
01578         // NOTE: Push the direction from the chosen side's midpoint through the Box's center
01579         push_angle_world_frame = atan2(box_center.y-(vertices[i].y + vertices[j].y)*0.5,
01580                                        box_center.x-(vertices[i].x + vertices[j].x)*0.5);
01581       }
01582     }
01583 
01584     cv::Mat display_frame;
01585     cur_color_frame_.copyTo(display_frame);
01586     for (int i = 0; i < 4; ++i)
01587     {
01588       int j = (i+1)%4;
01589 
01590       PointStamped a_world;
01591       a_world.header.frame_id = cur_obj.cloud.header.frame_id;
01592       a_world.point.x = vertices[i].x;
01593       a_world.point.y = vertices[i].y;
01594       a_world.point.z = vertices[i].z;
01595       PointStamped b_world;
01596       b_world.header.frame_id = cur_obj.cloud.header.frame_id;
01597       b_world.point.x = vertices[j].x;
01598       b_world.point.y = vertices[j].y;
01599       b_world.point.z = vertices[j].z;
01600       cv::Point a_img = pcl_segmenter_->projectPointIntoImage(a_world);
01601       cv::Point b_img = pcl_segmenter_->projectPointIntoImage(b_world);
01602       cv::line(display_frame, a_img, b_img, cv::Scalar(0,0,0),3);
01603       if (i == chosen_idx)
01604       {
01605         // ROS_INFO_STREAM("chosen_idx is: " << chosen_idx);
01606         cv::line(display_frame, a_img, b_img, cv::Scalar(0,255,255),1);
01607       }
01608       else
01609       {
01610         cv::line(display_frame, a_img, b_img, cv::Scalar(0,255,0),1);
01611       }
01612     }
01613     cv::imshow("footprint_box", display_frame);
01614     // TODO: write to disk?
01615     return push_angle_world_frame;
01616   }
01617 
01618   Pose2D generateStartLocLearningGoalPose(PushTrackerState& cur_state, ProtoObject& cur_obj,
01619                                           ShapeLocation& chosen_loc, float& new_push_angle, bool rotate_push=false)
01620   {
01621     Pose2D goal_pose;
01622     if (rotate_push)
01623     {
01624       // Set goal for rotate pushing angle and goal state; then get start location as usual below
01625       new_push_angle = getRotatePushHeading(cur_state, chosen_loc, cur_obj);
01626       goal_pose.x = cur_state.x.x;
01627       goal_pose.y = cur_state.x.y;
01628       // NOTE: Uncomment for visualization purposes
01629       // res.goal_pose.x = cur_state.x.x+cos(new_push_angle)*start_loc_push_dist_;
01630       // res.goal_pose.y = cur_state.x.y+sin(new_push_angle)*start_loc_push_dist_;
01631       // TODO: Figure out +/- here
01632       goal_pose.theta = cur_state.x.theta+M_PI;
01633     }
01634     else
01635     {
01636       // Set goal for pushing and then get start location as usual below
01637       new_push_angle = atan2(cur_state.x.y - chosen_loc.boundary_loc_.y,
01638                              cur_state.x.x - chosen_loc.boundary_loc_.x);
01639       goal_pose.x = cur_state.x.x+cos(new_push_angle)*start_loc_push_dist_;
01640       goal_pose.y = cur_state.x.y+sin(new_push_angle)*start_loc_push_dist_;
01641     }
01642     return goal_pose;
01643   }
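  // Worked example (added, hypothetical values): for a non-rotate push with
  // the object centroid at (0.6, 0.0) and a chosen boundary point at
  // (0.5, 0.0), new_push_angle = atan2(0.0, 0.1) = 0, so the goal pose lies
  // start_loc_push_dist_ meters along +x from the centroid, i.e. the push
  // drives the boundary point through the object center.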
01644 
01645   bool goalPoseValid(Pose2D goal_pose) const
01646   {
01647     return (goal_pose.x < max_goal_x_ && goal_pose.x > min_goal_x_ &&
01648             goal_pose.y < max_goal_y_ && goal_pose.y > min_goal_y_);
01649   }
01650 
01651   ShapeLocation getStartLocDescriptor(ProtoObject& cur_obj, PushTrackerState& cur_state, geometry_msgs::Point start_pt)
01652   {
01653     // Get shape features and associated locations
01654     ShapeLocations locs = tabletop_pushing::extractObjectShapeContext(cur_obj, use_center_pointing_shape_context_);
01655     // Find location closest to the chosen start point
01656     float min_dist = FLT_MAX;
01657     unsigned int loc_idx = locs.size();
01658     for (unsigned int i = 0; i < locs.size(); ++i)
01659     {
01660       float loc_dist = pcl_segmenter_->dist(locs[i].boundary_loc_, start_pt);
01661       if (loc_dist < min_dist)
01662       {
01663         min_dist = loc_dist;
01664         loc_idx = i;
01665       }
01666     }
01667     ROS_WARN_STREAM("Chose loc " << locs[loc_idx].boundary_loc_ << " with distance " << min_dist << "m");
01668     return locs[loc_idx];
01669   }
01670 
01671   void worldPointsInObjectFrame(XYZPointCloud& world_cloud, PushTrackerState& cur_state, XYZPointCloud& object_cloud)
01672   {
01673     object_cloud.width = world_cloud.size();
01674     object_cloud.height = 1;
01675     object_cloud.is_dense = false;
01676     object_cloud.resize(object_cloud.width);
01677     for (int i = 0; i < world_cloud.size(); ++i)
01678     {
01679       object_cloud[i] = worldPointInObjectFrame(world_cloud[i], cur_state);
01680     }
01681   }
01682 
01683   pcl16::PointXYZ worldPointInObjectFrame(pcl16::PointXYZ world_pt, PushTrackerState& cur_state)
01684   {
01685     // Center on object frame
01686     pcl16::PointXYZ shifted_pt;
01687     shifted_pt.x = world_pt.x - cur_state.x.x;
01688     shifted_pt.y = world_pt.y - cur_state.x.y;
01689     shifted_pt.z = world_pt.z - cur_state.z;
01690     double ct = cos(cur_state.x.theta);
01691     double st = sin(cur_state.x.theta);
01692     // Rotate into correct frame
01693     pcl16::PointXYZ obj_pt;
01694     obj_pt.x =  ct*shifted_pt.x + st*shifted_pt.y;
01695     obj_pt.y = -st*shifted_pt.x + ct*shifted_pt.y;
01696     obj_pt.z = shifted_pt.z; // NOTE: Currently assume 2D motion
01697     return obj_pt;
01698   }
01699 
01700   pcl16::PointXYZ objectPointInWorldFrame(pcl16::PointXYZ obj_pt, PushTrackerState& cur_state)
01701   {
01702     // Rotate out of object frame
01703     pcl16::PointXYZ rotated_pt;
01704     double ct = cos(cur_state.x.theta);
01705     double st = sin(cur_state.x.theta);
01706     rotated_pt.x = ct*obj_pt.x - st*obj_pt.y;
01707     rotated_pt.y = st*obj_pt.x + ct*obj_pt.y;
01708     rotated_pt.z = obj_pt.z;  // NOTE: Currently assume 2D motion
01709     // Shift to world frame
01710     pcl16::PointXYZ world_pt;
01711     world_pt.x = rotated_pt.x + cur_state.x.x;
01712     world_pt.y = rotated_pt.y + cur_state.x.y;
01713     world_pt.z = rotated_pt.z + cur_state.z;
01714     return world_pt;
01715   }
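  // Example (added): the two transforms above are inverses, so a round trip
  // should recover the input up to floating point error:
  //   pcl16::PointXYZ p_obj  = worldPointInObjectFrame(p_world, cur_state);
  //   pcl16::PointXYZ p_back = objectPointInWorldFrame(p_obj, cur_state);
  //   // p_back ~= p_world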
01716 
01717   void getObjectPose(LearnPush::Response& res)
01718   {
01719     bool no_objects = false;
01720     // TODO: Switch to findTargetObjectGC?
01721     // ProtoObject cur_obj = obj_tracker_->findTargetObjectGC(cur_color_frame_,
01722     //                                                        cur_point_cloud_, cur_depth_frame_,
01723     //                                                        cur_self_mask_, no_objects, true);
01724     ProtoObject cur_obj = obj_tracker_->findTargetObject(cur_color_frame_, cur_point_cloud_, no_objects, true);
01725     if (no_objects)
01726     {
01727       ROS_WARN_STREAM("No objects found on analysis");
01728       res.centroid.x = 0.0;
01729       res.centroid.y = 0.0;
01730       res.centroid.z = 0.0;
01731       res.theta = 0.0;
01732       res.no_objects = true;
01733       return;
01734     }
01735     cv::RotatedRect obj_ellipse;
01736     obj_tracker_->fitObjectEllipse(cur_obj, obj_ellipse);
01737     res.no_objects = false;
01738     res.centroid.x = cur_obj.centroid[0];
01739     res.centroid.y = cur_obj.centroid[1];
01740     res.centroid.z = cur_obj.centroid[2];
01741     res.theta = obj_tracker_->getThetaFromEllipse(obj_ellipse);
01742     if(obj_tracker_->getSwapState())
01743     {
01744       if(res.theta > 0.0)
01745         res.theta -= M_PI;
01746       else
01747         res.theta += M_PI;
01748     }
01749   }
01750 
01751   void startTracking(PushTrackerState& state, bool start_swap=false)
01752   {
01753     ROS_INFO_STREAM("Starting tracker");
01754     frame_set_count_++;
01755     goal_out_count_ = 0;
01756     goal_heading_count_ = 0;
01757     frame_callback_count_ = 0;
01758     if (use_graphcut_arm_seg_)
01759     {
01760       obj_tracker_->initTracks(cur_color_frame_, cur_depth_frame_, cur_self_mask_, cur_point_cloud_,
01761                                proxy_name_, state, start_swap);
01762     }
01763     else
01764     {
01765       obj_tracker_->initTracks(cur_color_frame_, cur_depth_frame_, cur_self_mask_, cur_self_filtered_cloud_,
01766                                proxy_name_, state, start_swap);
01767     }
01768 
01769   }
01770 
01771   void lArmStateCartCB(const pr2_manipulation_controllers::JTTaskControllerState l_arm_state)
01772   {
01773     l_arm_pose_ = l_arm_state.x;
01774     l_arm_vel_ = l_arm_state.xd;
01775   }
01776   void rArmStateCartCB(const pr2_manipulation_controllers::JTTaskControllerState r_arm_state)
01777   {
01778     r_arm_pose_ = r_arm_state.x;
01779     r_arm_vel_ = r_arm_state.xd;
01780   }
01781   void lArmStateVelCB(const pr2_manipulation_controllers::JinvTeleopControllerState l_arm_state)
01782   {
01783     l_arm_pose_ = l_arm_state.x;
01784     l_arm_vel_ = l_arm_state.xd;
01785   }
01786   void rArmStateVelCB(const pr2_manipulation_controllers::JinvTeleopControllerState r_arm_state)
01787   {
01788     r_arm_pose_ = r_arm_state.x;
01789     r_arm_vel_ = r_arm_state.xd;
01790   }
01791 
01792   void pushTrackerGoalCB()
01793   {
01794     ROS_INFO_STREAM("Accepting goal");
01795     shared_ptr<const PushTrackerGoal> tracker_goal = as_.acceptNewGoal();
01796     tracker_goal_pose_ = tracker_goal->desired_pose;
01797     pushing_arm_ = tracker_goal->which_arm;
01798     controller_name_ = tracker_goal->controller_name;
01799     proxy_name_ = tracker_goal->proxy_name;
01800     behavior_primitive_ = tracker_goal->behavior_primitive;
01801     ROS_INFO_STREAM("Accepted goal of " << tracker_goal_pose_);
01802     gripper_not_moving_count_ = 0;
01803     object_not_moving_count_ = 0;
01804     object_not_detected_count_ = 0;
01805     object_too_far_count_ = 0;
01806     object_not_between_count_ = 0;
01807     push_start_time_ = ros::Time::now().toSec();
01808 
01809     if (obj_tracker_->isInitialized())
01810     {
01811       obj_tracker_->unpause();
01812     }
01813     else
01814     {
01815       PushTrackerState state;
01816       startTracking(state);
01817     }
01818   }
01819 
01820   void pushTrackerPreemptCB()
01821   {
01822     obj_tracker_->pause();
01823     ROS_INFO_STREAM("Preempted push tracker");
01824     // set the action state to preempted
01825     as_.setPreempted();
01826   }
01827 
01828   bool objectNotMoving(PushTrackerState& tracker_state)
01829   {
01830     if (fabs(tracker_state.x_dot.x) < object_not_moving_thresh_ &&
01831         fabs(tracker_state.x_dot.y) < object_not_moving_thresh_)
01832     {
01833       ++object_not_moving_count_;
01834     }
01835     else
01836     {
01837       object_not_moving_count_ = 0;
01838     }
01839     return object_not_moving_count_  >= object_not_moving_count_limit_;
01840   }
01841 
01842   bool objectDisappeared(PushTrackerState& tracker_state)
01843   {
01844     if (tracker_state.no_detection)
01845     {
01846       ++object_not_detected_count_;
01847     }
01848     else
01849     {
01850       object_not_detected_count_ = 0;
01851     }
01852     return object_not_detected_count_  >= object_not_detected_count_limit_;
01853   }
01854 
01855   bool gripperNotMoving()
01856   {
01857     Twist gripper_vel;
01858     if ( pushing_arm_ == "l")
01859     {
01860       gripper_vel = l_arm_vel_;
01861     }
01862     else
01863     {
01864       gripper_vel = r_arm_vel_;
01865     }
01866     if (fabs(gripper_vel.linear.x) < gripper_not_moving_thresh_ &&
01867         fabs(gripper_vel.linear.y) < gripper_not_moving_thresh_)
01868     {
01869       ++gripper_not_moving_count_;
01870     }
01871     else
01872     {
01873       gripper_not_moving_count_ = 0;
01874     }
01875     return gripper_not_moving_count_  >= gripper_not_moving_count_limit_;
01876   }
01877 
01878   bool objectNotBetweenGoalAndGripper(Pose2D& obj_state)
01879   {
01880     if (pushing_arm_ == "l")
01881     {
01882       if( pointIsBetweenOthers(l_arm_pose_.pose.position, obj_state, tracker_goal_pose_,
01883                                object_not_between_epsilon_))
01884       {
01885         ++object_not_between_count_;
01886       }
01887       else
01888       {
01889         object_not_between_count_ = 0;
01890       }
01891     }
01892     else if (pushing_arm_ == "r")
01893     {
01894       if( pointIsBetweenOthers(r_arm_pose_.pose.position, obj_state, tracker_goal_pose_,
01895                                object_not_between_epsilon_))
01896       {
01897         ++object_not_between_count_;
01898       }
01899       else
01900       {
01901         object_not_between_count_ = 0;
01902       }
01903     }
01904     return object_not_between_count_ >= object_not_between_count_limit_;
01905   }
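  // NOTE (added): the check asks whether the gripper itself projects between
  // the object pose and the goal pose; if so, the object can no longer be
  // between gripper and goal, and the failure counter advances.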
01906 
01907   // TODO: Make this threshold the initial distance when pushing + some epsilon
01908   bool objectTooFarFromGripper(Pose2D& obj_state)
01909   {
01910     geometry_msgs::Point gripper_pt;
01911     if (pushing_arm_ == "l")
01912     {
01913       gripper_pt = l_arm_pose_.pose.position;
01914     }
01915     else
01916     {
01917       gripper_pt = r_arm_pose_.pose.position;
01918     }
01919     float gripper_dist = hypot(gripper_pt.x-obj_state.x,gripper_pt.y-obj_state.y);
01920     if (gripper_dist  > max_object_gripper_dist_)
01921     {
01922       ++object_too_far_count_;
01923     }
01924     else
01925     {
01926       object_too_far_count_ = 0;
01927     }
01928     return object_too_far_count_ >= object_too_far_count_limit_;
01929   }
01930 
01931   bool pushingTimeUp()
01932   {
01933     return (ros::Time::now().toSec() - push_start_time_) > start_loc_push_time_;
01938   }
01939 
01940   bool pointIsBetweenOthers(geometry_msgs::Point pt, Pose2D& x1, Pose2D& x2, double epsilon=0.0)
01941   {
01942     // Project the vector pt->x2 onto the vector x1->x2
01943     const float a_x = x2.x - pt.x;
01944     const float a_y = x2.y - pt.y;
01945     const float b_x = x2.x - x1.x;
01946     const float b_y = x2.y - x1.y;
01947     const float a_dot_b = a_x*b_x + a_y*b_y;
01948     const float b_dot_b = b_x*b_x + b_y*b_y;
01949     const float a_onto_b = a_dot_b/b_dot_b;
01950 
01951     // If the (squared) distance of the projection is less than the vector from x1->x2 then it is between them
01952     const float d_1_x = a_onto_b*b_x;
01953     const float d_1_y = a_onto_b*b_y;
01954     const float d_1 = d_1_x*d_1_x + d_1_y*d_1_y;
01955     const float d_2 = b_x*b_x + b_y*b_y;
01956 
01957     // NOTE: Add epsilon squared distance to the projected distance to allow for small noise
01958     return d_1+epsilon*epsilon < d_2;
01959   }
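  // Worked example (added): with x1 = (0,0), x2 = (2,0), pt = (1, 0.5) and
  // epsilon = 0: a = (1,-0.5), b = (2,0), a_onto_b = 0.5, d_1 = 1.0 and
  // d_2 = 4.0, so the point counts as between. Note d_1 and d_2 are both
  // squared distances.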
01960 
01970   bool getTableLocation(LocateTable::Request& req, LocateTable::Response& res)
01971   {
01972     if ( have_depth_data_ )
01973     {
01974       getTablePlane(cur_point_cloud_, res.table_centroid);
01975       if ((res.table_centroid.pose.position.x == 0.0 &&
01976            res.table_centroid.pose.position.y == 0.0 &&
01977            res.table_centroid.pose.position.z == 0.0) ||
01978           res.table_centroid.pose.position.x < 0.0)
01979       {
01980         ROS_ERROR_STREAM("No plane found, leaving");
01981         res.found_table = false;
01982         return false;
01983       }
01984       res.found_table = true;
01985       res.table_centroid.header.stamp = ros::Time::now();
01986     }
01987     else
01988     {
01989       ROS_ERROR_STREAM("Calling getTableLocation prior to receiving sensor data.");
01990       res.found_table = false;
01991       return false;
01992     }
01993     return true;
01994   }
01995 
02003   void getTablePlane(XYZPointCloud& cloud, PoseStamped& p)
02004   {
02005     XYZPointCloud obj_cloud, table_cloud;
02006     // TODO: Compute the hull on the first call
02007     Eigen::Vector4f table_centroid;
02008     pcl_segmenter_->getTablePlane(cloud, obj_cloud, table_cloud, table_centroid, false, true);
02009     p.pose.position.x = table_centroid[0];
02010     p.pose.position.y = table_centroid[1];
02011     p.pose.position.z = table_centroid[2];
02012     p.header = cloud.header;
02013     ROS_INFO_STREAM("Table centroid is: ("
02014                     << p.pose.position.x << ", "
02015                     << p.pose.position.y << ", "
02016                     << p.pose.position.z << ")");
02017     table_centroid_ = p;
02018   }
02019 
02020   void displayPushVector(cv::Mat& img, PointStamped& start_point, PointStamped& end_point,
02021                          std::string display_name="goal_vector", bool force_no_write=false)
02022   {
02023     cv::Mat disp_img;
02024     img.copyTo(disp_img);
02025 
02026     cv::Point img_start_point = pcl_segmenter_->projectPointIntoImage(start_point);
02027     cv::Point img_end_point = pcl_segmenter_->projectPointIntoImage(end_point);
02028     cv::line(disp_img, img_start_point, img_end_point, cv::Scalar(0,0,0),3);
02029     cv::line(disp_img, img_start_point, img_end_point, cv::Scalar(0,255,0));
02030     cv::circle(disp_img, img_end_point, 4, cv::Scalar(0,0,0),3);
02031     cv::circle(disp_img, img_end_point, 4, cv::Scalar(0,255,0));
02032     if (use_displays_)
02033     {
02034       cv::imshow(display_name, disp_img);
02035     }
02036     if (write_to_disk_ && !force_no_write)
02037     {
02038       // Write to disk to create video output
02039       std::stringstream push_out_name;
02040       push_out_name << base_output_path_ << display_name << "_" << frame_set_count_ << "_"
02041                     << goal_out_count_++ << ".png";
02042       cv::imwrite(push_out_name.str(), disp_img);
02043     }
02044   }
02045 
02046   void displayInitialPushVector(cv::Mat& img, PointStamped& start_point, PointStamped& end_point,
02047                                 PointStamped& centroid)
02048   {
02049     cv::Mat disp_img;
02050     img.copyTo(disp_img);
02051 
02052     cv::Point img_start_point = pcl_segmenter_->projectPointIntoImage(start_point);
02053     cv::Point img_end_point = pcl_segmenter_->projectPointIntoImage(end_point);
02054     cv::line(disp_img, img_start_point, img_end_point, cv::Scalar(0,0,0),3);
02055     cv::line(disp_img, img_start_point, img_end_point, cv::Scalar(0,255,0));
02056     cv::circle(disp_img, img_end_point, 4, cv::Scalar(0,0,0),3);
02057     cv::circle(disp_img, img_end_point, 4, cv::Scalar(0,255,0));
02058     cv::Point img_centroid_point = pcl_segmenter_->projectPointIntoImage(centroid);
02059     cv::circle(disp_img, img_centroid_point, 4, cv::Scalar(0,0,0),3);
02060     cv::circle(disp_img, img_centroid_point, 4, cv::Scalar(0,0,255));
02061     cv::imshow("initial_vector", disp_img);
02062   }
02063 
02064 
02065   void displayGoalHeading(cv::Mat& img, PointStamped& centroid, double theta, double goal_theta,
02066                           bool force_no_write=false)
02067   {
02068     cv::Mat disp_img;
02069     img.copyTo(disp_img);
02070 
02071     cv::Point img_c_idx = pcl_segmenter_->projectPointIntoImage(centroid);
02072     cv::RotatedRect obj_ellipse = obj_tracker_->getMostRecentEllipse();
02073     const float x_maj_rad = (std::cos(theta)*obj_ellipse.size.height*0.5);
02074     const float y_maj_rad = (std::sin(theta)*obj_ellipse.size.height*0.5);
02075     pcl16::PointXYZ table_heading_point(centroid.point.x+x_maj_rad, centroid.point.y+y_maj_rad,
02076                                       centroid.point.z);
02077     const cv::Point2f img_maj_idx = pcl_segmenter_->projectPointIntoImage(
02078         table_heading_point, centroid.header.frame_id, camera_frame_);
02079     const float goal_x_rad = (std::cos(goal_theta)*obj_ellipse.size.height*0.5);
02080     const float goal_y_rad = (std::sin(goal_theta)*obj_ellipse.size.height*0.5);
02081     pcl16::PointXYZ goal_heading_point(centroid.point.x+goal_x_rad, centroid.point.y+goal_y_rad,
02082                                      centroid.point.z);
02083     cv::Point2f img_goal_idx = pcl_segmenter_->projectPointIntoImage(
02084         goal_heading_point, centroid.header.frame_id, camera_frame_);
02085     // TODO: Draw partially shaded ellipse showing angle error
02086     float img_start_angle = RAD2DEG(std::atan2(img_maj_idx.y-img_c_idx.y,
02087                                                img_maj_idx.x-img_c_idx.x));
02088     float img_end_angle = RAD2DEG(std::atan2(img_goal_idx.y-img_c_idx.y,
02089                                              img_goal_idx.x-img_c_idx.x));
02090     double img_height = std::sqrt(std::pow(img_maj_idx.x-img_c_idx.x,2) +
02091                                   std::pow(img_maj_idx.y-img_c_idx.y,2));
02092     // NOTE: Renormalize goal idx position to lie on a circle through the current heading point
02093     cv::Point img_goal_draw_idx(std::cos(DEG2RAD(img_end_angle))*img_height+img_c_idx.x,
02094                                 std::sin(DEG2RAD(img_end_angle))*img_height+img_c_idx.y);
02095     cv::Size axes(img_height, img_height);
02096     // TODO: Draw backgrounds in black
02097     cv::ellipse(disp_img, img_c_idx, axes, img_start_angle, 0,
02098                 RAD2DEG(subPIAngle(DEG2RAD(img_end_angle-img_start_angle))), cv::Scalar(0,0,0), 3);
02099     cv::line(disp_img, img_c_idx, img_maj_idx, cv::Scalar(0,0,0), 3);
02100     cv::line(disp_img, img_c_idx, img_goal_draw_idx, cv::Scalar(0,0,0), 3);
02101     cv::ellipse(disp_img, img_c_idx, axes, img_start_angle, 0,
02102                 RAD2DEG(subPIAngle(DEG2RAD(img_end_angle-img_start_angle))), cv::Scalar(0,0,255));
02103     cv::line(disp_img, img_c_idx, img_maj_idx, cv::Scalar(0,0,255));
02104     cv::line(disp_img, img_c_idx, img_goal_draw_idx, cv::Scalar(0,0,255));
02105     if (use_displays_)
02106     {
02107       cv::imshow("goal_heading", disp_img);
02108     }
02109     if (write_to_disk_ && !force_no_write)
02110     {
02111       // Write to disk to create video output
02112       std::stringstream push_out_name;
02113       push_out_name << base_output_path_ << "goal_heading_" << frame_set_count_ << "_"
02114                     << goal_heading_count_++ << ".png";
02115       cv::imwrite(push_out_name.str(), disp_img);
02116     }
02117   }
02118 
02119   void displayRobotGripperPoses(cv::Mat& img)
02120   {
02121     cv::Mat disp_img;
02122     img.copyTo(disp_img);
02123 
02124     PointStamped l_gripper;
02125     l_gripper.header.frame_id = "torso_lift_link";
02126     l_gripper.header.stamp = ros::Time(0);
02127     l_gripper.point = l_arm_pose_.pose.position;
02128     PointStamped r_gripper;
02129     r_gripper.header.frame_id = "torso_lift_link";
02130     r_gripper.header.stamp = ros::Time(0);
02131     r_gripper.point = r_arm_pose_.pose.position;
02132     cv::Point l_gripper_img_point = pcl_segmenter_->projectPointIntoImage(l_gripper);
02133     cv::Point r_gripper_img_point = pcl_segmenter_->projectPointIntoImage(r_gripper);
02134     cv::circle(disp_img, l_gripper_img_point, 4, cv::Scalar(0,0,0),3);
02135     cv::circle(disp_img, l_gripper_img_point, 4, cv::Scalar(0,255,0));
02136     cv::circle(disp_img, r_gripper_img_point, 4, cv::Scalar(0,0,0),3);
02137     cv::circle(disp_img, r_gripper_img_point, 4, cv::Scalar(0,0,255));
02138 
02139     if (use_displays_)
02140     {
02141       cv::imshow("grippers", disp_img);
02142     }
02143   }
02144 
02145   void displayLearnedPushLocScores(std::vector<double>& push_scores, XYZPointCloud& locs, pcl16::PointXYZ selected,
02146                                    bool rotate_push)
02147   {
02148     double max_y = 0.2;
02149     double min_y = -0.2;
02150     double max_x = 0.2;
02151     double min_x = -0.2;
02152     int rows = ceil((max_y-min_y)/FOOTPRINT_XY_RES);
02153     int cols = ceil((max_x-min_x)/FOOTPRINT_XY_RES);
02154     cv::Mat footprint(rows, cols, CV_8UC3, cv::Scalar(255,255,255));
02155 
02156     for (int i = 0; i < push_scores.size(); ++i)
02157     {
02158       int img_y = rows-objLocToIdx(locs[i].x, min_x, max_x);
02159       int img_x = cols-objLocToIdx(locs[i].y, min_y, max_y);
02160       double score;
02161       if(rotate_push)
02162       {
02163         score = push_scores[i]/M_PI;
02164       }
02165       else
02166       {
02167         score = -log(push_scores[i])/10;
02168       }
02169       // ROS_INFO_STREAM("loc (" << x << ", " << y << ") : " << push_scores[i] << "\t" << score);
02170       cv::Scalar color(0, score*255, (1-score)*255);
02171       cv::circle(footprint, cv::Point(img_x, img_y), 1, color, 3);
02172       cv::circle(footprint, cv::Point(img_x, img_y), 2, color, 3);
02173       cv::circle(footprint, cv::Point(img_x, img_y), 3, color, 3);
02174     }
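    // NOTE (added): rotate-push scores are treated as radian errors and
    // normalized by pi, while linear-push scores pass through -log(s)/10;
    // both mappings are assumed to land roughly in [0, 1] so the red-green
    // colormap above stays in range.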
02175 
02176     // TODO: Set out_file_path correctly
02177     std::stringstream score_file_name;
02178     score_file_name << base_output_path_ << "score_footprint" << "_" << frame_set_count_ << ".png";
02179     cv::imwrite(score_file_name.str(), footprint);
02180 
02181     // Highlight selected pushing location
02182     cv::Point selected_img(cols-objLocToIdx(selected.y, min_y, max_y), rows-objLocToIdx(selected.x, min_x, max_x));
02183     cv::circle(footprint, selected_img, 5, cv::Scalar(0,0,0), 2);
02184     cv::imshow("Push score", footprint);
02185 
02186     std::stringstream score_selected_file_name;
02187     score_selected_file_name << base_output_path_ << "score_footprint_selected" << "_" << frame_set_count_ << ".png";
02188     cv::imwrite(score_selected_file_name.str(), footprint);
02189   }
02190 
02191   void writePredictedScoreToDisk(XYZPointCloud& hull_cloud, ShapeDescriptors& sds, std::vector<double>& pred_scores)
02192   {
02193     // Write SVM scores & descriptors to disk?
02194     std::stringstream svm_file_name;
02195     svm_file_name << base_output_path_ << "predicted_svm_scores" << "_" << frame_set_count_ << ".txt";
02196     // TODO: write loc, score, descriptor
02197     std::ofstream svm_file_stream;
02198     svm_file_stream.open(svm_file_name.str().c_str());
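    // Each output line (added): <hull x> <hull y> <predicted score> followed
    // by the descriptor values for that boundary sample.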
02199     for (int i = 0; i < hull_cloud.size(); ++i)
02200     {
02201       svm_file_stream << hull_cloud[i].x << " " << hull_cloud[i].y << " " << pred_scores[i];
02202       for (int j = 0; j < sds[i].size(); ++j)
02203       {
02204         svm_file_stream << " " << sds[i][j];
02205       }
02206       svm_file_stream << "\n";
02207     }
02208     svm_file_stream.close();
02209   }
02210 
02211 
02215   void spin()
02216   {
02217     while(n_.ok())
02218     {
02219       ros::spinOnce();
02220     }
02221   }
02222 
02223  protected:
02224   ros::NodeHandle n_;
02225   ros::NodeHandle n_private_;
02226   message_filters::Subscriber<sensor_msgs::Image> image_sub_;
02227   message_filters::Subscriber<sensor_msgs::Image> depth_sub_;
02228   message_filters::Subscriber<sensor_msgs::Image> mask_sub_;
02229   message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub_;
02230   message_filters::Synchronizer<MySyncPolicy> sync_;
02231   sensor_msgs::CameraInfo cam_info_;
02232   shared_ptr<tf::TransformListener> tf_;
02233   ros::ServiceServer push_pose_server_;
02234   ros::ServiceServer table_location_server_;
02235   ros::Subscriber jinv_l_arm_subscriber_;
02236   ros::Subscriber jinv_r_arm_subscriber_;
02237   ros::Subscriber jtteleop_l_arm_subscriber_;
02238   ros::Subscriber jtteleop_r_arm_subscriber_;
02239   actionlib::SimpleActionServer<PushTrackerAction> as_;
02240   cv::Mat cur_color_frame_;
02241   cv::Mat cur_depth_frame_;
02242   cv::Mat cur_self_mask_;
02243   // cv::Mat prev_color_frame_;
02244   // cv::Mat prev_depth_frame_;
02245   // cv::Mat prev_self_mask_;
02246   std_msgs::Header cur_camera_header_;
02247   // std_msgs::Header prev_camera_header_;
02248   XYZPointCloud cur_point_cloud_;
02249   XYZPointCloud cur_self_filtered_cloud_;
02250   shared_ptr<PointCloudSegmentation> pcl_segmenter_;
02251   bool have_depth_data_;
02252   int display_wait_ms_;
02253   bool use_displays_;
02254   bool write_input_to_disk_;
02255   bool write_to_disk_;
02256   std::string base_output_path_;
02257   int num_downsamples_;
02258   std::string workspace_frame_;
02259   std::string camera_frame_;
02260   PoseStamped table_centroid_;
02261   bool camera_initialized_;
02262   std::string cam_info_topic_;
02263   bool start_tracking_on_push_call_;
02264   bool recording_input_;
02265   int record_count_;
02266   int learn_callback_count_;
02267   int goal_out_count_;
02268   int goal_heading_count_;
02269   int frame_callback_count_;
02270   shared_ptr<ObjectTracker25D> obj_tracker_;
02271   Pose2D tracker_goal_pose_;
02272   std::string pushing_arm_;
02273   std::string proxy_name_;
02274   std::string controller_name_;
02275   std::string behavior_primitive_;
02276   double tracker_dist_thresh_;
02277   double tracker_angle_thresh_;
02278   bool just_spun_;
02279   double major_axis_spin_pos_scale_;
02280   int frame_set_count_;
02281   PoseStamped l_arm_pose_;
02282   PoseStamped r_arm_pose_;
02283   Twist l_arm_vel_;
02284   Twist r_arm_vel_;
02285   bool use_mps_segmentation_;
02286   double max_object_gripper_dist_;
02287   double object_not_moving_thresh_;
02288   int object_not_moving_count_;
02289   int object_not_moving_count_limit_;
02290   double gripper_not_moving_thresh_;
02291   int gripper_not_moving_count_;
02292   int gripper_not_moving_count_limit_;
02293   int object_not_detected_count_;
02294   int object_not_detected_count_limit_;
02295   int object_too_far_count_;
02296   int object_too_far_count_limit_;
02297   int object_not_between_count_;
02298   int object_not_between_count_limit_;
02299   double object_not_between_epsilon_;
02300   ShapeLocations start_loc_history_;
02301   double start_loc_push_time_;
02302   double start_loc_push_dist_;
02303   double push_start_time_;
02304   bool timing_push_;
02305   bool use_center_pointing_shape_context_;
02306   bool start_loc_use_fixed_goal_;
02307   std::string current_file_id_;
02308   ProtoObject start_loc_obj_;
02309   double start_loc_arc_length_percent_;
02310   int start_loc_push_sample_count_;
02311   bool force_swap_;
02312   int mask_dilate_size_;
02313   double point_cloud_hist_res_;
02314   int num_position_failures_;
02315   double min_goal_x_;
02316   double max_goal_x_;
02317   double min_goal_y_;
02318   double max_goal_y_;
02319   shared_ptr<ArmObjSegmentation> arm_obj_segmenter_;
02320   bool use_graphcut_arm_seg_;
02321   double hull_alpha_;
02322   double gripper_spread_;
02323   int footprint_count_;
02324 #ifdef DEBUG_POSE_ESTIMATION
02325   std::ofstream pose_est_stream_;
02326 #endif
02327 
02328 };
02329 
02330 int main(int argc, char ** argv)
02331 {
02332   int seed = time(NULL);
02333   srand(seed);
02334   std::cout << "Rand seed is: " << seed << std::endl;
02335   ros::init(argc, argv, "tabletop_pushing_perception_node");
02336   ros::NodeHandle n;
02337   TabletopPushingPerceptionNode perception_node(n);
02338   perception_node.spin();
02339   return 0;
02340 }

