// TabletopPushing
#include <tabletop_pushing/object_tracker_25d.h>
#include <tabletop_pushing/push_primitives.h>
#include <tabletop_pushing/shape_features.h>
#include <tabletop_pushing/extern/Timer.hpp>

// OpenCV
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

// PCL
#include <pcl16/common/pca.h>

// cpl_visual_features
#include <cpl_visual_features/helpers.h>

// Debugging IFDEFS
// #define PROFILE_TRACKING_TIME 1
// #define PROFILE_FIND_TARGET_TIME 1

using namespace tabletop_pushing;
using geometry_msgs::PoseStamped;
using boost::shared_ptr;
using cpl_visual_features::subPIAngle;

typedef tabletop_pushing::VisFeedbackPushTrackingFeedback PushTrackerState;
typedef tabletop_pushing::VisFeedbackPushTrackingGoal PushTrackerGoal;
typedef tabletop_pushing::VisFeedbackPushTrackingResult PushTrackerResult;
typedef tabletop_pushing::VisFeedbackPushTrackingAction PushTrackerAction;

typedef pcl16::PointCloud<pcl16::PointXYZ> XYZPointCloud;

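// A minimal usage sketch (hypothetical caller; the segmenter setup, frame
// name, and parameter values below are illustrative, not part of this file):
//
//   ObjectTracker25D tracker(segmenter, arm_segmenter, 2 /*num_downsamples*/,
//                            true /*use_displays*/, false /*write_to_disk*/,
//                            "/tmp/", "head_mount_kinect_rgb_optical_frame",
//                            false /*use_cv_ellipse*/, false /*use_mps_segmentation*/,
//                            false /*use_graphcut_arm_seg*/, 0.01 /*hull_alpha*/);
//   PushTrackerState state;
//   tracker.initTracks(color_frame, depth_frame, self_mask, cloud, ELLIPSE_PROXY, state);
//   while (pushing)
//     tracker.updateTracks(color_frame, depth_frame, self_mask, cloud, ELLIPSE_PROXY, state);
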
ObjectTracker25D::ObjectTracker25D(shared_ptr<PointCloudSegmentation> segmenter,
                                   shared_ptr<ArmObjSegmentation> arm_segmenter, int num_downsamples,
                                   bool use_displays, bool write_to_disk, std::string base_output_path,
                                   std::string camera_frame, bool use_cv_ellipse, bool use_mps_segmentation,
                                   bool use_graphcut_arm_seg, double hull_alpha) :
    pcl_segmenter_(segmenter), arm_segmenter_(arm_segmenter),
    num_downsamples_(num_downsamples), initialized_(false),
    frame_count_(0), use_displays_(use_displays), write_to_disk_(write_to_disk),
    base_output_path_(base_output_path), record_count_(0), swap_orientation_(false),
    paused_(false), frame_set_count_(0), camera_frame_(camera_frame),
    use_cv_ellipse_fit_(use_cv_ellipse), use_mps_segmentation_(use_mps_segmentation),
    have_obj_color_model_(false), have_table_color_model_(false), use_graphcut_arm_seg_(use_graphcut_arm_seg),
    hull_alpha_(hull_alpha)
{
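  // Each downsample presumably halves image resolution, so coordinates from
  // the downsampled images scale back to full resolution by 2^num_downsamples_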
  upscale_ = std::pow(2,num_downsamples_);
}

ProtoObject ObjectTracker25D::findTargetObjectGC(cv::Mat& in_frame, XYZPointCloud& cloud, cv::Mat& depth_frame,
                                                 cv::Mat self_mask, bool& no_objects, bool init)
{
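  // Pipeline: (1) extract the table plane and its image mask; (2) build the
  // table background color model and, once the object model exists, merge the
  // two; (3) graphcut the arm out of the image; (4) drop cloud points that
  // project onto arm or table pixels; (5) cluster what remains and match a
  // cluster to the tracked target.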
  // Segment arm from background using graphcut
  ROS_INFO_STREAM("Getting table cloud and mask.");
  XYZPointCloud table_cloud, non_table_cloud;
  cv::Mat table_mask = getTableMask(cloud, table_cloud, self_mask.size(), non_table_cloud);

  if (!have_table_color_model_)
  {
    ROS_INFO_STREAM("Building table color model.");
    table_color_model_ = buildColorModel(table_cloud, in_frame, 3);
    have_table_color_model_ = true;
    if (have_obj_color_model_)
    {
      ROS_INFO_STREAM("Combining bg color models");
      arm_segmenter_->buildBGColorModel(table_color_model_, obj_color_model_);
    }
  }
  ROS_INFO_STREAM("Segmenting arm.");
  cv::Mat segs = arm_segmenter_->segment(in_frame, depth_frame, self_mask, table_mask, init);
  pcl16::PointIndices obj_pts;
  ROS_INFO_STREAM("Removing table and arm points.");
  // Remove arm and table points from cloud
  for (int i = 0; i < non_table_cloud.size(); ++i)
  {
    if (isnan(non_table_cloud.at(i).x) || isnan(non_table_cloud.at(i).y) || isnan(non_table_cloud.at(i).z))
    {
      continue;
    }
    // ROS_INFO_STREAM("Projecting point.");
    cv::Point img_pt = pcl_segmenter_->projectPointIntoImage(non_table_cloud.at(i),
                                                             non_table_cloud.header.frame_id, camera_frame_);
    // ROS_INFO_STREAM("(" << cloud.at(i).x << ", " << cloud.at(i).y << ", " << cloud.at(i).z << ") -> (" <<
    //                 img_pt.x << ", " << img_pt.y << ")");
    const bool is_arm = segs.at<uchar>(img_pt.y, img_pt.x) != 0;
    // ROS_INFO_STREAM("is_arm " << is_arm);
    const bool is_table = table_mask.at<uchar>(img_pt.y, img_pt.x) != 0;
    // ROS_INFO_STREAM("is_table " << is_table);
    if (!is_arm && !is_table)
    {
      obj_pts.indices.push_back(i);
    }
  }
  if (obj_pts.indices.size() < 1)
  {
    ROS_WARN_STREAM("No objects found in findTargetObjectGC");
    ProtoObject empty;
    no_objects = true;
    return empty;
  }
  ROS_INFO_STREAM("Copying object points");
  XYZPointCloud objs_cloud;
  pcl16::copyPointCloud(non_table_cloud, obj_pts, objs_cloud);

  // Cluster objects from remaining point cloud
  ProtoObjects objs;
  XYZPointCloud objects_cloud_down;
  // TODO: Add switch to choose between downsampling object cloud and not
  ROS_INFO_STREAM("Downsampling object points");
  pcl_segmenter_->downsampleCloud(objs_cloud, objects_cloud_down);
  // Find independent regions
  if (objects_cloud_down.size() > 0)
  {
    ROS_INFO_STREAM("Clustering object points");
    pcl_segmenter_->clusterProtoObjects(objects_cloud_down, objs);
  }
  else
  {
    ROS_WARN_STREAM("No objects found in findTargetObjectGC");
    ProtoObject empty;
    no_objects = true;
    return empty;
  }
  ROS_INFO_STREAM("Matching object");
  no_objects = false;
  return matchToTargetObject(objs, in_frame, init);
}

ProtoObject ObjectTracker25D::findTargetObject(cv::Mat& in_frame, XYZPointCloud& cloud,
                                               bool& no_objects, bool init)
{
#ifdef PROFILE_FIND_TARGET_TIME
  long long find_target_start_time = Timer::nanoTime();
#endif
  ProtoObjects objs;
  pcl_segmenter_->findTabletopObjects(cloud, objs, use_mps_segmentation_);
#ifdef PROFILE_FIND_TARGET_TIME
  double find_tabletop_objects_elapsed_time = (((double)(Timer::nanoTime() - find_target_start_time)) /
                                               Timer::NANOSECONDS_PER_SECOND);
  long long choose_object_start_time = Timer::nanoTime();
#endif
  if (objs.size() == 0)
  {
    ROS_WARN_STREAM("No objects found");
    ProtoObject empty;
    no_objects = true;
#ifdef PROFILE_FIND_TARGET_TIME
    double find_target_elapsed_time = (((double)(Timer::nanoTime() - find_target_start_time)) /
                                       Timer::NANOSECONDS_PER_SECOND);
    ROS_INFO_STREAM("\t find_target_elapsed_time " << find_target_elapsed_time);
    ROS_INFO_STREAM("\t\t find_tabletop_objects_elapsed_time " << find_tabletop_objects_elapsed_time);
#endif
    return empty;
  }
  no_objects = false;
  return matchToTargetObject(objs, in_frame, init);
}

ProtoObject ObjectTracker25D::matchToTargetObject(ProtoObjects& objs, cv::Mat& in_frame, bool init)
{
  int chosen_idx = 0;
  if (objs.size() == 1)
  {
    // ROS_INFO_STREAM("Picking the only object");
  }
  else if (init || frame_count_ == 0)
  {
    // ROS_INFO_STREAM("Picking the biggest object at initialization");
    // NOTE: Assume we care about the biggest currently
    unsigned int max_size = 0;
    for (unsigned int i = 0; i < objs.size(); ++i)
    {
      if (objs[i].cloud.size() > max_size)
      {
        max_size = objs[i].cloud.size();
        chosen_idx = i;
      }
    }
  }
  else // Find closest object to last time
  {
    // ROS_INFO_STREAM("Finding the closest object to previous");
    double min_dist = 1000.0;
    for (unsigned int i = 0; i < objs.size(); ++i)
    {
      double centroid_dist = pcl_segmenter_->sqrDist(objs[i].centroid, previous_obj_.centroid);
      if (centroid_dist < min_dist)
      {
        min_dist = centroid_dist;
        chosen_idx = i;
      }
      // TODO: Match color GMM model
    }
    // ROS_INFO_STREAM("Chose object " << chosen_idx << " at distance " << min_dist);
  }
  if (init && use_graphcut_arm_seg_)
  {
    // Extract color GMM model
    // ROS_INFO_STREAM("Building object color model.");
    obj_color_model_ = buildColorModel(objs[chosen_idx].cloud, in_frame, 5);
    have_obj_color_model_ = true;
    if (have_table_color_model_)
    {
      // ROS_INFO_STREAM("Combining bg color models");
      arm_segmenter_->buildBGColorModel(table_color_model_, obj_color_model_);
    }
  }

#ifdef PROFILE_FIND_TARGET_TIME
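  // NOTE: find_target_start_time and choose_object_start_time are declared in
  // findTargetObject(); the profiling blocks in this function appear to
  // predate the split into matchToTargetObject() and will not compile as-is
  // when PROFILE_FIND_TARGET_TIME is defined.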
  double choose_object_elapsed_time = (((double)(Timer::nanoTime() - choose_object_start_time)) /
                                       Timer::NANOSECONDS_PER_SECOND);
  long long display_object_start_time = Timer::nanoTime();
#endif

  if (use_displays_)
  {
    cv::Mat disp_img = pcl_segmenter_->projectProtoObjectsIntoImage(
        objs, in_frame.size(), objs[0].cloud.header.frame_id);
    pcl_segmenter_->displayObjectImage(disp_img, "Objects", true);
    // ROS_INFO_STREAM("Updating display");
  }
  // ROS_INFO_STREAM("Returning matched object\n");
#ifdef PROFILE_FIND_TARGET_TIME
  double find_target_elapsed_time = (((double)(Timer::nanoTime() - find_target_start_time)) /
                                     Timer::NANOSECONDS_PER_SECOND);
  double display_object_elapsed_time = (((double)(Timer::nanoTime() - display_object_start_time)) /
                                        Timer::NANOSECONDS_PER_SECOND);
  ROS_INFO_STREAM("\t find_target_elapsed_time " << find_target_elapsed_time);
  ROS_INFO_STREAM("\t\t find_tabletop_objects_elapsed_time " << find_tabletop_objects_elapsed_time);
  ROS_INFO_STREAM("\t\t choose_object_elapsed_time " << choose_object_elapsed_time);
  ROS_INFO_STREAM("\t\t display_objects_elapsed_time " << display_object_elapsed_time);
#endif

  return objs[chosen_idx];
}

void ObjectTracker25D::updateHeading(PushTrackerState& state, bool init_state)
{
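  // An ellipse heading is only defined up to a 180 degree flip, so theta can
  // jump sign between frames even when the object barely rotated (e.g. +3.1
  // to -3.0 rad is really a ~0.1 rad turn once one endpoint is shifted by
  // pi). swap_orientation_ records whether we are currently reporting the
  // flipped heading so the estimate stays continuous.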
  if (swap_orientation_)
  {
    state.x.theta += (state.x.theta > 0.0) ? -M_PI : M_PI;
  }
  // If not initializing, check if we need to swap our addition because of heading change
  if (!init_state && (state.x.theta > 0) != (previous_state_.x.theta > 0))
  {
    // Test if swapping makes a shorter distance than changing
    float augmented_theta = state.x.theta + ((state.x.theta > 0.0) ? -M_PI : M_PI);
    float augmented_diff = fabs(subPIAngle(augmented_theta - previous_state_.x.theta));
    float current_diff = fabs(subPIAngle(state.x.theta - previous_state_.x.theta));
    if (augmented_diff < current_diff)
    {
      swap_orientation_ = !swap_orientation_;
      // We either need to swap or need to undo the swap
      state.x.theta = augmented_theta;
    }
  }
}

void ObjectTracker25D::computeState(ProtoObject& cur_obj, XYZPointCloud& cloud, std::string proxy_name,
                                    cv::Mat& in_frame, PushTrackerState& state, bool init_state)
{
  // TODO: Have each proxy create an image, and send that image to the trackerDisplay
  // function to deal with saving and display.
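  // Proxy families: ELLIPSE/CENTROID/SPHERE/CYLINDER proxies report the 3D
  // cluster centroid with an ellipse-fit heading; the HULL_* proxies track
  // the sampled 2D boundary (ellipse fit, ICP alignment, or shape-context
  // matching); BOUNDING_BOX_XY uses a minimum-area footprint rectangle.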
  cv::RotatedRect obj_ellipse;
  if (proxy_name == ELLIPSE_PROXY || proxy_name == CENTROID_PROXY ||
      proxy_name == SPHERE_PROXY || proxy_name == CYLINDER_PROXY)
  {
    fitObjectEllipse(cur_obj, obj_ellipse);
    previous_obj_ellipse_ = obj_ellipse;
    state.x.theta = getThetaFromEllipse(obj_ellipse);
    state.x.x = cur_obj.centroid[0];
    state.x.y = cur_obj.centroid[1];
    state.z = cur_obj.centroid[2];
    updateHeading(state, init_state);
  }
  else if (proxy_name == HULL_ELLIPSE_PROXY || proxy_name == HULL_ICP_PROXY ||
           proxy_name == HULL_SHAPE_CONTEXT_PROXY)
  {
    // Get 2D object boundary
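    // (hull_alpha_ is the concave-hull alpha parameter: smaller values let
    // the sampled boundary follow concavities more tightly)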
    XYZPointCloud hull_cloud = tabletop_pushing::getObjectBoundarySamples(cur_obj, hull_alpha_);
    // Use vertical z centroid from object
    state.z = cur_obj.centroid[2];

    // Get ellipse orientation from the 2D boundary
    // TODO: Add in switch for different hull proxies
    if (frame_count_ < 1 || proxy_name == HULL_ELLIPSE_PROXY)
    {
      fitHullEllipse(hull_cloud, obj_ellipse);
      state.x.theta = getThetaFromEllipse(obj_ellipse);
      // Get (x,y) centroid of boundary
      state.x.x = obj_ellipse.center.x;
      state.x.y = obj_ellipse.center.y;
    }
    else
    {
      cpl_visual_features::Path matches;
      XYZPointCloud aligned;
      Eigen::Matrix4f transform = Eigen::Matrix4f::Identity();
      if (proxy_name == HULL_SHAPE_CONTEXT_PROXY)
      {
        double match_cost;
        matches = compareBoundaryShapes(previous_hull_cloud_, hull_cloud, match_cost);
        ROS_INFO_STREAM("Found minimum cost match of: " << match_cost);
        estimateTransformFromMatches(previous_hull_cloud_, hull_cloud, matches, transform);
      }
      else // (proxy_name == HULL_ICP_PROXY)
      {
        // TODO: Add guess of movement based on previous state estimates
        double match_score = pcl_segmenter_->ICPBoundarySamples(previous_hull_cloud_, hull_cloud, transform,
                                                                aligned);
        // ROS_INFO_STREAM("Found ICP match with score: " << match_score);
      }

      // Transform previous state using the estimate and update current state
      // ROS_INFO_STREAM("Found transform of: \n" << transform);
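      // In homogeneous coordinates x_t = T * x_{t-1}; the heading is advanced
      // by rotating the previous orientation's unit x-axis through the
      // rotation block R = T(0:2,0:2) rather than by adding Euler angles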
      Eigen::Vector4f x_t_0(previous_state_.x.x, previous_state_.x.y, previous_state_.z, 1.0);
      Eigen::Vector4f x_t_1 = transform*x_t_0;
      Eigen::Matrix3f rot = transform.block<3,3>(0,0);
      state.x.x = x_t_1(0);
      state.x.y = x_t_1(1);
      const Eigen::Vector3f x_axis(cos(previous_state_.x.theta), sin(previous_state_.x.theta), 0.0);
      const Eigen::Vector3f x_axis_t = rot*x_axis;
      state.x.theta = atan2(x_axis_t(1), x_axis_t(0));
      // Visualize the matches
      if (use_displays_ || write_to_disk_)
      {
        cv::Mat match_img;
        if (proxy_name == HULL_SHAPE_CONTEXT_PROXY)
        {
          match_img = visualizeObjectBoundaryMatches(previous_hull_cloud_, hull_cloud, state, matches);
        }
        else
        {
          for (int i = 0; i < previous_hull_cloud_.size(); ++i)
          {
            matches.push_back(i);
          }
          match_img = visualizeObjectBoundaryMatches(previous_hull_cloud_, aligned, state, matches);
        }
        if (use_displays_)
        {
          cv::imshow("Boundary matches", match_img);
        }
        if (write_to_disk_ && !isPaused())
        {
          std::stringstream out_name;
          out_name << base_output_path_ << "boundary_matches_" << frame_set_count_ << "_"
                   << record_count_ << ".png";
          cv::imwrite(out_name.str(), match_img);
        }
      }
    }
    // Update bookkeeping for the next frame
    previous_obj_ellipse_ = obj_ellipse;
    previous_hull_cloud_ = hull_cloud;
    updateHeading(state, init_state);
  }
  else if (proxy_name == BOUNDING_BOX_XY_PROXY)
  {
    findFootprintBox(cur_obj, obj_ellipse);
    double min_z = 10000;
    double max_z = -10000;
    for (int i = 0; i < cur_obj.cloud.size(); ++i)
    {
      if (cur_obj.cloud.at(i).z < min_z)
      {
        min_z = cur_obj.cloud.at(i).z;
      }
      if (cur_obj.cloud.at(i).z > max_z)
      {
        max_z = cur_obj.cloud.at(i).z;
      }
    }
    previous_obj_ellipse_ = obj_ellipse;
    state.x.x = obj_ellipse.center.x;
    state.x.y = obj_ellipse.center.y;
    state.z = (min_z+max_z)*0.5;
    state.x.theta = getThetaFromEllipse(obj_ellipse);
    updateHeading(state, init_state);
    // ROS_INFO_STREAM("box (x,y,z): " << state.x.x << ", " << state.x.y << ", " <<
    //                 state.z << ")");
    // ROS_INFO_STREAM("centroid (x,y,z): " << cur_obj.centroid[0] << ", " << cur_obj.centroid[1]
    //                 << ", " << cur_obj.centroid[2] << ")");
  }
  else
  {
    ROS_WARN_STREAM("Unknown perceptual proxy: " << proxy_name << " requested");
  }
  if (proxy_name == SPHERE_PROXY)
  {
    XYZPointCloud sphere_cloud;
    pcl16::ModelCoefficients sphere;
    pcl_segmenter_->fitSphereRANSAC(cur_obj, sphere_cloud, sphere);
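    // pcl SACMODEL_SPHERE coefficients: [center.x, center.y, center.z, radius]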
    cv::Mat lbl_img(in_frame.size(), CV_8UC1, cv::Scalar(0));
    cv::Mat disp_img(in_frame.size(), CV_8UC3, cv::Scalar(0,0,0));
    if (sphere_cloud.size() < 1)
    {
      ROS_INFO_STREAM("Sphere has 0 points");
    }
    else
    {
      pcl_segmenter_->projectPointCloudIntoImage(sphere_cloud, lbl_img);
      lbl_img *= 255;
      pcl16::PointXYZ centroid_point(sphere.values[0], sphere.values[1], sphere.values[2]);
      cv::cvtColor(lbl_img, disp_img, CV_GRAY2BGR);
      const cv::Point img_c_idx = pcl_segmenter_->projectPointIntoImage(
          centroid_point, cur_obj.cloud.header.frame_id, camera_frame_);
      cv::circle(disp_img, img_c_idx, 4, cv::Scalar(0,255,0));
      cv::imshow("sphere", disp_img);
    }
    state.x.x = sphere.values[0];
    state.x.y = sphere.values[1];
    state.z = sphere.values[2];
    // state.x.theta = 0.0;
    // TODO: Draw ellipse of the projected circle parallel to the table
    // std::stringstream out_name;
    // out_name << base_output_path_ << "sphere_" << frame_set_count_ << "_"
    //          << record_count_ << ".png";
    // cv::imwrite(out_name.str(), disp_img);

    // ROS_INFO_STREAM("sphere (x,y,z,r): " << state.x.x << ", " << state.x.y << ", " << state.z
    //                 << ", " << sphere.values[3] << ")");
    // ROS_INFO_STREAM("centroid (x,y,z): " << cur_obj.centroid[0] << ", " << cur_obj.centroid[1]
    //                 << ", " << cur_obj.centroid[2] << ")");
  }
  if (proxy_name == CYLINDER_PROXY)
  {
    XYZPointCloud cylinder_cloud;
    pcl16::ModelCoefficients cylinder;
    pcl_segmenter_->fitCylinderRANSAC(cur_obj, cylinder_cloud, cylinder);
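    // pcl SACMODEL_CYLINDER coefficients: [point_on_axis (x,y,z),
    // axis_direction (x,y,z), radius]; values[0..1] give a point on the axis,
    // not a centroid, which is why z falls back to the cluster centroid below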
    cv::Mat lbl_img(in_frame.size(), CV_8UC1, cv::Scalar(0));
    pcl_segmenter_->projectPointCloudIntoImage(cylinder_cloud, lbl_img);
    lbl_img *= 255;
    cv::imshow("cylinder", lbl_img);
    ROS_INFO_STREAM("cylinder: " << cylinder);
    // NOTE: Z may be bad, depending on how it is computed
    // TODO: Update this to the cylinder centroid
    state.x.x = cylinder.values[0];
    state.x.y = cylinder.values[1];
    state.z = cur_obj.centroid[2]; // rather than cylinder.values[2]
    // state.x.theta = 0.0;
    // ROS_INFO_STREAM("cylinder (x,y,z): " << state.x.x << ", " << state.x.y << ", " <<
    //                 cylinder.values[2] << ")");
    // ROS_INFO_STREAM("centroid (x,y,z): " << cur_obj.centroid[0] << ", " << cur_obj.centroid[1]
    //                 << ", " << cur_obj.centroid[2] << ")");
  }

  if (use_displays_ || write_to_disk_)
  {
    if (proxy_name == ELLIPSE_PROXY)
    {
      trackerDisplay(in_frame, cur_obj, obj_ellipse);
    }
    else if (proxy_name == BOUNDING_BOX_XY_PROXY)
    {
      trackerBoxDisplay(in_frame, cur_obj, obj_ellipse);
    }
    else
    {
      trackerDisplay(in_frame, state, cur_obj);
    }
  }
}

void ObjectTracker25D::fitObjectEllipse(ProtoObject& obj, cv::RotatedRect& ellipse)
{
  if (use_cv_ellipse_fit_)
  {
    findFootprintEllipse(obj, ellipse);
  }
  else
  {
    fit2DMassEllipse(obj, ellipse);
  }
}

void ObjectTracker25D::fitHullEllipse(XYZPointCloud& hull_cloud, cv::RotatedRect& obj_ellipse)
{
  Eigen::Vector3f eigen_values;
  Eigen::Matrix3f eigen_vectors;
  Eigen::Vector4f centroid;

  // HACK: Copied/adapted from PCA in PCL because PCL was seg faulting after an update on the robot
  // Compute mean
  centroid = Eigen::Vector4f::Zero();
  // ROS_INFO_STREAM("Getting centroid");
  pcl16::compute3DCentroid(hull_cloud, centroid);
  // Compute demeaned cloud
  Eigen::MatrixXf cloud_demean;
  // ROS_INFO_STREAM("Demeaning point cloud");
  pcl16::demeanPointCloud(hull_cloud, centroid, cloud_demean);

  // Compute the scatter matrix cloud_demean * cloud_demean^T
  // ROS_INFO_STREAM("Getting alpha");
  Eigen::Matrix3f alpha = static_cast<Eigen::Matrix3f> (cloud_demean.topRows<3> () * cloud_demean.topRows<3> ().transpose ());

  // Compute eigen vectors and values
  // ROS_INFO_STREAM("Getting eigenvectors");
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> evd (alpha);
  // Eigen returns eigenvalues in ascending order; reverse them so the largest
  // (major axis) comes first
  for (int i = 0; i < 3; ++i)
  {
    eigen_values[i] = evd.eigenvalues()[2-i];
    eigen_vectors.col(i) = evd.eigenvectors().col(2-i);
  }

  // try{
  //   pca.setInputCloud(cloud_no_z.makeShared());
  //   ROS_INFO_STREAM("Getting mean");
  //   centroid = pca.getMean();
  //   ROS_INFO_STREAM("Getting eigenvalues");
  //   eigen_values = pca.getEigenValues();
  //   ROS_INFO_STREAM("Getting eigenvectors");
  //   eigen_vectors = pca.getEigenVectors();
  // } catch(pcl16::InitFailedException ife)
  // {
  //   ROS_WARN_STREAM("Failed to compute PCA");
  //   ROS_WARN_STREAM("ife: " << ife.what());
  // }

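  // The first eigenvector spans the major axis. cv::RotatedRect stores the
  // heading minus 90 degrees (getThetaFromEllipse() adds it back); the 0.1
  // eigenvalue scaling and the 0.07/0.03 m floors appear to be heuristics to
  // keep the drawn axes a sensible size.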
  obj_ellipse.center.x = centroid[0];
  obj_ellipse.center.y = centroid[1];
  obj_ellipse.angle = RAD2DEG(atan2(eigen_vectors(1,0), eigen_vectors(0,0))-0.5*M_PI);
  // NOTE: major axis is defined by height
  obj_ellipse.size.height = std::max(eigen_values(0)*0.1, 0.07);
  obj_ellipse.size.width = std::max(eigen_values(1)*0.1, 0.03);
}

void ObjectTracker25D::findFootprintEllipse(ProtoObject& obj, cv::RotatedRect& obj_ellipse)
{
  // Get 2D footprint of object and fit an ellipse to it
  std::vector<cv::Point2f> obj_pts;
  for (unsigned int i = 0; i < obj.cloud.size(); ++i)
  {
    obj_pts.push_back(cv::Point2f(obj.cloud[i].x, obj.cloud[i].y));
  }
  ROS_DEBUG_STREAM("Number of points is: " << obj_pts.size());
  obj_ellipse = cv::fitEllipse(obj_pts);
}

void ObjectTracker25D::findFootprintBox(ProtoObject& obj, cv::RotatedRect& box)
{
  // Get 2D footprint of object and fit a minimum-area box to it
  std::vector<cv::Point2f> obj_pts;
  for (unsigned int i = 0; i < obj.cloud.size(); ++i)
  {
    obj_pts.push_back(cv::Point2f(obj.cloud[i].x, obj.cloud[i].y));
  }
  ROS_DEBUG_STREAM("Number of points is: " << obj_pts.size());
  box = cv::minAreaRect(obj_pts);
}

void ObjectTracker25D::fit2DMassEllipse(ProtoObject& obj, cv::RotatedRect& obj_ellipse)
{
  // pcl16::PCA<pcl16::PointXYZ> pca(true);
  XYZPointCloud cloud_no_z;
  cloud_no_z.header = obj.cloud.header;
  cloud_no_z.width = obj.cloud.points.size();
  cloud_no_z.height = 1;
  cloud_no_z.is_dense = false;
  cloud_no_z.resize(cloud_no_z.width*cloud_no_z.height);
  if (obj.cloud.size() < 3)
  {
    ROS_WARN_STREAM("Too few points to find ellipse");
    obj_ellipse.center.x = 0.0;
    obj_ellipse.center.y = 0.0;
    obj_ellipse.angle = 0;
    obj_ellipse.size.width = 0;
    obj_ellipse.size.height = 0;
    // Bail out rather than falling through to the PCA below
    return;
  }
  // Flatten the cloud onto the table plane (z = 0) so the PCA is purely 2D
  for (unsigned int i = 0; i < obj.cloud.size(); ++i)
  {
    cloud_no_z[i] = obj.cloud[i];
    cloud_no_z[i].z = 0.0f;
  }
  Eigen::Vector3f eigen_values;
  Eigen::Matrix3f eigen_vectors;
  Eigen::Vector4f centroid;

  // HACK: Copied/adapted from PCA in PCL because PCL was seg faulting after an update on the robot
  // Compute mean
  centroid = Eigen::Vector4f::Zero();
  // ROS_INFO_STREAM("Getting centroid");
  pcl16::compute3DCentroid(cloud_no_z, centroid);
  // Compute demeaned cloud
  Eigen::MatrixXf cloud_demean;
  // ROS_INFO_STREAM("Demeaning point cloud");
  pcl16::demeanPointCloud(cloud_no_z, centroid, cloud_demean);

  // Compute the scatter matrix cloud_demean * cloud_demean^T
  // ROS_INFO_STREAM("Getting alpha");
  Eigen::Matrix3f alpha = static_cast<Eigen::Matrix3f> (cloud_demean.topRows<3> () * cloud_demean.topRows<3> ().transpose ());

  // Compute eigen vectors and values
  // ROS_INFO_STREAM("Getting eigenvectors");
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> evd (alpha);
  // Eigen returns eigenvalues in ascending order; reverse them so the largest
  // (major axis) comes first
  for (int i = 0; i < 3; ++i)
  {
    eigen_values[i] = evd.eigenvalues()[2-i];
    eigen_vectors.col(i) = evd.eigenvectors().col(2-i);
  }

  // try{
  //   pca.setInputCloud(cloud_no_z.makeShared());
  //   ROS_INFO_STREAM("Getting mean");
  //   centroid = pca.getMean();
  //   ROS_INFO_STREAM("Getting eigenvalues");
  //   eigen_values = pca.getEigenValues();
  //   ROS_INFO_STREAM("Getting eigenvectors");
  //   eigen_vectors = pca.getEigenVectors();
  // } catch(pcl16::InitFailedException ife)
  // {
  //   ROS_WARN_STREAM("Failed to compute PCA");
  //   ROS_WARN_STREAM("ife: " << ife.what());
  // }

  obj_ellipse.center.x = centroid[0];
  obj_ellipse.center.y = centroid[1];
  obj_ellipse.angle = RAD2DEG(atan2(eigen_vectors(1,0), eigen_vectors(0,0))-0.5*M_PI);
  // NOTE: major axis is defined by height
  obj_ellipse.size.height = std::max(eigen_values(0)*0.1, 0.07);
  obj_ellipse.size.width = std::max(eigen_values(1)*0.1, 0.03);
}

void ObjectTracker25D::initTracks(cv::Mat& in_frame, cv::Mat& depth_frame, cv::Mat& self_mask,
                                  XYZPointCloud& cloud, std::string proxy_name,
                                  tabletop_pushing::VisFeedbackPushTrackingFeedback& state, bool start_swap)
{
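  // Reset per-push bookkeeping; frame_set_count_ tags this push's output
  // images so successive pushes don't overwrite each other on disk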
  paused_ = false;
  initialized_ = false;
  obj_saved_ = false;
  swap_orientation_ = start_swap;
  bool no_objects = false;
  frame_count_ = 0;
  record_count_ = 0;
  frame_set_count_++;
  ProtoObject cur_obj;
  if (use_graphcut_arm_seg_)
  {
    cur_obj = findTargetObjectGC(in_frame, cloud, depth_frame, self_mask, no_objects, true);
  }
  else
  {
    cur_obj = findTargetObject(in_frame, cloud, no_objects, true);
  }
  initialized_ = true;
  if (no_objects)
  {
    state.header.seq = 0;
    state.header.stamp = cloud.header.stamp;
    state.header.frame_id = cloud.header.frame_id;
    state.no_detection = true;
    return;
  }
  else
  {
    computeState(cur_obj, cloud, proxy_name, in_frame, state, true);
    state.header.seq = 0;
    state.header.stamp = cloud.header.stamp;
    state.header.frame_id = cloud.header.frame_id;
    state.no_detection = false;
  }
  state.init_x.x = state.x.x;
  state.init_x.y = state.x.y;
  state.init_x.theta = state.x.theta;
  state.x_dot.x = 0.0;
  state.x_dot.y = 0.0;
  state.x_dot.theta = 0.0;

  ROS_DEBUG_STREAM("x: (" << state.x.x << ", " << state.x.y << ", " <<
                   state.x.theta << ")");
  ROS_DEBUG_STREAM("x_dot: (" << state.x_dot.x << ", " << state.x_dot.y
                   << ", " << state.x_dot.theta << ")\n");

  previous_time_ = state.header.stamp.toSec();
  previous_state_ = state;
  init_state_ = state;
  previous_obj_ = cur_obj;
  obj_saved_ = true;
  frame_count_ = 1;
  record_count_ = 1;
}

double ObjectTracker25D::getThetaFromEllipse(cv::RotatedRect& obj_ellipse)
{
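  // Undo the -90 degree storage convention used by the ellipse fitters and
  // wrap the result into (-pi, pi]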
  return subPIAngle(DEG2RAD(obj_ellipse.angle)+0.5*M_PI);
}

void ObjectTracker25D::updateTracks(cv::Mat& in_frame, cv::Mat& depth_frame, cv::Mat& self_mask,
                                    XYZPointCloud& cloud, std::string proxy_name, PushTrackerState& state)
{
#ifdef PROFILE_TRACKING_TIME
  long long update_start_time = Timer::nanoTime();
#endif
  if (!initialized_)
  {
#ifdef PROFILE_TRACKING_TIME
    long long init_start_time = Timer::nanoTime();
#endif
    initTracks(in_frame, depth_frame, self_mask, cloud, proxy_name, state);
#ifdef PROFILE_TRACKING_TIME
    double init_elapsed_time = (((double)(Timer::nanoTime() - init_start_time)) / Timer::NANOSECONDS_PER_SECOND);
    ROS_INFO_STREAM("init_elapsed_time " << init_elapsed_time);
#endif
    return;
  }
  bool no_objects = false;
#ifdef PROFILE_TRACKING_TIME
  long long find_target_start_time = Timer::nanoTime();
#endif
  ProtoObject cur_obj;
  if (use_graphcut_arm_seg_)
  {
    cur_obj = findTargetObjectGC(in_frame, cloud, depth_frame, self_mask, no_objects);
  }
  else
  {
    cur_obj = findTargetObject(in_frame, cloud, no_objects);
  }
#ifdef PROFILE_TRACKING_TIME
  double find_target_elapsed_time = (((double)(Timer::nanoTime() - find_target_start_time)) / Timer::NANOSECONDS_PER_SECOND);
  long long update_model_start_time = Timer::nanoTime();
#endif

  // Update model
  if (no_objects)
  {
    state.header.seq = frame_count_;
    state.header.stamp = cloud.header.stamp;
    state.header.frame_id = cloud.header.frame_id;
    state.no_detection = true;
    state.x = previous_state_.x;
    state.x_dot = previous_state_.x_dot;
    state.z = previous_state_.z;
    ROS_WARN_STREAM("Using previous state, but updating time!");
    if (use_displays_ || write_to_disk_)
    {
      if (obj_saved_)
      {
        trackerDisplay(in_frame, previous_state_, previous_obj_);
      }
    }
  }
  else
  {
    obj_saved_ = true;
    computeState(cur_obj, cloud, proxy_name, in_frame, state);
    state.header.seq = frame_count_;
    state.header.stamp = cloud.header.stamp;
    state.header.frame_id = cloud.header.frame_id;
    // Estimate dynamics and do some bookkeeping
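    // Finite-difference velocity estimate; delta_theta is wrapped into
    // (-pi, pi] first so a heading crossing the branch cut doesn't produce a
    // spurious, huge x_dot.theta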
    double delta_x = state.x.x - previous_state_.x.x;
    double delta_y = state.x.y - previous_state_.x.y;
    double delta_theta = subPIAngle(state.x.theta - previous_state_.x.theta);
    double delta_t = state.header.stamp.toSec() - previous_time_;
    state.x_dot.x = delta_x/delta_t;
    state.x_dot.y = delta_y/delta_t;
    state.x_dot.theta = delta_theta/delta_t;

    ROS_DEBUG_STREAM("x: (" << state.x.x << ", " << state.x.y << ", " <<
                     state.x.theta << ")");
    ROS_DEBUG_STREAM("x_dot: (" << state.x_dot.x << ", " << state.x_dot.y
                     << ", " << state.x_dot.theta << ")");
    previous_obj_ = cur_obj;
  }
  // We update the header and take care of other bookkeeping before returning
  state.init_x.x = init_state_.x.x;
  state.init_x.y = init_state_.x.y;
  state.init_x.theta = init_state_.x.theta;

  previous_time_ = state.header.stamp.toSec();
  previous_state_ = state;
  frame_count_++;
  record_count_++;
#ifdef PROFILE_TRACKING_TIME
  double update_model_elapsed_time = (((double)(Timer::nanoTime() - update_model_start_time)) /
                                      Timer::NANOSECONDS_PER_SECOND);
  double update_elapsed_time = (((double)(Timer::nanoTime() - update_start_time)) / Timer::NANOSECONDS_PER_SECOND);
  ROS_INFO_STREAM("update_elapsed_time " << update_elapsed_time);
  ROS_INFO_STREAM("\t find_target_elapsed_time " << find_target_elapsed_time);
  ROS_INFO_STREAM("\t update_model_elapsed_time " << update_model_elapsed_time);
#endif
}

void ObjectTracker25D::pausedUpdate(cv::Mat in_frame)
{
  if (use_displays_ || write_to_disk_)
  {
    trackerDisplay(in_frame, previous_state_, previous_obj_);
  }
  record_count_++;
}

//
// I/O Functions
//

void ObjectTracker25D::trackerDisplay(cv::Mat& in_frame, ProtoObject& cur_obj, cv::RotatedRect& obj_ellipse)
{
  cv::Mat centroid_frame;
  in_frame.copyTo(centroid_frame);
  pcl16::PointXYZ centroid_point(cur_obj.centroid[0], cur_obj.centroid[1],
                                 cur_obj.centroid[2]);
  const cv::Point img_c_idx = pcl_segmenter_->projectPointIntoImage(
      centroid_point, cur_obj.cloud.header.frame_id, camera_frame_);
  // double ellipse_angle_rad = subPIAngle(DEG2RAD(obj_ellipse.angle));
  double theta = getThetaFromEllipse(obj_ellipse);
  if (swap_orientation_)
  {
    theta += (theta > 0.0) ? -M_PI : M_PI;
  }
  const float x_min_rad = (std::cos(theta+0.5*M_PI)*obj_ellipse.size.width*0.5);
  const float y_min_rad = (std::sin(theta+0.5*M_PI)*obj_ellipse.size.width*0.5);
  pcl16::PointXYZ table_min_point(centroid_point.x+x_min_rad, centroid_point.y+y_min_rad,
                                  centroid_point.z);
  const float x_maj_rad = (std::cos(theta)*obj_ellipse.size.height*0.5);
  const float y_maj_rad = (std::sin(theta)*obj_ellipse.size.height*0.5);
  pcl16::PointXYZ table_maj_point(centroid_point.x+x_maj_rad, centroid_point.y+y_maj_rad,
                                  centroid_point.z);
  const cv::Point2f img_min_idx = pcl_segmenter_->projectPointIntoImage(
      table_min_point, cur_obj.cloud.header.frame_id, camera_frame_);
  const cv::Point2f img_maj_idx = pcl_segmenter_->projectPointIntoImage(
      table_maj_point, cur_obj.cloud.header.frame_id, camera_frame_);
  cv::line(centroid_frame, img_c_idx, img_maj_idx, cv::Scalar(0,0,0),3);
  cv::line(centroid_frame, img_c_idx, img_maj_idx, cv::Scalar(0,0,255),1);
  cv::line(centroid_frame, img_c_idx, img_min_idx, cv::Scalar(0,0,0),3);
  cv::line(centroid_frame, img_c_idx, img_min_idx, cv::Scalar(0,255,0),1);
  cv::Size img_size;
  img_size.width = std::sqrt(std::pow(img_maj_idx.x-img_c_idx.x,2) +
                             std::pow(img_maj_idx.y-img_c_idx.y,2))*2.0;
  img_size.height = std::sqrt(std::pow(img_min_idx.x-img_c_idx.x,2) +
                              std::pow(img_min_idx.y-img_c_idx.y,2))*2.0;
  float img_angle = RAD2DEG(std::atan2(img_maj_idx.y-img_c_idx.y,
                                       img_maj_idx.x-img_c_idx.x));
  cv::RotatedRect img_ellipse(img_c_idx, img_size, img_angle);
  cv::ellipse(centroid_frame, img_ellipse, cv::Scalar(0,0,0), 3);
  cv::ellipse(centroid_frame, img_ellipse, cv::Scalar(0,255,255), 1);
  if (use_displays_)
  {
    cv::imshow("Object State", centroid_frame);
  }
  if (write_to_disk_ && !isPaused())
  {
    // ROS_INFO_STREAM("Writing ellipse to disk!");
    std::stringstream out_name;
    out_name << base_output_path_ << "obj_state_" << frame_set_count_ << "_"
             << record_count_ << ".png";
    cv::imwrite(out_name.str(), centroid_frame);
  }
}

void ObjectTracker25D::trackerBoxDisplay(cv::Mat& in_frame, ProtoObject& cur_obj, cv::RotatedRect& obj_ellipse)
{
  cv::Mat centroid_frame;
  in_frame.copyTo(centroid_frame);
  pcl16::PointXYZ centroid_point(cur_obj.centroid[0], cur_obj.centroid[1],
                                 cur_obj.centroid[2]);
  const cv::Point img_c_idx = pcl_segmenter_->projectPointIntoImage(
      centroid_point, cur_obj.cloud.header.frame_id, camera_frame_);
  // double ellipse_angle_rad = subPIAngle(DEG2RAD(obj_ellipse.angle));
  double theta = getThetaFromEllipse(obj_ellipse);
  if (swap_orientation_)
  {
    theta += (theta > 0.0) ? -M_PI : M_PI;
  }
  const float x_min_rad = (std::cos(theta+0.5*M_PI)*obj_ellipse.size.width*0.5);
  const float y_min_rad = (std::sin(theta+0.5*M_PI)*obj_ellipse.size.width*0.5);
  pcl16::PointXYZ table_min_point(centroid_point.x+x_min_rad, centroid_point.y+y_min_rad,
                                  centroid_point.z);
  const float x_maj_rad = (std::cos(theta)*obj_ellipse.size.height*0.5);
  const float y_maj_rad = (std::sin(theta)*obj_ellipse.size.height*0.5);
  pcl16::PointXYZ table_maj_point(centroid_point.x+x_maj_rad, centroid_point.y+y_maj_rad,
                                  centroid_point.z);
  const cv::Point2f img_min_idx = pcl_segmenter_->projectPointIntoImage(
      table_min_point, cur_obj.cloud.header.frame_id, camera_frame_);
  const cv::Point2f img_maj_idx = pcl_segmenter_->projectPointIntoImage(
      table_maj_point, cur_obj.cloud.header.frame_id, camera_frame_);
  cv::line(centroid_frame, img_c_idx, img_maj_idx, cv::Scalar(0,0,0),3);
  cv::line(centroid_frame, img_c_idx, img_maj_idx, cv::Scalar(0,0,255),1);
  cv::line(centroid_frame, img_c_idx, img_min_idx, cv::Scalar(0,0,0),3);
  cv::line(centroid_frame, img_c_idx, img_min_idx, cv::Scalar(0,255,0),1);
  cv::Size img_size;
  img_size.width = std::sqrt(std::pow(img_maj_idx.x-img_c_idx.x,2) +
                             std::pow(img_maj_idx.y-img_c_idx.y,2))*2.0;
  img_size.height = std::sqrt(std::pow(img_min_idx.x-img_c_idx.x,2) +
                              std::pow(img_min_idx.y-img_c_idx.y,2))*2.0;
  float img_angle = RAD2DEG(std::atan2(img_maj_idx.y-img_c_idx.y,
                                       img_maj_idx.x-img_c_idx.x));
  cv::RotatedRect img_ellipse(img_c_idx, img_size, img_angle);
  cv::Point2f vertices[4];
  img_ellipse.points(vertices);
  for (int i = 0; i < 4; i++)
  {
    cv::line(centroid_frame, vertices[i], vertices[(i+1)%4], cv::Scalar(0,0,0), 3);
  }
  for (int i = 0; i < 4; i++)
  {
    cv::line(centroid_frame, vertices[i], vertices[(i+1)%4], cv::Scalar(0,255,255), 1);
  }
  if (use_displays_)
  {
    cv::imshow("Object State", centroid_frame);
  }
  if (write_to_disk_ && !isPaused())
  {
    std::stringstream out_name;
    out_name << base_output_path_ << "obj_state_" << frame_set_count_ << "_"
             << record_count_ << ".png";
    cv::imwrite(out_name.str(), centroid_frame);
  }
}

void ObjectTracker25D::trackerDisplay(cv::Mat& in_frame, PushTrackerState& state, ProtoObject& obj, bool other_color)
{
  cv::Mat centroid_frame;
  in_frame.copyTo(centroid_frame);
  pcl16::PointXYZ centroid_point(state.x.x, state.x.y, state.z);
  const cv::Point img_c_idx = pcl_segmenter_->projectPointIntoImage(
      centroid_point, obj.cloud.header.frame_id, camera_frame_);
  double theta = state.x.theta;

  // TODO: Change this based on proxy?
  const float x_min_rad = (std::cos(theta+0.5*M_PI)*0.05);
  const float y_min_rad = (std::sin(theta+0.5*M_PI)*0.05);
  pcl16::PointXYZ table_min_point(centroid_point.x+x_min_rad, centroid_point.y+y_min_rad,
                                  centroid_point.z);
  const float x_maj_rad = (std::cos(theta)*0.15);
  const float y_maj_rad = (std::sin(theta)*0.15);
  pcl16::PointXYZ table_maj_point(centroid_point.x+x_maj_rad, centroid_point.y+y_maj_rad,
                                  centroid_point.z);
  const cv::Point2f img_min_idx = pcl_segmenter_->projectPointIntoImage(
      table_min_point, obj.cloud.header.frame_id, camera_frame_);
  const cv::Point2f img_maj_idx = pcl_segmenter_->projectPointIntoImage(
      table_maj_point, obj.cloud.header.frame_id, camera_frame_);
  cv::line(centroid_frame, img_c_idx, img_maj_idx, cv::Scalar(0,0,0),3);
  cv::line(centroid_frame, img_c_idx, img_min_idx, cv::Scalar(0,0,0),3);
  if (other_color)
  {
    cv::line(centroid_frame, img_c_idx, img_maj_idx, cv::Scalar(255,255,0),1);
    cv::line(centroid_frame, img_c_idx, img_min_idx, cv::Scalar(0,255,255),1);
  }
  else
  {
    cv::line(centroid_frame, img_c_idx, img_maj_idx, cv::Scalar(0,0,255),1);
    cv::line(centroid_frame, img_c_idx, img_min_idx, cv::Scalar(0,255,0),1);
  }
  cv::Size img_size;
  img_size.width = std::sqrt(std::pow(img_maj_idx.x-img_c_idx.x,2) +
                             std::pow(img_maj_idx.y-img_c_idx.y,2))*2.0;
  img_size.height = std::sqrt(std::pow(img_min_idx.x-img_c_idx.x,2) +
                              std::pow(img_min_idx.y-img_c_idx.y,2))*2.0;
  // float img_angle = RAD2DEG(std::atan2(img_maj_idx.y-img_c_idx.y,
  //                                      img_maj_idx.x-img_c_idx.x));
  // cv::RotatedRect img_ellipse(img_c_idx, img_size, img_angle);
  // cv::ellipse(centroid_frame, img_ellipse, cv::Scalar(0,0,0), 3);
  // cv::ellipse(centroid_frame, img_ellipse, cv::Scalar(0,255,255), 1);

  if (use_displays_)
  {
    cv::imshow("Object State", centroid_frame);
  }
  if (write_to_disk_ && !isPaused())
  {
    std::stringstream out_name;
    out_name << base_output_path_ << "obj_state_" << frame_set_count_ << "_"
             << record_count_ << ".png";
    cv::imwrite(out_name.str(), centroid_frame);
  }
}

cv::Mat ObjectTracker25D::getTableMask(XYZPointCloud& cloud, XYZPointCloud& table_cloud, cv::Size mask_size,
                                       XYZPointCloud& obj_cloud)
{
  cv::Mat table_mask(mask_size, CV_8UC1, cv::Scalar(0));
  Eigen::Vector4f table_centroid;
  pcl_segmenter_->getTablePlane(cloud, obj_cloud, table_cloud, table_centroid);
  pcl_segmenter_->projectPointCloudIntoImage(table_cloud, table_mask, camera_frame_, 255);
  return table_mask;
}

GMM ObjectTracker25D::buildColorModel(XYZPointCloud& cloud, cv::Mat& frame, int nc)
{
  cv::Mat frame_lab_uchar(frame.size(), frame.type());
  cv::Mat frame_lab(frame.size(), CV_32FC3);
  cv::cvtColor(frame, frame_lab_uchar, CV_BGR2HSV);
  frame_lab_uchar.convertTo(frame_lab, CV_32FC3, 1.0/255);
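  // NOTE: despite the *_lab variable names, the conversion above is
  // BGR -> HSV; the GMM is fit on HSV values scaled into [0, 1]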

  std::vector<cv::Vec3f> pnts;
  for (int i = 0; i < cloud.size(); ++i)
  {
    cv::Point img_pt = pcl_segmenter_->projectPointIntoImage(cloud.at(i), cloud.header.frame_id, camera_frame_);
    cv::Vec3f img_val = frame_lab.at<cv::Vec3f>(img_pt.y, img_pt.x);
    pnts.push_back(img_val);
  }
  ROS_INFO_STREAM("Have " << pnts.size() << " points for the model");
  GMM color_model(0.0001);
  color_model.alloc(nc);
  if (pnts.size() > 1)
  {
    color_model.kmeansInit(pnts, 0.05);
    color_model.GmmEm(pnts);
    color_model.dispparams();
  }
  return color_model;
}

