motion_graphcut.cpp
// NOTE: The exact include set is uncertain from this listing. The headers
// below cover the std/OpenCV/ROS calls that appear in this file; the
// package-specific message, PCL, actionlib, cv_bridge, and tf headers are
// still TODO.
#include <ros/ros.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <algorithm>
#include <cmath>
#include <sstream>
#include <stdexcept>
#include <vector>
00002 #include <tabletop_pushing/extern/graphcut/graph.h>
00003 #include <tabletop_pushing/extern/graphcut/energy.h>
00004 #include <tabletop_pushing/extern/graphcut/GCoptimization.h>
00005 
00006 // TODO: Make importable
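// GraphType is the s-t graph from the bundled Boykov-Kolmogorov max-flow
// library; the template parameters are the edge capacity, terminal capacity,
// and total flow types (all float here).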
00007 typedef Graph<float, float, float> GraphType;
00008 
00009 inline float max(const float a, const double b)
00010 {
00011   return std::max(static_cast<double>(a), b);
00012 }
00013 
00014 inline float min(const float a, const double b)
00015 {
00016   return std::min(static_cast<double>(a), b);
00017 }
00018 
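// ArmModel collects the robot's hand and arm points projected into the image,
// plus the left/right kinematic chains used for distance-to-arm queries.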
00019 class ArmModel
00020 {
00021  public:
00022   ArmModel() : l_hand_on(false),  r_hand_on(false), l_arm_on(false),
00023                r_arm_on(false)
00024   {
00025     hands.clear();
00026     arms.clear();
00027   }
00028   unsigned int size()
00029   {
00030     return 2;
00031   }
00032 
  std::vector<cv::Point> operator[](unsigned int i)
  {
    if (i == 0)
      return hands;
    if (i == 1)
      return arms;
    std::stringstream err_msg;
    err_msg << "Index argument: " << i << " is out of range in class ArmModel";
    throw std::out_of_range(err_msg.str());
  }
00045 
00053   float distanceToArm(cv::Point2f p, cv::Mat& depth_frame)
00054   {
00055     float l_dist = distanceToArm(p, l_chain, depth_frame);
00056     float r_dist = distanceToArm(p, r_chain, depth_frame);
00057     if (l_dist < 0 && r_dist < 0) return -1.0f;
00058     if (l_dist < 0)
00059     {
00060       return r_dist;
00061     }
00062     if (r_dist < 0)
00063     {
00064       return l_dist;
00065     }
00066     return min(l_dist, r_dist);
00067   }
00068 
00069   float distanceToArm(cv::Point2f p, std::vector<cv::Point>& chain,
00070                       cv::Mat& depth_frame)
00071   {
00072     if (chain.size() == 0)
00073     {
00074       return -1.0f;
00075     }
00076     float min_dist = 640.0f*480.0f;
00077     for (unsigned int i = 1; i < chain.size(); ++i)
00078     {
00079       float d_i = pointLineDistance(p, chain[i-1] , chain[i], depth_frame);
00080       if (d_i < min_dist)
00081         min_dist = d_i;
00082     }
00083     return min_dist;
00084   }
00085 
00086   float pointLineDistance(cv::Point2f p, cv::Point2f l0, cv::Point2f l1,
00087                           cv::Mat& depth_frame)
00088   {
    if (l0.x == l1.x)
    {
      // Vertical segment: clamp the projection to the segment's y-extent
      cv::Point2f q(l0.x, p.y);
      float l_max_y = max(l0.y, l1.y);
      float l_min_y = min(l0.y, l1.y);
      if (p.y > l_max_y)
      {
        q.y = l_max_y;
      }
      else if (p.y < l_min_y)
      {
        q.y = l_min_y;
      }
      return pointPointDistance(p,q,depth_frame);
    }
00104     cv::Point2f x0;
00105     cv::Point2f x1;
00106 
00107     if (l0.x < l1.x)
00108     {
00109       x0 = l0;
00110       x1 = l1;
00111     }
00112     else
00113     {
00114       x0 = l1;
00115       x1 = l0;
00116     }
00117     cv::Point2f v = x1 - x0;
00118     cv::Point2f w = p - x0;
00119 
00120     float c0 = w.x*v.x+w.y*v.y;
00121     float c1 = v.x*v.x+v.y*v.y;
00122     float b = c0/c1;
00123 
00124     cv::Point2f q = x0 + b*v;
00125 
00126     float d = pointPointDistance(p,q,depth_frame);
    // Clamp to the nearer endpoint when the projection falls outside the segment
    if (c0 <= 0 || q.x < x0.x)
    {
      d = pointPointDistance(p,x0,depth_frame);
    }
    if (c1 <= c0 || q.x > x1.x)
    {
      d = pointPointDistance(p,x1,depth_frame);
    }
00135     return d;
00136   }
00137 
00138   float pointPointDistance(cv::Point2f& p, cv::Point2f& q, cv::Mat& depth_frame)
00139   {
00140     // return hypot(p.x-q.x,p.y-q.y);
00141     // TODO: Use 3D distance between the two points
00142     const float d_x = p.x-q.x;
00143     const float d_y = p.y-q.y;
00144     // const float d_d = (depth_frame.at<float>(p.y,p.x)-
00145     //                    depth_frame.at<float>(q.y,q.x));
00146     // return sqrt(d_x*d_x + d_y*d_y + d_d*d_d);
00147     return sqrt(d_x*d_x + d_y*d_y);
00148   }
00149 
00150   std::vector<cv::Point> hands;
00151   std::vector<cv::Point> arms;
00152   std::vector<cv::Point> l_chain;
00153   std::vector<cv::Point> r_chain;
00154   bool l_hand_on;
00155   bool r_hand_on;
00156   bool l_arm_on;
00157   bool r_arm_on;
00158 };
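// Illustrative sketch (not part of the original source): how an ArmModel is
// typically populated and queried. The joint pixel locations below are
// hypothetical; in this node they come from projectArmPoses() further down.
//
//   ArmModel arm;
//   arm.l_chain.push_back(cv::Point(120, 200));          // elbow
//   arm.l_chain.push_back(cv::Point(180, 240));          // wrist
//   arm.hands.push_back(cv::Point(185, 245));
//   float d = arm.distanceToArm(cv::Point2f(150, 260), depth_frame);
//   // d < 0 means no chain points were available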
00159 
00160 class MotionGraphcut
00161 {
00162  public:
00163   MotionGraphcut(double workspace_background_weight = 1.0f,
00164                  double min_weight = 0.01, double magnitude_thresh=0.1,
00165                  double flow_gain = 0.3, int arm_grow_radius=2) :
00166       workspace_background_weight_(workspace_background_weight),
00167       min_weight_(min_weight), magnitude_thresh_(magnitude_thresh),
00168       flow_gain_(flow_gain), arm_grow_radius_(arm_grow_radius)
00169   {
00170   }
00171 
00172   virtual ~MotionGraphcut()
00173   {
00174   }
00175 
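  // Performs the motion segmentation graph cut. color_frame is the
  // floating-point (HSV) image, u and v are the optical flow components,
  // workspace_mask is 0 outside the workspace, table_heights holds per-pixel
  // heights above the table plane, and arm_locs gives the projected arm model.
  // Returns a binary mask with 255 at pixels labeled as moving foreground.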
00191   cv::Mat operator()(cv::Mat& color_frame, cv::Mat& depth_frame,
00192                      cv::Mat& u, cv::Mat& v, cv::Mat& workspace_mask,
00193                      cv::Mat& table_heights, ArmModel arm_locs)
00194   {
00195     const int R = color_frame.rows;
00196     const int C = color_frame.cols;
00197     int num_nodes = R*C;
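    // n-links connect each pixel to its left, up, and up-left neighbors:
    // R*(C-1) + (R-1)*C + (R-1)*(C-1) edges, which simplifies to the
    // expression below.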
00198     int num_edges = ((C-1)*3+1)*(R-1)+(C-1);
00199     GraphType *g;
00200     g = new GraphType(num_nodes, num_edges);
00201 
00202 #ifdef VISUALIZE_GRAPH_WEIGHTS
00203     cv::Mat fg_weights(color_frame.size(), CV_32FC1);
00204     cv::Mat bg_weights(color_frame.size(), CV_32FC1);
00205     cv::Mat table_weights(color_frame.size(), CV_32FC1);
00206 #endif // VISUALIZE_GRAPH_WEIGHTS
00207 #ifdef VISUALIZE_GRAPH_EDGE_WEIGHTS
00208     cv::Mat left_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
00209     cv::Mat up_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
00210     cv::Mat up_left_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
00211 #endif // VISUALIZE_GRAPH_EDGE_WEIGHTS
00212 
00213     for (int r = 0; r < R; ++r)
00214     {
00215       for (int c = 0; c < C; ++c)
00216       {
00217         g->add_node();
00218         float magnitude = std::sqrt(u.at<float>(r,c)*u.at<float>(r,c) +
00219                                     v.at<float>(r,c)*v.at<float>(r,c));
00220         // Check if we are hardcoding this spot to background
00221         if (workspace_mask.at<uchar>(r,c) == 0)
00222         {
00223           g->add_tweights(r*C+c, min_weight_, workspace_background_weight_);
00224 
00225 #ifdef VISUALIZE_GRAPH_WEIGHTS
00226           fg_weights.at<float>(r,c) = min_weight_;
00227           bg_weights.at<float>(r,c) = workspace_background_weight_;
00228           table_weights.at<float>(r,c) = min_weight_;
00229 #endif // VISUALIZE_GRAPH_WEIGHTS
00230           continue;
00231         }
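        // Unary terms: the foreground (source) weight comes from the optical
        // flow magnitude, while the background (sink) weight combines the lack
        // of motion with a table-surface score.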
00232         const float mag_score = max(getFlowFGScore(magnitude), min_weight_);
00233         const float table_score = max(getTableScore(color_frame.at<cv::Vec3f>(r,c),
00234             fabs(table_heights.at<float>(r,c))), min_weight_);
00235         const float not_mag_score = max(1.0 - mag_score, min_weight_);
00236         const float bg_score = not_mag_score + table_score;
00237         g->add_tweights(r*C+c, mag_score, bg_score);
00238 #ifdef VISUALIZE_GRAPH_WEIGHTS
00239         fg_weights.at<float>(r,c) = mag_score;
00240         bg_weights.at<float>(r,c) = bg_score;
00241         table_weights.at<float>(r,c) = table_score;
00242 #endif // VISUALIZE_GRAPH_WEIGHTS
00243       }
00244     }
00245     for (int r = 0; r < R; ++r)
00246     {
00247       for (int c = 0; c < C; ++c)
00248       {
00249         // Connect node to previous ones
00250         if (c > 0)
00251         {
00252           // Add left-link
00253           float w_l = getEdgeWeight(color_frame.at<cv::Vec3f>(r,c),
00254                                     depth_frame.at<float>(r,c),
00255                                     color_frame.at<cv::Vec3f>(r,c-1),
00256                                     depth_frame.at<float>(r,c-1));
00257           g->add_edge(r*C+c, r*C+c-1, /*capacities*/ w_l, w_l);
00258 #ifdef VISUALIZE_GRAPH_EDGE_WEIGHTS
00259           left_weights.at<float>(r,c) = w_l;
#endif // VISUALIZE_GRAPH_EDGE_WEIGHTS
00261         }
00262         if (r > 0)
00263         {
00264           // Add up-link
00265           float w_u = getEdgeWeight(color_frame.at<cv::Vec3f>(r,c),
00266                                     depth_frame.at<float>(r,c),
00267                                     color_frame.at<cv::Vec3f>(r-1,c),
00268                                     depth_frame.at<float>(r-1,c));
00269           g->add_edge(r*C+c, (r-1)*C+c, /*capacities*/ w_u, w_u);
00270 #ifdef VISUALIZE_GRAPH_EDGE_WEIGHTS
00271           up_weights.at<float>(r,c) = w_u;
00272 #endif // VISUALIZE_GRAPH_EDGE_WEIGHTS
00273           // Add up-left-link
00274           if (c > 0)
00275           {
00276             float w_ul = getEdgeWeight(color_frame.at<cv::Vec3f>(r,c),
00277                                        depth_frame.at<float>(r,c),
00278                                        color_frame.at<cv::Vec3f>(r-1,c-1),
00279                                        depth_frame.at<float>(r-1,c-1));
00280             g->add_edge(r*C+c, (r-1)*C+c-1, /*capacities*/ w_ul, w_ul);
00281 #ifdef VISUALIZE_GRAPH_EDGE_WEIGHTS
            up_left_weights.at<float>(r,c) = w_ul;
00283 #endif // VISUALIZE_GRAPH_EDGE_WEIGHTS
00284           }
00285         }
00286       }
00287     }
00288 
00289 #ifdef VISUALIZE_GRAPH_WEIGHTS
00290     cv::imshow("fg_weights", fg_weights);
00291     cv::imshow("bg_weights", bg_weights);
00292     // double table_max=0;
00293     // cv::minMaxLoc(table_weights, NULL, &table_max);
00294     // table_weights /= table_max;
00295     cv::imshow("table_weights", table_weights);
00296 #endif // VISUALIZE_GRAPH_WEIGHTS
00297 #ifdef VISUALIZE_GRAPH_EDGE_WEIGHTS
00298     double up_max = 1.0;
00299     cv::minMaxLoc(up_weights, NULL, &up_max);
00300     up_weights /= up_max;
00301 
00302     cv::imshow("up_weights", up_weights);
00303     double left_max = 1.0;
00304     cv::minMaxLoc(left_weights, NULL, &left_max);
00305     left_weights /= left_max;
00306     cv::imshow("left_weights", left_weights);
00307     double up_left_max = 1.0;
00308     cv::minMaxLoc(up_left_weights, NULL, &up_left_max);
    up_left_weights /= up_left_max;
00310     cv::imshow("up_left_weights", up_left_weights);
00311 #endif // VISUALIZE_GRAPH_EDGE_WEIGHTS
00312 
00313     // int flow = g->maxflow(false);
00314     g->maxflow(false);
00315 
00316     // Convert output into image
00317     cv::Mat segs = convertFlowResultsToCvMat(g, R, C);
00318     delete g;
00319     return segs;
00320   }
00321 
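  // Segments the robot arm and hand from the scene. Works on a cropped window
  // around the projected arm (bounded by min/max_arm_x/y grown by
  // arm_search_radius_), scoring pixels by color similarity to hand and arm
  // Gaussians and by image distance to the arm chains, then running max-flow.
  // Returns a full-size binary mask with 255 at arm pixels.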
00334   cv::Mat segmentRobotArm(cv::Mat& color_frame_in, cv::Mat& depth_frame_in,
00335                           cv::Mat& workspace_mask_in, ArmModel& arms,
00336                           int min_arm_x, int max_arm_x, int min_arm_y,
00337                           int max_arm_y)
00338   {
    // NOTE: We examine only a subwindow of the image around the projected arm
    // to make things more efficient
00341     const int crop_min_x = max(0, min_arm_x - arm_search_radius_);
00342     const int crop_max_x = min(color_frame_in.cols,
00343                                     max_arm_x + arm_search_radius_);
00344     const int crop_min_y = max(0, min_arm_y - arm_search_radius_);
00345     const int crop_max_y = min(color_frame_in.rows,
00346                                     max_arm_y + arm_search_radius_);
00347     cv::Rect roi(crop_min_x, crop_min_y, crop_max_x-crop_min_x,
00348                  crop_max_y-crop_min_y);
00349     cv::Mat color_frame = color_frame_in(roi);
00350     cv::Mat depth_frame = depth_frame_in(roi);
00351     cv::Mat workspace_mask = workspace_mask_in(roi);
00352 
00353     const int R = color_frame.rows;
00354     const int C = color_frame.cols;
00355 
00356     int num_nodes = R*C;
00357     int num_edges = ((C-1)*3+1)*(R-1)+(C-1);
00358     GraphType *g;
00359     g = new GraphType(num_nodes, num_edges);
00360 
00361 #ifdef VISUALIZE_ARM_GRAPH_WEIGHTS
00362     cv::Mat fg_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
00363     cv::Mat bg_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
00364     cv::Mat dist_img(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
#endif // VISUALIZE_ARM_GRAPH_WEIGHTS
00366 #ifdef VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00367     cv::Mat left_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
00368     cv::Mat up_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
00369     cv::Mat up_left_weights(color_frame.size(), CV_32FC1, cv::Scalar(0.0));
#endif // VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00371 
    // One color Gaussian is estimated from the arm points (wrist to elbow to
    // forearm) and a separate one from the hand points (gripper tips to wrist)
00374     if (arms[0].size() == 0)
00375     {
00376       ROS_WARN_STREAM("No hands!");
00377     }
00378     if (arms[1].size() == 0)
00379     {
00380       ROS_WARN_STREAM("No arms!");
00381     }
00382     std::vector<cv::Vec3f> hand_stats = getImagePointGaussian(color_frame,
00383                                                               arms[0],
00384                                                               crop_min_x,
00385                                                               crop_min_y);
00386     std::vector<cv::Vec3f> arm_stats = getImagePointGaussian(color_frame,
00387                                                              arms[1],
00388                                                              crop_min_x,
00389                                                              crop_min_y);
00390     // Tie weights to fg / bg
00391     for (int r = 0; r < R; ++r)
00392     {
00393       for (int c = 0; c < C; ++c)
00394       {
00395         g->add_node();
00396 #ifdef USE_WORKSPACE_MASK_FOR_ARM
00397         if (workspace_mask.at<uchar>(r,c) == 0)
00398         {
00399           g->add_tweights(r*C+c, min_weight_, workspace_background_weight_);
00400 #ifdef VISUALIZE_ARM_GRAPH_WEIGHTS
00401           fg_weights.at<float>(r,c) = min_weight_;
00402           bg_weights.at<float>(r,c) = workspace_background_weight_;
00403           dist_img.at<float>(r,c) = min_weight_;
#endif // VISUALIZE_ARM_GRAPH_WEIGHTS
00405           continue;
00406         }
00407 #endif // USE_WORKSPACE_MASK_FOR_ARM
00408 #ifdef VISUALIZE_ARM_GRAPH_WEIGHTS
00409         const float me_score = max(getArmFGScore(color_frame, depth_frame, r, c,
00410                                                  arm_stats, hand_stats, arms,
00411                                                  roi, dist_img), min_weight_);
00412 #else // VISUALIZE_ARM_GRAPH_WEIGHTS
00413         const float me_score = max(getArmFGScore(color_frame, depth_frame, r, c,
00414                                                  arm_stats, hand_stats, arms,
00415                                                  roi), min_weight_);
00416 #endif // VISUALIZE_ARM_GRAPH_WEIGHTS
00417         const float not_me_score = max(1.0 - me_score, min_weight_);
00418         g->add_tweights(r*C+c, me_score, not_me_score);
00419 #ifdef VISUALIZE_ARM_GRAPH_WEIGHTS
00420         fg_weights.at<float>(r,c) = me_score;
00421         bg_weights.at<float>(r,c) = not_me_score;
00422 #endif // VISUALIZE_ARM_GRAPH_WEIGHTS
00423       }
00424     }
00425     // Add edge weights
00426     for (int r = 0; r < R; ++r)
00427     {
00428       for (int c = 0; c < C; ++c)
00429       {
00430         // Connect node to previous ones
00431         if (c > 0)
00432         {
00433           // Add left-link
00434           float w_l = getEdgeWeight(color_frame.at<cv::Vec3f>(r,c),
00435                                     depth_frame.at<float>(r,c),
00436                                     color_frame.at<cv::Vec3f>(r,c-1),
00437                                     depth_frame.at<float>(r,c-1));
00438           g->add_edge(r*C+c, r*C+c-1, /*capacities*/ w_l, w_l);
00439 #ifdef VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00440           left_weights.at<float>(r,c) = w_l;
00441 #endif // VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00442         }
00443         if (r > 0)
00444         {
00445           // Add up-link
00446           float w_u = getEdgeWeight(color_frame.at<cv::Vec3f>(r,c),
00447                                     depth_frame.at<float>(r,c),
00448                                     color_frame.at<cv::Vec3f>(r-1,c),
00449                                     depth_frame.at<float>(r-1,c));
00450           g->add_edge(r*C+c, (r-1)*C+c, /*capacities*/ w_u, w_u);
00451 #ifdef VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00452           up_weights.at<float>(r,c) = w_u;
#endif // VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00454 
00455           // Add up-left-link
00456           if (c > 0)
00457           {
00458             float w_ul = getEdgeWeight(color_frame.at<cv::Vec3f>(r,c),
00459                                        depth_frame.at<float>(r,c),
00460                                        color_frame.at<cv::Vec3f>(r-1,c-1),
00461                                        depth_frame.at<float>(r-1,c-1));
00462             g->add_edge(r*C+c, (r-1)*C+c-1, /*capacities*/ w_ul, w_ul);
00463 #ifdef VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00464             up_left_weights.at<float>(r,c) = w_ul;
00465 #endif // VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00466 
00467           }
00468         }
00469       }
00470     }
00471 #ifdef VISUALIZE_ARM_GRAPH_WEIGHTS
00472     cv::imshow("fg_weights_arm", fg_weights);
00473     cv::imshow("bg_weights_arm", bg_weights);
00474     cv::imshow("arm_dist_scores", dist_img);
00475 #endif // VISUALIZE_ARM_GRAPH_WEIGHTS
00476 #ifdef VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00477     double up_max = 1.0;
00478     cv::minMaxLoc(up_weights, NULL, &up_max);
00479     up_weights /= up_max;
00480     double left_max = 1.0;
00481     cv::minMaxLoc(left_weights, NULL, &left_max);
00482     left_weights /= left_max;
00483     double up_left_max = 1.0;
00484     cv::minMaxLoc(up_left_weights, NULL, &up_left_max);
    up_left_weights /= up_left_max;
00486     cv::imshow("up_weights_arm", up_weights);
00487     cv::imshow("left_weights_arm", left_weights);
00488     cv::imshow("up_left_weights_arm", up_left_weights);
00489 #endif // VISUALIZE_ARM_GRAPH_EDGE_WEIGHTS
00490 
00491     // int flow = g->maxflow(false);
00492     g->maxflow(false);
00493 
00494     // Convert output into image
00495     cv::Mat segs = convertFlowResultsToCvMat(g, R, C, roi,
00496                                              color_frame_in.size());
00497     delete g;
00498     return segs;
00499   }
00500 
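  // Maps optical flow magnitude to a foreground likelihood in [0, 1] using
  // flow_gain_ * exp(magnitude), clamped to the unit interval.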
00501   float getFlowFGScore(float magnitude)
00502   {
00503     return min(1.0, max(flow_gain_*exp(magnitude), 0.0));
00504   }
00505 
00506   float getTableScore(cv::Vec3f cur_c, float height_from_table)
00507   {
00508     const float dist_score = exp(-height_from_table/table_height_var_);
00509 #ifndef USE_TABLE_COLOR_ESTIMATE
00510     return min(1.0, max(dist_score, 0.0));
00511 #else // USE_TABLE_COLOR_ESTIMATE
00512     const float h_score = 1.0-fabs(cur_c[0] - table_stats_[0][0])/(
00513         table_stats_[1][0] + arm_color_var_add_);
00514     const float s_score = 1.0-fabs(cur_c[1] - table_stats_[0][1])/(
00515         table_stats_[1][1] + arm_color_var_add_);
00516     const float table_score = (h_score + s_score)/2.0+0.5*dist_score;
00517     return table_score;
00518 #endif // USE_TABLE_COLOR_ESTIMATE
00519   }
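
  // Arm/hand foreground score for a pixel: blends HSV similarity to the arm
  // and hand color Gaussians (weighted by arm_alpha_) with an exponential
  // falloff on the pixel's distance to the projected arm chains (weighted by
  // arm_beta_), and returns the larger of the hand and arm scores.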
00520 #ifdef VISUALIZE_ARM_GRAPH_WEIGHTS
00521   float getArmFGScore(cv::Mat& color_frame, cv::Mat& depth_frame, int r, int c,
00522                       std::vector<cv::Vec3f>& arm_stats,
00523                       std::vector<cv::Vec3f>& hand_stats, ArmModel& arms,
00524                       cv::Rect& roi, cv::Mat& dist_img)
00525 #else // VISUALIZE_ARM_GRAPH_WEIGHTS
00526   float getArmFGScore(cv::Mat& color_frame, cv::Mat& depth_frame, int r, int c,
00527                       std::vector<cv::Vec3f>& arm_stats,
00528                       std::vector<cv::Vec3f>& hand_stats, ArmModel& arms,
00529                       cv::Rect& roi)
00530 #endif // VISUALIZE_ARM_GRAPH_WEIGHTS
00531   {
00532     const float dist_score = exp(-arms.distanceToArm(
00533         cv::Point2f(c+roi.x,r+roi.y), depth_frame)/arm_dist_var_);
00534 #ifdef VISUALIZE_ARM_GRAPH_WEIGHTS
00535     dist_img.at<float>(r,c) = dist_score;
00536 #endif // VISUALIZE_ARM_GRAPH_WEIGHTS
00537     cv::Vec3f cur_c = color_frame.at<cv::Vec3f>(r,c);
00538     const float arm_h_score = 1.0-fabs(cur_c[0] - arm_stats[0][0])/(
00539         arm_stats[1][0] + arm_color_var_add_);
00540     const float arm_s_score = 1.0-fabs(cur_c[1] - arm_stats[0][1])/(
00541         arm_stats[1][1] + arm_color_var_add_);
00542     const float arm_v_score = 1.0-fabs(cur_c[2] - arm_stats[0][2])/(
00543         arm_stats[1][2] + arm_color_var_add_);
00544     const float arm_score = (arm_alpha_*(arm_h_score + arm_s_score +
00545                                          arm_v_score)/3.0 +
00546                              arm_beta_*dist_score);
00547     const float hand_h_score = 1.0-fabs(cur_c[0] - hand_stats[0][0])/(
00548         hand_stats[1][0] + arm_color_var_add_);
00549     const float hand_s_score = 1.0-fabs(cur_c[1] - hand_stats[0][1])/(
00550         hand_stats[1][1] + arm_color_var_add_);
00551     const float hand_v_score = 1.0-fabs(cur_c[2] - hand_stats[0][2])/(
00552         hand_stats[1][2] + arm_color_var_add_);
00553     const float hand_score = (arm_alpha_*(hand_h_score + hand_s_score +
00554                                           hand_v_score) / 3.0 +
00555                               arm_beta_*dist_score);
00556     return max(hand_score, arm_score);
00557   }
00558 
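  // Estimates a per-channel color mean and variance from small windows of
  // radius arm_grow_radius_ around the given image points (offset by
  // min_x/min_y when the frame is a cropped ROI). Returns {means, variances}.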
00559   std::vector<cv::Vec3f> getImagePointGaussian(cv::Mat& color_frame,
00560                                                std::vector<cv::Point> points,
00561                                                int min_x=0, int min_y=0)
00562   {
00563     // Calculate color means and variances
00564     const int C = color_frame.cols;
00565     const int R = color_frame.rows;
00566     int pixel_count = 0;
00567     cv::Vec3f means;
00568     means[0] = 0.0;
00569     means[1] = 0.0;
00570     means[2] = 0.0;
00571     for (unsigned int i = 0; i < points.size(); ++i)
00572     {
00573       for (int r = max(0, points[i].y - min_y - arm_grow_radius_);
00574            r < min(points[i].y - min_y + arm_grow_radius_, R); ++r)
00575       {
00576         for (int c = max(0, points[i].x - min_x - arm_grow_radius_);
00577              c < min(points[i].x - min_x + arm_grow_radius_, C); ++c)
00578         {
00579           cv::Vec3f cur_color = color_frame.at<cv::Vec3f>(r,c);
00580           means += cur_color;
00581           ++pixel_count;
00582         }
00583       }
00584     }
00585     if (pixel_count > 0)
00586     {
00587       means[0] /= pixel_count;
00588       means[1] /= pixel_count;
00589       means[2] /= pixel_count;
00590     }
00591     else
00592     {
00593       ROS_WARN_STREAM("Calculating stats for 0 pixels");
00594     }
00595     cv::Vec3f vars;
00596     vars[0] = 0.0;
00597     vars[1] = 0.0;
00598     vars[2] = 0.0;
00599     for (unsigned int i = 0; i < points.size(); ++i)
00600     {
      for (int r = max(0, points[i].y - min_y - arm_grow_radius_);
00602            r < min(points[i].y - min_y + arm_grow_radius_, R); ++r)
00603       {
00604         for (int c = max(0, points[i].x - min_x - arm_grow_radius_);
00605              c < min(points[i].x - min_x + arm_grow_radius_, C); ++c)
00606         {
          // Subtract the channel means before squaring to accumulate variances
          cv::Vec3f diff = color_frame.at<cv::Vec3f>(r,c) - means;
          diff = diff.mul(diff);
00609           vars += diff;
00610         }
00611       }
00612     }
00613     vars[0] /=  (pixel_count+1.0);
00614     vars[1] /=  (pixel_count+1.0);
00615     vars[2] /=  (pixel_count+1.0);
00616     std::vector<cv::Vec3f> stats;
00617     stats.push_back(means);
00618     stats.push_back(vars);
00619     return stats;
00620   }
00621 
00622   void setTableColorStats(cv::Mat& color_frame, std::vector<cv::Point>& pts)
00623   {
00624     cv::Mat color_frame_hsv(color_frame.size(), color_frame.type());
00625     cv::cvtColor(color_frame, color_frame_hsv, CV_BGR2HSV);
00626     cv::Mat color_frame_f(color_frame_hsv.size(), CV_32FC3);
00627     color_frame_hsv.convertTo(color_frame_f, CV_32FC3, 1.0/255, 0);
00628     table_stats_ = getImagePointGaussian(color_frame_f, pts);
00629   }
00630 
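  // Converts the max-flow labeling to a binary image: pixels on the SOURCE
  // side of the cut become 255, everything else stays 0.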
00631   cv::Mat convertFlowResultsToCvMat(GraphType *g, int R, int C)
00632   {
00633     cv::Mat segs(R, C, CV_8UC1, cv::Scalar(0));
00634     for (int r = 0; r < R; ++r)
00635     {
00636       uchar* seg_row = segs.ptr<uchar>(r);
00637       for (int c = 0; c < C; ++c)
00638       {
00639         int label = (g->what_segment(r*C+c) == GraphType::SOURCE);
00640         seg_row[c] = label*255;
00641       }
00642     }
00643     return segs;
00644   }
00645 
00646   cv::Mat convertFlowResultsToCvMat(GraphType *g, int R, int C,
00647                                     cv::Rect roi, cv::Size out_size)
00648   {
00649     cv::Mat segs(out_size, CV_8UC1, cv::Scalar(0));
00650     for (int r = 0; r < R; ++r)
00651     {
00652       for (int c = 0; c < C; ++c)
00653       {
00654         int label = (g->what_segment(r*C+c) == GraphType::SOURCE);
00655         segs.at<uchar>(r+roi.y, c+roi.x) = label*255;
00656       }
00657     }
00658     return segs;
00659   }
00660 
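  // Pairwise n-link weight between neighboring pixels, computed from the
  // exponentiated absolute differences of the first two color channels and of
  // depth, weighted by w_c_alpha_, w_c_beta_, and w_c_gamma_ respectively.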
00661   float getEdgeWeight(cv::Vec3f c0, float d0, cv::Vec3f c1, float d1)
00662   {
00663     cv::Vec3f c_d = c0-c1;
00664     float w_d = d0-d1;
00665     float w_c = w_c_alpha_*exp(fabs(c_d[0])) + w_c_beta_*exp(fabs(c_d[1])) +
00666         /*w_c_beta_*exp(fabs(c_d[2])) +*/ w_c_gamma_*exp(fabs(w_d));
00667     return w_c;
00668   }
00669 
00670   enum SegLabels
00671   {
00672     MOVING,
00673     ARM,
00674     TABLE,
00675     BG
00676   };
00677 
00678   double workspace_background_weight_;
00679   double min_weight_;
00680   double w_c_alpha_;
00681   double w_c_beta_;
00682   double w_c_gamma_;
00683   double magnitude_thresh_;
00684   double flow_gain_;
00685   double table_height_var_;
00686   double arm_dist_var_;
00687   double arm_color_var_add_;
00688   double arm_alpha_;
00689   double arm_beta_;
00690   int arm_grow_radius_;
00691   int arm_search_radius_;
00692   std::vector<cv::Vec3f> table_stats_;
00693 };
00694 
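// Illustrative sketch (not part of the original source) of how MotionGraphcut
// is driven; the full wiring lives in the commented-out segmentMovingStuff()
// method further down. Frame names here are hypothetical.
//
//   MotionGraphcut mgc;
//   mgc.setTableColorStats(color_frame_bgr, table_pixel_locations);
//   cv::Mat moving_mask = mgc(color_frame_hsv_f, depth_frame, flow_u, flow_v,
//                             workspace_mask, heights_above_table, arm_model);
//   cv::Mat arm_mask = mgc.segmentRobotArm(color_frame_hsv_f, depth_frame,
//                                          workspace_mask, arm_model,
//                                          min_x, max_x, min_y, max_y);
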
00695 class MGCNode
00696 {
00697  public:
00698   MGCNode(ros::NodeHandle &n) :
00699       n_(n), n_private_("~"),
00700       image_sub_(n, "color_image_topic", 1),
00701       depth_sub_(n, "depth_image_topic", 1),
00702       cloud_sub_(n, "point_cloud_topic", 1),
00703       sync_(MySyncPolicy(15), image_sub_, depth_sub_, cloud_sub_),
00704       it_(n),
00705       singulation_server_(n, "singulation_action",
                    boost::bind(&MGCNode::trackerGoalCallback,
00707                                 this, _1),
00708                     false),
00709       tf_(), os_(&ft_), pcl_segmenter_(&ft_),
00710       have_depth_data_(false), tracking_(false),
00711       tracker_initialized_(false), tracker_count_(0)
00712   {
00713     // Get parameters from the server
00714     n_private_.param("crop_min_x", crop_min_x_, 0);
00715     n_private_.param("crop_max_x", crop_max_x_, 640);
00716     n_private_.param("crop_min_y", crop_min_y_, 0);
00717     n_private_.param("crop_max_y", crop_max_y_, 480);
00718     n_private_.param("display_wait_ms", display_wait_ms_, 3);
00719     n_private_.param("min_workspace_x", min_workspace_x_, 0.0);
00720     n_private_.param("min_workspace_y", min_workspace_y_, 0.0);
00721     n_private_.param("min_workspace_z", min_workspace_z_, 0.0);
00722     n_private_.param("below_table_z", below_table_z_, 0.1);
00723     n_private_.param("max_workspace_x", max_workspace_x_, 0.0);
00724     n_private_.param("max_workspace_y", max_workspace_y_, 0.0);
00725     n_private_.param("max_workspace_z", max_workspace_z_, 0.0);
00726     n_private_.param("min_pushing_x", min_pushing_x_, 0.0);
00727     n_private_.param("min_pushing_y", min_pushing_y_, 0.0);
00728     n_private_.param("max_pushing_x", max_pushing_x_, 0.0);
00729     n_private_.param("max_pushing_y", max_pushing_y_, 0.0);
00730     std::string default_workspace_frame = "/torso_lift_link";
00731     n_private_.param("workspace_frame", workspace_frame_,
00732                     default_workspace_frame);
00733 
00734     n_private_.param("min_table_z", pcl_segmenter_.min_table_z_, -0.5);
00735     n_private_.param("max_table_z", pcl_segmenter_.max_table_z_, 1.5);
00736     pcl_segmenter_.min_workspace_x_ = min_workspace_x_;
00737     pcl_segmenter_.max_workspace_x_ = max_workspace_x_;
00738 
00739     n_private_.param("segmenting_moving_stuff", segmenting_moving_stuff_,
00740                      false);
00741     n_private_.param("autostart_tracking", tracking_, false);
00742     n_private_.param("auto_flow_cluster", auto_flow_cluster_, false);
00743     n_private_.param("autostart_pcl_segmentation", autorun_pcl_segmentation_,
00744                      false);
00745 
00746     n_private_.param("num_downsamples", num_downsamples_, 2);
00747     std::string cam_info_topic_def = "/kinect_head/rgb/camera_info";
00748     n_private_.param("cam_info_topic", cam_info_topic_,
00749                     cam_info_topic_def);
00750     n_private_.param("table_ransac_thresh", pcl_segmenter_.table_ransac_thresh_,
00751                      0.01);
00752 
00753     base_output_path_ = "/home/thermans/sandbox/cut_out/";
00754     // Graphcut weights
00755     // n_private_.param("mgc_workspace_bg_weight",
00756     //                 mgc_.workspace_background_weight_, 1.0);
00757     // n_private_.param("mgc_min_weight", mgc_.min_weight_, 0.01);
00758     // n_private_.param("mgc_w_c_alpha", mgc_.w_c_alpha_, 0.1);
00759     // n_private_.param("mgc_w_c_beta",  mgc_.w_c_beta_, 0.1);
00760     // n_private_.param("mgc_w_c_gamma", mgc_.w_c_gamma_, 0.1);
00761     // n_private_.param("mgc_arm_grow_radius", mgc_.arm_grow_radius_, 2);
00762     // n_private_.param("mgc_arm_search_radius", mgc_.arm_search_radius_, 50);
00763     // // Lucas Kanade params
00764     // n_private_.param("mgc_magnitude_thresh", mgc_.magnitude_thresh_, 0.1);
00765     // n_private_.param("mgc_flow_gain", mgc_.flow_gain_, 0.3);
00766     // n_private_.param("mgc_table_var", mgc_.table_height_var_, 0.03);
00767     // n_private_.param("mgc_arm_dist_var", mgc_.arm_dist_var_, 20.0);
00768     // n_private_.param("mgc_arm_color_var_add", mgc_.arm_color_var_add_, 0.1);
00769     // n_private_.param("mgc_arm_color_weight", mgc_.arm_alpha_, 0.5);
00770     // n_private_.param("mgc_arm_dist_weight", mgc_.arm_beta_, 0.5);
00771     int win_size = 5;
00772     n_private_.param("lk_win_size", win_size, 5);
00773     lkflow_.setWinSize(win_size);
00774     int num_levels = 4;
00775     n_private_.param("lk_num_levels", num_levels, 4);
00776     lkflow_.setNumLevels(num_levels);
00777     n_private_.param("lk_ratio_thresh", lkflow_.r_thresh_, 30.0);
00778     n_private_.param("max_flow_clusters", os_.max_k_, 2);
00779     n_private_.param("flow_cluster_max_iter", os_.kmeans_max_iter_, 200);
00780     n_private_.param("flow_cluster_epsilon", os_.kmeans_epsilon_, 0.05);
00781     n_private_.param("flow_cluster_attempts", os_.kmeans_tries_, 5);
00782     n_private_.param("affine_estimate_radius", os_.affine_estimate_radius_, 5);
00783     n_private_.param("min_affine_point_set_size", os_.min_affine_point_set_size_,
00784                      100);
00785     n_private_.param("max_ransac_iters", os_.max_ransac_iters_,
00786                      150);
00787     n_private_.param("affine_RANSAC_epsilon", os_.ransac_epsilon_, 1.0);
00788     n_private_.param("affine_RANSAC_inlier_percent",
00789                      os_.ransac_inlier_percent_est_, 0.05);
00790     n_private_.param("minimum_new_cluster_separation",
00791                      os_.minimum_new_cluster_separation_, 5.0);
00792     n_private_.param("surf_hessian_thresh", ft_.surf_.hessianThreshold,
00793                      150.0);
00794     bool use_fast;
00795     n_private_.param("use_fast_corners", use_fast, false);
00796     ft_.setUseFast(use_fast);
00797     n_private_.param("image_hist_size", image_hist_size_, 5);
00798     n_private_.param("pcl_cluster_tolerance", pcl_segmenter_.cluster_tolerance_,
00799                      0.25);
00800     n_private_.param("pcl_difference_thresh", pcl_segmenter_.cloud_diff_thresh_,
00801                      0.01);
00802     n_private_.param("pcl_min_cluster_size", pcl_segmenter_.min_cluster_size_,
00803                      100);
00804     n_private_.param("pcl_max_cluster_size", pcl_segmenter_.max_cluster_size_,
00805                      2500);
00806     n_private_.param("normal_estimate_search_radius", norm_est_radius_, 0.03);
00807     n_private_.param("pcl_voxel_downsample_res", pcl_segmenter_.voxel_down_res_,
00808                      0.005);
00809     n_private_.param("use_pcl_voxel_downsample", pcl_segmenter_.use_voxel_down_,
00810                      true);
00811 
00812     // Setup ros node connections
    sync_.registerCallback(&MGCNode::sensorCallback,
                           this);
    push_pose_server_ = n_.advertiseService(
        "get_push_pose", &MGCNode::getPushPose, this);
    table_location_server_ = n_.advertiseService(
        "get_table_location", &MGCNode::getTableLocation,
        this);
00820     motion_mask_pub_ = it_.advertise("motion_mask", 15);
00821     motion_img_pub_ = it_.advertise("motion_img", 15);
00822     arm_mask_pub_ = it_.advertise("arm_mask", 15);
00823     arm_img_pub_ = it_.advertise("arm_img", 15);
00824     pcl_segmenter_.pcl_obj_seg_pub_ = n_.advertise<sensor_msgs::PointCloud2>(
00825         "separate_table_objs", 1000);
00826     singulation_server_.start();
00827   }
00828 
00829   void sensorCallback(const sensor_msgs::ImageConstPtr& img_msg,
00830                       const sensor_msgs::ImageConstPtr& depth_msg,
00831                       const sensor_msgs::PointCloud2ConstPtr& cloud_msg)
00832   {
00833     // Convert images to OpenCV format
00834     cv::Mat color_frame(bridge_.imgMsgToCv(img_msg));
00835     cv::Mat depth_frame(bridge_.imgMsgToCv(depth_msg));
00836 
00837     // Swap kinect color channel order
00838     cv::cvtColor(color_frame, color_frame, CV_RGB2BGR);
00839 
00840     // Transform point cloud into the correct frame and convert to PCL struct
00841     XYZPointCloud cloud;
00842     pcl::fromROSMsg(*cloud_msg, cloud);
00843     tf_.waitForTransform(workspace_frame_, cloud.header.frame_id,
00844                          cloud.header.stamp, ros::Duration(0.5));
00845     pcl_ros::transformPointCloud(workspace_frame_, cloud, cloud, tf_);
00846 
00847     // Convert nans to zeros
00848     for (int r = 0; r < depth_frame.rows; ++r)
00849     {
00850       float* depth_row = depth_frame.ptr<float>(r);
00851       for (int c = 0; c < depth_frame.cols; ++c)
00852       {
00853         float cur_d = depth_row[c];
00854         if (isnan(cur_d))
00855         {
00856           depth_row[c] = 0.0;
00857         }
00858       }
00859     }
00860 
00861     cv::Mat workspace_mask(color_frame.rows, color_frame.cols, CV_8UC1,
00862                            cv::Scalar(255));
00863     // Black out pixels in color and depth images outside of workspace
00864     // As well as outside of the crop window
00865     for (int r = 0; r < color_frame.rows; ++r)
00866     {
00867       uchar* workspace_row = workspace_mask.ptr<uchar>(r);
00868       for (int c = 0; c < color_frame.cols; ++c)
00869       {
00870         // NOTE: Cloud is accessed by at(column, row)
00871         pcl::PointXYZ cur_pt = cloud.at(c, r);
00872         if (cur_pt.x < min_workspace_x_ || cur_pt.x > max_workspace_x_ ||
00873             cur_pt.y < min_workspace_y_ || cur_pt.y > max_workspace_y_ ||
00874             cur_pt.z < min_workspace_z_ || cur_pt.z > max_workspace_z_ ||
00875             r < crop_min_y_ || c < crop_min_x_ || r > crop_max_y_ ||
00876             c > crop_max_x_ )
00877         {
00878           workspace_row[c] = 0;
00879         }
00880       }
00881     }
00882 
00883     // Downsample everything first
00884     cv::Mat color_frame_down = downSample(color_frame, num_downsamples_);
00885     cv::Mat depth_frame_down = downSample(depth_frame, num_downsamples_);
00886     cv::Mat workspace_mask_down = downSample(workspace_mask, num_downsamples_);
00887 
00888     // Save internally for use in the service callback
00889     prev_color_frame_ = cur_color_frame_.clone();
00890     prev_depth_frame_ = cur_depth_frame_.clone();
00891     prev_workspace_mask_ = cur_workspace_mask_.clone();
00892     prev_camera_header_ = cur_camera_header_;
00893 
00894     // Update the current versions
00895     cur_color_frame_ = color_frame_down.clone();
00896     cur_depth_frame_ = depth_frame_down.clone();
00897     cur_workspace_mask_ = workspace_mask_down.clone();
00898     cur_point_cloud_ = cloud;
00899     have_depth_data_ = true;
00900     cur_camera_header_ = img_msg->header;
00901 
00902     // Debug stuff
00903     if (autorun_pcl_segmentation_) getPushVector();// findRandomPushPose(cloud);
00904 
00905     // Started via actionlib call
00906     updateTracks(cur_color_frame_, cur_depth_frame_, prev_color_frame_,
00907                  prev_depth_frame_, cur_point_cloud_);
00908     // if (segmenting_moving_stuff_)
00909     // {
00910     //   cv::Mat seg_mask = segmentMovingStuff(cur_color_frame_, cur_depth_frame_,
00911     //                                         prev_color_frame_, prev_depth_frame_,
00912     //                                         cur_point_cloud_);
00913     //   prev_seg_mask_ = seg_mask.clone();
00914     //   getPushVector();
00915     // }
00916 
00917     // Display junk
00918 #ifdef DISPLAY_INPUT_COLOR
00919     cv::imshow("color", cur_color_frame_);
00920 #endif // DISPLAY_INPUT_COLOR
00921 #ifdef DISPLAY_INPUT_DEPTH
00922     double depth_max = 1.0;
00923     cv::minMaxLoc(cur_depth_frame_, NULL, &depth_max);
00924     cv::Mat depth_display = cur_depth_frame_.clone();
00925     depth_display /= depth_max;
00926     cv::imshow("input_depth", depth_display);
00927 #endif // DISPLAY_INPUT_DEPTH
00928 #ifdef DISPLAY_WORKSPACE_MASK
00929     cv::imshow("workspace_mask", cur_workspace_mask_);
00930 #endif // DISPLAY_WORKSPACE_MASK
00931 #if defined DISPLAY_INPUT_COLOR || defined DISPLAY_INPUT_DEPTH || defined DISPLAY_OPTICAL_FLOW || defined DISPLAY_GRAPHCUT || defined DISPLAY_WORKSPACE_MASK || defined DISPLAY_OPT_FLOW_INTERNALS || defined DISPLAY_GRAPHCUT || defined VISUALIZE_GRAPH_WEIGHTS || defined VISUALIZE_ARM_GRAPH_WEIGHTS || defined DISPLAY_ARM_CIRCLES || defined DISPLAY_FLOW_FIELD_CLUSTERING
00932     cv::waitKey(display_wait_ms_);
00933 #endif // Any display defined
00934   }
00935 
00936   bool getTableLocation(LocateTable::Request& req, LocateTable::Response& res)
00937   {
00938     if ( have_depth_data_ )
00939     {
00940       res.table_centroid = getTablePlane(cur_point_cloud_);
00941       if ((res.table_centroid.pose.position.x == 0.0 &&
00942            res.table_centroid.pose.position.y == 0.0 &&
00943            res.table_centroid.pose.position.z == 0.0) ||
00944           res.table_centroid.pose.position.x < 0.0)
00945       {
00946         ROS_ERROR_STREAM("No plane found, leaving");
00947         res.found_table = false;
00948         return false;
00949       }
00950       res.found_table = true;
00951     }
00952     else
00953     {
00954       ROS_ERROR_STREAM("Calling getTableLocation prior to receiving sensor data.");
00955       res.found_table = false;
00956       return false;
00957     }
00958     return true;
00959   }
00960 
00961   bool getPushPose(PushPose::Request& req, PushPose::Response& res)
00962   {
00963     if ( have_depth_data_ )
00964     {
00965       res = findPushPose(cur_color_frame_, cur_depth_frame_, cur_point_cloud_,
00966                          req.use_guided);
00967     }
00968     else
00969     {
00970       ROS_ERROR_STREAM("Calling getPushPose prior to receiving sensor data.");
00971       return false;
00972     }
00973     return true;
00974   }
00975 
00976   PoseStamped getPushVector()
00977   {
00978     if (tracker_count_ == 1)
00979     {
00980       ROS_INFO_STREAM("Finding tabletop objects. tracker_count_ == 1");
00981       ProtoObjects objs;
00982       pcl_segmenter_.findTabletopObjects(objs, cur_point_cloud_);
00983     }
00984     else if (tracker_count_ > 1)
00985     {
00986       ROS_INFO_STREAM("Updating tabletop objects. tracker_count_ > 1");
00987       // TODO: Store downsampled versions of these internal to pcl_segmenter
00988       // or object singulation...
00989       ProtoObjects objs = pcl_segmenter_.updateObjects(cur_point_cloud_);
00990       // TODO: trim down what gets sent here
00991       return os_.getPushVector(motion_mask_hist_.back(),
00992                                arm_mask_hist_.back(),
00993                                workspace_mask_hist_.back(),
00994                                color_frame_hist_.back(),
00995                                depth_frame_hist_.back(),
00996                                flow_u_hist_.back(),
00997                                flow_v_hist_.back(), objs);
00998     }
00999     PoseStamped empty;
01000     return empty;
01001   }
01002 
01003   PushPose::Response findPushPose(cv::Mat visual_frame,
01004                                   cv::Mat depth_frame,
01005                                   XYZPointCloud& cloud, bool use_guided)
01006   {
01007     PushPose::Response res;
01008     if (use_guided)
01009     {
01010       res.push_pose = getPushVector();
01011     }
01012     else
01013     {
01014       res.push_pose = findRandomPushPose(cloud);
01015     }
01016     res.invalid_push_pose = false;
01017     return res;
01018   }
01019 
01020   //
01021   // Region tracking methods
01022   //
01023 
01024   void trackerGoalCallback(
01025       const tabletop_pushing::ObjectSingulationGoalConstPtr &goal)
01026   {
01027     if (goal->init)
01028     {
01029       ROS_INFO_STREAM("Initializing tracker.");
01030       initTracker();
01031       tabletop_pushing::ObjectSingulationResult result;
      singulation_server_.setSucceeded(result);
      return;
    }
01034 
01035     if (goal->start)
01036     {
01037       ROS_INFO_STREAM("Starting tracker.");
01038       startTracker();
01039       tabletop_pushing::ObjectSingulationResult result;
01040       singulation_server_.setSucceeded(result);
01041     }
01042     else
01043     {
01044       ROS_INFO_STREAM("Stopping tracker.");
01045       stopTracker();
01046       tabletop_pushing::ObjectSingulationResult result;
01047       if (goal->get_singulation_vector)
01048       {
01049         result.singulation_vector = getPushVector();
01050       }
01051       singulation_server_.setSucceeded(result);
01052       // singulation_server_.setPreempted();
01053     }
01054   }
01055 
01056   void initTracker()
01057   {
01058     tracker_initialized_ = false;
01059     tracking_ = false;
01060   }
01061 
01062   void startTracker()
01063   {
01064     // tracker_initialized_ = true;
01065     tracking_ = true;
01066   }
01067 
01068   void stopTracker()
01069   {
01070     // tracker_initialized_ = false;
01071     tracking_ = false;
01072   }
01073 
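  // Updates the sparse feature tracks each frame. On the first call it also
  // grabs the camera intrinsics, fits the table plane to set the workspace
  // floor, and initializes the tracker before any flow is computed.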
01074   void updateTracks(cv::Mat color_frame, cv::Mat& depth_frame,
01075                     cv::Mat& prev_color_frame, cv::Mat& prev_depth_frame,
01076                     XYZPointCloud& cloud)
01077   {
01078     // Get a grayscale image as well
01079     cv::Mat gray_frame;
01080     cv::cvtColor(color_frame, gray_frame, CV_BGR2GRAY);
01081 
01082     if (!tracker_initialized_)
01083     {
01084       cam_info_ = *ros::topic::waitForMessage<sensor_msgs::CameraInfo>(
01085           cam_info_topic_, n_, ros::Duration(5.0));
01086 
01087       table_centroid_ = getTablePlane(cloud);
01088       if (table_centroid_.pose.position.x == 0.0 &&
01089           table_centroid_.pose.position.y == 0.0 &&
01090           table_centroid_.pose.position.z == 0.0)
01091       {
01092         ROS_DEBUG_STREAM("No plane found!");
01093       }
01094       else
01095       {
01096         min_workspace_z_ = table_centroid_.pose.position.z - below_table_z_;
01097         n_private_.setParam("min_workspace_z", min_workspace_z_);
01098         ROS_DEBUG_STREAM("Found plane");
01099       }
01100 
01101       ft_.initTracks(gray_frame);
01102 
01103       tracker_initialized_ = true;
01104       tracker_count_ = 0;
01105       return;
01106     }
01107     if (!tracking_)
01108     {
01109       return;
01110     }
01111     // Get sparse flow
01112     AffineFlowMeasures sparse_flow = ft_.updateTracks(gray_frame,
01113                                                       cur_workspace_mask_);
01114     ++tracker_count_;
01115   }
01116 
01117   //
01118   // Core method for calculation
01119   //
01120 //   cv::Mat segmentMovingStuff(cv::Mat& color_frame, cv::Mat& depth_frame,
01121 //                              cv::Mat& prev_color_frame,
01122 //                              cv::Mat& prev_depth_frame, XYZPointCloud& cloud)
01123 //   {
01124 //     if (!tracking_ || !tracker_initialized_ || tracker_count_ < 1)
01125 //     {
01126 //       cv::Mat empty_segments(color_frame.rows, color_frame.cols, CV_8UC1,
01127 //                              cv::Scalar(0));
01128 //       return empty_segments;
01129 //     }
01130 
01131 //     // TODO: Consolidate into a single function call ?
01132 //     // Convert frame to floating point HSV
01133 //     cv::Mat color_frame_hsv(color_frame.size(), color_frame.type());
01134 //     cv::cvtColor(color_frame, color_frame_hsv, CV_BGR2HSV);
01135 //     cv::Mat color_frame_f(color_frame_hsv.size(), CV_32FC3);
01136 //     color_frame_hsv.convertTo(color_frame_f, CV_32FC3, 1.0/255, 0);
01137 
01138 //     // Get optical flow
01139 //     std::vector<cv::Mat> flow_outs = lkflow_(color_frame, prev_color_frame);
01140 
01141 //     // Project locations of the arms and hands into the image
01142 //     int min_arm_x = 0;
01143 //     int max_arm_x = 0;
01144 //     int min_arm_y = 0;
01145 //     int max_arm_y = 0;
01146 //     ArmModel hands_and_arms = projectArmPoses(cur_camera_header_,
01147 //                                               color_frame.size(), min_arm_x,
01148 //                                               max_arm_x, min_arm_y, max_arm_y);
01149 //     // Get pixel heights above the table
01150 //     cv::Mat heights_above_table = getTableHeightDistances();
01151 //     // Perform graphcut for motion detection
01152 //     cv::Mat cut = mgc_(color_frame_f, depth_frame, flow_outs[0], flow_outs[1],
01153 //                        cur_workspace_mask_, heights_above_table,
01154 //                        hands_and_arms);
01155 //     // Perform graphcut for arm localization
01156 //     cv::Mat arm_cut(color_frame.size(), CV_8UC1, cv::Scalar(0));
01157 //     if (hands_and_arms[0].size() > 0 || hands_and_arms[1].size() > 0)
01158 //     {
01159 //       arm_cut = mgc_.segmentRobotArm(color_frame_f, depth_frame,
01160 //                                      cur_workspace_mask_, hands_and_arms,
01161 //                                      min_arm_x, max_arm_x, min_arm_y, max_arm_y);
01162 //     }
01163 
01164 //     // Publish the moving region stuff
01165 //     cv_bridge::CvImage motion_mask_msg;
01166 //     motion_mask_msg.image = cut;
01167 //     motion_mask_msg.header = cur_camera_header_;
01168 //     motion_mask_msg.encoding = sensor_msgs::image_encodings::TYPE_8UC1;
01169 //     motion_mask_pub_.publish(motion_mask_msg.toImageMsg());
01170 
01171 //     // Publish arm stuff
01172 //     cv_bridge::CvImage arm_mask_msg;
01173 //     arm_mask_msg.image = arm_cut;
01174 //     arm_mask_msg.header = cur_camera_header_;
01175 //     arm_mask_msg.encoding = sensor_msgs::image_encodings::TYPE_8UC1;
01176 //     arm_mask_pub_.publish(arm_mask_msg.toImageMsg());
01177 
01178 //     // Also publish color versions
01179 //     cv::Mat moving_regions_img;
01180 //     color_frame.copyTo(moving_regions_img, cut);
01181 //     cv_bridge::CvImage motion_img_msg;
01182 //     cv::Mat motion_img_send(cut.size(), CV_8UC3);
01183 //     moving_regions_img.convertTo(motion_img_send, CV_8UC3, 1.0, 0);
01184 //     motion_img_msg.image = motion_img_send;
01185 //     motion_img_msg.header = cur_camera_header_;
01186 //     motion_img_msg.encoding = sensor_msgs::image_encodings::TYPE_8UC3;
01187 //     motion_img_pub_.publish(motion_img_msg.toImageMsg());
01188 
01189 //     cv::Mat arm_regions_img;
01190 //     color_frame.copyTo(arm_regions_img, arm_cut);
01191 //     cv_bridge::CvImage arm_img_msg;
01192 //     cv::Mat arm_img_send(arm_regions_img.size(), CV_8UC3);
01193 //     arm_regions_img.convertTo(arm_img_send, CV_8UC3, 1.0, 0);
01194 //     arm_img_msg.image = arm_img_send;
01195 //     arm_img_msg.header = cur_camera_header_;
01196 //     arm_img_msg.encoding = sensor_msgs::image_encodings::TYPE_8UC3;
01197 //     arm_img_pub_.publish(arm_img_msg.toImageMsg());
01198 //     // cv::Mat not_arm_move = cut - arm_cut;
01199 //     // cv::Mat not_arm_move_color;
01200 //     // color_frame.copyTo(not_arm_move_color, not_arm_move);
01201 
01202 //     // Get point cloud associateds with the motion mask and arm mask
01203 //     // XYZPointCloud moving_cloud = getMaskedPointCloud(cloud, cut);
01204 //     // XYZPointCloud arm_cloud = getMaskedPointCloud(cloud, arm_cut);
01205 
01206 //     cv::Mat last_motion_mask;
01207 //     cv::Mat last_arm_mask;
01208 //     cv::Mat last_workspace_mask;
01209 //     cv::Mat last_color_frame;
01210 //     cv::Mat last_depth_frame;
01211 //     cv::Mat last_flow_u;
01212 //     cv::Mat last_flow_v;
01213 
01214 //     cut.copyTo(last_motion_mask);
01215 //     arm_cut.copyTo(last_arm_mask);
01216 //     cur_workspace_mask_.copyTo(last_workspace_mask);
01217 //     color_frame.copyTo(last_color_frame);
01218 //     depth_frame.copyTo(last_depth_frame);
01219 //     flow_outs[0].copyTo(last_flow_u);
01220 //     flow_outs[1].copyTo(last_flow_v);
01221 
01222 //     motion_mask_hist_.push_back(last_motion_mask);
01223 //     arm_mask_hist_.push_back(last_arm_mask);
01224 //     workspace_mask_hist_.push_back(last_workspace_mask);
01225 //     color_frame_hist_.push_back(last_color_frame);
01226 //     depth_frame_hist_.push_back(last_depth_frame);
01227 //     flow_u_hist_.push_back(last_flow_u);
01228 //     flow_v_hist_.push_back(last_flow_v);
01229 
01230 //     if (motion_mask_hist_.size() > image_hist_size_)
01231 //     {
01232 //       motion_mask_hist_.pop_front();
01233 //       arm_mask_hist_.pop_front();
01234 //       workspace_mask_hist_.pop_front();
01235 //       color_frame_hist_.pop_front();
01236 //       depth_frame_hist_.pop_front();
01237 //       flow_u_hist_.pop_front();
01238 //       flow_v_hist_.pop_front();
01239 //     }
01240 
01241 // #ifdef WRITE_INPUT_TO_DISK
01242 //     std::stringstream input_out_name;
01243 //     input_out_name << base_output_path_ << "input" << tracker_count_ << ".tiff";
01244 //     cv::imwrite(input_out_name.str(), color_frame);
01245 // #endif // WRITE_INPUT_TO_DISK
01246 // #ifdef WRITE_FLOWS_TO_DISK
01247 //     cv::Mat flow_thresh_disp_img(color_frame.size(), CV_8UC3);
01248 //     flow_thresh_disp_img = color_frame.clone();
01249 //     for (int r = 0; r < flow_thresh_disp_img.rows; ++r)
01250 //     {
01251 //       for (int c = 0; c < flow_thresh_disp_img.cols; ++c)
01252 //       {
01253 //         float u = flow_outs[0].at<float>(r,c);
01254 //         float v = flow_outs[1].at<float>(r,c);
01255 //         if (std::sqrt(u*u+v*v) > mgc_.magnitude_thresh_)
01256 //         {
01257 //           cv::line(flow_thresh_disp_img, cv::Point(c,r),
01258 //                    cv::Point(c-u, r-v), cv::Scalar(0,255,0));
01259 //         }
01260 //       }
01261 //     }
01262 
01263 //     std::stringstream flow_out_name;
01264 //     flow_out_name << base_output_path_ << "flow" << tracker_count_ << ".tiff";
01265 //     cv::imwrite(flow_out_name.str(), flow_thresh_disp_img);
01266 // #endif // WRITE_FLOWS_TO_DISK
01267 // #ifdef WRITE_CUTS_TO_DISK
01268 //     std::stringstream cut_out_name;
01269 //     cut_out_name << base_output_path_ << "cut" << tracker_count_ << ".tiff";
01270 //     cv::imwrite(cut_out_name.str(), moving_regions_img);
01271 // #endif // WRITE_CUTS_TO_DISK
01272 // #ifdef WRITE_ARM_CUT_TO_DISK
01273 //     std::stringstream arm_cut_out_name;
01274 //     arm_cut_out_name << base_output_path_ << "arm_cut" << tracker_count_ << ".tiff";
01275 //     cv::imwrite(arm_cut_out_name.str(), arm_regions_img);
01276 //     // std::stringstream not_arm_move_out_name;
01277 //     // not_arm_move_out_name << base_output_path_ << "not_arm_move" << tracker_count_
01278 //     //                      << ".tiff";
01279 //     // cv::imwrite(not_arm_move_out_name.str(), not_arm_move_color);
01280 // #endif // WRITE_ARM_CUT_TO_DISK
01281 // #ifdef DISPLAY_OPTICAL_FLOW
01282 //     cpl_visual_features::displayOpticalFlow(color_frame, flow_outs[0],
01283 //                                             flow_outs[1],
01284 //                                             mgc_.magnitude_thresh_);
01285 // #endif // DISPLAY_OPTICAL_FLOW
01286 // #ifdef DISPLAY_ARM_CIRCLES
01287 //     cv::Mat arms_img(color_frame.size(), CV_8UC3);
01288 //     arms_img = color_frame.clone();
01289 //     for (unsigned int i = 0; i < hands_and_arms.size(); ++i)
01290 //     {
01291 //       for (unsigned int j = 0; j < hands_and_arms[i].size(); ++j)
01292 //       {
01293 //         cv::Scalar color;
01294 //         if (i%2 == 0)
01295 //         {
01296 //           color = cv::Scalar(0,0,255);
01297 //         }
01298 //         else
01299 //         {
01300 //           color = cv::Scalar(0,255,0);
01301 //         }
01302 //         cv::circle(arms_img, hands_and_arms[i][j], 2, color);
01303 //       }
01304 //     }
01305 //     cv::imshow("arms", arms_img);
01306 // #endif
01307 // #ifdef DISPLAY_GRAPHCUT
01308 //     cv::imshow("moving_regions", moving_regions_img);
01309 //     cv::imshow("arm_cut", arm_regions_img);
01310 //     // cv::imshow("not_arm_move", not_arm_move_color);
01311 // #endif // DISPLAY_GRAPHCUT
01312 //     return cut;
01313 //   }
01314 
01315   //
01316   // Arm detection methods
01317   //
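  // Transforms a stamped point into the camera frame and projects it with the
  // pinhole model: u = (fx*X + cx*Z)/Z, v = (fy*Y + cy*Z)/Z, using K from the
  // cached CameraInfo, then halves the coordinates once per downsample level.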
01318   cv::Point projectPointIntoImage(PointStamped cur_point,
01319                                   std::string target_frame)
01320   {
01321     cv::Point img_loc;
01322     try
01323     {
01324       // Transform point into the camera frame
01325       PointStamped image_frame_loc_m;
01326       tf_.transformPoint(target_frame, cur_point, image_frame_loc_m);
01327       // Project point onto the image
01328       img_loc.x = static_cast<int>((cam_info_.K[0]*image_frame_loc_m.point.x +
01329                                     cam_info_.K[2]*image_frame_loc_m.point.z) /
01330                                    image_frame_loc_m.point.z);
01331       img_loc.y = static_cast<int>((cam_info_.K[4]*image_frame_loc_m.point.y +
01332                                     cam_info_.K[5]*image_frame_loc_m.point.z) /
01333                                    image_frame_loc_m.point.z);
01334 
01335       // Downsample poses if the image is downsampled
01336       for (int i = 0; i < num_downsamples_; ++i)
01337       {
01338         img_loc.x /= 2;
01339         img_loc.y /= 2;
01340       }
01341     }
01342     catch (tf::TransformException e)
01343     {
01344       // ROS_ERROR_STREAM(e.what());
01345     }
01346     return img_loc;
01347   }
01348 
01349   cv::Point projectPointIntoImage(PoseStamped cur_pose,
01350                                   std::string target_frame)
01351   {
01352     PointStamped cur_point;
01353     cur_point.header = cur_pose.header;
01354     cur_point.point = cur_pose.pose.position;
01355     return projectPointIntoImage(cur_point, target_frame);
01356   }
01357 
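  // Bresenham-style rasterization of the segment p1->p2: collects the in-image
  // pixels into `line` and updates the bounding box (min_x..max_y) of the
  // points it adds. Returns true if at least one point was added.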
01358   bool getLineValues(cv::Point p1, cv::Point p2, std::vector<cv::Point>& line,
01359                      cv::Size frame_size,
01360                      int &min_x, int &max_x, int &min_y, int &max_y)
01361   {
01362     int num_points_added = 0;
01363     bool steep = (abs(p1.y - p2.y) > abs(p1.x - p2.x));
01364     if (steep)
01365     {
01366       // Swap x and y
01367       std::swap(p1.x, p1.y);
01368       std::swap(p2.x, p2.y);
01374     }
01375     if (p1.x > p2.x)
01376     {
01377       // Swap p1 and p2
01378       std::swap(p1, p2);
01383     }
01384     int dx = p2.x - p1.x;
01385     int dy = abs(p2.y - p1.y);
01386     int error = dx / 2;
01387     int ystep = 0;
01388     if (p1.y < p2.y)
01389     {
01390       ystep = 1;
01391     }
01392     else if (p1.y > p2.y)
01393     {
01394       ystep = -1;
01395     }
01396     for(int x = p1.x, y = p1.y; x <= p2.x; ++x)
01397     {
01398       if (steep)
01399       {
01400         cv::Point p_new(y,x);
01401         // Only keep p_new if it lies inside the image; (0, 0), the value
01402         // returned when a projection fails, is skipped as well
01403         if (x >= 0 && y >= 0 && x < frame_size.height &&
01404             y < frame_size.width && !(x == 0 && y == 0))
01405         {
01408           if (p_new.x < min_x)
01409             min_x = p_new.x;
01410           if (p_new.x > max_x)
01411             max_x = p_new.x;
01412           if (p_new.y < min_y)
01413             min_y = p_new.y;
01414           if (p_new.y > max_y)
01415             max_y = p_new.y;
01416           line.push_back(p_new);
01417           ++num_points_added;
01418         }
01419       }
01420       else
01421       {
01422         cv::Point p_new(x,y);
01423         // Only keep p_new if it lies inside the image; (0, 0), the value
01424         // returned when a projection fails, is skipped as well
01425         if (x >= 0 && y >= 0 && x < frame_size.width &&
01426             y < frame_size.height && !(x == 0 && y == 0))
01427         {
01430           if (p_new.x < min_x)
01431             min_x = p_new.x;
01432           if (p_new.x > max_x)
01433             max_x = p_new.x;
01434           if (p_new.y < min_y)
01435             min_y = p_new.y;
01436           if (p_new.y > max_y)
01437             max_y = p_new.y;
01438           line.push_back(p_new);
01439           ++num_points_added;
01440         }
01441       }
01442       error -= dy;
01443       if (error < 0)
01444       {
01445         y += ystep;
01446         error += dx;
01447       }
01448     }
01449     return (num_points_added > 0);
01450   }
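
  // Added sketch (not part of the original file): getLineValues() is Bresenham
  // line rasterization clipped to the image bounds; it also grows the
  // [min_x, max_x] x [min_y, max_y] bounding box of the points it keeps. A
  // minimal, hypothetical usage example:
  bool exampleRasterizeSegment(cv::Size frame_size)
  {
    std::vector<cv::Point> segment;
    int min_x = 10000, max_x = 0, min_y = 10000, max_y = 0;
    // Returns true iff at least one pixel of the segment fell inside the frame
    return getLineValues(cv::Point(12, 40), cv::Point(200, 90), segment,
                         frame_size, min_x, max_x, min_y, max_y);
  }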
01451 
01452   ArmModel projectArmPoses(std_msgs::Header img_header, cv::Size frame_size,
01453                            int &min_x, int &max_x, int &min_y, int &max_y)
01454   {
01455     // Project all arm joints into image
01456     ArmModel arm_locs;
01457 
01458     // Left hand
01459     cv::Point ll0 = projectJointOriginIntoImage(img_header,
01460                                                 "l_gripper_l_finger_tip_link");
01461     cv::Point ll1 = projectJointOriginIntoImage(img_header,
01462                                                 "l_gripper_l_finger_link");
01463     cv::Point lr0 = projectJointOriginIntoImage(img_header,
01464                                                 "l_gripper_r_finger_tip_link");
01465     cv::Point lr1 = projectJointOriginIntoImage(img_header,
01466                                                 "l_gripper_r_finger_link");
01467     cv::Point lp = projectJointOriginIntoImage(img_header,
01468                                                "l_gripper_palm_link");
01469     // TODO: Add more arm locations
01470     // Left arm
01471     cv::Point l1 = projectJointOriginIntoImage(img_header, "l_wrist_flex_link");
01472     cv::Point l2 = projectJointOriginIntoImage(img_header, "l_forearm_link");
01473     cv::Point l3 = projectJointOriginIntoImage(img_header, "l_upper_arm_link");
01474     arm_locs.l_chain.push_back(lp);
01475     arm_locs.l_chain.push_back(ll1);
01476     arm_locs.l_chain.push_back(ll0);
01477     arm_locs.l_chain.push_back(lr0);
01478     arm_locs.l_chain.push_back(lr1);
01479     arm_locs.l_chain.push_back(lp);
01480     arm_locs.l_chain.push_back(l1);
01481     arm_locs.l_chain.push_back(l2);
01482     arm_locs.l_chain.push_back(l3);
01483 
01484     // Right hand
01485     cv::Point rl0 = projectJointOriginIntoImage(img_header,
01486                                                 "r_gripper_l_finger_tip_link");
01487     cv::Point rl1 = projectJointOriginIntoImage(img_header,
01488                                                 "r_gripper_l_finger_link");
01489     cv::Point rr0 = projectJointOriginIntoImage(img_header,
01490                                                 "r_gripper_r_finger_tip_link");
01491     cv::Point rr1 = projectJointOriginIntoImage(img_header,
01492                                                 "r_gripper_r_finger_link");
01493     cv::Point rp = projectJointOriginIntoImage(img_header,
01494                                                "r_gripper_palm_link");
01495 
01496     // TODO: Add more arm locations
01497     // Right arm
01498     cv::Point r1 = projectJointOriginIntoImage(img_header, "r_wrist_flex_link");
01499     cv::Point r2 = projectJointOriginIntoImage(img_header, "r_forearm_link");
01500     cv::Point r3 = projectJointOriginIntoImage(img_header, "r_upper_arm_link");
01501     arm_locs.r_chain.push_back(rp);
01502     arm_locs.r_chain.push_back(rl1);
01503     arm_locs.r_chain.push_back(rl0);
01504     arm_locs.r_chain.push_back(rr0);
01505     arm_locs.r_chain.push_back(rr1);
01506     arm_locs.r_chain.push_back(rp);
01507     arm_locs.r_chain.push_back(r1);
01508     arm_locs.r_chain.push_back(r2);
01509     arm_locs.r_chain.push_back(r3);
01510 
01511     // Initialize the min/max extrema tracked while rasterizing the arm segments
01512     min_x = 10000;
01513     max_x = 0;
01514     min_y = 10000;
01515     max_y = 0;
01516 
01517     // Add left hand
01518     arm_locs.l_hand_on = getLineValues(ll0, ll1, arm_locs.hands, frame_size,
01519                                        min_x, max_x, min_y, max_y);
01520     arm_locs.l_hand_on = (getLineValues(ll1, lp, arm_locs.hands, frame_size,
01521                                         min_x, max_x, min_y, max_y) ||
01522                           arm_locs.l_hand_on);
01523     arm_locs.l_hand_on = (getLineValues(lr0, lr1, arm_locs.hands, frame_size,
01524                                         min_x, max_x, min_y, max_y) ||
01525                           arm_locs.l_hand_on);
01526     arm_locs.l_hand_on = (getLineValues(lr1, lp, arm_locs.hands, frame_size,
01527                                         min_x, max_x, min_y, max_y) ||
01528                           arm_locs.l_hand_on);
01529     // Add left arm
01530     arm_locs.l_arm_on = getLineValues(lp, l1, arm_locs.arms, frame_size,
01531                                       min_x, max_x, min_y, max_y);
01532     arm_locs.l_arm_on = (getLineValues(l1, l2, arm_locs.arms, frame_size,
01533                                       min_x, max_x, min_y, max_y) ||
01534                          arm_locs.l_arm_on);
01535     arm_locs.l_arm_on = (getLineValues(l2, l3, arm_locs.arms, frame_size,
01536                                        min_x, max_x, min_y, max_y) ||
01537                          arm_locs.l_arm_on);
01538     // Add right hand
01539     arm_locs.r_hand_on = (getLineValues(rl0, rl1, arm_locs.hands, frame_size,
01540                                        min_x, max_x, min_y, max_y) ||
01541                          arm_locs.r_hand_on);
01542     arm_locs.r_hand_on = (getLineValues(rl1, rp, arm_locs.hands, frame_size,
01543                                         min_x, max_x, min_y, max_y) ||
01544                          arm_locs.r_hand_on);
01545     arm_locs.r_hand_on = (getLineValues(rr0, rr1, arm_locs.hands, frame_size,
01546                                        min_x, max_x, min_y, max_y) ||
01547                          arm_locs.r_hand_on);
01548     arm_locs.r_hand_on = (getLineValues(rr1, rp, arm_locs.hands, frame_size,
01549                                         min_x, max_x, min_y, max_y) ||
01550                          arm_locs.r_hand_on);
01551     // Add right arm
01552     arm_locs.r_arm_on = getLineValues(rp, r1, arm_locs.arms, frame_size,
01553                                       min_x, max_x, min_y, max_y);
01554     arm_locs.r_arm_on = (getLineValues(r1, r2, arm_locs.arms, frame_size,
01555                                        min_x, max_x, min_y, max_y) ||
01556                          arm_locs.r_arm_on);
01557     arm_locs.r_arm_on = (getLineValues(r2, r3, arm_locs.arms, frame_size,
01558                                        min_x, max_x, min_y, max_y) ||
01559                          arm_locs.r_arm_on);
01560     return arm_locs;
01561   }
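
  // Added note (not part of the original file): projectArmPoses() rasterizes
  // the finger/palm segments into arm_locs.hands and the palm-to-upper-arm
  // segments into arm_locs.arms; the l/r_hand_on and l/r_arm_on flags record
  // whether any pixel of the corresponding segments landed inside the frame,
  // and min_x..max_y return the bounding box of all projected arm pixels.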
01562 
01563   cv::Point projectJointOriginIntoImage(std_msgs::Header img_header,
01564                                         std::string joint_name)
01565   {
01566     PointStamped joint_origin;
01567     joint_origin.header.frame_id = joint_name;
01568     joint_origin.header.stamp = img_header.stamp;
01569     joint_origin.point.x = 0.0;
01570     joint_origin.point.y = 0.0;
01571     joint_origin.point.z = 0.0;
01572     // TODO: Use more information in setting correct locations
01573     if (joint_name == "r_gripper_tool_frame" ||
01574         joint_name == "l_gripper_tool_frame" ||
01575         joint_name == "l_gripper_l_finger_tip_link" ||
01576         joint_name == "l_gripper_l_finger_link" ||
01577         joint_name == "l_gripper_r_finger_tip_link" ||
01578         joint_name == "l_gripper_r_finger_link" ||
01579         joint_name == "r_gripper_l_finger_tip_link" ||
01580         joint_name == "r_gripper_l_finger_link" ||
01581         joint_name == "r_gripper_r_finger_tip_link" ||
01582         joint_name == "r_gripper_r_finger_link" ||
01583         joint_name == "r_gripper_tool_frame")
01584     {
01585       joint_origin.point.z = 0.01;
01586       if (joint_name == "r_gripper_l_finger_link" ||
01587           joint_name == "l_gripper_l_finger_link")
01588       {
01589         joint_origin.point.y = 0.02;
01590       }
01591       else if (joint_name == "r_gripper_r_finger_link" ||
01592                joint_name == "l_gripper_r_finger_link")
01593       {
01594         joint_origin.point.y = -0.02;
01595       }
01596     }
01597     // TODO: Check this better
01598     if (joint_name == "r_upper_arm_link")
01599     {
01600       joint_origin.point.y = -0.05;
01601     }
01602     // TODO: Check this better
01603     if (joint_name == "l_upper_arm_link")
01604     {
01605       joint_origin.point.y = 0.05;
01606     }
01607     if (joint_name == "r_gripper_palm_link")
01608     {
01609       joint_origin.point.x = 0.05;
01610     }
01611     if (joint_name == "l_gripper_palm_link")
01612     {
01613       joint_origin.point.x = 0.05;
01614     }
01615     return projectPointIntoImage(joint_origin, img_header.frame_id);
01616   }
01617 
01618   void drawTablePlaneOnImage(XYZPointCloud& plane, PoseStamped cent)
01619   {
01620     cv::Mat plane_display = cur_color_frame_.clone();
01621     std::vector<cv::Point> table_points;
01622     for (unsigned int i = 0; i < plane.points.size(); ++i)
01623     {
01624       PointStamped h;
01625       h.header = plane.header;
01626       h.point.x = plane.points[i].x;
01627       h.point.y = plane.points[i].y;
01628       h.point.z = plane.points[i].z;
01629       cv::Point p = projectPointIntoImage(h, cur_camera_header_.frame_id);
01630       table_points.push_back(p);
01631 #ifdef DISPLAY_PLANE_ESTIMATE
01632       cv::circle(plane_display, p, 2, cv::Scalar(0,255,0));
01633 #endif // DISPLAY_PLANE_ESTIMATE
01634     }
01635 
01636 #ifdef USE_TABLE_COLOR_ESTIMATE
01637     ROS_INFO_STREAM("Calculating table color stats.");
01638     mgc_.setTableColorStats(cur_color_frame_, table_points);
01639 #endif // USE_TABLE_COLOR_ESTIMATE
01640 
01641 #ifdef DISPLAY_PLANE_ESTIMATE
01642     cv::Point cent_img = projectPointIntoImage(cent,
01643                                                cur_camera_header_.frame_id);
01644     cv::circle(plane_display, cent_img, 5, cv::Scalar(0,0,255));
01645     cv::imshow("table_image", plane_display);
01646 #endif // DISPLAY_PLANE_ESTIMATE
01647   }
01648 
01649   //
01650   // Helper Methods
01651   //
01652   cv::Mat downSample(cv::Mat data_in, int scales)
01653   {
01654     cv::Mat out = data_in.clone();
01655     for (int i = 0; i < scales; ++i)
01656     {
01657       cv::pyrDown(data_in, out);
01658       data_in = out;
01659     }
01660     return out;
01661   }
01662 
01663   cv::Mat upSample(cv::Mat data_in, int scales)
01664   {
01665     cv::Mat out = data_in.clone();
01666     for (int i = 0; i < scales; ++i)
01667     {
01668       // NOTE: Currently assumes even cols, rows for data_in
01669       cv::Size out_size(data_in.cols*2, data_in.rows*2);
01670       cv::pyrUp(data_in, out, out_size);
01671       data_in = out;
01672     }
01673     return out;
01674   }
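
  // Added sketch (not part of the original file): downSample()/upSample()
  // halve or double the image 'scales' times using the OpenCV image pyramid,
  // so a mask shrunk n times can be restored to its original resolution as
  // long as the dimensions stay even at every level. A minimal, hypothetical
  // example:
  void examplePyramidRoundTrip()
  {
    cv::Mat img(480, 640, CV_8UC1, cv::Scalar::all(0));
    cv::Mat small_img = downSample(img, 2); // 160 x 120 (cols x rows)
    cv::Mat restored = upSample(small_img, 2); // back to 640 x 480
    CV_Assert(restored.size() == img.size());
  }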
01675 
01676   cv::Mat getTableHeightDistances()
01677   {
01678     cv::Mat table_distances(cur_depth_frame_.size(), CV_32FC1, cv::Scalar(0.0));
01679     const float table_height = table_centroid_.pose.position.z;
01680     for (int r = 0; r < table_distances.rows; ++r)
01681     {
01682       for (int c = 0; c < table_distances.cols; ++c)
01683       {
01684         pcl::PointXYZ cur_pt = cur_point_cloud_.at(2*c, 2*r);
01685         if (isnan(cur_pt.x) || isnan(cur_pt.y) || isnan(cur_pt.z))
01686         {
01687           // 3 meters is sufficiently far away
01688           table_distances.at<float>(r,c) = 3.0;
01689         }
01690         else
01691         {
01692           table_distances.at<float>(r,c) = cur_pt.z - table_height;
01693         }
01694       }
01695     }
01696 #ifdef DISPLAY_TABLE_DISTANCES
01697     cv::imshow("table_distances_raw", table_distances);
01698 #endif // DISPLAY_TABLE_DISTANCES
01699     return table_distances;
01700   }
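
  // Added note (not part of the original file): the cur_point_cloud_.at(2*c, 2*r)
  // lookup above assumes the depth frame is downsampled by a factor of 2
  // relative to the organized point cloud (i.e. num_downsamples_ == 1 here);
  // NaN points are assigned a distance of 3.0 m so they are never mistaken
  // for points near the table.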
01701 
01702   PoseStamped getTablePlane(XYZPointCloud& cloud)
01703   {
01704     Eigen::Vector4f table_centroid = pcl_segmenter_.getTablePlane(cloud);
01705     PoseStamped p;
01706     p.pose.position.x = table_centroid[0];
01707     p.pose.position.y = table_centroid[1];
01708     p.pose.position.z = table_centroid[2];
01709     p.header = cloud.header;
01710     ROS_INFO_STREAM("Table centroid is: ("
01711                     << p.pose.position.x << ", "
01712                     << p.pose.position.y << ", "
01713                     << p.pose.position.z << ")");
01714     // TODO: XYZPointCloud plane_cloud = pcl_segmenter_.getCurrentTablePoints()
01715     // drawTablePlaneOnImage(plane_cloud, p);
01716     return p;
01717   }
01718 
01719   PoseStamped findRandomPushPose(XYZPointCloud& input_cloud)
01720   {
01721     ProtoObjects objs;
01722     pcl_segmenter_.findTabletopObjects(input_cloud, objs);
01723     prev_proto_objs_ = cur_proto_objs_;
01724     cur_proto_objs_ = objs;
01725 
01726     ROS_INFO_STREAM("Found " << objs.size() << " objects.");
01727 
01728     // TODO: publish a ros point cloud here for visualization
01729     // TODO: Move the publisher out of the segmentation class
01730 
01731     std::vector<Eigen::Vector4f> cluster_centroids;
01732     for (unsigned int i = 0; i < objs.size(); ++i)
01733     {
01734       if (objs[i].centroid[0] > min_pushing_x_ &&
01735           objs[i].centroid[0] < max_pushing_x_ &&
01736           objs[i].centroid[1] > min_pushing_y_ &&
01737           objs[i].centroid[1] < max_pushing_y_)
01738       {
01739         cluster_centroids.push_back(objs[i].centroid);
01740       }
01741     }
01742     geometry_msgs::PoseStamped p;
01743 
01744     if (cluster_centroids.size() < 1)
01745     {
01746       ROS_WARN_STREAM("No object clusters found! Returning empty push_pose");
01747       p.header.frame_id = "/torso_lift_link";
01748       return p;
01749     }
01750     ROS_INFO_STREAM("Found " << cluster_centroids.size() << " proto objects");
01751     int rand_idx = rand() % cluster_centroids.size();
01752     Eigen::Vector4f obj_xyz_centroid = cluster_centroids[rand_idx];
01753     p.pose.position.x = obj_xyz_centroid[0];
01754     p.pose.position.y = obj_xyz_centroid[1];
01755     // Set z to be the table height
01756     p.pose.position.z = objs[0].table_centroid[2];
01757     ROS_INFO_STREAM("Chosen push pose is at: (" << obj_xyz_centroid[0] << ", "
01758                     << obj_xyz_centroid[1] << ", " << objs[0].table_centroid[2]
01759                     << ")");
01760 
01761     p.pose.orientation.x = 0;
01762     p.pose.orientation.y = 0;
01763     p.pose.orientation.z = 0;
01764     p.pose.orientation.w = 1.0; // identity orientation; an all-zero quaternion is invalid
01765 
01766     p.header.frame_id = "/torso_lift_link";
01767     return p;
01768   }
01769 
01770   XYZPointCloud getMaskedPointCloud(XYZPointCloud& input_cloud, cv::Mat& mask_in)
01771   {
01772     // TODO: Assert that input_cloud is shaped
01773     cv::Mat mask = upSample(mask_in, num_downsamples_);
01774 
01775     // Select points from point cloud that are in the mask:
01776     pcl::PointIndices mask_indices;
01777     mask_indices.header = input_cloud.header;
01778     for (int y = 0; y < mask.rows; ++y)
01779     {
01780       uchar* mask_row = mask.ptr<uchar>(y);
01781       for (int x = 0; x < mask.cols; ++x)
01782       {
01783         if (mask_row[x] != 0)
01784         {
01785           mask_indices.indices.push_back(y*input_cloud.width + x);
01786         }
01787       }
01788     }
01789 
01790     XYZPointCloud masked_cloud;
01791     pcl::copyPointCloud(input_cloud, mask_indices, masked_cloud);
01792     return masked_cloud;
01793   }
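
  // Added note (not part of the original file): for an organized cloud the
  // linear index of pixel (x, y) is y*width + x, which is what the loop above
  // pushes into mask_indices; pcl::copyPointCloud then extracts exactly those
  // points, producing an unorganized cloud (height == 1).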
01794 
01798   void spin()
01799   {
01800     while(n_.ok())
01801     {
01802       ros::spinOnce();
01803     }
01804   }
01805 
01806  protected:
01807   ros::NodeHandle n_;
01808   ros::NodeHandle n_private_;
01809   message_filters::Subscriber<sensor_msgs::Image> image_sub_;
01810   message_filters::Subscriber<sensor_msgs::Image> depth_sub_;
01811   message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub_;
01812   message_filters::Synchronizer<MySyncPolicy> sync_;
01813   image_transport::ImageTransport it_;
01814   sensor_msgs::CameraInfo cam_info_;
01815   image_transport::Publisher motion_img_pub_;
01816   image_transport::Publisher motion_mask_pub_;
01817   image_transport::Publisher arm_img_pub_;
01818   image_transport::Publisher arm_mask_pub_;
01819   actionlib::SimpleActionServer<tabletop_pushing::
01820                                 ObjectSingulationAction> singulation_server_;
01821   sensor_msgs::CvBridge bridge_;
01822   tf::TransformListener tf_;
01823   ros::ServiceServer push_pose_server_;
01824   ros::ServiceServer table_location_server_;
01825   cv::Mat cur_color_frame_;
01826   cv::Mat cur_depth_frame_;
01827   cv::Mat cur_workspace_mask_;
01828   cv::Mat prev_color_frame_;
01829   cv::Mat prev_depth_frame_;
01830   cv::Mat prev_workspace_mask_;
01831   cv::Mat prev_seg_mask_;
01832   std::deque<cv::Mat> motion_mask_hist_;
01833   std::deque<cv::Mat> arm_mask_hist_;
01834   std::deque<cv::Mat> workspace_mask_hist_;
01835   std::deque<cv::Mat> color_frame_hist_;
01836   std::deque<cv::Mat> depth_frame_hist_;
01837   std::deque<cv::Mat> flow_u_hist_;
01838   std::deque<cv::Mat> flow_v_hist_;
01839   std_msgs::Header cur_camera_header_;
01840   std_msgs::Header prev_camera_header_;
01841   XYZPointCloud cur_point_cloud_;
01842   PointCloudSegmentation pcl_segmenter_;
01843   ObjectSingulation os_;
01844   bool have_depth_data_;
01845   int crop_min_x_;
01846   int crop_max_x_;
01847   int crop_min_y_;
01848   int crop_max_y_;
01849   int display_wait_ms_;
01850   double min_workspace_x_;
01851   double max_workspace_x_;
01852   double min_workspace_y_;
01853   double max_workspace_y_;
01854   double min_workspace_z_;
01855   double max_workspace_z_;
01856   double min_pushing_x_;
01857   double max_pushing_x_;
01858   double min_pushing_y_;
01859   double max_pushing_y_;
01860   double below_table_z_;
01861   int num_downsamples_;
01862   std::string workspace_frame_;
01863   PoseStamped table_centroid_;
01864   bool tracking_;
01865   bool tracker_initialized_;
01866   std::string cam_info_topic_;
01867   int tracker_count_;
01868   std::string base_output_path_;
01869   int image_hist_size_;
01870   double norm_est_radius_;
01871   bool autorun_pcl_segmentation_;
01872   bool auto_flow_cluster_;
01873   bool segmenting_moving_stuff_;
01874   ProtoObjects prev_proto_objs_;
01875   ProtoObjects cur_proto_objs_;
01876 };
01877 
01878 int main(int argc, char ** argv)
01879 {
01880   srand(time(NULL));
01881   ros::init(argc, argv, "tabletop_perception_node");
01882   ros::NodeHandle n;
01883   MGCNode perception_node(n);
01884   perception_node.spin();
01885 
01886   return 0;
01887 }

