#define NO_ROS 0
#define PR2 0
#define GRASPING 0

#include "cv.h"
#include "highgui.h"
#include <stdio.h>
#include <ctype.h>
#include <Eigen/Eigen>
#include <assert.h>
#include <iostream>
#include <set>
#include <opencv2/core/eigen.hpp>
#include <cmath>
#include <ros/ros.h>
#include "sensor_msgs/Image.h"
#include "cv_bridge/CvBridge.h"
#include "camera_self_filter/mask.h"
#include "Timer.h"
#include <tf_conversions/tf_eigen.h>
#include <eigen_conversions/eigen_msg.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/ros/conversions.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/features/feature.h>
#include "pcl/segmentation/extract_clusters.h"
#include <pcl/filters/extract_indices.h>
#include <pcl/features/normal_3d.h>
#include <math.h>
#include "pcl/common/common.h"
#include <simple_robot_control/robot_control.h>
#include <tf/transform_listener.h>
#include <pcl/common/angles.h>
#include "pcl/io/pcd_io.h"
#include "pcl_ros/transforms.h"

typedef pcl::PointXYZRGB PointT;
typedef pcl::KdTree<PointT>::Ptr KdTreePtr;

using std::cout;
using std::endl;
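// Least-squares estimation of a 2D similarity transform. For point pairs
// (a_i, b_i) this accumulates the 4x4 normal equations in the unknowns
// (c, s, tx, ty) of the transform [ c -s tx ; s c ty ] and solves them via
// SVD; the routine appears to be adapted from the internals of OpenCV's
// cvEstimateRigidTransform.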
static void icvGetRTMatrix(const CvPoint2D32f* a, const CvPoint2D32f* b,
        int count, CvMat* M) {

    double sa[16], sb[4], m[4], *om = M->data.db;
    CvMat A = cvMat(4, 4, CV_64F, sa), B = cvMat(4, 1, CV_64F, sb);
    CvMat MM = cvMat(4, 1, CV_64F, m);

    int i;

    memset(sa, 0, sizeof(sa));
    memset(sb, 0, sizeof(sb));

    for (i = 0; i < count; i++) {
        sa[0] += a[i].x * a[i].x + a[i].y * a[i].y;
        sa[1] += 0;
        sa[2] += a[i].x;
        sa[3] += a[i].y;

        sa[4] += 0;
        sa[5] += a[i].x * a[i].x + a[i].y * a[i].y;
        sa[6] += -a[i].y;
        sa[7] += a[i].x;

        sa[8] += a[i].x;
        sa[9] += -a[i].y;
        sa[10] += 1;
        sa[11] += 0;

        sa[12] += a[i].y;
        sa[13] += a[i].x;
        sa[14] += 0;
        sa[15] += 1;

        sb[0] += a[i].x * b[i].x + a[i].y * b[i].y;
        sb[1] += a[i].x * b[i].y - a[i].y * b[i].x;
        sb[2] += b[i].x;
        sb[3] += b[i].y;
    }

    cvSolve(&A, &B, &MM, CV_SVD);

    om[0] = om[4] = m[0];
    om[1] = -m[1];
    om[3] = m[1];
    om[2] = m[2];
    om[5] = m[3];
}
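// Wraps icvGetRTMatrix for the minimal case: estimates the 2x3 rigid
// transform mapping the two CV_32FC2 points in A onto those in B.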
cv::Mat cvEstimateRigidTransformFrom2(const cv::Mat& A, const cv::Mat& B) {
    cv::Mat M = cv::Mat(2, 3, CV_64F);

    CvPoint2D32f* a = (CvPoint2D32f*) A.data;
    CvPoint2D32f* b = (CvPoint2D32f*) B.data;
    CvMat matM = M;
    icvGetRTMatrix(a, b, 2, &matM);
    return M;
}
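// FeatureTracker tracks Shi-Tomasi corners across frames with pyramidal
// Lucas-Kanade optical flow and clusters the resulting trajectories by rigid
// 2D motion in a RANSAC-like fashion: hypotheses are sampled from feature
// pairs, every feature is assigned to the hypothesis with the lowest
// residual, and the assignment with the lowest total residual wins. With ROS
// enabled, the 3D points of the dominant moving cluster are collected into
// cloud_segment, from which grasp() derives a PR2 grasping pose.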
class FeatureTracker {
public:

    // Corner detection and KLT tracking parameters.
    static const int MAX_CORNERS = 500;
    static const double quality = 0.01;
    static const double min_distance = 10;
    static const int WIN_SIZE = 10;

    static const int num_points_to_select = 3;

    tf::TransformListener tf_listen;

    cv::Mat vis_image;
    cv::Mat vis_image_orig;
    cv::Mat last_image;
    // Tracked corner positions, one vector per frame.
    std::vector<std::vector<cv::Point2f> > corner_trajectories;
    // The same data, filtered to surviving corners and transposed to one
    // vector per feature.
    std::vector<std::vector<cv::Point2f> > corner_trajectories_filtered_and_transposed;

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_segment;
#if !NO_ROS
    ros::NodeHandle nh;
#endif
    // Per-corner flag; 1 while the corner is still tracked successfully.
    Eigen::VectorXi good_corners;

    int num_tracking_steps;

    static unsigned char colormap[];

    static const float residual_error_comp = 0.1;

    FeatureTracker() :
            num_tracking_steps(0) {
    }

    // Segments the dominant moving cluster and executes a PR2 grasp on it.
    bool grasp();

    // Detects corners in first_image, optionally discarding corners on the
    // given (self-filter) contours.
    std::vector<cv::Point2f> findCorners(cv::Mat& first_image,
            std::vector<std::vector<cv::Point> > contours =
                    std::vector<std::vector<cv::Point> >(), cv::Mat* mask = 0);

    // Tracks the current corners into new_image with pyramidal LK flow.
    bool tracking_step(cv::Mat& new_image);

    // Accumulated residual of each trajectory under the transform sequence RTs.
    bool calcResiduals(const std::vector<Eigen::Matrix3f>& RTs,
            const std::vector<std::vector<cv::Point2f> >& corner_trajectories_t,
            Eigen::VectorXf& residuals);

    bool filterTrajectories();

    // Samples an index from the discrete distribution prob_dist.
    int draw(Eigen::VectorXf& prob_dist);

    bool selectRandomFeatures(std::vector<int>& features_idx,
            const Eigen::VectorXf& mask);

    // Generates one rigid-transform hypothesis per tracking step from
    // randomly selected non-masked features.
    bool generateHypothesis(const Eigen::VectorXf& mask,
            std::vector<Eigen::Matrix3f>& RTs);

    bool getPclPoint(int x, int y, pcl::PointXYZ& point);

    bool evaluateTrajectories();

    float sampleHypothesisSet(std::vector<int>& feature_association,
            Eigen::VectorXf& feature_per_hyp_count);
};

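// Grasping pipeline: cleans the segmented cloud, extracts a Euclidean
// cluster, fits a PCA frame to it, converts that frame into a top-down grasp
// pose in base_link, parks both arms, then approaches, closes the gripper,
// lifts, and drops the object at a fixed pose. The numeric arm poses below
// are robot-specific calibration values.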
bool FeatureTracker::grasp() {
#if !NO_ROS

    // Clustering and grasp-geometry parameters.
    double clusterTolerance = 0.08;
    int spatialLocator = 0;
    int minClusterSize = 4;
    int maxClusterSize = 200;
    double clusterEpsilon = 1;
    double preGraspAbove = 0.15;
    double underTheObject = 0.02;

    ros::Publisher cloud_msg = nh.advertise<sensor_msgs::PointCloud2>(
            "points_segmented", 1);

    ros::Publisher pre_pose = nh.advertise<geometry_msgs::PoseStamped>(
            "our_pose", 1000);

    ros::Publisher pre_grasp = nh.advertise<geometry_msgs::PoseStamped>(
            "pre_grasp", 1000);

    simple_robot_control::Robot robot;

    // Drop points without depth (x == 0.0), advancing the index only when no
    // point was erased so the element shifted into slot i is also checked.
    for (size_t i = 0; i < cloud_segment->points.size();) {
        if (cloud_segment->points[i].x == 0.0) {
            cloud_segment->erase(cloud_segment->begin() + i);
            cloud_segment->width = cloud_segment->points.size();
        } else {
            ++i;
        }
    }
    Eigen::Vector4f centroid;
    pcl::compute3DCentroid(*cloud_segment, centroid);

    // Proceed only if the segment produced a usable centroid.
    if (centroid(1) != 0.0) {
        EIGEN_ALIGN16 Eigen::Vector3f eigen_values;
        EIGEN_ALIGN16 Eigen::Matrix3f eigen_vectors;
        Eigen::Matrix3f cov;
        Eigen::Vector3f eigen_vector1;
        Eigen::Vector3f eigen_vector2;
        Eigen::Vector3f vector3rd;
        Eigen::Matrix4f pose;
        KdTreePtr clusters_tree_;

        float dotprod;
        pcl::EuclideanClusterExtraction<PointT> cluster_;
        cluster_.setClusterTolerance(clusterTolerance);
        cluster_.setSpatialLocator(spatialLocator);
        cluster_.setMinClusterSize(minClusterSize);
        cluster_.setMaxClusterSize(maxClusterSize);
        clusters_tree_ = boost::make_shared<pcl::KdTreeFLANN<PointT> >();
        clusters_tree_->setEpsilon(clusterEpsilon);
        cluster_.setSearchMethod(clusters_tree_);

        pcl::PointCloud<pcl::PointXYZRGB>::Ptr pcl_cloud_(
                new pcl::PointCloud<pcl::PointXYZRGB>());

        pcl::PointIndices::Ptr object_inliers(new pcl::PointIndices());
        std::vector<pcl::PointIndices> cluster_indices;

        // Copy the segment into an RGB cloud (the cluster extraction here is
        // typed on PointXYZRGB); the color value itself is arbitrary.
        uint8_t col = 5;

        for (size_t i = 0; i < cloud_segment->points.size(); ++i) {
            pcl::PointXYZRGB point_color;
            point_color.x = cloud_segment->points[i].x;
            point_color.y = cloud_segment->points[i].y;
            point_color.z = cloud_segment->points[i].z;
            point_color.r = col;
            point_color.g = col;
            point_color.b = col;
            pcl_cloud_->points.push_back(point_color);
        }
        cluster_.setInputCloud(pcl_cloud_);
        cluster_.extract(cluster_indices);

        // Keep only the first extracted cluster and recompute its centroid.
        if (int(cluster_indices.size()) > 0) {
            pcl::copyPointCloud(*pcl_cloud_, cluster_indices[0],
                    *cloud_segment);
            pcl::compute3DCentroid(*cloud_segment, centroid);
        }
        // PCA of the cluster: the two dominant eigenvectors span the object
        // plane; their cross product completes a right-handed frame.
        pcl::computeCovarianceMatrixNormalized(*cloud_segment, centroid, cov);
        pcl::eigen33(cov, eigen_vectors, eigen_values);

        eigen_vector1(0) = eigen_vectors(0, 2);
        eigen_vector1(1) = eigen_vectors(1, 2);
        eigen_vector1(2) = eigen_vectors(2, 2);
        eigen_vector2(0) = eigen_vectors(0, 1);
        eigen_vector2(1) = eigen_vectors(1, 1);
        eigen_vector2(2) = eigen_vectors(2, 1);
        // Should be ~0 for orthogonal eigenvectors (kept as a sanity value).
        dotprod = eigen_vector1.dot(eigen_vector2);

        vector3rd = eigen_vector1.cross(eigen_vector2);

        // Assemble the homogeneous pose from the frame axes and centroid.
        for (int i = 0; i <= 2; i++) {
            pose(i, 0) = eigen_vector1(i);
            pose(i, 1) = eigen_vector2(i);
            pose(i, 2) = vector3rd(i);
            pose(i, 3) = centroid(i);
        }
        pose(3, 0) = 0.0;
        pose(3, 1) = 0.0;
        pose(3, 2) = 0.0;
        pose(3, 3) = 1.0;
        std::cerr << "pose: " << pose << std::endl;

        // Publish the raw PCA pose in the camera frame.
        Eigen::Affine3d e2;
        e2.matrix() = pose.cast<double>();
        geometry_msgs::Pose p_msgs;

        tf::poseEigenToMsg(e2, p_msgs);

        geometry_msgs::PoseStamped msg;
        msg.pose = p_msgs;
        msg.header.frame_id = "/openni_rgb_optical_frame";
        msg.header.stamp = ros::Time::now();
        msg.header.seq = 0;
        pre_pose.publish(msg);

        ros::Time time;
        time = ros::Time::now();
        bool found_transform = tf_listen.waitForTransform(
                "/openni_rgb_optical_frame", "/base_link",
                cloud_segment->header.stamp, ros::Duration(1.0));

        // The same pose is stamped again for transformation into base_link.
        Eigen::Affine3d e3;
        e3.matrix() = pose.cast<double>();
        geometry_msgs::Pose p_msgs2;

        tf::poseEigenToMsg(e3, p_msgs2);
        geometry_msgs::PoseStamped msg2;

        msg2.pose = p_msgs2;
        msg2.header.frame_id = "/openni_rgb_optical_frame";
        msg2.header.stamp = ros::Time::now();
        msg2.header.seq = 0;

        if (found_transform) {
            geometry_msgs::PoseStamped msg_base_link;
            tf_listen.transformPose("/base_link", cloud_segment->header.stamp,
                    msg2, msg2.header.frame_id, msg_base_link);
            // Pre-grasp pose: hover preGraspAbove meters above the object.
            msg_base_link.pose.position.z = msg_base_link.pose.position.z
                    + preGraspAbove;

            Eigen::Vector3f orth(0, 0, -1);

            Eigen::Quaternionf quaternion;
            quaternion.x() = msg_base_link.pose.orientation.x;
            quaternion.y() = msg_base_link.pose.orientation.y;
            quaternion.z() = msg_base_link.pose.orientation.z;
            quaternion.w() = msg_base_link.pose.orientation.w;
            Eigen::Matrix3f mat_last_new;
            Eigen::Matrix3f mat_last;
            mat_last = quaternion.toRotationMatrix();

            // Swap the x and z axes of the PCA frame so the gripper
            // approaches along the object's main axis.
            mat_last_new = mat_last;
            mat_last_new(0, 2) = -mat_last(0, 0);
            mat_last_new(1, 2) = -mat_last(1, 0);
            mat_last_new(2, 2) = -mat_last(2, 0);

            mat_last_new(0, 0) = mat_last(0, 2);
            mat_last_new(1, 0) = mat_last(1, 2);
            mat_last_new(2, 0) = mat_last(2, 2);
            Eigen::Vector3f xaxis;
            Eigen::Vector3f yaxis;
            Eigen::Vector3f zaxis;
            Eigen::Vector3f xaxis_new;
            Eigen::Vector3f yaxis_new;
            Eigen::Vector3f zaxis_new;
            xaxis(0) = mat_last_new(0, 0);
            xaxis(1) = mat_last_new(1, 0);
            xaxis(2) = mat_last_new(2, 0);

            yaxis(0) = mat_last_new(0, 1);
            yaxis(1) = mat_last_new(1, 1);
            yaxis(2) = mat_last_new(2, 1);

            zaxis(0) = mat_last_new(0, 2);
            zaxis(1) = mat_last_new(1, 2);
            zaxis(2) = mat_last_new(2, 2);

            // Rebuild a right-handed frame around the world down direction so
            // the final grasp is top-down regardless of object tilt.
            yaxis_new = zaxis.cross(orth);
            zaxis_new = orth.cross(yaxis_new);
            xaxis_new = yaxis_new.cross(zaxis_new);

            mat_last_new.block<3, 1>(0, 0) = xaxis_new;
            mat_last_new.block<3, 1>(0, 1) = yaxis_new;
            mat_last_new.block<3, 1>(0, 2) = zaxis_new;

            Eigen::Quaternionf quaternion_last(mat_last_new);

            msg_base_link.pose.orientation.x = quaternion_last.x();
            msg_base_link.pose.orientation.y = quaternion_last.y();
            msg_base_link.pose.orientation.z = quaternion_last.z();
            msg_base_link.pose.orientation.w = quaternion_last.w();

            // Publish the segmented cluster and the grasp pose for inspection.
            sensor_msgs::PointCloud2 cloudMessage;

            pcl::toROSMsg(*cloud_segment, cloudMessage);

            cloudMessage.header.stamp = ros::Time::now();
            cloudMessage.header.frame_id = "/openni_rgb_optical_frame";
            cloud_msg.publish(cloudMessage);

            pre_grasp.publish(msg_base_link);

            tf::Stamped<tf::Transform> point_stamped;
            tf::poseStampedMsgToTF(msg_base_link, point_stamped);

            tf::StampedTransform tf_l2(point_stamped, ros::Time::now(),
                    "base_link", "doesnt_matter");

            // Move the left arm to a fixed parking pose first.
            geometry_msgs::PoseStamped msg_base_link_pre;

            msg_base_link_pre.header.frame_id = "/openni_rgb_optical_frame";
            msg_base_link_pre.header.stamp = msg_base_link.header.stamp;
            msg_base_link_pre.header.seq = 0;

            double parkingXl = 0.437;
            double parkingYl = 0.356;
            double parkingZl = 0.762;
            double parkingOrientationXl = 0.048;
            double parkingOrientationYl = 0.133;
            double parkingOrientationZl = -0.352;
            double parkingOrientationWl = 0.925;

            msg_base_link_pre.pose.position.x = parkingXl;
            msg_base_link_pre.pose.position.y = parkingYl;
            msg_base_link_pre.pose.position.z = parkingZl;
            msg_base_link_pre.pose.orientation.x = parkingOrientationXl;
            msg_base_link_pre.pose.orientation.y = parkingOrientationYl;
            msg_base_link_pre.pose.orientation.z = parkingOrientationZl;
            msg_base_link_pre.pose.orientation.w = parkingOrientationWl;

            tf::Stamped<tf::Transform> point_stamped_pre;
            tf::poseStampedMsgToTF(msg_base_link_pre, point_stamped_pre);

            tf::StampedTransform tf_l2_pre(point_stamped_pre, ros::Time::now(),
                    "base_link", "doesnt_matter");

            robot.left_arm.moveGrippertoPose(tf_l2_pre);

            // Park the right arm out of the way as well.
            geometry_msgs::PoseStamped msg_base_link_pre_r;
            msg_base_link_pre_r.header.frame_id = "/openni_rgb_optical_frame";
            msg_base_link_pre_r.header.stamp = msg_base_link.header.stamp;
            msg_base_link_pre_r.header.seq = 0;

            double parkingXr = 0.546;
            double parkingYr = -0.271;
            double parkingZr = 0.750;
            double parkingOrientationXr = -0.012;
            double parkingOrientationYr = 0.198;
            double parkingOrientationZr = 0.336;
            double parkingOrientationWr = 0.921;

            msg_base_link_pre_r.pose.position.x = parkingXr;
            msg_base_link_pre_r.pose.position.y = parkingYr;
            msg_base_link_pre_r.pose.position.z = parkingZr;
            msg_base_link_pre_r.pose.orientation.x = parkingOrientationXr;
            msg_base_link_pre_r.pose.orientation.y = parkingOrientationYr;
            msg_base_link_pre_r.pose.orientation.z = parkingOrientationZr;
            msg_base_link_pre_r.pose.orientation.w = parkingOrientationWr;

            tf::Stamped<tf::Transform> point_stamped_pre_r;
            tf::poseStampedMsgToTF(msg_base_link_pre_r, point_stamped_pre_r);

            tf::StampedTransform tf_l2_pre_r(point_stamped_pre_r,
                    ros::Time::now(), "base_link", "doesnt_matter");
            robot.right_arm.moveGrippertoPose(tf_l2_pre_r);

            // Approach: pre-grasp above the object, open the gripper, descend
            // to slightly below the object centroid, close, and lift again.
            robot.left_arm.moveGrippertoPose(tf_l2);
            robot.left_gripper.open();

            msg_base_link.pose.position.z = msg_base_link.pose.position.z
                    - preGraspAbove - underTheObject;
            tf::Stamped<tf::Transform> point_stamped_aim;
            tf::poseStampedMsgToTF(msg_base_link, point_stamped_aim);
            tf::StampedTransform tf_l_aim(point_stamped_aim, ros::Time::now(),
                    "base_link", "doesnt_matter");

            robot.left_arm.moveGrippertoPose(tf_l_aim);

            robot.left_gripper.close(50);
            robot.left_arm.moveGrippertoPose(tf_l2);

            // Carry the object to a fixed drop pose and release it.
            double throwXr = 0.550;
            double throwYr = 0.711;
            double throwZr = 0.804;

            msg_base_link.pose.position.x = throwXr;
            msg_base_link.pose.position.y = throwYr;
            msg_base_link.pose.position.z = throwZr;

            tf::Stamped<tf::Transform> point_stamped_throw;
            tf::poseStampedMsgToTF(msg_base_link, point_stamped_throw);
            tf::StampedTransform tf_l_throw(point_stamped_throw,
                    ros::Time::now(), "base_link", "doesnt_matter");
            robot.left_arm.moveGrippertoPose(tf_l_throw);
            robot.left_gripper.open();
        }
    }
#endif
    return true;
}
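// Detects up to MAX_CORNERS Shi-Tomasi corners, refines them to subpixel
// accuracy, and discards corners that fall on the supplied self-filter
// contours (i.e. on the robot's own body).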
std::vector<cv::Point2f> FeatureTracker::findCorners(cv::Mat& first_image,
        std::vector<std::vector<cv::Point> > contours, cv::Mat* mask) {
    num_tracking_steps = 0;
    cv::Mat first_image_grey;
    cv::cvtColor(first_image, first_image_grey, CV_BGR2GRAY);
    std::vector<cv::Point2f> corners;
    cv::goodFeaturesToTrack(first_image_grey, corners, MAX_CORNERS, quality,
            min_distance);

    cv::cornerSubPix(first_image_grey, corners, cv::Size(WIN_SIZE, WIN_SIZE),
            cv::Size(-1, -1),
            cv::TermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));

    // Mark corners within 1.5 px of a self-filter contour with the sentinel
    // position (1.0, 1.0); they are erased below.
    float diff;
    for (std::vector<cv::Point2f>::size_type k = 0; k != corners.size(); k++) {
        for (std::vector<std::vector<cv::Point> >::size_type i = 0;
                i != contours.size(); i++) {

            std::vector<cv::Point>& current_contour = contours[i];
            for (std::vector<cv::Point>::size_type j = 0;
                    j != contours[i].size(); j++) {

                diff = sqrt(
                        (corners[k].x - current_contour[j].x)
                                * (corners[k].x - current_contour[j].x)
                                + (corners[k].y - current_contour[j].y)
                                        * (corners[k].y - current_contour[j].y));
                if (diff < 1.5) {
                    corners[k].x = 1.0;
                    corners[k].y = 1.0;
                }
            }
        }
    }

    // Erase all corners that carry the sentinel position.
    std::vector<cv::Point2f>::iterator iter = corners.begin();
    while (iter != corners.end()) {
        if ((fabs(iter->x - 1.0) < 0.0000001)
                && (fabs(iter->y - 1.0) < 0.0000001)) {
            iter = corners.erase(iter);
        } else {
            ++iter;
        }
    }
    good_corners = Eigen::VectorXi::Ones(corners.size());

    corner_trajectories.push_back(corners);
    last_image = first_image_grey;

    // Draw the initial corner set into the visualization image.
    vis_image_orig = first_image;
    vis_image = vis_image_orig.clone();
    for (unsigned int i = 0; i < corners.size(); i++) {
        int color_idx = ((num_tracking_steps * 11) % 256) * 3;
        cv::circle(
                vis_image,
                corners[i],
                3,
                cv::Scalar(colormap[color_idx], colormap[color_idx + 1],
                        colormap[color_idx + 2], 0), -1);
    }

    return corners;
}
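// Advances all current corners into new_image with one step of pyramidal LK
// optical flow and invalidates corners the tracker lost.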
bool FeatureTracker::tracking_step(cv::Mat& new_image) {
    num_tracking_steps++;
    cv::Mat new_image_grey;
    cv::cvtColor(new_image, new_image_grey, CV_BGR2GRAY);
    std::vector<cv::Point2f> new_corners;
    std::vector<unsigned char> status;
    std::vector<float> error;
    cv::calcOpticalFlowPyrLK(last_image, new_image_grey,
            corner_trajectories.back(), new_corners, status, error,
            cv::Size(WIN_SIZE, WIN_SIZE), 3,
            cv::TermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));

    last_image = new_image_grey;

    corner_trajectories.push_back(new_corners);
    // A corner stays "good" only while the flow reports it as tracked.
    for (int i = 0; i < good_corners.rows(); i++) {
        good_corners[i] = good_corners[i] & status[i];
    }

    vis_image_orig = new_image;
    vis_image = vis_image_orig.clone();

    return true;
}
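// Residual of a trajectory under a motion hypothesis: for each step c the
// hypothesis transform RTs[c] is applied to the corner in homogeneous image
// coordinates, and the squared distance to the actually tracked position,
// less a slack of residual_error_comp, is accumulated.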
bool FeatureTracker::calcResiduals(const std::vector<Eigen::Matrix3f>& RTs,
        const std::vector<std::vector<cv::Point2f> >& corner_trajectories_t,
        Eigen::VectorXf& residuals) {

    residuals = Eigen::VectorXf(corner_trajectories_t.size());
    for (unsigned int t = 0; t < corner_trajectories_t.size(); t++) {
        const std::vector<cv::Point2f>& current_trajectory =
                corner_trajectories_t[t];

        // One transform per tracking step.
        assert(RTs.size() == current_trajectory.size() - 1);
        float residual = 0;
        for (unsigned int c = 0; c < current_trajectory.size() - 1; c++) {
            Eigen::Vector3f old_corner(current_trajectory[c].x,
                    current_trajectory[c].y, 1.0);
            Eigen::Vector3f new_corner(current_trajectory[c + 1].x,
                    current_trajectory[c + 1].y, 1.0);
            Eigen::Vector3f res = new_corner - (RTs[c] * old_corner);

            residual += std::max(0.0f, res.squaredNorm() - residual_error_comp);
        }
        residuals[t] = residual;
    }
    return true;
}

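// Compacts the trajectory matrix to corners that survived all tracking steps
// and transposes it, so that each entry of
// corner_trajectories_filtered_and_transposed is one feature's full track.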
bool FeatureTracker::filterTrajectories() {

    std::vector<std::vector<cv::Point2f> > corner_trajectories_filtered;

    int num_good_features = good_corners.sum();

    // Keep only corners that survived every tracking step.
    for (unsigned int j = 0; j < corner_trajectories.size(); j++) {
        std::vector<cv::Point2f> features_filtered;
        features_filtered.reserve(num_good_features);
        for (int i = 0; i < good_corners.rows(); i++) {
            if (good_corners[i]) {
                features_filtered.push_back(corner_trajectories[j][i]);
            }
        }
        corner_trajectories_filtered.push_back(features_filtered);
    }

    // Transpose from per-frame point lists to per-feature trajectories. Note
    // that only num_good_features entries exist per frame after filtering.
    corner_trajectories_filtered_and_transposed.clear();
    corner_trajectories_filtered_and_transposed.reserve(num_good_features);
    for (int i = 0; i < num_good_features; i++) {
        std::vector<cv::Point2f> features_trans;
        features_trans.reserve(corner_trajectories_filtered.size());
        for (unsigned int j = 0; j < corner_trajectories_filtered.size(); j++) {
            features_trans.push_back(corner_trajectories_filtered[j][i]);
        }
        corner_trajectories_filtered_and_transposed.push_back(features_trans);
    }

    return true;
}
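// Samples an index from the discrete distribution prob_dist (assumed
// normalized) by inverting its CDF at a uniform random value; e.g. for
// prob_dist = (0.2, 0.5, 0.3), index 1 is returned with probability 0.5.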
int FeatureTracker::draw(Eigen::VectorXf& prob_dist) {
    float pos = (float) rand() / RAND_MAX;
    int idx = 0;
    float accumulator = 0.0f;
    // Walk the CDF until it exceeds the uniform sample; the loop bound clamps
    // to the last index to guard against accumulated rounding error.
    for (; idx < prob_dist.rows() - 1; idx++) {
        accumulator += prob_dist[idx];
        if (accumulator > pos)
            break;
    }
    return idx;
}
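// Selects two distinct feature indices for hypothesis generation: the first
// uniformly among non-masked features, the second weighted by a Gaussian
// (std. dev. 200 px) of its image distance to the first, so that nearby
// features, which more likely belong to the same rigid body, are preferred.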
bool FeatureTracker::selectRandomFeatures(std::vector<int>& features_idx,
        const Eigen::VectorXf& mask) {

    float variance = 200.0f * 200.0f;

    int num_non_masked_elements = mask.sum();
    if (num_non_masked_elements < 3) {
        return false;
    }
    // Maps indices over the non-masked subset back to feature indices. A
    // std::vector avoids the leak the former raw array had on early return.
    std::vector<int> map_array(num_non_masked_elements);
    int counter = 0;
    for (int i = 0; i < mask.rows(); i++) {
        if (mask[i] > 0.0) {
            map_array[counter] = i;
            counter++;
        }
    }

    int idx1 = rand() % num_non_masked_elements;
    Eigen::VectorXf prob_dist(num_non_masked_elements);
    cv::Point2f current_point =
            corner_trajectories_filtered_and_transposed[map_array[idx1]].back();
    for (int i = 0; i < num_non_masked_elements; i++) {
        if (i == idx1) {
            prob_dist[i] = 0.0;
            continue;
        }
        cv::Point2f other =
                corner_trajectories_filtered_and_transposed[map_array[i]].back();
        float dist_sqr = pow((other.x - current_point.x), 2)
                + pow((other.y - current_point.y), 2);

        float dist_sqr_n = dist_sqr / variance;
        prob_dist[i] = exp(-0.5 * dist_sqr_n);
    }

    float normalizer = prob_dist.sum();

    if (normalizer < 0.0000000001) {
        return false;
    }

    prob_dist = prob_dist / normalizer;
    int idx2 = draw(prob_dist);

    assert(map_array[idx1] < mask.rows());
    assert(map_array[idx2] < mask.rows());
    features_idx.clear();
    features_idx.push_back(map_array[idx1]);
    features_idx.push_back(map_array[idx2]);

    return true;
}
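// Generates one motion hypothesis: picks two random non-masked features and
// estimates, for every tracking step, the rigid transform between their
// consecutive positions.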
bool FeatureTracker::generateHypothesis(const Eigen::VectorXf& mask,
        std::vector<Eigen::Matrix3f>& RTs) {

    int num_elements = mask.sum();

    if (num_elements < 3)
        return false;

    std::vector<int> features_idx;
    if (!selectRandomFeatures(features_idx, mask))
        return false;

    RTs.reserve(num_tracking_steps);

    // Estimate one rigid transform per tracking step from the selected
    // feature correspondences.
    for (int i = 1; i < num_tracking_steps + 1; i++) {

        cv::Mat new_points(features_idx.size(), 1, CV_32FC2);
        cv::Mat old_points(features_idx.size(), 1, CV_32FC2);
        for (unsigned int j = 0; j < features_idx.size(); j++) {
            cv::Point2f& new_point =
                    corner_trajectories_filtered_and_transposed[features_idx[j]][i];
            new_points.ptr<float>()[j * 2] = new_point.x;
            new_points.ptr<float>()[j * 2 + 1] = new_point.y;
            cv::Point2f& old_point =
                    corner_trajectories_filtered_and_transposed[features_idx[j]][i - 1];
            old_points.ptr<float>()[j * 2] = old_point.x;
            old_points.ptr<float>()[j * 2 + 1] = old_point.y;
        }

        // Embed the 2x3 transform into a 3x3 homogeneous matrix.
        cv::Mat RT_cv = cvEstimateRigidTransformFrom2(old_points, new_points);
        Eigen::Matrix3f RT = Eigen::Matrix3f::Identity();
        Eigen::Matrix<float, 2, 3> rt_temp;
        cv::cv2eigen(RT_cv, rt_temp);
        RT.block<2, 3>(0, 0) = rt_temp;
        RTs.push_back(RT);
    }

    return true;
}

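// Looks up the 3D point under pixel (x, y) in the stored organized cloud,
// assuming a fixed row width of 640 pixels (VGA depth resolution); returns
// false where the depth is NaN.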
bool FeatureTracker::getPclPoint(int x, int y, pcl::PointXYZ& point) {
    if (isnan(cloud->points[x + 640 * y].x))
        return false;
    else {
        point = cloud->points[x + 640 * y];
        return true;
    }
}
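// Runs sampleHypothesisSet num_samples times, keeps the assignment with the
// lowest score, and collects the 3D points of the largest moving cluster
// (ignoring the static background, hypothesis 0) into cloud_segment.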
bool FeatureTracker::evaluateTrajectories() {

    int num_samples = 15;
    filterTrajectories();
    Eigen::VectorXf scores = Eigen::VectorXf::Zero(num_samples);
    std::vector<std::vector<int> > feature_associations;
    std::vector<Eigen::VectorXf> features_per_hyp;

    // Sample several hypothesis sets and record their scores.
    for (int i = 0; i < num_samples; i++) {
        std::vector<int> feature_association;
        Eigen::VectorXf feature_per_hyp_count;
        scores[i] = sampleHypothesisSet(feature_association,
                feature_per_hyp_count);
        feature_associations.push_back(feature_association);
        features_per_hyp.push_back(feature_per_hyp_count);
    }

    // The best hypothesis set is the one with the lowest summed residual.
    int best_hypothesis_set_idx;
    scores.minCoeff(&best_hypothesis_set_idx);

    std::vector<int>& final_feature_association =
            feature_associations[best_hypothesis_set_idx];
    Eigen::VectorXf& final_feature_per_hyp_count =
            features_per_hyp[best_hypothesis_set_idx];

    // Hypothesis 0 is the static background (identity); ignore it when
    // picking the dominant moving cluster.
    final_feature_per_hyp_count[0] = 0;

    int best_single_hyp;
    final_feature_per_hyp_count.maxCoeff(&best_single_hyp);

    int idx_max_features = best_single_hyp;

    int features_best_hyp = final_feature_per_hyp_count[best_single_hyp];

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloudnew(
            new pcl::PointCloud<pcl::PointXYZ>);
    cloud_segment = cloudnew;

    // Draw each clustered feature and collect the 3D points belonging to the
    // dominant moving cluster into cloud_segment.
    vis_image = vis_image_orig.clone();

    for (unsigned int i = 0;
            i < corner_trajectories_filtered_and_transposed.size(); i++) {
        int idx = final_feature_association[i];
        if (idx >= 0) {
            cv::circle(
                    vis_image,
                    corner_trajectories_filtered_and_transposed[i].back(), 3,
                    cv::Scalar(colormap[3 * idx], colormap[3 * idx + 1],
                            colormap[3 * idx + 2], 0), 5);
        }
#if !NO_ROS
        if ((idx >= 1) && (idx == idx_max_features)) {
            pcl::PointXYZ point;
            if (getPclPoint(
                    corner_trajectories_filtered_and_transposed[i].back().x,
                    corner_trajectories_filtered_and_transposed[i].back().y,
                    point)) {
                cloud_segment->points.push_back(point);
            }
        }
#endif
    }

    return true;
}
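// Sequential hypothesis sampling: starting from the identity (static
// background), residuals are computed per hypothesis, features already
// explained below mask_threshold are masked out, and a new hypothesis is
// generated from the remainder until num_hypotheses are found or no features
// are left. Each feature is then assigned to its best hypothesis, clusters
// with too few supporting features are dissolved, and the sum of the
// count-normalized per-hypothesis residuals is returned as the score.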
float FeatureTracker::sampleHypothesisSet(std::vector<int>& feature_association,
        Eigen::VectorXf& feature_per_hyp_count) {
    int num_hypotheses = 10;
    float mask_threshold = 80;

    Eigen::MatrixXf residualMatrix(
            corner_trajectories_filtered_and_transposed.size(), num_hypotheses);

    std::vector<std::vector<Eigen::Matrix3f> > hypothesis;

    // Hypothesis 0: the static background, i.e. identity transforms.
    std::vector<Eigen::Matrix3f> RTs_I;
    RTs_I.resize(num_tracking_steps, Eigen::Matrix3f::Identity());

    hypothesis.push_back(RTs_I);

    Eigen::VectorXf mask = Eigen::VectorXf::Ones(
            corner_trajectories_filtered_and_transposed.size());
    bool points_left;
    int i = 1;
    for (; i < num_hypotheses; i++) {
        Eigen::VectorXf residuals;
        std::vector<Eigen::Matrix3f>& last_hypothesis = hypothesis[i - 1];

        calcResiduals(last_hypothesis,
                corner_trajectories_filtered_and_transposed, residuals);

        residualMatrix.col(i - 1) = residuals;

        // Mask out features already explained by some hypothesis (residual
        // below threshold); only the remainder seeds the next hypothesis.
        Eigen::VectorXf mask_bin =
                (residuals.array() - mask_threshold).matrix();
        for (int j = 0; j < mask_bin.rows(); j++) {
            mask_bin[j] = (mask_bin[j] > 0.0f) ? 1.0f : 0.0f;
        }

        Eigen::ArrayXf mask_array = mask.array();
        Eigen::ArrayXf mask_array_bin = mask_bin.array();

        mask = mask_array.min(mask_array_bin);

        std::vector<Eigen::Matrix3f> new_hypothesis;

        points_left = generateHypothesis(mask, new_hypothesis);
        if (!points_left)
            break;

        hypothesis.push_back(new_hypothesis);
    }

    int& actual_num_hypothesis = i;

    if (actual_num_hypothesis == num_hypotheses) {
        Eigen::VectorXf residuals;

        calcResiduals(hypothesis.back(),
                corner_trajectories_filtered_and_transposed, residuals);

        residualMatrix.col(residualMatrix.cols() - 1) = residuals;
    } else {
        // Fewer hypotheses than planned: shrink the residual matrix.
        Eigen::MatrixXf residualMatrix2 = residualMatrix.block(0, 0,
                residualMatrix.rows(), actual_num_hypothesis);
        residualMatrix = residualMatrix2;
    }

    Eigen::VectorXf summed_residuals = residualMatrix.colwise().sum();

    Eigen::VectorXf summed_residuals_per_h = Eigen::VectorXf::Zero(
            residualMatrix.cols());

    // Assign each feature to the hypothesis with the smallest residual.
    std::vector<int> best_feature_h_idx;
    best_feature_h_idx.resize(
            corner_trajectories_filtered_and_transposed.size(), 0);
    // Start counts at a tiny epsilon to avoid division by zero below.
    feature_per_hyp_count = Eigen::VectorXf::Ones(residualMatrix.cols());
    feature_per_hyp_count *= 0.0000001f;
    int num_outliers = 0;
    for (unsigned int i = 0;
            i < corner_trajectories_filtered_and_transposed.size(); i++) {
        int idx;
        residualMatrix.row(i).minCoeff(&idx);

        // Features still masked after the last round are outliers.
        if (mask[i] > 0.0f) {
            num_outliers++;
            best_feature_h_idx[i] = -1;
            continue;
        }

        best_feature_h_idx[i] = idx;
        feature_per_hyp_count[idx]++;
        summed_residuals_per_h[idx] += residualMatrix(i, idx);
    }

    // Unassign features of hypotheses supported by too few features.
    vis_image = vis_image_orig.clone();
    for (unsigned int i = 0;
            i < corner_trajectories_filtered_and_transposed.size(); i++) {
        int idx = best_feature_h_idx[i];
        if (!(idx >= 0 && feature_per_hyp_count[idx] > 5.1)) {
            best_feature_h_idx[i] = -1;
        }
    }

    summed_residuals.array() /= feature_per_hyp_count.array();

    summed_residuals_per_h.array() /= feature_per_hyp_count.array();

    feature_association = best_feature_h_idx;
    // Note: the outlier penalty is computed but currently not added to the
    // returned score.
    float outlier_penalty = num_outliers * 100.0;

    return summed_residuals_per_h.sum();
}

// Twelve distinct RGB colors used to visualize cluster membership.
unsigned char FeatureTracker::colormap[36] = { 255, 0, 0, 0, 255, 0, 0, 0, 255,
        255, 255, 0, 255, 0, 255, 0, 255, 255, 127, 0, 0, 0, 127, 0, 0, 0, 127,
        127, 127, 0, 127, 0, 127, 0, 127, 127, };
#if NO_ROS
// Offline mode: track corners through a list of image files given on the
// command line. FeatureTracker still owns ROS objects (the tf listener), so
// ros::init is required even in this mode.
int main(int argc, char** argv) {
    ros::init(argc, argv, "image_tracker");

    if (argc == 1) {
        std::cerr << "Usage: " << argv[0] << " <images>" << std::endl;
        exit(0);
    }
    cv::namedWindow("tracker", 0);

    int num_images = argc - 1;
    int num_images_loaded = 0;
    IplImage** all_images = new IplImage*[num_images];

    // Seed the tracker with corners from the first image.
    FeatureTracker ft;
    all_images[0] = cvLoadImage(argv[1]);
    num_images_loaded++;
    cv::Mat img(all_images[0]);
    ft.findCorners(img);

    cv::imshow("tracker", ft.vis_image);
    cv::waitKey();

    // Track through the remaining images, evaluating every fifth frame;
    // ESC aborts.
    int c;
    for (int i = 1; i < num_images; i++) {
        all_images[i] = cvLoadImage(argv[i + 1]);
        num_images_loaded++;
        printf("loading image %s\n", argv[i + 1]);
        cv::Mat img(all_images[i]);
        ft.tracking_step(img);

        if ((i - 1) % 5 == 0) {
            Timer t;
            ft.evaluateTrajectories();
            printf("took %f secs\n", t.stop());
            cv::imshow("tracker", ft.vis_image);
            c = cvWaitKey();
            if ((char) c == 27)
                break;
        }
    }

    for (int i = 0; i < num_images_loaded; i++) {
        cvReleaseImage(&all_images[i]);
    }
    delete[] all_images;
}

#else

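// ROS mode: repeatedly grabs a depth cloud and an RGB image, runs one
// tracking step plus trajectory evaluation per iteration, and, on PR2 with
// GRASPING, masks out the robot body via the camera self-filter and triggers
// a grasp when 'g' is pressed.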
int main(int argc, char** argv) {

    ros::init(argc, argv, "image_tracker");
    sensor_msgs::CvBridge _bridge;
    cv::namedWindow("tracker", 0);

    // Select the input camera topic depending on the target robot.
#if PR2
    std::string input_image_topic =
            argc == 2 ? argv[1] : "/kinect/rgb/image_color";
#if !GRASPING
    input_image_topic = argc == 2 ? argv[1] : "/prosilica/image_rect_color";
#endif
#else
    std::string input_image_topic =
            argc == 2 ? argv[1] : "/camera/rgb/image_color";
#endif

    FeatureTracker ft;

    // Grab an initial point cloud and hand it to the tracker.
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1(
            new pcl::PointCloud<pcl::PointXYZ>);

    sensor_msgs::PointCloud2ConstPtr pclmasg_ptr;
#if PR2
    pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>(
            "/kinect/depth/points");
#else
    pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>(
            "/camera/depth/points");
#endif
    pcl::fromROSMsg(*pclmasg_ptr, *cloud1);

    ft.cloud = cloud1;

    sensor_msgs::ImageConstPtr imgmasg_ptr = ros::topic::waitForMessage<
            sensor_msgs::Image>(input_image_topic, ros::Duration(5.0));
    IplImage* ipl_img = _bridge.imgMsgToCv(imgmasg_ptr, "passthrough");
    cv::Mat img(ipl_img);
#if PR2 && GRASPING
    // Query the camera self-filter for a mask of the robot's own body, blank
    // out those image regions and pass their contours to findCorners so no
    // corners are placed on the robot itself.
    ros::ServiceClient svc = ft.nh.serviceClient<camera_self_filter::mask>(
            "self_mask");
    camera_self_filter::mask servicecall;
    servicecall.request.header.frame_id = "openni_rgb_optical_frame";
    servicecall.request.header.stamp = ros::Time::now();
    svc.call(servicecall);
    sensor_msgs::ImageConstPtr maskptr = boost::make_shared<sensor_msgs::Image>(
            boost::ref(servicecall.response.mask_image));

    IplImage* ipl_mask = _bridge.imgMsgToCv(maskptr, "passthrough");
    cvDilate(ipl_mask, ipl_mask, 0, 3);
    cvNot(ipl_mask, ipl_mask);
    cv::Mat mask(ipl_mask);

    cv::Mat ROI_mat = cv::Mat::zeros(img.rows, img.cols, CV_8U);

    cv::cvtColor(img, ROI_mat, CV_RGB2GRAY);
    cv::Mat img_color = cv::Mat::zeros(mask.rows, mask.cols, CV_8UC3);
    cv::cvtColor(mask, img_color, CV_GRAY2RGB);
    cv::bitwise_and(img, img_color, img);

    std::vector<std::vector<cv::Point> > contours;
    cv::threshold(mask, mask, 100, 255, CV_THRESH_BINARY);
    cv::Canny(mask, mask, 10, 50, 3);
    cv::findContours(mask, contours, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);

    ft.findCorners(img, contours, &mask);
#else
    ft.findCorners(img);
#endif

    cv::imshow("tracker", ft.vis_image);
    cv::waitKey();

    int c;
    // Main loop: fetch a fresh cloud and image, run one tracking step and a
    // trajectory evaluation; ESC quits, 'g' (with PR2 && GRASPING) grasps.
    for (;;) {

        pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1(
                new pcl::PointCloud<pcl::PointXYZ>);

        sensor_msgs::PointCloud2ConstPtr pclmasg_ptr;
#if PR2
        pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>(
                "/kinect/depth/points");
#else
        pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>(
                "/camera/depth/points");
#endif
        pcl::fromROSMsg(*pclmasg_ptr, *cloud1);

        ft.cloud = cloud1;

        sensor_msgs::ImageConstPtr imgmasg_ptr = ros::topic::waitForMessage<
                sensor_msgs::Image>(input_image_topic, ros::Duration(5.0));
        IplImage* ipl_img = _bridge.imgMsgToCv(imgmasg_ptr, "passthrough");
        cv::Mat img(ipl_img);
#if PR2 && GRASPING
        // Re-apply the self-filter mask to the fresh image.
        ros::ServiceClient svc = ft.nh.serviceClient<camera_self_filter::mask>(
                "self_mask");
        camera_self_filter::mask servicecall;
        servicecall.request.header.frame_id = "openni_rgb_optical_frame";
        servicecall.request.header.stamp = ros::Time::now();
        svc.call(servicecall);
        sensor_msgs::ImageConstPtr maskptr = boost::make_shared<
                sensor_msgs::Image>(
                boost::ref(servicecall.response.mask_image));

        IplImage* ipl_mask = _bridge.imgMsgToCv(maskptr, "passthrough");
        cvDilate(ipl_mask, ipl_mask, 0, 3);
        cvNot(ipl_mask, ipl_mask);
        cv::Mat mask(ipl_mask);

        cv::Mat ROI_mat = cv::Mat::zeros(img.rows, img.cols, CV_8U);

        cv::cvtColor(img, ROI_mat, CV_RGB2GRAY);
        cv::Mat img_color = cv::Mat::zeros(mask.rows, mask.cols, CV_8UC3);
        cv::cvtColor(mask, img_color, CV_GRAY2RGB);
        cv::bitwise_and(img, img_color, img);
#endif

        Timer t;
        ft.tracking_step(img);
        ft.evaluateTrajectories();
        cv::imshow("tracker", ft.vis_image);
        c = cvWaitKey();
        if ((char) c == 27)
            break;
#if PR2 && GRASPING // grasping mode
        else if ((char) c == 'g') {
            ft.grasp();
        }
#endif
        printf("took %f secs\n", t.stop());
    }

    return 0;
}

#endif