$search
00001 00034 #define NO_ROS 0 00035 #define PR2 0 00036 #define GRASPING 0 00037 00038 #include "cv.h" 00039 #include "highgui.h" 00040 #include <stdio.h> 00041 #include <ctype.h> 00042 #include <Eigen/Eigen> 00043 #include <assert.h> 00044 #include <iostream> 00045 #include <set> 00046 #include <opencv2/core/eigen.hpp> 00047 #include <cmath> 00048 #include <ros/ros.h> 00049 #include "sensor_msgs/Image.h" 00050 #include "cv_bridge/CvBridge.h" 00051 #include "camera_self_filter/mask.h" 00052 #include "Timer.h" 00053 #include <tf_conversions/tf_eigen.h> 00054 #include <eigen_conversions/eigen_msg.h> 00055 #include <pcl/point_cloud.h> 00056 #include <pcl/point_types.h> 00057 #include <pcl/ros/conversions.h> 00058 #include <pcl/filters/voxel_grid.h> 00059 #include <pcl/features/feature.h> 00060 #include "pcl/segmentation/extract_clusters.h" 00061 #include <pcl/filters/extract_indices.h> 00062 #include <pcl/features/normal_3d.h> 00063 #include <math.h> 00064 #include "pcl/common/common.h" 00065 #include <simple_robot_control/robot_control.h> 00066 #include <tf/transform_listener.h> 00067 #include <pcl/common/angles.h> 00068 #include "pcl/io/pcd_io.h" 00069 #include "pcl_ros/transforms.h" 00070 typedef pcl::PointXYZRGB PointT; 00071 typedef pcl::KdTree<PointT>::Ptr KdTreePtr; 00072 00073 00074 using std::cout; 00075 using std::endl; 00076 00077 /*** 00078 * Retrives rotation and translation minimzing the error of the rigid transformation between two sets of points. 00079 * Modified opencv version. 00080 */ 00081 static void icvGetRTMatrix(const CvPoint2D32f* a, const CvPoint2D32f* b, 00082 int count, CvMat* M) { 00083 00084 double sa[16], sb[4], m[4], *om = M->data.db; 00085 CvMat A = cvMat(4, 4, CV_64F, sa), B = cvMat(4, 1, CV_64F, sb); 00086 CvMat MM = cvMat(4, 1, CV_64F, m); 00087 00088 int i; 00089 00090 memset(sa, 0, sizeof(sa)); 00091 memset(sb, 0, sizeof(sb)); 00092 00093 for (i = 0; i < count; i++) { 00094 sa[0] += a[i].x * a[i].x + a[i].y * a[i].y; 00095 sa[1] += 0; 00096 sa[2] += a[i].x; 00097 sa[3] += a[i].y; 00098 00099 sa[4] += 0; 00100 sa[5] += a[i].x * a[i].x + a[i].y * a[i].y; 00101 sa[6] += -a[i].y; 00102 sa[7] += a[i].x; 00103 00104 sa[8] += a[i].x; 00105 sa[9] += -a[i].y; 00106 sa[10] += 1; 00107 sa[11] += 0; 00108 00109 sa[12] += a[i].y; 00110 sa[13] += a[i].x; 00111 sa[14] += 0; 00112 sa[15] += 1; 00113 00114 sb[0] += a[i].x * b[i].x + a[i].y * b[i].y; 00115 sb[1] += a[i].x * b[i].y - a[i].y * b[i].x; 00116 sb[2] += b[i].x; 00117 sb[3] += b[i].y; 00118 } 00119 00120 cvSolve(&A, &B, &MM, CV_SVD); 00121 00122 om[0] = om[4] = m[0]; 00123 om[1] = -m[1]; 00124 om[3] = m[1]; 00125 om[2] = m[2]; 00126 om[5] = m[3]; 00127 00128 } 00129 00130 /*** 00131 * Retrives rotation and translation minimzing the error of the rigid transformation between two sets of points. 00132 * 00133 */ 00134 cv::Mat cvEstimateRigidTransformFrom2(const cv::Mat& A, const cv::Mat& B) { 00135 cv::Mat M = cv::Mat(2, 3, CV_64F); 00136 00137 CvPoint2D32f* a = (CvPoint2D32f*) A.data; 00138 CvPoint2D32f* b = (CvPoint2D32f*) B.data; 00139 CvMat matM = M; 00140 icvGetRTMatrix(a, b, 2, &matM); 00141 return M; 00142 } 00143 00144 /*** 00145 * Extracts corner features from an image and tracks them in subsequent images. 00146 * The features are clustered such that each feature cluster consists of features with similar trajectories, i.e. 00147 * which can be explained by the same sequence of rigid transforms. 00148 */ 00149 00150 class FeatureTracker { 00151 public: 00152 00153 //Parameters for feature detection 00154 static const int MAX_CORNERS = 500; 00155 static const double quality = 0.01; 00156 static const double min_distance = 10; 00157 static const int WIN_SIZE = 10; 00158 00159 //number of points used to calculate RT 00160 const static int num_points_to_select = 3; 00161 00162 tf::TransformListener tf_listen; 00163 00164 cv::Mat vis_image; 00165 cv::Mat vis_image_orig; 00166 cv::Mat last_image; 00167 std::vector<std::vector<cv::Point2f> > corner_trajectories; 00168 std::vector<std::vector<cv::Point2f> > corner_trajectories_filtered_and_transposed; 00169 00170 pcl::PointCloud<pcl::PointXYZ>::Ptr cloud; 00171 pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_segment; 00172 #if !NO_ROS 00173 ros::NodeHandle nh; 00174 #endif 00175 Eigen::VectorXi good_corners; 00176 00177 //executed tracking steps 00178 int num_tracking_steps; 00179 00180 // colormap for disparities, RGB order 00181 static unsigned char colormap[]; 00182 00183 static const float residual_error_comp = 0.1; // 0.1; //deduct half a pixel from the residuals at each timestep 00184 00185 FeatureTracker() : 00186 num_tracking_steps(0) { 00187 } 00188 /*** 00189 * Find the corresponding point cloud calculate pose and grasp the object 00190 */ 00191 bool grasp(); 00192 /*** 00193 * Find corners using OpnecCV's goodFeaturesToTrack. 00194 */ 00195 std::vector<cv::Point2f> findCorners(cv::Mat& first_image, 00196 std::vector<std::vector<cv::Point> > contours=std::vector<std::vector<cv::Point> > (), cv::Mat* mask = 0); 00197 00198 /*** 00199 * Execute one tracking step: 00200 * Calculate sparse optical flow from all corners in old image 00201 */ 00202 bool tracking_step(cv::Mat& new_image); 00203 00204 /*** 00205 * Caluculates residuals as squared reprojection error between rigid transform trajectory predicted by hypothesis and actual trajectory. 00206 */ 00207 bool calcResiduals(const std::vector<Eigen::Matrix3f>& RTs, 00208 const std::vector<std::vector<cv::Point2f> >& corner_trajectories_t, 00209 Eigen::VectorXf& residuals); 00210 00211 bool filterTrajectories(); 00212 00216 int draw(Eigen::VectorXf& prob_dist); 00217 00218 bool selectRandomFeatures(std::vector<int>& features_idx, 00219 const Eigen::VectorXf& mask); 00220 00221 /*** 00222 * generates a rotation translation trajectory hypothesis for all features marked in mask in a RANSAC like fashion. 00223 */ 00224 bool generateHypothesis(const Eigen::VectorXf& mask, 00225 std::vector<Eigen::Matrix3f>& RTs); 00226 00227 bool getPclPoint(int x, int y, pcl::PointXYZ& point); 00228 00229 /*** Top-level function: 00230 * Samples a number of hypothesis sets, i.e. samples sets of possible feature clusters. 00231 * Hypothesis sets are scored and best one is returned/visualized. 00232 */ 00233 bool evluateTrajectories(); 00234 00235 /*** 00236 * Sample recursively possible feature-to-cluster associations (hypotheses) by generating a hypothesis 00237 * on the input features set, calculate outliers and generate another hypothesis, ... and so on until a 00238 * hypothesis exists for each feature. 00239 */ 00240 float sampleHypothesisSet(std::vector<int>& feature_assocoation, 00241 Eigen::VectorXf& feature_per_hyp_count); 00242 00243 }; 00244 00245 bool FeatureTracker::grasp() { 00246 #if !NO_ROS 00247 00248 00249 double clusterTolerance=0.08; 00250 int spacialLocator=0; 00251 int minClusterSize=4; 00252 int maxClusterSize=200; 00253 double clusterEpsilon=1; 00254 double preGraspAbove=0.15; 00255 double underTheObject=0.02; 00256 00257 ros::Publisher cloud_msg = nh.advertise<sensor_msgs::PointCloud2>( 00258 "points_segmented", 1); 00259 00260 ros::Publisher pre_pose = nh.advertise<geometry_msgs::PoseStamped>( 00261 "our_pose", 1000); 00262 00263 ros::Publisher pre_grasp = nh.advertise<geometry_msgs::PoseStamped>( 00264 "pre_grasp", 1000); 00265 00266 simple_robot_control::Robot robot; 00267 //DEBUG 00268 //std::cout << " height of the cloud: " << cloud_segment->height << std::endl; 00269 //std::cout << " width of the cloud: " << cloud_segment->width << std::endl; 00270 //std::cout << "size " << cloud_segment->points.size() << std::endl; 00271 00272 //erase not valid points 00273 for (size_t i = 0; i < cloud_segment->points.size(); ++i) { 00274 if (cloud_segment->points[i].x == 0.0) { 00275 cloud_segment->erase(cloud_segment->begin() + i); 00276 cloud_segment->width--; 00277 cloud_segment->points.resize(cloud_segment->width); 00278 } 00279 } 00280 //DEBUG 00281 //std::cerr << "Cloud after segmentation: " << std::endl; 00282 //for (size_t i = 0; i < cloud_segment->points.size(); ++i) 00283 //std::cerr << " " << cloud_segment->points[i].x << " " 00284 // << cloud_segment->points[i].y << " " 00285 // << cloud_segment->points[i].z << std::endl; 00286 00287 //computing centroid of the point cloud 00288 Eigen::Vector4f centroid; 00289 pcl::compute3DCentroid(*cloud_segment, centroid); 00290 //DEBUG 00291 //std::cerr << "centroid: " << centroid << std::endl; 00292 00293 if (centroid(1) != 0.0) { 00294 EIGEN_ALIGN16 Eigen::Vector3f eigen_values; 00295 EIGEN_ALIGN16 Eigen::Matrix3f eigen_vectors; 00296 Eigen::Matrix3f cov; 00297 Eigen::Vector3f eigen_vector1; 00298 Eigen::Vector3f eigen_vector2; 00299 Eigen::Vector3f vector3rd; 00300 Eigen::Matrix4f pose; 00301 KdTreePtr clusters_tree_; 00302 00303 clusters_tree_ = boost::make_shared<pcl::KdTreeFLANN<PointT> >(); 00304 int dotprod; 00305 pcl::EuclideanClusterExtraction<PointT> cluster_; 00306 cluster_.setClusterTolerance(clusterTolerance); 00307 cluster_.setSpatialLocator(spacialLocator); 00308 cluster_.setMinClusterSize(minClusterSize); 00309 cluster_.setMaxClusterSize(maxClusterSize); 00310 clusters_tree_ = boost::make_shared<pcl::KdTreeFLANN<PointT> >(); 00311 clusters_tree_->setEpsilon(clusterEpsilon); 00312 cluster_.setSearchMethod(clusters_tree_); 00313 00314 pcl::PointCloud<pcl::PointXYZRGB>::Ptr pcl_cloud_( 00315 new pcl::PointCloud<pcl::PointXYZRGB>()); 00316 00317 pcl::PointIndices::Ptr object_inliers(new pcl::PointIndices()); 00318 std::vector<pcl::PointIndices> cluster_indices; 00319 //some color- doesnt matter in this case 00320 uint8_t col = 5; 00321 00322 for (size_t i = 0; i < cloud_segment->points.size(); ++i) { 00323 pcl::PointXYZRGB point_color; 00324 point_color.x = cloud_segment->points[i].x; 00325 point_color.y = cloud_segment->points[i].y; 00326 point_color.z = cloud_segment->points[i].z; 00327 point_color.r = col; 00328 point_color.g = col; 00329 point_color.b = col; 00330 pcl_cloud_->points.push_back(point_color); 00331 00332 } 00333 //DEBUG 00334 //std::cerr << "Point cloud : " << std::endl; 00335 //for (size_t i = 0; i < pcl_cloud_->points.size(); ++i) 00336 //std::cerr << " " << pcl_cloud_->points[i].x << " " 00337 //<< pcl_cloud_->points[i].y << " " << pcl_cloud_->points[i].z 00338 //<< std::endl; 00339 00340 cluster_.setInputCloud(pcl_cloud_); 00341 cluster_.extract(cluster_indices); 00342 00343 //DEBUG 00344 //std::cerr << "clusters size: " << int(cluster_indices.size()) 00345 // << std::endl; 00346 00347 //compute the real centroid after clustering 00348 if (int(cluster_indices.size()) > 0) { 00349 //DEBUG 00350 //std::cerr << "cluster indices: " << cluster_indices[0] << std::endl; 00351 pcl::copyPointCloud(*pcl_cloud_, cluster_indices[0], 00352 *cloud_segment); 00353 pcl::compute3DCentroid(*cloud_segment, centroid); 00354 } 00355 00356 //orientation of the gripper based on PCA 00357 00358 pcl::computeCovarianceMatrixNormalized(*cloud_segment, centroid, cov); 00359 pcl::eigen33(cov, eigen_vectors, eigen_values); 00360 00361 //DEBUG 00362 // std::cerr << "eigen vectors: " << eigen_vectors << std::endl; 00363 // std::cerr << "eigen values: " << eigen_values << std::endl; 00364 eigen_vector1(0) = eigen_vectors(0, 2); 00365 eigen_vector1(1) = eigen_vectors(1, 2); 00366 eigen_vector1(2) = eigen_vectors(2, 2); 00367 eigen_vector2(0) = eigen_vectors(0, 1); 00368 eigen_vector2(1) = eigen_vectors(1, 1); 00369 eigen_vector2(2) = eigen_vectors(2, 1); 00370 dotprod = eigen_vector1.dot(eigen_vector2); 00371 00372 //DEBUG 00373 //std::cerr << "dot product: " << dotprod << std::endl; 00374 //std::cerr << "eigen vector1: " << eigen_vector1 << std::endl; 00375 //std::cerr << "eigen vector2: " << eigen_vector2 << std::endl; 00376 00377 vector3rd = eigen_vector1.cross(eigen_vector2); 00378 //DEBUG 00379 //std::cerr << "cross-vector3: " << vector3rd << std::endl; 00380 for (int i = 0; i <= 2; i++) { 00381 pose(i, 0) = eigen_vector1(i); 00382 pose(i, 1) = eigen_vector2(i); 00383 pose(i, 2) = vector3rd(i); 00384 pose(i, 3) = centroid(i); 00385 } 00386 pose(3, 0) = 0.0; 00387 pose(3, 1) = 0.0; 00388 pose(3, 2) = 0.0; 00389 pose(3, 3) = 1.0; 00390 std::cerr << "pose: " << pose << std::endl; 00391 00392 //transform pose matrix to eigen::affine3d 00393 Eigen::Affine3d e2; 00394 e2.matrix() = pose.cast<double>(); 00395 geometry_msgs::Pose p_msgs; 00396 00397 tf::poseEigenToMsg(e2, p_msgs); 00398 00399 //publishing the pose 00400 geometry_msgs::PoseStamped msg; 00401 msg.pose = p_msgs; 00402 msg.header.frame_id = "/openni_rgb_optical_frame"; 00403 msg.header.stamp = ros::Time::now(); 00404 msg.header.seq = 0; 00405 pre_pose.publish(msg); 00406 00407 ros::Time time; 00408 time = ros::Time::now(); 00409 bool found_transform = tf_listen.waitForTransform( 00410 "/openni_rgb_optical_frame", "/base_link", 00411 cloud_segment->header.stamp, ros::Duration(1.0)); 00412 //DEBUG 00413 //std::cerr << "found transform? : " << found_transform << std::endl; 00414 00415 Eigen::Affine3d e3; 00416 e3.matrix() = pose.cast<double>(); 00417 geometry_msgs::Pose p_msgs2; 00418 00419 tf::poseEigenToMsg(e3, p_msgs2); 00420 geometry_msgs::PoseStamped msg2; 00421 00422 msg2.pose = p_msgs2; 00423 msg2.header.frame_id = "/openni_rgb_optical_frame"; 00424 msg2.header.stamp = ros::Time::now(); 00425 msg2.header.seq = 0; 00426 00427 //transforming the pose to the base link 00428 //msg_base_link pre position for the grasping 00429 if (found_transform) { 00430 geometry_msgs::PoseStamped msg_base_link; 00431 tf_listen.transformPose("/base_link", cloud_segment->header.stamp, 00432 msg2, msg2.header.frame_id, msg_base_link); 00433 msg_base_link.pose.position.z = msg_base_link.pose.position.z 00434 + preGraspAbove; 00435 00436 //computing the orientation of the pre pose 00437 Eigen::Vector3f orth(0, 0, -1); 00438 00439 Eigen::Quaternionf quaternion; 00440 quaternion.x() = msg_base_link.pose.orientation.x; 00441 quaternion.y() = msg_base_link.pose.orientation.y; 00442 quaternion.z() = msg_base_link.pose.orientation.z; 00443 quaternion.w() = msg_base_link.pose.orientation.w; 00444 Eigen::Matrix3f mat; 00445 Eigen::Matrix3f mat2; 00446 Eigen::Matrix3f mat_last_new; 00447 Eigen::Matrix3f mat_last; 00448 mat_last = quaternion.toRotationMatrix(); 00449 //DEBUG 00450 //std::cerr << "matrix last BEFORE: " << mat_last << std::endl; 00451 mat_last_new = mat_last; 00452 mat_last_new(0, 2) = -mat_last(0, 0); 00453 mat_last_new(1, 2) = -mat_last(1, 0); 00454 mat_last_new(2, 2) = -mat_last(2, 0); 00455 00456 mat_last_new(0, 0) = mat_last(0, 2); 00457 mat_last_new(1, 0) = mat_last(1, 2); 00458 mat_last_new(2, 0) = mat_last(2, 2); 00459 Eigen::Vector3f xaxis; 00460 Eigen::Vector3f yaxis; 00461 Eigen::Vector3f zaxis; 00462 Eigen::Vector3f xaxis_new; 00463 Eigen::Vector3f yaxis_new; 00464 Eigen::Vector3f zaxis_new; 00465 xaxis(0) = mat_last_new(0, 0); 00466 xaxis(1) = mat_last_new(1, 0); 00467 xaxis(2) = mat_last_new(2, 0); 00468 00469 yaxis(0) = mat_last_new(0, 1); 00470 yaxis(1) = mat_last_new(1, 1); 00471 yaxis(2) = mat_last_new(2, 1); 00472 00473 zaxis(0) = mat_last_new(0, 2); 00474 zaxis(1) = mat_last_new(1, 2); 00475 zaxis(2) = mat_last_new(2, 2); 00476 00477 yaxis_new = zaxis.cross(orth); 00478 zaxis_new = orth.cross(yaxis_new); 00479 xaxis_new = yaxis_new.cross(zaxis_new); 00480 00481 mat_last_new.block<3, 1>(0, 0) = xaxis_new; 00482 mat_last_new.block<3, 1>(0, 1) = yaxis_new; 00483 mat_last_new.block<3, 1>(0, 2) = zaxis_new; 00484 00485 //DEBUG 00486 //std::cerr << "matrix last AFTER: " << mat_last_new << std::endl; 00487 00488 Eigen::Quaternionf quaternion_last(mat_last_new); 00489 00490 msg_base_link.pose.orientation.x = quaternion_last.x(); 00491 msg_base_link.pose.orientation.y = quaternion_last.y(); 00492 msg_base_link.pose.orientation.z = quaternion_last.z(); 00493 msg_base_link.pose.orientation.w = quaternion_last.w(); 00494 00495 //publishing the point cloud in rviz 00496 sensor_msgs::PointCloud2 cloudMessage; 00497 00498 pcl::toROSMsg(*cloud_segment, cloudMessage); 00499 00500 cloudMessage.header.stamp = ros::Time::now(); 00501 cloudMessage.header.frame_id = "/openni_rgb_optical_frame"; 00502 cloud_msg.publish(cloudMessage); 00503 00504 //publishing the final pre pose 00505 pre_grasp.publish(msg_base_link); 00506 00507 tf::Stamped<tf::Transform> point_stamped; 00508 tf::poseStampedMsgToTF(msg_base_link, point_stamped); 00509 00510 tf::StampedTransform tf_l2(point_stamped, ros::Time::now(), 00511 "base_link", "doesnt_matter"); 00512 00513 //PARKING POSITION BEFORE STARTING GRASPING PROCESS 00514 geometry_msgs::PoseStamped msg_base_link_pre; 00515 00516 msg_base_link_pre.header.frame_id = "/openni_rgb_optical_frame"; 00517 msg_base_link_pre.header.stamp = msg_base_link.header.stamp; 00518 msg_base_link_pre.header.seq = 0; 00519 00520 double parkingXl=0.437; 00521 double parkingYl=0.356; 00522 double parkingZl=0.762; 00523 double parkingOrientationXl=0.048; 00524 double parkingOrientationYl=0.133; 00525 double parkingOrientationZl= -0.352; 00526 double parkingOrientationWl=0.925; 00527 00528 00529 00530 msg_base_link_pre.pose.position.x = parkingXl; 00531 msg_base_link_pre.pose.position.y = parkingYl; 00532 00533 msg_base_link_pre.pose.position.z = parkingZl; 00534 msg_base_link_pre.pose.orientation.x = parkingOrientationXl; 00535 msg_base_link_pre.pose.orientation.y = parkingOrientationYl; 00536 msg_base_link_pre.pose.orientation.z = parkingOrientationZl; 00537 msg_base_link_pre.pose.orientation.w = parkingOrientationWl; 00538 00539 tf::Stamped<tf::Transform> point_stamped_pre; 00540 tf::poseStampedMsgToTF(msg_base_link_pre, point_stamped_pre); 00541 00542 tf::StampedTransform tf_l2_pre(point_stamped_pre, ros::Time::now(), 00543 "base_link", "doesnt_matter"); 00544 //go to the park position with the left hand 00545 robot.left_arm.moveGrippertoPose(tf_l2_pre); 00546 00547 //do the same with the right hand 00548 00549 geometry_msgs::PoseStamped msg_base_link_pre_r; 00550 msg_base_link_pre_r.header.frame_id = "/openni_rgb_optical_frame"; 00551 msg_base_link_pre_r.header.stamp = msg_base_link.header.stamp; 00552 msg_base_link_pre_r.header.seq = 0; 00553 00554 double parkingXr=0.546; 00555 double parkingYr= -0.271; 00556 double parkingZr= 0.750; 00557 double parkingOrientationXr=-0.012; 00558 double parkingOrientationYr=0.198; 00559 double parkingOrientationZr= 0.336; 00560 double parkingOrientationWr=0.921; 00561 00562 msg_base_link_pre_r.pose.position.x = parkingXr; 00563 msg_base_link_pre_r.pose.position.y = parkingYr; 00564 00565 msg_base_link_pre_r.pose.position.z = parkingZr; 00566 msg_base_link_pre_r.pose.orientation.x = parkingOrientationXr; 00567 msg_base_link_pre_r.pose.orientation.y = parkingOrientationYr; 00568 msg_base_link_pre_r.pose.orientation.z = parkingOrientationZr; 00569 msg_base_link_pre_r.pose.orientation.w = parkingOrientationWr; 00570 00571 tf::Stamped<tf::Transform> point_stamped_pre_r; 00572 tf::poseStampedMsgToTF(msg_base_link_pre_r, point_stamped_pre_r); 00573 00574 tf::StampedTransform tf_l2_pre_r(point_stamped_pre_r, 00575 ros::Time::now(), "base_link", "doesnt_matter"); 00576 robot.right_arm.moveGrippertoPose(tf_l2_pre_r); 00577 00578 //move the left gripper to before calculated pre pose 00579 robot.left_arm.moveGrippertoPose(tf_l2); 00580 robot.left_gripper.open(); 00581 00582 //left gripper moves to the object position 00583 //underTheObject- how much under the object should the gripper go 00584 msg_base_link.pose.position.z = msg_base_link.pose.position.z - preGraspAbove 00585 - underTheObject; 00586 tf::Stamped<tf::Transform> point_stamped_aim; 00587 tf::poseStampedMsgToTF(msg_base_link, point_stamped_aim); 00588 tf::StampedTransform tf_l_aim(point_stamped_aim, ros::Time::now(), 00589 "base_link", "doesnt_matter"); 00590 00591 00592 robot.left_arm.moveGrippertoPose(tf_l_aim); 00593 00594 //close the gripper and go to the pre pose 00595 robot.left_gripper.close(50); 00596 robot.left_arm.moveGrippertoPose(tf_l2); 00597 00598 //DEBUG 00599 //std::cerr << "position : " << msg_base_link.pose.position 00600 //<< std::endl; 00601 00602 //move to the pose where we want robot to throw the object 00603 double throwXr= 0.550; 00604 double throwYr= 0.711; 00605 double throwZr=0.804; 00606 00607 00608 msg_base_link.pose.position.x = throwXr; 00609 msg_base_link.pose.position.y = throwYr; 00610 msg_base_link.pose.position.z = throwZr; 00611 00612 tf::Stamped<tf::Transform> point_stamped_throw; 00613 tf::poseStampedMsgToTF(msg_base_link, point_stamped_throw); 00614 tf::StampedTransform tf_l_throw(point_stamped_throw, 00615 ros::Time::now(), "base_link", "doesnt_matter"); 00616 robot.left_arm.moveGrippertoPose(tf_l_throw); 00617 robot.left_gripper.open(); 00618 } 00619 00620 } 00621 #endif 00622 return true; 00623 } 00624 00625 /*** 00626 * Find corners using OpnecCV's goodFeaturesToTrack. 00627 */ 00628 std::vector<cv::Point2f> FeatureTracker::findCorners(cv::Mat& first_image, 00629 std::vector<std::vector<cv::Point> > contours, cv::Mat* mask) { 00630 num_tracking_steps = 0; 00631 cv::Mat first_image_grey; 00632 cv::cvtColor(first_image, first_image_grey, CV_BGR2GRAY); 00633 std::vector<cv::Point2f> corners; 00634 cv::goodFeaturesToTrack(first_image_grey, corners, MAX_CORNERS, quality, 00635 min_distance); 00636 00637 cv::cornerSubPix(first_image_grey, corners, cv::Size(WIN_SIZE, WIN_SIZE), 00638 cv::Size(-1, -1), 00639 cv::TermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03)); 00640 float diff; 00641 for (std::vector<cv::Point2f>::size_type k = 0; k != corners.size(); k++) { 00642 for (std::vector<std::vector<cv::Point> >::size_type i = 0; 00643 i != contours.size(); i++) { 00644 00645 std::vector<cv::Point>& current_contour = contours[i]; 00646 for (std::vector<cv::Point>::size_type j = 0; 00647 j != contours[i].size(); j++) { 00648 00649 diff = 00650 sqrt( 00651 (corners[k].x - current_contour[j].x) 00652 * (corners[k].x - current_contour[j].x) 00653 + ((corners[k].y - current_contour[j].y) 00654 * (corners[k].y 00655 - current_contour[j].y))); 00656 if (diff < 1.5) { 00657 00658 corners[k].x = 1.0; 00659 corners[k].y = 1.0; 00660 } 00661 00662 } 00663 00664 } 00665 00666 } 00667 00668 //DEBUG 00669 //std::cerr<<"corners SIZE BEFORE: "<<corners.size()<<std::endl; 00670 std::vector<cv::Point2f>::iterator iter = corners.begin(); 00671 while (iter != corners.end()) { 00672 if ((iter->x - 1.0 < 0.0000001) && (iter->y - 1.0 < 0.0000001)) { 00673 // erase returns the new iterator to next position 00674 iter = corners.erase(iter); 00675 } else { 00676 // if erase is not called, then we manually increment 00677 // the iterator to next position 00678 ++iter; 00679 } 00680 } 00681 good_corners = Eigen::VectorXi::Ones(corners.size()); 00682 //DEBUG 00683 //std::cerr<<"corners SIZE AFTER: "<<corners.size()<<std::endl; 00684 00685 //std::cerr<<"corners AFTER: "<<corners<<std::endl; 00686 00687 corner_trajectories.push_back(corners); 00688 last_image = first_image_grey; 00689 00690 //visulization 00691 vis_image_orig = first_image; 00692 vis_image = vis_image_orig.clone(); 00693 for (unsigned int i = 0; i < corners.size(); i++) { 00694 int color_idx = ((num_tracking_steps * 11) % 256) * 3; 00695 cv::circle( 00696 vis_image, 00697 corners[i], 00698 3, 00699 cv::Scalar(colormap[color_idx], colormap[color_idx + 1], 00700 colormap[color_idx + 2], 0), -1); 00701 } 00702 00703 return corners; 00704 } 00705 00706 /*** 00707 * Execute one tracking step: 00708 * Calculate sparse optical flow from all corners in old image 00709 */ 00710 bool FeatureTracker::tracking_step(cv::Mat& new_image) { 00711 num_tracking_steps++; 00712 cv::Mat new_image_grey; 00713 cv::cvtColor(new_image, new_image_grey, CV_BGR2GRAY); 00714 std::vector<cv::Point2f> new_corners; 00715 std::vector<unsigned char> status; 00716 std::vector<float> error; 00717 cv::calcOpticalFlowPyrLK(last_image, new_image_grey, 00718 corner_trajectories.back(), new_corners, status, error, 00719 cv::Size(WIN_SIZE, WIN_SIZE), 3, 00720 cv::TermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03)); 00721 00722 last_image = new_image_grey; 00723 00724 corner_trajectories.push_back(new_corners); 00725 for (unsigned int i = 0; i < good_corners.rows(); i++) { 00726 good_corners[i] = good_corners[i] & status[i]; 00727 } 00728 //DEBUG 00729 //printf("num good corners %d %d new _corner_size %d \n", good_corners.sum(), 00730 //good_corners.rows(), corner_trajectories.back().size()); 00731 00732 //visulization 00733 vis_image_orig = new_image; 00734 vis_image = vis_image_orig.clone(); 00735 std::vector<cv::Point2f>& old_corners = 00736 corner_trajectories[num_tracking_steps - 1]; 00737 for (unsigned int i = 0; i < new_corners.size(); i++) { 00738 if (!good_corners[i]) 00739 continue; 00740 int color_idx = ((num_tracking_steps * 11) % 256) * 3; 00741 //DEBUG 00742 // cv::circle(vis_image, new_corners[i], 3, cv::Scalar(colormap[color_idx], colormap[color_idx +1], colormap[color_idx +2],0), -1); 00743 // cv::line(vis_image,new_corners[i], old_corners[i], cv::Scalar(0,0,255,0), 1); 00744 // cv::line(vis_image,new_corners[i], old_corners[i], cv::Scalar(colormap[color_idx], colormap[color_idx +1], colormap[color_idx +2]), 1); 00745 00746 } 00747 00748 return true; 00749 00750 } 00751 00752 /*** 00753 * Caluculates residuals as squared reprojection error between rigid transform trajectory predicted by hypothesis and actual trajectory. 00754 */ 00755 bool FeatureTracker::calcResiduals(const std::vector<Eigen::Matrix3f>& RTs, 00756 const std::vector<std::vector<cv::Point2f> >& corner_trajectories_t, 00757 Eigen::VectorXf& residuals) { 00758 00759 residuals = Eigen::VectorXf(corner_trajectories_t.size()); 00760 for (unsigned int t = 0; t < corner_trajectories_t.size(); t++) { 00761 const std::vector<cv::Point2f>& current_trajectory = 00762 corner_trajectories_t[t]; 00763 //DEBUG 00764 // printf("RTs.size() %d current_trajectory.size() %d\n",RTs.size(),current_trajectory.size()); 00765 assert(RTs.size() == current_trajectory.size() - 1); 00766 float residual = 0; 00767 for (unsigned int c = 0; c < current_trajectory.size() - 1; c++) { 00768 Eigen::Vector3f old_corner(current_trajectory[c].x, 00769 current_trajectory[c].y, 1.0); 00770 Eigen::Vector3f new_corner(current_trajectory[c + 1].x, 00771 current_trajectory[c + 1].y, 1.0); 00772 Eigen::Vector3f res = new_corner - (RTs[c] * old_corner); 00773 //DEBUG 00774 // cout<<"res for traj "<<t<<": vec\n"<<res<<endl; 00775 00776 residual += std::max(0.0f, res.squaredNorm() - residual_error_comp); 00777 00778 } 00779 residuals[t] = residual; 00780 } 00781 return true; 00782 } 00783 00784 bool FeatureTracker::filterTrajectories() { 00785 00786 std::vector<std::vector<cv::Point2f> > corner_trajectories_filtered; 00787 //DEBUG 00788 //printf("corner_trajectories size %d corner_trajectories[0].size() %d good _corners %d\n", corner_trajectories.size(), corner_trajectories[0].size(), good_corners.rows()); 00789 int num_good_features = good_corners.sum(); 00790 00791 //filter out bad features 00792 for (unsigned int j = 0; j < corner_trajectories.size(); j++) { 00793 std::vector<cv::Point2f> features_filtered; 00794 features_filtered.reserve(num_good_features); 00795 for (unsigned int i = 0; i < good_corners.rows(); i++) { 00796 if (good_corners[i]) { 00797 features_filtered.push_back(corner_trajectories[j][i]); 00798 } 00799 } 00800 corner_trajectories_filtered.push_back(features_filtered); 00801 } 00802 00803 //transpose 00804 corner_trajectories_filtered_and_transposed.clear(); 00805 corner_trajectories_filtered_and_transposed.reserve(good_corners.rows()); 00806 for (unsigned int i = 0; i < good_corners.rows(); i++) { 00807 std::vector<cv::Point2f> features_trans; 00808 features_trans.reserve(corner_trajectories_filtered.size()); 00809 for (unsigned int j = 0; j < corner_trajectories_filtered.size(); j++) { 00810 features_trans.push_back(corner_trajectories_filtered[j][i]); 00811 } 00812 corner_trajectories_filtered_and_transposed.push_back(features_trans); 00813 } 00814 //DEBUG 00815 //printf("corner_trajectories_filtered size %d \n", corner_trajectories_filtered.size()); 00816 00817 return true; 00818 00819 } 00820 00824 int FeatureTracker::draw(Eigen::VectorXf& prob_dist) { 00825 float pos = (float) rand() / RAND_MAX; 00826 int idx = 0; 00827 float accumulator = 0.0f; 00828 for (;;) { 00829 accumulator += prob_dist[idx]; 00830 if (accumulator > pos) 00831 break; 00832 idx++; 00833 } 00834 return idx; 00835 00836 } 00837 00838 //TODO: This needs to be done more efficiently... 00839 bool FeatureTracker::selectRandomFeatures(std::vector<int>& features_idx, 00840 const Eigen::VectorXf& mask) { 00841 00842 float variance = 200.0f * 200.0f; //pixel size of objects 00843 //select random but depending on gaussian distance 00844 //first create map 00845 int num_non_masked_elements = mask.sum(); 00846 if (num_non_masked_elements < 3) { 00847 //DEBUG 00848 //printf("num non masked elements %d\n", num_non_masked_elements); 00849 return false; 00850 } 00851 int* map_array = new int[num_non_masked_elements]; 00852 int counter = 0; 00853 for (int i = 0; i < mask.rows(); i++) { 00854 if (mask[i] > 0.0) { 00855 map_array[counter] = i; 00856 counter++; 00857 } 00858 00859 } 00860 00861 //we assume 3 points are selected... 00862 int idx1 = rand() % num_non_masked_elements; 00863 Eigen::VectorXf prob_dist(num_non_masked_elements); 00864 cv::Point2f current_point = 00865 corner_trajectories_filtered_and_transposed[map_array[idx1]].back(); 00866 for (int i = 0; i < num_non_masked_elements; i++) { 00867 if (i == idx1) { 00868 prob_dist[i] = 0.0; 00869 continue; 00870 } 00871 cv::Point2f other = 00872 corner_trajectories_filtered_and_transposed[map_array[i]].back(); 00873 float dist_sqr = pow((other.x - current_point.x), 2) 00874 + pow((other.y - current_point.y), 2); 00875 00876 float dist_sqr_n = dist_sqr / variance; 00877 prob_dist[i] = exp(-0.5 * dist_sqr_n); 00878 00879 } 00880 00881 float normalizer = prob_dist.sum(); 00882 00883 //take care of the case where numbers are really low 00884 if (normalizer < 0.0000000001) { 00885 return false; 00886 } 00887 00888 prob_dist = prob_dist / normalizer; 00889 int idx2 = draw(prob_dist); 00890 00891 assert(map_array[idx1] < mask.rows()); 00892 assert(map_array[idx2] < mask.rows()); 00893 features_idx.clear(); 00894 features_idx.push_back(map_array[idx1]); 00895 features_idx.push_back(map_array[idx2]); 00896 00897 delete[] map_array; 00898 return true; 00899 } 00900 00901 /*** 00902 * generates a rotation translation trajectory hypothesis for all features marked in mask in a RANSAC like fashion. 00903 */ 00904 bool FeatureTracker::generateHypothesis(const Eigen::VectorXf& mask, 00905 std::vector<Eigen::Matrix3f>& RTs) { 00906 00907 int num_elements = mask.sum(); 00908 //DEBUG 00909 //printf("in generateHypothesis num_elements %d\n",num_elements); 00910 if (num_elements < 3) 00911 return false; 00912 00913 std::vector<int> features_idx; 00914 if (!selectRandomFeatures(features_idx, mask)) 00915 return false; 00916 00917 RTs.reserve(num_tracking_steps); 00918 //DEBUG 00919 //printf("num_tracking_steps %d\n",num_tracking_steps); 00920 for (int i = 1; i < num_tracking_steps + 1; i++) { 00921 //DEBUG 00922 //printf("step gen %d \n", i); 00923 cv::Mat new_points(features_idx.size(), 1, CV_32FC2); 00924 cv::Mat old_points(features_idx.size(), 1, CV_32FC2); 00925 for (int j = 0; j < features_idx.size(); j++) { 00926 cv::Point2f& new_point = 00927 corner_trajectories_filtered_and_transposed[features_idx[j]][i]; 00928 new_points.ptr<float>()[j * 2] = new_point.x; 00929 new_points.ptr<float>()[j * 2 + 1] = new_point.y; 00930 cv::Point2f& old_point = 00931 corner_trajectories_filtered_and_transposed[features_idx[j]][i 00932 - 1]; 00933 old_points.ptr<float>()[j * 2] = old_point.x; 00934 old_points.ptr<float>()[j * 2 + 1] = old_point.y; 00935 } 00936 //another option 00937 // cv::Mat RT_cv = cv::estimateRigidTransform(old_points, new_points, false); 00938 cv::Mat RT_cv = cvEstimateRigidTransformFrom2(old_points, new_points); 00939 Eigen::Matrix3f RT = Eigen::Matrix3f::Identity(); 00940 Eigen::Matrix<float, 2, 3> rt_temp; 00941 cv::cv2eigen(RT_cv, rt_temp); 00942 RT.block<2, 3>(0, 0) = rt_temp; 00943 RTs.push_back(RT); 00944 } 00945 //DEBUG 00946 //cout<<"last hyp tf \n"<<RTs.back()<<endl; 00947 return true; 00948 00949 } 00950 00951 bool FeatureTracker::getPclPoint(int x, int y, pcl::PointXYZ& point) { 00952 //DEBUG 00953 //std::cerr<<"X : "<<x<<std::endl<<"Y : "<<y<<std::endl; 00954 if (isnan(cloud->points[x + 640 * y].x)) 00955 return false; 00956 else { 00957 point = cloud->points[x + 640 * y]; 00958 00959 return true; 00960 } 00961 } 00962 00963 /*** Top-level function: 00964 * Samples a number of hypothesis sets, i.e. samples sets of possible feature clusters. 00965 * Hypothesis sets are scored and best one is returned/visualized. 00966 */ 00967 bool FeatureTracker::evluateTrajectories() { 00968 00969 int num_samples = 15; 00970 int num_hypotheses = 10; 00971 filterTrajectories(); 00972 Eigen::VectorXf scores = Eigen::VectorXf::Zero(num_samples); 00973 std::vector<std::vector<int> > feature_assocoations; 00974 std::vector<Eigen::VectorXf> features_per_hyp; 00975 00976 //create hypothesis set samples 00977 for (int i = 0; i < num_samples; i++) { 00978 std::vector<int> feature_assocoation; 00979 Eigen::VectorXf feature_per_hyp_count; 00980 scores[i] = sampleHypothesisSet(feature_assocoation, 00981 feature_per_hyp_count); 00982 feature_assocoations.push_back(feature_assocoation); 00983 features_per_hyp.push_back(feature_per_hyp_count); 00984 //DEBUG 00985 // if(feature_assocoation.size() != 1){ 00986 // cv::imshow("tracker_temp", vis_image); 00987 // cvWaitKey(1500); 00988 // } 00989 } 00990 00991 //find best hypothesis... 00992 int best_hypthesis_set_idx; 00993 scores.minCoeff(&best_hypthesis_set_idx); 00994 00995 //..which becomes final feature-to-cluster association. 00996 std::vector<int>& final_feature_association = 00997 feature_assocoations[best_hypthesis_set_idx]; 00998 Eigen::VectorXf& final_feature_per_hyp_count = 00999 features_per_hyp[best_hypthesis_set_idx]; 01000 01001 //DEBUG 01002 //cout<<"feature association size "<<feature_assocoations.size()<<endl; 01003 //cout<<"feature per hyp size "<<features_per_hyp.size()<<endl; 01004 01005 final_feature_per_hyp_count[0] = 0; 01006 //DEBUG 01007 //cout<<"final feature_per_hyp\n: "<<final_feature_per_hyp_count<<endl; 01008 int best_single_hyp; 01009 final_feature_per_hyp_count.maxCoeff(&best_single_hyp); 01010 01011 int idx_max_features; 01012 idx_max_features = best_single_hyp; 01013 //DEBUG 01014 //cout<<"idx max features "<<idx_max_features<<endl; 01015 int features_best_hyp; 01016 features_best_hyp = final_feature_per_hyp_count[best_single_hyp]; 01017 //DEBUG 01018 //cout<<"features best hyp "<<features_best_hyp<<endl; 01019 pcl::PointCloud<pcl::PointXYZ>::Ptr cloudnew( 01020 new pcl::PointCloud<pcl::PointXYZ>); 01021 cloud_segment = cloudnew; 01022 01023 vis_image = vis_image_orig.clone(); 01024 size_t index = 0; 01025 01026 for (int i = 0; i < corner_trajectories_filtered_and_transposed.size(); 01027 i++) { 01028 int idx = final_feature_association[i]; 01029 if (idx >= 0) 01030 01031 cv::circle( 01032 vis_image, 01033 corner_trajectories_filtered_and_transposed[i].back(), 01034 3, 01035 cv::Scalar(colormap[3 * idx], colormap[3 * idx + 1], 01036 colormap[3 * idx + 2], 0), 5); 01037 #if !NO_ROS 01038 if ((idx >= 1) && (idx == idx_max_features)) { 01039 pcl::PointXYZ point; 01040 if (getPclPoint( 01041 corner_trajectories_filtered_and_transposed[i].back().x, 01042 corner_trajectories_filtered_and_transposed[i].back().y, 01043 point)) 01044 //new point cloud 01045 { 01046 //DEBUG 01047 //cout << "points: " << point << endl; 01048 cloud_segment->points.push_back(point); 01049 } 01050 //DEBUG 01051 // }else{ 01052 // cv::circle(vis_image, corner_trajectories_filtered_and_transposed[i].back(), 9, cv::Scalar( 0 , 0, 0, 0), 5, 1); 01053 01054 } 01055 #endif 01056 01057 } 01058 01059 return true; 01060 01061 } 01062 01063 /*** 01064 * Sample recursively possible feature-to-cluster associations (hypotheses) by generating a hypothesis 01065 * on the input features set, calculate outliers and generate another hypothesis, ... and so on until a 01066 * hypothesis exists for each feature. 01067 */ 01068 float FeatureTracker::sampleHypothesisSet(std::vector<int>& feature_assocoation, 01069 Eigen::VectorXf& feature_per_hyp_count) { 01070 int num_hypotheses = 10; 01071 float mask_threshold = 80; 01072 01073 Eigen::MatrixXf residualMatrix( 01074 corner_trajectories_filtered_and_transposed.size(), num_hypotheses); 01075 01076 std::vector<std::vector<Eigen::Matrix3f> > hypothesis; 01077 01078 //create identity Matrix as first hypothesos 01079 std::vector<Eigen::Matrix3f> RTs_I; 01080 RTs_I.resize(num_tracking_steps, Eigen::Matrix3f::Identity()); 01081 //DEBUG 01082 //printf ("num_tracking_steps %d RTs_I.size() %d \n", num_tracking_steps, RTs_I.size()); 01083 hypothesis.push_back(RTs_I); 01084 01085 Eigen::VectorXf mask = Eigen::VectorXf::Ones( 01086 corner_trajectories_filtered_and_transposed.size()); 01087 bool points_left; 01088 int i = 1; 01089 for (; i < num_hypotheses; i++) { 01090 Eigen::VectorXf residuals; 01091 std::vector<Eigen::Matrix3f>& last_hypothesis = hypothesis[i - 1]; 01092 //DEBUG 01093 //printf("last_hypothesis size %d\n", last_hypothesis.size()); 01094 01095 //calculate the residuals incurred by current hypothesis (i.e. error between features' actual RT and the hypothetical RT). 01096 calcResiduals(last_hypothesis, 01097 corner_trajectories_filtered_and_transposed, residuals); 01098 01099 //store residuals 01100 residualMatrix.col(i - 1) = residuals; 01101 //DEBUG 01102 // cout<<"residuals\n"<<residuals<<endl; 01103 // cout<<"residuals_mat \n"<<residualMatrix<<endl; 01104 01105 //find inliers and mark them in binary mask 01106 Eigen::VectorXf mask_bin = 01107 (residuals.array() - mask_threshold).matrix(); 01108 for (int i = 0; i < mask_bin.rows(); i++) { 01109 mask_bin[i] = (mask_bin[i] > 0.0f) ? 1.0f : 0.0f; 01110 } 01111 01112 Eigen::ArrayXf mask_array = mask.array(); 01113 Eigen::ArrayXf mask_array_bin = mask_bin.array(); 01114 01115 mask = mask_array.min(mask_array_bin); 01116 01117 std::vector<Eigen::Matrix3f> new_hypothesis; 01118 01119 //for the outliers, generate a new hypothesis 01120 points_left = generateHypothesis(mask, new_hypothesis); 01121 if (!points_left) 01122 break; 01123 //DEBUG 01124 //printf("generated new hyp of size %d\n", new_hypothesis.size()); 01125 hypothesis.push_back(new_hypothesis); 01126 01127 } 01128 //calc last residuals if all hypothesis had to be used 01129 int& actual_num_hypothesis = i; 01130 //DEBUG 01131 //printf("actual_num_hypothesis %d num_hypotheses %d\n", actual_num_hypothesis,num_hypotheses); 01132 if (actual_num_hypothesis == num_hypotheses) { 01133 Eigen::VectorXf residuals; 01134 //DEBUG 01135 //printf("hypothesis.back() size %d\n", hypothesis.back().size()); 01136 calcResiduals(hypothesis.back(), 01137 corner_trajectories_filtered_and_transposed, residuals); 01138 //DEBUG 01139 // cout<<"last residuals\n"<<residuals; 01140 residualMatrix.col(residualMatrix.cols() - 1) = residuals; 01141 } else { 01142 Eigen::MatrixXf residualMatrix2 = residualMatrix.block(0, 0, 01143 residualMatrix.rows(), actual_num_hypothesis); 01144 residualMatrix = residualMatrix2; 01145 } 01146 //DEBUG 01147 //cout<<"residuals_mat \n"<<residualMatrix<<endl; 01148 01149 Eigen::VectorXf summed_residuals = residualMatrix.colwise().sum(); 01150 //DEBUG 01151 //cout<<"summed_residuals \n"<<summed_residuals<<endl; 01152 01153 Eigen::VectorXf summed_residuals_per_h = Eigen::VectorXf::Zero( 01154 residualMatrix.cols()); 01155 01156 //Finally run the process again and assign each feature to its best possible RT hypothesis (or mark it as complete outlier). 01157 std::vector<int> best_feature_h_idx; 01158 best_feature_h_idx.resize( 01159 corner_trajectories_filtered_and_transposed.size(), 0); 01160 feature_per_hyp_count = Eigen::VectorXf::Ones(residualMatrix.cols()); 01161 feature_per_hyp_count *= 0.0000001f; 01162 int num_outliers = 0; 01163 for (int i = 0; i < corner_trajectories_filtered_and_transposed.size(); 01164 i++) { 01165 int idx; 01166 residualMatrix.row(i).minCoeff(&idx); 01167 float residual_value = residualMatrix(i, idx); 01168 01169 //check if feature was an outlier 01170 if (mask[i] > 0.0f) { 01171 num_outliers++; 01172 best_feature_h_idx[i] = -1; 01173 continue; 01174 } 01175 01176 best_feature_h_idx[i] = idx; 01177 feature_per_hyp_count[idx]++; 01178 summed_residuals_per_h[idx] += residualMatrix(i, idx); 01179 //DEBUG 01180 // if (idx >= 0 && feature_per_hyp_count[idx] > 2.1){ 01181 // cv::circle(vis_image, corner_trajectories_filtered_and_transposed[i].back(), 9, cv::Scalar( colormap[3*idx], colormap[3*idx+1], colormap[3*idx+2],0), 5); 01182 // }else{ 01183 // cv::circle(vis_image, corner_trajectories_filtered_and_transposed[i].back(), 9, cv::Scalar( 0 , 0, 0, 0), 5, 1); 01184 // } 01185 // 01186 // 01191 } 01192 01193 vis_image = vis_image_orig.clone(); 01194 for (int i = 0; i < corner_trajectories_filtered_and_transposed.size(); 01195 i++) { 01196 int idx = best_feature_h_idx[i]; 01197 if (idx >= 0 && feature_per_hyp_count[idx] > 5.1) { 01198 //DEBUG 01199 //cv::circle(vis_image, corner_trajectories_filtered_and_transposed[i].back(), 9, cv::Scalar( colormap[3*idx], colormap[3*idx+1], colormap[3*idx+2],0), 5); 01200 } else { //stupid feature 01201 best_feature_h_idx[i] = -1; 01202 } 01203 } 01204 //DEBUG 01205 //cout<<"num feature_per_hyp_count \n"<<feature_per_hyp_count<<endl; 01206 summed_residuals.array() /= feature_per_hyp_count.array(); 01207 01208 //cout<<"summed_residuals \n"<<summed_residuals<<endl; 01209 01210 //cout<<"summed_residuals_per_h \n"<<summed_residuals_per_h<<endl; 01211 01212 summed_residuals_per_h.array() /= feature_per_hyp_count.array(); 01213 //cout<<"summed_residuals_per_h \n"<<summed_residuals_per_h<<endl; 01214 01215 //cout<<"total average residual error "<<summed_residuals_per_h.sum()<<endl; 01216 01217 feature_assocoation = best_feature_h_idx; 01218 float outlier_penalty = num_outliers * 100.0; 01219 //DEBUG 01220 //printf("outlier num %d penalty %f\n", num_outliers, outlier_penalty); 01221 return summed_residuals_per_h.sum(); 01222 01223 } 01224 01225 unsigned char FeatureTracker::colormap[36] = { 255, 0, 0, 0, 255, 0, 0, 0, 255, 01226 255, 255, 0, 255, 0, 255, 0, 255, 255, 127, 0, 0, 0, 127, 0, 0, 0, 127, 01227 127, 127, 0, 127, 0, 127, 0, 127, 127, }; 01228 01229 //unsigned char FeatureTracker::colormap[768] = 01230 // { 150, 150, 150, 01231 // 107, 0, 12, 01232 // 106, 0, 18, 01233 // 105, 0, 24, 01234 // 103, 0, 30, 01235 // 102, 0, 36, 01236 // 101, 0, 42, 01237 // 99, 0, 48, 01238 // 98, 0, 54, 01239 // 97, 0, 60, 01240 // 96, 0, 66, 01241 // 94, 0, 72, 01242 // 93, 0, 78, 01243 // 92, 0, 84, 01244 // 91, 0, 90, 01245 // 89, 0, 96, 01246 // 88, 0, 102, 01247 // 87, 0, 108, 01248 // 85, 0, 114, 01249 // 84, 0, 120, 01250 // 83, 0, 126, 01251 // 82, 0, 131, 01252 // 80, 0, 137, 01253 // 79, 0, 143, 01254 // 78, 0, 149, 01255 // 77, 0, 155, 01256 // 75, 0, 161, 01257 // 74, 0, 167, 01258 // 73, 0, 173, 01259 // 71, 0, 179, 01260 // 70, 0, 185, 01261 // 69, 0, 191, 01262 // 68, 0, 197, 01263 // 66, 0, 203, 01264 // 65, 0, 209, 01265 // 64, 0, 215, 01266 // 62, 0, 221, 01267 // 61, 0, 227, 01268 // 60, 0, 233, 01269 // 59, 0, 239, 01270 // 57, 0, 245, 01271 // 56, 0, 251, 01272 // 55, 0, 255, 01273 // 54, 0, 255, 01274 // 52, 0, 255, 01275 // 51, 0, 255, 01276 // 50, 0, 255, 01277 // 48, 0, 255, 01278 // 47, 0, 255, 01279 // 46, 0, 255, 01280 // 45, 0, 255, 01281 // 43, 0, 255, 01282 // 42, 0, 255, 01283 // 41, 0, 255, 01284 // 40, 0, 255, 01285 // 38, 0, 255, 01286 // 37, 0, 255, 01287 // 36, 0, 255, 01288 // 34, 0, 255, 01289 // 33, 0, 255, 01290 // 32, 0, 255, 01291 // 31, 0, 255, 01292 // 29, 0, 255, 01293 // 28, 0, 255, 01294 // 27, 0, 255, 01295 // 26, 0, 255, 01296 // 24, 0, 255, 01297 // 23, 0, 255, 01298 // 22, 0, 255, 01299 // 20, 0, 255, 01300 // 19, 0, 255, 01301 // 18, 0, 255, 01302 // 17, 0, 255, 01303 // 15, 0, 255, 01304 // 14, 0, 255, 01305 // 13, 0, 255, 01306 // 11, 0, 255, 01307 // 10, 0, 255, 01308 // 9, 0, 255, 01309 // 8, 0, 255, 01310 // 6, 0, 255, 01311 // 5, 0, 255, 01312 // 4, 0, 255, 01313 // 3, 0, 255, 01314 // 1, 0, 255, 01315 // 0, 4, 255, 01316 // 0, 10, 255, 01317 // 0, 16, 255, 01318 // 0, 22, 255, 01319 // 0, 28, 255, 01320 // 0, 34, 255, 01321 // 0, 40, 255, 01322 // 0, 46, 255, 01323 // 0, 52, 255, 01324 // 0, 58, 255, 01325 // 0, 64, 255, 01326 // 0, 70, 255, 01327 // 0, 76, 255, 01328 // 0, 82, 255, 01329 // 0, 88, 255, 01330 // 0, 94, 255, 01331 // 0, 100, 255, 01332 // 0, 106, 255, 01333 // 0, 112, 255, 01334 // 0, 118, 255, 01335 // 0, 124, 255, 01336 // 0, 129, 255, 01337 // 0, 135, 255, 01338 // 0, 141, 255, 01339 // 0, 147, 255, 01340 // 0, 153, 255, 01341 // 0, 159, 255, 01342 // 0, 165, 255, 01343 // 0, 171, 255, 01344 // 0, 177, 255, 01345 // 0, 183, 255, 01346 // 0, 189, 255, 01347 // 0, 195, 255, 01348 // 0, 201, 255, 01349 // 0, 207, 255, 01350 // 0, 213, 255, 01351 // 0, 219, 255, 01352 // 0, 225, 255, 01353 // 0, 231, 255, 01354 // 0, 237, 255, 01355 // 0, 243, 255, 01356 // 0, 249, 255, 01357 // 0, 255, 255, 01358 // 0, 255, 249, 01359 // 0, 255, 243, 01360 // 0, 255, 237, 01361 // 0, 255, 231, 01362 // 0, 255, 225, 01363 // 0, 255, 219, 01364 // 0, 255, 213, 01365 // 0, 255, 207, 01366 // 0, 255, 201, 01367 // 0, 255, 195, 01368 // 0, 255, 189, 01369 // 0, 255, 183, 01370 // 0, 255, 177, 01371 // 0, 255, 171, 01372 // 0, 255, 165, 01373 // 0, 255, 159, 01374 // 0, 255, 153, 01375 // 0, 255, 147, 01376 // 0, 255, 141, 01377 // 0, 255, 135, 01378 // 0, 255, 129, 01379 // 0, 255, 124, 01380 // 0, 255, 118, 01381 // 0, 255, 112, 01382 // 0, 255, 106, 01383 // 0, 255, 100, 01384 // 0, 255, 94, 01385 // 0, 255, 88, 01386 // 0, 255, 82, 01387 // 0, 255, 76, 01388 // 0, 255, 70, 01389 // 0, 255, 64, 01390 // 0, 255, 58, 01391 // 0, 255, 52, 01392 // 0, 255, 46, 01393 // 0, 255, 40, 01394 // 0, 255, 34, 01395 // 0, 255, 28, 01396 // 0, 255, 22, 01397 // 0, 255, 16, 01398 // 0, 255, 10, 01399 // 0, 255, 4, 01400 // 2, 255, 0, 01401 // 8, 255, 0, 01402 // 14, 255, 0, 01403 // 20, 255, 0, 01404 // 26, 255, 0, 01405 // 32, 255, 0, 01406 // 38, 255, 0, 01407 // 44, 255, 0, 01408 // 50, 255, 0, 01409 // 56, 255, 0, 01410 // 62, 255, 0, 01411 // 68, 255, 0, 01412 // 74, 255, 0, 01413 // 80, 255, 0, 01414 // 86, 255, 0, 01415 // 92, 255, 0, 01416 // 98, 255, 0, 01417 // 104, 255, 0, 01418 // 110, 255, 0, 01419 // 116, 255, 0, 01420 // 122, 255, 0, 01421 // 128, 255, 0, 01422 // 133, 255, 0, 01423 // 139, 255, 0, 01424 // 145, 255, 0, 01425 // 151, 255, 0, 01426 // 157, 255, 0, 01427 // 163, 255, 0, 01428 // 169, 255, 0, 01429 // 175, 255, 0, 01430 // 181, 255, 0, 01431 // 187, 255, 0, 01432 // 193, 255, 0, 01433 // 199, 255, 0, 01434 // 205, 255, 0, 01435 // 211, 255, 0, 01436 // 217, 255, 0, 01437 // 223, 255, 0, 01438 // 229, 255, 0, 01439 // 235, 255, 0, 01440 // 241, 255, 0, 01441 // 247, 255, 0, 01442 // 253, 255, 0, 01443 // 255, 251, 0, 01444 // 255, 245, 0, 01445 // 255, 239, 0, 01446 // 255, 233, 0, 01447 // 255, 227, 0, 01448 // 255, 221, 0, 01449 // 255, 215, 0, 01450 // 255, 209, 0, 01451 // 255, 203, 0, 01452 // 255, 197, 0, 01453 // 255, 191, 0, 01454 // 255, 185, 0, 01455 // 255, 179, 0, 01456 // 255, 173, 0, 01457 // 255, 167, 0, 01458 // 255, 161, 0, 01459 // 255, 155, 0, 01460 // 255, 149, 0, 01461 // 255, 143, 0, 01462 // 255, 137, 0, 01463 // 255, 131, 0, 01464 // 255, 126, 0, 01465 // 255, 120, 0, 01466 // 255, 114, 0, 01467 // 255, 108, 0, 01468 // 255, 102, 0, 01469 // 255, 96, 0, 01470 // 255, 90, 0, 01471 // 255, 84, 0, 01472 // 255, 78, 0, 01473 // 255, 72, 0, 01474 // 255, 66, 0, 01475 // 255, 60, 0, 01476 // 255, 54, 0, 01477 // 255, 48, 0, 01478 // 255, 42, 0, 01479 // 255, 36, 0, 01480 // 255, 30, 0, 01481 // 255, 24, 0, 01482 // 255, 18, 0, 01483 // 255, 12, 0, 01484 // 255, 6, 0, 01485 // 255, 0, 0, 01486 // }; 01487 01488 #if NO_ROS 01489 int main( int argc, char** argv ) 01490 { 01491 ros::init(argc, argv, "image_tracker"); 01492 01493 if (argc == 1) 01494 { 01495 std::cerr << "Usage: " << argv[0] << " <images>" << std::endl; 01496 exit(0); 01497 } 01498 cv::namedWindow("tracker", 0); 01499 //DEBUG 01500 //cv::namedWindow("tracker_temp", 0); 01501 01502 int num_images = argc - 1; 01503 int num_images_loaded = 0; 01504 IplImage** all_images = new IplImage*[num_images]; 01505 01506 01507 FeatureTracker ft; 01508 all_images[0] = cvLoadImage(argv[1]); 01509 num_images_loaded++; 01510 cv::Mat img(all_images[0]); 01511 ft.findCorners(img); 01512 01513 cv::imshow("tracker", ft.vis_image); 01514 cv::waitKey(); 01515 01516 int c; 01517 for (int i = 1; i < num_images; i++) { 01518 all_images[i] = cvLoadImage(argv[i+1]); 01519 num_images_loaded++; 01520 printf("loading image %s\n", argv[i+1]); 01521 cv::Mat img(all_images[i]); 01522 ft.tracking_step(img); 01523 01524 if( (i-1) % 5 == 0) { 01525 Timer t; 01526 ft.evluateTrajectories(); 01527 printf("took %f secs\n", t.stop()); 01528 cv::imshow("tracker", ft.vis_image); 01529 c = cvWaitKey(); 01530 if( (char)c == 27 ) 01531 break; 01532 } 01533 01534 } 01535 01536 for (int i = 0; i < num_images_loaded; i++) { 01537 cvReleaseImage(&all_images[i]); 01538 } 01539 delete [] all_images; 01540 01541 } 01542 01543 #else 01544 01545 /*** 01546 * Main method to be run with PR2 robot 01547 */ 01548 01549 int main(int argc, char** argv) { 01550 01551 ros::init(argc, argv, "image_tracker"); 01552 sensor_msgs::CvBridge _bridge; 01553 cv::namedWindow("tracker", 0); 01554 //DEBUG 01555 //cv::namedWindow("tracker_temp", 0); 01556 #if PR2 01557 std::string input_image_topic = 01558 argc == 2 ? argv[1] : "/kinect/rgb/image_color"; 01559 #if !GRASPING 01560 input_image_topic = 01561 argc == 2 ? argv[1] : "/prosilica/image_rect_color"; 01562 #endif 01563 01564 01565 #else 01566 std::string input_image_topic = 01567 argc == 2 ? argv[1] : "/camera/rgb/image_color"; 01568 #endif 01569 01570 FeatureTracker ft; 01571 01572 //convert pcl::PointCloud<pcl::PointXYZRGB> to sensor messages point cloud 2 01573 sensor_msgs::PointCloud2 cloudMessage; 01574 01575 pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1( 01576 new pcl::PointCloud<pcl::PointXYZ>); 01577 01578 sensor_msgs::PointCloud2ConstPtr pclmasg_ptr; 01579 #if PR2 01580 pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>( 01581 "/kinect/depth/points"); 01582 #else 01583 pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>( 01584 "/camera/depth/points"); 01585 01586 #endif 01587 pcl::fromROSMsg(*pclmasg_ptr, *cloud1); 01588 01589 ft.cloud = cloud1; 01590 01591 sensor_msgs::ImageConstPtr imgmasg_ptr = ros::topic::waitForMessage< 01592 sensor_msgs::Image>(input_image_topic, ros::Duration(5.0)); 01593 IplImage* ipl_img = _bridge.imgMsgToCv(imgmasg_ptr, "passthrough"); 01594 cv::Mat img(ipl_img); 01595 #if PR2 && GRASPING 01596 01597 ros::ServiceClient svc = ft.nh.serviceClient<camera_self_filter::mask>( 01598 "self_mask"); 01599 camera_self_filter::mask servicecall; 01600 servicecall.request.header.frame_id = "openni_rgb_optical_frame"; 01601 servicecall.request.header.stamp = ros::Time::now(); 01602 svc.call(servicecall); 01603 sensor_msgs::ImageConstPtr maskptr = boost::make_shared<sensor_msgs::Image>( 01604 boost::ref(servicecall.response.mask_image)); 01605 01606 IplImage* ipl_mask = _bridge.imgMsgToCv(maskptr, "passthrough"); 01607 cvDilate(ipl_mask, ipl_mask, 0, 3); 01608 cvNot(ipl_mask, ipl_mask); 01609 cv::Mat mask(ipl_mask); 01610 01611 cv::Mat ROI_mat = cv::Mat::zeros(img.rows, img.cols, CV_8U); 01612 01613 cv::cvtColor(img, ROI_mat, CV_RGB2GRAY); 01614 cv::Mat img_color = cv::Mat::zeros(mask.rows, mask.cols, CV_8UC3); 01615 cv::Mat img_color2 = cv::Mat::zeros(mask.rows, mask.cols, CV_8UC3); 01616 cv::cvtColor(mask, img_color, CV_GRAY2RGB); 01617 cv::bitwise_and(img, img_color, img); 01618 01619 std::vector<std::vector<cv::Point> > contours; 01620 cv::threshold(mask, mask, 100, 255, CV_THRESH_BINARY); 01621 cv::Canny(mask, mask, 10, 50, 3); 01622 cv::findContours(mask, contours, CV_RETR_LIST, CV_CHAIN_APPROX_NONE); 01623 //DEBUG 01624 //std::cerr<<"contours size: "<<contours.size()<<std::endl; 01625 //cv::imshow("image", img_color); 01626 //cv::waitKey(); 01627 01628 //DEBUG 01629 //cv::imshow("tracker2", mask); 01630 //cv::waitKey(); 01631 01632 ft.findCorners(img, contours, &mask); 01633 #else 01634 ft.findCorners(img); 01635 #endif 01636 01637 cv::imshow("tracker", ft.vis_image); 01638 cv::waitKey(); 01639 01640 char key; 01641 int c; 01642 for (;;) { 01643 01644 01645 pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1( 01646 new pcl::PointCloud<pcl::PointXYZ>); 01647 01648 sensor_msgs::PointCloud2ConstPtr pclmasg_ptr; 01649 #if PR2 01650 pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>( 01651 "/kinect/depth/points"); 01652 #else 01653 pclmasg_ptr = ros::topic::waitForMessage<sensor_msgs::PointCloud2>( 01654 "/camera/depth/points"); 01655 01656 #endif 01657 pcl::fromROSMsg(*pclmasg_ptr, *cloud1); 01658 01659 ft.cloud = cloud1; 01660 01661 sensor_msgs::ImageConstPtr imgmasg_ptr = ros::topic::waitForMessage< 01662 sensor_msgs::Image>(input_image_topic, ros::Duration(5.0)); 01663 IplImage* ipl_img = _bridge.imgMsgToCv(imgmasg_ptr, "passthrough"); 01664 cv::Mat img(ipl_img); 01665 #if PR2 && GRASPING 01666 ros::ServiceClient svc = ft.nh.serviceClient<camera_self_filter::mask>( 01667 "self_mask"); 01668 camera_self_filter::mask servicecall; 01669 servicecall.request.header.frame_id = "openni_rgb_optical_frame"; 01670 servicecall.request.header.stamp = ros::Time::now(); 01671 svc.call(servicecall); 01672 sensor_msgs::ImageConstPtr maskptr = boost::make_shared< 01673 sensor_msgs::Image>( 01674 boost::ref(servicecall.response.mask_image)); 01675 01676 IplImage* ipl_mask = _bridge.imgMsgToCv(maskptr, "passthrough"); 01677 cvDilate(ipl_mask, ipl_mask, 0, 3); 01678 cvNot(ipl_mask, ipl_mask); 01679 cv::Mat mask(ipl_mask); 01680 01681 cv::Mat ROI_mat = cv::Mat::zeros(img.rows, img.cols, CV_8U); 01682 01683 cv::cvtColor(img, ROI_mat, CV_RGB2GRAY); 01684 cv::Mat img_color = cv::Mat::zeros(mask.rows, mask.cols, CV_8UC3); 01685 cv::cvtColor(mask, img_color, CV_GRAY2RGB); 01686 cv::bitwise_and(img, img_color, img); 01687 #endif 01688 01689 Timer t; 01690 ft.tracking_step(img); 01691 ft.evluateTrajectories(); 01692 cv::imshow("tracker", ft.vis_image); 01693 c = cvWaitKey(); 01694 if ((char) c == 27) 01695 break; 01696 #if PR2 && GRASPING //grasping mode 01697 else if ((char) c == 'g') { 01698 01699 ft.grasp(); 01700 01701 } 01702 #endif 01703 printf("took %f secs\n", t.stop()); 01704 01705 } 01706 01707 return 0; 01708 01709 } 01710 01711 #endif 01712 01713 01714