#include <frame_common/frame.h>
#include <sys/time.h>

using namespace Eigen;
using namespace std;
using namespace pcl;

// Return the current wall-clock time in milliseconds.
static double mstime()
{
  timeval tv;
  gettimeofday(&tv, NULL);
  long long ts = tv.tv_sec;
  ts *= 1000000;
  ts += tv.tv_usec;
  return (double)ts * 0.001;
}

namespace frame_common
{

// Set the camera parameters and precompute the projection (iproj),
// disparity-to-3D (disp_to_cart) and 3D-to-disparity (cart_to_disp) matrices.
void Frame::setCamParams(const CamParams &c)
{
  // monocular projection matrix
  cam = c;
  iproj.setZero();
  iproj(0,0) = c.fx;
  iproj(1,1) = c.fy;
  iproj(0,2) = c.cx;
  iproj(1,2) = c.cy;
  iproj(2,2) = 1.0;

  // disparity-to-3D reprojection matrix
  disp_to_cart.setZero();
  disp_to_cart(0,0) = 1/c.fx;
  disp_to_cart(1,1) = 1/c.fy;
  disp_to_cart(0,3) = -c.cx/c.fx;
  disp_to_cart(1,3) = -c.cy/c.fy;
  disp_to_cart(2,3) = 1.0;
  disp_to_cart(3,2) = 1/(c.tx*c.fx);

  // 3D-to-disparity projection matrix
  cart_to_disp.setZero();
  cart_to_disp(0,0) = c.fx;
  cart_to_disp(1,1) = c.fy;
  cart_to_disp(0,2) = c.cx;
  cart_to_disp(1,2) = c.cy;
  cart_to_disp(2,3) = c.fx*c.tx;
  cart_to_disp(3,2) = 1.0;

  // a nonzero baseline indicates a stereo camera
  isStereo = (c.tx > 0.0);
}
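
// Example (sketch): filling CamParams for a hypothetical 640x480 stereo head
// with a 7.5 cm baseline; the numbers are illustrative only.
//
//   frame_common::CamParams cp;
//   cp.fx = 525.0;  cp.fy = 525.0;   // focal lengths, pixels
//   cp.cx = 319.5;  cp.cy = 239.5;   // principal point
//   cp.tx = 0.075;                   // stereo baseline, meters (0 => monocular)
//
//   frame_common::Frame frame;
//   frame.setCamParams(cp);          // precomputes iproj, disp_to_cart, cart_to_disp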

// Project a 3D point in the camera frame into pixel coordinates.
// Returns (u, v, d), where d = fx*tx/z is the stereo disparity.
Eigen::Vector3d Frame::cam2pix(const Eigen::Vector3d &cam_coord) const
{
  double xl = cam.fx * cam_coord[0] + cam.cx * cam_coord[2];
  double y  = cam.fy * cam_coord[1] + cam.cy * cam_coord[2];
  double w  = cam_coord[2];
  double xd = cam.fx * cam.tx;
  double rw = 1.0/w;
  return Eigen::Vector3d(xl*rw, y*rw, xd*rw);
}

// Back-project a pixel with disparity (u, v, d) into a 3D point in the camera
// frame: x = (u-cx)*tx/d, y = (v-cy)*tx/d, z = fx*tx/d (fx is used for both
// image axes).
Eigen::Vector3d Frame::pix2cam(const Eigen::Vector3d &pix_coord) const
{
  double x = pix_coord[0] - cam.cx;
  double y = pix_coord[1] - cam.cy;
  double z = cam.fx;
  double w = pix_coord[2]/cam.tx;
  return Eigen::Vector3d(x/w, y/w, z/w);
}
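
// Round-trip sketch (requires a stereo calibration, i.e. cam.tx > 0): a pixel
// plus disparity back-projected with pix2cam() and re-projected with cam2pix()
// comes back unchanged (given fx == fy, as the reprojection assumes) up to
// floating-point error.
//
//   Eigen::Vector3d pix(320.0, 240.0, 16.0);      // (u, v, disparity)
//   Eigen::Vector3d xyz  = frame.pix2cam(pix);    // 3D point in the camera frame
//   Eigen::Vector3d back = frame.cam2pix(xyz);    // ~(320, 240, 16)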

// Project this frame's keypoints into another camera view, given that view's
// pose (trans, rot); the reprojected keypoints are stored in tkpts.
void Frame::setTKpts(Eigen::Vector4d trans, Eigen::Quaterniond rot)
{
  Vector3d tr;
  tr = trans.head<3>();

  // Q reprojects a pixel+disparity (u, v, d, 1) to a homogeneous 3D point
  Matrix4d Q;
  Q << 1.0, 0.0, 0.0,        -cam.cx,
       0.0, 1.0, 0.0,        -cam.cy,
       0.0, 0.0, 0.0,         cam.fx,
       0.0, 0.0, 1.0/cam.tx,  0;

  // P projects a homogeneous 3D point back to pixel coordinates
  Matrix<double,3,4> P;
  P << cam.fx, 0.0,    cam.cx, 0.0,
       0.0,    cam.fx, cam.cy, 0.0,
       0.0,    0.0,    1.0,    0.0;

  // rigid transform of 3D points into the other view
  Matrix4d T;
  T.setZero();
  Matrix3d R = rot.toRotationMatrix();
  T.block<3,3>(0,0) = R.transpose();
  T.block<3,1>(0,3) = -R*tr;
  T(3,3) = 1.0;

  // composite mapping: pixel+disparity -> 3D point -> transform -> pixel
  P = P*T*Q;

  // reproject each keypoint using its disparity
  tkpts.resize(kpts.size());
  Vector4d v(0.0,0.0,0.0,1.0);
  for (int i=0; i<(int)kpts.size(); i++)
  {
    Vector3d vk;
    v(0) = kpts[i].pt.x;
    v(1) = kpts[i].pt.y;
    v(2) = disps[i];
    vk = P*v;
    tkpts[i].pt.x = vk(0)/vk(2);
    tkpts[i].pt.y = vk(1)/vk(2);
  }
}
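
// Usage sketch (pose values are hypothetical): re-project this frame's
// keypoints under a nearby camera pose, here a 10 cm translation along x with
// identity rotation; the results land in frame.tkpts.
//
//   Eigen::Vector4d t(0.10, 0.0, 0.0, 1.0);
//   Eigen::Quaterniond q = Eigen::Quaterniond::Identity();
//   frame.setTKpts(t, q);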

// FrameProc: detects keypoints, computes descriptors, and fills in stereo
// data for Frame objects.
FrameProc::FrameProc(int v)
{
  // detector: FAST with threshold v, spread over a grid for a more even
  // keypoint distribution (at most 1000 keypoints)
  detector = new cv::GridAdaptedFeatureDetector(new cv::FastFeatureDetector(v), 1000);

  // descriptor extractor: SURF
  extractor = new cv::SurfDescriptorExtractor;

  // stereo defaults
  ndisp = 64;
  doSparse = false;
}

void FrameProc::setFrameDetector(const cv::Ptr<cv::FeatureDetector>& new_detector)
{
  detector = new_detector;
}

void FrameProc::setFrameDescriptor(const cv::Ptr<cv::DescriptorExtractor>& new_extractor)
{
  extractor = new_extractor;
}
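
// Sketch: swapping in a different OpenCV 2.x detector/descriptor pair. ORB is
// used purely as an illustration; any cv::FeatureDetector and
// cv::DescriptorExtractor should work.
//
//   FrameProc fp(25);                                       // FAST threshold 25
//   fp.setFrameDetector(new cv::OrbFeatureDetector());
//   fp.setFrameDescriptor(new cv::OrbDescriptorExtractor());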

// Detect keypoints and compute descriptors for a monocular frame.
void FrameProc::setMonoFrame(Frame& frame, const cv::Mat &img, const cv::Mat& mask)
{
  frame.img = img;

  // keypoints and descriptors
  frame.kpts.clear();
  detector->detect(img, frame.kpts, mask);
  extractor->compute(img, frame.kpts, frame.dtors);

  // placeholder values until stereo disparities are computed
  frame.pts.resize(frame.kpts.size());
  frame.goodPts.assign(frame.kpts.size(), false);
  frame.disps.assign(frame.kpts.size(), 10);
}


// Process a stereo pair: run the mono pipeline on the left image, store the
// right image, and fill in disparities and 3D points.
void FrameProc::setStereoFrame(Frame &frame, const cv::Mat &img, const cv::Mat &imgr, const cv::Mat &left_mask, int nfrac,
                               bool setPointCloud)
{
  setMonoFrame( frame, img, left_mask );
  frame.imgRight = imgr;

  setStereoPoints( frame, nfrac, setPointCloud );
}


// Convenience overload without a left-image mask.
void FrameProc::setStereoFrame(Frame &frame, const cv::Mat &img, const cv::Mat &imgr, int nfrac,
                               bool setPointCloud)
{
  setStereoFrame( frame, img, imgr, cv::Mat(), nfrac, setPointCloud );
}
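
// End-to-end sketch (file names and thresholds are hypothetical):
//
//   cv::Mat left  = cv::imread("left.png",  CV_LOAD_IMAGE_GRAYSCALE);
//   cv::Mat right = cv::imread("right.png", CV_LOAD_IMAGE_GRAYSCALE);
//
//   frame_common::Frame frame;
//   frame.setCamParams(cp);                  // cp as in the setCamParams example
//
//   FrameProc fp(25);
//   fp.setStereoFrame(frame, left, right, 0, false);
//   // frame.kpts, frame.dtors, frame.disps, frame.pts, frame.goodPts are now filled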

// Compute disparities for the frame's keypoints and, optionally, a dense
// point cloud from the whole disparity image.
void FrameProc::setStereoPoints(Frame &frame, int nfrac, bool setPointCloud)
{
  frame.disps.clear();

  // choose the stereo engine: sparse (per-keypoint) or dense, the latter
  // optionally with fractional-disparity interpolation
  FrameStereo *st;
  if (doSparse)
    st = new SparseStereo(frame.img,frame.imgRight,true,ndisp);
  else if (nfrac > 0)
  {
    DenseStereo::ndisp = ndisp;
    st = new DenseStereo(frame.img,frame.imgRight,ndisp,1.0/(double)nfrac);
  }
  else
  {
    DenseStereo::ndisp = ndisp;
    st = new DenseStereo(frame.img,frame.imgRight,ndisp);
  }

  int nkpts = frame.kpts.size();
  frame.goodPts.resize(nkpts);
  frame.pts.resize(nkpts);
  frame.disps.resize(nkpts);

  // look up a disparity for each keypoint and back-project the good ones
  #pragma omp parallel for shared( st )
  for (int i=0; i<nkpts; i++)
  {
    double disp = st->lookup_disparity(frame.kpts[i].pt.x,frame.kpts[i].pt.y);
    frame.disps[i] = disp;
    if (disp > 0.0)
    {
      frame.goodPts[i] = true;
      Vector3d pt(frame.kpts[i].pt.x,frame.kpts[i].pt.y,disp);
      frame.pts[i].head(3) = frame.pix2cam(pt);
      frame.pts[i](3) = 1.0;
    }
    else
      frame.goodPts[i] = false;
  }

  if (!setPointCloud)
  {
    delete st;
    return;
  }

  // build a dense, gray-colored point cloud from the disparity image
  double cx = frame.cam.cx;
  double cy = frame.cam.cy;
  double fx = frame.cam.fx;
  double tx = frame.cam.tx;

  frame.dense_pointcloud.points.resize(0);

  for (int v=0; v<(int)frame.img.rows; v++)
  {
    const uchar *imrow = frame.img.ptr<uchar>(v);
    for (int u=0; u<(int)frame.img.cols; u++, imrow++)
    {
      double disp = st->lookup_disparity(u,v);
      if (disp > 0)
      {
        pcl::PointXYZRGB pt;

        // back-project (u, v, disp) using the same model as pix2cam
        double w = tx / disp;
        pt.x = ((double)u - cx)*w;
        pt.y = ((double)v - cy)*w;
        pt.z = fx*w;

        // gray value replicated into the RGB channels
        uint8_t g = *imrow;
        int32_t rgb = (g << 16) | (g << 8) | g;
        pt.rgb = *(float*)(&rgb);
        frame.dense_pointcloud.points.push_back(pt);
      }
    }
  }

  delete st;
}
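
// Worked example of the depth/disparity relation used above (numbers are
// illustrative): with fx = 525 px and tx = 0.075 m, a disparity of 16 px gives
//   z = fx * tx / disp = 525 * 0.075 / 16 ~= 2.46 m,
// and halving the disparity doubles the depth.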

// Reduce and normal-filter a raw point cloud, then cache per-point data
// (positions, normals, projections) in the frame.
void PointcloudProc::setPointcloud(Frame &frame, const pcl::PointCloud<pcl::PointXYZRGB>& input_cloud) const
{
  reduceCloud(input_cloud, frame.pointcloud);

  // initialize the point-cloud-related fields of the frame
  frame.pl_pts.clear();
  frame.pl_kpts.clear();
  frame.pl_normals.clear();
  frame.pl_ipts.clear();

  unsigned int ptcloudsize = frame.pointcloud.points.size();
  frame.pl_pts.resize(ptcloudsize);
  frame.pl_kpts.resize(ptcloudsize);
  frame.pl_normals.resize(ptcloudsize);
  frame.pl_ipts.resize(ptcloudsize);

  #pragma omp parallel for
  for (unsigned int i=0; i < frame.pointcloud.points.size(); i++)
  {
    PointXYZRGBNormal &pt = frame.pointcloud.points[i];

    frame.pl_pts[i] = Eigen::Vector4d(pt.x, pt.y, pt.z, 1.0);
    frame.pl_normals[i] = Eigen::Vector4d(pt.normal[0], pt.normal[1], pt.normal[2], 1.0);
    frame.pl_kpts[i] = projectPoint(frame.pl_pts[i], frame.cam);
    frame.pl_ipts[i] = -1;     // not yet associated with any point index
  }
}

// Match points between two frames' reduced point clouds, given an initial
// estimate of the relative pose (trans, rot) from frame0 to frame1.
void PointcloudProc::match(const Frame& frame0, const Frame& frame1,
                           const Eigen::Vector3d& trans, const Eigen::Quaterniond& rot,
                           std::vector<cv::DMatch>& matches) const
{
  PointCloud<PointXYZRGBNormal> transformed_cloud;

  // Transform frame0's cloud (and its normals) into frame1's coordinate
  // system: first undo the rotation, then subtract the translation.
  transformPointCloudWithNormals<PointXYZRGBNormal>(frame0.pointcloud, transformed_cloud, Vector3f(0,0,0), rot.cast<float>().conjugate());
  transformPointCloudWithNormals<PointXYZRGBNormal>(transformed_cloud, transformed_cloud, -trans.cast<float>(), Quaternionf(1, 0, 0, 0));

  // Find mutually closest points between the two clouds.
  std::vector<int> f0_indices, f1_indices;
  getMatchingIndices(transformed_cloud, frame1.pointcloud, f0_indices, f1_indices);

  // Convert the index pairs into cv::DMatch entries, keeping only pairs whose
  // normals and positions agree closely.
  matches.clear();

  #pragma omp parallel for shared( transformed_cloud, f0_indices, f1_indices )
  for (unsigned int i=1; i < f0_indices.size(); i++)   // the first candidate pair is skipped
  {
    const PointXYZRGBNormal &pt0 = transformed_cloud.points[f0_indices[i]];
    const PointXYZRGBNormal &pt1 = frame1.pointcloud.points[f1_indices[i]];

    Vector3d norm0(pt0.normal[0], pt0.normal[1], pt0.normal[2]), norm1(pt1.normal[0], pt1.normal[1], pt1.normal[2]);

    double dist = (Vector3d(pt0.x, pt0.y, pt0.z)-Vector3d(pt1.x, pt1.y, pt1.z)).norm();

    // keep the pair if the normals differ by less than 0.5 and the points lie
    // within 0.2 m of each other
    if ((norm0 - norm1).norm() < 0.5 && dist < 0.2)
    {
      #pragma omp critical
      matches.push_back(cv::DMatch(f0_indices[i], f1_indices[i], dist));
    }
  }

  printf("[Frame] Found %d candidate matches, kept %d after filtering.\n", (int)f0_indices.size(), (int)matches.size());
}
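
// Usage sketch (names and pose values are hypothetical): match two frames
// whose clouds were prepared with setPointcloud(), starting from a rough
// estimate of the relative pose.
//
//   PointcloudProc pcproc;
//   std::vector<cv::DMatch> matches;
//   Eigen::Vector3d t(0.05, 0.0, 0.0);
//   Eigen::Quaterniond q = Eigen::Quaterniond::Identity();
//   pcproc.match(frame0, frame1, t, q, matches);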

// Find mutually-nearest-neighbor correspondences between two clouds: a pair
// (i, j) is kept only if output[j] is the closest point to input[i] AND
// input[i] is the closest point to output[j].
void PointcloudProc::getMatchingIndices(const PointCloud<PointXYZRGBNormal>& input,
                                        const PointCloud<PointXYZRGBNormal>& output,
                                        std::vector<int>& input_indices, std::vector<int>& output_indices) const
{
  // kd-trees for nearest-neighbor lookups in both clouds
  KdTreeFLANN<PointXYZRGBNormal> input_tree, output_tree;

  input_tree.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGBNormal> >(input));
  output_tree.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGBNormal> >(output));

  #pragma omp parallel for shared( input_tree, output_tree )
  for (unsigned int i = 0; i < input.points.size(); i++)
  {
    PointXYZRGBNormal input_pt = input.points[i];
    std::vector<int> input_indexvect(1), output_indexvect(1);
    std::vector<float> input_distvect(1), output_distvect(1);

    // nearest neighbor of input[i] in the output cloud
    output_tree.nearestKSearch(input_pt, 1, input_indexvect, input_distvect);

    PointXYZRGBNormal output_pt = output.points[input_indexvect[0]];

    // nearest neighbor of that point back in the input cloud
    input_tree.nearestKSearch(output_pt, 1, output_indexvect, output_distvect);

    // keep the pair only if the two searches agree (mutual nearest neighbors)
    #pragma omp critical
    if (output_indexvect[0] == (int)i)
    {
      input_indices.push_back(i);
      output_indices.push_back(input_indexvect[0]);
    }
  }
}

// Reduce a raw cloud: remove NaNs, crop to a box, voxel-downsample, estimate
// normals, and drop points with high curvature.
void PointcloudProc::reduceCloud(const PointCloud<PointXYZRGB>& input, PointCloud<PointXYZRGBNormal>& output) const
{
  PointCloud<PointXYZRGB> cloud_nan_filtered, cloud_box_filtered, cloud_voxel_reduced;
  PointCloud<Normal> normals;
  PointCloud<PointXYZRGBNormal> cloud_normals;

  std::vector<int> indices;

  // remove NaN points
  removeNaNFromPointCloud(input, cloud_nan_filtered, indices);
  indices.clear();

  // crop to a 200 m box around the origin
  Eigen::Vector4f min_pt(-100, -100, -100, -100);
  Eigen::Vector4f max_pt(100, 100, 100, 100);
  getPointsInBox(cloud_nan_filtered, min_pt, max_pt, indices);

  ExtractIndices<PointXYZRGB> boxfilter;
  boxfilter.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGB> >(cloud_nan_filtered));
  boxfilter.setIndices (boost::make_shared<vector<int> > (indices));
  boxfilter.filter(cloud_box_filtered);

  // downsample with a 5 cm voxel grid
  VoxelGrid<PointXYZRGB> voxelfilter;
  voxelfilter.setInputCloud (boost::make_shared<const PointCloud<PointXYZRGB> > (cloud_box_filtered));
  voxelfilter.setLeafSize (0.05, 0.05, 0.05);
  voxelfilter.filter (cloud_voxel_reduced);

  // estimate normals with a 0.25 m search radius
  NormalEstimation<PointXYZRGB, Normal> normalest;
  normalest.setViewPoint(0, 0, 0);
  normalest.setSearchMethod (boost::make_shared<KdTreeFLANN<PointXYZRGB> > ());
  normalest.setRadiusSearch (0.25);
  normalest.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGB> >(cloud_voxel_reduced));
  normalest.compute(normals);

  pcl::concatenateFields (cloud_voxel_reduced, normals, cloud_normals);

  // keep only low-curvature (roughly planar) points
  PassThrough<PointXYZRGBNormal> normalfilter;
  normalfilter.setFilterFieldName("curvature");
  normalfilter.setFilterLimits(0.0, 0.2);
  normalfilter.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGBNormal> >(cloud_normals));
  normalfilter.filter(output);
}

// Project a 3D point (in the camera frame) into the image: the first two
// components are the pixel coordinates in the left image, the third is the
// corresponding column in the right image of the stereo pair.
Eigen::Vector3d PointcloudProc::projectPoint(Eigen::Vector4d& point, CamParams cam) const
{
  Eigen::Vector3d keypoint;

  keypoint(0) = (cam.fx*point.x()) / point.z() + cam.cx;
  keypoint(1) = (cam.fy*point.y()) / point.z() + cam.cy;
  keypoint(2) = (cam.fx*(point.x()-cam.tx)/point.z() + cam.cx);

  return keypoint;
}
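
// Worked example (illustrative numbers: fx = fy = 525, cx = 319.5, cy = 239.5,
// tx = 0.075): the point (0.1, 0.0, 2.0, 1.0) projects to
//   u_left  = 525*0.1/2.0  + 319.5           = 345.75
//   v       = 525*0.0/2.0  + 239.5           = 239.5
//   u_right = 525*(0.1 - 0.075)/2.0 + 319.5  = 326.0625
// i.e. the returned vector is (u_left, v, u_right), not (u, v, disparity).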

} // namespace frame_common