#ifndef NDTFRAME_HH
#define NDTFRAME_HH

#include <sys/time.h>   // gettimeofday()
#include <iostream>     // std::cout in getColoredPointCloud()

#include "pcl/point_cloud.h"
#include "pcl/point_types.h"
#include "pcl/io/pcd_io.h"
#include "pcl/features/feature.h"
#include "pcl/registration/icp.h"
#include "pcl/filters/voxel_grid.h"
#include "opencv2/core/core.hpp"

#include <ndt_map/cell_vector.h>
#include <ndt_map/ndt_map.h>
#include <ndt_map/depth_camera.h>

#include <ndt_feature_reg/ndt_frame_tools.h>
#include <ndt_map/pointcloud_utils.h>

namespace ndt_feature_reg
{

/// Converts a timeval into seconds, expressed as a double.
inline
double getDoubleTime(struct timeval& time)
{
    return time.tv_sec + time.tv_usec * 1e-6;
}

/// Returns the current wall-clock time in seconds.
inline
double getDoubleTime()
{
    struct timeval time;
    gettimeofday(&time, NULL);
    return getDoubleTime(time);
}
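
/* Usage sketch (illustrative, not part of the original header): timing a block
 * of code with the helpers above.
 *
 *   double t0 = ndt_feature_reg::getDoubleTime();
 *   // ... work to be timed ...
 *   double t1 = ndt_feature_reg::getDoubleTime();
 *   std::cout << "elapsed: " << (t1 - t0) << " s" << std::endl;
 */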

class NDTFrame
{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW

    NDTFrame() : supportSize(3), maxVar(0.3), current_res(0.2)
    {
        ndt_map = lslgeneric::NDTMap(&idx_prototype);
    }
    virtual ~NDTFrame()
    {
    }
    NDTFrame(const NDTFrame &other)
    {
        other.img.copyTo(img);
        other.depth_img.copyTo(depth_img);
        kpts = other.kpts;
        pc_kpts = other.pc_kpts;
        pts = other.pts;
        idx_prototype = other.idx_prototype;
        ndt_map = other.ndt_map;
        supportSize = other.supportSize;
        cameraParams = other.cameraParams;
        maxVar = other.maxVar;
        current_res = other.current_res;
        other.dtors.copyTo(dtors);
    }

    /// Drops all keypoint and NDT data; the images and camera parameters are kept.
    void clear()
    {
        maxVar = 0;
        kpts.resize(0);
        pc_kpts.resize(0);
        pts.resize(0);
        idx_prototype = lslgeneric::CellVector();
        ndt_map = lslgeneric::NDTMap(&idx_prototype);
    }

    cv::Mat img;           ///< Intensity or BGR image.
    cv::Mat depth_img;     ///< Depth image registered to img.
    size_t supportSize;    ///< Pixel support region used around each keypoint.
    double maxVar;         ///< Maximum variance passed to the NDT feature extraction.
    double current_res;    ///< Resolution of the dense NDT grid (used when no keypoints are set).
    lslgeneric::DepthCamera<pcl::PointXYZ> cameraParams;
    std::vector<cv::KeyPoint> kpts;
    pcl::PointCloud<pcl::PointXYZ> pc_kpts;
    std::vector<Eigen::Vector4d, Eigen::aligned_allocator<Eigen::Vector4d> > pts;

    lslgeneric::CellVector idx_prototype;
    lslgeneric::NDTMap ndt_map;

    /// Returns the 3D position associated with the given keypoint.
    const pcl::PointXYZ& getKeyPointCenter(int keyPointIdx)
    {
        const pcl::PointXYZ &pt = pc_kpts[keyPointIdx];
        return pt;
    }

    /// Fills pts with the keypoint centers in homogeneous coordinates.
    void assignPts()
    {
        if (pc_kpts.size() == 0)
            computeNDT();
        pts.resize(pc_kpts.size());
        for (size_t i = 0; i < pc_kpts.size(); i++)
        {
            const pcl::PointXYZ& pt = pc_kpts[i];
            pts[i].head(3) = Eigen::Vector3d(pt.x, pt.y, pt.z);
            pts[i](3) = 1.0;
        }
    }

    /// Computes the NDT representation: one cell per keypoint if keypoints are set,
    /// otherwise a dense NDT map over the whole depth image at current_res.
    void computeNDT(bool estimateDI = false, bool nonMean = false)
    {
        if (kpts.size() > 0)
        {
            pc_kpts = ndt_map.loadDepthImageFeatures(depth_img, kpts, supportSize, maxVar, cameraParams, estimateDI, nonMean);
        }
        else
        {
            lslgeneric::LazyGrid idx_prototype_grid(current_res);
            ndt_map = lslgeneric::NDTMap(&idx_prototype_grid);
            ndt_map.loadDepthImage(depth_img, cameraParams);
            ndt_map.computeNDTCells();
        }
    }
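
    /* Usage sketch (illustrative; the ORB detector (OpenCV 3.x API) and the camera
     * parameter values are assumptions, not prescribed by this header):
     *
     *   ndt_feature_reg::NDTFrame frame;
     *   frame.img = gray;          // CV_8UC1 or CV_8UC3 image
     *   frame.depth_img = depth;   // depth image registered to frame.img
     *   frame.cameraParams = lslgeneric::DepthCamera<pcl::PointXYZ>(fx, fy, cx, cy, dist, ds, isFloatImg);
     *
     *   cv::Ptr<cv::ORB> detector = cv::ORB::create();
     *   detector->detect(frame.img, frame.kpts);
     *
     *   frame.computeNDT();   // per-keypoint NDT cells, since frame.kpts is non-empty
     *   frame.assignPts();    // homogeneous keypoint centers in frame.pts
     */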

    /// Converts the depth image into a point cloud colored from img.
    pcl::PointCloud<pcl::PointXYZRGB> getColoredPointCloud()
    {
        lslgeneric::DepthCamera<pcl::PointXYZRGB> cameraParamsLocal(cameraParams.fx, cameraParams.fy, cameraParams.cx,
                cameraParams.cy, cameraParams.dist, cameraParams.ds, cameraParams.isFloatImg);
        cameraParamsLocal.setupDepthPointCloudLookUpTable(depth_img.size());

        pcl::PointCloud<pcl::PointXYZRGB> cloud;
        cameraParamsLocal.convertDepthImageToPointCloud(depth_img, cloud);

        size_t w = img.size().width;
        size_t h = img.size().height;
        cloud.width = w;
        cloud.height = h;
        cloud.is_dense = true;
        const uchar* pimg = img.ptr<uchar>(0);
        std::cout << "img channels " << img.channels() << std::endl;

        uint8_t r = 0, g = 0, b = 0;

        for (size_t i = 0; i < h; i++)
        {
            for (size_t j = 0; j < w; j++)
            {
                size_t idx = i * w + j;
                if (img.channels() == 3)
                {
                    // OpenCV stores 3-channel images as BGR.
                    const cv::Vec3b& bgr = img.at<cv::Vec3b>((int)i, (int)j);
                    b = bgr[0];
                    g = bgr[1];
                    r = bgr[2];
                }
                else if (img.channels() == 1)
                {
                    r = pimg[idx];
                    g = pimg[idx];
                    b = pimg[idx];
                }

                pcl::PointXYZRGB &color = cloud.points[idx];
                color.r = r;
                color.g = g;
                color.b = b;
            }
        }
        return cloud;
    }
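
    /* Usage sketch (illustrative; the output filename is arbitrary):
     *
     *   pcl::PointCloud<pcl::PointXYZRGB> colored = frame.getColoredPointCloud();
     *   pcl::io::savePCDFileBinary("frame_colored.pcd", colored);
     */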

    cv::Mat dtors;   ///< Keypoint descriptors (one row per entry in kpts).
};


/// Estimates the relative pose between two NDTFrames using RANSAC over
/// keypoint correspondences.
class PoseEstimator
{
public:
    PoseEstimator(int NRansac,
                  double maxidx, double maxidd);
    /// Matches the frames and estimates the pose; the result is available
    /// through getTransform() and getInliers().
    size_t estimate(const NDTFrame &f0, const NDTFrame &f1);
    /// Same as above, but uses the supplied matches instead of matching internally.
    size_t estimate(const NDTFrame &f0, const NDTFrame &f1, const std::vector<cv::DMatch>& matches);

    const std::vector<cv::DMatch>& getInliers() const
    {
        return inliers;
    }

    void matchFrames(const NDTFrame& f0, const NDTFrame& f2, std::vector<cv::DMatch>& fwd_matches);

    /// Returns the estimated pose (rotation and translation) as an Eigen::Affine3f.
    inline Eigen::Affine3f getTransform()
    {
        Eigen::Affine3f transl_transform = (Eigen::Affine3f)Eigen::Translation3f(trans[0], trans[1], trans[2]);
        Eigen::Affine3f rot_transform = (Eigen::Affine3f)Eigen::Matrix3f(rot.cast<float>());
        return transl_transform * rot_transform;
    }
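
    /* Usage sketch (illustrative; pcl/common/transforms.h is assumed to be included
     * by the caller, this header does not pull it in):
     *
     *   Eigen::Affine3f T = pe.getTransform();   // relative transform found by estimate()
     *   pcl::PointCloud<pcl::PointXYZ> aligned;
     *   pcl::transformPointCloud(f1.pc_kpts, aligned, T);
     */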

    std::vector<cv::DMatch> matches;   ///< Descriptor matches from the last estimate.
    std::vector<cv::DMatch> inliers;   ///< Matches accepted as RANSAC inliers.

    int numRansac;                     ///< Number of RANSAC iterations.

    bool windowed;                     ///< Use windowed matching (see wx, wy).

    double maxInlierXDist2, maxInlierDDist2;  ///< Squared inlier distance thresholds (maxidx/maxidd).

    double maxDist;
    double minDist;

    cv::Ptr<cv::DescriptorMatcher> matcher;   ///< Descriptor matcher used by matchFrames().
    int wx, wy;

    Eigen::Matrix3d rot;               ///< Estimated rotation.
    Eigen::Quaterniond quat;           ///< Estimated rotation as a quaternion.
    Eigen::Vector3d trans;             ///< Estimated translation.

    bool projectMatches;
};
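
/* End-to-end usage sketch (illustrative; the descriptor extraction step and the
 * RANSAC/threshold values are assumptions, not defaults defined in this header):
 *
 *   ndt_feature_reg::NDTFrame f0, f1;
 *   // ... fill img, depth_img, cameraParams, kpts and dtors for both frames,
 *   //     then call computeNDT() and assignPts() on each ...
 *
 *   ndt_feature_reg::PoseEstimator pe(1000, 0.05, 0.05);
 *   pe.estimate(f0, f1);
 *   std::cout << "inliers: " << pe.getInliers().size() << std::endl;
 *   Eigen::Affine3f T = pe.getTransform();
 */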

} // namespace ndt_feature_reg

#endif