// This define must come before the first Eigen include, otherwise the
// unsupported sparse module refuses to compile.
#define EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET
#include <Eigen/Eigen>
#include <iostream>
#include <Eigen/Sparse>
#include <unsupported/Eigen/SparseExtra>
#include <unsupported/Eigen/CholmodSupport>
#include <pcl/kdtree/kdtree_flann.h>

#include <hrl_phri_2011/utils.h>
#include <hrl_phri_2011/hsl_rgb_conversions.h>
#include <hrl_phri_2011/pcl_basic.h>

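// Unnormalized squared-exponential (RBF) kernel as a function of distance r
// and length scale l: SE(r, l) = exp(-r^2 / (2 l^2)).  The signal-variance
// factor sigma_f^2 is applied at the call sites; SQ() and PI are assumed to
// come from hrl_phri_2011/utils.h.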
#define SE(r, l) ( std::exp(-(r) * (r) / (2 * (l) * (l))) )

using namespace Eigen;
using namespace boost;

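// Paints color_pc with a heat map of the data values stored in the rgb fields
// of data_pc: values map linearly onto hue from blue (low) to red (high).
// Points without data (not listed in color_inds) are desaturated to gray.
// Assumes color_inds is sorted ascending, as std::set_difference requires.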
void colorizeDataPC(const PCRGB& data_pc, PCRGB& color_pc, const vector<int>& color_inds, double saturation)
{
    vector<float> data;
    for(size_t i=0;i<color_inds.size();i++)
        data.push_back(data_pc.points[color_inds[i]].rgb);
    vector<int> all_inds, null_inds(color_pc.points.size() - color_inds.size());
    for(size_t i=0;i<color_pc.points.size();i++)
        all_inds.push_back(i);
    set_difference(all_inds.begin(), all_inds.end(), color_inds.begin(), color_inds.end(), null_inds.begin());

    float max_val = *std::max_element(data.begin(), data.end());
    ROS_INFO("Max data value: %f", max_val);
    double h, s, l;
    for(size_t i=0;i<color_inds.size();i++) {
        extractHSL(color_pc.points[color_inds[i]].rgb, h, s, l);
        h = 240.0 * data[i] / max_val;
        if(h < 0)
            h = 0;
        if(h > 240.0)
            h = 240.0;
        writeHSL(240.0 - h, saturation, l, color_pc.points[color_inds[i]].rgb);
    }

    for(size_t i=0;i<null_inds.size();i++) {
        extractHSL(color_pc.points[null_inds[i]].rgb, h, s, l);
        writeHSL(h, 0, l, color_pc.points[null_inds[i]].rgb);
    }
}

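// Sparse Gaussian process regressor over point cloud data.  The rgb field of
// each training point carries the scalar function value y_i.  Sparsity comes
// from truncating the SE kernel at a radius of 3 * length_scale, beyond which
// k(r) drops below about 1% of its peak.  Following the standard GP equations
// (Rasmussen & Williams), alpha = K^-1 y and the predictive mean is
// f* = Ks^T alpha.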
class GPRegressor
{
public:
    PCRGB::Ptr input_pc, data_pc;
    pcl::KdTreeFLANN<PRGB> data_kd_tree;
    SparseLLT<SparseMatrix<double>, Cholmod> K_inverse;
    shared_ptr<SparseMatrix<double> > K_sparse, Ks_sparse;
    shared_ptr<DynamicSparseMatrix<double> > K_dyn, Ks_dyn;
    VectorXd y, alpha;
    double length_scale, signal_variance, noise_variance;
    vector<int> nonzero_inds;
    size_t training_size, testing_size;

    GPRegressor();
    void importTrainingData(const PCRGB::Ptr& input_pc);
    void computeK();
    void computeAlpha();
    void computeKs(const PCRGB::Ptr& test_pc);
    void findRegression(const PCRGB::Ptr& test_pc, VectorXd& f_mean);
    double logPseudoLikelihood();
};

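// Reads the kernel hyperparameters from the private ROS parameter namespace;
// all three default to 0.01.  length_scale is in the units of the cloud
// (presumably meters).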
GPRegressor::GPRegressor() :
    data_pc(new PCRGB())
{
    ros::param::param<double>("~length_scale", length_scale, 0.01);
    ros::param::param<double>("~signal_variance", signal_variance, 0.01);
    ros::param::param<double>("~noise_variance", noise_variance, 0.01);
    ROS_INFO("length_scale: %f, signal_variance: %f, noise_variance: %f",
             length_scale, signal_variance, noise_variance);
}

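// Copies the input points into data_pc and caches the training targets y_i
// (read from the rgb field) along with a kd-tree over the training points.
// Note that a radius search around an indexed point always returns at least
// the point itself, so the dists.size() check below only guards against a
// failed search.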
void GPRegressor::importTrainingData(const PCRGB::Ptr& input_pc)
{
    pcl::KdTreeFLANN<PRGB> kd_tree_input;
    kd_tree_input.setInputCloud(input_pc);
    vector<int> inds; vector<float> dists;
    for(size_t i=0;i<input_pc->size();i++) {
        kd_tree_input.radiusSearch(i, 3 * length_scale, inds, dists);
        if(dists.size() != 0)
            data_pc->points.push_back(input_pc->points[i]);
        inds.clear(); dists.clear();
    }
    training_size = data_pc->size();
    y.resize(training_size);
    for(size_t i=0;i<training_size;i++)
        y(i) = data_pc->points[i].rgb;
    data_kd_tree.setInputCloud(data_pc);
}

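// Builds the regularized training covariance K = sigma_f^2 * K_SE +
// sigma_n^2 * I, truncated to point pairs within 3 * length_scale of each
// other.  Since K is symmetric, only the upper triangle (including the
// diagonal) is stored.  PCL's radiusSearch returns *squared* distances,
// hence the sqrt before evaluating the kernel.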
void GPRegressor::computeK()
{
    ROS_INFO("Computing K(X, X) = K");
    K_dyn.reset(new DynamicSparseMatrix<double>(training_size, training_size));
    int nnz = 0;
    vector<int> inds; vector<float> dists;
    for(size_t i=0;i<training_size;i++) {
        data_kd_tree.radiusSearch(i, 3 * length_scale, inds, dists);
        for(size_t j=0;j<dists.size();j++) {
            // radiusSearch reports squared distances
            double r = std::sqrt(dists[j]);
            if(i < (size_t) inds[j])
                K_dyn->coeffRef(i, inds[j]) = SQ(signal_variance) * SE(r, length_scale);
            else
                K_dyn->coeffRef(inds[j], i) = SQ(signal_variance) * SE(r, length_scale);
            nnz++;
        }
        inds.clear(); dists.clear();
        // diagonal entries: k(0) = sigma_f^2, regularized by the sigma_n^2
        // noise variance
        K_dyn->coeffRef(i, i) = SQ(signal_variance) + SQ(noise_variance);
    }
    ROS_INFO("Fill ratio of K (nonzeros / total entries): %f", nnz / (double) (training_size * training_size));
    K_sparse.reset(new SparseMatrix<double>(*K_dyn));
    ROS_INFO("Inverting Covariance Matrix K");
    K_inverse.compute(*K_sparse);
    ROS_INFO("Inversion Completed (success? %d)", K_inverse.succeeded());
}

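// Solves K alpha = y through the precomputed Cholesky factorization, so the
// predictive mean later reduces to the sparse product f* = Ks^T alpha.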
void GPRegressor::computeAlpha()
{
    ROS_INFO("Solving K alpha = y");
    alpha = K_inverse.solve(y);
    ROS_INFO("Alpha computed.");
}

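// Leave-one-out log pseudo-likelihood (cf. Rasmussen & Williams, ch. 5):
//   mu_i      = y_i - alpha_i / [K^-1]_ii      (LOO predictive mean)
//   sigma_i^2 = 1 / [K^-1]_ii                  (LOO predictive variance)
//   log p     = sum_i [ -0.5 log sigma_i^2
//                       - (y_i - mu_i)^2 / (2 sigma_i^2) - 0.5 log(2 pi) ]
// The diagonal of K^-1 is recovered one column at a time by solving
// K e_sol = e_i, one solve per training point.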
double GPRegressor::logPseudoLikelihood()
{
    double u_i_diff, s_i_sq, K_inv_i_i, pred_log_prob_i;
    double loo_log_pred_prob = 0;
    VectorXd e_i(training_size), e_sol;
    for(size_t i=0;i<training_size;i++) {
        e_i.setZero();
        e_i(i) = 1;
        e_sol = K_inverse.solve(e_i);
        K_inv_i_i = e_sol(i);
        u_i_diff = alpha(i) / K_inv_i_i;
        s_i_sq = 1.0 / K_inv_i_i;
        pred_log_prob_i = -0.5 * log(s_i_sq) - SQ(u_i_diff) / (2.0 * s_i_sq) - 0.5 * log(2.0 * PI);
        loo_log_pred_prob += pred_log_prob_i;
    }
    return loo_log_pred_prob;
}

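// Builds the training/test cross-covariance Ks with Ks(i, j) = k(x_i, x*_j),
// using the same sigma_f^2-scaled truncated SE kernel as K so the two
// matrices are consistent.  Test points with no training point within
// 3 * length_scale get an all-zero column (predictive mean 0) and are left
// out of nonzero_inds.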
void GPRegressor::computeKs(const PCRGB::Ptr& test_pc)
{
    testing_size = test_pc->size();
    Ks_dyn.reset(new DynamicSparseMatrix<double>(training_size, testing_size));
    ROS_INFO("Computing K(X, Xtest) = Ks");
    vector<int> inds; vector<float> dists;
    for(size_t i=0;i<testing_size;i++) {
        data_kd_tree.radiusSearch(*test_pc, i, 3 * length_scale, inds, dists);
        for(size_t j=0;j<dists.size();j++)
            Ks_dyn->coeffRef(inds[j], i) = SQ(signal_variance) * SE(std::sqrt(dists[j]), length_scale);
        if(dists.size() != 0)
            nonzero_inds.push_back(i);
        inds.clear(); dists.clear();
    }
    Ks_sparse.reset(new SparseMatrix<double>(*Ks_dyn));
    ROS_INFO("Ks Computed");
    // Rough sanity check: K alpha should reproduce y.  K_sparse stores only
    // the upper triangle, so this product uses half of the symmetric matrix
    // and the printed diffs are only an indicator.
    VectorXd sol = K_sparse->transpose() * alpha;
    for(size_t i=0;i<training_size;i++)
        if(i%30==0)
            ROS_INFO("y: %f, sol: %f, diff: %f", y(i), sol(i), y(i) - sol(i));
}

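// Full regression pipeline: factor K, solve for alpha, report the LOO fit
// quality, then evaluate the predictive mean f* = Ks^T K^-1 y = Ks^T alpha
// at every test point.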
void GPRegressor::findRegression(const PCRGB::Ptr& test_pc, VectorXd& f_mean)
{
    computeK();
    computeAlpha();
    ROS_INFO("LOO log likelihood: %f", logPseudoLikelihood());
    computeKs(test_pc);
    f_mean = Ks_sparse->transpose() * alpha;
    ROS_INFO("GP regression mean computed");
}

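// Usage: gp_regression <data_cloud bag> <stitched_head bag>
// Loads the training cloud (/data_cloud) and the target head cloud
// (/stitched_head), regresses the data onto the head surface, and publishes
// the heat-mapped result on /function_mean.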
int main(int argc, char **argv)
{
    ros::init(argc, argv, "gp_regression");
    if(argc < 3) {
        ROS_ERROR("Usage: gp_regression <data_cloud bag> <stitched_head bag>");
        return 1;
    }

    PCRGB::Ptr input_pc, head_pc;
    vector<PCRGB::Ptr> pc_list;
    readBagTopic<PCRGB>(argv[1], pc_list, "/data_cloud");
    input_pc = pc_list[0];

    pc_list.clear();
    readBagTopic<PCRGB>(argv[2], pc_list, "/stitched_head");
    head_pc = pc_list[0];

    GPRegressor gp_reg;
    gp_reg.importTrainingData(input_pc);
    Eigen::VectorXd f_mean;
    gp_reg.findRegression(head_pc, f_mean);
    ROS_INFO("f_mean size: %d, nzs: %d", (int) f_mean.size(), (int) gp_reg.nonzero_inds.size());

    PCRGB fm_pc, fm_color_pc;
    for(size_t i=0;i<head_pc->points.size();i++) {
        // fm_pc carries the regressed values in its rgb fields; fm_color_pc
        // keeps the original head colors and is recolored below.
        PRGB npt;
        const PRGB& pt = head_pc->points[i];
        npt.x = pt.x; npt.y = pt.y; npt.z = pt.z;
        npt.rgb = f_mean(i);
        fm_pc.points.push_back(npt);
        npt.rgb = pt.rgb;
        fm_color_pc.points.push_back(npt);
    }
    colorizeDataPC(fm_pc, fm_color_pc, gp_reg.nonzero_inds, 100);
    ROS_INFO("Done processing, publishing colored PC to /function_mean");
    fm_color_pc.header.frame_id = "/base_link";
    pubLoop(fm_color_pc, "/function_mean", 1);
    return 0;
}