// Excerpt from the GMMParameterEstimator implementation. The class's own header and the project
// headers for GaussianMixtureModel and the gnuplot visualization are omitted here.
#include <algorithm>
#include <chrono>
#include <cmath>
#include <cstddef>
#include <limits>
#include <random>
#include <sstream>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>
#include <boost/lexical_cast.hpp>
#include <boost/shared_ptr.hpp>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <Eigen/Eigenvalues>
#include <opencv2/ml/ml.hpp>
#include <ros/ros.h>

GMMParameterEstimator::GMMParameterEstimator(unsigned int pNumberDimensions, unsigned int pNumberKernelsMin,
                                             unsigned int pNumberKernelsMax, unsigned int pNumberOfRuns,
                                             unsigned int pNumberOfSyntheticSamples, double pIntervalPosition,
                                             double pIntervalOrientation, std::string pPathOrientationPlots,
                                             unsigned int pAttemptsPerRun)
  : mNumberDimensions(pNumberDimensions)
  , mNumberKernelsMin(pNumberKernelsMin)
  , mNumberKernelsMax(pNumberKernelsMax)
  , mNumberKernels(pNumberKernelsMax - pNumberKernelsMin + 1)  // one candidate per kernel count in [min, max]
  , mNumberOfRuns(pNumberOfRuns)
  , mNumberOfSyntheticSamples(pNumberOfSyntheticSamples)
  , mIntervalPosition(pIntervalPosition)
  , mIntervalOrientation(pIntervalOrientation)
  , mPathOrientationPlots(pPathOrientationPlots)
  , mAttemptsPerRun(pAttemptsPerRun)
{
}
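
// A hedged usage sketch, not taken from the original file: every value below is an illustrative
// assumption, shown only to document the argument order of the constructor.
//
//   GMMParameterEstimator estimator(3,       // dimensionality of the samples (3d positions)
//                                   1, 5,    // smallest and largest number of kernels to try
//                                   3,       // EM runs per candidate kernel number
//                                   100,     // synthetic (noised) samples added per real sample
//                                   0.25,    // interval used to noise position samples
//                                   10.0,    // interval used to noise orientation samples
//                                   "/tmp/orientation_plots",  // output directory for gnuplot files
//                                   10);     // EM attempts per run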

// Adds a single 3d position sample (plus noised synthetic copies) to the training data.
void GMMParameterEstimator::addDatum(Eigen::Vector3d pSample)
{
  // Reject samples whose dimensionality does not match the learner configuration.
  if (mNumberDimensions != 3)
    throw std::invalid_argument("Cannot add a 3d sample to a learner configured for "
                                + boost::lexical_cast<std::string>(mNumberDimensions) + "d samples.");

  // Store the sample itself.
  std::vector<double> datum;
  datum.push_back(pSample[0]);
  datum.push_back(pSample[1]);
  datum.push_back(pSample[2]);
  mData.push_back(datum);

  // Enlarge the training set with noised copies of the sample.
  std::random_device rd;
  std::default_random_engine generator(rd());
  // (the noise distribution over the position interval is defined in code omitted here)
  for (unsigned int i = 0; i < mNumberOfSyntheticSamples; i++)
  {
    std::vector<double> datum;

    datum.push_back(pSample[0] + distribution(generator));
    datum.push_back(pSample[1] + distribution(generator));
    datum.push_back(pSample[2] + distribution(generator));

    mData.push_back(datum);
  }
}
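
// A minimal sketch, not taken from the original file, of what the omitted noise setup above
// could look like; the distribution type and the interval convention are assumptions:
//
//   std::uniform_real_distribution<double> distribution(-mIntervalPosition, mIntervalPosition);
//
// The quaternion overload below presumably uses mIntervalOrientation in the same way for the
// Euler angles.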

// Adds a single orientation sample given as a quaternion (plus noised synthetic copies).
void GMMParameterEstimator::addDatum(Eigen::Quaternion<double> pSample)
{
  // Reject samples whose dimensionality does not match the learner configuration.
  if (mNumberDimensions != 4)
    throw std::invalid_argument("Cannot add a 4d sample to a learner configured for "
                                + boost::lexical_cast<std::string>(mNumberDimensions) + "d samples.");

  // Store the quaternion as a 4d datum in (w, x, y, z) order.
  std::vector<double> datum;
  datum.push_back(pSample.w());
  datum.push_back(pSample.x());
  datum.push_back(pSample.y());
  datum.push_back(pSample.z());
  mData.push_back(datum);

  // Convert the quaternion to intrinsic XYZ Euler angles so noise can be added per axis.
  Eigen::Vector3d ea = pSample.toRotationMatrix().eulerAngles(0, 1, 2);

  // Enlarge the training set with noised copies of the orientation.
  std::random_device rd;
  std::default_random_engine generator(rd());
  // (the noise distribution over the orientation interval is defined in code omitted here)
  for (unsigned int i = 0; i < mNumberOfSyntheticSamples; i++)
  {
    std::vector<double> datum;

    // Perturb each Euler angle and rebuild the rotation matrix.
    Eigen::Matrix3d m(Eigen::AngleAxisd(ea[0] + distribution(generator), Eigen::Vector3d::UnitX())
                    * Eigen::AngleAxisd(ea[1] + distribution(generator), Eigen::Vector3d::UnitY())
                    * Eigen::AngleAxisd(ea[2] + distribution(generator), Eigen::Vector3d::UnitZ()));

    // Store the noised orientation as a quaternion datum.
    Eigen::Quaternion<double> noisedSample(m);
    datum.push_back(noisedSample.w());
    datum.push_back(noisedSample.x());
    datum.push_back(noisedSample.y());
    datum.push_back(noisedSample.z());

    mData.push_back(datum);
  }
}

// Excerpt from the learning routine: EM is run for every candidate kernel number in
// [mNumberKernelsMin, mNumberKernelsMax] and repeated mNumberOfRuns times.

  // Per-candidate statistics: number of free parameters, log-likelihood, BIC score and model.
  std::vector<unsigned int> nparams;
  std::vector<double> llk;
  std::vector<double> bic;
  std::vector<GaussianMixtureModel> model;

  // ...

  bool learningCycleCompleted = false;

  // ...

  // Start with generic (full) covariance matrices.
  bool useGenericMatrices = true;

  // ...

  // Index of the candidate given by kernel number index 'i' and run index 'run'
  // (loop variables from code omitted here) in the flat result buffers above.
  unsigned int offset = i * mNumberOfRuns + run;

  // ...

  learningCycleCompleted = true;

  // ...

  // If EM failed with full covariance matrices, retry with diagonal ones.
  ROS_INFO_STREAM("Attempting to learn diagonal covariance matrix instead of generic one.");
  useGenericMatrices = false;

  // ...

  if (!learningCycleCompleted)
  {
    ROS_ERROR("Learning cycle incomplete. GMM learning failed! Please restart learner!");
    // ...
  }
  // ...
  ROS_INFO("Learning cycle completed.");

  // ...

  // Select the candidate with the best BIC score (the largest one, given the sign convention
  // used in runExpectationMaximization() below).
  unsigned int indexOfBestBic = std::max_element(bic.begin(), bic.end()) - bic.begin();

  // Print an overview of all candidate models; the selected one is marked with "[x]".
  for (unsigned int i = 0; i < mNumberKernels; i++)
  {
    for (unsigned int run = 0; run < mNumberOfRuns; run++)
    {
      std::stringstream msg;

      unsigned int offset = i * mNumberOfRuns + run;

      // Mark the model with the best BIC score.
      if (offset == indexOfBestBic) msg << "[x] ";

      msg << "Kernels: " << mNumberKernelsMin + i
          << " Run: " << run + 1
          << " BIC: " << bic[offset]
          << " LLK: " << llk[offset];

      // ... (msg is logged in code omitted here)
    }
  }

// Excerpt from the routine that plots orientation histograms: for every kernel of the mixture,
// samples are drawn from its normal distribution, converted to Euler angles and binned.
  for (unsigned int i = 0; i < numberOfKernels; i++)
  {
    // ... (the kernel's mean is fetched into 'mean' in code omitted here)

    // Euler angles (roll, pitch, yaw) of the samples drawn from this kernel.
    std::vector<double> histInRoll, histInPitch, histInYaw;

    // Number of samples drawn from the current kernel.
    unsigned int sampleAmount = 100;

    std::vector<Eigen::VectorXd> samples(sampleAmount);
    for (unsigned int s = 0; s < sampleAmount; s++)
      samples.at(s) = Eigen::VectorXd(mean->size());

    // ... (the samples are drawn from the kernel's normal distribution, via drawNormal(), in
    //      code omitted here)

    for (unsigned int j = 0; j < sampleAmount; j++)
    {
      Eigen::VectorXd sample = samples[j];

      // Interpret the 4d sample as a quaternion in (w, x, y, z) order ...
      Eigen::Quaterniond quat = Eigen::Quaterniond(sample[0], sample[1], sample[2], sample[3]);

      // ... and convert it to intrinsic XYZ Euler angles.
      Eigen::Vector3d ea = quat.toRotationMatrix().eulerAngles(0, 1, 2);

      // Wrap angles that left [-pi, pi] back into that range.
      if (ea[0] < -M_PI) ea[0] += 2.0 * M_PI;
      if (ea[1] < -M_PI) ea[1] += 2.0 * M_PI;
      if (ea[2] < -M_PI) ea[2] += 2.0 * M_PI;

      if (ea[0] > M_PI) ea[0] -= 2.0 * M_PI;
      if (ea[1] > M_PI) ea[1] -= 2.0 * M_PI;
      if (ea[2] > M_PI) ea[2] -= 2.0 * M_PI;

      histInRoll.push_back(ea[0]);
      histInPitch.push_back(ea[1]);
      histInYaw.push_back(ea[2]);
    }

    // Timestamp used to build unique file names for the generated plots.
    auto timestamp = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::system_clock::now().time_since_epoch()).count();

    // Histogram range and resolution: the angles were wrapped into [-pi, pi] above.
    double lower = -M_PI;
    double upper = M_PI;
    unsigned int buckets = sampleAmount;

    // Output histograms as (bucket value, relative frequency) pairs.
    std::vector<std::pair<double, double>> histOutRoll(buckets), histOutPitch(buckets), histOutYaw(buckets);

    // ... (calcHistogram() bins the collected angles into the output histograms in code omitted here)

    // Normalize the bucket counts.
    for (unsigned int j = 0; j < histOutRoll.size(); j++)  histOutRoll.at(j).second  /= histOutRoll.size();
    for (unsigned int j = 0; j < histOutPitch.size(); j++) histOutPitch.at(j).second /= histOutPitch.size();
    for (unsigned int j = 0; j < histOutYaw.size(); j++)   histOutYaw.at(j).second   /= histOutYaw.size();

    // Write one gnuplot file per angle; the file name combines the timestamp and the angle name.
    Visualization::GMMGnuplotVisualization orientationVisualizer;
    orientationVisualizer.plotOrientationHistogram(mPathOrientationPlots + "/" + std::to_string(timestamp) + "-roll.gp",  histOutRoll,  "roll");
    orientationVisualizer.plotOrientationHistogram(mPathOrientationPlots + "/" + std::to_string(timestamp) + "-pitch.gp", histOutPitch, "pitch");
    orientationVisualizer.plotOrientationHistogram(mPathOrientationPlots + "/" + std::to_string(timestamp) + "-yaw.gp",   histOutYaw,   "yaw");
  }  // end of the loop over kernels

// Runs OpenCV expectation maximization for a single candidate model with 'nc' kernels.
bool GMMParameterEstimator::runExpectationMaximization(const std::vector<std::vector<double>> data,
                                                       unsigned int nc,
                                                       unsigned int& nparams,
                                                       double& llk,
                                                       double& bic,
                                                       GaussianMixtureModel& model,
                                                       bool useGenericMatrices)
{
  // Use generic (full) covariance matrices unless the caller asked for diagonal ones.
  int covMatType = cv::EM::COV_MAT_GENERIC;
  if (!useGenericMatrices)
    covMatType = cv::EM::COV_MAT_DIAGONAL;

  // Configure OpenCV EM for 'nc' clusters; it stops after a fixed iteration count or once the
  // log-likelihood change drops below a threshold.
  cv::EM myEM = cv::EM(nc, covMatType,
                       cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS,
                                        /* iteration limit and epsilon are set in code omitted here */));

  // Per-sample log-likelihoods returned by training.
  cv::Mat loglikelihoods;

  // Copy the training data into an OpenCV matrix, one row per sample.
  cv::Mat cvdata(data.size(), data.at(0).size(), CV_64FC1);
  for (int i = 0; i < cvdata.rows; i++)
    for (int j = 0; j < cvdata.cols; j++)
      cvdata.at<double>(i, j) = data.at(i).at(j);

  // Run EM; report failure to the caller if training does not succeed.
  if (!myEM.train(cvdata, loglikelihoods))
    return false;

  // Convert the learned covariance matrices from OpenCV to Eigen and validate them.
  std::vector<cv::Mat> cvcovs = myEM.get<std::vector<cv::Mat>>("covs");
  std::vector<boost::shared_ptr<Eigen::MatrixXd>> covs(cvcovs.size());
  for (unsigned int i = 0; i < cvcovs.size(); i++)
  {
    cv::Mat cvcov = cvcovs.at(i);

    // A covariance matrix must be square; non-square results are rejected in code omitted here.
    if (cvcov.rows != cvcov.cols)
    {
      // ...
    }

    // Copy the entries and check that the matrix is symmetric.
    Eigen::MatrixXd cov(cvcov.rows, cvcov.cols);
    for (int j = 0; j < cvcov.rows; j++)
    {
      for (int k = j; k < cvcov.cols; k++)
      {
        double entry = cvcov.at<double>(j, k);

        // ... (the entry is written into 'cov' in code omitted here)

        if (std::abs(entry - cvcov.at<double>(k, j)) >= std::numeric_limits<double>::epsilon())
        {
          ROS_DEBUG_STREAM("Found unsymmetric covariance matrix entries " << cvcov.at<double>(j, k)
                           << " (" << j << "," << k << "), " << cvcov.at<double>(k, j)
                           << " (" << k << "," << j << "). Not a valid covariance matrix.");
          // ...
        }
        // ...
      }
    }

    // Check positive definiteness of the covariance matrix via its eigenvalues.
    Eigen::MatrixXd eigenvalues;
    if (cov.rows() == 3)
    {
      Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> es(cov);
      eigenvalues = es.eigenvalues();
    }
    else if (cov.rows() == 4)
    {
      Eigen::SelfAdjointEigenSolver<Eigen::Matrix4d> es(cov);
      eigenvalues = es.eigenvalues();
    }
    else
      throw std::invalid_argument("In GMMParameterEstimator::runExpectationMaximization(): Found matrix with unsupported number of dimensions (supported are 3,4).");

    for (unsigned int j = 0; j < eigenvalues.size(); j++)
    {
      if (eigenvalues(j) < 0)
      {
        // A negative eigenvalue means the matrix is not a covariance matrix at all.
        ROS_DEBUG_STREAM("Found invalid eigenvalue " << eigenvalues(j)
                         << ": matrix not positive semi-definite, not a valid covariance matrix.");
        // ...
      }
      else if (eigenvalues(j) < std::numeric_limits<double>::epsilon())
      {
        // A (numerically) zero eigenvalue means the matrix is singular and cannot be inverted.
        ROS_DEBUG_STREAM("Found eigenvalue 0: Matrix cannot be inverted, invalid.");
        // ...
      }
    }

    // ... (valid matrices are stored in 'covs' in code omitted here)
  }

  // Sum up the per-sample log-likelihoods reported by EM.
  llk = 0;
  for (int i = 0; i < loglikelihoods.rows; i++)
    llk += loglikelihoods.at<double>(i, 0);

  // Extract the mixture weights.
  cv::Mat cvweights = myEM.get<cv::Mat>("weights");
  std::vector<double> weights(cvweights.cols);
  for (int i = 0; i < cvweights.cols; i++)
    weights.at(i) = cvweights.at<double>(i);

  // Extract the kernel means and convert them to Eigen vectors.
  cv::Mat cvmeans = myEM.get<cv::Mat>("means");
  std::vector<boost::shared_ptr<Eigen::VectorXd>> means(cvmeans.rows);
  for (int i = 0; i < cvmeans.rows; i++)
  {
    Eigen::VectorXd mean(cvmeans.cols);
    for (int j = 0; j < cvmeans.cols; j++)
      mean[j] = cvmeans.at<double>(i, j);

    // ... ('mean' is wrapped in a shared pointer and stored in 'means' in code omitted here)
  }

  // Assemble the mixture from the extracted weights, means and covariance matrices.
  // ('newmodel', the GaussianMixtureModel being built, is declared in code omitted here.)
  for (unsigned int i = 0; i < weights.size(); i++)
  {
    // ... (a kernel is created and its weight and covariance are assigned in code omitted here)
    kernel.mMean = means[i];
    // ...
    newmodel.addKernel(kernel);
  }
  // ...
  newmodel.normalizeWeights();

  // ... (the assembled mixture is handed back through 'model' in code omitted here)

  // Count the free parameters of the learned model for the BIC score below: per kernel, a
  // generic covariance matrix contributes d*(d+1)/2 parameters and the mean another d.
  unsigned int d = model.getKernels().at(0).mMean->size();
  if (useGenericMatrices)
    nparams += (d * (d + 1) / 2 + d) * model.getKernels().size();
  // ... (the diagonal covariance case is handled in code omitted here)

  // ... (a model without any kernels cannot be used:)
  throw std::runtime_error("In GMMParameterEstimator::runExpectationMaximization(): trying to use empty model.");

  // Bayesian Information Criterion in log-likelihood form: the data fit (llk) is rewarded and
  // the model complexity is penalized with the standard 0.5 * nparams * ln(number of samples),
  // so larger scores are better.
  bic = llk - 0.5 * (double) nparams * std::log((double) data.size());

  // ...
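
// A minimal sketch, not part of the original file, making the BIC convention above explicit:
// the score rewards a high log-likelihood and subtracts the 0.5 * k * ln(n) complexity penalty,
// so the candidate with the LARGEST score wins (hence std::max_element in the learning routine).
// The helper name bicScoreSketch is an assumption for illustration only.
static double bicScoreSketch(double logLikelihood, unsigned int numberOfFreeParameters,
                             std::size_t numberOfSamples)
{
  return logLikelihood
         - 0.5 * static_cast<double>(numberOfFreeParameters) * std::log(static_cast<double>(numberOfSamples));
}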

// ---------------------------------------------------------------------------
// Declarations referenced in the excerpts above (cross-references from the headers):
// ---------------------------------------------------------------------------

// Member functions:
GMMParameterEstimator(unsigned int pNumberOfDimensions, unsigned int pNumberOfKernelsMin, unsigned int pNumberOfKernelsMax,
                      unsigned int pNumberOfRuns, unsigned int pNumberOfSyntheticSamples, double pIntervalPosition,
                      double pIntervalOrientation, std::string pPathOrientationPlots, unsigned int pAttemptsPerKernel);
void addDatum(Eigen::Vector3d pSample);
void getModel(GaussianMixtureModel& gmm);
bool runExpectationMaximization(const std::vector<std::vector<double>> data, unsigned int nc, unsigned int& nparams,
                                double& llk, double& bic, GaussianMixtureModel& model, bool useGenericMatrices = true);
unsigned int getNumberOfKernels();
std::vector<PSMLearner::GaussianKernel> getKernels() const;

// Member variables:
unsigned int mNumberDimensions;
unsigned int mNumberKernelsMin;
unsigned int mNumberKernels;
unsigned int mNumberOfRuns;
unsigned int mNumberOfSyntheticSamples;
double mIntervalOrientation;
std::string mPathOrientationPlots;
unsigned int mAttemptsPerRun;
std::vector<std::vector<double>> mData;
GaussianMixtureModel mBestGMM;
boost::shared_ptr<Eigen::VectorXd> mMean;
boost::shared_ptr<Eigen::MatrixXd> mCovariance;

// Static helpers:
static void drawNormal(const Eigen::VectorXd& mean, const Eigen::MatrixXd& cov, unsigned int amount,
                       std::vector<Eigen::VectorXd>& samples);
static void calcHistogram(double lower, double upper, unsigned int buckets, std::vector<double> in,
                          std::vector<std::pair<double, double>>& out);

// Logging macros (from ros/console.h):
#define ROS_DEBUG_STREAM(args)
#define ROS_INFO_STREAM(args)