#include <boost/math/distributions/chi_squared.hpp>

/// Quantile of a chi-squared distribution with the given degrees of freedom, evaluated at probability alpha.
static double Chi2inv(const double alpha, const size_t dofs) {
  boost::math::chi_squared_distribution<double> chi2(dofs);
  return boost::math::quantile(chi2, alpha);
}
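For intuition, here is a tiny standalone example of the quantile call above (only Boost.Math required); with alpha = 0.99 and 3 degrees of freedom the quantile is about 11.34. The chosen alpha and dofs are illustrative.

#include <boost/math/distributions/chi_squared.hpp>
#include <iostream>

int main() {
  boost::math::chi_squared_distribution<double> chi2(3);        // 3 degrees of freedom
  std::cout << boost::math::quantile(chi2, 0.99) << std::endl;  // ~11.34
}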
template<class GncParameters>
class GncOptimizer {
  // ...

  /// Constructor.
  GncOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues,
               const GncParameters& params = GncParameters())
      : state_(initialValues), params_(params) {
    // Make sure all noise models are Gaussian (strip any robust wrapper).
    nfg_.resize(graph.size());
    for (size_t i = 0; i < graph.size(); i++) {
      auto factor = boost::dynamic_pointer_cast<NoiseModelFactor>(graph[i]);
      auto robust = boost::dynamic_pointer_cast<noiseModel::Robust>(factor->noiseModel());
      // If the factor has a robust loss, keep only its underlying Gaussian noise model.
      nfg_[i] = robust ? factor->cloneWithNewNoiseModel(robust->noise()) : factor;
    }
    // ... (rest of constructor omitted in this listing)
  }
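For context on the robust-model stripping above, a small sketch using GTSAM's noiseModel API (the Huber threshold is hypothetical): a factor whose noise model is wrapped in noiseModel::Robust contributes only its underlying Gaussian, robust->noise(), to nfg_, since GNC applies its own robustness through the weights.

#include <gtsam/linear/NoiseModel.h>

using namespace gtsam;

int main() {
  auto gaussian = noiseModel::Isotropic::Sigma(3, 0.1);
  // Wrap the Gaussian in a Huber robust loss (hypothetical threshold 1.345).
  auto robust = noiseModel::Robust::Create(
      noiseModel::mEstimator::Huber::Create(1.345), gaussian);
  // GncOptimizer would keep only the underlying Gaussian part, robust->noise().
  robust->noise()->print("underlying Gaussian model\n");
}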
  /// Set the maximum weighted residual error for an inlier (same threshold for all factors).
  void setInlierCostThresholds(const double inth) {
    barcSq_ = inth * Vector::Ones(nfg_.size());
  }

  /// Set the inlier thresholds from the probability alpha that inlier residuals fall below them.
  void setInlierCostThresholdsAtProbability(const double alpha) {
    barcSq_ = Vector::Ones(nfg_.size());
    for (size_t k = 0; k < nfg_.size(); k++) {
      // The factor error is 0.5 * r^2, hence the 0.5 in front of the chi-squared quantile.
      barcSq_[k] = 0.5 * Chi2inv(alpha, nfg_[k]->dim());
    }
  }
  /// Compute optimal solution using graduated non-convexity.
  Values optimize() {
    // Start by assuming all measurements are inliers.
    weights_ = Vector::Ones(nfg_.size());
    BaseOptimizer baseOptimizer(nfg_, state_);
    Values result = baseOptimizer.optimize();
    double mu = initializeMu();
    double prev_cost = nfg_.error(result);
    double cost = 0.0;

    // Degenerate case: the maximum residual at initialization is already small
    // (initializeMu signals this via mu <= 0), so there is nothing for GNC to do.
    if (mu <= 0) {
      if (params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
        std::cout << "GNC Optimizer stopped because maximum residual at "
                     "initialization is small." << std::endl;
      }
      if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
        result.print("result\n");
        std::cout << "mu: " << mu << std::endl;
      }
      return result;
    }

    size_t iter;
    for (iter = 0; iter < params_.maxIterations; iter++) {
      if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
        std::cout << "iter: " << iter << std::endl;
        result.print("result\n");
        std::cout << "mu: " << mu << std::endl;
        std::cout << "weights: " << weights_ << std::endl;
      }
      // Weights update, followed by a variable update on the re-weighted graph.
      weights_ = calculateWeights(result, mu);
      NonlinearFactorGraph graph_iter = this->makeWeightedGraph(weights_);
      BaseOptimizer baseOptimizer_iter(graph_iter, state_);
      result = baseOptimizer_iter.optimize();

      // Stopping condition.
      cost = graph_iter.error(result);
      if (checkConvergence(mu, weights_, cost, prev_cost)) break;

      // Increase nonconvexity and get ready for the next iteration.
      mu = updateMu(mu);
      prev_cost = cost;

      if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
        std::cout << "previous cost: " << prev_cost << std::endl;
        std::cout << "current cost: " << cost << std::endl;
      }
    }
    if (params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
      std::cout << "final iterations: " << iter << std::endl;
      std::cout << "final mu: " << mu << std::endl;
      std::cout << "final weights: " << weights_ << std::endl;
      std::cout << "previous cost: " << prev_cost << std::endl;
      std::cout << "current cost: " << cost << std::endl;
    }
    return result;
  }
  /// Initialize the GNC parameter mu such that the surrogate loss is approximately convex (Remark 5 in the GNC paper).
  double initializeMu() const {
    double mu_init = 0.0;
    switch (params_.lossType) {
      case GncLossType::GM:
        // GM: the surrogate cost is convex for large mu; take the max over factors.
        for (size_t k = 0; k < nfg_.size(); k++) {
          mu_init = std::max(mu_init, 2 * nfg_[k]->error(state_) / barcSq_[k]);
        }
        return mu_init;
      case GncLossType::TLS:
        // TLS: the surrogate cost is convex for mu close to zero; take the min over factors.
        mu_init = std::numeric_limits<double>::infinity();
        for (size_t k = 0; k < nfg_.size(); k++) {
          double rk = nfg_[k]->error(state_);
          mu_init = (2 * rk - barcSq_[k]) > 0 ?  // only factors with large enough residual constrain mu
              std::min(mu_init, barcSq_[k] / (2 * rk - barcSq_[k])) : mu_init;
        }
        // If mu <= 0 or mu is infinite, return -1, which terminates the main GNC loop.
        return mu_init > 0 && !std::isinf(mu_init) ? mu_init : -1;
      default:
        throw std::runtime_error(
            "GncOptimizer::initializeMu: called with unknown loss type.");
    }
  }
  /// Update the GNC parameter mu to gradually increase nonconvexity.
  double updateMu(const double mu) const {
    switch (params_.lossType) {
      case GncLossType::GM:
        // Reduce mu, saturating at 1 (the original GM loss is recovered for mu -> 1).
        return std::max(1.0, mu / params_.muStep);
      case GncLossType::TLS:
        // Increase mu at each iteration (the original TLS loss is recovered for mu -> inf).
        return mu * params_.muStep;
      default:
        throw std::runtime_error(
            "GncOptimizer::updateMu: called with unknown loss type.");
    }
  }
  /// Check if we have reached the value of mu for which the surrogate loss matches the original loss.
  bool checkMuConvergence(const double mu) const {
    bool muConverged = false;
    switch (params_.lossType) {
      case GncLossType::GM:
        muConverged = std::fabs(mu - 1.0) < 1e-9;  // mu = 1 recovers the original GM loss
        break;
      case GncLossType::TLS:
        muConverged = false;  // mu grows unbounded, so convergence is never declared on mu alone
        break;
      default:
        throw std::runtime_error(
            "GncOptimizer::checkMuConvergence: called with unknown loss type.");
    }
    if (muConverged && params_.verbosity >= GncParameters::Verbosity::SUMMARY)
      std::cout << "muConverged = true " << std::endl;
    return muConverged;
  }
  /// Check convergence of relative cost differences.
  bool checkCostConvergence(const double cost, const double prev_cost) const {
    bool costConverged = std::fabs(cost - prev_cost) / std::max(prev_cost, 1e-7)
        < params_.relativeCostTol;
    if (costConverged && params_.verbosity >= GncParameters::Verbosity::SUMMARY)
      std::cout << "checkCostConvergence = true " << std::endl;
    return costConverged;
  }
  /// Check convergence of weights to binary values.
  bool checkWeightsConvergence(const Vector& weights) const {
    bool weightsConverged = false;
    switch (params_.lossType) {
      case GncLossType::GM:
        weightsConverged = false;  // for GM there is no clear binary convergence of the weights
        break;
      case GncLossType::TLS:
        weightsConverged = true;
        for (int i = 0; i < weights.size(); i++) {
          if (std::fabs(weights[i] - std::round(weights[i]))
              > params_.weightsTol) {
            weightsConverged = false;
            break;
          }
        }
        break;
      default:
        throw std::runtime_error(
            "GncOptimizer::checkWeightsConvergence: called with unknown loss type.");
    }
    if (weightsConverged
        && params_.verbosity >= GncParameters::Verbosity::SUMMARY)
      std::cout << "weightsConverged = true " << std::endl;
    return weightsConverged;
  }
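A standalone sketch of the binary-weights test used for TLS, with hypothetical weights and tolerances (no GTSAM dependency):

#include <Eigen/Core>
#include <cmath>
#include <iostream>

// True if every weight is within tol of 0 or 1.
bool nearlyBinary(const Eigen::VectorXd& w, double tol) {
  for (int i = 0; i < w.size(); i++)
    if (std::fabs(w[i] - std::round(w[i])) > tol) return false;
  return true;
}

int main() {
  Eigen::VectorXd w(3);
  w << 1.0, 0.02, 0.99;
  std::cout << nearlyBinary(w, 0.01) << " "
            << nearlyBinary(w, 0.05) << std::endl;  // prints "0 1"
}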
  /// Check for convergence between consecutive GNC iterations.
  bool checkConvergence(const double mu, const Vector& weights,
                        const double cost, const double prev_cost) const {
    return checkCostConvergence(cost, prev_cost)
        || checkWeightsConvergence(weights) || checkMuConvergence(mu);
  }
  /// Create a graph where each factor is weighted by the GNC weights.
  NonlinearFactorGraph makeWeightedGraph(const Vector& weights) const {
    NonlinearFactorGraph newGraph;
    newGraph.resize(nfg_.size());
    for (size_t i = 0; i < nfg_.size(); i++) {
      auto factor = boost::dynamic_pointer_cast<NoiseModelFactor>(nfg_[i]);
      auto noiseModel = boost::dynamic_pointer_cast<noiseModel::Gaussian>(
          factor->noiseModel());
      if (noiseModel) {
        // Scale the information matrix of each factor by its GNC weight.
        auto newNoiseModel = noiseModel::Gaussian::Information(
            weights[i] * noiseModel->information());
        newGraph[i] = factor->cloneWithNewNoiseModel(newNoiseModel);
      } else {
        throw std::runtime_error(
            "GncOptimizer::makeWeightedGraph: unexpected non-Gaussian noise model.");
      }
    }
    return newGraph;
  }
  /// Calculate the GNC weights.
  Vector calculateWeights(const Values& currentEstimate, const double mu) {
    Vector weights = Vector::Ones(nfg_.size());

    // Do not update the weights of measurements the user declared to be known inliers.
    std::vector<size_t> allWeights;
    for (size_t k = 0; k < nfg_.size(); k++) {
      allWeights.push_back(k);
    }
    std::vector<size_t> unknownWeights;
    std::set_difference(allWeights.begin(), allWeights.end(),
                        params_.knownInliers.begin(),
                        params_.knownInliers.end(),
                        std::inserter(unknownWeights, unknownWeights.begin()));

    // Update the remaining weights.
    switch (params_.lossType) {
      case GncLossType::GM: {  // GM weight update
        for (size_t k : unknownWeights) {
          double u2_k = nfg_[k]->error(currentEstimate);  // squared (whitened) residual
          weights[k] = std::pow(
              (mu * barcSq_[k]) / (u2_k + mu * barcSq_[k]), 2);
        }
        return weights;
      }
      case GncLossType::TLS: {  // TLS weight update
        double upperbound = (mu + 1) / mu * barcSq_.maxCoeff();
        double lowerbound = mu / (mu + 1) * barcSq_.minCoeff();
        for (size_t k : unknownWeights) {
          double u2_k = nfg_[k]->error(currentEstimate);  // squared (whitened) residual
          if (u2_k >= upperbound) {
            weights[k] = 0;
          } else if (u2_k <= lowerbound) {
            weights[k] = 1;
          } else {
            weights[k] = std::sqrt(barcSq_[k] * mu * (mu + 1) / u2_k) - mu;
          }
        }
        return weights;
      }
      default:
        throw std::runtime_error(
            "GncOptimizer::calculateWeights: called with unknown loss type.");
    }
  }
GncOptimizer member reference:

GncParameters::OptimizerType BaseOptimizer
    For each parameter type, the corresponding optimizer: e.g., GaussNewtonParams -> GaussNewtonOptimizer.
GncOptimizer(const NonlinearFactorGraph &graph, const Values &initialValues, const GncParameters &params = GncParameters())
    Constructor.
Values optimize()
    Compute optimal solution using graduated non-convexity.
static double Chi2inv(const double alpha, const size_t dofs)
    Quantile of the chi-squared distribution with the given degrees of freedom, evaluated at probability alpha.
void setInlierCostThresholds(const double inth)
    Set the maximum weighted residual error for an inlier (same threshold for all factors).
void setInlierCostThresholds(const Vector &inthVec)
    Set the maximum weighted residual error for an inlier, per factor.
void setInlierCostThresholdsAtProbability(const double alpha)
    Set the inlier thresholds from the probability alpha that inlier residuals fall below them.
double initializeMu() const
    Initialize the GNC parameter mu such that the loss is approximately convex (Remark 5 in the GNC paper).
double updateMu(const double mu) const
    Update the GNC parameter mu to gradually increase nonconvexity.
bool checkMuConvergence(const double mu) const
    Check if we have reached the value of mu for which the surrogate loss matches the original loss.
bool checkCostConvergence(const double cost, const double prev_cost) const
    Check convergence of relative cost differences.
bool checkWeightsConvergence(const Vector &weights) const
    Check convergence of weights to binary values.
bool checkConvergence(const double mu, const Vector &weights, const double cost, const double prev_cost) const
    Check for convergence between consecutive GNC iterations.
NonlinearFactorGraph makeWeightedGraph(const Vector &weights) const
    Create a graph where each factor is weighted by the GNC weights.
Vector calculateWeights(const Values &currentEstimate, const double mu)
    Calculate the GNC weights.
const NonlinearFactorGraph & getFactors() const
    Access a copy of the internal factor graph.
const Values & getState() const
    Access a copy of the internal values.
const GncParameters & getParams() const
    Access a copy of the parameters.
const Vector & getWeights() const
    Access a copy of the GNC weights.
const Vector & getInlierCostThresholds() const
    Get the inlier thresholds.
bool equals(const GncOptimizer &other, double tol = 1e-9) const
    Equals.
NonlinearFactorGraph nfg_
    Original factor graph to be solved by GNC.
Values state_
    Initial values to be used at each iteration by GNC.
GncParameters params_
    GNC parameters.
Vector weights_
    Weights associated with each factor in GNC (this could be a local variable in optimize, but it is useful to make it accessible from outside).
Vector barcSq_
    Inlier thresholds: a factor is considered an inlier if factor.error() < barcSq_[i], where i is the position of the factor in the factor graph.