30 #ifndef RTABMAP_FLANN_AUTOTUNED_INDEX_H_
31 #define RTABMAP_FLANN_AUTOTUNED_INDEX_H_
// NOTE(review): fragment of a template function declaration returning an
// owning NNIndex<Distance>* — presumably the index factory; its parameter
// list and body are elided from this chunk, so the construction logic
// cannot be confirmed here.
49 template<
typename Distance>
50 inline NNIndex<Distance>*
// Constructor for the autotuned-index parameter set.  Each argument is
// stored under a string key in the underlying IndexParams map so that the
// autotuner can read it back later via get_param<>():
//   target_precision — desired search precision, default 0.8
//   build_weight     — relative importance of build time, default 0.01
//   memory_weight    — relative importance of memory footprint, default 0
//   sample_fraction  — fraction of the dataset used while tuning, default 0.1
57 AutotunedIndexParams(
float target_precision = 0.8,
float build_weight = 0.01,
float memory_weight = 0,
float sample_fraction = 0.1)
// Persist the four tuning weights into the parameter map.
61 (*this)[
"target_precision"] = target_precision;
63 (*this)[
"build_weight"] = build_weight;
65 (*this)[
"memory_weight"] = memory_weight;
67 (*this)[
"sample_fraction"] = sample_fraction;
// Index wrapper that selects the best underlying index type and parameters
// for a given dataset at build time (autotuning), then delegates operations
// to that chosen index (bestIndex_ is used further below in this file).
72 template <
typename Distance>
73 class AutotunedIndex :
public NNIndex<Distance>
// Self-type alias — presumably used by clone/serialization machinery;
// the surrounding class body is partially elided in this chunk.
81 typedef AutotunedIndex<Distance>
IndexType;
// Fragment of the build routine: once autotuning has finished, report the
// selected build parameters and search parameters between separator rules.
138 Logger::info(
"----------------------------------------------------\n");
139 Logger::info(
"Autotuned parameters:\n");
142 Logger::info(
"----------------------------------------------------\n");
148 Logger::info(
"----------------------------------------------------\n");
149 Logger::info(
"Search parameters:\n");
152 Logger::info(
"----------------------------------------------------\n");
// Incremental insertion is forwarded verbatim to the tuned inner index.
167 bestIndex_->addPoints(points, rebuild_threshold);
// Boost-style single serialize() fragment: serializes the NNIndex base
// subobject, then branches on the archive direction (is_saving/is_loading).
179 template<
typename Archive>
// Serialize base-class state through an upcast of `this`.
184 ar & *
static_cast<NNIndex<Distance>*
>(
this);
// When saving, record which algorithm the autotuner picked so that a later
// load can reconstruct the right concrete index type.
192 if (Archive::is_saving::value) {
193 index_type = get_param<flann_algorithm_t>(
bestParams_,
"algorithm");
// Loading branch — body elided in this chunk; presumably rebuilds
// bestIndex_ from the stored algorithm type. TODO(review): confirm.
198 if (Archive::is_loading::value) {
// Signature fragments of the search overloads: a const member taking
// SearchParams, and two overloads writing per-query result vectors
// (indices and distances). Bodies are elided here; they presumably
// delegate to bestIndex_ like the other forwarding members in this class.
236 const SearchParams& params)
const
247 std::vector< std::vector<size_t> >& indices,
248 std::vector<std::vector<DistanceType> >& dists,
276 std::vector< std::vector<size_t> >& indices,
277 std::vector<std::vector<DistanceType> >& dists,
// Cost evaluation for one candidate KMeans tree configuration: log the
// parameters under test, time the build, and fill the CostData record.
376 Logger::info(
"KMeansTree using params: max_iterations=%d, branching=%d\n",
377 get_param<int>(cost.params,
"iterations"),
378 get_param<int>(cost.params,
"branching"));
// t is presumably a timer started around the (elided) buildIndex() call;
// its .value is read here as the elapsed build time.
384 float buildTime = (
float)
t.value;
// Memory cost is normalized by the dataset's own footprint, so 1.0 means
// "index adds as much memory again as the data itself".
390 cost.memoryCost = (kmeans.usedMemory() + datasetMemory) / datasetMemory;
391 cost.searchTimeCost = searchTime;
392 cost.buildTimeCost = buildTime;
393 Logger::info(
"KMeansTree buildTime=%g, searchTime=%g, build_weight=%g\n", buildTime, searchTime,
build_weight_);
// Cost evaluation for one candidate KD-tree configuration — mirrors the
// KMeans evaluation above: log params, time the build, record the costs.
403 Logger::info(
"KDTree using params: trees=%d\n", get_param<int>(cost.params,
"trees"));
// Elapsed build time read from the timer after the (elided) build call.
409 float buildTime = (
float)
t.value;
// Memory cost relative to the raw dataset footprint.
415 cost.memoryCost = (
kdtree.usedMemory() + datasetMemory) / datasetMemory;
416 cost.searchTimeCost = searchTime;
417 cost.buildTimeCost = buildTime;
418 Logger::info(
"KDTree buildTime=%g, searchTime=%g\n", buildTime, searchTime);
// KMeans parameter-space exploration: every (iterations, branching)
// combination from the two grids below becomes one CostData candidate
// appended to `costs` (the evaluation loop body is partially elided).
472 Logger::info(
"KMEANS, Step 1: Exploring parameter space\n");
475 int maxIterations[] = { 1, 5, 10, 15 };
476 int branchingFactors[] = { 16, 32, 64, 128, 256 };
// Reserve up front so candidate insertion below never reallocates.
479 costs.reserve(costs.size() + kmeansParamSpaceSize);
487 cost.params[
"iterations"] = maxIterations[
i];
488 cost.params[
"branching"] = branchingFactors[
j];
491 costs.push_back(cost);
// KD-tree parameter-space exploration: one CostData candidate per tested
// tree count (structure parallels the KMeans exploration above).
520 Logger::info(
"KD-TREE, Step 1: Exploring parameter space\n");
523 int testTrees[] = { 1, 4, 8, 16, 32 };
529 cost.params[
"trees"] = testTrees[
i];
532 costs.push_back(cost);
// Build-parameter estimation fragment. Overall shape visible here:
//  1) sample the dataset and compute a linear-scan ground truth,
//  2) collect CostData candidates (linear + the explorations above),
//  3) pick the candidate minimizing a weighted time(+memory) cost.
562 std::vector<CostData> costs;
// Cap the test sample at 1000 points (or 10% of the tuning sample).
565 int testSampleSize =
std::min(sampleSize / 10, 1000);
567 Logger::info(
"Entering autotuning, dataset size: %d, sampleSize: %d, testSampleSize: %d, target precision: %g\n",
dataset_.
rows, sampleSize, testSampleSize,
target_precision_);
// Degenerate case: dataset too small to tune meaningfully — fall back to
// a linear index (fallback body elided in this chunk).
571 if (testSampleSize < 10) {
572 Logger::info(
"Choosing linear, dataset too small\n");
582 Logger::info(
"Computing ground truth... \n");
// Repeat the exact search until at least 0.2 s elapsed so the per-repeat
// timing below is measured over a meaningful interval.
587 while (
t.value<0.2) {
// Linear scan baseline: zero build time and zero extra memory, search
// time averaged over the timing repeats.
594 CostData linear_cost;
595 linear_cost.searchTimeCost = (
float)
t.value/repeats;
596 linear_cost.buildTimeCost = 0;
597 linear_cost.memoryCost = 0;
600 costs.push_back(linear_cost);
603 Logger::info(
"Autotuning parameters...\n");
// Pass 1: find the smallest weighted time cost
// (buildTime * build_weight_ + searchTime) across all candidates.
608 float bestTimeCost = costs[0].buildTimeCost *
build_weight_ + costs[0].searchTimeCost;
609 for (
size_t i = 0;
i < costs.size(); ++
i) {
610 float timeCost = costs[
i].buildTimeCost *
build_weight_ + costs[
i].searchTimeCost;
611 Logger::debug(
"Time cost: %g\n", timeCost);
612 if (timeCost < bestTimeCost) {
613 bestTimeCost = timeCost;
616 Logger::debug(
"Best time cost: %g\n", bestTimeCost);
// Pass 2: normalize each candidate's time cost by the best one and pick
// the overall minimum. Guarded by bestTimeCost > 0 to avoid division by
// zero when all measured times are zero.
619 if (bestTimeCost > 0) {
620 float bestCost = (costs[0].buildTimeCost *
build_weight_ + costs[0].searchTimeCost) / bestTimeCost;
621 for (
size_t i = 0;
i < costs.size(); ++
i) {
// NOTE(review): the expression continues on an elided original line (623)
// — presumably the memory-weight term (memory_weight_ * memoryCost).
622 float crtCost = (costs[
i].buildTimeCost *
build_weight_ + costs[
i].searchTimeCost) / bestTimeCost +
624 Logger::debug(
"Cost: %g\n", crtCost);
625 if (crtCost < bestCost) {
627 bestParams = costs[
i].params;
630 Logger::debug(
"Best cost: %g\n", bestCost);
// Search-parameter estimation fragment: with the index already built,
// measure a linear-scan baseline, then find the number of checks (and for
// KMeans the cluster-border factor cb_index) reaching the target precision.
650 const size_t SAMPLE_COUNT = 1000;
660 Logger::info(
"Computing ground truth\n");
// As in the build phase: repeat the exact search until >= 0.2 s elapsed
// so the averaged per-repeat time is reliable.
667 while (
t.value<0.2) {
673 float linear = (
float)
t.value/repeats;
676 Logger::info(
"Estimating number of checks\n");
681 Logger::info(
"KMeans algorithm, estimating cluster border factor\n");
// Sweep cb_index over {0, 0.2, ..., 1.0} keeping the fastest setting.
// -1 sentinels mark "no measurement yet" for the first iteration.
// NOTE(review): float loop counter accumulates rounding error; the 1.1f
// bound is what keeps 1.0 inside the sweep.
683 float bestSearchTime = -1;
684 float best_cb_index = -1;
685 int best_checks = -1;
686 for (cb_index = 0; cb_index < 1.1f; cb_index += 0.2f) {
689 if ((searchTime < bestSearchTime) || (bestSearchTime == -1)) {
690 bestSearchTime = searchTime;
691 best_cb_index = cb_index;
692 best_checks = checks;
// Commit the winning sweep values.
695 searchTime = bestSearchTime;
696 cb_index = best_cb_index;
697 checks = best_checks;
700 Logger::info(
"Optimum cb_index: %g\n", cb_index);
707 Logger::info(
"Required number of checks: %d \n", checks);
708 searchParams.checks = checks;
// Speedup reported relative to the linear-scan baseline.
710 speedup = linear / searchTime;
// Manual cleanup of the temporary matrices allocated for this estimate
// (Matrix<> does not own its buffer in this FLANN fork).
712 delete[] gt_matches.ptr();
713 delete[] testDataset.ptr();