TranslationAveragingExample.py
1 """
2 GTSAM Copyright 2010-2018, Georgia Tech Research Corporation,
3 Atlanta, Georgia 30332-0415
4 All Rights Reserved
5 Authors: Frank Dellaert, et al. (see THANKS for the full author list)
6 
7 See LICENSE for the license information
8 
9 This example shows how 1dsfm uses outlier rejection (MFAS) and optimization (translation recovery)
10 together for estimating global translations from relative translation directions and global rotations.
11 The purpose of this example is to illustrate the connection between these two classes using a small SfM dataset.
12 
13 Author: Akshay Krishnan
14 Date: September 2020
15 """

from collections import defaultdict
from typing import List, Tuple

import numpy as np

import gtsam
from gtsam.examples import SFMdata

# Hyperparameters for 1dsfm, values used from Kyle Wilson's code.
MAX_1DSFM_PROJECTION_DIRECTIONS = 48
OUTLIER_WEIGHT_THRESHOLD = 0.1


def get_data() -> Tuple[gtsam.Values, List[gtsam.BinaryMeasurementUnit3]]:
    """Returns global rotations and unit translation directions between 8 cameras
    that lie on a circle and face the center. The poses of the 8 cameras are obtained from
    SFMdata, and the unit translation directions between some camera pairs are computed from
    their global translations."""
    fx, fy, s, u0, v0 = 50.0, 50.0, 0.0, 50.0, 50.0
    wTc_list = SFMdata.createPoses(gtsam.Cal3_S2(fx, fy, s, u0, v0))
    # Rotations of the cameras in the world frame.
    wRc_values = gtsam.Values()
    # Normalized translation directions from camera i to camera j
    # in the coordinate frame of camera i.
    i_iZj_list = []
    for i in range(0, len(wTc_list) - 2):
        # Add the rotation.
        wRi = wTc_list[i].rotation()
        wRc_values.insert(i, wRi)
        # Create unit translation measurements with the next two poses.
        for j in range(i + 1, i + 3):
            # Compute the translation from pose i to pose j, in the world reference frame.
            w_itj = wTc_list[j].translation() - wTc_list[i].translation()
            # Obtain the translation in camera i's reference frame.
            i_itj = wRi.unrotate(w_itj)
            # Compute the normalized unit translation direction.
            i_iZj = gtsam.Unit3(i_itj)
            i_iZj_list.append(gtsam.BinaryMeasurementUnit3(
                i, j, i_iZj, gtsam.noiseModel.Isotropic.Sigma(3, 0.01)))
    # Add the last two rotations.
    wRc_values.insert(len(wTc_list) - 1, wTc_list[-1].rotation())
    wRc_values.insert(len(wTc_list) - 2, wTc_list[-2].rotation())
    return wRc_values, i_iZj_list

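# A quick way to inspect the synthetic data above (hypothetical usage, not part of the
# original example):
#   wRc_values, i_iZj_list = get_data()
#   print(wRc_values.size(), len(i_iZj_list))  # expect 8 rotations and 12 direction measurements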
def filter_outliers(w_iZj_list: gtsam.BinaryMeasurementsUnit3) -> gtsam.BinaryMeasurementsUnit3:
    """Removes outliers from a list of Unit3 measurements that are the
    translation directions from camera i to camera j in the world frame."""

    # Indices of measurements that are to be used as projection directions.
    # These are randomly chosen. All sampled directions must be unique.
    num_directions_to_sample = min(
        MAX_1DSFM_PROJECTION_DIRECTIONS, len(w_iZj_list))
    sampled_indices = np.random.choice(
        len(w_iZj_list), num_directions_to_sample, replace=False)

    # Sample projection directions from the measurements.
    projection_directions = [w_iZj_list[idx].measured()
                             for idx in sampled_indices]

    outlier_weights = []
    # Find the outlier weights for each direction using MFAS.
    for direction in projection_directions:
        algorithm = gtsam.MFAS(w_iZj_list, direction)
        outlier_weights.append(algorithm.computeOutlierWeights())

    # Compute the average of the outlier weights. Each outlier weight is a map from a pair of
    # Keys (camera IDs) to a weight, where weights are proportional to the probability of the
    # edge being an outlier.
    avg_outlier_weights = defaultdict(float)
    for outlier_weight_dict in outlier_weights:
        for keypair, weight in outlier_weight_dict.items():
            avg_outlier_weights[keypair] += weight / len(outlier_weights)

    # Keep only the measurements whose average weight is below the threshold; the rest are
    # treated as outliers and removed.
    w_iZj_inliers = gtsam.BinaryMeasurementsUnit3()
    for w_iZj in w_iZj_list:
        if avg_outlier_weights[(w_iZj.key1(), w_iZj.key2())] < OUTLIER_WEIGHT_THRESHOLD:
            w_iZj_inliers.append(w_iZj)

    return w_iZj_inliers

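# Informal note: outlier weights computed from a single projection direction can be noisy,
# which is why filter_outliers averages them over many randomly sampled directions before
# thresholding. The function below chains the full pipeline: rotate the measurements into the
# world frame, reject outliers, and recover the translations.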
def estimate_poses(i_iZj_list: gtsam.BinaryMeasurementsUnit3,
                   wRc_values: gtsam.Values) -> gtsam.Values:
    """Estimate poses given rotations and normalized translation directions between cameras.

    Args:
        i_iZj_list: List of normalized translation direction measurements between camera pairs,
                    Z here refers to measurements. The measurements are of camera j with reference
                    to camera i (iZj), in camera i's coordinate frame (i_). iZj represents a unit
                    vector to j in i's frame and is not a transformation.
        wRc_values: Rotations of the cameras in the world frame.

    Returns:
        gtsam.Values: Estimated poses.
    """

    # Convert the translation direction measurements to world frame using the rotations.
    w_iZj_list = gtsam.BinaryMeasurementsUnit3()
    for i_iZj in i_iZj_list:
        w_iZj = gtsam.Unit3(wRc_values.atRot3(i_iZj.key1())
                            .rotate(i_iZj.measured().point3()))
        w_iZj_list.append(gtsam.BinaryMeasurementUnit3(
            i_iZj.key1(), i_iZj.key2(), w_iZj, i_iZj.noiseModel()))

    # Remove the outliers in the unit translation directions.
    w_iZj_inliers = filter_outliers(w_iZj_list)

    # Run the optimizer to obtain translations for normalized directions.
    wtc_values = gtsam.TranslationRecovery(w_iZj_inliers).run()

    wTc_values = gtsam.Values()
    for key in wRc_values.keys():
        wTc_values.insert(key, gtsam.Pose3(
            wRc_values.atRot3(key), wtc_values.atPoint3(key)))
    return wTc_values

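# Note on the result (an aside, not from the original example): direction-only measurements
# determine the camera translations only up to a global shift and scale, so it is the relative
# configuration of the recovered cameras that is meaningful; TranslationRecovery resolves that
# gauge ambiguity internally when producing concrete Point3 values.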
def main():
    wRc_values, i_iZj_list = get_data()
    wTc_values = estimate_poses(i_iZj_list, wRc_values)
    print("**** Translation averaging output ****")
    print(wTc_values)
    print("**************************************")


if __name__ == '__main__':
    main()
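# To try the example (assuming the GTSAM Python wrapper and its bundled examples are installed,
# e.g. via `pip install gtsam`), run:
#   python TranslationAveragingExample.py
# The printed Values should contain one Pose3 for each of the 8 camera keys (0-7).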