"""
GTSAM Copyright 2010-2019, Georgia Tech Research Corporation,
Atlanta, Georgia 30332-0415

See LICENSE for the license information

Unit tests to ensure backwards compatibility of the Python wrapper.
"""
import unittest
from typing import Iterable, List, Optional, Tuple, Union
 
import numpy as np

import gtsam
from gtsam import (BetweenFactorPose2, Cal3_S2, Cal3Bundler, CameraSetCal3_S2,
                   CameraSetCal3Bundler, IndexPair, LevenbergMarquardtParams,
                   PinholeCameraCal3_S2, PinholeCameraCal3Bundler, Point2,
                   Point2Pairs, Point3, Pose2, Pose2Pairs, Pose3, Rot2, Rot3,
                   SfmTrack2d, ShonanAveraging2, ShonanAveragingParameters2,
                   Similarity2, Similarity3, TriangulationParameters,
                   TriangulationResult)
from gtsam.gtsfm import Keypoints

UPRIGHT = Rot3.Ypr(-np.pi / 2, 0.0, -np.pi / 2)
 
    """Tests for backwards compatibility of the Python wrapper."""

        """Setup test fixtures"""
        p1 = [-1.0, 0.0, -1.0]
        q1 = Rot3(1.0, 0.0, 0.0, 0.0)
        q2 = Rot3(1.0, 0.0, 0.0, 0.0)

        self.origin = np.array([0.0, 0.0, 0.0])

        fx, fy, s, u0, v0 = 2, 2, 0, 0, 0
        k1, k2, p1, p2 = 0, 0, 0, 0
 
        """
        Estimate spatial point from image measurements using
        rectification from a Cal3Fisheye camera model.
        """
            k.calibration().calibrate(pt)

        """
        Estimate spatial point from image measurements using
        rectification from a Cal3Unified camera model.
        """
            k.calibration().calibrate(pt)
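
        # Hedged illustration (not from the original tests): `calibrate` maps a
        # distorted pixel measurement back to normalized image coordinates and
        # `uncalibrate` is its inverse, which is why rectified points can be
        # triangulated with a distortion-free linear model. `cal` is a
        # stand-in calibration, not a fixture from this file.
        cal = Cal3_S2(1500, 1200, 0, 640, 480)
        px = cal.uncalibrate(Point2(0.1, -0.2))
        np.testing.assert_allclose(cal.calibrate(px), Point2(0.1, -0.2))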
 
        """Ensures that DSF generates three tracks from measurements
        in 3 images (H=200,W=400)."""
        kps_i0 = Keypoints(np.array([[10.0, 20], [30, 40]]))
        kps_i1 = Keypoints(np.array([[50.0, 60], [70, 80], [90, 100]]))
        kps_i2 = Keypoints(np.array([[110.0, 120], [130, 140]]))

        keypoints_list.append(kps_i0)
        keypoints_list.append(kps_i1)
        keypoints_list.append(kps_i2)

        matches_dict[IndexPair(0, 1)] = np.array([[0, 0], [1, 1]])
        matches_dict[IndexPair(1, 2)] = np.array([[2, 0], [1, 1]])
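
        # Hedged sketch (assumed, not verbatim): the call producing `tracks`
        # from the keypoints and pairwise matches falls in a gap of this
        # listing; the DSF-based generator is exposed roughly as follows
        # (the function location and `verbose` keyword are assumptions).
        tracks = gtsam.gtsfm.tracksFromPairwiseMatches(matches_dict,
                                                       keypoints_list,
                                                       verbose=False)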
 
        assert len(tracks) == 3

        assert track0.numberMeasurements() == 2
        np.testing.assert_allclose(track0.measurements[0][1], Point2(10, 20))
        np.testing.assert_allclose(track0.measurements[1][1], Point2(50, 60))
        assert track0.measurements[0][0] == 0
        assert track0.measurements[1][0] == 1
        np.testing.assert_allclose(
            track0.measurementMatrix(),
            [
                [10, 20],
                [50, 60],
            ],
        )
        np.testing.assert_allclose(track0.indexVector(), [0, 1])

        np.testing.assert_allclose(
            track1.measurementMatrix(),
            [
                [30, 40],
                [70, 80],
                [130, 140],
            ],
        )
        np.testing.assert_allclose(track1.indexVector(), [0, 1, 2])

        np.testing.assert_allclose(
            track2.measurementMatrix(),
            [
                [90, 100],
                [110, 120],
            ],
        )
        np.testing.assert_allclose(track2.indexVector(), [1, 2])

        """Test construction of 2D SfM track."""
        measurements.append((0, Point2(10, 20)))

        assert track.numberMeasurements() == 1
 
        """
        Simple test that checks for equality between C++ example
        file and the Python implementation. See
        gtsam_unstable/examples/FixedLagSmootherExample.cpp
        """
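        # Hedged sketch (assumed, not verbatim): the smoother and the
        # containers used below fall in a gap of this listing. Following
        # FixedLagSmootherExample, they are typically created as:
        lag = 2.0
        smoother_batch = gtsam.BatchFixedLagSmoother(lag)
        new_factors = gtsam.NonlinearFactorGraph()
        new_values = gtsam.Values()
        new_timestamps = gtsam.FixedLagSmootherKeyTimestampMap()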
        prior_mean = Pose2(0, 0, 0)
        prior_noise = gtsam.noiseModel.Diagonal.Sigmas(
            np.array([0.3, 0.3, 0.1]))
        new_factors.push_back(
            gtsam.PriorFactorPose2(X1, prior_mean, prior_noise))
        new_values.insert(X1, prior_mean)
        new_timestamps.insert((X1, 0.0))

            Pose2(0.995821, 0.0231012, 0.0300001),
            Pose2(1.49284, 0.0457247, 0.045),
            Pose2(1.98981, 0.0758879, 0.06),
            Pose2(2.48627, 0.113502, 0.075),
            Pose2(2.98211, 0.158558, 0.09),
            Pose2(3.47722, 0.211047, 0.105),
            Pose2(3.97149, 0.270956, 0.12),
            Pose2(4.4648, 0.338272, 0.135),
            Pose2(4.95705, 0.41298, 0.15),
            Pose2(5.44812, 0.495063, 0.165),
            Pose2(5.9379, 0.584503, 0.18),

            previous_key = int(1000 * (time - delta_time))
            current_key = int(1000 * time)

            new_timestamps.insert((current_key, time))

            current_pose = Pose2(time * 2, 0, 0)
            new_values.insert(current_key, current_pose)

            odometry_measurement_1 = Pose2(0.61, -0.08, 0.02)
            odometry_noise_1 = gtsam.noiseModel.Diagonal.Sigmas(
                np.array([0.1, 0.1, 0.05]))
            new_factors.push_back(
                gtsam.BetweenFactorPose2(previous_key, current_key,
                                         odometry_measurement_1,
                                         odometry_noise_1))

            odometry_measurement_2 = Pose2(0.47, 0.03, 0.01)
            odometry_noise_2 = gtsam.noiseModel.Diagonal.Sigmas(
                np.array([0.05, 0.05, 0.05]))
            new_factors.push_back(
                gtsam.BetweenFactorPose2(previous_key, current_key,
                                         odometry_measurement_2,
                                         odometry_noise_2))

                smoother_batch.update(new_factors, new_values, new_timestamps)
                estimate = smoother_batch.calculateEstimatePose2(current_key)
                self.assertTrue(estimate.equals(ground_truth[i], 1e-4))

                new_timestamps.clear()
                new_factors.resize(0)
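
        # Hedged sketch (assumed, not verbatim): the construction of the graph,
        # keys, noise models, and containers used below falls in a gap of this
        # listing. A minimal setup consistent with the calls that follow:
        gfg = gtsam.GaussianFactorGraph()
        x0, x1, x2 = 0, 1, 2
        keys = [x0, x1, x2]
        PRIOR_NOISE = gtsam.noiseModel.Isotropic.Sigma(1, 1.0)
        BETWEEN_NOISE = gtsam.noiseModel.Isotropic.Sigma(1, 1.0)
        ordering = gtsam.Ordering()
        keyVector = gtsam.KeyVector()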
 
        gfg.add(x1, np.eye(1), x0, -np.eye(1), np.ones(1), BETWEEN_NOISE)
        gfg.add(x2, np.eye(1), x1, -np.eye(1), 2 * np.ones(1), BETWEEN_NOISE)
        gfg.add(x0, np.eye(1), np.zeros(1), PRIOR_NOISE)

        for key in keys[::-1]:
            ordering.push_back(key)

        bn = gfg.eliminateSequential(ordering)
        self.assertEqual(bn.size(), 3)

        keyVector.append(keys[2])
        ordering = gtsam.Ordering.ColamdConstrainedLastGaussianFactorGraph(
            gfg, keyVector)
        bn = gfg.eliminateSequential(ordering)
        self.assertEqual(bn.size(), 3)
 
        """Check that optimizing for Karcher mean (which minimizes Between distance)
        """
        R = Rot3.Expmap(np.array([0.1, 0, 0]))

        actual = gtsam.FindKarcherMeanRot3(rotations)

        """Averaging 3 identity rotations should yield the identity."""
        aRb_expected = Rot3()

        aRb = gtsam.FindKarcherMeanRot3(aRb_list)
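
        # Hedged illustration (not from the original test): the Karcher mean of
        # a rotation and its inverse is the identity; passing the rotations via
        # the wrapped Rot3Vector container is an assumption about the overload
        # exercised here.
        _R = Rot3.Expmap(np.array([0.1, 0, 0]))
        _mean = gtsam.FindKarcherMeanRot3(gtsam.Rot3Vector([_R, _R.inverse()]))
        self.assertTrue(_mean.equals(Rot3(), 1e-9))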
 
        """Check that the InnerConstraint factor leaves the mean unchanged."""
        R = Rot3.Expmap(np.array([0.1, 0, 0]))

        R12 = R.compose(R.compose(R))
        graph.add(gtsam.BetweenFactorRot3(1, 2, R12, MODEL))

        graph.add(gtsam.KarcherMeanFactorRot3(keys))

        initial.insert(1, R.inverse())

        actual = gtsam.FindKarcherMeanRot3(
 
        """Ensure estimation of the Pose2 element to align two 2d point clouds succeeds.

        Two point clouds represent horseshoe-shapes of the same size, just rotated and translated:
        """
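        # Hedged sketch (assumed, not verbatim): the correspondences passed to
        # Pose2.Align below are the two point lists zipped into the wrapped
        # Point2Pairs container.
        ab_pairs = Point2Pairs(list(zip(pts_a, pts_b)))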
        aTb = Pose2.Align(ab_pairs)
        self.assertIsNotNone(aTb)

        for pt_a, pt_b in zip(pts_a, pts_b):
            pt_a_ = aTb.transformFrom(pt_b)
            np.testing.assert_allclose(pt_a, pt_a_)

        # Matrix version of Align, with the point clouds as 2xN arrays
        A = np.array(pts_a).T
        B = np.array(pts_b).T
        aTb = Pose2.Align(A, B)
        self.assertIsNotNone(aTb)

        for pt_a, pt_b in zip(pts_a, pts_b):
            pt_a_ = aTb.transformFrom(pt_b)
            np.testing.assert_allclose(pt_a, pt_a_)
 
        """Test if Align method can align 2 squares."""
        square = np.array([[0, 0, 0], [0, 1, 0], [1, 1, 0], [1, 0, 0]],
                          float).T
        transformed = sTt.transformTo(square)

        for j in range(4):
            st_pairs.append((square[:, j], transformed[:, j]))

        estimated_sTt = Pose3.Align(st_pairs)

        estimated_sTt = Pose3.Align(square, transformed)
 
        """Check if ShonanAveraging2 constructor works when not initialized from g2o file."""
            Pose2(Rot2.fromDegrees(0), np.array([0, 0])),
            Pose2(Rot2.fromDegrees(90), np.array([0, 4])),
            Pose2(Rot2.fromDegrees(0), np.array([4, 0])),

        edges = [(0, 1), (1, 2), (0, 2)]
        i2Ri1_dict = {(i1, i2):
                      for (i1, i2) in edges}

        lm_params = LevenbergMarquardtParams.CeresDefaults()
        shonan_params = ShonanAveragingParameters2(lm_params)
        shonan_params.setUseHuber(False)
        shonan_params.setCertifyOptimality(True)
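
        # Hedged sketch (assumed, not verbatim): `between_factors` below is the
        # wrapped container of BetweenFactorPose2 measurements that the
        # ShonanAveraging2 constructor accepts in place of a g2o file.
        between_factors = gtsam.BetweenFactorPose2s()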
 
        for (i1, i2), i2Ri1 in i2Ri1_dict.items():
            i2Ti1 = Pose2(i2Ri1, np.zeros(2))
            between_factors.append(

        initial = obj.initializeRandomly()
        result_values, _ = obj.run(initial, min_p=2, max_p=100)

        wRi_list = [result_values.atRot2(i) for i in range(num_images)]
        thetas_deg = np.array([wRi.degrees() for wRi in wRi_list])

        # Make the angles relative to the first rotation and wrap to (-180, 180].
        thetas_deg = (thetas_deg - thetas_deg[0] + 180) % 360 - 180

        expected_thetas_deg = np.array([0.0, 90.0, 0.0])
        np.testing.assert_allclose(thetas_deg, expected_thetas_deg, atol=0.1)
 
        """Test Align of list of Pose2Pair.

           same scale (no gauge ambiguity)
           world frame has poses rotated by 180 degrees.
           world and egovehicle frame translated by 15 meters w.r.t. each other
        """
        R180 = Rot2.fromDegrees(180)

        eToi_list = [eTo0, eTo1, eTo2]

        wTo0 = Pose2(R180, np.array([-10, 0]))
        wTo1 = Pose2(R180, np.array([-5, 0]))
        wTo2 = Pose2(R180, np.array([0, 0]))

        wToi_list = [wTo0, wTo1, wTo2]
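        # Hedged sketch (assumed, not verbatim): the pose pairs passed to
        # Similarity2.Align below are built by zipping the two lists into the
        # wrapped Pose2Pairs container.
        we_pairs = Pose2Pairs(list(zip(wToi_list, eToi_list)))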
 
        wSe = Similarity2.Align(we_pairs)

        for wToi, eToi in zip(wToi_list, eToi_list):
 
        """Test if Align Pose2Pairs method can account for gauge ambiguity.

           with gauge ambiguity (2x scale)
           world frame has poses rotated by 90 degrees.
           world and egovehicle frame translated by 11 meters w.r.t. each other
        """
        R90 = Rot2.fromDegrees(90)

        eToi_list = [eTo0, eTo1, eTo2]

        wTo0 = Pose2(R90, np.array([0, 12]))
        wTo1 = Pose2(R90, np.array([0, 14]))
        wTo2 = Pose2(R90, np.array([0, 18]))

        wToi_list = [wTo0, wTo1, wTo2]

        wSe = Similarity2.Align(we_pairs)

        for wToi, eToi in zip(wToi_list, eToi_list):
 
        """Test if Align Pose2Pairs method can account for gauge ambiguity.

        Make sure a big and small square can be aligned.
        The u's represent a big square (10x10), and the v's represent a small square (4x4).

           with gauge ambiguity (2.5x scale)
        """
        R0 = Rot2.fromDegrees(0)
        R90 = Rot2.fromDegrees(90)
        R180 = Rot2.fromDegrees(180)
        R270 = Rot2.fromDegrees(270)

        aTi0 = Pose2(R0, np.array([2, 3]))
        aTi1 = Pose2(R90, np.array([12, 3]))
        aTi2 = Pose2(R180, np.array([12, 13]))
        aTi3 = Pose2(R270, np.array([2, 13]))

        aTi_list = [aTi0, aTi1, aTi2, aTi3]

        bTi0 = Pose2(R0, np.array([4, 3]))
        bTi1 = Pose2(R90, np.array([8, 3]))
        bTi2 = Pose2(R180, np.array([8, 7]))
        bTi3 = Pose2(R270, np.array([4, 7]))

        bTi_list = [bTi0, bTi1, bTi2, bTi3]

        aSb = Similarity2.Align(ab_pairs)

        for aTi, bTi in zip(aTi_list, bTi_list):
 
        """Test Align Pose3Pairs method.

           same scale (no gauge ambiguity)
           world frame has poses rotated about x-axis (90 degree roll)
           world and egovehicle frame translated by 15 meters w.r.t. each other
        """
        Rx90 = Rot3.Rx(np.deg2rad(90))

        eTo0 = Pose3(Rot3(), np.array([5, 0, 0]))
        eTo1 = Pose3(Rot3(), np.array([10, 0, 0]))
        eTo2 = Pose3(Rot3(), np.array([15, 0, 0]))

        eToi_list = [eTo0, eTo1, eTo2]

        wTo0 = Pose3(Rx90, np.array([-10, 0, 0]))
        wTo1 = Pose3(Rx90, np.array([-5, 0, 0]))
        wTo2 = Pose3(Rx90, np.array([0, 0, 0]))

        wToi_list = [wTo0, wTo1, wTo2]
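        # Hedged sketch (assumed, not verbatim): as in the Similarity2 tests,
        # the pose pairs are wrapped before alignment; Pose3Pairs is the
        # analogous container for Pose3.
        we_pairs = gtsam.Pose3Pairs(list(zip(wToi_list, eToi_list)))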
 
        wSe = Similarity3.Align(we_pairs)

        for wToi, eToi in zip(wToi_list, eToi_list):
 
        """Test if Align Pose3Pairs method can account for gauge ambiguity.

           with gauge ambiguity (2x scale)
           world frame has poses rotated about z-axis (90 degree yaw)
           world and egovehicle frame translated by 11 meters w.r.t. each other
        """
        Rz90 = Rot3.Rz(np.deg2rad(90))

        eTo0 = Pose3(Rot3(), np.array([1, 0, 0]))
        eTo1 = Pose3(Rot3(), np.array([2, 0, 0]))
        eTo2 = Pose3(Rot3(), np.array([4, 0, 0]))

        eToi_list = [eTo0, eTo1, eTo2]

        wTo0 = Pose3(Rz90, np.array([0, 12, 0]))
        wTo1 = Pose3(Rz90, np.array([0, 14, 0]))
        wTo2 = Pose3(Rz90, np.array([0, 18, 0]))

        wToi_list = [wTo0, wTo1, wTo2]

        wSe = Similarity3.Align(we_pairs)

        for wToi, eToi in zip(wToi_list, eToi_list):
 
        """Test if Align Pose3Pairs method can account for gauge ambiguity.

        Make sure a big and small square can be aligned.
        The u's represent a big square (10x10), and the v's represent a small square (4x4).

           with gauge ambiguity (2.5x scale)
        """
        R0 = Rot3.Rz(np.deg2rad(0))
        R90 = Rot3.Rz(np.deg2rad(90))
        R180 = Rot3.Rz(np.deg2rad(180))
        R270 = Rot3.Rz(np.deg2rad(270))

        aTi0 = Pose3(R0, np.array([2, 3, 0]))
        aTi1 = Pose3(R90, np.array([12, 3, 0]))
        aTi2 = Pose3(R180, np.array([12, 13, 0]))
        aTi3 = Pose3(R270, np.array([2, 13, 0]))

        aTi_list = [aTi0, aTi1, aTi2, aTi3]

        bTi0 = Pose3(R0, np.array([4, 3, 0]))
        bTi1 = Pose3(R90, np.array([8, 3, 0]))
        bTi2 = Pose3(R180, np.array([8, 7, 0]))
        bTi3 = Pose3(R270, np.array([4, 7, 0]))

        bTi_list = [bTi0, bTi1, bTi2, bTi3]

        aSb = Similarity3.Align(ab_pairs)

        for aTi, bTi in zip(aTi_list, bTi_list):
 
        calibration: Union[Cal3Bundler, Cal3_S2],
        camera_model: Union[PinholeCameraCal3Bundler, PinholeCameraCal3_S2],
        cal_params: Iterable[Iterable[Union[int, float]]],
        camera_set: Optional[Union[CameraSetCal3Bundler,
                                   CameraSetCal3_S2]] = None,
    ) -> Tuple[List[Point2], Union[CameraSetCal3Bundler, CameraSetCal3_S2,
                                   List[Cal3Bundler], List[Cal3_S2]]]:
        """
        Generate vector of measurements for given calibration and camera model.

        Args:
            calibration: Camera calibration e.g. Cal3_S2
            camera_model: Camera model e.g. PinholeCameraCal3_S2
            cal_params: Iterable of camera parameters for `calibration` e.g. [K1, K2]
            camera_set: CameraSet object (for individual calibrations)

        Returns:
            list of measurements and list/CameraSet object for cameras
        """
        if camera_set is not None:
            cameras = camera_set()

            camera = camera_model(pose, K)
            cameras.append(camera)

            measurements.append(z)

        return measurements, cameras
 
        """Tests triangulation with shared Cal3_S2 calibration"""
        sharedCal = (1500, 1200, 0, 640, 480)

            camera_model=PinholeCameraCal3_S2,
            cal_params=(sharedCal, sharedCal))

        # Add some noise to the measurements
        measurements_noisy.append(measurements[0] - np.array([0.1, 0.5]))
        measurements_noisy.append(measurements[1] - np.array([-0.2, 0.3]))
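
        # Hedged sketch (assumed, not verbatim): the noisy measurements are
        # then triangulated with the shared calibration; the rank tolerance and
        # optimize flag shown here are typical values, not necessarily the
        # original ones, and container types depend on the wrapper version.
        triangulated = gtsam.triangulatePoint3(self.poses,
                                               Cal3_S2(*sharedCal),
                                               measurements_noisy,
                                               1e-9, True)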
 
        """Ensure triangulation with a robust model works."""
        sharedCal = Cal3_S2(1500, 1200, 0, 640, 480)

        landmark = Point3(5, 0.5, 1.2)

        pose3 = pose1 * Pose3(Rot3.Ypr(0.1, 0.2, 0.1), Point3(0.1, -2, -0.1))

        z1: Point2 = camera1.project(landmark)
        z2: Point2 = camera2.project(landmark)
        z3: Point2 = camera3.project(landmark)

        self.assertTrue(np.allclose(landmark, actual, atol=1e-2))

        measurements[0] += Point2(100, 120)  # very large pixel noise!

        self.assertTrue(np.linalg.norm(landmark - actual2) >= 0.2)
        self.assertTrue(np.linalg.norm(landmark - actual2) <= 0.5)

        self.assertTrue(np.allclose(actual2, actual3, atol=0.1))

        self.assertTrue(np.allclose(landmark, actual4, atol=0.05))
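
        # Hedged illustration (not from the original test): a robust
        # measurement model of the kind exercised above wraps a Gaussian pixel
        # noise in an m-estimator such as Huber.
        base = gtsam.noiseModel.Isotropic.Sigma(2, 1.0)
        robust = gtsam.noiseModel.Robust.Create(
            gtsam.noiseModel.mEstimator.Huber.Create(1.345), base)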
 
        """Check safe triangulation function."""
        pose1, pose2 = self.poses

        K1 = Cal3_S2(1500, 1200, 0, 640, 480)

        K2 = Cal3_S2(1600, 1300, 0, 650, 440)

        cameras.append(camera1)
        cameras.append(camera2)

        measurements.append(z1)
        measurements.append(z2)

        landmarkDistanceThreshold = 10  # landmark is closer than that
        actual = gtsam.triangulateSafe(
            cameras, measurements, params)

        self.assertTrue(actual.valid())

        landmarkDistanceThreshold = 4  # landmark is farther than that
                                          landmarkDistanceThreshold)

        self.assertTrue(actual.farPoint())
 
        pose3 = pose1 * Pose3(Rot3.Ypr(0.1, 0.2, 0.1), Point3(0.1, -2, -.1))
        K3 = Cal3_S2(700, 500, 0, 640, 480)

        cameras.append(camera3)
        measurements.append(z3 + Point2(10, -10))

        landmarkDistanceThreshold = 10  # landmark is closer than that
        outlierThreshold = 100  # loose, the outlier is going to pass
                                          landmarkDistanceThreshold,

        self.assertTrue(actual.valid())

                                          landmarkDistanceThreshold,

        self.assertTrue(actual.outlier())
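
        # Hedged illustration (not from the original test): TriangulationParameters
        # bundles the rank tolerance, the EPI flag, and the distance / outlier
        # rejection thresholds checked above; the values are placeholders, and
        # triangulateSafe returns a TriangulationResult carrying the
        # valid / farPoint / outlier status.
        _params = TriangulationParameters(1.0, False, landmarkDistanceThreshold,
                                          outlierThreshold)
        _result = gtsam.triangulateSafe(cameras, measurements, _params)
        self.assertIsInstance(_result, TriangulationResult)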
 
if __name__ == "__main__":
    unittest.main()