camera_chain_sensor.py
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following
#    disclaimer in the documentation and/or other materials provided
#    with the distribution.
#  * Neither the name of Willow Garage, Inc. nor the names of its
#    contributors may be used to endorse or promote products derived
#    from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

# author: Vijay Pradeep

# Define a camera sensor attached to a chain
#
#       before_chain_Ts -- camera_chain -- after_chain_Ts -- camera
#      /
#   root
#      \
#       checkerboard
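#
# A rough sketch of the kind of entry expected in the valid_configs list that
# gets passed to CameraChainBundler below.  The key names ("sensor_id",
# "chain_id", "frame_id", "baseline_rgbd") are the ones this module reads;
# the example values are purely illustrative:
#
#   example_config = {
#       "sensor_id":     "my_camera",        # must match a camera_id in M_robot.M_cam
#       "chain_id":      "my_arm_chain",     # may be None for a camera not mounted on a chain
#       "frame_id":      "my_camera_frame",  # frame passed to FullChainRobotParams with chain_id
#       "baseline_rgbd": 0.075,              # optional; if present, a disparity residual term is added
#   }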

import numpy
from numpy import matrix, reshape, array, zeros, diag, real

import roslib; roslib.load_manifest('calibration_estimation')
import rospy
from calibration_estimation.full_chain import FullChainRobotParams
from sensor_msgs.msg import JointState

class CameraChainBundler:
    """
    Tool used to generate a list of CameraChainSensor objects from a single calibration sample message
    """
    def __init__(self, valid_configs):
        self._valid_configs = valid_configs

    # Construct a CameraChainSensor for every camera chain sensor that exists in the given robot measurement
    def build_blocks(self, M_robot):
        """
        Input:
        - M_robot: A calibration sample of type calibration_msgs/RobotMeasurement
        Returns:
        - sensors: A list of CameraChainSensor objects that correspond to all camera chain sensors found
                   in the calibration message that was passed in.
        """
        sensors = []
        for cur_config in self._valid_configs:
            if cur_config["sensor_id"] in [x.camera_id for x in M_robot.M_cam]:
                if "chain_id" in cur_config and cur_config["chain_id"] is not None:
                    if cur_config["chain_id"] in [x.chain_id for x in M_robot.M_chain]:
                        M_cam   = M_robot.M_cam  [[x.camera_id for x in M_robot.M_cam  ].index(cur_config["sensor_id"])]
                        M_chain = M_robot.M_chain[[x.chain_id  for x in M_robot.M_chain].index(cur_config["chain_id"])]
                    else:
                        continue
                else:
                    M_cam   = M_robot.M_cam[[x.camera_id for x in M_robot.M_cam].index(cur_config["sensor_id"])]
                    M_chain = None
                    cur_config["chain_id"] = None
                cur_sensor = CameraChainSensor(cur_config, M_cam, M_chain)
                sensors.append(cur_sensor)
            else:
                rospy.logdebug("  Didn't find a camera measurement for sensor [%s]", cur_config["sensor_id"])
        return sensors
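
# A minimal usage sketch (not part of the original source): each incoming
# calibration_msgs/RobotMeasurement sample is turned into CameraChainSensor
# blocks whose residuals feed the calibration optimizer.  The names
# valid_configs, robot_measurements, robot_params and target_pts below are
# assumed to come from the surrounding estimation code and are illustrative:
#
#   bundler = CameraChainBundler(valid_configs)
#   for M_robot in robot_measurements:
#       for sensor in bundler.build_blocks(M_robot):
#           sensor.update_config(robot_params)
#           r = sensor.compute_residual_scaled(target_pts)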

class CameraChainSensor:
    def __init__(self, config_dict, M_cam, M_chain):
        """
        Generates a single sensor block for a single configuration
        Inputs:
        - config_dict: The configuration for this specific sensor. This looks exactly like
                       a single element from the valid_configs list passed into CameraChainBundler.__init__
        - M_cam: The camera measurement of type calibration_msgs/CameraMeasurement
        - M_chain: The chain measurement of type calibration_msgs/ChainMeasurement
        """
        self.sensor_type = "camera"
        self.sensor_id = config_dict["sensor_id"]

        self._config_dict = config_dict
        self._M_cam = M_cam
        self._M_chain = M_chain

        self._full_chain = FullChainRobotParams(config_dict["chain_id"], config_dict["frame_id"])

        # Plain cameras contribute (u, v) terms per point; RGBD cameras add a disparity term
        self.terms_per_sample = 2
        if "baseline_rgbd" in config_dict:
            self.terms_per_sample = 3

    def update_config(self, robot_params):
        """
        On each optimization cycle the set of system parameters that we're optimizing over will change.
        Thus, after each change in parameters we must call update_config to ensure that our calculated
        residual reflects the newest set of system parameters.
        """
        self._camera = robot_params.rectified_cams[self.sensor_id]
        if self._full_chain is not None:
            self._full_chain.update_config(robot_params)

    def compute_residual(self, target_pts):
        """
        Computes the measurement residual for the current set of system parameters and target points
        Input:
        - target_pts: 4xN matrix, storing feature point locations in world Cartesian homogeneous coordinates.
        Output:
        - r: terms_per_sample*N long vector, storing pixel residuals for the target points in the form
             [u1, v1, u2, v2, ..., uN, vN] (or [u1, v1, d1, u2, v2, d2, ...] when a disparity term is used)
        """
        z_mat = self.get_measurement()
        h_mat = self.compute_expected(target_pts)
        r = array(reshape(h_mat - z_mat, [-1,1]))[:,0]
        return r

    def compute_residual_scaled(self, target_pts):
        """
        Computes the residual, and then scales it by sqrt(Gamma), where Gamma
        is the information matrix for this measurement (Cov^-1).
        """
        r = self.compute_residual(target_pts)
        gamma_sqrt = self.compute_marginal_gamma_sqrt(target_pts)
        r_scaled = gamma_sqrt * matrix(r).T
        return array(r_scaled.T)[0]

    def compute_marginal_gamma_sqrt(self, target_pts):
        """
        Calculates the square root of the information matrix for the measurement, given the
        current set of system parameters and the passed-in set of target points.
        """
        import scipy.linalg
        cov = self.compute_cov(target_pts)
        gamma = matrix(zeros(cov.shape))
        num_pts = self.get_residual_length() // self.terms_per_sample

        for k in range(num_pts):
            first = self.terms_per_sample*k
            last = self.terms_per_sample*k+self.terms_per_sample
            sub_cov = matrix(cov[first:last, first:last])
            sub_gamma_sqrt_full = matrix(scipy.linalg.sqrtm(sub_cov.I))
            sub_gamma_sqrt = real(sub_gamma_sqrt_full)
            assert(scipy.linalg.norm(sub_gamma_sqrt_full - sub_gamma_sqrt) < 1e-6)
            gamma[first:last, first:last] = sub_gamma_sqrt
        return gamma
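
    # Note: only each point's own terms_per_sample x terms_per_sample block of the
    # covariance is inverted and square-rooted above, so any cross-point correlation
    # introduced by the chain covariance is ignored; that is what makes this the
    # *marginal* gamma.  The real()/assert pair guards against scipy.linalg.sqrtm
    # returning a result with a small spurious imaginary component.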

    def get_residual_length(self):
        return self.terms_per_sample*len(self._M_cam.image_points)

    def get_measurement(self):
        """
        Get the target's pixel coordinates as measured by the actual sensor
        """
        if self.terms_per_sample == 2:
            return numpy.matrix([[pt.x, pt.y] for pt in self._M_cam.image_points])
        elif self.terms_per_sample == 3:
            return numpy.matrix([[pt.x, pt.y, self._camera.get_disparity(self._M_cam.cam_info.P, pt.z)] for pt in self._M_cam.image_points])

    def compute_expected(self, target_pts):
        """
        Compute the expected pixel coordinates for a set of target points.
        target_pts: 4xN matrix, storing feature points of the target, in homogeneous coords
        Returns: target points projected into pixel coordinates, in an Nx2 matrix
                 (Nx3 when the sensor includes a disparity term)
        """
        if self._M_chain is not None:
            return self._compute_expected(self._M_chain.chain_state, target_pts)
        else:
            return self._compute_expected(None, target_pts)

    def _compute_expected(self, chain_state, target_pts):
        """
        Compute what measurement we would expect to see, based on the current system parameters
        and the specified target point locations.

        Inputs:
        - chain_state: The joint angles of this sensor's chain of type sensor_msgs/JointState.
        - target_pts: 4xN matrix, storing feature point locations in world Cartesian homogeneous coordinates.

        Returns: target points projected into pixel coordinates, in an Nx2 matrix
                 (Nx3 when the sensor includes a disparity term)
        """
        # Camera pose in the root frame, from the full chain's forward kinematics
        camera_pose_root = self._full_chain.calc_block.fk(chain_state)
        # Transform the target points from the root frame into the camera frame
        cam_frame_pts = camera_pose_root.I * target_pts
        # Do the camera projection
        pixel_pts = self._camera.project(self._M_cam.cam_info.P, cam_frame_pts)
        return pixel_pts.T

    def compute_expected_J(self, target_pts):
        """
        The output Jacobian J shows how moving target_pts in Cartesian space affects
        the expected measurement in (u,v) camera coordinates.
        For n points in target_pts, J is a (terms_per_sample*n)x3n matrix (2nx3n for a plain camera).
        Note: This doesn't seem to be used anywhere, except maybe in some drawing code
        """
        epsilon = 1e-8
        N = len(self._M_cam.image_points)
        Jt = zeros([N*3, N*self.terms_per_sample])
        for k in range(N):
            # Compute the Jacobian for point k by forward finite differences
            sub_Jt = zeros([3, self.terms_per_sample])
            x = target_pts[:,k].copy()
            f0 = self.compute_expected(x)
            for i in [0, 1, 2]:
                x[i,0] += epsilon
                fTest = self.compute_expected(x)
                sub_Jt[i,:] = array((fTest - f0) / epsilon)
                x[i,0] -= epsilon
            Jt[k*3:(k+1)*3, k*self.terms_per_sample:(k+1)*self.terms_per_sample] = sub_Jt
        return Jt.T

    def compute_cov(self, target_pts):
        '''
        Computes the measurement covariance in pixel coordinates for the given
        set of target points (target_pts)
        Input:
         - target_pts: 4xN matrix, storing N feature points of the target, in homogeneous coords
        '''
        cam_cov = matrix(zeros([self.get_residual_length(), self.get_residual_length()]))

        # Convert the stored standard deviations into variances
        var_u = self._camera._cov_dict['u'] * self._camera._cov_dict['u']
        var_v = self._camera._cov_dict['v'] * self._camera._cov_dict['v']
        for k in range(cam_cov.shape[0] // self.terms_per_sample):
            cam_cov[self.terms_per_sample*k  , self.terms_per_sample*k]   = var_u
            cam_cov[self.terms_per_sample*k+1, self.terms_per_sample*k+1] = var_v
            if self.terms_per_sample == 3:
                cam_cov[self.terms_per_sample*k+2, self.terms_per_sample*k+2] = self._camera._cov_dict['x'] * self._camera._cov_dict['x']

        # Both chain and camera covariances are now in measurement space, so we can simply add them together
        if self._M_chain is not None:
            return self.get_chain_cov(target_pts) + cam_cov
        else:
            return cam_cov

    def get_chain_cov(self, target_pts, epsilon=1e-8):
        num_joints = len(self._M_chain.chain_state.position)
        Jt = zeros([num_joints, self.get_residual_length()])

        x = JointState()
        x.position = self._M_chain.chain_state.position[:]

        # Compute the Jacobian from the chain's joint angles to pixel residuals
        f0 = reshape(array(self._compute_expected(x, target_pts)), [-1])
        for i in range(num_joints):
            x.position = [cur_pos for cur_pos in self._M_chain.chain_state.position]
            x.position[i] += epsilon
            fTest = reshape(array(self._compute_expected(x, target_pts)), [-1])
            Jt[i] = (fTest - f0)/epsilon
        cov_angles = [c*c for c in self._full_chain.calc_block._chain._cov_dict['joint_angles']]

        # Transform the chain's covariance from joint-angle space into pixel space using the Jacobian computed above
        return matrix(Jt).T * matrix(diag(cov_angles)) * matrix(Jt)

    def build_sparsity_dict(self):
        """
        Build a dictionary that defines which parameters will in fact affect this measurement.

        Returned dictionary should be of the following form:
          transforms:
            my_transformA: [1, 1, 1, 1, 1, 1]
            my_transformB: [1, 1, 1, 1, 1, 1]
          chains:
            my_chain:
              gearing: [1, 1, ---, 1]
          rectified_cams:
            my_cam:
              baseline_shift: 1
              f_shift: 1
              cx_shift: 1
              cy_shift: 1
        """
        sparsity = self._full_chain.build_sparsity_dict()
        sparsity['rectified_cams'] = {}
        sparsity['rectified_cams'][self.sensor_id] = dict([(x, 1) for x in self._camera.get_param_names()])
        return sparsity
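
# The block below is not part of the original module.  It is a self-contained
# sketch (numpy + scipy only, no ROS required) of the per-point whitening idea
# used by compute_marginal_gamma_sqrt and compute_residual_scaled: invert each
# point's covariance block, take its matrix square root, and use the result to
# scale that point's residual.  All numbers are made up for illustration.
if __name__ == '__main__':
    import scipy.linalg

    # Toy setup: 2 image points, 2 residual terms (u, v) per point
    terms_per_sample = 2
    var_u = 0.5 * 0.5                           # pixel variances (illustrative)
    var_v = 0.5 * 0.5
    residual = array([1.0, -2.0, 0.5, 0.25])    # stacked [u1, v1, u2, v2] residual

    cov = matrix(diag([var_u, var_v, var_u, var_v]))   # block-diagonal toy covariance
    gamma_sqrt = matrix(zeros(cov.shape))
    for k in range(cov.shape[0] // terms_per_sample):
        first = terms_per_sample * k
        last = first + terms_per_sample
        sub_cov = matrix(cov[first:last, first:last])
        # Square root of the information matrix (Cov^-1) for this point only
        gamma_sqrt[first:last, first:last] = real(scipy.linalg.sqrtm(sub_cov.I))

    scaled = array((gamma_sqrt * matrix(residual).T).T)[0]
    print("whitened residual: %s" % scaled)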