intensity_feature.py
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the Georgia Tech Research Corporation nor the
#       names of its contributors may be used to endorse or promote products
#       derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import roslib; roslib.load_manifest('trf_learn')
import rospy
import cv
import numpy as np
import feature_extractor_fpfh.srv as fsrv
import hrl_opencv.image3d as i3d
import hrl_lib.rutils as ru
import hrl_lib.prob as pr
import hrl_lib.tf_utils as tfu

##
# Low variance resampler, generalized from Probabilistic Robotics to draw
# N samples when N != weights.shape[0].  Returns a list of N row indices
# into weights, each drawn with probability proportional to its weight.
def sample_points(weights, N):
    assert(weights.shape[0] >= N)

    M = weights.shape[0]
    weights = weights / np.sum(weights)   #normalize to a valid distribution
    r = np.random.rand() * (1.0/M)        #random offset into the first bin
    c = weights[0,0]                      #running cumulative weight
    i = 0
    Xt = []

    #pick N of the M evenly spaced thresholds, then walk the cumulative
    #distribution once, advancing i until it covers each threshold
    indices = np.sort(np.random.permutation(np.arange(1, M+1))[0:N]).tolist()
    for m in indices:
        U = r + (m - 1) * (1.0/M)
        while U > c:
            i = i + 1
            c = c + weights[i,0]
        Xt.append(i)
    return Xt

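
#Illustrative sanity check (not part of the original module): over many calls
#the indices returned by sample_points should roughly track the weights.
#np.random.choice (NumPy 1.7+) is used here only as an independent reference
#for the same weighted draw; exact frequencies vary from run to run.
def _compare_sample_points_to_choice(n_trials=2000):
    w = np.matrix([.1, .4, .5]).T
    picks = []
    for _ in range(n_trials):
        picks.extend(sample_points(w, 2))
    ref = np.random.choice(3, size=2*n_trials, p=[.1, .4, .5])
    print 'sample_points frequencies: ', np.bincount(picks) / float(len(picks))
    print 'np.random.choice frequencies:', np.bincount(ref) / float(len(ref))
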

def test_sample_points():
    w = np.matrix([.1,.4,.5]).T
    count = [0., 0., 0.]

    #repeatedly draw pairs of indices and tally how often each point is chosen
    for i in range(6000):
        idx = sample_points(w, 2)
        for x in idx:
            count[x] += 1
    print np.matrix(count) / np.sum(count)


def intensity_pyramid_feature(point2d_image, np_image_arr, win_size, multipliers, flatten=True):
    #Extract one image window per multiplier, all centered at point2d_image;
    #larger windows are resized back down to win_size so every pyramid level
    #has the same dimensions.
    invalid_location = False
    local_intensity = []
    for multiplier in multipliers:
        if multiplier == 1:
            features = i3d.local_window(point2d_image, np_image_arr, win_size, flatten=flatten)
        else:
            features = i3d.local_window(point2d_image, np_image_arr, win_size*multiplier, 
                                        resize_to=win_size, flatten=flatten)
        if features is None:
            invalid_location = True
            break
        else:
            local_intensity.append(features)
    if invalid_location:
        return None
    else:
        if flatten:
            return np.row_stack(local_intensity)
        else:
            return local_intensity

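#Minimal illustrative sketch (not part of the original module): build a
#three-level pyramid feature at one pixel of a synthetic image.  The window
#size and multipliers are made-up values, and the call still relies on
#hrl_opencv's i3d.local_window being importable.
def _example_pyramid_feature():
    img = (np.random.rand(240, 320, 3) * 255).astype(np.uint8)  #stand-in for a camera image
    pt = np.matrix([160, 120]).T                                 #pixel location as a column vector
    fea = intensity_pyramid_feature(pt, img, 15, [1, 2, 4], flatten=True)
    if fea is not None:
        print 'pyramid feature length:', fea.shape[0]
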
class Subsampler:
    #Thin wrapper around the 'subsample' ROS service (feature_extractor_fpfh),
    #which reduces a 3xN point cloud using PCL and returns it as a NumPy matrix.
    def __init__(self):
        self.proxy = rospy.ServiceProxy('subsample', fsrv.SubsampleCalc)

    def subsample(self, points3d, frame='base_link'):
        req = fsrv.SubsampleCalcRequest()
        req.input = ru.np_to_pointcloud(points3d, frame)
        res = self.proxy(req)
        return ru.pointcloud_to_np(res.output)

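#Illustrative usage sketch (not part of the original module): calling the
#service requires a ROS master plus the 'subsample' service advertised by
#feature_extractor_fpfh; the random cloud is a stand-in for real sensor data.
def _example_subsample():
    rospy.init_node('subsample_example')
    cloud_bl = np.matrix(np.random.rand(3, 5000))   #fake 3xN cloud in the base_link frame
    smaller = Subsampler().subsample(cloud_bl, frame='base_link')
    print 'reduced', cloud_bl.shape[1], 'points to', smaller.shape[1]
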
class IntensityCloudFeatureExtractor:

    def __init__(self, pointcloud_bl, cvimage_mat, expected_loc_bl, distance_feature_points, 
                image_T_bl, camera_calibration, params):

        self.pointcloud_bl = pointcloud_bl                      #3xN point cloud in the base_link frame
        self.cvimage_mat = cvimage_mat                          #OpenCV image from the camera
        self.expected_loc_bl = expected_loc_bl                  #3x1 expected target location in base_link
        self.distance_feature_points = distance_feature_points  #3xK landmark points, or None

        self.image_T_bl = image_T_bl                            #transform from base_link to the image frame
        self.camera_calibration = camera_calibration            #projects 3D points to pixel coordinates
        self.params = params
        self.subsampler_service = Subsampler()
        self.sizes = None #important, but access should go through get_sizes() to keep callers decoupled

    def get_sizes(self):
        return self.sizes

    def _subsample(self):
        rospy.loginfo('Subsampling using PCL')
        rospy.loginfo('before %s' % str(self.pointcloud_bl.shape))
        self.pc_sub_samp_bl = self.subsampler_service.subsample(self.pointcloud_bl)
        rospy.loginfo('after %s' % str(self.pc_sub_samp_bl.shape))

    def _sample_points(self):
        rospy.loginfo('Sampling points')
        #weight every subsampled point with a Gaussian centered at the
        #expected target location
        gaussian = pr.Gaussian(self.expected_loc_bl, \
                               np.matrix([[self.params.uncertainty_x**2, 0,      0], \
                                          [0, self.params.uncertainty_y**2,      0], \
                                          [0,      0, self.params.uncertainty_z**2]]))

        pdf = gaussian.pdf_mat()
        probs = np.matrix(pdf(self.pc_sub_samp_bl))

        #sample unique points, favoring those near the expected location
        n_samples = min(self.params.n_samples, self.pc_sub_samp_bl.shape[1])
        pt_indices = list(set(sample_points(probs.T, n_samples)))

        #project the sampled 3D points into the image
        sampled_pts3d_bl = self.pc_sub_samp_bl[:, pt_indices]
        sampled_pts3d_image = tfu.transform_points(self.image_T_bl, sampled_pts3d_bl)
        sampled_pts2d = self.camera_calibration.project(sampled_pts3d_image)
        sampled_pix2d = np.matrix(np.round(sampled_pts2d))

        #throw away points whose pixels fall outside the image; all four
        #bound checks must hold, so combine them with a logical AND
        x = sampled_pix2d[0,:]
        y = sampled_pix2d[1,:]
        good_pts = np.where((x >= 0) & (x < self.camera_calibration.w) \
                          & (y >= 0) & (y < self.camera_calibration.h))[1].A1

        sampled_pts3d_bl = sampled_pts3d_bl[:, good_pts]
        sampled_pix2d = sampled_pix2d[:, good_pts]

        rospy.loginfo('got %s good points' % str(sampled_pix2d.shape[1]))
        return sampled_pts3d_bl, sampled_pix2d

    def feature_vec_at(self, point3d_bl, point2d_image):
        fea_calculated = []

        #Synthetic distance features: Euclidean distance from this point to
        #each of the given landmark points
        distance_feas = None
        if self.distance_feature_points is not None:
            distance_feas = np.power(np.sum(np.power(self.distance_feature_points - point3d_bl, 2), 0), .5).T
            fea_calculated.append(distance_feas)

        #Intensity pyramid centered on the projected pixel
        intensity = intensity_pyramid_feature(point2d_image, np.asarray(self.cvimage_mat), 
                self.params.win_size, self.params.win_multipliers, True)
        if intensity is None:
            return None
        else:
            fea_calculated.append(intensity)

        #Record the size of each feature block the first time through
        if self.sizes is None:
            self.sizes = {}
            if distance_feas is not None:
                self.sizes['distance'] = distance_feas.shape[0]
            self.sizes['intensity'] = intensity.shape[0]

        return fea_calculated

    def extract_features(self):
        self._subsample()
        sampled_pts3d_bl, sampled_pix2d = self._sample_points()
        features_l = []
        pts_with_features = []

        rospy.loginfo('Extracting features')
        for i in range(sampled_pts3d_bl.shape[1]):
            features = self.feature_vec_at(sampled_pts3d_bl[:,i], sampled_pix2d[:,i])
            if features is not None:
                features_l.append(features)
                pts_with_features.append(i)
            if i % 500 == 0:
                rospy.loginfo(i)

        #group the blocks of each feature type together, then stack them so
        #every column of xs is the full feature vector for one sampled point
        features_by_type = zip(*features_l)
        xs = np.row_stack([np.column_stack(f) for f in features_by_type])
        rospy.loginfo('Finished feature extraction.')
        return xs, sampled_pix2d[:, pts_with_features], sampled_pts3d_bl[:, pts_with_features]

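
#End-to-end usage sketch (illustrative only, not part of the original module).
#It assumes a running ROS node, a camera calibration object exposing
#project(), w, and h, a base_link-to-image transform, and a parameter object
#carrying the fields the extractor reads (uncertainty_x/y/z, n_samples,
#win_size, win_multipliers); the values in _ExampleParams are placeholders.
class _ExampleParams:
    uncertainty_x = 0.1          #std dev (meters) of the prior on the target location
    uncertainty_y = 0.1
    uncertainty_z = 0.1
    n_samples = 500              #number of cloud points to sample near the expected location
    win_size = 15                #base image window size in pixels
    win_multipliers = [1, 2, 4]  #pyramid scales

def _example_extract_features(pointcloud_bl, cvimage_mat, expected_loc_bl,
                              image_T_bl, camera_calibration):
    #distance_feature_points=None skips the synthetic distance features
    extractor = IntensityCloudFeatureExtractor(pointcloud_bl, cvimage_mat,
            expected_loc_bl, None, image_T_bl, camera_calibration, _ExampleParams())
    xs, pix2d, pts3d_bl = extractor.extract_features()
    print 'feature matrix', xs.shape, 'for', pts3d_bl.shape[1], 'points'
    return xs, pix2d, pts3d_bl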

