cluster_bounding_box_finder_3d.py
#!/usr/bin/python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following
#    disclaimer in the documentation and/or other materials provided
#    with the distribution.
#  * Neither the name of the Willow Garage nor the names of its
#    contributors may be used to endorse or promote products derived
#    from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# author: Kaijen Hsiao

## @package cluster_bounding_box_finder
# Use PCA to find the principal directions and bounding box for a cluster

from __future__ import division
import roslib
roslib.load_manifest('object_manipulator')
import rospy
import scipy
import pdb
import random
import math
import tf
import scipy.linalg
from geometry_msgs.msg import PoseStamped, Point, Pose, Vector3
from object_manipulation_msgs.srv import FindClusterBoundingBox, FindClusterBoundingBoxResponse
from convert_functions import *

## class for using PCA to find the principal directions and bounding
# box for a point cluster
class ClusterBoundingBoxFinder3D:

    def __init__(self, tf_listener = None, tf_broadcaster = None):

        #init a TF transform listener
        if tf_listener is None:
            self.tf_listener = tf.TransformListener()
        else:
            self.tf_listener = tf_listener

        #init a TF transform broadcaster for the object frame
        if tf_broadcaster is None:
            self.tf_broadcaster = tf.TransformBroadcaster()
        else:
            self.tf_broadcaster = tf_broadcaster


    ##run eigenvector PCA for a 3xn scipy matrix, return the directions (columns in 3x3 matrix)
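    #(the covariance used here is the unnormalized scatter matrix points * points.T;
    # normalizing by the number of points would not change the eigenvectors.  The third
    # eigenvector is replaced by the cross product of the first two so that the returned
    # directions always form a right-handed frame)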
    def pca(self, points):
        cov_mat = points[0:3,:] * points[0:3,:].T
        (values, vectors) = scipy.linalg.eig(cov_mat)
        vectors[0:3,2] = scipy.cross(vectors[0:3,0], vectors[0:3,1])
        return vectors


    ##shift the points to be around the mean for x, y, and z (matrix is 4xn)
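    #(the fourth, homogeneous row is left untouched; the per-axis means are returned
    # so the shift can be undone later when building the object frame)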
    def mean_shift_xyz(self, points):
        mean = [0]*3
        shifted_points = points.copy()
        for i in range(3):
            mean[i] = scipy.mean(points[i,:])
            shifted_points[i,:] -= mean[i]
        return (shifted_points, mean)


    ##remove outliers from the cluster (4xn scipy matrix)
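    #(heuristic: for each axis the points are sorted; if the farthest point lies more
    # than empty_space_width (2 cm) beyond the 98th-percentile value, all points more
    # than edge_width (5 mm) past that value are discarded, and likewise for the low
    # end of the axis)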
    def remove_outliers(self, points):

        points_filtered = points[:]
        empty_space_width = .02
        check_percent = .02
        edge_width = .005

        #remove outliers in each dimension (x,y,z)
        for dim in range(3):

            #sort the points by their values in that dimension
            num_points = scipy.shape(points_filtered)[1]
            points_sorted = points_filtered[:, points_filtered[dim, :].argsort().tolist()[0]]

            #chop off the top points if they are more than empty_space_width away from the bounding box edge
            ind = int(math.floor(num_points*(1-check_percent)))
            if points_sorted[dim, -1] - points_sorted[dim, ind] > empty_space_width:

                #find the first point that isn't within edge_width of the point at ind, and lop off all points after
                searcharr = scipy.array(points_sorted[dim, :]).flatten()
                searchval = points_sorted[dim, ind] + edge_width
                thres_ind = scipy.searchsorted(searcharr, searchval)
                if thres_ind != 0 and thres_ind != len(searcharr):
                    points_filtered = points_sorted[:, 0:thres_ind]
                    rospy.loginfo("chopped points off of dim %d, highest val = %5.3f, searchval = %5.3f"%(dim, points_sorted[dim, -1], searchval))
            else:
                points_filtered = points_sorted

            #now do the same for the low end of the axis
            ind = int(math.floor(num_points*check_percent))
            if points_filtered[dim, ind] - points_filtered[dim, 0] > empty_space_width:

                #find the first point that isn't within edge_width of the point at ind, and lop off all points before
                searcharr = scipy.array(points_sorted[dim, :]).flatten()
                searchval = points_sorted[dim, ind] - edge_width
                thres_ind = scipy.searchsorted(searcharr, searchval)
                if thres_ind != 0 and thres_ind != len(searcharr):
                    points_filtered = points_filtered[:, thres_ind:-1]
                    rospy.loginfo("chopped points off of dim -%d, lowest val = %5.3f, searchval = %5.3f"%(dim, points_sorted[dim, 0], searchval))
        return points_filtered


    ##find the object frame and bounding box for a PointCloud or PointCloud2
    #PCA is run on all three dimensions, so the box axes follow the cluster's principal
    #directions and the point cloud is kept in its original (cluster) frame
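    #returns (object_points, object_bounding_box_dims, object_bounding_box,
    # object_to_base_frame, object_to_cluster_frame), where the bounding box is given
    # as [[xmin, ymin, zmin], [xmax, ymax, zmax]] in the object frame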
    def find_object_frame_and_bounding_box(self, point_cloud):

        #leaving the point cloud in the cluster frame
        cluster_frame = point_cloud.header.frame_id
        self.base_frame = cluster_frame
        (points, cluster_to_base_frame) = transform_point_cloud(self.tf_listener, point_cloud, self.base_frame)

        #run PCA on all 3 dimensions
        (shifted_points, xyz_mean) = self.mean_shift_xyz(points)
        directions = self.pca(shifted_points[0:3, :])

        #convert the points to the object frame:
        #rotate all the points into the frame of the eigenvectors (they are already centered at the origin after the mean shift)
        rotmat = scipy.matrix(scipy.identity(4))
        rotmat[0:3,0:3] = directions
        object_points = rotmat**-1 * shifted_points
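        #(rotmat is orthonormal, so rotmat**-1 is just its transpose; the product
        # expresses each point in the coordinates of the principal directions)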

        #remove outliers from the cluster
        #object_points = self.remove_outliers(object_points)

        #find the object bounding box in the new object frame as [[xmin, ymin, zmin], [xmax, ymax, zmax]] (coordinates of opposite corners)
        object_bounding_box = [[0]*3 for i in range(2)]
        object_bounding_box_dims = [0]*3
        for dim in range(3):
            object_bounding_box[0][dim] = object_points[dim,:].min()
            object_bounding_box[1][dim] = object_points[dim,:].max()
            object_bounding_box_dims[dim] = object_bounding_box[1][dim] - object_bounding_box[0][dim]

        #now shift the object frame and bounding box so that the center is the center of the object
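        #(along each axis, offset = max - dims/2 = (max + min)/2, the midpoint of the box;
        # subtracting it re-centers the box at the object frame origin, and offset_mat folds
        # that translation into the transform built up in rotmat)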
        offset_mat = scipy.mat(scipy.identity(4))
        for i in range(3):
            offset = object_bounding_box[1][i] - object_bounding_box_dims[i]/2.  #center
            object_bounding_box[0][i] -= offset   #mins
            object_bounding_box[1][i] -= offset   #maxes
            object_points[i, :] -= offset
            offset_mat[i,3] = offset
        rotmat = rotmat * offset_mat

        #record the transforms from the object frame to the base frame and to the original cluster frame,
        #then broadcast the object frame over tf (which also lets it be visualized in rviz)
        unshift_mean = scipy.identity(4)
        for i in range(3):
            unshift_mean[i,3] = xyz_mean[i]
        object_to_base_frame = unshift_mean*rotmat
        object_to_cluster_frame = cluster_to_base_frame**-1 * object_to_base_frame

        #broadcast the object frame to tf
        (object_frame_pos, object_frame_quat) = mat_to_pos_and_quat(object_to_cluster_frame)
        self.tf_broadcaster.sendTransform(object_frame_pos, object_frame_quat, rospy.Time.now(), "object_frame", cluster_frame)

        return (object_points, object_bounding_box_dims, object_bounding_box, object_to_base_frame, object_to_cluster_frame)
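
# Minimal usage sketch (not part of the original file): one way this class might be
# driven from a standalone node.  The topic name 'cluster' and the use of
# sensor_msgs/PointCloud messages are assumptions for illustration only.
if __name__ == '__main__':
    from sensor_msgs.msg import PointCloud

    rospy.init_node('cluster_bounding_box_finder_3d_example', anonymous=True)
    cbbf = ClusterBoundingBoxFinder3D()

    def callback(cloud):
        #compute the oriented bounding box for each incoming cluster and log its dimensions
        (object_points, box_dims, box, object_to_base_frame, object_to_cluster_frame) = \
            cbbf.find_object_frame_and_bounding_box(cloud)
        rospy.loginfo("bounding box dims (x y z): %5.3f %5.3f %5.3f" % tuple(box_dims))

    rospy.Subscriber('cluster', PointCloud, callback)
    rospy.spin()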

