bag_processor.py
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import sys
import hrl_lib.util as ut
import hrl_lib.rutils as ru

import tf
import hrl_lib.transforms as htf
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import pr2_msgs.msg as pm

import scipy.spatial as sp
from multiprocessing import Process
import time
import os
import numpy as np
import math
import cv
import hai_sandbox.features as fea
import hrl_pr2_lib.pr2_kinematics as pr2k
import pdb
import sensor_msgs.msg as sm
import hrl_pr2_lib.devices as hpr2

#import perception3d.gaussian_curvature as gc

def segment_msgs(time_segments, msgs):
    segs = []
    for segment in time_segments:
        start = segment[0]
        endt  = segment[1]
        sx = 0
        ex = len(msgs)

        if start != 'start':
            for i, m in enumerate(msgs):
                if m.header.stamp.to_time() < start:
                    sx = i
                else:
                    break

        if endt != 'end':
            for i, m in enumerate(msgs[sx:]):
                if m.header.stamp.to_time() > endt:
                    ex = i + sx
                    break

        segs.append(msgs[sx:ex])
    return segs

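# A minimal usage sketch (hypothetical helper, not called anywhere in this
# file): split a sorted, header-stamped message stream into before/after
# halves around its midpoint time.
def _example_segment_msgs(msgs):
    t_mid = (msgs[0].header.stamp.to_time() + msgs[-1].header.stamp.to_time()) / 2.
    before, after = segment_msgs([['start', t_mid], [t_mid, 'end']], msgs)
    return before, after
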
##
# Find times at which contact has been made.
#
# @param left_mat matrix of left fingertip pressure readings (cells x time)
# @param right_mat matrix of right fingertip pressure readings (cells x time)
# @param times array of timestamps, one per column
# @param thres pressure threshold for declaring contact
# @return sorted array of unique times at which some pressure cell exceeded the threshold
def find_contact_times(left_mat, right_mat, times, thres):
    #zero each cell against its first reading
    left_mat = left_mat - left_mat[:, 0]
    right_mat = right_mat - right_mat[:, 0]

    #When/where did contact happen?
    #TODO: we are assuming just one finger of one arm here!
    loc_r, time_c = np.where(np.abs(left_mat) > thres)
    times_contact = times[time_c.A1]
    unique_times = np.array(np.sort(list(set(times_contact.tolist()))))

    return unique_times

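# A tiny synthetic check (hypothetical data; assumes the pressure readings are
# numpy matrices with one row per cell and times is a flat numpy array): a
# single cell crossing the threshold at samples 2 and 3 yields those two
# timestamps.
def _example_find_contact_times():
    left  = np.matrix([[0., 0., 300., 300., 0.]])
    right = np.matrix([[0., 0.,   0.,   0., 0.]])
    times = np.array([0., .1, .2, .3, .4])
    return find_contact_times(left, right, times, 250)  # -> array([ 0.2,  0.3])
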
def playback_bag(bag_name):
    cmd = 'rosbag play %s --clock' % bag_name
    print cmd
    os.system(cmd)

class SimpleJointStateMsg:
    def __init__(self, header, transforms_dict):
        self.header = header
        self.transforms_dict = transforms_dict

# Find contact points & transform gripper tip to base_frame
class ExtractTFData:
    def __init__(self, listener=None):  #, pointcloud_msg
        rospy.Subscriber('/pressure/l_gripper_motor', pm.PressureState, self.lpress_cb)
        rospy.Subscriber('/joint_states', sm.JointState, self.joint_state_cb)
        self.ftip_frames = ['r_gripper_tool_frame',
                            'l_gripper_tool_frame']

        if listener is not None:
            self.tflistener = listener
        else:
            self.tflistener = tf.TransformListener()

        self.lmat0 = None
        self.rmat0 = None
        self.contact_locs = []
        self.base_frame = '/base_footprint'

        self.contact = False
        self.contact_stopped = False
        self.pointcloud_transform = None

        self.jstate_msgs = []
        #sentinel far in the future until the first joint state arrives
        self.last_jstate_time = time.time() + 999999999.

    def joint_state_cb(self, jmsg):
        tdict = {}

        # good for display purposes (independent of torso link etc.)
        tdict['bf_T_rtip'] = tfu.transform('/base_footprint', self.ftip_frames[0], self.tflistener)
        tdict['bf_T_ltip'] = tfu.transform('/base_footprint', self.ftip_frames[1], self.tflistener)

        # want FK from the torso
        tdict['torso_T_rtip'] = tfu.transform('/torso_lift_link', self.ftip_frames[0], self.tflistener)
        tdict['torso_T_ltip'] = tfu.transform('/torso_lift_link', self.ftip_frames[1], self.tflistener)

        #self.jstate_msgs.append(SimpleJointStateMsg(jmsg.header, tdict))
        self.last_jstate_time = time.time()

    def lpress_cb(self, pmsg):
        lmat = np.matrix((pmsg.l_finger_tip)).T
        rmat = np.matrix((pmsg.r_finger_tip)).T
        if self.lmat0 is None:
            self.lmat0 = lmat
            self.rmat0 = rmat
            return

        #zero against the first reading
        lmat = lmat - self.lmat0
        rmat = rmat - self.rmat0

        ##
        # extract data during contact events
        #touch detected
        if np.any(np.abs(lmat) > 250) or np.any(np.abs(rmat) > 250): #TODO: replace this with something more sound
            #Contact has been made!! look up gripper tip location
            def frame_loc(from_frame):
                p_base = tfu.transform(self.base_frame, from_frame, self.tflistener) \
                               * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0, 0, 0)))
                return tfu.matrix_as_tf(p_base)
            tip_locs = [frame_loc(n)[0] for n in self.ftip_frames]
            t = pmsg.header.stamp.to_time()
            rospy.loginfo("contact detected at %.3f" % t)
            self.contact_locs.append([t, tip_locs])
            self.contact = True
        else:
            if self.contact:
                rospy.loginfo('contact stopped')
                self.contact_stopped = True
            self.contact = False

            #Only get this transform after we've stopped making contact.
            #self.pointcloud_transform = tfu.transform(self.base_frame, self.pointcloud_msg.header.frame_id, self.tflistener)

class JointMsgConverter:
    def __init__(self):
        self.name_dict = None
        self.joint_groups = ut.load_pickle('link_names.pkl')
        #self.joint_groups['rarm']      = rospy.get_param('/r_arm_controller/joints')
        #self.joint_groups['larm']      = rospy.get_param('/l_arm_controller/joints')
        #self.joint_groups['head_traj'] = rospy.get_param('/head_traj_controller/joints')
        #self.joint_groups['torso']     = rospy.get_param('/torso_controller/joints')

    def msgs_to_dict(self, msgs):
        converted = []
        for msg in msgs:
            if self.name_dict is None:
                self.name_dict = {}
                for idx, n in enumerate(msg.name):
                    self.name_dict[n] = idx

                self.joint_idx = {}
                for group in self.joint_groups.keys():
                    self.joint_idx[group] = [self.name_dict[name] for name in self.joint_groups[group]]

            joint_poses = {}
            joint_vel = {}
            joint_eff = {}
            #extract data for pose, vel, eff
            for d, data in zip([joint_poses, joint_vel, joint_eff], [msg.position, msg.velocity, msg.effort]):
                #look up values for each joint group
                dmat = np.matrix(data).T
                for group in self.joint_groups.keys():
                    d[group] = dmat[self.joint_idx[group], 0]
            converted.append({'poses': joint_poses, 'vels': joint_vel, 'efforts': joint_eff, 'time': msg.header.stamp.to_time()})

        #Take out wrap-around of the continuous forearm & wrist roll joints
        for group in ['rarm', 'larm']:
            for i in range(len(self.joint_groups[group])):
                joint = self.joint_groups[group][i]
                if 'forearm_roll' in joint or 'wrist_roll' in joint:
                    #index into the full joint_states arrays, not just this group
                    gidx = self.joint_idx[group][i]
                    delta = msgs[1].position[gidx] - msgs[0].position[gidx]
                    realdelta = delta % (2 * math.pi)
                    if realdelta >= math.pi:
                        realdelta -= 2 * math.pi
                    correction = delta - realdelta
                    for j in range(1, len(msgs)):
                        converted[j]['poses'][group][i,0] -= correction
        return converted

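# A standalone sketch of the unwrapping arithmetic above (hypothetical helper):
# map the raw delta between two samples of a continuous joint into (-pi, pi]
# and return the whole-turn correction to subtract from later samples.
def _wrap_correction(pos0, pos1):
    delta = pos1 - pos0
    realdelta = delta % (2 * math.pi)
    if realdelta >= math.pi:
        realdelta -= 2 * math.pi
    return delta - realdelta  # e.g. _wrap_correction(0.1, 6.1) is ~2*pi
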
##
# @param surf_locs list of ((x,y), lap, size, dir, hess)
# @param point_cloud_3d 3xn matrix
# @param point_cloud_2d 2xn matrix
# @return 3xm matrix of 3D points, one column per SURF location
def assign_3d_to_surf(surf_locs, point_cloud_3d, point_cloud_2d):
    point_cloud_2d_tree = sp.KDTree(np.array(point_cloud_2d.T))
    #print '>> shape of point_cloud_3d', point_cloud_3d.shape

    surf_loc3d = []
    for s in surf_locs:
        loc = s[0]
        idx = point_cloud_2d_tree.query(np.array(loc))[1]
        surf_loc3d.append(point_cloud_3d[:, idx])
        #print '   %s matched to %s 3d %s' % (str(loc), str(point_cloud_2d[:,idx].T), str(point_cloud_3d[:, idx].T))

    surf_loc3d = np.column_stack(surf_loc3d)
    return surf_loc3d

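# A minimal sanity check (hypothetical data): with four 2D/3D point pairs and
# one SURF keypoint at (1, 0), the nearest-neighbor lookup should return the
# 3D point whose 2D projection is (1, 0).
def _example_assign_3d_to_surf():
    cloud_2d = np.matrix([[0., 1., 0., 1.],
                          [0., 0., 1., 1.]])
    cloud_3d = np.matrix(np.row_stack([np.array(cloud_2d), np.zeros((1, 4))]))
    surf_locs = [((1., 0.), 0, 5, 0., 1.)]  # ((x, y), lap, size, dir, hess)
    return assign_3d_to_surf(surf_locs, cloud_3d, cloud_2d)  # column [1, 0, 0].T
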
def find_contacts_and_fk(tflistener, arm):
    find_contact_locs = ExtractTFData(tflistener)
    while not rospy.is_shutdown() \
            and (not find_contact_locs.contact_stopped) \
            and ((time.time() - find_contact_locs.last_jstate_time) < 1.):
        print 'waiting for ExtractTFData to finish.'
        time.sleep(.5)
    print 'got %d joint state messages' % len(find_contact_locs.jstate_msgs)

    contact_locs = find_contact_locs.contact_locs
    #tip_locs in each contact record is ordered [right, left]
    #(see ExtractTFData.ftip_frames)
    if arm == 'right':
        contact_tips = [np.matrix(r[1][0]).T for r in contact_locs]
    else:
        contact_tips = [np.matrix(r[1][1]).T for r in contact_locs]
    contact_tips = np.column_stack(contact_tips)
    return contact_tips[:,0], find_contact_locs.jstate_msgs

def project_2d_bounded(cam_info, point_cloud_cam):
    point_cloud_2d_cam = cam_info.project(point_cloud_cam)
    # only count points within the image bounds (given by cam info)
    _, in_bounds = np.where(np.invert((point_cloud_2d_cam[0,:] >= (cam_info.w-.6)) + (point_cloud_2d_cam[0,:] < 0) \
                                    + (point_cloud_2d_cam[1,:] >= (cam_info.h-.6)) + (point_cloud_2d_cam[1,:] < 0)))
    point_cloud_2d_cam = point_cloud_2d_cam[:, in_bounds.A1]
    point_cloud_reduced_cam = point_cloud_cam[:, in_bounds.A1]

    return point_cloud_2d_cam, point_cloud_reduced_cam

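# A small synthetic check (hypothetical; stubs out the camera model with a
# trivial project() so only the bounds filtering is exercised):
class _IdentityCam:
    def __init__(self, w, h):
        self.w, self.h = w, h
    def project(self, pts3d):
        return pts3d[0:2, :]  # pretend x,y are already pixel coordinates

def _example_project_2d_bounded():
    cam = _IdentityCam(640, 480)
    pts = np.matrix([[10., -5., 700.],
                     [20., 30.,  40.],
                     [ 1.,  1.,   1.]])
    pts2d, pts_kept = project_2d_bounded(cam, pts)
    return pts2d, pts_kept  # only the first column survives the bounds test
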
def find3d_surf(start_conditions):
    ## Project the pointcloud into 2d
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # from base frame to prosilica frame
    point_cloud_pro = tfu.transform_points(start_conditions['pro_T_bf'], point_cloud_bf)
    point_cloud_2d_pro, point_cloud_reduced_pro = project_2d_bounded(start_conditions['camera_info'], point_cloud_pro)

    ## Find 3D SURF features
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(cv.LoadImage(model_file_name))
    surf_loc3d_pro = np.matrix(assign_3d_to_surf(model_surf_loc, point_cloud_reduced_pro, point_cloud_2d_pro))
    return model_surf_loc, model_surf_descriptors, surf_loc3d_pro, point_cloud_2d_pro

##########################################################################
# TODO: need some parameters for processing 'model_image', maybe circles
# of different sizes.
def extract_object_localization_features2(start_conditions, tflistener, arm_used, p_base_map):
    mid_contact_bf, jstate_msgs = find_contacts_and_fk(tflistener, arm_used)
    model_surf_loc, model_surf_descriptors, surf_loc3d_pro, point_cloud_2d_pro = find3d_surf(start_conditions)

    #Find the object frame
    surf_loc3d_bf = (np.linalg.inv(start_conditions['pro_T_bf']) \
            * np.row_stack((surf_loc3d_pro, 1 + np.zeros((1, surf_loc3d_pro.shape[1])))))[0:3,:]
    frame_bf = create_frame(surf_loc3d_bf, p=np.matrix([-1, 0, 0.]).T)
    center_bf = np.mean(surf_loc3d_bf, 1)

    #Find out what the SURF features point to in this new frame
    bf_R_pro = (start_conditions['pro_T_bf'][0:3, 0:3]).T
    bf_R_obj = frame_bf
    x_bf     = frame_bf[:,0]
    x_pro    = bf_R_pro.T * x_bf
    x_ang_pro = math.atan2(x_pro[1,0], x_pro[0,0])

    center_pro = tfu.transform_points(start_conditions['pro_T_bf'], center_bf)
    center2d_pro = start_conditions['camera_info'].project(center_pro)

    surf_directions = []
    surf_dir_center = []
    for loc, lap, size, direction, hess in model_surf_loc:
        surf_directions.append(ut.standard_rad(np.radians(direction) - x_ang_pro))
        direction_to_center = center2d_pro - np.matrix(loc).T
        surf_dir_center.append(direction_to_center)

    surf_dir_center = np.column_stack(surf_dir_center)
    return {
            'contact_bf': mid_contact_bf,
            'surf_loc3d_pro': surf_loc3d_pro,
            'surf_loc2d_pro': model_surf_loc,
            'point_cloud_2d_pro': point_cloud_2d_pro,

            'surf_directions': surf_directions,  #Orientation
            'surf_pose_dir2d': surf_dir_center,  #Position
            'descriptors': model_surf_descriptors,

            'jtransforms': jstate_msgs,
            'frame_bf': frame_bf,
            'center_bf': center_bf
            }

##########################################################################
# TODO: need some parameters for processing 'model_image', maybe circles
# of different sizes.
def extract_object_localization_features(start_conditions, tflistener):
    ## Find contacts
    find_contact_locs = ExtractTFData(tflistener)
    r = rospy.Rate(10)
    while not rospy.is_shutdown() and not find_contact_locs.contact_stopped:
        r.sleep()
    contact_locs = find_contact_locs.contact_locs

    ## Detect features, get 3d location for each feature
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(cv.LoadImage(model_file_name))

    ## Assign 3d location to surf features
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    point_cloud_pro = start_conditions['pro_T_bf'] * np.row_stack((point_cloud_bf, 1 + np.zeros((1, point_cloud_bf.shape[1]))))
    point_cloud_2d_pro = start_conditions['camera_info'].project(point_cloud_pro[0:3,:])
    surf_loc3d_arr_bf = np.array(assign_3d_to_surf(model_surf_loc, point_cloud_bf, point_cloud_2d_pro))
    surf_loc_tree_bf = sp.KDTree(surf_loc3d_arr_bf.T)

    #################################################
    # not needed right now but can be useful later..
    #################################################
    # Get the SURF features closest to the contact locs.
    # tip_locs in each contact record is ordered [right, left]
    # (see ExtractTFData.ftip_frames).
    left_contact, right_contact = zip(*[(np.matrix(r[1][1]).T, np.matrix(r[1][0]).T) for r in contact_locs])
    left_contact = np.column_stack(left_contact)
    right_contact = np.column_stack(right_contact)
    mid_contact_bf = (left_contact[:,0] + right_contact[:,0]) / 2.

    surf_closest_idx = surf_loc_tree_bf.query(np.array(mid_contact_bf.T))[1][0] #Get surf feature at mid point
    surf_closest3d   = surf_loc3d_arr_bf[:, surf_closest_idx]
    surf_closest_fea = model_surf_loc[surf_closest_idx]

    #Create a frame for this group of features
    surf_loc_3d_pro = (start_conditions['pro_T_bf'] * np.row_stack([surf_loc3d_arr_bf, 1 + np.zeros((1, surf_loc3d_arr_bf.shape[1]))]))[0:3,:]
    object_frame_pro = create_frame(np.matrix(surf_loc_3d_pro))

    #Find out what the SURF features point to in this new frame
    surf_directions = []
    for loc, lap, size, direction, hess in model_surf_loc:
        drad = np.radians(direction)
        #project the direction into the canonical object frame
        surf_dir_obj = object_frame_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T

        #measure the angle between the SURF feature and the x axis of the object frame, store this as delta theta
        delta_theta = math.atan2(surf_dir_obj[1,0], surf_dir_obj[0,0])
        surf_directions.append(delta_theta)

    return {
            'descriptors': model_surf_descriptors,
            'directions': surf_directions,
            'contact_bf': mid_contact_bf,
            'closest_feature': surf_closest_fea[0],
            #'object_frame_bf': [np.mean(np.matrix(surf_loc3d_arr_bf), 1), create_frame(surf_loc3d_arr_bf)],
            'object_frame_pro': [np.mean(np.matrix(surf_loc_3d_pro), 1), object_frame_pro, surf_loc_3d_pro]
            }

    #unreachable scratch kept for reference:
    #draw this surf feature in the image
    #proc_surfed = fea.draw_surf(proc_img, model_surf_loc, (200, 0, 0))
    #proc_surfed = fea.draw_surf(proc_surfed, [surf_closest_fea], (0,0,255))

#########################################################
# Pose estimation
#  # for each SURF direction, subtract its stored delta_theta to get a vote
#    for the direction of the object frame's x axis
#  # average all the votes to get the object's x direction

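# A minimal sketch of that voting scheme (hypothetical helper; matched_dirs_rad
# are SURF orientations observed at test time, delta_thetas the offsets stored
# above). Votes are combined with a circular mean so angles near +/-pi average
# correctly.
def _estimate_object_x_direction(matched_dirs_rad, delta_thetas):
    votes = [ut.standard_rad(d - dt) for d, dt in zip(matched_dirs_rad, delta_thetas)]
    return math.atan2(np.mean(np.sin(votes)), np.mean(np.cos(votes)))
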
def create_frame(points3d, p=np.matrix([-1,0,0.]).T):
    u, s, vh = np.linalg.svd(np.cov(points3d))
    u = np.matrix(u)

    # Pick the normal: the least-significant singular vector, flipped to
    # agree with the preferred direction p
    if (u[:,2].T * p)[0,0] < 0:
        normal = -u[:,2]
    else:
        normal = u[:,2]

    # pick the next direction as the one closest to +z or +x
    z_plus = np.matrix([0, 0, 1.0]).T
    x_plus = np.matrix([1, 0, 0.0]).T

    u0 = u[:,0]
    u1 = u[:,1]

    mags = []
    pos_dirs = []
    for udir in [u0, u1, -u0, -u1]:
        for can_dir in [z_plus, x_plus]:
            mags.append((udir.T * can_dir)[0,0])
            pos_dirs.append(udir)
    x_dir = pos_dirs[np.argmax(mags)]

    # Complete the frame with a cross product
    y_dir = np.cross(normal.T, x_dir.T).T
    return np.matrix(np.column_stack([x_dir, y_dir, normal]))

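# A quick synthetic check (hypothetical, not called in this file): points
# spread mostly in the y-z plane should produce a frame whose normal is close
# to the default preferred direction (-x).
def _example_create_frame():
    pts = np.matrix(np.random.randn(3, 200))
    pts[0, :] *= 0.01  # flatten along x so the plane normal is near +/-x
    frame = create_frame(pts)  # 3x3 matrix, columns [x_dir, y_dir, normal]
    return frame
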
def process_bag(full_bag_name, prosilica_image_file, model_image_file, experiment_start_condition_pkl, arm_used='left'):
    bag_path, bag_name_ext = os.path.split(full_bag_name)
    filename, ext = os.path.splitext(bag_name_ext)

    ###############################################################################
    # Play back the bag
    bag_playback = Process(target=playback_bag, args=(full_bag_name,))
    bag_playback.start()

    ###############################################################################
    ## Listen for transforms using rosbag
    rospy.init_node('bag_processor')

    tl = tf.TransformListener()

    print 'waiting for transform'
    tl.waitForTransform('map', 'base_footprint', rospy.Time(), rospy.Duration(20))
    # Extract the starting location map_T_bf
    p_base = tfu.transform('map', 'base_footprint', tl) \
            * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    t, r = tfu.matrix_as_tf(p_base)
    pose_base = (t, r)
    print 'done with tf'

    ##########################################################
    ## Find contact locations
    start_conditions = ut.load_pickle(experiment_start_condition_pkl)
    start_conditions['highdef_image'] = prosilica_image_file
    start_conditions['model_image'] = model_image_file
    rospy.loginfo('extracting object localization features')
    start_conditions['pose_parameters'] = extract_object_localization_features2(start_conditions, tl, arm_used, p_base)

    if bag_playback.is_alive():
        rospy.loginfo('Terminating playback process')
        bag_playback.terminate()
        time.sleep(1)
        bag_playback.terminate()
        time.sleep(1)
        rospy.loginfo('Playback process terminated? %s' % str(not bag_playback.is_alive()))

    ###############################################################################
    # Read the bag using the programmatic API
    pr2_kinematics = pr2k.PR2Kinematics(tl)
    converter = JointMsgConverter()
    rospy.loginfo('opening bag, reading state topics')
    topics_dict = ru.bag_sel(full_bag_name, ['/joint_states', '/l_cart/command_pose',
                                             '/r_cart/command_pose', '/torso_controller/state',
                                             '/pressure/l_gripper_motor', '/pressure/r_gripper_motor'])

    ## Select the arm that has been moving, segment joint states based on contact states.
    if arm_used == 'left':
        pressures = topics_dict['/pressure/l_gripper_motor']
    elif arm_used == 'right':
        pressures = topics_dict['/pressure/r_gripper_motor']
    else:
        raise RuntimeError('arm_used invalid')

    ## find contact times
    rospy.loginfo('Finding contact times')
    left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pressures['msg'])

    ## create segments based on contacts
    # TODO: make this accept more contact stages
    contact_times = find_contact_times(left_f, right_f, ptimes, 250)
    if len(contact_times) > 2:
        time_segments = [['start', contact_times[0]], [contact_times[0], contact_times[-1]], [contact_times[-1], 'end']]
    else:
        time_segments = [['start', 'end']]

    rospy.loginfo('Splitting messages based on contact times')
    ## split pressure readings based on contact times
    pressure_lseg = segment_msgs(time_segments, topics_dict['/pressure/l_gripper_motor']['msg'])
    pressure_rseg = segment_msgs(time_segments, topics_dict['/pressure/r_gripper_motor']['msg'])

    ## split cartesian commands based on contact times
    lcart_seg = segment_msgs(time_segments, topics_dict['/l_cart/command_pose']['msg'])
    rcart_seg = segment_msgs(time_segments, topics_dict['/r_cart/command_pose']['msg'])

    ## split joint states
    joint_states = topics_dict['/joint_states']['msg']
    print 'there are %d joint state messages in bag' % len(joint_states)

    j_segs     = segment_msgs(time_segments, topics_dict['/joint_states']['msg'])
    jseg_dicts = [converter.msgs_to_dict(seg) for seg in j_segs]
    # find the first set of joint states
    j0_dict    = jseg_dicts[0][0]

    ## perform FK
    rospy.loginfo('Performing FK to find tip locations')
    bf_T_obj = htf.composeHomogeneousTransform(start_conditions['pose_parameters']['frame_bf'],
                                               start_conditions['pose_parameters']['center_bf'])
    obj_T_bf = np.linalg.inv(bf_T_obj)
    for jseg_dict in jseg_dicts:
        for d in jseg_dict:
            rtip_bf = pr2_kinematics.right.fk('base_footprint',
                    'r_wrist_roll_link', 'r_gripper_tool_frame',
                    d['poses']['rarm'].A1.tolist())
            ltip_bf = pr2_kinematics.left.fk('base_footprint',
                    'l_wrist_roll_link', 'l_gripper_tool_frame',
                    d['poses']['larm'].A1.tolist())
            rtip_obj = obj_T_bf * rtip_bf
            ltip_obj = obj_T_bf * ltip_bf

            d['rtip_obj'] = tfu.matrix_as_tf(rtip_obj)
            d['ltip_obj'] = tfu.matrix_as_tf(ltip_obj)

            d['rtip_bf'] = tfu.matrix_as_tf(rtip_bf)
            d['ltip_bf'] = tfu.matrix_as_tf(ltip_bf)

    ###############################################################################
    # make movement state dictionaries, one for each state
    movement_states = []
    for i, seg in enumerate(time_segments):
        name = "state_%d" % i
        start_times = [lcart_seg[i][0].header.stamp.to_time(),
                       rcart_seg[i][0].header.stamp.to_time(),
                       jseg_dicts[i][0]['time'],
                       pressure_lseg[i][0].header.stamp.to_time(),
                       pressure_rseg[i][0].header.stamp.to_time()]

        sdict = {'name': name,
                 'start_time': np.min(start_times),
                 'cartesian': [[ru.ros_to_dict(ps) for ps in lcart_seg[i]],
                               [ru.ros_to_dict(ps) for ps in rcart_seg[i]]],
                 'joint_states': jseg_dicts[i]
                 #'pressure': [pressure_lseg[i], pressure_rseg[i]]
                 }

        movement_states.append(sdict)

    # store in a dict
    data = {'start_conditions': start_conditions, # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points' (in base frame),
                                                  # 'highdef_image', 'model_image',
                                                    ## 'pose_parameters'
                                                        ## 'descriptors'
                                                        ## 'directions' (wrt canonical orientation)
                                                        ## 'closest_feature'
            'base_pose': pose_base,
            'robot_pose': j0_dict,
            'arm': arm_used,
            'movement_states': movement_states}

    # save dicts to pickles
    processed_bag_name = '%s_processed.pkl' % os.path.join(bag_path, filename)
    rospy.loginfo('saving to %s' % processed_bag_name)
    ut.save_pickle(data, processed_bag_name)
    bag_playback.join()
    rospy.loginfo('finished!')


#python bag_processor.py 08_06/light/off/_2010-08-06-13-35-04.bag 08_06/light/off/off_start.png light_switch_model.png 08_06/light/off/off_start.pkl
if __name__ == '__main__':
    arm_used = 'left'
    full_bag_name                  = sys.argv[1]
    prosilica_image_file           = sys.argv[2]
    model_image_file               = sys.argv[3]
    experiment_start_condition_pkl = sys.argv[4]

    process_bag(full_bag_name, prosilica_image_file, model_image_file, experiment_start_condition_pkl, arm_used=arm_used)

#def create_frame2(contact_point, points3d, p=np.matrix([1,0,0.]).T):
### contact point is the center, local plane from 3D points close to contact
#    u, s, vh = np.linalg.svd(points3d)
#    u = np.matrix(u)
#
#    # Pick normal
#    if (u[:,2].T * p)[0,0] < 0:
#        normal = -u[:,2]
#    else:
#        normal = u[:,2]
#
#    # pick the next direction as the one closest to +z or +x
#    z_plus = np.matrix([0,0,1.0]).T
#    x_plus = np.matrix([1,0,0.0]).T
#
#    u0 = u[:,0]
#    u1 = u[:,1]
#
#    mags = []
#    pos_dirs = []
#    for udir in [u0, u1, -u0, -u1]:
#        for can_dir in [z_plus, x_plus]:
#            mags.append((udir.T * can_dir)[0,0])
#            pos_dirs.append(udir)
#    x_dir = pos_dirs[np.argmax(mags)]
#
#    # Complete the frame with a cross product
#    y_dir = np.cross(normal.T, x_dir.T).T
#    return np.column_stack([x_dir, y_dir, normal])
    #for loc, lap, size, direction, hess in model_surf_loc:
    #    drad = np.radians(direction)
    #    #project direction into the canonical object frame
    #    #surf_dir_obj = object_frame_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T
    #    #obj_R_bf = frame_bf.T

    #    bf_R_pro = (start_conditions['pro_T_bf'][0:3,0:3]).T
    #    surf_dir_obj = frame_bf.T * bf_R_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T

    #    #measure angle between SURF feature and x axis of object frame, store this as delta theta
    #    delta_theta = math.atan2(surf_dir_obj[1,0], surf_dir_obj[0,0])
    #    surf_directions.append(delta_theta)

    ##print 'frame_bf.T', frame_bf.T
    ##print 'bf_R_pro', (start_conditions['pro_T_bf'][0:3,0:3]).T

    #r = rospy.Rate(10)
    #while not rospy.is_shutdown() and not find_contact_locs.contact_stopped:
    #    r.sleep()
    #contact_locs = find_contact_locs.contact_locs
    #et = ExtractTFData()
    # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points']

    #print 'got loc %.2f %.2f %.2f' % (t[0], t[1], t[2])
    #loc_fname = '%s_loc.pkl' % os.path.join(bag_path, filename)
    #print 'saving to %s' % loc_fname
    #ut.save_pickle((t,r), loc_fname)

    #print contact_times - contact_times[0]
    #print contact_times[1:] - contact_times[:-1]
    #pb.plot(contact_times - contact_times[0], 'g.')
    #pb.show()

# Layout of the pickle saved by process_bag:
# pose_base = (t, r); t is a length-3 translation, r a length-4 quaternion
# j0_dict: {'poses': joint_poses, 'vels': joint_vel, 'efforts': joint_eff, 'time': msg.header.stamp.to_time()}
#          with each entry having keys: ['rarm'] ['larm'] ['head_traj'] ['torso']
# arm is a string in {'left', 'right'}
# movement_states entries have keys:
#       'name'
#       'start_time'
#       'cartesian'
#       'joint_states'
#       'pressure'
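
# A minimal sketch of reading that pickle back (hypothetical helper; field
# names are the ones assembled in process_bag above):
def _example_load_processed(pkl_name):
    data = ut.load_pickle(pkl_name)
    print 'arm used:', data['arm']
    for state in data['movement_states']:
        print state['name'], 'started at', state['start_time'], \
            'with', len(state['joint_states']), 'joint state samples'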
            #'movement_states': [{'name': #,
            #                     'cartesian': #,
            #                     'joint_states': # j_dicts, j_times
            #                     'pressure': #,
            #                     }]

    ##ut.save_pickle(data, extracted_name)

    #if len(joint_states) <= 1:
    #    raise RuntimeError('Not enough joint state messages. Got %d messages.' % len(joint_states))


hai_sandbox
Author(s): Hai Nguyen