00001 import roslib; roslib.load_manifest('hai_sandbox')
00002 import rospy
00003
00004 import hrl_lib.prob as pb
00005 import hrl_lib.util as ut
00006 import hai_sandbox.pr2 as pr2
00007 import hai_sandbox.msg as hm
00008 import hrl_lib.msg as hlm
00009 import pr2_msgs.msg as pm
00010 import sensor_msgs.msg as sm
00011 import scipy.spatial as sp
00012 import actionlib
00013 import hai_sandbox.cmd_process as cmdp
00014
00015 import tf.transformations as tr
00016 import hrl_lib.transforms as htf
00017 import hrl_lib.rutils as ru
00018 import numpy as np
00019 import pdb
00020 import sys
00021 import visualization_msgs.msg as vm
00022 import geometry_msgs.msg as gm
00023
00024 import time
00025 import hrl_lib.tf_utils as tfu
00026 import tf
00027 import cv
00028 from cv_bridge.cv_bridge import CvBridge, CvBridgeError
00029 import hai_sandbox.features as fea
00030 import hai_sandbox.bag_processor as bp
00031 import hrl_camera.ros_camera as rc
00032 import math
00033 import hrl_pr2_lib.devices as hpr2
00034 import hai_sandbox.viz as viz
00035 import os
00036 import threading
00037
00038
00039 def dict_to_arm_arg(d):
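    """Unpack a pickled PoseStamped-style dictionary into the
    [translation, rotation, frame_id, stamp] list format used by the arm interfaces."""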
00040 trans = [d['pose']['position']['x'],
00041 d['pose']['position']['y'],
00042 d['pose']['position']['z']]
00043 rot = [d['pose']['orientation']['x'],
00044 d['pose']['orientation']['y'],
00045 d['pose']['orientation']['z'],
00046 d['pose']['orientation']['w']]
00047 return [trans, rot, d['header']['frame_id'], d['header']['stamp']]
00048
00049
00052 def match_image(descriptors, image, threshold=.6):
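    """Match stored SURF descriptors against features extracted from image, keeping
    candidates that pass the ratio test (nearest / second-nearest distance < threshold)."""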
00053 cgray = fea.grayscale(image)
00054 c_loc, c_desc = fea.surf(cgray)
00055 features_db = sp.KDTree(np.array(descriptors))
00056 matched = []
00057 for i, desc in enumerate(c_desc):
00058 dists, idxs = features_db.query(np.array(desc), 2)
00059 ratio = dists[0] / dists[1]
00060 if ratio < threshold:
00061 matched.append({'model': descriptors[idxs[0]],
00062 'model_idx': idxs[0],
00063 'candidate': desc,
00064 'candidate_loc': c_loc[i]})
00065 return matched
00066
00067
00068 def normalize_ang(a):
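    """Wrap an angle in radians into the interval [0, 2*pi]."""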
00069 while a < 0:
00070 a = a + (2*np.pi)
00071 while a > (2*np.pi):
00072 a = a - (2*np.pi)
00073 return a
00074
00075
00076 def find_object_pose(image, point_cloud_msg, data_dict, pro_T_bf, cam_info, RADIUS=.1):
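    """Estimate the demonstrated object's pose in the current scene.

    Matches the demonstration's SURF descriptors against image, uses the matches to
    vote for the object center, fits a local frame to the point cloud around that
    center, and infers the in-plane orientation from SURF directions.  Suffixes:
    _bf = base_footprint frame, _pro = prosilica (high_def_optical_frame) frame.
    Returns (3x3 object frame, center in base_footprint, display intermediates).
    """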
00077
00078 desc = data_dict['start_conditions']['pose_parameters']['descriptors']
00079 match_info = match_image(desc, image, .6)
00080 match_idxs = [r['model_idx'] for r in match_info]
00081 match_locs = [r['candidate_loc'] for r in match_info]
00082 rospy.loginfo('Matched %d out of %d descriptors' % (len(match_info), len(desc)))
00083 if len(match_info) < 4:
00084 raise RuntimeError('Insufficient number of matches')
00085
00086
00087
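    # Transform the laser cloud into the camera frame and project it to 2D so SURF
    # pixel locations can be assigned 3D points.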
00088 point_cloud_bf = ru.pointcloud_to_np(point_cloud_msg)
00089 point_cloud_pro = tfu.transform_points(pro_T_bf, point_cloud_bf)
00090 point_cloud_2d_pro, point_cloud_reduced_pro = bp.project_2d_bounded(cam_info, point_cloud_pro)
00091
00092
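    # Each matched feature votes for the object center with its remembered 2D offset;
    # the median vote is taken as the center estimate.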
00093 model_directions = np.matrix(data_dict['start_conditions']['pose_parameters']['surf_pose_dir2d'][:, match_idxs])
00094 match_locs_mat = np.column_stack([np.matrix(l[0]).T for l in match_locs])
00095
00096 expected_position_set2d = match_locs_mat + model_directions
00097
00098 expected_position2d = np.median(expected_position_set2d, 1)
00099 expected_position2d = np.column_stack([expected_position2d, expected_position_set2d])
00100 expected_position_locs = [[[expected_position2d[0,i], expected_position2d[1,i]]] for i in range(expected_position2d.shape[1])]
00101
00102 expected_position3d_pro = np.matrix(bp.assign_3d_to_surf(expected_position_locs,
00103 point_cloud_reduced_pro, point_cloud_2d_pro))
00104 expected_positions3d_bf = tfu.transform_points(np.linalg.inv(pro_T_bf), expected_position3d_pro)
00105
00106
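    # Look up 3D locations for the matched SURF features.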
00107 matched_surf_loc3d_pro = np.matrix(bp.assign_3d_to_surf(match_locs, point_cloud_reduced_pro, point_cloud_2d_pro))
00108 matched_surf_loc3d_bf = tfu.transform_points(np.linalg.inv(pro_T_bf), matched_surf_loc3d_pro)
00109
00110
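    # Gather cloud points within RADIUS of the estimated center (in base_footprint).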
00111 center_bf = expected_positions3d_bf[:,0]
00112 point_cloud_kd_bf = sp.KDTree(point_cloud_bf.T)
00113 neighbor_idxs = point_cloud_kd_bf.query_ball_point(np.array(center_bf.T), RADIUS)
00114 points_nearby_bf = point_cloud_bf[:, neighbor_idxs[0]]
00115
00116
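    # Fit a local frame to the neighborhood: column 2 is the surface normal,
    # columns 0-1 span the tangent plane.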
    a_frame_bf = bp.create_frame(points_nearby_bf, p=-center_bf)
00118 normal_bf = a_frame_bf[:,2]
00119 null_bf = a_frame_bf[:,0:2]
00120
00152
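    # Each matched feature hypothesizes the object's in-plane orientation: its current
    # SURF direction minus the direction it had relative to the model at demonstration time.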
00153 model_delta_thetas = np.array(data_dict['start_conditions']['pose_parameters']['surf_directions'])[match_idxs]
00154 cand_rad_angs = np.array([np.radians(d) for loc, lap, size, d, hess in match_locs])
00155 hypothesized_angles = np.array([ut.standard_rad(cand_rad_angs[i] - model_delta_thetas[i]) for i in range(len(model_delta_thetas))])
00156
00157
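    # Average the angle hypotheses as a unit vector (mean cos/sin) in the camera frame.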
00158 x_vec_hat_pro = np.matrix([np.mean(np.cos(hypothesized_angles)), np.mean(np.sin(hypothesized_angles)), 0.]).T
00159
00160
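    # Rotate the hypothesized x direction from the camera frame into base_footprint.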
00161 x_hat_bf = np.linalg.inv(pro_T_bf[0:3,0:3]) * x_vec_hat_pro
00162
00163
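    # Least-squares projection of x onto the tangent plane spanned by null_bf.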
00164 x_hat_null_bf = null_bf * np.linalg.inv(null_bf.T * null_bf) * null_bf.T * x_hat_bf
00165
00166
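    # Complete a right-handed frame (y = normal x x) and rotate the per-feature
    # direction vectors into base_footprint for display.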
00167 y_bf = np.cross(normal_bf.T, x_hat_null_bf.T).T
00168 surf_vecs_bf = np.linalg.inv(pro_T_bf[0:3,0:3]) * np.row_stack([np.cos(hypothesized_angles),
00169 np.sin(hypothesized_angles),
00170 np.zeros((1,len(hypothesized_angles)))])
00171 display_dict = {'surf_vecs_bf': surf_vecs_bf,
00172 'surf_loc3d_pro': matched_surf_loc3d_pro,
00173 'expected_positions3d_bf': expected_positions3d_bf}
00174
00175 rospy.loginfo('Observed normal: %s' % str(normal_bf.T))
00176 rospy.loginfo('Inferred x direction: %s' % str(x_hat_bf.T))
00177
00178
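    # Normalize the in-plane axes and return the object frame [x y normal], its center,
    # and intermediates for visualization.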
00179 x_hat_null_bf = x_hat_null_bf / np.linalg.norm(x_hat_null_bf)
00180 y_bf = y_bf / np.linalg.norm(y_bf)
00181 return np.column_stack([x_hat_null_bf, y_bf, normal_bf]), center_bf, display_dict
00182
00183
00184 def cv_to_ros(cvimg, frame_id):
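    """Convert an OpenCV image to a sensor_msgs/Image stamped with frame_id."""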
00185 bridge = CvBridge()
00186 rosimg = bridge.cv_to_imgmsg(cvimg)
00187 rosimg.header.frame_id = frame_id
00188 return rosimg
00189
00190 class DisplayRecordedPoseReduced:
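    """Publish the estimated object frame, point clouds, SURF features, arm
    trajectories, and base pose for RViz; when offline, also republish the
    prosilica image, camera info, and camera transform."""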
00191 def __init__(self, online, seconds_to_run):
00192 self.online = online
00193 self.seconds_to_run = seconds_to_run
00194 self.marker_pub = rospy.Publisher('object_frame', vm.Marker)
00195 self.point_cloud_pub = rospy.Publisher('original_point_cloud', sm.PointCloud)
00196 self.surf_pub = rospy.Publisher('surf_features', sm.PointCloud)
00197 self.expected_positions3d_pub = rospy.Publisher('expected_position3d_bf', sm.PointCloud)
00198 self.left_arm_trajectory_pub = rospy.Publisher('left_arm_trajectory', sm.PointCloud)
00199 self.right_arm_trajectory_pub = rospy.Publisher('right_arm_trajectory', sm.PointCloud)
00200 self.contact_pub = rospy.Publisher('contact_bf', vm.Marker)
00201 self.fine_pose = rospy.Publisher('base_pose_obj', gm.PoseStamped)
00202
00203 if not self.online:
00204 self.proc_pub = rospy.Publisher('/prosilica/image_rect_color', sm.Image)
00205 self.caminfo = rospy.Publisher('/prosilica/camera_info', sm.CameraInfo)
00206 self.tfbroadcast = tf.TransformBroadcaster()
00207
00208 def display(self, frame_bf, center_bf,
00209 point_cloud_bf, surf_loc3d_pro, proc_img, expected_position3d_bf, pro_T_bf,
00210 left_arm_trajectory, right_arm_trajectory, fine_position_bf, contact_bf=None):
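        """Convert the inputs to ROS messages and publish them for seconds_to_run seconds."""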
00211
00212 frame_marker = viz.create_frame_marker(center_bf, frame_bf, .4, 'base_footprint')
00213 proc_image_msg = cv_to_ros(proc_img, 'high_def_optical_frame')
00214 proc_cam_info_msg = ut.load_pickle('prosilica_caminfo.pkl')
00215 surf_pc = ru.np_to_pointcloud(surf_loc3d_pro, 'high_def_optical_frame')
00216 expected_position3d_bf = ru.np_to_pointcloud(expected_position3d_bf, 'base_footprint')
00217 left_arm_traj_bf = ru.np_to_pointcloud(left_arm_trajectory, 'base_footprint')
00218 right_arm_traj_bf = ru.np_to_pointcloud(right_arm_trajectory, 'base_footprint')
00219
00220 ps_fine_position_bf = gm.PoseStamped()
00221 ps_fine_position_bf.header.frame_id = '/base_footprint'
00222 ps_fine_position_bf.pose.position.x = fine_position_bf[0][0]
00223 ps_fine_position_bf.pose.position.y = fine_position_bf[0][1]
00224 ps_fine_position_bf.pose.position.z = fine_position_bf[0][2]
00225 ps_fine_position_bf.pose.orientation.x = fine_position_bf[1][0]
00226 ps_fine_position_bf.pose.orientation.y = fine_position_bf[1][1]
00227 ps_fine_position_bf.pose.orientation.z = fine_position_bf[1][2]
00228 ps_fine_position_bf.pose.orientation.w = fine_position_bf[1][3]
00229
        if contact_bf is not None:
00231 contact_marker = viz.single_marker(contact_bf, np.matrix([0,0,0,1.]).T, 'sphere', 'base_footprint', scale=[.02, .02, .02])
00232 else:
00233 contact_marker = None
00234
00235 self.publish_messages(frame_marker, proc_image_msg, proc_cam_info_msg,
00236 surf_pc, point_cloud_bf, expected_position3d_bf, pro_T_bf,
00237 left_arm_traj_bf, right_arm_traj_bf, ps_fine_position_bf, contact_marker)
00238
00239 def publish_messages(self, frame_marker, proc_image_msg, proc_cam_info_msg,
00240 surf_pc, point_cloud_bf, expected_position3d_bf, pro_T_bf,
00241 left_arm_traj_bf, right_arm_traj_bf, fine_position_bf, contact_marker = None):
00242 start_time = time.time()
00243
00244 r = rospy.Rate(10.)
00245 while not rospy.is_shutdown():
00246 frame_marker.header.stamp = rospy.get_rostime()
00247 point_cloud_bf.header.stamp = rospy.get_rostime()
00248 proc_image_msg.header.stamp = rospy.get_rostime()
00249 proc_cam_info_msg.header.stamp = rospy.get_rostime()
00250 surf_pc.header.stamp = rospy.get_rostime()
00251 expected_position3d_bf.header.stamp = rospy.get_rostime()
00252 left_arm_traj_bf.header.stamp = rospy.get_rostime()
00253 right_arm_traj_bf.header.stamp = rospy.get_rostime()
00254 fine_position_bf.header.stamp = rospy.get_rostime()
            if contact_marker is not None:
00256 contact_marker.header.stamp = rospy.get_rostime()
00257
00258
00259 self.marker_pub.publish(frame_marker)
00260 self.point_cloud_pub.publish(point_cloud_bf)
00261 self.surf_pub.publish(surf_pc)
00262 self.expected_positions3d_pub.publish(expected_position3d_bf)
00263 self.left_arm_trajectory_pub.publish(left_arm_traj_bf)
00264 self.right_arm_trajectory_pub.publish(right_arm_traj_bf)
00265 self.fine_pose.publish(fine_position_bf)
            if contact_marker is not None:
00267 self.contact_pub.publish(contact_marker)
00268
00269 if not self.online:
00270 self.proc_pub.publish(proc_image_msg)
00271 self.caminfo.publish(proc_cam_info_msg)
00272
                trans, rot = tfu.matrix_as_tf(np.linalg.inv(pro_T_bf))
                self.tfbroadcast.sendTransform(trans, rot, rospy.Time.now(), '/high_def_optical_frame', "/base_footprint")

            r.sleep()
00277 if (time.time() - start_time) > self.seconds_to_run:
00278 break
00279
00280
00281 class DisplayRecordedPose:
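    """Replay a recorded demonstration's object pose, contact point, SURF features,
    and point cloud for visualization."""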
00282 def __init__(self):
00283 rospy.init_node('display_pose')
00284 self.marker_pub = rospy.Publisher('object_frame', vm.Marker)
00285 self.point_cloud_pub = rospy.Publisher('original_point_cloud', sm.PointCloud)
00286 self.surf_pub = rospy.Publisher('surf_features', sm.PointCloud)
00287 self.proc_pub = rospy.Publisher('/prosilica/image_rect_color', sm.Image)
00288 self.caminfo = rospy.Publisher('/prosilica/camera_info', sm.CameraInfo)
00289 self.contact_pub = rospy.Publisher('contact_bf', vm.Marker)
00290 self.tfbroadcast = tf.TransformBroadcaster()
00291
00292 def display_original_object_pose(self, data_fname):
00293 print 'loading pickle'
00294 data_dict = ut.load_pickle(data_fname)
00295 surf_loc3d_pro = data_dict['start_conditions']['pose_parameters']['surf_loc3d_pro']
00296 surf_loc2d_pro = data_dict['start_conditions']['pose_parameters']['surf_loc2d_pro']
00297 contact_bf = data_dict['start_conditions']['pose_parameters']['contact_bf']
00298 point_cloud_2d_pro = data_dict['start_conditions']['pose_parameters']['point_cloud_2d_pro']
00299 frame_bf = data_dict['start_conditions']['pose_parameters']['frame_bf']
00300 center_bf = data_dict['start_conditions']['pose_parameters']['center_bf']
00301
00302 model_file_name = data_dict['start_conditions']['model_image']
00303 pc = data_dict['start_conditions']['points']
00304 surf_cloud_2d_pro = data_dict['start_conditions']['camera_info'].project(surf_loc3d_pro[0:3,:])
00305
00306 rospy.loginfo('Original normal (base_footprint): %s' % str(frame_bf[:,2].T))
00307 rospy.loginfo('Original x direction (base_footprint): %s' % str(frame_bf[:,0].T))
00308
00309 rospy.loginfo('SURF directions')
00310 for e in data_dict['start_conditions']['pose_parameters']['surf_directions']:
00311 print np.degrees(e)
00312
00313
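        # Mark projected laser points in the model image and save debug renderings of
        # the original vs. reprojected SURF locations.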
00314 discrete_loc = np.array(np.round(point_cloud_2d_pro), dtype='int')
00315 proc_np = np.asarray(cv.LoadImageM(model_file_name))
00316 proc_np[discrete_loc[1,:], discrete_loc[0,:]] = 0
00317 proc_cv = cv.fromarray(proc_np)
00318 cv.SaveImage('proc_3d_orig_surf.png', fea.draw_surf(proc_cv, surf_loc2d_pro, (200, 0, 0)))
00319
00320
00321 nslocs = []
00322 for idx, sloc in enumerate(surf_loc2d_pro):
00323 loc, lap, size, d, hess = sloc
00324 nslocs.append(((surf_cloud_2d_pro[0,idx], surf_cloud_2d_pro[1,idx]), lap, size, d, hess))
00325 cv.SaveImage('proc_3d_proj_surf.png', fea.draw_surf(proc_cv, nslocs, (0, 200, 0)))
00326
00327 frame_marker = viz.create_frame_marker(center_bf, frame_bf, .4, 'base_footprint')
00328 proc_image_msg = cv_to_ros(cv.LoadImage(model_file_name), 'high_def_optical_frame')
00329 proc_cam_info_msg = ut.load_pickle('prosilica_caminfo.pkl')
00330 contact_marker = viz.single_marker(contact_bf, np.matrix([0,0,0,1.]).T, 'sphere', 'base_footprint', scale=[.02, .02, .02])
00331 surf_pc = ru.np_to_pointcloud(surf_loc3d_pro, 'high_def_optical_frame')
00332 self.publish_messages(pc, frame_marker, contact_marker, surf_pc, proc_image_msg, proc_cam_info_msg, data_dict)
00333
00334
00335
00336 def publish_messages(self, pc, frame_marker, contact_marker, surf_pc, proc_image_msg, proc_cam_info_msg, data_dict):
00337 print 'publishing msgs'
00338 r = rospy.Rate(10.)
00339 while not rospy.is_shutdown():
00340 pc.header.stamp = rospy.get_rostime()
00341 frame_marker.header.stamp = rospy.get_rostime()
00342 contact_marker.header.stamp = rospy.get_rostime()
00343 surf_pc.header.stamp = rospy.get_rostime()
00344 proc_image_msg.header.stamp = rospy.get_rostime()
00345 proc_cam_info_msg.header.stamp = rospy.get_rostime()
00346
00347
00348 self.marker_pub.publish(frame_marker)
00349 self.point_cloud_pub.publish(pc)
00350 self.surf_pub.publish(surf_pc)
00351 self.proc_pub.publish(proc_image_msg)
00352 self.caminfo.publish(proc_cam_info_msg)
00353 self.contact_pub.publish(contact_marker)
00354
00355
            trans, rot = tfu.matrix_as_tf(np.linalg.inv(data_dict['start_conditions']['pro_T_bf']))
            self.tfbroadcast.sendTransform(trans, rot, rospy.Time.now(), '/high_def_optical_frame', "/base_footprint")

            trans, rot = tfu.matrix_as_tf(np.linalg.inv(data_dict['start_conditions']['map_T_bf']))
            self.tfbroadcast.sendTransform(trans, rot, rospy.Time.now(), '/map', "/base_footprint")

            r.sleep()
00363 print 'done'
00364
00365 def image_diff_val(before_frame, after_frame):
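    """Mean absolute per-pixel difference between two frames, normalized to [0, 1]."""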
    br = np.asarray(before_frame).astype(np.float64)
    ar = np.asarray(after_frame).astype(np.float64)
    max_sum = br.shape[0] * br.shape[1] * br.shape[2] * 255.
    sdiff = np.sum(np.abs(ar - br)) / max_sum
00370 return sdiff
00371
00372 def image_diff_val2(before_frame, after_frame):
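    """Absolute difference of the two frames' normalized mean intensities (a cheap
    change measure that ignores where in the image the change happened)."""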
00373 br = np.asarray(before_frame)
00374 ar = np.asarray(after_frame)
00375 max_sum = br.shape[0] * br.shape[1] * br.shape[2] * 255.
00376 sdiff = np.abs((np.sum(br) / max_sum) - (np.sum(ar) / max_sum))
00377
00378 return sdiff
00379
00380
00386 class ExecuteAction(threading.Thread):
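    """Run f(*args) on a background thread (used below to save trial pickles
    without blocking the exploration loop)."""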
00387 def __init__(self, f, args):
00388 threading.Thread.__init__(self)
00389 self.f = f
00390 self.args = args
00391
00392 def run(self):
00393 self.f(*self.args)
00394
00395 class Imitate:
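    """Imitation behavior for the PR2: localize a demonstrated object, replay the
    recorded trajectories, and detect whether the action had an effect."""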
00396
00397 def __init__(self, offline=False):
00398 rospy.init_node('imitate')
00399 self.should_switch = False
00400 self.lmat0 = None
00401 self.rmat0 = None
00402 self.pressure_exceeded = False
00403
00404 if not offline:
00405 self.behavior_marker_pub = rospy.Publisher('imitate_behavior_marker', hlm.String)
00406 self.tf_listener = tf.TransformListener()
00407 self.robot = pr2.PR2(self.tf_listener)
00408 self.prosilica = rc.Prosilica('prosilica', 'streaming')
00409 self.wide_angle_camera = rc.ROSCamera('/wide_stereo/left/image_rect_color')
00410 self.laser_scanner = hpr2.LaserScanner('point_cloud_srv')
00411 self.cam_info = rc.ROSCameraCalibration('/prosilica/camera_info')
00412 rospy.loginfo('waiting for cam info message')
00413 self.cam_info.wait_till_msg()
00414 rospy.loginfo('got cam info')
00415 rospy.Subscriber('pressure/l_gripper_motor', pm.PressureState, self.lpress_cb)
00416 rospy.loginfo('finished init')
00417
00418 def shutdown(self):
00419 if self.should_switch:
00420 rospy.loginfo('switching back joint controllers')
00421 self.robot.controller_manager.switch(['l_arm_controller', 'r_arm_controller'], ['l_cart', 'r_cart'])
00422
00423 def lpress_cb(self, pmsg):
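        """Fingertip pressure callback: record the first reading as a zero reference
        and flag self.pressure_exceeded when any cell deviates by more than TOUCH_THRES."""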
00424 TOUCH_THRES = 4000
00425 lmat = np.matrix((pmsg.l_finger_tip)).T
00426 rmat = np.matrix((pmsg.r_finger_tip)).T
        if self.lmat0 is None:
00428 self.lmat0 = lmat
00429 self.rmat0 = rmat
00430 return
00431
00432 lmat = lmat - self.lmat0
00433 rmat = rmat - self.rmat0
00434
00435
00436 if np.any(np.abs(lmat) > TOUCH_THRES) or np.any(np.abs(rmat) > TOUCH_THRES):
            rospy.loginfo('Pressure limit exceeded! %d %d' % (np.max(np.abs(lmat)), np.max(np.abs(rmat))))
00438 self.pressure_exceeded = True
00439
00440
00441
00442 def manipulate_cartesian_behavior(self, data, bf_T_obj, offset=np.matrix([.01,0,-.01]).T):
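        """Replay the demonstration's gripper-tip trajectories with the Cartesian
        controllers, expressed in the newly estimated object frame plus an offset."""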
00443 rospy.loginfo('STATE manipulate')
00444 rospy.loginfo('there are %d states' % len(data['movement_states']))
00445 rospy.loginfo('switching controllers')
00446 self.robot.controller_manager.switch(['l_cart', 'r_cart'], ['l_arm_controller', 'r_arm_controller'])
00447 self.should_switch = True
00448 rospy.on_shutdown(self.shutdown)
00449 self.robot.left_arm.set_posture(self.robot.left_arm.POSTURES['elbowupl'])
00450 self.robot.right_arm.set_posture(self.robot.right_arm.POSTURES['elbowupr'])
00451
00452
00453 for state in range(len(data['movement_states'])):
00454 if rospy.is_shutdown():
00455 break
00456
00457 if self.pressure_exceeded:
00458 self.pressure_exceeded = False
00459 rospy.loginfo('Exiting movement state loop')
00460 break
00461
00462 cur_state = data['movement_states'][state]
00463 rospy.loginfo("starting %s" % cur_state['name'])
00464 start_time = cur_state['start_time']
00465 wall_start_time = rospy.get_rostime().to_time()
00466
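            # Compose the torso_lift_link <- object transform, including the exploration offset.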
00467 tl_T_bf = tfu.transform('/torso_lift_link', '/base_footprint', self.tf_listener)
00468 offset_mat = np.row_stack(( np.column_stack((np.matrix(np.eye(3)), offset)), np.matrix([0,0,0,1.])))
00469 tl_T_obj = tl_T_bf * bf_T_obj * offset_mat
00470 left_tip_poses = []
00471
00472
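            # Publish a behavior marker so recorded bags can be segmented by movement state.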
00473 m = hlm.String()
00474 m.header.stamp = rospy.get_rostime()
00475 m.data = str(state)
00476 self.behavior_marker_pub.publish(m)
00477
00478 for d in cur_state['joint_states']:
00479
00480 if rospy.is_shutdown():
00481 break
00482 if self.pressure_exceeded:
00483 rospy.loginfo('Exiting inner movement state loop')
00484 break
00485
00486
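                # Pace playback so elapsed wall-clock time tracks the recorded timestamps.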
00487 msg_time = d['time']
00488 msg_time_from_start = msg_time - start_time
00489 cur_time = rospy.get_rostime().to_time()
00490 wall_time_from_start = (cur_time - wall_start_time)
00491 sleep_time = (msg_time_from_start - wall_time_from_start) - .005
00492 if sleep_time > 0:
00493 time.sleep(sleep_time)
00494
00495
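                # Map recorded tip poses from the object frame into torso_lift_link and
                # stream them to both Cartesian controllers.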
00496 rtip_pose_bf = tfu.matrix_as_tf((tl_T_obj * tfu.tf_as_matrix(d['rtip_obj'])))
00497 ltip_pose_bf = tfu.matrix_as_tf((tl_T_obj * tfu.tf_as_matrix(d['ltip_obj'])))
00498
00499 self.robot.left_arm.set_cart_pose(ltip_pose_bf[0], ltip_pose_bf[1],
00500 'torso_lift_link', rospy.Time.now().to_time())
00501 self.robot.right_arm.set_cart_pose(rtip_pose_bf[0], rtip_pose_bf[1],
00502 'torso_lift_link', rospy.Time.now().to_time())
00503 left_tip_poses.append(ltip_pose_bf[0])
00504
00505 rospy.loginfo("%s FINISHED" % cur_state['name'])
00506 m.header.stamp = rospy.get_rostime()
00507 self.behavior_marker_pub.publish(m)
00508
00509 self.robot.controller_manager.switch(['l_arm_controller', 'r_arm_controller'], ['l_cart', 'r_cart'])
00510 self.should_switch = False
00511 rospy.loginfo('done manipulate')
00512
00513 def find_pose_behavior(self, data):
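        """Re-acquire the object: restore head/torso poses, capture a prosilica image
        and a laser scan, estimate the object frame, and compute the base pose the
        demonstration was executed from.  Returns (probot_bf tf-pose, bf_T_obj)."""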
00514 j0_dict = data['robot_pose']
00515 cpos = self.robot.pose()
00516
00517
00518 self.robot.head.set_pose(data['movement_states'][0]['joint_states'][-1]['poses']['head_traj'], 1.)
00519
00520 self.robot.torso.set_pose(j0_dict['poses']['torso'][0,0], block=True)
00521
00526 online = True
00527 rospy.loginfo('Getting high res image')
00528
00529 if online:
00530 image = self.prosilica.get_frame()
00531 rospy.loginfo('Getting a laser scan')
00532 if online:
00533 points = None
            while points is None:
                points = self.laser_scanner.scan(math.radians(180.), math.radians(-180.), 10.)
                print points.header
                if len(points.points) < 400:
                    rospy.loginfo('Got point cloud with only %d points; expected at least 400.' % len(points.points))
                    points = None
                else:
                    rospy.loginfo('Got point cloud with %d points.' % len(points.points))
00542
00543
00544
00545 if not online:
00546 points = ut.load_pickle('tmp_points.pkl')
00547 image = cv.LoadImage('tmp_light_switch.png')
00548
00549
00550 rospy.loginfo('Finding object pose')
00551
00552
00553
00554 pro_T_bf = tfu.transform('/high_def_optical_frame', '/base_footprint', self.tf_listener)
00555 bf_R_obj, center_bf, display_dict = find_object_pose(image, points, data, pro_T_bf, self.cam_info)
00556 bf_T_obj = htf.composeHomogeneousTransform(bf_R_obj, center_bf)
00557
00558
00559 rospy.loginfo('get pose of tip of arm in frame of object')
00560 rtip_objs = []
00561 ltip_objs = []
00562 rtip_pose_bf = []
00563 ltip_pose_bf = []
00564 joint_states_time = []
00565 for state in range(len(data['movement_states'])):
00566 cur_state = data['movement_states'][state]
00567 for d in cur_state['joint_states']:
00568 rtip_objs.append(d['rtip_obj'][0])
00569 ltip_objs.append(d['ltip_obj'][0])
00570 rtip_pose_bf.append(bf_T_obj * tfu.tf_as_matrix(d['rtip_obj']))
00571 ltip_pose_bf.append(bf_T_obj * tfu.tf_as_matrix(d['ltip_obj']))
00572 joint_states_time.append(d['time'])
00573
00574 l_tip_objs_bf = tfu.transform_points(bf_T_obj, np.column_stack(ltip_objs))
00575 r_tip_objs_bf = tfu.transform_points(bf_T_obj, np.column_stack(rtip_objs))
00576
00577
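        # Recover the demonstrated robot pose relative to the object, map it into the
        # current base_footprint frame, and keep only the yaw component of the rotation.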
00578 dframe_bf = data['start_conditions']['pose_parameters']['frame_bf']
00579 dcenter_bf = data['start_conditions']['pose_parameters']['center_bf']
00580 probot_obj = np.linalg.inv(htf.composeHomogeneousTransform(dframe_bf, dcenter_bf))
00581 probot_bf = tfu.matrix_as_tf(bf_T_obj * probot_obj)
00582 probot_bf = [probot_bf[0], tr.quaternion_from_euler(0, 0, tr.euler_from_matrix(tr.quaternion_matrix(probot_bf[1]), 'sxyz')[2], 'sxyz')]
        rospy.loginfo('Driving to location %s at %.3f m away from current pose.' % \
                (str(probot_bf[0]), np.linalg.norm(probot_bf[0])))
00585
00586
00587 rospy.loginfo('Sending out perception results (5 seconds).')
00588
00589 display = DisplayRecordedPoseReduced(True, 5)
        display.display(bf_R_obj, center_bf, points, display_dict['surf_loc3d_pro'],
                        image, display_dict['expected_positions3d_bf'], pro_T_bf,
                        l_tip_objs_bf, r_tip_objs_bf, tfu.matrix_as_tf(bf_T_obj * probot_obj))
00591 return probot_bf, bf_T_obj
00592
00593
00598 def camera_change_detect(self, f, args):
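        """Run f(*args) and report whether the wide-angle camera image changed by more
        than a fixed threshold between before and after frames."""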
00599
00600 start_pose = self.robot.head.pose()
00601 self.robot.head.set_pose(np.radians(np.matrix([1.04, -20]).T), 1)
00602 time.sleep(4)
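        # Grab several frames (likely to flush stale buffered images) before comparing.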
00603 for i in range(4):
00604 before_frame = self.wide_angle_camera.get_frame()
00605 cv.SaveImage('before.png', before_frame)
00606 f(*args)
00607 time.sleep(2)
00608 for i in range(3):
00609 after_frame = self.wide_angle_camera.get_frame()
00610
00611 cv.SaveImage('after.png', after_frame)
00612 sdiff = image_diff_val2(before_frame, after_frame)
00613
00614 self.robot.head.set_pose(start_pose, 1)
00615 time.sleep(3)
00616
00617 threshold = .03
00618 rospy.loginfo('camera difference %.4f (thres %.3f)' % (sdiff, threshold))
00619 if sdiff > threshold:
00620 rospy.loginfo('difference detected!')
00621 return True
00622 else:
00623 rospy.loginfo('NO differences detected!')
00624 return False
00625
00626 def change_detect(self, f, args):
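        """Run every registered change detector on f(*args); True if any fires."""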
00627 detectors = [self.camera_change_detect]
00628 results = []
00629 for d in detectors:
00630 results.append(d(f, args))
00631 return np.any(results)
00632
00633 def pose_robot_behavior(self, data):
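        """Move arms, head, and torso to the demonstration's starting configuration."""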
00634 j0_dict = data['robot_pose']
00635
00636 self.robot.left_arm.set_pose(j0_dict['poses']['larm'], 5., block=False)
00637 self.robot.right_arm.set_pose(j0_dict['poses']['rarm'], 5., block=False)
00638 self.robot.head.set_pose(j0_dict['poses']['head_traj'], 5.)
00639 self.robot.torso.set_pose(j0_dict['poses']['torso'][0,0], block=True)
00640
00641 def run_explore(self, data_fname):
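        """Exploration loop: pose the robot, localize the object, then repeatedly
        replay the demonstration with sampled offsets, recording each trial to a
        bag file and sorting results into success/failure folders."""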
00642 rospy.loginfo('loading demonstration pickle')
00643 tpk = time.time()
00644 data = ut.load_pickle(data_fname)
00645 print 'took %.2f secs' % (time.time() - tpk)
00646
00647
00648 rospy.loginfo('posing robot')
00649 self.pose_robot_behavior(data)
00650
00651
00652
00653 adjustment_result = False
00654 i = 0
        while not adjustment_result:
00656 try:
00657 probot_bf, bf_T_obj = self.find_pose_behavior(data)
00658 adjustment_result = self.fine_drive_behavior(tfu.tf_as_matrix(probot_bf))
            except RuntimeError as e:
00660 rospy.loginfo('%s' % str(e))
00661 continue
00662 i = i + 1
00663 if i > 3:
00664 return
00665
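        # Exploration noise: sample small planar offsets (very tight in z) around the
        # current best center.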
00666 mean = np.matrix([[0.0, 0.0, 0.0]]).T
00667 cov = np.matrix(np.eye(3) * .0004)
00668 cov[2,2] = .000005
00669 g = pb.Gaussian(mean, cov)
00670
00671 failed_offsets = []
00672 offsets = [np.matrix(np.zeros((3,1)))]
00673
00674 date_str = time.strftime('%A_%m_%d_%Y_%I:%M%p')
00675 os.system('mkdir %s' % date_str)
00676 os.system('mkdir %s/success' % date_str)
00677 os.system('mkdir %s/failure' % date_str)
00678 i = 0
00679 success = False
00680 current_center = np.matrix(np.zeros((3,1)))
        while not rospy.is_shutdown():
00682 rospy.loginfo('=============================================')
00683 rospy.loginfo('Current offset is %s' % str(offsets[0].T))
00684 tstart = time.time()
00685
00686
00687
00688 bag_name = 'trial.bag'
00689 cmd = 'rosbag record -O %s /imitate_behavior_marker /pressure/l_gripper_motor /pressure/r_gripper_motor /accelerometer/l_gripper_motor /accelerometer/r_gripper_motor /joint_states /l_cart/command_pose /l_cart/command_posture /l_cart/state /r_cart/command_pose /r_cart/command_posture /r_cart/state /head_traj_controller/command /base_controller/command /l_gripper_controller/command /r_gripper_controller/command /torso_controller/command /torso_controller/state /base_scan /tf /laser_tilt_controller/laser_scanner_signal' % bag_name
00690 recording_process = cmdp.CmdProcess(cmd.split())
00691 recording_process.run()
00692 rospy.loginfo('Getting a scan before action.')
00693 before_scan = self.laser_scanner.scan(math.radians(180.), math.radians(-180.), 5.)
00694
00695 if self.change_detect(self.manipulate_cartesian_behavior, [data, bf_T_obj, offsets[0]]):
00696 rospy.loginfo('success!')
00697 success = True
00698 folder = 'success'
00699 current_center = offsets[0]
00700 rospy.loginfo('Updating current center to %s.' % str(current_center.T))
00701 else:
00702 rospy.loginfo('failed.')
00703 success = False
00704 folder = 'failure'
00705 failed_offsets.append(offsets[0])
00706
00707 recording_process.kill()
00708 rospy.loginfo('Getting a scan after action.')
00709 after_scan = self.laser_scanner.scan(math.radians(180.), math.radians(-180.), 5.)
00710 scmd1 = 'mv %s %s/%s/trial%d.bag' % (bag_name, date_str, folder, i)
00711 scmd2 = 'mv before.png %s/%s/trial%d_before.png' % (date_str, folder, i)
00712 scmd3 = 'mv after.png %s/%s/trial%d_after.png' % (date_str, folder, i)
00713
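            # Save the scans on a background thread so disk I/O does not delay the trial loop.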
00714 def f(before_scan, after_scan, bf_T_obj, offsets, date_str, folder, i):
00715 rospy.loginfo('Saving scans.')
00716 ut.save_pickle({'before_scan': before_scan,
00717 'after_scan': after_scan,
00718 'bf_T_obj': bf_T_obj,
00719 'offset': offsets[0]},
00720 '%s/%s/trial%d.pkl' % (date_str, folder, i))
00721 e = ExecuteAction(f, [before_scan, after_scan, bf_T_obj, offsets, date_str, folder, i])
00722 e.start()
00723 for scmd in [scmd1, scmd2, scmd3]:
00724 rospy.loginfo(scmd)
00725 os.system(scmd)
00726
00727 offsets[0] = current_center + g.sample()
00728 i = i + 1
00729 rospy.loginfo('Trial took %f secs' % (time.time() - tstart))
00730 if success:
00731 rospy.loginfo('Reset environment please!!')
00732 raw_input()
00733
00734 if i > 200:
00735 break
00736
00737
00742 def fine_drive_behavior(self, probot_bf):
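        """Fine base alignment toward probot_bf; currently a stub that computes the
        map-frame target and reports success without driving."""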
00743 map_T_bf = tfu.transform('map', 'base_footprint', self.tf_listener)
00744 probot_map = map_T_bf * probot_bf
00745
00746 return True
00747
00748
00755 def coarse_drive_behavior(self, data):
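        """Drive to the demonstration's recorded base pose, then apply a feed-forward
        correction for the residual error."""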
00756 t, r = data['base_pose']
00757 rospy.loginfo('Driving to location %s' % str(t))
00758 rospy.loginfo('press <enter> to continue')
00759 raw_input()
00760
00761 rvalue = self.robot.base.set_pose(t, r, '/map', block=True)
00762 rospy.loginfo('result is %s' % str(rvalue))
00763 tfinal, rfinal = self.robot.base.get_pose()
00764 rospy.loginfo('final pose error (step 1): %.3f' % (np.linalg.norm(t - tfinal)))
00765 self.drive_ff(data['base_pose'])
00766
00767
00776 def drive_ff(self, tf_pose_map):
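        """Feed-forward base correction: translate then rotate toward tf_pose_map,
        rejecting commands over ~20 cm of displacement or ~40 degrees of rotation."""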
00777
00778
00779 bf_T_map = tfu.transform('base_footprint', 'map', self.tf_listener)
00780 p_bf = bf_T_map * tfu.tf_as_matrix(tf_pose_map)
00781 rospy.loginfo('drive_ff %s' % str(p_bf[0:2,3]))
00782 if np.linalg.norm(p_bf[0:2,3]) > .2:
00783 rospy.loginfo('drive_ff rejecting command due to large displacement command')
00784 return False
00785 self.robot.base.move_to(p_bf[0:2,3], True)
00786
00787
00788 current_ang_map = tr.euler_from_matrix(tfu.transform('map', 'base_footprint', self.tf_listener)[0:3, 0:3], 'sxyz')[2]
00789 desired_ang_map = tr.euler_from_matrix(tfu.tf_as_matrix(tf_pose_map), 'sxyz')[2]
        delta_angle_map = ut.standard_rad(desired_ang_map - current_ang_map)
        if abs(np.degrees(delta_angle_map)) > 40:
00792 rospy.loginfo('drive_ff rejecting command due to large rotation command')
00793 return False
00794
00795 self.robot.base.turn_by(delta_angle_map)
00796
00797 return True
00798
00799 def run(self, data_fname, state='fine_positioning'):
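        """Offline entry point: load a demonstration pickle and re-run pose estimation
        (and optionally trajectory replay) for visualization and debugging."""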
00800
00810 print 'loading data'
00811 data = ut.load_pickle(data_fname)
00812 print 'done'
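        # Force offline processing and pause in the debugger before continuing.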
00813 state = 'fine_positioning_offline'
00814 pdb.set_trace()
00815
00848 if state == 'fine_positioning_offline':
00849 rospy.loginfo('Finding object pose (offline)')
00850
00851 pro_T_bf = data['start_conditions']['pro_T_bf']
00852 cam_info = data['start_conditions']['camera_info']
00853 image = cv.LoadImage(data['start_conditions']['model_image'])
00854 points = data['start_conditions']['points']
00855
00856 bf_R_obj, center_bf, display_dict = find_object_pose(image, points, data, pro_T_bf, cam_info)
00857 print 'DONE!'
00858
00859
00860
00861 rospy.loginfo('get pose of tip of arm in frame of object')
00862 rtip_objs = []
00863 ltip_objs = []
00864 rtip_objs_bf = []
00865 ltip_objs_bf = []
            for state_idx in range(len(data['movement_states'])):
                cur_state = data['movement_states'][state_idx]
00868 for d in cur_state['joint_states']:
00869 rtip_objs.append(d['rtip_obj'][0])
00870 ltip_objs.append(d['ltip_obj'][0])
00871 rtip_objs_bf.append(d['rtip_bf'][0])
00872 ltip_objs_bf.append(d['ltip_bf'][0])
00873 rtip_objs_obj = np.column_stack(rtip_objs)
00874 ltip_objs_obj = np.column_stack(ltip_objs)
00875
00876 rtip_objs_bf = np.column_stack(rtip_objs_bf)
00877 ltip_objs_bf = np.column_stack(ltip_objs_bf)
00878
00879 dframe_bf = data['start_conditions']['pose_parameters']['frame_bf']
00880 dcenter_bf = data['start_conditions']['pose_parameters']['center_bf']
00881 bf_T_obj = htf.composeHomogeneousTransform(dframe_bf, dcenter_bf)
00882 probot_obj = np.linalg.inv(htf.composeHomogeneousTransform(dframe_bf, dcenter_bf))
00883 probot_bf = tfu.matrix_as_tf(bf_T_obj * probot_obj)
00884 pdb.set_trace()
00885
00886 rospy.loginfo('sending out results')
00887 display = DisplayRecordedPoseReduced(False, 100)
00888 display.display(bf_R_obj, center_bf, points,
00889 display_dict['surf_loc3d_pro'], image,
00890 display_dict['expected_positions3d_bf'], pro_T_bf,
00891 ltip_objs_bf, rtip_objs_bf,
00892 probot_bf,
00893 data['start_conditions']['pose_parameters']['contact_bf'])
00894
00956 if state == 'manipulate':
00957 rospy.loginfo('STATE manipulate')
00958 rospy.loginfo('there are %d states' % len(data['movement_states']))
00959
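            # Replay recorded joint trajectories: move to the initial posture first,
            # then stream the full trajectory with recorded velocities.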
            for state_idx in range(len(data['movement_states'])):
                cur_state = data['movement_states'][state_idx]
00962 rospy.loginfo("starting %s" % cur_state['name'])
00963
00964 larm, lvel, ltime, rarm, rvel, rtime = zip(*[[jdict['poses']['larm'], jdict['vels']['larm'], jdict['time'], \
00965 jdict['poses']['rarm'], jdict['vels']['rarm'], jdict['time']] \
00966 for jdict in cur_state['joint_states']])
00967
00968 larm = np.column_stack(larm)
00969 lvel = np.column_stack(lvel)
00970 ltime = np.array(ltime) - cur_state['start_time']
00971
00972 rarm = np.column_stack(rarm)
00973 rvel = np.column_stack(rvel)
00974 rtime = np.array(rtime) - cur_state['start_time']
00975
00976
00977 self.robot.left_arm.set_poses(larm[:,0], np.array([2.]), block=False)
00978 self.robot.right_arm.set_poses(rarm[:,0], np.array([2.]), block=True)
00979
00980
00981 self.robot.left_arm.set_poses(larm, ltime, vel_mat=lvel, block=False)
00982 self.robot.right_arm.set_poses(rarm, rtime, vel_mat=rvel, block=True)
00983
00984 rospy.loginfo("%s FINISHED" % cur_state['name'])
00985 time.sleep(5)
00986
00987
00992 class ControllerTest:
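    """Smoke test: switch to the Cartesian controllers and stream postures until shutdown."""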
00993 def __init__(self):
00994 self.robot = pr2.PR2()
00995
00996 def run(self):
00997 rospy.loginfo('switching to cartesian controllers')
00998 self.robot.controller_manager.switch(['l_cart', 'r_cart'], ['l_arm_controller', 'r_arm_controller'])
00999 rospy.on_shutdown(self.shutdown)
01000 r = rospy.Rate(1)
01001
01002 while not rospy.is_shutdown():
01003 self.robot.left_arm.set_posture(self.robot.left_arm.POSTURES['elbowupl'])
01004 self.robot.right_arm.set_posture(self.robot.right_arm.POSTURES['elbowupr'])
01005 r.sleep()
01006
01007 def shutdown(self):
01008 rospy.loginfo('switching back joint controllers')
01009 self.robot.controller_manager.switch(['l_arm_controller', 'r_arm_controller'], ['l_cart', 'r_cart'])
01010
01011
01012 if __name__ == '__main__':
01013 mode = sys.argv[1]
01014
01015 if mode == 'run':
01016 im = Imitate()
01017 im.run_explore(sys.argv[2])
01018
01019 if mode == 'display':
01020 im = Imitate(True)
01021 im.run(sys.argv[2])
01022
01023 if False:
01024 rospy.init_node('send_base_cmd')
        client = actionlib.SimpleActionClient('go_angle', hm.GoAngleAction)
        client.wait_for_server()

01027 pdb.set_trace()
01028
01029 goal = hm.GoAngleGoal()
01030 goal.angle = math.radians(90)
01031 print 'sending goal'
01032 client.send_goal(goal)
01033 print 'waiting'
01034 client.wait_for_result()
01035
01036 if False:
01037 rospy.init_node('send_base_cmd')
01038 client = actionlib.SimpleActionClient('go_xy', hm.GoXYAction)
01039 client.wait_for_server()
01040 pdb.set_trace()
01041
01042 goal = hm.GoXYGoal()
01043 goal.x = .2
01044 print 'sending goal'
01045 client.send_goal(goal)
01046 print 'waiting'
01047 client.wait_for_result()
01048
01049
01050 if False:
01051 dd = DisplayRecordedPose()
01052 dd.display_original_object_pose(sys.argv[1])
01053
01054 if False:
01055 c = ControllerTest()
01056 c.run()