test12.py
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hrl_lib.rutils as ru
import sys
import sensor_msgs.msg as sm
import hrl_lib.util as ut
import hai_sandbox.features as fea
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import cv
import tf
import hrl_lib.tf_utils as tfu
from sensor_msgs.msg import CameraInfo
import numpy as np
import tf.transformations as tr
import scipy.spatial as sp

class ROSCameraCalibration:
    def __init__(self, channel):
        rospy.Subscriber(channel, CameraInfo, self.camera_info)
        self.has_msg = False

    def camera_info(self, msg):
        # Cache the camera model from the latest CameraInfo message.
        self.distortion = np.matrix(msg.D)
        self.K = np.reshape(np.matrix(msg.K), (3,3))
        self.R = np.reshape(np.matrix(msg.R), (3,3))
        self.P = np.reshape(np.matrix(msg.P), (3,4))
        self.w = msg.width
        self.h = msg.height
        self.frame = msg.header.frame_id
        self.has_msg = True

    ##
    # Project a 3D point into this camera's image plane.
    #
    # @param p 3x1 matrix in the given coordinate frame
    # @param tf_listener None if no transformation is needed
    # @param from_frame None means p is already in the camera frame
    # @return 2x1 matrix of pixel coordinates
    def project(self, p, tf_listener=None, from_frame=None):
        # Transform p into the camera frame if it was given in another frame.
        if not (from_frame is None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        # Homogenize, apply the projection matrix, and normalize by depth.
        p = np.row_stack((p, np.matrix([1.])))
        pp = self.P * p
        pp = pp / pp[2,:]
        return pp[0:2,:]

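# A minimal, hypothetical usage sketch (not part of the original pipeline):
# once a CameraInfo message has arrived, project a 3x1 point that is already
# expressed in the camera's own frame, so no tf lookup is needed.
def example_project(calib):
    if not calib.has_msg:
        return None
    p3d = np.matrix([0.1, 0.0, 1.0]).T   # 3x1 point in the camera frame
    return calib.project(p3d)            # 2x1 pixel coordinates (u, v)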

class ImageProcess:

    def __init__(self, surf_name, contacts_name):
        forearm_cam_l = '/l_forearm_cam/image_rect_color'

        self.bridge = CvBridge()
        rospy.loginfo('loading %s' % surf_name)
        self.surf_data = ut.load_pickle(surf_name)
        rospy.loginfo('loading %s' % contacts_name)
        self.scene, self.contact_points = ut.load_pickle(contacts_name)
        self.surf_idx = None
        cv.NamedWindow('surf', 1)
        self.tflistener = tf.TransformListener()
        self.camera_geo = ROSCameraCalibration('/l_forearm_cam/camera_info')

        self.lmat0 = None
        self.rmat0 = None
        self.contact = False
        self.contact_stopped = False

        #rospy.Subscriber('/pressure/l_gripper_motor', pm.PressureState, self.lpress_cb)
        rospy.Subscriber(forearm_cam_l, sm.Image, self.image_cb)
        print 'ready'

    #def lpress_cb(self, pmsg):
    #    #convert to matrices
    #    lmat = np.matrix((pmsg.l_finger_tip)).T
    #    rmat = np.matrix((pmsg.r_finger_tip)).T
    #    if self.lmat0 is None:
    #        self.lmat0 = lmat
    #        self.rmat0 = rmat
    #        return

    #    #zero out the resting pressure
    #    lmat = lmat - self.lmat0
    #    rmat = rmat - self.rmat0

    #    #touch detected
    #    if np.any(np.abs(lmat) > 250) or np.any(np.abs(rmat) > 250): #TODO: replace this with something more sound
    #        self.contact = True
    #    else:
    #        if self.contact == True:
    #            self.contact_stopped = True
    #        self.contact = False
    #        #Contact has been made!! look up gripper tip location
    #        #to_frame = 'base_link'
    #        #def frame_loc(from_frame):
    #        #    p_base = tfu.transform('base_footprint', from_frame, self.tflistener) \
    #        #                   * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    #        #                   #* tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
    #        #    return tfu.matrix_as_tf(p_base)

    #        #tip_locs = [frame_loc(n)[0] for n in self.ftip_frames]
    #        #t = pmsg.header.stamp.to_time()
    #        #rospy.loginfo("contact detected at %.3f" % t)
    #        #self.contact_locs.append([t, tip_locs])

    #    #self.last_msg = time.time()
    #    rospy.loginfo('lpress_cb ' + str(np.max(rmat)) + ' ' + str(np.max(lmat)))

    def image_cb(self, msg):
        image_time = msg.header.stamp.to_time()
        image      = self.bridge.imgmsg_to_cv(msg, 'bgr8')
        # Find the pickled SURF frame whose timestamp matches this image;
        # after the first match, frames are assumed to arrive in order.
        if self.surf_idx is None:
            for i, d in enumerate(self.surf_data):
                t, sdata = d
                if image_time == t:
                    self.surf_idx = i
                    break
        else:
            self.surf_idx = self.surf_idx + 1

        stime, sdata = self.surf_data[self.surf_idx]
        if stime != image_time:
            print 'surf time != image_time'

        surf_keypoints, surf_descriptors = sdata
        nimage = fea.draw_surf(image, surf_keypoints, (255,0,0))
        cv.ShowImage('surf', nimage)
        cv.WaitKey(10)

        # Track and give 3D locations to features.
        ## Project the 3D scene points into this frame (needs access to tf => must run online or from cache)
        ##              camera_T_3dframe at ti
        scene2d = self.camera_geo.project(self.scene, self.tflistener, 'base_footprint')
        scene2d_tree = sp.KDTree(np.array(scene2d.T))

        ## Find the 3D scene point whose projection is closest to each SURF keypoint.
        for loc, lap, size, d, hess in surf_keypoints:
            idx = scene2d_tree.query(np.array(loc))[1]
            orig3d = self.scene[:, idx]  # 3D correspondence for this keypoint (not yet stored)

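        # A hypothetical continuation (not in the original file): accumulate
        # each keypoint -> 3D correspondence instead of discarding it, e.g.
        #   associations = [(loc, d, self.scene[:, scene2d_tree.query(np.array(loc))[1]])
        #                   for loc, lap, size, d, hess in surf_keypoints]
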
        ## Get features closest to the contact point and
        ## stop running once contact has stopped.
        if self.contact:
            #TODO: let's put this tracking on hold..
            pass


if __name__ == '__main__':
    surf_name = sys.argv[1]
    contacts_name = sys.argv[2]
    rospy.init_node('test12')
    ip = ImageProcess(surf_name, contacts_name)
    r = rospy.Rate(10)
    while not rospy.is_shutdown():
        r.sleep()
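
# Usage sketch (hypothetical file names; the .surf_pkl name is taken from the
# notes below, the contacts pickle name is made up):
#   rosrun hai_sandbox test12.py 2010-07-22-20-34-59.surf_pkl contacts.pkl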

#raw_video_bag = '2010-07-22-20-34-59/2010-07-22-20-34-59.bag'
#'2010-07-22-20-34-59.surf_pkl'

#Take all the features in this video and map them to 3D surfaces.

    #Record features and the pose of the camera to do the transformation...
    # From feature 2D point F2D
    # Project 3D points into this frame (need access to tf => must do online or from cache)
    #              camera_T_3dframe at ti
    #     2dpoints = camera_T_3dframe * 3dpoints
    # (Every frame has been tracked! Just retrieve the appropriate feature from the pickle.)

    # ("Offline phase")
    # grab the nearest 3D point in image space
    #     2dpoints.query(f2d)
    # store this 3D location with this feature
    #
    #Track features using a rudimentary tracker (from the laser interface)

#load the pickle; this should match the video we're going to get.
#tracked_features_fname = sys.argv[1]
#tracked_features = ru.load_pickle(tracked_features_fname)
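
# A compact sketch of the offline association phase described above
# (hypothetical names; assumes calib is a ROSCameraCalibration that has
# received a message, scene is a 3xN matrix of points in the camera frame,
# and surf_keypoints comes from the surf pickle):
#   ones = np.matrix(np.ones((1, scene.shape[1])))
#   hom  = calib.P * np.row_stack((scene, ones))    # 3xN homogeneous pixels
#   scene2d = hom[0:2, :] / hom[2, :]               # normalize by depth
#   tree = sp.KDTree(np.array(scene2d.T))           # index the 2D projections
#   for loc, lap, size, d, hess in surf_keypoints:
#       dist, idx = tree.query(np.array(loc))       # nearest projected point
#       feature_3d = scene[:, idx]                  # store with this feature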


hai_sandbox
Author(s): Hai Nguyen
autogenerated on Wed Nov 27 2013 11:46:56