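# Sanity check of the nao_description TF tree: query head and camera transforms
# from NAOqi's ALMotion proxy and compare them against the transforms reported by ROS TF.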
import sys
import roslib
roslib.load_manifest('nao_description')
import rospy
import tf
import numpy as np
from tf import TransformListener
from tf import transformations

try:
    import motion
    from naoqi import ALProxy
except ImportError:
    print("Error importing NaoQI. Please make sure that Aldebaran's NaoQI API is in your PYTHONPATH.")
    sys.exit(1)


def space_to_str(space):
    """Map a NAOqi SPACE_* constant to the name of the matching ROS frame."""
    if space == motion.SPACE_TORSO:
        return "Torso"
    elif space == motion.SPACE_WORLD:
        return "Odom"
    elif space == motion.SPACE_NAO:
        return "BaseFootprint"

def transform_to_str(result):
    """Format a flat 4x4 transform (row-major list of 16 values) as an aligned matrix string."""
    tstr = ''
    maxColumnWidth = 20
    for i in range(0, 4):
        row = result[4 * i:4 * i + 4]
        rstr = ''.join(["%s" % str(k).center(maxColumnWidth) for k in row])
        rstr = rstr + '\n'
        tstr = tstr + rstr

    return tstr

def np_mat_to_str(m):
    """Flatten a 4x4 numpy matrix (row-major) and format it with transform_to_str."""
    mm = list()
    for i in range(0, 4):
        for j in range(0, 4):
            mm.append(m[i, j])

    return transform_to_str(mm)

def cam_rotation_matrix():
    """Fixed homogeneous rotation between the NAOqi camera frame and the axis convention of CameraTop_frame in TF."""
    return np.matrix('0 0 1 0; -1 0 0 0; 0 -1 0 0; 0 0 0 1')

if __name__ == '__main__':
    rospy.init_node('test_tf')
    listener = TransformListener()
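    # Connect to NAOqi's ALMotion module; robot address and port are hard-coded for this test.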
    ip = "ra.local"
    port = 9559
    proxy = ALProxy("ALMotion", ip, port)
    print "motionproxy ready"
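    # Query the 6D pose (x, y, z, roll, pitch, yaw) of the head chain in torso space,
    # once from the joint sensors and once from the commanded joint values.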
    space = motion.SPACE_TORSO
    chainName = "Head"
    currentSensor = proxy.getPosition(chainName, space, True)
    current = proxy.getPosition(chainName, space, False)
    print "Position of %s in space %s:" % (chainName, space_to_str(space))
    print currentSensor
    print current
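    # Turn the roll/pitch/yaw part of the pose into a 4x4 rotation matrix.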
    rpy = currentSensor[3:]
    print rpy
    DCM = transformations.euler_matrix(*rpy)
    print DCM
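    # getTransform returns the homogeneous transform as a flat, row-major list of 16 floats.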
    tTHSensor = proxy.getTransform(chainName, space, True)
    tTH = proxy.getTransform(chainName, space, False)
    print "Transform %s to %s:" % (space_to_str(space), chainName)
    print transform_to_str(tTHSensor)

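    # Same query for the top camera; this is the transform that will be checked against TF.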
    chainName = "CameraTop"
    tTCSensor = proxy.getTransform(chainName, space, True)
    tTC = proxy.getTransform(chainName, space, False)
    print "Transform %s to %s:" % (space_to_str(space), chainName)
    print transform_to_str(tTCSensor)
    print
    # Reshape the flat 16-element list into a 4x4 matrix
    T = np.matrix([tTCSensor[0:4], tTCSensor[4:8], tTCSensor[8:12], tTCSensor[12:]])

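    # Re-orient the camera axes so the NAOqi transform matches the frame convention
    # of CameraTop_frame published by nao_description.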
    DCM = T * cam_rotation_matrix()
    print "Transform %s to %s (with rotated coordinate frame):" % (space_to_str(space), chainName)
    print np_mat_to_str(DCM)

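    # Look up the same transform from TF; a zero stamp requests the latest available transform.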
    stamp = rospy.Time()
    frame1 = 'Torso_link'
    frame2 = 'CameraTop_frame'
    try:
        listener.waitForTransform(frame1, frame2, stamp, rospy.Duration(1.0))
        (trans, rot) = listener.lookupTransform(frame1, frame2, stamp)
    except tf.Exception as e:
        print "ERROR using TF"
        print "%s" % e
        sys.exit(-1)

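    # Assemble a 4x4 homogeneous matrix from the TF quaternion and translation.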
    m = transformations.quaternion_matrix(rot)
    for i in range(0, 3):
        m[i, 3] = trans[i]
    print "[tf] Transform %s to %s:" % (frame1, frame2)
    print np_mat_to_str(m)

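    # The NAOqi-derived and TF-derived matrices should agree; compare them via the
    # Frobenius norm of their difference.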
    e = np.linalg.norm(DCM - m)
    print "Error is: ", e
    if e > 1e-5:
        print "ERROR: Something is wrong with your TF transformations. Transforms do not match!"
    else:
        print "Test ok. Done"