follow_light_node.py
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the Willow Garage, Inc. nor the names of its
#       contributors may be used to endorse or promote products derived from
#       this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
## @author Hai Nguyen/hai@gatech.edu
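"""
ROS node that drives the robot base toward a point selected through the
laser pointer interface.  It listens for base odometry on 'odom' and for
3D cursor points on CURSOR_TOPIC, transforms each cursor point from the
camera frame into the base frame, and publishes BaseVel commands on
'cmd_vel' until the base comes within a dead zone of the goal, announcing
the distance to each new cursor point (in centimeters) as it goes.
"""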
# pkg is the package's generated ROS module; rospy and CURSOR_TOPIC are
# presumably pulled in through this star import.
from pkg import *
from std_msgs.msg import Position
from std_msgs.msg import BaseVel
from std_msgs.msg import RobotBase2DOdom
import sys
import numpy as np
import nodes as n
import camera as cam
import math
from pyrob.voice import say
from StringIO import StringIO

class FollowBehavior:
    def __init__(self, velocity_topic):
        self.not_inited     = True
        self.velocity_topic = velocity_topic
        self.last_said      = ''

    def init_pose(self, robot_pose):
        #Fixed transform from the camera frame to the base frame: a
        #rotation composed from Rx/Ry, then the camera's mounting offset.
        R = cam.Rx(math.radians(90)) * cam.Ry(math.radians(-90))
        T = np.matrix([-.095, 0, .162]).T
        self.base_T_camera = cam.homo_transform3d(R, T)

        #Behavior graph: keep the goal point in the robot's local frame,
        #compute a linear attraction toward it, and convert that into
        #forward/rotational base commands.
        self.robot_pose  = n.ConstNode(robot_pose)
        self.local_pos   = n.nav.V_KeepLocal_P2d_V(self.robot_pose, n.ConstNode(np.matrix([0.0, 0.0]).T))
        self.attr        = n.nav.V_LinearAttract_V(self.local_pos, dead_zone = 0.20, far_dist = 1.0)
        self.cmd         = n.nav.R_Conv_V(self.attr, allow_backwards_driving = False)
        self.should_stop = self.attr.is_done()

        self.cmd.max_vel    = .5
        self.has_stopped    = False
        self.count          = 0
        self.attr.verbosity = -1
        print 'FollowBehavior: ready!'

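    # For reference: base_T_camera is a homogeneous transform, conventionally
    # the 4x4 matrix [R T; 0 1] (a sketch; the exact layout is up to
    # cam.homo_transform3d).  It maps homogeneous camera-frame points into
    # base coordinates with a single matrix product, as cursor_moved does:
    #
    #     p_camera = np.matrix([x, y, z, 1.0]).T
    #     p_base   = base_T_camera * p_camera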
    def cursor_moved(self, point):
        #Transform into the base's coordinate frame
        point3d   = np.matrix([point.x, point.y, point.z, 1.0]).T
        print 'FollowBehavior.cursor_moved: got point', point3d.T
        new_point = self.base_T_camera * point3d

        #Announce the distance to the point, rounded to whole centimeters
        sio = StringIO()
        print >>sio, int(round(np.linalg.norm(point3d[0:3]) * 100.0))
        new_saying = sio.getvalue()
        if new_saying != self.last_said:
            say(new_saying)
            self.last_said = new_saying

        #Drop z and the homogeneous coordinate, keeping the 2D goal
        goal2d = new_point[0:2, 0]
        print 'FollowBehavior.cursor_moved: 2d goal', goal2d.T
        self.local_pos.remember(n.ConstNode(goal2d))
        self.has_stopped = False

    def robot_moved(self, update):
        #Lazily initialize from the first odometry update, then just track
        if self.not_inited:
            self.init_pose(n.Pose2D(update.pos.x, update.pos.y, update.pos.th))
            self.not_inited = False
        else:
            self.robot_pose.const = n.Pose2D(update.pos.x, update.pos.y, update.pos.th)

    def run(self):
        if self.not_inited:
            return
        if self.should_stop.val(self.count) and not self.has_stopped:
            #Attractor reports done: stop the base once and announce it
            self.velocity_topic.publish(BaseVel(0, 0))
            self.has_stopped = True
            say('done')
        elif not self.has_stopped:
            #Still driving: publish the current velocity command
            base_command = self.cmd.val(self.count)
            msg = BaseVel(base_command.forward_vel, base_command.rot_vel)
            self.velocity_topic.publish(msg)
        self.count = self.count + 1

class FakeTopic:
    """Stub topic for exercising FollowBehavior without a robot."""
    def publish(self, something):
        pass

class FakePoint:
    """Stub cursor point with the x, y, z fields of Position."""
    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

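# A minimal offline sketch (hypothetical; not part of the original node):
# exercising FollowBehavior by hand with the stubs above.  FakeOdom is a
# made-up stand-in for RobotBase2DOdom's .pos.x/.pos.y/.pos.th fields.
#
#     class FakeOdom:
#         def __init__(self, x, y, th):
#             self.pos = FakePoint(x, y, 0.0)
#             self.pos.th = th
#
#     fb = FollowBehavior(FakeTopic())
#     fb.robot_moved(FakeOdom(0.0, 0.0, 0.0))    #first update initializes the pose
#     fb.cursor_moved(FakePoint(0.0, 0.0, 1.0))  #a point ~1m in front of the camera
#     fb.run()                                   #publishes one BaseVel to the stub
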
if __name__ == '__main__':
    import time
    pub = rospy.TopicPub('cmd_vel', BaseVel)
    follow_behavior = FollowBehavior(pub)
    rospy.TopicSub('odom', RobotBase2DOdom, follow_behavior.robot_moved)
    rospy.TopicSub(CURSOR_TOPIC, Position, follow_behavior.cursor_moved)
    rospy.ready(sys.argv[0])
    #follow_behavior.cursor_moved(FakePoint(0.0,0.0,1.0))
    while not rospy.isShutdown():
        follow_behavior.run()
        time.sleep(0.016)   #~60Hz control loop

#follow_behavior = FollowBehavior(FakeTopic())

#def transform2D(o_p, o_angle):
#    """
#        Takes as input o_p and o_angle, both descriptions of the new
#        coordinate frame in the original frame (frame o).  Returns a
#        transform *_T_o that describes points in the new coordinate frame
#        (frame *).  The transformation applies a translation then a rotation.
#    """
#    t = numpy.matrix(numpy.eye(3))
#    t[0:2][:,2] = -1 * o_p
#    return rot_mat(o_angle) * t
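#
#    Example (hypothetical numbers): o_p itself always maps to the new
#    frame's origin, whatever the rotation:
#
#        star_T_o = transform2D(numpy.matrix([1.0, 0.0]).T, math.radians(90))
#        star_T_o * numpy.matrix([1.0, 0.0, 1.0]).T   # -> (0, 0, 1)^T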
#
#def rot_mat(a):
#    """
#        Makes a homogeneous rotation matrix that rotates the coordinate
#        system counterclockwise, i.e. rotates points clockwise.
#    """
#    return numpy.matrix([[math.cos(a),    math.sin(a), 0],
#                         [-1*math.sin(a), math.cos(a), 0],
#                         [0,              0,           1]])
#
#class FollowLight:
#    def __init__(self, velocity_topic):
#        self.base_T_camera  = np.matrix(np.zeros((3,4)))
#        self.dead_zone      = 0.02
#        self.far_dist       = 1.0   #Distance beyond which we don't care
#        self.min_attr       = 0.3   #Set min attraction to be 30% of maximum speed
#
#        self.velocity_topic = velocity_topic
#        self.goal_b         = np.matrix([0.0, 0.0, 1.0]).T   #2D goal, homogeneous
#        self.A_T_O          = transform2D(np.matrix([0.0, 0.0]).T, 0.0)
#
#    def cursor_moved(self, point):
#        #Transform into base's coordinate frame
#        point3d   = np.matrix([point.x, point.y, point.z, 1.0]).T
#        new_point = self.base_T_camera * point3d
#
#        #Drop the z, store as 2D homogeneous coordinates [x, y, 1]^T
#        self.goal_b = np.matrix([new_point[0, 0], new_point[1, 0], 1.0]).T
#
#    def robot_moved(self, update):
#        #Frames
#        # A   previous position of robot in global frame
#        # B   current position of robot in global frame
#        # O   global frame
#        B_T_O = transform2D(np.matrix([update.pos.x, update.pos.y]).T,
#                            update.pos.th)
#
#        #transform goal with this update
#        goal_a      = self.goal_b
#        B_T_A       = B_T_O * np.linalg.inv(self.A_T_O)
#        self.goal_b = B_T_A * goal_a
#        self.A_T_O  = B_T_O
#
#    def run(self):
#        #Given the goal position, calculate an attraction vector whose
#        #magnitude ramps from min_attr at the dead zone to 1.0 at far_dist
#        #(hypothetical completion: the original draft left vx and vw blank)
#        goal     = self.goal_b[0:2, 0]
#        distance = np.linalg.norm(goal)
#        if distance > self.dead_zone:
#            mag = ((1 - self.min_attr) / (self.far_dist - self.dead_zone)) * (distance - self.dead_zone) + self.min_attr
#            mag = min(max(mag, self.min_attr), 1.0)
#            out = mag * (goal / distance)
#            vx  = out[0, 0]                        #forward component
#            vw  = math.atan2(out[1, 0], out[0, 0]) #steer toward the goal
#        else:
#            vx = 0.0
#            vw = 0.0
#
#        #send to robot
#        self.velocity_topic.publish(BaseVel(vx, vw))