move_floor_button.py
#! /usr/bin/python

import numpy as np

import roslib; roslib.load_manifest('hrl_clickable_behaviors')
import rospy
import std_srvs.srv
import actionlib
import tf.transformations as tf_trans
from tf.listener import TransformListener
from geometry_msgs.msg import Quaternion

from hrl_clickable_world.srv import PerceiveButtons, ButtonAction, DisplayButtons
from hrl_clickable_world.srv import PerceiveButtonsResponse, ButtonActionResponse
from geometry_msgs.msg import Point, PoseStamped
from visualization_msgs.msg import Marker
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal

from hrl_move_floor_detect.srv import SegmentFloor

class MoveFloorButton:
    def __init__(self):
        self.tl = TransformListener()
        # Service the clickable-world manager calls to find drivable floor regions.
        self.perception_srv = rospy.Service("/clickable_world/detect_empty_floor",
                                            PerceiveButtons,
                                            self.do_perception)
        # Visualization of the detected floor surface.
        self.percept_pub = rospy.Publisher("/clickable_world/floor_button_vis",
                                           Marker)
        # Navigation goal derived from the user's click, republished for debugging.
        self.goal_pub = rospy.Publisher("/clickable_world/move_floor_goal", PoseStamped)
        # Service called when the user clicks a detected floor region.
        self.action_srv = rospy.Service("/clickable_world/move_empty_floor",
                                        ButtonAction,
                                        self.do_action)
        # Floor segmentation provided by hrl_move_floor_detect.
        self.floor_seg_srv = rospy.ServiceProxy("/move_floor_detect",
                                                SegmentFloor)
        self.floor_move_client = actionlib.SimpleActionClient("move_base", MoveBaseAction)
        rospy.loginfo("[move_floor_button] MoveFloorButton loaded.")

    def do_perception(self, req):
        # Segment the floor into drivable surface patches.
        rospy.loginfo("[move_floor_button] Segmenting floor")
        self.surfaces = self.floor_seg_srv().surfaces
        # Color the surface markers blue and fully opaque (ColorRGBA fields range 0-1).
        for i in range(len(self.surfaces)):
            self.surfaces[i].color.r = 0
            self.surfaces[i].color.g = 0
            self.surfaces[i].color.b = 1.0
            self.surfaces[i].color.a = 1.0
        if len(self.surfaces) != 0:
            self.percept_pub.publish(self.surfaces[0])

        resp = PerceiveButtonsResponse()
        resp.buttons = self.surfaces
        return resp

    def do_action(self, req):
        rospy.loginfo("[move_floor_button] MoveFloorButton clicked!")
        # Transform the clicked 3D point into the base frame.
        req.pixel3d.header.stamp = rospy.Time(0)
        move_pt = self.tl.transformPoint("/base_link", req.pixel3d)

        # Build a move_base goal at the clicked point, oriented to face it.
        floor_move_goal = MoveBaseGoal()
        floor_move_goal.target_pose.header.frame_id = "/base_link"
        floor_move_goal.target_pose.header.stamp = rospy.Time.now()
        floor_move_goal.target_pose.pose.position = move_pt.point
        quat = tf_trans.quaternion_from_euler(0, 0, np.arctan2(move_pt.point.y, move_pt.point.x))
        floor_move_goal.target_pose.pose.orientation = Quaternion(quat[0], quat[1], quat[2], quat[3])
        try:
            self.goal_pub.publish(floor_move_goal.target_pose)
            self.floor_move_client.send_goal(floor_move_goal)
            self.floor_move_client.wait_for_result()
            cur_pose = self.floor_move_client.get_result()
        except rospy.ROSInterruptException:
            rospy.logwarn("[move_floor_button] Move floor failed")
        return ButtonActionResponse()

def main():
    rospy.init_node('move_floor_button')
    tb = MoveFloorButton()
    rospy.spin()

if __name__ == "__main__":
    main()
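
For context, the snippet below is a minimal client sketch showing how another node might exercise the two services this node advertises. It is an illustration rather than part of the package: only the request field visible in the listing (pixel3d, a PointStamped) is populated, and any other fields of the ButtonAction request are left at their defaults.

# Hypothetical client sketch -- assumes this node and move_base are already running.
import rospy
from geometry_msgs.msg import PointStamped
from hrl_clickable_world.srv import PerceiveButtons, ButtonAction

rospy.init_node('move_floor_button_client')
rospy.wait_for_service('/clickable_world/detect_empty_floor')
rospy.wait_for_service('/clickable_world/move_empty_floor')
perceive = rospy.ServiceProxy('/clickable_world/detect_empty_floor', PerceiveButtons)
act = rospy.ServiceProxy('/clickable_world/move_empty_floor', ButtonAction)

# Ask the node to segment the floor; resp.buttons holds the surface Markers.
resp = perceive()

# Simulate a click 1 m in front of the robot; do_action transforms this point
# into /base_link and sends it to move_base as a navigation goal.
click = PointStamped()
click.header.frame_id = '/base_link'
click.header.stamp = rospy.Time(0)
click.point.x = 1.0
act(pixel3d=click)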


hrl_clickable_behaviors
Author(s): Kelsey Hawkins; advisor: Prof. Charlie Kemp (Healthcare Robotics Lab, Georgia Tech)