symbol_grounding_scan_base_pose_server.py
#!/usr/bin/env python
#################################################################
##\file
#
# \note
# Copyright (c) 2012 \n
# University of Bedfordshire \n\n
#
#################################################################
#
# \note
# Project name: care-o-bot
# \note
# ROS stack name: srs_public
# \note
# ROS package name: srs_symbolic_grounding
#
# \author
# Author: Beisheng Liu, email:beisheng.liu@beds.ac.uk
# \author
# Supervised by: Dayou Li, email:dayou.li@beds.ac.uk
#
# \date Date of creation: Mar 2012
#
# \brief
# Grounding scan object surface commands
#
#################################################################
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer. \n
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution. \n
# - Neither the name of the University of Bedfordshire nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission. \n
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License LGPL as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License LGPL for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License LGPL along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
#################################################################

import roslib; roslib.load_manifest('srs_symbolic_grounding')
from srs_symbolic_grounding.srv import *
from srs_symbolic_grounding.msg import * #needed for FurnitureGeometry used below
from std_msgs.msg import *
from geometry_msgs.msg import *
from nav_msgs.msg import *
from nav_msgs.srv import *
#from srs_knowledge.msg import *
import rospy
import math
import tf
from tf.transformations import euler_from_quaternion
import csv
'''
def getWorkspaceOnMap():
        print 'test get all workspaces (furniture, basically) from the map'
        try:
                requestNewTask = rospy.ServiceProxy('get_workspace_on_map', GetWorkspaceOnMap)
                res = requestNewTask('ipa-kitchen-map', True)
                return res
        except rospy.ServiceException, e:
                print "Service call failed: %s"%e
'''

def getMapClient(): #read the static map from the navigation service

        try:
                reqMap = rospy.ServiceProxy('static_map', GetMap)
                res = reqMap()
                return res
        except rospy.ServiceException, e:
                print "Service call failed: %s"%e


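#The wall check in obstacleCheck() flattens a world coordinate (x, y) into an index of the
#OccupancyGrid's row-major data array. A minimal standalone sketch of that conversion
#(hypothetical helper, not called by the service code; it assumes the nav_msgs map message
#returned by getMapClient above):
def world_to_map_index(x, y, map_info):
        #map_info is a nav_msgs/MapMetaData: resolution in m/cell, width in cells, origin pose
        col = int((x - map_info.origin.position.x) / map_info.resolution)
        row = int((y - map_info.origin.position.y) / map_info.resolution)
        return row * map_info.width + col
#e.g. world_to_map_index(wall_check_point_x, wall_check_point_y, data.map.info) reproduces
#the inline map_index expression used below.
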
def obstacleCheck(sbpl, fgl, po_x, po_y):
        obstacle_checked_scan_base_pose_list = list() #used to save obstacle checked poses
        wall_checked_scan_base_pose_list = list() #used to save wall checked poses
        all_checked_scan_base_pose_list = list()
        scan_base_pose_list = sbpl #read inputs
        furniture_geometry_list = fgl
        parent_obj_x = po_x
        parent_obj_y = po_y

        #obstacle check
        dist_to_obstacles = 0.5 #set the minimum clearance to the household furniture
        #rospy.loginfo(math.atan(-0.5))
        #check every pose from the scan pose list against all the household furniture to find an obstacle-free scan pose list. the surviving poses are stored in obstacle_checked_scan_base_pose_list.
        index_1 = 0
        while index_1 < len(scan_base_pose_list):
                index_2 = 0
                while index_2 < len(furniture_geometry_list):
                        #bearing from the furniture centre to the candidate pose (atan with quadrant corrections)
                        th = math.atan((scan_base_pose_list[index_1].y - furniture_geometry_list[index_2].pose.y) / (scan_base_pose_list[index_1].x - furniture_geometry_list[index_2].pose.x))
                        if scan_base_pose_list[index_1].x < furniture_geometry_list[index_2].pose.x and scan_base_pose_list[index_1].y > furniture_geometry_list[index_2].pose.y:
                                th = math.pi + th
                        if scan_base_pose_list[index_1].x < furniture_geometry_list[index_2].pose.x and scan_base_pose_list[index_1].y < furniture_geometry_list[index_2].pose.y:
                                th = -math.pi + th
                        #offset of the candidate pose expressed in the furniture's own frame
                        delta_x = math.sqrt((scan_base_pose_list[index_1].x - furniture_geometry_list[index_2].pose.x) ** 2 + (scan_base_pose_list[index_1].y - furniture_geometry_list[index_2].pose.y) ** 2) * math.cos(th - furniture_geometry_list[index_2].pose.theta)
                        delta_y = math.sqrt((scan_base_pose_list[index_1].x - furniture_geometry_list[index_2].pose.x) ** 2 + (scan_base_pose_list[index_1].y - furniture_geometry_list[index_2].pose.y) ** 2) * math.sin(th - furniture_geometry_list[index_2].pose.theta)
                        #keep the pose only if it lies outside the furniture footprint inflated by dist_to_obstacles
                        if (delta_x <= -(furniture_geometry_list[index_2].w / 2.0 + dist_to_obstacles) or delta_x >= (furniture_geometry_list[index_2].w / 2.0 + dist_to_obstacles)) or (delta_y <= -(furniture_geometry_list[index_2].l / 2.0 + dist_to_obstacles) or delta_y >= (furniture_geometry_list[index_2].l / 2.0 + dist_to_obstacles)):
                                index_2 += 1
                        else:
                                break
                if index_2 == len(furniture_geometry_list):
                        obstacle_checked_scan_base_pose_list.append(scan_base_pose_list[index_1])
                index_1 += 1
        #rospy.loginfo(obstacle_checked_scan_base_pose_list)

        if obstacle_checked_scan_base_pose_list: #check if there is an obstacle-free pose in the list.

                #wall check
                data = getMapClient() #get the occupancy grid map from the navigation service

                dist_to_walls = 0.5 #set the minimum distance to the walls
                threshold = 10.0 #set the threshold to decide if a cell is occupied. >threshold: occupied.
                step_angle = 5.0 #set the step angle for putting points around the robot for the wall check (360 / step_angle points will be used)

                #check every pose from the obstacle_checked_scan_base_pose_list against the occupancy map to find a wall-free scan pose list
                index_3 = 0
                while index_3 < len(obstacle_checked_scan_base_pose_list):
                        map_index_list = list()
                        n = 0
                        #sample 360 / step_angle points on a circle of radius dist_to_walls around the candidate pose
                        while n < int(360.0 / step_angle):
                                wall_check_point_x = obstacle_checked_scan_base_pose_list[index_3].x + dist_to_walls * math.cos(n * step_angle / 180.0 * math.pi)
                                wall_check_point_y = obstacle_checked_scan_base_pose_list[index_3].y + dist_to_walls * math.sin(n * step_angle / 180.0 * math.pi)

                                #rospy.loginfo([wall_check_point_x, wall_check_point_y])

                                map_index = int((wall_check_point_y - data.map.info.origin.position.y) / data.map.info.resolution) * data.map.info.width + int((wall_check_point_x - data.map.info.origin.position.x) / data.map.info.resolution)
                                map_index_list.append(map_index)
                                n += 1

                        #also check the cell under the candidate pose itself
                        map_index = int((obstacle_checked_scan_base_pose_list[index_3].y - data.map.info.origin.position.y) / data.map.info.resolution) * data.map.info.width + int((obstacle_checked_scan_base_pose_list[index_3].x - data.map.info.origin.position.x) / data.map.info.resolution)
                        map_index_list.append(map_index)
                        #rospy.loginfo(map_index_list)

                        index_4 = 0
                        while index_4 < len(map_index_list):
                                #rospy.loginfo(data.map.data[map_index_list[index_4]])
                                if -1 < data.map.data[map_index_list[index_4]] < threshold:
                                        index_4 += 1
                                else:
                                        break
                        if index_4 == len(map_index_list):
                                wall_checked_scan_base_pose_list.append(obstacle_checked_scan_base_pose_list[index_3])
                        index_3 += 1
        '''
        if wall_checked_scan_base_pose_list:
                step_dist = 0.05
                map_index_list = list()
                threshold = 20
                index_5 = 0
                while index_5 < len(wall_checked_scan_base_pose_list):
                        dist = math.sqrt((parent_obj_y - wall_checked_scan_base_pose_list[index_5].y) ** 2 + (parent_obj_x - wall_checked_scan_base_pose_list[index_5].x) ** 2)
                        n = 0
                        while n < int((dist - 0.5) / step_dist):
                                wall_check_point_x = wall_checked_scan_base_pose_list[index_5].x - (0.5 + n * step_dist) * math.cos(wall_checked_scan_base_pose_list[index_5].theta)
                                wall_check_point_y = wall_checked_scan_base_pose_list[index_5].y - (0.5 + n * step_dist) * math.sin(wall_checked_scan_base_pose_list[index_5].theta)
                                print([wall_check_point_x, wall_check_point_y, wall_checked_scan_base_pose_list[index_5].theta])
                                map_index = int((wall_check_point_y - data.map.info.origin.position.y) / data.map.info.resolution) * data.map.info.width + int((wall_check_point_x - data.map.info.origin.position.x) / data.map.info.resolution)
                                map_index_list.append(map_index)
                                n += 1

                        index_6 = 0
                        while index_6 < len(map_index_list):
                                if -1 < data.map.data[map_index_list[index_6]] < threshold:
                                        index_6 += 1
                                else:
                                        break
                        if index_6 == len(map_index_list):
                                all_checked_scan_base_pose_list.append(wall_checked_scan_base_pose_list[index_5])
                        index_5 += 1
        '''
        return wall_checked_scan_base_pose_list
        #rospy.loginfo(wall_checked_scan_base_pose_list)


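#A minimal numeric sketch of the clearance test used in obstacleCheck(), with illustrative
#values only (not part of the service logic): the candidate pose is expressed in the
#furniture's own frame and compared against the half-extents inflated by the clearance.
def clearance_check_example():
        furn_x, furn_y, furn_th = 2.0, 1.0, 0.5 * math.pi #furniture centre and yaw
        furn_w, furn_l = 1.0, 2.0 #furniture footprint
        pose_x, pose_y = 2.4, 1.0 #candidate scan base pose
        clearance = 0.5
        th = math.atan2(pose_y - furn_y, pose_x - furn_x) #atan2 replaces the manual quadrant fix-ups above
        dist = math.sqrt((pose_x - furn_x) ** 2 + (pose_y - furn_y) ** 2)
        delta_x = dist * math.cos(th - furn_th) #offset along the furniture's width axis
        delta_y = dist * math.sin(th - furn_th) #offset along the furniture's length axis
        free = abs(delta_x) > furn_w / 2.0 + clearance or abs(delta_y) > furn_l / 2.0 + clearance
        return free #False here: the offset (0.0, -0.4) lies inside the inflated 2.0 x 3.0 footprint
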
#calculate scan base poses
def handle_symbol_grounding_scan_base_pose(req):

        '''
        #record the map for checking
        data = getMapClient()
        spamWriter = csv.writer(open('map_data.csv', 'wb'), delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        for n in range(80, 250):
                spamWriter.writerow(data.map.data[130 + 320 * n : 200 + 320 * n])
                n += 1
        '''

        '''
        #test the map
        data = getMapClient()
        #test points
        x = -3.2
        y = -0.58
        map_index = int((y - data.map.info.origin.position.y) / data.map.info.resolution * data.map.info.width + (x - data.map.info.origin.position.x - 6.0) / data.map.info.resolution - 1)
        for n in range(-20, 100):
                map_line = list()
                for m in range (-9, 9):
                        map_line.append(data.map.data[map_index + n * int(data.map.info.width) + m])
                        m += 1
                print map_line
                n += 1
        rospy.loginfo([data.map.info.origin.position.x, data.map.info.origin.position.y])
        rospy.loginfo([data.map.info.width, data.map.info.height])
        '''


        '''
        #get furniture information from the knowledge base
        workspace_info = getWorkspaceOnMap()
        furniture_geometry_list = list()
        furniture_geometry_list = workspace_info.objectsInfo


        #transform the list
        index = 0
        furniture_geometry_list = list()
        while index < len(furniture_geometry_list):
                furniture_geometry = FurnitureGeometry()
                furniture_geometry.pose.x = furniture_geometry_list[index].pose.position.x
                furniture_geometry.pose.y = furniture_geometry_list[index].pose.position.y
                furniture_pose_rpy = tf.transformations.euler_from_quaternion([furniture_geometry_list[index].pose.orientation.x, furniture_geometry_list[index].pose.orientation.y, furniture_geometry_list[index].pose.orientation.z, furniture_geometry_list[index].pose.orientation.w])
                furniture_geometry.pose.theta = furniture_pose_rpy[2]
                furniture_geometry.l = furniture_geometry_list[index].l
                furniture_geometry.w = furniture_geometry_list[index].w
                furniture_geometry.h = furniture_geometry_list[index].h
                furniture_geometry_list.append(furniture_geometry)
                index += 1


        '''

        #transform the knowledge base data into the form used by this function
        parent_obj_x = req.parent_obj_geometry.pose.position.x
        parent_obj_y = req.parent_obj_geometry.pose.position.y
        parent_obj_rpy = tf.transformations.euler_from_quaternion([req.parent_obj_geometry.pose.orientation.x, req.parent_obj_geometry.pose.orientation.y, req.parent_obj_geometry.pose.orientation.z, req.parent_obj_geometry.pose.orientation.w])
        parent_obj_th = parent_obj_rpy[2]
        parent_obj_l = req.parent_obj_geometry.l
        parent_obj_w = req.parent_obj_geometry.w
        parent_obj_h = req.parent_obj_geometry.h

        #rpy = tf.transformations.euler_from_quaternion([0, 0, -0.68, 0.73])
        #rospy.loginfo(rpy[2])

        #rospy.loginfo(req.parent_obj_geometry)


        #transform the furniture geometry list
        index = 0
        furniture_geometry_list = list()
        while index < len(req.furniture_geometry_list):
                furniture_geometry = FurnitureGeometry()
                furniture_geometry.pose.x = req.furniture_geometry_list[index].pose.position.x
                furniture_geometry.pose.y = req.furniture_geometry_list[index].pose.position.y
                furniture_pose_rpy = tf.transformations.euler_from_quaternion([req.furniture_geometry_list[index].pose.orientation.x, req.furniture_geometry_list[index].pose.orientation.y, req.furniture_geometry_list[index].pose.orientation.z, req.furniture_geometry_list[index].pose.orientation.w])
                furniture_geometry.pose.theta = furniture_pose_rpy[2]
                furniture_geometry.l = req.furniture_geometry_list[index].l
                furniture_geometry.w = req.furniture_geometry_list[index].w
                furniture_geometry.h = req.furniture_geometry_list[index].h
                furniture_geometry_list.append(furniture_geometry)
                index += 1


        #calculate the detection width
        rb_distance = 1.0 #0.7 #distance between the robot base and the edge of the parent obj
        robot_h = 1.4 #set the height of the detector
        detection_angle = (30.0 / 180.0) * math.pi #set the detection angle (field of view) of the detector
        camera_distance = math.sqrt((robot_h - parent_obj_h) ** 2 + (rb_distance - 0.2) ** 2) #distance between the detector and the surface of the parent obj
        detection_w = 2 * (camera_distance * math.tan(0.5 * detection_angle)) #detection width
        #rospy.loginfo(detection_w)

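        #Worked example (illustrative numbers, not taken from the request): for a 0.75 m high
        #table, camera_distance = sqrt((1.4 - 0.75)**2 + (1.0 - 0.2)**2) ~= 1.03 m, so
        #detection_w = 2 * 1.03 * tan(15 deg) ~= 0.55 m, i.e. each scan pose covers roughly a
        #0.55 m wide strip along the table edge.
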

        #define the pose variables
        scan_base_pose_1 = Pose2D()
        scan_base_pose_2 = Pose2D()
        scan_base_pose_3 = Pose2D()
        scan_base_pose_4 = Pose2D()

        #create lists to store scan base poses
        scan_base_pose_list_1 = list()
        scan_base_pose_list_2 = list()
        scan_base_pose_list_3 = list()
        scan_base_pose_list_4 = list()

        #normalise the parent object yaw to [0, 2*pi)
        if parent_obj_th < 0:
                parent_obj_th += 2.0 * math.pi

        elif parent_obj_th > 2.0 * math.pi:
                parent_obj_th -= 2.0 * math.pi
        #rospy.loginfo(parent_obj_th)


        #decide which side of the table is facing the robot
        if ((parent_obj_th >= 0) and (parent_obj_th <= (45.0 / 180.0 * math.pi))) or ((parent_obj_th >= (135.0 / 180.0 * math.pi)) and (parent_obj_th <= (225.0 / 180.0 * math.pi))) or ((parent_obj_th >= (315.0 / 180.0 * math.pi)) and (parent_obj_th < 2.0 * math.pi)):

                #calculate 4 lists of scan poses; each list is for scanning from one side of the table.
                step = int((parent_obj_l / detection_w) + 0.99) #number of scan stops along the long side (ceiling)
                detection_w = parent_obj_l / step
                for num in range(step):
                        scan_base_pose_1 = Pose2D()
                        scan_base_pose_1.x = parent_obj_x - (parent_obj_w * 0.5 + rb_distance) * math.cos(parent_obj_th) - (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_1.y = parent_obj_y - (parent_obj_w * 0.5 + rb_distance) * math.sin(parent_obj_th) + (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_1.theta = parent_obj_th + math.pi
                        if scan_base_pose_1.theta > math.pi:
                                scan_base_pose_1.theta -= 2.0 * math.pi
                        elif scan_base_pose_1.theta < -math.pi:
                                scan_base_pose_1.theta += 2.0 * math.pi
                        scan_base_pose_list_1.append(scan_base_pose_1)

                #rospy.loginfo(scan_base_pose_list_1)


                for num in range(step):
                        scan_base_pose_2 = Pose2D()
                        scan_base_pose_2.x = parent_obj_x + (parent_obj_w * 0.5 + rb_distance) * math.cos(parent_obj_th) + (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_2.y = parent_obj_y + (parent_obj_w * 0.5 + rb_distance) * math.sin(parent_obj_th) - (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_2.theta = parent_obj_th
                        if scan_base_pose_2.theta > math.pi:
                                scan_base_pose_2.theta -= 2.0 * math.pi
                        elif scan_base_pose_2.theta < -math.pi:
                                scan_base_pose_2.theta += 2.0 * math.pi
                        scan_base_pose_list_2.append(scan_base_pose_2)


                step = int((parent_obj_w / detection_w) + 0.99)
                detection_w = parent_obj_w / step
                for num in range(step):
                        scan_base_pose_3 = Pose2D()
                        scan_base_pose_3.x = parent_obj_x + (parent_obj_l * 0.5 + rb_distance) * math.sin(parent_obj_th) - (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_3.y = parent_obj_y - (parent_obj_l * 0.5 + rb_distance) * math.cos(parent_obj_th) - (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_3.theta = parent_obj_th - 0.5 * math.pi
                        if scan_base_pose_3.theta > math.pi:
                                scan_base_pose_3.theta -= 2.0 * math.pi
                        elif scan_base_pose_3.theta < -math.pi:
                                scan_base_pose_3.theta += 2.0 * math.pi
                        scan_base_pose_list_3.append(scan_base_pose_3)


                for num in range(step):
                        scan_base_pose_4 = Pose2D()
                        scan_base_pose_4.x = parent_obj_x - (parent_obj_l * 0.5 + rb_distance) * math.sin(parent_obj_th) + (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_4.y = parent_obj_y + (parent_obj_l * 0.5 + rb_distance) * math.cos(parent_obj_th) + (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_4.theta = parent_obj_th + 0.5 * math.pi
                        if scan_base_pose_4.theta > math.pi:
                                scan_base_pose_4.theta -= 2.0 * math.pi
                        elif scan_base_pose_4.theta < -math.pi:
                                scan_base_pose_4.theta += 2.0 * math.pi
                        scan_base_pose_list_4.append(scan_base_pose_4)

        #the short side is facing the robot
        else:
                parent_obj_th -= 0.5 * math.pi
                step = int((parent_obj_w / detection_w) + 0.99)
                detection_w = parent_obj_w / step
                for num in range(step):
                        scan_base_pose_1 = Pose2D()
                        scan_base_pose_1.x = parent_obj_x - (parent_obj_l * 0.5 + rb_distance) * math.cos(parent_obj_th) - (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_1.y = parent_obj_y - (parent_obj_l * 0.5 + rb_distance) * math.sin(parent_obj_th) + (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_1.theta = parent_obj_th + math.pi
                        if scan_base_pose_1.theta > math.pi:
                                scan_base_pose_1.theta -= 2.0 * math.pi
                        elif scan_base_pose_1.theta < -math.pi:
                                scan_base_pose_1.theta += 2.0 * math.pi
                        scan_base_pose_list_1.append(scan_base_pose_1)


                for num in range(step):
                        scan_base_pose_2 = Pose2D()
                        scan_base_pose_2.x = parent_obj_x + (parent_obj_l * 0.5 + rb_distance) * math.cos(parent_obj_th) + (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_2.y = parent_obj_y + (parent_obj_l * 0.5 + rb_distance) * math.sin(parent_obj_th) - (0.5 * parent_obj_w - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_2.theta = parent_obj_th
                        if scan_base_pose_2.theta > math.pi:
                                scan_base_pose_2.theta -= 2.0 * math.pi
                        elif scan_base_pose_2.theta < -math.pi:
                                scan_base_pose_2.theta += 2.0 * math.pi
                        scan_base_pose_list_2.append(scan_base_pose_2)


                step = int((parent_obj_l / detection_w) + 0.99)
                detection_w = parent_obj_l / step
                for num in range(step):
                        scan_base_pose_3 = Pose2D()
                        scan_base_pose_3.x = parent_obj_x + (parent_obj_w * 0.5 + rb_distance) * math.sin(parent_obj_th) - (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_3.y = parent_obj_y - (parent_obj_w * 0.5 + rb_distance) * math.cos(parent_obj_th) - (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_3.theta = parent_obj_th - 0.5 * math.pi
                        if scan_base_pose_3.theta > math.pi:
                                scan_base_pose_3.theta -= 2.0 * math.pi
                        elif scan_base_pose_3.theta < -math.pi:
                                scan_base_pose_3.theta += 2.0 * math.pi
                        scan_base_pose_list_3.append(scan_base_pose_3)


                for num in range(step):
                        scan_base_pose_4 = Pose2D()
                        scan_base_pose_4.x = parent_obj_x - (parent_obj_w * 0.5 + rb_distance) * math.sin(parent_obj_th) + (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.cos(parent_obj_th)
                        scan_base_pose_4.y = parent_obj_y + (parent_obj_w * 0.5 + rb_distance) * math.cos(parent_obj_th) + (0.5 * parent_obj_l - 0.5 * detection_w - num * detection_w) * math.sin(parent_obj_th)
                        scan_base_pose_4.theta = parent_obj_th + 0.5 * math.pi
                        if scan_base_pose_4.theta > math.pi:
                                scan_base_pose_4.theta -= 2.0 * math.pi
                        elif scan_base_pose_4.theta < -math.pi:
                                scan_base_pose_4.theta += 2.0 * math.pi
                        scan_base_pose_list_4.append(scan_base_pose_4)

        #rospy.loginfo(scan_base_pose_list_1)
        #rospy.loginfo(scan_base_pose_list_2)
        #rospy.loginfo(scan_base_pose_list_3)
        #rospy.loginfo(scan_base_pose_list_4)

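        #Worked example (illustrative numbers): for a table at (0, 0) with theta = 0,
        #l = 1.2, w = 0.8 and detection_w ~= 0.55, the long side faces the robot, so
        #step = int(1.2 / 0.55 + 0.99) = 3 and detection_w is rescaled to 0.4. List 1 then
        #holds three poses at x = -(0.4 + 1.0) = -1.4, y = 0.4 / 0.0 / -0.4, theta = pi:
        #stops spaced one detection width apart along one long edge, all facing the table.
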
        #obstacle check
        obstacle_checked_scan_base_pose_list_1 = obstacleCheck(scan_base_pose_list_1, furniture_geometry_list, parent_obj_x, parent_obj_y)
        obstacle_checked_scan_base_pose_list_2 = obstacleCheck(scan_base_pose_list_2, furniture_geometry_list, parent_obj_x, parent_obj_y)
        obstacle_checked_scan_base_pose_list_3 = obstacleCheck(scan_base_pose_list_3, furniture_geometry_list, parent_obj_x, parent_obj_y)
        obstacle_checked_scan_base_pose_list_4 = obstacleCheck(scan_base_pose_list_4, furniture_geometry_list, parent_obj_x, parent_obj_y)



        #rospy.loginfo([obstacle_checked_scan_base_pose_list_1, obstacle_checked_scan_base_pose_list_2, obstacle_checked_scan_base_pose_list_3, obstacle_checked_scan_base_pose_list_4])
        max_len = max(len(obstacle_checked_scan_base_pose_list_1), len(obstacle_checked_scan_base_pose_list_2), len(obstacle_checked_scan_base_pose_list_3), len(obstacle_checked_scan_base_pose_list_4))

        #choose the longest checked scan pose list
        if len(obstacle_checked_scan_base_pose_list_1) == max_len:
                scan_base_pose_list = [obstacle_checked_scan_base_pose_list_1]
        elif len(obstacle_checked_scan_base_pose_list_2) == max_len:
                scan_base_pose_list = [obstacle_checked_scan_base_pose_list_2]
        elif len(obstacle_checked_scan_base_pose_list_3) == max_len:
                scan_base_pose_list = [obstacle_checked_scan_base_pose_list_3]
        else:
                scan_base_pose_list = [obstacle_checked_scan_base_pose_list_4]


        if not scan_base_pose_list[0]:
                print "no valid scan pose."


        return scan_base_pose_list


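#A minimal sketch of how a client node might call this service (hypothetical; a real client
#would normally live in its own script). It assumes the SymbolGroundingScanBasePose request
#takes the parent_obj_geometry and furniture_geometry_list fields used above, in that order,
#and that the response field is named scan_base_pose_list.
def scan_base_pose_client_example(parent_obj_geometry, furniture_geometry_list):
        rospy.wait_for_service('symbol_grounding_scan_base_pose')
        get_scan_poses = rospy.ServiceProxy('symbol_grounding_scan_base_pose', SymbolGroundingScanBasePose)
        resp = get_scan_poses(parent_obj_geometry, furniture_geometry_list)
        for pose in resp.scan_base_pose_list:
                rospy.loginfo("scan pose: x=%f y=%f theta=%f" % (pose.x, pose.y, pose.theta))
        return resp.scan_base_pose_list
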
def symbol_grounding_scan_base_pose_server():
        rospy.init_node('symbol_grounding_scan_base_pose_server')
        s = rospy.Service('symbol_grounding_scan_base_pose', SymbolGroundingScanBasePose, handle_symbol_grounding_scan_base_pose)
        print "Ready to receive requests."
        rospy.spin()



if __name__ == "__main__":
    symbol_grounding_scan_base_pose_server()

