#!/usr/bin/python
#################################################################
##\file
#
# \note
#   Copyright (c) 2012 \n
#   Cardiff University \n\n
#
#################################################################
#
# \note
#   Project name: Multi-Role Shadow Robotic System for Independent Living
# \note
#   ROS stack name: srs
# \note
#   ROS package name: srs_decision_making
#
# \author
#   Author: Renxi Qiu, email: renxi.qiu@gmail.com
#
# \date Date of creation: April 2012
#
# \brief
#   Task coordination and interfacing for SRS decision making
#
#################################################################
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#     - Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer. \n
#
#     - Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution. \n
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License LGPL as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License LGPL for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License LGPL along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
#################################################################

# ROS imports
import roslib; roslib.load_manifest('srs_decision_making')
import os

# import states within srs_decision_making
from srs_generic_states import *

# import states from the srs_states package
from mapping_states import *
from detection_states import *
from assisted_detection_states import *

# import services from the semantic knowledge base
from srs_knowledge.srv import *

# import other high level state machines
from srs_common_high_level_statemachines import *

#from generic_grasp_state import *

# This comes from srs_object_verification; the name should be updated to avoid confusion
#from generic_states import *

"""
This file contains the high level state machines for detection in decision making.
The actual implementation of the states can be found in the srs_states packages.

Depending on whether a user client is connected, and on the type of that client,
the state machines below may switch between semi-autonomous and fully-autonomous mode.

If semi_autonomous_mode=True, generic states for user intervention are loaded for error handling.
Otherwise, generic states based on the semantic KB are loaded for error handling.

The default mode is semi_autonomous_mode=False, assuming no UI is connected to the robot.

sm_simple_detection()
#simple detection with no user intervention and no environment confirmation

sm_simple_detection_env()
#simple detection with no user intervention but with environment confirmation

sm_assisted_detection()
#assisted detection with user intervention and no environment confirmation

sm_assisted_detection_env()
#assisted detection with user intervention and environment confirmation
"""

################################################################################
# Simple detection state machine
#
# The robot detects the object autonomously.
# No environment confirmation is needed for this detection.
#
# If the object name/id is empty, the robot searches for all objects in the field of view.
# Since no environment confirmation/verification is needed, the target workspace name is not used.
# In the simple detection, only the object nearest to the camera is detected.
#
# The id of the object is not used by the detection state yet; this should be changed.
################################################################################

class sm_simple_detection(smach.StateMachine):
    def __init__(self):
        smach.StateMachine.__init__(self,
                                    outcomes=['succeeded', 'not_completed', 'failed', 'preempted'],
                                    input_keys=['target_object_name', 'target_object_id', 'target_workspace_name'],
                                    output_keys=['target_object', 'target_object_pose'])

        self.max_retries = 5  # default value for max retries
        try:
            self.max_retries = rospy.get_param("srs/common/detection_max_retries")
        except Exception as e:
            rospy.loginfo('cannot read the max retries parameter, using the default value')

        with self:
            smach.StateMachine.add('DETECT_OBJECT-simple', detect_object('', self.max_retries),
                                   transitions={'succeeded':'succeeded', 'retry':'DETECT_OBJECT-simple', 'no_more_retries':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'object_name':'target_object_name', 'object':'target_object', 'object_pose':'target_object_pose'})

################################################################################
# Assisted detection state machine
#
# The robot detects the object autonomously first, then waits for the user to
# confirm and adjust the result via a bounding box.
# No environment confirmation is needed for this detection.
#
# If the target object name/id is empty, the robot searches for all objects in the field of view.
# Since no environment confirmation/verification is needed, the target workspace name is not used.
# In the assisted detection, all detected objects are processed and the user chooses the best object from the list.
#
# The id of the object is not used by the detection state yet; this should be changed.
################################################################################

# detection assisted by a remote operator or the KB, which specifies the region of interest for the detection
class sm_assisted_detection(smach.StateMachine):
    def __init__(self):
        smach.StateMachine.__init__(self,
                                    outcomes=['succeeded', 'not_completed', 'failed', 'preempted'],
                                    input_keys=['target_object_name', 'target_object_id', 'target_workspace_name'],
                                    output_keys=['target_object', 'target_object_pose'])

        self.userdata.target_object_list = ''
        self.userdata.new_scan_pose = ''

        with self:
            smach.StateMachine.add('DETECT_OBJECT-assisted', detect_object_assited(),
                                   transitions={'succeeded':'USER_INTERVENTION', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'object_name':'target_object_name', 'object_list':'target_object_list'})

            smach.StateMachine.add('USER_INTERVENTION', user_intervention_on_detection(),
                                   transitions={'succeeded':'succeeded', 'bb_move':'BB_MOVE', 'give_up':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'target_object_name':'target_object_name', 'object':'target_object', 'object_pose':'target_object_pose', 'bb_pose':'new_scan_pose', 'target_object_list':'target_object_list'})

            smach.StateMachine.add('BB_MOVE', approach_pose_without_retry(),
                                   transitions={'succeeded':'DETECT_OBJECT-assisted', 'not_completed':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'base_pose':'new_scan_pose'})


################################################################################
# Confirm that the workspace exists and that it is not empty
#
################################################################################
class workspace_confirmation(smach.StateMachine):
    def __init__(self):
        smach.StateMachine.__init__(self,
                                    outcomes=['succeeded', 'not_completed', 'failed', 'preempted'],
                                    input_keys=['target_workspace_name'])
        self.userdata.verified_workspace_pose = ""

        with self:
            smach.StateMachine.add('VERIFY_EXISTENCE_OF_THE_WORKSPACE', Verify_Object_by_Name(),
                                   transitions={'succeeded':'succeeded', 'not_completed':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'object_name':'target_workspace_name', 'verfified_target_object_pose':'verified_workspace_pose'})
            """
            smach.StateMachine.add('VERIFY_THE_WORKSPACE_IS_NOT_EMPTY', GetTableObjectCluster(),
                                   transitions={'succeeded':'succeeded', 'not_completed':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'object_name':'target_object_name', 'object':'target_object', 'object_pose':'target_object_pose'})
            """

################################################################################
# Simple detection state machine with environment confirmation
#
# The robot detects the object autonomously.
# Environment confirmation is needed for this detection.
#
# If the target object name/id is empty, the robot searches for all objects in the field of view.
# Since environment confirmation/verification is needed, the target workspace name is used to get the pose of the workspace.
# In the simple detection, only the object nearest to the camera is detected.
#
# The id of the object is not used by the detection state yet; this should be changed.
################################################################################

class sm_simple_detection_env(smach.StateMachine):
    def __init__(self):
        smach.StateMachine.__init__(self,
                                    outcomes=['succeeded', 'not_completed', 'failed', 'preempted'],
                                    input_keys=['target_object_name', 'target_object_id', 'target_workspace_name'],
                                    output_keys=['target_object', 'target_object_pose'])

        self.max_retries = 5  # default value for max retries
        try:
            self.max_retries = rospy.get_param("srs/common/detection_max_retries")
        except Exception as e:
            rospy.loginfo('cannot read the max retries parameter, using the default value')

        self.userdata.target_object = ''
        self.userdata.target_object_pose = ''

        with self:
            # once the workspace is confirmed, continue with the detection
            smach.StateMachine.add('WORKSPACE_CONFIRMATION', workspace_confirmation(),
                                   transitions={'succeeded':'DETECT_OBJECT-simple', 'not_completed':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'target_workspace_name':'target_workspace_name'})
            smach.StateMachine.add('DETECT_OBJECT-simple', detect_object('', self.max_retries),
                                   transitions={'succeeded':'succeeded', 'retry':'DETECT_OBJECT-simple', 'no_more_retries':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'object_name':'target_object_name', 'object':'target_object', 'object_pose':'target_object_pose'})


################################################################################
# Assisted detection state machine with environment confirmation
#
# The robot detects the object autonomously first, then waits for the user to
# confirm and adjust the result via a bounding box.
# Environment confirmation is needed for this detection.
#
# If the target object name/id is empty, the robot searches for all objects in the field of view.
# Since environment confirmation/verification is needed, the target workspace name is used to get the pose of the workspace.
# In the assisted detection, all detected objects are processed and the user chooses the best object from the list.
#
# The id of the object is not used by the detection state yet; this should be changed.
################################################################################

class sm_assisted_detection_env(smach.StateMachine):
    def __init__(self):
        smach.StateMachine.__init__(self,
                                    outcomes=['succeeded', 'not_completed', 'failed', 'preempted'],
                                    input_keys=['target_object_name', 'target_object_id', 'target_workspace_name'],
                                    output_keys=['target_object', 'target_object_pose'])

        self.userdata.target_object_list = ''
        self.userdata.new_scan_pose = ''

        with self:
            # once the workspace is confirmed, continue with the assisted detection
            smach.StateMachine.add('WORKSPACE_CONFIRMATION', workspace_confirmation(),
                                   transitions={'succeeded':'DETECT_OBJECT-assisted', 'not_completed':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'target_workspace_name':'target_workspace_name'})

            smach.StateMachine.add('DETECT_OBJECT-assisted', detect_object_assited(),
                                   transitions={'succeeded':'USER_INTERVENTION', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'object_name':'target_object_name', 'object_list':'target_object_list'})

            smach.StateMachine.add('USER_INTERVENTION', user_intervention_on_detection(),
                                   transitions={'succeeded':'succeeded', 'bb_move':'BB_MOVE', 'give_up':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'target_object_name':'target_object_name', 'object':'target_object', 'object_pose':'target_object_pose', 'bb_pose':'new_scan_pose', 'target_object_list':'target_object_list'})

            smach.StateMachine.add('BB_MOVE', approach_pose_without_retry(),
                                   transitions={'succeeded':'DETECT_OBJECT-assisted', 'not_completed':'not_completed', 'failed':'failed', 'preempted':'preempted'},
                                   remapping={'base_pose':'new_scan_pose'})
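

# ------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the SRS task flow):
# how one of the state machines above could be instantiated and run on
# its own. It assumes a running ROS master and the detection states
# imported above; the node name and the object/workspace names
# ('MilkBox', 'Table0') are placeholders, not values defined by this
# package.
if __name__ == '__main__':
    rospy.init_node('srs_detection_sm_example')

    sm = sm_simple_detection()

    # provide the input keys declared by the state machine
    sm.userdata.target_object_name = 'MilkBox'    # example object name
    sm.userdata.target_object_id = 1              # example id (currently unused by the detection state)
    sm.userdata.target_workspace_name = 'Table0'  # example workspace, only relevant for the *_env variants

    outcome = sm.execute()
    rospy.loginfo('detection finished with outcome: %s', outcome)
    if outcome == 'succeeded':
        rospy.loginfo('detected object pose: %s', sm.userdata.target_object_pose)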