/*
 * Copyright (c) 2009, Willow Garage, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *   * Neither the name of the Willow Garage, Inc. nor the names of its
 *     contributors may be used to endorse or promote products derived from
 *     this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

// Author(s): Kaijen Hsiao

#include <ros/ros.h>
#include <actionlib/server/simple_action_server.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/point_cloud_conversion.h>
#include <tf/transform_listener.h>
#include <pcl/io/io.h>
#include "tabletop_object_detector/SegmentObjectInHand.h"
#include "pr2_create_object_model/ModelObjectInHandAction.h"
#include "pr2_create_object_model/ObjectInHand.h"
#include "object_manipulator/tools/mechanism_interface.h"
#include "arm_navigation_msgs/LinkPadding.h"
#include "arm_navigation_msgs/OrderedCollisionOperations.h"
#include <eigen_conversions/eigen_msg.h>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <object_manipulation_msgs/ClusterBoundingBox.h>

#include <tabletop_collision_map_processing/collision_map_interface.h>

using pr2_create_object_model::ObjectInHand;

class InHandObjectModeler
{

private:

  //! Node handle in the root namespace
  ros::NodeHandle root_nh_;

  //! Node handle in the private namespace
  ros::NodeHandle priv_nh_;

  //! Action server for the ModelObjectInHand action
  actionlib::SimpleActionServer<pr2_create_object_model::ModelObjectInHandAction> model_object_action_server_;

  //! Result and feedback messages for the action
  pr2_create_object_model::ModelObjectInHandResult model_object_result_;
  pr2_create_object_model::ModelObjectInHandFeedback model_object_feedback_;

  //! Publisher for the assembled object cluster
  ros::Publisher pub_cloud;

  //! Name of the object-in-hand segmentation service
  std::string segment_object_service_name_;

  //! Interface for moving the arm and head
  object_manipulator::MechanismInterface mech_interface_;

  //! Interface for adding objects to the collision environment
  tabletop_collision_map_processing::CollisionMapInterface collision_map_interface_;

  //! True if using the narrow stereo camera; false if using an OpenNI camera
  bool stereo_;

public:

  InHandObjectModeler(std::string name) :
    root_nh_(""),
    priv_nh_("~"),
    model_object_action_server_(root_nh_, name+"/model_object_in_hand_action",
                                boost::bind(&InHandObjectModeler::executeCB, this, _1), false)
  {
    segment_object_service_name_ = "/segment_object_in_hand_srv";

    while ( !ros::service::waitForService(segment_object_service_name_, ros::Duration().fromSec(3.0)) && priv_nh_.ok() )
    {
      ROS_INFO("Waiting for service %s...", segment_object_service_name_.c_str());
    }
    if (!priv_nh_.ok()) exit(0);

    pub_cloud = priv_nh_.advertise<ObjectInHand>("modeled_object_in_hand", 1);

    priv_nh_.param<bool>("stereo", stereo_, true);

    model_object_action_server_.start();
    ROS_INFO("started action server");
  }

  //! Callback for the ModelObjectInHand action
  void executeCB(const pr2_create_object_model::ModelObjectInHandGoalConstPtr &goal)
  {
    sensor_msgs::PointCloud2 object_in_hand;
    sensor_msgs::PointCloud2 cluster;

    //Set the min_disparity high so we can see stuff closer to the camera
    if(stereo_)
    {
      int res = system("rosrun dynamic_reconfigure dynparam set_from_parameters /narrow_stereo_textured/narrow_stereo_textured_proc _min_disparity:=30");
    }
    bool result;

    //broadcast the phase we're in
    model_object_feedback_.phase = model_object_feedback_.BEFORE_MOVE;
    model_object_action_server_.publishFeedback(model_object_feedback_);

    //not supposed to move the arm--just grab any points we see in the hand and return them
    if(goal->clear_move.vector.x == 0 && goal->clear_move.vector.y == 0 && goal->clear_move.vector.z == 0){
      result = callSegmentObjectInHand(goal->arm_name, cluster);
      if(result != 0) object_in_hand = cluster;
    }
    else
    {
      //move the arm up to get it away from stuff
      moveArmAway(goal->arm_name, goal->clear_move);

      //snap the current points in the hand for motion planning
      pointHead(goal->arm_name);
      ros::Duration(0.5).sleep();
      result = callSegmentObjectInHand(goal->arm_name, cluster);
      if(result != 0) object_in_hand = cluster;

      //if desired, move the arm to where the camera can see it
      if(goal->rotate_pose.pose.position.x != 0 || goal->rotate_pose.pose.position.y != 0
         || goal->rotate_pose.pose.position.z != 0)
      {
        moveArmIntoView(goal->arm_name, object_in_hand, goal->rotate_pose);
      }

      //if desired, rotate the object, gathering clusters as we go
      if(goal->rotate_object)
      {
        //clear the old cluster first
        object_in_hand.data.clear();
        object_in_hand.width = 0;

        //accumulate a new object model while rotating
        if(goal->keep_level) turnArmInwards(goal->arm_name, object_in_hand);
        else rotateObject(goal->arm_name, object_in_hand);
      }

      //don't rotate, just snap the object where it is
      else
      {
        result = callSegmentObjectInHand(goal->arm_name, cluster);
        if(result != 0) object_in_hand = cluster;
      }
    }

    //broadcast the phase we're in
    model_object_feedback_.phase = model_object_feedback_.DONE;
    model_object_action_server_.publishFeedback(model_object_feedback_);

    //return the resulting object cluster
    model_object_result_.cluster = object_in_hand;
    std::cout << "object_in_hand width: " << object_in_hand.width << "\n";

    //if desired, attach the bounding box of the resulting cluster to the gripper in the collision map
    std::string collision_name;
    if (goal->add_to_collision_map)
    {
      attachObject(goal->arm_name, object_in_hand, collision_name);
    }

    //publish the final result
    ObjectInHand oih;
    oih.cluster = object_in_hand;
    oih.arm_name = goal->arm_name;
    oih.collision_name = collision_name;
    model_object_result_.collision_name = collision_name;
    ROS_INFO("frame_id: %s, final num points: %d, collision name: %s",
             object_in_hand.header.frame_id.c_str(), (int)object_in_hand.width, collision_name.c_str());
    pub_cloud.publish(oih);

    //Set the min_disparity back
    if(stereo_)
    {
      int res = system("rosrun dynamic_reconfigure dynparam set_from_parameters /narrow_stereo_textured/narrow_stereo_textured_proc _min_disparity:=0");
    }
    model_object_action_server_.setSucceeded(model_object_result_);
  }

  //! Attach the object's cluster and bounding box to the gripper in the collision environment
  bool attachObject(std::string arm_name, sensor_msgs::PointCloud2 object_in_hand, std::string &collision_name)
  {
    if(object_in_hand.width == 0){
      ROS_ERROR("object_in_hand was empty!");
      return false;
    }

    try
    {
      ros::Time start_time = ros::Time::now();
      while (!collision_map_interface_.connectionsEstablished(ros::Duration(1.0)))
      {
        if (!priv_nh_.ok() || ros::Time::now() - start_time >= ros::Duration(5.0))
        {
          ROS_ERROR("Create object model: failed to connect to collision map interface!");
          collision_name.clear();
          return false;
        }
      }

      //remove any current object in the hand from the collision map
      mech_interface_.detachAllObjectsFromGripper(arm_name);

      //convert the object cluster to PointCloud1
      sensor_msgs::PointCloud cluster1;
      sensor_msgs::convertPointCloud2ToPointCloud(object_in_hand, cluster1);
      collision_map_interface_.processCollisionGeometryForCluster(cluster1, collision_name);

      //compute the cluster bounding box and add it to collision map
      object_manipulation_msgs::ClusterBoundingBox bbox;
      //collision_map_interface_.getClusterBoundingBox(object_in_hand, bbox.pose_stamped, bbox.dimensions);
      collision_map_interface_.getClusterBoundingBox(cluster1, bbox.pose_stamped, bbox.dimensions);
      collision_map_interface_.processCollisionGeometryForBoundingBox(bbox, collision_name);

      //give the cluster collision object time to instantiate
      ros::Duration(1.0).sleep();

      //attach the box to the gripper
      mech_interface_.attachObjectToGripper(arm_name, collision_name);
    }
    catch(...)
    {
      ROS_ERROR("Error while trying to add object to collision map");
      collision_name.clear();
      return false;
    }
    return true;
  }


  //! Call the segment_object_in_hand service to get the cluster of points in the gripper
  bool callSegmentObjectInHand(std::string arm_name, sensor_msgs::PointCloud2 &object_in_hand)
  {
    tabletop_object_detector::SegmentObjectInHand segmentation_srv;
    if (arm_name.compare("right_arm") == 0)
      segmentation_srv.request.wrist_frame = std::string("r_wrist_roll_link");
    else
      segmentation_srv.request.wrist_frame = std::string("l_wrist_roll_link");
    if (!ros::service::call(segment_object_service_name_, segmentation_srv))
    {
      ROS_ERROR("Call to segmentation service failed");
      return 0;
    }
    object_in_hand = segmentation_srv.response.cluster;
    return 1;
  }

  //! Point the head at a spot just in front of the wrist
  void pointHead(std::string arm_name)
  {
    geometry_msgs::PointStamped desired_head_point;
    if (arm_name.compare("right_arm") == 0)
      desired_head_point.header.frame_id = "r_wrist_roll_link";
    else
      desired_head_point.header.frame_id = "l_wrist_roll_link";
    desired_head_point.point.x = .2;
    if(stereo_) mech_interface_.pointHeadAction(desired_head_point, "/narrow_stereo_optical_frame");
    else mech_interface_.pointHeadAction(desired_head_point, "/openni_rgb_optical_frame");
    /* else
    {
      geometry_msgs::PoseStamped gripper_pose = mech_interface_.getGripperPose(arm_name, std::string("torso_lift_link"));
      desired_head_point.header.frame_id = "torso_lift_link";
      desired_head_point.point.x = gripper_pose.pose.position.x;
      desired_head_point.point.y = gripper_pose.pose.position.y;
      desired_head_point.point.z = gripper_pose.pose.position.z;
      mech_interface_.pointHeadAction(desired_head_point, "/openni_rgb_optical_frame");
    }*/
    ROS_INFO("pointing head");
  }

  //! Move the arm so the object in the gripper is in view of the camera
  bool moveArmIntoView(std::string arm_name, sensor_msgs::PointCloud2 object_in_hand, geometry_msgs::PoseStamped desired_pose)
  {
    /*
    geometry_msgs::PoseStamped desired_pose = mech_interface_.getGripperPose(arm_name, std::string("torso_lift_link"));
    desired_pose.pose.position.x = .7;
    desired_pose.pose.position.y = 0;
    desired_pose.pose.position.z = 0.40;
    */

    //broadcast the phase we're in
    model_object_feedback_.phase = model_object_feedback_.MOVE_TO_ROTATE_POSE;
    model_object_action_server_.publishFeedback(model_object_feedback_);

    //if there are any points in object_in_hand, attach them to the gripper
    std::string collision_name;
    bool result = false;
    attachObject(arm_name, object_in_hand, collision_name);

    // //try to get to the desired pose using move_arm
    // const arm_navigation_msgs::OrderedCollisionOperations col_opers;
    // std::vector<arm_navigation_msgs::LinkPadding> link_padding;
    // bool result = mech_interface_.moveArmToPose(arm_name, desired_pose, col_opers, link_padding);

    //detach the object from the gripper
    mech_interface_.detachAllObjectsFromGripper(arm_name);

    //use Cartesian controllers to go straight there in the event of failure (and raise the elbow)
    if(!result)
    {
      geometry_msgs::PoseStamped current_pose = mech_interface_.getGripperPose(arm_name, desired_pose.header.frame_id);
      double pos_dist, angle;
      mech_interface_.poseDists(current_pose.pose, desired_pose.pose, pos_dist, angle);
      double time = pos_dist / .10;
      double angle_time = angle / .785;
      if (angle_time > time) time = angle_time;
      const double left_elbow_up_angles[7] = {0.79,0,1.6,9999,9999,9999,9999};
      const double right_elbow_up_angles[7] = {-0.79,0,-1.6,9999,9999,9999,9999};
      std::vector<double> left_elbow_up_angles_vect(left_elbow_up_angles, left_elbow_up_angles+7);
      std::vector<double> right_elbow_up_angles_vect(right_elbow_up_angles, right_elbow_up_angles+7);
      if(arm_name == "left_arm") mech_interface_.sendCartesianPostureCommand(arm_name, left_elbow_up_angles_vect);
      else mech_interface_.sendCartesianPostureCommand(arm_name, right_elbow_up_angles_vect);
      mech_interface_.moveArmToPoseCartesian(arm_name, desired_pose, ros::Duration(time + 2.0), .015, .09, .04, .3, .1);
    }
    ROS_INFO("moving arm in front of head");

    //point the head at the gripper and wait for it to get there
    pointHead(arm_name);
    ros::Duration(0.5).sleep();

    return 1;
  }

  //! Spin the last wrist joint through a full turn, accumulating clusters from each view
  bool rotateObject(std::string arm_name, sensor_msgs::PointCloud2 &object_in_hand)
  {
    sensor_msgs::PointCloud2 cluster;
    ROS_INFO("inside rotateObject");

    //get the current arm angles
    std::vector<double> current_arm_angles;
    mech_interface_.getArmAngles(arm_name, current_arm_angles);

    //rotate the gripper by turning the last joint
    double max_angle_step = M_PI/8;
    int num_steps = 2*M_PI/max_angle_step;
    double step_size = 2*M_PI/num_steps;
    for(int view_ind=0; view_ind<num_steps; view_ind++)
    {
      //ask the arm to move to the new joint angles (except for the first step, which is the current pose)
      if(view_ind != 0)
      {
        std::vector< std::vector<double> > trajectory;
        trajectory.push_back(current_arm_angles);
        current_arm_angles[6] += step_size;
        trajectory.push_back(current_arm_angles);
        mech_interface_.attemptTrajectory(arm_name, trajectory, false, 0.75);
      }

      //point the head at the object in the gripper
      pointHead(arm_name);
      //ros::Duration(1.0).sleep();

      //broadcast the step we're on
      model_object_feedback_.phase = model_object_feedback_.ROTATING;
      model_object_feedback_.rotate_ind = view_ind;
      model_object_action_server_.publishFeedback(model_object_feedback_);

      //get the cluster of points in the hand
      bool result = callSegmentObjectInHand(arm_name, cluster);
      if(result != 0)
      {
        //combine resulting point cloud into object model
        if(object_in_hand.width == 0) object_in_hand = cluster;
        else
        {
          result = pcl::concatenatePointCloud(object_in_hand, cluster, object_in_hand);
          object_in_hand.header.stamp = ros::Time::now();
        }

        //publish the new result
        ROS_INFO("frame_id: %s, num points: %d", object_in_hand.header.frame_id.c_str(), (int)object_in_hand.width);
        pub_cloud.publish(object_in_hand);
      }
    }
    return 1;
  }

  //! Turn the gripper inwards while keeping the object level, accumulating clusters from each view
  bool turnArmInwards(std::string arm_name, sensor_msgs::PointCloud2 &object_in_hand)
  {

    geometry_msgs::PoseStamped current_pose = mech_interface_.getGripperPose(arm_name, "torso_lift_link");
    Eigen::Affine3d current_trans;
    tf::poseMsgToEigen(current_pose.pose, current_trans);
    Eigen::Affine3d desired_trans;

    int num_steps = 0;
    double rot_angle;
    std::vector<geometry_msgs::PoseStamped> desired_poses;
    Eigen::Vector3d z(0,0,1);

    //need to keep the object level--compute desired poses that try to turn the gripper inwards
    //find the angle that rotates the current gripper x-axis to the torso_lift_link x-z plane, pointed in the -x direction
    //but make the gripper swing towards the center
    //(for the right arm, -PI/4 moves +3PI/4, left arm reversed)
    Eigen::Vector3d x_axis = current_trans.rotation().col(0);
    rot_angle = atan2(x_axis[1], -x_axis[0]);
    //std::cout << "rot_angle before:" << rot_angle << std::endl;
    if(arm_name.compare("right_arm") == 0 && rot_angle < -M_PI/4) rot_angle += 2*M_PI;
    if(arm_name.compare("left_arm") == 0 && rot_angle > M_PI/4) rot_angle -= 2*M_PI;
    //std::cout << "x_axis:\n" << x_axis << std::endl;
    std::cout << "rot_angle after:" << rot_angle << std::endl;

    //compute the number of steps based on the max angle step to move before trying to snap a point cloud
    double max_angle_step = M_PI/8;
    num_steps = ceil(fabs(rot_angle) / max_angle_step) + 1;
    //std::cout << "num_steps:" << num_steps << std::endl;

    //compute the sequence of desired poses at which to snap point clouds by interpolating
    Eigen::Affine3d step_trans;
    geometry_msgs::PoseStamped desired_pose;
    for(int view_ind=0; view_ind<num_steps; view_ind++)
    {
      double frac;
      if(num_steps == 1) frac = 1;
      else frac = view_ind / float(num_steps-1);
      //std::cout << "frac:" << frac << std::endl;
      Eigen::AngleAxis<double> step_rotation(rot_angle*frac, z);
      step_trans = step_rotation * current_trans;
      for(int i=0; i<3; i++) step_trans(i,3) = current_trans(i,3);
      tf::poseEigenToMsg(step_trans, desired_pose.pose);
      desired_pose.header.frame_id = "torso_lift_link";
      desired_pose.header.stamp = ros::Time::now();
      desired_poses.push_back(desired_pose);
      //std::cout << "desired pose:" << desired_pose << std::endl;
    }
    Eigen::Affine3d towards_trans = step_trans;
    //now go PI towards the center if we haven't gone that far already
    if(fabs(rot_angle) < M_PI)
    {
      double additional_angle = -M_PI;
      if(arm_name.compare("left_arm") == 0) additional_angle = M_PI;
      int num_add_steps = ceil(fabs(additional_angle) / max_angle_step) + 1;
      for(double step=0; step<num_add_steps; step++)
      {
        double frac = step / float(num_add_steps-1);
        Eigen::AngleAxis<double> step_rotation(additional_angle*frac, z);
        step_trans = step_rotation * towards_trans;
        for(int i=0; i<3; i++) step_trans(i,3) = towards_trans(i,3);
        tf::poseEigenToMsg(step_trans, desired_pose.pose);
        desired_pose.header.frame_id = "torso_lift_link";
        desired_pose.header.stamp = ros::Time::now();
        desired_poses.push_back(desired_pose);
      }
    }

    //Gather points from multiple viewpoints while turning the object
    bool result;
    sensor_msgs::PointCloud2 cluster;
    for(size_t view_ind=0; view_ind<desired_poses.size(); view_ind++){

      //move the arm to the next viewpoint
      //std::cout << "moving to desired pose:" << desired_poses[view_ind] << std::endl;
      mech_interface_.moveArmToPoseCartesian(arm_name, desired_poses[view_ind],
                                             ros::Duration(1.0), .015, .09, .02, .16, .1);

      //point the head
      pointHead(arm_name);
      //ros::Duration(1.0).sleep();

      //broadcast the step we're on
      model_object_feedback_.phase = model_object_feedback_.ROTATING;
      model_object_feedback_.rotate_ind = view_ind;
      model_object_action_server_.publishFeedback(model_object_feedback_);

      //get the cluster of points in the hand
      result = callSegmentObjectInHand(arm_name, cluster);
      if(result != 0)
      {
        //combine resulting point cloud into object model
        if(object_in_hand.width == 0) object_in_hand = cluster;
        else
        {
          result = pcl::concatenatePointCloud(object_in_hand, cluster, object_in_hand);
          ROS_INFO("concatenate result: %d", result);
          object_in_hand.header.stamp = ros::Time::now();
        }

        //publish the new result
        ROS_INFO("frame_id: %s, num points: %d", object_in_hand.header.frame_id.c_str(), (int)object_in_hand.width);
        pub_cloud.publish(object_in_hand);
      }
    }

    return 1;
  }


  //! Translate the gripper along clear_move to get the object away from obstacles
  bool moveArmAway(std::string arm_name, geometry_msgs::Vector3Stamped clear_move)
  {
    //broadcast the phase we're in
    model_object_feedback_.phase = model_object_feedback_.CLEAR_MOVE;
    model_object_action_server_.publishFeedback(model_object_feedback_);

    mech_interface_.translateGripperCartesian(arm_name, clear_move, ros::Duration(5.0), .015, .09, .02, .16, .1);
    return 1;
  }

};

int main(int argc, char** argv)
{
  ros::init(argc, argv, "create_object_model_server");
  InHandObjectModeler node(ros::this_node::getName());
  ROS_INFO("create object model action server ready");
  ros::spin();
  return 0;
}
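/*
  Usage sketch (not part of the original file): a minimal actionlib client for the
  action server defined above, kept inside a comment because it has its own main()
  and would live in a separate node. The action name assumes the node runs under its
  default name, "create_object_model_server"; all goal field values below
  (clear_move, rotate_pose, rotate_object, keep_level, add_to_collision_map)
  are illustrative assumptions, not values taken from this package.

  #include <ros/ros.h>
  #include <actionlib/client/simple_action_client.h>
  #include <pr2_create_object_model/ModelObjectInHandAction.h>

  int main(int argc, char** argv)
  {
    ros::init(argc, argv, "model_object_in_hand_client");

    // Connect to the action server started by InHandObjectModeler
    actionlib::SimpleActionClient<pr2_create_object_model::ModelObjectInHandAction>
      client("/create_object_model_server/model_object_in_hand_action", true);
    client.waitForServer();

    pr2_create_object_model::ModelObjectInHandGoal goal;
    goal.arm_name = "right_arm";

    // Lift the gripper 15 cm to clear the table before segmenting (hypothetical values)
    goal.clear_move.header.frame_id = "base_link";
    goal.clear_move.vector.z = 0.15;

    // Bring the object in front of the camera, then rotate it to accumulate views
    goal.rotate_pose.header.frame_id = "torso_lift_link";
    goal.rotate_pose.pose.position.x = 0.7;
    goal.rotate_pose.pose.position.z = 0.4;
    goal.rotate_pose.pose.orientation.w = 1.0;
    goal.rotate_object = true;
    goal.keep_level = false;
    goal.add_to_collision_map = true;

    client.sendGoal(goal);
    client.waitForResult(ros::Duration(120.0));

    if (client.getState() == actionlib::SimpleClientGoalState::SUCCEEDED)
      ROS_INFO("modeled cluster with %u points, collision name %s",
               client.getResult()->cluster.width,
               client.getResult()->collision_name.c_str());
    return 0;
  }
*/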