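/*
 * visual_servo_node.cpp
 *
 * PR2 visual-servoing pick-and-place node: it watches the colored tape
 * markers on the gripper and a tabletop object through a synchronized RGB-D
 * stream, and runs a state machine that servos the gripper over the object,
 * grasps it, relocates it, and releases it on contact.
 */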
#include <ros/ros.h>
#include <ros/package.h>

#include <std_msgs/Header.h>
#include <std_msgs/String.h>
#include <geometry_msgs/PointStamped.h>
#include <geometry_msgs/QuaternionStamped.h>
#include <geometry_msgs/TwistStamped.h>
#include <geometry_msgs/Pose2D.h>
#include <geometry_msgs/Point.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/image_encodings.h>

#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>

#include <tf/transform_listener.h>
#include <tf/transform_datatypes.h>

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl_ros/transforms.h>

#include <boost/shared_ptr.hpp>

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <vector>
#include <deque>
#include <queue>
#include <string>
#include <sstream>
#include <iostream>
#include <fstream>
#include <utility>
#include <stdexcept>
#include <algorithm>
#include <float.h>
#include <math.h>
#include <time.h>
#include <cstdlib>

#include <pr2_controllers_msgs/Pr2GripperCommandAction.h>
#include <pr2_gripper_sensor_msgs/PR2GripperGrabAction.h>
#include <pr2_gripper_sensor_msgs/PR2GripperEventDetectorAction.h>
#include <pr2_gripper_sensor_msgs/PR2GripperReleaseAction.h>
#include <actionlib/client/simple_action_client.h>

#include <visual_servo/VisualServoTwist.h>
#include <visual_servo/VisualServoPose.h>
#include <std_srvs/Empty.h>
#include "visual_servo.cpp"

#include <tabletop_pushing/point_cloud_segmentation.h>

#define DEBUG_MODE 0
#define VISUAL_SERVO_TYPE 0

#define PERCEPTION_COLOR_SEGMENTATION 0
#define PERCEPTION_POINT_CLOUD 1
#define PERCEPTION PERCEPTION_POINT_CLOUD

// phases of the pick-and-place state machine
#define INIT 0
#define SETTLE 1
#define INIT_OBJS 2
#define INIT_HAND 3
#define INIT_DESIRED 4
#define POSE_CONTR 5
#define POSE_CONTR_2 6
#define VS_CONTR_1 7
#define VS_CONTR_2 8
#define GRAB 9
#define RELOCATE 10
#define DESCEND_INIT 11
#define DESCEND 12
#define RELEASE 13
#define FINISH 14
#define TERM 15

#define fmod(a,b) ((a) - (float)((int)((a)/(b))*(b)))

typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::PointCloud2> MySyncPolicy;
typedef pcl::PointCloud<pcl::PointXYZ> XYZPointCloud;

typedef actionlib::SimpleActionClient<pr2_gripper_sensor_msgs::PR2GripperGrabAction> GrabClient;
typedef actionlib::SimpleActionClient<pr2_gripper_sensor_msgs::PR2GripperEventDetectorAction> EventDetectorClient;
typedef actionlib::SimpleActionClient<pr2_gripper_sensor_msgs::PR2GripperReleaseAction> ReleaseClient;
typedef actionlib::SimpleActionClient<pr2_controllers_msgs::Pr2GripperCommandAction> GripperClient;

using boost::shared_ptr;
using geometry_msgs::TwistStamped;
using geometry_msgs::PoseStamped;
using geometry_msgs::QuaternionStamped;
using geometry_msgs::Point;
using visual_servo::VisualServoTwist;
using visual_servo::VisualServoPose;

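/**
 * GripperTape remembers where the three tape markers on the gripper sit
 * relative to the first marker (tape0), plus a fixed offset from the gripper
 * origin to tape0, so the expected tape locations can be reconstructed from a
 * single gripper position.
 */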
class GripperTape
{
public:
  GripperTape()
  {
  }

  void setTapeRelLoc(pcl::PointXYZ tape0, pcl::PointXYZ tape1, pcl::PointXYZ tape2)
  {
    tape1_loc_.x = tape1.x - tape0.x;
    tape1_loc_.y = tape1.y - tape0.y;
    tape1_loc_.z = tape1.z - tape0.z;
    tape2_loc_.x = tape2.x - tape0.x;
    tape2_loc_.y = tape2.y - tape0.y;
    tape2_loc_.z = tape2.z - tape0.z;
  }

  void setOffset(pcl::PointXYZ offset)
  {
    tape0_loc_ = offset;
  }

  std::vector<pcl::PointXYZ> getTapePoseFromXYZ(pcl::PointXYZ orig)
  {
    std::vector<pcl::PointXYZ> pts; pts.clear();
    pcl::PointXYZ zero = addPointXYZ(orig, tape0_loc_);
    pcl::PointXYZ one = addPointXYZ(zero, tape1_loc_);
    pcl::PointXYZ two = addPointXYZ(zero, tape2_loc_);

    pts.push_back(zero);
    pts.push_back(one);
    pts.push_back(two);

    return pts;
  }

private:
  pcl::PointXYZ tape0_loc_;
  pcl::PointXYZ tape1_loc_;
  pcl::PointXYZ tape2_loc_;

  pcl::PointXYZ addPointXYZ(pcl::PointXYZ a, pcl::PointXYZ b)
  {
    pcl::PointXYZ r;
    r.x = a.x + b.x;
    r.y = a.y + b.y;
    r.z = a.z + b.z;
    return r;
  }
};

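/**
 * VisualServoNode drives the PR2 pick-and-place routine: it synchronizes
 * color, depth, and point-cloud input, segments the target object and the
 * tape markers on the gripper, and steps through a state machine that servos
 * the gripper over the object, grasps it, relocates it, and releases it.
 */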
class VisualServoNode
{
public:
  VisualServoNode(ros::NodeHandle &n) :
    n_(n), n_private_("~"),
    image_sub_(n, "color_image_topic", 1),
    depth_sub_(n, "depth_image_topic", 1),
    cloud_sub_(n, "point_cloud_topic", 1),
    sync_(MySyncPolicy(15), image_sub_, depth_sub_, cloud_sub_),
    it_(n), tf_(), have_depth_data_(false), camera_initialized_(false),
    desire_points_initialized_(false), PHASE(INIT), is_gripper_initialized_(false), gripper_pose_estimated_(false),
    is_detected_(false), place_detection_(false)
  {
    vs_ = shared_ptr<VisualServo>(new VisualServo(JACOBIAN_TYPE_PSEUDO));
    tf_ = shared_ptr<tf::TransformListener>(new tf::TransformListener());

    // action clients are created lazily in initializeService(); keep the
    // pointers null so the destructor stays safe if that never happens
    gripper_client_ = NULL;
    grab_client_ = NULL;
    release_client_ = NULL;
    detector_client_ = NULL;

    n_private_.param("display_wait_ms", display_wait_ms_, 3);

    std::string default_optical_frame = "/head_mount_kinect_rgb_optical_frame";
    n_private_.param("optical_frame", optical_frame_, default_optical_frame);
    std::string default_workspace_frame = "/torso_lift_link";
    n_private_.param("workspace_frame", workspace_frame_, default_workspace_frame);
    std::string cam_info_topic_def = "/kinect_head/rgb/camera_info";
    n_private_.param("cam_info_topic", cam_info_topic_, cam_info_topic_def);

    n_private_.param("target_hue_value", target_hue_value_, 10);
    n_private_.param("target_hue_threshold", target_hue_threshold_, 20);
    n_private_.param("gripper_tape_hue_value", gripper_tape_hue_value_, 180);
    n_private_.param("gripper_tape_hue_threshold", gripper_tape_hue_threshold_, 50);
    n_private_.param("default_sat_bot_value", default_sat_bot_value_, 40);
    n_private_.param("default_sat_top_value", default_sat_top_value_, 40);
    n_private_.param("default_val_value", default_val_value_, 200);
    n_private_.param("min_contour_size", min_contour_size_, 10.0);

    n_private_.param("jacobian_type", jacobian_type_, JACOBIAN_TYPE_INV);
    n_private_.param("vs_err_term_thres", vs_err_term_threshold_, 0.001);
    n_private_.param("pose_servo_z_offset", pose_servo_z_offset_, 0.045);
    n_private_.param("place_z_velocity", place_z_velocity_, -0.025);
    n_private_.param("gripper_tape1_offset_x", tape1_offset_x_, 0.02);
    n_private_.param("gripper_tape1_offset_y", tape1_offset_y_, -0.025);
    n_private_.param("gripper_tape1_offset_z", tape1_offset_z_, 0.07);

    sync_.registerCallback(&VisualServoNode::sensorCallback, this);

#ifdef EXPERIMENT
    chatter_pub_ = n_.advertise<std_msgs::String>("chatter", 100);
    alarm_ = ros::Time(0);
#endif

    gripper_tape_ = GripperTape();
    gripper_tape_.setOffset(pcl::PointXYZ(tape1_offset_x_, tape1_offset_y_, tape1_offset_z_));
    ROS_INFO("Initialization 0: Node init & Register Callback Done");
  }

  ~VisualServoNode()
  {
    delete gripper_client_;
    delete grab_client_;
    delete release_client_;
    delete detector_client_;
  }

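  /**
   * Synchronized color/depth/point-cloud callback: caches the current frames,
   * transforms the cloud into the workspace frame, updates the gripper tape
   * features, runs one step of the state machine, and optionally draws the
   * debug display.
   */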
  void sensorCallback(const sensor_msgs::ImageConstPtr& img_msg,
      const sensor_msgs::ImageConstPtr& depth_msg, const sensor_msgs::PointCloud2ConstPtr& cloud_msg)
  {
    ros::Time start = ros::Time::now();

    if (!camera_initialized_)
    {
      sensor_msgs::CameraInfoConstPtr info =
        ros::topic::waitForMessage<sensor_msgs::CameraInfo>(cam_info_topic_, n_, ros::Duration(3.0));
      if (!info)
      {
        ROS_WARN("No camera info received on %s yet; skipping this frame", cam_info_topic_.c_str());
        return;
      }
      cam_info_ = *info;
      camera_initialized_ = true;
      vs_->setCamInfo(cam_info_);
      ROS_INFO("Initialization: Camera Info Done");
    }

    cv::Mat color_frame, depth_frame;
    cv_bridge::CvImagePtr color_cv_ptr = cv_bridge::toCvCopy(img_msg);
    cv_bridge::CvImagePtr depth_cv_ptr = cv_bridge::toCvCopy(depth_msg);

    color_frame = color_cv_ptr->image;
    depth_frame = depth_cv_ptr->image;

    XYZPointCloud cloud;
    pcl::fromROSMsg(*cloud_msg, cloud);
    tf_->waitForTransform(workspace_frame_, cloud.header.frame_id,
        cloud.header.stamp, ros::Duration(0.9));
    pcl_ros::transformPointCloud(workspace_frame_, cloud, cloud, *tf_);
    cur_camera_header_ = img_msg->header;
    cur_color_frame_ = color_frame;
    cur_orig_color_frame_ = color_frame.clone();
    cur_depth_frame_ = depth_frame.clone();
    cur_point_cloud_ = cloud;

    gripper_pose_estimated_ = updateGripperFeatures();
    executeStatemachine();

#define DISPLAY 1
#ifdef DISPLAY
    setDisplay();
#endif
#ifdef EXPERIMENT
    ros::Time end = ros::Time::now();
    std::stringstream ss;
    ss << "Time it took is " << (end - start).toSec();
    std_msgs::String msg;
    msg.data = ss.str();
    chatter_pub_.publish(msg);
#endif
  }

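  /**
   * Runs one step of the pick-and-place state machine whenever the
   * non-blocking sleep has expired. The phases roughly follow:
   * INIT -> INIT_OBJS -> SETTLE/INIT_HAND -> INIT_DESIRED -> POSE_CONTR ->
   * visual servoing (VS_CONTR_1/2) -> GRAB -> RELOCATE -> RELEASE -> DESCEND
   * -> FINISH -> TERM.
   */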
  void executeStatemachine()
  {
    if (sleepNonblock())
    {
      temp_draw_.clear();
      switch (PHASE)
      {
        case INIT:
        {
          reset();
          // closed-gripper fingertip distance; stays -1 if it was never measured
          float temp = -1;
#ifndef PROFILE
          ROS_INFO("Initializing Services and Robot Configuration");
          initializeService();

          // reset the arm to its initial configuration
          std_srvs::Empty e;
          i_client_.call(e);

          // measure the fingertip distance with the gripper fully closed
          close();
          temp = getTipDistance();
          close_gripper_dist_ = temp;
          ROS_INFO("Close Gripper Tip distance: %.4f", temp);
          sleep(2);

          open();
#endif
          if (temp >= 0)
          {
            PHASE = INIT_OBJS;
            setSleepNonblock(5.0);
          }
        }
        break;
        case INIT_OBJS:
        {
          bool t = false;
          ROS_INFO("Phase Init Obj: Getting Objects without the arm");
#if PERCEPTION==PERCEPTION_COLOR_SEGMENTATION
          t = initializeDesired(desired_);
#elif PERCEPTION==PERCEPTION_POINT_CLOUD
          t = initializeDesired(po_);
#endif
          if (t)
          {
#if PERCEPTION==PERCEPTION_POINT_CLOUD
            ROS_INFO(">> Found %d objects", (int)(po_.size()));
#endif
            if (is_gripper_initialized_)
              PHASE = INIT_DESIRED;
            else
              PHASE = SETTLE;
          }
          else
          {
            ROS_WARN("Failed. Retrying initialization in 2 seconds");
            setSleepNonblock(2.0);
          }
        }
        break;
        case SETTLE:
        {
          ROS_INFO("Phase Settle: Move Gripper to Init Position");

          visual_servo::VisualServoPose p_srv = formPoseService(0.62, 0.05, -0.1);
          if (p_client_.call(p_srv))
          {
            setSleepNonblock(3.0);
            PHASE = INIT_HAND;
          }
          else
          {
            ROS_WARN("Failed to put the hand in initial configuration");
          }
        }
        break;
        case INIT_HAND:
        {
          ROS_INFO("Phase Init Hand: Remembering Blue Tape Positions");

          if (tape_features_.size() == 3)
          {
            pcl::PointXYZ tape0 = tape_features_.at(0).workspace;
            pcl::PointXYZ tape1 = tape_features_.at(1).workspace;
            pcl::PointXYZ tape2 = tape_features_.at(2).workspace;

            gripper_tape_.setTapeRelLoc(tape0, tape1, tape2);
            temp_draw_.push_back(tape_features_.at(0).image);
            temp_draw_.push_back(tape_features_.at(1).image);
            temp_draw_.push_back(tape_features_.at(2).image);

            is_gripper_initialized_ = true;
            PHASE = INIT_DESIRED;
            setSleepNonblock(0.25);
          }
          else
          {
            ROS_WARN("Cannot find the hand. Please reinitialize the hand");
            PHASE = SETTLE;
          }
        }
        break;

        case INIT_DESIRED:
        {
          ROS_INFO("Phase Initialize Desired Points");
          bool t = false;
#if PERCEPTION==PERCEPTION_COLOR_SEGMENTATION
          t = setGoalForAnObject(goal_, goal_p_, desired_);
#elif PERCEPTION==PERCEPTION_POINT_CLOUD
          if (po_.size() > 0)
            t = setGoalForAnObject(goal_, goal_p_, po_.front());
          else
          {
            ROS_INFO("No more objects to be processed. Terminating");
            PHASE = TERM;
          }
#endif
          if (t)
          {
            PHASE = POSE_CONTR;
            ROS_INFO("Phase %d, Moving to next phase in 3.0 seconds", PHASE);
          }
          else
          {
            ROS_WARN("Failed. Retrying initialization in 2 seconds");
            setSleepNonblock(2.0);
          }
        }
        break;

        case POSE_CONTR:
        {
          ROS_INFO("Phase Pose Control");
          float x = goal_p_.pose.position.x;
          float y = goal_p_.pose.position.y;
          float z = goal_p_.pose.position.z + pose_servo_z_offset_;
          visual_servo::VisualServoPose p_srv = formPoseService(x, y, z);
          ROS_INFO("Move Arm to Pose [%f %f %f]", x, y, z);
          if (p_client_.call(p_srv))
          {
            int code = p_srv.response.result;
            ROS_INFO(">> Phase %d, Pose Control: Code [%d]", POSE_CONTR, code);
            if (0 == code)
            {
              ROS_INFO("Phase %d, Moving to next phase in 3.0 seconds", PHASE);
              setSleepNonblock(5.0);
#ifdef VISUAL_SERVO_TYPE
              PHASE = VS_CONTR_2;
#else
              PHASE = VS_CONTR_1;
#endif
              ROS_INFO("Start Visual Servoing");
            }
          }
        }
        break;

        case VS_CONTR_1:
        {
          if (tape_features_.size() == goal_.size())
          {
            std::vector<cv::Point> few_pixels_up; few_pixels_up.clear();
            float offset = (tape_features_.at(0).image.y - goal_.at(0).image.y)/2;
            for (unsigned int i = 0; i < goal_.size(); i++)
            {
              cv::Point p = goal_.at(i).image;
              p.y += offset;
              few_pixels_up.push_back(p);
            }
            cur_goal_ = vs_->CVPointToVSXYZ(cur_point_cloud_, cur_depth_frame_, few_pixels_up);

            visual_servo::VisualServoTwist v_srv = getTwist(cur_goal_);

            if (v_srv.request.error < vs_err_term_threshold_)
            {
              PHASE = VS_CONTR_2;
            }

#ifndef PROFILE
            v_client_.call(v_srv);
#endif
          }
        }
        break;

        case VS_CONTR_2:
        {
          if (!place_detection_)
          {
            place();
            place_detection_ = true;
          }

#ifdef VISUAL_SERVO_TYPE
          std::vector<PoseStamped> goals, feats;
          goals.push_back(goal_p_);
          feats.push_back(tape_features_p_);
          visual_servo::VisualServoTwist v_srv = vs_->getTwist(goals, feats, 0);
          v_srv.request.twist.twist.linear.z /= 4;
          v_srv.request.error = getError(goal_, tape_features_);
#else
          visual_servo::VisualServoTwist v_srv = getTwist(goal_);
#endif

          if (is_detected_ || v_srv.request.error < vs_err_term_threshold_)
          {
            object_z_ = tape_features_.front().workspace.z;
            PHASE = GRAB;
            sendZeroVelocity();
          }
          else
          {
            v_client_.call(v_srv);
          }
        }
        break;
        case GRAB:
        {
          sendZeroVelocity();
          if (grab())
          {
            setSleepNonblock(2.0);
            float temp = getTipDistance();
            if (temp < close_gripper_dist_ + 0.01)
            {
              ROS_WARN(">> FAILED AT GRABBING (%f < %f). TERMINATING", temp, close_gripper_dist_ + 0.01);
              PHASE = TERM;
            }
            else
              PHASE = RELOCATE;
          }
          else
          {
            ROS_WARN(">> FAILED AT GRABBING. TERMINATING");
            PHASE = TERM;
          }
        }
        break;
        case RELOCATE:
        {
          ROS_INFO("Phase Relocate. Move arm to new pose");
          visual_servo::VisualServoPose p_srv = formPoseService(0.5, 0.3, 0.10 + object_z_);
          if (p_client_.call(p_srv))
          {
            setSleepNonblock(2.0);
            PHASE = RELEASE;
          }
        }
        break;
        case DESCEND_INIT:
        {
          ROS_INFO("Phase Descend Init: start the event detector and register its callback");

          place();
          PHASE = DESCEND;
          ROS_INFO("Phase Descend: descending slowly until ground contact is detected");
        }
        // intentional fall-through into DESCEND
        case DESCEND:
        {
          if (is_detected_)
          {
            sendZeroVelocity();
            setSleepNonblock(1.5);
            PHASE = FINISH;
          }
          else
          {
            visual_servo::VisualServoTwist v_srv = VisualServoMsg::createTwistMsg(1.0, 0, 0, place_z_velocity_, 0, 0, 0);
            v_client_.call(v_srv);
          }
        }
        break;
        case RELEASE:
        {
          release();
          setSleepNonblock(0.5);
          PHASE = DESCEND;
        }
        break;
        case FINISH:
        {
          visual_servo::VisualServoPose p_srv = formPoseService(0.5, 0.3, 0.1 + object_z_);
          if (p_client_.call(p_srv))
          {
            setSleepNonblock(2.0);
            PHASE = TERM;
          }
        }
        break;
        default:
        {
          open();

#if PERCEPTION==PERCEPTION_POINT_CLOUD
          if (po_.size() > 0)
            po_.pop_front();
          else
          {
            ros::shutdown();
          }
#endif

          ROS_INFO("Routine Ended.");
          std::cout << "Press [Enter] if you want to do it again: ";
          while (!ros::isShuttingDown())
          {
            int c = std::cin.get();
            if (c == '\n')
              break;
          }

          printf("Reset the arm and repeat Pick and Place in 3 seconds\n");

          std_srvs::Empty e;
          i_client_.call(e);

          reset();
          setSleepNonblock(3.0);
          PHASE = INIT_OBJS;
        }
        break;
      }
    }
  }

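  /**
   * Looks up the transform between the left gripper's two fingertip frames and
   * returns the absolute y-distance between them (an estimate of the gripper
   * opening), or -1.0 if the transform lookup fails.
   */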
  float getTipDistance()
  {
    try
    {
      std::string l_frame = "/l_gripper_l_finger_tip_link";
      std::string r_frame = "/l_gripper_r_finger_tip_link";

      tf::StampedTransform transform;
      ros::Time now = ros::Time(0);
      tf::TransformListener listener;

      listener.waitForTransform(r_frame, l_frame, now, ros::Duration(2.0));
      listener.lookupTransform(l_frame, r_frame, now, transform);
      tf::Vector3 out = transform.getOrigin();

      return out.y() < 0 ? -out.y() : out.y();
    }
    catch (tf::TransformException& e)
    {
      ROS_WARN_STREAM(e.what());
    }
    return -1.0;
  }

  void reset()
  {
    goal_.clear();
    cur_goal_.clear();
    tape_features_.clear();
    is_detected_ = false;
  }

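  /**
   * Draws the current phase, segmented object points, goal markers, and
   * detected tape features onto the color frame and shows it in an OpenCV
   * window for debugging.
   */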
#ifdef DISPLAY
  void setDisplay()
  {
    char phase_char[10];
    sprintf(phase_char, "Phase: %d", PHASE);
    std::string phase_str = phase_char;
    cv::putText(cur_orig_color_frame_, phase_str, cv::Point(529, 18), 2, 0.60, cv::Scalar(255, 255, 255), 1);

    if (po_.size() > 0)
    {
      XYZPointCloud c = po_.at(0).cloud;
      for (unsigned int j = 0; j < c.size(); j++)
      {
        cv::Point p = vs_->projectPointIntoImage(c.at(j), workspace_frame_, optical_frame_, tf_);
        cv::circle(cur_orig_color_frame_, p, 1, cv::Scalar(255, 0, 255), 1);
      }
      for (unsigned int i = 1; i < po_.size(); i++)
      {
        XYZPointCloud c = po_.at(i).cloud;
        for (unsigned int j = 0; j < c.size(); j++)
        {
          cv::Point p = vs_->projectPointIntoImage(c.at(j), workspace_frame_, optical_frame_, tf_);
          cv::circle(cur_orig_color_frame_, p, 1, cv::Scalar(120, 0, 255/po_.size()*i), 1);
        }
      }
    }

    if (goal_.size() > 0)
    {
      VSXYZ d = desired_;
      cv::putText(cur_orig_color_frame_, "+", d.image, 2, 0.5, cv::Scalar(255, 0, 255), 1);
    }

    for (unsigned int i = 0; i < goal_.size(); i++)
    {
      cv::Point p = goal_.at(i).image;
      cv::putText(cur_orig_color_frame_, "x", p, 2, 0.5, cv::Scalar(100*i, 0, 110*(2-i)), 1);
    }

    for (unsigned int i = 0; i < cur_goal_.size(); i++)
    {
      cv::Point p = cur_goal_.at(i).image;
      cv::circle(cur_orig_color_frame_, p, 3, cv::Scalar(100*i, 0, 110*(2-i)), 1);
    }

    for (unsigned int i = 0; i < tape_features_.size(); i++)
    {
      cv::Point p = tape_features_.at(i).image;
      cv::circle(cur_orig_color_frame_, p, 2, cv::Scalar(100*i, 0, 110*(2-i)), 2);
      if (gripper_pose_estimated_)
      {
        cv::circle(cur_orig_color_frame_, p, 3, cv::Scalar(255, 140, 0), 2);
      }
    }

    if (PHASE == INIT_DESIRED)
    {
      cv::rectangle(cur_orig_color_frame_, original_box_, cv::Scalar(0, 255, 255));
    }

    if (PHASE == VS_CONTR_1 || PHASE == VS_CONTR_2)
    {
      float e = getError(goal_, tape_features_);
      if (PHASE == VS_CONTR_1)
        e = getError(cur_goal_, tape_features_);
      std::stringstream stm;
      stm << "Error: " << e;
      cv::Scalar color;
      if (e > vs_err_term_threshold_)
        color = cv::Scalar(50, 50, 255);
      else
        color = cv::Scalar(50, 255, 50);
      cv::putText(cur_orig_color_frame_, stm.str(), cv::Point(5, 12), 2, 0.5, color, 1);
    }

    for (unsigned int i = 0; i < temp_draw_.size(); i++)
    {
      cv::Point p = temp_draw_.at(i);
      cv::circle(cur_orig_color_frame_, p, 3, cv::Scalar(127, 255, 0), 2);
    }

    cv::imshow("in", cur_orig_color_frame_);
    cv::waitKey(display_wait_ms_);
  }
#endif

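  /**
   * Arms the gripper event detector so that finger-side impact, slip, or
   * acceleration while lowering the object triggers placeDoneCB() and sets
   * is_detected_.
   */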
  void place()
  {
    pr2_gripper_sensor_msgs::PR2GripperEventDetectorGoal place_goal;
    place_goal.command.trigger_conditions = place_goal.command.FINGER_SIDE_IMPACT_OR_SLIP_OR_ACC;
    place_goal.command.acceleration_trigger_magnitude = 2.6;
    place_goal.command.slip_trigger_magnitude = .005;

    is_detected_ = false;
    ROS_INFO("Waiting for object placement contact...");
    detector_client_->sendGoal(place_goal,
        boost::bind(&VisualServoNode::placeDoneCB, this, _1, _2));
  }

  void placeDoneCB(const actionlib::SimpleClientGoalState& state,
      const pr2_gripper_sensor_msgs::PR2GripperEventDetectorResultConstPtr& result)
  {
    if (state == actionlib::SimpleClientGoalState::SUCCEEDED)
      ROS_INFO("[Place ActionLib] Place Success");
    else
      ROS_WARN("[Place ActionLib] Place Failure");
    is_detected_ = true;
  }

  void close()
  {
    pr2_controllers_msgs::Pr2GripperCommandGoal cmd;
    cmd.command.position = 0.0;
    cmd.command.max_effort = -1.0;

    ROS_INFO("Sending close goal");
    gripper_client_->sendGoal(cmd);
    gripper_client_->waitForResult(ros::Duration(20.0));
    if (gripper_client_->getState() != actionlib::SimpleClientGoalState::SUCCEEDED)
      ROS_WARN("[Gripper Action Lib] Gripper Close Failed");
  }

  void open()
  {
    pr2_controllers_msgs::Pr2GripperCommandGoal cmd;
    cmd.command.position = 0.12;
    cmd.command.max_effort = -1.0;

    ROS_INFO("Sending open goal");
    gripper_client_->sendGoal(cmd);
    gripper_client_->waitForResult(ros::Duration(20.0));
    if (gripper_client_->getState() != actionlib::SimpleClientGoalState::SUCCEEDED)
      ROS_WARN("[Gripper Action Lib] Gripper Open Failed");
  }

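  // Asks the gripper sensor controller to close with its contact/slip "grab"
  // behavior; returns true when the action reports success.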
  bool grab()
  {
    bool ret;
    pr2_gripper_sensor_msgs::PR2GripperGrabGoal grip;
    grip.command.hardness_gain = 0.02;

    ROS_INFO("Sending grab goal");
    grab_client_->sendGoal(grip);
    grab_client_->waitForResult(ros::Duration(20.0));
    ret = grab_client_->getState() == actionlib::SimpleClientGoalState::SUCCEEDED;
    if (ret)
      ROS_INFO("Successfully completed Grab");
    else
      ROS_INFO("Grab Failed");
    return ret;
  }

  void release()
  {
    pr2_gripper_sensor_msgs::PR2GripperReleaseGoal place;
    place.command.event.trigger_conditions = place.command.event.FINGER_SIDE_IMPACT_OR_SLIP_OR_ACC;
    place.command.event.acceleration_trigger_magnitude = 2.5;
    place.command.event.slip_trigger_magnitude = .005;

    ROS_INFO("Waiting for object placement contact...");
    release_client_->sendGoal(place,
        boost::bind(&VisualServoNode::releaseDoneCB, this, _1, _2));
  }

  void releaseDoneCB(const actionlib::SimpleClientGoalState& state,
      const pr2_gripper_sensor_msgs::PR2GripperReleaseResultConstPtr& result)
  {
    if (state == actionlib::SimpleClientGoalState::SUCCEEDED)
      ROS_INFO("[Contact ActionLib] Release Success");
    else
      ROS_WARN("[Contact ActionLib] Release Failure");
    is_detected_ = true;

    sendZeroVelocity();
  }

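  // Non-blocking sleep helpers: setSleepNonblock() arms an alarm time and
  // sleepNonblock() reports whether that alarm has expired, so the sensor
  // callback keeps running while a phase waits.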
  void setSleepNonblock(float time)
  {
    ros::Time alarm = ros::Time::now() + ros::Duration(time);
    alarm_ = alarm;
  }

  bool sleepNonblock()
  {
#ifdef PROFILE
    return true;
#else
    ros::Duration d = ros::Time::now() - alarm_;
    if (d.toSec() > 0)
    {
      alarm_ = ros::Time(0);
      return true;
    }
    return false;
#endif
  }

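  /**
   * Color-segmentation perception path: segments the target hue in the current
   * frame, picks the largest contour, and averages the highest points of the
   * corresponding cloud region to produce the desired object location.
   */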
  bool initializeDesired(VSXYZ &vDesire)
  {
    cv::Mat mask_t = colorSegment(cur_orig_color_frame_.clone(), target_hue_value_ - target_hue_threshold_, target_hue_value_ + target_hue_threshold_, 50, 100, 25, 100);
    cv::Mat element_t = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));

    cv::morphologyEx(mask_t, mask_t, cv::MORPH_OPEN, element_t);
    cv::dilate(mask_t, mask_t, element_t);

    cv::Mat in = mask_t.clone();
    std::vector<std::vector<cv::Point> > contours; contours.clear();
    cv::findContours(in, contours, cv::RETR_CCOMP, CV_CHAIN_APPROX_NONE);

    if (contours.size() < 1)
    {
      ROS_WARN("No target Found");
      return false;
    }

    unsigned int cont_max = contours.at(0).size();
    unsigned int cont_max_ind = 0;
    for (unsigned int i = 1; i < contours.size(); i++)
    {
      if (contours.at(i).size() > cont_max)
      {
        cont_max = contours.at(i).size();
        cont_max_ind = i;
      }
    }

    std::vector<cv::Point> ps = contours.at(cont_max_ind);
    cv::Rect r = cv::boundingRect(ps);
    original_box_ = r;

#ifdef DISPLAY
    cv::drawContours(cur_orig_color_frame_, contours, cont_max_ind, cv::Scalar(255, 255, 255));
#endif

    int size = r.width * (r.height * 0.5 + 1);
    float temp[size]; int ind = 0;

    for (int i = r.x; i < r.width + r.x; i++)
    {
      for (int j = r.y; j < r.height * 0.5 + r.y; j++)
      {
        uchar t = mask_t.at<uchar>(j, i);
        if (t > 0)
        {
          pcl::PointXYZ p = cur_point_cloud_.at(i, j);
          if (!isnan(p.z) && p.z > -1.5)
          {
            temp[ind++] = p.z;
            cv::circle(cur_orig_color_frame_, cv::Point(i, j), 1, cv::Scalar(127, 255, 0), 1);
          }
        }
      }
    }
    if (ind == 0)
    {
      ROS_WARN("No valid depth points inside the target contour");
      return false;
    }
    std::sort(temp, temp + ind);
    float max_z = temp[ind - 1];

    float thresh = max_z - 0.2 * (max_z - temp[0]);

    float mean_x = 0, mean_y = 0, mean_z = 0;
    int quant = 0;
    for (int i = r.x; i < r.width + r.x; i++)
    {
      for (int j = r.y; j < r.height * 0.5 + r.y; j++)
      {
        pcl::PointXYZ p = cur_point_cloud_.at(i, j);
        if (!(isnan(p.x) || isnan(p.y) || isnan(p.z)))
        {
          if (p.z > thresh)
          {
            quant++;
            mean_x += p.x;
            mean_y += p.y;
            mean_z += p.z;
          }
        }
      }
    }
    if (quant == 0)
      return false;
    pcl::PointXYZ desired(mean_x / quant, mean_y / quant, mean_z / quant);
    vDesire = vs_->point3DToVSXYZ(desired, tf_);
    return true;
  }

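  /**
   * Point-cloud perception path: fits the table plane, downsamples the object
   * cloud, and clusters it into proto-objects using
   * tabletop_pushing::PointCloudSegmentation. Returns false if no cluster is
   * found.
   */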
  bool initializeDesired(tabletop_pushing::ProtoObjects &pos)
  {
    shared_ptr<tabletop_pushing::PointCloudSegmentation> pcs_ = shared_ptr<tabletop_pushing::PointCloudSegmentation>(
        new tabletop_pushing::PointCloudSegmentation(tf_));
    pcs_->min_table_z_ = -1.0;
    pcs_->max_table_z_ = 1.0;
    pcs_->min_workspace_x_ = -0.5;
    pcs_->max_workspace_x_ = 1.00;
    pcs_->min_workspace_z_ = -0.5;
    pcs_->max_workspace_z_ = 0.5;
    pcs_->num_downsamples_ = 2;
    pcs_->table_ransac_thresh_ = 0.015;
    pcs_->table_ransac_angle_thresh_ = 30.0;
    pcs_->cluster_tolerance_ = 0.25;
    pcs_->cloud_diff_thresh_ = 0.01;
    pcs_->min_cluster_size_ = 100;
    pcs_->max_cluster_size_ = 2500;
    pcs_->voxel_down_res_ = 0.005;
    pcs_->cloud_intersect_thresh_ = 0.005;
    pcs_->hull_alpha_ = 0.1;
    pcs_->use_voxel_down_ = true;

    try
    {
      XYZPointCloud objs_cloud, plane_cloud;

      Eigen::Vector4f table_centroid_ = pcs_->getTablePlane(cur_point_cloud_, objs_cloud, plane_cloud,
          false);
      double min_workspace_z_ = table_centroid_[2];

      XYZPointCloud objects_cloud_down = pcs_->downsampleCloud(objs_cloud);

      tabletop_pushing::ProtoObjects po = pcs_->clusterProtoObjects(objects_cloud_down);

      if (po.size() == 0)
        return false;
      pos = po;
      return true;
    }
    catch (ros::Exception& e)
    {
      ROS_WARN("FindTabletopObjects failed. Try to have only one object on the table");
      return false;
    }
  }

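  /**
   * Computes a grasp target from a proto-object by averaging its highest
   * points, then expands that target into the three expected gripper-tape
   * positions (goal) and a goal pose with a fixed grasp orientation (goal_p).
   */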
  bool setGoalForAnObject(std::vector<VSXYZ> &goal, PoseStamped &goal_p, tabletop_pushing::ProtoObject po)
  {
    pcl::PointCloud<pcl::PointXYZ> cloud = po.cloud;
    float max_z = -5000;
    for (unsigned int i = 0; i < cloud.size(); i++)
    {
      if (cloud.at(i).z > max_z)
      {
        max_z = cloud.at(i).z;
      }
    }
    float avg_x = 0, avg_y = 0, avg_z = 0;
    int num_avg = 0;

    float mag = max_z < 0 ? -max_z : max_z;
    float threshold = max_z - mag * 0.8;

    ROS_DEBUG("[%f vs %f]", threshold, max_z);
    for (unsigned int i = 0; i < cloud.size(); i++)
    {
      pcl::PointXYZ p = cloud.at(i);
      if (!isnan(p.z) && p.z > threshold)
      {
        if (!isnan(p.x) && !isnan(p.y))
        {
          avg_x += cloud.at(i).x;
          avg_y += cloud.at(i).y;
          avg_z += cloud.at(i).z;
          num_avg++;
        }
      }
    }
    pcl::PointXYZ avg_p = pcl::PointXYZ(avg_x / num_avg, avg_y / num_avg, avg_z / num_avg);
    VSXYZ v = vs_->point3DToVSXYZ(avg_p, tf_);
    return setGoalForAnObject(goal, goal_p, v);
  }

  bool setGoalForAnObject(std::vector<VSXYZ> &goal, PoseStamped &goal_p, VSXYZ desire)
  {
    if (isnan(desire.workspace.x) || isnan(desire.workspace.y) ||
        isnan(desire.workspace.z))
    {
      ROS_ERROR("Desire Values have NaN. Unable to Proceed Further");
      return false;
    }

    std::vector<pcl::PointXYZ> temp_features = gripper_tape_.getTapePoseFromXYZ(desire.workspace);
    std::vector<VSXYZ> desired_vsxyz = vs_->Point3DToVSXYZ(temp_features, tf_);
    goal = desired_vsxyz;

    ROS_DEBUG("Goal size: %d", (int)goal.size());

    goal_p = vs_->VSXYZToPoseStamped(goal.front());

    goal_p.pose.orientation.x = -0.4582;
    goal_p.pose.orientation.z = 0.8889;
    goal_p.pose.orientation.w = 0;

    if (JACOBIAN_TYPE_AVG == jacobian_type_)
      return vs_->setDesiredInteractionMatrix(desired_vsxyz);
    return true;
  }

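  // Connects to the visual-servo twist/pose/init services and to the PR2
  // gripper-sensor action servers used for grasping and event detection.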
  void initializeService()
  {
    ROS_DEBUG(">> Hooking Up The Service");
    ros::NodeHandle n;
    v_client_ = n.serviceClient<visual_servo::VisualServoTwist>("vs_twist");
    p_client_ = n.serviceClient<visual_servo::VisualServoPose>("vs_pose");
    i_client_ = n.serviceClient<std_srvs::Empty>("vs_init");

    gripper_client_ = new GripperClient("l_gripper_sensor_controller/gripper_action", true);
    while (!gripper_client_->waitForServer(ros::Duration(5.0))) {
      ROS_INFO("Waiting for the l_gripper_sensor_controller/gripper_action action server to come up");
    }
    grab_client_ = new GrabClient("l_gripper_sensor_controller/grab", true);
    while (!grab_client_->waitForServer(ros::Duration(5.0))) {
      ROS_INFO("Waiting for the l_gripper_sensor_controller/grab action server to come up");
    }
    release_client_ = new ReleaseClient("l_gripper_sensor_controller/release", true);
    while (!release_client_->waitForServer(ros::Duration(5.0))) {
      ROS_INFO("Waiting for the l_gripper_sensor_controller/release action server to come up");
    }
    detector_client_ = new EventDetectorClient("l_gripper_sensor_controller/event_detector", true);
    while (!detector_client_->waitForServer(ros::Duration(5.0))) {
      ROS_INFO("Waiting for the l_gripper_sensor_controller/event_detector action server to come up");
    }
  }

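  // Builds a VisualServoTwist request toward the desired tape positions; when
  // the current tape positions are only estimated from forward kinematics the
  // linear velocity is halved and rotation is suppressed.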
  visual_servo::VisualServoTwist getTwist(std::vector<VSXYZ> desire)
  {
    visual_servo::VisualServoTwist srv;
    srv = vs_->getTwist(desire, tape_features_);

    if (gripper_pose_estimated_)
    {
      float scale = 0.5;
      srv.request.twist.twist.linear.x *= scale;
      srv.request.twist.twist.linear.y *= scale;
      srv.request.twist.twist.linear.z *= scale;

      srv.request.twist.twist.angular.x = 0;
      srv.request.twist.twist.angular.y = 0;
      srv.request.twist.twist.angular.z = 0;
    }
    srv.request.error = getError(desire, tape_features_);
    return srv;
  }

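  /**
   * Locates the three tape markers on the gripper by color segmentation and
   * image moments; if they cannot be seen, their positions are estimated from
   * the forward-kinematics pose plus the last observed tape offsets. Returns
   * true when the pose had to be estimated rather than observed.
   */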
  bool updateGripperFeatures()
  {
    bool estimated = false;
    int default_tape_num = 3;

    PoseStamped p;
    p.header.frame_id = "/l_gripper_tool_frame";
    p.pose.orientation.w = 1;
    tf_->transformPose(workspace_frame_, p, p);
    Point fkpp = p.pose.position;

    cv::Mat tape_mask = colorSegment(cur_color_frame_.clone(), gripper_tape_hue_value_, gripper_tape_hue_threshold_);

    cv::Mat element_b = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
    cv::morphologyEx(tape_mask, tape_mask, cv::MORPH_OPEN, element_b);

    std::vector<cv::Moments> ms = findMoments(tape_mask, cur_color_frame_);

    std::vector<cv::Point> pts = getMomentCoordinates(ms);

    tape_features_ = vs_->CVPointToVSXYZ(cur_point_cloud_, cur_depth_frame_, pts);
    if ((int)tape_features_.size() != default_tape_num)
    {
      if ((int)v_fk_diff_.size() != default_tape_num)
      {
        ROS_WARN("Cannot find tape in image and do not have enough historical data to interpolate");
        tape_features_p_ = PoseStamped();
        return false;
      }
      estimated = true;
      std::vector<pcl::PointXYZ> estimated_pose;
      estimated_pose.resize(default_tape_num);
      tape_features_.resize(default_tape_num);
      for (unsigned int i = 0; i < v_fk_diff_.size(); i++)
      {
        estimated_pose.at(i).x = v_fk_diff_.at(i).x + fkpp.x;
        estimated_pose.at(i).y = v_fk_diff_.at(i).y + fkpp.y;
        estimated_pose.at(i).z = v_fk_diff_.at(i).z + fkpp.z;
      }
      tape_features_ = vs_->Point3DToVSXYZ(estimated_pose, tf_);
    }
    tape_features_p_ = vs_->VSXYZToPoseStamped(tape_features_.front());

    QuaternionStamped q;
    q.quaternion.w = 1;
    q.header.frame_id = "/l_gripper_tool_frame";
    tf_->transformQuaternion(workspace_frame_, q, q);
    tape_features_p_.pose.orientation = q.quaternion;

    if (!estimated)
    {
      if (v_fk_diff_.size() == 0)
        v_fk_diff_.resize(default_tape_num);
      for (unsigned int i = 0; i < tape_features_.size() && i < v_fk_diff_.size(); i++)
      {
        v_fk_diff_.at(i).x = tape_features_.at(i).workspace.x - fkpp.x;
        v_fk_diff_.at(i).y = tape_features_.at(i).workspace.y - fkpp.y;
        v_fk_diff_.at(i).z = tape_features_.at(i).workspace.z - fkpp.z;
      }
    }
    return estimated;
  }

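  // Sum of squared camera-frame x/y distances between matching feature pairs;
  // returns 1 when fewer than three pairs are available.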
  float getError(std::vector<VSXYZ> a, std::vector<VSXYZ> b)
  {
    float e(0.0);
    unsigned int size = a.size() <= b.size() ? a.size() : b.size();

    if (size < 3)
      return 1;

    for (unsigned int i = 0; i < size; i++)
    {
      pcl::PointXYZ a_c = a.at(i).camera;
      pcl::PointXYZ b_c = b.at(i).camera;
      e += pow(a_c.x - b_c.x, 2) + pow(a_c.y - b_c.y, 2);
    }
    return e;
  }

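  /**
   * Orders the three tape-blob centroids: the centroid at which the vectors to
   * the other two are closest to perpendicular comes first, followed by the
   * other two in a consistent winding chosen by the sign of their cross
   * product.
   */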
  std::vector<cv::Point> getMomentCoordinates(std::vector<cv::Moments> ms)
  {
    std::vector<cv::Point> ret;
    ret.clear();
    if (ms.size() == 3) {
      double centroids[3][2];
      for (int i = 0; i < 3; i++) {
        cv::Moments m0 = ms.at(i);
        double x0, y0;
        x0 = m0.m10/m0.m00;
        y0 = m0.m01/m0.m00;
        centroids[i][0] = x0;
        centroids[i][1] = y0;
      }

      cv::Mat vect = cv::Mat::zeros(3, 2, CV_32F);
      vect.at<float>(0,0) = centroids[0][0] - centroids[1][0];
      vect.at<float>(0,1) = centroids[0][1] - centroids[1][1];
      vect.at<float>(1,0) = centroids[0][0] - centroids[2][0];
      vect.at<float>(1,1) = centroids[0][1] - centroids[2][1];
      vect.at<float>(2,0) = centroids[1][0] - centroids[2][0];
      vect.at<float>(2,1) = centroids[1][1] - centroids[2][1];

      double angle[3];
      angle[0] = fabs(vect.row(0).dot(vect.row(1)));
      angle[1] = fabs(vect.row(0).dot(vect.row(2)));
      angle[2] = fabs(vect.row(1).dot(vect.row(2)));

      double min = angle[0];
      int one = 0;
      for (int i = 0; i < 3; i++)
      {
        if (angle[i] < min)
        {
          min = angle[i];
          one = i;
        }
      }

      int a = one == 0 ? 1 : 0;
      int b = one == 2 ? 1 : 2;

      double vX0, vY0, vX1, vY1, result;
      vX0 = centroids[a][0] - centroids[one][0];
      vY0 = centroids[a][1] - centroids[one][1];
      vX1 = centroids[b][0] - centroids[one][0];
      vY1 = centroids[b][1] - centroids[one][1];
      cv::Point pto(centroids[one][0], centroids[one][1]);
      cv::Point pta(centroids[a][0], centroids[a][1]);
      cv::Point ptb(centroids[b][0], centroids[b][1]);

      result = vX1*vY0 - vX0*vY1;
      ret.push_back(pto);
      if (result >= 0) {
        ret.push_back(ptb);
        ret.push_back(pta);
      }
      else {
        ret.push_back(pta);
        ret.push_back(ptb);
      }
    }
    return ret;
  }

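  /**
   * Finds contours in the binary mask and returns the moments of up to
   * max_num of the largest blobs (those whose area exceeds min_contour_size_).
   */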
  std::vector<cv::Moments> findMoments(cv::Mat in, cv::Mat &color_frame, unsigned int max_num = 3)
  {
    cv::Mat temp = in.clone();
    std::vector<std::vector<cv::Point> > contours; contours.clear();
    cv::findContours(temp, contours, cv::RETR_CCOMP, CV_CHAIN_APPROX_NONE);
    std::vector<cv::Moments> moments; moments.clear();

    for (unsigned int i = 0; i < contours.size(); i++) {
      cv::Moments m = cv::moments(contours[i]);
      if (m.m00 > min_contour_size_) {
        moments.push_back(m);

        if (moments.size() > max_num) {
          double small(moments.at(0).m00);
          unsigned int smallInd(0);
          for (unsigned int j = 1; j < moments.size(); j++) {
            if (moments.at(j).m00 < small) {
              small = moments.at(j).m00;
              smallInd = j;
            }
          }
          moments.erase(moments.begin() + smallInd);
        }
      }
    }
    return moments;
  }

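  // Convenience wrapper: segments a hue band of +/- threshold around hue using
  // the default saturation/value limits.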
  cv::Mat colorSegment(cv::Mat color_frame, int hue, int threshold)
  {
    return colorSegment(color_frame, hue - threshold, hue + threshold,
        default_sat_bot_value_, default_sat_top_value_, 40, default_val_value_);
  }

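  /**
   * Thresholds the frame in HSV space and returns a single-channel 0/255 mask.
   * Hue limits are in degrees and shifted by +360 so that a negative lower
   * bound (hue - threshold < 0) still matches hues just above zero;
   * saturation and value limits are on a 0-100 scale.
   */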
  cv::Mat colorSegment(cv::Mat color_frame, int _hue_n, int _hue_p, int _sat_n, int _sat_p, int _value_n, int _value_p)
  {
    cv::Mat temp(color_frame.clone());
    cv::cvtColor(temp, temp, CV_BGR2HSV);
    std::vector<cv::Mat> hsv;
    cv::split(temp, hsv);

    _hue_n = (_hue_n + 360);
    _hue_p = (_hue_p + 360);

    cv::Mat wm(color_frame.rows, color_frame.cols, CV_8UC1, cv::Scalar(0));
    for (int r = 0; r < temp.rows; r++)
    {
      uchar* workspace_row = wm.ptr<uchar>(r);
      for (int c = 0; c < temp.cols; c++)
      {
        int hue = 2*(int)hsv[0].at<uchar>(r, c) + 360;
        float sat = 0.392*(int)hsv[1].at<uchar>(r, c);
        float value = 0.392*(int)hsv[2].at<uchar>(r, c);

        if (_hue_n < hue && hue < _hue_p)
          if (_sat_n < sat && sat < _sat_p)
            if (_value_n < value && value < _value_p)
              workspace_row[c] = 255;
      }
    }

    return wm;
  }

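  // Sends an all-zero twist (the default-constructed request) to stop the arm.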
  bool sendZeroVelocity()
  {
    visual_servo::VisualServoTwist v_srv;

    v_srv.request.error = 1;
    return v_client_.call(v_srv);
  }

  visual_servo::VisualServoPose formPoseService(float px, float py, float pz)
  {
    return VisualServoMsg::createPoseMsg(px, py, pz, -0.4582, 0, 0.8889, 0);
  }

  void printMatrix(cv::Mat_<double> in)
  {
    for (int i = 0; i < in.rows; i++) {
      for (int j = 0; j < in.cols; j++) {
        printf("%+.5f\t", in(i, j));
      }
      printf("\n");
    }
  }

  void spin()
  {
    while (n_.ok())
    {
      ros::spinOnce();
    }
  }

protected:
  // ROS node handles and synchronized sensor input
  ros::NodeHandle n_;
  ros::NodeHandle n_private_;
  message_filters::Subscriber<sensor_msgs::Image> image_sub_;
  message_filters::Subscriber<sensor_msgs::Image> depth_sub_;
  message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub_;
  message_filters::Synchronizer<MySyncPolicy> sync_;
  image_transport::ImageTransport it_;
  sensor_msgs::CameraInfo cam_info_;
  shared_ptr<tf::TransformListener> tf_;

  // latest sensor data
  cv::Mat cur_color_frame_;
  cv::Mat cur_orig_color_frame_;
  cv::Mat cur_depth_frame_;
  cv::Mat cur_workspace_mask_;
  std_msgs::Header cur_camera_header_;
  XYZPointCloud cur_point_cloud_;

  // visual servo solver
  shared_ptr<VisualServo> vs_;

  // general node settings
  bool have_depth_data_;
  int display_wait_ms_;
  int num_downsamples_;
  std::string workspace_frame_;
  std::string optical_frame_;
  bool camera_initialized_;
  bool desire_points_initialized_;
  std::string cam_info_topic_;
  int tracker_count_;

  // color segmentation parameters
  int target_hue_value_;
  int target_hue_threshold_;
  int gripper_tape_hue_value_;
  int gripper_tape_hue_threshold_;
  int default_sat_bot_value_;
  int default_sat_top_value_;
  int default_val_value_;
  double min_contour_size_;

  // servoing parameters
  int jacobian_type_;
  double vs_err_term_threshold_;
  double pose_servo_z_offset_;
  double place_z_velocity_;
  double tape1_offset_x_;
  double tape1_offset_y_;
  double tape1_offset_z_;

  // current state-machine phase
  unsigned int PHASE;

  // servoing state: goals, tracked tape features, and FK fallback offsets
  cv::Mat desired_jacobian_;
  std::vector<VSXYZ> cur_goal_;
  std::vector<VSXYZ> goal_;
  PoseStamped goal_p_;
  std::vector<VSXYZ> tape_features_;
  PoseStamped tape_features_p_;
  VSXYZ desired_;
  cv::Mat K;
  std::vector<pcl::PointXYZ> v_fk_diff_;
  bool is_gripper_initialized_;
  bool gripper_pose_estimated_;

  // experiment logging and non-blocking sleep alarm
  ros::Publisher chatter_pub_;
  ros::Time alarm_;

  // visual servo services
  ros::ServiceClient v_client_;
  ros::ServiceClient p_client_;
  ros::ServiceClient i_client_;

  // gripper and gripper-sensor action clients
  GripperClient* gripper_client_;
  GrabClient* grab_client_;
  ReleaseClient* release_client_;
  EventDetectorClient* detector_client_;

  // display helpers
  std::vector<cv::Point> temp_draw_;
  cv::Rect original_box_;

  // placement detection and grasp bookkeeping
  bool is_detected_;
  bool place_detection_;
  float object_z_;
  float close_gripper_dist_;
  GripperTape gripper_tape_;

  // segmented proto-objects remaining to be processed
  tabletop_pushing::ProtoObjects po_;
};

int main(int argc, char** argv)
{
  srand(time(NULL));
  ros::init(argc, argv, "visual_servo_node");

  log4cxx::LoggerPtr my_logger = log4cxx::Logger::getLogger(ROSCONSOLE_DEFAULT_NAME);
  my_logger->setLevel(ros::console::g_level_lookup[ros::console::levels::Info]);

  ros::NodeHandle n;
  VisualServoNode vs_node(n);
  vs_node.spin();
  return 0;
}