tracking.cpp
Go to the documentation of this file.
00001 /*
00002  *  LICENSE : BSD - https://raw.github.com/yujinrobot/yujin_ocs/license/LICENSE
00003  */
00004 
00005 #include "yocs_ar_pair_tracking/tracking.hpp"
00006 #include <std_msgs/String.h>
00007 
00008 namespace yocs
00009 {
00010 
00011 ARPairTracking::ARPairTracking() {
00012   std::cout << "ARPairTracking constructor" << std::endl;
00013   init(); 
00014 }
00015 
00016 ARPairTracking::~ARPairTracking() {}
00017 
00018 bool ARPairTracking::init()
00019 {
00020   ros::NodeHandle pnh("~");
00021   pnh.param("publish_transforms", publish_transforms, ARPairTrackingDefaultParams::PUBLISH_TRANSFORMS);
00022   pnh.param("global_frame", global_frame_, ARPairTrackingDefaultParams::GLOBAL_FRAME); 
00023   pnh.param("marker_frame", marker_frame_, ARPairTrackingDefaultParams::MARKER_FRAME);
00024   pnh.param("base_frame", base_frame_, ARPairTrackingDefaultParams::BASE_FRAME);
00025 
00026   /*********************
00027   ** Publishers
00028   **********************/
00029   pub_initial_pose_ = pnh.advertise<geometry_msgs::PoseWithCovarianceStamped>(ARPairTrackingDefaultParams::PUB_INITIAL_POSE, 1);
00030   pub_relative_target_pose_ = pnh.advertise<geometry_msgs::PoseStamped>(ARPairTrackingDefaultParams::PUB_RELATIVE_TARGET_POSE, 1);
00031   pub_spotted_markers_ = pnh.advertise<std_msgs::String>(ARPairTrackingDefaultParams::PUB_SPOTTED_MARKERS, 1);
00032   sub_update_ar_pairs_ = pnh.subscribe<yocs_msgs::ARPairList>(ARPairTrackingDefaultParams::SUB_UPDATE_AR_PAIRS, 1,  &ARPairTracking::updateARPairsCB, this);
00033   
00034   ar_pairs_.clear();
00035   return true;
00036 }
00037 
00038 void ARPairTracking::addPair(const yocs_msgs::ARPair& pair)
00039 {
00040   ar_pairs_.push_back(pair);
00041 }
00042 
00043 void ARPairTracking::updateARPairsCB(const yocs_msgs::ARPairList::ConstPtr& msg)
00044 {
00045   unsigned int i;
00046   ROS_INFO("AR Pair Tracking : New Pairs Received");
00047 
00048   ar_pairs_.clear();
00049 
00050   for(i =0; i < msg->pairs.size(); i++)
00051   {
00052     ar_pairs_.push_back(msg->pairs[i]);
00053   }
00054 }
00055 
00056 void ARPairTracking::customCB(const ar_track_alvar_msgs::AlvarMarkers& spotted_markers, const std::vector<TrackedMarker> &tracked_markers)
00057 {
00058   ar_track_alvar_msgs::AlvarMarker left;
00059   ar_track_alvar_msgs::AlvarMarker right;
00060   bool both_spotted;
00061   unsigned int i;
00062 
00063   // More than one global Marker Pairs should not be detected in one scene
00064   for(i = 0; i < ar_pairs_.size(); i++)
00065   {
00066     both_spotted = spotMarkerPair(spotted_markers, ar_pairs_[i], left, right);
00067 
00068     // TODO: use confidence to incorporate covariance to global poses
00069     if(both_spotted) {
00070       computeRelativeRobotPose(ar_pairs_[i], tracked_markers, left, right);
00071     }
00072   }
00073 }
00074 
00075 bool ARPairTracking::spotMarkerPair(const ar_track_alvar_msgs::AlvarMarkers& spotted_markers, const yocs_msgs::ARPair& pair, ar_track_alvar_msgs::AlvarMarker& left, ar_track_alvar_msgs::AlvarMarker& right)
00076 {
00077   bool left_spotted = included(pair.left_id, spotted_markers, &left);
00078   bool right_spotted = included(pair.right_id, spotted_markers, &right);
00079 
00080   // should use a custom message instead of strings (e.g. bool left_spotted, bool right_spotted)
00081   std_msgs::String spotted;
00082   if(left_spotted && !right_spotted) {
00083     spotted.data = "left";
00084   } else if(!left_spotted && right_spotted) {
00085       spotted.data = "right";
00086   } else if(left_spotted && right_spotted) {
00087       spotted.data = "both";
00088   } else {
00089     spotted.data = "none";
00090   }
00091   pub_spotted_markers_.publish(spotted);
00092 
00093   return left_spotted && right_spotted;
00094 }
00095 
/**
 * Triangulate the robot pose from a fully-spotted marker pair and publish it.
 *
 * Using the camera-frame x/z positions of both markers and the pair's known
 * baseline, this computes a target pose offset from the pair's midpoint,
 * then chains transforms (global -> target -> camera -> base) through an
 * internal transformer to obtain and publish the robot's global pose.
 *
 * Publishes:
 *  - pub_relative_target_pose_ : target pose, expressed in marker_frame_
 *  - pub_initial_pose_         : robot pose in global_frame_ with fixed covariance
 *  - tf_brcaster_              : global -> base transform (only when publish_transforms)
 *
 * @param spotted_pair    : pair definition (baseline, target_offset, target_frame).
 * @param tracked_markers : tracking history — NOTE(review): currently unused in this body.
 * @param left, right     : the detected alvar markers of the pair.
 */
void ARPairTracking::computeRelativeRobotPose(const yocs_msgs::ARPair& spotted_pair, const std::vector<TrackedMarker>& tracked_markers, const ar_track_alvar_msgs::AlvarMarker& left, const ar_track_alvar_msgs::AlvarMarker& right)
{
  double baseline = spotted_pair.baseline;
  double target_pose_offset = spotted_pair.target_offset;
  std::string target_pose_frame = spotted_pair.target_frame;
  // Planar (x,z) marker coordinates; the atan2(x, z) calls below imply an
  // optical-frame convention with z forward — TODO confirm against the camera driver.
  double left_x = left.pose.pose.position.x;
  double left_z = left.pose.pose.position.z;
  double right_x = right.pose.pose.position.x;
  double right_z = right.pose.pose.position.z;

  // Ranges from the camera to each marker in the x-z plane.
  double left_d = std::sqrt(left_x*left_x + left_z*left_z);
  double right_d = std::sqrt(right_x*right_x + right_z*right_z);
  // b: distance along the baseline from the left marker to the foot of the
  // camera's perpendicular (two-range triangulation); a: the perpendicular
  // distance itself. No guard against b > left_d, so a can be NaN — it is
  // only used in the debug output below, so this does not affect the pose.
  double b = baseline/2 + (left_d*left_d-right_d*right_d)/(2*baseline);
  double a = std::sqrt(left_d*left_d - b*b);
  ROS_DEBUG_STREAM("AR Pairing Tracker : computing robot-marker relative pose");
  ROS_DEBUG_STREAM("AR Pairing Tracker :   left : [" << left_x << "," << left_z << "," << left_d << "]");
  ROS_DEBUG_STREAM("AR Pairing Tracker :   right: [" << right_x << "," << right_z << "," << right_d <<  "]");
  ROS_DEBUG_STREAM("AR Pairing Tracker :   1: " << baseline/2);
  ROS_DEBUG_STREAM("AR Pairing Tracker :   2: " << (left_d*left_d-right_d*right_d)/(2*baseline));
  ROS_DEBUG_STREAM("AR Pairing Tracker :   a=" << a << " b=" << b);

  // angle between the robot and the first marker (debug output only)
  double alpha = atan2(left_x, left_z);
  double alpha_degrees = alpha * (180.0) / M_PI;

  // alpah + beta is angle between the robot and the second marker (debug output only)
  double beta = atan2(right_x, right_z);
  double beta_degrees = beta * (180.0) / M_PI;

  // theta is the angle between the wall and the perpendicular in front of the robot
  double theta = atan2((left_z - right_z), (right_x - left_x));
  double theta_degrees = theta * (180.0) / M_PI;

  // Target point: midpoint of the pair, pushed back from the wall by
  // target_pose_offset along the wall's normal (rotated by theta).
  double target_x = left_x + (right_x - left_x) / 2 - target_pose_offset * sin(theta);
  double target_z = left_z + (right_z - left_z) / 2 - target_pose_offset * cos(theta);
  double target_heading = atan2(target_x, target_z);
  double target_heading_degrees = target_heading * 180.0 / M_PI;

  ROS_DEBUG_STREAM("AR Pairing Tracker :      alpha=" << alpha_degrees << "degrees");
  ROS_DEBUG_STREAM("AR Pairing Tracker :       beta=" << beta_degrees << "degrees");
  ROS_DEBUG_STREAM("AR Pairing Tracker :      theta=" << theta_degrees << "degrees");
  ROS_DEBUG_STREAM("AR Pairing Tracker : t_[x,z,h]=[" << target_x << "," << target_z << "," << target_heading_degrees << "deg]");

  // target_pose -> camera
  geometry_msgs::PoseStamped pose;
  pose.header.frame_id = marker_frame_;
  pose.pose.position.x = target_x;
  pose.pose.position.y = 0; 
  pose.pose.position.z = target_z;

  tf::Quaternion quat; 
  // NOTE(review): tf::Quaternion::setEuler takes (yaw, pitch, roll); confirm
  // theta is intended on that first axis for the optical-frame orientation.
  quat.setEuler(theta,0,0);

  pose.pose.orientation.x = quat.getX(); 
  pose.pose.orientation.y = quat.getY(); 
  pose.pose.orientation.z = quat.getZ(); 
  pose.pose.orientation.w = quat.getW(); 

  try
  {
    // get and set ar_link -> target_pose
    tf::StampedTransform tf_ar_target_pose;
    tf_listener_.lookupTransform(target_pose_frame, global_frame_, ros::Time(0), tf_ar_target_pose);
    tf_internal_.setTransform(tf_ar_target_pose);

    // set target_pose -> camera
    tf::StampedTransform tf_ar_camera;
    tf_ar_camera.child_frame_id_ = target_pose_frame;
    mtk::pose2tf(pose, tf_ar_camera);  // pose frame_id is set above (camera_rgb_optical_frame)
    tf_ar_camera.stamp_ = ros::Time::now();
    tf_internal_.setTransform(tf_ar_camera);

    // get and set camera -> base_footprint
    tf::StampedTransform tf_camera_base_footprint;
    tf_listener_.lookupTransform(base_frame_, marker_frame_, ros::Time(0), tf_camera_base_footprint);
    tf_internal_.setTransform(tf_camera_base_footprint);

    // get and publish ar_link -> base_footprint, composed from the three
    // transforms fed into the internal transformer above
    tf::StampedTransform tf_ar_base_footprint;
    tf_internal_.lookupTransform(global_frame_, base_frame_, ros::Time(0), tf_ar_base_footprint);
    boost::shared_ptr<geometry_msgs::PoseWithCovarianceStamped> pwcs(new geometry_msgs::PoseWithCovarianceStamped);

    pwcs->header.stamp = tf_ar_base_footprint.stamp_;
    pwcs->header.frame_id = global_frame_;

    geometry_msgs::PoseStamped ps;
    mtk::tf2pose(tf_ar_base_footprint, ps);
    pwcs->pose.pose = ps.pose;
    // Fixed covariance: 0.2 m std-dev on x and y, pi/12 rad (15 deg) on yaw
    // (indices per the row-major 6x6 pose covariance layout of geometry_msgs).
    pwcs->pose.covariance[6*0+0] = 0.2 * 0.2;
    pwcs->pose.covariance[6*1+1] = 0.2 * 0.2;
    pwcs->pose.covariance[6*5+5] = M_PI/12.0 * M_PI/12.0;

    // publish robot pose to nav watch dog
    pub_initial_pose_.publish(pwcs);
    pub_relative_target_pose_.publish(pose);

    // only for use in standalone mode with a 3d sensor (no robot).
    if(publish_transforms) {
      tf_brcaster_.sendTransform(tf_ar_base_footprint);
    }
  }
  catch (tf::TransformException const &ex)
  {
    ROS_WARN_STREAM("TF error: " << ex.what());
  }
}
00202 
00203 } // yocs namespace


yocs_ar_pair_tracking
Author(s): Daniel Stonier, Jihoon Lee, Jorge Santos Simon
autogenerated on Thu Jun 6 2019 21:47:30