IndividualMarkers.cpp
/*
  Software License Agreement (BSD License)

  Copyright (c) 2012, Scott Niekum
  All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:

  * Redistributions of source code must retain the above copyright
  notice, this list of conditions and the following disclaimer.
  * Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the following
  disclaimer in the documentation and/or other materials provided
  with the distribution.
  * Neither the name of the Willow Garage nor the names of its
  contributors may be used to endorse or promote products derived
  from this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  POSSIBILITY OF SUCH DAMAGE.

  author: Scott Niekum
*/


#include "CvTestbed.h"
#include "MarkerDetector.h"
#include "Shared.h"
#include <cv_bridge/CvBridge.h>
#include <ar_track_alvar/AlvarMarker.h>
#include <ar_track_alvar/AlvarMarkers.h>
#include <tf/transform_listener.h>

namespace gm=geometry_msgs;
namespace ata=ar_track_alvar;

typedef pcl::PointXYZRGB ARPoint;
typedef pcl::PointCloud<ARPoint> ARCloud;

using namespace alvar;
using namespace std;
using boost::make_shared;

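// Shared node state. Camera and MarkerDetector come from the Alvar library;
// judging from the *100 and /100 conversions in the callbacks below, Alvar
// pose translations are kept in centimeters and quaternions are stored w-first.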
bool init=true;
Camera *cam;
IplImage *capture_;
sensor_msgs::CvBridge bridge_;
image_transport::Subscriber cam_sub_;
ros::Subscriber cloud_sub_;
ros::Publisher arMarkerPub_;
ros::Publisher rvizMarkerPub_;
ros::Publisher rvizMarkerPub2_;
ar_track_alvar::AlvarMarkers arPoseMarkers_;
visualization_msgs::Marker rvizMarker_;
tf::TransformListener *tf_listener;
tf::TransformBroadcaster *tf_broadcaster;
MarkerDetector<MarkerData> marker_detector;

double marker_size;
double max_new_marker_error;
double max_track_error;
std::string cam_image_topic;
std::string cam_info_topic;
std::string output_frame;


//Debugging utility function
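// Publishes 'cloud' as a SPHERE_LIST marker (namespace "3dpts") on the
// ARmarker_points topic so the points can be inspected in rviz. 'color'
// selects cyan (1), magenta (2), or yellow (3); 'rad' is the sphere radius
// in meters; the marker expires after one second.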
void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad)
{
  visualization_msgs::Marker rvizMarker;

  rvizMarker.header.frame_id = frame;
  rvizMarker.header.stamp = ros::Time::now();
  rvizMarker.id = id;
  rvizMarker.ns = "3dpts";

  rvizMarker.scale.x = rad;
  rvizMarker.scale.y = rad;
  rvizMarker.scale.z = rad;

  rvizMarker.type = visualization_msgs::Marker::SPHERE_LIST;
  rvizMarker.action = visualization_msgs::Marker::ADD;

  if(color==1){
    rvizMarker.color.r = 0.0f;
    rvizMarker.color.g = 1.0f;
    rvizMarker.color.b = 1.0f;
    rvizMarker.color.a = 1.0;
  }
  if(color==2){
    rvizMarker.color.r = 1.0f;
    rvizMarker.color.g = 0.0f;
    rvizMarker.color.b = 1.0f;
    rvizMarker.color.a = 1.0;
  }
  if(color==3){
    rvizMarker.color.r = 1.0f;
    rvizMarker.color.g = 1.0f;
    rvizMarker.color.b = 0.0f;
    rvizMarker.color.a = 1.0;
  }

  gm::Point p;
  for(int i=0; i<cloud->points.size(); i++){
    p.x = cloud->points[i].x;
    p.y = cloud->points[i].y;
    p.z = cloud->points[i].z;
    rvizMarker.points.push_back(p);
  }

  rvizMarker.lifetime = ros::Duration (1.0);
  rvizMarkerPub2_.publish (rvizMarker);
}

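// Debugging utility: publishes three ARROW markers (namespace "arrow") whose
// directions are the columns of 'mat', i.e. the axes of a marker frame rooted
// at 'start'. The color advances with each axis, so color=1 draws the x axis
// red, y green, and z blue.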
void drawArrow(gm::Point start, btMatrix3x3 mat, string frame, int color, int id)
{
  visualization_msgs::Marker rvizMarker;

  rvizMarker.header.frame_id = frame;
  rvizMarker.header.stamp = ros::Time::now();
  rvizMarker.id = id;
  rvizMarker.ns = "arrow";

  rvizMarker.scale.x = 0.01;
  rvizMarker.scale.y = 0.01;
  rvizMarker.scale.z = 0.1;

  rvizMarker.type = visualization_msgs::Marker::ARROW;
  rvizMarker.action = visualization_msgs::Marker::ADD;

  for(int i=0; i<3; i++){
    rvizMarker.points.clear();
    rvizMarker.points.push_back(start);
    gm::Point end;
    end.x = start.x + mat[0][i];
    end.y = start.y + mat[1][i];
    end.z = start.z + mat[2][i];
    rvizMarker.points.push_back(end);
    rvizMarker.id += 10*i;
    rvizMarker.lifetime = ros::Duration (1.0);

    if(color==1){
      rvizMarker.color.r = 1.0f;
      rvizMarker.color.g = 0.0f;
      rvizMarker.color.b = 0.0f;
      rvizMarker.color.a = 1.0;
    }
    if(color==2){
      rvizMarker.color.r = 0.0f;
      rvizMarker.color.g = 1.0f;
      rvizMarker.color.b = 0.0f;
      rvizMarker.color.a = 1.0;
    }
    if(color==3){
      rvizMarker.color.r = 0.0f;
      rvizMarker.color.g = 0.0f;
      rvizMarker.color.b = 1.0f;
      rvizMarker.color.a = 1.0;
    }
    color += 1;

    rvizMarkerPub2_.publish (rvizMarker);
  }
}

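// Refines an Alvar marker pose using depth data: fits a plane to the cloud
// points inside the marker outline, takes the inlier centroid as the position,
// and recovers the orientation from two corner pairs (one spanning the marker
// x direction, one spanning y), falling back to the alternate pair when a
// corner has NaN depth. On success the result is written into 'p' (translation
// in cm, quaternion stored w-first) and 0 is returned; -1 is returned if both
// corner pairs are unusable or the orientation/frame extraction fails.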
int PlaneFitPoseImprovement(int id, const ARCloud &corners_3D, ARCloud::Ptr selected_points, const ARCloud &cloud, Pose &p){

  ata::PlaneFitResult res = ata::fitPlane(selected_points);
  gm::PoseStamped pose;
  pose.header.stamp = cloud.header.stamp;
  pose.header.frame_id = cloud.header.frame_id;
  pose.pose.position = ata::centroid(*res.inliers);

  draw3dPoints(selected_points, cloud.header.frame_id, 1, id, 0.005);

  //Get 2 points that point forward in marker x direction
  int i1,i2;
  if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
     isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z))
    {
      if(isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z) ||
         isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
        {
          return -1;
        }
      else{
        i1 = 1;
        i2 = 2;
      }
    }
  else{
    i1 = 0;
    i2 = 3;
  }

  //Get 2 points that point forward in marker y direction
  int i3,i4;
  if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
     isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z))
    {
      if(isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z) ||
         isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
        {
          return -1;
        }
      else{
        i3 = 2;
        i4 = 3;
      }
    }
  else{
    i3 = 1;
    i4 = 0;
  }

  ARCloud::Ptr orient_points(new ARCloud());
  orient_points->points.push_back(corners_3D[i1]);
  draw3dPoints(orient_points, cloud.header.frame_id, 3, id+1000, 0.008);

  orient_points->clear();
  orient_points->points.push_back(corners_3D[i2]);
  draw3dPoints(orient_points, cloud.header.frame_id, 2, id+2000, 0.008);

  int succ;
  succ = ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], pose.pose.orientation);
  if(succ < 0) return -1;

  btMatrix3x3 mat;
  succ = ata::extractFrame(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], mat);
  if(succ < 0) return -1;

  drawArrow(pose.pose.position, mat, cloud.header.frame_id, 1, id);

  p.translation[0] = pose.pose.position.x * 100.0;
  p.translation[1] = pose.pose.position.y * 100.0;
  p.translation[2] = pose.pose.position.z * 100.0;
  p.quaternion[1] = pose.pose.orientation.x;
  p.quaternion[2] = pose.pose.orientation.y;
  p.quaternion[3] = pose.pose.orientation.z;
  p.quaternion[0] = pose.pose.orientation.w;

  return 0;
}

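// Runs the Alvar detector/tracker on 'image' and, for each detected marker,
// looks up the 3D positions of its four corners in the organized cloud,
// rotates the corner order to match the marker's reported orientation, filters
// the cloud down to the marker's interior pixels, and refines the marker pose
// with PlaneFitPoseImprovement.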
void GetMarkerPoses(IplImage *image, ARCloud &cloud) {

  //Detect and track the markers
  if (marker_detector.Detect(image, cam, true, false, max_new_marker_error,
                             max_track_error, CVSEQ, true))
    {
      printf("\n--------------------------\n\n");
      for (size_t i=0; i<marker_detector.markers->size(); i++)
        {
          vector<cv::Point> pixels;
          Marker *m = &((*marker_detector.markers)[i]);
          int id = m->GetId();
          cout << "******* ID: " << id << endl;

          int resol = m->GetRes();
          int ori = m->ros_orientation;

          PointDouble pt1, pt2, pt3, pt4;
          pt4 = m->ros_marker_points_img[0];
          pt3 = m->ros_marker_points_img[resol-1];
          pt1 = m->ros_marker_points_img[(resol*resol)-resol];
          pt2 = m->ros_marker_points_img[(resol*resol)-1];

          m->ros_corners_3D[0] = cloud(pt1.x, pt1.y);
          m->ros_corners_3D[1] = cloud(pt2.x, pt2.y);
          m->ros_corners_3D[2] = cloud(pt3.x, pt3.y);
          m->ros_corners_3D[3] = cloud(pt4.x, pt4.y);

          if(ori >= 0 && ori < 4){
            if(ori != 0){
              std::rotate(m->ros_corners_3D.begin(), m->ros_corners_3D.begin() + ori, m->ros_corners_3D.end());
            }
          }
          else
            ROS_ERROR("FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id);

          //Get the 3D marker points
          BOOST_FOREACH (const PointDouble& p, m->ros_marker_points_img)
            pixels.push_back(cv::Point(p.x, p.y));
          ARCloud::Ptr selected_points = ata::filterCloud(cloud, pixels);

          //Use the kinect data to find a plane and pose for the marker
          int ret = PlaneFitPoseImprovement(i, m->ros_corners_3D, selected_points, cloud, m->pose);
        }
    }
}


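// Point cloud callback: converts the organized RGB cloud into an OpenCV image,
// runs detection and depth-based pose refinement, then for every tracked
// marker broadcasts a tf frame named ar_marker_<id> relative to the camera,
// publishes a colored cube on visualization_marker for rviz, and accumulates
// an AlvarMarker (pose expressed in output_frame) into ar_pose_marker.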
void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg)
{
  sensor_msgs::ImagePtr image_msg(new sensor_msgs::Image);

  if(init){
    CvSize sz_ = cvSize (cam->x_res, cam->y_res);
    capture_ = cvCreateImage (sz_, IPL_DEPTH_8U, 4);
    init = false;
  }

  //If we've already gotten the cam info, then go ahead
  if(cam->getCamInfo_){
    //Convert cloud to PCL
    ARCloud cloud;
    pcl::fromROSMsg(*msg, cloud);

    //Get an OpenCV image from the cloud
    pcl::toROSMsg (cloud, *image_msg);
    image_msg->header.stamp = msg->header.stamp;
    image_msg->header.frame_id = msg->header.frame_id;

    //Convert the image
    capture_ = bridge_.imgMsgToCv (image_msg, "rgb8");

    //Use the kinect to improve the pose
    Pose ret_pose;
    GetMarkerPoses(capture_, cloud);

    try{
      tf::StampedTransform CamToOutput;
      try{
        tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, ros::Duration(1.0));
        tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, CamToOutput);
      }
      catch (tf::TransformException ex){
        ROS_ERROR("%s",ex.what());
      }

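      // For each tracked marker: convert the Alvar pose (cm, w-first
      // quaternion) back into a metric tf transform, broadcast it, and build
      // the rviz and output messages.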
      arPoseMarkers_.markers.clear ();
      for (size_t i=0; i<marker_detector.markers->size(); i++)
        {
          //Get the pose relative to the camera
          int id = (*(marker_detector.markers))[i].GetId();
          Pose p = (*(marker_detector.markers))[i].pose;

          double px = p.translation[0]/100.0;
          double py = p.translation[1]/100.0;
          double pz = p.translation[2]/100.0;
          double qx = p.quaternion[1];
          double qy = p.quaternion[2];
          double qz = p.quaternion[3];
          double qw = p.quaternion[0];

          btQuaternion rotation (qx,qy,qz,qw);
          btVector3 origin (px,py,pz);
          btTransform t (rotation, origin);
          btVector3 markerOrigin (0, 0, 0);
          btTransform m (btQuaternion::getIdentity (), markerOrigin);
          btTransform markerPose = t * m; // marker pose in the camera frame

          //Publish the transform from the camera to the marker
          std::string markerFrame = "ar_marker_";
          std::stringstream out;
          out << id;
          std::string id_string = out.str();
          markerFrame += id_string;
          tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str());
          tf_broadcaster->sendTransform(camToMarker);

          //Create the rviz visualization messages
          tf::poseTFToMsg (markerPose, rvizMarker_.pose);
          rvizMarker_.header.frame_id = image_msg->header.frame_id;
          rvizMarker_.header.stamp = image_msg->header.stamp;
          rvizMarker_.id = id;

          rvizMarker_.scale.x = 1.0 * marker_size/100.0;
          rvizMarker_.scale.y = 1.0 * marker_size/100.0;
          rvizMarker_.scale.z = 0.2 * marker_size/100.0;
          rvizMarker_.ns = "basic_shapes";
          rvizMarker_.type = visualization_msgs::Marker::CUBE;
          rvizMarker_.action = visualization_msgs::Marker::ADD;
          switch (id)
            {
            case 0:
              rvizMarker_.color.r = 0.0f;
              rvizMarker_.color.g = 0.0f;
              rvizMarker_.color.b = 1.0f;
              rvizMarker_.color.a = 1.0;
              break;
            case 1:
              rvizMarker_.color.r = 1.0f;
              rvizMarker_.color.g = 0.0f;
              rvizMarker_.color.b = 0.0f;
              rvizMarker_.color.a = 1.0;
              break;
            case 2:
              rvizMarker_.color.r = 0.0f;
              rvizMarker_.color.g = 1.0f;
              rvizMarker_.color.b = 0.0f;
              rvizMarker_.color.a = 1.0;
              break;
            case 3:
              rvizMarker_.color.r = 0.0f;
              rvizMarker_.color.g = 0.5f;
              rvizMarker_.color.b = 0.5f;
              rvizMarker_.color.a = 1.0;
              break;
            case 4:
              rvizMarker_.color.r = 0.5f;
              rvizMarker_.color.g = 0.5f;
              rvizMarker_.color.b = 0.0;
              rvizMarker_.color.a = 1.0;
              break;
            default:
              rvizMarker_.color.r = 0.5f;
              rvizMarker_.color.g = 0.0f;
              rvizMarker_.color.b = 0.5f;
              rvizMarker_.color.a = 1.0;
              break;
            }
          rvizMarker_.lifetime = ros::Duration (1.0);
          rvizMarkerPub_.publish (rvizMarker_);

          //Get the pose of the tag in the camera frame, then the output frame (usually torso)
          tf::Transform tagPoseOutput = CamToOutput * markerPose;

          //Create the pose marker messages
          ar_track_alvar::AlvarMarker ar_pose_marker;
          tf::poseTFToMsg (tagPoseOutput, ar_pose_marker.pose.pose);
          ar_pose_marker.header.frame_id = output_frame;
          ar_pose_marker.header.stamp = image_msg->header.stamp;
          ar_pose_marker.id = id;
          arPoseMarkers_.markers.push_back (ar_pose_marker);
        }
      arMarkerPub_.publish (arPoseMarkers_);
    }
    catch (sensor_msgs::CvBridgeException & e){
      ROS_ERROR ("Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ());
    }
  }
}

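// Entry point: reads the six command-line parameters, configures the detector
// and camera model, advertises the output topics, and subscribes to the point
// cloud topic that drives getPointCloudCallback.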
int main(int argc, char *argv[])
{
  ros::init (argc, argv, "marker_detect");
  ros::NodeHandle n;

  if(argc < 7){
    std::cout << std::endl;
    cout << "Not enough arguments provided." << endl;
    cout << "Usage: ./individualMarkers <marker size in cm> <max new marker error> <max track error> <cam image topic> <cam info topic> <output frame>" << endl;
    std::cout << std::endl;
    return 0;
  }

  // Get params from command line
  marker_size = atof(argv[1]);
  max_new_marker_error = atof(argv[2]);
  max_track_error = atof(argv[3]);
  cam_image_topic = argv[4];
  cam_info_topic = argv[5];
  output_frame = argv[6];
  marker_detector.SetMarkerSize(marker_size);

  cam = new Camera(n, cam_info_topic);
  tf_listener = new tf::TransformListener(n);
  tf_broadcaster = new tf::TransformBroadcaster();
  arMarkerPub_ = n.advertise < ar_track_alvar::AlvarMarkers > ("ar_pose_marker", 0);
  rvizMarkerPub_ = n.advertise < visualization_msgs::Marker > ("visualization_marker", 0);
  rvizMarkerPub2_ = n.advertise < visualization_msgs::Marker > ("ARmarker_points", 0);

  //Give tf a chance to catch up before the camera callback starts asking for transforms
  ros::Duration(1.0).sleep();
  ros::spinOnce();

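  // Note: despite the name, cam_image_topic is subscribed to as a
  // sensor_msgs/PointCloud2 (e.g. a Kinect's registered cloud), not a plain
  // image; the RGB image is reconstructed from the cloud inside the callback.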
  ROS_INFO ("Subscribing to image topic");
  cloud_sub_ = n.subscribe(cam_image_topic, 1, &getPointCloudCallback);

  ros::spin ();

  return 0;
}

