#include <iostream>

#include <time.h>
#include <Eigen/Eigen>
#include <Eigen/LU>
#include <tf/tf.h>
#include <tf/transform_listener.h>
#include <ros/ros.h>
#include <ros/package.h>
#include <opencv2/core/core.hpp>
#include <image_transport/image_transport.h>
#include <sensor_msgs/PointCloud2.h>

#include <tf/message_filter.h>
#include <message_filters/subscriber.h>

#include <math.h>
#include <cv_bridge/CvBridge.h>

#include "CoordinateFrame.h"

#include <AR/gsub.h>
#include <AR/video.h>
#include <AR/param.h>

#include "ar_kinect/ar_kinect.h"
#include "ar_kinect/object.h"

using namespace std;

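// Convert an OpenCV 3D point into a PCL point (coordinates are copied verbatim).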
inline pcl::PointXYZ getPointPCLFromCV(CvPoint3D32f pcv) {
    pcl::PointXYZ p;
    p.x = pcv.x;
    p.y = pcv.y;
    p.z = pcv.z;
    return p;
}

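// Node entry point: initialize ROS, construct the ARPublisher and spin.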
int main (int argc, char **argv)
{
    ros::init (argc, argv, "ar_kinect");
    ros::NodeHandle n;
    ARPublisher ar_kinect (n);

    ros::spin ();

    return 0;
}

#ifdef AR_BOUNDING_BOX

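// Order two detected markers by their Euclidean distance from the camera (closest first).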
bool compareDistances(const ar_pose::ARMarker& a1, const ar_pose::ARMarker& a2) {
    double dist1 = sqrt(a1.pose.pose.position.x * a1.pose.pose.position.x +
                        a1.pose.pose.position.y * a1.pose.pose.position.y +
                        a1.pose.pose.position.z * a1.pose.pose.position.z);
    double dist2 = sqrt(a2.pose.pose.position.x * a2.pose.pose.position.x +
                        a2.pose.pose.position.y * a2.pose.pose.position.y +
                        a2.pose.pose.position.z * a2.pose.pose.position.z);
    return dist1 < dist2;
}

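// Precompute, for the six markers of the calibration pattern (ideal positions spaced by
// `size` in the pattern plane), the pairwise distances and a CoordinateFrame for every
// ordered triple of markers. These are compared later against the measured marker positions.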
void calculateMarkerTransform(boost::multi_array<CoordinateFrame*, 3> &transformations, cv::Mat1f &distances, const double size)
{
    // Ideal marker center positions in the pattern coordinate system.
    CvPoint3D32f ps[6] = {cvPoint3D32f( size, -size, 0),
                          cvPoint3D32f(    0, -size, 0),
                          cvPoint3D32f(-size,     0, 0),
                          cvPoint3D32f(-size,  size, 0),
                          cvPoint3D32f(    0,  size, 0),
                          cvPoint3D32f( size,     0, 0)};

    distances = cv::Mat1f(6, 6);

    for (int i = 0; i < 6; i++) {
        for (int j = 0; j < 6; j++) {
            // Pairwise distance between the ideal marker positions.
            distances(cv::Point(i, j)) = Utils3D::distPoints(&ps[i], &ps[j]);
            for (int k = 0; k < 6; k++) {
                CvPoint3D32f p1, p2, p3;
                p1 = ps[i];
                p2 = ps[j];
                p3 = ps[k];

                // Coordinate frame spanned by the ordered marker triple (i, j, k).
                *(transformations[i][j][k]) = CoordinateFrame(&p1, &p2, &p3);
            }
        }
    }
}

#endif

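// Read node parameters and set up the camera info / point cloud subscribers and the marker publishers.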
ARPublisher::ARPublisher (ros::NodeHandle & n):n_ (n), it_ (n_), gotcloud_(false), transformations(boost::extents[6][6][6])
{
    std::string path;
    std::string package_path = ros::package::getPath (ROS_PACKAGE_NAME);
    ros::NodeHandle n_param ("~");
    XmlRpc::XmlRpcValue xml_marker_center;
    cloud_width_ = 640;

    if (!n_param.getParam ("publish_tf", publishTf_))
        publishTf_ = true;
    ROS_INFO ("\tPublish transforms: %d", publishTf_);

    if (!n_param.getParam ("publish_visual_markers", publishVisualMarkers_))
        publishVisualMarkers_ = true;
    ROS_INFO ("\tPublish visual markers: %d", publishVisualMarkers_);

    if (!n_param.getParam ("threshold", threshold_))
        threshold_ = 100;
    ROS_INFO ("\tThreshold: %d", threshold_);

    if (!n_param.getParam ("marker_pattern_list", path)) {
        sprintf(pattern_filename_, "%s/data/objects_kinect", package_path.c_str());
    } else {
        sprintf(pattern_filename_, "%s", path.c_str());
    }
    ROS_INFO ("Marker Pattern Filename: %s", pattern_filename_);

    if (!n_param.getParam ("marker_data_directory", path)) {
        sprintf(data_directory_, "%s", package_path.c_str());
    } else {
        sprintf(data_directory_, "%s", path.c_str());
    }
    ROS_INFO ("Marker Data Directory: %s", data_directory_);

    ROS_INFO ("Subscribing to info topic");
    sub_ = n_.subscribe (cameraInfoTopic_, 1, &ARPublisher::camInfoCallback, this);
    cloud_sub_ = n_.subscribe (cloudTopic_, 1, &ARPublisher::cloudCallback, this);

    getCamInfo_ = false;

    arMarkerPub_ = n_.advertise < ar_pose::ARMarkers > ("ar_pose_markers", 0);
    if (publishVisualMarkers_)
    {
        rvizMarkerPub_ = n_.advertise < visualization_msgs::Marker > ("visualization_marker", 0);
    }

#ifdef DEBUG_AR_KINECT
    kinect_pclPub_ = n_.advertise < sensor_msgs::PointCloud2 > ("debug_kinect_pcl", 1);
#endif
}

ARPublisher::~ARPublisher (void)
{
    arVideoCapStop ();
    arVideoClose ();
}

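// One-shot camera info callback: copy the projection and distortion parameters into the
// ARToolKit camera structure and finish initialization in arInit().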
void ARPublisher::camInfoCallback (const sensor_msgs::CameraInfoConstPtr & cam_info)
{
    if (!getCamInfo_)
    {
        cam_info_ = (*cam_info);

        cam_param_.xsize = cam_info_.width;
        cam_param_.ysize = cam_info_.height;

        cam_param_.mat[0][0] = cam_info_.P[0];
        cam_param_.mat[1][0] = cam_info_.P[4];
        cam_param_.mat[2][0] = cam_info_.P[8];
        cam_param_.mat[0][1] = cam_info_.P[1];
        cam_param_.mat[1][1] = cam_info_.P[5];
        cam_param_.mat[2][1] = cam_info_.P[9];
        cam_param_.mat[0][2] = cam_info_.P[2];
        cam_param_.mat[1][2] = cam_info_.P[6];
        cam_param_.mat[2][2] = cam_info_.P[10];
        cam_param_.mat[0][3] = cam_info_.P[3];
        cam_param_.mat[1][3] = cam_info_.P[7];
        cam_param_.mat[2][3] = cam_info_.P[11];

        // Distortion parameters: principal point (cx, cy) from K, first radial
        // distortion coefficient scaled for ARToolKit, and scale factor.
        cam_param_.dist_factor[0] = cam_info_.K[2];
        cam_param_.dist_factor[1] = cam_info_.K[5];
        cam_param_.dist_factor[2] = -100. * cam_info_.D[0];
        cam_param_.dist_factor[3] = 1.0;

        arInit ();

        ROS_INFO ("Subscribing to image and cloud topics");

        getCamInfo_ = true;
    }
}

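// Initialize ARToolKit with the received camera parameters, load the marker descriptions
// and prepare the capture image and the bounding-box related parameters.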
void ARPublisher::arInit ()
{
    arInitCparam (&cam_param_);
    ROS_INFO ("*** Camera Parameter ***");
    arParamDisp (&cam_param_);

    // Load the marker pattern descriptions.
    if ((object = ar_object::read_ObjData (pattern_filename_, data_directory_, &objectnum)) == NULL)
        ROS_BREAK ();
    ROS_DEBUG ("Objectfile num = %d", objectnum);

    sz_ = cvSize (cam_param_.xsize, cam_param_.ysize);
    capture_ = cvCreateImage (sz_, IPL_DEPTH_8U, 3);

#ifdef ACTIVATE_TABLE_FILTER
    ros::param::set("RANSAC_DISTANCE", 0.01);
#endif

    DBG(cerr << "building.";);
#ifdef AR_BOUNDING_BOX
    result_pcl = n_.advertise<sensor_msgs::PointCloud2>("resulting_pcl", 1000);
    bb_pcl = n_.advertise<sensor_msgs::PointCloud2>("bb_pcl", 1000);

    if (!ros::param::has("boxsize"))
        ros::param::set("boxsize", 0.135);
    ros::param::param<double>("boxsize", boundingbox_size, 0.135);
    if (!ros::param::has("marker_dist_threshold"))
        ros::param::set("marker_dist_threshold", 0.002);
    ros::param::param<double>("marker_dist_threshold", marker_dist_threshold, .002);

    // Allocate one CoordinateFrame per ordered marker triple; they are filled in
    // calculateMarkerTransform() on every cloud callback.
    for (int i = 0; i < 6; i++) {
        for (int j = 0; j < 6; j++) {
            for (int k = 0; k < 6; k++) {
                transformations[i][j][k] = new CoordinateFrame();
            }
        }
    }
#endif
}

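// Main processing callback: convert the organized point cloud to an RGB image, run the
// ARToolKit marker detection on it, refine each marker pose with the 3D points of the
// cloud, publish marker poses/tf/rviz markers and, if enough markers are consistent,
// broadcast the pose of the calibration pattern base.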
void ARPublisher::cloudCallback(const sensor_msgs::PointCloud2ConstPtr& msg)
{
    bool published = false;

    pcl::fromROSMsg(*msg, cloud_);
    gotcloud_ = true;

    // Extract the RGB image from the organized cloud and hand it to ARToolKit via OpenCV.
    sensor_msgs::ImagePtr image_(new sensor_msgs::Image);
    pcl::toROSMsg (cloud_, *image_);
    sensor_msgs::CvBridge bridge;
    capture_ = bridge.imgMsgToCv(image_, std::string("bgr8"));

#ifdef AR_BOUNDING_BOX
    double spacing;
    double size;

    // Spacing between the pattern markers; used to rebuild the ideal marker geometry.
    if (!ros::param::get("/marker_spacing", spacing))
        spacing = 0.11;

    calculateMarkerTransform(transformations, distances, spacing);

    // ARToolKit expects the marker width in millimeters.
    if (ros::param::get("/marker_size", size))
    {
        for (int i = 0; i < objectnum; i++)
            object[i].marker_width = size * 1000;
    }
#endif

#if 0
    // Optional preprocessing (disabled): threshold and smooth a grayscale copy of the image.
    IplImage* gray = cvCreateImage(cvSize(640,480), 8, 1);
    cvCvtColor(capture_, gray, CV_BGR2GRAY);

    cvThreshold(gray, gray, 128, 255, CV_THRESH_BINARY);
    cvSmooth(gray, gray, CV_GAUSSIAN, 5, 5);

    cvCvtColor(gray, capture_, CV_GRAY2BGR);

    cvWaitKey(10);
    cvReleaseImage(&gray);
#endif

    ARUint8 *dataPtr;
    ARMarkerInfo *marker_info;
    int marker_num;
    int i, k, j;

    dataPtr = (ARUint8 *) capture_->imageData;

    // Run the ARToolKit marker detection on the current image.
    if (arDetectMarker (dataPtr, threshold_, &marker_info, &marker_num) < 0)
    {
        argCleanup ();
        ROS_BREAK ();
    }

    if (marker_num < 3) {
        ROS_WARN("Detected %i markers in the image; at least 3 markers are necessary to estimate a coordinate system. Please check your camera setup.", marker_num);
        return;
    }

#ifdef DEBUG_AR_KINECT
    pcl::PointCloud<pcl::PointXYZRGB> pcl_dat;
    sensor_msgs::PointCloud2 pcl_msg;
#endif

    // Ratio between image and cloud resolution, used to map pixel coordinates to cloud indices.
    int downsize = capture_->width / cloud_width_;

    arPoseMarkers_.markers.clear ();

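    // For every known pattern, pick the detected marker with the highest confidence and
    // estimate its pose; the pose is then refined using the 3D points of the cloud.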
    for (i = 0; i < objectnum; i++) {
        // Find the best (highest-confidence) detection for pattern i.
        k = -1;
        for (j = 0; j < marker_num; j++) {
            if (object[i].id == marker_info[j].id) {
                if (k == -1)
                    k = j;
                else if (marker_info[k].cf < marker_info[j].cf)
                    k = j;
            }
        }
        if (k == -1) {
            object[i].visible = 0;
            continue;
        }

        double quat[4], pos[3];

        // Use the continuous pose estimation if the marker was already visible in the previous frame.
        if (object[i].visible == 0)
        {
            arGetTransMat (&marker_info[k], object[i].marker_center, object[i].marker_width, object[i].trans);
        }
        else
        {
            arGetTransMatCont (&marker_info[k], object[i].trans,
                               object[i].marker_center, object[i].marker_width, object[i].trans);
        }
        object[i].visible = 1;

        double arQuat[4], arPos[3];
        arUtilMat2QuatPos (object[i].trans, arQuat, arPos);

        // Convert the ARToolKit pose (millimeters) to ROS conventions (meters).
        pos[0] = arPos[0] * AR_TO_ROS;
        pos[1] = arPos[1] * AR_TO_ROS;
        pos[2] = arPos[2] * AR_TO_ROS;

        quat[0] = -arQuat[0];
        quat[1] = -arQuat[1];
        quat[2] = -arQuat[2];
        quat[3] = arQuat[3];

        if (gotcloud_) {
            // 3D point of the cloud at the detected marker center.
            pcl::PointXYZRGB point = cloud_((int) (marker_info[k].pos[0] / downsize), (int) (marker_info[k].pos[1] / downsize));

            if (!(std::isnan(point.x) || std::isnan(point.y) || std::isnan(point.z))) {
                // Select three marker corner indices, compensating for the detected marker orientation.
                int idx1 = (4 - marker_info[k].dir) + 0;
                int idx2 = (4 - marker_info[k].dir) + 1;
                int idx3 = (4 - marker_info[k].dir) + 2;

                idx1 %= 4;
                idx2 %= 4;
                idx3 %= 4;

                pcl::PointXYZRGB p1, p2, p3;
                {
                    int u1 = static_cast<int>(marker_info[k].vertex[idx1][0]);
                    int v1 = static_cast<int>(marker_info[k].vertex[idx1][1]);
                    int u2 = static_cast<int>(marker_info[k].vertex[idx2][0]);
                    int v2 = static_cast<int>(marker_info[k].vertex[idx2][1]);
                    int u3 = static_cast<int>(marker_info[k].vertex[idx3][0]);
                    int v3 = static_cast<int>(marker_info[k].vertex[idx3][1]);

                    if ((u1 < 0) || (u1 >= (int) cloud_.width) || (v1 < 0) || (v1 >= (int) cloud_.height) ||
                        (u2 < 0) || (u2 >= (int) cloud_.width) || (v2 < 0) || (v2 >= (int) cloud_.height) ||
                        (u3 < 0) || (u3 >= (int) cloud_.width) || (v3 < 0) || (v3 >= (int) cloud_.height)) {
                        ROS_WARN("got invalid corner points from ar_bounding_box");
                        return;
                    }

                    // Look up the 3D corner points in the organized cloud.
                    p2 = cloud_.at(u1, v1);
                    p1 = cloud_.at(u2, v2);
                    p3 = cloud_.at(u3, v3);
                }
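                // Sample 3D points along two marker edges (starting at the edge midpoints) so
                // that straight lines can be fitted to them below.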
                cv::Point2f p2_(marker_info[k].vertex[idx1][0], marker_info[k].vertex[idx1][1]);
                cv::Point2f p1_(marker_info[k].vertex[idx2][0], marker_info[k].vertex[idx2][1]);
                cv::Point2f p3_(marker_info[k].vertex[idx3][0], marker_info[k].vertex[idx3][1]);

                // Edge directions and midpoints in image coordinates.
                cv::Vec2f v1 = p2_ - p1_;
                cv::Vec2f v2 = p3_ - p1_;

                cv::Point2f m1 = cv::Point2f((p1_.x + p3_.x) * .5, (p1_.y + p3_.y) * .5);
                cv::Point2f m2 = cv::Point2f((p1_.x + p2_.x) * .5, (p1_.y + p2_.y) * .5);

                // Collect valid 3D points along the first edge.
                vector<cv::Point3f> l1;
                for (double k1 = 0; k1 < 1; k1 += .01) {
                    cv::Point2f tpos = m1 + cv::Point2f(k1 * v1[0], k1 * v1[1]);
                    if (tpos.x >= 0 && tpos.x < cloud_.width && tpos.y >= 0 && tpos.y < cloud_.height) {
                        pcl::PointXYZRGB tpoint = cloud_.at((int) tpos.x, (int) tpos.y);
                        if (!std::isnan(tpoint.x))
                            l1.push_back(cv::Point3f(tpoint.x, tpoint.y, tpoint.z));
                    }
                }

                // Collect valid 3D points along the second edge.
                vector<cv::Point3f> l2;
                for (double k2 = 0; k2 < 1; k2 += .01) {
                    cv::Point2f tpos = m2 + cv::Point2f(k2 * v2[0], k2 * v2[1]);
                    if (tpos.x >= 0 && tpos.x < cloud_.width && tpos.y >= 0 && tpos.y < cloud_.height) {
                        pcl::PointXYZRGB tpoint = cloud_.at((int) tpos.x, (int) tpos.y);
                        if (!std::isnan(tpoint.x))
                            l2.push_back(cv::Point3f(tpoint.x, tpoint.y, tpoint.z));
                    }
                }

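                // Fit a 3D line to each sampled edge; the point where the two lines (almost)
                // intersect gives a refined estimate of the marker position.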
                if (l1.size() >= 6 && l2.size() >= 6) {
                    // Least-squares 3D line fit for both edges.
                    cv::Vec6f line2_parameters;
                    cv::fitLine(cv::Mat(l2), line2_parameters, CV_DIST_L2, 0.01, 0.01, 0.01);

                    cv::Vec6f line1_parameters;
                    cv::fitLine(cv::Mat(l1), line1_parameters, CV_DIST_L2, 0.01, 0.01, 0.01);

                    // Direction and support point of each fitted line.
                    cv::Point3f v12;
                    cv::Point3f p12;
                    v12.x = line1_parameters[0];
                    v12.y = line1_parameters[1];
                    v12.z = line1_parameters[2];

                    p12.x = line1_parameters[3];
                    p12.y = line1_parameters[4];
                    p12.z = line1_parameters[5];

                    cv::Point3f v13;
                    cv::Point3f p13;
                    v13.x = line2_parameters[0];
                    v13.y = line2_parameters[1];
                    v13.z = line2_parameters[2];

                    p13.x = line2_parameters[3];
                    p13.y = line2_parameters[4];
                    p13.z = line2_parameters[5];

                    Utils3D::STRAIGHTLINE line1;
                    line1.loc = p12;
                    line1.dir = v12;

                    Utils3D::STRAIGHTLINE line2;
                    line2.loc = p13;
                    line2.dir = v13;

                    // Enforce a consistent orientation of the fitted line directions
                    // relative to the sampled edge points.
                    cv::Point3f v1__ = l1[0] - l1[l1.size() - 1];
                    if (v1__.ddot(line1.dir) < 0)
                    {
                        Utils3D::scalePoint(&line1.dir, -1);
                    }

                    cv::Point3f v2__ = l2[0] - l2[l2.size() - 1];
                    if (v2__.ddot(line2.dir) < 0)
                    {
                        Utils3D::scalePoint(&line2.dir, -1);
                    }

                    float t1, t2;
                    float distance = Utils3D::getShortestDistanceBetweenTwoLines(line1, line2, &t1, &t2);

                    if (distance > 1) {
                        cerr << "the two lines do not meet exactly." << endl;
                    } else {
                        // Closest points of the two lines; their mean replaces the marker origin.
                        CvPoint3D32f p_1_ = Utils3D::getLinePoint(line1, t1);
                        CvPoint3D32f p_2_ = Utils3D::getLinePoint(line2, t2);

                        CvPoint3D32f tpt;
                        CV_SWAP(line1.loc, line2.loc, tpt);

                        p1.x = (p_1_.x + p_2_.x) * .5;
                        p1.y = (p_1_.y + p_2_.y) * .5;
                        p1.z = (p_1_.z + p_2_.z) * .5;

                        p12 = Utils3D::getLinePoint(line1, 1);
                        p13 = Utils3D::getLinePoint(line2, 1);

                        p2.x = p12.x;
                        p2.y = p12.y;
                        p2.z = p12.z;

                        p3.x = p13.x;
                        p3.y = p13.y;
                        p3.z = p13.z;
                    }
                } else {
                    cerr << "error, too few points: " << l1.size() << "\t" << l2.size() << endl;
                    // Mark the detection as unreliable (k is the detected-marker index for pattern i).
                    marker_info[k].cf = 0;
                }

#ifdef DEBUG_AR_KINECT
                // Debug cloud: refined corner points and the sampled edge points.
                pcl_dat.points.push_back(p1);
                pcl_dat.points.push_back(p2);
                pcl_dat.points.push_back(p3);

                for (size_t pidx = 0; pidx < l1.size(); pidx++) {
                    pcl::PointXYZRGB tp;
                    tp.x = l1[pidx].x;
                    tp.y = l1[pidx].y;
                    tp.z = l1[pidx].z;
                    pcl_dat.push_back(tp);
                }

                for (size_t pidx = 0; pidx < l2.size(); pidx++) {
                    pcl::PointXYZRGB tp;
                    tp.x = l2[pidx].x;
                    tp.y = l2[pidx].y;
                    tp.z = l2[pidx].z;
                    pcl_dat.push_back(tp);
                }
#endif

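                // Replace the ARToolKit pose with the cloud-based estimate: position from the
                // refined corner, orientation from the frame spanned by the three points.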
                CoordinateFrame e6p(cv::Point3f(p1.x, p1.y, p1.z), cv::Point3f(p2.x, p2.y, p2.z), cv::Point3f(p3.x, p3.y, p3.z));
                // btQuaternion(yaw, pitch, roll) built from the frame's rotation angles.
                btQuaternion q(e6p.getRot()->z, e6p.getRot()->y, e6p.getRot()->x);

                pos[0] = p1.x;
                pos[1] = p1.y;
                pos[2] = p1.z;

                quat[0] = (double) q.x();
                quat[1] = (double) q.y();
                quat[2] = (double) q.z();
                quat[3] = (double) q.w();
            }
        }

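        // Fill the ARMarker message for this pattern and queue it for publishing.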
        ar_pose::ARMarker ar_pose_marker;
        ar_pose_marker.header.frame_id = msg->header.frame_id;
        ar_pose_marker.header.stamp = msg->header.stamp;
        ar_pose_marker.id = object[i].id;

        ar_pose_marker.pose.pose.position.x = pos[0];
        ar_pose_marker.pose.pose.position.y = pos[1];
        ar_pose_marker.pose.pose.position.z = pos[2];

        ar_pose_marker.pose.pose.orientation.x = quat[0];
        ar_pose_marker.pose.pose.orientation.y = quat[1];
        ar_pose_marker.pose.pose.orientation.z = quat[2];
        ar_pose_marker.pose.pose.orientation.w = quat[3];

        // Confidence of the detection actually used for this pattern (index k, not object index i).
        ar_pose_marker.confidence = marker_info[k].cf * 100.0f;
        arPoseMarkers_.markers.push_back (ar_pose_marker);

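        // If enabled, broadcast the marker pose on tf and publish an rviz cube marker for visualization.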
        btQuaternion rotation (quat[0], quat[1], quat[2], quat[3]);
        btVector3 origin (pos[0], pos[1], pos[2]);
        btTransform t (rotation, origin);

        if (publishTf_)
        {
            tf::StampedTransform camToMarker (t, msg->header.stamp, msg->header.frame_id, object[i].name);
            broadcaster_.sendTransform(camToMarker);
        }

        if (publishVisualMarkers_)
        {
            btVector3 markerOrigin (0, 0, 0.25 * object[i].marker_width * AR_TO_ROS);
            btTransform m (btQuaternion::getIdentity (), markerOrigin);
            btTransform markerPose = t * m;

            tf::poseTFToMsg (markerPose, rvizMarker_.pose);

            rvizMarker_.header.frame_id = msg->header.frame_id;
            rvizMarker_.header.stamp = msg->header.stamp;
            rvizMarker_.id = object[i].id;

            rvizMarker_.scale.x = 1.0 * object[i].marker_width * AR_TO_ROS;
            rvizMarker_.scale.y = 1.0 * object[i].marker_width * AR_TO_ROS;
            rvizMarker_.scale.z = 0.5 * object[i].marker_width * AR_TO_ROS;
            rvizMarker_.ns = "basic_shapes";
            rvizMarker_.type = visualization_msgs::Marker::CUBE;
            rvizMarker_.action = visualization_msgs::Marker::ADD;
            switch (i)
            {
                case 0:
                    rvizMarker_.color.r = 0.0f;
                    rvizMarker_.color.g = 0.0f;
                    rvizMarker_.color.b = 1.0f;
                    rvizMarker_.color.a = 1.0;
                    break;
                case 1:
                    rvizMarker_.color.r = 1.0f;
                    rvizMarker_.color.g = 0.0f;
                    rvizMarker_.color.b = 0.0f;
                    rvizMarker_.color.a = 1.0;
                    break;
                default:
                    rvizMarker_.color.r = 0.0f;
                    rvizMarker_.color.g = 1.0f;
                    rvizMarker_.color.b = 0.0f;
                    rvizMarker_.color.a = 1.0;
            }
            rvizMarker_.lifetime = ros::Duration ();

            rvizMarkerPub_.publish (rvizMarker_);
            ROS_DEBUG ("Published visual marker");
        }
    }
    arMarkerPub_.publish (arPoseMarkers_);
    ROS_DEBUG ("Published ar_multi markers");

#ifdef AR_BOUNDING_BOX

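    // If at least three markers were detected, estimate the pose of the whole calibration
    // pattern: take the three markers closest to the camera, check that their mutual distances
    // match the ideal pattern geometry, and broadcast the resulting PATTERN_BASE transform.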
    if (arPoseMarkers_.markers.size() >= 3) {
        DBG(ROS_INFO("Found enough markers"););

        CoordinateFrame* transformFromModelPointsToObjectBase = NULL;
        CoordinateFrame transformFromScenePointsToCamera;

        int pointsfound = 0;
        CvPoint3D32f ps[3];
        int idxs[3];

        // Sort the detected markers by distance to the camera and take the three closest ones.
        std::sort(arPoseMarkers_.markers.begin(), arPoseMarkers_.markers.end(), compareDistances);

        bool destroyresults = false;

        for (unsigned int i = 0; i < arPoseMarkers_.markers.size() && pointsfound < 3; i++) {
            idxs[pointsfound] = arPoseMarkers_.markers[i].id;
            ps[pointsfound] = cvPoint3D32f(arPoseMarkers_.markers[i].pose.pose.position.x,
                                           arPoseMarkers_.markers[i].pose.pose.position.y,
                                           arPoseMarkers_.markers[i].pose.pose.position.z);

            DBG(cerr << "ps[" << pointsfound << "]: " << cv::Point3f(ps[pointsfound]) << " quality: " << (float) arPoseMarkers_.markers[i].confidence << endl;);

            pointsfound++;
        }

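        // Sanity check: the measured pairwise marker distances must match the ideal pattern
        // distances within marker_dist_threshold, otherwise the result is discarded.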
        ros::param::get("marker_dist_threshold", marker_dist_threshold);

        double d01 = Utils3D::distPoints(&ps[0], &ps[1]);
        if (fabs(d01 - distances(cv::Point(idxs[0], idxs[1]))) > marker_dist_threshold) {
            destroyresults = true;
            cerr << "distance 0/1 too large: " << fabs(d01 - distances(cv::Point(idxs[0], idxs[1]))) << endl;
        }

        double d12 = Utils3D::distPoints(&ps[1], &ps[2]);
        if (fabs(d12 - distances(cv::Point(idxs[1], idxs[2]))) > marker_dist_threshold) {
            destroyresults = true;
            cerr << "distance 1/2 too large: " << fabs(d12 - distances(cv::Point(idxs[1], idxs[2]))) << endl;
        }

        double d02 = Utils3D::distPoints(&ps[0], &ps[2]);
        if (fabs(d02 - distances(cv::Point(idxs[0], idxs[2]))) > marker_dist_threshold) {
            destroyresults = true;
            cerr << "distance 0/2 too large: " << fabs(d02 - distances(cv::Point(idxs[0], idxs[2]))) << endl;
        }

        // Publish the three base points as a small debug cloud.
        sensor_msgs::PointCloud2 bb_msg;
        pcl::PointCloud<pcl::PointXYZ> base_points_pcl;

        base_points_pcl.points.push_back(getPointPCLFromCV(ps[0]));
        base_points_pcl.points.push_back(getPointPCLFromCV(ps[1]));
        base_points_pcl.points.push_back(getPointPCLFromCV(ps[2]));
        pcl::toROSMsg(base_points_pcl, bb_msg);
        bb_msg.header.frame_id = ORIGIN;
        bb_msg.header.stamp = msg->header.stamp;
        bb_pcl.publish(bb_msg);

        // Look up the ideal frame for this marker triple and combine it with the frame measured
        // in the scene to obtain the pose of the pattern base relative to the camera.
        transformFromModelPointsToObjectBase = transformations[idxs[0]][idxs[1]][idxs[2]];
        CvPoint3D32f bp1 = ps[0], bp2 = ps[1], bp3 = ps[2];
        transformFromScenePointsToCamera = CoordinateFrame(&bp1, &bp2, &bp3);
        if (transformFromModelPointsToObjectBase != NULL) {
            CvMat* trafo = transformFromModelPointsToObjectBase->createTransformationMatrix();
            cvInvert(trafo, trafo);
            CoordinateFrame transformFromObjectBaseToModelPoints(trafo);
            CoordinateFrame* e6p = transformFromObjectBaseToModelPoints.cInWorld(&transformFromScenePointsToCamera);

            DBG(cerr << e6p->toString() << endl;);

            btQuaternion rotation(e6p->getRot()->z, e6p->getRot()->y, e6p->getRot()->x);
            btVector3 origin(e6p->getPos()->x, e6p->getPos()->y, e6p->getPos()->z);
            btTransform t(rotation, origin);

            broadcaster_.sendTransform(tf::StampedTransform(t, msg->header.stamp, ORIGIN, "PATTERN_BASE"));

            published = !destroyresults;
        }
    }
#endif

#ifdef DEBUG_AR_KINECT
    pcl::toROSMsg(pcl_dat, pcl_msg);
    pcl_msg.header.stamp = ros::Time::now();
    pcl_msg.header.frame_id = msg->header.frame_id;
    kinect_pclPub_.publish(pcl_msg);
#endif

#ifdef AR_BOUNDING_BOX
    if (published)
        extractObject(msg);
#endif
}