#include "ros/ros.h"
#include "cv.h"
#include "highgui.h"
#include "tracking.h"
#include <visp/vpImageConvert.h>
#include <visp/vpPixelMeterConversion.h>
#include <visp/vpImagePoint.h>
#include <visp/vpDisplayX.h>
#include <visp/vpPose.h>
#include <visp/vpMeterPixelConversion.h>
#include <visp/vpTrackingException.h>
#include <visp/vpImageIo.h>
#include <visp/vpRect.h>
#include <visp/vpMbKltTracker.h>
#include <visp/vpMbEdgeTracker.h>

#include <algorithm> // std::min / std::max

// Boost.Accumulators is used below for the checkpoint median and the border-width mean;
// these includes are harmless if tracking.h already pulls them in.
#include <boost/accumulators/accumulators.hpp>
#include <boost/accumulators/statistics/stats.hpp>
#include <boost/accumulators/statistics/mean.hpp>
#include <boost/accumulators/statistics/median.hpp>
#include <boost/accumulators/statistics/p_square_quantile.hpp>

#include "logfilewriter.hpp"

namespace tracking{

Tracker_:: Tracker_(CmdLine& cmd, detectors::DetectorBase* detector,vpMbTracker* tracker,bool flush_display) :
    cmd(cmd),
    iter_(0),
    flashcode_center_(640/2,480/2),
    detector_(detector),
    tracker_(tracker),
    flush_display_(flush_display){
  std::cout << "starting tracker" << std::endl;
  cvTrackingBox_init_ = false;
  cvTrackingBox_.x = 0;
  cvTrackingBox_.y = 0;
  cvTrackingBox_.width = 0;
  cvTrackingBox_.height = 0;

  tracker_->getCameraParameters(cam_);

  points3D_inner_ = cmd.get_inner_points_3D();
  points3D_outer_ = cmd.get_outer_points_3D();
  outer_points_3D_bcp_ = cmd.get_outer_points_3D();
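  // When ad-hoc recovery or checkpoint logging is enabled, derive a set of "middle"
  // checkpoint points from the inner and outer contour corners; with a recovery ratio
  // of 0.5 (presumably the default in CmdLine) each one is the midpoint of the pair.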
  if(cmd.using_adhoc_recovery() || cmd.log_checkpoints()){
    for(unsigned int i=0;i<points3D_outer_.size();i++){
      vpPoint p;
      p.setWorldCoordinates(
            (points3D_outer_[i].get_oX()+points3D_inner_[i].get_oX())*cmd.get_adhoc_recovery_ratio(),
            (points3D_outer_[i].get_oY()+points3D_inner_[i].get_oY())*cmd.get_adhoc_recovery_ratio(),
            (points3D_outer_[i].get_oZ()+points3D_inner_[i].get_oZ())*cmd.get_adhoc_recovery_ratio()
            );
      points3D_middle_.push_back(p);
    }
  }
  f_ = cmd.get_flashcode_points_3D();

  if(cmd.using_var_file()){
    varfile_.open(cmd.get_var_file().c_str(),std::ios::out);
    varfile_ << "#These are variances and other data from the model based tracker in gnuplot format" << std::endl;
    if(cmd.using_hinkley())
      varfile_ << "iteration\tvar_x\tvar_y\tvar_z\tvar_wx\tvar_wy\tvar_wz";
    if(cmd.using_mbt_dynamic_range())
      varfile_ << "\tmbt_range";
    if(cmd.log_pose())
      varfile_ << "\tpose_tx\tpose_ty\tpose_tz\tpose_rx\tpose_ry\tpose_rz";
    if(cmd.log_checkpoints())
      varfile_ << "\tcheckpoint_median";

    varfile_ << std::endl;
  }

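  // Hinkley's cumulative-sum test is applied (in mbt_success) to the diagonal of the
  // pose covariance matrix to detect sudden jumps, which are treated as tracking failures.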
  if(cmd.using_hinkley()){
    if(cmd.get_verbose())
      std::cout << "Initialising hinkley with alpha=" << cmd.get_hinkley_alpha() << " and delta=" << cmd.get_hinkley_delta() << std::endl;
    for(hinkley_array_t::iterator i = hink_.begin();i!=hink_.end();i++)
      i->init(cmd.get_hinkley_alpha(),cmd.get_hinkley_delta());
  }

  if(cmd.using_mbt_dynamic_range()){
    vpMbEdgeTracker *tracker_me = dynamic_cast<vpMbEdgeTracker*>(tracker_);
    if(tracker_me)
      tracker_me->getMovingEdge(tracker_me_config_);
    else
      std::cout << "error: could not init moving edges on tracker that doesn't support them." << std::endl;
  }

  tracker_->loadConfigFile(cmd.get_xml_file().c_str() );
  tracker_->loadModel(cmd.get_wrl_file().c_str());
  tracker_->setCameraParameters(cam_);
}

detectors::DetectorBase& Tracker_:: get_detector(){
  return *detector_;
}

vpMbTracker& Tracker_:: get_mbt(){
  return *tracker_;
}

std::vector<vpPoint>& Tracker_:: get_points3D_inner(){
  return points3D_inner_;
}

std::vector<vpPoint>& Tracker_:: get_points3D_outer(){
  return points3D_outer_;
}

std::vector<vpPoint>& Tracker_:: get_points3D_middle(){
  return points3D_middle_;
}

std::vector<vpPoint>& Tracker_:: get_flashcode(){
  return f_;
}

vpImage<vpRGBa>& Tracker_:: get_I(){
  return *I_;
}

vpCameraParameters& Tracker_:: get_cam(){
  return cam_;
}

CmdLine& Tracker_:: get_cmd(){
  return cmd;
}

template<>
const cv::Rect& Tracker_:: get_tracking_box<cv::Rect>(){
  return cvTrackingBox_;
}

template<>
const vpRect& Tracker_:: get_tracking_box<vpRect>(){
  return vpTrackingBox_;
}

bool Tracker_:: input_selected(input_ready const& evt){
  return vpDisplay::getClick(evt.I,false);
}

bool Tracker_:: no_input_selected(input_ready const& evt){
  return !input_selected(evt);
}

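// Detection guard: run the flashcode detector on the full frame. The ViSP RGBa image is
// wrapped in a cv::Mat without copying and split into a BGR image plus an alpha plane.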
bool Tracker_:: flashcode_detected(input_ready const& evt){
  cv::Mat rgba = cv::Mat((int)evt.I.getRows(), (int)evt.I.getCols(), CV_8UC4, (void*)evt.I.bitmap);

  cv::Mat bgr = cv::Mat((int)evt.I.getRows(), (int)evt.I.getCols(), CV_8UC3);
  cv::Mat alpha((int)evt.I.getRows(), (int)evt.I.getCols(), CV_8UC1);

  cv::Mat out[] = {bgr, alpha};

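  // vpRGBa stores pixels as R,G,B,A: swap channels 0 and 2 to obtain BGR and route the
  // fourth channel into the separate alpha plane.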
  int from_to[] = { 0,2, 1,1, 2,0, 3,3 };
  cv::mixChannels(&rgba, 1, out, 2, from_to, 4);

  return detector_->detect(bgr,cmd.get_dmx_timeout(),0,0);
}

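// Re-detection guard: once a tracking box is available, restrict the detector to that
// region of interest and scale its timeout by the ratio of box area to frame area;
// otherwise fall back to a full-frame detection.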
bool Tracker_:: flashcode_redetected(input_ready const& evt){
  cv::Mat rgba = cv::Mat((int)evt.I.getRows(), (int)evt.I.getCols(), CV_8UC4, (void*)evt.I.bitmap);

  cv::Mat bgr = cv::Mat((int)evt.I.getRows(), (int)evt.I.getCols(), CV_8UC3);
  cv::Mat alpha((int)evt.I.getRows(), (int)evt.I.getCols(), CV_8UC1);

  cv::Mat out[] = {bgr, alpha};

  int from_to[] = { 0,2, 1,1, 2,0, 3,3 };
  cv::mixChannels(&rgba, 1, out, 2, from_to, 4);

  if (cvTrackingBox_init_)
  {
    cv::Mat subImage = cv::Mat(bgr,get_tracking_box<cv::Rect>()).clone();

    double timeout = cmd.get_dmx_timeout()*(double)(get_tracking_box<cv::Rect>().width*get_tracking_box<cv::Rect>().height)/(double)(bgr.cols*bgr.rows);
    return detector_->detect(subImage,(unsigned int)timeout,get_tracking_box<cv::Rect>().x,get_tracking_box<cv::Rect>().y);
  }
  else
  {
    return detector_->detect(bgr,cmd.get_dmx_timeout(),0,0);
  }
}

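// Convert the corners of the detected code polygon from pixel coordinates to normalised
// metric coordinates; they become the 2D measurements of the flashcode model points f_
// used for the initial pose computation.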
void Tracker_:: find_flashcode_pos(input_ready const& evt){
  this->cam_ = evt.cam_;

  std::vector<cv::Point> polygon = detector_->get_polygon();
  double centerX = (double)(polygon[0].x+polygon[1].x+polygon[2].x+polygon[3].x)/4.;
  double centerY = (double)(polygon[0].y+polygon[1].y+polygon[2].y+polygon[3].y)/4.;
  vpPixelMeterConversion::convertPoint(cam_, flashcode_center_, centerX, centerY);

  for(unsigned int i=0;i<f_.size();i++){
    double x=0, y=0;
    vpImagePoint poly_pt(polygon[i].y,polygon[i].x);

    vpPixelMeterConversion::convertPoint(cam_, poly_pt, x, y);
    f_[i].set_x(x);
    f_[i].set_y(y);
  }
  I_ = _I = &(evt.I);
}

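// Compute an initial pose from the flashcode corner correspondences (Lagrange linear
// estimate refined by virtual visual servoing), then reinitialise the model-based
// tracker from that pose and let it run a few convergence iterations.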
bool Tracker_:: model_detected(msm::front::none const&){
  vpImageConvert::convert(*I_,Igray_);
  vpPose pose;

  for(unsigned int i=0;i<f_.size();i++)
    pose.addPoint(f_[i]);

  pose.computePose(vpPose::LAGRANGE,cMo_);
  pose.computePose(vpPose::VIRTUAL_VS,cMo_);

  std::vector<vpImagePoint> model_inner_corner(4);
  std::vector<vpImagePoint> model_outer_corner(4);
  for(unsigned int i=0;i<4;i++){
    points3D_outer_[i].project(cMo_);
    points3D_inner_[i].project(cMo_);
    if(cmd.using_adhoc_recovery() || cmd.log_checkpoints())
      points3D_middle_[i].project(cMo_);
    vpMeterPixelConversion::convertPoint(cam_,points3D_outer_[i].get_x(),points3D_outer_[i].get_y(),model_outer_corner[i]);
    vpMeterPixelConversion::convertPoint(cam_,points3D_inner_[i].get_x(),points3D_inner_[i].get_y(),model_inner_corner[i]);

    if(cmd.get_verbose()){
      std::cout << "model inner corner: (" << model_inner_corner[i].get_i() << "," << model_inner_corner[i].get_j() << ")" << std::endl;
    }
  }

  try{
    tracker_->resetTracker();
    tracker_->loadConfigFile(cmd.get_xml_file().c_str() );
    tracker_->loadModel(cmd.get_wrl_file().c_str());
    tracker_->setCameraParameters(cam_);
    {
      vpCameraParameters cam;
      tracker_->getCameraParameters(cam);
      if (cam.get_px() != 558) ROS_INFO_STREAM("detection Camera parameters: \n" << cam_);
    }

    tracker_->initFromPose(Igray_,cMo_);

    tracker_->track(Igray_);
    tracker_->getPose(cMo_);
    tracker_->setCovarianceComputation(true);
    for(int i=0;i<cmd.get_mbt_convergence_steps();i++){
      tracker_->track(Igray_);
      tracker_->getPose(cMo_);
    }
  }catch(vpException& e){
    std::cout << "Tracking failed" << std::endl;
    std::cout << e.getStringMessage() << std::endl;
    return false;
  }
  return true;
}

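// Tracking guard: run one model-based tracking step and decide whether the result is
// trustworthy. The pose covariance is logged and checked against an absolute limit and
// the Hinkley jump test; the ad-hoc recovery checkpoints compare the median image
// intensity around each checkpoint with a threshold. Returning false presumably sends
// the surrounding state machine back to re-detection.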
bool Tracker_:: mbt_success(input_ready const& evt){
  iter_ = evt.frame;
  this->cam_ = evt.cam_;

  try{
    LogFileWriter writer(varfile_);
    vpImageConvert::convert(evt.I,Igray_);

    tracker_->track(Igray_);
    tracker_->getPose(cMo_);
    vpMatrix mat = tracker_->getCovarianceMatrix();
    if(cmd.using_var_file()){
      writer.write(iter_);
      for(unsigned int i=0;i<mat.getRows();i++)
        writer.write(mat[i][i]);
    }
    if(cmd.using_var_limit())
      for(unsigned int i=0; i<6; i++)
        if(mat[i][i]>cmd.get_var_limit())
          return false;
    if(cmd.using_hinkley())
      for(unsigned int i=0; i<6; i++){
        if(hink_[i].testDownUpwardJump(mat[i][i]) != vpHinkley::noJump){
          writer.write(mat[i][i]);
          if(cmd.get_verbose())
            std::cout << "Hinkley:detected jump!" << std::endl;
          return false;
        }
      }
    if(cmd.using_var_file() && cmd.using_mbt_dynamic_range())
      writer.write(tracker_me_config_.getRange());

    for(unsigned int i=0;i<mat.getRows();i++)
      statistics.var(mat[i][i]);

    if(mat.getRows() == 6){
      statistics.var_x(mat[0][0]);
      statistics.var_y(mat[1][1]);
      statistics.var_z(mat[2][2]);
      statistics.var_wx(mat[3][3]);
      statistics.var_wy(mat[4][4]);
      statistics.var_wz(mat[5][5]);
    }

    if(cmd.using_var_file() && cmd.log_pose()){
      vpPoseVector p(cMo_);
      for(unsigned int i=0;i<p.getRows();i++)
        writer.write(p[i]);
    }

    if(cmd.using_adhoc_recovery() || cmd.log_checkpoints()){
      for(unsigned int p=0;p<points3D_middle_.size();p++){
        vpPoint& point3D = points3D_middle_[p];

        double _u=0.,_v=0.,_u_inner=0.,_v_inner=0.;
        point3D.project(cMo_);
        vpMeterPixelConversion::convertPoint(cam_,point3D.get_x(),point3D.get_y(),_u,_v);
        vpMeterPixelConversion::convertPoint(cam_,points3D_inner_[p].get_x(),points3D_inner_[p].get_y(),_u_inner,_v_inner);

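        // Approximate median of the pixel intensities around this checkpoint, estimated
        // online with the P-square quantile algorithm so the samples need not be stored.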
        boost::accumulators::accumulator_set<
          unsigned char,
          boost::accumulators::stats<
            boost::accumulators::tag::median(boost::accumulators::with_p_square_quantile)
          >
        > acc;

        int region_width = std::max((int)(std::abs(_u-_u_inner)*cmd.get_adhoc_recovery_size()),1);
        int region_height = std::max((int)(std::abs(_v-_v_inner)*cmd.get_adhoc_recovery_size()),1);
        int u=(int)_u;
        int v=(int)_v;
        for(int i=std::max(u-region_width,0);
            i<std::min(u+region_width,(int)evt.I.getWidth());
            i++){
          for(int j=std::max(v-region_height,0);
              j<std::min(v+region_height,(int)evt.I.getHeight());
              j++){
            acc(Igray_[j][i]);
            statistics.checkpoints(Igray_[j][i]);
          }
        }
        double checkpoints_median = boost::accumulators::median(acc);
        if(cmd.using_var_file() && cmd.log_checkpoints())
          writer.write(checkpoints_median);
        if( cmd.using_adhoc_recovery() && (unsigned int)checkpoints_median>cmd.get_adhoc_recovery_treshold() )
          return false;
      }
    }
  }catch(vpException& e){
    std::cout << "Tracking lost" << std::endl;
    return false;
  }
  return true;
}

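// Action run after a successful tracking step: project the model contour corners with
// the new pose, update the OpenCV/ViSP bounding boxes used for re-detection and, if
// enabled, adapt the moving-edge search range to the apparent border width.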
void Tracker_:: track_model(input_ready const& evt){
  this->cam_ = evt.cam_;

  std::vector<cv::Point> points;
  I_ = _I = &(evt.I);
  vpImageConvert::convert(evt.I,Igray_);

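  // Mean absolute pixel offset between outer and inner contour corners: a rough measure
  // of the visible border width, used below to scale the moving-edge search range.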
  boost::accumulators::accumulator_set<
    double,
    boost::accumulators::stats<
      boost::accumulators::tag::mean
    >
  > acc;

  for(unsigned int i=0;i<points3D_outer_.size();i++){
    points3D_outer_[i].project(cMo_);
    points3D_inner_[i].project(cMo_);

    double u=0.,v=0.,u_inner=0.,v_inner=0.;
    vpMeterPixelConversion::convertPoint(cam_,points3D_outer_[i].get_x(),points3D_outer_[i].get_y(),u,v);
    vpMeterPixelConversion::convertPoint(cam_,points3D_inner_[i].get_x(),points3D_inner_[i].get_y(),u_inner,v_inner);
    points.push_back(cv::Point(u,v));

    acc(std::abs(u-u_inner));
    acc(std::abs(v-v_inner));
  }

  if(cmd.using_mbt_dynamic_range()){
    int range = (int)(boost::accumulators::mean(acc)*cmd.get_mbt_dynamic_range());

    vpMbEdgeTracker *tracker_me = dynamic_cast<vpMbEdgeTracker*>(tracker_);
    if(tracker_me){
      tracker_me->getMovingEdge(tracker_me_config_);
      tracker_me_config_.setRange(range);
      tracker_me->setMovingEdge(tracker_me_config_);
    }else
      std::cout << "error: could not init moving edges on tracker that doesn't support them." << std::endl;
  }

  cvTrackingBox_init_ = true;
  cvTrackingBox_ = cv::boundingRect(cv::Mat(points));
  int s_x = cvTrackingBox_.x,
      s_y = cvTrackingBox_.y,
      d_x = cvTrackingBox_.x + cvTrackingBox_.width,
      d_y = cvTrackingBox_.y + cvTrackingBox_.height;
  s_x = std::max(s_x,0);
  s_y = std::max(s_y,0);
  d_x = std::min(d_x,(int)evt.I.getWidth());
  d_y = std::min(d_y,(int)evt.I.getHeight());
  cvTrackingBox_.x = s_x;
  cvTrackingBox_.y = s_y;
  cvTrackingBox_.width = d_x - s_x;
  cvTrackingBox_.height = d_y - s_y;
  vpTrackingBox_.setRect(cvTrackingBox_.x,cvTrackingBox_.y,cvTrackingBox_.width,cvTrackingBox_.height);
}

Tracker_::statistics_t& Tracker_:: get_statistics(){
  return statistics;
}

void Tracker_:: set_flush_display(bool val){
  flush_display_ = val;
}

bool Tracker_:: get_flush_display(){
  return flush_display_;
}

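// Copy the moving-edge sites of every visible distance line into the ROS message
// (published elsewhere for visualisation); a pure KLT tracker has no distance lines.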
void
Tracker_::updateMovingEdgeSites(visp_tracker::MovingEdgeSitesPtr sites)
{
  if (!sites)
    return;

  std::list<vpMbtDistanceLine*> linesList;

  if(cmd.get_tracker_type() != CmdLine::KLT)
    dynamic_cast<vpMbEdgeTracker*>(tracker_)->getLline(linesList, 0);

  std::list<vpMbtDistanceLine*>::iterator linesIterator = linesList.begin();
  if (linesList.empty())
    ROS_DEBUG_THROTTLE(10, "no distance lines");

  bool noVisibleLine = true;
  for (; linesIterator != linesList.end(); ++linesIterator)
  {
    vpMbtDistanceLine* line = *linesIterator;

    if (line && line->isVisible() && line->meline)
    {
      if (line->meline->list.empty()) {
        ROS_DEBUG_THROTTLE(10, "no moving edge for a line");
      }

      std::list<vpMeSite>::const_iterator sitesIterator = line->meline->list.begin();

      for (; sitesIterator != line->meline->list.end(); ++sitesIterator)
      {
        visp_tracker::MovingEdgeSite movingEdgeSite;
        movingEdgeSite.x = sitesIterator->ifloat;
        movingEdgeSite.y = sitesIterator->jfloat;
        movingEdgeSite.suppress = sitesIterator->suppress;
        sites->moving_edge_sites.push_back (movingEdgeSite);
      }
      noVisibleLine = false;
    }
  }
  if (noVisibleLine)
    ROS_DEBUG_THROTTLE(10, "no visible distance line");
}

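// Copy the KLT features currently tracked on each face into the ROS message; only
// meaningful for the KLT and hybrid trackers, so the edge-only tracker is skipped.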
void
Tracker_::updateKltPoints(visp_tracker::KltPointsPtr klt)
{
  if (!klt)
    return;

  // Guard against the edge-only tracker: without this check the face-list pointer
  // below would stay uninitialised and be dereferenced.
  if(cmd.get_tracker_type() == CmdLine::MBT)
    return;

  vpMbHiddenFaces<vpMbtKltPolygon> *poly_lst = &dynamic_cast<vpMbKltTracker*>(tracker_)->getFaces();
  std::map<int, vpImagePoint> *map_klt;

  for(unsigned int i = 0 ; i < poly_lst->size() ; i++)
  {
    if((*poly_lst)[i])
    {
      map_klt = &((*poly_lst)[i]->getCurrentPoints());

      if(map_klt->size() > 3)
      {
        for (std::map<int, vpImagePoint>::iterator it=map_klt->begin(); it!=map_klt->end(); ++it)
        {
          visp_tracker::KltPoint kltPoint;
          kltPoint.id = it->first;
          kltPoint.i = it->second.get_i();
          kltPoint.j = it->second.get_j();
          klt->klt_points_positions.push_back (kltPoint);
        }
      }
    }
  }
}
} // namespace tracking