video_tracker.cpp
/*
 * video_tracker.cpp
 *
 *  Created on: Oct 17, 2010
 *      Author: ethan
 */

#include "pano_core/ImageAtom.h"
#include "pano_core/feature_utils.h"
#include "pano_core/MoleculeProcessor.h"
#include "pano_core/CaptureEngine.h"
#include "pano_core/ModelFitter.h"
#include "pano_core/Blender.h"
#include "pano_core/panoutils.h"

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/video/tracking.hpp>
#include <sstream>
#include <iostream>
#include <list>

using namespace std;
using namespace cv;
using namespace pano;

//void onMatchCB(const AtomPair& pair)
//{
//  static Mat matches_img;
//  pair.atom2()->images().src().copyTo(matches_img);
//  if (!pair.result().success())
//    bitwise_not(matches_img, matches_img);
//  drawMatchesRelative(pair.atom1()->features(), pair.atom2()->features(), pair.matches(), matches_img,
//                      pair.result().inlier_mask());
//  imshow("prior matches", matches_img);
//}
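// Draws the sparse-flow motion on top of the full-resolution frame: a line from each
// point's previous (train) position to its current (query) position and a small circle
// at the current position. Entries whose mask value is zero are skipped.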
void drawPointsRelative(const vector<Point2f>& train, const vector<Point2f>& query, Mat& img,
                        const vector<unsigned char>& mask = vector<unsigned char> ())
{
  for (size_t i = 0; i < train.size(); i++)
  {
    if (mask.empty() || mask[i])
    {
      Point2f pt_new = query[i];
      Point2f pt_old = train[i];

      cv::line(img, pt_new, pt_old, Scalar(255, 255, 255), 1);
      cv::circle(img, pt_new, 2, Scalar(255, 255, 255), 1);
    }
  }
}
int main(int ac, char ** av)
{
  if (ac != 4)
  {
    cout << "usage: " << av[0] << " camera.yml <video device number> outputdir" << endl;
    return 1;
  }

  string output_dir = av[3];

  Camera camera;
  camera.setCameraIntrinsics(av[1]);

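  // Feature pipeline: 32-byte BRIEF descriptors plus a FAST detector wrapped in a dynamic
  // adapter that retunes its threshold to keep roughly 20-30 keypoints per frame.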
  BriefDescriptorExtractor brief(32);

  Ptr<FeatureDetector> detector(new DynamicAdaptedFeatureDetector(new FastAdjuster(), 20, 30, 1)); // (new GriddedDynamicDetectorAdaptor(20, 3, 2, 2, FastAdjuster()));

  VideoCapture capture;
  capture.open(atoi(av[2]));
  if (!capture.isOpened())
  {
    cout << "capture device failed to open!" << endl;
    return 1;
  }

  cout << "The following keys control the capture:" << endl;
  cout << "l : start capturing the live preview ##DO THIS TO START##" << endl << endl;
  cout << "r : reset the pano" << endl;
  cout << "i/I : lower/raise inlier thresh" << endl;
  cout << "e/E : lower/raise error thresh" << endl;
  cout << "m/M : lower/raise max iterations" << endl;
  cout << "s : serialize the pano data" << endl;
  cout << "c : record the pano capture session to an avi" << endl;
  cout << "b : blend the pano" << endl;
  cout << "q : quit" << endl;

  Mat frame;

  bool ref_live = false;

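  // Parameters for the SVD-based rotation fitter: error threshold, inlier-count threshold,
  // maximum iterations, and the minimum number of correspondences needed for a fit.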
  SVDRSolverParams params;
  params.error_thresh = 5;
  params.inliers_thresh = 5;
  params.maxiters = 100;
  params.nNeeded = 2;

  Ptr<SVDFitter> svdfitter(new SVDFitter(params));

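  // View the SVDFitter handle through the ModelFitter interface; the reinterpret_cast works
  // around the lack of an upcasting conversion in the legacy cv::Ptr<> used here.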
  Ptr<ModelFitter> fitter(reinterpret_cast<const Ptr<ModelFitter>&> (svdfitter));

  Mat outimage(Size(1000, 1000), CV_8UC3);

  BlenderAlpha blender;

  Mat glob_out = Mat::zeros(Size(2000, 1000), CV_8UC3);
  Mat blended_out = Mat::zeros(Size(1200, 600), CV_8UC3);

  namedWindow("blended", CV_WINDOW_KEEPRATIO);

  VideoWriter video_writer;

  CaptureEngine capture_engine(fitter, detector, camera, output_dir);

  // the glob stores the pano graph - think of it as the map
  MoleculeGlob& glob = capture_engine.glob();

  BlurDetector blur_detector;

  int f_count = 0;
  double total_t = 0;
  float scale(1);

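  // Tracking state: 'atom' holds the current frame's estimated pose (extrinsics) and 'prior'
  // the previous frame's; previousPoints/currentPoints hold the optical-flow correspondences
  // between the downscaled previous and current images.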
  Camera ncamera(av[1]);
  // camera.scale(scale, scale);
  ImageAtom atom(camera, Images());
  atom.extrinsics() = Extrinsics(Mat::eye(3, 3, CV_32F), 10);
  atom.extrinsics().flag(Extrinsics::ESTIMATED) = true;
  ImageAtom prior;
  MatchesVector matches;
  vector<Point2f> currentPoints, previousPoints;
  vector<uchar> status;
  Mat ftrs;
  vector<float> err;
  Mat prev_small, curr_small;
  namedWindow("frame", CV_WINDOW_KEEPRATIO);
  namedWindow("sm_frame", CV_WINDOW_KEEPRATIO);
  ImageAtom batom(ncamera, Images());
  KeypointVector keypoints;
  for (;;)
  {
    capture >> frame;
    if (frame.empty())
      continue;
    double t = getTickCount();
    // atom.images().load(frame, "", ".");
    resize(frame, curr_small, Size(), scale, scale, CV_INTER_AREA);
    //    cv::Mat frame_;
    //    sharpen_backwards_heat_equation(curr_small.clone(), frame_, 0.5);
    //    frame_.convertTo(curr_small, curr_small.type());

    imshow("sm_frame", curr_small);
    detector->detect(curr_small, keypoints);

    // drawKeypoints(atom.images().grey(), atom.features().kpts(), ftrs, Scalar::all(-1));
    // imshow("ftrs", ftrs);
    f_count++;
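    // Once live tracking is enabled ('l'), flow the previous frame's keypoints into the
    // current frame and fit a rotation between the two views.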
    if (ref_live)
    {
      if (previousPoints.size())
      {
        calcOpticalFlowPyrLK(prev_small, curr_small, previousPoints, currentPoints, status, err,
                             Size(20, 20), 5, cv::TermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 2, 1), 0);
        //  prior.match(atom, matches, Mat(), -1);

        // float merr = mean(Mat(err))[0];
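        // Drop correspondences the flow failed to track (status == 0) by swapping each bad
        // entry with the back of the vectors and popping it off.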
        for (size_t i = 0; i < status.size(); i++)
        {
          if (!status[i])
          {
            swap(status[i], status.back());
            swap(previousPoints[i], previousPoints.back());
            swap(currentPoints[i], currentPoints.back());
            status.pop_back();
            previousPoints.pop_back();
            currentPoints.pop_back();
            i--;
          }
        }

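        // Scale the tracked points back up to full-frame coordinates; the Mat headers share
        // memory with the vectors, so the multiplication updates them in place.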
        Mat mpts(previousPoints);
        Mat mptsq(currentPoints);
        mpts *= 1 / scale;
        mptsq *= 1 / scale;
        drawPointsRelative(previousPoints, currentPoints, frame);
        imshow("frame", frame);

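        // Fit the rotation model to the tracked correspondences; on success, chain the new
        // extrinsics off the prior pose, otherwise mark this frame's pose as not estimated.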
        AtomPair pair(prior.ptrToSelf(), atom.ptrToSelf(), previousPoints, currentPoints);
        fitter->fit(pair);
        if (pair.result().success())
        {
          atom.extrinsics() = pair.generateExtrinsics(atom.ptrToSelf());
          // cout << "tracking good" << endl;
        }
        else
        {
          cout << "missed frame" << endl;
          atom.extrinsics().flag(Extrinsics::ESTIMATED) = false;
        }
      }
      // blender.blendIncremental(atom, glob_out);
      // imshow("blended", glob_out);

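      // Seed the next iteration: freshly detected keypoints become the points to track,
      // the current small frame becomes the previous one, and the current atom becomes the prior.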
      KeyPointsToPoints(keypoints, previousPoints);
      swap(prev_small, curr_small);
      prior = atom;
    }

    // frame timing
    total_t += ((double)getTickCount() - t) / getTickFrequency();

    if (f_count % 100 == 0)
    {
      cout << "estimated W " << atom.extrinsics().mat(Extrinsics::W) << endl;
      cout << "total tracking time per frame: " << total_t * 1000.0f / f_count << " milliseconds, averaged over "
          << f_count << " frames" << endl;
      total_t = 0;
      f_count = 0;
    }

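    // Keyboard handling: adjust the fitter thresholds, reset or serialize the pano, start
    // recording, or quit. See the help text printed at startup.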
    char key = 0xFF & waitKey(2);

    switch (key)
    {
      case 'c':
        video_writer.open(output_dir + "/video_capture.avi", CV_FOURCC('H', 'F', 'Y', 'U')/*codec*/, 30/*fps*/,
                          glob_out.size(), true/*color*/);
        break;
      case 'C':
        // video
        break;
      case 'l':
        ref_live = true;
        break;
      case 'b':
        cout << "blending...";
        cout.flush();

        {
          static int c = 0;
          stringstream ss;
          ss << output_dir + "/blended" << c++ << ".jpg";
          imwrite(ss.str(), glob_out);
        }
        // blender.BlendMolecule(*glob.getBiggestMolecule(), blended_out);

        cout << " done blending. look for the blended*.jpg files in " << output_dir << endl;
        break;
      case 'r':
        cout << "resetting pano" << endl;
        // glob.reset();
        prior.extrinsics().mat(Extrinsics::R) = Mat_<float>::eye(3, 3);
        glob_out = Scalar::all(0);
        break;
      case 'e':
        params.error_thresh -= 0.2;
        cout << "new error_thresh " << params.error_thresh << endl;
        break;
      case 'E':
        params.error_thresh += 0.2;
        cout << "new error_thresh " << params.error_thresh << endl;
        break;
      case 'i':
        params.inliers_thresh--;
        cout << "new inliers_thresh " << params.inliers_thresh << endl;
        break;
      case 'I':
        params.inliers_thresh++;
        cout << "new inliers_thresh " << params.inliers_thresh << endl;
        break;
      case 'm':
        params.maxiters--;
        cout << "new maxiters " << params.maxiters << endl;
        break;
      case 'M':
        params.maxiters++;
        cout << "new maxiters " << params.maxiters << endl;
        break;
      case 'g':
        // dont_check_blur = false;
        break;
      case 'G':
        // dont_check_blur = true;
        break;
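      // 's' writes the current pano graph (the glob) to a compressed YAML file in the
      // output directory.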
      case 's':
      {
        FileStorage fs(output_dir + "/glob.yml.gz", FileStorage::WRITE);
        fs << "glob";
        glob.serialize(fs);
        fs.release();
      }
        break;
      case 27:
      case 'q':
        return 0;
      default:
        break;
    }

    // update the fitter with any params that changed
    (*svdfitter) = SVDFitter(params);
  }
  return 0;
}

