flow.cpp
Go to the documentation of this file.
00001 
00005 #include "flow.hpp"
00006 
00007 int main(int argc, char** argv) {
00008         
00009         ROS_INFO("Node launched.");
00010                 
00011         ros::init(argc, argv, "flow");
00012         
00013         ros::NodeHandle private_node_handle("~");
00014         
00015         trackerData startupData;
00016                 
00017         bool inputIsValid = startupData.obtainStartingData(private_node_handle);
00018         
00019         startupData.read_addr = argv[0];
00020         startupData.read_addr = startupData.read_addr.substr(0, startupData.read_addr.size()-8);
00021         
00022         if (!inputIsValid) {
00023                 ROS_ERROR("Configuration invalid.");
00024                 return -1;
00025         }
00026                 
00027         ROS_INFO("Startup data processed.");
00028         
00029         ros::NodeHandle nh;
00030         
00031         boost::shared_ptr < featureTrackerNode > tracker_node (new featureTrackerNode (nh, startupData));
00032         
00033         globalNodePtr = &tracker_node;
00034         
00035         signal(SIGINT, mySigintHandler);
00036         
00037         ROS_INFO("Node configured.");
00038         
00039         while (!::wantsToShutdown) {
00040                 ros::spinOnce();
00041         }
00042         
00043         
00044         return 0;
00045         
00046 }
00047 
/// SIGINT handler: flags the main spin loop to exit and asks the tracker
/// node to shut its feeds down.
void mySigintHandler(int sig) {
	// Seen by the while-loop in main(), which then stops spinning.
	wantsToShutdown = true;
	ROS_WARN("Requested shutdown... terminating feeds...");
	
	// NOTE(review): ROS_WARN and prepareForTermination() are not
	// async-signal-safe, so this relies on the handler not interrupting
	// another non-reentrant call — confirm this is acceptable. Assumes
	// globalNodePtr was assigned in main() before the handler was installed.
	(*globalNodePtr)->prepareForTermination();
}
00054 
00055 bool trackerData::obtainStartingData(ros::NodeHandle& nh) {
00056         
00057         numDetectors = 0;
00058         
00059         nh.param<string>("outputFolder", outputFolder, "outputFolder");
00060         
00061         nh.param<std::string>("topic", topic, "topic");
00062         
00063         if (topic != "topic") {
00064                 ROS_INFO("<topic> (%s) selected.", topic.c_str());
00065         } else {
00066                 topic = "/thermalvis/streamer/image_mono";
00067                 ROS_WARN("No <topic> supplied; assuming (%s)", topic.c_str());
00068                 //return false;
00069         }
00070         
00071         topicParent = topic.substr(0, topic.find_last_of("/"));
00072         
00073         ROS_INFO("topicParent = (%s)", topicParent.c_str());
00074         
00075         nh.param<string>("tracksOutputTopic", tracksOutputTopic, "tracksOutputTopic");
00076         
00077         if (tracksOutputTopic != "tracksOutputTopic") {
00078                 ROS_INFO("<tracksOutputTopic> (%s) selected.", tracksOutputTopic.c_str());
00079         } else {
00080                 ROS_WARN("No <tracksOutputTopic> supplied; will publish <tracks> at <%s>", topicParent.c_str());
00081                 //return false;
00082         }
00083         
00084         //nh.param<int>("maxFeatures", maxFeatures, 100);
00085         
00086         
00087 
00088         nh.param<bool>("outputFeatureMotion", outputFeatureMotion, false);
00089         nh.param<bool>("normalizeFeatureVelocities", normalizeFeatureVelocities, true);
00090 
00091         
00092         //nh.param<bool>("debug_mode", debugMode, false);
00093         
00094         nh.param<bool>("outputTrackCount", outputTrackCount, false);
00095         
00096         //HGH/*
00097         
00098         for (int iii = 0; iii < MAX_DETECTORS; iii++) {
00099                 
00100                 char detector_tag[256], sensitivity_tag[256], method_tag[256], descriptor_tag[256];
00101                 
00102                 sprintf(detector_tag, "detector_%d", iii + 1);
00103                 sprintf(sensitivity_tag, "sensitivity_%d", iii + 1);
00104                 sprintf(method_tag, "method_%d", iii + 1);
00105                 sprintf(descriptor_tag, "descriptor_%d", iii + 1);
00106                 
00107                 //printf("%s << detector_tag = %s\n", __FUNCTION__, detector_tag);
00108                 
00109                 
00110                 if (iii == 0) {
00111                         nh.param<std::string>(detector_tag, detector[iii], "GFTT");
00112                 } else {
00113                         nh.param<std::string>(detector_tag, detector[iii], "NONE");
00114                 }
00115                 
00116                 //printf("%s << detector[%d] = %s\n", __FUNCTION__, iii, detector[iii].c_str());
00117                 
00118                 if ((detector[iii] == "SURF") || (detector[iii] == "FAST") || (detector[iii] == "GFTT") || (detector[iii] == "HARRIS") || (detector[iii] == "ORB") || (detector[iii] == "STAR")) {
00119                         ROS_INFO("detector [%d]: %s", iii, detector[iii].c_str());
00120                 } else if (detector[iii] == "NONE"){
00121                         // printf("%s << No [%d] detector requested.\n", __FUNCTION__, iii);
00122                         break;
00123                 } else {
00124                         ROS_INFO("ERROR! Detector (%s) not recognized.", detector[iii].c_str());
00125                         return false;
00126                 }
00127                 
00128                 nh.param<double>(sensitivity_tag, sensitivity[iii], 0.2);
00129                 
00130                 ROS_INFO("detector [%d] sensitivity: %f", iii, sensitivity[iii]);
00131                 
00132                 nh.param<std::string>(method_tag, method[iii], "match");
00133                 
00134                 if ((method[iii] == "match") || (method[iii] == "track")) {
00135                         ROS_INFO("detector [%d] matching method: %s", iii, method[iii].c_str());
00136                         if (method[iii] == "match") {
00137                                 method_match[iii] = true;
00138                         } else {
00139                                 method_match[iii] = false;
00140                         }
00141                 } else {
00142                         ROS_INFO("ERROR! detector [%d] matching method (%s) not recognized.", iii, method[iii].c_str());
00143                         return false;
00144                 }
00145                 
00146                 nh.param<std::string>(descriptor_tag, descriptor[iii], "BRIEF");
00147                 
00148                 if (method_match[iii]) {
00149                         if ((descriptor[iii] == "SURF") || (descriptor[iii] == "ORB") || (descriptor[iii] == "BRIEF") || (descriptor[iii] == "SIFT")) {
00150                                 ROS_INFO("descriptor [%d]: %s", iii, descriptor[iii].c_str());
00151                         } else {
00152                                 ROS_INFO("ERROR! descriptor [%d] (%s) not recognized.", iii, descriptor[iii].c_str());
00153                                 return false;
00154                         }
00155                 }
00156                 
00157                 numDetectors++;
00158         }
00159         
00160         
00161         
00162         ROS_INFO("[%d] detectors to be implemented.", numDetectors);
00163         
00164         //*/
00165         
00166         return true;
00167 }
00168 
/// Allocates the descriptor extractors used for matching (desc) and for
/// homography estimation (hom). Both are BRIEF extractors.
/// The cv::Ptr assignment takes ownership of the raw allocation, so no
/// explicit delete is required.
void trackerData::initializeDescriptors(cv::Ptr<cv::DescriptorExtractor> *desc, cv::Ptr<cv::DescriptorExtractor> *hom) {
	
	desc[0] = new cv::BriefDescriptorExtractor( );
	// desc[0] = new cv::OrbDescriptorExtractor( );
	
	hom[0] = new cv::BriefDescriptorExtractor( );
	
}
00177 
00178 int featureTrackerNode::publish_tracks(ros::Publisher *pub_message, unsigned int latestIndex) {
00179         
00180         averageTrackLength = 0.0;
00181         
00182         int publishedTrackCount = 0;
00183         
00184         thermalvis::feature_tracks msg;
00185 
00186         msg.source = configData.topic;
00187         
00188         if (configData.autoTrackManagement) { trimFeatureTrackVector(); }
00189         
00190         vector<unsigned int> tracksToInclude;
00191         for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
00192                 
00193                 //if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex == (frameCount-1)) {
00194                 //if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex == ((unsigned int) currentIndex)) {
00195 
00196                         tracksToInclude.push_back(iii);
00197                         publishedTrackCount++;
00198                         averageTrackLength += featureTrackVector.at(iii).locations.size();
00199                 //}
00200                 
00201         }
00202         
00203         averageTrackLength /= ((double) publishedTrackCount);
00204         
00205         //ROS_INFO("Publishing tracks: featureTrackVector.size() = (%d), averageTrackLength = (%f)", featureTrackVector.size(), averageTrackLength);
00206         
00207         unsigned int projCount = 0;
00208         for (unsigned int iii = 0; iii < tracksToInclude.size(); iii++) {
00209                 projCount += featureTrackVector.at(tracksToInclude.at(iii)).locations.size();
00210         }
00211         
00212         //ROS_WARN("Publishing (%d) projections from (%d) tracks", projCount, tracksToInclude.size());
00213         
00214         msg.projection_count = projCount;
00215 
00216         
00217         cv::Mat trackMatrix;
00218         
00219         if (configData.showTrackHistory) {
00220                 if (createTrackMatrix(featureTrackVector, trackMatrix)) {
00221                     //HGH
00222                     //imshow("trackMatrix", trackMatrix);
00223                     imshow("trackMatrix : " + configData.topic, trackMatrix);
00224                         cv::waitKey(1);
00225                 }               
00226         }
00227         
00228         
00229         vector<unsigned int> currentIndices;
00230         vector<cv::Point2f> currentProjections;
00231         
00232         msg.cameras.clear();
00233         msg.indices.clear();
00234         msg.projections_x.clear();
00235         msg.projections_y.clear();
00236         
00237         for (unsigned int iii = 0; iii < tracksToInclude.size(); iii++) {
00238                 
00239                 //unsigned int startIndex = featureTrackVector.at(tracksToInclude.at(iii)).locations.size() - std::min(configData.maxProjectionsPerFeature, ((int)featureTrackVector.at(tracksToInclude.at(iii)).locations.size()));
00240                 
00241                 //for (unsigned int jjj = startIndex; jjj < featureTrackVector.at(tracksToInclude.at(iii)).locations.size(); jjj++) {
00242                 for (unsigned int jjj = 0; jjj < featureTrackVector.at(tracksToInclude.at(iii)).locations.size(); jjj++) {
00243                         
00244                         //msg.indices.push_back(tracksToInclude.at(iii));
00245                         msg.indices.push_back(featureTrackVector.at(tracksToInclude.at(iii)).trackIndex);
00246                         msg.cameras.push_back(featureTrackVector.at(tracksToInclude.at(iii)).locations.at(jjj).imageIndex);
00247                         
00248                         if (featureTrackVector.at(tracksToInclude.at(iii)).locations.at(jjj).imageIndex == 0) {
00249                                 //ROS_ERROR("Publishing an invalid track! [%d] (%d, %d) - why does it exist??", iii, tracksToInclude.at(iii), jjj);
00250                         }
00251                         
00252                         msg.projections_x.push_back(((double) featureTrackVector.at(tracksToInclude.at(iii)).locations.at(jjj).featureCoord.x));
00253                         msg.projections_y.push_back(((double) featureTrackVector.at(tracksToInclude.at(iii)).locations.at(jjj).featureCoord.y));
00254                 }
00255                 
00256         }
00257         
00258         // Assign header info
00259         
00260         previousIndex = currentIndex;
00261         msg.header.seq = currentIndex;
00262         previous_time = original_time;
00263         msg.header.stamp = original_time;
00264         msg.header.frame_id = optical_frame;
00265         pub_message->publish(msg);
00266         
00267         return publishedTrackCount;
00268 }
00269 
00270 void featureTrackerNode::handle_camera(const sensor_msgs::ImageConstPtr& msg_ptr, const sensor_msgs::CameraInfoConstPtr& info_msg) {
00271                 
00272         while (!infoProcessed) { 
00273                 process_info(info_msg);
00274         }
00275         
00276         if (readyFrame < frameCount) {
00277                 // Want to skip if you're still processing previous frame
00278                 return;
00279         }
00280         
00281         original_time = info_msg->header.stamp;
00282         currentIndex = info_msg->header.seq;
00283         
00284         //ROS_ERROR("%f vs %f", (original_time.toSec() - lastCycleFrameTime.toSec()), configData.newFeaturesPeriod);
00285         if (((original_time.toSec() - lastCycleFrameTime.toSec()) > configData.newFeaturesPeriod) && (configData.newFeaturesPeriod != 0.0) ) {
00286                 lastCycleFrameTime = original_time;
00287                 
00288                 //ROS_ERROR("Here!");
00289                 cycleCount++;
00290                 
00291                 cycleFlag = true;
00292                 
00293         }
00294         
00295         if (lastCycleFrameTime.toSec() > original_time.toSec()) {
00296                 lastCycleFrameTime = original_time;
00297         }
00298         
00299         if (currentIndex < previousIndex) {
00300                 ROS_WARN("Current received image index is lower than previous, assuming watching a looped video. (%d) vs (%d) : (%d)", previousIndex, currentIndex, frameCount);
00301                 featuresVelocity = 9e99;
00302                 capturedFrameCount++;
00303         } else if (currentIndex > (previousIndex+1)) {
00304                 
00305                 if (!undergoingDelay) { 
00306                         skippedFrameCount++;
00307                 }
00308                 featuresVelocity = 9e99;
00309         } else {
00310                 capturedFrameCount++;
00311         }
00312         
00313         if ((frameCount > 0) && (!undergoingDelay)) {
00314                 if (info_msg->binning_y == 1) {
00315                         ROS_WARN("Current frame is a duplicate, going into NUC-handling routine...");
00316                         handle_delay();
00317                         featuresVelocity = 9e99;
00318                         return;
00319                 }       
00320         } 
00321         
00322         if (info_msg->binning_y == 1) {
00323                 return;
00324         }
00325 
00326         debug_camera_info = (*info_msg);
00327         //ROS_ERROR("debug camera seq = (%d) vs (%d)", debug_camera_info.header.seq, info_msg->header.seq);
00328         cv_ptr = cv_bridge::toCvCopy(msg_ptr, enc::BGR8);                                       // For some reason it reads as BGR, not gray
00329         msg_debug.header.stamp = info_msg->header.stamp;
00330         msg_debug.header.seq = info_msg->header.seq;
00331         
00332         act_on_image();
00333         
00334 }
00335 
00336 void featureTrackerNode::process_info(const sensor_msgs::CameraInfoConstPtr& info_msg) {
00337         
00338         try     {
00339 
00340                 // Read in camera matrix
00341                 configData.cameraData.K = cv::Mat::eye(3, 3, CV_64FC1);
00342                 
00343                 for (unsigned int mmm = 0; mmm < 3; mmm++) {
00344                                 for (unsigned int nnn = 0; nnn < 3; nnn++) {
00345                                                 configData.cameraData.K.at<double>(mmm, nnn) = info_msg->K[3*mmm + nnn];
00346                                 }
00347                 }
00348                 
00349                 configData.cameraData.cameraSize.width = info_msg->width;
00350                 configData.cameraData.cameraSize.height = info_msg->height;
00351                 
00352                 //ROS_INFO("Distance constraint calculated as (%d)", distanceConstraint);
00353                 
00354                 //ROS_INFO("(%d, %d)\n", configData.cameraData.cameraSize.width, configData.cameraData.cameraSize.height);
00355                 
00356                 unsigned int maxDistortionIndex;
00357                 if (info_msg->distortion_model == "plumb_bob") {
00358                         maxDistortionIndex = 5;
00359                 } else if (info_msg->distortion_model == "rational_polynomial") {
00360                         maxDistortionIndex = 8;
00361                 } else {
00362                         maxDistortionIndex = 5;
00363                 }
00364                 
00365                 configData.cameraData.distCoeffs = cv::Mat::zeros(1, maxDistortionIndex, CV_64FC1);
00366                 
00367                 for (unsigned int iii = 0; iii < maxDistortionIndex; iii++) {
00368                                 configData.cameraData.distCoeffs.at<double>(0, iii) = info_msg->D[iii];
00369                 }
00370                 
00371                 cout << configData.cameraData.distCoeffs << endl;
00372                 
00373                 configData.cameraData.newCamMat = cv::Mat::zeros(3, 3, CV_64FC1);
00374                 
00375                 cv::Rect validPixROI;;
00376                 
00377                 bool centerPrincipalPoint = true;
00378                 
00379                 configData.cameraData.newCamMat = getOptimalNewCameraMatrix(configData.cameraData.K, configData.cameraData.distCoeffs, configData.cameraData.cameraSize, DEFAULT_ALPHA, configData.cameraData.cameraSize, &validPixROI, centerPrincipalPoint);
00380                 
00381                 //double optimalRatio = validPixROI.width / validPixROI.height;
00382                 //double desiredRatio = configData.cameraData.cameraSize.width / configData.cameraData.cameraSize.height;
00383                 
00384                 double expansionFactor = std::min(((double) validPixROI.width) / ((double) configData.cameraData.cameraSize.width), ((double) validPixROI.height) / ((double) configData.cameraData.cameraSize.height));
00385                 
00386                 configData.cameraData.expandedSize = cv::Size(((double) configData.cameraData.cameraSize.width) / expansionFactor, ((double) configData.cameraData.cameraSize.height) / expansionFactor);
00387                 
00388                 //ROS_INFO("originalSize = (%d, %d)\n", configData.cameraData.cameraSize.width, configData.cameraData.cameraSize.height);
00389                 //ROS_INFO("correctedSize = (%d, %d)\n", configData.cameraData.expandedSize.width, configData.cameraData.expandedSize.height);
00390                 
00391                 configData.cameraData.K.copyTo(configData.cameraData.Kx);
00392                 
00393                 configData.cameraData.Kx.at<double>(0,0) /= expansionFactor;
00394                 configData.cameraData.Kx.at<double>(0,2) /= expansionFactor;
00395                 configData.cameraData.Kx.at<double>(1,1) /= expansionFactor;
00396                 configData.cameraData.Kx.at<double>(1,2) /= expansionFactor;
00397                 
00398                 configData.cameraData.expandedCamMat = getOptimalNewCameraMatrix(configData.cameraData.Kx, configData.cameraData.distCoeffs, configData.cameraData.expandedSize, DEFAULT_ALPHA, configData.cameraData.expandedSize, &validPixROI, centerPrincipalPoint);
00399                 
00400                 msg_debug.width = configData.cameraData.cameraSize.width; 
00401                 msg_debug.height = configData.cameraData.cameraSize.height;
00402                 msg_debug.encoding = "bgr8";
00403                 msg_debug.is_bigendian = false;
00404                 msg_debug.step = configData.cameraData.cameraSize.width*3;
00405                 msg_debug.data.resize(configData.cameraData.cameraSize.width*configData.cameraData.cameraSize.height*3);
00406                 
00407                 
00408                 ROS_INFO("Optical frame = (%s)", info_msg->header.frame_id.c_str());
00409                 
00410                 optical_frame = info_msg->header.frame_id;
00411                 
00412                 infoProcessed = true;
00413                 
00414                 ROS_INFO("Image info processed.");
00415                 
00416         } catch (...) {
00417                 ROS_ERROR("Some failure in reading in the camera parameters...");
00418         }
00419                 
00420 }
00421 
00422 
00423 
/// Converts the freshly-received image to grayscale, stores it into the
/// two-slot ring buffer (and, on the very first frame, seeds the history
/// buffers), and prepares the debug/display image when debugMode is on.
/// Relies on cv_ptr, original_time and currentIndex having been set by
/// handle_camera() before this is called.
void featureTrackerNode::act_on_image() {
	cv::Mat newImage(cv_ptr->image);
	
	// Only 8-bit input is supported; 8UC3 is converted, 8UC1 used as-is.
	if ((newImage.type() == CV_16UC3) || (newImage.type() == CV_16UC1)) {
		ROS_ERROR("This node should only receive 8-bit images..");
		return;
	} else if (newImage.type() == CV_8UC3) {
		cvtColor(newImage, grayImage, CV_RGB2GRAY);
	} else if (newImage.type() == CV_8UC1) {
		grayImage = newImage;
	}
	
	
	
	testTime = timeElapsedMS(test_timer, true);
	//ROS_WARN("Handle time: (%f)", testTime);
	
	testTime = timeElapsedMS(test_timer);
	//ROS_WARN("After equality test: (%f)", testTime);
	
	elapsedTime = timeElapsedMS(cycle_timer);
	
	// Two-slot ring buffer: slot alternates with frameCount parity.
	if (configData.verboseMode) { ROS_INFO("Updating buffer entry (%u from %u) with index (%u)", frameCount % 2, frameCount, currentIndex); }
	bufferIndices[frameCount % 2] = currentIndex;
	grayImage.copyTo(grayImageBuffer[frameCount % 2]);
	
	// First frame ever: seed every history slot with this image/time/index.
	if (frameCount == 0) {
		for (unsigned int ppp = 0; ppp < MAX_HISTORY_FRAMES; ppp++) {
			grayImageBuffer[0].copyTo(olderImages[ppp]);
			olderTimes[ppp] = original_time;
			olderIndices[ppp] = bufferIndices[0];
		}
	}
	
	//mappedImage = grayImage;
	//mappedImage.copyTo(mappedImageBuffer[(frameCount) % MAXIMUM_FRAMES_TO_STORE]);
	
	testTime = timeElapsedMS(test_timer);
	//ROS_WARN("After remapping: (%f)", testTime);
	
	// Now this is where you should prepare the image pair for tracking...
	
	
	
	cv::Mat tmpX, tmpY;
	
	testTime = timeElapsedMS(test_timer, true);
	//ROS_WARN("Historical prep time: (%f)", testTime);
		
	// Debug display: currently just an alias of the latest gray frame,
	// colorized for drawing; the normalization experiments remain disabled.
	if (configData.debugMode) {
		// downsampleToColor(grayImageBuffer[(frameCount-1) % MAXIMUM_FRAMES_TO_STORE], drawImage);
		//adaptiveDownsample(grayImageBuffer[(frameCount-1) % MAXIMUM_FRAMES_TO_STORE], dispMat, STANDARD_NORM_MODE);
		
		
		//double vals[2], percentiles[2], thresh = 0.005;
		//percentiles[0] = thresh;
		//percentiles[1] = 1 - thresh;
		
		//findPercentiles(grayImageBuffer[(frameCount) % MAXIMUM_FRAMES_TO_STORE], vals, percentiles, 2);
		
		//displayImage = Mat(grayImageBuffer[(frameCount) % MAXIMUM_FRAMES_TO_STORE].size(), CV_8UC1);
		
		//double v;
		
		//for (int iii = 0; iii < displayImage.rows; iii++) {
			//for (int jjj = 0; jjj < displayImage.cols; jjj++) {
				
				//v = (double) grayImageBuffer[(frameCount) % MAXIMUM_FRAMES_TO_STORE].at<unsigned char>(iii,jjj);
				
				//v -= vals[0];
				//v *= (255.0/(vals[1]-vals[0]));
				//v = max(0.0, v);
				//v = min(255.0, v);
				
				//displayImage.at<unsigned char>(iii,jjj) = (unsigned char) v;
				
			//}
		//}
		
		
		// NOTE(review): shallow assignment — displayImage shares data with
		// the buffer slot until something reallocates it.
		displayImage = grayImageBuffer[frameCount % 2];
		
		//normalize(grayImageBuffer[(frameCount) % MAXIMUM_FRAMES_TO_STORE], displayImage, -255*vals[0], 255 + 255*(vals[1]-255), NORM_MINMAX);
		//normalize(grayImageBuffer[(frameCount) % MAXIMUM_FRAMES_TO_STORE], displayImage, 0, 255, NORM_MINMAX);
		//straightCLAHE(grayImageBuffer[(frameCount) % MAXIMUM_FRAMES_TO_STORE], displayImage, 0.2);
		
		cvtColor(displayImage, drawImage, CV_GRAY2RGB);
		
		
		//displayImage.copyTo(drawImage);
	}
	
	frameCount++;

	if (undergoingDelay) {
		handle_very_new();		// Use this to get homography between images to guide continued tracking
	}
	
	testTime = timeElapsedMS(test_timer, true);
	//ROS_WARN("Detection time: (%f)", testTime);
}
00525 
00526 void featureTrackerNode::trimFeatureTrackVector() {
00527         
00528         activeTrackIndices.clear();
00529         
00530         vector<unsigned int> activeFrameIndices;
00531         
00532         activeFrameIndices.push_back(bufferIndices[0]);
00533         activeFrameIndices.push_back(bufferIndices[1]);
00534         
00535         for (unsigned int iii = 0; iii < numHistoryFrames; iii++) {
00536                 if ( (configData.multiplier[iii] != 0) && configData.attemptHistoricalRecovery) {
00537                         activeFrameIndices.push_back(olderIndices[iii]);
00538                         if (configData.verboseMode) { ROS_INFO("Trimming but retaining (%d)...", olderIndices[iii]); }
00539                 }
00540                 
00541         }
00542         
00543         for (unsigned int iii = 0; iii < activeFrameIndices.size(); iii++) {
00544                 if (configData.verboseMode) { ROS_INFO("activeFrameIndices(%d) = (%d)", iii, activeFrameIndices.at(iii)); }
00545         }
00546         
00547         
00548         
00549         //ROS_ERROR("activeFrameIndices.size() = (%d)", activeFrameIndices.size());
00550         
00551         removeObsoleteElements(featureTrackVector, activeFrameIndices);
00552                 
00553 }
00554 
00555 void featureTrackerNode::trimDisplayTrackVectors() {
00556         
00557         vector<unsigned int> activeFrameIndices;
00558 
00559         for (unsigned int iii = ((unsigned int) (max(0,currentIndex-configData.drawingHistory))); iii <= ((unsigned int) currentIndex); iii++) {
00560                 activeFrameIndices.push_back(iii);
00561         }
00562 
00563         removeObsoleteElements(displayTracks, activeFrameIndices);
00564                 
00565 }
00566 
00567 
00568 void featureTrackerNode::attemptTracking() {
00569         
00570         if (previousIndex < 0) { 
00571                 // ROS_ERROR("<%s> previousIndex = (%d)", __FUNCTION__, previousIndex); }
00572                 return;
00573         }
00574         
00575         cv::vector<cv::Point2f> startingPoints, originalFinishingPoints, finishingPoints;
00576         
00577         startingPoints.insert(startingPoints.end(), globalStartingPoints.begin(), globalStartingPoints.end());
00578 
00579         // unsigned int ptsBefore = globalStartingPoints.size();
00580         
00581         //ROS_INFO("Debug (%d)", 4);
00582         
00583         // =========================================================
00584         // ACTUAL TRACKING
00585         // =========================================================
00586         lostTrackIndices.clear();
00587         if (H12.rows != 0) {
00588                 elapsedTime = timeElapsedMS(cycle_timer, false);
00589                 
00590                 if (configData.verboseMode) { cout << "H12 = " << H12 << endl; }
00591                 
00592                 if (configData.verboseMode) { ROS_INFO("About to attempt to track points (%d, %d), using homography and constraint (%d) threshold (%f)", ((int)globalStartingPoints.size()), ((int)globalFinishingPoints.size()), distanceConstraint, configData.flowThreshold); }
00593                 
00594                 finishingPoints.insert(finishingPoints.end(), startingPoints.begin(), startingPoints.end());
00595                 
00596                 transformPoints(finishingPoints, H12);
00597                 originalFinishingPoints.insert(originalFinishingPoints.end(), finishingPoints.begin(), finishingPoints.end());
00598                         
00599                 while (0) {
00600                         
00601                         cv::Mat im1_col, im2_col;
00602                         
00603                         cvtColor(grayImageBuffer[(readyFrame-1) % 2], im1_col, CV_GRAY2RGB);
00604                         cvtColor(grayImageBuffer[readyFrame % 2], im2_col, CV_GRAY2RGB);
00605                         
00606                         
00607                         //warpPerspective(im1, im1b, H12, im1.size());
00608                         
00609                         cv::Scalar color_x = CV_RGB(255, 0, 0);
00610                         displayKeyPoints(im1_col, startingPoints, im1_col, color_x);
00611                         displayKeyPoints(im2_col, finishingPoints, im2_col, color_x);
00612                         
00613                         //imshow("temp_disp", im1);     // Current image
00614                         std::copy(&(im1_col.at<unsigned char>(0,0)), &(im1_col.at<unsigned char>(0,0))+(drawImage.cols*drawImage.rows*drawImage.channels()), msg_debug.data.begin());                           
00615                         debug_pub.publish(msg_debug, debug_camera_info);
00616                         cv::waitKey(500);
00617                         //imshow("temp_disp", im2);     // Previous image under transform
00618                         std::copy(&(im2_col.at<unsigned char>(0,0)), &(im2_col.at<unsigned char>(0,0))+(drawImage.cols*drawImage.rows*drawImage.channels()), msg_debug.data.begin());                           
00619                         debug_pub.publish(msg_debug, debug_camera_info);
00620                         cv::waitKey(500);
00621         
00622                 }
00623                 
00624                 cv::Mat H_;
00625                 trackPoints(grayImageBuffer[(readyFrame-1) % 2], grayImageBuffer[readyFrame % 2], startingPoints, finishingPoints, distanceConstraint, 0.0, lostTrackIndices, H_, configData.cameraData);
00626                 // trackPoints(grayImageBuffer[(readyFrame-1) % 2], grayImageBuffer[readyFrame % 2], startingPoints, finishingPoints, distanceConstraint, configData.flowThreshold, lostTrackIndices, H12, configData.cameraData);
00627                 
00628                 if (lostTrackIndices.size() > finishingPoints.size()) {
00629                         ROS_WARN("(%d) tracks lost and (%d) retained over interruption handling.", ((int)lostTrackIndices.size()), ((int)finishingPoints.size()));
00630                         
00631                 } else {
00632                         ROS_INFO("(%d) tracks lost and (%d) retained over interruption handling.", ((int)lostTrackIndices.size()), ((int)finishingPoints.size()));
00633                 }
00634                 
00635                 
00636         
00637                 /*
00638                 if ((globalFinishingPoints.size() > 10) && (lostTrackIndices.size() < 10)) {
00639                         ROS_ERROR("Freezing next output");
00640                         freezeNextOutput = true;
00641                 }
00642                 */
00643                 
00644                 //if (configData.verboseMode) { ROS_WARN("Tracking complete."); }
00645                 elapsedTime = timeElapsedMS(cycle_timer, false);
00646                 
00647         } else {
00648                 cv::Mat H_;
00649                 
00650                 if (configData.verboseMode) { ROS_INFO("About to attempt tracking of (%d) points (no homography for guidance)..", ((int)startingPoints.size())); }
00651                 
00652                 if (configData.velocityPrediction) {
00653                         // ROS_WARN("(%d) About to try and use existing feature velocity knowledge to initialize search locations...", globalFinishingPoints.size());
00654                         // create finishing points estimates...
00655                         //ROS_WARN("Estimating point locations : (%f)", original_time.toSec() - previous_time.toSec());
00656                         assignEstimatesBasedOnVelocities(featureTrackVector, startingPoints, finishingPoints, bufferIndices[(readyFrame-1) % 2], previous_time.toSec(), original_time.toSec());
00657                         
00658                         originalFinishingPoints.insert(originalFinishingPoints.end(), finishingPoints.begin(), finishingPoints.end());
00659                 } else {
00660                         finishingPoints.insert(finishingPoints.end(), startingPoints.begin(), startingPoints.end());
00661                         originalFinishingPoints.insert(originalFinishingPoints.end(), finishingPoints.begin(), finishingPoints.end());
00662                 }
00663                 
00664                 
00665                 
00666                 //if (configData.verboseMode) { ROS_WARN("About to attempt to track points in regular mode (without homography)..."); }
00667                 trackPoints(grayImageBuffer[(readyFrame-1) % 2], grayImageBuffer[readyFrame % 2], startingPoints, finishingPoints, distanceConstraint, configData.flowThreshold, lostTrackIndices, H_, configData.cameraData);
00668                 
00669         }
00670         
00671         /*
00672         for (unsigned int xxx = 0; xxx < startingPoints.size(); xxx++) {
00673                 cv::Point2f disp1, disp2, disp3;
00674                 
00675                 disp1.x = startingPoints.at(xxx).x - finishingPoints.at(xxx).x;
00676                 disp1.y = startingPoints.at(xxx).y - finishingPoints.at(xxx).y;
00677                 
00678                 double totalDisplacement = pow(pow(disp1.x,2.0)+pow(disp1.y,2.0),0.5);
00679                 
00680                 if (totalDisplacement > 100.0) {
00681                         ROS_WARN("Point displaced: (%f) : ", totalDisplacement);
00682                 }
00683 
00684                 
00685         }
00686         */
00687         
00688         if (configData.verboseMode) { ROS_INFO("trackPoints returned (%d) points.", ((int)finishingPoints.size())); }
00689         
00690         //ROS_INFO("Debug (%d)", 5);
00691         
00692         testTime = timeElapsedMS(test_timer, false);
00693         //ROS_WARN("2: (%f)", testTime);
00694         
00695         //ROS_INFO("(%d / %d) points successfully tracked. last time: (%d) tracks.", globalFinishingPoints.size(), ptsBefore, previouslyTrackedPoints);
00696         
00697         successfullyTrackedFeatures += globalFinishingPoints.size();
00698         
00699         lostTrackCount += lostTrackIndices.size();
00700         
00701         // Filter points that may be in high-noise region...
00702         //double distProp = 0.75;
00703         //filterTrackingsByRadialProportion(globalStartingPoints, globalFinishingPoints, configData.cameraData.Kx, configData.cameraData.expandedCamMat, configData.cameraData.distCoeffs, configData.cameraData.cameraSize, distProp);
00704         
00705         //printf("%s::%s << Points (%d:%d) after tracking: %d\n", __PROGRAM__, __FUNCTION__, current_idx, kkk, globalFinishingPoints.size());
00706         
00707         
00708         
00709         if ((previousIndex == 0) || (currentIndex == 0)) { ROS_WARN("About to add matches from (%d) to (%d): (%d, %d)", previousIndex, currentIndex, ((int)startingPoints.size()), ((int)finishingPoints.size())); }
00710         addMatchesToVector(featureTrackVector, previousIndex, startingPoints, currentIndex, finishingPoints, lastAllocatedTrackIndex, configData.minSeparation, false);
00711         
00712         if (configData.debugMode) {
00713                 addMatchesToVector(displayTracks, previousIndex, startingPoints, currentIndex, finishingPoints, lastAllocatedTrackIndex, configData.minSeparation);
00714         }
00715         
00716         globalStartingPoints.clear();
00717         globalStartingPoints.insert(globalStartingPoints.end(), startingPoints.begin(), startingPoints.end());
00718         
00719         globalFinishingPoints.clear();
00720         globalFinishingPoints.insert(globalFinishingPoints.end(), finishingPoints.begin(), finishingPoints.end());
00721         
00722         
00723 }
00724 
00725 void featureTrackerNode::updateDistanceConstraint() {
00726         if (configData.verboseMode) { ROS_INFO("Entered (%s).", __FUNCTION__); }
00727                 
00728         int minDim = min(configData.cameraData.cameraSize.width, configData.cameraData.cameraSize.height);
00729         distanceConstraint = double(minDim) * configData.maxFrac;
00730         distanceConstraint += (distanceConstraint + 1) % 2;
00731         
00732         if (configData.verboseMode) { ROS_INFO("Time diff = (%f)", original_time.toSec() - previous_time.toSec()); }
00733         
00734         if (configData.verboseMode) { ROS_INFO("Non-adaptive initial distance constraint = (%d)", distanceConstraint); }
00735                 
00736         if (configData.adaptiveWindow) {
00737                 
00738                 unsigned int activePoints = 0;
00739                 activePoints += globalFinishingPoints.size();
00740                 
00741                 double predictedDisplacement;
00742                 
00743                 if (configData.velocityPrediction) {
00744                         predictedDisplacement = featuresVelocity * (original_time.toSec() - previous_time.toSec());
00745                 } else {
00746                         predictedDisplacement = distanceConstraint;
00747                 }
00748                 
00749                 
00750                 //ROS_INFO("predictedDisplacement = (%f), (%f), (%f)", predictedDisplacement, featuresVelocity, abs(original_time.toSec() - previous_time.toSec()));
00751                 int predictedRequirement = ceil(ADAPTIVE_WINDOW_CONTIGENCY_FACTOR*predictedDisplacement);
00752                 predictedRequirement += (predictedRequirement + 1) % 2;
00753                 
00754                 if (configData.verboseMode) { ROS_INFO("activePoints = (%d)", activePoints); }
00755                 
00756                 int maximumSearchDist;
00757                 
00758                 if (activePoints == 0) {
00759                         maximumSearchDist = predictedRequirement;
00760                 } else {
00761                         maximumSearchDist = (distanceConstraint * configData.maxFeatures) / activePoints;
00762                 }
00763                  
00764                 maximumSearchDist += (maximumSearchDist + 1) % 2;
00765                 
00766                 if (configData.verboseMode) { ROS_INFO("predictedRequirement = (%d)", predictedRequirement); }
00767                 if (configData.verboseMode) { ROS_INFO("maximumSearchDist = (%d)", maximumSearchDist); }
00768 
00769                 distanceConstraint = min(predictedRequirement, maximumSearchDist);
00770                 
00771                 
00772                 //ROS_ERROR("activePoints = (%d), maximumSearchDist = (%d), predictedDisplacement = (%f), predictedRequirement = (%d)", activePoints, maximumSearchDist, predictedDisplacement, predictedRequirement);
00773                 
00774         }
00775         
00776         distanceConstraint = max(distanceConstraint, TIGHT_DISTANCE_CONSTRAINT);
00777         distanceConstraint = min(distanceConstraint, minDim);
00778         
00779         if (configData.verboseMode) { ROS_INFO("distanceConstraint = (%d) (%d)", distanceConstraint, minDim); }
00780 }
00781 
void featureTrackerNode::publishRoutine() {
	// Publishes the per-frame outputs of the tracker:
	//   1. (debugMode) an annotated debug image: tracked paths, newly sensed
	//      points, and historically matched points, resized to camera size;
	//   2. the feature tracks themselves (publish_tracks);
	//   3. (outputTrackCount) one line of per-frame statistics to trackCountStream;
	//   4. (outputFeatureMotion) mean feature motion, when enough features tracked.
	// No parameters / no return; operates entirely on member state.
	// =================================
	// PUBLICATION
	// =================================
	if (configData.debugMode) {
		

		//vector<Point2f> redistortedPoints;
		//redistortPoints(globalFinishingPoints[jjj], redistortedPoints, configData.cameraData.Kx, configData.cameraData.distCoeffs, configData.cameraData.expandedCamMat);
		
		

		/*
		vector<cv::Point2f> dummyVelocities;
		addMatchesToVector(displayTracks, previousIndex, finalPoints1, currentIndex, finalPoints2, configData.minSeparation, dummyVelocities);
		*/
		
		// Optionally prune the display-track history so drawing stays bounded.
		if (configData.autoTrackManagement) { trimDisplayTrackVectors(); }
		
		//drawFeatureTracks(drawImage2, drawImage2, displayTracks, drawingColors, current_idx, history);
		
		//vector<featureTrack> redistortedTracks;
		//redistortTracks(displayTracks, redistortedTracks, configData.cameraData.Kx, configData.cameraData.distCoeffs, (readyFrame), configData.cameraData.expandedCamMat, history);
		
		//ROS_WARN("displayTracks[%d].size() = (%d)", kkk, displayTracks.size());
		
		if (configData.verboseMode) { ROS_INFO("Drawing (%d) pts from image index (%d)...", ((int)displayTracks.size()), currentIndex); }
		// Overlay track polylines and current point positions onto drawImage.
		drawFeatureTracks(drawImage, drawImage, displayTracks, COLOR_TRACKED_PATH, COLOR_TRACKED_POINTS, currentIndex, configData.drawingHistory);
		if (configData.verboseMode) { ROS_INFO("Points drawn."); }
		
			
		/*
		if (preservedRecoveredPoints.size() > 0) {
			
			if (configData.verboseMode) { ROS_INFO("Drawing (%d) recovered pts...", preservedRecoveredPoints.size()); }
			
			//ROS_ERROR("allRecoveredPoints.size() = (%d)", allRecoveredPoints.size());

		
			displayKeyPoints(drawImage, preservedRecoveredPoints, drawImage, COLOR_RECOVERED_POINTS, 0, true);
			preservedRecoveredPoints.clear();
			
			if (0) {
				cv::imshow("test", drawImage);
				cv::waitKey(1);
			}
			
			allRecoveredPoints.clear();
		}
		*/
		
		// Newly detected (not-yet-tracked) features, in their own color.
		if (newlySensedFeatures.size() > 0) {
			
			if (configData.verboseMode) { ROS_INFO("Drawing (%d) new pts...", ((int)newlySensedFeatures.size())); }
		
			displayKeyPoints(drawImage, newlySensedFeatures, drawImage, COLOR_UNMATCHED_POINTS, 0, true);
			
		}
		
		// Features recovered via matchWithExistingTracks().
		if (matchedFeatures.size() > 0) {
			
			if (configData.verboseMode) { ROS_INFO("Drawing (%d) matched pts...", ((int)matchedFeatures.size())); }
			displayKeyPoints(drawImage, matchedFeatures, drawImage, COLOR_MATCHED_POINTS, 0, true);
			
		}
		
		// The published image must match the advertised camera resolution.
		if (drawImage.size() != configData.cameraData.cameraSize) {
			if (configData.verboseMode) { ROS_INFO("Resizing image..."); }
			resize(drawImage, drawImage_resized, configData.cameraData.cameraSize, 0, 0, cv::INTER_CUBIC);
		} else {
			// Shallow cv::Mat assignment -- shares pixel data with drawImage.
			drawImage_resized = drawImage;
		}
		
		if (configData.verboseMode) { ROS_INFO("Copying image (%u, %u) to publisher...", drawImage_resized.rows, drawImage_resized.cols); }
						
		// Raw pixel copy into the ROS image message.
		// NOTE(review): assumes drawImage_resized is continuous in memory and that
		// msg_debug.data was pre-sized to rows*cols*channels -- confirm at setup.
		std::copy(&(drawImage_resized.at<cv::Vec3b>(0,0)[0]), &(drawImage_resized.at<cv::Vec3b>(0,0)[0])+(drawImage_resized.cols*drawImage_resized.rows*drawImage_resized.channels()), msg_debug.data.begin());
		
		if (configData.verboseMode) { ROS_INFO("Publishing to: (%s) with (%d) and (%d)", debug_pub_name, msg_debug.header.seq, debug_camera_info.header.seq); }
		debug_pub.publish(msg_debug, debug_camera_info);
		if (configData.verboseMode) { ROS_INFO("Published."); }
		
		//imshow("test", drawImage);
		//waitKey(1);
		
	}
	
	// Publish the track data itself; returns the number of tracks sent.
	int publishedTrackCount = publish_tracks(&tracks_pub, currentIndex);
	
	if (configData.outputTrackCount) {
		// One whitespace-separated record per frame, in this column order:
		// Successfully tracked
		// Newly detected
		// Ancient recovered
		// Newly discarded
		// Total published
		
		// globalFinishingPoints[0].size()
		trackCountStream << readyFrame << " ";
		trackCountStream << successfullyTrackedFeatures << " ";
		trackCountStream << publishedTrackCount << " ";
		trackCountStream << newlyDetectedFeatures << " ";
		trackCountStream << newlyRecoveredFeatures << " ";
		//trackCountStream << discardedNewFeatures << " ";
		trackCountStream << lostTrackCount << " ";
		trackCountStream << averageTrackLength << endl;
	}
	
	if (configData.outputFeatureMotion) {
		
		// Calculate feature motion for the just-processed buffer slot.
		double motion_x, motion_y;
		
		int tracked = calculateFeatureMotion(bufferIndices[readyFrame % 2], motion_x, motion_y);
		
		// Only record motion when enough features contributed to the estimate.
		if (tracked >= MIN_FEATURES_FOR_FEATURE_MOTION) {
			featureMotionStream << original_time << " ";
			featureMotionStream << currentIndex << " ";
			featureMotionStream << motion_x << " ";
			featureMotionStream << motion_y << " ";
			featureMotionStream << tracked << endl;
		}
		
	}
	
	// A one-shot freeze request disables debug output after this frame.
	if (freezeNextOutput) {
		configData.debugMode = false;
	}
}
00923 
void featureTrackerNode::matchWithExistingTracks() {
	// Attempts to recover features lost by optical-flow tracking, by
	// descriptor-matching the newly detected points of the current frame
	// against (a) points from enabled historical frames and (b) points whose
	// tracks ended on the immediately previous frame.  Accepted matches are
	// appended to featureTrackVector (and displayTracks in debug mode) and
	// merged into globalFinishingPoints.
	// No parameters / no return; operates entirely on member state.
	
	if (configData.verboseMode) { ROS_INFO("(%s) : Entered function", __FUNCTION__); }
	
	// Determine untracked features

	// Convert points into keypoints:
	// featuresFromPts[0] = candidate (old/lost) points, [1] = brand-new points.
	vector<cv::KeyPoint> featuresFromPts[2];
	
	cv::Point2f ptToAdd;
	cv::KeyPoint kpToAdd;
	// Fixed keypoint attributes; only .pt varies per feature.
	kpToAdd.size = 3.0;
	kpToAdd.angle = -1.0;
	kpToAdd.response = 1.0;
	kpToAdd.octave = 0;
	kpToAdd.class_id = 0;
	
	
	
	// IF YOU WANTED YOU COULD USE THE FINAL INDICES TO IDENTIFY WHICH FEATURES LAST
	// APPEARED IN HISTORICAL FRAMES, AND SINCE YOU STILL HAVE ACCESS TO THOSE IMAGES, YOU COULD
	// IN THEORY FORM DESCRIPTORS
	
	// Then get all brand new points:
	
	for (unsigned int iii = 0; iii < newlySensedFeatures.size(); iii++) {
		
		kpToAdd.pt = newlySensedFeatures.at(iii);
		kpToAdd.size = 3.0;
		featuresFromPts[1].push_back(kpToAdd);

	}
	
	if (configData.verboseMode) { ROS_INFO("...with (%d) brand new points", ((int)featuresFromPts[1].size())); }

	cv::Mat lastChanceDescriptors[2];
	
	// Describe the new points once; reused against every candidate frame below.
	descriptorExtractor -> compute(grayImageBuffer[readyFrame % 2], featuresFromPts[1], lastChanceDescriptors[1]);
	
	
	allRecoveredPoints.clear();
	preservedRecoveredPoints.clear();
	
	// One pass per enabled historical slot (ppp < MAX_HISTORY_FRAMES), plus a
	// final pass (ppp == MAX_HISTORY_FRAMES) against the previous frame itself.
	for (unsigned int ppp = 0; ppp <= MAX_HISTORY_FRAMES; ppp++) {
		
		if (configData.verboseMode) { ROS_INFO("(%s) : Looping for ppp = (%d)", __FUNCTION__, ppp); }
		
		if (ppp != MAX_HISTORY_FRAMES) {
			
			// Skip disabled history slots.
			if ((configData.multiplier[ppp] == 0.0) || !configData.attemptHistoricalRecovery) {
				continue;
			}
			
			
			// NOTE(review): velocity gate is currently disabled (empty branch).
			if (featuresVelocity > configData.maxVelocity) {
				//continue;
			}
			
			//ROS_ERROR("Will try and recover with (%d), featuresVelocity = (%f)", ppp, featuresVelocity);
			
		}
		
		if (configData.verboseMode) { ROS_INFO("(%s) : Passed initial tests..", __FUNCTION__); }
		
		featuresFromPts[0].clear();
		
		unsigned int aimedIndex;
		
		if (ppp == MAX_HISTORY_FRAMES) { // THis is the exception of the loop, where you look at the previous frame
			aimedIndex = bufferIndices[(readyFrame-1) % 2];
		} else {
			aimedIndex = olderIndices[ppp];
		}
		
		// Collect candidate points from tracks that do not reach the current frame.
		for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
			
			// Skip tracks already extended into the current frame.
			if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex == bufferIndices[(readyFrame) % 2]) {
				continue;
			}
			
			if (ppp == MAX_HISTORY_FRAMES) {
				if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex == aimedIndex) {
					// This track should have been a lost one...
					ptToAdd = featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).featureCoord;
					kpToAdd.pt = ptToAdd;
					featuresFromPts[0].push_back(kpToAdd);
					
				}
			} else {
				
				// assignHistoricalPoints(featureTrackVector, olderIndices[ppp], bufferIndices[readyFrame % 2], historicalPoints);
				
				// Use the track's location in the aimed historical frame.
				// NOTE(review): 'continue' here only advances the inner jjj loop;
				// 'break' was probably intended once the index is found (equivalent
				// unless a track stores the same imageIndex twice) -- confirm.
				for (unsigned int jjj = 0; jjj < featureTrackVector.at(iii).locations.size(); jjj++) {
					
					if (featureTrackVector.at(iii).locations.at(jjj).imageIndex == aimedIndex) {
						ptToAdd = featureTrackVector.at(iii).locations.at(jjj).featureCoord;
						kpToAdd.pt = ptToAdd;
						featuresFromPts[0].push_back(kpToAdd);
						continue;
					}	
					
				}
			}
			
			//ROS_ERROR("%d: (%d)", bufferIndices[readyFrame % 2], featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex);
		}
		
		// Create descriptors for untracked features
		
		if (ppp == MAX_HISTORY_FRAMES) { // THis is the exception of the loop, where you look at the previous frame
			descriptorExtractor -> compute(grayImageBuffer[(readyFrame-1) % 2], featuresFromPts[0], lastChanceDescriptors[0]);
		} else {
			descriptorExtractor -> compute(olderImages[ppp], featuresFromPts[0], lastChanceDescriptors[0]);
		}
		
		if ((featuresFromPts[0].size() > 0) && (featuresFromPts[1].size() > 0)) {
		
			if (configData.verboseMode) { ROS_INFO("Will now attempt to match (%d) and (%d) points...", ((int)featuresFromPts[0].size()), ((int)featuresFromPts[1].size())); }
			
			// Pairwise descriptor distances, then mutual-priority matching.
			cv::Mat matchingMatrix;
			createMatchingMatrix(matchingMatrix, lastChanceDescriptors[0], lastChanceDescriptors[1]);
			if (configData.verboseMode) { ROS_INFO("Matching matrix created..."); }
			
			vector<vector<cv::DMatch> > matches;
			twoWayPriorityMatching(matchingMatrix, matches);
			if (configData.verboseMode) { ROS_INFO("Initially (%d) achieved (%d) matches...", ppp, ((int)matches.size())); }
			
			// Predict where each candidate should appear in the current frame,
			// so implausible matches can be culled by distance below.
			vector<cv::Point2f> estimatedFinalLocs;
			
			if (H12.rows != 0) {
				// Homography available: map candidates through it.
				vector<cv::Point2f> tempPts;
				cv::KeyPoint::convert(featuresFromPts[0], tempPts);
				estimatedFinalLocs.insert(estimatedFinalLocs.end(), tempPts.begin(), tempPts.end());
				transformPoints(estimatedFinalLocs, H12);
			} else if ((configData.velocityPrediction) && (ppp == MAX_HISTORY_FRAMES)) {
				// No homography: extrapolate from per-feature velocities.
				vector<cv::Point2f> tempPts;
				cv::KeyPoint::convert(featuresFromPts[0], tempPts);
				
				assignEstimatesBasedOnVelocities(featureTrackVector, tempPts, estimatedFinalLocs, bufferIndices[(readyFrame-1) % 2], previous_time.toSec(), original_time.toSec());
			} else {
				// Fallback: assume the point has not moved.
				cv::KeyPoint::convert(featuresFromPts[0], estimatedFinalLocs);
			}
			
			double matchingConstraint;
		
			// Previous-frame pass uses the full search radius; historical
			// passes use the tighter fixed constraint.
			if (ppp == MAX_HISTORY_FRAMES) { // THis is the exception of the loop, where you look at the previous frame
				matchingConstraint = distanceConstraint;
			} else {
				matchingConstraint = TIGHT_DISTANCE_CONSTRAINT;
			}
			
			cv::Point2f p1, p2;
			for( size_t i = 0; i < matches.size(); i++ ) {
				// Get rid of matches that have moved too far...
				
				p1 = estimatedFinalLocs.at(matches[i][0].queryIdx);
				p2 = featuresFromPts[1].at(matches[i][0].trainIdx).pt;
				
				// Euclidean distance between prediction and matched location.
				if (pow(pow(p1.x - p2.x, 2.0) + pow(p1.y - p2.y, 2.0), 0.5) > matchingConstraint) {
					// Erase and re-test the element now at index i.
					// (i-- with i == 0 relies on unsigned wraparound; the loop's
					// i++ brings it back to 0, so behavior is correct.)
					matches.erase(matches.begin() + i);
					i--;
				}
				
				
			}
			
			if (configData.verboseMode) { ROS_INFO("After (%d) distance constraint, achieved (%d) matches...", ppp, ((int)matches.size())); }			
			if (matches.size() > 0) {
				
				sortMatches(matches);
			
				// Reduce points to matched points
				vector<cv::Point2f> points1, points2;
				vector<int> queryIdxs( matches.size() );
				vector<int> trainIdxs( matches.size() );
				
				for( size_t i = 0; i < matches.size(); i++ ) {
					queryIdxs[i] = matches[i][0].queryIdx;
					trainIdxs[i] = matches[i][0].trainIdx;
				}
				
				cv::KeyPoint::convert(featuresFromPts[0], points1, queryIdxs);
				cv::KeyPoint::convert(featuresFromPts[1], points2, trainIdxs);

				// NOTE(review): these logs pass size_t/.size() values to %d --
				// mismatched varargs on 64-bit targets; should cast to int.
				if (configData.verboseMode) { ROS_INFO("Still have (%d / %d) (%d/%d) (%d / %d) matches...", featuresFromPts[0].size(), featuresFromPts[1].size(), points1.size(), points2.size(), queryIdxs.size(), trainIdxs.size()); }

				// Integrate matches into feature tracks structure
				if (configData.verboseMode) { ROS_WARN("About to add matches (%d, %d)", points1.size(), points2.size()); }
				
				if (ppp != MAX_HISTORY_FRAMES) {
					//continue;
				}
				
				addMatchesToVector(featureTrackVector, aimedIndex, points1, bufferIndices[readyFrame % 2], points2, lastAllocatedTrackIndex, configData.minSeparation, false);
				
				if (configData.debugMode) {
					addMatchesToVector(displayTracks, aimedIndex, points1, bufferIndices[readyFrame % 2], points2, lastAllocatedTrackIndex, configData.minSeparation);
				}
				
				if (configData.verboseMode)  { ROS_INFO("Added (%d) (%d) matches to vector", ppp, points2.size()); }
				
				// Keep recovered points for the debug overlay.
				matchedFeatures.insert(matchedFeatures.end(), points2.begin(), points2.end());
				
				
				// concatenation should rightly destroy the input vector, because all newly detected points should have already been added to globalFinishingPoints
				
				concatenateWithExistingPoints(globalFinishingPoints, points2, configData.maxFeatures, configData.minSeparation);
				if (configData.verboseMode) { ROS_INFO("After (%d) concat, have (%d) points...", ppp, points2.size()); }
				
			}
			
			
			
		}
	
		// concatenateWithExistingPoints(globalFinishingPoints, recoveredPoints, configData.maxFeatures, configData.minSeparation);
		
	}
	
	
	
	
	
	if (configData.verboseMode) { ROS_INFO("(%s) : Exiting function", __FUNCTION__); }
	
}
01152 
01153 void featureTrackerNode::detectNewFeatures() {
01154         vector<cv::KeyPoint> currPoints;
01155         
01156         
01157         
01158         //for (unsigned int jjj = 0; jjj < configData.numDetectors; jjj++) {
01159                 //allPoints.insert(allPoints.end(), globalFinishingPoints.begin(), globalFinishingPoints.end());
01160                 
01161         //}
01162         
01163         if (configData.verboseMode) { ROS_INFO("Entered (%s) : Currently have (%d) points", __FUNCTION__, globalFinishingPoints.size()); }
01164         
01165         //ROS_INFO("Debug (%d)", 11);
01166         
01167         
01168         for (unsigned int jjj = 0; jjj < configData.numDetectors; jjj++) {
01169                 
01170 
01171                 
01172                 if (configData.verboseMode) { ROS_INFO("Considering application of detector (%u), with (%d) pts", jjj, globalFinishingPoints.size()); }
01173                 
01174                 //ROS_INFO("Debug (%d)", 12);
01175                 
01176                 testTime = timeElapsedMS(test_timer, false);
01177                 //ROS_WARN("1: (%f)", testTime);
01178 
01179                 
01180                 bool wantNewDetection = false;
01181                 
01182                 if (globalFinishingPoints.size() < ((unsigned int) configData.maxFeatures)) {
01183                         wantNewDetection = true;
01184                         if (configData.verboseMode) { ROS_INFO("Detector (%u) wants new detection because you have (%d) points compared with a maximum of (%d).", jjj, globalFinishingPoints.size(), configData.maxFeatures); }
01185                 } 
01186                 
01187                 testTime = timeElapsedMS(test_timer, false);
01188                 //ROS_WARN("2: (%f)", testTime);
01189 
01190                 
01191                 //ROS_INFO("Debug (%d)", 13);
01192                 
01193                 if (wantNewDetection) {
01194                         
01195                         //ROS_INFO("Debug (%d)", 14);
01196                         
01197                         currPoints.clear();
01198                         keypointDetector[jjj] -> detect(grayImageBuffer[readyFrame % 2], currPoints);
01199                         
01200                         if (configData.verboseMode) { ROS_INFO("Detector (%u) found (%d) points.", jjj, currPoints.size()); }
01201                         
01202                         //ROS_INFO("Debug (%d)", 15);
01203                         
01204                         //ROS_INFO("(%d) new points detected...", currPoints[jjj].size());
01205                         
01206                         discardedNewFeatures += currPoints.size();
01207 
01208                         testTime = timeElapsedMS(test_timer, false);
01209                         //ROS_WARN("3: (%f)", testTime);
01210                         
01211                         
01212                         if (currPoints.size() != 0) {
01213                                 
01214                                 
01215                                 
01216                                 //ROS_INFO("Debug (%d)", 16);
01217                         
01218                                 sortKeyPoints(currPoints);
01219                                 
01220                                 //ROS_INFO("Debug (%d) : (%d)", 161, currPoints[jjj].size());
01221                                 
01222                                 testTime = timeElapsedMS(test_timer, false);
01223                                 //ROS_WARN("4: (%f)", testTime);
01224                                                                         
01225                                 vector<cv::Point2f> candidates;
01226                                 
01227                                 reduceEdgyFeatures(currPoints, configData.cameraData);
01228                                 
01229                                 //ROS_INFO("Debug (%d)", 162);
01230                                 
01231                                 testTime = timeElapsedMS(test_timer, false);
01232                                 //ROS_WARN("5: (%f)", testTime);
01233                                 
01234                                 //ROS_INFO("(%d) points after edge-reduction...", currPoints[jjj].size());
01235 
01236                                 //reduceWeakFeatures(workingImNew, currPoints[jjj], minResponse);
01237                                 
01238                                 testTime = timeElapsedMS(test_timer, false);
01239                                 //ROS_WARN("6: (%f)", testTime);
01240                                 
01241                                 //ROS_INFO("(%d) points after weakness-reduction...", currPoints[jjj].size());
01242                                 
01243                                 // Want to cull new points to fit limit
01244                                 
01245                                 //ROS_INFO("Debug (%d)", 17);
01246                                 
01247                                 for (unsigned int zzz = 0; zzz < currPoints.size(); zzz++) {
01248                                         // if distance between [ currPoints[jjj].at(zzz) ] and all points in globalFinishingPoints[jjj]
01249                                         // is greater than [ configData.minSeparation ] , delete and decrement index
01250                                         
01251                                         bool violatesProximity = false;
01252                                         
01253                                         for (unsigned int yyy = 0; yyy < globalFinishingPoints.size(); yyy++) {
01254                                                 
01255                                                 if (distBetweenPts2f(currPoints.at(zzz).pt, globalFinishingPoints.at(yyy)) < configData.minSeparation) {
01256                                                         violatesProximity = true;
01257                                                         break;
01258                                                 }
01259                                                 
01260                                         }
01261                                         
01262                                         if (violatesProximity) {
01263                                                 
01264                                                 // delete
01265                                                 currPoints.erase(currPoints.begin() + zzz);
01266                                                 zzz--;
01267                                         }
01268                                 }
01269                                 
01270                                 if (configData.verboseMode) { ROS_INFO("Reduced to (%d) candidate points based on proximity.", currPoints.size()); }
01271                                 
01272                                 //ROS_INFO("Debug (%d)", 18);
01273                                 
01274                                 if ((currPoints.size() + globalFinishingPoints.size()) > configData.maxFeatures) {
01275                                         currPoints.erase(currPoints.begin() + (configData.maxFeatures - globalFinishingPoints.size()), currPoints.end());
01276                                         
01277                                         //ROS_ERROR("Reduced to (%d) points because of maxFeatures...", currPoints[jjj].size());
01278                                         
01279                                 }
01280 
01281                                 if (configData.verboseMode) { ROS_INFO("Further reduced to (%d) candidate points based on maxFeatures limit.", currPoints.size()); }
01282                                 
01283                                 newlyDetectedFeatures += currPoints.size();
01284                                 discardedNewFeatures -= currPoints.size();
01285                                 
01286                                 cv::KeyPoint::convert(currPoints, candidates);
01287                                 
01288                                 //ROS_INFO("Debug (%d)", 19);
01289                                 
01290                                 if (candidates.size() > 0) {
01291                                         
01292                                         //ROS_INFO("Debug (%d)", 20);
01293                                         
01294                                         //cornerSubPix(workingImNew, candidates, Size(1,1), Size(-1,-1), cvTermCriteria(CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 15, 0.1)); // cvTermCriteria(CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 15, 0.1)
01295                                 
01296                                         testTime = timeElapsedMS(test_timer, false);
01297                                         //ROS_WARN("7: (%f)", testTime);
01298                                 
01299                                         // reduce candidates based on features existing in close proximity
01300                                         //reduceProximalCandidates(allPoints, candidates);
01301                                         //reduceEdgyCandidates(candidates, configData.cameraData);
01302                                         
01303                                         testTime = timeElapsedMS(test_timer, false);
01304                                         //ROS_WARN("8: (%f)", testTime);
01305                                         
01306                                         //ROS_INFO("(%d) points after proximity-reduction...", candidates.size());
01307                                         
01308                                         //printf("%s << candidates after reducing ones close to edge: %d\n", __FUNCTION__, candidates.size());
01309                                         //reduceUnrefinedCandidates(candidates);
01310                                         //printf("%s << candidates after reducing ones with subpixel refinement: %d\n", __FUNCTION__, candidates.size());
01311                                         //allPoints.insert(allPoints.end(), candidates.begin(), candidates.end());
01312                                         
01313                                         //unsigned int beforeCount = 
01314                                         
01315                                         
01316                                         
01317                                         if (configData.verboseMode) { ROS_INFO("Size of featureTrackVector before adding projections on frame (%d) = (%d)", bufferIndices[readyFrame % 2], featureTrackVector.size()); }
01318                                         
01319                                         if (featureTrackVector.size() > 0) {
01320                                                 if (featureTrackVector.at(featureTrackVector.size()-1).locations.size() > 0) {
01321                                                         if (configData.verboseMode) { ROS_INFO("Latest index in vector = (%d)", featureTrackVector.at(featureTrackVector.size()-1).locations.at(featureTrackVector.at(featureTrackVector.size()-1).locations.size()-1).imageIndex); }
01322                                                 }
01323                                         }
01324                                         
01325                                         if (bufferIndices[readyFrame % 2] == 0) {
01326                                                 //ROS_ERROR("bufferIndices[%d mod 2] is 0!", readyFrame);
01327                                         }
01328                                         
01329                                         int before = lastAllocatedTrackIndex;
01330                                         addProjectionsToVector(featureTrackVector, bufferIndices[readyFrame % 2], candidates, lastAllocatedTrackIndex, configData.minSeparation);
01331                                         
01332                                         
01333                                         clearDangerFeatures(featureTrackVector, lastAllocatedTrackIndex);
01334                                         
01335                                         //ROS_ERROR("lastAllocatedTrackIndex = (%d) -> (%d)", before, ((int)lastAllocatedTrackIndex));
01336                                         
01337                                         if (configData.verboseMode) { ROS_INFO("Size of featureTrackVector after adding projections = (%d)", featureTrackVector.size()); }
01338                                         
01339                                         if (configData.verboseMode) { ROS_ERROR("About to concatenate with (%d) + (%d) / (%d) points and minsep of (%f)", globalFinishingPoints.size(), candidates.size(), configData.maxFeatures, configData.minSeparation); }
01340                                         
01341                                         concatenateWithExistingPoints(globalFinishingPoints, candidates, configData.maxFeatures, configData.minSeparation);
01342                                         
01343                                         if (configData.verboseMode) { ROS_INFO("Size of finishing points / candidates after concatenation = (%d / %d)", globalFinishingPoints.size(), candidates.size()); }
01344                                         
01345                                         
01346                                         //printf("%s << left after concatenation: %d\n", __FUNCTION__, globalFinishingPoints[jjj].size());
01347                                         
01348                                         if (configData.verboseMode) { ROS_WARN("Adding (%d) new features", candidates.size()); }
01349                                         
01350                                         newlySensedFeatures.insert(newlySensedFeatures.end(), candidates.begin(), candidates.end());
01351                                         
01352                                         testTime = timeElapsedMS(test_timer, false);
01353                                         //ROS_WARN("9: (%f)", testTime);
01354                                         
01355                                         //ROS_INFO("Debug (%d)", 21);
01356                                         
01357                                 }
01358                                 
01359                                 //ROS_INFO("Debug (%d)", 22);
01360                                 
01361                                 
01362                                 
01363                                 //ROS_INFO("Debug (%d)", 23);
01364                                 
01365                         }
01366                         
01367                         //ROS_INFO("Debug (%d)", 24);
01368                         
01369                 }
01370                 
01371         }
01372         
01373         //ROS_INFO("Debug (%d)", 25);
01374         
01375         if (H12.rows != 0) {
01376                 H12.release();
01377         }
01378 }
01379 
// Timer-driven per-frame tracking pass: transforms or clears the previous point
// set, runs optical-flow tracking, tops up with fresh detections when the count
// drops too low, optionally matches new features against existing tracks,
// refreshes the historical recovery frames, and publishes the results.
// Advances readyFrame on completion. Frames alternate between two buffer slots
// via (readyFrame % 2).
void featureTrackerNode::features_loop(const ros::TimerEvent& event) {
	
	// Reset per-frame diagnostic counters.
	successfullyTrackedFeatures = 0;
	newlyDetectedFeatures = 0;
	newlyRecoveredFeatures = 0;
	discardedNewFeatures = 0;
	lostTrackCount = 0;
	
	matchedFeatures.clear();
	newlySensedFeatures.clear();
	
	// Nothing new to process: the most recent frame has already been handled.
	if (readyFrame >= frameCount) {
		return;
	}
	
	//ROS_INFO("Entered features loop for frame (%d)", frameCount);
	
	vector<cv::Point2f> recPoints1, recPoints2;
	
	testTime = timeElapsedMS(test_timer, true);
	//ROS_WARN("?NUC time: (%f)", testTime);
	
	if (configData.verboseMode) { ROS_ERROR("=========== Starting features loop for frame (%u [%u]) with (%d) finishing points.", readyFrame, bufferIndices[readyFrame % 2], globalFinishingPoints.size()); }
	
	//if (readyFrame > 0) {
		
	if (configData.verboseMode) { ROS_WARN("About to update distance constraint."); }
	updateDistanceConstraint();
	if (configData.verboseMode) { ROS_WARN("Distance constraint updated.\n"); }
	
	// Snapshot last frame's finishing points as this frame's starting points.
	globalStartingPoints.clear();
	globalStartingPoints.insert(globalStartingPoints.end(), globalFinishingPoints.begin(), globalFinishingPoints.end());
	
	// If a homography (H12) is available, use it to predict where the points
	// moved; the sentinel velocity forces downstream speed estimates to be
	// recomputed. Otherwise discard the points and start from scratch.
	if (H12.rows != 0) {		
		transformPoints(globalFinishingPoints, H12);
		featuresVelocity = 9e99;
		if (configData.verboseMode) { ROS_INFO("(%d) Points transformed.", globalFinishingPoints.size()); }
	} else {
		globalFinishingPoints.clear();
	}
	
	if (configData.verboseMode) { ROS_WARN("About to attempt tracking..."); }
	attemptTracking();
	if (configData.verboseMode) { ROS_WARN("Tracking completed.\n"); }
	
	double prelimVelocity = obtainFeatureSpeeds(featureTrackVector, bufferIndices[(readyFrame-1) % 2], previous_time.toSec(), bufferIndices[(readyFrame) % 2], original_time.toSec());

	featuresVelocity = max(prelimVelocity, featuresVelocity);

	bool featuresTooLow;
	
	if (configData.verboseMode) { ROS_ERROR("Using (%d) to update (%d).", ((int)globalFinishingPoints.size()), previousTrackedPointsPeak); }
	
	previousTrackedPointsPeak = max(previousTrackedPointsPeak, ((unsigned int) globalFinishingPoints.size()));
	
	// Trigger new detection when the surviving track count drops below the
	// configured minimum; the peak is reset so the next recovery is measured
	// from this low point.
	if (((int)globalFinishingPoints.size()) < configData.minFeatures) {
	//if (((int) globalFinishingPoints.size()) < max(configData.minFeatures, ((int) ((1.00 - FEATURE_DROP_TRIGGER)*double(previousTrackedPointsPeak))) ) ) {
		featuresTooLow = true;
		if (configData.verboseMode) { ROS_ERROR("featuresTooLow == true, because feature count is (%d) vs (%d, %u).", globalFinishingPoints.size(), configData.minFeatures, previousTrackedPointsPeak); }
		previousTrackedPointsPeak = globalFinishingPoints.size();
	} else {
		featuresTooLow = false;
	}
	
	/*
	if (readyFrame == 0) {
		featuresTooLow = true;
	} else {
		featuresTooLow = false;
	}
	*/
	
	if (cycleFlag) {
		if (configData.verboseMode) { ROS_INFO("Cycle flag is true (%d) vs (%d, %d, %d)", cycleCount, configData.multiplier[0], configData.multiplier[1], configData.multiplier[2]); }
	}
	
	
	
	// Detect fresh features when the count is low, when detection is forced
	// every frame, or when a homography-based prediction was applied above.
	if (featuresTooLow || configData.detectEveryFrame || (H12.rows != 0)) {
		if (configData.verboseMode) { ROS_INFO("About to detect new features on frame (%d).. because (%d, %d, %d, %d)", currentIndex, featuresTooLow, cycleFlag, configData.detectEveryFrame, (H12.rows != 0)); }
		detectNewFeatures();
		if (configData.verboseMode) { ROS_INFO("(%d) New features detected..", ((int)newlySensedFeatures.size())); }
		
		
	}
	
	if (configData.attemptMatching && (readyFrame > 0) && (newlySensedFeatures.size() > 0)) { // or perhaps, if there are any features to match
		// Attempt to match features
		matchWithExistingTracks();
	}

	
	// Refresh the stored historical frames used for track recovery. Each slot
	// ppp is refreshed every configData.multiplier[ppp] cycles; a zero
	// multiplier (or recovery disabled) skips the slot entirely.
	for (unsigned int ppp = 0; ppp < MAX_HISTORY_FRAMES; ppp++) {
		
		if ((configData.multiplier[ppp] == 0.0) || !configData.attemptHistoricalRecovery) {
			
			continue;
		}
		
		if (configData.verboseMode) { ROS_INFO("cycleCount = (%d) vs configData.multiplier[%d] = (%d)", cycleCount, ppp, configData.multiplier[ppp]); }
		
		if ( ((cycleCount % configData.multiplier[ppp]) == 0) && (cycleFlag) ) {
			
			//  (featuresVelocity > configData.maxVelocity)
			
			if (configData.verboseMode) { ROS_INFO("About to replace historical data (%d)..", ppp);}
			
			// Copy the current frame (image, timestamp, index) into history slot ppp.
			grayImageBuffer[readyFrame % 2].copyTo(olderImages[ppp]);
			olderTimes[ppp] = original_time;
			olderIndices[ppp] = bufferIndices[readyFrame % 2];
			
			/*
			if (featuresVelocity > configData.maxVelocity) {
				cycleCount = 0;
			}
			*/
		}
		
	}
	
	cycleFlag = false;
	
	
	/*
	// Ideally would do too this if: featuresTooLow & H12.rows != 0
	if (cycleFlag || (H12.rows != 0)) {
		
		if (configData.attemptHistoricalRecovery && (configData.newFeaturesPeriod > 0.0) ) {
			if (configData.verboseMode) { ROS_WARN("About to attempt to recover tracks."); }
			unsigned int recoveredTrackCount = recoverTracks();
			if (configData.verboseMode) { ROS_WARN("Recovered (%d) tracks\n", recoveredTrackCount); }
		}
		
		cycleFlag = false;

	}
	*/
	
	if (configData.verboseMode) { ROS_WARN("globalFinishingPoints.size() = (%d)", ((int)globalFinishingPoints.size())); }
	
	if (readyFrame > 0) {
		
		
	
		// One-shot warning latch: warn when the count halves below minFeatures,
		// and announce recovery only once the count climbs back above minFeatures.
		if ((globalFinishingPoints.size() < ((unsigned int) configData.minFeatures/2)) && !lowPointsWarning) {
			lowPointsWarning = true;
			ROS_WARN("Successfully tracked points (%d) is currently low...", globalFinishingPoints.size());

			
		} else if ((globalFinishingPoints.size() > ((unsigned int) configData.minFeatures)) && lowPointsWarning) {
			lowPointsWarning = false;
			ROS_INFO("Minimum feature count now re-achieved.");
		}
		
		//featuresVelocity = calculateFeatureSpeeds(finalPoints1, finalPoints2, finalVelocities, previous_time.toSec(), original_time.toSec());
		
		//ROS_INFO("About to attempt to update feature speeds.");
		featuresVelocity = updateFeatureSpeeds(featureTrackVector, bufferIndices[(readyFrame-1) % 2], previous_time.toSec(), bufferIndices[(readyFrame) % 2], original_time.toSec(), configData.maxVelocity);
		//ROS_INFO("Done.");
	}
	
	publishRoutine();
	
	if (configData.verboseMode) { ROS_WARN("featuresVelocity = (%f)", featuresVelocity); }
	
	// Zero velocity alongside active tracks usually means duplicate input
	// frames were fed in without being marked as such upstream.
	if (configData.velocityPrediction) {
		
		unsigned int activeTrackCount = getActiveTrackCount(featureTrackVector, bufferIndices[(readyFrame-1) % 2], bufferIndices[(readyFrame) % 2]);
		// ROS_ERROR("activeTrackCount = (%d)", activeTrackCount);
		if ((featuresVelocity == 0.0) && (activeTrackCount > 0)) { 
			ROS_WARN("featuresVelocity = (%f) : Are you forgetting to mark duplicate images using <streamer>?", featuresVelocity); 
		} else if (configData.verboseMode) {
			ROS_INFO("featuresVelocity = (%f) over (%f) seconds", featuresVelocity, (original_time.toSec()-previous_time.toSec()));
		}
	}
	
	if (configData.verboseMode) { ROS_ERROR("Completed features loop\n"); }
	
	readyFrame++;
	
	
}
01562 
01563 int featureTrackerNode::calculateFeatureMotion(unsigned int idx, double& mx, double &my) {
01564         
01565         // Go through all active features
01566         // Find ones that were tracked in previous frame
01567         // Add to calcs
01568         
01569         double xSumm = 0.0, ySumm = 0.0;
01570         int count = 0;
01571         
01572         int *monitorIndices;
01573         
01574         monitorIndices = new int[featureTrackVector.size()]; // 0 means can't use, 1 means 2nd frame, 2 means 3rd frame
01575         
01576         if (configData.normalizeFeatureVelocities) {
01577                 
01578                 // 0. INITIALLY CLASSIFY ALL FEATURES IN TERMS OF THEIR RECENT HISTORY (0, 1 or 2 recent consecutive appearances)
01579                 // Also, count how many have had 2 recent consecutive appearances
01580                 
01581                 // For each existing feature
01582                 for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01583                 
01584                         // If this feature was detected in the most recent frame
01585                         if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex == idx) {
01586                                 
01587                                 // If it has at least 3 detections
01588                                 if (featureTrackVector.at(iii).locations.size() > 2) {
01589                                         
01590                                         // If the 2nd most recent detection was in the previous frame
01591                                         if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-2).imageIndex == (idx-1)) {
01592                                                 
01593                                                 // And the 3rd most recent detection was in the frame before that
01594                                                 if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-3).imageIndex == (idx-2)) {
01595                                                         
01596                                                         // Now you have a 3rd frame!
01597                                                         count++;
01598                                                         
01599                                                         monitorIndices[iii] = 2;
01600                                                         
01601                                                 } else {
01602                                                         monitorIndices[iii] = 1;
01603                                                 }
01604                                                 
01605                                         } else {
01606                                                 monitorIndices[iii] = 0;
01607                                         }
01608                                         
01609                                 } else if (featureTrackVector.at(iii).locations.size() > 1) {
01610                                         
01611                                         if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-2).imageIndex == (idx-1)) {
01612                                                 
01613                                                 monitorIndices[iii] = 1;
01614                                                 
01615                                         } else {
01616                                                 monitorIndices[iii] = 0;
01617                                         }
01618                                         
01619                                 } else {
01620                                         monitorIndices[iii] = 0;
01621                                 }
01622                                 
01623                         } else {
01624                                 monitorIndices[iii] = 0;
01625                         }
01626                         
01627                 }
01628                 
01629                 // All features that have already been successfully tracked (i.e. this is their 3rd frame) have valid velocity weightings
01630                 
01631                 // Assign all velocities
01632                 
01633                 // Establish an average velocity from the 3rd appearance features [will this basically be your output?]
01634                 
01635                 if (count > 0) {
01636                         for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01637                                 
01638                                 if (monitorIndices[iii] >= 2) {
01639                                                                 
01640                                         featureTrackVector.at(iii).velocity_x = (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).featureCoord.x - featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-2).featureCoord.x);
01641                                         featureTrackVector.at(iii).velocity_y = (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).featureCoord.y - featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-2).featureCoord.y);
01642                                         
01643                                 }
01644                                 
01645                                 if (monitorIndices[iii] == 2) {
01646                                         
01647                                         xSumm += featureTrackVector.at(iii).velocity_x;
01648                                         ySumm += featureTrackVector.at(iii).velocity_y;
01649                                         
01650                                 } 
01651                                 
01652                         }
01653                         
01654                         mx = xSumm / double(count);
01655                         my = ySumm / double(count);
01656                         
01657                         if (configData.verboseMode) { ROS_INFO("Average feature motion since previous frame (%d / %d): (%06.2f, %06.2f)", count, 2, mx, my); }
01658                         
01659                         // Re-calculate weightings so that they all normalize to this average velocity
01660                         
01661                         for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01662                                 
01663                                 if (monitorIndices[iii] == 2) {
01664                                                                 
01665                                         featureTrackVector.at(iii).velocityWeighting = mx / featureTrackVector.at(iii).velocity_x;
01666                                         
01667                                 } 
01668                                 
01669                         }
01670                         
01671                         // Now go through all 2nd appearance features, assign them a weighting to normalize them to the velocity from the reliable (3rd frame) features
01672                         
01673                         for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01674                                 
01675                                 if (monitorIndices[iii] == 1) {
01676                                                                 
01677                                         featureTrackVector.at(iii).velocityWeighting = mx / featureTrackVector.at(iii).velocity_x;
01678                                         
01679                                 } 
01680                                 
01681                         }
01682                         
01683                 } else {
01684                         // If no 3rd appearance features, assign a weighting to 2nd appearance features to normalize them to their own average velocity
01685                         // These features will now be valid references in future
01686                         
01687                         for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01688                                 
01689                                 
01690                                 if (monitorIndices[iii] == 1) {
01691                                         
01692                                         xSumm += featureTrackVector.at(iii).velocity_x;
01693                                         ySumm += featureTrackVector.at(iii).velocity_y;
01694                                         
01695                                         count++;
01696                                         
01697                                 } 
01698                                 
01699                         }
01700                         
01701                         mx = xSumm / double(count);
01702                         my = ySumm / double(count);
01703                         
01704                         if (configData.verboseMode) { ROS_INFO("Average feature motion since previous frame (%d / %d): (%06.2f, %06.2f)", count, 1, mx, my); }
01705                         
01706                         for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01707                                 
01708                                 if (monitorIndices[iii] == 1) {
01709                                                                 
01710                                         featureTrackVector.at(iii).velocityWeighting = mx / featureTrackVector.at(iii).velocity_x;
01711                                         
01712                                 } 
01713                                 
01714                         }
01715                         
01716                 }
01717         
01718         } else {
01719                 
01720                 for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01721                 
01722                         if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex == idx) {
01723                                 
01724                                 if (featureTrackVector.at(iii).locations.size() > 1) {
01725                                         
01726                                         if (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-2).imageIndex == (idx-1)) {
01727                                                 
01728                                                 xSumm += (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).featureCoord.x - featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-2).featureCoord.x);
01729                                                 ySumm += (featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).featureCoord.y - featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-2).featureCoord.y);
01730                                                 
01731                                                 count++;
01732                                                 
01733                                         }
01734                                         
01735                                 }
01736                                 
01737                         }
01738                         
01739                 }
01740                 
01741                 mx = xSumm / double(count);
01742                 my = ySumm / double(count);
01743                 
01744                 if (configData.verboseMode) { ROS_INFO("Average feature motion since previous frame: (%06.2f, %06.2f)", mx, my); }
01745                 
01746         }
01747         
01748         
01749         
01750         return count;
01751         
01752 }
01753 
01754 void featureTrackerNode::prepareForTermination() {
01755         if (trackCountStream.is_open()) trackCountStream.close();       
01756         if (featureMotionStream.is_open()) featureMotionStream.close();
01757 }
01758 
01759 void featureTrackerNode::handle_very_new() {
01760         
01761         unsigned int current_idx = frameCount-1;
01762         
01763         if (configData.verboseMode) { ROS_WARN("Handling new frame (interruption backend)"); }
01764         
01765         
01766         
01767         // Feature detection
01768         
01769         homogPoints[1].clear();
01770         homographyDetector -> detect(grayImageBuffer[current_idx % 2], homogPoints[1]);
01771         
01772         if (homogPoints[1].size() < 4) {
01773                 ROS_ERROR("Insufficient points detected (%d) in second frame for calculating homography..", homogPoints[1].size());
01774         } 
01775         
01776         if ((homogPoints[0].size() < 4) || (homogPoints[1].size() < 4)) {
01777                 
01778                 H12 = cv::Mat::eye(3, 3, CV_64FC1);
01779                 undergoingDelay = false;
01780                 return;
01781                 
01782         }
01783 
01784         if (configData.verboseMode) { ROS_INFO("About to extract homography descriptors for second frame.."); }
01785 
01786         // Feature description
01787 
01788         homographyExtractor -> compute(grayImageBuffer[current_idx % 2], homogPoints[1], homogDescriptors[1]);
01789         
01790         // Feature matching
01791         
01792         cv::Mat matchingMatrix;
01793         
01794         if (configData.verboseMode) { ROS_INFO("Creating homography matching matrix.."); }
01795         
01796         createMatchingMatrix(matchingMatrix, homogDescriptors[0], homogDescriptors[1]);
01797                         
01798         //constrainMatchingMatrix(matchingMatrix, homogPoints[0], homogPoints[1], MATCHING_DIST_CONSTRAINT, MATCHING_SIZE_CONSTRAINT);
01799         
01800         vector<vector<cv::DMatch> > matches;
01801         
01802         twoWayPriorityMatching(matchingMatrix, matches, configData.matchingMode);
01803         
01804         if (configData.verboseMode) { ROS_INFO("Sorting homography matches.."); }
01805         
01806         sortMatches(matches);
01807         
01808         // Reduce points to matched points
01809 
01810         vector<cv::Point2f> points1, points2;
01811         
01812         vector<int> queryIdxs( matches.size() );
01813         vector<int> trainIdxs( matches.size() );
01814         
01815         for( size_t i = 0; i < matches.size(); i++ ) {
01816                 queryIdxs[i] = matches[i][0].queryIdx;
01817                 trainIdxs[i] = matches[i][0].trainIdx;
01818         }
01819         
01820         cv::KeyPoint::convert(homogPoints[0], points1, queryIdxs);
01821         cv::KeyPoint::convert(homogPoints[1], points2, trainIdxs);
01822 
01823         vector<uchar> validityMask;
01824         vector<char> validityMask2;  
01825         
01826         // Homography determination
01827         
01828         if ((points1.size() < 4) || (points2.size() < 4)) {
01829                 H12 = cv::Mat::eye(3, 3, CV_64FC1);
01830                 undergoingDelay = false;
01831                 return;
01832         }
01833         
01834         if (configData.verboseMode) { ROS_INFO("Attempting to find homography with (%d, %d) matched points..", points1.size(), points2.size()); }
01835         
01836         
01837         
01838         H12 = cv::findHomography( cv::Mat(points1), cv::Mat(points2), validityMask, CV_RANSAC, 5.0 );
01839                 
01840         unsigned int validPoints = 0;
01841         
01842         for (unsigned int iii = 0; iii < validityMask.size(); iii++) {
01843                 if (validityMask.at(iii) > 0) {
01844                         validPoints++;
01845                 }
01846         }
01847         
01848         //if (configData.verboseMode) { ROS_INFO("Homography found (%d / %d / %d)", points1.size(), points2.size(), validPoints); }
01849         cv::Mat im1, im2;
01850         
01851         
01852         warpPerspective(grayImageBuffer[(current_idx-1) % 2], im2, H12, grayImageBuffer[(current_idx-1) % 2].size());
01853         //grayImageBuffer[(current_idx-1) % MAXIMUM_FRAMES_TO_STORE].copyTo(im2);
01854         
01855         grayImageBuffer[current_idx % 2].copyTo(im1);
01856         
01857         // homogPoints[1] and therefore points2 are actually from the newer image...
01858         
01859         unsigned int inlierPoints = cv::countNonZero(validityMask);
01860         
01861         if (configData.verboseMode) { ROS_INFO("Number of matches for interruption homography = (%u/%d)", inlierPoints, homogPoints[0].size()); } // , matches.size(), points1.size() 
01862         
01863         if (configData.verboseMode) { cout << "H12 = " << H12 << endl; }
01864         
01865         while (0) {
01866                 
01867                 cv::Mat im1_col, im1_col_B, im2_col;
01868                 
01869                 cvtColor(im1, im1_col, CV_GRAY2RGB);
01870                 cvtColor(im2, im2_col, CV_GRAY2RGB);
01871                 
01872                 cv::Scalar color_x = CV_RGB(255, 0, 0);
01873                 displayKeyPoints(im1_col, homogPoints[1], im1_col_B, color_x);
01874 
01875                 //imshow("temp_disp", im1);     // Current image
01876                 std::copy(&(im1_col_B.at<unsigned char>(0,0)), &(im1_col_B.at<unsigned char>(0,0))+(drawImage.cols*drawImage.rows*drawImage.channels()), msg_debug.data.begin());                               
01877                 debug_pub.publish(msg_debug, debug_camera_info);
01878                 cv::waitKey(500);
01879                 //imshow("temp_disp", im2);     // Previous image under transform
01880                 std::copy(&(im2_col.at<unsigned char>(0,0)), &(im2_col.at<unsigned char>(0,0))+(drawImage.cols*drawImage.rows*drawImage.channels()), msg_debug.data.begin());                           
01881                 debug_pub.publish(msg_debug, debug_camera_info);
01882                 cv::waitKey(500);
01883                 
01884                 
01885         }
01886         
01887         
01888         
01889         // Then, do you want to move "globalFinishingPoints" so that they're at the locations estimated by H12?
01890         
01891         
01892         
01893         undergoingDelay = false;
01894 }
01895 
01896 void featureTrackerNode::handle_delay() {
01897                 
01898         unsigned int current_idx = frameCount-1;
01899         
01900         //ROS_WARN("Handling a delay now...");
01901         
01902         
01903         // Feature detection
01904         
01905         
01906         
01907         //ROS_INFO("Current index: (%d)", current_idx);
01908         
01909         //ROS_INFO("grayIm: (%d, %d, %d, %d))", grayImageBuffer[current_idx % 2].rows, grayImageBuffer[current_idx % 2].cols, grayImageBuffer[current_idx % 2].depth(), grayImageBuffer[current_idx % 2].channels());
01910         
01911         homogPoints[0].clear();
01912         homographyDetector -> detect(grayImageBuffer[current_idx % 2], homogPoints[0]);
01913         //keypointDetector[0] -> detect(grayImageBuffer[current_idx % MAXIMUM_FRAMES_TO_STORE], homogPoints[0]);
01914         
01915         //ROS_INFO("Homography frame #1 (%d) pts detected", homogPoints[0].size());
01916         
01917         if (homogPoints[0].size() < 4) {
01918                 ROS_ERROR("Insufficient points detected (%d) in first frame for calculating homography..", homogPoints[0].size());
01919                 
01920         } else {
01921 
01922                 sortKeyPoints(homogPoints[0], 200);
01923                 
01924                 //ROS_INFO("Sorting points...");
01925         
01926                 if (configData.debugMode) {
01927                         /*
01928                         displayImage.copyTo(drawImage);
01929                         displayKeyPoints(displayImage, homogPoints[0], drawImage, CV_RGB(255, 0, 0), 0);
01930                         
01931                         std::copy(&(drawImage.at<unsigned char>(0,0)), &(drawImage.at<unsigned char>(0,0))+(drawImage.cols*drawImage.rows*drawImage.channels()), msg_debug.data.begin());                               
01932                         debug_pub.publish(msg_debug, debug_camera_info);
01933                         */
01934                 }
01935                 
01936                 homographyExtractor -> compute(grayImageBuffer[current_idx % 2], homogPoints[0], homogDescriptors[0]);
01937         }
01938         
01939         // Feature description
01940         undergoingDelay = true;
01941 }
01942 
01943 featureTrackerNode::featureTrackerNode(ros::NodeHandle& nh, trackerData startupData) {
01944         
01945         debugInitialized = false;
01946         lastAllocatedTrackIndex = -1;
01947         cycleFlag = false;
01948         cycleCount = 0;
01949         freezeNextOutput = false;
01950         lastCycleFrameTime = ros::Time(0.0);
01951         peakTracks = 0;
01952         
01953         skippedFrameCount = 0;
01954         capturedFrameCount = 0;
01955         
01956         skipTime = timeElapsedMS(skip_timer);
01957         
01958         previousIndex = -1;
01959         currentIndex = -1;
01960         
01961         undergoingDelay = false;
01962         
01963         if (configData.outputFolder == "outputFolder") {
01964                 
01965                 char timeString[256];
01966         
01967                 sprintf(timeString, "%010d.%09d", ros::Time::now().sec, ros::Time::now().nsec);
01968                 
01969                 string defaultOutput = configData.read_addr + "nodes/flow/log/" + timeString;
01970                 ROS_WARN("No output folder specified, outputting by default to (%s)", defaultOutput.c_str());
01971                 configData.outputFolder = defaultOutput;
01972                 
01973                 char buffer[256];
01974                 sprintf(buffer,"mkdir -p %s",configData.outputFolder.c_str());
01975                 int mkdirResult = system(buffer);
01976                 
01977                 ROS_INFO("Result of mkdir command: (%d)", mkdirResult);
01978                 
01979                 char buffer_check[256];
01980                 sprintf(buffer_check,"%s", &buffer[9]);
01981         
01982                 if (configData.verboseMode) { ROS_INFO("Checking that directory (%s) has been created..", buffer_check); }
01983         
01984                 while (!boost::filesystem::exists( buffer_check ) ) {
01985                         
01986                 }
01987                 
01988         }
01989                 
01990         referenceFrame = -1;
01991         
01992         frameCount = 0;
01993         readyFrame = 0;
01994         
01995         sprintf(nodeName, "%s", ros::this_node::getName().c_str());
01996 
01997         sprintf(debug_pub_name, "thermalvis%s/image_col", nodeName);
01998         
01999         infoProcessed = false;
02000         infoSent = false;
02001         
02002         configData = startupData;
02003         
02004         for (unsigned int ppp = 0; ppp < MAX_HISTORY_FRAMES; ppp++) {
02005                 configData.multiplier[ppp] = 0;
02006         }
02007         
02008         configData.initializeDetectors(keypointDetector, &homographyDetector);
02009         configData.initializeDescriptors(&descriptorExtractor, &homographyExtractor);
02010         
02011         //printf("%s::%s << Startup data: (%s)\n", __PROGRAM__, __FUNCTION__, configData.video_stream.c_str());
02012         
02013         //std::string topic = nh.resolveName(configData.video_stream + configData.video_sequence);
02014         std::string topic = nh.resolveName(configData.topic);
02015         
02016         string topic_info = configData.topic.substr(0, configData.topic.find_last_of("/") + 1) + "camera_info";
02017         
02018         //ROS_INFO("Connecting to camera_info. %s", topic_info.c_str());
02019         
02020         //image_transport::ImageTransport it(nh);
02021         it = new image_transport::ImageTransport(nh);
02022         
02023         ROS_INFO("Subscribing to camera topic (%s)", topic.c_str());
02024         camera_sub = it->subscribeCamera(topic, 1, &featureTrackerNode::handle_camera, this);
02025         
02026         if (configData.tracksOutputTopic == "tracksOutputTopic") {
02027                 
02028                 
02029                 string tmpString = configData.topicParent + "/tracks";
02030                 //string tmpString = configData.topic.substr(0, configData.topic.find_last_of("/") + 1);
02031                 //ROS_WARN("(%s), (%s)", configData.topic.c_str(), tmpString.c_str());
02032                 sprintf(tracks_pub_name, "%s", tmpString.c_str());
02033                 
02034                 
02035                 
02036         } else {
02037                 // sprintf(tracks_pub_name, "thermalvis%s/tracks", nodeName);
02038                 sprintf(tracks_pub_name, "%s", configData.tracksOutputTopic.c_str());
02039         }
02040         
02041         ROS_INFO("Publishing tracks data at (%s)", tracks_pub_name);
02042         
02043         ros::AdvertiseOptions op = ros::AdvertiseOptions::create<thermalvis::feature_tracks>(tracks_pub_name, 1, &connected, &disconnected, ros::VoidPtr(), NULL);
02044         op.has_header = false;
02045         
02046         //tracks_pub = nh.advertise<thermalvis::feature_tracks>(tracks_pub_name, 1);
02047         tracks_pub = nh.advertise(op);
02048         
02049         timer = nh.createTimer(ros::Duration(0.05), &featureTrackerNode::timed_loop, this);
02050         
02051         features_timer = nh.createTimer(ros::Duration(0.01), &featureTrackerNode::features_loop, this);
02052         
02053         //printf("%s::%s << Image transport subscribed.\n", __PROGRAM__, __FUNCTION__);
02054         
02055         
02056         
02057         
02058         if (configData.outputTrackCount) {
02059                 string outputTrackCountFile = configData.outputFolder + "/" + ros::this_node::getName().substr(1,ros::this_node::getName().size()-1) + "_trackcount.txt";
02060         
02061                 ROS_INFO("outputTrackCountFile = (%s)", outputTrackCountFile.c_str());
02062                 
02063                 trackCountStream.open(outputTrackCountFile.c_str(), ios::out);
02064         }
02065         
02066         if (configData.outputFeatureMotion) {
02067                 string outputFeatureMotionFile = configData.outputFolder + "/" + ros::this_node::getName().substr(1,ros::this_node::getName().size()-1) + "_motion.txt";
02068         
02069                 ROS_INFO("outputFeatureMotionFile = (%s)", outputFeatureMotionFile.c_str());
02070                 
02071                 featureMotionStream.open(outputFeatureMotionFile.c_str(), ios::out);
02072         }
02073         
02074         //cin.get();
02075         
02076         ROS_INFO("Establishing server callback...");
02077         f = boost::bind (&featureTrackerNode::serverCallback, this, _1, _2);
02078     server.setCallback (f);
02079     
02080 }
02081 
02082 
02083 
02084 void trackerData::initializeDetectors(cv::Ptr<cv::FeatureDetector> *det, cv::Ptr<cv::FeatureDetector> *hom) {
02085         
02086         for (unsigned int iii = 0; iii < numDetectors; iii++) {
02087                 if (detector[iii] == "SURF") {
02088                         ROS_ERROR("SURF has been deactivated due to copyright protection!");
02089                         //det[iii] = new SurfFeatureDetector( sensitivity[iii] );
02090                 } else if (detector[iii] == "FAST") {
02091                         det[iii] = new cv::FastFeatureDetector( sensitivity[iii] * FAST_DETECTOR_SENSITIVITY_SCALING );
02092                 } else if (detector[iii] == "GFTT") {
02093                         det[iii] = new cv::GoodFeaturesToTrackDetector( maxFeatures, max(MINIMUM_GFTT_SENSITIVITY, sensitivity[iii]), 1.0, 3, false );
02094                 } else if (detector[iii] == "STAR") {
02095                         det[iii] = new cv::StarFeatureDetector( 16, sensitivity[iii] );
02096                 } else if (detector[iii] == "ORB") {
02097                         det[iii] = new cv::OrbFeatureDetector( maxFeatures );
02098                 } else if (detector[iii] == "HARRIS") {
02099                         det[iii] = new cv::GoodFeaturesToTrackDetector( maxFeatures, max(MINIMUM_HARRIS_SENSITIVITY, sensitivity[iii]), 1.0, 3, true );
02100                 }
02101         }
02102         
02103         //hom[0] = new cv::OrbFeatureDetector( maxFeatures );
02104         hom[0] = new cv::FastFeatureDetector( FAST_DETECTOR_SENSITIVITY_SCALING*0.02 );
02105         //hom[0] = new SurfFeatureDetector( 1.00 );
02106         
02107 }
02108 
02109 void featureTrackerNode::timed_loop(const ros::TimerEvent& event) {
02110         
02111         elapsedTime = timeElapsedMS(cycle_timer, false);
02112         
02113         if ((frameCount > 0) && (elapsedTime > configData.delayTimeout*MS_PER_SEC) && !undergoingDelay && handleDelays) {
02114                 featuresVelocity = 9e99;
02115                 handle_delay();
02116         }
02117         
02118         
02119         
02120         skipTime = timeElapsedMS(skip_timer, false);
02121         
02122         //ROS_INFO("skipTime = (%f) vs (%f)", skipTime, SKIP_WARNING_TIME_PERIOD);
02123         
02124         if (skipTime > 1000.0*SKIP_WARNING_TIME_PERIOD) {
02125                 skipTime = timeElapsedMS(skip_timer);
02126                 
02127                 if (skippedFrameCount > 0) {
02128                         ROS_WARN("(%d) skipped frames and (%d) captured frames in last (%f) seconds..", skippedFrameCount, capturedFrameCount, SKIP_WARNING_TIME_PERIOD); // , current frame at risk of being mid-NUC if duplicates are not marked. (%d) vs (%d) : (%d)", previousIndex, currentIndex, frameCount);
02129                 }
02130                 
02131                 skipTime = 0.0;
02132                 skippedFrameCount = 0;
02133                 capturedFrameCount = 0;
02134         } 
02135         
02136         
02137 }
02138 
02139 void featureTrackerNode::serverCallback(thermalvis::flowConfig &config, uint32_t level) {
02140       
02141     configData.minSeparation = config.minSeparation;
02142     
02143     configData.adaptiveWindow = config.adaptiveWindow;
02144     configData.autoTrackManagement = config.autoTrackManagement;
02145     configData.attemptMatching = config.attemptMatching;
02146     configData.maxVelocity = config.maxVelocity;
02147     
02148     configData.detectEveryFrame = config.detectEveryFrame;
02149     
02150     configData.matchingMode = config.matchingMode;
02151     
02152     configData.showTrackHistory = config.showTrackHistory;
02153       
02154     configData.sensitivity[0] = config.sensitivity_1;
02155         configData.sensitivity[1] = config.sensitivity_2;
02156         configData.sensitivity[2] = config.sensitivity_3;
02157         
02158         configData.velocityPrediction = config.velocityPrediction;
02159         
02160         // ROS_INFO("Sens = (%f, %f, %f)", configData.sensitivity[0], configData.sensitivity[1], configData.sensitivity[2]);
02161         
02162         if (config.maxFeatures > configData.maxFeatures) {
02163                 previousTrackedPointsPeak = config.maxFeatures;
02164         }
02165         configData.maxFeatures = config.maxFeatures;
02166         
02167         
02168         configData.minFeatures = config.minFeatures;
02169         
02170         int det_array[3];
02171         det_array[0] = config.detector_1;
02172         det_array[1] = config.detector_2;
02173         det_array[2] = config.detector_3;
02174         
02175         configData.newFeaturesPeriod = config.newFeaturesPeriod;
02176         configData.attemptHistoricalRecovery = config.attemptHistoricalRecovery;
02177         
02178         if (configData.attemptHistoricalRecovery) {
02179                 configData.multiplier[0] = 1;
02180                 numHistoryFrames = 1;
02181         } else {
02182                 configData.multiplier[0] = 0;
02183                 numHistoryFrames = 0;
02184         }
02185         
02186         
02187         configData.multiplier[1] = config.multiplier_1;
02188         if ((numHistoryFrames == 1) && (configData.multiplier[1] > 0)) {
02189                 numHistoryFrames++;
02190         }
02191         
02192         configData.multiplier[2] = config.multiplier_2;
02193         if ((numHistoryFrames == 2) && (configData.multiplier[2] > 0)) {
02194                 numHistoryFrames++;
02195         }
02196         
02197         //ROS_INFO("Selected detectors: (%d, %d, %d)", config.detector_1, config.detector_2, config.detector_3);
02198         
02199         for (unsigned int iii = 0; iii < 3; iii++) {
02200                 
02201                 if (det_array[iii] == 1) {
02202                         configData.detector[iii] = "GFTT";
02203                 } else if (det_array[iii] == 2) {
02204                         configData.detector[iii] = "FAST";
02205                 } else if (det_array[iii] == 3) {
02206                         configData.detector[iii] = "HARRIS";
02207                 }
02208                 
02209         }
02210         
02211         if (config.detector_1 > 0) {
02212                 
02213                 if (config.detector_2 > 0) {
02214                         
02215                         if (config.detector_3 > 0) {
02216                                 configData.numDetectors = 3;
02217                                 
02218                         } else {
02219                                 configData.numDetectors = 2;
02220                         }
02221                         
02222                 } else {
02223                         configData.numDetectors = 1;
02224                 }
02225                 
02226         } else {
02227                 configData.numDetectors = 0;
02228         }
02229     
02230     configData.verboseMode = config.verboseMode;
02231     
02232     
02233         configData.debugMode = config.debugMode;
02234         
02235         if ((configData.debugMode) && !debugInitialized) {
02236                 
02237                 // In order to prevent ROS from overriding sequence number
02238                 //ros::AdvertiseOptions op_debug = ros::AdvertiseOptions::create<sensor_msgs::CameraInfo>(debug_pub_name, 1, &connected, &disconnected, ros::VoidPtr(), NULL);
02239                 //op_debug.has_header = false;
02240 
02241                 //debug_pub = it.advertise(op_debug);
02242                 
02243                 debug_pub = it->advertiseCamera(debug_pub_name, 1);
02244                 
02245                 debugInitialized = true;
02246                 
02247                 ROS_INFO("Advertising tracker debugging video (%s)", debug_pub_name);
02248         }
02249         
02250         configData.flowThreshold = config.flowThreshold;
02251         
02252         configData.maxFrac = config.maxFrac;
02253         
02254         configData.delayTimeout = config.delayTimeout;
02255         
02256         if (configData.delayTimeout == 0) {
02257                 handleDelays = false;
02258         } else {
02259                 handleDelays = true;
02260         }
02261         
02262         //configData.newDetectionFramecountTrigger = config.newDetectionFramecountTrigger;
02263         
02264         //configData.maxProjectionsPerFeature = config.maxProjectionsPerFeature;
02265         
02266         // TRACK RECOVERY - OLD
02267         /*
02268         configData.nearSearchHistory = config.nearSearchHistory;
02269         configData.farSearchHistory = config.farSearchHistory;
02270         configData.lossGaps[0] = configData.nearSearchHistory;
02271         configData.lossGaps[1] = configData.farSearchHistory;
02272         */
02273         
02274         // TRACK RECOVERY - NEW
02275         
02276         
02277         
02278         
02279         
02280         configData.initializeDetectors(keypointDetector, &homographyDetector);
02281         
02282         configData.drawingHistory = config.drawingHistory;
02283         
02284         //configData.minPointsForWarning = config.minPointsForWarning;
02285         
02286 }
02287 


thermalvis
Author(s): Stephen Vidas
autogenerated on Sun Jan 5 2014 11:38:45