monoslam.cpp

#include "monoslam.hpp"

struct timeval cycle_timer;
//double elapsedTime;

int main(int argc, char** argv) {
    
    ros::init(argc, argv, "thermal_slam");
    
    ROS_INFO("Node launched.");
    
    ros::NodeHandle private_node_handle("~");
    
    slamData startupData;
    
    bool inputIsValid = startupData.obtainStartingData(private_node_handle);
    
    // Trim the trailing characters of the executable path (assumed to leave the node's base directory)
    startupData.read_addr = argv[0];
    startupData.read_addr = startupData.read_addr.substr(0, startupData.read_addr.size()-12);
    
    if (!inputIsValid) {
        ROS_ERROR("Configuration invalid.");
        return -1;
    }
    
    ROS_INFO("Startup data processed.");
    
    //ros::Rate loop_rate(25);
    
    ros::NodeHandle nh;
    
    boost::shared_ptr<slamNode> slam_node(new slamNode(nh, startupData));
    
    globalNodePtr = &slam_node;
    
    signal(SIGINT, mySigintHandler);
    
    ROS_INFO("Node configured.");
    
    ros::AsyncSpinner spinner(2);
    
    spinner.start();
    
    ros::waitForShutdown();
    
    ROS_INFO("Exiting.");
    
    return 0;
    
}

void slamNode::processScorecard() {
    
    // Open up the scorecard file; resolve relative paths against the node's config directory
    if ((configData.initializationScorecard[0] == '.') && (configData.initializationScorecard[1] == '.')) {
        configData.initializationScorecard = configData.read_addr + "nodes/monoslam/config/" + configData.initializationScorecard;
    }
    
    //ROS_ERROR("Opening scorecard file: (%s)", configData.initializationScorecard.c_str());
    
    ifstream ifs(configData.initializationScorecard.c_str());
    
    if (!ifs.is_open()) {
        ROS_ERROR("Unable to open scorecard file (%s)", configData.initializationScorecard.c_str());
        return;
    }
    
    for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
        ifs >> scorecardParams[iii][0];
    }
    
    for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
        ifs >> scorecardParams[iii][1];
    }
    
    for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
        ifs >> scorecardParams[iii][2];
    }
    
    ifs.close();
    
    for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
        ROS_INFO("Criteria (%d) = (%f, %f, %f)", iii, scorecardParams[iii][0], scorecardParams[iii][1], scorecardParams[iii][2]);
    }
    
}
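
// Note on the scorecard layout, inferred purely from the reads above (the file format is not
// otherwise documented here): the stream is consumed as three groups of
// INITIALIZATION_SCORING_PARAMETERS whitespace-separated values, with group k filling
// scorecardParams[i][k]. A hypothetical file for three criteria might therefore look like:
//
//   1.0   0.5   200.0
//   0.2   0.1    50.0
//   0.1   0.05   10.0
//
// so that criterion i is described by the i-th value of each of the three rows.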

bool slamData::obtainStartingData(ros::NodeHandle& nh) {
    
    nh.param<std::string>("stream", stream, "null");
    
    if (stream != "null") {
        ROS_INFO("Tracker stream (%s) selected.", stream.c_str());
    } else {
        ROS_ERROR("No tracker stream specified.");
        return false;
    }
    
    nh.param<bool>("logErrors", logErrors, true);
    
    nh.param<bool>("keyframeEvaluationMode", keyframeEvaluationMode, false);
    
    nh.param<int>("maxTestsPerFrame", maxTestsPerFrame, 10);
    
    nh.param<int>("maxInitializationFrames", maxInitializationFrames, 30);
    
    nh.param<int>("minStartingSeparation", minStartingSeparation, 4);
    nh.param<int>("maxStartingSeparation", maxStartingSeparation, 12);
    
    nh.param<std::string>("initializationScorecard", initializationScorecard, "../config/default_scorecard.txt");
    
    return true;
}
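
// These parameters are read from the node's private namespace, so they would normally be set
// from a launch file or the command line. As a rough illustration only (the executable name
// "monoslam" and the topic value are assumptions, not taken from any shipped launch file), an
// invocation might look like:
//
//   rosrun thermalvis monoslam _stream:=/thermalvis/tracks _logErrors:=false \
//       _maxInitializationFrames:=50 _initializationScorecard:=../config/default_scorecard.txt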

bool slamNode::checkForKeyframe() {
    unsigned int image_idx_2 = currentPoseIndex + 1;
    unsigned int previousKeyframe = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx;
    bool keyframeAdded = false;
    bool hasReverted = false;
    
    // IF CURRENT FRAME IS NOT A KEYFRAME, CHECK TO SEE IF IT SHOULD BE
    if (keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx != image_idx_2) {
        
        vector<unsigned int> tempIndices, maintainedIndices;
        
        getActiveTracks(tempIndices, featureTrackVector, previousKeyframe, image_idx_2);
        filterToCompleteTracks(maintainedIndices, tempIndices, featureTrackVector, previousKeyframe, image_idx_2);
        
        double motionScore = getFeatureMotion(featureTrackVector, maintainedIndices, previousKeyframe, image_idx_2);
        
        vector<unsigned int> untriangulatedIndices;
        unsigned int trackedSinceLast = maintainedIndices.size();
        reduceActiveToTriangulated(featureTrackVector, maintainedIndices, untriangulatedIndices);
        
        //printf("%s::%s << Tracked since last keyframe (%d, %d): %d / %d", __PROGRAM__, __FUNCTION__, previousKeyframe, image_idx_2, trackedSinceLast, startingTracksCount);
        
        bool lowProportionTracked = false;
        if (trackedSinceLast < ((unsigned int) (((double) startingTracksCount) * configData.requiredTrackFrac))) {
            lowProportionTracked = true;
            //printf("%s::%s << Low proportion tracked: (%d / %d).", __PROGRAM__, __FUNCTION__, trackedSinceLast, startingTracksCount);
        }
        
        // A low feature count will only trigger new keyframe selection if the PROPORTION tracked is also low...
        
        if (motionScore > configData.motionThreshold) {
            keyframe_store.addKeyframe(image_idx_2-1, blank);
            keyframeAdded = true;
            currentPoseIndex--;
            hasReverted = true;
            //printf("%s::%s << Marking (%d) as keyframe (large feature motion).", __PROGRAM__, __FUNCTION__, image_idx_2, motionScore);
        } else if ((image_idx_2 - previousKeyframe) == configData.maxKeyframeSeparation) {
            keyframe_store.addKeyframe(image_idx_2, blank);
            keyframeAdded = true;
            //printf("%s::%s << Marking (%d) as keyframe (max frames between).", __PROGRAM__, __FUNCTION__, image_idx_2);
        } else if (lowProportionTracked && (((int)trackedSinceLast) < configData.minTrackedFeatures) && (keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx != (image_idx_2-1))) {
            // Too few features are tracked, and the previous frame is NOT yet a keyframe, so mark the previous frame as one
            keyframe_store.addKeyframe(image_idx_2-1, blank);
            currentPoseIndex--;
            hasReverted = true;
            //printf("%s::%s << Marking (%d) [prev] as keyframe (too few features).", __PROGRAM__, __FUNCTION__, image_idx_2-1);
            keyframeAdded = true;
        } else if (lowProportionTracked && (((int)trackedSinceLast) < configData.minTrackedFeatures)) {
            // Too few features tracked since the last keyframe, but the previous frame IS already a keyframe, so mark the current frame
            keyframe_store.addKeyframe(image_idx_2, blank);
            keyframeAdded = true;
            //printf("%s::%s << Marking (%d) as keyframe (too few features).", __PROGRAM__, __FUNCTION__, image_idx_2);
        } else if ((((int)maintainedIndices.size()) < configData.min3dPtsForPnP) && (keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx != (image_idx_2-1))) {
            // There are literally not enough triangulated points to perform PnP for the next frame
            keyframe_store.addKeyframe(image_idx_2-1, blank);
            currentPoseIndex--;
            hasReverted = true;
            keyframeAdded = true;
            //printf("%s::%s << Marking (%d) [prev] as keyframe (too few 3d points).", __PROGRAM__, __FUNCTION__, image_idx_2-1);
        } else if (((int)maintainedIndices.size()) < configData.min3dPtsForPnP) {
            //printf("%s::%s << ERROR! Too few 3d points for PnP but previous frame already added...", __PROGRAM__, __FUNCTION__);
        }
        
    }
    
    return hasReverted;
    
    // NOTE: the early return above makes everything below unreachable; it is retained from an
    // earlier version of the keyframe-selection logic.
    if (hasReverted) {
        // Return true if the current frame is a keyframe
        return true;
    }
    
    if (keyframeAdded) {
        ROS_INFO("Added frame (%d) as Keyframe [%d]", keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx, currentPoseIndex);
        //return true;
    } else {
        //currentPoseIndex++;
        //printf("%s << Skipping...", __FUNCTION__);
        //return false;
    }
    
    return false;
}
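
// Summary of the triggers in the cascade above (numbers below are purely illustrative, not
// package defaults): a keyframe is declared when the feature motion score exceeds
// motionThreshold, when maxKeyframeSeparation frames have elapsed since the previous keyframe,
// when both the tracked proportion and absolute count fall too low (e.g. with
// requiredTrackFrac = 0.5 and startingTracksCount = 200, fewer than 100 maintained tracks
// combined with fewer than minTrackedFeatures), or when fewer than min3dPtsForPnP triangulated
// points remain for PnP. In the "reverted" cases the PREVIOUS frame is marked as the keyframe
// and currentPoseIndex is wound back by one.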

void slamNode::show_poses() {
    
    for (int iii = 0; iii < currentPoseIndex; iii++) {
        if (ACM[iii].rows == 4) {
            
            cv::Mat temp;
            
            estimatePoseFromKnownPoints(temp, configData.cameraData, featureTrackVector, iii, eye4);
            
            ROS_INFO("EST[%d] = ", iii);
            cout << temp << endl;
            
            estimatePoseFromKnownPoints(temp, configData.cameraData, featureTrackVector, iii, ACM[iii]);
            
            ROS_INFO("ES2[%d] = ", iii);
            cout << temp << endl;
            
            ROS_INFO("ACM[%d] = ", iii);
            cout << ACM[iii] << endl;
        }
    }
    
}

void slamNode::getGuidingPose(cv::Mat *srcs, cv::Mat& dst, unsigned int idx) {
    
    cv::Mat eye4 = cv::Mat::eye(4, 4, CV_64FC1);
    
    unsigned int maxGuides = std::min(((int) idx), configData.maxGuides);
    
    if (srcs[idx].rows == 4) {
        // Already has an estimate
        srcs[idx].copyTo(dst);
    } else if (idx == 0) {
        // First frame so use identity
        eye4.copyTo(dst);
    } else if (srcs[idx-1].rows != 4) {
        // No previous frame at all so just use identity as guide
        eye4.copyTo(dst);
    } else {
        // Weighted combo of previous frames
        
        //printf("%s::%s << Estimating frame (%d) using previous frame poses...", __PROGRAM__, __FUNCTION__, idx);
        
        cv::Mat R_dev, t_dev;
        R_dev = cv::Mat::zeros(3, 1, CV_64FC1);
        t_dev = cv::Mat::zeros(3, 1, CV_64FC1);
        
        double total_contrib = 0.0;
        
        for (unsigned int iii = idx-maxGuides; iii < idx-1; iii++) {
            
            double contrib = ((double) idx) - ((double) iii);
            total_contrib += contrib;
            
            //printf("%s::%s << Using frame (%d:%d) [contrib = %f]", __PROGRAM__, __FUNCTION__, iii, iii+1, contrib);
            
            // For each pair of frames (iii & iii+1)
            
            cv::Mat R1, R2, t1, t2, rv1, rv2, rd, td;
            
            // Decompose into R-vec and t-vec
            decomposeTransform(srcs[iii], R1, t1);
            Rodrigues(R1, rv1);
            decomposeTransform(srcs[iii+1], R2, t2);
            Rodrigues(R2, rv2);
            
            // Find the derivative
            rd = rv2-rv1;
            
            // Need to make sure the derivative is represented as the minimum angular change
            for (unsigned int jjj = 0; jjj < 3; jjj++) {
                
                while (rd.at<double>(jjj, 0) > M_PI) {
                    rd.at<double>(jjj, 0) -= 2*M_PI;
                }
                
                while (rd.at<double>(jjj, 0) < -M_PI) {
                    rd.at<double>(jjj, 0) += 2*M_PI;
                }
            }
            
            //cout << "Deriv rotation = " << rd << endl;
            
            td = t2-t1;
            
            //cout << "Deriv translation = " << td << endl;
            
            // Add a weighted version of the derivative to R_dev & t_dev
            R_dev += pow(0.5, contrib) * rd;
            t_dev += pow(0.5, contrib) * td;
            
        }
        
        // Re-weight R_dev and t_dev
        R_dev /= total_contrib;
        t_dev /= total_contrib;
        
        //cout << "Weighted rotation = " << R_dev << endl;
        //cout << "Weighted translation = " << t_dev << endl;
        
        // Add R_dev & t_dev to the previous pose
        cv::Mat R_n, t_n, rv_n;
        decomposeTransform(srcs[idx-1], R_n, t_n);
        Rodrigues(R_n, rv_n);
        
        rv_n += R_dev;
        Rodrigues(rv_n, R_n);
        t_n += t_dev;
        
        //cout << "New rotation = " << rv_n << endl;
        //cout << "New translation = " << t_n << endl;
        
        cv::Mat T_n;
        composeTransform(R_n, t_n, T_n);
        transformationToProjection(T_n, dst);
        //compileTransform(dst, R_n, t_n);
        
        //cout << "Compiled transform = " << dst << endl;
        
    }
}
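
// Worked example of the weighting above (the numbers are illustrative only): with idx = 10 and
// maxGuides resolving to 3, the loop visits the frame pairs (7,8) and (8,9) with contrib = 3 and
// 2 and weights 0.5^3 = 0.125 and 0.5^2 = 0.25, and total_contrib = 5. The predicted per-frame
// change is therefore (0.125*d(7->8) + 0.25*d(8->9)) / 5, which is added to pose 9 to form the
// guide for pose 10. Note that if maxGuides resolves to 1 the loop body never executes and
// total_contrib stays 0, so the divisions would produce NaNs; this path appears to assume
// configData.maxGuides (and idx) of at least 2.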

void mySigintHandler(int sig)
{
    wantsToShutdown = true;
    ROS_WARN("Requested shutdown... terminating feeds...");
    
    (*globalNodePtr)->prepareForTermination();
}

void slamNode::estimatePose(vector<unsigned int>& basisNodes, unsigned int idx) {
    
    //bool alreadyEstimated = false;
    
    //ROS_INFO("Estimating pose (%d)...", idx);
    
    if (ACM[idx].rows != 4) {
        
        if (configData.timeDebug) poseEstimationTime.startRecording();
        
        cv::Mat guidePose;
        getGuidingPose(ACM, guidePose, idx);
        
        //ROS_INFO("Guided pose gathered (%d)", idx);
        estimatePoseFromKnownPoints(ACM[idx], configData.cameraData, featureTrackVector, idx, guidePose);
        //cout << "guidePose = " << guidePose << endl;
        
        if (configData.timeDebug) poseEstimationTime.stopRecording();
        
        if (ACM[idx-1].rows != 4) {
            //printf("%s::%s << Estimating pose (%d) with no guidance...", __PROGRAM__, __FUNCTION__, currentPoseIndex+1);
            //estimatePoseFromKnownPoints(ACM[idx], configData.cameraData, featureTrackVector, idx, eye4);
        } else {
            //printf("%s::%s << Estimating pose (%d) with (%d) to guide...", __PROGRAM__, __FUNCTION__, currentPoseIndex+1, currentPoseIndex);
            //estimatePoseFromKnownPoints(ACM[idx], configData.cameraData, featureTrackVector, idx, ACM[idx-1]);
        }
        
    } else {
        //printf("%s::%s << Pose (%d) already estimated...", __PROGRAM__, __FUNCTION__, currentPoseIndex+1);
        //alreadyEstimated = true;
        //return;
    }
    
    //cout << "estPose = " << ACM[idx] << endl;
    
    // Some kind of subsystem BA to go here...
    
    if (1) { // (!alreadyEstimated) {
        double avgError;
        
        if (configData.timeDebug) bundleAdjustmentTime.startRecording();
        if (basisNodes.size() <= ((unsigned int) (configData.adjustmentFrames/2))) {
            //ROS_INFO("Basis nodes size is low: %d (%d)", basisNodes.size(), configData.flowback);
            avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.poseEstimateIterations, false, false, 1);
        } else if ((((int) basisNodes.size()) - configData.flowback) > 0) {
            // Cast before subtracting so a flowback larger than the basis size cannot wrap around as an unsigned value
            //ROS_INFO("Basis nodes size is a bit larger: %d (%d)", basisNodes.size(), configData.flowback);
            avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.poseEstimateIterations, false, false, basisNodes.size()-configData.flowback);
        } else {
            //printf("Basis nodes size is poor: %d (%d)", basisNodes.size(), configData.flowback);
            avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.poseEstimateIterations, false, false, 1);
        }
        if (configData.timeDebug) bundleAdjustmentTime.stopRecording();
        //cout << "refinedPose = " << ACM[idx] << endl;
        
        //printf("Error adjusting latest pose (%d) estimate: %f", idx, avgError);
        
        if (configData.logErrors) {
            error_file << idx << " " << avgError << endl;
        }
    }
    
}
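
// The trailing argument of keyframeBundleAdjustment() is used here as the number of leading
// cameras held fixed in the basis (an interpretation based purely on how it is called in this
// file, not on the function's documentation). For illustration, with hypothetical values
// adjustmentFrames = 10 and flowback = 4, a basis of 12 nodes would keep the first 8 fixed and
// leave only the 4 most recent poses free, while a basis of 5 or fewer nodes fixes just the first.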

void slamNode::getBasisNodes(vector<unsigned int>& basisNodes, unsigned int idx) {
    
    //printf("%s << ENTERED.\n", __FUNCTION__);
    
    basisNodes.clear();
    
    if (idx < keyframe_store.keyframes.at(0).idx) {
        ROS_WARN("ERROR! IDX too low (%d / %d)", idx, keyframe_store.keyframes.at(0).idx);
        return;
    }
    
    int activeCameras = ((int) idx) + 1 - ((int) keyframe_store.keyframes.at(0).idx);
    
    if (activeCameras < 2) {
        return;
    }
    
    if (activeCameras <= configData.adjustmentFrames) {
        
        unsigned int firstNode = keyframe_store.keyframes.at(0).idx;
        
        for (unsigned int iii = firstNode; iii <= idx; iii++) {
            basisNodes.push_back(iii);
        }
        
    } else {
        // Need to come up with some kind of subsequence to help estimate the latest pose:
        // use the same number of cameras, but spread the earlier ones out a bit.
        
        // How many MORE cameras than acceptable are there?
        unsigned int surplus = activeCameras - configData.adjustmentFrames;
        unsigned int spread = surplus / configData.keyframeSpacing;
        
        spread = std::min(spread, ((unsigned int) (configData.adjustmentFrames-configData.flowback)));
        
        //printf("%s::%s << idx = %d; activeCameras = %d; adjFrames = %d; surplus = %d; spread = %d", __PROGRAM__, __FUNCTION__, idx, activeCameras, configData.adjustmentFrames, surplus, spread);
        
        // For every keyframeSpacing frames in surplus, stretch out the early ones
        unsigned int starting_cam = idx - configData.adjustmentFrames + spread + 2; // this will be the first used frame
        starting_cam = starting_cam - (starting_cam % configData.keyframeSpacing); // round down to the nearest keyframeSpacing multiple
        starting_cam -= configData.keyframeSpacing*(spread-1);
        
        //printf("%s::%s << starting_cam = %d; spread = %d; absolute limit = %d", __PROGRAM__, __FUNCTION__, starting_cam, spread, keyframe_store.keyframes.at(0).idx);
        
        for (unsigned int iii = starting_cam; iii < (idx - configData.adjustmentFrames + spread); iii += configData.keyframeSpacing) {
            //printf("%s << node: %d", __FUNCTION__, iii);
            basisNodes.push_back(iii);
        }
        
        //idx - (configData.adjustmentFrames - spread)
        starting_cam = idx - configData.adjustmentFrames + 1 + spread + 1;
        for (unsigned int iii = starting_cam; iii <= idx; iii++) {
            //printf("%s << node: %d", __FUNCTION__, iii);
            basisNodes.push_back(iii);
        }
        
    }
    
    //printf("%s << EXITING.\n", __FUNCTION__);
    
}
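
// Worked example of the subsequence logic above (all configuration values are hypothetical):
// with keyframeSpacing = 5, adjustmentFrames = 10, flowback = 4, the first keyframe at index 0
// and idx = 30: activeCameras = 31, surplus = 21, spread = 21/5 = 4 (capped at 10-4 = 6).
// starting_cam = 30-10+4+2 = 26, rounded down to 25, minus 5*(4-1) = 15, giving 10. The first
// loop then pushes {10, 15, 20} (stopping below 24) and the second loop pushes {26..30}, so the
// basis is a sparse set of older frames plus a dense tail of the most recent poses.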

void slamNode::processNextFrame() {
    
    //double avgError;
    
    //ROS_INFO("About to process frame (%d)", currentPoseIndex);
    
    //ROS_INFO("Checking first frame (%d)", keyframe_store.keyframes.at(0).idx);
    
    if (!matricesAreEqual(ACM[keyframe_store.keyframes.at(0).idx], eye4)) {
        ROS_ERROR("First camera (%d) corrupted...", keyframe_store.keyframes.at(0).idx);
        while (1) {} // deliberate trap: halt here so the corruption is obvious during debugging
    }
    
    vector<unsigned int> basisNodes;
    
    // Try triangulating points and fixing new camera at the same time...
    if (1) {
        
        if (configData.timeDebug) triangulationTime.startRecording();
        unsigned int triangulationIndex = currentPoseIndex-configData.flowback;
        
        ROS_INFO("Getting basis nodes...");
        getBasisNodes(basisNodes, triangulationIndex);
        
        if (basisNodes.size() > 0) {
            
            ROS_INFO("Finding relevant indices...");
            
            vector<unsigned int> triangulatedIndices, untriangulatedIndices;
            findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, basisNodes.at(0), triangulationIndex);
            
            // unsigned int points_in_3d;
            if (untriangulatedIndices.size() > 0) {
                
                vector<unsigned int> triangulatableIndices;
                findTriangulatableTracks3(featureTrackVector, triangulatableIndices, triangulationIndex, configData.framesForTriangulation);
                
                if (triangulatableIndices.size() > 0) {
                    //ROS_INFO("About to triangulate (%d) new tracks...", triangulatableIndices.size());
                    triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, basisNodes.at(0), triangulationIndex);
                    //ROS_INFO("Tracks triangulated.");
                    //double avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, basisNodes.size()-3);
                    //printf("%s::%s << F(%d) Adjustment error with newly triangulated points = %f (k = %d)", __PROGRAM__, __FUNCTION__, currentPoseIndex, avgError, basisNodes.size());
                    
                    if (currentPoseIndex > 60) {
                        //while (1) {}
                    }
                    
                }
                
            }
            
            if (configData.timeDebug) triangulationTime.stopRecording();
            
            ROS_INFO("Performing adjustment... (%d)", ((int)basisNodes.size()));
            
            for (unsigned int iii = 0; iii < basisNodes.size(); iii++) {
                //ROS_INFO("basisNodes.at(%d) = (%d)", iii, basisNodes.at(iii));
            }
            
            for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
                
                if (featureTrackVector.at(iii).locations.size() > 0) {
                    //ROS_INFO("featureTrackVector.at(%d).locations.at(%d) = (%d)", iii, featureTrackVector.at(iii).locations.size()-1, featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex);
                }
                
            }
            
            if (configData.timeDebug) bundleAdjustmentTime.startRecording();
            double avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, basisNodes.size());
            ROS_INFO("F(%d) Adjustment error with newly triangulated points = %f (k = %d)", currentPoseIndex, avgError, ((int)basisNodes.size()));
            if (configData.timeDebug) bundleAdjustmentTime.stopRecording();
        }
        
    }
    
    // Make sure the previous pose has been estimated
    
    /*
    getBasisNodes(basisNodes, currentPoseIndex-1);
    
    avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, std::max(1, ((int)basisNodes.size())-10));
    printf("%s::%s << F(%d) Free adjustment error with = %f (k = %d)", __PROGRAM__, __FUNCTION__, currentPoseIndex, avgError, basisNodes.size());
    */
    
    ROS_INFO("Getting more basis nodes...");
    getBasisNodes(basisNodes, currentPoseIndex);
    ROS_INFO("Estimating pose...");
    estimatePose(basisNodes, currentPoseIndex);
    
    if (configData.timeDebug) {
        if ((currentPoseIndex % configData.timeSpacing) == 0) {
            //double factor;
            trackHandlingTime.calcParameters();
            triangulationTime.calcParameters();
            poseEstimationTime.calcParameters();
            bundleAdjustmentTime.calcParameters(); // * ((double) trackHandlingTime.cycles) / ((double) currentPoseIndex)
            ROS_WARN("Showing timing summary:");
            ROS_INFO("Track summary: (%d, %f, %f)", trackHandlingTime.cycles, trackHandlingTime.average * ((double) trackHandlingTime.cycles) / ((double) currentPoseIndex), trackHandlingTime.sigma * ((double) trackHandlingTime.cycles) / ((double) currentPoseIndex));
            ROS_INFO("Triangulation summary: (%d, %f, %f)", triangulationTime.cycles, triangulationTime.average * ((double) triangulationTime.cycles) / ((double) currentPoseIndex), triangulationTime.sigma * ((double) triangulationTime.cycles) / ((double) currentPoseIndex));
            ROS_INFO("Pose summary: (%d, %f, %f)", poseEstimationTime.cycles, poseEstimationTime.average * ((double) poseEstimationTime.cycles) / ((double) currentPoseIndex), poseEstimationTime.sigma * ((double) poseEstimationTime.cycles) / ((double) currentPoseIndex));
            ROS_INFO("Bundle summary: (%d, %f, %f)", bundleAdjustmentTime.cycles, bundleAdjustmentTime.average * ((double) bundleAdjustmentTime.cycles) / ((double) currentPoseIndex), bundleAdjustmentTime.sigma * ((double) bundleAdjustmentTime.cycles) / ((double) currentPoseIndex));
            
        }
    }
    
    if (1) {
        update_display();
        currentPoseIndex++;
        return;
    }
    
    if (0) {
        vector<unsigned int> triangulatedIndices, untriangulatedIndices;
        findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, keyframe_store.keyframes.at(0).idx, currentPoseIndex);
        
        //unsigned int points_in_3d;
        if (untriangulatedIndices.size() > 0) {
            
            vector<unsigned int> triangulatableIndices;
            findTriangulatableTracks3(featureTrackVector, triangulatableIndices, currentPoseIndex, configData.framesForTriangulation);
            
            if (triangulatableIndices.size() > 0) {
                //ROS_INFO("About to triangulate (%d) new tracks...", triangulatableIndices.size());
                triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, keyframe_store.keyframes.at(0).idx, currentPoseIndex);
                double avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, basisNodes.size()-3);
                if (configData.verboseMode) { ROS_INFO("F(%d) Adjustment error with newly triangulated points = %f (k = %d)", currentPoseIndex, avgError, ((int)basisNodes.size())); };
            }
            
        }
    }
    
    // Checks the next frame to determine whether it is a keyframe
    if (checkForKeyframe()) {
        
        if ((currentPoseIndex+1) == keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx) {
            // reverted
            //ROS_INFO("Keyframe found (reverted); processing (%d)", currentPoseIndex+1);
        } else {
            // Not a keyframe
            update_display();
            currentPoseIndex++;
            return;
        }
        
    } else {
        
        if ((currentPoseIndex+1) == keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx) {
            // not reverted
            //ROS_INFO("Keyframe found (non-reverted); processing (%d)", currentPoseIndex+1);
            basisNodes.clear();
            getBasisNodes(basisNodes, currentPoseIndex+1);
            estimatePose(basisNodes, currentPoseIndex+1);
        } else {
            // Not a keyframe
            update_display();
            currentPoseIndex++;
            return;
        }
        
    }
    
    if (0) {
        update_display();
        currentPoseIndex++;
        return;
    }
    
    // This block is used to get a decent initial estimate of the latest pose
    
    unsigned int image_idx_1 = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-2).idx;
    unsigned int image_idx_2 = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx;
    
    //ROS_INFO("Currently processing frame (%d:%d) as Keyframe", image_idx_1, image_idx_2);
    
    vector<cv::Point2f> pts1, pts2;
    vector<cv::Point3f> objectPoints;
    getTriangulatedFullSpanPoints(featureTrackVector, pts1, pts2, image_idx_1, image_idx_2, objectPoints);
    
    //estimatePoseFromKnownPoints(ACM[image_idx_2], configData.cameraData, featureTrackVector, image_idx_2, eye4);
    
    vector<unsigned int> adjustableKeyframeIndices;
    /*
    for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
        adjustableKeyframeIndices.push_back(keyframe_store.keyframes.at(iii).idx);
    }
    
    while (adjustableKeyframeIndices.size() > (MAX_CAMERAS_FOR_POSE_EST_BA / 2)) {
        adjustableKeyframeIndices.erase(adjustableKeyframeIndices.begin());
    }
    */
    
    vector<unsigned int> keyframe_indices, tmp_indices, subseq_indices;
    
    for (unsigned int iii = 0; iii < keyframe_store.keyframes.size()-1; iii++) {
        keyframe_indices.push_back(keyframe_store.keyframes.at(iii).idx);
    }
    
    // Risky mod
    //while (keyframe_indices.size() > (DEFAULT_ADJUSTMENT_FRAMES / 2)) {
    while (keyframe_indices.size() > (configData.adjustmentFrames / 2)) {
        keyframe_indices.erase(keyframe_indices.begin());
    }
    
    for (int iii = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx+1; iii < currentPoseIndex+1; iii++) {
        tmp_indices.push_back(iii);
    }
    
    // Risky mod
    //randomSelection(tmp_indices, subseq_indices, (DEFAULT_ADJUSTMENT_FRAMES / 2));
    randomSelection(tmp_indices, subseq_indices, (configData.adjustmentFrames / 2));
    
    //unsigned int fixed_cameras = keyframe_indices.size();
    
    adjustableKeyframeIndices.insert(adjustableKeyframeIndices.end(), keyframe_indices.begin(), keyframe_indices.end());
    adjustableKeyframeIndices.insert(adjustableKeyframeIndices.end(), subseq_indices.begin(), subseq_indices.end());
    adjustableKeyframeIndices.push_back(image_idx_2);
    
    //cout << "Estimated pose: " << ACM[image_idx_2] << endl;
    
    //unsigned int startingPoseForFullSys = ((unsigned int) std::max(((int) image_idx_2)-CAMERAS_PER_SYS, ((int) lastBasePose)));
    
    //unsigned int startingPoseForFullSys = keyframe_store.keyframes.at(0).idx;
    
    // DETERMINE NUMBER OF TRIANGULATED POINTS CURRENTLY BEING TRACKED
    //numTriangulatedPoints = countActiveTriangulatedTracks(fullSpanIndices, featureTrackVector);
    
    // ADJUST THE MINIMUM NUMBER OF FRAMES SO THAT YOU GET A SUFFICIENT AMOUNT
    
    //avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, true);
    //avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, adjustableKeyframeIndices, configData.keyframeIterations, false, false, fixed_cameras);
    
    if (ACM[currentPoseIndex+2].rows == 4) {
        //printf("%s::%s << Pose (%d) somehow estimated at this point [%d]", __PROGRAM__, __FUNCTION__, currentPoseIndex+2, 2);
    }
    
    //ROS_INFO("F(%d) Initial adjustment error = %f (k = %d)", image_idx_2, avgError, adjustableKeyframeIndices.size());
    
    if (0) {
        update_display();
        currentPoseIndex++;
        return;
    }
    
    //cout << "After kF adjustment: " << ACM[image_idx_2] << endl;
    /*
    for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
        
        cout << "(" << iii << ") Actual pose: " << ACM[keyframe_store.keyframes.at(iii).idx] << endl;
        
        Mat tmp;
        estimatePoseFromKnownPoints(tmp, configData.cameraData, featureTrackVector, keyframe_store.keyframes.at(iii).idx, ACM[keyframe_store.keyframes.at(iii).idx]);
        
        cout << "(" << iii << ") Re-estimated pose: " << tmp << endl;
    }
    */
    
    vector<unsigned int> triangulatedIndices, untriangulatedIndices;
    
    //reduceActiveToTriangulated(featureTrackVector, triangulatedIndices, untriangulatedIndices);
    
    findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, image_idx_1, image_idx_2);
    
    //printf("%s::%s << untriangulatedIndices.size() = %d vs %d", __PROGRAM__, __FUNCTION__, untriangulatedIndices.size(), triangulatedIndices.size());
    
    //unsigned int points_in_3d;
    if (untriangulatedIndices.size() > 0) {
        
        vector<unsigned int> triangulatableIndices;
        findTriangulatableTracks3(featureTrackVector, triangulatableIndices, image_idx_2, configData.framesForTriangulation); // (image_idx_2-image_idx_1)/2
        
        if (triangulatableIndices.size() > 0) {
            //ROS_INFO("About to triangulate (%d) new tracks...", triangulatableIndices.size());
            triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, image_idx_1, image_idx_2);
            //avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, adjustableKeyframeIndices, configData.keyframeIterations, false, true);
            //ROS_INFO("F(%d) Adjustment error with newly triangulated points = %f (k = %d)", image_idx_2, avgError, adjustableKeyframeIndices.size());
        }
        
    }
    
    if (1) {
        update_display();
        currentPoseIndex++;
        return;
    }
    
    ROS_INFO("Establishing new adjustment indices...");
    
    vector<unsigned int> newAdjustmentIndices;
    for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
        
        newAdjustmentIndices.push_back(keyframe_store.keyframes.at(iii).idx);
        
        if (keyframe_store.keyframes.at(iii).idx == image_idx_1) {
            for (unsigned int jjj = image_idx_1+1; jjj < image_idx_2; jjj++) {
                newAdjustmentIndices.push_back(jjj);
            }
        }
    }
    
    update_display();
    currentPoseIndex++;
}
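
// Per-frame pipeline summary (describing the code above, with the early "if (1) { ... return; }"
// block in effect): (1) triangulate any newly triangulatable tracks at index
// currentPoseIndex - flowback and bundle-adjust the surrounding basis window; (2) estimate the
// current pose with estimatePose(); (3) optionally print timing statistics; then publish the
// display and advance currentPoseIndex. The keyframe-selection and keyframe-adjustment code
// further down is currently bypassed by that unconditional return.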

void slamNode::update_display() {
    
    if (currentPoseIndex != -1) {
        if (ACM[currentPoseIndex].rows == 4) {
            assignPose(currentPose, ACM[currentPoseIndex]);
            pose_pub.publish(currentPose);
        }
        
        vector<unsigned int> displayIndices;
        for (int iii = 0; iii <= currentPoseIndex; iii++) {
            if (ACM[iii].rows == 4) {
                displayIndices.push_back(iii);
            }
        }
        
        for (unsigned int iii = 0; iii < displayIndices.size(); iii++) {
            //cout << "ACM[" << displayIndices.at(iii) << "] = " << ACM[displayIndices.at(iii)] << endl;
        }
        
        assignPartialSystem(display_sys, featureTrackVector, configData.cameraData, ACM, displayIndices, false);
        
    } else {
        assignPose(currentPose, eye4);
        pose_pub.publish(currentPose);
        
        display_sys.nodes.clear();
        display_sys.tracks.clear();
    }
    
    drawGraph2(display_sys, camera_pub, points_pub, path_pub, decimation, bicolor);
    
}

void slamNode::update_cameras_to_pnp() {
    
    #pragma omp parallel for
    for (int iii = 0; iii < currentPoseIndex; iii++) {
        if (ACM[iii].rows == 4) {
            estimatePoseFromKnownPoints(ACM[iii], configData.cameraData, featureTrackVector, iii, eye4);
        }
    }
    
}

void slamNode::handle_tracks(const thermalvis::feature_tracksConstPtr& msg) {
    
    if (configData.verboseMode) { ROS_INFO("Handling projections (%d)...", msg->projection_count); }
    
    if (!infoProcessed) {
        //return;
    }
    
    if (msg->indices.size() == 0) {
        ROS_WARN("No new tracks.");
        return;
    }
    
    //ROS_WARN("XY (%d)", 0);
    
    if (configData.timeDebug) trackHandlingTime.startRecording();
    
    main_mutex.lock();
    
    featureTrack blankTrack;
    
    //ROS_WARN("XY (%d)", 1);
    
    unsigned int newest_track = 0;
    
    for (unsigned int iii = 0; iii < msg->projection_count; iii++) {
        
        //ROS_WARN("XY [%d] (%d)", iii, 0);
        
        if (msg->indices[iii] > newest_track) {
            newest_track = msg->indices[iii];
        }
        
    }
    
    //ROS_INFO("Newest track = (%d)", newest_track);
    
    if (newest_track >= featureTrackVector.size()) {
        for (unsigned int iii = featureTrackVector.size(); iii <= newest_track; iii++) {
            featureTrackVector.push_back(blankTrack);
        }
    }
    
    //ROS_INFO("Active tracks = (%d)", featureTrackVector.size());
    
    for (unsigned int iii = 0; iii < msg->projection_count; iii++) {
        
        bool alreadyAdded = false;
        
        //ROS_INFO("%d vs %d", msg->cameras.at(iii), latestFrame);
        
        if (((int) msg->cameras.at(iii)) > latestFrame) {
            latestFrame = msg->cameras.at(iii);
        }
        
        //ROS_INFO("Checking projection (%d) = (index: %d / %d)", iii, msg->indices.at(iii), featureTrackVector.size());
        
        for (unsigned int jjj = 0; jjj < featureTrackVector.at(msg->indices.at(iii)).locations.size(); jjj++) {
            
            //ROS_INFO("Checking feature track loc (%d)", jjj);
            
            if (featureTrackVector.at(msg->indices.at(iii)).locations.at(jjj).imageIndex == ((int) msg->cameras.at(iii))) {
                //ROS_WARN("Already exists");
                alreadyAdded = true;
                break;
            }
            
        }
        
        if (!alreadyAdded) {
            //ROS_WARN("Adding...");
            
            cv::Point2f proj(((float) msg->projections_x.at(iii)), ((float) msg->projections_y.at(iii)));
            
            indexedFeature newFeature(msg->cameras.at(iii), proj);
            
            featureTrackVector.at(msg->indices.at(iii)).addFeature(newFeature);
            
            //ROS_WARN("Added.");
        }
        
    }
    
    cv::Mat trackMatrix;
    
    if (0) {
        if (featureTrackVector.size() > 0) {
            //ROS_WARN("Creating track matrix...");
            if (createTrackMatrix(featureTrackVector, trackMatrix)) {
                //ROS_WARN("Track matrix created.");
                cv::imshow("trackMatrix_received", trackMatrix);
                cv::waitKey(1);
            }
            
        }
    }
    
    if (configData.timeDebug) trackHandlingTime.stopRecording();
    
    //elapsedTime = timeElapsedMS(cycle_timer);
    
    main_mutex.unlock();
}
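
// The feature_tracks message is consumed above as a set of parallel arrays (a reading inferred
// from the field accesses here rather than from the message definition itself): for each of the
// projection_count entries, indices[i] names the global track, cameras[i] the image/frame index,
// and projections_x[i] / projections_y[i] the measured image coordinates. featureTrackVector is
// grown so it can always be addressed directly by track index, and a projection is only appended
// if that (track, camera) pair has not been recorded before.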

void slamNode::main_loop(const ros::TimerEvent& event) {
    
    if (configData.keyframeEvaluationMode && evaluationCompleted) {
        return;
    }
    
    if (firstIteration) {
        main_mutex.lock();
        update_display();
        main_mutex.unlock();
        firstIteration = false;
    }
    
    if (!infoProcessed) {
        //printf("%s << info not processed!", __FUNCTION__);
        return;
    }
    
    if (!structureValid) {
        if ((!configData.keyframeEvaluationMode) || (latestFrame >= configData.maxInitializationFrames)) {
            main_mutex.lock();
            structureValid = findStartingFrames();
            main_mutex.unlock();
            return;
        } else if (!f1 && configData.keyframeEvaluationMode) {
            f1 = true;
            ROS_INFO("Waiting for sufficient frames for evaluation...");
        }
        
    }
    
    if (!structureValid) {
        return;
    } else if (!structureFormed) {
        main_mutex.lock();
        structureFormed = formInitialStructure();
        putativelyEstimatedFrames = currentPoseIndex-1;
        main_mutex.unlock();
    }
    
    if (!structureFormed) {
        return;
    }
    
    //while ((currentPoseIndex < latestFrame) && (keyframe_store.keyframes.size() <= 7)) {
    while (currentPoseIndex < latestFrame) {
        main_mutex.lock();
        
        processNextFrame();
        
        if (currentPoseIndex == ((int) keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx)) {
            //refreshPoses();
        }
        
        /*
        if (keyframe_store.keyframes.size() == 3) {
            adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 20);
        } else if (keyframe_store.keyframes.size() == 4) {
            adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 10);
        } else if (keyframe_store.keyframes.size() == 5) {
            adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 5);
        } else {
            adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 3);
        }
        */
        
        //update_display();
        
        //show_poses();
        
        main_mutex.unlock();
    }
    
    if (keyframe_store.keyframes.size() > 3) {
        
        //update_cameras_to_pnp();
        
    }
    
    //bool doneRefinement = false;
    
    //while (0) {
    //while (currentPoseIndex == latestFrame) {
    //if ((keyframe_store.keyframes.size() > 3) && (!doneRefinement)) {
    //printf("%s << currentPoseIndex = %d (%d)", __FUNCTION__, currentPoseIndex, latestFrame);
    if (0) { // ((elapsedTime > 5000.0) && (currentPoseIndex == latestFrame)) {
        
        //main_mutex.lock();
        
        if (keyframe_store.keyframes.size() > 2) {
            
            unsigned int starting_cam = keyframe_store.keyframes.at(0).idx; //((unsigned int) max(0, ((int) currentPoseIndex-50)));
            
            vector<unsigned int> allIndices;
            for (int iii = starting_cam; iii <= currentPoseIndex; iii++) {
                if (ACM[iii].rows == 4) {
                    allIndices.push_back(iii);
                }
            }
            
            double avgError;
            
            ROS_INFO("About to full-system adjust (%d)", currentPoseIndex);
            
            avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, allIndices, 5, true);
            
            ROS_INFO("Full-system (%d) adjustment error = %f (k = %d)", currentPoseIndex, avgError, ((int)allIndices.size()));
            
            //avgError = adjustFullSystem(featureTrackVector, configData.cameraData, ACM, starting_cam, currentPoseIndex, 1);
            
            //retrieveFullSystem(fullsys, ACM, featureTrackVector, currentPoseIndex);
            //printf("%s::%s << Full-sys can be optimized to: %f", __PROGRAM__, __FUNCTION__, avgError);
            
            update_display();
            
            //doneRefinement = true;
            
            /*
            // optimizeFullSystem(sys, featureTrackVector, configData.cameraData, ACM, currentPoseIndex);
            
            //keyframes_sem.lock();
            vector<unsigned int> keyframeIndices;
            for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
                keyframeIndices.push_back(keyframe_store.keyframes.at(iii).idx);
                
            }
            
            //printf("%s << keyframeIndices.size() = %d", __FUNCTION__, keyframeIndices.size());
            
            //keyframes_sem.unlock();
            double avgError;
            
            assignPartialSystem(sys, featureTrackVector, configData.cameraData, ACM, keyframeIndices, true);
            
            printf("%s << sys.nodes() = %d; sys.tracks() = %d", __FUNCTION__, sys.nodes.size(), sys.tracks.size());
            
            avgError = optimizeSystem(sys, 1e-4, 1);
            retrievePartialSystem(sys, ACM, featureTrackVector, keyframeIndices);
            
            //retrieveFullSystem(sys, ACM, featureTrackVector, keyframe_store.keyframes.at(0).idx);
            printf("%s::%s << Optimizing full system while waiting... (%f)", __PROGRAM__, __FUNCTION__, avgError);
            */
        }
        
        //main_mutex.unlock();
        
    }
    
    /*
    if (currentPoseIndex < latestFrame) {
        printf("%s << DEBUG [%d]", __FUNCTION__, 12);
        repetitionNoted = false;
        processNextFrame();
        printf("%s << DEBUG [%d]", __FUNCTION__, 13);
    } else {
        if (!repetitionNoted) {
            printf("%s << CurrentPose at latestFrame [%d]", __FUNCTION__, latestFrame);
        } else {
            repetitionNoted = true;
        }
        
    }
    */
    
}
01147 
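// Camera-info callback: redraws the display graph on every message and, on the first message only,
// copies the intrinsics (K), image size and distortion coefficients into configData.cameraData,
// computes an undistorted camera matrix, registers a fixed camera for display, and publishes an
// identity starting pose.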
01148 void slamNode::handle_info(const sensor_msgs::CameraInfoConstPtr& info_msg) {
01149         
01150         //printf("%s::%s << Entered.", __PROGRAM__, __FUNCTION__);
01151         
01152         drawGraph2(display_sys, camera_pub, points_pub, path_pub, decimation, bicolor);
01153         
01154         if (!infoProcessed) {
01155                 
01156                 ROS_INFO("Handling camera info.");
01157                 
01158                 try     {
01159                         
01160                         configData.cameraData.K = cv::Mat::eye(3, 3, CV_64FC1);
01161                         
01162                         for (unsigned int mmm = 0; mmm < 3; mmm++) {
01163                                 for (unsigned int nnn = 0; nnn < 3; nnn++) {
01164                                         configData.cameraData.K.at<double>(mmm, nnn) = info_msg->K[3*mmm + nnn];
01165                                 }
01166                         }
01167                         
01168                         cout << configData.cameraData.K << endl;
01169                         
01170                         
01171                         
01172                         configData.cameraData.K_inv = configData.cameraData.K.inv();
01173                         
01174 
01175                         
01176                         configData.cameraData.cameraSize.width = info_msg->width;
01177                         configData.cameraData.cameraSize.height = info_msg->height;
01178                 
01179                         //printf("%s << (%d, %d)", __FUNCTION__, configData.cameraData.cameraSize.width, configData.cameraData.cameraSize.height);
01180                         
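                        // ROS "plumb_bob" models carry 5 distortion coefficients; any other model is read
                        // as 8 coefficients (rational_polynomial), with an error logged if the name is unrecognized.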
01181                         unsigned int maxDistortionIndex;
01182                         if (info_msg->distortion_model == "plumb_bob") {
01183                                 maxDistortionIndex = 5;
01184                         } else {
01185                                 
01186                                 if (info_msg->distortion_model != "rational_polynomial") {
01187                                         ROS_ERROR("Unfamiliar distortion model (%s); assuming 8 distortion coefficients.", info_msg->distortion_model.c_str());
01188                                 }
01189                                 
01190                                 maxDistortionIndex = 8;
01191                         }
01192                         
01193                         configData.cameraData.distCoeffs = cv::Mat::zeros(1, maxDistortionIndex, CV_64FC1);
01194                         configData.cameraData.blankCoeffs = cv::Mat::zeros(1, maxDistortionIndex, CV_64FC1);
01195                         
01196                         for (unsigned int iii = 0; iii < maxDistortionIndex; iii++) {
01197                                 configData.cameraData.distCoeffs.at<double>(0, iii) = info_msg->D[iii];
01198                         }
01199                         
01200                         cout << configData.cameraData.distCoeffs << endl;
01201                         
01202                         configData.cameraData.newCamMat = cv::Mat::zeros(3, 3, CV_64FC1);
01203                         
01204                         cv::Rect* validPixROI = 0;
01205                         
01206                         double alpha = 0.00;
01207                         bool centerPrincipalPoint = true;
01208                         
01209                         configData.cameraData.newCamMat = getOptimalNewCameraMatrix(configData.cameraData.K, configData.cameraData.distCoeffs, configData.cameraData.cameraSize, alpha, configData.cameraData.cameraSize, validPixROI, centerPrincipalPoint);
01210                         
01211                         //(configData.cameraData.newCamMat).copyTo(configData.cameraData.K);
01212                         
01213                         cout << configData.cameraData.newCamMat << endl;
01214                         
01215                         infoProcessed = true;
01216                         
01217                         addFixedCamera(display_sys, configData.cameraData, eye4);
01218                         
01219                         drawGraph2(display_sys, camera_pub, points_pub, path_pub, decimation, bicolor);
01220                         
01221                         assignPose(currentPose, eye4);
01222                         pose_pub.publish(currentPose);
01223                         
01224                 } catch (...) /*(sensor_msgs::CvBridgeException& e)*/ {
01225                         ROS_ERROR("Failed to read the camera parameters.");
01226                 }
01227                 
01228                 ROS_INFO("Camera information processed.");
01229                 
01230         } 
01231         
01232         
01233 
01234         
01235 }
01236 
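// Currently a no-op: every pose-refreshing branch in this function is commented out, so the
// stored keyframe poses are left untouched.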
01237 void slamNode::refreshPoses() {
01238         
01239         /*
01240         for (unsigned int iii = keyframe_store.keyframes.at(0).idx; iii < keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx; iii++) {
01241                 
01242                 //estimatePoseFromKnownPoints(ACM[iii], configData.cameraData, featureTrackVector, iii, eye4);
01243                 
01244                 if (ACM[iii].rows == 4) {
01245                         Mat test;
01246                         estimatePoseFromKnownPoints(test, configData.cameraData, featureTrackVector, iii, eye4);
01247                         
01248                         //cout << "est[" << iii << "] = " << test << endl;
01249                         //cout << "acm[" << iii << "] = " << ACM[iii] << endl;
01250                 }
01251                 
01252         }
01253         */
01254         
01255         /*
01256         for (unsigned int iii = 0; iii < currentPoseIndex; iii++) {
01257                 
01258                 estimatePoseFromKnownPoints(ACM[iii], configData.cameraData, featureTrackVector, iii, eye4);
01259                 
01260                 if (ACM[iii].rows == 4) {
01261                         Mat test;
01262                         estimatePoseFromKnownPoints(test, configData.cameraData, featureTrackVector, iii, eye4);
01263                         
01264                         //cout << "est[" << iii << "] = " << test << endl;
01265                         //cout << "acm[" << iii << "] = " << ACM[iii] << endl;
01266                 }
01267                 
01268         }
01269         */
01270         for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
01271                 //estimatePoseFromKnownPoints(ACM[keyframe_store.keyframes.at(iii).idx], configData.cameraData, featureTrackVector, keyframe_store.keyframes.at(iii).idx, eye4);
01272         }
01273         
01274 }
01275 
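// Hook for any cleanup needed before termination; currently a no-op.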
01276 void slamNode::prepareForTermination() {
01277         return;
01278 }
01279 
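// Searches the early frames for a keyframe pair suitable for initializing the structure. For each
// candidate frame, a limited number of earlier frames (between minStartingSeparation and
// maxStartingSeparation frames away) are scored with testKeyframePair(); the search stops once a
// pair exceeds minStartupScore, and the best-scoring pair is then assigned as the starting keyframes.
// In keyframeEvaluationMode every candidate pair is instead reconstructed, presented to the user
// for a yes/no verdict via the "readybar" window, and logged to the evaluation file.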
01280 bool slamNode::findStartingFrames() {
01281         
01282         //ROS_INFO("Finding initial keyframes (latest frame = %d)", latestFrame);
01283         
01284         //printf("%s::%s << Finding initial keyframes...", __PROGRAM__, __FUNCTION__);
01285         
01286         bool foundStartingPair = false;
01287         
01288         if (latestFrame < 1) {
01289                 return foundStartingPair;
01290         }
01291 
01292         double keyframe_scores[5];
01293         cv::Mat startingTrans;
01294         
01295         cv::Mat blankMat = cv::Mat::zeros(80, 640, CV_8UC3);
01296         
01297         if ((configData.keyframeEvaluationMode) && (!evaluationStream.is_open())) {
01298                 ROS_WARN("Opening evaluation summary stream...");
01299                 evaluationStream.open(configData.evaluationFile.c_str(), ios::out | ios::app);
01300         }
01301 
01302         for (int jjj = configData.minStartingSeparation; jjj < min(latestFrame+1, configData.maxInitializationFrames); jjj++) {
01303                 
01304                 //ROS_INFO("Checking up to frame (%d) : (%d /%d)", jjj, latestFrame+1, configData.maxInitializationFrames);
01305                 
01306                 vector<unsigned int> startersToTest;
01307                 
01308                 for (int iii = max(0, ((int)jjj)-configData.maxStartingSeparation); iii < jjj-configData.minStartingSeparation; iii++) {
01309                         startersToTest.push_back(iii);
01310                 }
01311                 
01312                 
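                // Randomly discard candidate starting frames until no more than maxTestsPerFrame
                // remain to be tested against frame jjj.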
01313                 while (((int)startersToTest.size()) > configData.maxTestsPerFrame) {
01314                         
01315                         unsigned int randIndex = rand() % startersToTest.size();
01316                         startersToTest.erase(startersToTest.begin() + randIndex);
01317                         
01318                 }
01319                 
01320                 
01321                 //#pragma omp parallel for
01322                 for (unsigned int iii = 0; iii < startersToTest.size(); iii++) {
01323                         
01324                         if ((foundStartingPair) && (!configData.keyframeEvaluationMode)) {
01325                                 break;
01326                         }
01327                         
01328                         if (keyframeTestFlags.at<unsigned char>(startersToTest.at(iii),jjj) == 0) {
01329 
01330                                 vector<unsigned int> activeTracks;
01331                                 getActiveTracks(activeTracks, featureTrackVector, startersToTest.at(iii), jjj);
01332 
01333                                 //ROS_ERROR("About to test frames (%d) & (%d)", startersToTest.at(iii), jjj);
01334                                 startingTrans = cv::Mat();
01335                                 keyframeTestScores.at<double>(startersToTest.at(iii),jjj) = testKeyframePair(featureTrackVector, configData.cameraData, scorecardParams, startersToTest.at(iii), jjj, keyframe_scores, startingTrans, true /*configData.keyframeEvaluationMode*/, true);
01336                                 //ROS_INFO("Frames tested");
01337                                 
01338                                 if (keyframeTestScores.at<double>(startersToTest.at(iii),jjj) > 0.0) {
01339                                         //ROS_INFO("score (%d) -> (%d) = (%f)", startersToTest.at(iii), jjj, keyframeTestScores.at<double>(startersToTest.at(iii),jjj));
01340                                 }
01341                                 
01342                                 if (configData.keyframeEvaluationMode) {
01343                                         
01344                                         clearSystem();
01345                                         
01346                                         bool result = true;
01347                                         char outputString[256];
01348                                         
01349                                         ROS_INFO("Assigning as starting frames (%d, %d)...", startersToTest.at(iii), jjj);
01350                                         assignStartingFrames(startersToTest.at(iii), jjj, keyframe_scores, startingTrans);
01351                                         ROS_INFO("Starting frames assigned.");
01352                                         
01353                                         if (startingTrans.rows > 0) {
01354                                                 
01355                                                 //cout << "startingTrans = " << startingTrans << endl;
01356                                                 formInitialStructure();
01357                                                 
01358                                                 ROS_INFO("Initial structure formed.");
01359                                                 
01360                                                 char response = ' ';
01361                                                 
01362                                                 blankMat = cv::Mat(blankMat.size(), blankMat.type(), CV_RGB(0,255,0));
01363                                                 
01364                                                 //while (0) {
01365                                                 while ((response != 'y') && (response != 'n')) {
01366                                                         cv::imshow("readybar", blankMat);
01367                                                         response = cv::waitKey();
01368                                                 }
01369                                                 
01370                                                 blankMat = cv::Mat(blankMat.size(), blankMat.type(), CV_RGB(255,0,0));
01371                                                 cv::imshow("readybar", blankMat);
01372                                                 cv::waitKey(1);
01373                                                 
01374                                                 if (response == 'y') {
01375                                                         result = true;
01376                                                 } else {
01377                                                         result = false;
01378                                                 }
01379                                         } else {
01380                                                 ROS_INFO("Initial starting transformation invalid, returning failure.");
01381                                                 result = false;
01382                                         }
01383                                         
01384                                         
01385                                         
01386                                         ROS_INFO("Preparing string...");
01387                                         sprintf(outputString, "%06d %06d %+02.2f %+02.2f %+02.2f %+02.2f %+02.2f %1.2f %d", startersToTest.at(iii), jjj, keyframe_scores[0], keyframe_scores[1], keyframe_scores[2], keyframe_scores[3], keyframe_scores[4], keyframeTestScores.at<double>(startersToTest.at(iii),jjj), (result ? 1 : 0));
01388                                         ROS_INFO("String written.");
01389                                         /*
01390                                         evaluationStream << startersToTest.at(iii) << " " << jjj;
01391                                         evaluationStream << " " << keyframe_scores[0];
01392                                         evaluationStream << " " << keyframe_scores[1];
01393                                         evaluationStream << " " << keyframe_scores[2];
01394                                         evaluationStream << " " << keyframe_scores[3];
01395                                         evaluationStream << " " << keyframe_scores[4];
01396                                         evaluationStream << " " << 1 << endl;
01397                                         */
01398                                         
01399                                         evaluationStream << outputString << endl;
01400                                 }
01401                                 
01402                                 
01403                                 keyframeTestFlags.at<unsigned char>(startersToTest.at(iii),jjj) = 1;
01404 
01405                                 if (keyframeTestScores.at<double>(startersToTest.at(iii),jjj) >= configData.minStartupScore) {
01406                                         //ROS_INFO("Valid keyframe pair found at (%d, %d) [%f] > [%f]", startersToTest.at(iii), jjj, keyframeTestScores.at<double>(startersToTest.at(iii),jjj), configData.minStartupScore);
01407                                         
01408                                         foundStartingPair = true;
01409 
01410                                 } 
01411                                 
01412                                 ROS_INFO("Keyframe pair (%03d, %03d) initialization score = [%f] {%1.2f, %1.2f, %1.2f, %1.2f, %1.2f}", startersToTest.at(iii), jjj, keyframeTestScores.at<double>(startersToTest.at(iii),jjj), keyframe_scores[0], keyframe_scores[1], keyframe_scores[2], keyframe_scores[3], keyframe_scores[4]);
01413                                 
01414                         }
01415 
01416                 }
01417                 
01418                 if ((foundStartingPair) && (!configData.keyframeEvaluationMode)) {
01419                         break;
01420                 }
01421                 
01422         }
01423         
01424         if (configData.keyframeEvaluationMode) {
01425                 evaluationStream.close();
01426                 evaluationCompleted = true;
01427                 ROS_INFO("Keyframe evaluation results finalized. Press (CTRL + C) to terminate.");
01428                 
01429                 blankMat = cv::Mat(blankMat.size(), blankMat.type(), CV_RGB(0,0,255));
01430                 cv::imshow("readybar", blankMat);
01431                 cv::waitKey();
01432                 return false;
01433         }
01434         
01435         if (foundStartingPair) {
01436                 
01437                 double a, b;
01438                 cv::Point min_coord, max_coord;
01439                 unsigned int best_iii, best_jjj;
01440                 minMaxLoc(keyframeTestScores, &a, &b, &min_coord, &max_coord); //, keyframeTestFlags);
01441                 
01442                 best_iii = max_coord.y;
01443                 best_jjj = max_coord.x;
01444                 
01445                 assignStartingFrames(best_iii, best_jjj, keyframe_scores, startingTrans);
01446                 
01447                 //nextFrame = best_jjj+1;
01448         }
01449         
01450         return foundStartingPair;
01451         
01452         
01453 }
01454 
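// Resets initialization state: clears the keyframe store, marks every feature track as
// untriangulated, and wipes the accumulated camera matrices (ACM) up to the last relevant frame.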
01455 void slamNode::clearSystem() {
01456         
01457         keyframe_store.connections.clear();
01458         keyframe_store.keyframes.clear();
01459         keyframe_store.count = 0;
01460         
01461         for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
01462                 featureTrackVector.at(iii).isTriangulated = false;
01463         }
01464         
01465         int finalIndex;
01466         
01467         if (configData.keyframeEvaluationMode) {
01468                 finalIndex = configData.maxInitializationFrames + 1;
01469         } else {
01470                 finalIndex = latestFrame + 1;
01471         }
01472         for (int iii = 0; iii < finalIndex; iii++) {
01473                 ACM[iii] = cv::Mat();
01474         }
01475         
01476 }
01477 
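// Adds the two chosen frames to the keyframe store, re-scores the pair (which also recovers the
// relative transformation into startingTrans), seeds ACM with the identity for the first frame and
// startingTrans for the second, and records a geometric connection between the two keyframes.
// Returns the pair's keyframe score.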
01478 double slamNode::assignStartingFrames(unsigned int best_iii, unsigned int best_jjj, double* keyframe_scores, cv::Mat& startingTrans) {
01479         
01480         printf("%s << Adding (%d) & (%d) to keyframe store... (already (%d) large)", __FUNCTION__, best_iii, best_jjj, ((int)keyframe_store.keyframes.size()));
01481         
01482         keyframe_store.addKeyframe(best_iii, blank);
01483         keyframe_store.addKeyframe(best_jjj, blank);
01484         
01485         double kfScore = testKeyframePair(featureTrackVector, configData.cameraData, scorecardParams, best_iii, best_jjj, keyframe_scores, startingTrans, configData.keyframeEvaluationMode, true);
01486         
01487         ACM[best_iii] = cv::Mat::eye(4, 4, CV_64FC1);
01488         startingTrans.copyTo(ACM[best_jjj]);
01489         
01490         ROS_INFO("kfScore = %f (%d, %d)", kfScore, best_iii, best_jjj);
01491         cout << ACM[best_iii] << endl;
01492         cout << ACM[best_jjj] << endl;
01493         
01494         //while (1) {}
01495         
01496         keyframe_store.addConnection(keyframe_store.count-2, keyframe_store.count-1, KF_CONNECTION_GEOMETRIC, F_arr[best_iii]);
01497         
01498         return kfScore;
01499         
01500 }
01501 
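// Builds the initial 3D structure from the base keyframe connection: reconstructs the keyframe
// pair, bundle-adjusts the two cameras, estimates the intermediate frame poses via PnP, refines
// the whole subsequence with a small SBA subsystem (rescale / optimize / rescale), triangulates
// any remaining eligible tracks, and runs a final keyframe bundle adjustment before reporting
// that the initial structure has been formed.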
01502 bool slamNode::formInitialStructure() {
01503         
01504         //ROS_INFO("Entered <formInitialStructure> (%d).", baseConnectionNum);
01505         
01506         //cout << "ACM[0] = " << ACM[0] << endl;
01507         
01508         double keyframe_scores[5];
01509         cv::Mat startingTrans;
01510         bool formedInitialStructure = false;
01511 
01512         int keyframe_idx_1 = keyframe_store.connections.at(baseConnectionNum).idx1;
01513         int keyframe_idx_2 = keyframe_store.connections.at(baseConnectionNum).idx2;
01514         
01515         //printf("%s << keyframe_idx = (%d, %d) [%d]", __FUNCTION__, keyframe_idx_1, keyframe_idx_2, keyframe_store.connections.size());
01516         
01517         unsigned int image_idx_1 = keyframe_store.keyframes.at(keyframe_idx_1).idx;
01518         unsigned int image_idx_2 = keyframe_store.keyframes.at(keyframe_idx_2).idx;
01519         
01520         //printf("%s << image_idx = (%d, %d)", __FUNCTION__, image_idx_1, image_idx_2);
01521         
01522         lastBasePose = image_idx_1;
01523         
01524         vector<cv::Point2f> pts1, pts2;
01525         getPointsFromTracks(featureTrackVector, pts1, pts2, image_idx_1, image_idx_2);
01526         
01527         vector<unsigned int> activeTrackIndices, fullSpanIndices, triangulatedIndices;
01528                 
01529         getActiveTracks(activeTrackIndices, featureTrackVector, image_idx_1, image_idx_2);
01530         filterToCompleteTracks(fullSpanIndices, activeTrackIndices, featureTrackVector, image_idx_1, image_idx_2);
01531         
01532         startingTracksCount = fullSpanIndices.size();
01533         
01534         //printf("%s << Active indices for this subsequence: %d", __FUNCTION__, activeTrackIndices.size());
01535         //printf("%s << Full-span indices for this subsequence: %d", __FUNCTION__, fullSpanIndices.size());
01536         
01537         //int numTriangulatedPoints = countActiveTriangulatedTracks(fullSpanIndices, featureTrackVector);
01538         
01539         //printf("%s << Number of triangulated full-span tracks for this subsequence = %d", __FUNCTION__, numTriangulatedPoints);
01540                 
01541         vector<cv::Point3d> ptsInCloud;
01542         cv::Mat P1, R1, t1; //, CX[4];
01543         cv::Mat Rvec, C;
01544         vector<cv::Point2f> correspPoints;
01545 
01546         //ROS_INFO("Reconstructing initialization pair with (%d) & (%d)...", image_idx_1, image_idx_2);
01547         
01548         //cout << "ACM[0] = " << ACM[0] << endl;
01549         
01550         bool worked = reconstructFreshSubsequencePair(featureTrackVector, ptsInCloud, triangulatedIndices, ACM[image_idx_1], ACM[image_idx_2], configData.cameraData, image_idx_1, image_idx_2);
01551         
01552         //ROS_INFO("Initial fresh cameras:");
01553         //cout << ACM[image_idx_1] << endl;
01554         //cout << ACM[image_idx_2] << endl;
01555         
01556         if (!worked) {
01557                 ROS_ERROR("Two-frame reconstruction failed.");
01558         }
01559         
01560         //cam_mutex.unlock();
01561         
01562         //printf("%s::%s << DEBUG [%d]", __PROGRAM__, __FUNCTION__, 0);
01563         
01564         double keyframeError;
01565         keyframeError = testKeyframePair(featureTrackVector, configData.cameraData, scorecardParams, image_idx_1, image_idx_2, keyframe_scores, startingTrans, configData.keyframeEvaluationMode);
01566         
01567         //printf("%s::%s << DEBUG [%d]", __PROGRAM__, __FUNCTION__, 1);
01568         
01569         //cout << "(" << keyframeError << ") = " << startingTrans << endl;
01570         startingTrans.copyTo(ACM[image_idx_2]);
01571         //cam_mutex.lock();
01572         //printf("%s << Pre-optimized initialization cameras:", __FUNCTION__);
01573         //ROS_INFO("Initial keyframe cameras:");
01574         //cout << ACM[image_idx_1] << endl;
01575         //cout << ACM[image_idx_2] << endl;
01576         
01577         //cout << "ACM[0] = " << ACM[0] << endl;
01578         /*
01579         keyframeError = optimizeKeyframePair(featureTrackVector, configData.cameraData, image_idx_1, image_idx_2, ACM);
01580         printf("%s << Freshly reconstructed initial cameras:", __FUNCTION__);
01581         cout << ACM[image_idx_1] << endl;
01582         cout << ACM[image_idx_2] << endl;
01583         */
01584         //cam_mutex.unlock();
01585         
01586         //double twoViewError;
01587         
01588         vector<unsigned int> adjustableKeyframeIndices;
01589         
01590         adjustableKeyframeIndices.push_back(image_idx_1);
01591         adjustableKeyframeIndices.push_back(image_idx_2);
01592         
01593         //cam_mutex.lock();
01594         
01595         keyframeError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, adjustableKeyframeIndices, configData.initialStructureIterations, false, false, 1);
01596         ROS_INFO("Adjusted error: %f", keyframeError);
01597         //cout << ACM[image_idx_1] << endl;
01598         //cout << ACM[image_idx_2] << endl;
01599         
01600         //cam_mutex.unlock();
01601 
01602         
01603         //real_C0.copyTo(ACM[image_idx_1]);
01604         //real_C1.copyTo(ACM[image_idx_2]);
01605                                                 
01606         if (!worked) {
01607                 ROS_ERROR("Reconstruction of fresh subsequence pair failed.");
01608                 return false;
01609         }
01610         
01611         ROS_INFO("Getting active 3d points...");
01612         
01613         //printf("%s << ptsInCloud.size() (pre-update) = %d", __FUNCTION__, ptsInCloud.size());
01614         getActive3dPoints(featureTrackVector, triangulatedIndices, ptsInCloud);
01615         //printf("%s << ptsInCloud.size() (post-update) = %d", __FUNCTION__, ptsInCloud.size());
01616         
01617         ROS_INFO("Points acquired.");
01618         
01619         int relativeIndex = image_idx_2-image_idx_1;
01620         
01621         printf("%s << DEBUG (%04d)\n", __FUNCTION__, 0);
01622         
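        // Assemble a small SBA subsystem: seed it with the triangulated cloud, add the two keyframe
        // cameras (the first one fixed), and attach the 2D projections observed in each keyframe.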
01623         SysSBA subsys;
01624         addPointsToSBA(subsys, ptsInCloud);
01625         
01626         printf("%s << DEBUG (%04d)\n", __FUNCTION__, 1);
01627         
01628         //cam_mutex.lock();
01629         addFixedCamera(subsys, configData.cameraData, ACM[image_idx_1]);
01630         
01631         printf("%s << DEBUG (%04d)\n", __FUNCTION__, 2);
01632         
01633         addNewCamera(subsys, configData.cameraData, ACM[image_idx_2]);
01634         
01635         //cam_mutex.unlock();
01636         
01637         printf("%s << DEBUG (%04d)\n", __FUNCTION__, 3);
01638         
01639         subsys.nFixed = 1;
01640         
01641         addProjectionsToSBA(subsys, pts1, 0);   // keyframe_idx_1
01642         
01643         printf("%s << DEBUG (%04d)\n", __FUNCTION__, 4);
01644         
01645         addProjectionsToSBA(subsys, pts2, 1);
01646         
01647         ROS_INFO("Going through relative indices...");
01648         
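        // For each frame strictly between the two keyframes, fetch the 2D points corresponding to the
        // first keyframe's features, estimate that frame's pose with solvePnPRansac against the current
        // cloud, invert it for SBA, and add the camera and its projections to the subsystem.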
01649         for (int jjj = 1; jjj < relativeIndex; jjj++) {
01650                 
01651                 vector<cv::Point2f> latestPoints;
01652                 
01653                 printf("%s << About to get corresponding points (%d)...\n", __FUNCTION__, jjj);
01654                 getCorrespondingPoints(featureTrackVector, pts1, latestPoints, image_idx_1, image_idx_1+jjj);
01655                 printf("%s << Corresponding points acquired (%d)\n", __FUNCTION__, jjj);
01656                 
01657                 
01658                 vector<cv::Point3f> objectPoints;
01659                 cv::Point3f tmpPt;
01660                 
01661                 for (unsigned int kkk = 0; kkk < ptsInCloud.size(); kkk++) {
01662                         tmpPt = cv::Point3f((float) ptsInCloud.at(kkk).x, (float) ptsInCloud.at(kkk).y, (float) ptsInCloud.at(kkk).z);
01663                         objectPoints.push_back(tmpPt);
01664                 }
01665                 
01666                 cv::Mat t, R, Rvec;
01667                 
01668                 printf("%s << DEBUG (%03d)\n", __FUNCTION__, 0);
01669                 
01670                 printf("%s << Solving PnP... (%d) (oP.size() = %d; lP.size() = %d)\n", __FUNCTION__, jjj, ((int)objectPoints.size()), ((int)latestPoints.size()));
01671                 
01672                 if (objectPoints.size() != latestPoints.size()) {
01673                         ROS_ERROR("Unable to find proper corresponding points!");
01674                         continue;
01675                 }
01676                 //solvePnPRansac(InputArray objectPoints, InputArray imagePoints, InputArray cameraMatrix, InputArray distCoeffs, OutputArray rvec, OutputArray tvec, bool useExtrinsicGuess=false, int iterationsCount=100, float reprojectionError=8.0, int minInliersCount=100, OutputArray inliers=noArray() );
01677                 solvePnPRansac(objectPoints, latestPoints, configData.cameraData.K, configData.cameraData.blankCoeffs, Rvec, t);
01678                 
01679                 printf("%s << DEBUG (%03d)\n", __FUNCTION__, 1);
01680                 
01681                 Rodrigues(Rvec, R);
01682                 
01683                 //cout << __FUNCTION__ << " << [" << jjj << "] R = " << R << endl;
01684                 //cout << __FUNCTION__ << " << [" << jjj << "] t = " << t << endl;
01685                 
01686                 
01687                 cv::Mat newCam;
01688                 cv::Mat T;
01689                 composeTransform(R, t, T);
01690                 transformationToProjection(T, newCam);
01691                 // compileTransform(newCam, R, t);
01692                 
01693                 // Invert the camera pose estimated by PnP, since it appears to require inversion whenever
01694                 // it is passed to and from SBA...
01695                 
01696                 cv::Mat newCamC;
01697                 projectionToTransformation(newCam, newCamC);
01698                 newCamC = newCamC.inv();
01699                 transformationToProjection(newCamC, newCam);
01700                 
01701                 addNewCamera(subsys, configData.cameraData, newCam);
01702                 
01703                 addProjectionsToSBA(subsys, latestPoints, jjj+1);
01704                 
01705                 printf("%s << DEBUG (%03d)\n", __FUNCTION__, 2);
01706                 
01707                 //avgError = optimizeSystem(subsys, 1e-4, 10);
01708                 //printf("%s << Progressive subsequence BA error = %f.", __FUNCTION__, avgError);
01709                 
01710                 //drawGraph2(subsys, camera_pub, points_pub, path_pub, decimation, bicolor);
01711                 
01712         }
01713         
01714         //printf("%s << About to optimize subsystem... (nFixed = %d)", __FUNCTION__, subsys.nFixed);
01715         //ROS_INFO("About to optimize starting sequence...");
01716         
01717         //cout << "ACM[0] = " << ACM[0] << endl;
01718         //if (iii == 0) {
01719                 
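        // Hold the first camera fixed and normalize the subsystem scale before and after optimization;
        // rescaleSBA(subsys, 0, 1) appears to fix the scale using the baseline between the first two
        // nodes so the reconstruction does not drift in scale.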
01720         subsys.nFixed = 1;
01721         ROS_INFO("About to rescale (1)");
01722         rescaleSBA(subsys, 0, 1);
01723         ROS_INFO("About to optimize between rescalings...");
01724         double avgError = optimizeSystem(subsys, 1e-4, configData.subsequenceIterations);
01725         ROS_INFO("About to rescale (2)");
01726         rescaleSBA(subsys, 0, 1);
01727         ROS_INFO("Rescaling done");
01728 
01729         
01730         if (avgError < 0.0) {
01731                 ROS_ERROR("Subsystem optimization failed to converge.");
01732         }
01733         
01734         //ROS_INFO("Subsystem optimized; err = %f", avgError);
01735         
01736         
01737         //cout << "ACM[0] = " << ACM[0] << endl;
01738         //printf("%s << DEBUG [%d] looking for crash...", __FUNCTION__, 0);
01739         
01740         //cam_mutex.lock();
01741         retrieveCameraPose(subsys, 0, ACM[image_idx_1]);
01742         retrieveCameraPose(subsys, 1, ACM[image_idx_2]);
01743         
01744         //ROS_INFO("Starting sequence cameras =");
01745         //cout << ACM[image_idx_1] << endl;
01746         
01747         
01748         //printf("%s << DEBUG [%d] looking for crash...", __FUNCTION__, 1);
01749         
01750         #pragma omp parallel for
01751         for (unsigned int ttt = 1; ttt < image_idx_2-image_idx_1; ttt++) {
01752                 retrieveCameraPose(subsys, ttt+1, ACM[image_idx_1+ttt]);
01753                 //cout << "ACM[0]" << ACM[image_idx_1+ttt] << endl;
01754         }
01755         
01756         //cout << ACM[image_idx_2] << endl;
01757         
01758         //cout << "ACM[0] = " << ACM[0] << endl;
01759         
01760         //cam_mutex.unlock();
01761 
01762         
01763         ptsInCloud.clear();
01764         retrieveAllPoints(ptsInCloud, subsys);
01765         
01766         updateTriangulatedPoints(featureTrackVector, triangulatedIndices, ptsInCloud);
01767         
01768         vector<unsigned int> basisNodes;
01769         
01770         for (unsigned int iii = image_idx_1; iii <= image_idx_2; iii++) {
01771                 basisNodes.push_back(iii);
01772         }
01773         
01774         keyframeError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, 1);
01775         //ROS_INFO("Full-sys error 1: %f", keyframeError);
01776         // So by here you've done all basic triangulation..
01777         /*
01778         assignFullSystem(sys, featureTrackVector, configData.cameraData, ACM, image_idx_1, image_idx_2);
01779         printf("%s << SYS: nodes = %d; tracks = %d", __FUNCTION__, sys.nodes.size(), sys.tracks.size());
01780         sys.nFixed = 1;
01781         rescaleSBA(sys, 0, sys.nodes.size()-1);
01782         avgError = optimizeSystem(sys, 1e-4, 10);
01783         rescaleSBA(sys, 0, sys.nodes.size()-1);
01784         printf("%s << retrieving full system with image (%d) to start with", __FUNCTION__, image_idx_1);
01785         retrieveFullSystem(sys, ACM, featureTrackVector, image_idx_1, image_idx_2);
01786         sys.nodes.clear();
01787         sys.tracks.clear();
01788         //currentPoseIndex = image_idx_2;
01789         //formedInitialStructure = true;
01790         //printf("%s::%s << Initial structure formed.", __PROGRAM__, __FUNCTION__);
01791         //return formedInitialStructure;
01792         */
01793         
01794         
01795         
01796         //getBasisNodes(basisNodes, currentPoseIndex);
01797         
01798         if (1) {
01799                 
01800                 vector<unsigned int> triangulatedIndices, untriangulatedIndices;
01801                 findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, image_idx_1, image_idx_2);
01802                                 
01803                 //unsigned int points_in_3d;
01804                 if (untriangulatedIndices.size() > 0) {
01805                         
01806                         vector<unsigned int> triangulatableIndices;
01807                         findTriangulatableTracks3(featureTrackVector, triangulatableIndices, image_idx_2, configData.framesForTriangulation);
01808                         
01809                         if (triangulatableIndices.size() > 0) {
01810                                 //ROS_INFO("About to triangulate (%d) new tracks...", triangulatableIndices.size());
01811                                 triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, image_idx_1, image_idx_2);
01812                                 //ROS_INFO("Tracks triangulated.");
01813                                 //double avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, basisNodes.size()-3);
01814                                 //printf("%s::%s << F(%d) Adjustment error with newly triangulated points = %f (k = %d)", __PROGRAM__, __FUNCTION__, currentPoseIndex, avgError, basisNodes.size());
01815                                 
01816                                 if (currentPoseIndex > 60) {
01817                                         //while (1) {}
01818                                 }
01819                                 
01820                         }
01821                         
01822                         
01823                 }
01824         }
01825         
01826         //cout << "ACM[0] = " << ACM[0] << endl;
01827         
01828         keyframeError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, 1);
01829         //ROS_INFO("Full-sys error 2: %f", keyframeError);
01830         /*
01831         assignFullSystem(sys, featureTrackVector, configData.cameraData, ACM, image_idx_1, image_idx_2);
01832         //printf("%s << SYS: nodes = %d; tracks = %d", __FUNCTION__, sys.nodes.size(), sys.tracks.size());
01833         printf("%s::%s << Optimizing with triangulated points...", __PROGRAM__, __FUNCTION__);
01834         sys.nFixed = 1;
01835         rescaleSBA(sys, 0, sys.nodes.size()-1);
01836         avgError = optimizeSystem(sys, 1e-4, 10);
01837         rescaleSBA(sys, 0, sys.nodes.size()-1);
01838         printf("%s::%s << Final error = %f", __PROGRAM__, __FUNCTION__, avgError);
01839         
01840         cout << "ACM[0] = " << ACM[0] << endl;
01841         //printf("%s << retrieving full system with image (%d) to start with", __FUNCTION__, image_idx_1);
01842         retrieveFullSystem(sys, ACM, featureTrackVector, image_idx_1, image_idx_2);
01843         */
01844         currentPoseIndex = image_idx_2;
01845         formedInitialStructure = true;
01846         //ROS_INFO("Initial structure formed.");
01847         //cout << "ACM[0] = " << ACM[0] << endl;
01848         
01849         for (unsigned int iii = image_idx_1; iii <= image_idx_2; iii++) {
01850                 //cout << "ACM[" << iii << "] = " << ACM[iii] << endl;
01851         }
01852         
01853         
01854         if (1) {
01855                 update_display();
01856         }
01857         
01858         //while (1) { }
01859         
01860         
01861         return formedInitialStructure;
01862         
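        // NOTE: the statements below are unreachable; the function always returns above.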
01863         ROS_ERROR("What!? How did I get here??");
01864         cin.get();
01865                                                                                                 
01866         //updateTriangulatedPoints(featureTrackVector, triangulatedIndices, ptsInCloud);
01867         
01869         //vector<unsigned int> secondTierIndices;
01870         
01872         
01873         //getIndicesForTriangulation(secondTierIndices, activeTrackIndices, triangulatedIndices);
01874 
01875         
01876         //Point3d *secondTierPoints;
01877         //int *triangulationsCount;
01878         //triangulationsCount = new int[secondTierIndices.size()];
01879         //secondTierPoints = new Point3d[secondTierIndices.size()];
01880 
01881         //#pragma omp parallel for
01882         //for (unsigned int rrr = 0; rrr < secondTierIndices.size(); rrr++) {
01884                 //triangulationsCount[rrr] = 0;
01885                 //secondTierPoints[rrr] = Point3d(0.0, 0.0, 0.0);
01886         //}
01887         
01888         //unsigned int startIndex = image_idx_1;
01889         //unsigned int finishIndex = image_idx_2;
01890         
01892         
01894         
01895         //for (unsigned int ttt = 0; ttt < secondTierIndices.size(); ttt++) {
01896         
01898                 //for (unsigned int rrr = startIndex; rrr < finishIndex; rrr++) {
01899                         
01901                         //Mat temp_C0;
01902                         
01904                         
01905                         
01906                         //ACM[rrr].copyTo(temp_C0);
01907                         
01908                         
01909                         
01911                         
01913                         
01914                         
01915                         //for (unsigned int sss = rrr + 1; sss <= finishIndex; sss++) {
01916                                 
01917 
01918                                 
01919                                 //Mat temp_C1;
01920                                 
01922                                 
01923                                 //ACM[sss].copyTo(temp_C1);
01924                                 
01926                                 
01928                                 
01930                                 
01932                                 
01933                                 //vector<unsigned int> activeTrackIndices_t, fullSpanIndices_t, triangulatedIndices_t;
01934                                 //getActiveTracks(activeTrackIndices_t, featureTrackVector, rrr, sss);
01935                                 //filterToCompleteTracks(fullSpanIndices_t, activeTrackIndices_t, featureTrackVector, rrr, sss);
01936                                 
01938                                 
01940                                 
01941                                 
01943                                 
01944                                 //unsigned int uuu = 0;
01945                                 
01947                                 
01948                                 //while ((uuu < fullSpanIndices_t.size()) && (fullSpanIndices_t.at(uuu) < secondTierIndices.at(ttt))) {
01949                                         //uuu++;
01950                                 //}
01951                                 
01952                                 //if (uuu == fullSpanIndices_t.size()) {
01954                                         //break;
01955                                 //}
01956                                 
01957                                 //if (fullSpanIndices_t.at(uuu) == secondTierIndices.at(ttt)) {
01958                                         
01961                                         //Point2f pt1_, pt2_;
01962                                         //Point3d pt3d_;
01963                                         
01965                                         
01966                                         //pt1_ = featureTrackVector.at(secondTierIndices.at(ttt)).getCoord(rrr);
01967                                         //pt2_ = featureTrackVector.at(secondTierIndices.at(ttt)).getCoord(sss);
01968                                         
01971                                         
01972                                         
01973                                         
01974                                         //Triangulate(pt1_, pt2_, configData.cameraData.K, configData.cameraData.K_inv, temp_C1, temp_C0, pt3d_, false);
01975                                         
01976                                         //if (ttt == 0) {
01980                                         //}
01981                                         
01982                                         
01984                                                 //secondTierPoints[ttt].x += pt3d_.x;
01985                                                 //secondTierPoints[ttt].y += pt3d_.y;
01986                                                 //secondTierPoints[ttt].z += pt3d_.z;
01987                                                 //triangulationsCount[ttt]++;
01989                                         
01990                                 //}
01991                                 
01992                         //}
01993                         
01996                         
01997                 //}
01998                 
01999         //}
02000 
02001         
02003         
02004         //#pragma omp parallel for
02005         //for (unsigned int rrr = 0; rrr < secondTierIndices.size(); rrr++) {
02006                 
02007                 //if (triangulationsCount[rrr] > 0) {
02008                         //secondTierPoints[rrr].x /= ((double) triangulationsCount[rrr]);
02009                         //secondTierPoints[rrr].y /= ((double) triangulationsCount[rrr]);
02010                         //secondTierPoints[rrr].z /= ((double) triangulationsCount[rrr]);
02011                         
02013 
02014                         
02015 
02016                         //featureTrackVector.at(secondTierIndices.at(rrr)).set3dLoc(secondTierPoints[rrr]);
02017                 //}
02018                 
02019         //}
02020         
02021         
02022         //vector<unsigned int> activeCameraIndices;
02023         //activeCameraIndices.clear();
02024                 
02025         //for (unsigned int rrr = startIndex; rrr <= finishIndex; rrr++) {
02026                 //activeCameraIndices.push_back(rrr);
02027         //}
02028         
02029         //double newError = optimizeSubsystem(configData.cameraData, ACM, activeCameraIndices, featureTrackVector, activeTrackIndices, configData.subsequenceIterations);
02030                         
02031         //ROS_INFO("Subsystem error including intermediate points: %f", newError);
02032         
02033         //assignFullSystem(sys, featureTrackVector, configData.cameraData, ACM, image_idx_1, image_idx_2);
02034         
02035         //ROS_INFO("SYS: nodes = %d; tracks = %d", sys.nodes.size(), sys.tracks.size());
02036         
02037         //avgError = optimizeSystem(sys, 1e-4, configData.fullSystemIterations * 100);
02038         
02039         //ROS_INFO("retrieving full system with image (%d) to start with", image_idx_1);
02040         
02041         //retrieveFullSystem(sys, ACM, featureTrackVector, image_idx_1, image_idx_2);
02042         
02043         
02044         //for (unsigned int jjj = 0; jjj <= image_idx_2; jjj++) {
02045                 
02046                 //if (ACM[jjj].rows != 0) {
02048                 //}
02049                                 
02050         //}
02051         
02053         
02054         //currentPoseIndex = image_idx_2;
02055         
02056         //formedInitialStructure = true;
02057 
02058         //ROS_INFO("Initial structure formed.");
02059         
02060         //return formedInitialStructure;
02061 }
02062 
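// Converts a 4x4 camera transform into a stamped ROS pose: decomposes it into rotation and
// translation, remaps the axes to the visualization frame (see the comments below), zeroes any
// implausibly large displacement, and fills in the corresponding quaternion orientation.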
02063 void slamNode::assignPose(geometry_msgs::PoseStamped& pPose, cv::Mat& C) {
02064         pPose.header.seq = currentPoseIndex;
02065         pPose.header.stamp = ros::Time::now();
02066         
02067         cv::Mat R, t;
02068         Quaterniond Q;
02069         decomposeTransform(C, R, t);
02070         matrixToQuaternion(R, Q);
02071         
02072         // tried: 1,0,2; 1,2,0; 0,2,1; 2,0,1; 2,1,0; 0,1,2
02073         // x-corresponds to graph -x; y to graph -z; z to graph -y
02074         
02075         pPose.pose.position.x = t.at<double>(2,0); //;
02076         pPose.pose.position.y = -t.at<double>(0,0); //t.at<double>(1,0);
02077         pPose.pose.position.z = -t.at<double>(1,0); //t.at<double>(2,0);
02078         
02079         if (abs(pPose.pose.position.x) > MAX_RVIZ_DISPLACEMENT) {
02080                 pPose.pose.position.x = 0.0;
02081         }
02082 
02083         if (abs(pPose.pose.position.y) > MAX_RVIZ_DISPLACEMENT) {
02084                 pPose.pose.position.y = 0.0;
02085         }
02086         
02087         if (abs(pPose.pose.position.z) > MAX_RVIZ_DISPLACEMENT) {
02088                 pPose.pose.position.z = 0.0;
02089         }
02090         
02091         //printf("%s << QUAT = (%f, %f, %f, %f)", __FUNCTION__, Q.x(), Q.y(), Q.z(), Q.w());
02092         
02093         // tried x,y,z,w
02094         pPose.pose.orientation.x = Q.z();
02095         pPose.pose.orientation.y = -Q.x();
02096         pPose.pose.orientation.z = -Q.y();
02097         pPose.pose.orientation.w = Q.w();
02098 }
02099 
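// dynamic_reconfigure callback: copies the latest reconfigurable parameters into configData so
// that subsequent processing uses the updated values.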
02100 void slamNode::serverCallback(thermalvis::monoslamConfig &config, uint32_t level) {
02101     
02102     configData.flowback = config.flowback;
02103     
02104     configData.verboseMode = config.verboseMode;
02105     
02106     configData.timeSpacing = config.timeSpacing;
02107     
02108     configData.poseEstimateIterations = config.poseEstimateIterations;
02109     
02110     configData.minStartupScore = config.minStartupScore;
02111     
02112     configData.adjustmentFrames = config.adjustmentFrames;
02113     
02114     configData.motionThreshold = config.motionThreshold;
02115     
02116     configData.timeDebug = config.timeDebug;
02117     
02118     configData.keyframeIterations = config.keyframeIterations;
02119     
02120     configData.framesForTriangulation = config.framesForTriangulation;
02121     
02122     configData.maxKeyframeSeparation = config.maxKeyframeSeparation;
02123     
02124     configData.min3dPtsForPnP = config.min3dPtsForPnP;
02125     
02126     configData.camerasPerSys = config.camerasPerSys;
02127     
02128     configData.minTrackedFeatures = config.minTrackedFeatures;
02129     
02130     configData.minGeometryScore = config.minGeometryScore;
02131     
02132     configData.minKeyframeScore = config.minKeyframeScore;
02133     
02134     configData.requiredTrackFrac = config.requiredTrackFrac;
02135     
02136     configData.fullSystemIterations = config.fullSystemIterations;
02137     
02138     configData.subsequenceIterations = config.subsequenceIterations;
02139     
02140     configData.keyframeSpacing = config.keyframeSpacing;
02141     
02142     configData.maxGuides = config.maxGuides;
02143     
02144     configData.initialStructureIterations = config.initialStructureIterations;
02145     
02146     //ROS_WARN("Switched keyframe evaluation to (%d)", configData.keyframeEvaluationMode ? 1 : 0);
02147         
02148 }
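// serverCallback() above follows the standard dynamic_reconfigure pattern: the server invokes
// the handler once at startup and again whenever a parameter is changed (e.g. from
// rqt_reconfigure), and the handler copies the new values into the node's working
// configuration. A minimal, generic sketch of how such a callback is wired up (the binding
// this node actually uses is in the slamNode constructor below); the include path for the
// generated monoslamConfig header is assumed from the package name:

#include <ros/ros.h>
#include <boost/bind.hpp>
#include <dynamic_reconfigure/server.h>
#include <thermalvis/monoslamConfig.h>

static void exampleReconfigureCallback(thermalvis::monoslamConfig& config, uint32_t level) {
        ROS_INFO("Reconfigure request: verboseMode = (%d)", config.verboseMode ? 1 : 0);
}

static void setupReconfigureSketch() {
        // The server must outlive its callbacks, which is why the real node keeps it as a
        // class member; a static local is used here only to keep the sketch self-contained.
        static dynamic_reconfigure::Server<thermalvis::monoslamConfig> server;
        static dynamic_reconfigure::Server<thermalvis::monoslamConfig>::CallbackType f;
        f = boost::bind(&exampleReconfigureCallback, _1, _2);
        server.setCallback(f);
}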
02149 
02150 slamNode::slamNode(ros::NodeHandle& nh, slamData startupData) {
02151         
02152         configData = startupData;
02153         
02154         scorecardParams = new double*[INITIALIZATION_SCORING_PARAMETERS];
02155         
02156         for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
02157                 scorecardParams[iii] = new double[3];
02158         }
02159         
02160         processScorecard();
02161         
02162         sprintf(nodeName, "%s", ros::this_node::getName().c_str());
02163         
02164         srand(time(NULL));
02165         
02166         evaluationCompleted = false;
02167         
02168         //cam_sem = Semaphore("cameras");
02169         //tracks_sem = Semaphore("tracks");
02170         //keyframes_sem = Semaphore("keyframes");
02171         
02172         putativelyEstimatedFrames = 0;
02173         
02174         firstIteration = true;
02175         
02176         boost::mutex cam_mutex;
02177         boost::mutex tracks_mutex;
02178         boost::mutex keyframes_mutex;
02179         
02180         char timeString[256];
02181         
02182         sprintf(timeString, "%010u.%09u", ros::Time::now().sec, ros::Time::now().nsec);
02183         //stringstream convert;
02184         //convert << ros::Time::now().sec << "." << ros::Time::now().nsec;
02185         
02186         //configData.evaluationFile = configData.read_addr + "nodes/monoslam/log/" + convert.str() + ".txt";
02187         configData.evaluationFile = configData.read_addr + "nodes/monoslam/log/" + timeString + "-" + ros::this_node::getName().substr(1,ros::this_node::getName().size()-1) + ".txt";
02188         
02189         if (configData.keyframeEvaluationMode) {
02190                 ROS_INFO("evaluationFile = (%s)", configData.evaluationFile.c_str());
02191         }
02192         
02193         
02194         decimation = DEFAULT_DRAWGRAPH_DECIMATION;
02195         bicolor = DEFAULT_DRAWGRAPH_BICOLOR;
02196         
02197         display_sys.tracks.clear();
02198         display_sys.nodes.clear();
02199         
02200         eye4 = cv::Mat::eye(4, 4, CV_64FC1);
02201         
02202         
02203         repetitionNoted = false;
02204         
02205         currentPoseIndex = -1;
02206         
02207         isTracking = true;
02208         
02209         baseConnectionNum = 0;
02210         
02211         lastBasePose = -1;
02212         
02213         latestFrame = -1;
02214         
02215         path_pub = nh.advertise<visualization_msgs::Marker>( "path", 1 );
02216         camera_pub = nh.advertise<visualization_msgs::Marker>( "cameras", 1 );
02217         points_pub = nh.advertise<visualization_msgs::Marker>( "points", 1);
02218         
02219         
02220         
02221         structureValid = false;
02222         structureFormed = false;
02223         
02224         sys.verbose = 0;
02225         display_sys.verbose = 0;
02226         
02227         
02228         
02229         ROS_INFO("Setting up node.");
02230         
02231         infoProcessed = false;
02232         
02233         //std::string topic_info = nh.resolveName(configData.stream + "camera_info");
02234         
02235         std::string topic_tracks = configData.stream + "tracks";
02236         
02237         ROS_INFO("Connecting to tracks topic. %s", topic_tracks.c_str());
02238         tracks_sub = nh.subscribe<thermalvis::feature_tracks>(topic_tracks, 1, &slamNode::handle_tracks, this);
02239         
02240         std::string topic_info = configData.stream + "camera_info";
02241         
02242         ROS_INFO("Connecting to camera_info. %s", topic_info.c_str());
02243         
02244         info_sub = nh.subscribe<sensor_msgs::CameraInfo>(topic_info, 1, &slamNode::handle_info, this);
02245         
02246         
02247         
02248         ROS_INFO("Node setup.");
02249         
02250         keyframeTestScores = cv::Mat::zeros(configData.maxInitializationFrames, configData.maxInitializationFrames, CV_64FC1);
02251         keyframeTestFlags = cv::Mat::zeros(configData.maxInitializationFrames, configData.maxInitializationFrames, CV_8UC1);
02252         
02253         sprintf(pose_pub_name, "thermalvis%s/pose", nodeName);
02254         ROS_INFO("Configuring pose topic. %s", pose_pub_name);
02255         
02256         currentPose.header.frame_id = "/pgraph"; //pose_pub_name;
02257         pose_pub = nh.advertise<geometry_msgs::PoseStamped>(pose_pub_name, 1);
02258         
02259         timer = nh.createTimer(ros::Duration(0.01), &slamNode::main_loop, this);
02260         
02261         //elapsedTime = timeElapsedMS(cycle_timer);
02262         
02263         if (configData.logErrors) {
02264                 configData.errorFile = configData.read_addr + "nodes/monoslam/log/stability/" + timeString + "-" + ros::this_node::getName().substr(1,ros::this_node::getName().size()-1) + ".txt";
02265                 ROS_INFO("Current debug filename: %s", configData.errorFile.c_str());
02266                 error_file.open(configData.errorFile.c_str());
02267         }
02268         
02269         ROS_INFO("Establishing server callback...");
02270         f = boost::bind(&slamNode::serverCallback, this, _1, _2);
02271         server.setCallback(f);
02272         
02273 }
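// No destructor appears in this listing, so the scorecard table allocated in the constructor
// above is never released and the stability log is left to be closed implicitly. A minimal
// cleanup sketch, assuming the class declares a destructor and that error_file is a
// std::ofstream member (member names are taken from the code above; the actual class
// definition lives in the header, which is not shown here):

slamNode::~slamNode() {
        for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
                delete[] scorecardParams[iii];   // rows allocated in the constructor
        }
        delete[] scorecardParams;                // array of row pointers
        
        if (error_file.is_open()) {
                error_file.close();              // flush the stability log if logErrors was enabled
        }
}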
02274 
02275 timeAnalyzer::timeAnalyzer() {
02276         
02277         cycles = 0;
02278         average = 0.0;
02279         sigma = 0.0;
02280         
02281         timeElapsedMS(cycle_timer, true);
02282 }
02283 
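// calcParameters() below reduces the recorded cycle times to two summary statistics:
// 'average' is the arithmetic mean of the stored samples and 'sigma' is the population
// standard deviation, i.e. sigma = sqrt( (1/cycles) * sum_i (vals[i] - average)^2 ).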
02284 void timeAnalyzer::calcParameters() {
02285         if (cycles == 0) {
02286                 return;
02287         }
02288         
02289         average = 0.0;
02290         sigma = 0.0;
02291         
02292         for (int iii = 0; iii < cycles; iii++) {
02293                 average += vals[iii] / ((double) cycles);
02294         }
02295         
02296         for (int iii = 0; iii < cycles; iii++) {
02297                 sigma += pow(vals[iii] - average, 2.0) / ((double) cycles);
02298         }
02299         
02300         sigma = sqrt(sigma);
02301         
02302 }
02303 
02304 void timeAnalyzer::startRecording() {
02305         timeElapsedMS(cycle_timer, true);
02306 }
02307 
02308 void timeAnalyzer::stopRecording() {
02309         
02310         if (cycles < 1024) {
02311                 vals[cycles] = timeElapsedMS(cycle_timer, true);
02312                 cycles++;
02313         } else {
02314                 ROS_ERROR("Too many cycles (%d) / (%d)", cycles, 1024);
02315         }
02316         
02317 }
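// A hypothetical usage sketch for timeAnalyzer (illustrative only; not part of this node).
// It assumes the average and sigma members are publicly accessible, and 'doSomeWork' stands
// in for whatever is being profiled: wrap the section of interest with startRecording() /
// stopRecording(), then call calcParameters() to obtain the mean and standard deviation of
// the recorded durations in milliseconds.

static void profileWorkloadSketch(void (*doSomeWork)(void)) {
        timeAnalyzer profiler;
        
        for (int iii = 0; iii < 100; iii++) {
                profiler.startRecording();   // reset the shared cycle timer
                doSomeWork();                // workload being measured
                profiler.stopRecording();    // store the elapsed milliseconds (up to 1024 samples)
        }
        
        profiler.calcParameters();           // compute mean and population standard deviation
        ROS_INFO("workload: mean = (%f) ms, sigma = (%f) ms", profiler.average, profiler.sigma);
}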
02318 
02319 


thermalvis
Author(s): Stephen Vidas
autogenerated on Sun Jan 5 2014 11:38:45