PointFeatureTracker.h
/*
 * PointFeatureTracker.h
 *
 *      Author: roberto
 *
 * This is a modified implementation of the method for online estimation of kinematic structures described in our paper
 * "Online Interactive Perception of Articulated Objects with Multi-Level Recursive Estimation Based on Task-Specific Priors"
 * (Martín-Martín and Brock, 2014).
 * This implementation can be used to reproduce the results of the paper and can be applied to new research.
 * It can also be extended to perceive different information/models or to use additional sources of information.
 * A detailed explanation of the method and the system can be found in our paper.
 *
 * If you use this implementation in your research, please consider citing our work:
 *
@inproceedings{martinmartin_ip_iros_2014,
Title = {Online Interactive Perception of Articulated Objects with Multi-Level Recursive Estimation Based on Task-Specific Priors},
Author = {Roberto {Mart\'in-Mart\'in} and Oliver Brock},
Booktitle = {Proceedings of the IEEE/RSJ International Conference on Intelligent Robots and Systems},
Pages = {2494-2501},
Year = {2014},
Location = {Chicago, Illinois, USA},
Note = {http://www.robotics.tu-berlin.de/fileadmin/fg170/Publikationen_pdf/martinmartin_ip_iros_2014.pdf},
Url = {http://www.robotics.tu-berlin.de/fileadmin/fg170/Publikationen_pdf/martinmartin_ip_iros_2014.pdf},
Projectname = {Interactive Perception}
}
 * If you have questions or suggestions, contact us:
 * roberto.martinmartin@tu-berlin.de
 *
 * Enjoy!
 */

#ifndef REAL_FEATURE_TRACKER_H_
#define REAL_FEATURE_TRACKER_H_

#include "feature_tracker/FeatureTracker.h"

#include <ros/ros.h>
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <pcl/point_types.h>
#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/core/core.hpp>
//#include <opencv2/core/gpumat.hpp>
//#include <opencv2/gpu/gpu.hpp>
#include <camera_info_manager/camera_info_manager.h>

#include <iostream>
#include <fstream>

namespace omip
{

class PointFeatureTracker : public FeatureTracker
{
public:

    // Constructor: loop period in nanoseconds, optional flag to use the point cloud
    // as input and optional namespace for the tracker
    PointFeatureTracker(double loop_period_ns, bool using_pc = false, std::string ft_ns = std::string(""));

    // Destructor
    virtual ~PointFeatureTracker();

    // First step of the recursive estimation loop: predict the next state
    // over the given time interval (in nanoseconds)
    virtual void predictState(double time_interval_ns);

    // Second step: predict the next measurement from the predicted state
    virtual void predictMeasurement();

    // Third step: correct the predicted state using the new measurement
    virtual void correctState();

    // Add an externally generated state prediction together with its timestamp (in nanoseconds)
    virtual void addPredictedState(const ft_state_t& predicted_state, const double& predicted_state_timestamp_ns);

    // Update the tracker parameters from dynamic_reconfigure
    virtual void setDynamicReconfigureValues(feature_tracker::FeatureTrackerDynReconfConfig &config);

    // Set the full RGB-D point cloud of the current frame
    virtual void setFullRGBDPC(PointCloudPCL::ConstPtr full_rgb_pc);

    // Set the occlusion mask image
    virtual void setOcclusionMaskImg(cv_bridge::CvImagePtr occ_mask_img);

    // Set the camera calibration from a CameraInfo message
    virtual void setCameraInfoMsg(const sensor_msgs::CameraInfo* camera_info);

    // Get the current state of the tracker
    virtual ft_state_t getState() const;

    virtual cv_bridge::CvImagePtr getRGBImg();

    virtual cv_bridge::CvImagePtr getDepthImg();

    // Debug/visualization images
    virtual cv_bridge::CvImagePtr getTrackedFeaturesImg();

    virtual cv_bridge::CvImagePtr getTrackedFeaturesWithPredictionMaskImg();

    virtual cv_bridge::CvImagePtr getDepthEdgesImg();

    virtual cv_bridge::CvImagePtr getPredictingMaskImg();

    virtual cv_bridge::CvImagePtr getTrackingMaskImg();

    virtual cv_bridge::CvImagePtr getDetectingMaskImg();

    virtual cv_bridge::CvImagePtr getPredictedAndLastFeaturesImg();

    // Set the interpretation of the self-occlusion mask (positive or negative mask)
    virtual void setSelfOcclusionPositive(bool so_positive);


protected:

    // Read the node parameters
    void _ReadParameters();

    // Initialize internal variables
    void _InitializeVariables();

    // Estimate the mask where new features can be detected
    void _EstimateDetectionMask();

    // Detect new point features
    void _DetectNewFeatures();

    // Track the existing features into the current frame
    void _TrackFeatures();

    // Pre-process the incoming depth image
    void _ProcessDepthImg();

    // Pre-process the incoming RGB image
    void _ProcessRGBImg();

    // Pre-process the incoming occlusion mask image
    void _ProcessOcclusionMaskImg();

    // Pre-process the point cloud of predicted feature locations
    void _ProcessPredictedLocationsPC();

    // Retrieve the 3D coordinates corresponding to the given 2D points
    int _Retrieve3DCoords(
            std::vector<cv::Point2f>& points_in_2d, std::vector<bool>& points_status,
            std::vector<cv::Point3d>& points_in_3d);

    // Compare the 3D coordinates of the features between consecutive frames
    int _Compare3DCoords();

    ros::NodeHandle _node_handle;
    sensor_msgs::CameraInfo::Ptr _camera_info_ptr;

    // PCL variables
    PointCloudPCL::ConstPtr _received_point_cloud;

    // OpenCV variables
    cv_bridge::CvImagePtr _depth_img_ptr, _rgb_img_ptr, _current_bw_img_ptr, _previous_bw_img_ptr, _occlusion_msk_img_ptr;
    cv_bridge::CvImagePtr _tracked_features_img, _tracked_features_with_pred_msk_img, _depth_edges_img, _predicting_mask_img;
    cv_bridge::CvImagePtr _tracking_mask_img, _detecting_mask_img, _predicted_and_previous_features_img;
    cv::Mat _predicted_feats_msk_mat, _depth_mat, _canny_edges_mat, _feats_detection_msk_mat, _feats_tracking_msk_mat;
    cv::Mat _erosion_element_detecting_mat, _erosion_element_tracking_mat, _occlusion_msk_mat, _aux1_mat, _previous_depth_mat, _depth_difference_mask;

    Eigen::Matrix<double, 3, 4> _image_plane_proj_mat_eigen;

    std::vector<std::vector<cv::Point2f> >  _predicted_measurements;
    std::vector<cv::Point2f> _previous_measurement, _corrected_measurement;
    std::vector<cv::Point3d> _previous_belief_state, _corrected_belief_state;
    std::vector<bool> _feat_status_v;
    std::vector<int> _previous_feat_ids_v, _current_feat_ids_v;
    std::vector<int> _received_prediction_ids;

    ros::Time _predicted_state_rcvd_time, _previous_time_of_motion_detection;
    double _predicted_state_rcvd_time_ns;

    // Flags
    bool _using_pc, _first_frame, _occlusion_msk_img_rcvd, _predicted_state_rcvd, _predicted_state_processed, _camera_info_msg_rcvd;
    bool _publishing_predicted_and_past_feats_img, _publishing_tracked_feats_img, _publishing_tracked_feats_with_pred_msk_img;
    bool _attention_to_motion, _use_motion_mask;

    // Parameters
    int _erosion_size_detecting, _erosion_size_tracking, _canny_kernel_size;
    int _canny_ratio, _number_of_features, _min_number_of_features, _frame_counter, _processing_factor, _min_area_size_pixels;
    double _max_allowed_depth, _min_allowed_depth, _canny_low_threshold, _max_interframe_jump, _sensor_fps, _min_time_to_detect_motion, _min_depth_difference, _min_feat_quality;

    Feature::Id _feature_id_counter;
    std::string _ft_ns;

    std::vector<std::vector<float> > _feat_quality;

    std::ofstream _features_file;

    bool _so_positive;
};
}

#endif /* REAL_FEATURE_TRACKER_H_ */
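The public interface above follows the predict/correct pattern of the recursive estimation loop. The following is a minimal sketch of how a caller might drive one iteration of that loop; the driver function, the variable names and the 30 Hz period are illustrative assumptions, not the actual node code.

#include "feature_tracker/PointFeatureTracker.h"

// Hypothetical driver for a single estimation cycle (not part of the package).
// ft_state_t is assumed to be the state typedef pulled in via FeatureTracker.h.
void runOneCycle(omip::PointFeatureTracker& tracker,
                 const omip::ft_state_t& prediction_from_higher_level,
                 double prediction_timestamp_ns,
                 double loop_period_ns)
{
    // Feed the externally generated prediction (e.g. from the next level of the recursion).
    tracker.addPredictedState(prediction_from_higher_level, prediction_timestamp_ns);

    // Predict the tracker state forward over one loop period.
    tracker.predictState(loop_period_ns);

    // Project the predicted state into measurement space (expected feature locations).
    tracker.predictMeasurement();

    // Correct the predicted state with the features tracked in the new frame.
    tracker.correctState();

    // Read out the corrected state to pass to the next level.
    omip::ft_state_t corrected = tracker.getState();
    (void)corrected;
}

// Example construction at ~30 Hz with point-cloud input enabled (arguments are illustrative):
//   omip::PointFeatureTracker tracker(1e9 / 30.0, true, "feature_tracker");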

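The message_filters and sensor_msgs includes suggest that RGB, depth and point-cloud inputs are synchronized with an approximate-time policy before being handed to the tracker. Below is a minimal sketch of that kind of wiring, with placeholder topic names and a stub callback; it illustrates the policy, not the node's actual subscriber setup.

#include <ros/ros.h>
#include <boost/bind.hpp>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>

typedef message_filters::sync_policies::ApproximateTime<
    sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::PointCloud2> RGBDSyncPolicy;

void rgbdCallback(const sensor_msgs::ImageConstPtr& rgb,
                  const sensor_msgs::ImageConstPtr& depth,
                  const sensor_msgs::PointCloud2ConstPtr& pc)
{
    // Convert with cv_bridge / pcl::fromROSMsg here and pass the data to the tracker,
    // e.g. via setFullRGBDPC() and the image setters.
}

int main(int argc, char** argv)
{
    ros::init(argc, argv, "point_feature_tracker_example");
    ros::NodeHandle nh;

    // Topic names are placeholders.
    message_filters::Subscriber<sensor_msgs::Image> rgb_sub(nh, "camera/rgb/image_raw", 1);
    message_filters::Subscriber<sensor_msgs::Image> depth_sub(nh, "camera/depth/image_raw", 1);
    message_filters::Subscriber<sensor_msgs::PointCloud2> pc_sub(nh, "camera/depth_registered/points", 1);

    // Approximate-time synchronization of the three streams with a queue size of 10.
    message_filters::Synchronizer<RGBDSyncPolicy> sync(RGBDSyncPolicy(10), rgb_sub, depth_sub, pc_sub);
    sync.registerCallback(boost::bind(&rgbdCallback, _1, _2, _3));

    ros::spin();
    return 0;
}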

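_Retrieve3DCoords maps tracked 2D feature locations to 3D coordinates using the received point cloud. A minimal sketch of how such a lookup can be done with an organized PCL cloud follows; the helper is hypothetical and only illustrates the idea, assuming PointCloudPCL is an organized pcl::PointCloud of XYZRGB points.

#include <cmath>
#include <vector>
#include <opencv2/core/core.hpp>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>

// Hypothetical helper: look up the 3D point behind each 2D feature location in an
// organized cloud. points_status is expected to have the same size as points_in_2d;
// entries are cleared when the lookup fails (pixel out of bounds or NaN depth).
// Returns the number of valid 3D points found.
int retrieve3DCoordsSketch(const pcl::PointCloud<pcl::PointXYZRGB>& organized_cloud,
                           const std::vector<cv::Point2f>& points_in_2d,
                           std::vector<bool>& points_status,
                           std::vector<cv::Point3d>& points_in_3d)
{
    int num_valid = 0;
    points_in_3d.assign(points_in_2d.size(), cv::Point3d());
    for (size_t i = 0; i < points_in_2d.size(); ++i)
    {
        const int u = static_cast<int>(points_in_2d[i].x);   // column
        const int v = static_cast<int>(points_in_2d[i].y);   // row
        if (u < 0 || v < 0 ||
            u >= static_cast<int>(organized_cloud.width) ||
            v >= static_cast<int>(organized_cloud.height))
        {
            points_status[i] = false;
            continue;
        }
        const pcl::PointXYZRGB& p = organized_cloud.at(u, v); // (column, row) indexing
        if (std::isnan(p.z))
        {
            points_status[i] = false;
            continue;
        }
        points_in_3d[i] = cv::Point3d(p.x, p.y, p.z);
        ++num_valid;
    }
    return num_valid;
}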