00001 #include "node.h" 00002 #include "names.h" 00003 00004 //command line parameters 00005 #include "cmd_line/cmd_line.h" 00006 00007 //detectors 00008 #include "detectors/datamatrix/detector.h" 00009 #include "detectors/qrcode/detector.h" 00010 00011 //tracking 00012 #include "libauto_tracker/tracking.h" 00013 #include "libauto_tracker/threading.h" 00014 #include "libauto_tracker/events.h" 00015 00016 //visp includes 00017 #include <visp/vpDisplayX.h> 00018 #include <visp/vpMbEdgeKltTracker.h> 00019 #include <visp/vpMbKltTracker.h> 00020 #include <visp/vpMbEdgeTracker.h> 00021 #include <visp/vpTime.h> 00022 00023 #include "conversions/camera.h" 00024 #include "conversions/image.h" 00025 #include "conversions/3dpose.h" 00026 00027 #include "libauto_tracker/tracking.h" 00028 00029 #include "resource_retriever/retriever.h" 00030 #include "visp_tracker/MovingEdgeSites.h" 00031 00032 #include "std_msgs/Int8.h" 00033 00034 00035 namespace visp_auto_tracker{ 00036 Node::Node() : 00037 n_("~"), 00038 queue_size_(1), 00039 got_image_(false){ 00040 //get the tracker configuration file 00041 //this file contains all of the tracker's parameters, they are not passed to ros directly. 00042 n_.param<std::string>("tracker_config_path", tracker_config_path_, ""); 00043 n_.param<bool>("debug_display", debug_display_, false); 00044 std::string model_name; 00045 std::string model_full_path; 00046 n_.param<std::string>("model_path", model_path_, ""); 00047 n_.param<std::string>("model_name", model_name, ""); 00048 model_path_= model_path_[model_path_.length()-1]=='/'?model_path_:model_path_+std::string("/"); 00049 model_full_path = model_path_+model_name; 00050 tracker_config_path_ = model_full_path+".cfg"; 00051 ROS_INFO("model full path=%s",model_full_path.c_str()); 00052 resource_retriever::Retriever r; 00053 resource_retriever::MemoryResource res = r.get(std::string("file://")+std::string(model_full_path+".wrl")); 00054 00055 model_description_.resize(res.size); 00056 unsigned i = 0; 00057 for (; i < res.size; ++i) 00058 model_description_[i] = res.data.get()[i]; 00059 00060 ROS_INFO("model content=%s",model_description_.c_str()); 00061 00062 n_.setParam ("/model_description", model_description_); 00063 00064 } 00065 00066 void Node::waitForImage(){ 00067 while ( ros::ok ()){ 00068 if(got_image_) return; 00069 ros::spinOnce(); 00070 } 00071 } 00072 00073 //records last recieved image 00074 void Node::frameCallback(const sensor_msgs::ImageConstPtr& image, const sensor_msgs::CameraInfoConstPtr& cam_info){ 00075 boost::mutex::scoped_lock(lock_); 00076 image_header_ = image->header; 00077 I_ = visp_bridge::toVispImageRGBa(*image); //make sure the image isn't worked on by locking a mutex 00078 got_image_ = true; 00079 } 00080 00081 void Node::spin(){ 00082 //Parse command line arguments from config file (as ros param) 00083 CmdLine cmd(tracker_config_path_); 00084 cmd.set_data_directory(model_path_); //force data path 00085 00086 if(cmd.should_exit()) return; //exit if needed 00087 00088 vpMbTracker* tracker; //mb-tracker will be chosen according to config 00089 00090 vpCameraParameters cam = cmd.get_cam_calib_params(); //retrieve camera parameters 00091 if(cmd.get_verbose()) 00092 std::cout << "loaded camera parameters:" << cam << std::endl; 00093 00094 00095 //create display 00096 00097 vpDisplayX* d = NULL; 00098 if(debug_display_) 00099 d = new vpDisplayX(); 00100 00101 //init detector based on user preference 00102 detectors::DetectorBase* detector = NULL; 00103 if (cmd.get_detector_type() == 
CmdLine::ZBAR) 00104 detector = new detectors::qrcode::Detector; 00105 else if(cmd.get_detector_type() == CmdLine::DMTX) 00106 detector = new detectors::datamatrix::Detector; 00107 00108 //init tracker based on user preference 00109 if(cmd.get_tracker_type() == CmdLine::KLT) 00110 tracker = new vpMbKltTracker(); 00111 else if(cmd.get_tracker_type() == CmdLine::KLT_MBT) 00112 tracker = new vpMbEdgeKltTracker(); 00113 else if(cmd.get_tracker_type() == CmdLine::MBT) 00114 tracker = new vpMbEdgeTracker(); 00115 00116 //compile detectors and paramters into the automatic tracker. 00117 t_ = new tracking::Tracker(cmd,detector,tracker,debug_display_); 00118 t_->start(); //start the state machine 00119 00120 //subscribe to ros topics and prepare a publisher that will publish the pose 00121 message_filters::Subscriber<sensor_msgs::Image> raw_image_subscriber(n_, image_topic, queue_size_); 00122 message_filters::Subscriber<sensor_msgs::CameraInfo> camera_info_subscriber(n_, camera_info_topic, queue_size_); 00123 message_filters::TimeSynchronizer<sensor_msgs::Image, sensor_msgs::CameraInfo> image_info_sync(raw_image_subscriber, camera_info_subscriber, queue_size_); 00124 image_info_sync.registerCallback(boost::bind(&Node::frameCallback,this, _1, _2)); 00125 ros::Publisher object_pose_publisher = n_.advertise<geometry_msgs::PoseStamped>(object_position_topic, queue_size_); 00126 ros::Publisher object_pose_covariance_publisher = n_.advertise<geometry_msgs::PoseWithCovarianceStamped>(object_position_covariance_topic, queue_size_); 00127 ros::Publisher moving_edge_sites_publisher = n_.advertise<visp_tracker::MovingEdgeSites>(moving_edge_sites_topic, queue_size_); 00128 ros::Publisher status_publisher = n_.advertise<std_msgs::Int8>(status_topic, queue_size_); 00129 00130 //wait for an image to be ready 00131 waitForImage(); 00132 { 00133 //when an image is ready tell the tracker to start searching for patterns 00134 boost::mutex::scoped_lock(lock_); 00135 if(debug_display_) { 00136 d->init(I_); //also init display 00137 vpDisplay::setTitle(I_, "visp_auto_tracker debug display"); 00138 } 00139 00140 t_->process_event(tracking::select_input(I_)); 00141 } 00142 00143 unsigned int iter=0; 00144 geometry_msgs::PoseStamped ps; 00145 geometry_msgs::PoseWithCovarianceStamped ps_cov; 00146 visp_tracker::MovingEdgeSites me; 00147 00148 ros::Rate rate(25); //init 25fps publishing frequency 00149 while(ros::ok()){ 00150 double t = vpTime::measureTimeMs(); 00151 boost::mutex::scoped_lock(lock_); 00152 //process the new frame with the tracker 00153 t_->process_event(tracking::input_ready(I_,cam,iter)); 00154 //When the tracker is tracking, it's in the tracking::TrackModel state 00155 //Access this state and broadcast the pose 00156 tracking::TrackModel& track_model = t_->get_state<tracking::TrackModel&>(); 00157 ps.pose = visp_bridge::toGeometryMsgsPose(track_model.cMo); //convert 00158 00159 //if(*(t_->current_state())==3 /*TrackModel*/){ 00160 ps_cov.pose.pose = ps.pose; 00161 //header isn't necessarily timestamped to now (not in rosbag case) 00162 ps.header = image_header_; 00163 me.header = image_header_; 00164 ps_cov.header = image_header_; 00165 ps.header.frame_id = tracker_ref_frame; 00166 ps_cov.header.frame_id = tracker_ref_frame; 00167 me.header.frame_id = tracker_ref_frame; 00168 std_msgs::Int8 status; 00169 status.data = (unsigned char)(*(t_->current_state())); 00170 00171 object_pose_publisher.publish(ps); //publish 00172 object_pose_covariance_publisher.publish(ps_cov); //publish 00173 
moving_edge_sites_publisher.publish(me); 00174 status_publisher.publish(status); 00175 00176 //} 00177 ros::spinOnce(); 00178 rate.sleep(); 00179 if (cmd.show_fps()) 00180 std::cout << "Tracking done in " << vpTime::measureTimeMs() - t << " ms" << std::endl; 00181 } 00182 t_->process_event(tracking::finished()); 00183 00184 } 00185 }
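
// Usage sketch (not part of the listing above): a minimal main() showing how this
// class is typically driven. The node name is an assumption; only the constructor
// and spin() defined above are used.
#include <ros/ros.h>
#include "node.h"

int main(int argc, char** argv)
{
  //register with the ROS master before Node's private NodeHandle ("~") is created
  ros::init(argc, argv, "visp_auto_tracker");

  //the constructor reads tracker_config_path, debug_display, model_path and model_name
  //from the private parameter namespace and loads the .cfg/.wrl model files
  visp_auto_tracker::Node node;

  //detect the pattern, track it and publish pose, covariance, edge sites and status until shutdown
  node.spin();
  return 0;
}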