4 #include <boost/format.hpp> 8 #include <sensor_msgs/Image.h> 12 #include <visp3/core/vpImage.h> 13 #include <visp3/core/vpTranslationVector.h> 14 #include <visp3/core/vpQuaternionVector.h> 16 #include <visp3/mbt/vpMbGenericTracker.h> 22 const sensor_msgs::Image::ConstPtr& src)
33 if (src->width != dst.getWidth() || src->height != dst.getHeight())
35 ROS_INFO(
"dst is %dx%d but src size is %dx%d, resizing.",
36 dst.getWidth (), dst.getHeight (), src->width, src->height);
37 dst.resize (src->height, src->width);
40 if(src->encoding ==
MONO8) {
41 memcpy(dst.bitmap, &src->data[0], dst.getHeight () * src->step *
sizeof(
unsigned char));
43 else if(src->encoding ==
RGB8 || src->encoding ==
RGBA8 44 || src->encoding ==
BGR8 || src->encoding ==
BGRA8)
46 unsigned nc = numChannels(src->encoding);
48 (src->encoding ==
RGBA8 || src->encoding ==
BGRA8) ? nc - 1 : nc;
50 for(
unsigned i = 0; i < dst.getWidth (); ++i)
51 for(
unsigned j = 0; j < dst.getHeight (); ++j)
54 for(
unsigned c = 0; c < cEnd; ++c)
55 acc += src->data[j * src->step + i * nc + c];
61 boost::format fmt(
"bad encoding '%1'");
63 throw std::runtime_error(fmt.str());
68 const vpImage<unsigned char>& src)
70 dst.width = src.getWidth();
71 dst.height = src.getHeight();
73 dst.step = src.getWidth();
74 dst.data.resize(dst.height * dst.step);
75 for(
unsigned i = 0; i < src.getWidth (); ++i)
76 for(
unsigned j = 0; j < src.getHeight (); ++j)
77 dst.data[j * dst.step + i] = src[j][i];
83 std::stringstream stream;
84 stream <<
"Model Based Tracker Common Setttings\n" <<
85 " Angle for polygons apparition...." << vpMath::deg(tracker.getAngleAppear()) <<
" degrees\n" <<
86 " Angle for polygons disparition..." << vpMath::deg(tracker.getAngleDisappear()) <<
" degrees\n";
92 std::stringstream stream;
93 stream <<
"Moving Edge Setttings\n" <<
94 " Size of the convolution masks...." << moving_edge.getMaskSize() <<
"x"<< moving_edge.getMaskSize() <<
" pixels\n" <<
95 " Query range +/- J................" << moving_edge.getRange() <<
" pixels\n" <<
96 " Likelihood test ratio............" << moving_edge.getThreshold() <<
"\n" <<
97 " Contrast tolerance +/-..........." << moving_edge.getMu1() * 100 <<
"% and " << moving_edge.getMu2() * 100 <<
"% \n" <<
98 " Sample step......................" << moving_edge.getSampleStep() <<
" pixels\n" <<
99 " Strip............................" << moving_edge.getStrip() <<
" pixels\n";
101 stream <<
" Good moving edge threshold......." << tracker.getGoodMovingEdgesRatioThreshold()*100 <<
"%\n";
108 std::stringstream stream;
109 stream <<
"KLT Setttings\n" <<
110 " Window size......................" << klt.getWindowSize() <<
"x"<< klt.getWindowSize() <<
" pixels\n" <<
111 " Mask border......................" << tracker.getKltMaskBorder() <<
" pixels\n" <<
112 " Maximum number of features......." << klt.getMaxFeatures() <<
"\n" <<
113 " Detected points quality.........." << klt.getQuality() <<
"\n" <<
114 " Minimum distance between points.." << klt.getMinDistance() <<
" pixels\n" <<
115 " Harris free parameter............" << klt.getHarrisFreeParameter() <<
"\n" <<
116 " Block size......................." << klt.getBlockSize() <<
"x" << klt.getBlockSize() <<
" pixels\n" <<
117 " Number of pyramid levels........." << klt.getPyramidLevels() <<
"\n";
123 const vpHomogeneousMatrix& src)
125 vpQuaternionVector quaternion;
126 src.extract(quaternion);
128 dst.translation.x = src[0][3];
129 dst.translation.y = src[1][3];
130 dst.translation.z = src[2][3];
132 dst.rotation.x = quaternion.x();
133 dst.rotation.y = quaternion.y();
134 dst.rotation.z = quaternion.z();
135 dst.rotation.w = quaternion.w();
139 const geometry_msgs::Transform& src)
141 vpTranslationVector translation(src.translation.x,src.translation.y,src.translation.z);
142 vpQuaternionVector quaternion(src.rotation.x,src.rotation.y,src.rotation.z,src.rotation.w);
143 dst.buildFrom(translation, quaternion);
147 const geometry_msgs::Pose& src)
149 vpQuaternionVector quaternion
150 (src.orientation.x, src.orientation.y, src.orientation.z,
152 vpRotationMatrix rotation(quaternion);
155 for(
unsigned i = 0; i < 3; ++i)
156 for(
unsigned j = 0; j < 3; ++j)
157 dst[i][j] = rotation[i][j];
160 dst[0][3] = src.position.x;
161 dst[1][3] = src.position.y;
162 dst[2][3] = src.position.z;
// NOTE(review): garbled extraction residue. The enclosing function
// signature and both loop bodies are missing from this dump, so the
// fragment cannot be reconstructed safely. The shape (a 3x3 double loop
// followed by a single 3-iteration loop) suggests a rotation-block copy
// plus a translation-column copy into a vpHomogeneousMatrix — TODO:
// recover the original definition from version control before editing.
169 for(
unsigned i = 0; i < 3; ++i)
170 for(
unsigned j = 0; j < 3; ++j)
174 for (
unsigned i = 0; i < 3; ++i)
180 visp_tracker::Init& srv)
182 srv.request.tracker_param.angle_appear = vpMath::deg(tracker.getAngleAppear());
183 srv.request.tracker_param.angle_disappear = vpMath::deg(tracker.getAngleDisappear());
187 vpMbGenericTracker &tracker)
189 tracker.setAngleAppear(vpMath::rad(req.tracker_param.angle_appear));
190 tracker.setAngleDisappear(vpMath::rad(req.tracker_param.angle_disappear));
194 const vpMbGenericTracker &tracker,
195 visp_tracker::Init& srv)
197 srv.request.moving_edge.first_threshold = tracker.getGoodMovingEdgesRatioThreshold();
198 srv.request.moving_edge.mask_size = moving_edge.getMaskSize();
199 srv.request.moving_edge.range = moving_edge.getRange();
200 srv.request.moving_edge.threshold = moving_edge.getThreshold();
201 srv.request.moving_edge.mu1 = moving_edge.getMu1();
202 srv.request.moving_edge.mu2 = moving_edge.getMu2();
203 srv.request.moving_edge.sample_step = moving_edge.getSampleStep();
204 srv.request.moving_edge.strip = moving_edge.getStrip();
208 vpMbGenericTracker &tracker,
211 tracker.setGoodMovingEdgesRatioThreshold(req.moving_edge.first_threshold);
212 moving_edge.setMaskSize( req.moving_edge.mask_size );
213 moving_edge.setRange( req.moving_edge.range );
214 moving_edge.setThreshold( req.moving_edge.threshold );
215 moving_edge.setMu1( req.moving_edge.mu1 );
216 moving_edge.setMu2( req.moving_edge.mu2 );
217 moving_edge.setSampleStep( req.moving_edge.sample_step );
218 moving_edge.setStrip( req.moving_edge.strip );
221 moving_edge.initMask();
223 tracker.setMovingEdge(moving_edge);
227 const vpMbGenericTracker &tracker,
228 visp_tracker::Init& srv)
230 srv.request.klt_param.max_features = klt.getMaxFeatures();
231 srv.request.klt_param.window_size = klt.getWindowSize();
232 srv.request.klt_param.quality = klt.getQuality();
233 srv.request.klt_param.min_distance = klt.getMinDistance();
234 srv.request.klt_param.harris = klt.getHarrisFreeParameter();
235 srv.request.klt_param.size_block = klt.getBlockSize();
236 srv.request.klt_param.pyramid_lvl = klt.getPyramidLevels();
237 srv.request.klt_param.mask_border = tracker.getKltMaskBorder();
241 vpMbGenericTracker &tracker,
244 klt.setMaxFeatures(req.klt_param.max_features);
245 klt.setWindowSize(req.klt_param.window_size);
246 klt.setQuality(req.klt_param.quality);
247 klt.setMinDistance(req.klt_param.min_distance);
248 klt.setHarrisFreeParameter(req.klt_param.harris);
249 klt.setBlockSize(req.klt_param.size_block);
250 klt.setPyramidLevels(req.klt_param.pyramid_lvl);
251 tracker.setKltMaskBorder((
unsigned)req.klt_param.mask_border);
253 tracker.setKltOpencv(klt);
257 sensor_msgs::CameraInfoConstPtr info)
260 throw std::runtime_error (
"missing camera calibration data");
264 if (info->K.size() != 3 * 3 || info->K[0] == 0.)
265 throw std::runtime_error (
"uncalibrated camera");
268 if (!info || info->P.size() != 3 * 4)
269 throw std::runtime_error
270 (
"camera calibration P matrix has an incorrect size");
272 if (info->distortion_model.empty ())
274 const double& px = info->K[0 * 3 + 0];
275 const double& py = info->K[1 * 3 + 1];
276 const double& u0 = info->K[0 * 3 + 2];
277 const double& v0 = info->K[1 * 3 + 2];
278 cam.initPersProjWithoutDistortion(px, py, u0, v0);
284 const double& px = info->P[0 * 4 + 0];
285 const double& py = info->P[1 * 4 + 1];
286 const double& u0 = info->P[0 * 4 + 2];
287 const double& v0 = info->P[1 * 4 + 2];
288 cam.initPersProjWithoutDistortion(px, py, u0, v0);
292 throw std::runtime_error (
"unsupported distortion model");
void convertInitRequestToVpMe(const visp_tracker::Init::Request &req, vpMbGenericTracker &tracker, vpMe &moving_edge)
void transformToVpHomogeneousMatrix(vpHomogeneousMatrix &dst, const geometry_msgs::Transform &src)
void vispImageToRos(sensor_msgs::Image &dst, const vpImage< unsigned char > &src)
Convert a ViSP image into a ROS one.
void convertVpMeToInitRequest(const vpMe &moving_edge, const vpMbGenericTracker &tracker, visp_tracker::Init &srv)
void convertVpMbTrackerToInitRequest(const vpMbGenericTracker &tracker, visp_tracker::Init &srv)
std::string convertVpKltOpencvToRosMessage(const vpMbGenericTracker &tracker, const vpKltOpencv &klt)
void convertVpKltOpencvToInitRequest(const vpKltOpencv &klt, const vpMbGenericTracker &tracker, visp_tracker::Init &srv)
void convertInitRequestToVpKltOpencv(const visp_tracker::Init::Request &req, vpMbGenericTracker &tracker, vpKltOpencv &klt)
std::string convertVpMeToRosMessage(const vpMbGenericTracker &tracker, const vpMe &moving_edge)
const std::string PLUMB_BOB
static int numChannels(const std::string &encoding)
std::string convertVpMbTrackerToRosMessage(const vpMbGenericTracker &tracker)
void initializeVpCameraFromCameraInfo(vpCameraParameters &cam, sensor_msgs::CameraInfoConstPtr info)
void convertInitRequestToVpMbTracker(const visp_tracker::Init::Request &req, vpMbGenericTracker &tracker)
void vpHomogeneousMatrixToTransform(geometry_msgs::Transform &dst, const vpHomogeneousMatrix &src)
void rosImageToVisp(vpImage< unsigned char > &dst, const sensor_msgs::Image::ConstPtr &src)
Convert a ROS image into a ViSP one.