#include <boost/format.hpp>
#include <sensor_msgs/Image.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpTranslationVector.h>
#include <visp3/core/vpQuaternionVector.h>
#include <visp3/mbt/vpMbGenericTracker.h>
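
// Convert a ROS image message (src) into a ViSP grayscale image (dst).
// MONO8, RGB8, RGBA8, BGR8 and BGRA8 refer to the sensor_msgs::image_encodings
// constants; numChannels() comes from the same header.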
    const sensor_msgs::Image::ConstPtr& src)
{
  // Resize the destination image if it does not match the source size.
  if (src->width != dst.getWidth() || src->height != dst.getHeight())
  {
    ROS_INFO("dst is %dx%d but src size is %dx%d, resizing.",
             dst.getWidth(), dst.getHeight(), src->width, src->height);
    dst.resize(src->height, src->width);
  }

  if (src->encoding == MONO8)
  {
    // Grayscale source: copy the pixel buffer directly.
    memcpy(dst.bitmap, &src->data[0],
           dst.getHeight() * src->step * sizeof(unsigned char));
  }
  else if (src->encoding == RGB8 || src->encoding == RGBA8
           || src->encoding == BGR8 || src->encoding == BGRA8)
  {
    // Color source: average the color channels (skipping alpha) into gray levels.
    unsigned nc = numChannels(src->encoding);
    unsigned cEnd =
      (src->encoding == RGBA8 || src->encoding == BGRA8) ? nc - 1 : nc;

    for (unsigned i = 0; i < dst.getWidth(); ++i)
      for (unsigned j = 0; j < dst.getHeight(); ++j)
      {
        unsigned acc = 0; // accumulator (declaration assumed)
        for (unsigned c = 0; c < cEnd; ++c)
          acc += src->data[j * src->step + i * nc + c];
        dst[j][i] = acc / cEnd; // channel average written back (assumed)
      }
  }
  else
  {
    // boost::format placeholders are written "%1%", and the argument must be fed in.
    boost::format fmt("bad encoding '%1%'");
    fmt % src->encoding;
    throw std::runtime_error(fmt.str());
  }
}
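
// Convert a ViSP grayscale vpImage<unsigned char> (src) into a ROS
// sensor_msgs::Image message (dst).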
    const vpImage<unsigned char>& src)
{
  dst.width = src.getWidth();
  dst.height = src.getHeight();
  dst.encoding = sensor_msgs::image_encodings::MONO8; // grayscale, one byte per pixel (assumed)
  dst.step = src.getWidth();
  dst.data.resize(dst.height * dst.step);
  for (unsigned i = 0; i < src.getWidth(); ++i)
    for (unsigned j = 0; j < src.getHeight(); ++j)
      dst.data[j * dst.step + i] = src[j][i];
}
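
// Summarize the common model-based tracker settings in a human-readable string.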
  std::stringstream stream;
  stream << "Model Based Tracker Common Settings\n"
         << " Angle for polygons appearance...." << vpMath::deg(tracker.getAngleAppear()) << " degrees\n"
         << " Angle for polygons disappearance." << vpMath::deg(tracker.getAngleDisappear()) << " degrees\n";
  std::stringstream stream;
  stream << "Moving Edge Settings\n"
         << " Size of the convolution masks...." << moving_edge.getMaskSize()
         << "x" << moving_edge.getMaskSize() << " pixels\n"
         << " Query range +/- J................" << moving_edge.getRange() << " pixels\n"
         << " Likelihood test ratio............" << moving_edge.getThreshold() << "\n"
         << " Contrast tolerance +/-..........." << moving_edge.getMu1() * 100 << "% and "
         << moving_edge.getMu2() * 100 << "%\n"
         << " Sample step......................" << moving_edge.getSampleStep() << " pixels\n"
         << " Strip............................" << moving_edge.getStrip() << " pixels\n";

  stream << " Good moving edge threshold......." << tracker.getGoodMovingEdgesRatioThreshold() * 100 << "%\n";
  std::stringstream stream;
  stream << "KLT Settings\n"
         << " Window size......................" << klt.getWindowSize()
         << "x" << klt.getWindowSize() << " pixels\n"
         << " Mask border......................" << tracker.getKltMaskBorder() << " pixels\n"
         << " Maximum number of features......." << klt.getMaxFeatures() << "\n"
         << " Detected points quality.........." << klt.getQuality() << "\n"
         << " Minimum distance between points.." << klt.getMinDistance() << " pixels\n"
         << " Harris free parameter............" << klt.getHarrisFreeParameter() << "\n"
         << " Block size......................." << klt.getBlockSize()
         << "x" << klt.getBlockSize() << " pixels\n"
         << " Number of pyramid levels........." << klt.getPyramidLevels() << "\n";
    const vpHomogeneousMatrix& src)
{
  vpQuaternionVector quaternion;
  src.extract(quaternion);

  dst.translation.x = src[0][3];
  dst.translation.y = src[1][3];
  dst.translation.z = src[2][3];

  dst.rotation.x = quaternion.x();
  dst.rotation.y = quaternion.y();
  dst.rotation.z = quaternion.z();
  dst.rotation.w = quaternion.w();
}
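
// Convert a geometry_msgs::Transform (src) into a ViSP vpHomogeneousMatrix (dst).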
    const geometry_msgs::Transform& src)
{
  vpTranslationVector translation(src.translation.x, src.translation.y, src.translation.z);
  vpQuaternionVector quaternion(src.rotation.x, src.rotation.y, src.rotation.z, src.rotation.w);
// ViSP releases newer than 3.6.0 provide build(); older releases use buildFrom().
#if VISP_VERSION_INT > VP_VERSION_INT(3, 6, 0)
  dst.build(translation, quaternion);
#else
  dst.buildFrom(translation, quaternion);
#endif
}
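
// Convert a geometry_msgs::Pose (src) into a ViSP vpHomogeneousMatrix (dst).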
    const geometry_msgs::Pose& src)
{
  vpQuaternionVector quaternion(src.orientation.x, src.orientation.y,
                                src.orientation.z, src.orientation.w);
  vpRotationMatrix rotation(quaternion);

  // Copy the 3x3 rotation block.
  for (unsigned i = 0; i < 3; ++i)
    for (unsigned j = 0; j < 3; ++j)
      dst[i][j] = rotation[i][j];

  // Copy the translation (last column).
  dst[0][3] = src.position.x;
  dst[1][3] = src.position.y;
  dst[2][3] = src.position.z;
}

  for (unsigned i = 0; i < 3; ++i)
    for (unsigned j = 0; j < 3; ++j)

  for (unsigned i = 0; i < 3; ++i)
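
// Copy the common tracker parameters into a visp_tracker::Init service request
// (angles are reported in degrees).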
    visp_tracker::Init& srv)
{
  srv.request.tracker_param.angle_appear = vpMath::deg(tracker.getAngleAppear());
  srv.request.tracker_param.angle_disappear = vpMath::deg(tracker.getAngleDisappear());
}
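
// Apply the common tracker parameters received in an Init request to the
// tracker (angles are converted back to radians).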
    vpMbGenericTracker &tracker)
{
  tracker.setAngleAppear(vpMath::rad(req.tracker_param.angle_appear));
  tracker.setAngleDisappear(vpMath::rad(req.tracker_param.angle_disappear));
}
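
// Copy the moving-edge parameters into a visp_tracker::Init service request.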
    const vpMbGenericTracker &tracker,
    visp_tracker::Init& srv)
{
  srv.request.moving_edge.first_threshold = tracker.getGoodMovingEdgesRatioThreshold();
  srv.request.moving_edge.mask_size = moving_edge.getMaskSize();
  srv.request.moving_edge.range = moving_edge.getRange();
  srv.request.moving_edge.threshold = moving_edge.getThreshold();
  srv.request.moving_edge.mu1 = moving_edge.getMu1();
  srv.request.moving_edge.mu2 = moving_edge.getMu2();
  srv.request.moving_edge.sample_step = moving_edge.getSampleStep();
  srv.request.moving_edge.strip = moving_edge.getStrip();
}
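
// Apply the moving-edge parameters received in an Init request to the vpMe
// object and propagate them to the tracker.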
    vpMbGenericTracker &tracker,
  tracker.setGoodMovingEdgesRatioThreshold(req.moving_edge.first_threshold);
  moving_edge.setMaskSize(req.moving_edge.mask_size);
  moving_edge.setRange(req.moving_edge.range);
  moving_edge.setThreshold(req.moving_edge.threshold);
  moving_edge.setMu1(req.moving_edge.mu1);
  moving_edge.setMu2(req.moving_edge.mu2);
  moving_edge.setSampleStep(req.moving_edge.sample_step);
  moving_edge.setStrip(req.moving_edge.strip);

  // Recompute the convolution masks after the size change.
  moving_edge.initMask();

  // Hand the updated settings over to the tracker.
  tracker.setMovingEdge(moving_edge);
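
// Copy the KLT parameters into a visp_tracker::Init service request.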
    const vpMbGenericTracker &tracker,
    visp_tracker::Init& srv)
{
  srv.request.klt_param.max_features = klt.getMaxFeatures();
  srv.request.klt_param.window_size = klt.getWindowSize();
  srv.request.klt_param.quality = klt.getQuality();
  srv.request.klt_param.min_distance = klt.getMinDistance();
  srv.request.klt_param.harris = klt.getHarrisFreeParameter();
  srv.request.klt_param.size_block = klt.getBlockSize();
  srv.request.klt_param.pyramid_lvl = klt.getPyramidLevels();
  srv.request.klt_param.mask_border = tracker.getKltMaskBorder();
}
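
// Apply the KLT parameters received in an Init request to the vpKltOpencv
// object and propagate them to the tracker.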
    vpMbGenericTracker &tracker,
  klt.setMaxFeatures(req.klt_param.max_features);
  klt.setWindowSize(req.klt_param.window_size);
  klt.setQuality(req.klt_param.quality);
  klt.setMinDistance(req.klt_param.min_distance);
  klt.setHarrisFreeParameter(req.klt_param.harris);
  klt.setBlockSize(req.klt_param.size_block);
  klt.setPyramidLevels(req.klt_param.pyramid_lvl);
  tracker.setKltMaskBorder((unsigned)req.klt_param.mask_border);

  // Hand the updated settings over to the tracker.
  tracker.setKltOpencv(klt);
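
// Initialize ViSP camera parameters (vpCameraParameters) from a ROS
// sensor_msgs::CameraInfo message.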
    sensor_msgs::CameraInfoConstPtr info)
{
  if (!info) // guard condition assumed
    throw std::runtime_error("missing camera calibration data");

  // The camera must be calibrated: K is a 3x3 matrix with a non-zero focal length.
  if (info->K.size() != 3 * 3 || info->K[0] == 0.)
    throw std::runtime_error("uncalibrated camera");

  // The projection matrix P must be 3x4.
  if (!info || info->P.size() != 3 * 4)
    throw std::runtime_error("camera calibration P matrix has an incorrect size");

  if (info->distortion_model.empty())
  {
    // No distortion model: read the intrinsics from the K matrix.
    const double& px = info->K[0 * 3 + 0];
    const double& py = info->K[1 * 3 + 1];
    const double& u0 = info->K[0 * 3 + 2];
    const double& v0 = info->K[1 * 3 + 2];
    cam.initPersProjWithoutDistortion(px, py, u0, v0);
    return;
  }

  // Assumption: the supported case is the standard ROS "plumb_bob" model; the
  // original guard is not part of this excerpt.
  if (info->distortion_model == "plumb_bob")
  {
    // Use the intrinsics of the rectified image, taken from the P matrix.
    const double& px = info->P[0 * 4 + 0];
    const double& py = info->P[1 * 4 + 1];
    const double& u0 = info->P[0 * 4 + 2];
    const double& v0 = info->P[1 * 4 + 2];
    cam.initPersProjWithoutDistortion(px, py, u0, v0);
    return;
  }

  throw std::runtime_error("unsupported distortion model");
}