depth_sensor_pose.cpp
// #define DEBUG_CALIBRATION

namespace depth_sensor_pose {

void DepthSensorPose::estimateParams(const sensor_msgs::ImageConstPtr& depth_msg,
                                     const sensor_msgs::CameraInfoConstPtr& info_msg) {
  // Update the data derived from the depth sensor parameters only when new parameter
  // values have arrived or continuous camera model updates are enabled
  if (cam_model_update_) {
    camera_model_.fromCameraInfo(info_msg);

    double cx = camera_model_.cx(), cy = camera_model_.cy(), vert_min, vert_max;

    // Calculate the vertical field of view of the depth sensor
    fieldOfView(vert_min, vert_max, cx, 0, cx, cy, cx, depth_msg->height - 1);
    const double vertical_fov = vert_max - vert_min;

    calcDeltaAngleForImgRows(vertical_fov);

    // Expected ground distances per image row for the extreme admissible sensor poses:
    // the lowest mount with the largest tilt gives the smallest distances, the highest
    // mount with the smallest tilt gives the largest ones
    calcGroundDistancesForImgRows(mount_height_min_, tilt_angle_max_, dist_to_ground_min_);
    calcGroundDistancesForImgRows(mount_height_max_, tilt_angle_min_, dist_to_ground_max_);

    ROS_DEBUG_STREAM("Recalculated parameters after camera model update. Parameters:\n"
        << "\n cx = " << camera_model_.cx() << " cy = " << camera_model_.cy()
        << "\n vertical FOV: " << vertical_fov);
  }
}

void DepthSensorPose::setRangeLimits(const float rmin, const float rmax) {
  if (rmin >= 0 && rmin < rmax) {
    range_min_ = rmin;
  }
  else {
    range_min_ = 0;
    ROS_ERROR("Incorrect value of minimum range parameter. Set default value: 0.");
  }
  if (rmax >= 0 && rmin < rmax) {
    range_max_ = rmax;
  }
  else {
    range_max_ = 10;
    ROS_ERROR("Incorrect value of maximum range parameter. Set default value: 10.");
  }
}

void DepthSensorPose::setSensorMountHeightMin(const float height) {
  if (height > 0) {
    mount_height_min_ = height;
  }
  else {
    mount_height_min_ = 0;
    ROS_ERROR("Incorrect value of sensor mount height parameter. Set default value: 0.");
  }
}

void DepthSensorPose::setSensorMountHeightMax(const float height) {
  if (height > 0) {
    mount_height_max_ = height;
  }
  else {
    mount_height_max_ = 1;
    ROS_ERROR("Incorrect value of sensor mount height parameter. Set default value: 1 m.");
  }
}

void DepthSensorPose::setSensorTiltAngleMin(const float angle) {
  if (angle < 90 && angle > -90) {
    tilt_angle_min_ = angle;
  }
  else {
    tilt_angle_min_ = 0;
    ROS_ERROR("Incorrect value of sensor tilt angle parameter. Set default value: 0.");
  }
}

void DepthSensorPose::setSensorTiltAngleMax(const float angle) {
  if (angle < 90 && angle > -90) {
    tilt_angle_max_ = angle;
  }
  else {
    tilt_angle_max_ = 0;
    ROS_ERROR("Incorrect value of sensor tilt angle parameter. Set default value: 0.");
  }
}

void DepthSensorPose::setUsedDepthHeight(const unsigned height) {
  if (height > 0) {
    used_depth_height_ = height;
  }
  else {
    used_depth_height_ = 200;
    ROS_ERROR("Incorrect value of used depth height parameter. Set default value: 200.");
  }
}

void DepthSensorPose::setDepthImgStepRow(const int step) {
  if (step > 0) {
    depth_image_step_row_ = step;
  }
  else {
    depth_image_step_row_ = 1;
    ROS_ERROR("Incorrect value of depth image row step parameter. Set default value: 1.");
  }
}

void DepthSensorPose::setDepthImgStepCol(const int step) {
  if (step > 0) {
    depth_image_step_col_ = step;
  }
  else {
    depth_image_step_col_ = 1;
    ROS_ERROR("Incorrect value of depth image column step parameter. Set default value: 1.");
  }
}

sensor_msgs::ImageConstPtr DepthSensorPose::getDbgImage() const {
  return dbg_image_;
}

double DepthSensorPose::lengthOfVector(const cv::Point3d& vec) const {
  return sqrt(vec.x*vec.x + vec.y*vec.y + vec.z*vec.z);
}

double DepthSensorPose::angleBetweenRays(const cv::Point3d& ray1,
                                         const cv::Point3d& ray2) const {
  double dot = ray1.x*ray2.x + ray1.y*ray2.y + ray1.z*ray2.z;

  return acos(dot / (lengthOfVector(ray1) * lengthOfVector(ray2)));
}

void DepthSensorPose::fieldOfView(double& min, double& max, double x1, double y1,
                                  double xc, double yc, double x2, double y2) {
  cv::Point2d raw_pixel_left(x1, y1);
  cv::Point2d rect_pixel_left = camera_model_.rectifyPoint(raw_pixel_left);
  cv::Point3d left_ray = camera_model_.projectPixelTo3dRay(rect_pixel_left);

  cv::Point2d raw_pixel_right(x2, y2);
  cv::Point2d rect_pixel_right = camera_model_.rectifyPoint(raw_pixel_right);
  cv::Point3d right_ray = camera_model_.projectPixelTo3dRay(rect_pixel_right);

  cv::Point2d raw_pixel_center(xc, yc);
  cv::Point2d rect_pixel_center = camera_model_.rectifyPoint(raw_pixel_center);
  cv::Point3d center_ray = camera_model_.projectPixelTo3dRay(rect_pixel_center);

  min = -angleBetweenRays(center_ray, right_ray);
  max = angleBetweenRays(left_ray, center_ray);

  ROS_ASSERT(min < 0 && max > 0);
}

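// Note on fieldOfView(): the three pixels are rectified, back-projected to 3D rays,
// and the returned min/max are the signed angles of the outer rays relative to the
// center ray, so (max - min) is the full field of view along the chosen pixel line.
// estimateParams() passes the pixels (cx, 0), (cx, cy) and (cx, height - 1), i.e. the
// center column, which yields the vertical field of view.
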
void DepthSensorPose::calcDeltaAngleForImgRows(double vertical_fov) {
  const unsigned img_height = camera_model_.fullResolution().height;
  delta_row_.resize(img_height);

  // Angle between the sensor optical axis and the ray through a specific image row
  for (unsigned i = 0; i < img_height; i++) {
    delta_row_[i] = vertical_fov * (i - camera_model_.cy() - 0.5) / (static_cast<double>(img_height) - 1);
  }
}

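// Worked example (hypothetical camera numbers): for a 480-row image with cy = 239.5
// and a vertical FOV of 0.785 rad (45 deg), the top row (i = 0) gets
// delta_row_[0] = 0.785 * (0 - 239.5 - 0.5) / 479 ≈ -0.393 rad and the bottom row
// (i = 479) gets delta_row_[479] ≈ +0.392 rad, i.e. roughly half of the FOV above
// and below the optical axis.
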
void DepthSensorPose::calcGroundDistancesForImgRows(double mount_height, double tilt_angle,
                                                    std::vector<double>& distances) {
  const double alpha = tilt_angle * M_PI / 180.0; // Sensor tilt angle in radians
  const unsigned img_height = camera_model_.fullResolution().height;

  ROS_ASSERT(img_height > 0 && mount_height > 0);

  distances.resize(img_height);

  // Calculations for each row of the image
  for (unsigned i = 0; i < img_height; i++) {
    if ((delta_row_[i] + alpha) > 0) {
      distances[i] = mount_height * sin(M_PI/2 - delta_row_[i])
          / cos(M_PI/2 - delta_row_[i] - alpha);
      ROS_ASSERT(distances[i] > 0);
    }
    else {
      // Rows that look at or above the horizon never intersect the ground;
      // use a large sentinel distance instead
      distances[i] = 100;
    }
  }
}

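// Geometry behind calcGroundDistancesForImgRows(): a ray through row i deviates from
// the optical axis by delta = delta_row_[i], so it points (alpha + delta) below the
// horizontal and meets the ground at slant range mount_height / sin(alpha + delta).
// Projected onto the optical axis this gives the expected depth reading
// mount_height * cos(delta) / sin(alpha + delta), which is exactly
// mount_height * sin(pi/2 - delta) / cos(pi/2 - delta - alpha) as computed above.
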
template<typename T>
void DepthSensorPose::getGroundPoints(const sensor_msgs::ImageConstPtr& depth_msg,
                                      pcl::PointCloud<pcl::PointXYZ>::Ptr& points,
                                      std::list<std::pair<unsigned, unsigned>>& points_indices) {
  enum Point { Row, Col, Depth };

#ifdef DEBUG_CALIBRATION
  std::vector<double> deltaVec;
#endif
  const unsigned img_height = depth_msg->height;
  const unsigned img_width = depth_msg->width;

  const T* data = reinterpret_cast<const T*>(&depth_msg->data[0]);

  // Topmost row (counted from the image top) that is still used in ground detection;
  // only the bottom used_depth_height_ rows are processed
  unsigned processed_rows = depth_image_step_row_;
  if (used_depth_height_ < img_height && used_depth_height_ > 0) {
    processed_rows = img_height - used_depth_height_;
  }

  // Loop over image rows starting from the bottom of the image
  for (unsigned row = img_height - 1; row > processed_rows; row -= depth_image_step_row_) {
    // Loop over image columns
    for (unsigned col = 0; col < img_width; col += depth_image_step_col_) {
      float d = 0.0;

      // Convert the raw depth value to meters
      if (typeid(T) == typeid(uint16_t)) {
        unsigned depth_raw_mm = static_cast<unsigned>(data[img_width * row + col]);
        d = static_cast<float>(depth_raw_mm) / 1000.0;
      }
      else if (typeid(T) == typeid(float)) {
        d = static_cast<float>(data[img_width * row + col]);
      }

#ifdef DEBUG_CALIBRATION
      std::ostringstream s;
      if (col % (depth_image_step_col_ * 20) == 0 && row % (depth_image_step_row_ * 10) == 0) {
        s << "GroundPoints point: (" << col << ", " << row << ") "
          << "d: " << d << " dist_to_ground min/max: (" << dist_to_ground_min_[row]
          << ", " << dist_to_ground_max_[row] << ")";
        ROS_INFO_STREAM(s.str());
      }
#endif

      // Accept the point as a ground candidate if its depth lies within the range of
      // ground distances expected for the admissible sensor poses
      if (points->size() < max_ground_points_ && d > range_min_ && d < range_max_ &&
          d > dist_to_ground_min_[row] && d < dist_to_ground_max_[row]) {

        // Project the pixel (col, row) to a 3D point with the pinhole camera model
        double z = d;
        double x = z * (static_cast<double>(col) - camera_model_.cx()) / camera_model_.fx();
        double y = z * (static_cast<double>(row) - camera_model_.cy()) / camera_model_.fy();

        points->push_back(pcl::PointXYZ(x, y, z));
        points_indices.push_back({row, col});
      }
    }
  }
}

void DepthSensorPose::sensorPoseCalibration(
    const sensor_msgs::ImageConstPtr& depth_msg,
    [[maybe_unused]] double& tilt_angle, [[maybe_unused]] double& height) {

  pcl::PointCloud<pcl::PointXYZ>::Ptr ground_points(new pcl::PointCloud<pcl::PointXYZ>);
  std::list<std::pair<unsigned, unsigned>> points_indices;

  // Check the image encoding
  if (depth_msg->encoding == sensor_msgs::image_encodings::TYPE_16UC1) {
    getGroundPoints<uint16_t>(depth_msg, ground_points, points_indices);
  }
  else if (depth_msg->encoding == sensor_msgs::image_encodings::TYPE_32FC1) {
    getGroundPoints<float>(depth_msg, ground_points, points_indices);
  }
  else {
    std::stringstream ss;
    ss << "Depth image has unsupported encoding: " << depth_msg->encoding;
    throw std::runtime_error(ss.str());
  }

  // Generate and publish a debug image if necessary
  if (publish_depth_enable_) {
    dbg_image_ = prepareDbgImage(depth_msg, points_indices);
  }

  if (ground_points->size() >= 3) {
    // Estimate ground plane parameters with RANSAC
    pcl::SampleConsensusModelPlane<pcl::PointXYZ>::Ptr
        model(new pcl::SampleConsensusModelPlane<pcl::PointXYZ>(ground_points));

    pcl::RandomSampleConsensus<pcl::PointXYZ> ransac(model);
    ransac.setDistanceThreshold(ransacDistanceThresh_);
    ransac.setMaxIterations(ransacMaxIter_);
    ransac.computeModel();

    Eigen::VectorXf ground_coeffs(4);
    ransac.getModelCoefficients(ground_coeffs);

    // Calculate mount height and tilt angle from the plane coefficients
    float a = ground_coeffs[0], b = ground_coeffs[1];
    float c = ground_coeffs[2], d = ground_coeffs[3];

    // Tilt is the angle between the plane normal n = [a, b, c] and its projection
    // [a, b, 0] onto the camera x-y plane; height is the distance from the camera
    // origin to the plane
    tilt_angle = std::acos((b*b + a*a) / (std::sqrt(b*b + a*a) * std::sqrt(a*a + b*b + c*c))) * 180.0 / M_PI;
    height = std::abs(d) / std::sqrt(a*a + b*b + c*c);

    ROS_DEBUG_THROTTLE(1, "Estimated height = %.2f angle = %.2f", height, tilt_angle);

    std::ostringstream s;
    s << " sensorLocationCalibration: ground_points size = " << ground_points->size()
      << "\n a = " << ground_coeffs[0]
      << "\n b = " << ground_coeffs[1]
      << "\n c = " << ground_coeffs[2]
      << "\n d = " << ground_coeffs[3]
      << "\n height = " << height
      << "\n tilt = " << tilt_angle;
    ROS_DEBUG_STREAM_THROTTLE(1, s.str());
  }
  else {
    ROS_ERROR("Ground points not detected. Please check parameters.");
    return;  // Keep the previous pose estimate if the plane could not be fitted
  }

  if (height < mount_height_min_ || height > mount_height_max_) {
    ROS_WARN("Estimated height (%.2f) not in range. Lowest value from the range used instead.", height);
    height = mount_height_min_;
  }

  if (tilt_angle < tilt_angle_min_ || tilt_angle > tilt_angle_max_) {
    ROS_WARN("Estimated tilt angle (%.2f) not in range. Lowest value from the range used instead.", tilt_angle);
    tilt_angle = tilt_angle_min_;
  }
}

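// Worked example for the plane-to-pose conversion above (hypothetical coefficients):
// for a = 0, b = 0.94, c = 0.34, d = -0.62 the normal length sqrt(a*a + b*b + c*c) ≈ 1.0,
// so height = |d| / 1.0 = 0.62 m and tilt = acos(sqrt(a*a + b*b) / 1.0) = acos(0.94)
// ≈ 20 deg, i.e. the angle between the ground plane normal and the camera's x-y plane.
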
sensor_msgs::ImagePtr DepthSensorPose::prepareDbgImage(const sensor_msgs::ImageConstPtr& depth_msg,
    const std::list<std::pair<unsigned, unsigned>>& ground_points_indices) {

  sensor_msgs::ImagePtr img(new sensor_msgs::Image);
  img->header = depth_msg->header;
  img->height = depth_msg->height;
  img->width = depth_msg->width;
  img->encoding = "rgb8"; // image_encodings::RGB8
  img->is_bigendian = depth_msg->is_bigendian;
  img->step = img->width * 3; // 3 bytes per pixel

  img->data.resize(img->step * img->height);
  uint8_t(*rgb_data)[3] = reinterpret_cast<uint8_t(*)[3]>(&img->data[0]);

  // Convert the depth image to a grayscale RGB image
  if (depth_msg->encoding == sensor_msgs::image_encodings::TYPE_16UC1) {
    const uint16_t* depth_data = reinterpret_cast<const uint16_t*>(&depth_msg->data[0]);
    for (unsigned i = 0; i < (img->width * img->height); ++i) {
      // Scale the value (mm) to the full range of an 8-bit channel
      uint8_t val = 255 * (depth_data[i] - range_min_ * 1000) / (range_max_ * 1000 - range_min_ * 1000);
      rgb_data[i][0] = 255 - val;
      rgb_data[i][1] = 255 - val;
      rgb_data[i][2] = 255 - val;
    }
  }
  else if (depth_msg->encoding == sensor_msgs::image_encodings::TYPE_32FC1) {
    const float* depth_data = reinterpret_cast<const float*>(&depth_msg->data[0]);
    for (unsigned i = 0; i < (img->width * img->height); ++i) {
      // Scale the value (m) to the full range of an 8-bit channel
      uint8_t val = 255 * (depth_data[i] - range_min_) / (range_max_ - range_min_);
      rgb_data[i][0] = 255 - val;
      rgb_data[i][1] = 255 - val;
      rgb_data[i][2] = 255 - val;
    }
  }
  else {
    throw std::runtime_error("Unsupported depth image encoding");
  }

  // Mark detected ground points in the debug image (red)
  for (const auto& pt : ground_points_indices) {
    const auto row = pt.first;
    const auto col = pt.second;
    rgb_data[row * img->width + col][0] = 255;
    rgb_data[row * img->width + col][1] = 0;
    rgb_data[row * img->width + col][2] = 0;
  }

  // Draw the border line of the ground detection area (green)
  std::list<std::pair<unsigned, unsigned>> pts;
  if (used_depth_height_ > 0 && used_depth_height_ <= img->height) {
    const auto line_row = img->height - used_depth_height_;
    for (unsigned i = 0; i < img->width; ++i) {
      pts.push_back({line_row, i});
    }
  }

  for (const auto& pt : pts) {
    const auto row = pt.first;
    const auto col = pt.second;
    rgb_data[row * img->width + col][0] = 0;
    rgb_data[row * img->width + col][1] = 255;
    rgb_data[row * img->width + col][2] = 0;
  }

  return img;
}

} // namespace depth_sensor_pose
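
A minimal usage sketch (not part of the original file). It assumes that DepthSensorPose is default-constructible, that the setters, estimateParams and sensorPoseCalibration shown above are public, and that the class header is available at the path used below; the callback wiring is only illustrative.

#include <ros/ros.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>
#include <depth_sensor_pose/depth_sensor_pose.h>  // assumed header location

// Estimate the sensor pose for every incoming depth frame (illustrative only)
void depthCallback(const sensor_msgs::ImageConstPtr& depth_msg,
                   const sensor_msgs::CameraInfoConstPtr& info_msg) {
  static depth_sensor_pose::DepthSensorPose estimator;
  static bool configured = false;

  if (!configured) {
    estimator.setRangeLimits(0.5f, 5.0f);        // valid depth range in meters
    estimator.setSensorMountHeightMin(0.2f);     // admissible mount height range in meters
    estimator.setSensorMountHeightMax(1.0f);
    estimator.setSensorTiltAngleMin(10.0f);      // admissible tilt range in degrees
    estimator.setSensorTiltAngleMax(40.0f);
    estimator.setUsedDepthHeight(200);           // rows from the image bottom used for ground search
    estimator.setDepthImgStepRow(2);             // subsampling of processed pixels
    estimator.setDepthImgStepCol(2);
    configured = true;
  }

  estimator.estimateParams(depth_msg, info_msg); // refresh camera-model-derived data

  double tilt = 0.0, height = 0.0;
  estimator.sensorPoseCalibration(depth_msg, tilt, height);
  ROS_INFO("Estimated sensor pose: height %.2f m, tilt %.2f deg", height, tilt);
}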