find_object_on_plane_nodelet.cpp
// -*- mode: c++ -*-
/*********************************************************************
 * Software License Agreement (BSD License)
 *
 * Copyright (c) 2015, JSK Lab
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following
 * disclaimer in the documentation and/or other materials provided
 * with the distribution.
 * * Neither the name of the JSK Lab nor the names of its
 * contributors may be used to endorse or promote products derived
 * from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *********************************************************************/
#define BOOST_PARAMETER_MAX_ARITY 7
#include "jsk_pcl_ros/find_object_on_plane.h"
#include <opencv2/opencv.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>

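// Overview (added note, inferred from the code below): this nodelet takes a
// binary object mask ("input"), the camera intrinsics ("input/camera_info"),
// and the coefficients of a supporting plane ("input/coefficients"), and
// searches for a tight, plane-aligned rectangle around the masked pixels.
// Candidate rectangle origins are sampled on the plane, admissible in-plane
// orientations are enumerated per origin, and the candidate with the smallest
// (approximate) area is drawn into "debug/min_area_rect_image".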
namespace jsk_pcl_ros
{
  void FindObjectOnPlane::onInit()
  {
    DiagnosticNodelet::onInit();
    pub_min_area_rect_image_ = advertise<sensor_msgs::Image>(
      *pnh_, "debug/min_area_rect_image", 1);
  }

  void FindObjectOnPlane::subscribe()
  {
    sub_image_.subscribe(*pnh_, "input", 1);
    sub_info_.subscribe(*pnh_, "input/camera_info", 1);
    sub_coefficients_.subscribe(*pnh_, "input/coefficients", 1);
    sync_ = boost::make_shared<message_filters::Synchronizer<SyncPolicy> >(100);
    sync_->connectInput(sub_image_, sub_info_, sub_coefficients_);
    sync_->registerCallback(boost::bind(&FindObjectOnPlane::find,
                                        this, _1, _2, _3));
  }

  void FindObjectOnPlane::unsubscribe()
  {
    sub_image_.unsubscribe();
    sub_info_.unsubscribe();
    sub_coefficients_.unsubscribe();
  }

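  // The find() callback below is the main pipeline. Summary (added note,
  // derived from the code): the mask image is scanned for foreground pixels,
  // the 2-D minimum-area rotated rectangle (cv::minAreaRect) is drawn in red
  // for reference, a grid of candidate rectangle origins is generated on the
  // plane around the bounding-box corner, and for every origin the admissible
  // orientations and their extents are evaluated; the smallest-area candidate
  // is redrawn in yellow and the annotated image is published on
  // "debug/min_area_rect_image".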
  void FindObjectOnPlane::find(
    const sensor_msgs::Image::ConstPtr& image_msg,
    const sensor_msgs::CameraInfo::ConstPtr& info_msg,
    const pcl_msgs::ModelCoefficients::ConstPtr& polygon_3d_coefficient_msg)
  {
    cv::Mat object_image = cv_bridge::toCvShare(image_msg, image_msg->encoding)->image;

    image_geometry::PinholeCameraModel model;
    pcl::ModelCoefficients::Ptr polygon_coefficients
      (new pcl::ModelCoefficients);
    pcl_conversions::toPCL(*polygon_3d_coefficient_msg, *polygon_coefficients);
    jsk_recognition_utils::Plane::Ptr plane
      (new jsk_recognition_utils::Plane(polygon_coefficients->values));
    model.fromCameraInfo(info_msg);
    std::vector<cv::Point> all_points;
    for (int j = 0; j < object_image.rows; j++) {
      for (int i = 0; i < object_image.cols; i++) {
        if (object_image.at<uchar>(j, i) == 255) {
          all_points.push_back(cv::Point(i, j));
        }
      }
    }
    cv::RotatedRect obb = cv::minAreaRect(all_points);

    cv::Mat min_area_rect_image;
    cv::cvtColor(object_image, min_area_rect_image, CV_GRAY2BGR);
    cv::Point2f vertices2f[4];
    obb.points(vertices2f);
    cv::line(min_area_rect_image, vertices2f[0], vertices2f[1],
             cv::Scalar(0, 0, 255), 4);
    cv::line(min_area_rect_image, vertices2f[1], vertices2f[2],
             cv::Scalar(0, 0, 255), 4);
    cv::line(min_area_rect_image, vertices2f[2], vertices2f[3],
             cv::Scalar(0, 0, 255), 4);
    cv::line(min_area_rect_image, vertices2f[3], vertices2f[0],
             cv::Scalar(0, 0, 255), 4);
    cv::Rect bb = obb.boundingRect();
    std::vector<cv::Point3f> search_points_3d;
    std::vector<cv::Point2f> search_points_2d;
    generateStartPoints(cv::Point2f(bb.x, bb.y),
                        model, polygon_coefficients,
                        search_points_3d, search_points_2d);
    for (size_t i = 0; i < search_points_2d.size(); i++) {
      cv::circle(min_area_rect_image, search_points_2d[i], 5, cv::Scalar(0, 255, 0), 1);
    }

    double min_area = DBL_MAX;
    double min_angle;
    cv::Point2f min_search_point;
    double min_x;
    double min_y;
    for (size_t i = 0; i < search_points_2d.size(); i++) {
      std::vector<double> angles;
      std::vector<double> max_x;
      std::vector<double> max_y;
      generateAngles(object_image, search_points_2d[i], angles,
                     max_x, max_y,
                     model, plane);
      // draw all admissible angles for this start point
      for (size_t j = 0; j < angles.size(); j++) {
        double area = drawAngle(min_area_rect_image, search_points_2d[i], angles[j],
                                max_x[j], max_y[j], model, plane, cv::Scalar(0, 255, 0));
        if (area < min_area) {
          min_area = area;
          min_x = max_x[j];
          min_y = max_y[j];
          min_angle = angles[j];
          min_search_point = search_points_2d[i];
        }
      }
    }
    // redraw the best (smallest-area) candidate in yellow
    drawAngle(min_area_rect_image, min_search_point, min_angle,
              min_x, min_y, model, plane, cv::Scalar(0, 255, 255));
    // convert the points into 3-D
    pub_min_area_rect_image_.publish(
      cv_bridge::CvImage(image_msg->header,
                         sensor_msgs::image_encodings::BGR8,
                         min_area_rect_image).toImageMsg());
    NODELET_INFO("published");
  }

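  // generateAngles() (added note, derived from the code): for a candidate
  // origin q (test_point) and an in-plane angle, build the image-space frame
  // {ux, uy}, where ux is the pixel direction of the angle and uy comes from
  // getUyEnd() so that it corresponds to a perpendicular direction *on the
  // plane*, not merely in the image. Every foreground pixel p is decomposed
  // as p - q = alpha * ux + beta * uy by solving the 2x2 system
  // A * [alpha, beta]^T = p - q with A = [ux | uy]. An angle is kept only if
  // every pixel has alpha >= 0 and beta >= 0, i.e. the whole blob lies in the
  // first quadrant of that frame; the largest alpha and beta found become the
  // rectangle extents max_x and max_y.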
  void FindObjectOnPlane::generateAngles(
    const cv::Mat& blob_image, const cv::Point2f& test_point,
    std::vector<double>& angles,
    std::vector<double>& max_x,
    std::vector<double>& max_y,
    const image_geometry::PinholeCameraModel& model,
    const jsk_recognition_utils::Plane::Ptr& plane)
  {
    angles.clear();
    const double angle_step = 3;
    std::vector<cv::Point> indices;
    for (int j = 0; j < blob_image.rows; j++) {
      for (int i = 0; i < blob_image.cols; i++) {
        if (blob_image.at<uchar>(j, i) != 0) { // any non-zero pixel counts as foreground
          indices.push_back(cv::Point(i, j));
        }
      }
    }
    for (double angle = -180; angle < 180; angle += angle_step) {
      Eigen::Vector2f ux(cos(angle/180*M_PI), sin(angle/180*M_PI));
      cv::Point2d uy_end = getUyEnd(test_point, cv::Point2d(test_point.x + ux[0], test_point.y + ux[1]),
                                    model,
                                    plane);
      Eigen::Vector2f uy(uy_end.x - test_point.x, uy_end.y - test_point.y);

      Eigen::Matrix2f A;
      A << ux[0], uy[0],
           ux[1], uy[1];
      bool excluded = false;
      double max_alpha = -DBL_MAX;
      double max_beta = -DBL_MAX;
      for (size_t i = 0; i < indices.size(); i++) {
        Eigen::Vector2f p_q = Eigen::Vector2f(indices[i].x, indices[i].y) - Eigen::Vector2f(test_point.x, test_point.y);
        Eigen::Vector2f a_b = A.inverse() * p_q;
        double alpha = a_b[0];
        double beta = a_b[1];
        if (alpha < 0 || beta < 0) {
          excluded = true;
          break;
        }
        if (alpha > max_alpha) {
          max_alpha = alpha;
        }
        if (beta > max_beta) {
          max_beta = beta;
        }
      }
      if (!excluded) {
        angles.push_back(angle);
        max_x.push_back(max_alpha);
        max_y.push_back(max_beta);
      }
    }
  }

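  // rayPlaneInteersect() (sic, the identifier is spelled this way in the
  // class): intersects a viewing ray x = alpha * d (d from
  // projectPixelTo3dRay, camera at the origin) with the plane n . x + D = 0,
  // giving alpha = -D / (n . d), so the returned 3-D point is alpha * d in
  // the camera frame.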
  Eigen::Vector3f FindObjectOnPlane::rayPlaneInteersect(
    const cv::Point3d& ray,
    const jsk_recognition_utils::Plane::Ptr& plane)
  {
    Eigen::Vector3f n = plane->getNormal();
    Eigen::Vector3f d(ray.x, ray.y, ray.z);
    double alpha = - plane->getD() / n.dot(d);
    return alpha * d;
  }

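  // getUyEnd() (added note, derived from the code): back-project the pixel
  // segment ux_start -> ux_end onto the plane, take the cross product of the
  // plane normal with that 3-D direction to get the in-plane perpendicular,
  // step one unit along it, and re-project the result into the image. This
  // yields the pixel direction that is perpendicular on the plane, which
  // differs from the perpendicular in the image because of perspective
  // foreshortening.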
  cv::Point2d FindObjectOnPlane::getUyEnd(
    const cv::Point2d& ux_start,
    const cv::Point2d& ux_end,
    const image_geometry::PinholeCameraModel& model,
    const jsk_recognition_utils::Plane::Ptr& plane)
  {
    cv::Point3d start_ray = model.projectPixelTo3dRay(ux_start);
    cv::Point3d end_ray = model.projectPixelTo3dRay(ux_end);
    Eigen::Vector3f ux_start_3d = rayPlaneInteersect(start_ray, plane);
    Eigen::Vector3f ux_end_3d = rayPlaneInteersect(end_ray, plane);
    Eigen::Vector3f ux_3d = ux_end_3d - ux_start_3d;
    Eigen::Vector3f normal = plane->getNormal();
    Eigen::Vector3f uy_3d = normal.cross(ux_3d).normalized();
    Eigen::Vector3f uy_end_3d = ux_start_3d + uy_3d;
    cv::Point2d uy_end = model.project3dToPixel(cv::Point3d(uy_end_3d[0],
                                                            uy_end_3d[1],
                                                            uy_end_3d[2]));
    return uy_end;
  }

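  // drawAngle() (added note, derived from the code): draws the candidate
  // rectangle spanned by max_x * ux and max_y * uy from the start point and
  // returns max_x * max_y as its score. As the original TODO below notes,
  // this product mixes image-space and plane-induced units, so it is only an
  // approximate area used for ranking candidates, not a metric area on the
  // plane.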
  double FindObjectOnPlane::drawAngle(
    cv::Mat& out_image, const cv::Point2f& test_point, const double angle,
    const double max_x, const double max_y,
    const image_geometry::PinholeCameraModel& model,
    const jsk_recognition_utils::Plane::Ptr& plane,
    cv::Scalar color)
  {
    Eigen::Vector2f ux(cos(angle/180*M_PI), sin(angle/180*M_PI));
    cv::Point2d uy_end = getUyEnd(
      test_point, cv::Point2d(test_point.x + ux[0], test_point.y + ux[1]),
      model, plane);
    Eigen::Vector2f uy(uy_end.x - test_point.x, uy_end.y - test_point.y);
    Eigen::Vector2f to_point = ux * max_x + Eigen::Vector2f(test_point.x, test_point.y);
    Eigen::Vector2f to_point2 = uy * max_y + Eigen::Vector2f(test_point.x, test_point.y);
    cv::Point2f to_point_cv(to_point[0], to_point[1]);
    cv::Point2f to_point2_cv(to_point2[0], to_point2[1]);
    cv::Point2f to_point3_cv = to_point_cv + to_point2_cv - test_point;
    cv::line(out_image, test_point, to_point_cv, color, 4);
    cv::line(out_image, test_point, to_point2_cv, color, 4);
    cv::line(out_image, to_point_cv, to_point3_cv, color, 4);
    cv::line(out_image, to_point2_cv, to_point3_cv, color, 4);
    return max_x * max_y; // TODO: it's not correct!
  }

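  // generateStartPoints() (added note, derived from the code): back-projects
  // the given pixel onto the plane and samples a (2 * resolution)^2 grid of
  // candidate origins around it, spaced resolution_step (0.01, in camera-frame
  // units) apart, each re-projected onto the plane and into the image. The
  // offsets are applied along the camera-frame x/y axes before the plane
  // projection, so the grid is only approximately regular on the plane itself.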
  void FindObjectOnPlane::generateStartPoints(
    const cv::Point2f& point_2d,
    const image_geometry::PinholeCameraModel& model,
    const pcl::ModelCoefficients::Ptr& coefficients,
    std::vector<cv::Point3f>& search_points_3d,
    std::vector<cv::Point2f>& search_points_2d)
  {
    NODELET_INFO("generateStartPoints");
    jsk_recognition_utils::Plane::Ptr plane
      (new jsk_recognition_utils::Plane(coefficients->values));
    cv::Point3d ray = model.projectPixelTo3dRay(point_2d);
    Eigen::Vector3f projected_point = rayPlaneInteersect(ray, plane);
    const double resolution_step = 0.01;
    const int resolution = 5;
    search_points_3d.clear();
    search_points_2d.clear();
    for (int i = - resolution; i < resolution; ++i) {
      for (int j = - resolution; j < resolution; ++j) {
        double x_diff = resolution_step * i;
        double y_diff = resolution_step * j;
        Eigen::Vector3f moved_point = projected_point + Eigen::Vector3f(x_diff, y_diff, 0);
        Eigen::Vector3f projected_moved_point;
        plane->project(moved_point, projected_moved_point);
        cv::Point3f projected_moved_point_cv(projected_moved_point[0],
                                             projected_moved_point[1],
                                             projected_moved_point[2]);
        search_points_3d.push_back(cv::Point3f(projected_moved_point_cv));
        cv::Point2d p2d = model.project3dToPixel(projected_moved_point_cv);
        search_points_2d.push_back(p2d);
      }
    }
  }
}

#include <pluginlib/class_list_macros.h>
PLUGINLIB_EXPORT_CLASS (jsk_pcl_ros::FindObjectOnPlane, nodelet::Nodelet);
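// Usage sketch (added, not part of the original file): the nodelet subscribes
// on its private namespace, so the three inputs can be remapped when starting
// it standalone. The plugin lookup name "jsk_pcl/FindObjectOnPlane" and the
// source topic names below are assumptions; check the package's nodelet
// plugin XML and your own pipeline for the real names.
//
//   rosrun nodelet nodelet standalone jsk_pcl/FindObjectOnPlane \
//     ~input:=/object_mask \
//     ~input/camera_info:=/camera/rgb/camera_info \
//     ~input/coefficients:=/plane_coefficients
//
// The annotated result is then published on ~debug/min_area_rect_image.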