#define BOOST_PARAMETER_MAX_ARITY 7
#include "jsk_pcl_ros/find_object_on_plane.h"
#include <opencv2/opencv.hpp>
#include <pluginlib/class_list_macros.h>

namespace jsk_pcl_ros
{
  void FindObjectOnPlane::onInit()
  {
    DiagnosticNodelet::onInit();
    pub_min_area_rect_image_ = advertise<sensor_msgs::Image>(
      *pnh_, "debug/min_area_rect_image", 1);
  }

  void FindObjectOnPlane::subscribe()
  {
    sync_ = boost::make_shared<message_filters::Synchronizer<SyncPolicy> >(100);
    sync_->connectInput(sub_image_, sub_info_, sub_coefficients_);
    sync_->registerCallback(boost::bind(&FindObjectOnPlane::find, this, _1, _2, _3));
  }
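The synchronizer above relies on a SyncPolicy typedef declared in the class header, which is not part of this listing. A declaration along the following lines is typical for keeping the three inputs aligned; the use of ApproximateTime here is an assumption, and the header may use a different policy.

    #include <message_filters/synchronizer.h>
    #include <message_filters/sync_policies/approximate_time.h>
    #include <sensor_msgs/Image.h>
    #include <sensor_msgs/CameraInfo.h>
    #include <pcl_msgs/ModelCoefficients.h>

    typedef message_filters::sync_policies::ApproximateTime<
      sensor_msgs::Image,
      sensor_msgs::CameraInfo,
      pcl_msgs::ModelCoefficients> SyncPolicy;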
  void FindObjectOnPlane::find(
    const sensor_msgs::Image::ConstPtr& image_msg,
    const sensor_msgs::CameraInfo::ConstPtr& info_msg,
    const pcl_msgs::ModelCoefficients::ConstPtr& polygon_3d_coefficient_msg)
  {
    cv::Mat object_image
      = cv_bridge::toCvShare(image_msg, image_msg->encoding)->image;
    image_geometry::PinholeCameraModel model;
    model.fromCameraInfo(info_msg);
    pcl::ModelCoefficients::Ptr polygon_coefficients
      (new pcl::ModelCoefficients);
    pcl_conversions::toPCL(*polygon_3d_coefficient_msg, *polygon_coefficients);
    jsk_recognition_utils::Plane::Ptr plane
      (new jsk_recognition_utils::Plane(polygon_coefficients->values));

    // Collect every foreground (255) pixel of the object mask.
    std::vector<cv::Point> all_points;
    for (int j = 0; j < object_image.rows; j++) {
      for (int i = 0; i < object_image.cols; i++) {
        if (object_image.at<uchar>(j, i) == 255) {
          all_points.push_back(cv::Point(i, j));
        }
      }
    }
    cv::RotatedRect obb = cv::minAreaRect(all_points);

    // Draw the minimum-area rotated bounding box on a BGR copy of the mask.
    cv::Mat min_area_rect_image;
    cv::cvtColor(object_image, min_area_rect_image, CV_GRAY2BGR);
    cv::Point2f vertices2f[4];
    obb.points(vertices2f);
    cv::line(min_area_rect_image, vertices2f[0], vertices2f[1],
             cv::Scalar(0, 0, 255), 4);
    cv::line(min_area_rect_image, vertices2f[1], vertices2f[2],
             cv::Scalar(0, 0, 255), 4);
    cv::line(min_area_rect_image, vertices2f[2], vertices2f[3],
             cv::Scalar(0, 0, 255), 4);
    cv::line(min_area_rect_image, vertices2f[3], vertices2f[0],
             cv::Scalar(0, 0, 255), 4);

    // Generate candidate start points on the plane around the corner of the
    // axis-aligned bounding box and mark them on the debug image.
    cv::Rect bb = obb.boundingRect();
    std::vector<cv::Point3f> search_points_3d;
    std::vector<cv::Point2f> search_points_2d;
    generateStartPoints(cv::Point2f(bb.x, bb.y),
                        model, polygon_coefficients,
                        search_points_3d, search_points_2d);
    for (size_t i = 0; i < search_points_2d.size(); i++) {
      cv::circle(min_area_rect_image, search_points_2d[i], 5, cv::Scalar(0, 255, 0), 1);
    }
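cv::minAreaRect computes the smallest rotated rectangle enclosing the collected pixels, and cv::RotatedRect::points fills its four corners in consecutive order, which is why the drawing code above can connect vertices2f[k] to vertices2f[k+1] directly. A small self-contained illustration with made-up points:

    #include <opencv2/opencv.hpp>
    #include <iostream>

    int main()
    {
      std::vector<cv::Point> points = {
        {10, 10}, {40, 20}, {35, 50}, {12, 45}, {25, 30}
      };
      cv::RotatedRect obb = cv::minAreaRect(points);

      cv::Point2f vertices[4];
      obb.points(vertices);  // corners in consecutive order
      for (int k = 0; k < 4; ++k) {
        std::cout << vertices[k] << " ";
      }
      std::cout << "angle: " << obb.angle << std::endl;
      return 0;
    }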
    // Search over all start points and angles for the rectangle with the
    // smallest area that still contains every object pixel.
    double min_area = DBL_MAX;
    double min_angle;
    cv::Point2f min_search_point;
    double min_x;
    double min_y;
    for (size_t i = 0; i < search_points_2d.size(); i++) {
      std::vector<double> angles;
      std::vector<double> max_x;
      std::vector<double> max_y;
      generateAngles(object_image, search_points_2d[i], angles,
                     max_x, max_y, model, plane);
      for (size_t j = 0; j < angles.size(); j++) {
        double area = drawAngle(min_area_rect_image, search_points_2d[i], angles[j],
                                max_x[j], max_y[j], model, plane, cv::Scalar(0, 255, 0));
        if (area < min_area) {
          min_area = area;
          min_x = max_x[j];
          min_y = max_y[j];
          min_angle = angles[j];
          min_search_point = search_points_2d[i];
        }
      }
    }
    drawAngle(min_area_rect_image, min_search_point, min_angle,
              min_x, min_y, model, plane, cv::Scalar(0, 255, 255));
    pub_min_area_rect_image_.publish(
      cv_bridge::CvImage(image_msg->header,
                         sensor_msgs::image_encodings::BGR8,
                         min_area_rect_image).toImageMsg());
  }
  void FindObjectOnPlane::generateAngles(
    const cv::Mat& blob_image, const cv::Point2f& test_point,
    std::vector<double>& angles,
    std::vector<double>& max_x,
    std::vector<double>& max_y,
    const image_geometry::PinholeCameraModel& model,
    const jsk_recognition_utils::Plane::Ptr& plane)
  {
    const double angle_step = 3;
    // Collect every non-zero pixel of the blob image.
    std::vector<cv::Point> indices;
    for (int j = 0; j < blob_image.rows; j++) {
      for (int i = 0; i < blob_image.cols; i++) {
        if (blob_image.at<uchar>(j, i) != 0) {
          indices.push_back(cv::Point(i, j));
        }
      }
    }
    // Sweep candidate orientations; for each angle build an image-plane frame
    // (ux, uy) at test_point and keep the orientation only if every blob pixel
    // has non-negative coordinates in that frame.
    for (double angle = -180; angle < 180; angle += angle_step) {
      Eigen::Vector2f ux(cos(angle / 180 * M_PI), sin(angle / 180 * M_PI));
      cv::Point2d uy_end = getUyEnd(
        test_point, cv::Point2d(test_point.x + ux[0], test_point.y + ux[1]),
        model, plane);
      Eigen::Vector2f uy(uy_end.x - test_point.x, uy_end.y - test_point.y);
      Eigen::Matrix2f A;
      A << ux[0], uy[0],
           ux[1], uy[1];
      bool excluded = false;
      double max_alpha = -DBL_MAX;
      double max_beta = -DBL_MAX;
      for (size_t i = 0; i < indices.size(); i++) {
        Eigen::Vector2f p_q
          = Eigen::Vector2f(indices[i].x, indices[i].y)
          - Eigen::Vector2f(test_point.x, test_point.y);
        Eigen::Vector2f a_b = A.inverse() * p_q;
        double alpha = a_b[0];
        double beta = a_b[1];
        if (alpha < 0 || beta < 0) {
          excluded = true;
          break;
        }
        if (alpha > max_alpha) {
          max_alpha = alpha;
        }
        if (beta > max_beta) {
          max_beta = beta;
        }
      }
      if (!excluded) {
        angles.push_back(angle);
        max_x.push_back(max_alpha);
        max_y.push_back(max_beta);
      }
    }
  }
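The inner solve above expresses each blob pixel in the frame spanned by ux and uy at the test point: with A = [ux uy], solving A * [alpha, beta]^T = p - t yields the pixel's coordinates along the two axes, and any negative component means the pixel falls outside the candidate quadrant. A minimal standalone sketch of that decomposition with made-up numbers, assuming Eigen:

    #include <Eigen/Dense>
    #include <iostream>

    int main()
    {
      Eigen::Vector2f ux(1.0f, 0.0f);    // first axis in pixel space
      Eigen::Vector2f uy(0.2f, 1.0f);    // second axis, generally not orthogonal
      Eigen::Vector2f t(100.0f, 50.0f);  // test point
      Eigen::Vector2f p(130.0f, 80.0f);  // a blob pixel

      Eigen::Matrix2f A;
      A << ux[0], uy[0],
           ux[1], uy[1];

      // alpha, beta are the pixel's coordinates in the (ux, uy) frame;
      // a negative component means the pixel lies outside the quadrant.
      Eigen::Vector2f ab = A.inverse() * (p - t);
      std::cout << "alpha=" << ab[0] << " beta=" << ab[1] << std::endl;
      return 0;
    }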
  Eigen::Vector3f FindObjectOnPlane::rayPlaneInteersect(
    const cv::Point3d& ray,
    const jsk_recognition_utils::Plane::Ptr& plane)
  {
    Eigen::Vector3f n = plane->getNormal();
    Eigen::Vector3f d(ray.x, ray.y, ray.z);
    double alpha = - plane->getD() / n.dot(d);
    return alpha * d;
  }
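rayPlaneInteersect substitutes the viewing ray x = alpha * d into the plane equation n.dot(x) + D = 0, giving alpha = -D / n.dot(d) and the intersection point alpha * d. A minimal numeric check with an illustrative plane and ray, assuming Eigen:

    #include <Eigen/Dense>
    #include <cassert>
    #include <cmath>

    int main()
    {
      Eigen::Vector3f n(0.0f, 0.0f, 1.0f);   // plane normal (illustrative)
      float D = -1.0f;                       // plane offset: n.dot(x) + D = 0, i.e. z = 1
      Eigen::Vector3f d(0.1f, -0.2f, 1.0f);  // viewing ray through the pixel

      float alpha = -D / n.dot(d);
      Eigen::Vector3f intersection = alpha * d;

      // The intersection must satisfy the plane equation.
      assert(std::abs(n.dot(intersection) + D) < 1e-6f);
      return 0;
    }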
  cv::Point2d FindObjectOnPlane::getUyEnd(
    const cv::Point2d& ux_start,
    const cv::Point2d& ux_end,
    const image_geometry::PinholeCameraModel& model,
    const jsk_recognition_utils::Plane::Ptr& plane)
  {
    // Lift both pixels onto the 3-D plane, build the in-plane axis orthogonal
    // to ux via the plane normal, and project its end point back to the image.
    Eigen::Vector3f ux_start_3d = rayPlaneInteersect(model.projectPixelTo3dRay(ux_start), plane);
    Eigen::Vector3f ux_end_3d = rayPlaneInteersect(model.projectPixelTo3dRay(ux_end), plane);
    Eigen::Vector3f ux_3d = ux_end_3d - ux_start_3d;
    Eigen::Vector3f normal = plane->getNormal();
    Eigen::Vector3f uy_3d = normal.cross(ux_3d).normalized();
    Eigen::Vector3f uy_end_3d = ux_start_3d + uy_3d;
    return model.project3dToPixel(
      cv::Point3d(uy_end_3d[0], uy_end_3d[1], uy_end_3d[2]));
  }
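Because uy_3d is the normalized cross product of the plane normal with ux_3d, it lies in the plane and is orthogonal to ux_3d there; projecting its end point back through the camera produces the image-space companion axis used by generateAngles, which is generally not perpendicular to ux in pixel coordinates. A small check with made-up vectors, assuming Eigen:

    #include <Eigen/Dense>
    #include <cassert>
    #include <cmath>

    int main()
    {
      Eigen::Vector3f normal(0.0f, 0.0f, 1.0f);  // plane normal (illustrative)
      Eigen::Vector3f ux_3d(1.0f, 0.5f, 0.0f);   // in-plane direction
      Eigen::Vector3f uy_3d = normal.cross(ux_3d).normalized();

      // uy_3d stays in the plane and is orthogonal to ux_3d.
      assert(std::abs(uy_3d.dot(normal)) < 1e-6f);
      assert(std::abs(uy_3d.dot(ux_3d)) < 1e-6f);
      return 0;
    }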
  double FindObjectOnPlane::drawAngle(
    cv::Mat& out_image, const cv::Point2f& test_point, const double angle,
    const double max_x, const double max_y,
    const image_geometry::PinholeCameraModel& model,
    const jsk_recognition_utils::Plane::Ptr& plane,
    cv::Scalar color)
  {
    Eigen::Vector2f ux(cos(angle / 180 * M_PI), sin(angle / 180 * M_PI));
    cv::Point2d uy_end = getUyEnd(
      test_point, cv::Point2d(test_point.x + ux[0], test_point.y + ux[1]),
      model, plane);
    Eigen::Vector2f uy(uy_end.x - test_point.x, uy_end.y - test_point.y);
    // Corners of the candidate rectangle spanned by ux * max_x and uy * max_y.
    Eigen::Vector2f to_point = ux * max_x + Eigen::Vector2f(test_point.x, test_point.y);
    Eigen::Vector2f to_point2 = uy * max_y + Eigen::Vector2f(test_point.x, test_point.y);
    cv::Point2f to_point_cv(to_point[0], to_point[1]);
    cv::Point2f to_point2_cv(to_point2[0], to_point2[1]);
    cv::Point2f to_point3_cv = to_point_cv + to_point2_cv - test_point;
    cv::line(out_image, test_point, to_point_cv, color, 4);
    cv::line(out_image, test_point, to_point2_cv, color, 4);
    cv::line(out_image, to_point_cv, to_point3_cv, color, 4);
    cv::line(out_image, to_point2_cv, to_point3_cv, color, 4);
    return max_x * max_y;
  }
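Note that the value returned by drawAngle is the product of the two extents in the (ux, uy) parameterization, not an area in pixels; if a pixel-space area were needed, the parallelogram spanned by ux * max_x and uy * max_y could be measured with a 2-D cross product, as in this illustrative sketch (parallelogramArea is a hypothetical helper, not part of the nodelet):

    #include <Eigen/Dense>
    #include <cmath>
    #include <iostream>

    // Area in pixels^2 of the parallelogram spanned by ux * max_x and uy * max_y.
    double parallelogramArea(const Eigen::Vector2f& ux, const Eigen::Vector2f& uy,
                             double max_x, double max_y)
    {
      Eigen::Vector2f a = ux * static_cast<float>(max_x);
      Eigen::Vector2f b = uy * static_cast<float>(max_y);
      return std::abs(a.x() * b.y() - a.y() * b.x());
    }

    int main()
    {
      std::cout << parallelogramArea(Eigen::Vector2f(1.0f, 0.0f),
                                     Eigen::Vector2f(0.2f, 1.0f), 30, 40) << std::endl;
      return 0;
    }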
  void FindObjectOnPlane::generateStartPoints(
    const cv::Point2f& point_2d,
    const image_geometry::PinholeCameraModel& model,
    const pcl::ModelCoefficients::Ptr& coefficients,
    std::vector<cv::Point3f>& search_points_3d,
    std::vector<cv::Point2f>& search_points_2d)
  {
    jsk_recognition_utils::Plane::Ptr plane
      (new jsk_recognition_utils::Plane(coefficients->values));
    cv::Point3d ray = model.projectPixelTo3dRay(point_2d);
    Eigen::Vector3f projected_point = rayPlaneInteersect(ray, plane);

    // Sample a regular grid of candidate start points around the projected
    // point, re-project each onto the plane, and keep both 3-D and 2-D forms.
    const double resolution_step = 0.01;
    const int resolution = 5;
    search_points_3d.clear();
    search_points_2d.clear();
    for (int i = - resolution; i < resolution; ++i) {
      for (int j = - resolution; j < resolution; ++j) {
        double x_diff = resolution_step * i;
        double y_diff = resolution_step * j;
        Eigen::Vector3f moved_point = projected_point + Eigen::Vector3f(x_diff, y_diff, 0);
        Eigen::Vector3f projected_moved_point;
        plane->project(moved_point, projected_moved_point);
        cv::Point3f projected_moved_point_cv(projected_moved_point[0],
                                             projected_moved_point[1],
                                             projected_moved_point[2]);
        search_points_3d.push_back(cv::Point3f(projected_moved_point_cv));
        cv::Point2d p2d = model.project3dToPixel(cv::Point3d(projected_moved_point_cv));
        search_points_2d.push_back(p2d);
      }
    }
  }
}
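generateStartPoints lifts the requested pixel onto the plane and then enumerates a (2 * resolution)^2 grid of candidate origins spaced resolution_step apart, re-projecting each point onto the plane and back into the image. A minimal sketch of the same grid enumeration, assuming Eigen and illustrative values for the constants:

    #include <Eigen/Dense>
    #include <vector>

    int main()
    {
      const double resolution_step = 0.01;       // grid spacing (illustrative)
      const int resolution = 5;                  // half-width in cells (illustrative)
      Eigen::Vector3f origin(0.0f, 0.0f, 1.0f);  // projected corner (illustrative)

      std::vector<Eigen::Vector3f> grid;
      for (int i = -resolution; i < resolution; ++i) {
        for (int j = -resolution; j < resolution; ++j) {
          grid.push_back(origin + Eigen::Vector3f(resolution_step * i,
                                                  resolution_step * j, 0.0f));
        }
      }
      // (2 * resolution)^2 = 100 candidate start points.
      return grid.size() == 100 ? 0 : 1;
    }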
PLUGINLIB_EXPORT_CLASS(jsk_pcl_ros::FindObjectOnPlane, nodelet::Nodelet);