#include "stereo_image_proc/processor.h"
#include <sensor_msgs/image_encodings.h>
#include <cmath>
#include <limits>

namespace stereo_image_proc {

bool StereoProcessor::process(const sensor_msgs::ImageConstPtr& left_raw,
                              const sensor_msgs::ImageConstPtr& right_raw,
                              const image_geometry::StereoCameraModel& model,
                              StereoImageSet& output, int flags) const
{
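  // Figure out which monocular outputs each side needs, then run the
  // monocular pipeline on the raw left and right images.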
  int left_flags = flags & LEFT_ALL;
  int right_flags = flags & RIGHT_ALL;
  if (flags & STEREO_ALL) {
    // Stereo outputs require the rectified left and right images
    left_flags |= LEFT_RECT;
    right_flags |= RIGHT_RECT;
  }
  if (flags & (POINT_CLOUD | POINT_CLOUD2)) {
    flags |= DISPARITY;
    // Point clouds also need the rectified color image for texturing
    left_flags |= LEFT_RECT_COLOR;
  }
  if (!mono_processor_.process(left_raw, model.left(), output.left, left_flags))
    return false;
  // The right-image flag bits mirror the left-image bits shifted up by 4,
  // so shift back down before handing them to the monocular processor.
  if (!mono_processor_.process(right_raw, model.right(), output.right, right_flags >> 4))
    return false;

  // Do block matching to produce the disparity image
  if (flags & DISPARITY) {
    processDisparity(output.left.rect, output.right.rect, model, output.disparity);
  }

  // Project the disparity image to a 3D point cloud (PointCloud)
  if (flags & POINT_CLOUD) {
    processPoints(output.disparity, output.left.rect_color, output.left.color_encoding, model, output.points);
  }

  // Project the disparity image to a 3D point cloud (PointCloud2)
  if (flags & POINT_CLOUD2) {
    processPoints2(output.disparity, output.left.rect_color, output.left.color_encoding, model, output.points2);
  }

  return true;
}

void StereoProcessor::processDisparity(const cv::Mat& left_rect, const cv::Mat& right_rect,
                                       const image_geometry::StereoCameraModel& model,
                                       stereo_msgs::DisparityImage& disparity) const
{
  // Block matching produces fixed-point disparities with 1/16 pixel resolution
  static const int DPP = 16; // disparities per pixel
  static const double inv_dpp = 1.0 / DPP;

  block_matcher_(left_rect, right_rect, disparity16_);
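
  // Fill in the DisparityImage message, converting the fixed-point result
  // to 32-bit float disparities.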
  sensor_msgs::Image& dimage = disparity.image;
  dimage.height = disparity16_.rows;
  dimage.width = disparity16_.cols;
  dimage.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
  dimage.step = dimage.width * sizeof(float);
  dimage.data.resize(dimage.step * dimage.height);
  cv::Mat_<float> dmat(dimage.height, dimage.width, (float*)&dimage.data[0], dimage.step);

  // Convert from fixed point to float, correcting for any x-offset between the
  // principal points: d = d_fp * inv_dpp - (cx_l - cx_r)
  disparity16_.convertTo(dmat, dmat.type(), inv_dpp, -(model.left().cx() - model.right().cx()));
  ROS_ASSERT(dmat.data == &dimage.data[0]);
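
  // Stereo parameters: focal length and baseline, which let consumers recover
  // depth from disparity as Z = f * T / d.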
  disparity.f = model.right().fx();
  disparity.T = model.baseline();

  // Disparity search range and smallest representable increment
  disparity.min_disparity = getMinDisparity();
  disparity.max_disparity = getMinDisparity() + getDisparityRange() - 1;
  disparity.delta_d = inv_dpp;
}

inline bool isValidPoint(const cv::Vec3f& pt)
{
  // Reject points with missing disparity (z set to MISSING_Z by the reprojection)
  // and points with zero disparity, which project to infinity.
  return pt[2] != image_geometry::StereoCameraModel::MISSING_Z && !std::isinf(pt[2]);
}
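
// Projects valid disparities to 3D and packs them into a sensor_msgs::PointCloud
// with per-point color ("rgb") and image coordinates ("u", "v") as extra channels.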

void StereoProcessor::processPoints(const stereo_msgs::DisparityImage& disparity,
                                    const cv::Mat& color, const std::string& encoding,
                                    const image_geometry::StereoCameraModel& model,
                                    sensor_msgs::PointCloud& points) const
{
  // Wrap the disparity data in a cv::Mat and reproject it to 3D
  const sensor_msgs::Image& dimage = disparity.image;
  const cv::Mat_<float> dmat(dimage.height, dimage.width, (float*)&dimage.data[0], dimage.step);
  model.projectDisparityImageTo3d(dmat, dense_points_, true);
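
  // (Re)initialize the output message: 3D positions plus channels for packed
  // color and the image coordinates of each valid point.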
  points.points.resize(0);
  points.channels.resize(3);
  points.channels[0].name = "rgb";
  points.channels[0].values.resize(0);
  points.channels[1].name = "u";
  points.channels[1].values.resize(0);
  points.channels[2].name = "v";
  points.channels[2].values.resize(0);

  for (int32_t u = 0; u < dense_points_.rows; ++u) {
    for (int32_t v = 0; v < dense_points_.cols; ++v) {
      if (isValidPoint(dense_points_(u,v))) {
        // x, y, z of the valid point
        geometry_msgs::Point32 pt;
        pt.x = dense_points_(u,v)[0];
        pt.y = dense_points_(u,v)[1];
        pt.z = dense_points_(u,v)[2];
        points.points.push_back(pt);
        // Image coordinates of the point (row index in "u", column index in "v")
        points.channels[1].values.push_back(u);
        points.channels[2].values.push_back(v);
      }
    }
  }
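
  // Fill the "rgb" channel: pack 8-bit R, G, B into the low 24 bits of an int32
  // and reinterpret those bits as a float, the packed-color convention commonly
  // expected by point cloud consumers such as RViz.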
  namespace enc = sensor_msgs::image_encodings;
  points.channels[0].values.reserve(points.points.size());
  if (encoding == enc::MONO8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v) {
        if (isValidPoint(dense_points_(u,v))) {
          uint8_t g = color.at<uint8_t>(u,v);
          int32_t rgb = (g << 16) | (g << 8) | g;
          points.channels[0].values.push_back(*(float*)(&rgb));
        }
      }
    }
  }
  else if (encoding == enc::RGB8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& rgb = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (rgb[0] << 16) | (rgb[1] << 8) | rgb[2];
          points.channels[0].values.push_back(*(float*)(&rgb_packed));
        }
      }
    }
  }
  else if (encoding == enc::BGR8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& bgr = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (bgr[2] << 16) | (bgr[1] << 8) | bgr[0];
          points.channels[0].values.push_back(*(float*)(&rgb_packed));
        }
      }
    }
  }
  else {
    ROS_WARN("Could not fill color channel of the point cloud, unrecognized encoding '%s'", encoding.c_str());
  }
}

void StereoProcessor::processPoints2(const stereo_msgs::DisparityImage& disparity,
                                     const cv::Mat& color, const std::string& encoding,
                                     const image_geometry::StereoCameraModel& model,
                                     sensor_msgs::PointCloud2& points) const
{
  // Wrap the disparity data in a cv::Mat and reproject it to 3D
  const sensor_msgs::Image& dimage = disparity.image;
  const cv::Mat_<float> dmat(dimage.height, dimage.width, (float*)&dimage.data[0], dimage.step);
  model.projectDisparityImageTo3d(dmat, dense_points_, true);
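
  // Describe the PointCloud2 layout: four FLOAT32 fields (x, y, z, rgb) packed
  // into a 16-byte point_step, one point per pixel of the disparity image.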
  points.height = dense_points_.rows;
  points.width = dense_points_.cols;
  points.fields.resize(4);
  points.fields[0].name = "x";
  points.fields[0].offset = 0;
  points.fields[0].count = 1;
  points.fields[0].datatype = sensor_msgs::PointField::FLOAT32;
  points.fields[1].name = "y";
  points.fields[1].offset = 4;
  points.fields[1].count = 1;
  points.fields[1].datatype = sensor_msgs::PointField::FLOAT32;
  points.fields[2].name = "z";
  points.fields[2].offset = 8;
  points.fields[2].count = 1;
  points.fields[2].datatype = sensor_msgs::PointField::FLOAT32;
  points.fields[3].name = "rgb";
  points.fields[3].offset = 12;
  points.fields[3].count = 1;
  points.fields[3].datatype = sensor_msgs::PointField::FLOAT32;

  points.point_step = 16;
  points.row_step = points.point_step * points.width;
  points.data.resize(points.row_step * points.height);
  points.is_dense = false;

  float bad_point = std::numeric_limits<float>::quiet_NaN();
  int i = 0;
  for (int32_t u = 0; u < dense_points_.rows; ++u) {
    for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
      if (isValidPoint(dense_points_(u,v))) {
        // Copy x, y, z into this point's slot
        memcpy(&points.data[i * points.point_step + 0], &dense_points_(u,v)[0], sizeof(float));
        memcpy(&points.data[i * points.point_step + 4], &dense_points_(u,v)[1], sizeof(float));
        memcpy(&points.data[i * points.point_step + 8], &dense_points_(u,v)[2], sizeof(float));
      }
      else {
        // Mark invalid points with NaNs (hence is_dense = false above)
        memcpy(&points.data[i * points.point_step + 0], &bad_point, sizeof(float));
        memcpy(&points.data[i * points.point_step + 4], &bad_point, sizeof(float));
        memcpy(&points.data[i * points.point_step + 8], &bad_point, sizeof(float));
      }
    }
  }
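
  // Second pass: fill the rgb field, advancing the same flat index i in step
  // with the (row, column) traversal used above.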
  namespace enc = sensor_msgs::image_encodings;
  i = 0;
  if (encoding == enc::MONO8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
        if (isValidPoint(dense_points_(u,v))) {
          uint8_t g = color.at<uint8_t>(u,v);
          int32_t rgb = (g << 16) | (g << 8) | g;
          memcpy(&points.data[i * points.point_step + 12], &rgb, sizeof(int32_t));
        }
        else {
          memcpy(&points.data[i * points.point_step + 12], &bad_point, sizeof(float));
        }
      }
    }
  }
  else if (encoding == enc::RGB8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& rgb = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (rgb[0] << 16) | (rgb[1] << 8) | rgb[2];
          memcpy(&points.data[i * points.point_step + 12], &rgb_packed, sizeof(int32_t));
        }
        else {
          memcpy(&points.data[i * points.point_step + 12], &bad_point, sizeof(float));
        }
      }
    }
  }
  else if (encoding == enc::BGR8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& bgr = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (bgr[2] << 16) | (bgr[1] << 8) | bgr[0];
          memcpy(&points.data[i * points.point_step + 12], &rgb_packed, sizeof(int32_t));
        }
        else {
          memcpy(&points.data[i * points.point_step + 12], &bad_point, sizeof(float));
        }
      }
    }
  }
  else {
    ROS_WARN("Could not fill color channel of the point cloud, unrecognized encoding '%s'", encoding.c_str());
  }
}

} // namespace stereo_image_proc