#include <ros/assert.h>
#include "stereo_image_proc/processor.h"
#include <sensor_msgs/image_encodings.h>
#include <cmath>
#include <limits>

namespace stereo_image_proc {

bool StereoProcessor::process(const sensor_msgs::ImageConstPtr& left_raw,
                              const sensor_msgs::ImageConstPtr& right_raw,
                              const image_geometry::StereoCameraModel& model,
                              StereoImageSet& output, int flags) const
{
  // Do monocular processing on the left and right images
  int left_flags = flags & LEFT_ALL;
  int right_flags = flags & RIGHT_ALL;
  if (flags & STEREO_ALL) {
    // Stereo processing requires the rectified images
    left_flags |= LEFT_RECT;
    right_flags |= RIGHT_RECT;
  }
  if (flags & (POINT_CLOUD | POINT_CLOUD2)) {
    flags |= DISPARITY;
    // Point clouds are colored from the rectified color image
    left_flags |= LEFT_RECT_COLOR;
  }
  if (!mono_processor_.process(left_raw, model.left(), output.left, left_flags))
    return false;
  // The mono processor expects its flags in the low bits, hence the shift for the right image
  if (!mono_processor_.process(right_raw, model.right(), output.right, right_flags >> 4))
    return false;

  // Do block matching to produce the disparity image
  if (flags & DISPARITY) {
    processDisparity(output.left.rect, output.right.rect, model, output.disparity);
  }

  // Project the disparity image to a 3D point cloud (PointCloud)
  if (flags & POINT_CLOUD) {
    processPoints(output.disparity, output.left.rect_color, output.left.color_encoding, model, output.points);
  }

  // Project the disparity image to a 3D point cloud (PointCloud2)
  if (flags & POINT_CLOUD2) {
    processPoints2(output.disparity, output.left.rect_color, output.left.color_encoding, model, output.points2);
  }

  return true;
}
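// Minimal usage sketch (hypothetical caller; assumes the flag constants are
// publicly exposed by StereoProcessor as declared in processor.h, and that a
// default-constructed StereoProcessor is acceptable):
//
//   image_geometry::StereoCameraModel model;
//   model.fromCameraInfo(left_info, right_info);
//   stereo_image_proc::StereoProcessor proc;
//   stereo_image_proc::StereoImageSet out;
//   int flags = stereo_image_proc::StereoProcessor::DISPARITY |
//               stereo_image_proc::StereoProcessor::POINT_CLOUD2;
//   if (proc.process(left_msg, right_msg, model, out, flags)) {
//     // out.disparity and out.points2 are now populated.
//   }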

void StereoProcessor::processDisparity(const cv::Mat& left_rect, const cv::Mat& right_rect,
                                       const image_geometry::StereoCameraModel& model,
                                       stereo_msgs::DisparityImage& disparity) const
{
  // Fixed-point disparity is 16 times the true value: d = d_fp / 16.0
  static const int DPP = 16; // disparities per pixel
  static const double inv_dpp = 1.0 / DPP;

  // The block matcher produces a 16-bit signed (fixed-point) disparity image
  block_matcher_(left_rect, right_rect, disparity16_);

  // Fill in the DisparityImage data, converting to 32-bit float
  sensor_msgs::Image& dimage = disparity.image;
  dimage.height = disparity16_.rows;
  dimage.width = disparity16_.cols;
  dimage.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
  dimage.step = dimage.width * sizeof(float);
  dimage.data.resize(dimage.step * dimage.height);
  cv::Mat_<float> dmat(dimage.height, dimage.width, (float*)&dimage.data[0], dimage.step);

  // Convert from fixed point to float, compensating for any x-offset between
  // the two principal points: d = d_fp * inv_dpp - (cx_l - cx_r)
  disparity16_.convertTo(dmat, dmat.type(), inv_dpp, -(model.left().cx() - model.right().cx()));
  ROS_ASSERT(dmat.data == &dimage.data[0]); // convertTo() must write in place, not reallocate

  // Stereo parameters
  disparity.f = model.right().fx();
  disparity.T = model.baseline();

  // Disparity search range
  disparity.min_disparity = getMinDisparity();
  disparity.max_disparity = getMinDisparity() + getDisparityRange() - 1;
  disparity.delta_d = inv_dpp;
}
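// Note: with f and T filled in above, consumers can recover depth from a
// disparity d via the usual stereo relation Z = f * T / d; values outside
// [min_disparity, max_disparity] indicate pixels with no valid match.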

inline bool isValidPoint(const cv::Vec3f& pt)
{
  // Check both for disparities explicitly marked as invalid (where OpenCV maps
  // pt[2] to MISSING_Z) and for zero disparities (point projected to infinity).
  return pt[2] != image_geometry::StereoCameraModel::MISSING_Z && !std::isinf(pt[2]);
}

void StereoProcessor::processPoints(const stereo_msgs::DisparityImage& disparity,
                                    const cv::Mat& color, const std::string& encoding,
                                    const image_geometry::StereoCameraModel& model,
                                    sensor_msgs::PointCloud& points) const
{
  // Calculate the dense 3D point cloud from the disparity image
  const sensor_msgs::Image& dimage = disparity.image;
  const cv::Mat_<float> dmat(dimage.height, dimage.width, (float*)&dimage.data[0], dimage.step);
  model.projectDisparityImageTo3d(dmat, dense_points_, true);

  // Fill in the sparse point cloud message, keeping only valid points
  points.points.resize(0);
  points.channels.resize(3);
  points.channels[0].name = "rgb";
  points.channels[0].values.resize(0);
  points.channels[1].name = "u";
  points.channels[1].values.resize(0);
  points.channels[2].name = "v";
  points.channels[2].values.resize(0);

  for (int32_t u = 0; u < dense_points_.rows; ++u) {
    for (int32_t v = 0; v < dense_points_.cols; ++v) {
      if (isValidPoint(dense_points_(u,v))) {
        // x,y,z
        geometry_msgs::Point32 pt;
        pt.x = dense_points_(u,v)[0];
        pt.y = dense_points_(u,v)[1];
        pt.z = dense_points_(u,v)[2];
        points.points.push_back(pt);
        // u,v
        points.channels[1].values.push_back(u);
        points.channels[2].values.push_back(v);
      }
    }
  }

  // Fill in the color channel
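  // The "rgb" channel follows the common ROS/PCL convention of packing the
  // color as 0x00RRGGBB in a 32-bit integer and reinterpreting its bit pattern
  // as a float, which is why the loops below type-pun through a pointer cast
  // instead of converting the value numerically.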
  namespace enc = sensor_msgs::image_encodings;
  points.channels[0].values.reserve(points.points.size());
  if (encoding == enc::MONO8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v) {
        if (isValidPoint(dense_points_(u,v))) {
          uint8_t g = color.at<uint8_t>(u,v);
          int32_t rgb = (g << 16) | (g << 8) | g;
          points.channels[0].values.push_back(*(float*)(&rgb));
        }
      }
    }
  }
  else if (encoding == enc::RGB8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& rgb = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (rgb[0] << 16) | (rgb[1] << 8) | rgb[2];
          points.channels[0].values.push_back(*(float*)(&rgb_packed));
        }
      }
    }
  }
  else if (encoding == enc::BGR8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& bgr = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (bgr[2] << 16) | (bgr[1] << 8) | bgr[0];
          points.channels[0].values.push_back(*(float*)(&rgb_packed));
        }
      }
    }
  }
  else {
    ROS_WARN("Could not fill color channel of the point cloud, unrecognized encoding '%s'", encoding.c_str());
  }
}

void StereoProcessor::processPoints2(const stereo_msgs::DisparityImage& disparity,
                                     const cv::Mat& color, const std::string& encoding,
                                     const image_geometry::StereoCameraModel& model,
                                     sensor_msgs::PointCloud2& points) const
{
  // Calculate the dense 3D point cloud from the disparity image
  const sensor_msgs::Image& dimage = disparity.image;
  const cv::Mat_<float> dmat(dimage.height, dimage.width, (float*)&dimage.data[0], dimage.step);
  model.projectDisparityImageTo3d(dmat, dense_points_, true);

  // Fill in the dense (organized) point cloud message: one point per pixel,
  // with fields x, y, z, rgb packed into a 16-byte point
  points.height = dense_points_.rows;
  points.width = dense_points_.cols;
  points.fields.resize(4);
  points.fields[0].name = "x";
  points.fields[0].offset = 0;
  points.fields[0].count = 1;
  points.fields[0].datatype = sensor_msgs::PointField::FLOAT32;
  points.fields[1].name = "y";
  points.fields[1].offset = 4;
  points.fields[1].count = 1;
  points.fields[1].datatype = sensor_msgs::PointField::FLOAT32;
  points.fields[2].name = "z";
  points.fields[2].offset = 8;
  points.fields[2].count = 1;
  points.fields[2].datatype = sensor_msgs::PointField::FLOAT32;
  points.fields[3].name = "rgb";
  points.fields[3].offset = 12;
  points.fields[3].count = 1;
  points.fields[3].datatype = sensor_msgs::PointField::FLOAT32;

  points.point_step = 16;
  points.row_step = points.point_step * points.width;
  points.data.resize(points.row_step * points.height);
  points.is_dense = false; // invalid points are padded with NaNs below
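  // Layout per point (point_step = 16 bytes): x@0, y@4, z@8, rgb@12.
  // A hypothetical consumer could read point i back out like this:
  //   float x, y, z; uint32_t rgb;
  //   const uint8_t* p = &points.data[i * points.point_step];
  //   memcpy(&x, p + 0, 4); memcpy(&y, p + 4, 4); memcpy(&z, p + 8, 4);
  //   memcpy(&rgb, p + 12, 4);  // 0x00RRGGBB, or NaN bits for invalid points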

  // Copy valid (x,y,z) values into the cloud; pad invalid points with NaNs
  float bad_point = std::numeric_limits<float>::quiet_NaN();
  int i = 0;
  for (int32_t u = 0; u < dense_points_.rows; ++u) {
    for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
      if (isValidPoint(dense_points_(u,v))) {
        // x,y,z
        memcpy(&points.data[i * points.point_step + 0], &dense_points_(u,v)[0], sizeof(float));
        memcpy(&points.data[i * points.point_step + 4], &dense_points_(u,v)[1], sizeof(float));
        memcpy(&points.data[i * points.point_step + 8], &dense_points_(u,v)[2], sizeof(float));
      }
      else {
        memcpy(&points.data[i * points.point_step + 0], &bad_point, sizeof(float));
        memcpy(&points.data[i * points.point_step + 4], &bad_point, sizeof(float));
        memcpy(&points.data[i * points.point_step + 8], &bad_point, sizeof(float));
      }
    }
  }

  // Fill in the rgb field, reusing the same row-major traversal
  namespace enc = sensor_msgs::image_encodings;
  i = 0;
  if (encoding == enc::MONO8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
        if (isValidPoint(dense_points_(u,v))) {
          uint8_t g = color.at<uint8_t>(u,v);
          int32_t rgb = (g << 16) | (g << 8) | g;
          memcpy(&points.data[i * points.point_step + 12], &rgb, sizeof(int32_t));
        }
        else {
          memcpy(&points.data[i * points.point_step + 12], &bad_point, sizeof(float));
        }
      }
    }
  }
  else if (encoding == enc::RGB8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& rgb = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (rgb[0] << 16) | (rgb[1] << 8) | rgb[2];
          memcpy(&points.data[i * points.point_step + 12], &rgb_packed, sizeof(int32_t));
        }
        else {
          memcpy(&points.data[i * points.point_step + 12], &bad_point, sizeof(float));
        }
      }
    }
  }
  else if (encoding == enc::BGR8) {
    for (int32_t u = 0; u < dense_points_.rows; ++u) {
      for (int32_t v = 0; v < dense_points_.cols; ++v, ++i) {
        if (isValidPoint(dense_points_(u,v))) {
          const cv::Vec3b& bgr = color.at<cv::Vec3b>(u,v);
          int32_t rgb_packed = (bgr[2] << 16) | (bgr[1] << 8) | bgr[0];
          memcpy(&points.data[i * points.point_step + 12], &rgb_packed, sizeof(int32_t));
        }
        else {
          memcpy(&points.data[i * points.point_step + 12], &bad_point, sizeof(float));
        }
      }
    }
  }
  else {
    ROS_WARN("Could not fill color channel of the point cloud, unrecognized encoding '%s'", encoding.c_str());
  }
}

} // namespace stereo_image_proc