nerian_sp1_node.cpp
/*******************************************************************************
 * Copyright (c) 2016 Nerian Vision Technologies
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *******************************************************************************/

#include <ros/ros.h>
#include <visiontransfer/asynctransfer.h>
#include <visiontransfer/reconstruct3d.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <opencv2/opencv.hpp>
#include <cv_bridge/cv_bridge.h>
#include <iostream>
#include <iomanip>
#include <nerian_sp1/StereoCameraInfo.h>
#include <boost/smart_ptr.hpp>
#include <colorcoder.h>

using namespace std;

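/**
 * A simple node that receives data from the SP1 stereo vision system
 * and forwards it to ROS. It publishes the rectified left camera image,
 * the disparity map, an optional point cloud and detailed stereo camera
 * information.
 */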
class Sp1Node {
public:
    Sp1Node(): frameNum(0) {
    }

    ~Sp1Node() {
    }

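    /**
     * Performs general initializations: reads all ROS parameters and
     * creates the publishers.
     */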
    void init() {
        ros::NodeHandle privateNh("~");

        // Read all ROS parameters
        if (!privateNh.getParam("point_cloud_intensity_channel", intensityChannel)) {
            intensityChannel = true;
        }

        if (!privateNh.getParam("color_code_disparity_map", colorCodeDispMap)) {
            colorCodeDispMap = true;
        }

        if (!privateNh.getParam("color_code_legend", colorCodeLegend)) {
            colorCodeLegend = false;
        }

        if (!privateNh.getParam("frame", frame)) {
            frame = "world";
        }

        if (!privateNh.getParam("remote_port", remotePort)) {
            remotePort = "7681";
        }

        if (!privateNh.getParam("remote_host", remoteHost)) {
            remoteHost = "0.0.0.0";
        }

        if (!privateNh.getParam("local_port", localPort)) {
            localPort = "7681";
        }

        if (!privateNh.getParam("local_host", localHost)) {
            localHost = "0.0.0.0";
        }

        if (!privateNh.getParam("use_tcp", useTcp)) {
            useTcp = false;
        }

        if (!privateNh.getParam("ros_coordinate_system", rosCoordinateSystem)) {
            rosCoordinateSystem = true;
        }

        if (!privateNh.getParam("calibration_file", calibFile)) {
            calibFile = "";
        }

        if (!privateNh.getParam("delay_execution", execDelay)) {
            execDelay = 0;
        }

        if (!privateNh.getParam("max_depth", maxDepth)) {
            maxDepth = -1;
        }

        // Apply an initial delay if configured
        ros::Duration(execDelay).sleep();

        // Create publishers
        disparityPublisher.reset(new ros::Publisher(nh.advertise<sensor_msgs::Image>(
            "/nerian_sp1/disparity_map", 5)));
        leftImagePublisher.reset(new ros::Publisher(nh.advertise<sensor_msgs::Image>(
            "/nerian_sp1/left_image", 5)));
        rightImagePublisher.reset(new ros::Publisher(nh.advertise<sensor_msgs::Image>(
            "/nerian_sp1/right_image", 5)));

        loadCameraCalibration();

        cameraInfoPublisher.reset(new ros::Publisher(nh.advertise<nerian_sp1::StereoCameraInfo>(
            "/nerian_sp1/stereo_camera_info", 1)));
        cloudPublisher.reset(new ros::Publisher(nh.advertise<sensor_msgs::PointCloud2>(
            "/nerian_sp1/point_cloud", 5)));
    }

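    /**
     * The main loop of this node: receives image pairs from the SP1 and
     * publishes the configured messages until ROS shuts down.
     */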
    int run() {
        try {
            ros::Time lastLogTime;
            int lastLogFrames = 0;

            AsyncTransfer asyncTransfer(useTcp ? ImageTransfer::TCP_CLIENT : ImageTransfer::UDP,
                remoteHost.c_str(), remotePort.c_str(), localHost.c_str(), localPort.c_str());

            while(ros::ok()) {
                // Receive image data
                ImagePair imagePair;
                if(!asyncTransfer.collectReceivedImagePair(imagePair, 0.5)) {
                    continue;
                }

                ros::Time stamp = ros::Time::now();

                // Publish the selected messages
                if(leftImagePublisher->getNumSubscribers() > 0) {
                    publishImageMsg(imagePair, stamp);
                }

                if(disparityPublisher->getNumSubscribers() > 0 || rightImagePublisher->getNumSubscribers() > 0) {
                    publishDispMapMsg(imagePair, stamp);
                }

                if(cloudPublisher->getNumSubscribers() > 0) {
                    if(recon3d == nullptr) {
                        // First initialize
                        initPointCloud();
                    }

                    publishPointCloudMsg(imagePair, stamp);
                }

                if(cameraInfoPublisher != NULL && cameraInfoPublisher->getNumSubscribers() > 0) {
                    publishCameraInfo(stamp, imagePair);
                }

                // Display some simple statistics
                frameNum++;
                if(stamp.sec != lastLogTime.sec) {
                    if(lastLogTime != ros::Time()) {
                        double dt = (stamp - lastLogTime).toSec();
                        double fps = (frameNum - lastLogFrames) / dt;
                        ROS_INFO("%.1f fps", fps);
                    }
                    lastLogFrames = frameNum;
                    lastLogTime = stamp;
                }
            }
            return 0;
        } catch(const std::exception& ex) {
            ROS_FATAL("Exception occurred: %s", ex.what());
            return 1;
        }
    }

private:
    // ROS related objects
    ros::NodeHandle nh;
    boost::scoped_ptr<ros::Publisher> cloudPublisher;
    boost::scoped_ptr<ros::Publisher> disparityPublisher;
    boost::scoped_ptr<ros::Publisher> leftImagePublisher;
    boost::scoped_ptr<ros::Publisher> rightImagePublisher;
    boost::scoped_ptr<ros::Publisher> cameraInfoPublisher;

    // Parameters
    bool intensityChannel;
    bool useTcp;
    bool colorCodeDispMap;
    bool colorCodeLegend;
    bool rosCoordinateSystem;
    std::string remotePort;
    std::string localPort;
    std::string frame;
    std::string remoteHost;
    std::string localHost;
    std::string calibFile;
    double execDelay;
    double maxDepth;

    // Other members
    int frameNum;
    boost::scoped_ptr<Reconstruct3D> recon3d;
    boost::scoped_ptr<ColorCoder> colCoder;
    cv::Mat_<cv::Vec3b> colDispMap;
    sensor_msgs::PointCloud2Ptr pointCloudMsg;
    cv::FileStorage calibStorage;
    nerian_sp1::StereoCameraInfoPtr camInfoMsg;
    ros::Time lastCamInfoPublish;

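    /**
     * Loads the camera calibration file if configured.
     */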
    void loadCameraCalibration() {
        if(calibFile == "" ) {
            ROS_WARN("No camera calibration file configured. Cannot publish detailed camera information!");
        } else {
            bool success = false;
            try {
                if (calibStorage.open(calibFile, cv::FileStorage::READ)) {
                    success = true;
                }
            } catch(...) {
            }

            if(!success) {
                ROS_WARN("Error reading calibration file: %s\n"
                    "Cannot publish detailed camera information!", calibFile.c_str());
            }
        }
    }

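    /**
     * Publishes a rectified left camera image.
     */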
    void publishImageMsg(const ImagePair& imagePair, ros::Time stamp) {
        cv_bridge::CvImage cvImg;
        cvImg.header.frame_id = frame;
        cvImg.header.stamp = stamp;
        cvImg.header.seq = imagePair.getSequenceNumber(); // Actually ROS will overwrite this

        cvImg.image = cv::Mat_<unsigned char>(imagePair.getHeight(),
            imagePair.getWidth(), imagePair.getPixelData(0), imagePair.getRowStride(0));
        sensor_msgs::ImagePtr msg = cvImg.toImageMsg();

        msg->encoding = "mono8";
        leftImagePublisher->publish(msg);
    }

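    /**
     * Publishes the disparity map as 16-bit grayscale image or color coded
     * RGB image.
     */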
    void publishDispMapMsg(const ImagePair& imagePair, ros::Time stamp) {
        cv_bridge::CvImage cvImg;
        cvImg.header.frame_id = frame;
        cvImg.header.stamp = stamp;
        cvImg.header.seq = imagePair.getSequenceNumber(); // Actually ROS will overwrite this

        bool format12Bit = (imagePair.getPixelFormat(1) == ImagePair::FORMAT_12_BIT);
        cv::Mat monoImg(imagePair.getHeight(), imagePair.getWidth(),
            format12Bit ? CV_16UC1 : CV_8UC1,
            imagePair.getPixelData(1), imagePair.getRowStride(1));
        string encoding = "";

        if(!colorCodeDispMap || !format12Bit) {
            cvImg.image = monoImg;
            encoding = (format12Bit ? "mono16": "mono8");
        } else {
            if(colCoder == NULL) {
                colCoder.reset(new ColorCoder(ColorCoder::COLOR_RED_BLUE_BGR, 0, 16*111, true, true));
                if(colorCodeLegend) {
                    // Create the legend
                    colDispMap = colCoder->createLegendBorder(monoImg.cols, monoImg.rows, 1.0/16.0);
                } else {
                    colDispMap = cv::Mat_<cv::Vec3b>(monoImg.rows, monoImg.cols);
                }
            }

            cv::Mat_<cv::Vec3b> dispSection = colDispMap(cv::Rect(0, 0, monoImg.cols, monoImg.rows));

            colCoder->codeImage(cv::Mat_<unsigned short>(monoImg), dispSection);
            cvImg.image = colDispMap;
            encoding = "bgr8";
        }

        sensor_msgs::ImagePtr msg = cvImg.toImageMsg();
        msg->encoding = encoding;

        if(disparityPublisher->getNumSubscribers() > 0 && format12Bit) {
            disparityPublisher->publish(msg);
        }
        if(rightImagePublisher->getNumSubscribers() > 0 && !format12Bit) {
            rightImagePublisher->publish(msg);
        }
    }

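    /**
     * Transforms the Q matrix to match the ROS coordinate system: the camera
     * z axis maps to the ROS x axis, the negated camera x axis to the ROS y
     * axis, and the negated camera y axis to the ROS z axis.
     */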
    void qMatrixToRosCoords(const float* src, float* dst) {
        dst[0] = src[8];   dst[1] = src[9];
        dst[2] = src[10];  dst[3] = src[11];

        dst[4] = -src[0];  dst[5] = -src[1];
        dst[6] = -src[2];  dst[7] = -src[3];

        dst[8] = -src[4];  dst[9] = -src[5];
        dst[10] = -src[6]; dst[11] = -src[7];

        dst[12] = src[12]; dst[13] = src[13];
        dst[14] = src[14]; dst[15] = src[15];
    }

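    /**
     * Reconstructs the 3D locations from the disparity map and publishes
     * them as a point cloud.
     */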
    void publishPointCloudMsg(ImagePair& imagePair, ros::Time stamp) {
        if(imagePair.getPixelFormat(1) != ImagePair::FORMAT_12_BIT) {
            return; // This is not a disparity map
        }

        // Transform Q-matrix if desired
        float qRos[16];
        if(rosCoordinateSystem) {
            qMatrixToRosCoords(imagePair.getQMatrix(), qRos);
            imagePair.setQMatrix(qRos);
        }

        // Get 3D points
        float* pointMap = nullptr;
        try {
            pointMap = recon3d->createPointMap(imagePair, 0);
        } catch(std::exception& ex) {
            cerr << "Error creating point cloud: " << ex.what() << endl;
            return;
        }

        // Create message object and set header
        pointCloudMsg->header.stamp = stamp;
        pointCloudMsg->header.frame_id = frame;
        pointCloudMsg->header.seq = imagePair.getSequenceNumber(); // Actually ROS will overwrite this

        // Copy 3D points
        if(pointCloudMsg->data.size() != imagePair.getWidth()*imagePair.getHeight()*4*sizeof(float)) {
            // Allocate buffer
            pointCloudMsg->data.resize(imagePair.getWidth()*imagePair.getHeight()*4*sizeof(float));

            // Set basic data
            pointCloudMsg->width = imagePair.getWidth();
            pointCloudMsg->height = imagePair.getHeight();
            pointCloudMsg->is_bigendian = false;
            pointCloudMsg->point_step = 4*sizeof(float);
            pointCloudMsg->row_step = imagePair.getWidth() * pointCloudMsg->point_step;
            pointCloudMsg->is_dense = false;
        }

        if(maxDepth < 0) {
            // Just copy everything
            memcpy(&pointCloudMsg->data[0], pointMap,
                imagePair.getWidth()*imagePair.getHeight()*4*sizeof(float));
        } else {
            // Only copy points up to the maximum depth
            if(rosCoordinateSystem) {
                copyPointCloudClamped<0>(pointMap, reinterpret_cast<float*>(&pointCloudMsg->data[0]),
                    imagePair.getWidth()*imagePair.getHeight());
            } else {
                copyPointCloudClamped<2>(pointMap, reinterpret_cast<float*>(&pointCloudMsg->data[0]),
                    imagePair.getWidth()*imagePair.getHeight());
            }
        }

        // Copy intensity values
        if(intensityChannel) {
            // Get pointers to the beginning and end of the point cloud
            unsigned char* cloudStart = &pointCloudMsg->data[0];
            unsigned char* cloudEnd = &pointCloudMsg->data[0]
                + imagePair.getWidth()*imagePair.getHeight()*4*sizeof(float);

            // Get pointer to the current pixel and end of current row
            unsigned char* imagePtr = imagePair.getPixelData(0);
            unsigned char* rowEndPtr = imagePtr + imagePair.getWidth();
            int rowIncrement = imagePair.getRowStride(0) - imagePair.getWidth();

            for(unsigned char* cloudPtr = cloudStart + 3*sizeof(float);
                    cloudPtr < cloudEnd; cloudPtr += 4*sizeof(float)) {
                *cloudPtr = *imagePtr;

                imagePtr++;
                if(imagePtr == rowEndPtr) {
                    // Progress to the next row
                    imagePtr += rowIncrement;
                    rowEndPtr = imagePtr + imagePair.getWidth();
                }
            }
        }

        cloudPublisher->publish(pointCloudMsg);
    }

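    /**
     * Copies all points in a point cloud that have a depth smaller than
     * maxDepth. Other points are set to NaN.
     */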
    template <int coord> void copyPointCloudClamped(float* src, float* dst, int size) {
        // Only copy points that are closer than the maximum depth
        float* endPtr = src + 4*size;
        for(float *srcPtr = src, *dstPtr = dst; srcPtr < endPtr; srcPtr+=4, dstPtr+=4) {
            if(srcPtr[coord] > maxDepth) {
                dstPtr[0] = std::numeric_limits<float>::quiet_NaN();
                dstPtr[1] = std::numeric_limits<float>::quiet_NaN();
                dstPtr[2] = std::numeric_limits<float>::quiet_NaN();
            } else {
                dstPtr[0] = srcPtr[0];
                dstPtr[1] = srcPtr[1];
                dstPtr[2] = srcPtr[2];
            }
        }
    }

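    /**
     * Performs all necessary initializations for point cloud publishing.
     */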
    void initPointCloud() {
        ros::NodeHandle privateNh("~");

        // Initialize 3D reconstruction class
        recon3d.reset(new Reconstruct3D);

        // Initialize message
        pointCloudMsg.reset(new sensor_msgs::PointCloud2);

        // Set channel information.
        sensor_msgs::PointField fieldX;
        fieldX.name = "x";
        fieldX.offset = 0;
        fieldX.datatype = sensor_msgs::PointField::FLOAT32;
        fieldX.count = 1;
        pointCloudMsg->fields.push_back(fieldX);

        sensor_msgs::PointField fieldY;
        fieldY.name = "y";
        fieldY.offset = sizeof(float);
        fieldY.datatype = sensor_msgs::PointField::FLOAT32;
        fieldY.count = 1;
        pointCloudMsg->fields.push_back(fieldY);

        sensor_msgs::PointField fieldZ;
        fieldZ.name = "z";
        fieldZ.offset = 2*sizeof(float);
        fieldZ.datatype = sensor_msgs::PointField::FLOAT32;
        fieldZ.count = 1;
        pointCloudMsg->fields.push_back(fieldZ);

        if(intensityChannel) {
            sensor_msgs::PointField fieldI;
            fieldI.name = "intensity";
            fieldI.offset = 3*sizeof(float);
            fieldI.datatype = sensor_msgs::PointField::UINT8;
            fieldI.count = 1;
            pointCloudMsg->fields.push_back(fieldI);
        }
    }

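    /**
     * Publishes the camera info once per second.
     */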
    void publishCameraInfo(ros::Time stamp, const ImagePair& imagePair) {
        if(camInfoMsg == NULL) {
            // Initialize the camera info structure
            camInfoMsg.reset(new nerian_sp1::StereoCameraInfo);

            camInfoMsg->header.frame_id = frame;
            camInfoMsg->header.seq = imagePair.getSequenceNumber(); // Actually ROS will overwrite this

            if(calibFile != "") {
                std::vector<int> sizeVec;
                calibStorage["size"] >> sizeVec;
                if(sizeVec.size() != 2) {
                    throw std::runtime_error("Calibration file format error!");
                }

                camInfoMsg->left_info.header = camInfoMsg->header;
                camInfoMsg->left_info.width = sizeVec[0];
                camInfoMsg->left_info.height = sizeVec[1];
                camInfoMsg->left_info.distortion_model = "plumb_bob";
                calibStorage["D1"] >> camInfoMsg->left_info.D;
                readCalibrationArray("M1", camInfoMsg->left_info.K);
                readCalibrationArray("R1", camInfoMsg->left_info.R);
                readCalibrationArray("P1", camInfoMsg->left_info.P);
                camInfoMsg->left_info.binning_x = 1;
                camInfoMsg->left_info.binning_y = 1;
                camInfoMsg->left_info.roi.do_rectify = false;
                camInfoMsg->left_info.roi.height = 0;
                camInfoMsg->left_info.roi.width = 0;
                camInfoMsg->left_info.roi.x_offset = 0;
                camInfoMsg->left_info.roi.y_offset = 0;

                camInfoMsg->right_info.header = camInfoMsg->header;
                camInfoMsg->right_info.width = sizeVec[0];
                camInfoMsg->right_info.height = sizeVec[1];
                camInfoMsg->right_info.distortion_model = "plumb_bob";
                calibStorage["D2"] >> camInfoMsg->right_info.D;
                readCalibrationArray("M2", camInfoMsg->right_info.K);
                readCalibrationArray("R2", camInfoMsg->right_info.R);
                readCalibrationArray("P2", camInfoMsg->right_info.P);
                camInfoMsg->right_info.binning_x = 1;
                camInfoMsg->right_info.binning_y = 1;
                camInfoMsg->right_info.roi.do_rectify = false;
                camInfoMsg->right_info.roi.height = 0;
                camInfoMsg->right_info.roi.width = 0;
                camInfoMsg->right_info.roi.x_offset = 0;
                camInfoMsg->right_info.roi.y_offset = 0;

                readCalibrationArray("Q", camInfoMsg->Q);
                readCalibrationArray("T", camInfoMsg->T_left_right);
                readCalibrationArray("R", camInfoMsg->R_left_right);
            }
        }

        double dt = (stamp - lastCamInfoPublish).toSec();
        if(dt > 1.0) {
            // Rather use the Q-matrix that we received over the network if it is valid
            const float* qMatrix = imagePair.getQMatrix();
            if(qMatrix[0] != 0.0) {
                for(int i=0; i<16; i++) {
                    camInfoMsg->Q[i] = static_cast<double>(qMatrix[i]);
                }
            }

            // Publish once per second
            camInfoMsg->header.stamp = stamp;
            camInfoMsg->left_info.header.stamp = stamp;
            camInfoMsg->right_info.header.stamp = stamp;
            cameraInfoPublisher->publish(camInfoMsg);

            lastCamInfoPublish = stamp;
        }
    }

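    /**
     * Reads a vector from the calibration file into a boost::array.
     */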
    template<class T>
    void readCalibrationArray(const char* key, T& dest) {
        std::vector<double> doubleVec;
        calibStorage[key] >> doubleVec;

        if(doubleVec.size() != dest.size()) {
            throw std::runtime_error("Calibration file format error!");
        }

        std::copy(doubleVec.begin(), doubleVec.end(), dest.begin());
    }
};

int main(int argc, char** argv) {
    try {
        ros::init(argc, argv, "nerian_sp1");
        Sp1Node node;
        node.init();
        return node.run();
    } catch(const std::exception& ex) {
        ROS_FATAL("Exception occurred: %s", ex.what());
        return 1;
    }
}