mynteye_ros_wrapper_nodelet.cpp
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/opencv.hpp>
#include <sys/stat.h>

#include <cstring>
#include <fstream>

#include <ros/ros.h>
#include <nodelet/nodelet.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/image_encodings.h>
#include <sensor_msgs/distortion_models.h>
#include <sensor_msgs/Imu.h>
#include <image_transport/image_transport.h>

#include <boost/thread/thread.hpp>
#include <boost/make_shared.hpp>

#include "camera.h"
#include "utility.h"
#include "calibration_parameters.h"
#include "init_parameters.h"

// #define VERBOSE
// #define VERBOSE_TO_FILE
namespace mynt_wrapper {
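// CompatDistCoeffs: OpenCV 2 only accepts distortion coefficient vectors of
// length 4, 5 or 8, so trim the first row of distCoeffs to the largest
// accepted length before it is passed to cv::stereoRectify.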
#ifdef USE_OPENCV2
void CompatDistCoeffs(cv::Mat &distCoeffs) {
  int w = distCoeffs.cols;
  if (w >= 8) {
    w = 8;
  } else if (w >= 5) {
    w = 5;
  } else if (w >= 4) {
    w = 4;
  } else {
    CV_Assert(false);
  }
  distCoeffs = distCoeffs.row(0).colRange(0, w);
}
#endif
class MYNTWrapperNodelet : public nodelet::Nodelet {
  // Node handles and the worker thread that polls the device.
  ros::NodeHandle nh;
  ros::NodeHandle nh_ns;
  boost::shared_ptr<boost::thread> device_poll_thread;

  mynteye::Camera cam;
  mynteye::Resolution resolution;

  // Image, camera-info and IMU publishers.
  image_transport::Publisher pub_left;
  image_transport::Publisher pub_raw_left;
  image_transport::Publisher pub_right;
  image_transport::Publisher pub_raw_right;
  image_transport::Publisher pub_depth;
  ros::Publisher pub_left_cam_info;
  ros::Publisher pub_right_cam_info;
  ros::Publisher pub_imu;

  std::string right_frame_id;
  std::string left_frame_id;
  std::string depth_frame_id;
  std::string imu_frame_id;

  int device_name;

  bool enable_cpu;
  bool enable_depth;

#ifdef VERBOSE_TO_FILE
  std::ofstream file_imus;
#endif
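  // imageToROSmsg copies a cv::Mat into a sensor_msgs/Image: it fills the
  // header, probes host endianness with an int, and copies row by row when
  // the matrix is not stored contiguously.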
  sensor_msgs::ImagePtr imageToROSmsg(const cv::Mat &img,
      const std::string &encodingType,
      const std::string &frameId,
      const ros::Time &stamp) {
    sensor_msgs::ImagePtr ptr = boost::make_shared<sensor_msgs::Image>();
    sensor_msgs::Image &imgMessage = *ptr;
    imgMessage.header.stamp = stamp;
    imgMessage.header.frame_id = frameId;
    imgMessage.height = img.rows;
    imgMessage.width = img.cols;
    imgMessage.encoding = encodingType;
    int num = 1;
    imgMessage.is_bigendian = !(*(char *) &num == 1);
    imgMessage.step = img.cols * img.elemSize();
    size_t size = imgMessage.step * img.rows;
    imgMessage.data.resize(size);

    if (img.isContinuous()) {
      memcpy((char *) (&imgMessage.data[0]), img.data, size);
    } else {
      uchar *opencvData = img.data;
      uchar *rosData = (uchar *) (&imgMessage.data[0]);
      for (unsigned int i = 0; i < img.rows; i++) {
        memcpy(rosData, opencvData, imgMessage.step);
        rosData += imgMessage.step;
        opencvData += img.step;
      }
    }

    return ptr;
  }
  void publishImage(const cv::Mat &img,
      const image_transport::Publisher &pub_img,
      const std::string &img_frame_id,
      const ros::Time &stamp) {
    pub_img.publish(imageToROSmsg(img, sensor_msgs::image_encodings::MONO8, img_frame_id, stamp));
  }
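  // publishDepth republishes the depth map as a single-channel 16-bit image
  // (TYPE_16UC1); the convertTo assumes the SDK's depth values fit in 16 bits.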
  void publishDepth(cv::Mat &depth, const ros::Time &stamp) {
    depth.convertTo(depth, CV_16UC1);
    pub_depth.publish(imageToROSmsg(depth, sensor_msgs::image_encodings::TYPE_16UC1, depth_frame_id, stamp));
  }
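  // publishIMU converts one SDK sample into sensor_msgs/Imu. The scale factors
  // suggest the accelerometer reports g (multiplied by 9.8 to get m/s^2) and
  // the gyroscope deg/s (divided by ~57.296 = 180/pi to get rad/s). The
  // diagonal covariances are fixed estimates and the orientation is left unset.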
  void publishIMU(const mynteye::IMUData &imudata, const ros::Time &stamp) {
    sensor_msgs::Imu msg;

    msg.header.stamp = stamp;
    msg.header.frame_id = imu_frame_id;

    msg.linear_acceleration.x = imudata.accel_x * 9.8;
    msg.linear_acceleration.y = imudata.accel_y * 9.8;
    msg.linear_acceleration.z = imudata.accel_z * 9.8;

    msg.linear_acceleration_covariance[0] = 0.04;
    msg.linear_acceleration_covariance[1] = 0;
    msg.linear_acceleration_covariance[2] = 0;

    msg.linear_acceleration_covariance[3] = 0;
    msg.linear_acceleration_covariance[4] = 0.04;
    msg.linear_acceleration_covariance[5] = 0;

    msg.linear_acceleration_covariance[6] = 0;
    msg.linear_acceleration_covariance[7] = 0;
    msg.linear_acceleration_covariance[8] = 0.04;

    msg.angular_velocity.x = imudata.gyro_x / 57.2956;
    msg.angular_velocity.y = imudata.gyro_y / 57.2956;
    msg.angular_velocity.z = imudata.gyro_z / 57.2956;

    msg.angular_velocity_covariance[0] = 0.02;
    msg.angular_velocity_covariance[1] = 0;
    msg.angular_velocity_covariance[2] = 0;

    msg.angular_velocity_covariance[3] = 0;
    msg.angular_velocity_covariance[4] = 0.02;
    msg.angular_velocity_covariance[5] = 0;

    msg.angular_velocity_covariance[6] = 0;
    msg.angular_velocity_covariance[7] = 0;
    msg.angular_velocity_covariance[8] = 0.02;

    pub_imu.publish(msg);
  }
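  // publishCamInfo stamps the cached CameraInfo message and publishes it,
  // filling header.seq from a static counter.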
  void publishCamInfo(const sensor_msgs::CameraInfoPtr &cam_info_msg,
      const ros::Publisher &pub_cam_info,
      const ros::Time &stamp) {
    static int seq = 0;
    cam_info_msg->header.stamp = stamp;
    cam_info_msg->header.seq = seq;
    pub_cam_info.publish(cam_info_msg);
    ++seq;
  }
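  // fillCamInfo builds the left/right CameraInfo from the device calibration:
  // plumb_bob distortion and the intrinsic matrices, then cv::stereoRectify
  // computes the rectification (R1/R2) and projection (P1/P2) matrices that
  // overwrite R and fill P.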
  void fillCamInfo(const mynteye::Resolution &resolution,
      mynteye::CalibrationParameters *calibration_parameters,
      const sensor_msgs::CameraInfoPtr &left_cam_info_msg,
      const sensor_msgs::CameraInfoPtr &right_cam_info_msg,
      const std::string &left_frame_id,
      const std::string &right_frame_id) {
    int width = resolution.width;
    int height = resolution.height;

    left_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
    right_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;

    left_cam_info_msg->D.resize(5);
    right_cam_info_msg->D.resize(5);

    left_cam_info_msg->D[0] = calibration_parameters->D1.at<double>(0, 0);
    right_cam_info_msg->D[0] = calibration_parameters->D2.at<double>(0, 0);

    left_cam_info_msg->D[1] = calibration_parameters->D1.at<double>(0, 1);
    right_cam_info_msg->D[1] = calibration_parameters->D2.at<double>(0, 1);

    left_cam_info_msg->D[2] = calibration_parameters->D1.at<double>(0, 7);
    right_cam_info_msg->D[2] = calibration_parameters->D2.at<double>(0, 7);

    left_cam_info_msg->D[3] = 0.0;
    right_cam_info_msg->D[3] = 0.0;

    left_cam_info_msg->D[4] = 0.0;
    right_cam_info_msg->D[4] = 0.0;

    for (int i = 0; i < 3; i++) {
      for (int j = 0; j < 3; j++) {
        left_cam_info_msg->K[i * 3 + j] = calibration_parameters->M1.at<double>(i, j);
        right_cam_info_msg->K[i * 3 + j] = calibration_parameters->M2.at<double>(i, j);

        left_cam_info_msg->R[i * 3 + j] = calibration_parameters->R.at<double>(i, j);
        right_cam_info_msg->R[i * 3 + j] = calibration_parameters->R.at<double>(i, j);
      }
    }

    cv::Size img_size(width, height);
    cv::Mat R1, R2, P1, P2, Q;
    cv::Rect leftROI, rightROI;
#ifdef USE_OPENCV2
    CompatDistCoeffs(calibration_parameters->D1);
    CompatDistCoeffs(calibration_parameters->D2);
#endif
    cv::stereoRectify(calibration_parameters->M1, calibration_parameters->D1,
        calibration_parameters->M2, calibration_parameters->D2, img_size,
        calibration_parameters->R, calibration_parameters->T, R1, R2, P1, P2, Q,
        cv::CALIB_ZERO_DISPARITY, 0, img_size, &leftROI, &rightROI);

    for (int i = 0; i < R1.rows; i++) {
      for (int j = 0; j < R1.cols; j++) {
        left_cam_info_msg->R[i * R1.cols + j] = R1.at<double>(i, j);
        right_cam_info_msg->R[i * R1.cols + j] = R2.at<double>(i, j);
      }
    }
    for (int i = 0; i < P1.rows; i++) {
      for (int j = 0; j < P1.cols; j++) {
        left_cam_info_msg->P[i * P1.cols + j] = P1.at<double>(i, j);
        right_cam_info_msg->P[i * P1.cols + j] = P2.at<double>(i, j);
      }
    }
    left_cam_info_msg->width = right_cam_info_msg->width = width;
    left_cam_info_msg->height = right_cam_info_msg->height = height;
    left_cam_info_msg->header.frame_id = left_frame_id;
    right_cam_info_msg->header.frame_id = right_frame_id;
  }
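  // getImgStamp / getIMUStamp map device timestamps to ROS time: the first
  // sample latches ros::Time::now() as a baseline, and later samples add the
  // device-tick delta, each tick apparently worth 0.1 ms (the 0.0001 factor).
  // Passing reset clears the baseline so it is re-latched on the next sample.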
  ros::Time getImgStamp(bool reset = false) {
    static std::uint32_t img_time_beg = -1;
    static double img_ros_time_beg;
    if (reset) {
      img_time_beg = -1;
      return ros::Time::now();
    }
    if (img_time_beg == -1) {
      img_time_beg = cam.GetTimestamp();
      img_ros_time_beg = ros::Time::now().toSec();
    }
    return ros::Time(img_ros_time_beg + (cam.GetTimestamp() - img_time_beg) * 0.0001f);
  }

  ros::Time getIMUStamp(mynteye::IMUData *imudata, bool reset = false) {
    static std::uint32_t imu_time_beg = -1;
    static double imu_ros_time_beg;
    if (reset) {
      imu_time_beg = -1;
      return ros::Time::now();
    }
    if (imu_time_beg == -1) {
      imu_time_beg = imudata->time;
      imu_ros_time_beg = ros::Time::now().toSec();
    }
    return ros::Time(imu_ros_time_beg + (imudata->time - imu_time_beg) * 0.0001f);
  }
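  // device_poll runs on the worker thread: it opens the device, optionally
  // enables the depth-map feature, builds CameraInfo from the calibration,
  // then grabs frames in a loop. Images, depth and IMU samples are retrieved
  // and published only while their topics have subscribers, and the timestamp
  // baselines are reset whenever a stream goes idle.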
  void device_poll() {
    using namespace mynteye;

    if (enable_cpu) {
      // Run on the CPU instead of the GPU.
      cam.SetMode(Mode::MODE_CPU);
    } else {
      // Otherwise, let the SDK auto detect.
    }

    resolution = cam.GetResolution();

    InitParameters params(std::to_string(device_name));
    cam.Open(params);

    if (!cam.IsOpened()) {
      std::cerr << "Error: Open camera failed" << std::endl;
      return;
    }

    if (enable_depth) {
      cam.ActivateDepthMapFeature();
    }

    CalibrationParameters *calib_params = nullptr;
    calib_params = new CalibrationParameters(cam.GetCalibrationParameters());
    cv::Mat img_left, img_right, depthmap, leftImRGB, rightImRGB;

    sensor_msgs::CameraInfoPtr left_cam_info_msg(new sensor_msgs::CameraInfo());
    sensor_msgs::CameraInfoPtr right_cam_info_msg(new sensor_msgs::CameraInfo());
    //sensor_msgs::CameraInfoPtr depth_cam_info_msg(new sensor_msgs::CameraInfo());
    fillCamInfo(resolution, calib_params, left_cam_info_msg, right_cam_info_msg, left_frame_id, right_frame_id);

    std::vector<IMUData> imudatas;
    std::uint32_t timestamp = 0;

#ifdef VERBOSE
    std::uint64_t img_count = 0;
    std::uint64_t imu_count = 0;
    //double loop_beg = ros::Time::now().toSec();

#ifdef VERBOSE_TO_FILE
    std::uint64_t imu_get_count = 0;
    double imu_get_beg;
#endif
#endif

    while (nh_ns.ok()) {
      if (cam.Grab() != ErrorCode::SUCCESS) continue;

      int left_SubNumber = pub_left.getNumSubscribers();
      int left_raw_SubNumber = pub_raw_left.getNumSubscribers();
      int right_SubNumber = pub_right.getNumSubscribers();
      int right_raw_SubNumber = pub_raw_right.getNumSubscribers();
      int depth_SubNumber = pub_depth.getNumSubscribers();
      int imu_SubNumber = pub_imu.getNumSubscribers();
      bool runLoop = (left_SubNumber + left_raw_SubNumber + right_SubNumber +
          right_raw_SubNumber + depth_SubNumber + imu_SubNumber) > 0;
      if (runLoop) {
        bool img_get = false;
        if (left_SubNumber > 0 &&
            cam.RetrieveImage(leftImRGB, View::VIEW_LEFT) == ErrorCode::SUCCESS) {
          publishCamInfo(left_cam_info_msg, pub_left_cam_info, getImgStamp());
          publishImage(leftImRGB, pub_left, left_frame_id, getImgStamp());
          img_get = true;
        }
        if (left_raw_SubNumber > 0 &&
            cam.RetrieveImage(img_left, View::VIEW_LEFT_UNRECTIFIED) == ErrorCode::SUCCESS) {
          publishCamInfo(left_cam_info_msg, pub_left_cam_info, getImgStamp());
          publishImage(img_left, pub_raw_left, left_frame_id, getImgStamp());
          img_get = true;
        }
        if (right_SubNumber > 0 &&
            cam.RetrieveImage(rightImRGB, View::VIEW_RIGHT) == ErrorCode::SUCCESS) {
          publishCamInfo(right_cam_info_msg, pub_right_cam_info, getImgStamp());
          publishImage(rightImRGB, pub_right, right_frame_id, getImgStamp());
          img_get = true;
        }
        if (right_raw_SubNumber > 0 &&
            cam.RetrieveImage(img_right, View::VIEW_RIGHT_UNRECTIFIED) == ErrorCode::SUCCESS) {
          publishCamInfo(right_cam_info_msg, pub_right_cam_info, getImgStamp());
          publishImage(img_right, pub_raw_right, right_frame_id, getImgStamp());
          img_get = true;
        }
        if (enable_depth && depth_SubNumber > 0) {
          if (cam.RetrieveImage(depthmap, View::VIEW_DEPTH_MAP) == ErrorCode::SUCCESS) {
            publishDepth(depthmap, getImgStamp());
          }
        }
        if (!img_get) {
          getImgStamp(true);  // reset
        }

#ifdef VERBOSE
        if (img_get) {
          ++img_count;
          std::cout << "Img count: " << img_count << std::endl;
          std::cout << " time: " << cam.GetTimestamp()
              << ", stamp: " << getImgStamp() << std::endl
              << std::endl;
        }
#endif

        if (imu_SubNumber > 0) {
          if (cam.RetrieveIMUData(imudatas, timestamp) == ErrorCode::SUCCESS && !imudatas.empty()) {
            size_t size = imudatas.size();

#ifdef VERBOSE
            imu_count += size;
            std::cout << "IMU count: " << imu_count << std::endl;
#ifdef VERBOSE_TO_FILE
            if (imu_get_count == 0) {
              imu_get_beg = ros::Time::now().toSec();
              // ~/.ros/imus_publish.txt
              file_imus = std::ofstream("imus_publish.txt", std::ios::out);
            }
            file_imus << (++imu_get_count) << ", " << size << " IMUs, " << imu_count << " in total." << std::endl;
#endif
#endif

            for (size_t i = 0, n = size - 1; i <= n; ++i) {
              auto &imudata = imudatas[i];
              ros::Time imu_ros_time = getIMUStamp(&imudata);

#ifdef VERBOSE
              std::cout << " IMU[" << i << "]"
                  << " accel(" << imudata.accel_x << "," << imudata.accel_y << "," << imudata.accel_z << ")"
                  << ", gyro(" << imudata.gyro_x << "," << imudata.gyro_y << "," << imudata.gyro_z << ")"
                  << std::endl;
              std::cout << " time: " << imudata.time
                  << ", stamp: " << imu_ros_time
                  << ", time_offset: " << imudata.time_offset << std::endl;
#ifdef VERBOSE_TO_FILE
              file_imus << "IMU[" << i << "] stamp: " << std::fixed << imu_ros_time
                  << ", accel(" << imudata.accel_x << "," << imudata.accel_y << "," << imudata.accel_z << ")"
                  << ", gyro(" << imudata.gyro_x << "," << imudata.gyro_y << "," << imudata.gyro_z << ")"
                  << std::endl;
#endif
#endif

              if (imu_SubNumber > 0) publishIMU(imudata, imu_ros_time);
              // Sleep 1 ms, otherwise publishing may drop some IMU samples.
              ros::Duration(0.001).sleep();
            }
#ifdef VERBOSE
            std::cout << std::endl;
#ifdef VERBOSE_TO_FILE
            file_imus << std::endl;
#endif
#endif
          } else {
            getIMUStamp(nullptr, true);  // reset
          }
        } else {
          getIMUStamp(nullptr, true);  // reset
        }

        // Sleep to control the camera rate.
        //loop_rate.sleep();
      }

#ifdef VERBOSE
#ifdef VERBOSE_TO_FILE
      if (imu_SubNumber <= 0 && imu_get_count > 0) {
        double imu_get_end = ros::Time::now().toSec();
        double elapsed = imu_get_end - imu_get_beg;
        file_imus << "time beg: " << std::fixed << imu_get_beg << " s" << std::endl
            << "time end: " << std::fixed << imu_get_end << " s" << std::endl
            << "time cost: " << elapsed << " s" << std::endl << std::endl
            << "imu: " << (imu_count / imu_get_count) << " per frame, "
            << imu_count << " in total, " << (imu_count / elapsed) << " Hz" << std::endl;

        imu_get_count = 0;
        imu_count = 0;
      }
#endif
#endif
    }
  }
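  // onInit reads the node parameters (device_name, enable_cpu, enable_depth
  // and the topic names), advertises the image, camera_info, depth and IMU
  // topics, and then starts device_poll() on a background thread.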
  void onInit() {
    std::string img_topic = "image_rect_color";
    std::string img_raw_topic = "image_raw_color";

    std::string left_topic = "left/" + img_topic;
    std::string left_raw_topic = "left/" + img_raw_topic;
    std::string left_cam_info_topic = "left/camera_info";
    left_frame_id = "/mynt_left_frame";

    std::string right_topic = "right/" + img_topic;
    std::string right_raw_topic = "right/" + img_raw_topic;
    std::string right_cam_info_topic = "right/camera_info";
    right_frame_id = "/mynt_right_frame";

    std::string depth_topic = "depth/depth_registered";
    depth_frame_id = "/mynt_depth_frame";

    std::string imu_topic = "imu";
    imu_frame_id = "/mynt_imu_frame";

    device_name = 1;
    enable_cpu = false;
    enable_depth = false;

    nh = getMTNodeHandle();
    nh_ns = getMTPrivateNodeHandle();
    nh_ns.getParam("device_name", device_name);
    nh_ns.getParam("enable_cpu", enable_cpu);
    nh_ns.getParam("enable_depth", enable_depth);
    nh_ns.getParam("left_topic", left_topic);
    nh_ns.getParam("left_raw_topic", left_raw_topic);
    nh_ns.getParam("left_cam_info_topic", left_cam_info_topic);
    nh_ns.getParam("right_topic", right_topic);
    nh_ns.getParam("right_raw_topic", right_raw_topic);
    nh_ns.getParam("right_cam_info_topic", right_cam_info_topic);
    nh_ns.getParam("depth_topic", depth_topic);
    nh_ns.getParam("imu_topic", imu_topic);

    image_transport::ImageTransport it_mynteye(nh);

    pub_left = it_mynteye.advertise(left_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << left_topic);
    pub_raw_left = it_mynteye.advertise(left_raw_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << left_raw_topic);
    pub_left_cam_info = nh.advertise<sensor_msgs::CameraInfo>(left_cam_info_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << left_cam_info_topic);

    pub_right = it_mynteye.advertise(right_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << right_topic);
    pub_raw_right = it_mynteye.advertise(right_raw_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << right_raw_topic);
    pub_right_cam_info = nh.advertise<sensor_msgs::CameraInfo>(right_cam_info_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << right_cam_info_topic);

    pub_depth = it_mynteye.advertise(depth_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << depth_topic);

    pub_imu = nh.advertise<sensor_msgs::Imu>(imu_topic, 1);
    NODELET_INFO_STREAM("Advertised on topic " << imu_topic);

    device_poll_thread = boost::shared_ptr<boost::thread>(
        new boost::thread(boost::bind(&MYNTWrapperNodelet::device_poll, this)));
  }
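  // Close the camera when the nodelet is destroyed; the polling loop exits on
  // its own once nh_ns.ok() returns false.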
  ~MYNTWrapperNodelet() {
    cam.Close();
  }
};

}  // namespace mynt_wrapper

#include <pluginlib/class_list_macros.h>
PLUGINLIB_EXPORT_CLASS(mynt_wrapper::MYNTWrapperNodelet, nodelet::Nodelet)