PointCloudUtility.cc

#ifdef WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN 1
#endif

#include <windows.h>
#include <winsock2.h>
#else
#include <unistd.h>
#include <arpa/inet.h> // htons
#endif

#include <fstream>
#include <iomanip>
#include <iostream>
#include <memory>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string>

#include <MultiSense/MultiSenseChannel.hh>
#include <MultiSense/MultiSenseTypes.hh>

using namespace crl::multisense;

namespace { // anonymous

volatile bool doneG = false;

void usage(const char *programNameP)
{
    std::cerr << "USAGE: " << programNameP << " [<options>]" << std::endl;
    std::cerr << "Where <options> are:" << std::endl;
    std::cerr << "\t-a <current_address> : CURRENT IPV4 address (default=10.66.171.21)" << std::endl;
    std::cerr << "\t-m <mtu>             : CURRENT MTU (default=7200)" << std::endl;
    std::cerr << "\t-d <min_disparity>   : CURRENT MINIMUM DISPARITY (default=5.0)" << std::endl;

    exit(1);
}

#ifdef WIN32
BOOL WINAPI signalHandler(DWORD dwCtrlType)
{
    CRL_UNUSED (dwCtrlType);
    doneG = true;
    return TRUE;
}
#else
void signalHandler(int sig)
{
    (void) sig;
    doneG = true;
}
#endif

struct WorldPoint
{
    double x = 0.0;
    double y = 0.0;
    double z = 0.0;
    uint8_t luma = 0u;
};
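
//
// Note: each WorldPoint is expressed in the left rectified camera frame, in the units of
// the calibration baseline (typically meters), with the rectified luma value kept for coloring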

//
// Wrapper around a Channel pointer to make cleanup easier

class ChannelWrapper
{
public:

    ChannelWrapper(const std::string &ipAddress):
        channelPtr_(Channel::Create(ipAddress))
    {
    }

    ~ChannelWrapper()
    {
        if (channelPtr_) {
            Channel::Destroy(channelPtr_);
        }
    }

    Channel* ptr() noexcept
    {
        return channelPtr_;
    }

private:

    ChannelWrapper(const ChannelWrapper&) = delete;
    ChannelWrapper& operator=(const ChannelWrapper&) = delete;

    Channel* channelPtr_ = nullptr;
};
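
//
// Note: Channel::Create() returns nullptr if communication with the camera cannot be
// established, and every successfully created Channel must be released with
// Channel::Destroy(); the RAII wrapper above ties that cleanup to scope exit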

//
// Wrapper to preserve image data outside of the image callback

class ImageBufferWrapper
{
public:

    ImageBufferWrapper(crl::multisense::Channel* driver,
                       const crl::multisense::image::Header &data):
        driver_(driver),
        callbackBuffer_(driver->reserveCallbackBuffer()),
        data_(data)
    {
    }

    ~ImageBufferWrapper()
    {
        if (driver_) {
            driver_->releaseCallbackBuffer(callbackBuffer_);
        }
    }

    const image::Header &data() const noexcept
    {
        return data_;
    }

private:

    ImageBufferWrapper(const ImageBufferWrapper&) = delete;
    ImageBufferWrapper& operator=(const ImageBufferWrapper&) = delete;

    crl::multisense::Channel * driver_ = nullptr;
    void* callbackBuffer_;
    const image::Header data_;
};
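
//
// Note: reserveCallbackBuffer() pins the driver-owned buffer backing the header passed to
// the image callback, so the pixel data stays valid after the callback returns, until
// releaseCallbackBuffer() is called in the destructor above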

struct UserData
{
    Channel *driver = nullptr;
    std::shared_ptr<const ImageBufferWrapper> disparity = nullptr;
    std::shared_ptr<const ImageBufferWrapper> leftRectified = nullptr;
    image::Calibration calibration;
    system::DeviceInfo deviceInfo;
    double minDisparity = 0.0;
};

std::vector<WorldPoint> reprojectDisparity(const std::shared_ptr<const ImageBufferWrapper> disparity,
                                           const std::shared_ptr<const ImageBufferWrapper> leftRectified,
                                           const image::Calibration &calibration,
                                           const system::DeviceInfo &deviceInfo,
                                           double minDisparity)
{
    const size_t width = leftRectified->data().width;
    const size_t height = leftRectified->data().height;

    //
    // Scale our calibration based on the current sensor resolution. Calibrations stored on the camera
    // are for full-resolution images

    const double xScale = 1.0 / (static_cast<double>(deviceInfo.imagerWidth) /
                                 static_cast<double>(width));

    const double yScale = 1.0 / (static_cast<double>(deviceInfo.imagerHeight) /
                                 static_cast<double>(height));

    //
    // Decompose rectified projection matrices stored in the form
    //
    // Left:            Right:
    // Fx  0  Cx  0     Fx  0  Cx  FxTx
    // 0   Fy Cy  0     0   Fy  Cy 0
    // 0   0  1   0     0   0   1  0

    const double fx = calibration.left.P[0][0] * xScale;
    const double fy = calibration.left.P[1][1] * yScale;
    const double cx = calibration.left.P[0][2] * xScale;
    const double cy = calibration.left.P[1][2] * yScale;
    const double tx = calibration.right.P[0][3] / calibration.left.P[0][0];
    const double cxRight = calibration.right.P[0][2] * xScale;

    const uint16_t *disparityP = reinterpret_cast<const uint16_t*>(disparity->data().imageDataP);
    const uint8_t *leftRectifiedP = reinterpret_cast<const uint8_t*>(leftRectified->data().imageDataP);

    std::vector<WorldPoint> points;
    points.reserve(height * width);

    for (size_t h = 0 ; h < height ; ++h) {
        for (size_t w = 0 ; w < width ; ++w) {

            const size_t index = h * width + w;

            //
            // MultiSense 16-bit disparity images are stored in 1/16 of a pixel. This allows us to send subpixel
            // resolutions with integer values

            const double d = static_cast<double>(disparityP[index]) / 16.0;

            if (d < minDisparity) {
                continue;
            }

            //
            // Q matrix used for reprojection. Manually perform the multiplication with the vector
            // [w, h, d, 1]^T:
            //
            // FyTx  0     0    -FyCxTx
            // 0     FxTx  0    -FxCyTx
            // 0     0     0     FxFyTx
            // 0     0    -Fy    Fy(Cx - Cx')

            const double xB = ((fy * tx) * w) + (-fy * cx * tx);
            const double yB = ((fx * tx) * h) + (-fx * cy * tx);
            const double zB = (fx * fy * tx);
            const double B = (-fy * d) + (fy * (cx - cxRight));

            points.emplace_back(WorldPoint{xB/B, yB/B, zB/B, leftRectifiedP[index]});
        }
    }

    return points;
}
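
//
// For reference, the per-point division in reprojectDisparity() above reduces algebraically
// to the familiar rectified-stereo relations:
//
//     z = (Fx * Tx) / ((Cx - Cx') - d)
//     x = (w - Cx) * z / Fx
//     y = (h - Cy) * z / Fy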

bool savePly(const std::string& fileName,
             const std::vector<WorldPoint> &points)
{
    std::ofstream ply(fileName.c_str());

    ply << "ply\n";
    ply << "format ascii 1.0\n";
    ply << "element vertex " << points.size() << "\n";
    ply << "property float x\n";
    ply << "property float y\n";
    ply << "property float z\n";
    ply << "property uchar red\n";
    ply << "property uchar green\n";
    ply << "property uchar blue\n";
    ply << "end_header\n";

    //
    // Write each point as "x y z r g b", replicating the rectified luma value across
    // the three color channels

    for (const auto &point : points) {
        const uint32_t luma = static_cast<uint32_t>(point.luma);
        ply << point.x << " " << point.y << " " << point.z << " " << luma << " " << luma << " " << luma << "\n";
    }

    return true;
}
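
//
// A minimal sketch of a binary-PLY variant (not part of the original utility): the ASCII
// writer above is easy to inspect but slow, while a binary_little_endian file is typically
// much faster to write and load. Assumes a little-endian host.

bool savePlyBinary(const std::string& fileName,
                   const std::vector<WorldPoint> &points)
{
    std::ofstream ply(fileName.c_str(), std::ios::binary);

    ply << "ply\n";
    ply << "format binary_little_endian 1.0\n";
    ply << "element vertex " << points.size() << "\n";
    ply << "property float x\n";
    ply << "property float y\n";
    ply << "property float z\n";
    ply << "property uchar red\n";
    ply << "property uchar green\n";
    ply << "property uchar blue\n";
    ply << "end_header\n";

    for (const auto &point : points) {
        //
        // Pack each vertex as three 32-bit floats followed by three 8-bit color channels

        const float xyz[3] = {static_cast<float>(point.x),
                              static_cast<float>(point.y),
                              static_cast<float>(point.z)};
        const uint8_t rgb[3] = {point.luma, point.luma, point.luma};

        ply.write(reinterpret_cast<const char*>(xyz), sizeof(xyz));
        ply.write(reinterpret_cast<const char*>(rgb), sizeof(rgb));
    }

    return true;
}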

void imageCallback(const image::Header& header,
                   void *userDataP)
{
    UserData *userData = reinterpret_cast<UserData*>(userDataP);

    if (!userData->driver) {
        std::cerr << "Invalid MultiSense channel" << std::endl;
        return;
    }

    switch (header.source) {
        case Source_Luma_Rectified_Left:
        {
            userData->leftRectified = std::make_shared<ImageBufferWrapper>(userData->driver, header);
            if (userData->disparity && userData->disparity->data().frameId == header.frameId) {
                //
                // Our disparity and left rectified images match, we have enough info to create a pointcloud
                break;
            }

            return;
        }
        case Source_Disparity_Left:
        {
            if (header.bitsPerPixel != 16) {
                std::cerr << "Unsupported disparity pixel depth" << std::endl;
                return;
            }

            userData->disparity = std::make_shared<ImageBufferWrapper>(userData->driver, header);
            if (userData->leftRectified && userData->leftRectified->data().frameId == header.frameId) {
                //
                // Our disparity and left rectified images match, we have enough info to create a pointcloud
                break;
            }

            return;
        }
        default:
        {
            std::cerr << "Unknown image source: " << header.source << std::endl;
            return;
        }
    };

    //
    // Note this implementation of writing ply files can be slow since they are written in ascii

    savePly(std::to_string(header.frameId) + ".ply", reprojectDisparity(userData->disparity,
                                                                        userData->leftRectified,
                                                                        userData->calibration,
                                                                        userData->deviceInfo,
                                                                        userData->minDisparity));
}

} // anonymous

int main(int    argc,
         char **argvPP)
{
    std::string currentAddress = "10.66.171.21";
    int32_t mtu = 7200;
    double minDisparity = 5.0;

#ifdef WIN32
    SetConsoleCtrlHandler (signalHandler, TRUE);
#else
    signal(SIGINT, signalHandler);
#endif

    //
    // Parse args

    int c;

    while(-1 != (c = getopt(argc, argvPP, "a:m:d:")))
        switch(c) {
            case 'a': currentAddress = std::string(optarg); break;
            case 'm': mtu            = atoi(optarg);        break;
            case 'd': minDisparity   = std::stod(optarg);   break;
            default:  usage(*argvPP);                       break;
        }

    Status status;

    //
    // Initialize communications.

    auto channelP = std::make_unique<ChannelWrapper>(currentAddress);
    if (nullptr == channelP || nullptr == channelP->ptr()) {
        std::cerr << "Failed to establish communications with \"" << currentAddress << "\"" << std::endl;
        return EXIT_FAILURE;
    }

    //
    // Change MTU

    status = channelP->ptr()->setMtu(mtu);
    if (Status_Ok != status) {
        std::cerr << "Failed to set MTU to " << mtu << ": " << Channel::statusString(status) << std::endl;
        return EXIT_FAILURE;
    }
386 
387  //
388  // Query calibration
389 
390  image::Calibration calibration;
391  status = channelP->ptr()->getImageCalibration(calibration);
392  if (Status_Ok != status) {
393  std::cerr << "Failed to query calibraiton: " << Channel::statusString(status) << std::endl;
394  return EXIT_FAILURE;
395  }
396 
397  //
398  // Query device info
399 
401  status = channelP->ptr()->getDeviceInfo(deviceInfo);
402  if (Status_Ok != status) {
403  std::cerr << "Failed to query device info: " << Channel::statusString(status) << std::endl;
404  return EXIT_FAILURE;
405  }
406 
407  //
408  // Change framerate and resolution to 1/4 resolution
409 
410  image::Config cfg;
411 
412  status = channelP->ptr()->getImageConfig(cfg);
413  if (Status_Ok != status) {
414  std::cerr << "Failed to get image config: " << Channel::statusString(status) << std::endl;
415  return EXIT_FAILURE;
416  } else {
417  cfg.setFps(10.0);
418  cfg.setResolution(deviceInfo.imagerWidth / 2, deviceInfo.imagerHeight / 2);
419 
420  status = channelP->ptr()->setImageConfig(cfg);
421  if (Status_Ok != status) {
422  std::cerr << "Failed to configure sensor: " << Channel::statusString(status) << std::endl;
423  return EXIT_FAILURE;
424  }
425  }
426 
427  //
428  // Setup user data to store camera state for pointcloud reprojection
429 
430  UserData userData{channelP->ptr(), nullptr, nullptr, calibration, deviceInfo, minDisparity};
431 
432  //
433  // Add callbacks
434 
435  channelP->ptr()->addIsolatedCallback(imageCallback, Source_Luma_Rectified_Left | Source_Disparity_Left, &userData);
436 
437  //
438  // Start streaming
439 
440  status = channelP->ptr()->startStreams(Source_Luma_Rectified_Left | Source_Disparity_Left);
441  if (Status_Ok != status) {
442  std::cerr << "Failed to start streams: " << Channel::statusString(status) << std::endl;
443  return EXIT_FAILURE;
444  }
445 
446  while(!doneG) {
447  usleep(1000000);
448  }
449 
450  status = channelP->ptr()->stopStreams(Source_All);
451  if (Status_Ok != status) {
452  std::cerr << "Failed to stop streams: " << Channel::statusString(status) << std::endl;
453  return EXIT_FAILURE;
454  }
455 
456  return EXIT_SUCCESS;
457 }