Legacy/PointCloudUtility/PointCloudUtility.cc
#ifdef WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN 1
#endif

#include <windows.h>
#include <winsock2.h>
#else
#include <unistd.h>
#include <arpa/inet.h> // htons
#endif

#include <fstream>
#include <iomanip>
#include <iostream>
#include <memory>
#include <sstream>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string>

#include <getopt/getopt.h>
#include <ChannelUtilities.hh>

#include <MultiSense/MultiSenseChannel.hh>

using namespace crl::multisense;

namespace { // anonymous

volatile bool doneG = false;

void usage(const char *programNameP)
{
    std::cerr << "USAGE: " << programNameP << " [<options>]" << std::endl;
    std::cerr << "Where <options> are:" << std::endl;
    std::cerr << "\t-a <current_address> : IPv4 address of the camera (default=10.66.171.21)" << std::endl;
    std::cerr << "\t-m <mtu>             : MTU to set on the camera (default=auto-select best MTU)" << std::endl;
    std::cerr << "\t-d <min_disparity>   : minimum disparity in pixels to include in the pointcloud (default=5.0)" << std::endl;

    exit(1);
}

#ifdef WIN32
BOOL WINAPI signalHandler(DWORD dwCtrlType)
{
    CRL_UNUSED (dwCtrlType);
    doneG = true;
    return TRUE;
}
#else
void signalHandler(int sig)
{
    (void) sig;
    doneG = true;
}
#endif

struct WorldPoint
{
    float x = 0.0;
    float y = 0.0;
    float z = 0.0;
    uint8_t luma = 0u;
};

struct UserData
{
    Channel *driver = nullptr;
    std::shared_ptr<const ImageBufferWrapper> disparity = nullptr;
    std::shared_ptr<const ImageBufferWrapper> leftRectified = nullptr;
    image::Calibration calibration;
    system::DeviceInfo deviceInfo;
    double minDisparity = 0.0;
};

std::vector<WorldPoint> reprojectDisparity(const std::shared_ptr<const ImageBufferWrapper> &disparity,
                                           const std::shared_ptr<const ImageBufferWrapper> &leftRectified,
                                           const image::Calibration &calibration,
                                           const system::DeviceInfo &deviceInfo,
                                           double minDisparity)
{
    const size_t width = leftRectified->data().width;
    const size_t height = leftRectified->data().height;

    //
    // Scale our calibration based on the current sensor resolution. Calibrations stored on the camera
    // are for full-resolution images

    const double xScale = 1.0 / ((static_cast<double>(deviceInfo.imagerWidth) /
                                  static_cast<double>(width)));

    const double yScale = 1.0 / ((static_cast<double>(deviceInfo.imagerHeight) /
                                  static_cast<double>(height)));

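    //
    // For example, streaming at half the imager width and height gives xScale = yScale = 0.5,
    // which halves Fx, Fy, Cx, and Cy below
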
    //
    // Decompose rectified projection matrices stored in the form
    //
    //  Left:           Right:
    //  Fx  0  Cx  0    Fx  0  Cx  FxTx
    //  0   Fy Cy  0    0   Fy Cy  0
    //  0   0  1   0    0   0  1   0

    const double fx = calibration.left.P[0][0] * xScale;
    const double fy = calibration.left.P[1][1] * yScale;
    const double cx = calibration.left.P[0][2] * xScale;
    const double cy = calibration.left.P[1][2] * yScale;
    const double tx = calibration.right.P[0][3] / calibration.right.P[0][0];
    const double cxRight = calibration.right.P[0][2] * xScale;
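
    //
    // Note: for the rectified pair, right.P[0][3] stores -Fx * baseline, so tx above works out to the
    // negative stereo baseline in meters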

    const uint16_t *disparityP = reinterpret_cast<const uint16_t*>(disparity->data().imageDataP);
    const uint8_t *leftRectifiedP = reinterpret_cast<const uint8_t*>(leftRectified->data().imageDataP);

    std::vector<WorldPoint> points;
    points.reserve(height * width);

    for (size_t h = 0 ; h < height ; ++h) {
        for (size_t w = 0 ; w < width ; ++w) {

            const size_t index = h * width + w;

            //
            // MultiSense 16-bit disparity images are stored in units of 1/16 of a pixel. This allows
            // subpixel disparity resolution to be transmitted using integer values

            const double d = static_cast<double>(disparityP[index]) / 16.0;
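
            //
            // For example, a raw disparity value of 16 corresponds to exactly 1.0 pixel, and a value
            // of 1234 corresponds to 1234 / 16 = 77.125 pixels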

            if (d < minDisparity) {
                continue;
            }

            //
            // Q matrix used for reprojection. Manually perform the multiplication with the vector:
            // [w, h, d, 1]^T
            //
            //  FyTx  0     0    -FyCxTx
            //  0     FxTx  0    -FxCyTx
            //  0     0     0     FxFyTx
            //  0     0    -Fy    Fy(Cx - Cx')

            const double xB = ((fy * tx) * w) + (-fy * cx * tx);
            const double yB = ((fx * tx) * h) + (-fx * cy * tx);
            const double zB = (fx * fy * tx);
            const double invB = 1. / ((-fy * d) + (fy * (cx - cxRight)));
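
            //
            // With tx equal to the negative stereo baseline, this reprojection reduces to the familiar
            // z = Fx * baseline / d when Cx and Cx' coincide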

            points.emplace_back(WorldPoint{static_cast<float>(xB * invB),
                                           static_cast<float>(yB * invB),
                                           static_cast<float>(zB * invB),
                                           leftRectifiedP[index]});
        }
    }

    return points;
}

bool savePly(const std::string& fileName,
             const std::vector<WorldPoint> &points)
{
    std::stringstream ss;

    ss << "ply\n";
    ss << "format ascii 1.0\n";
    ss << "element vertex " << points.size() << "\n";
    ss << "property float x\n";
    ss << "property float y\n";
    ss << "property float z\n";
    ss << "property uchar red\n";
    ss << "property uchar green\n";
    ss << "property uchar blue\n";
    ss << "end_header\n";

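    //
    // Replicate the rectified-image luma into the red, green, and blue channels so viewers render a
    // grayscale-colored cloud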
    for (const auto &point : points) {
        const uint32_t luma = static_cast<uint32_t>(point.luma);
        ss << point.x << " " << point.y << " " << point.z << " " << luma << " " << luma << " " << luma << "\n";
    }

    std::ofstream ply(fileName.c_str());
    ply << ss.str();

    return ply.good();
}

void imageCallback(const image::Header& header,
                   void *userDataP)
{
    UserData *userData = reinterpret_cast<UserData*>(userDataP);

    if (!userData->driver) {
        std::cerr << "Invalid MultiSense channel" << std::endl;
        return;
    }

    switch (header.source) {
        case Source_Luma_Rectified_Left:
        {
            userData->leftRectified = std::make_shared<ImageBufferWrapper>(userData->driver, header);
            if (userData->disparity && userData->disparity->data().frameId == header.frameId) {
                //
                // Our disparity and left rectified images match, we have enough info to create a pointcloud
                break;
            }

            return;
        }
        case Source_Disparity_Left:
        {
            if (header.bitsPerPixel != 16) {
                std::cerr << "Unsupported disparity pixel depth" << std::endl;
                return;
            }

            userData->disparity = std::make_shared<ImageBufferWrapper>(userData->driver, header);
            if (userData->leftRectified && userData->leftRectified->data().frameId == header.frameId) {
                //
                // Our disparity and left rectified images match, we have enough info to create a pointcloud
                break;
            }

            return;
        }
        default:
        {
            std::cerr << "Unknown image source: " << header.source << std::endl;
            return;
        }
    };

    //
    // Note: this PLY writer can be slow since the files are written in ASCII

    std::cout << "Saving pointcloud for frame " << header.frameId << std::endl;
    savePly(std::to_string(header.frameId) + ".ply", reprojectDisparity(userData->disparity,
                                                                        userData->leftRectified,
                                                                        userData->calibration,
                                                                        userData->deviceInfo,
                                                                        userData->minDisparity));
}

} // anonymous

int main(int argc,
         char **argvPP)
{
    std::string currentAddress = "10.66.171.21";
    int32_t mtu = 0;
    double minDisparity = 5.0;

#if WIN32
    SetConsoleCtrlHandler (signalHandler, TRUE);
#else
    signal(SIGINT, signalHandler);
#endif

    //
    // Parse args

    int c;

    while(-1 != (c = getopt(argc, argvPP, "a:m:d:")))
        switch(c) {
            case 'a': currentAddress = std::string(optarg); break;
            case 'm': mtu = atoi(optarg); break;
            case 'd': minDisparity = std::stod(optarg); break;
            default: usage(*argvPP); break;
        }

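    //
    // Example invocation (assuming the built binary is named PointCloudUtility):
    //   ./PointCloudUtility -a 10.66.171.21 -m 7200 -d 5.0
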
    Status status;

    //
    // Initialize communications.

    auto channelP = std::make_unique<ChannelWrapper>(currentAddress);
    if (nullptr == channelP || nullptr == channelP->ptr()) {
        std::cerr << "Failed to establish communications with \"" << currentAddress << "\"" << std::endl;
        return EXIT_FAILURE;
    }

    //
    // Change MTU

    if (mtu >= 1500)
        status = channelP->ptr()->setMtu(mtu);
    else
        status = channelP->ptr()->setBestMtu();

    if (Status_Ok != status) {
        std::cerr << "Failed to set MTU: " << Channel::statusString(status) << std::endl;
        return EXIT_FAILURE;
    }

    //
    // Query calibration

    image::Calibration calibration;
    status = channelP->ptr()->getImageCalibration(calibration);
    if (Status_Ok != status) {
        std::cerr << "Failed to query calibration: " << Channel::statusString(status) << std::endl;
        return EXIT_FAILURE;
    }

    //
    // Query device info

    system::DeviceInfo deviceInfo;
    status = channelP->ptr()->getDeviceInfo(deviceInfo);
    if (Status_Ok != status) {
        std::cerr << "Failed to query device info: " << Channel::statusString(status) << std::endl;
        return EXIT_FAILURE;
    }

    //
    // Change the framerate and switch to quarter resolution (half width, half height)

    image::Config cfg;

    status = channelP->ptr()->getImageConfig(cfg);
    if (Status_Ok != status) {
        std::cerr << "Failed to get image config: " << Channel::statusString(status) << std::endl;
        return EXIT_FAILURE;
    } else {
        cfg.setFps(10.0);
        cfg.setResolution(deviceInfo.imagerWidth / 2, deviceInfo.imagerHeight / 2);

        status = channelP->ptr()->setImageConfig(cfg);
        if (Status_Ok != status) {
            std::cerr << "Failed to configure sensor: " << Channel::statusString(status) << std::endl;
            return EXIT_FAILURE;
        }
    }

    //
    // Set up user data to store camera state for pointcloud reprojection

    UserData userData{channelP->ptr(), nullptr, nullptr, calibration, deviceInfo, minDisparity};


    //
    // Add callbacks

    channelP->ptr()->addIsolatedCallback(imageCallback, Source_Luma_Rectified_Left | Source_Disparity_Left, &userData);
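
    //
    // Note: isolated callbacks are invoked from a driver-owned thread, so userData must remain valid
    // while streaming; here it lives on main's stack and the streams are stopped below before it
    // goes out of scope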

    //
    // Start streaming

    status = channelP->ptr()->startStreams(Source_Luma_Rectified_Left | Source_Disparity_Left);
    if (Status_Ok != status) {
        std::cerr << "Failed to start streams: " << Channel::statusString(status) << std::endl;
        return EXIT_FAILURE;
    }

    while(!doneG) {
        usleep(1000000);
    }

    status = channelP->ptr()->stopStreams(Source_All);
    if (Status_Ok != status) {
        std::cerr << "Failed to stop streams: " << Channel::statusString(status) << std::endl;
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}