ensenso_grabber.cpp
2 
3 
4 void ensensoExceptionHandling (const NxLibException &ex,
5  std::string func_nam)
6 {
7  PCL_ERROR ("%s: NxLib error %s (%d) occurred while accessing item %s.\n", func_nam.c_str (), ex.getErrorText ().c_str (), ex.getErrorCode (),
8  ex.getItemPath ().c_str ());
9  if (ex.getErrorCode () == NxLibExecutionFailed)
10  {
11  NxLibCommand cmd ("");
12  PCL_WARN ("\n%s\n", cmd.result ().asJson (true, 4, false).c_str ());
13  }
14 }
15 
16 
17 pcl::EnsensoGrabber::EnsensoGrabber () :
18  device_open_ (false),
19  mono_device_open_(false),
20  last_stereo_pattern_(""),
21  store_calibration_pattern_ (false),
22  running_ (false),
23  tcp_open_ (false),
24  use_rgb_(false)
25 {
26  point_cloud_signal_ = createSignal<sig_cb_ensenso_point_cloud> ();
27  point_cloud_rgb_signal_ = createSignal<sig_cb_ensenso_point_cloud_rgb> ();
28  images_signal_ = createSignal<sig_cb_ensenso_images> ();
29  images_rgb_signal_ = createSignal<sig_cb_ensenso_images_rgb> ();
30  image_depth_signal_ = createSignal<sig_cb_ensenso_image_depth> ();
31 
32  PCL_INFO ("Initialising nxLib\n");
33  try
34  {
35  nxLibInitialize ();
36  root_.reset (new NxLibItem);
37  }
38  catch (NxLibException &ex)
39  {
40  ensensoExceptionHandling (ex, "EnsensoGrabber");
41  PCL_THROW_EXCEPTION (pcl::IOException, "Could not initialise NxLib."); // If the constructor fails, throw an exception
42  }
43 }
44 
45 pcl::EnsensoGrabber::~EnsensoGrabber () throw ()
46 {
47  try
48  {
49  stop ();
50  root_.reset ();
51 
52  disconnect_all_slots<sig_cb_ensenso_point_cloud> ();
53  disconnect_all_slots<sig_cb_ensenso_point_cloud_rgb> ();
54  disconnect_all_slots<sig_cb_ensenso_images> ();
55  disconnect_all_slots<sig_cb_ensenso_images_rgb> ();
56  disconnect_all_slots<sig_cb_ensenso_image_depth> ();
57 
58  if (tcp_open_)
59  closeTcpPort ();
60  nxLibFinalize ();
61  }
62  catch (...)
63  {
64  // destructor never throws
65  }
66 }
67 
68 bool pcl::EnsensoGrabber::calibrateHandEye (const std::vector<Eigen::Affine3d, Eigen::aligned_allocator<Eigen::Affine3d> > &robot_poses,
69  const Eigen::Affine3d &camera_seed,
70  const Eigen::Affine3d &pattern_seed,
71  const std::string setup,
72  Eigen::Affine3d &estimated_camera_pose,
73  Eigen::Affine3d &estimated_pattern_pose,
74  int &iterations,
75  double &reprojection_error) const
76 {
77  if ( (*root_)[itmVersion][itmMajor] <= 1 && (*root_)[itmVersion][itmMinor] < 3)
78  {
79  PCL_WARN("EnsensoSDK 1.3.x fixes bugs in the calibration optimization\n");
80  PCL_WARN("please update your SDK! http://www.ensenso.de/support/sdk-download\n");
81  }
82  NxLibCommand calibrate (cmdCalibrateHandEye);
83  try
84  {
85  // Check consistency
86  if (!boost::iequals (setup, valFixed) && !boost::iequals (setup, valMoving))
87  {
88  PCL_WARN("Received invalid setup value: %s\n", setup.c_str());
89  return (false);
90  }
91  // Set Hand-Eye calibration parameters
92  PCL_DEBUG("Setting calibration parameters\n");
93  std::string target;
94  if (boost::iequals (setup, valFixed))
95  target = valWorkspace;
96  else
97  target = valHand;
98  Eigen::Affine3d eigen_pose;
99  // Feed robot transformations
100  PCL_DEBUG("Populating robot poses\n");
101  std::vector<std::string> json_poses;
102  json_poses.resize (robot_poses.size ());
103  for (uint i = 0; i < robot_poses.size(); ++i)
104  {
105  eigen_pose = robot_poses[i];
106  eigen_pose.translation () *= 1000.0; // meters -> millimeters
107  matrixToJson(eigen_pose, json_poses[i]);
108  }
109  PCL_DEBUG("Executing...\n");
110  // Convert camera seed to Json
111  std::string json_camera_seed, json_pattern_seed;
112  PCL_DEBUG("Populating seeds\n");
113  eigen_pose = camera_seed;
114  eigen_pose.translation () *= 1000.0; // meters -> millimeters
115  matrixToJson(eigen_pose, json_camera_seed);
116  PCL_DEBUG("camera_seed:\n %s\n", json_camera_seed.c_str());
117  // Convert pattern seed to Json
118  eigen_pose = pattern_seed;
119  eigen_pose.translation () *= 1000.0; // meters -> millimeters
120  matrixToJson(eigen_pose, json_pattern_seed);
121  PCL_DEBUG("pattern_seed:\n %s\n", json_pattern_seed.c_str());
122  // Populate command parameters
123  // It's very important to write the parameters in alphabetical order and at the same time!
124  calibrate.parameters ()[itmLink].setJson(json_camera_seed, false);
125  calibrate.parameters ()[itmPatternPose].setJson(json_pattern_seed, false);
126  calibrate.parameters ()[itmSetup] = setup;
127  calibrate.parameters ()[itmTarget] = target;
128  for (uint i = 0; i < json_poses.size(); ++i)
129  calibrate.parameters ()[itmTransformations][i].setJson(json_poses[i], false);
130  // Execute the command
131  calibrate.execute (); // It might take some minutes
132  if (calibrate.successful())
133  {
134  // It's very important to read the parameters in alphabetical order and at the same time!
135  iterations = calibrate.result()[itmIterations].asInt();
136  std::string json_camera_pose = calibrate.result()[itmLink].asJson (true);
137  std::string json_pattern_pose = calibrate.result()[itmPatternPose].asJson (true);
138  reprojection_error = calibrate.result()[itmReprojectionError].asDouble();
139  // Estimated camera pose
140  jsonToMatrix(json_camera_pose, estimated_camera_pose);
141  estimated_camera_pose.translation () /= 1000.0; // millimeters -> meters
142  // Estimated pattern pose
143  jsonToMatrix(json_pattern_pose, estimated_pattern_pose);
144  estimated_pattern_pose.translation () /= 1000.0; // millimeters -> meters
145  PCL_DEBUG("Result:\n %s\n", json_camera_pose.c_str());
146  return (true);
147  }
148  else
149  return (false);
150  }
151  catch (NxLibException &ex)
152  {
153  ensensoExceptionHandling (ex, "calibrateHandEye");
154  return (false);
155  }
156 }
157 
158 bool pcl::EnsensoGrabber::closeDevices ()
159 {
160  if (!device_open_ && !mono_device_open_)
161  return (false);
162 
163  stop ();
164  PCL_INFO ("Closing Ensenso cameras\n");
165 
166  try
167  {
168  NxLibCommand (cmdClose).execute ();
169  device_open_ = false;
170  mono_device_open_ = false;
171  }
172  catch (NxLibException &ex)
173  {
174  ensensoExceptionHandling (ex, "closeDevice");
175  return (false);
176  }
177  return (true);
178 }
179 
180 bool pcl::EnsensoGrabber::closeTcpPort (void)
181 {
182  try
183  {
184  nxLibCloseTcpPort ();
185  tcp_open_ = false;
186  }
187  catch (NxLibException &ex)
188  {
189  ensensoExceptionHandling (ex, "closeTcpPort");
190  return (false);
191  }
192  return (true);
193 }
194 
195 int pcl::EnsensoGrabber::collectPattern (const bool buffer) const
196 {
197  if (!device_open_ || running_)
198  return (-1);
199  try
200  {
201  NxLibCommand (cmdCapture).execute ();
202  NxLibCommand collect_pattern (cmdCollectPattern);
203  collect_pattern.parameters ()[itmBuffer].set (buffer);
204  collect_pattern.parameters ()[itmDecodeData].set (false);
205  collect_pattern.execute ();
206  }
207  catch (NxLibException &ex)
208  {
209  ensensoExceptionHandling (ex, "collectPattern");
210  return getPatternCount();
211  }
212  return getPatternCount();
213 }
214 
215 double pcl::EnsensoGrabber::decodePattern () const
216 {
217  double grid_spacing = -1.0;
218  if (!device_open_ || running_)
219  return (-1.0);
220  try
221  {
222  NxLibCommand (cmdCapture).execute ();
223  NxLibCommand collect_pattern (cmdCollectPattern);
224  collect_pattern.parameters ()[itmBuffer].set (false);
225  collect_pattern.parameters ()[itmDecodeData].set (true);
226  collect_pattern.execute ();
227  grid_spacing = collect_pattern.result()[itmGridSpacing].asDouble();
228  }
229  catch (NxLibException &ex)
230  {
231  ensensoExceptionHandling (ex, "decodePattern");
232  return (-1.0);
233  }
234  return grid_spacing;
235 }
236 
237 bool pcl::EnsensoGrabber::discardPatterns () const
238 {
239  try
240  {
241  NxLibCommand (cmdDiscardPatterns).execute ();
242  }
243  catch (NxLibException &ex)
244  {
245  ensensoExceptionHandling (ex, "discardPatterns");
246  return (false);
247  }
248  return (true);
249 }
250 
251 bool pcl::EnsensoGrabber::estimatePatternPose (Eigen::Affine3d &pose, const bool average) const
252 {
253  try
254  {
255  NxLibCommand estimate_pattern_pose (cmdEstimatePatternPose);
256  estimate_pattern_pose.parameters ()[itmAverage].set (average);
257  estimate_pattern_pose.execute ();
258  NxLibItem tf = estimate_pattern_pose.result ()[itmPatternPose];
259  // Convert tf into a matrix
260  if (!jsonToMatrix (tf.asJson (), pose))
261  return (false);
262  pose.translation () /= 1000.0; // Convert translation to meters (the Ensenso API returns millimeters)
263  return (true);
264  }
265  catch (NxLibException &ex)
266  {
267  ensensoExceptionHandling (ex, "estimatePatternPose");
268  return (false);
269  }
270 }
271 
272 int pcl::EnsensoGrabber::enumDevices () const
273 {
274  int camera_count = 0;
275  try
276  {
277  NxLibItem cams = NxLibItem ("/Cameras/BySerialNo");
278  camera_count = cams.count ();
279  // Print information for all cameras in the tree
280  PCL_INFO ("Number of connected cameras: %d\n", camera_count);
281  PCL_INFO ("Serial No Model Status\n");
282  for (int n = 0; n < cams.count (); ++n)
283  {
284  PCL_INFO ("%s %s %s\n", cams[n][itmSerialNumber].asString ().c_str (),
285  cams[n][itmModelName].asString ().c_str (),
286  cams[n][itmStatus].asString ().c_str ());
287  }
288  PCL_INFO ("\n");
289  }
290  catch (NxLibException &ex)
291  {
292  ensensoExceptionHandling (ex, "enumDevices");
293  }
294  return (camera_count);
295 }
296 
297 bool pcl::EnsensoGrabber::getCameraInfo(std::string cam, sensor_msgs::CameraInfo &cam_info) const
298 {
299  try
300  {
301  bool depth = false;
302  if (cam == "Depth")
303  {
304  depth = true;
305  cam = use_rgb_ ? "RGB" : "Left";
306  }
307  NxLibItem camera = (cam == "RGB" ) ? monocam_ : camera_;
308  NxLibItem camera_mat = (cam == "RGB") ? monocam_[itmCalibration][itmCamera] : camera_[itmCalibration][itmDynamic][itmStereo][cam][itmCamera];
309  NxLibItem camera_dist = (cam == "RGB") ? monocam_[itmCalibration][itmDistortion] : camera_[itmCalibration][itmMonocular][cam][itmDistortion];
310 
311  cam_info.width = camera[itmSensor][itmSize][0].asInt();
312  cam_info.height = camera[itmSensor][itmSize][1].asInt();
313  cam_info.distortion_model = "plumb_bob";
314  // Distortion factors (as in the ROS CameraInfo documentation, [K1, K2, T1, T2, K3])
315 
316  cam_info.D.resize(5);
317  if (depth)
318  {
319  for(std::size_t i = 0; i < cam_info.D.size(); ++i)
320  {
321  cam_info.D[i] = 0;
322  }
323  }
324  else
325  {
326  cam_info.D[0] = camera_dist[0].asDouble();
327  cam_info.D[1] = camera_dist[1].asDouble();
328  cam_info.D[2] = camera_dist[5].asDouble();
329  cam_info.D[3] = camera_dist[6].asDouble();
330  cam_info.D[4] = camera_dist[2].asDouble();
331  }
332 
333  // K and R matrices
334  for(std::size_t i = 0; i < 3; ++i)
335  {
336  for(std::size_t j = 0; j < 3; ++j)
337  {
338  if (cam != "RGB")
339  {
340  cam_info.R[3*i+j] = camera[itmCalibration][itmDynamic][itmStereo][cam][itmRotation][j][i].asDouble();
341  cam_info.K[3*i+j] = camera_[itmCalibration][itmMonocular][cam][itmCamera][j][i].asDouble();
342  }
343  else
344  {
345  cam_info.K[3*i+j] = camera_mat[j][i].asDouble();
346  }
347  }
348  }
349  if (cam == "RGB")
350  {
351  cam_info.R[0] = 1.0;
352  cam_info.R[4] = 1.0;
353  cam_info.R[8] = 1.0;
354  }
355  //first row
356  cam_info.P[0] = camera_mat[0][0].asDouble();
357  cam_info.P[1] = camera_mat[1][0].asDouble();
358  cam_info.P[2] = camera_mat[2][0].asDouble();
359  cam_info.P[3] = 0.0;
360  //second row
361  cam_info.P[4] = camera_mat[0][1].asDouble();
362  cam_info.P[5] = camera_mat[1][1].asDouble();
363  cam_info.P[6] = camera_mat[2][1].asDouble();
364  cam_info.P[7] = 0.0;
365  //third row
366  cam_info.P[8] = 0.0;
367  cam_info.P[9] = 0.0;
368  cam_info.P[10] = 1.0;
369  cam_info.P[11] = 0.0;
370  if (cam == "Right")
371  {
372  double B = camera[itmCalibration][itmStereo][itmBaseline].asDouble() / 1000.0;
373  double fx = cam_info.P[0];
374  cam_info.P[3] = (-fx * B);
375  }
376  return true;
377  }
378  catch (NxLibException &ex)
379  {
380  ensensoExceptionHandling (ex, "getCameraInfo");
381  return false;
382  }
383 }
384 
385 bool pcl::EnsensoGrabber::getTFLeftToRGB (Transform &tf) const
386 {
387  if (!mono_device_open_)
388  {
389  return (false);
390  }
391  // get the transformation between the left camera frame and the RGB frame
392  try {
393  NxLibCommand convert(cmdConvertTransformation, "convert");
394  convert.parameters()[itmTransformation].setJson(monocam_[itmLink].asJson());
395  convert.execute();
396  Eigen::Affine3d transform;
397  for (int i = 0; i < 4 ; ++i)
398  {
399  for (int j = 0; j < 4 ; ++j)
400  {
401  transform(j,i) = convert.result()[itmTransformation][i][j].asDouble();
402  }
403  }
404  Eigen::Affine3d inv_transform = transform.inverse();
405  Eigen::Quaterniond q(inv_transform.rotation()); //from stereo to rgb
406  q.normalize();
407  tf.qx = q.x();
408  tf.qy = q.y();
409  tf.qz = q.z();
410  tf.qw = q.w();
411  tf.tx = inv_transform.translation().x() / 1000.0;
412  tf.ty = inv_transform.translation().y() / 1000.0;
413  tf.tz = inv_transform.translation().z() / 1000.0;
414  return (true);
415  }
416  catch (NxLibException &ex)
417  {
418  ensensoExceptionHandling (ex, "getTFLeftToRGB");
419  return (false);
420  }
421  return (false);
422 }
423 
424 
425 bool pcl::EnsensoGrabber::getLastCalibrationPattern ( std::vector<int> &grid_size, double &grid_spacing,
426  std::vector<Eigen::Vector2d> &left_points,
427  std::vector<Eigen::Vector2d> &right_points,
428  Eigen::Affine3d &pose) const
429 {
430  // Lock
431  pattern_mutex_.lock ();
432  std::string stereo_pattern_json = last_stereo_pattern_;
433  pose = last_pattern_pose_;
434  pattern_mutex_.unlock ();
435  // Process
436  if ( stereo_pattern_json.empty() )
437  return false;
438  // Use NxLib JSON API to extract the Raw Stereo Pattern information
439  NxLibItem stereo_pattern ("/tmpStereoPattern");
440  stereo_pattern.setJson(stereo_pattern_json);
441  // Get pattern details
442  grid_size.resize(2);
443  grid_size[0] = stereo_pattern[itmPattern][itmGridSize][0].asInt();
444  grid_size[1] = stereo_pattern[itmPattern][itmGridSize][1].asInt();
445  grid_spacing = stereo_pattern[itmPattern][itmGridSpacing].asDouble();
446  int rows = grid_size[0];
447  int cols = grid_size[1];
448  left_points.resize(rows*cols);
449  right_points.resize(rows*cols);
450  for (uint i = 0; i < left_points.size(); ++i)
451  {
452  left_points[i][0] = stereo_pattern[itmPoints][0][i][0].asDouble();
453  left_points[i][1] = stereo_pattern[itmPoints][0][i][1].asDouble();
454  right_points[i][0] = stereo_pattern[itmPoints][1][i][0].asDouble();
455  right_points[i][1] = stereo_pattern[itmPoints][1][i][1].asDouble();
456  }
457  stereo_pattern.erase();
458  return true;
459 }
460 
461 float pcl::EnsensoGrabber::getFramesPerSecond () const
462 {
463  boost::mutex::scoped_lock lock (fps_mutex_);
464  return fps_;
465 }
466 
467 std::string pcl::EnsensoGrabber::getName () const
468 {
469  return ("EnsensoGrabber");
470 }
471 
472 std::string pcl::EnsensoGrabber::getOpenCVType (const int channels,
473  const int bpe,
474  const bool isFlt)
475 {
476  int bits = bpe * 8;
477  char type = isFlt ? 'F' : (bpe > 3 ? 'S' : 'U');
478  return (boost::str (boost::format ("CV_%i%cC%i") % bits % type % channels));
479 }
480 
481 pcl::uint64_t pcl::EnsensoGrabber::getPCLStamp (const double ensenso_stamp)
482 {
483 #if defined _WIN32 || defined _WIN64
484  return (ensenso_stamp * 1000000.0);
485 #else
486  return ( (ensenso_stamp - 11644473600.0) * 1000000.0);
487 #endif
488 }
489 
490 int pcl::EnsensoGrabber::getPatternCount () const
491 {
492  return ( (*root_)[itmParameters][itmPatternCount].asInt ());
493 }
494 
495 bool pcl::EnsensoGrabber::grabSingleCloud (pcl::PointCloud<pcl::PointXYZ> &cloud)
496 {
497  if (!device_open_)
498  return (false);
499 
500  if (running_)
501  return (false);
502 
503  try
504  {
505  NxLibCommand (cmdCapture).execute ();
506  // Stereo matching task
507  NxLibCommand (cmdComputeDisparityMap).execute ();
508  // Convert disparity map into XYZ data for each pixel
509  NxLibCommand (cmdComputePointMap).execute ();
510  // Get info about the computed point map and copy it into a std::vector
511  double timestamp;
512  std::vector<float> pointMap;
513  int width, height;
514  camera_[itmImages][itmRaw][itmLeft].getBinaryDataInfo (0, 0, 0, 0, 0, &timestamp); // Get raw image timestamp
515  camera_[itmImages][itmPointMap].getBinaryDataInfo (&width, &height, 0, 0, 0, 0);
516  camera_[itmImages][itmPointMap].getBinaryData (pointMap, 0);
517  // Copy point cloud and convert to meters
518  cloud.header.stamp = getPCLStamp (timestamp);
519  cloud.resize (height * width);
520  cloud.width = width;
521  cloud.height = height;
522  cloud.is_dense = false;
523  // Copy data into the point cloud (and convert millimeters to meters)
524  for (size_t i = 0; i < pointMap.size (); i += 3)
525  {
526  cloud.points[i / 3].x = pointMap[i] / 1000.0;
527  cloud.points[i / 3].y = pointMap[i + 1] / 1000.0;
528  cloud.points[i / 3].z = pointMap[i + 2] / 1000.0;
529  }
530  return (true);
531  }
532  catch (NxLibException &ex)
533  {
534  ensensoExceptionHandling (ex, "grabSingleCloud");
535  return (false);
536  }
537 }
538 
539 bool pcl::EnsensoGrabber::isRunning () const
540 {
541  return (running_);
542 }
543 
544 bool pcl::EnsensoGrabber::isTcpPortOpen () const
545 {
546  return (tcp_open_);
547 }
548 
549 bool pcl::EnsensoGrabber::jsonToMatrix (const std::string json, Eigen::Affine3d &matrix) const
550 {
551  try
552  {
553  NxLibCommand convert (cmdConvertTransformation);
554  convert.parameters ()[itmTransformation].setJson (json);
555  convert.execute ();
556  Eigen::Affine3d tmp (Eigen::Affine3d::Identity ());
557  // Rotation
558  tmp.linear ().col (0) = Eigen::Vector3d (convert.result ()[itmTransformation][0][0].asDouble (),
559  convert.result ()[itmTransformation][0][1].asDouble (),
560  convert.result ()[itmTransformation][0][2].asDouble ());
561  tmp.linear ().col (1) = Eigen::Vector3d (convert.result ()[itmTransformation][1][0].asDouble (),
562  convert.result ()[itmTransformation][1][1].asDouble (),
563  convert.result ()[itmTransformation][1][2].asDouble ());
564  tmp.linear ().col (2) = Eigen::Vector3d (convert.result ()[itmTransformation][2][0].asDouble (),
565  convert.result ()[itmTransformation][2][1].asDouble (),
566  convert.result ()[itmTransformation][2][2].asDouble ());
567  // Translation
568  tmp.translation () = Eigen::Vector3d (convert.result ()[itmTransformation][3][0].asDouble (),
569  convert.result ()[itmTransformation][3][1].asDouble (),
570  convert.result ()[itmTransformation][3][2].asDouble ());
571  matrix = tmp;
572  return (true);
573  }
574  catch (NxLibException &ex)
575  {
576  ensensoExceptionHandling (ex, "jsonToMatrix");
577  return (false);
578  }
579 }
580 
581 bool pcl::EnsensoGrabber::matrixToJson (const Eigen::Affine3d &matrix, std::string &json,
582  const bool pretty_format) const
583 {
584  try
585  {
586  NxLibCommand convert (cmdConvertTransformation);
587  // Rotation
588  convert.parameters ()[itmTransformation][0][0].set (matrix.linear ().col (0)[0]);
589  convert.parameters ()[itmTransformation][0][1].set (matrix.linear ().col (0)[1]);
590  convert.parameters ()[itmTransformation][0][2].set (matrix.linear ().col (0)[2]);
591  convert.parameters ()[itmTransformation][0][3].set (0.0);
592  convert.parameters ()[itmTransformation][1][0].set (matrix.linear ().col (1)[0]);
593  convert.parameters ()[itmTransformation][1][1].set (matrix.linear ().col (1)[1]);
594  convert.parameters ()[itmTransformation][1][2].set (matrix.linear ().col (1)[2]);
595  convert.parameters ()[itmTransformation][1][3].set (0.0);
596  convert.parameters ()[itmTransformation][2][0].set (matrix.linear ().col (2)[0]);
597  convert.parameters ()[itmTransformation][2][1].set (matrix.linear ().col (2)[1]);
598  convert.parameters ()[itmTransformation][2][2].set (matrix.linear ().col (2)[2]);
599  convert.parameters ()[itmTransformation][2][3].set (0.0);
600  // Translation
601  convert.parameters ()[itmTransformation][3][0].set (matrix.translation ()[0]);
602  convert.parameters ()[itmTransformation][3][1].set (matrix.translation ()[1]);
603  convert.parameters ()[itmTransformation][3][2].set (matrix.translation ()[2]);
604  convert.parameters ()[itmTransformation][3][3].set (1.0);
605  convert.execute ();
606  json = convert.result ()[itmTransformation].asJson (pretty_format);
607  return (true);
608  }
609  catch (NxLibException &ex)
610  {
611  ensensoExceptionHandling (ex, "matrixToJson");
612  return (false);
613  }
614 }
615 
616 bool pcl::EnsensoGrabber::openDevice (std::string serial)
617 {
618  if (device_open_)
619  PCL_THROW_EXCEPTION (pcl::IOException, "Cannot open multiple devices!");
620  PCL_INFO ("Opening Ensenso stereo camera S/N: %s\n", serial.c_str());
621  try
622  {
623  // Create a pointer referencing the camera's tree item, for easier access:
624  camera_ = (*root_)[itmCameras][itmBySerialNo][serial];
625  if (!camera_.exists () || camera_[itmType] != valStereo)
626  PCL_THROW_EXCEPTION (pcl::IOException, "Please connect a single stereo camera to your computer!");
627  if (!(*root_)[itmCameras][itmBySerialNo][serial][itmStatus][itmAvailable].asBool())
628  PCL_THROW_EXCEPTION (pcl::IOException, "The device cannot be opened.");
629  NxLibCommand open (cmdOpen);
630  open.parameters ()[itmCameras] = camera_[itmSerialNumber].asString ();
631  open.execute ();
632  }
633  catch (NxLibException &ex)
634  {
635  ensensoExceptionHandling (ex, "openDevice");
636  return (false);
637  }
638  device_open_ = true;
639  return (true);
640 }
641 
642 bool pcl::EnsensoGrabber::openMonoDevice (std::string serial)
643 {
644  if (mono_device_open_)
645  PCL_THROW_EXCEPTION (pcl::IOException, "Cannot open multiple devices!");
646  PCL_INFO ("Opening Ensenso mono camera S/N: %s\n", serial.c_str());
647  try
648  {
649  // Create a pointer referencing the camera's tree item, for easier access:
650  monocam_ = (*root_)[itmCameras][itmBySerialNo][serial];
651  if (!monocam_.exists () || monocam_[itmType] != valMonocular)
652  PCL_THROW_EXCEPTION (pcl::IOException, "Please connect a single mono camera to your computer!");
653  if (!(*root_)[itmCameras][itmBySerialNo][serial][itmStatus][itmAvailable].asBool())
654  PCL_THROW_EXCEPTION (pcl::IOException, "The device cannot be opened.");
655  NxLibCommand open (cmdOpen);
656  open.parameters ()[itmCameras] = monocam_[itmSerialNumber].asString ();
657  open.execute ();
658  }
659  catch (NxLibException &ex)
660  {
661  ensensoExceptionHandling (ex, "openMonoDevice");
662  return (false);
663  }
664  mono_device_open_ = true;
665  return (true);
666 }
667 
668 bool pcl::EnsensoGrabber::openTcpPort (const int port)
669 {
670  try
671  {
672  nxLibOpenTcpPort (port);
673  tcp_open_ = true;
674  }
675  catch (NxLibException &ex)
676  {
677  ensensoExceptionHandling (ex, "openTcpPort");
678  return (false);
679  }
680  return (true);
681 }
682 
683 void pcl::EnsensoGrabber::processGrabbing ()
684 {
685  bool continue_grabbing = running_;
686  if (use_rgb_)
687  {
688  Transform tf;
690  }
691  while (continue_grabbing)
692  {
693  try
694  {
695  // Publish cloud / images
696  bool need_cloud = num_slots<sig_cb_ensenso_point_cloud> () > 0;
697  bool need_cloud_rgb = num_slots<sig_cb_ensenso_point_cloud_rgb> () > 0;
698  bool need_images = num_slots<sig_cb_ensenso_images> () > 0;
699  bool need_images_rgb = num_slots<sig_cb_ensenso_images_rgb> () > 0;
700  bool need_depth = num_slots<sig_cb_ensenso_image_depth> () > 0;
701 
702  if (need_cloud || need_cloud_rgb || need_images || need_images_rgb || need_depth)
703  {
704  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr rgb_cloud (new pcl::PointCloud<pcl::PointXYZRGBA>);
705  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
706 
707  boost::shared_ptr<PairOfImages> images_raw (new PairOfImages);
708  boost::shared_ptr<PairOfImages> images_rect (new PairOfImages);
709  boost::shared_ptr<PairOfImages> images_rgb (new PairOfImages);
710  pcl::PCLGenImage<float>::Ptr depth_image (new pcl::PCLGenImage<float>);
711  // Update FPS
712  static double last = pcl::getTime ();
713  double now = pcl::getTime ();
714  fps_mutex_.lock ();
715  fps_ = float( 1.0 / (now - last) );
716  fps_mutex_.unlock ();
717  last = now;
718 
719  triggerCameras();
720 
721  if (!running_) {
722  return;
723  }
724  // get timestamp of the acquired image
725  camera_[itmImages][itmRaw][itmLeft].getBinaryDataInfo (0, 0, 0, 0, 0, &timestamp_);
726  bool rectified = false;
727  // Gather point cloud
728  if (need_cloud || need_cloud_rgb || need_depth)
729  {
730  // Stereo matching task
731  NxLibCommand (cmdComputeDisparityMap).execute ();
732  rectified = true;
733  // Convert disparity map into XYZ data for each pixel
734  if (use_rgb_)
735  {
736  getDepthDataRGB(rgb_cloud, depth_image);
737 
738  }
739  else
740  {
741  getDepthData(cloud, depth_image);
742  }
743  }
744  // Gather images
745  pattern_mutex_.lock ();
746  last_stereo_pattern_ = std::string("");
747  pattern_mutex_.unlock ();
748  if (need_images || need_images_rgb)
749  {
750  if (!rectified)
751  {
752  NxLibCommand rectify_images (cmdRectifyImages);
753  rectify_images.execute ();
754  }
755  bool collected_pattern = false;
756  // Try to collect calibration pattern and overlay it to the raw image
757  // If store_calibration_pattern_ is true, it estimates the pattern pose and store it at last_stereo_pattern_
758  if (store_calibration_pattern_)
759  discardPatterns();
760  if(find_pattern_)
761  {
762  try
763  {
764  NxLibCommand collect_pattern (cmdCollectPattern);
765  collect_pattern.parameters ()[itmBuffer].set (store_calibration_pattern_);
766  collect_pattern.parameters ()[itmDecodeData].set (true);
767  collect_pattern.execute ();
768  if (store_calibration_pattern_)
769  {
770  // estimatePatternPose() takes ages, so, we use the raw data
771  // Raw stereo pattern info
772  NxLibCommand get_pattern_buffers (cmdGetPatternBuffers);
773  get_pattern_buffers.execute ();
774  pattern_mutex_.lock ();
775  last_stereo_pattern_ = get_pattern_buffers.result()[itmStereo][0].asJson (true);
776  // Pattern pose
778  pattern_mutex_.unlock ();
779  }
780  collected_pattern = true;
781  }
782  catch (const NxLibException &ex)
783  {
784  // If pattern collection failed, fall back to reading the raw images anyway.
785  }
786  }
787  if (find_pattern_ && collected_pattern)
788  {
789  if (use_rgb_ && need_images_rgb)
790  {
791  getImage(monocam_[itmImages][itmWithOverlay], images_rgb->first);
792  getImage(monocam_[itmImages][itmRectified], images_rgb->second);
793  }
794  //images with overlay
795  getImage(camera_[itmImages][itmWithOverlay][itmLeft], images_raw->first);
796  getImage(camera_[itmImages][itmWithOverlay][itmRight], images_raw->second);
797  }
798  else
799  {
800  if (use_rgb_ && need_images_rgb)
801  {
802  //get RGB raw and rect image
803  getImage(monocam_[itmImages][itmRaw], images_rgb->first);
804  getImage(monocam_[itmImages][itmRectified], images_rgb->second);
805  }
806  // get left / right raw images
807  getImage(camera_[itmImages][itmRaw][itmLeft], images_raw->first);
808  getImage(camera_[itmImages][itmRaw][itmRight], images_raw->second);
809  }
810  //get rectified images
811  getImage(camera_[itmImages][itmRectified][itmLeft], images_rect->first);
812  getImage(camera_[itmImages][itmRectified][itmRight], images_rect->second);
813  }
814 
815  // Publish signals
816  if (need_cloud)
817  {
818  point_cloud_signal_->operator () (cloud);
819  }
820  if (need_cloud_rgb)
821  {
822  point_cloud_rgb_signal_->operator () (rgb_cloud);
823  }
824  if (need_images)
825  {
826  images_signal_->operator () (images_raw, images_rect);
827  }
828  if (need_images_rgb)
829  {
830  images_rgb_signal_->operator () (images_raw, images_rect, images_rgb);
831  }
832  if (need_depth)
833  {
834  image_depth_signal_->operator () (depth_image);
835  }
836 
837  }
838  continue_grabbing = running_;
839  }
840  catch (NxLibException &ex)
841  {
842  NxLibItem result = NxLibItem()[itmExecute][itmResult];
843  if (ex.getErrorCode() == NxLibExecutionFailed && result[itmErrorSymbol].asString() == "CUDA")
844  {
845  PCL_WARN("Encountered error due to use of CUDA. Disabling CUDA support.\n");
846  setEnableCUDA(false);
847  ensensoExceptionHandling (ex, "processGrabbing");
848  continue;
849  }
850  ensensoExceptionHandling (ex, "processGrabbing");
851  }
852  }
853 }
854 
855 void pcl::EnsensoGrabber::getDepthDataRGB(const pcl::PointCloud<pcl::PointXYZRGBA>::Ptr& cloud, const pcl::PCLGenImage<float>::Ptr& depth_image)
856 {
857  NxLibCommand renderPM (cmdRenderPointMap);
858  renderPM.parameters()[itmCamera].set(monocam_[itmSerialNumber].asString());
859  renderPM.parameters()[itmNear].set(near_plane_);
860  renderPM.parameters()[itmFar].set(far_plane_);
861  renderPM.execute ();
862 
863  int width, height;
864  std::vector<float> point_map;
865  std::vector<char> rgb_data;
866  (*root_)[itmImages][itmRenderPointMap].getBinaryDataInfo (&width, &height, 0, 0, 0, 0);
867  (*root_)[itmImages][itmRenderPointMap].getBinaryData (point_map, 0);
868  (*root_)[itmImages][itmRenderPointMapTexture].getBinaryDataInfo (&width, &height, 0, 0, 0, 0);
869  (*root_)[itmImages][itmRenderPointMapTexture].getBinaryData (rgb_data, 0);
870  bool get_cloud = (num_slots<sig_cb_ensenso_point_cloud_rgb> () > 0);
871  bool get_depth = (num_slots<sig_cb_ensenso_image_depth> () > 0);
872  // Copy the point map and convert to meters
873  if (get_cloud)
874  {
875  cloud->header.stamp = getPCLStamp (timestamp_);
876  cloud->points.resize (height * width);
877  cloud->width = width;
878  cloud->height = height;
879  cloud->is_dense = false;
880  }//copy depth info to image
881  if (get_depth)
882  {
883  depth_image->header.stamp = getPCLStamp (timestamp_);
884  depth_image->width = width;
885  depth_image->height = height;
886  depth_image->data.resize (width * height);
887  depth_image->encoding = "CV_32FC1";
888  }
889  for (size_t i = 0; i < point_map.size (); i += 3)
890  {
891  if (get_depth)
892  {
893  depth_image->data[i / 3] = point_map[i + 2] / 1000.0;
894  }
895  if (get_cloud)
896  {
897  cloud->points[i / 3].x = point_map[i] / 1000.0 - tf_left_to_rgb_.tx;
898  cloud->points[i / 3].y = point_map[i + 1] / 1000.0 - tf_left_to_rgb_.ty;
899  cloud->points[i / 3].z = point_map[i + 2] / 1000.0;
900  }
901  }
902  if (get_cloud)
903  {
904  for (size_t i = 0; i < rgb_data.size (); i += 4)
905  {
906  cloud->points[i / 4].r = rgb_data[i];
907  cloud->points[i / 4].g = rgb_data[i + 1];
908  cloud->points[i / 4].b = rgb_data[i + 2];
909  cloud->points[i / 4].a = rgb_data[i + 3];
910  }
911  }
912 }
913 
914 void pcl::EnsensoGrabber::getDepthData(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud, const pcl::PCLGenImage<float>::Ptr& depth_image)
915 {
916  NxLibCommand (cmdComputePointMap).execute ();
917  int width, height;
918  std::vector<float> point_map;
919  camera_[itmImages][itmPointMap].getBinaryDataInfo (&width, &height, 0, 0, 0, 0);
920  camera_[itmImages][itmPointMap].getBinaryData (point_map, 0);
921 
922  bool get_cloud = (num_slots<sig_cb_ensenso_point_cloud> () > 0);
923  bool get_depth = (num_slots<sig_cb_ensenso_image_depth> () > 0);
924  // Copy the point map and convert to meters
925  if (get_cloud)
926  {
927  cloud->header.stamp = getPCLStamp (timestamp_);
928  cloud->points.resize (height * width);
929  cloud->width = width;
930  cloud->height = height;
931  cloud->is_dense = false;
932  }
933  //copy depth info to image
934  if (get_depth)
935  {
936  depth_image->header.stamp = getPCLStamp (timestamp_);
937  depth_image->width = width;
938  depth_image->height = height;
939  depth_image->data.resize (width * height);
940  depth_image->encoding = "CV_32FC1";
941  }
942  for (size_t i = 0; i < point_map.size (); i += 3)
943  {
944  if (get_depth)
945  {
946  depth_image->data[i / 3] = point_map[i + 2] / 1000.0;
947  }
948  if (get_cloud)
949  {
950  cloud->points[i / 3].x = point_map[i] / 1000.0;
951  cloud->points[i / 3].y = point_map[i + 1] / 1000.0;
952  cloud->points[i / 3].z = point_map[i + 2] / 1000.0;
953  }
954  }
955 }
956 
957 
958 void pcl::EnsensoGrabber::triggerCameras ()
959 {
960  NxLibCommand (cmdTrigger).execute();
961  NxLibCommand retrieve (cmdRetrieve);
962  while (running_)
963  {
964  try {
965  retrieve.parameters()[itmTimeout] = 1000; // wait up to 1000 ms for the images to arrive
966  retrieve.execute();
967  bool retrievedIR = retrieve.result()[camera_[itmSerialNumber].asString().c_str()][itmRetrieved].asBool();
968  bool retrievedRGB = use_rgb_ ? retrieve.result()[monocam_[itmSerialNumber].asString().c_str()][itmRetrieved].asBool() : true;
969  if (retrievedIR && retrievedRGB)
970  {
971  break;
972  }
973  }
974  catch (const NxLibException &ex) {
975  // To ignore timeout exception and others
976  }
977  }
978 }
979 
980 void pcl::EnsensoGrabber::getImage(const NxLibItem& image_node, pcl::PCLGenImage<pcl::uint8_t>& image_out)
981 {
982  int width, height, channels, bpe;
983  bool isFlt;
984  image_node.getBinaryDataInfo (&width, &height, &channels, &bpe, &isFlt, 0);
985  image_out.header.stamp = getPCLStamp (timestamp_);
986  image_out.width = width;
987  image_out.height = height;
988  image_out.data.resize (width * height * sizeof(float));
989  image_out.encoding = getOpenCVType (channels, bpe, isFlt);
990  image_node.getBinaryData (image_out.data.data (), image_out.data.size (), 0, 0);
991 }
992 
993 void pcl::EnsensoGrabber::storeCalibrationPattern (const bool enable)
994 {
995  store_calibration_pattern_ = enable;
996  if (enable)
997  PCL_WARN ("Storing calibration pattern. This will clean the pattern buffer continuously\n");
998 }
999 
1000 bool pcl::EnsensoGrabber::restoreDefaultConfiguration () const
1001 {
1002  bool result = true;
1003  result &= setAutoBlackLevel();
1004  result &= setAutoExposure();
1005  result &= setAutoGain();
1006  result &= setBinning();
1007  result &= setBlackLevelOffset();
1008  result &= setExposure();
1009  result &= setFlexView();
1010  result &= setFrontLight();
1011  result &= setGain();
1012  result &= setGainBoost();
1013  result &= setHardwareGamma();
1014  result &= setHdr();
1015  result &= setMinimumDisparity();
1016  result &= setNumberOfDisparities();
1017  result &= setOptimizationProfile();
1018  result &= setPixelClock();
1019  result &= setProjector();
1020  result &= setScaling();
1021  result &= setTargetBrightness();
1022  result &= setTriggerMode();
1024  result &= setRGBTriggerDelay();
1025  result &= setEnableCUDA();
1026  return result;
1027 }
1028 
1029 bool pcl::EnsensoGrabber::setEnableCUDA (const bool enable) const
1030 {
1031  #ifdef CUDA_IMPLEMENTED
1032  try
1033  {
1034  if ((*root_)[itmParameters][itmCUDA][itmAvailable].asBool())
1035  {
1036  (*root_)[itmParameters][itmCUDA][itmEnabled].set (enable);
1037  }
1038  else
1039  {
1040  PCL_WARN("CUDA is not supported on this machine.");
1041  }
1042  }
1043  catch (NxLibException &ex)
1044  {
1045  ensensoExceptionHandling (ex, "setEnableCUDA");
1046  return (false);
1047  }
1048  #else
1049  PCL_WARN("CUDA is not supported. Upgrade EnsensoSDK to Version >= 2.1.7 in order to use CUDA.");
1050  #endif
1051  return (true);
1052 }
1053 
1054 bool pcl::EnsensoGrabber::setFindPattern (const bool enable)
1055 {
1056  find_pattern_ = enable;
1057  return (true);
1058 }
1059 
1060 
1061 bool pcl::EnsensoGrabber::setAutoBlackLevel (const bool enable) const
1062 {
1063  if (!device_open_)
1064  return (false);
1065  try
1066  {
1067  camera_[itmParameters][itmCapture][itmAutoBlackLevel].set (enable);
1068  }
1069  catch (NxLibException &ex)
1070  {
1071  ensensoExceptionHandling (ex, "setAutoBlackLevel");
1072  return (false);
1073  }
1074  return (true);
1075 }
1076 
1077 bool pcl::EnsensoGrabber::setAutoExposure (const bool enable) const
1078 {
1079  if (!device_open_)
1080  return (false);
1081  try
1082  {
1083  camera_[itmParameters][itmCapture][itmAutoExposure].set (enable);
1084  }
1085  catch (NxLibException &ex)
1086  {
1087  ensensoExceptionHandling (ex, "setAutoExposure");
1088  return (false);
1089  }
1090  return (true);
1091 }
1092 
1093 bool pcl::EnsensoGrabber::setAutoGain (const bool enable) const
1094 {
1095  if (!device_open_)
1096  return (false);
1097  try
1098  {
1099  camera_[itmParameters][itmCapture][itmAutoGain].set (enable);
1100  }
1101  catch (NxLibException &ex)
1102  {
1103  ensensoExceptionHandling (ex, "setAutoGain");
1104  return (false);
1105  }
1106  return (true);
1107 }
1108 
1109 bool pcl::EnsensoGrabber::setBinning (const int binning) const
1110 {
1111  if (!device_open_)
1112  return (false);
1113  try
1114  {
1115  camera_[itmParameters][itmCapture][itmBinning].set (binning);
1116  }
1117  catch (NxLibException &ex)
1118  {
1119  // TODO: Handle better setBinning exceptions
1120  //~ ensensoExceptionHandling (ex, "setBinning");
1121  return (false);
1122  }
1123  return (true);
1124 }
1125 
1126 bool pcl::EnsensoGrabber::setBlackLevelOffset (const float offset) const
1127 {
1128  if (!device_open_)
1129  return (false);
1130  try
1131  {
1132  camera_[itmParameters][itmCapture][itmBlackLevelOffset].set (offset);
1133  }
1134  catch (NxLibException &ex)
1135  {
1136  ensensoExceptionHandling (ex, "setBlackLevelOffset");
1137  return (false);
1138  }
1139  return (true);
1140 }
1141 
1142 bool pcl::EnsensoGrabber::setExposure (const float exposure) const
1143 {
1144  if (!device_open_)
1145  return (false);
1146  try
1147  {
1148  camera_[itmParameters][itmCapture][itmExposure].set (exposure);
1149  }
1150  catch (NxLibException &ex)
1151  {
1152  ensensoExceptionHandling (ex, "setExposure");
1153  return (false);
1154  }
1155  return (true);
1156 }
1157 
1158 bool pcl::EnsensoGrabber::setFlexView (const bool enable, const int imagepairs) const
1159 {
1160  if (!device_open_)
1161  return (false);
1162  try
1163  {
1164  if (enable && 2 <= imagepairs && imagepairs <= 8)
1165  camera_[itmParameters][itmCapture][itmFlexView].set (imagepairs);
1166  else
1167  camera_[itmParameters][itmCapture][itmFlexView].set (false);
1168  }
1169  catch (NxLibException &ex)
1170  {
1171  // TODO: Handle better setFlexView exceptions
1172  //~ ensensoExceptionHandling (ex, "setFlexView");
1173  return (false);
1174  }
1175  return (true);
1176 }
1177 
1178 bool pcl::EnsensoGrabber::setFrontLight (const bool enable) const
1179 {
1180  if (!device_open_)
1181  return (false);
1182  try
1183  {
1184  camera_[itmParameters][itmCapture][itmFrontLight].set (enable);
1185  }
1186  catch (NxLibException &ex)
1187  {
1188  ensensoExceptionHandling (ex, "setFrontLight");
1189  return (false);
1190  }
1191  return (true);
1192 }
1193 
1194 bool pcl::EnsensoGrabber::setGain (const float gain) const
1195 {
1196  if (!device_open_)
1197  return (false);
1198  try
1199  {
1200  camera_[itmParameters][itmCapture][itmGain].set (gain);
1201  }
1202  catch (NxLibException &ex)
1203  {
1204  ensensoExceptionHandling (ex, "setGain");
1205  return (false);
1206  }
1207  return (true);
1208 }
1209 
1210 bool pcl::EnsensoGrabber::setGainBoost (const bool enable) const
1211 {
1212  if (!device_open_)
1213  return (false);
1214  try
1215  {
1216  camera_[itmParameters][itmCapture][itmGainBoost].set (enable);
1217  }
1218  catch (NxLibException &ex)
1219  {
1220  ensensoExceptionHandling (ex, "setGainBoost");
1221  return (false);
1222  }
1223  return (true);
1224 }
1225 
1226 bool pcl::EnsensoGrabber::setGridSpacing (const double grid_spacing) const
1227 {
1228  try
1229  {
1230  (*root_)[itmParameters][itmPattern][itmGridSpacing].set (grid_spacing);
1231  }
1232  catch (NxLibException &ex)
1233  {
1234  ensensoExceptionHandling (ex, "setGridSpacing");
1235  return (false);
1236  }
1237  return (true);
1238 }
1239 
1240 bool pcl::EnsensoGrabber::setHardwareGamma (const bool enable) const
1241 {
1242  if (!device_open_)
1243  return (false);
1244  try
1245  {
1246  camera_[itmParameters][itmCapture][itmHardwareGamma].set (enable);
1247  }
1248  catch (NxLibException &ex)
1249  {
1250  ensensoExceptionHandling (ex, "setHardwareGamma");
1251  return (false);
1252  }
1253  return (true);
1254 }
1255 
1256 bool pcl::EnsensoGrabber::setHdr (const bool enable) const
1257 {
1258  if (!device_open_)
1259  return (false);
1260  try
1261  {
1262  camera_[itmParameters][itmCapture][itmHdr].set (enable);
1263  }
1264  catch (NxLibException &ex)
1265  {
1266  ensensoExceptionHandling (ex, "setHdr");
1267  return (false);
1268  }
1269  return (true);
1270 }
1271 
1272 bool pcl::EnsensoGrabber::setMinimumDisparity (const int disparity) const
1273 {
1274  if (!device_open_)
1275  return (false);
1276  try
1277  {
1278  camera_[itmParameters][itmDisparityMap][itmStereoMatching][itmMinimumDisparity].set (disparity);
1279  }
1280  catch (NxLibException &ex)
1281  {
1282  ensensoExceptionHandling (ex, "setMinimumDisparity");
1283  return (false);
1284  }
1285  return (true);
1286 }
1287 
1288 bool pcl::EnsensoGrabber::setNumberOfDisparities (const int number) const
1289 {
1290  if (!device_open_)
1291  return (false);
1292  try
1293  {
1294  camera_[itmParameters][itmDisparityMap][itmStereoMatching][itmNumberOfDisparities].set (number);
1295  }
1296  catch (NxLibException &ex)
1297  {
1298  ensensoExceptionHandling (ex, "setNumberOfDisparities");
1299  return (false);
1300  }
1301  return (true);
1302 }
1303 
1304 bool pcl::EnsensoGrabber::setOptimizationProfile (const std::string profile) const
1305 {
1306  if (!device_open_)
1307  return (false);
1308  try
1309  {
1310  camera_[itmParameters][itmDisparityMap][itmStereoMatching][itmOptimizationProfile].set (profile);
1311  }
1312  catch (NxLibException &ex)
1313  {
1314  ensensoExceptionHandling (ex, "setOptimizationProfile");
1315  return (false);
1316  }
1317  return (true);
1318 }
1319 
1320 bool pcl::EnsensoGrabber::setPixelClock (const int pixel_clock) const
1321 {
1322  if (!device_open_)
1323  return (false);
1324  try
1325  {
1326  camera_[itmParameters][itmCapture][itmPixelClock].set (pixel_clock);
1327  }
1328  catch (NxLibException &ex)
1329  {
1330  ensensoExceptionHandling (ex, "setPixelClock");
1331  return (false);
1332  }
1333  return (true);
1334 }
1335 
1336 bool pcl::EnsensoGrabber::setProjector (const bool enable) const
1337 {
1338  if (!device_open_)
1339  return (false);
1340  try
1341  {
1342  camera_[itmParameters][itmCapture][itmProjector].set (enable);
1343  }
1344  catch (NxLibException &ex)
1345  {
1346  ensensoExceptionHandling (ex, "setProjector");
1347  return (false);
1348  }
1349  return (true);
1350 }
1351 
1352 bool pcl::EnsensoGrabber::setTargetBrightness (const int target) const
1353 {
1354  if (!device_open_)
1355  return (false);
1356  try
1357  {
1358  camera_[itmParameters][itmCapture][itmTargetBrightness].set (target);
1359  }
1360  catch (NxLibException &ex)
1361  {
1362  ensensoExceptionHandling (ex, "setTargetBrightness");
1363  return (false);
1364  }
1365  return (true);
1366 }
1367 
1368 bool pcl::EnsensoGrabber::setScaling (const float scaling) const
1369 {
1370  if (!device_open_)
1371  return (false);
1372  try
1373  {
1374  camera_[itmParameters][itmDisparityMap][itmScaling].set (scaling);
1375  }
1376  catch (NxLibException &ex)
1377  {
1378  ensensoExceptionHandling (ex, "setScaling");
1379  return (false);
1380  }
1381  return (true);
1382 }
1383 
1384 bool pcl::EnsensoGrabber::setTriggerMode (const std::string mode) const
1385 {
1386  if (!device_open_)
1387  return (false);
1388  try
1389  {
1390  camera_[itmParameters][itmCapture][itmTriggerMode].set (mode);
1391  }
1392  catch (NxLibException &ex)
1393  {
1394  ensensoExceptionHandling (ex, "setTriggerMode");
1395  return (false);
1396  }
1397  return (true);
1398 }
1399 
1400 bool pcl::EnsensoGrabber::setRGBTriggerDelay (const float delay) const
1401 {
1402  if (!use_rgb_)
1403  {
1404  return (true);
1405  }
1406  if (!mono_device_open_)
1407  {
1408  return (false);
1409  }
1410  try {
1411  monocam_[itmParameters][itmCapture][itmTriggerDelay] = delay;
1412  }
1413  catch (NxLibException &ex) {
1414  ensensoExceptionHandling (ex, "setRGBTriggerDelay");
1415  return (false);
1416  }
1417  return (true);
1418 }
1419 
1420 bool pcl::EnsensoGrabber::setUseDisparityMapAreaOfInterest (const bool enable) const
1421 {
1422  if (!device_open_)
1423  return (false);
1424  try
1425  {
1426  camera_[itmParameters][itmCapture][itmUseDisparityMapAreaOfInterest].set (enable);
1427  }
1428  catch (NxLibException &ex)
1429  {
1430  ensensoExceptionHandling (ex, "setUseDisparityMapAreaOfInterest");
1431  return (false);
1432  }
1433  return (true);
1434 }
1435 
1436 bool pcl::EnsensoGrabber::setUseOpenGL (const bool enable) const
1437 {
1438  if (!device_open_)
1439  return (false);
1440  try
1441  {
1442  (*root_)[itmParameters][itmRenderPointMap][itmUseOpenGL].set (enable);
1443  }
1444  catch (NxLibException &ex)
1445  {
1446  ensensoExceptionHandling (ex, "setUseOpenGL");
1447  return (false);
1448  }
1449  return (true);
1450 }
1451 
1452 
1453 bool pcl::EnsensoGrabber::setUseRGB (const bool enable)
1454 {
1455  use_rgb_ = enable;
1456  return (true);
1457 }
1458 
1459 bool pcl::EnsensoGrabber::setDepthChangeCost(const int changecost) const
1460 {
1461  if (!device_open_)
1462  return (false);
1463  try
1464  {
1465  camera_[itmParameters][itmDisparityMap][itmStereoMatching][itmDepthChangeCost].set (changecost);
1466  }
1467  catch (NxLibException &ex)
1468  {
1469  ensensoExceptionHandling (ex, "setDepthChangeCost");
1470  return (false);
1471  }
1472  return (true);
1473 }
1474 
1475 bool pcl::EnsensoGrabber::setDepthStepCost(const int stepcost) const
1476 {
1477  if (!device_open_)
1478  return (false);
1479  try
1480  {
1481  camera_[itmParameters][itmDisparityMap][itmStereoMatching][itmDepthStepCost].set (stepcost);
1482  }
1483  catch (NxLibException &ex)
1484  {
1485  ensensoExceptionHandling (ex, "setDepthStepCost");
1486  return (false);
1487  }
1488  return (true);
1489 }
1490 
1491 bool pcl::EnsensoGrabber::setShadowingThreshold(const int shadowingthreshold) const
1492 {
1493  if (!device_open_)
1494  return (false);
1495  try
1496  {
1497  camera_[itmParameters][itmDisparityMap][itmStereoMatching][itmShadowingThreshold].set (shadowingthreshold);
1498  }
1499  catch (NxLibException &ex)
1500  {
1501  ensensoExceptionHandling (ex, "setShadowingThreshold");
1502  return (false);
1503  }
1504  return (true);
1505 }
1506 
1507 bool pcl::EnsensoGrabber::setUniquenessRatio(const int ratio) const
1508 {
1509  if (!device_open_)
1510  return (false);
1511  try
1512  {
1513  camera_[itmParameters][itmDisparityMap][itmPostProcessing][itmUniquenessRatio].set (ratio);
1514  }
1515  catch (NxLibException &ex)
1516  {
1517  ensensoExceptionHandling (ex, "setUniquenessRatio");
1518  return (false);
1519  }
1520  return (true);
1521 }
1522 
1523 bool pcl::EnsensoGrabber::setMedianFilterRadius (const int radius) const
1524 {
1525  if (!device_open_)
1526  return (false);
1527  try
1528  {
1529  camera_[itmParameters][itmDisparityMap][itmPostProcessing][itmMedianFilterRadius].set (radius);
1530  }
1531  catch (NxLibException &ex)
1532  {
1533  ensensoExceptionHandling (ex, "setMedianFilterRadius");
1534  return (false);
1535  }
1536  return (true);
1537 }
1538 
1539 bool pcl::EnsensoGrabber::setSpeckleComponentThreshold (const int threshold) const
1540 {
1541  if (!device_open_)
1542  return (false);
1543  try
1544  {
1545  camera_[itmParameters][itmDisparityMap][itmPostProcessing][itmSpeckleRemoval][itmComponentThreshold].set (threshold);
1546  }
1547  catch (NxLibException &ex)
1548  {
1549  ensensoExceptionHandling (ex, "setComponentThreshold");
1550  return (false);
1551  }
1552  return (true);
1553 }
1554 
1555 bool pcl::EnsensoGrabber::setSpeckleRegionSize(const int regionsize) const
1556 {
1557  if (!device_open_)
1558  return (false);
1559  try
1560  {
1561  camera_[itmParameters][itmDisparityMap][itmPostProcessing][itmSpeckleRemoval][itmRegionSize].set (regionsize);
1562  }
1563  catch (NxLibException &ex)
1564  {
1565  ensensoExceptionHandling (ex, "setSpeckleRegionSize");
1566  return (false);
1567  }
1568  return (true);
1569 }
1570 
1571 bool pcl::EnsensoGrabber::setFillBorderSpread(const int maximumspread) const
1572 {
1573  if (!device_open_)
1574  return (false);
1575  try
1576  {
1577  camera_[itmParameters][itmDisparityMap][itmPostProcessing][itmFilling][itmBorderSpread].set (maximumspread);
1578  }
1579  catch (NxLibException &ex)
1580  {
1581  ensensoExceptionHandling (ex, "setFillBorderSpread");
1582  return (false);
1583  }
1584  return (true);
1585 }
1586 
1587 bool pcl::EnsensoGrabber::setFillRegionSize(const int regionsize) const
1588 {
1589  if (!device_open_)
1590  return (false);
1591  try
1592  {
1593  camera_[itmParameters][itmDisparityMap][itmPostProcessing][itmFilling][itmRegionSize].set (regionsize);
1594  }
1595  catch (NxLibException &ex)
1596  {
1597  ensensoExceptionHandling (ex, "setFillRegionSize");
1598  return (false);
1599  }
1600  return (true);
1601 }
1602 
1603 bool pcl::EnsensoGrabber::setSurfaceConnectivity(const int threshold) const
1604 {
1605  if (!device_open_)
1606  return (false);
1607  try
1608  {
1609  camera_[itmParameters][itmDisparityMap][itmPostProcessing][itmFilling][itmRegionSize].set (threshold);
1610  }
1611  catch (NxLibException &ex)
1612  {
1613  ensensoExceptionHandling (ex, "setSurfaceConnectivity");
1614  return (false);
1615  }
1616  return (true);
1617 }
1618 
1619 bool pcl::EnsensoGrabber::setNearPlane (const int near)
1620 {
1621  near_plane_ = near;
1622  return (true);
1623 }
1624 
1625 bool pcl::EnsensoGrabber::setFarPlane (const int far)
1626 {
1627  far_plane_ = far;
1628  return (true);
1629 }
1630 
1631 void pcl::EnsensoGrabber::start ()
1632 {
1633  if (isRunning ())
1634  return;
1635  if (!device_open_)
1636  openDevice (0);
1637  if(use_rgb_ && !mono_device_open_)
1638  openMonoDevice (0);
1639  fps_ = 0.0;
1640  running_ = true;
1641  grabber_thread_ = boost::thread (&pcl::EnsensoGrabber::processGrabbing, this);
1642 }
1643 
1644 void pcl::EnsensoGrabber::stop ()
1645 {
1646  if (running_)
1647  {
1648  running_ = false; // Stop processGrabbing () callback
1649  grabber_thread_.join ();
1650  }
1651 }
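The listing above ends here. As a quick orientation for readers of this page, the following is a minimal usage sketch; it is not part of ensenso_grabber.cpp. It assumes the grabber header is available on the include path, uses a placeholder camera serial number, and assumes the sig_cb_ensenso_point_cloud callback receives a pcl::PointCloud<pcl::PointXYZ>::Ptr, as suggested by the point_cloud_signal_ invocation in processGrabbing().

#include <iostream>
#include <boost/bind.hpp>
#include <boost/function.hpp>
#include <boost/thread.hpp>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
// #include "ensenso_grabber.h"   // exact include path depends on how this file is packaged

// Callback invoked by the grabber thread for every new point cloud
void onCloud (const pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud)
{
  std::cout << "Received cloud with " << cloud->size () << " points" << std::endl;
}

int main ()
{
  pcl::EnsensoGrabber grabber;
  grabber.enumDevices ();                         // list connected cameras
  grabber.openDevice ("150534");                  // placeholder serial number
  boost::function<void (const pcl::PointCloud<pcl::PointXYZ>::Ptr&)> cb = boost::bind (&onCloud, _1);
  grabber.registerCallback (cb);                  // registerCallback comes from the pcl::Grabber base class
  grabber.start ();                               // spawns processGrabbing () in a separate thread
  boost::this_thread::sleep (boost::posix_time::seconds (10));
  grabber.stop ();
  grabber.closeDevices ();
  return 0;
}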
bool getLastCalibrationPattern(std::vector< int > &grid_size, double &grid_spacing, std::vector< Eigen::Vector2d > &left_points, std::vector< Eigen::Vector2d > &right_points, Eigen::Affine3d &pose) const
Get the raw stereo pattern information and the pattern pose. Before using it enable the storeCalibrat...
boost::signals2::signal< sig_cb_ensenso_point_cloud_rgb > * point_cloud_rgb_signal_
Boost point cloud signal with RGB.
double decodePattern() const
Decodes the pattern grid size and thickness.
boost::signals2::signal< sig_cb_ensenso_images > * images_signal_
Boost images signal.
void getDepthDataRGB(const pcl::PointCloud< pcl::PointXYZRGBA >::Ptr &cloud, const pcl::PCLGenImage< float >::Ptr &depthimage)
Retrieve RGB depth data from NxLib.
bool matrixToJson(const Eigen::Affine3d &matrix, std::string &json, const bool pretty_format=true) const
Converts an Eigen::Affine3d into a JSON string transformation (4x4)
boost::signals2::signal< sig_cb_ensenso_point_cloud > * point_cloud_signal_
Boost point cloud signal.
bool grabSingleCloud(pcl::PointCloud< pcl::PointXYZ > &cloud)
Capture a single point cloud and store it.
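A minimal one-shot sketch (assuming a grabber that has been opened as in the usage example after the listing and is not currently streaming):

pcl::PointCloud<pcl::PointXYZ> cloud;
if (grabber.grabSingleCloud (cloud))   // returns false if no device is open or the grabber is already running
  std::cout << "Captured " << cloud.width << " x " << cloud.height << " points" << std::endl;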
double now()
bool setHdr(const bool enable=false) const
Enables the camera's high dynamic range function with a fixed, piece-wise linear response curve...
bool setFillRegionSize(const int regionsize) const
Defines an upper limit on the region size in pixels, up to which a region is accepted for filling...
bool getCameraInfo(std::string cam, sensor_msgs::CameraInfo &cam_info) const
Get meta information for a monocular camera.
static std::string getOpenCVType(const int channels, const int bpe, const bool isFlt)
Get OpenCV image type corresponding to the parameters given.
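For example, with the implementation shown in the listing above, getOpenCVType (3, 1, false) returns "CV_8UC3" and getOpenCVType (1, 4, true) returns "CV_32FC1".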
void processGrabbing()
Continuously asks for images and or point clouds data from the device and publishes them if available...
bool setNearPlane(const int near)
Specifies the minimum distance to the ViewPose below which surface elements will be excluded from the...
bool setGainBoost(const bool enable=false) const
Enables the camera's analog gain boost function.
bool setEnableCUDA(const bool enable=true) const
Enables CUDA support.
boost::mutex fps_mutex_
Mutual exclusion for FPS computation.
int enumDevices() const
Searches for available devices.
bool setFindPattern(const bool enable=true)
Controls whether the grabber will try to find the pattern in the scene.
bool setExposure(const float exposure=1.0) const
The current image exposure time.
std::string last_stereo_pattern_
Last raw stereo info of the detected pattern during processGrabbing()
std::vector< T > data
bool setFillBorderSpread(const int maximumspread) const
Defines which missing regions will be filled by setting a threshold on the maximum spread of the disp...
string cmd
bool setRGBTriggerDelay(const float delay=10) const
The delay between trigger of the stereo camera (and projector) and the RGB camera.
bool discardPatterns() const
Clears the pattern buffers of monocular and stereo pattern observations.
bool running_
Whether an Ensenso device is running or not.
bool setScaling(const float scaling=1.0) const
Scaling allows to reduce the camera resolution by an arbitrary non-integer factor during rectificatio...
bool setOptimizationProfile(const std::string profile="AlignedAndDiagonal") const
The type of Semi-Global-Matching optimization carried out on the cost function.
bool setFarPlane(const int far)
Specifies the maximum distance to the ViewPose below which surface elements will be excluded from the...
bool setAutoGain(const bool enable=true) const
Controls whether the gain should be adjusted after each image capture.
bool isTcpPortOpen() const
Check if a TCP port is opened.
bool jsonToMatrix(const std::string json, Eigen::Affine3d &matrix) const
Converts a JSON string into an Eigen::Affine3d.
double timestamp_
timestamp of the current frame
bool getTFLeftToRGB(Transform &tf) const
Get transformation between stereo frame and rgb frame.
bool mono_device_open_
Whether an Ensenso device is opened or not.
boost::thread grabber_thread_
Grabber thread.
static pcl::uint64_t getPCLStamp(const double ensenso_stamp)
Convert an Ensenso time stamp into a PCL/ROS time stamp.
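As the listing shows, on non-Windows platforms the Ensenso stamp (seconds since 1601-01-01, the Windows FILETIME epoch) is shifted by 11644473600 s to the Unix epoch and scaled to microseconds, while on Windows only the scaling is applied. As a quick sanity check: an Ensenso stamp of 13161600000.0 s maps to (13161600000 - 11644473600) * 1e6 = 1517126400000000 us, i.e. 2018-01-28 08:00:00 UTC.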
bool setBlackLevelOffset(const float offset=1.0) const
The current black level offset. When AutoBlackLevel is false this value specifies the sensor black le...
boost::signals2::signal< sig_cb_ensenso_image_depth > * image_depth_signal_
Boost depth image signal.
bool setUseOpenGL(const bool enable) const
Configures the use of OpenGL.
bool estimatePatternPose(Eigen::Affine3d &pose, const bool average=false) const
Estimate the calibration pattern pose.
bool setDepthChangeCost(const int changecost) const
The penalty for changes of +/- 1 disparity along an optimization path. Setting a larger value for Dep...
bool openTcpPort(const int port=24000)
Open TCP port to enable access via the nxTreeEdit program.
bool setUniquenessRatio(const int ratio) const
Filters the pixels depending on the uniqueness of the found correspondence. The value indicates the p...
std::string getName() const
Get class name.
bool setUseDisparityMapAreaOfInterest(const bool enable=false) const
Reduces the camera's capture AOI to the region necessary for the currently set stereo matching AOI...
bool setFrontLight(const bool enable=false) const
Enables the diffuse front light during exposure. This should only be used when calibrating or trackin...
std::pair< pcl::PCLGenImage< pcl::uint8_t >, pcl::PCLGenImage< pcl::uint8_t > > PairOfImages
bool setTargetBrightness(const int target=80) const
The desired average image brightness in gray values used for AutoExposure and AutoGain.
bool setMinimumDisparity(const int disparity=-64) const
The minimum disparity in pixels where correspondences in the stereo image pair are being searched...
NxLibItem camera_
References to the camera tree.
bool setShadowingThreshold(const int shadowingthreshold) const
The disparity map is checked for occluded pixels. This is usually called 'left-right consistency chec...
void stop()
Stop the data acquisition.
bool setDepthStepCost(const int stepcost) const
The penalty for steps (changes of more than one disparity) along an optimization path. Setting a larger value for DepthStepCost will yield better detection of planar surfaces in low contrast areas, but too large values will lead to a loss of geometry details and precise object boundaries.
bool closeDevices()
Closes all Ensenso devices.
bool setGain(const float gain=1.0) const
The current analog gain factor. See also MaxGain.
bool closeTcpPort(void)
Close TCP port program.
int getPatternCount() const
Gets the number of collected patterns with successful observations in two cameras.
void storeCalibrationPattern(const bool enable)
Enables collecting the calibration pattern continuously during processGrabbing()
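A small sketch of how this works together with getLastCalibrationPattern() (assuming a grabber that is already streaming, as in the usage example after the listing; names and values are illustrative):

grabber.setFindPattern (true);            // let processGrabbing () look for the calibration pattern
grabber.storeCalibrationPattern (true);   // keep the latest raw pattern observation and its pose

// ... later, once a pattern has been observed ...
std::vector<int> grid_size;
double grid_spacing;
std::vector<Eigen::Vector2d> left_points, right_points;
Eigen::Affine3d pattern_pose;
if (grabber.getLastCalibrationPattern (grid_size, grid_spacing, left_points, right_points, pattern_pose))
  std::cout << "Pattern " << grid_size[0] << "x" << grid_size[1]
            << " at z = " << pattern_pose.translation ().z () << " m" << std::endl;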
bool setPixelClock(const int pixel_clock=24) const
Sets the pixel clock in MHz. If you have too many devices on the same bus the image transfer might fa...
void ensensoExceptionHandling(const NxLibException &ex, std::string func_nam)
int collectPattern(const bool buffer=true) const
Collects a calibration pattern.
float fps_
Camera frames per second (FPS)
bool restoreDefaultConfiguration() const
Restores the default capture configuration parameters.
boost::mutex pattern_mutex_
Mutual exclusion for reading pattern pose.
bool calibrateHandEye(const std::vector< Eigen::Affine3d, Eigen::aligned_allocator< Eigen::Affine3d > > &robot_poses, const Eigen::Affine3d &camera_seed, const Eigen::Affine3d &pattern_seed, const std::string setup, Eigen::Affine3d &estimated_camera_pose, Eigen::Affine3d &estimated_pattern_pose, int &iterations, double &reprojection_error) const
With this command you can calibrate the position of the camera with respect to a robot. The calibration routine currently supports two types of setups: either your camera is fixed with respect to the robot origin, or your camera is mounted on the robot hand and moves with the robot.
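A hedged calling sketch for the fixed-camera setup (assuming one pattern observation has been collected with collectPattern() for every entry in robot_poses and that the grabber has been opened as in the usage example after the listing; all pose values are placeholders):

std::vector<Eigen::Affine3d, Eigen::aligned_allocator<Eigen::Affine3d> > robot_poses;
robot_poses.push_back (Eigen::Affine3d (Eigen::Translation3d (0.50, 0.00, 0.40)));  // robot hand pose 1, in meters
robot_poses.push_back (Eigen::Affine3d (Eigen::Translation3d (0.45, 0.10, 0.40)));  // robot hand pose 2, in meters
// ... one robot pose per collected pattern observation ...

Eigen::Affine3d camera_seed = Eigen::Affine3d::Identity ();   // rough initial guess of the camera pose
Eigen::Affine3d pattern_seed = Eigen::Affine3d::Identity ();  // rough initial guess of the pattern pose
Eigen::Affine3d camera_estimate, pattern_estimate;
int iterations = 0;
double reprojection_error = 0.0;

if (grabber.calibrateHandEye (robot_poses, camera_seed, pattern_seed, "Fixed",
                              camera_estimate, pattern_estimate, iterations, reprojection_error))
  std::cout << "Hand-eye calibration converged after " << iterations
            << " iterations (reprojection error " << reprojection_error << ")" << std::endl;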
bool device_open_
Whether an Ensenso device is opened or not.
boost::signals2::signal< sig_cb_ensenso_images_rgb > * images_rgb_signal_
Boost images rgb signal.
void getImage(const NxLibItem &image_node, pcl::PCLGenImage< pcl::uint8_t > &image_out)
Retrieve Image from NxLib.
bool store_calibration_pattern_
Whether to read the pattern pose at processGrabbing() or not.
bool find_pattern_
Whether the grabber tries to find the pattern or not.
bool openDevice(std::string serial)
Opens an Ensenso device.
bool setSurfaceConnectivity(const int threshold) const
The distance along a camera's z direction below which two neighboring points will be connected to a s...
EIGEN_MAKE_ALIGNED_OPERATOR_NEW EnsensoGrabber()
Constructor.
bool setNumberOfDisparities(const int number=128) const
The number of disparities in pixels where correspondences in the stereo image pair are being searched...
bool isRunning() const
Check if the data acquisition is still running.
bool setAutoBlackLevel(const bool enable=true) const
Controls whether the sensor black level should be adjusted automatically by the image sensor...
bool use_rgb_
Whether the grabber uses RGB or not.
virtual ~EnsensoGrabber()
Destructor inherited from the Grabber interface. It never throws.
bool setGridSpacing(const double grid_spacing) const
Sets the grid spacing of the calibration pattern.
bool openMonoDevice(std::string serial)
Opens an Ensenso mono device.
boost::shared_ptr< const NxLibItem > root_
Reference to the NxLib tree root.
bool setHardwareGamma(const bool enable=true) const
Enables the camera's internal analog gamma correction. This boosts dark pixels while compressing high...
bool setMedianFilterRadius(const int radius) const
Specifies the size of the median filter as radius in pixels, excluding the center pixel...
int far_plane_
Far plane Parameter for the RenderPointMap command.
int near_plane_
Near plane Parameter for the RenderPointMap command.
void start()
Start the point cloud and or image acquisition.
void triggerCameras()
Triggers all available cameras.
void convert(const A &a, B &b)
bool setProjector(const bool enable=true) const
Enables the texture projector during exposure. This should only be used for depth map computation...
void getDepthData(const pcl::PointCloud< pcl::PointXYZ >::Ptr &cloud, const pcl::PCLGenImage< float >::Ptr &depthimage)
Retrieve depth data from NxLib.
bool setFlexView(const bool enable=false, const int imagepairs=2) const
The number of image pairs to capture. When FlexView is set to false the camera will operate in normal...
float getFramesPerSecond() const
Obtain the number of frames per second (FPS)
bool setSpeckleRegionSize(const int threshold) const
The size in pixels of a disparity map region below which the region will be removed from the disparit...
bool setBinning(const int binning=1) const
Adjusts the camera's binning factor. Binning reduces the image resolution by an integer factor direct...
bool setUseRGB(const bool enable=true)
Controls whether the grabber will use an external Ensenso RGB camera.
Eigen::Affine3d last_pattern_pose_
Last pose of the detected pattern during processGrabbing()
bool setSpeckleComponentThreshold(const int threshold) const
Defines how the image is divided into regions for speckle filtering. Whenever two neighboring pixel d...
Transform tf_left_to_rgb_
translation from left camera to RGB frame
bool setAutoExposure(const bool enable=true) const
Controls whether the exposure should be adjusted after each image capture.
bool setTriggerMode(const std::string mode="Software") const
Specifies how an image capture is initiated.
bool tcp_open_
Whether a TCP port is opened or not.


ensenso
Author(s): Francisco Suarez Ruiz
autogenerated on Sat Feb 16 2019 03:42:20