Lidar3D.cpp
Go to the documentation of this file.
1 /*+-------------------------------------------------------------------------+
2  | MultiVehicle simulator (libmvsim) |
3  | |
4  | Copyright (C) 2014-2024 Jose Luis Blanco Claraco |
5  | Copyright (C) 2017 Borys Tymchenko (Odessa Polytechnic University) |
6  | Distributed under 3-clause BSD License |
7  | See COPYING |
8  +-------------------------------------------------------------------------+ */
9 
10 #include <mrpt/core/lock_helper.h>
11 #include <mrpt/maps/CSimplePointsMap.h>
12 #include <mrpt/opengl/COpenGLScene.h>
13 #include <mrpt/opengl/stock_objects.h>
14 #include <mrpt/random.h>
15 #include <mrpt/version.h>
16 #include <mvsim/Sensors/Lidar3D.h>
17 #include <mvsim/VehicleBase.h>
18 #include <mvsim/World.h>
19 #include <mvsim/WorldElements/OccupancyGridMap.h>
20 
21 //#include "rapidxml_print.hpp"
22 
23 #if MRPT_VERSION >= 0x270
24 #include <mrpt/opengl/OpenGLDepth2LinearLUTs.h>
25 #endif
26 
27 #if MRPT_VERSION >= 0x020b04 // >=2.11.4?
28 #define HAVE_POINTS_XYZIRT
29 #endif
30 
31 #if defined(HAVE_POINTS_XYZIRT)
32 #include <mrpt/maps/CPointsMapXYZIRT.h>
33 #endif
34 
35 #include "xml_utils.h"
36 
37 using namespace mvsim;
38 using namespace rapidxml;
39 
40 // TODO(jlbc): Also store obs as CObservationRotatingScan??
41 
42 Lidar3D::Lidar3D(Simulable& parent, const rapidxml::xml_node<char>* root) : SensorBase(parent)
43 {
44  Lidar3D::loadConfigFrom(root);
45 }
46 
47 Lidar3D::~Lidar3D() {}
48 
49 void Lidar3D::loadConfigFrom(const rapidxml::xml_node<char>* root)
50 {
51  gui_uptodate_ = false;
52 
53  SensorBase::loadConfigFrom(root);
54  make_sure_we_have_a_name("lidar3d");
55 
56  // Other scalar params:
57  TParameterDefinitions params;
58  params["pose_3d"] = TParamEntry("%pose3d", &sensorPoseOnVeh_);
59  params["range_std_noise"] = TParamEntry("%lf", &rangeStdNoise_);
60  params["sensor_period"] = TParamEntry("%lf", &sensor_period_);
61  params["min_range"] = TParamEntry("%f", &minRange_);
62  params["max_range"] = TParamEntry("%f", &maxRange_);
63  params["viz_pointSize"] = TParamEntry("%f", &viz_pointSize_);
64  params["ignore_parent_body"] = TParamEntry("%bool", &ignore_parent_body_);
65 
66  params["vert_fov_degrees"] = TParamEntry("%lf", &vertical_fov_);
67  params["vertical_ray_angles"] = TParamEntry("%s", &vertical_ray_angles_str_);
68 
69  params["vert_nrays"] = TParamEntry("%i", &vertNumRays_);
70  params["horz_nrays"] = TParamEntry("%i", &horzNumRays_);
71  params["horz_resolution_factor"] = TParamEntry("%lf", &horzResolutionFactor_);
72  params["vert_resolution_factor"] = TParamEntry("%lf", &vertResolutionFactor_);
73 
74  params["max_vert_relative_depth_to_interpolatate"] =
75  TParamEntry("%f", &maxDepthInterpolationStepVert_);
76  params["max_horz_relative_depth_to_interpolatate"] =
77  TParamEntry("%f", &maxDepthInterpolationStepHorz_);
78 
79  // Parse XML params:
80  parse_xmlnode_children_as_param(*root, params, varValues_);
81 }
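Note (editorial illustration, not part of Lidar3D.cpp): each key registered in the params table above is read from a same-named child XML element of the sensor's configuration block via parse_xmlnode_children_as_param(). A minimal sketch follows; the <sensor> wrapper, its attributes, the pose field order, and every numeric value are assumptions for illustration only.

// Hypothetical configuration fragment, kept as a C++ raw string for reference:
static const char* exampleLidar3DConfig = R"XML(
  <sensor class="lidar3d" name="front_lidar">      <!-- wrapper/attributes: assumed -->
    <pose_3d>0.2 0.0 0.5  0 0 0</pose_3d>          <!-- assumed: x y z yaw pitch roll [deg] -->
    <range_std_noise>0.01</range_std_noise>
    <min_range>0.2</min_range>
    <max_range>80.0</max_range>
    <vert_fov_degrees>30.0</vert_fov_degrees>
    <vert_nrays>16</vert_nrays>
    <horz_nrays>1800</horz_nrays>
    <sensor_period>0.10</sensor_period>
  </sensor>
)XML";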
82 
83 void Lidar3D::internalGuiUpdate(
84  const mrpt::optional_ref<mrpt::opengl::COpenGLScene>& viz,
85  [[maybe_unused]] const mrpt::optional_ref<mrpt::opengl::COpenGLScene>& physical,
86  [[maybe_unused]] bool childrenOnly)
87 {
88  mrpt::opengl::CSetOfObjects::Ptr glVizSensors;
89  if (viz)
90  {
91  glVizSensors = std::dynamic_pointer_cast<mrpt::opengl::CSetOfObjects>(
92  viz->get().getByName("group_sensors_viz"));
93  if (!glVizSensors) return; // may happen during shutdown
94  }
95 
96  // 1st time?
97  if (!glPoints_ && glVizSensors)
98  {
99  glPoints_ = mrpt::opengl::CPointCloudColoured::Create();
100  glPoints_->setPointSize(viz_pointSize_);
101  glPoints_->setLocalRepresentativePoint({0, 0, 0.10f});
102 
103  glVizSensors->insert(glPoints_);
104  }
105  if (!gl_sensor_origin_ && viz)
106  {
107  gl_sensor_origin_ = mrpt::opengl::CSetOfObjects::Create();
108 #if MRPT_VERSION >= 0x270
109  gl_sensor_origin_->castShadows(false);
110 #endif
111  gl_sensor_origin_corner_ = mrpt::opengl::stock_objects::CornerXYZSimple(0.15f);
112 
113  gl_sensor_origin_->insert(gl_sensor_origin_corner_);
114 
115  gl_sensor_origin_->setVisibility(false);
116  viz->get().insert(gl_sensor_origin_);
117  SensorBase::RegisterSensorOriginViz(gl_sensor_origin_);
118  }
119  if (!gl_sensor_fov_ && viz)
120  {
121  gl_sensor_fov_ = mrpt::opengl::CSetOfObjects::Create();
122 
123  MRPT_TODO("render 3D lidar FOV");
124 #if 0
125  auto fovScan = mrpt::opengl::CPlanarLaserScan::Create();
126  gl_sensor_fov_->insert(fovScan);
127 #endif
128  gl_sensor_fov_->setVisibility(false);
129  viz->get().insert(gl_sensor_fov_);
130  SensorBase::RegisterSensorFOVViz(gl_sensor_fov_);
131  }
132 
133  if (!gui_uptodate_ && glVizSensors->isVisible())
134  {
135  {
136  std::lock_guard<std::mutex> csl(last_scan_cs_);
137  if (last_scan2gui_ && last_scan2gui_->pointcloud)
138  {
139  glPoints_->loadFromPointsMap(last_scan2gui_->pointcloud.get());
140  last_scan2gui_.reset();
141  }
142  }
143  gui_uptodate_ = true;
144  }
145 
146  const mrpt::poses::CPose3D p = vehicle_.getCPose3D() + sensorPoseOnVeh_;
147  const auto pp = parent()->applyWorldRenderOffset(p);
148 
149  if (glPoints_) glPoints_->setPose(pp);
150  if (gl_sensor_fov_) gl_sensor_fov_->setPose(pp);
151  if (gl_sensor_origin_) gl_sensor_origin_->setPose(pp);
152  if (glCustomVisual_) glCustomVisual_->setPose(pp);
153 }
154 
155 void Lidar3D::simul_pre_timestep([[maybe_unused]] const TSimulContext& context) {}
156 
157 // Simulate sensor AFTER timestep, with the updated vehicle dynamical state:
158 void Lidar3D::simul_post_timestep(const TSimulContext& context)
159 {
160  Simulable::simul_post_timestep(context);
161 
162  if (SensorBase::should_simulate_sensor(context))
163  {
164  auto lckHasTo = mrpt::lockHelper(has_to_render_mtx_);
165 
166  // Will run upon next async call of simulateOn3DScene()
167  if (has_to_render_.has_value())
168  {
169  world_->logFmt(
170  mrpt::system::LVL_WARN,
171  "Time for a new sample came without still simulating the "
172  "last one (!) for simul_time=%.03f s.",
173  has_to_render_->simul_time);
174  }
175 
176  has_to_render_ = context;
177  world_->mark_as_pending_running_sensors_on_3D_scene();
178  }
179 
180  // Keep sensor global pose up-to-date:
181  const auto& p = vehicle_.getPose();
182  const auto globalSensorPose = p + sensorPoseOnVeh_.asTPose();
183  Simulable::setPose(globalSensorPose, false /*do not notify*/);
184 }
185 
186 void Lidar3D::notifySimulableSetPose(const mrpt::math::TPose3D& newPose)
187 {
188  // The editor has moved the sensor in global coordinates.
189  // Convert back to local:
190  const auto& p = vehicle_.getPose();
191  sensorPoseOnVeh_ = mrpt::poses::CPose3D(newPose - p);
192 }
193 
194 void Lidar3D::freeOpenGLResources()
195 {
196  // Free fbo:
197  fbo_renderer_depth_.reset();
198 }
199 
200 #if MRPT_VERSION >= 0x270
201 // Do the log->linear conversion ourselves for this sensor,
202 // since only a few depth points are actually used:
203 // (older mrpt versions already returned the linearized depth)
204 constexpr int DEPTH_LOG2LIN_BITS = 20;
205 using depth_log2lin_t = mrpt::opengl::OpenGLDepth2LinearLUTs<DEPTH_LOG2LIN_BITS>;
206 #endif
207 
208 static float safeInterpolateRangeImage(
209  const mrpt::math::CMatrixFloat& depthImage, const float maxDepthInterpolationStepVert,
210  const float maxDepthInterpolationStepHorz, const int NCOLS, const int NROWS, float v, float u
211 #if MRPT_VERSION >= 0x270
212  ,
213  const depth_log2lin_t::lut_t& depth_log2lin_lut
214 #endif
215 )
216 {
217  const int u0 = static_cast<int>(u);
218  const int v0 = static_cast<int>(v);
219  const int u1 = std::min(u0 + 1, NCOLS - 1);
220  const int v1 = std::min(v0 + 1, NROWS - 1);
221 
222  const float uw = u - u0;
223  const float vw = v - v0;
224 
225  const float raw_d00 = depthImage(v0, u0);
226  const float raw_d01 = depthImage(v1, u0);
227  const float raw_d10 = depthImage(v0, u1);
228  const float raw_d11 = depthImage(v1, u1);
229 
230  // Linearize:
231 #if MRPT_VERSION >= 0x270
232  // Do the log->linear conversion ourselves for this sensor,
233  // since only a few depth points are actually used:
234 
235  // map d in [-1.0f,+1.0f] ==> real depth values:
236  const float d00 = depth_log2lin_lut[(raw_d00 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
237  const float d01 = depth_log2lin_lut[(raw_d01 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
238  const float d10 = depth_log2lin_lut[(raw_d10 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
239  const float d11 = depth_log2lin_lut[(raw_d11 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
240 #else
241  // "d" is already linear depth
242  const float d00 = raw_d00;
243  const float d01 = raw_d01;
244  const float d10 = raw_d10;
245  const float d11 = raw_d11;
246 #endif
247 
248  // max relative range difference in u and v directions:
249  const float A_u = std::max(std::abs(d00 - d10), std::abs(d01 - d11));
250  const float A_v = std::max(std::abs(d00 - d01), std::abs(d10 - d11));
251 
252  const auto maxStepU = maxDepthInterpolationStepHorz * d00;
253  const auto maxStepV = maxDepthInterpolationStepVert * d00;
254 
255  if (A_v < maxStepV && A_u < maxStepU)
256  {
257  // smooth bilinear interpolation in (u,v):
258  return d00 * (1.0f - uw) * (1.0f - vw) + //
259  d01 * (1.0f - uw) * vw + //
260  d10 * uw * (1.0f - vw) + //
261  d11 * uw * vw;
262  }
263  else if (A_v < maxStepV)
264  {
265  // Linear interpolation in "v" only:
266  // Pick closest "u":
267  const float d0 = uw < 0.5f ? d00 : d10;
268  const float d1 = uw < 0.5f ? d01 : d11;
269 
270  return d0 * (1.0f - vw) + d1 * vw;
271  }
272  else if (A_u < maxStepU)
273  {
274  // Linear interpolation in "u" only:
275 
276  // Pick closest "v":
277  const float d0 = vw < 0.5f ? d00 : d01;
278  const float d1 = vw < 0.5f ? d10 : d11;
279 
280  return d0 * (1.0f - uw) + d1 * uw;
281  }
282  else
283  {
284  // too many changes in depth, do not interpolate:
285  return d00;
286  }
287 }
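Note (editorial illustration, not part of Lidar3D.cpp): written out, the smooth branch above is plain bilinear interpolation over the 2x2 depth patch,

  d(u,v) \approx d_{00}(1-u_w)(1-v_w) + d_{01}(1-u_w)v_w + d_{10}u_w(1-v_w) + d_{11}u_w v_w,
  \quad u_w = u - u_0,\;\; v_w = v - v_0,

and it is only taken when the per-axis depth spreads stay below the relative thresholds

  A_u = \max(|d_{00}-d_{10}|, |d_{01}-d_{11}|) < s_h\,d_{00}, \qquad
  A_v = \max(|d_{00}-d_{01}|, |d_{10}-d_{11}|) < s_v\,d_{00},

where s_h and s_v are maxDepthInterpolationStepHorz and maxDepthInterpolationStepVert. Otherwise the function degrades to 1-D interpolation along the still-smooth axis, or to d_{00} alone, so ranges are never blended across a depth discontinuity.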
288 
289 void Lidar3D::simulateOn3DScene(mrpt::opengl::COpenGLScene& world3DScene)
290 {
291  using namespace mrpt; // _deg
292 
293  {
294  auto lckHasTo = mrpt::lockHelper(has_to_render_mtx_);
295  if (!has_to_render_.has_value()) return;
296  }
297 
298  auto tleWhole = mrpt::system::CTimeLoggerEntry(world_->getTimeLogger(), "sensor.3Dlidar");
299 
300  auto tle1 = mrpt::system::CTimeLoggerEntry(world_->getTimeLogger(), "sensor.3Dlidar.acqGuiMtx");
301 
302  tle1.stop();
303 
304  // The sensor body must be made of transparent material! :-)
305  if (glCustomVisual_) glCustomVisual_->setVisibility(false);
306 
307  // Create empty observation:
308  auto curObs = mrpt::obs::CObservationPointCloud::Create();
309  curObs->sensorPose = sensorPoseOnVeh_;
310  curObs->timestamp = world_->get_simul_timestamp();
311  curObs->sensorLabel = name_;
312 
313 #if defined(HAVE_POINTS_XYZIRT)
314  auto curPtsPtr = mrpt::maps::CPointsMapXYZIRT::Create();
315 #else
316  auto curPtsPtr = mrpt::maps::CSimplePointsMap::Create();
317 #endif
318 
319  auto& curPts = *curPtsPtr;
320  curObs->pointcloud = curPtsPtr;
321 
322  // Create FBO on first use, now that we are here at the GUI / OpenGL thread.
323  constexpr double camModel_hFOV = 120.01_deg;
324 
325  // This FBO is for camModel_hFOV only:
326  // Minimum horz resolution=360deg /120 deg
327  const int FBO_NCOLS =
328  mrpt::round(horzResolutionFactor_ * horzNumRays_ / (2 * M_PI / camModel_hFOV));
329 
330  mrpt::img::TCamera camModel;
331  camModel.ncols = FBO_NCOLS;
332  camModel.cx(camModel.ncols / 2.0);
333  camModel.fx(camModel.cx() / tan(camModel_hFOV * 0.5)); // tan(FOV/2)=cx/fx
334 
335  // Build list of vertical angles, in increasing order (first negative, below
336  // horizontal plane, final ones positive, above it):
337  if (vertical_ray_angles_.empty())
338  {
339  if (vertical_ray_angles_str_.empty())
340  {
341  // even distribution:
342  vertical_ray_angles_.resize(vertNumRays_);
343  for (int i = 0; i < vertNumRays_; i++)
344  {
345  vertical_ray_angles_[i] = vertical_fov_ * (-0.5 + i * 1.0 / (vertNumRays_ - 1));
346  }
347  }
348  else
349  {
350  // custom distribution:
351  std::vector<std::string> vertAnglesStrs;
352  mrpt::system::tokenize(vertical_ray_angles_str_, " \t\r\n", vertAnglesStrs);
353  ASSERT_EQUAL_(vertAnglesStrs.size(), static_cast<size_t>(vertNumRays_));
354  std::set<double> angs;
355  for (const auto& s : vertAnglesStrs) angs.insert(std::stod(s));
356  ASSERT_EQUAL_(angs.size(), static_cast<size_t>(vertNumRays_));
357  for (const auto a : angs) vertical_ray_angles_.push_back(a);
358  }
359 
360  // Pass to radians:
361  for (double& a : vertical_ray_angles_) a = mrpt::DEG2RAD(a);
362  }
363 
364  // worst vFOV case: at each sub-scan render corner:
365  // (derivation in hand notes... to be passed to a paper)
366  using mrpt::square;
367  const double vertFOVMax = vertical_ray_angles_.back();
368  const double vertFOVMin = std::abs(vertical_ray_angles_.front());
369 
370  const int FBO_NROWS_UP = vertResolutionFactor_ * tan(vertFOVMax) *
371  sqrt(square(camModel.fx()) + square(camModel.cx()));
372  const int FBO_NROWS_DOWN = vertResolutionFactor_ * tan(vertFOVMin) *
373  sqrt(square(camModel.fx()) + square(camModel.cx()));
374 
375  const int FBO_NROWS = FBO_NROWS_DOWN + FBO_NROWS_UP + 1;
376  camModel.nrows = FBO_NROWS;
377  camModel.cy(FBO_NROWS_UP + 1);
378  camModel.fy(camModel.fx());
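Note (editorial illustration, not part of Lidar3D.cpp): a sketch of the "worst vFOV case at each sub-scan render corner" used above: at the horizontal edge of a sub-render the bearing \beta satisfies \tan\beta = c_x/f_x, so the effective focal length along that pixel column is \sqrt{f_x^2 + c_x^2} (with f_y = f_x). A ray at elevation \theta therefore needs a vertical pixel offset of about

  \Delta v = \tan(\theta)\,\sqrt{f_x^2 + c_x^2},

which, scaled by vert_resolution_factor, gives FBO_NROWS_UP and FBO_NROWS_DOWN.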
379 
380  if (!fbo_renderer_depth_)
381  {
382  mrpt::opengl::CFBORender::Parameters p;
383  p.width = FBO_NCOLS;
384  p.height = FBO_NROWS;
385  p.create_EGL_context = world()->sensor_has_to_create_egl_context();
386 
387 #if MRPT_VERSION >= 0x270
388  // Do the log->linear conversion ourselves for this sensor, since only a
389  // few depth points are actually used.
390  p.raw_depth = true;
391 #endif
392 
393  fbo_renderer_depth_ = std::make_shared<mrpt::opengl::CFBORender>(p);
394  }
395 
396  const size_t nCols = horzNumRays_;
397  const size_t nRows = vertNumRays_;
398 
399  mrpt::math::CMatrixDouble rangeImage(nRows, nCols);
400  rangeImage.setZero(); // 0=invalid (no lidar return)
401 
402  auto viewport = world3DScene.getViewport();
403 
404  // Disable rendering of shadows for this sensor:
405 #if MRPT_VERSION >= 0x270
406  const bool wasShadowEnabled = viewport->isShadowCastingEnabled();
407  viewport->enableShadowCasting(false);
408 #endif
409 
410  auto& cam = fbo_renderer_depth_->getCamera(world3DScene);
411 
412  const auto fixedAxisConventionRot =
413  mrpt::poses::CPose3D(0, 0, 0, -90.0_deg, 0.0_deg, -90.0_deg);
414 
415  const auto vehiclePose = mrpt::poses::CPose3D(vehicle_.getPose());
416 
417  // ----------------------------------------------------------
418  // Decompose the horizontal lidar FOV into "n" depth images,
419  // of camModel_hFOV each.
420  // ----------------------------------------------------------
421  ASSERT_GT_(horzNumRays_, 1);
422  ASSERT_GT_(vertNumRays_, 1);
423 
424  constexpr bool scanIsCW = false;
425  constexpr double aperture = 2 * M_PI;
426 
427  const double firstAngle = -aperture * 0.5;
428 
429  const unsigned int numRenders = std::ceil((aperture / camModel_hFOV) - 1e-3);
430  const auto numHorzRaysPerRender =
431  mrpt::round(horzNumRays_ * std::min<double>(1.0, (camModel_hFOV / aperture)));
432 
433  ASSERT_(numHorzRaysPerRender > 0);
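Note (editorial illustration, not part of Lidar3D.cpp): a worked example of the decomposition above, with the full 2\pi aperture and camModel_hFOV = 120.01 deg:

  numRenders = \lceil 360/120.01 - 10^{-3} \rceil = \lceil 2.9988 \rceil = 3,

and, for a purely hypothetical horzNumRays_ = 1800,

  numHorzRaysPerRender = \mathrm{round}(1800 \cdot 120.01/360) = 600,

i.e. three roughly 120-degree depth renders of about 600 LUT columns each cover the whole horizontal scan.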
434 
435  // Precomputed LUT of bearings to pixel coordinates:
436  // cx - u
437  // tan(horzBearing) = --------------
438  // fx
439  //
440  // cy - v
441  // tan(vertBearing) = -------------------------
442  // fy / cos(horzBearing)
443  //
444  if (lut_.empty())
445  {
446  lut_.resize(numHorzRaysPerRender);
447 
448  for (int i = 0; i < numHorzRaysPerRender; i++)
449  {
450  lut_[i].column.resize(nRows);
451 
452  const double horzAng =
453  (scanIsCW ? -1 : 1) *
454  (camModel_hFOV * 0.5 - i * camModel_hFOV / (numHorzRaysPerRender - 1));
455 
456  const double cosHorzAng = std::cos(horzAng);
457 
458  const auto pixel_u = mrpt::saturate_val<float>(
459  camModel.cx() - camModel.fx() * std::tan(horzAng), 0, camModel.ncols - 1);
460 
461  for (size_t j = 0; j < nRows; j++)
462  {
463  auto& entry = lut_[i].column[j];
464 
465  const double vertAng = vertical_ray_angles_.at(j);
466  const double cosVertAng = std::cos(vertAng);
467 
468  const auto pixel_v = camModel.cy() - camModel.fy() * std::tan(vertAng) / cosHorzAng;
469 
470  // out of the simulated camera (should not happen?)
471  if (pixel_v < 0 || pixel_v >= static_cast<int>(camModel.nrows)) continue;
472 
473  entry.u = pixel_u;
474  entry.v = pixel_v;
475  entry.depth2range = 1.0f / (cosHorzAng * cosVertAng);
476  }
477  }
478  }
479  else
480  {
481  // check:
482  ASSERT_EQUAL_(lut_.size(), static_cast<size_t>(numHorzRaysPerRender));
483  ASSERT_EQUAL_(lut_.at(0).column.size(), nRows);
484  }
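Note (editorial illustration, not part of Lidar3D.cpp): the depth2range factor cached in each LUT entry is the exact conversion from perspective depth d (distance along the sub-camera optical axis) to true ray range,

  \mathrm{range} = \frac{d}{\cos\beta \, \cos\theta},

with \beta the horizontal bearing and \theta the vertical bearing of that ray; it is applied later as range = d * e.depth2range.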
485 
486  // ----------------------------------------------------------
487  // "DEPTH camera" to generate lidar readings:
488  // ----------------------------------------------------------
489  cam.set6DOFMode(true);
490  cam.setProjectiveFromPinhole(camModel);
491 
492  viewport->setViewportClipDistances(minRange_, maxRange_);
493  mrpt::math::CMatrixFloat depthImage;
494 
495  // make owner's own body invisible?
496  auto visVeh = dynamic_cast<VisualObject*>(&vehicle_);
497  auto veh = dynamic_cast<VehicleBase*>(&vehicle_);
498  bool formerVisVehState = true;
499  if (!ignore_parent_body_)
500  {
501  if (visVeh)
502  {
503  formerVisVehState = visVeh->customVisualVisible();
504  visVeh->customVisualVisible(false);
505  }
506  if (veh) veh->chassisAndWheelsVisible(false);
507  }
508 
509 #if MRPT_VERSION >= 0x270
510  // Do the log->linear conversion ourselves for this sensor,
511  // since only a few depth points are actually used:
512  auto& depth_log2lin = depth_log2lin_t::Instance();
513  const auto& depth_log2lin_lut = depth_log2lin.lut_from_zn_zf(minRange_, maxRange_);
514 #endif
515 
516  for (size_t renderIdx = 0; renderIdx < numRenders; renderIdx++)
517  {
518  const double thisRenderMidAngle =
519  firstAngle + (camModel_hFOV / 2.0 + camModel_hFOV * renderIdx) * (scanIsCW ? 1 : -1);
520 
521  const auto thisDepthSensorPoseWrtSensor =
522  mrpt::poses::CPose3D::FromYawPitchRoll(thisRenderMidAngle, 0.0, 0.0) +
523  fixedAxisConventionRot;
524 
525  const auto thisDepthSensorPoseOnVeh = curObs->sensorPose + thisDepthSensorPoseWrtSensor;
526 
527  const auto thisDepthSensorPose = vehiclePose + thisDepthSensorPoseOnVeh;
528 
529  // Camera pose: vehicle + relativePoseOnVehicle:
530  // Note: relativePoseOnVehicle should be (y,p,r)=(90deg,0,90deg)
531  // to make the camera to look forward:
532  cam.setPose(world()->applyWorldRenderOffset(thisDepthSensorPose));
533 
534  auto tleRender =
535  mrpt::system::CTimeLoggerEntry(world_->getTimeLogger(), "sensor.3Dlidar.renderSubScan");
536 
537  fbo_renderer_depth_->render_depth(world3DScene, depthImage);
538 
539  tleRender.stop();
540 
541  // Add random noise:
542  // Each thread must create its own rng:
543  thread_local mrpt::random::CRandomGenerator rng;
544  thread_local std::vector<float> noiseSeq;
545  thread_local size_t noiseIdx = 0;
546  constexpr size_t noiseLen = 7823; // prime
547  if (rangeStdNoise_ > 0)
548  {
549  if (noiseSeq.empty())
550  {
551  noiseSeq.reserve(noiseLen);
552  for (size_t i = 0; i < noiseLen; i++)
553  noiseSeq.push_back(rng.drawGaussian1D(0.0, rangeStdNoise_));
554  }
555  }
556 
557  auto tleStore =
558  mrpt::system::CTimeLoggerEntry(world_->getTimeLogger(), "sensor.3Dlidar.storeObs");
559 
560  // Convert depth into range and store into polar range images:
561  for (int i = 0; i < numHorzRaysPerRender; i++)
562  {
563  const int iAbs = i + numHorzRaysPerRender * renderIdx;
564  if (iAbs >= rangeImage.cols()) continue; // we don't need this image part
565 
566  for (unsigned int j = 0; j < nRows; j++)
567  {
568  // LUT entry:
569  const auto& e = lut_.at(i).column.at(j);
570  const auto u = e.u;
571  const auto v = e.v;
572 
573  ASSERTDEB_LT_(u, depthImage.cols());
574  ASSERTDEB_LT_(v, depthImage.rows());
575 
576  // Depth:
577  float d = safeInterpolateRangeImage(
578  depthImage, maxDepthInterpolationStepVert_, maxDepthInterpolationStepHorz_,
579  FBO_NCOLS, FBO_NROWS, v, u
580 #if MRPT_VERSION >= 0x270
581  ,
582  depth_log2lin_lut
583 #endif
584  );
585 
586  // Add noise:
587  if (d != 0) // invalid range
588  {
589  const float dNoisy = d + noiseSeq[noiseIdx++];
590  if (noiseIdx >= noiseLen) noiseIdx = 0;
591 
592  if (dNoisy < 0 || dNoisy > maxRange_) continue;
593 
594  d = dNoisy;
595  }
596 
597  // un-project: depth -> range:
598  const float range = d * e.depth2range;
599 
600  if (range <= 0 || range >= maxRange_) continue; // invalid
601 
602  ASSERTDEB_LT_(j, rangeImage.rows());
603  ASSERTDEB_LT_(iAbs, rangeImage.cols());
604 
605  rangeImage(j, iAbs) = range;
606 
607  // add points:
608  const mrpt::math::TPoint3D pt_wrt_cam = {
609  d * (u - camModel.cx()) / camModel.fx(),
610  d * (v - camModel.cy()) / camModel.fy(), d};
611  curPts.insertPoint(thisDepthSensorPoseWrtSensor.composePoint(pt_wrt_cam));
612 
613 #if defined(HAVE_POINTS_XYZIRT)
614  // Add "ring" field:
615  curPtsPtr->getPointsBufferRef_ring()->push_back(j);
616 
617  // Add "timestamp" field: all to zero since we are simulating an ideal "flash"
618  // lidar:
619  curPtsPtr->getPointsBufferRef_timestamp()->push_back(.0);
620 #endif
621  }
622  }
623  tleStore.stop();
624  }
625 
626  if (!ignore_parent_body_)
627  {
628  if (visVeh) visVeh->customVisualVisible(formerVisVehState);
629  if (veh) veh->chassisAndWheelsVisible(formerVisVehState);
630  }
631 
632 #if MRPT_VERSION >= 0x270
633  viewport->enableShadowCasting(wasShadowEnabled);
634 #endif
635 
636  // Store generated obs:
637  {
638  auto tle3 =
639  mrpt::system::CTimeLoggerEntry(world_->getTimeLogger(), "sensor.3Dlidar.acqObsMtx");
640 
641  std::lock_guard<std::mutex> csl(last_scan_cs_);
642  last_scan_ = std::move(curObs);
643  last_scan2gui_ = last_scan_;
644  }
645 
646  {
647  auto lckHasTo = mrpt::lockHelper(has_to_render_mtx_);
648 
649  auto tlePub =
650  mrpt::system::CTimeLoggerEntry(world_->getTimeLogger(), "sensor.3Dlidar.report");
651 
652  SensorBase::reportNewObservation(last_scan_, *has_to_render_);
653 
654  tlePub.stop();
655 
656  if (glCustomVisual_) glCustomVisual_->setVisibility(true);
657 
658  gui_uptodate_ = false;
659 
660  has_to_render_.reset();
661  }
662 }
mvsim::VisualObject::parent
World * parent()
Definition: VisualObject.h:51
mvsim
Definition: Client.h:21
mvsim::VisualObject::world_
World * world_
Definition: VisualObject.h:73
mvsim::SensorBase::make_sure_we_have_a_name
void make_sure_we_have_a_name(const std::string &prefix)
Assign a sensible default name/sensor label if none is provided:
Definition: SensorBase.cpp:252
mvsim::SensorBase::varValues_
std::map< std::string, std::string > varValues_
Filled in by SensorBase::loadConfigFrom()
Definition: SensorBase.h:97
mvsim::SensorBase::reportNewObservation
void reportNewObservation(const std::shared_ptr< mrpt::obs::CObservation > &obs, const TSimulContext &context)
Definition: SensorBase.cpp:159
mvsim::Lidar3D::minRange_
float minRange_
Definition: Lidar3D.h:65
mvsim::Lidar3D::maxDepthInterpolationStepVert_
float maxDepthInterpolationStepVert_
Definition: Lidar3D.h:81
Lidar3D.h
mvsim::Lidar3D::Lidar3D
Lidar3D(Simulable &parent, const rapidxml::xml_node< char > *root)
Definition: Lidar3D.cpp:42
mvsim::Lidar3D::vertical_ray_angles_str_
std::string vertical_ray_angles_str_
Definition: Lidar3D.h:76
mvsim::TParamEntry
Definition: TParameterDefinitions.h:38
mvsim::parse_xmlnode_children_as_param
void parse_xmlnode_children_as_param(const rapidxml::xml_node< char > &xml_node, const TParameterDefinitions &params, const std::map< std::string, std::string > &variableNamesValues={}, const char *functionNameContext="", mrpt::system::COutputLogger *logger=nullptr)
Definition: xml_utils.cpp:215
mvsim::World::applyWorldRenderOffset
mrpt::math::TPose3D applyWorldRenderOffset(mrpt::math::TPose3D p) const
Definition: World.h:631
World.h
mvsim::Simulable::getCPose3D
mrpt::poses::CPose3D getCPose3D() const
Alternative to getPose()
Definition: Simulable.cpp:157
mvsim::Lidar3D::maxRange_
float maxRange_
Definition: Lidar3D.h:66
mvsim::SensorBase::vehicle_
Simulable & vehicle_
The vehicle this sensor is attached to.
Definition: SensorBase.h:82
mvsim::SensorBase
Virtual base class for all sensors.
Definition: SensorBase.h:34
mvsim::Lidar3D::has_to_render_mtx_
std::mutex has_to_render_mtx_
Definition: Lidar3D.h:97
mvsim::Lidar3D::~Lidar3D
virtual ~Lidar3D()
Definition: Lidar3D.cpp:47
mvsim::Lidar3D::vertical_ray_angles_
std::vector< double > vertical_ray_angles_
Definition: Lidar3D.h:115
mvsim::Lidar3D::rangeStdNoise_
double rangeStdNoise_
Definition: Lidar3D.h:61
mrpt
Definition: basic_types.h:36
mvsim::Lidar3D::horzResolutionFactor_
double horzResolutionFactor_
Definition: Lidar3D.h:79
mvsim::World::sensor_has_to_create_egl_context
bool sensor_has_to_create_egl_context()
Definition: World.cpp:191
xml_utils.h
mvsim::SensorBase::should_simulate_sensor
bool should_simulate_sensor(const TSimulContext &context)
Definition: SensorBase.cpp:262
mvsim::Lidar3D::glPoints_
mrpt::opengl::CPointCloudColoured::Ptr glPoints_
Definition: Lidar3D.h:92
VehicleBase.h
mvsim::Lidar3D::vertNumRays_
int vertNumRays_
Definition: Lidar3D.h:78
mvsim::SensorBase::world
World * world()
Definition: SensorBase.h:84
mvsim::Lidar3D::freeOpenGLResources
void freeOpenGLResources() override
Definition: Lidar3D.cpp:194
rapidxml
Definition: rapidxml.hpp:57
mvsim::TSimulContext
Definition: basic_types.h:58
mvsim::World::get_simul_timestamp
mrpt::Clock::time_point get_simul_timestamp() const
Definition: World.h:137
mvsim::Lidar3D::ignore_parent_body_
bool ignore_parent_body_
Definition: Lidar3D.h:62
mvsim::TParameterDefinitions
std::map< std::string, TParamEntry > TParameterDefinitions
Definition: TParameterDefinitions.h:64
mvsim::Simulable::simul_post_timestep
virtual void simul_post_timestep(const TSimulContext &context)
Definition: Simulable.cpp:64
safeInterpolateRangeImage
static float safeInterpolateRangeImage(const mrpt::math::CMatrixFloat &depthImage, const float maxDepthInterpolationStepVert, const float maxDepthInterpolationStepHorz, const int NCOLS, const int NROWS, float v, float u)
Definition: Lidar3D.cpp:208
mvsim::Lidar3D::vertical_fov_
double vertical_fov_
In degrees !!
Definition: Lidar3D.h:70
mvsim::World::mark_as_pending_running_sensors_on_3D_scene
void mark_as_pending_running_sensors_on_3D_scene()
Definition: World.h:223
mvsim::Simulable::name_
std::string name_
Definition: Simulable.h:145
mvsim::SensorBase::sensor_period_
double sensor_period_
Definition: SensorBase.h:88
mvsim::Lidar3D::viz_pointSize_
float viz_pointSize_
Definition: Lidar3D.h:64
mvsim::Lidar3D::sensorPoseOnVeh_
mrpt::poses::CPose3D sensorPoseOnVeh_
Definition: Lidar3D.h:59
mvsim::Lidar3D::last_scan2gui_
mrpt::obs::CObservationPointCloud::Ptr last_scan2gui_
Definition: Lidar3D.h:85
mvsim::Lidar3D::last_scan_cs_
std::mutex last_scan_cs_
Definition: Lidar3D.h:86
mvsim::VisualObject::customVisualVisible
void customVisualVisible(const bool visible)
Definition: VisualObject.cpp:250
mvsim::Lidar3D::vertResolutionFactor_
double vertResolutionFactor_
Definition: Lidar3D.h:80
mvsim::Lidar3D::gl_sensor_origin_
mrpt::opengl::CSetOfObjects::Ptr gl_sensor_origin_
Definition: Lidar3D.h:93
mvsim::Simulable::setPose
void setPose(const mrpt::math::TPose3D &p, bool notifyChange=true) const
Definition: Simulable.cpp:474
mvsim::VehicleBase
Definition: VehicleBase.h:44
rapidxml::xml_node< char >
mvsim::Lidar3D::lut_
std::vector< PerHorzAngleLUT > lut_
Definition: Lidar3D.h:111
mvsim::Simulable
Definition: Simulable.h:39
OccupancyGridMap.h
mvsim::Lidar3D::last_scan_
mrpt::obs::CObservationPointCloud::Ptr last_scan_
Definition: Lidar3D.h:85
mvsim::Lidar3D::simul_pre_timestep
virtual void simul_pre_timestep(const TSimulContext &context) override
Definition: Lidar3D.cpp:155
mvsim::Lidar3D::internalGuiUpdate
virtual void internalGuiUpdate(const mrpt::optional_ref< mrpt::opengl::COpenGLScene > &viz, const mrpt::optional_ref< mrpt::opengl::COpenGLScene > &physical, bool childrenOnly) override
Definition: Lidar3D.cpp:83
mvsim::Lidar3D::fbo_renderer_depth_
std::shared_ptr< mrpt::opengl::CFBORender > fbo_renderer_depth_
Definition: Lidar3D.h:99
mvsim::SensorBase::loadConfigFrom
virtual void loadConfigFrom(const rapidxml::xml_node< char > *root)
Definition: SensorBase.cpp:231
mvsim::Lidar3D::maxDepthInterpolationStepHorz_
float maxDepthInterpolationStepHorz_
Definition: Lidar3D.h:82
mvsim::Lidar3D::notifySimulableSetPose
void notifySimulableSetPose(const mrpt::math::TPose3D &newPose) override
Definition: Lidar3D.cpp:186
mvsim::Simulable::getPose
mrpt::math::TPose3D getPose() const
Definition: Simulable.cpp:490
mvsim::Lidar3D::simulateOn3DScene
void simulateOn3DScene(mrpt::opengl::COpenGLScene &gl_scene) override
Definition: Lidar3D.cpp:289
mvsim::World::getTimeLogger
mrpt::system::CTimeLogger & getTimeLogger()
Definition: World.h:321
mvsim::VisualObject::glCustomVisual_
std::shared_ptr< mrpt::opengl::CSetOfObjects > glCustomVisual_
Definition: VisualObject.h:77
mvsim::Lidar3D::gl_sensor_fov_
mrpt::opengl::CSetOfObjects::Ptr gl_sensor_fov_
Definition: Lidar3D.h:94
mvsim::Lidar3D::simul_post_timestep
virtual void simul_post_timestep(const TSimulContext &context) override
Definition: Lidar3D.cpp:158
mvsim::Lidar3D::gl_sensor_origin_corner_
mrpt::opengl::CSetOfObjects::Ptr gl_sensor_origin_corner_
Definition: Lidar3D.h:93
mvsim::Lidar3D::loadConfigFrom
virtual void loadConfigFrom(const rapidxml::xml_node< char > *root) override
Definition: Lidar3D.cpp:49
mvsim::Lidar3D::has_to_render_
std::optional< TSimulContext > has_to_render_
Definition: Lidar3D.h:96
mvsim::Lidar3D::gui_uptodate_
bool gui_uptodate_
Definition: Lidar3D.h:90
mvsim::Lidar3D::horzNumRays_
int horzNumRays_
Definition: Lidar3D.h:78
mvsim::VisualObject
Definition: VisualObject.h:35
mvsim::SensorBase::RegisterSensorFOVViz
static void RegisterSensorFOVViz(const std::shared_ptr< mrpt::opengl::CSetOfObjects > &o)
Definition: SensorBase.cpp:73
mvsim::SensorBase::RegisterSensorOriginViz
static void RegisterSensorOriginViz(const std::shared_ptr< mrpt::opengl::CSetOfObjects > &o)
Definition: SensorBase.cpp:78

