#include <mrpt/core/lock_helper.h>
#include <mrpt/maps/CSimplePointsMap.h>
#include <mrpt/opengl/COpenGLScene.h>
#include <mrpt/opengl/stock_objects.h>
#include <mrpt/random.h>
#include <mrpt/version.h>

#if MRPT_VERSION >= 0x270
#include <mrpt/opengl/OpenGLDepth2LinearLUTs.h>
#endif

#if MRPT_VERSION >= 0x020b04  // MRPT >= 2.11.4
#define HAVE_POINTS_XYZIRT
#endif

#if defined(HAVE_POINTS_XYZIRT)
#include <mrpt/maps/CPointsMapXYZIRT.h>
#endif

using namespace mvsim;
    params["max_vert_relative_depth_to_interpolatate"] =
    params["max_horz_relative_depth_to_interpolatate"] =
    const mrpt::optional_ref<mrpt::opengl::COpenGLScene>& viz,
    [[maybe_unused]] const mrpt::optional_ref<mrpt::opengl::COpenGLScene>& physical,
    [[maybe_unused]] bool childrenOnly)
{
    mrpt::opengl::CSetOfObjects::Ptr glVizSensors;
    glVizSensors = std::dynamic_pointer_cast<mrpt::opengl::CSetOfObjects>(
        viz->get().getByName("group_sensors_viz"));
    if (!glVizSensors) return;
    glPoints_ = mrpt::opengl::CPointCloudColoured::Create();
    glPoints_->setLocalRepresentativePoint({0, 0, 0.10f});

#if MRPT_VERSION >= 0x270

    MRPT_TODO("render 3D lidar FOV");
    auto fovScan = mrpt::opengl::CPlanarLaserScan::Create();
        mrpt::system::LVL_WARN,
        "Time for a new sample arrived while the previous one was still being "
        "simulated (!) for simul_time=%.03f s.",
#if MRPT_VERSION >= 0x270
// Look-up tables to convert the non-linear OpenGL depth-buffer values (in [-1,1])
// back into linear (metric) depths:
constexpr int DEPTH_LOG2LIN_BITS = 20;
using depth_log2lin_t = mrpt::opengl::OpenGLDepth2LinearLUTs<DEPTH_LOG2LIN_BITS>;
#endif
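// Helper: safely sample the rendered depth image at a fractional pixel (u,v).
// It interpolates bilinearly only where the four neighboring depths are close
// enough (no depth discontinuity), which avoids creating "ghost" points that
// float between foreground and background at object edges.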
    const mrpt::math::CMatrixFloat& depthImage,
    const float maxDepthInterpolationStepVert,
    const float maxDepthInterpolationStepHorz,
    const int NCOLS, const int NROWS, float v, float u
#if MRPT_VERSION >= 0x270
    ,
    const depth_log2lin_t::lut_t& depth_log2lin_lut
#endif
)
{
    const int u0 = static_cast<int>(u);
    const int v0 = static_cast<int>(v);
    const int u1 = std::min(u0 + 1, NCOLS - 1);
    const int v1 = std::min(v0 + 1, NROWS - 1);

    const float uw = u - u0;
    const float vw = v - v0;

    const float raw_d00 = depthImage(v0, u0);
    const float raw_d01 = depthImage(v1, u0);
    const float raw_d10 = depthImage(v0, u1);
    const float raw_d11 = depthImage(v1, u1);
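    // Naming: in raw_dXY / dXY, X is the horizontal (u) offset and Y is the
    // vertical (v) offset of the sampled cell corner.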
#if MRPT_VERSION >= 0x270
    // Convert the raw depth-buffer values (in [-1,1]) into linear depths via the LUT:
    const float d00 = depth_log2lin_lut[(raw_d00 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
    const float d01 = depth_log2lin_lut[(raw_d01 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
    const float d10 = depth_log2lin_lut[(raw_d10 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
    const float d11 = depth_log2lin_lut[(raw_d11 + 1.0f) * (depth_log2lin_t::NUM_ENTRIES - 1) / 2];
#else
    // MRPT < 2.7.0: use the raw depth values directly.
    const float d00 = raw_d00;
    const float d01 = raw_d01;
    const float d10 = raw_d10;
    const float d11 = raw_d11;
#endif
    const float A_u = std::max(std::abs(d00 - d10), std::abs(d01 - d11));
    const float A_v = std::max(std::abs(d00 - d01), std::abs(d10 - d11));

    const auto maxStepU = maxDepthInterpolationStepHorz * d00;
    const auto maxStepV = maxDepthInterpolationStepVert * d00;
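    // A_u / A_v measure the largest depth jump across the 2x2 cell in each axis.
    // The thresholds are relative to the local depth (d00), so distant surfaces
    // tolerate proportionally larger steps before interpolation is disabled.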
    if (A_v < maxStepV && A_u < maxStepU)
    {
        // Smooth surface in both axes: full bilinear interpolation.
        return d00 * (1.0f - uw) * (1.0f - vw) +
               d01 * (1.0f - uw) * vw +
               d10 * uw * (1.0f - vw) +
               d11 * uw * vw;
    }
    else if (A_v < maxStepV)
    {
        // Depth discontinuity along u: interpolate along v only, using the
        // nearest column.
        const float d0 = uw < 0.5f ? d00 : d10;
        const float d1 = uw < 0.5f ? d01 : d11;
        return d0 * (1.0f - vw) + d1 * vw;
    }
    else if (A_u < maxStepU)
    {
        // Depth discontinuity along v: interpolate along u only, using the
        // nearest row.
        const float d0 = vw < 0.5f ? d00 : d01;
        const float d1 = vw < 0.5f ? d10 : d11;
        return d0 * (1.0f - uw) + d1 * uw;
    }
    using namespace mrpt;  // for the _deg literals

    auto tle1 = mrpt::system::CTimeLoggerEntry(
        world_->getTimeLogger(), "sensor.3Dlidar.acqGuiMtx");
    auto curObs = mrpt::obs::CObservationPointCloud::Create();
    curObs->sensorLabel = name_;

#if defined(HAVE_POINTS_XYZIRT)
    auto curPtsPtr = mrpt::maps::CPointsMapXYZIRT::Create();
#else
    auto curPtsPtr = mrpt::maps::CSimplePointsMap::Create();
#endif

    auto& curPts = *curPtsPtr;
    curObs->pointcloud = curPtsPtr;
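    // When built against MRPT >= 2.11.4, CPointsMapXYZIRT additionally stores
    // per-point intensity, ring index and timestamp channels, matching what real
    // 3D lidar drivers typically publish; otherwise a plain XYZ map is used.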
    constexpr double camModel_hFOV = 120.01_deg;

    const int FBO_NCOLS =

    mrpt::img::TCamera camModel;
    camModel.ncols = FBO_NCOLS;
    camModel.cx(camModel.ncols / 2.0);
    camModel.fx(camModel.cx() / tan(camModel_hFOV * 0.5));
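    // For a pinhole model with the principal point at the image center,
    // tan(hFOV/2) = cx / fx, hence fx = cx / tan(hFOV/2).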
    std::vector<std::string> vertAnglesStrs;
    ASSERT_EQUAL_(vertAnglesStrs.size(), static_cast<size_t>(vertNumRays_));
    std::set<double> angs;
    for (const auto& s : vertAnglesStrs) angs.insert(std::stod(s));
    ASSERT_EQUAL_(angs.size(), static_cast<size_t>(vertNumRays_));
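    // Inserting into a std::set both sorts the angles and rejects duplicates;
    // the assert above then catches any repeated vertical angle in the
    // user-provided configuration.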
        sqrt(square(camModel.fx()) + square(camModel.cx()));
        sqrt(square(camModel.fx()) + square(camModel.cx()));

    const int FBO_NROWS = FBO_NROWS_DOWN + FBO_NROWS_UP + 1;
    camModel.nrows = FBO_NROWS;
    camModel.cy(FBO_NROWS_UP + 1);
    camModel.fy(camModel.fx());
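    // The principal point row is offset so that the rows needed above and below
    // the horizon both fit in the render target; square pixels are assumed
    // (fy = fx).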
    mrpt::opengl::CFBORender::Parameters p;
    p.width = FBO_NCOLS;
    p.height = FBO_NROWS;

#if MRPT_VERSION >= 0x270
    mrpt::math::CMatrixDouble rangeImage(nRows, nCols);
    rangeImage.setZero();

    auto viewport = world3DScene.getViewport();

#if MRPT_VERSION >= 0x270
    // Shadow rendering is unnecessary for a pure depth render, so disable it
    // temporarily (restored at the end):
    const bool wasShadowEnabled = viewport->isShadowCastingEnabled();
    viewport->enableShadowCasting(false);
#endif
    const auto fixedAxisConventionRot =
        mrpt::poses::CPose3D(0, 0, 0, -90.0_deg, 0.0_deg, -90.0_deg);
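    // Fixed rotation mapping the camera frame (+Z optical axis, +Y down) onto the
    // sensor frame (+X forward, +Z up), i.e. the usual MRPT camera-on-robot axis
    // convention.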
    constexpr bool scanIsCW = false;
    constexpr double aperture = 2 * M_PI;

    const double firstAngle = -aperture * 0.5;

    const unsigned int numRenders = std::ceil((aperture / camModel_hFOV) - 1e-3);
    const auto numHorzRaysPerRender =
        mrpt::round(horzNumRays_ * std::min<double>(1.0, (camModel_hFOV / aperture)));

    ASSERT_(numHorzRaysPerRender > 0);
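    // A full 360 deg scan cannot be captured with a single pinhole render, so the
    // aperture is covered by ceil(aperture / hFOV) renders of ~120 deg each, and
    // the horizontal rays are split proportionally among them. The small epsilon
    // avoids spawning an extra render when the ratio is (nearly) an exact integer.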
    lut_.resize(numHorzRaysPerRender);

    for (int i = 0; i < numHorzRaysPerRender; i++)
    {
        lut_[i].column.resize(nRows);

        const double horzAng =
            (scanIsCW ? -1 : 1) *
            (camModel_hFOV * 0.5 - i * camModel_hFOV / (numHorzRaysPerRender - 1));

        const double cosHorzAng = std::cos(horzAng);

        const auto pixel_u = mrpt::saturate_val<float>(
            camModel.cx() - camModel.fx() * std::tan(horzAng), 0, camModel.ncols - 1);

        for (size_t j = 0; j < nRows; j++)
        {
            auto& entry = lut_[i].column[j];

            const double cosVertAng = std::cos(vertAng);

            const auto pixel_v = camModel.cy() - camModel.fy() * std::tan(vertAng) / cosHorzAng;

            if (pixel_v < 0 || pixel_v >= static_cast<int>(camModel.nrows)) continue;

            entry.depth2range = 1.0f / (cosHorzAng * cosVertAng);
        }
    }

    ASSERT_EQUAL_(lut_.size(), static_cast<size_t>(numHorzRaysPerRender));
    ASSERT_EQUAL_(lut_.at(0).column.size(), nRows);
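    // Each LUT entry caches, per (horizontal ray, vertical ray angle) pair, the
    // pixel coordinates to sample in the rendered depth image and the factor
    // 1/(cos(horzAng)*cos(vertAng)) that converts perpendicular depth into the
    // Euclidean range along the ray.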
    cam.set6DOFMode(true);
    cam.setProjectiveFromPinhole(camModel);

    mrpt::math::CMatrixFloat depthImage;

    bool formerVisVehState = true;

    // Hide the vehicle's own visuals so the lidar does not "see" the vehicle
    // carrying it:
    if (visVeh) visVeh->customVisualVisible(false);
    if (veh) veh->chassisAndWheelsVisible(false);
#if MRPT_VERSION >= 0x270
    // Singleton holding the precomputed depth log->linear conversion LUTs:
    auto& depth_log2lin = depth_log2lin_t::Instance();
    for (size_t renderIdx = 0; renderIdx < numRenders; renderIdx++)
    {
        const double thisRenderMidAngle =
            firstAngle + (camModel_hFOV / 2.0 + camModel_hFOV * renderIdx) * (scanIsCW ? 1 : -1);

        const auto thisDepthSensorPoseWrtSensor =
            mrpt::poses::CPose3D::FromYawPitchRoll(thisRenderMidAngle, 0.0, 0.0) +
            fixedAxisConventionRot;

        const auto thisDepthSensorPoseOnVeh = curObs->sensorPose + thisDepthSensorPoseWrtSensor;
        const auto thisDepthSensorPose = vehiclePose + thisDepthSensorPoseOnVeh;

        cam.setPose(world()->applyWorldRenderOffset(thisDepthSensorPose));
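        // The depth camera pose is composed as: vehicle pose, then the sensor pose
        // on the vehicle, then this render's yaw offset, then the camera-axes
        // convention rotation. applyWorldRenderOffset() applies the world's render
        // offset translation (if any) before rendering.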
        // Pre-generated (thread-local) sequence of noise values, reused cyclically
        // to avoid drawing a random number per ray. The length is prime, presumably
        // so the reused pattern does not repeat in step with the image dimensions.
        thread_local mrpt::random::CRandomGenerator rng;
        thread_local std::vector<float> noiseSeq;
        thread_local size_t noiseIdx = 0;
        constexpr size_t noiseLen = 7823;

        if (noiseSeq.empty())
        {
            noiseSeq.reserve(noiseLen);
            for (size_t i = 0; i < noiseLen; i++)
        for (int i = 0; i < numHorzRaysPerRender; i++)
        {
            const int iAbs = i + numHorzRaysPerRender * renderIdx;
            if (iAbs >= rangeImage.cols()) continue;

            for (unsigned int j = 0; j < nRows; j++)
            {
                const auto& e = lut_.at(i).column.at(j);

                ASSERTDEB_LT_(u, depthImage.cols());
                ASSERTDEB_LT_(v, depthImage.rows());

                    FBO_NCOLS, FBO_NROWS, v, u
#if MRPT_VERSION >= 0x270

                const float dNoisy = d + noiseSeq[noiseIdx++];
                if (noiseIdx >= noiseLen) noiseIdx = 0;

                if (dNoisy < 0 || dNoisy > maxRange_) continue;
                // Convert depth (along the camera +Z axis) into range along the ray:
                const float range = d * e.depth2range;

                if (range <= 0 || range >= maxRange_) continue;

                ASSERTDEB_LT_(j, rangeImage.rows());
                ASSERTDEB_LT_(iAbs, rangeImage.cols());

                rangeImage(j, iAbs) = range;

                // Deproject the pixel into a 3D point in the depth camera frame:
                const mrpt::math::TPoint3D pt_wrt_cam = {
                    d * (u - camModel.cx()) / camModel.fx(),
                    d * (v - camModel.cy()) / camModel.fy(),
                    d};
                curPts.insertPoint(thisDepthSensorPoseWrtSensor.composePoint(pt_wrt_cam));
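                // composePoint() transforms the point from this render's camera frame
                // into the lidar sensor frame, so curPts accumulates the whole cloud
                // in sensor-local coordinates (consistent with curObs->sensorPose).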
#if defined(HAVE_POINTS_XYZIRT)
                // Extra per-point channels: ring index = vertical ray index, and a
                // zero per-point timestamp:
                curPtsPtr->getPointsBufferRef_ring()->push_back(j);
                curPtsPtr->getPointsBufferRef_timestamp()->push_back(0.0f);
#endif
            }  // for each vertical ray (j)
        }  // for each horizontal ray (i)
    }  // for each render
    // Restore the vehicle visualization state:
    if (visVeh) visVeh->customVisualVisible(formerVisVehState);
    if (veh) veh->chassisAndWheelsVisible(formerVisVehState);

#if MRPT_VERSION >= 0x270
    viewport->enableShadowCasting(wasShadowEnabled);
#endif