CameraARCore.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "CameraARCore.h"
#include "util.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/core/OdometryEvent.h"
#include "rtabmap/core/util3d_transforms.h"
#include "rtabmap/core/util2d.h"

namespace rtabmap {

/////////////////////////
// CameraARCore
/////////////////////////
CameraARCore::CameraARCore(void* env, void* context, void* activity, bool depthFromMotion, bool smoothing, float upstreamRelocalizationAccThr):
    CameraMobile(smoothing, upstreamRelocalizationAccThr),
    env_(env),
    context_(context),
    activity_(activity),
    arInstallRequested_(false),
    depthFromMotion_(depthFromMotion)
{
}

CameraARCore::~CameraARCore() {
    // Disconnect ARCore service
    close();
}

struct CameraConfig {
    int32_t width;
    int32_t height;
    std::string config_label;
    ArCameraConfig* config = nullptr;
};

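// Scan the CPU camera configurations and return the entries with the smallest
// and the largest image height; init() below uses the lowest resolution for
// CPU image processing.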
void getCameraConfigLowestAndHighestResolutions(
        std::vector<CameraConfig> & camera_configs,
        CameraConfig** lowest_resolution_config,
        CameraConfig** highest_resolution_config) {
    if (camera_configs.empty()) {
        return;
    }

    int low_resolution_config_idx = 0;
    int high_resolution_config_idx = 0;
    int32_t smallest_height = camera_configs[0].height;
    int32_t largest_height = camera_configs[0].height;

    for (int i = 1; i < camera_configs.size(); ++i) {
        int32_t image_height = camera_configs[i].height;
        if (image_height < smallest_height) {
            smallest_height = image_height;
            low_resolution_config_idx = i;
        } else if (image_height > largest_height) {
            largest_height = image_height;
            high_resolution_config_idx = i;
        }
    }

    if (low_resolution_config_idx == high_resolution_config_idx) {
        *lowest_resolution_config = &camera_configs[low_resolution_config_idx];
    } else {
        *lowest_resolution_config = &camera_configs[low_resolution_config_idx];
        *highest_resolution_config = &camera_configs[high_resolution_config_idx];
    }
}

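// Copy one entry of an ArCameraConfigList into a CameraConfig. The
// ArCameraConfig handle must be created explicitly before
// ArCameraConfigList_getItem() can fill it, and has to be released later with
// ArCameraConfig_destroy() (see destroyCameraConfigs() below).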
void copyCameraConfig(
        const ArSession* ar_session, const ArCameraConfigList* all_configs,
        int index, int num_configs, CameraConfig* camera_config) {
    if (camera_config != nullptr && index >= 0 && index < num_configs) {
        ArCameraConfig_create(ar_session, &camera_config->config);
        ArCameraConfigList_getItem(ar_session, all_configs, index,
                                   camera_config->config);
        ArCameraConfig_getImageDimensions(ar_session, camera_config->config,
                                          &camera_config->width,
                                          &camera_config->height);
        camera_config->config_label = "(" + std::to_string(camera_config->width) +
                                      "x" + std::to_string(camera_config->height) +
                                      ")";
    }
}

void destroyCameraConfigs(std::vector<CameraConfig> & camera_configs) {
    for (int i = 0; i < camera_configs.size(); ++i) {
        if (camera_configs[i].config != nullptr) {
            ArCameraConfig_destroy(camera_configs[i].config);
        }
    }
}

std::string CameraARCore::getSerial() const
{
    return "ARCore";
}

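// Set up the ARCore session: request the ARCore APK installation when needed,
// create and configure the session (automatic depth when supported, fixed
// focus, lowest-resolution CPU camera config, blocking update mode) and resume
// it. Returns false if ARCore still has to be installed or if the camera
// cannot be resumed.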
bool CameraARCore::init(const std::string & calibrationFolder, const std::string & cameraName)
{
    close();

    CameraMobile::init(calibrationFolder, cameraName);

    UScopeMutex lock(arSessionMutex_);

    ArInstallStatus install_status;
    // If install was not yet requested, that means that we are resuming the
    // activity first time because of explicit user interaction (such as
    // launching the application)
    bool user_requested_install = !arInstallRequested_;

    // === ATTENTION! ATTENTION! ATTENTION! ===
    // This method can and will fail in user-facing situations. Your
    // application must handle these cases at least somewhat gracefully. See
    // HelloAR Java sample code for reasonable behavior.
    ArCoreApk_requestInstall(env_, activity_, user_requested_install, &install_status);

    switch (install_status)
    {
    case AR_INSTALL_STATUS_INSTALLED:
        break;
    case AR_INSTALL_STATUS_INSTALL_REQUESTED:
        arInstallRequested_ = true;
        return false;
    }

    // === ATTENTION! ATTENTION! ATTENTION! ===
    // This method can and will fail in user-facing situations. Your
    // application must handle these cases at least somewhat gracefully. See
    // HelloAR Java sample code for reasonable behavior.
    UASSERT(ArSession_create(env_, context_, &arSession_) == AR_SUCCESS);
    UASSERT(arSession_);

    int32_t is_depth_supported = 0;
    ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

    ArConfig_create(arSession_, &arConfig_);
    UASSERT(arConfig_);

    if (is_depth_supported != 0) {
        ArConfig_setDepthMode(arSession_, arConfig_, AR_DEPTH_MODE_AUTOMATIC);
    } else {
        ArConfig_setDepthMode(arSession_, arConfig_, AR_DEPTH_MODE_DISABLED);
    }

    ArConfig_setFocusMode(arSession_, arConfig_, AR_FOCUS_MODE_FIXED);
    UASSERT(ArSession_configure(arSession_, arConfig_) == AR_SUCCESS);

    ArFrame_create(arSession_, &arFrame_);
    UASSERT(arFrame_);

    ArCameraIntrinsics_create(arSession_, &arCameraIntrinsics_);
    UASSERT(arCameraIntrinsics_);

    ArPose_create(arSession_, nullptr, &arPose_);
    UASSERT(arPose_);

    ArCameraConfigList* all_camera_configs = nullptr;
    int32_t num_configs = 0;
    ArCameraConfigList_create(arSession_, &all_camera_configs);
    // Create filter first to get both 30 and 60 fps.
    ArCameraConfigFilter* camera_config_filter = nullptr;
    ArCameraConfigFilter_create(arSession_, &camera_config_filter);
    ArCameraConfigFilter_setTargetFps(arSession_, camera_config_filter, AR_CAMERA_CONFIG_TARGET_FPS_30 | AR_CAMERA_CONFIG_TARGET_FPS_60);
    ArSession_getSupportedCameraConfigsWithFilter(arSession_, camera_config_filter, all_camera_configs);
    ArCameraConfigList_getSize(arSession_, all_camera_configs, &num_configs);

    if (num_configs < 1) {
        UERROR("No camera config found");
        close();
        return false;
    }

    std::vector<CameraConfig> camera_configs;
    CameraConfig* cpu_low_resolution_camera_config_ptr = nullptr;
    CameraConfig* cpu_high_resolution_camera_config_ptr = nullptr;
    camera_configs.resize(num_configs);
    for (int i = 0; i < num_configs; ++i) {
        copyCameraConfig(arSession_, all_camera_configs, i, num_configs,
                         &camera_configs[i]);
    }
    // Determine the highest and lowest CPU resolutions.
    cpu_low_resolution_camera_config_ptr = nullptr;
    cpu_high_resolution_camera_config_ptr = nullptr;
    getCameraConfigLowestAndHighestResolutions(
            camera_configs,
            &cpu_low_resolution_camera_config_ptr,
            &cpu_high_resolution_camera_config_ptr);

    // It is safe to destroy the config list now: the camera config instances
    // used below were explicitly created and copied in copyCameraConfig().
    ArCameraConfigList_destroy(all_camera_configs);
    ArSession_setCameraConfig(arSession_, cpu_low_resolution_camera_config_ptr->config);
    destroyCameraConfigs(camera_configs);

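    // AR_UPDATE_MODE_BLOCKING makes ArSession_update() wait until a new camera
    // frame is available, so updateDataOnRender() is paced by the camera rate.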
    ArConfig_setUpdateMode(arSession_, arConfig_, AR_UPDATE_MODE_BLOCKING);

    deviceTColorCamera_ = opticalRotation;

    if (ArSession_resume(arSession_) != ArStatus::AR_SUCCESS)
    {
        UERROR("Cannot resume camera!");
        // In a rare case (such as another camera app launching) the camera may be
        // given to a different app and so may not be available to this app. Handle
        // this properly and recreate the session at the next iteration.
        close();
        return false;
    }

    return true;
}

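// Release every ARCore handle created by init(). All pointers are reset to
// nullptr so the method can safely be called more than once (e.g. from the
// destructor and again from init()).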
void CameraARCore::close()
{
    UScopeMutex lock(arSessionMutex_);
    if(arSession_ != nullptr)
    {
        ArSession_destroy(arSession_);
    }
    arSession_ = nullptr;

    if(arConfig_ != nullptr)
    {
        ArConfig_destroy(arConfig_);
    }
    arConfig_ = nullptr;

    if (arFrame_ != nullptr)
    {
        ArFrame_destroy(arFrame_);
    }
    arFrame_ = nullptr;

    if (arCameraIntrinsics_ != nullptr)
    {
        ArCameraIntrinsics_destroy(arCameraIntrinsics_);
    }
    arCameraIntrinsics_ = nullptr;

    if (arPose_ != nullptr)
    {
        ArPose_destroy(arPose_);
    }
    arPose_ = nullptr;

    CameraMobile::close();
}

void CameraARCore::setScreenRotationAndSize(ScreenRotation colorCameraToDisplayRotation, int width, int height)
{
    CameraMobile::setScreenRotationAndSize(colorCameraToDisplayRotation, width, height);
    if(arSession_)
    {
        int ret = static_cast<int>(colorCameraToDisplayRotation) + 1; // remove 90deg camera rotation
        if (ret > 3) {
            ret -= 4;
        }

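        // ArSession_setDisplayGeometry() expects an Android display rotation
        // (0..3, i.e. Surface.ROTATION_0..ROTATION_270); the +1 wrap-around
        // above accounts for the 90 deg offset of the camera sensor.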
        ArSession_setDisplayGeometry(arSession_, ret, width, height);
    }
}

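// Called on the rendering thread (a current OpenGL context is required for the
// camera texture): updates the ARCore session, then packs the camera pose,
// intrinsics, YUV image, optional DEPTH16 occlusion image and the ARCore
// feature point cloud into a SensorData. The output pose stays null when
// tracking is lost or the frame cannot be captured.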
SensorData CameraARCore::updateDataOnRender(Transform & pose)
{
    UScopeMutex lock(arSessionMutex_);
    //LOGI("Capturing image...");

    pose.setNull();
    SensorData data;
    if(!arSession_)
    {
        return data;
    }

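    // ARCore streams the camera frames into an external OES texture; create it
    // on first use and hand its id to the session below.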
    if(textureId_ == 0)
    {
        glGenTextures(1, &textureId_);
        glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId_);
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    }
    if(textureId_ != 0)
        ArSession_setCameraTextureName(arSession_, textureId_);

    // Update session to get current frame and render camera background.
    if (ArSession_update(arSession_, arFrame_) != AR_SUCCESS) {
        LOGE("CameraARCore::captureImage() ArSession_update error");
        return data;
    }

    // If display rotation changed (also includes view size change), we need to
    // re-query the uv coordinates for the on-screen portion of the camera image.
    int32_t geometry_changed = 0;
    ArFrame_getDisplayGeometryChanged(arSession_, arFrame_, &geometry_changed);
    if (geometry_changed != 0 || !uvs_initialized_) {
        ArFrame_transformCoordinates2d(
                arSession_, arFrame_, AR_COORDINATES_2D_OPENGL_NORMALIZED_DEVICE_COORDINATES,
                BackgroundRenderer::kNumVertices, BackgroundRenderer_kVerticesDevice, AR_COORDINATES_2D_TEXTURE_NORMALIZED,
                transformed_uvs_);
        uvs_initialized_ = true;
    }

    ArCamera* ar_camera;
    ArFrame_acquireCamera(arSession_, arFrame_, &ar_camera);

    ArCamera_getViewMatrix(arSession_, ar_camera, glm::value_ptr(viewMatrix_));
    ArCamera_getProjectionMatrix(arSession_, ar_camera,
            /*near=*/0.1f, /*far=*/100.f,
            glm::value_ptr(projectionMatrix_));

    ArTrackingState camera_tracking_state;
    ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

    CameraModel model;
    if(camera_tracking_state == AR_TRACKING_STATE_TRACKING)
    {
        // pose in OpenGL coordinates
        float pose_raw[7];
        ArCamera_getPose(arSession_, ar_camera, arPose_);
        ArPose_getPoseRaw(arSession_, arPose_, pose_raw);
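        // ArPose_getPoseRaw() fills {qx, qy, qz, qw, tx, ty, tz}: indices 4-6
        // are the translation and 0-3 the rotation quaternion. The pose is in
        // the OpenGL world frame and is rebased below into RTAB-Map's frame.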
        Transform poseArCore = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
        pose = rtabmap_world_T_opengl_world * poseArCore * opengl_world_T_rtabmap_world;

        if(pose.isNull())
        {
            LOGE("CameraARCore: Pose is null");
            return data;
        }

        // Get calibration parameters
        float fx, fy, cx, cy;
        int32_t width, height;
        ArCamera_getImageIntrinsics(arSession_, ar_camera, arCameraIntrinsics_);
        ArCameraIntrinsics_getFocalLength(arSession_, arCameraIntrinsics_, &fx, &fy);
        ArCameraIntrinsics_getPrincipalPoint(arSession_, arCameraIntrinsics_, &cx, &cy);
        ArCameraIntrinsics_getImageDimensions(arSession_, arCameraIntrinsics_, &width, &height);
#ifndef DISABLE_LOG
        LOGI("%f %f %f %f %d %d", fx, fy, cx, cy, width, height);
#endif

        if(fx > 0 && fy > 0 && width > 0 && height > 0 && cx > 0 && cy > 0)
        {
            model = CameraModel(fx, fy, cx, cy, deviceTColorCamera_, 0, cv::Size(width, height));

            ArPointCloud * pointCloud = nullptr;
            ArFrame_acquirePointCloud(arSession_, arFrame_, &pointCloud);

            int32_t is_depth_supported = 0;
            ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

            ArImage * image = nullptr;
            ArStatus status = ArFrame_acquireCameraImage(arSession_, arFrame_, &image);
            if(status == AR_SUCCESS)
            {
                if(is_depth_supported)
                {
                    LOGD("Acquire depth image!");
                    ArImage * depthImage = nullptr;
                    ArFrame_acquireDepthImage(arSession_, arFrame_, &depthImage);

                    ArImageFormat format;
                    ArImage_getFormat(arSession_, depthImage, &format);
                    if(format == AR_IMAGE_FORMAT_DEPTH16)
                    {
                        LOGD("Depth format detected!");
                        int planeCount;
                        ArImage_getNumberOfPlanes(arSession_, depthImage, &planeCount);
                        LOGD("planeCount=%d", planeCount);
                        UASSERT_MSG(planeCount == 1, uFormat("Error: getNumberOfPlanes() planeCount = %d", planeCount).c_str());
                        const uint8_t *data = nullptr;
                        int len = 0;
                        int stride;
                        int depth_width;
                        int depth_height;
                        ArImage_getWidth(arSession_, depthImage, &depth_width);
                        ArImage_getHeight(arSession_, depthImage, &depth_height);
                        ArImage_getPlaneRowStride(arSession_, depthImage, 0, &stride);
                        ArImage_getPlaneData(arSession_, depthImage, 0, &data, &len);

                        LOGD("width=%d, height=%d, bytes=%d stride=%d", depth_width, depth_height, len, stride);

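                        // AR_IMAGE_FORMAT_DEPTH16: one 16-bit value per pixel with the
                        // range in millimeters (in the Android DEPTH16 layout the 3 most
                        // significant bits carry a confidence value); kept as CV_16UC1.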
                        cv::Mat occlusionImage = cv::Mat(depth_height, depth_width, CV_16UC1, (void*)data).clone();

                        float scaleX = (float)depth_width / (float)width;
                        float scaleY = (float)depth_height / (float)height;
                        CameraModel occlusionModel(fx*scaleX, fy*scaleY, cx*scaleX, cy*scaleY, pose*deviceTColorCamera_, 0, cv::Size(depth_width, depth_height));
                        this->setOcclusionImage(occlusionImage, occlusionModel);
                    }
                    ArImage_release(depthImage);
                }

                int64_t timestamp_ns;
                ArImageFormat format;
                ArImage_getTimestamp(arSession_, image, &timestamp_ns);
                ArImage_getFormat(arSession_, image, &format);
                if(format == AR_IMAGE_FORMAT_YUV_420_888)
                {
#ifndef DISABLE_LOG
                    int32_t num_planes;
                    ArImage_getNumberOfPlanes(arSession_, image, &num_planes);
                    for(int i=0; i<num_planes; ++i)
                    {
                        int32_t pixel_stride;
                        int32_t row_stride;
                        ArImage_getPlanePixelStride(arSession_, image, i, &pixel_stride);
                        ArImage_getPlaneRowStride(arSession_, image, i, &row_stride);
                        LOGI("Plane %d/%d: pixel stride=%d, row stride=%d", i+1, num_planes, pixel_stride, row_stride);
                    }
#endif
                    const uint8_t * plane_data;
                    const uint8_t * plane_uv_data;
                    int32_t data_length;
                    ArImage_getPlaneData(arSession_, image, 0, &plane_data, &data_length);
                    int32_t uv_data_length;
                    ArImage_getPlaneData(arSession_, image, 2, &plane_uv_data, &uv_data_length);

                    if(plane_data != nullptr && data_length == height*width)
                    {
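                        // ArImage_getTimestamp() is expressed in nanoseconds;
                        // 10e8 == 1e9, so this converts the stamp to seconds.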
                        double stamp = double(timestamp_ns)/10e8;
#ifndef DISABLE_LOG
                        LOGI("data_length=%d stamp=%f", data_length, stamp);
#endif
                        cv::Mat rgb;
                        if((long)plane_uv_data-(long)plane_data != data_length)
                        {
                            // The uv-plane is not concatenated to y plane in memory, so concatenate them
                            cv::Mat yuv(height+height/2, width, CV_8UC1);
                            memcpy(yuv.data, plane_data, data_length);
                            memcpy(yuv.data+data_length, plane_uv_data, height/2*width);
                            cv::cvtColor(yuv, rgb, cv::COLOR_YUV2BGR_NV21);
                        }
                        else
                        {
                            cv::cvtColor(cv::Mat(height+height/2, width, CV_8UC1, (void*)plane_data), rgb, cv::COLOR_YUV2BGR_NV21);
                        }

                        std::vector<cv::KeyPoint> kpts;
                        std::vector<cv::Point3f> kpts3;
                        LaserScan scan;
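                        // ArPointCloud_getData() returns 4 floats per point
                        // (x, y, z, confidence) in the world frame, hence the
                        // CV_32FC4 wrapper passed to scanFromPointCloudData().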
                        if(pointCloud)
                        {
                            int32_t points = 0;
                            ArPointCloud_getNumberOfPoints(arSession_, pointCloud, &points);
                            const float * pointCloudData = 0;
                            ArPointCloud_getData(arSession_, pointCloud, &pointCloudData);
#ifndef DISABLE_LOG
                            LOGI("pointCloudData=%d size=%d", pointCloudData?1:0, points);
#endif
                            if(pointCloudData && points>0)
                            {
                                cv::Mat pointCloudDataMat(1, points, CV_32FC4, (void *)pointCloudData);
                                scan = scanFromPointCloudData(pointCloudDataMat, pose, model, rgb, &kpts, &kpts3);
#ifndef DISABLE_LOG
                                LOGI("valid scan points = %d", scan.size());
#endif
                            }
                        }
                        else
                        {
                            LOGI("pointCloud empty");
                        }

                        data = SensorData(scan, rgb, depthFromMotion_?getOcclusionImage():cv::Mat(), model, 0, stamp);
                        data.setFeatures(kpts, kpts3, cv::Mat());

                        if(!pose.isNull())
                        {
                            this->poseReceived(pose, stamp);
                        }
                    }
                }
                else
                {
                    LOGE("CameraARCore: cannot convert image format %d", format);
                }
            }
            else
            {
                LOGE("CameraARCore: failed to get rgb image (status=%d)", (int)status);
            }

            ArImage_release(image);
            ArPointCloud_release(pointCloud);
        }
    }

    ArCamera_release(ar_camera);

    return data;
}

} /* namespace rtabmap */