CameraARCore.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "CameraARCore.h"
#include "util.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/core/util2d.h"

namespace rtabmap {

// CameraARCore

CameraARCore::CameraARCore(void* env, void* context, void* activity, bool depthFromMotion, bool smoothing):
	CameraMobile(smoothing),
	env_(env),
	context_(context),
	activity_(activity),
	arInstallRequested_(false),
	depthFromMotion_(depthFromMotion)
{
}

CameraARCore::~CameraARCore()
{
	// Disconnect ARCore service
	close();
}


struct CameraConfig {
	int32_t width = 0;
	int32_t height = 0;
	std::string config_label;
	ArCameraConfig* config = nullptr;
};

void getCameraConfigLowestAndHighestResolutions(
		std::vector<CameraConfig> & camera_configs,
		CameraConfig** lowest_resolution_config,
		CameraConfig** highest_resolution_config) {
	if (camera_configs.empty()) {
		return;
	}

	int low_resolution_config_idx = 0;
	int high_resolution_config_idx = 0;
	int32_t smallest_height = camera_configs[0].height;
	int32_t largest_height = camera_configs[0].height;

	for (int i = 1; i < camera_configs.size(); ++i) {
		int32_t image_height = camera_configs[i].height;
		if (image_height < smallest_height) {
			smallest_height = image_height;
			low_resolution_config_idx = i;
		} else if (image_height > largest_height) {
			largest_height = image_height;
			high_resolution_config_idx = i;
		}
	}

	if (low_resolution_config_idx == high_resolution_config_idx) {
		*lowest_resolution_config = &camera_configs[low_resolution_config_idx];
	} else {
		*lowest_resolution_config = &camera_configs[low_resolution_config_idx];
		*highest_resolution_config = &camera_configs[high_resolution_config_idx];
	}
}

void copyCameraConfig(
		const ArSession* ar_session, const ArCameraConfigList* all_configs,
		int index, int num_configs, CameraConfig* camera_config) {
	if (camera_config != nullptr && index >= 0 && index < num_configs) {
		ArCameraConfig_create(ar_session, &camera_config->config);
		ArCameraConfigList_getItem(ar_session, all_configs, index,
				camera_config->config);
		ArCameraConfig_getImageDimensions(ar_session, camera_config->config,
				&camera_config->width,
				&camera_config->height);
		camera_config->config_label = "(" + std::to_string(camera_config->width) +
				"x" + std::to_string(camera_config->height) + ")";
	}
}

void destroyCameraConfigs(std::vector<CameraConfig> & camera_configs) {
	for (int i = 0; i < camera_configs.size(); ++i) {
		if (camera_configs[i].config != nullptr) {
			ArCameraConfig_destroy(camera_configs[i].config);
		}
	}
}

std::string CameraARCore::getSerial() const
{
	return "ARCore";
}

bool CameraARCore::init(const std::string & calibrationFolder, const std::string & cameraName)
{
	close();

	ArInstallStatus install_status;
	// If install was not yet requested, that means that we are resuming the
	// activity for the first time because of explicit user interaction (such
	// as launching the application).
	bool user_requested_install = !arInstallRequested_;

	// === ATTENTION! ATTENTION! ATTENTION! ===
	// This method can and will fail in user-facing situations. Your
	// application must handle these cases at least somewhat gracefully. See
	// HelloAR Java sample code for reasonable behavior.
	ArCoreApk_requestInstall(env_, activity_, user_requested_install, &install_status);

	switch (install_status)
	{
	case AR_INSTALL_STATUS_INSTALLED:
		break;
	case AR_INSTALL_STATUS_INSTALL_REQUESTED:
		arInstallRequested_ = true;
		return false;
	}

	// === ATTENTION! ATTENTION! ATTENTION! ===
	// This method can and will fail in user-facing situations. Your
	// application must handle these cases at least somewhat gracefully. See
	// HelloAR Java sample code for reasonable behavior.
	UASSERT(ArSession_create(env_, context_, &arSession_) == AR_SUCCESS);
	UASSERT(arSession_);

	int32_t is_depth_supported = 0;
	ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

	ArConfig_create(arSession_, &arConfig_);
	UASSERT(arConfig_);

	if (is_depth_supported != 0) {
		ArConfig_setDepthMode(arSession_, arConfig_, AR_DEPTH_MODE_AUTOMATIC);
	} else {
		ArConfig_setDepthMode(arSession_, arConfig_, AR_DEPTH_MODE_DISABLED);
	}

	ArConfig_setFocusMode(arSession_, arConfig_, AR_FOCUS_MODE_FIXED);
	UASSERT(ArSession_configure(arSession_, arConfig_) == AR_SUCCESS);

	ArFrame_create(arSession_, &arFrame_);
	UASSERT(arFrame_);

	ArCameraIntrinsics_create(arSession_, &arCameraIntrinsics_);
	UASSERT(arCameraIntrinsics_);

	ArPose_create(arSession_, nullptr, &arPose_);
	UASSERT(arPose_);

	ArCameraConfigList* all_camera_configs = nullptr;
	int32_t num_configs = 0;
	ArCameraConfigList_create(arSession_, &all_camera_configs);
	// Create filter first to get both 30 and 60 fps.
	ArCameraConfigFilter* camera_config_filter = nullptr;
	ArCameraConfigFilter_create(arSession_, &camera_config_filter);
	ArCameraConfigFilter_setTargetFps(arSession_, camera_config_filter, AR_CAMERA_CONFIG_TARGET_FPS_30 | AR_CAMERA_CONFIG_TARGET_FPS_60);
	ArSession_getSupportedCameraConfigsWithFilter(arSession_, camera_config_filter, all_camera_configs);
	ArCameraConfigList_getSize(arSession_, all_camera_configs, &num_configs);

	if (num_configs < 1) {
		UERROR("No camera config found");
		close();
		return false;
	}

	std::vector<CameraConfig> camera_configs;
	CameraConfig* cpu_low_resolution_camera_config_ptr = nullptr;
	CameraConfig* cpu_high_resolution_camera_config_ptr = nullptr;
	camera_configs.resize(num_configs);
	for (int i = 0; i < num_configs; ++i) {
		copyCameraConfig(arSession_, all_camera_configs, i, num_configs,
				&camera_configs[i]);
	}
	// Determine the highest and lowest CPU resolutions.
	cpu_low_resolution_camera_config_ptr = nullptr;
	cpu_high_resolution_camera_config_ptr = nullptr;
	getCameraConfigLowestAndHighestResolutions(
			camera_configs,
			&cpu_low_resolution_camera_config_ptr,
			&cpu_high_resolution_camera_config_ptr);

	// Cleanup the list obtained as it is safe to destroy the list as camera
	// config instances were explicitly created and copied. Refer to the
	// previous comment.
	ArCameraConfigList_destroy(all_camera_configs);
	ArSession_setCameraConfig(arSession_, cpu_low_resolution_camera_config_ptr->config);
	destroyCameraConfigs(camera_configs);

	ArConfig_setUpdateMode(arSession_, arConfig_, AR_UPDATE_MODE_BLOCKING);

	// Re-apply the configuration after changing the update mode.
	UASSERT(ArSession_configure(arSession_, arConfig_) == AR_SUCCESS);

	if (ArSession_resume(arSession_) != ArStatus::AR_SUCCESS)
	{
		UERROR("Cannot resume camera!");
		// In a rare case (such as another camera app launching) the camera may be
		// given to a different app and so may not be available to this app. Handle
		// this properly and recreate the session at the next iteration.
		close();
		return false;
	}

	return true;
}

void CameraARCore::close()
{
	if(arSession_ != nullptr)
	{
		ArSession_destroy(arSession_);
	}
	arSession_ = nullptr;

	if(arConfig_ != nullptr)
	{
		ArConfig_destroy(arConfig_);
	}
	arConfig_ = nullptr;

	if (arFrame_ != nullptr)
	{
		ArFrame_destroy(arFrame_);
	}
	arFrame_ = nullptr;

	if (arCameraIntrinsics_ != nullptr)
	{
		ArCameraIntrinsics_destroy(arCameraIntrinsics_);
	}
	arCameraIntrinsics_ = nullptr;

	if (arPose_ != nullptr)
	{
		ArPose_destroy(arPose_);
	}
	arPose_ = nullptr;

	CameraMobile::close();
}

LaserScan CameraARCore::scanFromPointCloudData(
		const float * pointCloudData,
		int points,
		const Transform & pose,
		const CameraModel & model,
		const cv::Mat & rgb,
		std::vector<cv::KeyPoint> * kpts,
		std::vector<cv::Point3f> * kpts3D)
{
	if(pointCloudData && points>0)
	{
		cv::Mat scanData(1, points, CV_32FC4);
		float * ptr = scanData.ptr<float>();
		for(int i=0; i<points; ++i)
		{
			cv::Point3f pt(pointCloudData[i*4], pointCloudData[i*4 + 1], pointCloudData[i*4 + 2]);
			pt = util3d::transformPoint(pt, pose.inverse() * rtabmap::opengl_world_T_rtabmap_world);
			ptr[i*4] = pt.x;
			ptr[i*4 + 1] = pt.y;
			ptr[i*4 + 2] = pt.z;

			// get color from rgb image
			cv::Point3f org = pt;
			pt = util3d::transformPoint(pt, opticalRotationInv);
			int u, v;
			model.reproject(pt.x, pt.y, pt.z, u, v);
			unsigned char r=255, g=255, b=255;
			if(model.inFrame(u, v))
			{
				b = rgb.at<cv::Vec3b>(v,u).val[0];
				g = rgb.at<cv::Vec3b>(v,u).val[1];
				r = rgb.at<cv::Vec3b>(v,u).val[2];
				if(kpts)
					kpts->push_back(cv::KeyPoint(u,v,3));
				if(kpts3D)
					kpts3D->push_back(org);
			}
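			// Pack the BGR color into the bits of the 4th float channel
			// (blue in the low byte, then green, then red).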
			*(int*)&ptr[i*4 + 3] = int(b) | (int(g) << 8) | (int(r) << 16);

			//confidence
			//*(int*)&ptr[i*4 + 3] = (int(pointCloudData[i*4 + 3] * 255.0f) << 8) | (int(255) << 16);
		}
		return LaserScan::backwardCompatibility(scanData, 0, 10, Transform::getIdentity());
	}
	return LaserScan();
}

void CameraARCore::setScreenRotationAndSize(ScreenRotation colorCameraToDisplayRotation, int width, int height)
{
	CameraMobile::setScreenRotationAndSize(colorCameraToDisplayRotation, width, height);
	if(arSession_)
	{
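		// ARCore expects the Android display rotation (0..3, in multiples of
		// 90 degrees); the +1 compensates for the 90-degree offset between the
		// camera sensor and the display, wrapping back into the 0..3 range.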
		int ret = static_cast<int>(colorCameraToDisplayRotation) + 1; // remove 90deg camera rotation
		if (ret > 3) {
			ret -= 4;
		}

		ArSession_setDisplayGeometry(arSession_, ret, width, height);
	}
}

SensorData CameraARCore::captureImage(CameraInfo * info)
{
	//LOGI("Capturing image...");

	SensorData data;
	if(!arSession_)
	{
		return data;
	}

	if(textureId_ == 0)
	{
		glGenTextures(1, &textureId_);
		glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId_);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	}
	if(textureId_ != 0)
		ArSession_setCameraTextureName(arSession_, textureId_);

	// Update session to get current frame and render camera background.
	if (ArSession_update(arSession_, arFrame_) != AR_SUCCESS) {
		LOGE("CameraARCore::captureImage() ArSession_update error");
		return data;
	}

	// If display rotation changed (also includes view size change), we need to
	// re-query the uv coordinates for the on-screen portion of the camera image.
	int32_t geometry_changed = 0;
	ArFrame_getDisplayGeometryChanged(arSession_, arFrame_, &geometry_changed);
	if (geometry_changed != 0 || !uvs_initialized_) {
		ArFrame_transformCoordinates2d(
				arSession_, arFrame_, AR_COORDINATES_2D_OPENGL_NORMALIZED_DEVICE_COORDINATES,
				BackgroundRenderer::kNumVertices, BackgroundRenderer_kVertices, AR_COORDINATES_2D_TEXTURE_NORMALIZED,
				transformed_uvs_);
		uvs_initialized_ = true;
	}

	ArCamera* ar_camera;
	ArFrame_acquireCamera(arSession_, arFrame_, &ar_camera);

	ArCamera_getViewMatrix(arSession_, ar_camera, glm::value_ptr(viewMatrix_));
	ArCamera_getProjectionMatrix(arSession_, ar_camera,
			/*near=*/0.1f, /*far=*/100.f,
			glm::value_ptr(projectionMatrix_));

	// adjust origin
	if(!getOriginOffset().isNull())
	{
		viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * getOriginOffset() * rtabmap::rtabmap_world_T_opengl_world) * glm::inverse(viewMatrix_));
	}

	ArTrackingState camera_tracking_state;
	ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

	Transform pose;
	CameraModel model;
	if(camera_tracking_state == AR_TRACKING_STATE_TRACKING)
	{
		// pose in OpenGL coordinates
		float pose_raw[7];
		ArCamera_getPose(arSession_, ar_camera, arPose_);
		ArPose_getPoseRaw(arSession_, arPose_, pose_raw);
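		// ArPose_getPoseRaw() returns {qx, qy, qz, qw, tx, ty, tz}; Transform's
		// constructor below takes (x, y, z, qx, qy, qz, qw), hence the reordering.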
		pose = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
		pose = rtabmap::rtabmap_world_T_opengl_world * pose * rtabmap::opengl_world_T_rtabmap_world;

		Transform poseArCore = pose;
		if(pose.isNull())
		{
			LOGE("CameraARCore: Pose is null");
		}
		else
		{
			this->poseReceived(pose);
			// adjust origin
			if(!getOriginOffset().isNull())
			{
				pose = getOriginOffset() * pose;
			}
			info->odomPose = pose;
		}

		// Get calibration parameters
		float fx, fy, cx, cy;
		int32_t width, height;
		ArCamera_getImageIntrinsics(arSession_, ar_camera, arCameraIntrinsics_);
		ArCameraIntrinsics_getFocalLength(arSession_, arCameraIntrinsics_, &fx, &fy);
		ArCameraIntrinsics_getPrincipalPoint(arSession_, arCameraIntrinsics_, &cx, &cy);
		ArCameraIntrinsics_getImageDimensions(arSession_, arCameraIntrinsics_, &width, &height);
#ifndef DISABLE_LOG
		LOGI("%f %f %f %f %d %d", fx, fy, cx, cy, width, height);
#endif

		if(fx > 0 && fy > 0 && width > 0 && height > 0 && cx > 0 && cy > 0)
		{
			model = CameraModel(fx, fy, cx, cy, deviceTColorCamera_, 0, cv::Size(width, height));

			ArPointCloud * pointCloud = nullptr;
			ArFrame_acquirePointCloud(arSession_, arFrame_, &pointCloud);

			int32_t is_depth_supported = 0;
			ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

			ArImage * image = nullptr;
			ArStatus status = ArFrame_acquireCameraImage(arSession_, arFrame_, &image);
			if(status == AR_SUCCESS)
			{
				if(is_depth_supported)
				{
					LOGD("Acquire depth image!");
					ArImage * depthImage = nullptr;
					ArFrame_acquireDepthImage(arSession_, arFrame_, &depthImage);

					ArImageFormat format;
					ArImage_getFormat(arSession_, depthImage, &format);
					if(format == AR_IMAGE_FORMAT_DEPTH16)
					{
						LOGD("Depth format detected!");
						int planeCount;
						ArImage_getNumberOfPlanes(arSession_, depthImage, &planeCount);
						LOGD("planeCount=%d", planeCount);
						UASSERT_MSG(planeCount == 1, uFormat("Error: getNumberOfPlanes() planeCount = %d", planeCount).c_str());
						const uint8_t *data = nullptr;
						int len = 0;
						int stride;
						int depth_width;
						int depth_height;
						ArImage_getWidth(arSession_, depthImage, &depth_width);
						ArImage_getHeight(arSession_, depthImage, &depth_height);
						ArImage_getPlaneRowStride(arSession_, depthImage, 0, &stride);
						ArImage_getPlaneData(arSession_, depthImage, 0, &data, &len);

						LOGD("width=%d, height=%d, bytes=%d stride=%d", depth_width, depth_height, len, stride);

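						// DEPTH16 packs a millimeter range in the low 13 bits and a
						// confidence value in the top 3 bits of each 16-bit sample;
						// it is copied as-is into a CV_16UC1 occlusion image.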
						cv::Mat occlusionImage = cv::Mat(depth_height, depth_width, CV_16UC1, (void*)data).clone();

						float scaleX = (float)depth_width / (float)width;
						float scaleY = (float)depth_height / (float)height;
						CameraModel occlusionModel(fx*scaleX, fy*scaleY, cx*scaleX, cy*scaleY, pose*deviceTColorCamera_, 0, cv::Size(depth_width, depth_height));
						this->setOcclusionImage(occlusionImage, occlusionModel);
					}
					ArImage_release(depthImage);
				}

				int64_t timestamp_ns;
				ArImageFormat format;
				ArImage_getTimestamp(arSession_, image, &timestamp_ns);
				ArImage_getFormat(arSession_, image, &format);
				if(format == AR_IMAGE_FORMAT_YUV_420_888)
				{
#ifndef DISABLE_LOG
					int32_t num_planes;
					ArImage_getNumberOfPlanes(arSession_, image, &num_planes);
					for(int i=0; i<num_planes; ++i)
					{
						int32_t pixel_stride;
						int32_t row_stride;
						ArImage_getPlanePixelStride(arSession_, image, i, &pixel_stride);
						ArImage_getPlaneRowStride(arSession_, image, i, &row_stride);
						LOGI("Plane %d/%d: pixel stride=%d, row stride=%d", i+1, num_planes, pixel_stride, row_stride);
					}
#endif
					const uint8_t * plane_data;
					const uint8_t * plane_uv_data;
					int32_t data_length;
					ArImage_getPlaneData(arSession_, image, 0, &plane_data, &data_length);
					int32_t uv_data_length;
					ArImage_getPlaneData(arSession_, image, 2, &plane_uv_data, &uv_data_length);

					if(plane_data != nullptr && data_length == height*width)
					{
						double stamp = double(timestamp_ns)/1e9; // nanoseconds to seconds
#ifndef DISABLE_LOG
						LOGI("data_length=%d stamp=%f", data_length, stamp);
#endif
						cv::Mat rgb;
						if((long)plane_uv_data - (long)plane_data != data_length)
						{
							// The uv-plane is not concatenated to y plane in memory, so concatenate them
							cv::Mat yuv(height+height/2, width, CV_8UC1);
							memcpy(yuv.data, plane_data, data_length);
							memcpy(yuv.data+data_length, plane_uv_data, height/2*width);
							cv::cvtColor(yuv, rgb, cv::COLOR_YUV2BGR_NV21);
						}
						else
						{
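							// Y and interleaved VU planes are already contiguous
							// in memory, so wrap the buffer directly as NV21.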
							cv::cvtColor(cv::Mat(height+height/2, width, CV_8UC1, (void*)plane_data), rgb, cv::COLOR_YUV2BGR_NV21);
						}

						std::vector<cv::KeyPoint> kpts;
						std::vector<cv::Point3f> kpts3;
						LaserScan scan;
						if(pointCloud)
						{
							int32_t points = 0;
							ArPointCloud_getNumberOfPoints(arSession_, pointCloud, &points);
							const float * pointCloudData = 0;
							ArPointCloud_getData(arSession_, pointCloud, &pointCloudData);
#ifndef DISABLE_LOG
							LOGI("pointCloudData=%d size=%d", pointCloudData?1:0, points);
#endif
							if(pointCloudData && points>0)
							{
								scan = scanFromPointCloudData(pointCloudData, points, poseArCore, model, rgb, &kpts, &kpts3);
							}
						}
						else
						{
							LOGI("pointCloud empty");
						}

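						// Bundle everything into the SensorData result; when depth
						// from motion is enabled, the latest occlusion image serves
						// as the depth channel.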
						data = SensorData(scan, rgb, depthFromMotion_?getOcclusionImage():cv::Mat(), model, 0, stamp);
						data.setFeatures(kpts, kpts3, cv::Mat());
					}
				}
				else
				{
					LOGE("CameraARCore: cannot convert image format %d", format);
				}
			}
			else
			{
				LOGE("CameraARCore: failed to get rgb image (status=%d)", (int)status);
			}

			ArImage_release(image);
			ArPointCloud_release(pointCloud);
		}
	}

	ArCamera_release(ar_camera);

	return data;
}

void CameraARCore::capturePoseOnly()
{
	//LOGI("Capturing image...");

	if(!arSession_)
	{
		return;
	}

	if(textureId_ != 0)
	{
		glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId_);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
		ArSession_setCameraTextureName(arSession_, textureId_);
	}

	// Update session to get current frame and render camera background.
	if (ArSession_update(arSession_, arFrame_) != AR_SUCCESS) {
		LOGE("CameraARCore::capturePoseOnly() ArSession_update error");
		return;
	}

	// If display rotation changed (also includes view size change), we need to
	// re-query the uv coordinates for the on-screen portion of the camera image.
	int32_t geometry_changed = 0;
	ArFrame_getDisplayGeometryChanged(arSession_, arFrame_, &geometry_changed);
	if (geometry_changed != 0 || !uvs_initialized_) {
		ArFrame_transformCoordinates2d(
				arSession_, arFrame_, AR_COORDINATES_2D_OPENGL_NORMALIZED_DEVICE_COORDINATES,
				BackgroundRenderer::kNumVertices, BackgroundRenderer_kVertices, AR_COORDINATES_2D_TEXTURE_NORMALIZED,
				transformed_uvs_);
		uvs_initialized_ = true;
	}

	ArCamera* ar_camera;
	ArFrame_acquireCamera(arSession_, arFrame_, &ar_camera);

	ArCamera_getViewMatrix(arSession_, ar_camera, glm::value_ptr(viewMatrix_));
	ArCamera_getProjectionMatrix(arSession_, ar_camera,
			/*near=*/0.1f, /*far=*/100.f,
			glm::value_ptr(projectionMatrix_));

	// adjust origin
	if(!getOriginOffset().isNull())
	{
		viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * getOriginOffset() * rtabmap::rtabmap_world_T_opengl_world) * glm::inverse(viewMatrix_));
	}

	ArTrackingState camera_tracking_state;
	ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

	Transform pose;
	if(camera_tracking_state == AR_TRACKING_STATE_TRACKING)
	{
		// pose in OpenGL coordinates
		float pose_raw[7];
		ArCamera_getPose(arSession_, ar_camera, arPose_);
		ArPose_getPoseRaw(arSession_, arPose_, pose_raw);
		pose = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
		if(!pose.isNull())
		{
			pose = rtabmap::rtabmap_world_T_opengl_world * pose * rtabmap::opengl_world_T_rtabmap_world;
			this->poseReceived(pose);

			if(!getOriginOffset().isNull())
			{
				pose = getOriginOffset() * pose;
			}
		}

		int32_t is_depth_supported = 0;
		ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

		if(is_depth_supported)
		{
			LOGD("Acquire depth image!");
			ArImage * depthImage = nullptr;
			ArFrame_acquireDepthImage(arSession_, arFrame_, &depthImage);

			ArImageFormat format;
			ArImage_getFormat(arSession_, depthImage, &format);
			if(format == AR_IMAGE_FORMAT_DEPTH16)
			{
				LOGD("Depth format detected!");
				int planeCount;
				ArImage_getNumberOfPlanes(arSession_, depthImage, &planeCount);
				LOGD("planeCount=%d", planeCount);
				UASSERT_MSG(planeCount == 1, uFormat("Error: getNumberOfPlanes() planeCount = %d", planeCount).c_str());
				const uint8_t *data = nullptr;
				int len = 0;
				int stride;
				int width;
				int height;
				ArImage_getWidth(arSession_, depthImage, &width);
				ArImage_getHeight(arSession_, depthImage, &height);
				ArImage_getPlaneRowStride(arSession_, depthImage, 0, &stride);
				ArImage_getPlaneData(arSession_, depthImage, 0, &data, &len);

				LOGD("width=%d, height=%d, bytes=%d stride=%d", width, height, len, stride);

				cv::Mat occlusionImage = cv::Mat(height, width, CV_16UC1, (void*)data).clone();

				float fx, fy, cx, cy;
				int32_t rgb_width, rgb_height;
				ArCamera_getImageIntrinsics(arSession_, ar_camera, arCameraIntrinsics_);
				ArCameraIntrinsics_getFocalLength(arSession_, arCameraIntrinsics_, &fx, &fy);
				ArCameraIntrinsics_getPrincipalPoint(arSession_, arCameraIntrinsics_, &cx, &cy);
				ArCameraIntrinsics_getImageDimensions(arSession_, arCameraIntrinsics_, &rgb_width, &rgb_height);

				float scaleX = (float)width / (float)rgb_width;
				float scaleY = (float)height / (float)rgb_height;
				CameraModel occlusionModel(fx*scaleX, fy*scaleY, cx*scaleX, cy*scaleY, pose*deviceTColorCamera_, 0, cv::Size(width, height));
				this->setOcclusionImage(occlusionImage, occlusionModel);
			}
			ArImage_release(depthImage);
		}
	}

	ArCamera_release(ar_camera);
}

} /* namespace rtabmap */
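
Below is a minimal usage sketch; it is not part of CameraARCore.cpp itself. It assumes valid JNI handles (env, context, and activity are placeholders obtained from the Android side), that the public Camera::takeImage() wrapper from rtabmap's core drives the captureImage() method shown above, and that ScreenRotation from util.h is available; init() is simply retried on the next resume when it returns false after requesting an ARCore install.

// Hypothetical driver code; env/context/activity are placeholder JNI handles.
rtabmap::CameraARCore camera(env, context, activity, /*depthFromMotion=*/false, /*smoothing=*/false);
if(camera.init())
{
	// Match ARCore's display geometry to the current screen (portrait here).
	camera.setScreenRotationAndSize(static_cast<ScreenRotation>(0), 1080, 1920);

	rtabmap::CameraInfo info;
	rtabmap::SensorData data = camera.takeImage(&info);
	if(data.isValid())
	{
		// data holds the RGB frame plus optional scan/occlusion depth, and
		// info.odomPose holds the ARCore pose in rtabmap's frame convention.
	}
	camera.close();
}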