CameraARCore.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "CameraARCore.h"
#include "util.h"
// [assumed] includes implied by symbols used below (ULogger macros,
// uFormat(), util3d::transformPoint()); some original include lines are
// missing from this listing.
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/utilite/UConversion.h"
#include "rtabmap/core/util3d_transforms.h"
#include "rtabmap/core/util2d.h"

namespace rtabmap {

//////////////////////////////
// CameraARCore
//////////////////////////////
CameraARCore::CameraARCore(void* env, void* context, void* activity, bool depthFromMotion, bool smoothing):
	CameraMobile(smoothing),
	env_(env),
	context_(context),
	activity_(activity),
	arInstallRequested_(false),
	textureId_(9999),
	uvs_initialized_(false),
	updateOcclusionImage_(false),
	depthFromMotion_(depthFromMotion)
{
}
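
// Note: textureId_ == 9999 is used throughout this file as a "no GL texture
// allocated yet" sentinel; the actual OES texture is created lazily on the
// first captureImage()/capturePoseOnly() call and deleted below.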

CameraARCore::~CameraARCore()
{
	// Disconnect ARCore service
	close();

	if(textureId_ != 9999)
	{
		glDeleteTextures(1, &textureId_);
		textureId_ = 9999;
	}
}


struct CameraConfig {
	int32_t width = 0;
	int32_t height = 0;
	std::string config_label;
	ArCameraConfig* config = nullptr;
};

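// Helper (as in Google's ARCore sample apps): scan the supported camera
// configs and return pointers to the entries with the smallest and largest
// image heights. When every config has the same height, only the lowest
// pointer is set.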
void getCameraConfigLowestAndHighestResolutions(
		std::vector<CameraConfig> & camera_configs,
		CameraConfig** lowest_resolution_config,
		CameraConfig** highest_resolution_config) {
	if (camera_configs.empty()) {
		return;
	}

	size_t low_resolution_config_idx = 0;
	size_t high_resolution_config_idx = 0;
	int32_t smallest_height = camera_configs[0].height;
	int32_t largest_height = camera_configs[0].height;

	for (size_t i = 1; i < camera_configs.size(); ++i) {
		int32_t image_height = camera_configs[i].height;
		if (image_height < smallest_height) {
			smallest_height = image_height;
			low_resolution_config_idx = i;
		} else if (image_height > largest_height) {
			largest_height = image_height;
			high_resolution_config_idx = i;
		}
	}

	if (low_resolution_config_idx == high_resolution_config_idx) {
		*lowest_resolution_config = &camera_configs[low_resolution_config_idx];
	} else {
		*lowest_resolution_config = &camera_configs[low_resolution_config_idx];
		*highest_resolution_config = &camera_configs[high_resolution_config_idx];
	}
}

void copyCameraConfig(
		const ArSession* ar_session, const ArCameraConfigList* all_configs,
		int index, int num_configs, CameraConfig* camera_config) {
	if (camera_config != nullptr && index >= 0 && index < num_configs) {
		ArCameraConfig_create(ar_session, &camera_config->config);
		ArCameraConfigList_getItem(ar_session, all_configs, index,
				camera_config->config);
		ArCameraConfig_getImageDimensions(ar_session, camera_config->config,
				&camera_config->width,
				&camera_config->height);
		camera_config->config_label = "(" + std::to_string(camera_config->width) +
				"x" + std::to_string(camera_config->height) + ")";
	}
}

void destroyCameraConfigs(std::vector<CameraConfig> & camera_configs) {
	for (size_t i = 0; i < camera_configs.size(); ++i) {
		if (camera_configs[i].config != nullptr) {
			ArCameraConfig_destroy(camera_configs[i].config);
		}
	}
}

std::string CameraARCore::getSerial() const
{
	return "ARCore";
}

bool CameraARCore::init(const std::string & calibrationFolder, const std::string & cameraName)
{
	close();

	ArInstallStatus install_status;
	// If install was not yet requested, that means we are resuming the
	// activity for the first time because of an explicit user interaction
	// (such as launching the application).
	bool user_requested_install = !arInstallRequested_;

	// === ATTENTION! ATTENTION! ATTENTION! ===
	// This method can and will fail in user-facing situations. Your
	// application must handle these cases at least somewhat gracefully. See
	// the HelloAR Java sample code for reasonable behavior.
	ArCoreApk_requestInstall(env_, activity_, user_requested_install, &install_status);

	switch (install_status)
	{
	case AR_INSTALL_STATUS_INSTALLED:
		break;
	case AR_INSTALL_STATUS_INSTALL_REQUESTED:
		arInstallRequested_ = true;
		return false;
	}
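
	// ArCoreApk_requestInstall() implements a two-step flow: the first call
	// may launch the install activity and report
	// AR_INSTALL_STATUS_INSTALL_REQUESTED, in which case init() returns false
	// and the caller is expected to retry once the user comes back with
	// ARCore installed.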

	// === ATTENTION! ATTENTION! ATTENTION! ===
	// This method can and will fail in user-facing situations. Your
	// application must handle these cases at least somewhat gracefully. See
	// the HelloAR Java sample code for reasonable behavior.
	UASSERT(ArSession_create(env_, context_, &arSession_) == AR_SUCCESS);

	int32_t is_depth_supported = 0;
	ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

	ArConfig_create(arSession_, &arConfig_);
	UASSERT(arConfig_); // [assumed] restored to match the asserts on the other Ar* handles below

	if (is_depth_supported!=0) {
		ArConfig_setDepthMode(arSession_, arConfig_, AR_DEPTH_MODE_AUTOMATIC);
	} else {
		ArConfig_setDepthMode(arSession_, arConfig_, AR_DEPTH_MODE_DISABLED);
	}
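
	// AR_DEPTH_MODE_AUTOMATIC enables ARCore's depth-from-motion estimation;
	// the resulting depth maps are fetched later with
	// ArFrame_acquireDepthImage() and used for the occlusion image and,
	// optionally, as the SensorData depth when depthFromMotion_ is set.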

	ArConfig_setFocusMode(arSession_, arConfig_, AR_FOCUS_MODE_AUTO);
	UASSERT(ArSession_configure(arSession_, arConfig_) == AR_SUCCESS);

	ArFrame_create(arSession_, &arFrame_);
	UASSERT(arFrame_);

	ArCameraIntrinsics_create(arSession_, &arCameraIntrinsics_);
	UASSERT(arCameraIntrinsics_);

	ArPose_create(arSession_, nullptr, &arPose_);
	UASSERT(arPose_);

	ArCameraConfigList* all_camera_configs = nullptr;
	int32_t num_configs = 0;
	ArCameraConfigList_create(arSession_, &all_camera_configs);
	// Create a filter first to get both 30 and 60 fps configs.
	ArCameraConfigFilter* camera_config_filter = nullptr;
	ArCameraConfigFilter_create(arSession_, &camera_config_filter);
	ArCameraConfigFilter_setTargetFps(arSession_, camera_config_filter, AR_CAMERA_CONFIG_TARGET_FPS_30 | AR_CAMERA_CONFIG_TARGET_FPS_60);
	ArSession_getSupportedCameraConfigsWithFilter(arSession_, camera_config_filter, all_camera_configs);
	ArCameraConfigList_getSize(arSession_, all_camera_configs, &num_configs);

	if (num_configs < 1) {
		UERROR("No camera config found");
		close();
		return false;
	}

	std::vector<CameraConfig> camera_configs;
	CameraConfig* cpu_low_resolution_camera_config_ptr = nullptr;
	CameraConfig* cpu_high_resolution_camera_config_ptr = nullptr;
	camera_configs.resize(num_configs);
	for (int i = 0; i < num_configs; ++i) {
		copyCameraConfig(arSession_, all_camera_configs, i, num_configs,
				&camera_configs[i]);
	}
	// Determine the highest and lowest CPU resolutions.
	cpu_low_resolution_camera_config_ptr = nullptr;
	cpu_high_resolution_camera_config_ptr = nullptr;
	getCameraConfigLowestAndHighestResolutions(
			camera_configs,
			&cpu_low_resolution_camera_config_ptr,
			&cpu_high_resolution_camera_config_ptr);

	// It is safe to destroy the list now: the camera config instances we keep
	// were explicitly created and copied above.
	ArCameraConfigList_destroy(all_camera_configs);
	ArSession_setCameraConfig(arSession_, cpu_low_resolution_camera_config_ptr->config);
	destroyCameraConfigs(camera_configs); // [assumed] frees the per-entry copies created by copyCameraConfig()

	ArConfig_setUpdateMode(arSession_, arConfig_, AR_UPDATE_MODE_BLOCKING);
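
	// With AR_UPDATE_MODE_BLOCKING, ArSession_update() waits until a new
	// camera frame is available (typically 30 or 60 Hz), so the capture
	// thread is paced by the camera instead of spinning.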

	if (ArSession_resume(arSession_) != ArStatus::AR_SUCCESS)
	{
		UERROR("Cannot resume camera!");
		// In a rare case (such as another camera app launching) the camera may be
		// given to a different app and so may not be available to this app. Handle
		// this properly and recreate the session at the next iteration.
		close();
		return false;
	}

	return true;
}

void CameraARCore::close()
{
	if(arSession_!= nullptr)
	{
		ArSession_destroy(arSession_);
	}
	arSession_ = nullptr;

	if(arConfig_!= nullptr)
	{
		ArConfig_destroy(arConfig_);
	}
	arConfig_ = nullptr;

	if (arFrame_ != nullptr)
	{
		ArFrame_destroy(arFrame_);
	}
	arFrame_ = nullptr;

	if (arCameraIntrinsics_ != nullptr)
	{
		ArCameraIntrinsics_destroy(arCameraIntrinsics_);
	}
	arCameraIntrinsics_ = nullptr;

	if (arPose_ != nullptr)
	{
		ArPose_destroy(arPose_);
	}
	arPose_ = nullptr;

	CameraMobile::close(); // [assumed] also release base-class state
	occlusionImage_ = cv::Mat();
}
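
// The scan built below is packed as a 1xN CV_32FC4 matrix: x, y, z in the
// camera base frame plus the point's color bit-packed into the fourth float
// (B | G<<8 | R<<16), the legacy XYZRGB layout understood by
// LaserScan::backwardCompatibility().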

LaserScan CameraARCore::scanFromPointCloudData(
		const float * pointCloudData,
		int points,
		const Transform & pose,
		const CameraModel & model,
		const cv::Mat & rgb,
		std::vector<cv::KeyPoint> * kpts,
		std::vector<cv::Point3f> * kpts3D)
{
	if(pointCloudData && points>0)
	{
		cv::Mat scanData(1, points, CV_32FC4);
		float * ptr = scanData.ptr<float>();
		for(int i=0; i<points; ++i)
		{
			cv::Point3f pt(pointCloudData[i*4], pointCloudData[i*4 + 1], pointCloudData[i*4 + 2]);
			// [assumed] ARCore point clouds are in the OpenGL world frame;
			// bring the point into the local camera frame before projecting.
			pt = util3d::transformPoint(pt, pose.inverse() * rtabmap::rtabmap_world_T_opengl_world);
			ptr[i*4] = pt.x;
			ptr[i*4 + 1] = pt.y;
			ptr[i*4 + 2] = pt.z;

			// Get color from the rgb image
			cv::Point3f org = pt;
			// [assumed] rotate into the optical frame expected by reproject()
			pt = util3d::transformPoint(pt, opticalRotationInv);
			int u,v;
			model.reproject(pt.x, pt.y, pt.z, u, v);
			unsigned char r=255,g=255,b=255;
			if(model.inFrame(u, v))
			{
				b=rgb.at<cv::Vec3b>(v,u).val[0];
				g=rgb.at<cv::Vec3b>(v,u).val[1];
				r=rgb.at<cv::Vec3b>(v,u).val[2];
				if(kpts)
					kpts->push_back(cv::KeyPoint(u,v,3));
				if(kpts3D)
					kpts3D->push_back(org);
			}
			*(int*)&ptr[i*4 + 3] = int(b) | (int(g) << 8) | (int(r) << 16);

			//confidence
			//*(int*)&ptr[i*4 + 3] = (int(pointCloudData[i*4 + 3] * 255.0f) << 8) | (int(255) << 16);
		}
		// [assumed] wrap the packed matrix as a LaserScan
		return LaserScan::backwardCompatibility(scanData, 0, 10, rtabmap::Transform::getIdentity());
	}
	return LaserScan();
}

void CameraARCore::setScreenRotationAndSize(ScreenRotation colorCameraToDisplayRotation, int width, int height)
{
	CameraMobile::setScreenRotationAndSize(colorCameraToDisplayRotation, width, height);
	if(arSession_)
	{
		// Add one step (90 deg) to compensate for the camera sensor's native
		// rotation, wrapping modulo 4.
		int ret = static_cast<int>(colorCameraToDisplayRotation) + 1;
		if (ret > 3) {
			ret -= 4;
		}

		ArSession_setDisplayGeometry(arSession_, ret, width, height);
	}
}
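
// Example of the mapping above (assuming ScreenRotation follows Android's
// Surface.ROTATION_0..ROTATION_270 values 0..3): a display rotation of 0 is
// passed to ArSession_setDisplayGeometry() as 1 (90 deg), 1 -> 2, 2 -> 3,
// and 3 wraps around to 0.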

SensorData CameraARCore::captureImage(CameraInfo * info)
{
	//LOGI("Capturing image...");

	SensorData data;
	if(!arSession_)
	{
		return data;
	}

	if(textureId_ == 9999)
	{
		glGenTextures(1, &textureId_);
		glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId_);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	}
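
	// ARCore streams the camera preview into a GL_TEXTURE_EXTERNAL_OES
	// texture, which has to be registered with the session (done every frame
	// here) before ArSession_update() can produce camera images; the texture
	// is created lazily above because a GL context is required.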
	ArSession_setCameraTextureName(arSession_, textureId_);

	// Update session to get current frame and render camera background.
	if (ArSession_update(arSession_, arFrame_) != AR_SUCCESS) {
		LOGE("CameraARCore::captureImage() ArSession_update error");
		return data;
	}

	// If the display rotation changed (this also includes view size changes),
	// we need to re-query the uv coordinates for the on-screen portion of the
	// camera image.
	int32_t geometry_changed = 0;
	ArFrame_getDisplayGeometryChanged(arSession_, arFrame_, &geometry_changed);
	if (geometry_changed != 0 || !uvs_initialized_) {
		ArFrame_transformCoordinates2d(
				arSession_, arFrame_, AR_COORDINATES_2D_OPENGL_NORMALIZED_DEVICE_COORDINATES,
				BackgroundRenderer::kNumVertices, BackgroundRenderer_kVertices, AR_COORDINATES_2D_TEXTURE_NORMALIZED,
				transformed_uvs_);
		uvs_initialized_ = true;
	}
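
	// transformed_uvs_ now holds the texture coordinates matching the
	// background quad's NDC vertices, so the background renderer can draw the
	// camera image correctly for the current display rotation.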

	ArCamera* ar_camera;
	ArFrame_acquireCamera(arSession_, arFrame_, &ar_camera);

	ArCamera_getViewMatrix(arSession_, ar_camera, glm::value_ptr(viewMatrix_));
	ArCamera_getProjectionMatrix(arSession_, ar_camera,
			/*near=*/0.1f, /*far=*/100.f,
			glm::value_ptr(projectionMatrix_));

	ArTrackingState camera_tracking_state;
	ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

	Transform pose;
	CameraModel model;
	if(camera_tracking_state == AR_TRACKING_STATE_TRACKING)
	{
		// Pose in OpenGL coordinates; ArPose_getPoseRaw() returns the
		// quaternion first ({qx, qy, qz, qw}) followed by the translation
		// ({tx, ty, tz}), hence the index order below.
		float pose_raw[7];
		ArCamera_getPose(arSession_, ar_camera, arPose_);
		ArPose_getPoseRaw(arSession_, arPose_, pose_raw);
		pose = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
		// [assumed] convert the OpenGL pose to the RTAB-Map world frame
		pose = rtabmap::rtabmap_world_T_opengl_world * pose * rtabmap::opengl_world_T_rtabmap_world;

		// Get calibration parameters
		float fx,fy, cx, cy;
		int32_t width, height;
		ArCamera_getImageIntrinsics(arSession_, ar_camera, arCameraIntrinsics_);
		ArCameraIntrinsics_getFocalLength(arSession_, arCameraIntrinsics_, &fx, &fy);
		ArCameraIntrinsics_getPrincipalPoint(arSession_, arCameraIntrinsics_, &cx, &cy);
		ArCameraIntrinsics_getImageDimensions(arSession_, arCameraIntrinsics_, &width, &height);
#ifndef DISABLE_LOG
		LOGI("%f %f %f %f %d %d", fx, fy, cx, cy, width, height);
#endif

		if(fx > 0 && fy > 0 && width > 0 && height > 0 && cx > 0 && cy > 0)
		{
			model = CameraModel(fx, fy, cx, cy, deviceTColorCamera_, 0, cv::Size(width, height));

			ArPointCloud * pointCloud = nullptr;
			ArFrame_acquirePointCloud(arSession_, arFrame_, &pointCloud);

			int32_t is_depth_supported = 0;
			ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

			ArImage * image = nullptr;
			ArStatus status = ArFrame_acquireCameraImage(arSession_, arFrame_, &image);
			if(status == AR_SUCCESS)
			{
				if(is_depth_supported && (updateOcclusionImage_||depthFromMotion_))
				{
					LOGD("Acquire depth image!");
					ArImage * depthImage = nullptr;
					ArFrame_acquireDepthImage(arSession_, arFrame_, &depthImage);

					ArImageFormat format;
					ArImage_getFormat(arSession_, depthImage, &format);
					if(format == AR_IMAGE_FORMAT_DEPTH16)
					{
						// DEPTH16: one 16-bit value per pixel, depth in
						// millimeters in the low 13 bits and a confidence
						// value in the top 3 bits.
						LOGD("Depth format detected!");
						int planeCount;
						ArImage_getNumberOfPlanes(arSession_, depthImage, &planeCount);
						LOGD("planeCount=%d", planeCount);
						UASSERT_MSG(planeCount == 1, uFormat("Error: getNumberOfPlanes() planeCount = %d", planeCount).c_str());
						const uint8_t *data = nullptr;
						int len = 0;
						int stride;
						int depth_width;
						int depth_height;
						ArImage_getWidth(arSession_, depthImage, &depth_width);
						ArImage_getHeight(arSession_, depthImage, &depth_height);
						ArImage_getPlaneRowStride(arSession_, depthImage, 0, &stride);
						ArImage_getPlaneData(arSession_, depthImage, 0, &data, &len);

						LOGD("width=%d, height=%d, bytes=%d stride=%d", depth_width, depth_height, len, stride);

						occlusionImage_ = cv::Mat(depth_height, depth_width, CV_16UC1, (void*)data).clone();

						// Scale the RGB intrinsics down to the depth image resolution.
						float scaleX = (float)depth_width / (float)width;
						float scaleY = (float)depth_height / (float)height;
						occlusionModel_ = CameraModel(fx*scaleX, fy*scaleY, cx*scaleX, cy*scaleY, pose*deviceTColorCamera_, 0, cv::Size(depth_width, depth_height));
					}
					ArImage_release(depthImage);
				}

				int64_t timestamp_ns;
				ArImageFormat format;
				ArImage_getTimestamp(arSession_, image, &timestamp_ns);
				ArImage_getFormat(arSession_, image, &format);
				if(format == AR_IMAGE_FORMAT_YUV_420_888)
				{
#ifndef DISABLE_LOG
					int32_t num_planes;
					ArImage_getNumberOfPlanes(arSession_, image, &num_planes);
					for(int i=0;i<num_planes; ++i)
					{
						int32_t pixel_stride;
						int32_t row_stride;
						ArImage_getPlanePixelStride(arSession_, image, i, &pixel_stride);
						ArImage_getPlaneRowStride(arSession_, image, i, &row_stride);
						LOGI("Plane %d/%d: pixel stride=%d, row stride=%d", i+1, num_planes, pixel_stride, row_stride);
					}
#endif
					const uint8_t * plane_data;
					const uint8_t * plane_uv_data;
					int32_t data_length;
					ArImage_getPlaneData(arSession_, image, 0, &plane_data, &data_length);
					int32_t uv_data_length;
					ArImage_getPlaneData(arSession_, image, 2, &plane_uv_data, &uv_data_length);

					if(plane_data != nullptr && data_length == height*width)
					{
						double stamp = double(timestamp_ns)/1.0e9; // ns -> s
#ifndef DISABLE_LOG
						LOGI("data_length=%d stamp=%f", data_length, stamp);
#endif
						cv::Mat rgb;
						if((long)plane_uv_data-(long)plane_data != data_length)
						{
							// The uv-plane is not concatenated to the y-plane in memory, so concatenate them
							cv::Mat yuv(height+height/2, width, CV_8UC1);
							memcpy(yuv.data, plane_data, data_length);
							memcpy(yuv.data+data_length, plane_uv_data, height/2*width);
							cv::cvtColor(yuv, rgb, CV_YUV2BGR_NV21);
						}
						else
						{
							cv::cvtColor(cv::Mat(height+height/2, width, CV_8UC1, (void*)plane_data), rgb, CV_YUV2BGR_NV21);
						}
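
						// Plane 0 is Y and plane 2 holds the interleaved VU
						// data (NV21 order). When the driver lays the planes
						// out back-to-back (uv pointer == y pointer + y size),
						// the buffer can be wrapped and converted in place;
						// otherwise the planes are first copied into one
						// contiguous buffer, as done above.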

						std::vector<cv::KeyPoint> kpts;
						std::vector<cv::Point3f> kpts3;
						LaserScan scan;
						if(pointCloud)
						{
							int32_t points = 0;
							ArPointCloud_getNumberOfPoints(arSession_, pointCloud, &points);
							const float * pointCloudData = 0;
							ArPointCloud_getData(arSession_, pointCloud, &pointCloudData);
#ifndef DISABLE_LOG
							LOGI("pointCloudData=%d size=%d", pointCloudData?1:0, points);
#endif
							if(pointCloudData && points>0)
							{
								scan = scanFromPointCloudData(pointCloudData, points, pose, model, rgb, &kpts, &kpts3);
							}
						}
						else
						{
							LOGI("pointCloud empty");
						}

						data = SensorData(scan, rgb, depthFromMotion_?occlusionImage_:cv::Mat(), model, 0, stamp);
						data.setFeatures(kpts, kpts3, cv::Mat());
					}
				}
				else
				{
					LOGE("CameraARCore: cannot convert image format %d", format);
				}
			}
			else
			{
				LOGE("CameraARCore: failed to get rgb image (status=%d)", (int)status);
			}

			ArImage_release(image);
			ArPointCloud_release(pointCloud);
		}
	}

	ArCamera_release(ar_camera);

	if(pose.isNull())
	{
		LOGE("CameraARCore: Pose is null");
	}
	else
	{
		this->poseReceived(pose);
		if(info)
		{
			info->odomPose = pose;
		}
	}
	return data;
}

void CameraARCore::capturePoseOnly()
{
	//LOGI("Capturing image...");

	if(!arSession_)
	{
		return;
	}

	if(textureId_ == 9999)
	{
		glGenTextures(1, &textureId_);
		glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId_);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	}
	ArSession_setCameraTextureName(arSession_, textureId_);

	// Update session to get current frame and render camera background.
	if (ArSession_update(arSession_, arFrame_) != AR_SUCCESS) {
		LOGE("CameraARCore::capturePoseOnly() ArSession_update error");
		return;
	}
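
	// Even in pose-only mode the session must be pumped with
	// ArSession_update() (and a camera texture registered) for motion
	// tracking to keep advancing; only the image outputs are skipped here.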

	// If the display rotation changed (this also includes view size changes),
	// we need to re-query the uv coordinates for the on-screen portion of the
	// camera image.
	int32_t geometry_changed = 0;
	ArFrame_getDisplayGeometryChanged(arSession_, arFrame_, &geometry_changed);
	if (geometry_changed != 0 || !uvs_initialized_) {
		ArFrame_transformCoordinates2d(
				arSession_, arFrame_, AR_COORDINATES_2D_OPENGL_NORMALIZED_DEVICE_COORDINATES,
				BackgroundRenderer::kNumVertices, BackgroundRenderer_kVertices, AR_COORDINATES_2D_TEXTURE_NORMALIZED,
				transformed_uvs_);
		uvs_initialized_ = true;
	}

	ArCamera* ar_camera;
	ArFrame_acquireCamera(arSession_, arFrame_, &ar_camera);

	ArCamera_getViewMatrix(arSession_, ar_camera, glm::value_ptr(viewMatrix_));
	ArCamera_getProjectionMatrix(arSession_, ar_camera,
			/*near=*/0.1f, /*far=*/100.f,
			glm::value_ptr(projectionMatrix_));

	ArTrackingState camera_tracking_state;
	ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

	Transform pose;
	CameraModel model;
	if(camera_tracking_state == AR_TRACKING_STATE_TRACKING)
	{
		// Pose in OpenGL coordinates (quaternion first, then translation).
		float pose_raw[7];
		ArCamera_getPose(arSession_, ar_camera, arPose_);
		ArPose_getPoseRaw(arSession_, arPose_, pose_raw);
		pose = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
		if(!pose.isNull())
		{
			// [assumed] convert the OpenGL pose to the RTAB-Map world frame
			pose = rtabmap::rtabmap_world_T_opengl_world * pose * rtabmap::opengl_world_T_rtabmap_world;
			this->poseReceived(pose);
		}

		int32_t is_depth_supported = 0;
		ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

		if(is_depth_supported && updateOcclusionImage_)
		{
			LOGD("Acquire depth image!");
			ArImage * depthImage = nullptr;
			ArFrame_acquireDepthImage(arSession_, arFrame_, &depthImage);

			ArImageFormat format;
			ArImage_getFormat(arSession_, depthImage, &format);
			if(format == AR_IMAGE_FORMAT_DEPTH16)
			{
				LOGD("Depth format detected!");
				int planeCount;
				ArImage_getNumberOfPlanes(arSession_, depthImage, &planeCount);
				LOGD("planeCount=%d", planeCount);
				UASSERT_MSG(planeCount == 1, uFormat("Error: getNumberOfPlanes() planeCount = %d", planeCount).c_str());
				const uint8_t *data = nullptr;
				int len = 0;
				int stride;
				int width;
				int height;
				ArImage_getWidth(arSession_, depthImage, &width);
				ArImage_getHeight(arSession_, depthImage, &height);
				ArImage_getPlaneRowStride(arSession_, depthImage, 0, &stride);
				ArImage_getPlaneData(arSession_, depthImage, 0, &data, &len);

				LOGD("width=%d, height=%d, bytes=%d stride=%d", width, height, len, stride);

				occlusionImage_ = cv::Mat(height, width, CV_16UC1, (void*)data).clone();

				float fx,fy, cx, cy;
				int32_t rgb_width, rgb_height;
				ArCamera_getImageIntrinsics(arSession_, ar_camera, arCameraIntrinsics_);
				ArCameraIntrinsics_getFocalLength(arSession_, arCameraIntrinsics_, &fx, &fy);
				ArCameraIntrinsics_getPrincipalPoint(arSession_, arCameraIntrinsics_, &cx, &cy);
				ArCameraIntrinsics_getImageDimensions(arSession_, arCameraIntrinsics_, &rgb_width, &rgb_height);

				// Scale the RGB intrinsics down to the depth image resolution.
				float scaleX = (float)width / (float)rgb_width;
				float scaleY = (float)height / (float)rgb_height;
				occlusionModel_ = CameraModel(fx*scaleX, fy*scaleY, cx*scaleX, cy*scaleY, pose*deviceTColorCamera_, 0, cv::Size(width, height));
			}
			ArImage_release(depthImage);
		}
	}

	ArCamera_release(ar_camera);
}

} /* namespace rtabmap */