	arInstallRequested_(false),
	depthFromMotion_(depthFromMotion)
// ...
		std::vector<CameraConfig> & camera_configs,
		CameraConfig ** lowest_resolution_config,
		CameraConfig ** highest_resolution_config)
{
	if (camera_configs.empty()) {
		return;
	}

	int low_resolution_config_idx = 0;
	int high_resolution_config_idx = 0;
	int32_t smallest_height = camera_configs[0].height;
	int32_t largest_height = camera_configs[0].height;

	for (int i = 1; i < (int)camera_configs.size(); ++i) {
		int32_t image_height = camera_configs[i].height;
		if (image_height < smallest_height) {
			smallest_height = image_height;
			low_resolution_config_idx = i;
		} else if (image_height > largest_height) {
			largest_height = image_height;
			high_resolution_config_idx = i;
		}
	}

	if (low_resolution_config_idx == high_resolution_config_idx) {
		*lowest_resolution_config = &camera_configs[low_resolution_config_idx];
	} else {
		*lowest_resolution_config = &camera_configs[low_resolution_config_idx];
		*highest_resolution_config = &camera_configs[high_resolution_config_idx];
	}
}
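// Note: when every config has the same height, the two indices above stay equal and only
// *lowest_resolution_config is set, so callers should expect the "highest" output to remain
// unchanged in that case (same behaviour as the ARCore sample code this helper appears to follow).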
// ...
		const ArSession * ar_session,
		const ArCameraConfigList * all_configs,
		int index,
		int num_configs,
		CameraConfig * camera_config)
{
	if (camera_config != nullptr && index >= 0 && index < num_configs) {
		ArCameraConfig_create(ar_session, &camera_config->config);
		ArCameraConfigList_getItem(ar_session, all_configs, index, camera_config->config);
		ArCameraConfig_getImageDimensions(ar_session, camera_config->config,
				&camera_config->width, &camera_config->height);
	}
}
// ...
	for (int i = 0; i < (int)camera_configs.size(); ++i) {
		if (camera_configs[i].config != nullptr) {
			ArCameraConfig_destroy(camera_configs[i].config);
		}
	}
	// ...
	ArInstallStatus install_status;
	// ...
	ArCoreApk_requestInstall(env_, activity_, user_requested_install, &install_status);

	switch (install_status)
	{
	case AR_INSTALL_STATUS_INSTALLED:
		// ...
	case AR_INSTALL_STATUS_INSTALL_REQUESTED:
		// ...
	}
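	// Note on the documented ArCoreApk_requestInstall flow: the first call (with
	// user_requested_install=true) may return AR_INSTALL_STATUS_INSTALL_REQUESTED, in which case
	// ARCore pauses the activity and shows the Play Store install/update screen; when the activity
	// resumes, the method is called again and should then report AR_INSTALL_STATUS_INSTALLED.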
	// ...
	int32_t is_depth_supported = 0;
	ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);
	// ...
	if (is_depth_supported != 0) {
		// ...
	}
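	// AR_DEPTH_MODE_AUTOMATIC is ARCore's depth-from-motion mode (backed by a time-of-flight
	// sensor where available); ArSession_isDepthModeSupported only reports device capability, and
	// the elided body of the check above presumably enables the mode on the session config.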
	// ...
	ArCameraConfigList * all_camera_configs = nullptr;
	int32_t num_configs = 0;
	ArCameraConfigList_create(arSession_, &all_camera_configs);

	ArCameraConfigFilter * camera_config_filter = nullptr;
	ArCameraConfigFilter_create(arSession_, &camera_config_filter);
	ArCameraConfigFilter_setTargetFps(arSession_, camera_config_filter,
			AR_CAMERA_CONFIG_TARGET_FPS_30 | AR_CAMERA_CONFIG_TARGET_FPS_60);
	ArSession_getSupportedCameraConfigsWithFilter(arSession_, camera_config_filter, all_camera_configs);
	ArCameraConfigList_getSize(arSession_, all_camera_configs, &num_configs);

	if (num_configs < 1) {
		UERROR("No camera config found");
		// ...
	}
	// ...
	std::vector<CameraConfig> camera_configs;
	CameraConfig * cpu_low_resolution_camera_config_ptr = nullptr;
	CameraConfig * cpu_high_resolution_camera_config_ptr = nullptr;
	camera_configs.resize(num_configs);
	for (int i = 0; i < num_configs; ++i) {
		// ... (fills camera_configs[i] from all_camera_configs, as in the copy helper above)
	}

	cpu_low_resolution_camera_config_ptr = nullptr;
	cpu_high_resolution_camera_config_ptr = nullptr;
	// ... (the resolution-selection helper above is called here with:)
			&cpu_low_resolution_camera_config_ptr,
			&cpu_high_resolution_camera_config_ptr);
	// ...
	ArCameraConfigList_destroy(all_camera_configs);
	ArSession_setCameraConfig(arSession_, cpu_low_resolution_camera_config_ptr->config);
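	// The lowest-resolution config is applied to the session here, presumably to keep the
	// per-frame CPU work (YUV-to-RGB conversion and feature extraction on the camera image) as
	// cheap as possible.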
	// ...
	if (ArSession_resume(arSession_) != ArStatus::AR_SUCCESS)
	{
		UERROR("Cannot resume camera!");
		// ...
	}
// ...
		const float * pointCloudData,
		// ...
		std::vector<cv::KeyPoint> * kpts,
		std::vector<cv::Point3f> * kpts3D)
{
	if(pointCloudData && points > 0)
	{
		cv::Mat scanData(1, points, CV_32FC4);
		float * ptr = scanData.ptr<float>();
		for(unsigned int i = 0; i < points; ++i)
		{
			cv::Point3f pt(pointCloudData[i*4], pointCloudData[i*4 + 1], pointCloudData[i*4 + 2]);
			// ...
			// Project the 3D point into the RGB image to look up its color.
			model.reproject(pt.x, pt.y, pt.z, u, v);
			unsigned char r = 255, g = 255, b = 255;
			// ...
			b = rgb.at<cv::Vec3b>(v, u).val[0];
			g = rgb.at<cv::Vec3b>(v, u).val[1];
			r = rgb.at<cv::Vec3b>(v, u).val[2];
			// ...
			kpts->push_back(cv::KeyPoint(u, v, 3));
			// ...
			kpts3D->push_back(org);
			// ...
			*(int*)&ptr[i*4 + 3] = int(b) | (int(g) << 8) | (int(r) << 16);
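			// The scan is packed as x, y, z, rgb per point: blue in the low byte, then green and
			// red (0x00RRGGBB), with the integer's bit pattern stored in the 4th float channel.
			// A hypothetical consumer would unpack it as:
			//   int rgb = *(int*)&ptr[i*4 + 3];
			//   unsigned char b = rgb & 0xFF, g = (rgb >> 8) & 0xFF, r = (rgb >> 16) & 0xFF;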
	// ...
	int ret = static_cast<int>(colorCameraToDisplayRotation) + 1;
	// ...
	ArSession_setDisplayGeometry(arSession_, ret, width, height);
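	// Assumption: the +1 offset maps the app's rotation enum onto the android.view.Surface
	// ROTATION_0..ROTATION_270 values (0..3) expected by ArSession_setDisplayGeometry, so that
	// ARCore can adjust its projection and transformed texture coordinates to the display.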
	// ...
	glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId_);
	glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
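	// textureId_ is a GL_TEXTURE_EXTERNAL_OES texture that ARCore fills with the camera preview
	// once it has been handed to the session (presumably via ArSession_setCameraTextureName in
	// code not shown here).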
	// ...
		LOGE("CameraARCore::captureImage() ArSession_update error");
	// ...
	ArFrame_transformCoordinates2d( /* ... */ );
	// ...
	ArCamera_getProjectionMatrix(arSession_, ar_camera, /* ... */ );
	// ...
	ArTrackingState camera_tracking_state;
	ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);
	if(camera_tracking_state == AR_TRACKING_STATE_TRACKING)
	{
		// ...
		// ArPose_getPoseRaw() returns {qx, qy, qz, qw, tx, ty, tz}: the translation is read from
		// indices 4..6 and the quaternion from indices 0..3.
		Transform poseArCore = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
		// ...
			LOGE("CameraARCore: Pose is null");
		// ...
		LOGI("%f %f %f %f %d %d", fx, fy, cx, cy, width, height);
		// ...
		if(fx > 0 && fy > 0 && width > 0 && height > 0 && cx > 0 && cy > 0)
		{
			ArPointCloud * pointCloud = nullptr;
			// ...
			int32_t is_depth_supported = 0;
			ArSession_isDepthModeSupported(arSession_, AR_DEPTH_MODE_AUTOMATIC, &is_depth_supported);

			ArImage * image = nullptr;
			// ...
			if(status == AR_SUCCESS)
			{
				if(is_depth_supported)
				{
					LOGD("Acquire depth image!");
					ArImage * depthImage = nullptr;
					// ...
					if(format == AR_IMAGE_FORMAT_DEPTH16)
					{
						LOGD("Depth format detected!");
						// ...
						ArImage_getNumberOfPlanes(arSession_, depthImage, &planeCount);
						LOGD("planeCount=%d", planeCount);
						// ...
						ArImage_getWidth(arSession_, depthImage, &depth_width);
						ArImage_getHeight(arSession_, depthImage, &depth_height);
						// ...
						LOGD("width=%d, height=%d, bytes=%d stride=%d", depth_width, depth_height, len, stride);
						// ...
						cv::Mat occlusionImage = cv::Mat(depth_height, depth_width, CV_16UC1, (void*)data).clone();

						float scaleX = (float)depth_width / (float)width;
						float scaleY = (float)depth_height / (float)height;
						// ...
					}
					ArImage_release(depthImage);
				}
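				// AR_IMAGE_FORMAT_DEPTH16 stores one 16-bit depth value per pixel (distance in
				// millimeters), so the plane can be wrapped directly as CV_16UC1. The depth image
				// is usually smaller than the color image; scaleX/scaleY are presumably used in
				// the elided code to map depth pixels onto RGB pixel coordinates.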
				ArImage_getTimestamp(arSession_, image, &timestamp_ns);

				if(format == AR_IMAGE_FORMAT_YUV_420_888)
				{
					// ...
					ArImage_getNumberOfPlanes(arSession_, image, &num_planes);
					for(int i = 0; i < num_planes; ++i)
					{
						// ...
						ArImage_getPlanePixelStride(arSession_, image, i, &pixel_stride);
						ArImage_getPlaneRowStride(arSession_, image, i, &row_stride);
						LOGI("Plane %d/%d: pixel stride=%d, row stride=%d", i+1, num_planes, pixel_stride, row_stride);
					}
					// ...
					ArImage_getPlaneData(arSession_, image, 0, &plane_data, &data_length);
					// ...
					ArImage_getPlaneData(arSession_, image, 2, &plane_uv_data, &uv_data_length);

					if(plane_data != nullptr && data_length == height*width)
					{
						double stamp = double(timestamp_ns)/1e9;
						// ...
						LOGI("data_length=%d stamp=%f", data_length, stamp);
						// ...
						if((long)plane_uv_data - (long)plane_data != data_length)
						{
							// UV plane is not contiguous with the Y plane: rebuild an NV21 buffer.
							cv::Mat yuv(height+height/2, width, CV_8UC1);
							memcpy(yuv.data, plane_data, data_length);
							memcpy(yuv.data+data_length, plane_uv_data, height/2*width);
							cv::cvtColor(yuv, rgb, cv::COLOR_YUV2BGR_NV21);
						}
						else
						{
							// Planes are already contiguous (NV21 layout): convert without copying.
							cv::cvtColor(cv::Mat(height+height/2, width, CV_8UC1, (void*)plane_data), rgb, cv::COLOR_YUV2BGR_NV21);
						}
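						// Plane 0 of a YUV_420_888 image is Y and plane 2 is V; on devices where
						// the chroma data is interleaved VU and stored right after Y, the buffer
						// already matches the NV21 layout expected by cv::COLOR_YUV2BGR_NV21.
						// The pointer-difference check above detects that case; the copy branch
						// presumably assumes the same semi-planar (pixel stride 2) chroma layout.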
						std::vector<cv::KeyPoint> kpts;
						std::vector<cv::Point3f> kpts3;
						// ...
						ArPointCloud_getNumberOfPoints(arSession_, pointCloud, &points);
						const float * pointCloudData = 0;
						ArPointCloud_getData(arSession_, pointCloud, &pointCloudData);
						// ...
						LOGI("pointCloudData=%d size=%d", pointCloudData?1:0, points);
						// ...
						if(pointCloudData && points > 0)
						{
							// ... (presumably builds the colored scan and keypoints using the
							// point-cloud helper shown earlier)
						}
						else
						{
							LOGI("pointCloud empty");
						}
						// ...
						data.setFeatures(kpts, kpts3, cv::Mat());
					}
				}
				else
				{
					LOGE("CameraARCore: cannot convert image format %d", format);
				}
			}
			else
			{
				LOGE("CameraARCore: failed to get rgb image (status=%d)", (int)status);
			}
			// ...
			ArImage_release(image);
			ArPointCloud_release(pointCloud);
	// ...
	ArCamera_release(ar_camera);