package com.introlab.rtabmap;

import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import com.google.ar.core.Anchor;
import com.google.ar.core.Camera;
import com.google.ar.core.CameraConfig;
import com.google.ar.core.CameraConfigFilter;
import com.google.ar.core.CameraIntrinsics;
import com.google.ar.core.Config;
import com.google.ar.core.Coordinates2d;
import com.google.ar.core.Frame;
import com.google.ar.core.PointCloud;
import com.google.ar.core.Pose;
import com.google.ar.core.Session;
import com.google.ar.core.SharedCamera;
import com.google.ar.core.TrackingState;
import com.google.ar.core.exceptions.CameraNotAvailableException;
import com.google.ar.core.exceptions.NotYetAvailableException;
import com.google.ar.core.exceptions.UnavailableException;

import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.Surface;
import android.widget.Toast;
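// Full-screen quad in OpenGL normalized device coordinates (x,y pairs). It appears to be the
// input later fed to Frame.transformCoordinates2d() to compute the color texture coordinates
// (assumption: only this fragment of the array declaration is visible here).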
-1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f,
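// mReady is presumably flipped by the CameraDevice state callbacks below once the device is opened.
// mPreviousDepth / mPreviousDepthStamp cache the last DEPTH16 frame, since depth frames can arrive
// at a different rate than color frames (see the depthStamp handling in updateGL()).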
private AtomicBoolean mReady = new AtomicBoolean(false);

ByteBuffer mPreviousDepth = null;
double mPreviousDepthStamp = 0.0;
new CameraDevice.StateCallback() {

    public void onOpened(@NonNull CameraDevice cameraDevice) {

    public void onClosed(@NonNull CameraDevice cameraDevice) {

    public void onDisconnected(@NonNull CameraDevice cameraDevice) {
        Log.w(TAG, "Camera device ID " + cameraDevice.getId() + " disconnected.");
CameraCaptureSession.StateCallback cameraCaptureCallback =
        new CameraCaptureSession.StateCallback() {

    public void onConfigured(@NonNull CameraCaptureSession session) {
        Log.d(TAG, "Camera capture session configured.");

    public void onSurfacePrepared(@NonNull CameraCaptureSession session, @NonNull Surface surface) {
        Log.d(TAG, "Camera capture surface prepared.");

    public void onReady(@NonNull CameraCaptureSession session) {
        Log.d(TAG, "Camera capture session ready.");

    public void onActive(@NonNull CameraCaptureSession session) {
        Log.d(TAG, "Camera capture session active.");

    public void onClosed(@NonNull CameraCaptureSession session) {
        Log.d(TAG, "Camera capture session closed.");

    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
        Log.e(TAG, "Failed to configure camera capture session.");
new CameraCaptureSession.CaptureCallback() {

    public void onCaptureCompleted(
            @NonNull CameraCaptureSession session,
            @NonNull CaptureRequest request,
            @NonNull TotalCaptureResult result) {

    public void onCaptureBufferLost(
            @NonNull CameraCaptureSession session,
            @NonNull CaptureRequest request,
            @NonNull Surface target,
            long frameNumber) {
        Log.e(TAG, "onCaptureBufferLost: " + frameNumber);

    public void onCaptureFailed(
            @NonNull CameraCaptureSession session,
            @NonNull CaptureRequest request,
            @NonNull CaptureFailure failure) {
        Log.e(TAG, "onCaptureFailed: " + failure.getFrameNumber() + " " + failure.getReason());

    public void onCaptureSequenceAborted(
            @NonNull CameraCaptureSession session, int sequenceId) {
        Log.e(TAG, "onCaptureSequenceAborted: " + sequenceId + " " + session);
Log.i(TAG, "Resume ARCore.");

} catch (CameraNotAvailableException e) {
    Log.e(TAG, "Failed to resume ARCore session", e);

} catch (CameraAccessException e) {
    Log.e(TAG, "Failed to set repeating request", e);
Log.i(TAG, "createCameraPreviewSession: starting camera preview session.");

List<Surface> surfaceList = sharedCamera.getArCoreSurfaces();
Log.i(TAG, "createCameraPreviewSession: surfaceList: sharedCamera.getArCoreSurfaces(): " + surfaceList.size());
for (Surface surface : surfaceList) {
} catch (CameraAccessException e) {
    Log.e(TAG, "CameraAccessException", e);

} catch (InterruptedException e) {
    Log.e(TAG, "Interrupted while trying to join background handler thread", e);
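// Returns true if the given mode/capability value is present in the array; used below to test
// REQUEST_AVAILABLE_CAPABILITIES for DEPTH_OUTPUT support.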
private static boolean contains(int[] modes, int mode) {
    for (int i : modes) {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);

Log.v(TAG, "Perform various checks, then open camera device and create CPU image reader.");
} catch (UnavailableException e) {
    Log.e(TAG, "Failed to create ARCore session that supports camera sharing", e);

CameraConfigFilter cameraConfigFilter = new CameraConfigFilter(sharedSession);
CameraConfig[] cameraConfigs = sharedSession.getSupportedCameraConfigs(cameraConfigFilter).toArray(new CameraConfig[0]);
Log.i(TAG, "Size of supported CameraConfigs list is " + cameraConfigs.length);
int highestResolutionIndex = -1;
int highestResolution = 0;
for (int i = 0; i < cameraConfigs.length; ++i)
{
    Log.i(TAG, "Camera ID: " + cameraConfigs[i].getCameraId());
    Log.i(TAG, "Resolution: " + cameraConfigs[i].getImageSize().getWidth() + "x" + cameraConfigs[i].getImageSize().getHeight());
    if (highestResolution == 0 || highestResolution < cameraConfigs[i].getImageSize().getWidth())
    {
        highestResolutionIndex = i;
        highestResolution = cameraConfigs[i].getImageSize().getWidth();
    }
}
if (highestResolutionIndex >= 0)
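// Configure the ARCore session for mapping: fixed focus, updates blocking on new camera frames,
// and no plane finding, light estimation or cloud anchors.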
config.setFocusMode(Config.FocusMode.FIXED);
config.setUpdateMode(Config.UpdateMode.BLOCKING);
config.setPlaneFindingMode(Config.PlaneFindingMode.DISABLED);
config.setLightEstimationMode(Config.LightEstimationMode.DISABLED);
config.setCloudAnchorMode(Config.CloudAnchorMode.DISABLED);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(tmpCameraId);

Log.i(TAG, "Camera " + tmpCameraId + " extrinsics:");

float[] translation = characteristics.get(CameraCharacteristics.LENS_POSE_TRANSLATION);

float[] rotation = characteristics.get(CameraCharacteristics.LENS_POSE_ROTATION);

Log.i(TAG, String.format("Rotation (qx,qy,qz,qw): %f,%f,%f,%f",
        rotation[0], rotation[1], rotation[2], rotation[3]));

Log.i(TAG, "Set rgb extrinsics!");
if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) ||
        characteristics.get(CameraCharacteristics.LENS_FACING) == CameraMetadata.LENS_FACING_FRONT) {

Log.i(TAG, "Camera " + tmpCameraId + " has depth output available");

Log.i(TAG, "Set depth extrinsics!");
depthIntrinsics = characteristics.get(CameraCharacteristics.LENS_INTRINSIC_CALIBRATION);
Log.i(TAG, String.format("Intrinsics (fx,fy,cx,cy,s): %f,%f,%f,%f,%f",
        depthIntrinsics[0], depthIntrinsics[1], depthIntrinsics[2], depthIntrinsics[3], depthIntrinsics[4]));
} catch (CameraAccessException e) {
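// Enumerate the supported DEPTH16 resolutions; when a resolution other than the calibration size
// is selected, the depth intrinsics appear to be rescaled accordingly (see the scaling below).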
if (resolutions != null) {
    float[] newDepthIntrinsics = null;
    int largestWidth = 0;
    for (String temp : resolutions) {
        Log.i(TAG, "DEPTH16 resolution: " + temp);
        depthWidth = Integer.parseInt(temp.split("x")[0]);
        depthHeight = Integer.parseInt(temp.split("x")[1]);

        if (largestWidth == 0 && depthWidth > largestWidth)

        newDepthIntrinsics[0] *= scale;
        newDepthIntrinsics[1] *= scale;
        newDepthIntrinsics[2] *= scale;
        newDepthIntrinsics[3] *= scale;

    if (resolutions.size() > 0) {

        if (newDepthIntrinsics != null) {
} catch (CameraAccessException e) {
    Log.e(TAG, "Failed to open camera", e);

} catch (IllegalArgumentException e) {
    Log.e(TAG, "Failed to open camera", e);

} catch (SecurityException e) {
    Log.e(TAG, "Failed to open camera", e);
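// Rotation-matrix-to-quaternion conversion (standard Shepperd-style branching on the trace).
// R is read as a column-major 4x4 transform and q appears to be ordered [qw, qx, qy, qz].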
final float m00 = R[0];
final float m10 = R[1];
final float m20 = R[2];
final float m01 = R[4];
final float m11 = R[5];
final float m21 = R[6];
final float m02 = R[8];
final float m12 = R[9];
final float m22 = R[10];

float tr = m00 + m11 + m22;
if (tr > 0) {
    float S = (float) Math.sqrt(tr + 1.0) * 2; // S = 4*qw
    q[0] = 0.25f * S;
    q[1] = (m21 - m12) / S;
    q[2] = (m02 - m20) / S;
    q[3] = (m10 - m01) / S;
} else if ((m00 > m11) && (m00 > m22)) {
    float S = (float) Math.sqrt(1.0 + m00 - m11 - m22) * 2; // S = 4*qx
    q[0] = (m21 - m12) / S;
    q[1] = 0.25f * S;
    q[2] = (m01 + m10) / S;
    q[3] = (m02 + m20) / S;
} else if (m11 > m22) {
    float S = (float) Math.sqrt(1.0 + m11 - m00 - m22) * 2; // S = 4*qy
    q[0] = (m02 - m20) / S;
    q[1] = (m01 + m10) / S;
    q[2] = 0.25f * S;
    q[3] = (m12 + m21) / S;
} else {
    float S = (float) Math.sqrt(1.0 + m22 - m00 - m11) * 2; // S = 4*qz
    q[0] = (m10 - m01) / S;
    q[1] = (m02 + m20) / S;
    q[2] = (m12 + m21) / S;
    q[3] = 0.25f * S;
}
Log.w(TAG, "close()");

public void updateGL() throws CameraNotAvailableException {

} catch (Exception e) {
camera = frame.getCamera();

Log.e(TAG, "frame.getCamera() null!");

Log.e(TAG, "camera is null!");
if (camera.getTrackingState() != TrackingState.TRACKING) {
    final String trackingState = camera.getTrackingState().toString();
    Log.e(TAG, String.format("Tracking lost! state=%s", trackingState));

    String msg = "Tracking lost! If you are mapping, you will need to relocalize before continuing.";
    if (mToast.getView() == null || !mToast.getView().isShown())

            msg, Toast.LENGTH_LONG).show();
if (frame.getTimestamp() != 0) {
Log.e(TAG, String.format("POTENTIAL TELEPORTATION! Previous anchor moved (speed=%f), new AR correction: %f %f %f",
        speed, t[0], t[1], t[2]));
float[] t4 = pose.getTranslation();
Log.e(TAG, String.format("Odom = %f %f %f -> %f %f %f ArCore= %f %f %f -> %f %f %f",
        t[0], t[1], t[2], t2[0], t2[1], t2[2], t3[0], t3[1], t3[2], t4[0], t4[1], t4[2]));
String msg = String.format("ARCore localization has been suppressed "
        + "because of high speed detected (%f m/s) causing a jump! You can change "
        + "the ARCore localization filtering speed in Settings->Mapping if you are "
        + "indeed moving that fast.", speed);
if (mToast.getView() == null || !mToast.getView().isShown())
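// frame.getTimestamp() is in nanoseconds; dividing by 10e8 (== 1e9) converts it to seconds.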
double stamp = (double) frame.getTimestamp() / 10e8;
if (!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("pose=%f %f %f arcore %f %f %f cor= %f %f %f stamp=%f",
        odomPose.tx(), odomPose.ty(), odomPose.tz(),
        pose.tx(), pose.ty(), pose.tz(),
        arCoreCorrection.tx(), arCoreCorrection.ty(), arCoreCorrection.tz(), stamp));
CameraIntrinsics intrinsics = camera.getImageIntrinsics();

Image image = frame.acquireCameraImage();

PointCloud cloud = frame.acquirePointCloud();
FloatBuffer points = cloud.getPoints();

if (image.getFormat() != ImageFormat.YUV_420_888) {
    throw new IllegalArgumentException(
            "Expected image in YUV_420_888 format, got format " + image.getFormat());
for (int i = 0; i < image.getPlanes().length; ++i)
{
    Log.d(TAG, String.format("Plane[%d] pixel stride = %d, row stride = %d",
            i, image.getPlanes()[i].getPixelStride(), image.getPlanes()[i].getRowStride()));
}
float[] fl = intrinsics.getFocalLength();
float[] pp = intrinsics.getPrincipalPoint();

ByteBuffer y = image.getPlanes()[0].getBuffer().asReadOnlyBuffer();
ByteBuffer u = image.getPlanes()[1].getBuffer().asReadOnlyBuffer();
ByteBuffer v = image.getPlanes()[2].getBuffer().asReadOnlyBuffer();

        image.getWidth(), image.getHeight(), y.limit(), image.getFormat(), stamp));
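// Map the full-screen NDC quad into normalized image coordinates; the resulting texture
// coordinates are forwarded, together with the YUV planes, in the update call below.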
float[] texCoord = new float[8];
frame.transformCoordinates2d(
        Coordinates2d.OPENGL_NORMALIZED_DEVICE_COORDINATES,
        QUAD_COORDS, // assumption: the NDC quad constant declared near the top of this file
        Coordinates2d.IMAGE_NORMALIZED,
        texCoord);
float[] p = new float[16];
camera.getProjectionMatrix(p, 0, 0.1f, 100.0f);

float[] viewMatrix = new float[16];

float[] quat = new float[4];
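// Depth frames (DEPTH16) can lag or lead the color frame: when the current depth stamp is newer
// than the color stamp, the previously cached depth frame is used instead.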
if (mPreviousDepth == null)
{
    mPreviousDepth = depth;
    mPreviousDepthStamp = depthStamp;
}

        fl[0], fl[1], pp[0], pp[1],
        depthStamp > stamp ? mPreviousDepthStamp : depthStamp,
        y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
        points, points.limit() / 4,
        viewMatrix[12], viewMatrix[13], viewMatrix[14], quat[1], quat[2], quat[3], quat[0],
        p[0], p[5], p[8], p[9], p[10], p[11], p[14],
        texCoord[0], texCoord[1], texCoord[2], texCoord[3], texCoord[4], texCoord[5], texCoord[6], texCoord[7]);

mPreviousDepthStamp = depthStamp;
mPreviousDepth = depth;
        fl[0], fl[1], pp[0], pp[1],
        y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
        points, points.limit() / 4,
        viewMatrix[12], viewMatrix[13], viewMatrix[14], quat[1], quat[2], quat[3], quat[0],
        p[0], p[5], p[8], p[9], p[10], p[11], p[14],
        texCoord[0], texCoord[1], texCoord[2], texCoord[3], texCoord[4], texCoord[5], texCoord[6], texCoord[7]);
} catch (NotYetAvailableException e) {
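// Returns the supported output sizes of the given camera for the given image format
// (e.g. DEPTH16), formatted as "WIDTHxHEIGHT" strings.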
Log.v(TAG, "getResolutions: cameraId:" + cameraId + " imageFormat: " + imageFormat);

ArrayList<String> output = new ArrayList<String>();

CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

for (android.util.Size s : characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(imageFormat)) {
    output.add(s.getWidth() + "x" + s.getHeight());
} catch (Exception e) {