ARCoreSharedCamera.java
package com.introlab.rtabmap;

import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import com.google.ar.core.Anchor;
import com.google.ar.core.Camera;
import com.google.ar.core.CameraConfig;
import com.google.ar.core.CameraConfigFilter;
import com.google.ar.core.CameraIntrinsics;
import com.google.ar.core.Config;
import com.google.ar.core.Coordinates2d;
import com.google.ar.core.Frame;
import com.google.ar.core.PointCloud;
import com.google.ar.core.Pose;
import com.google.ar.core.Session;
import com.google.ar.core.SharedCamera;
import com.google.ar.core.TrackingState;
import com.google.ar.core.exceptions.CameraNotAvailableException;
import com.google.ar.core.exceptions.NotYetAvailableException;
import com.google.ar.core.exceptions.UnavailableException;

import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.cam2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.Surface;
import android.widget.Toast;
public class ARCoreSharedCamera {

    public static final String TAG = ARCoreSharedCamera.class.getSimpleName();

    private static final float[] QUAD_COORDS =
        new float[] {
            -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f,
        };
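    // These are the four corners of a full-viewport quad in OpenGL normalized device
    // coordinates; updateGL() below maps them to image-normalized texture coordinates
    // each frame through Frame.transformCoordinates2d().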

    private static RTABMapActivity mActivity;

    public ARCoreSharedCamera(RTABMapActivity c, float arCoreLocalizationFilteringSpeed) {
        mActivity = c;
        mARCoreLocalizationFilteringSpeed = arCoreLocalizationFilteringSpeed;
    }

    // Depth TOF image size. Defaults to 640x480; overwritten below with a resolution
    // supported by the device's DEPTH16 stream (e.g., 240x180 on the Huawei P30 Pro).
    private int depthWidth = 640;
    private int depthHeight = 480;

    // GL Surface used to draw camera preview image.
    public GLSurfaceView surfaceView;

    // ARCore session that supports camera sharing.
    private Session sharedSession;

    private Pose previousAnchorPose = null;
    private long previousAnchorTimeStamp = 0;
    private Pose arCoreCorrection = Pose.IDENTITY;
    private Pose odomPose = Pose.IDENTITY;
    private float mARCoreLocalizationFilteringSpeed = 1.0f;

    // Camera capture session. Used by both non-AR and AR modes.
    private CameraCaptureSession captureSession;

    // Reference to the camera system service.
    private CameraManager cameraManager;

    // Camera device. Used by both non-AR and AR modes.
    private CameraDevice cameraDevice;

    // Looper handler thread.
    private HandlerThread backgroundThread;
    // Looper handler.
    private Handler backgroundHandler;

    // ARCore shared camera instance, obtained from ARCore session that supports sharing.
    private SharedCamera sharedCamera;

    private Toast mToast = null;

    // Camera ID for the camera used by ARCore.
    private String cameraId;
    private String depthCameraId;

    private Pose rgbExtrinsics;
    private Pose depthExtrinsics;
    private float[] depthIntrinsics = null;

    private AtomicBoolean mReady = new AtomicBoolean(false);

    // Camera preview capture request builder.
    private CaptureRequest.Builder previewCaptureRequestBuilder;

    private int cameraTextureId = -1;

    // Image reader that continuously processes CPU images.
    private TOF_ImageReader mTOFImageReader = new TOF_ImageReader();
    private boolean mTOFAvailable = false;

    public boolean isDepthSupported() { return mTOFAvailable; }

    public void setToast(Toast toast) {
        mToast = toast;
    }

    // Camera device state callback.
    private final CameraDevice.StateCallback cameraDeviceCallback =
        new CameraDevice.StateCallback() {
            @Override
            public void onOpened(@NonNull CameraDevice cameraDevice) {
                Log.d(TAG, "Camera device ID " + cameraDevice.getId() + " opened.");
                ARCoreSharedCamera.this.cameraDevice = cameraDevice;
                createCameraPreviewSession();
            }

            @Override
            public void onClosed(@NonNull CameraDevice cameraDevice) {
                Log.d(TAG, "Camera device ID " + cameraDevice.getId() + " closed.");
                ARCoreSharedCamera.this.cameraDevice = null;
            }

            @Override
            public void onDisconnected(@NonNull CameraDevice cameraDevice) {
                Log.w(TAG, "Camera device ID " + cameraDevice.getId() + " disconnected.");
                cameraDevice.close();
                ARCoreSharedCamera.this.cameraDevice = null;
            }

            @Override
            public void onError(@NonNull CameraDevice cameraDevice, int error) {
                Log.e(TAG, "Camera device ID " + cameraDevice.getId() + " error " + error);
                cameraDevice.close();
                ARCoreSharedCamera.this.cameraDevice = null;
            }
        };

    // Repeating camera capture session state callback.
    CameraCaptureSession.StateCallback cameraCaptureCallback =
        new CameraCaptureSession.StateCallback() {

            // Called when the camera capture session is first configured after the app
            // is initialized, and again each time the activity is resumed.
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                Log.d(TAG, "Camera capture session configured.");
                captureSession = session;
                setRepeatingCaptureRequest();
            }

            @Override
            public void onSurfacePrepared(
                    @NonNull CameraCaptureSession session, @NonNull Surface surface) {
                Log.d(TAG, "Camera capture surface prepared.");
            }

            @Override
            public void onReady(@NonNull CameraCaptureSession session) {
                Log.d(TAG, "Camera capture session ready.");
            }

            @Override
            public void onActive(@NonNull CameraCaptureSession session) {
                Log.d(TAG, "Camera capture session active.");
                resumeARCore();
            }

            @Override
            public void onClosed(@NonNull CameraCaptureSession session) {
                Log.d(TAG, "Camera capture session closed.");
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                Log.e(TAG, "Failed to configure camera capture session.");
            }
        };

    // Repeating camera capture session capture callback.
    private final CameraCaptureSession.CaptureCallback captureSessionCallback =
        new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(
                    @NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request,
                    @NonNull TotalCaptureResult result) {
                //Log.i(TAG, "onCaptureCompleted");
            }

            //@Override // android 23
            public void onCaptureBufferLost(
                    @NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request,
                    @NonNull Surface target,
                    long frameNumber) {
                Log.e(TAG, "onCaptureBufferLost: " + frameNumber);
            }

            @Override
            public void onCaptureFailed(
                    @NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request,
                    @NonNull CaptureFailure failure) {
                Log.e(TAG, "onCaptureFailed: " + failure.getFrameNumber() + " " + failure.getReason());
            }

            @Override
            public void onCaptureSequenceAborted(
                    @NonNull CameraCaptureSession session, int sequenceId) {
                Log.e(TAG, "onCaptureSequenceAborted: " + sequenceId + " " + session);
            }
        };

    private void resumeARCore() {
        // Ensure that session is valid before triggering ARCore resume. Handles the case where
        // the user manually uninstalls ARCore while the app is paused and then resumes.
        if (sharedSession == null) {
            return;
        }

        try {
            Log.i(TAG, "Resume ARCore.");
            // Resume ARCore.
            sharedSession.resume();
            // Set capture session callback while in AR mode.
            sharedCamera.setCaptureCallback(captureSessionCallback, backgroundHandler);
        } catch (CameraNotAvailableException e) {
            Log.e(TAG, "Failed to resume ARCore session", e);
        }
    }


    // Called when starting non-AR mode or switching to non-AR mode.
    // Also called when the app starts or resumes in AR mode.
    private void setRepeatingCaptureRequest() {
        try {
            captureSession.setRepeatingRequest(
                previewCaptureRequestBuilder.build(), captureSessionCallback, backgroundHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "Failed to set repeating request", e);
        }
    }

    private void createCameraPreviewSession() {
        Log.e(TAG, "createCameraPreviewSession: starting camera preview session.");
        try {
            // Note that isGlAttached will be set to true in AR mode in onDrawFrame().
            sharedSession.setCameraTextureName(cameraTextureId);

            // Create an ARCore-compatible capture request using `TEMPLATE_RECORD`.
            previewCaptureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);

            // Build surfaces list, starting with ARCore provided surfaces.
            List<Surface> surfaceList = sharedCamera.getArCoreSurfaces();
            Log.e(TAG, "createCameraPreviewSession: surfaceList: sharedCamera.getArCoreSurfaces(): " + surfaceList.size());

            // Add a CPU image reader surface. On devices that don't support CPU image access, the
            // image may arrive significantly later, or not arrive at all.
            if (mTOFAvailable && cameraId.compareTo(depthCameraId) == 0) {
                surfaceList.add(mTOFImageReader.imageReader.getSurface());
            }
            // Surface list should now contain three surfaces:
            // 0. sharedCamera.getSurfaceTexture()
            // 1. …
            // 2. depthImageReader.getSurface()

            // Add ARCore surfaces and CPU image surface targets.
            for (Surface surface : surfaceList) {
                previewCaptureRequestBuilder.addTarget(surface);
            }

            // Wrap our callback in a shared camera callback.
            CameraCaptureSession.StateCallback wrappedCallback =
                sharedCamera.createARSessionStateCallback(cameraCaptureCallback, backgroundHandler);

            // Create a camera capture session for the camera preview using the ARCore-wrapped callback.
            cameraDevice.createCaptureSession(surfaceList, wrappedCallback, backgroundHandler);

            mReady.set(true);
        } catch (CameraAccessException e) {
            Log.e(TAG, "CameraAccessException", e);
        }
    }

    // Start background handler thread, used to run callbacks without blocking UI thread.
    private void startBackgroundThread() {
        backgroundThread = new HandlerThread("sharedCameraBackground");
        backgroundThread.start();
        backgroundHandler = new Handler(backgroundThread.getLooper());
        mTOFImageReader.startBackgroundThread();
    }

    // Stop background handler thread.
    private void stopBackgroundThread() {
        if (backgroundThread != null) {
            backgroundThread.quitSafely();
            try {
                backgroundThread.join();
                backgroundThread = null;
                backgroundHandler = null;
            } catch (InterruptedException e) {
                Log.e(TAG, "Interrupted while trying to join background handler thread", e);
            }
        }
        mTOFImageReader.stopBackgroundThread();
    }

    private static boolean contains(int[] modes, int mode) {
        if (modes == null) {
            return false;
        }
        for (int i : modes) {
            if (i == mode) {
                return true;
            }
        }
        return false;
    }

    // Perform various checks, then open camera device and create CPU image reader.
    public boolean openCamera() {

        close();

        startBackgroundThread();

        if (cameraTextureId == -1) {
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            cameraTextureId = textures[0];
        }
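        // ARCore draws the camera image into this texture name; Session.setCameraTextureName()
        // in createCameraPreviewSession() expects it to be usable as GL_TEXTURE_EXTERNAL_OES.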

        Log.v(TAG, "Perform various checks, then open camera device and create CPU image reader.");
        // Don't open camera if already opened.
        if (cameraDevice != null) {
            return false;
        }

        if (sharedSession == null) {
            try {
                // Create ARCore session that supports camera sharing.
                sharedSession = new Session(mActivity, EnumSet.of(Session.Feature.SHARED_CAMERA));
            } catch (UnavailableException e) {
                Log.e(TAG, "Failed to create ARCore session that supports camera sharing", e);
                return false;
            }

            // First obtain the session handle before getting the list of various camera configs.
            // Create filter here with desired fps filters.
            CameraConfigFilter cameraConfigFilter = new CameraConfigFilter(sharedSession);
            CameraConfig[] cameraConfigs = sharedSession.getSupportedCameraConfigs(cameraConfigFilter).toArray(new CameraConfig[0]);
            Log.i(TAG, "Size of supported CameraConfigs list is " + cameraConfigs.length);

            // Determine the highest and lowest CPU resolutions.
            int highestResolutionIndex = -1;
            int highestResolution = 0;
            for (int i = 0; i < cameraConfigs.length; ++i) {
                Log.i(TAG, "Camera ID: " + cameraConfigs[i].getCameraId());
                Log.i(TAG, "Resolution: " + cameraConfigs[i].getImageSize().getWidth() + "x" + cameraConfigs[i].getImageSize().getHeight());
                if (highestResolution == 0 || highestResolution < cameraConfigs[i].getImageSize().getWidth()) {
                    highestResolutionIndex = i;
                    highestResolution = cameraConfigs[i].getImageSize().getWidth();
                }
            }
            if (highestResolutionIndex >= 0) {
                //Log.i(TAG, "Setting camera resolution to " + cameraConfigs[highestResolutionIndex].getImageSize().getWidth() + "x" + cameraConfigs[highestResolutionIndex].getImageSize().getHeight());
                //FIXME: Can we make it work to use HD rgb images? To avoid this error "CaptureRequest contains unconfigured Input/Output Surface!"
                //sharedSession.setCameraConfig(cameraConfigs[highestResolutionIndex]);
            }

            // Use fixed focus and BLOCKING update mode while ARCore is running, and disable
            // features that are not needed for mapping.
            Config config = sharedSession.getConfig();
            config.setFocusMode(Config.FocusMode.FIXED);
            config.setUpdateMode(Config.UpdateMode.BLOCKING);
            config.setPlaneFindingMode(Config.PlaneFindingMode.DISABLED);
            config.setLightEstimationMode(Config.LightEstimationMode.DISABLED);
            config.setCloudAnchorMode(Config.CloudAnchorMode.DISABLED);
            sharedSession.configure(config);
        }

        // Store the ARCore shared camera reference.
        sharedCamera = sharedSession.getSharedCamera();
        // Store the ID of the camera used by ARCore.
        cameraId = sharedSession.getCameraConfig().getCameraId();

        Log.d(TAG, "Shared camera ID: " + cameraId);

        mTOFAvailable = false;

        // Store a reference to the camera system service.
        cameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);

        depthCameraId = null;
        // Show all cameras.
        try {
            // Find a CameraDevice that supports DEPTH16 captures, and configure state.
            for (String tmpCameraId : cameraManager.getCameraIdList()) {
                CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(tmpCameraId);

                Log.i(TAG, "Camera " + tmpCameraId + " extrinsics:");

                float[] translation = characteristics.get(CameraCharacteristics.LENS_POSE_TRANSLATION);
                if (translation != null) {
                    Log.i(TAG, String.format("Translation (x,y,z): %f,%f,%f", translation[0], translation[1], translation[2]));
                }
                float[] rotation = characteristics.get(CameraCharacteristics.LENS_POSE_ROTATION);
                if (rotation != null) {
                    Log.i(TAG, String.format("Rotation (qx,qy,qz,qw): %f,%f,%f,%f", rotation[0], rotation[1], rotation[2], rotation[3]));
                }

                if (tmpCameraId.compareTo(cameraId) == 0 && translation != null && rotation != null) {
                    rgbExtrinsics = new Pose(translation, rotation);
                    Log.i(TAG, "Set rgb extrinsics!");
                }

                if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) ||
                    characteristics.get(CameraCharacteristics.LENS_FACING) == CameraMetadata.LENS_FACING_FRONT) {
                    continue;
                }
                Log.i(TAG, "Camera " + tmpCameraId + " has depth output available");

                depthCameraId = tmpCameraId;
                if (translation != null && rotation != null) {
                    depthExtrinsics = new Pose(translation, rotation);
                    Log.i(TAG, "Set depth extrinsics!");
                }
                depthIntrinsics = characteristics.get(CameraCharacteristics.LENS_INTRINSIC_CALIBRATION);
                Log.i(TAG, String.format("Intrinsics (fx,fy,cx,cy,s): %f,%f,%f,%f,%f",
                    depthIntrinsics[0],
                    depthIntrinsics[1],
                    depthIntrinsics[2],
                    depthIntrinsics[3],
                    depthIntrinsics[4]));
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        if (rgbExtrinsics == null) {
            float[] translation = {0, 0, 0};
            float[] rotation = {0, 0, 0, 1};
            rgbExtrinsics = new Pose(translation, rotation);
        }
        if (depthExtrinsics == null) {
            depthExtrinsics = rgbExtrinsics;
        }

        if (depthCameraId != null) {
            ArrayList<String> resolutions = getResolutions(mActivity, depthCameraId, ImageFormat.DEPTH16);
            if (resolutions != null) {
                float[] newDepthIntrinsics = null;
                int largestWidth = 0;
                for (String temp : resolutions) {
                    Log.i(TAG, "DEPTH16 resolution: " + temp);
                    depthWidth = Integer.parseInt(temp.split("x")[0]);
                    depthHeight = Integer.parseInt(temp.split("x")[1]);
                    if (depthIntrinsics != null) {
                        if (depthIntrinsics[0] != 0) {
                            if (largestWidth == 0 && depthWidth > largestWidth) {
                                largestWidth = depthWidth; // intrinsics should match this resolution
                            }
                            // Samsung Galaxy Note10+: take smallest resolution and match the intrinsics
                            if (depthWidth < largestWidth) {
                                float scale = (float) depthWidth / (float) largestWidth;
                                newDepthIntrinsics = depthIntrinsics.clone();
                                newDepthIntrinsics[0] *= scale;
                                newDepthIntrinsics[1] *= scale;
                                newDepthIntrinsics[2] *= scale;
                                newDepthIntrinsics[3] *= scale;
                            }
                        } else if (depthWidth == 240 && depthHeight == 180) {
                            // Huawei P30 Pro: only 240x180 is working
                            break;
                        }
                    }
                }
                if (resolutions.size() > 0) {
                    mTOFAvailable = true;
                    if (newDepthIntrinsics != null) {
                        depthIntrinsics = newDepthIntrinsics;
                    }
                }
            }
        }
        Log.i(TAG, "TOF_available: " + mTOFAvailable);

        // Color CPU Image.
        // Use the currently configured CPU image size.
        //Size desiredCPUImageSize = sharedSession.getCameraConfig().getImageSize();

        if (mTOFAvailable) {
            mTOFImageReader.createImageReader(depthWidth, depthHeight);
        }

        // When ARCore is running, make sure it also updates our CPU image surface.
        if (mTOFAvailable && cameraId.compareTo(depthCameraId) == 0) {
            sharedCamera.setAppSurfaces(this.cameraId, Arrays.asList(mTOFImageReader.imageReader.getSurface()));
        }
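        // If the depth camera is a different physical device than the one ARCore uses,
        // it is opened separately below with the TOF reader's own callbacks instead of
        // being attached to the shared session's capture requests.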

        try {
            // Wrap our callback in a shared camera callback.
            CameraDevice.StateCallback wrappedCallback =
                sharedCamera.createARDeviceStateCallback(cameraDeviceCallback, backgroundHandler);

            // Open the camera device using the ARCore wrapped callback.
            cameraManager.openCamera(cameraId, wrappedCallback, backgroundHandler);

            if (mTOFAvailable && cameraId.compareTo(depthCameraId) != 0) {
                cameraManager.openCamera(depthCameraId, mTOFImageReader.cameraDeviceCallback, mTOFImageReader.backgroundHandler);
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "Failed to open camera", e);
            return false;
        } catch (IllegalArgumentException e) {
            Log.e(TAG, "Failed to open camera", e);
            return false;
        } catch (SecurityException e) {
            Log.e(TAG, "Failed to open camera", e);
            return false;
        }

        return true;
    }

    public static void rotationMatrixToQuaternion(float[] R, float[] q) {
        final float m00 = R[0];
        final float m10 = R[1];
        final float m20 = R[2];
        final float m01 = R[4];
        final float m11 = R[5];
        final float m21 = R[6];
        final float m02 = R[8];
        final float m12 = R[9];
        final float m22 = R[10];
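        // R is a 4x4 matrix in OpenGL column-major order (as produced by Pose.toMatrix()),
        // so element (row, col) lives at R[col * 4 + row]; the translation entries
        // R[12..14] are ignored here.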

        float tr = m00 + m11 + m22;

        if (tr > 0) {
            float S = (float) Math.sqrt(tr + 1.0) * 2; // S=4*qw
            q[0] = 0.25f * S;
            q[1] = (m21 - m12) / S;
            q[2] = (m02 - m20) / S;
            q[3] = (m10 - m01) / S;
        } else if ((m00 > m11) & (m00 > m22)) {
            float S = (float) Math.sqrt(1.0 + m00 - m11 - m22) * 2; // S=4*q[1]
            q[0] = (m21 - m12) / S;
            q[1] = 0.25f * S;
            q[2] = (m01 + m10) / S;
            q[3] = (m02 + m20) / S;
        } else if (m11 > m22) {
            float S = (float) Math.sqrt(1.0 + m11 - m00 - m22) * 2; // S=4*q[2]
            q[0] = (m02 - m20) / S;
            q[1] = (m01 + m10) / S;
            q[2] = 0.25f * S;
            q[3] = (m12 + m21) / S;
        } else {
            float S = (float) Math.sqrt(1.0 + m22 - m00 - m11) * 2; // S=4*q[3]
            q[0] = (m10 - m01) / S;
            q[1] = (m02 + m20) / S;
            q[2] = (m12 + m21) / S;
            q[3] = 0.25f * S;
        }
    }
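    // Minimal usage sketch (the Pose named "pose" below is hypothetical):
    //   float[] m = new float[16];
    //   pose.toMatrix(m, 0);
    //   float[] q = new float[4];
    //   rotationMatrixToQuaternion(m, q); // q is ordered {w, x, y, z}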


    // Close the camera device.
    public void close() {
        Log.w(TAG, "close()");

        if (sharedSession != null) {
            sharedSession.close();
            sharedSession = null;
        }

        if (captureSession != null) {
            captureSession.close();
            captureSession = null;
        }
        if (cameraDevice != null) {
            cameraDevice.close();
            cameraDevice = null;
        }

        mTOFImageReader.close();

        if (cameraTextureId >= 0) {
            GLES20.glDeleteTextures(1, new int[] {cameraTextureId}, 0);
        }
    }

    public void setDisplayGeometry(int rotation, int width, int height) {
        if (sharedSession != null) {
            sharedSession.setDisplayGeometry(rotation, width, height);
        }
    }

    /*************************************************** ONDRAWFRAME ARCORE ************************************************************* */
    // Draw frame when in AR mode. Called on the GL thread.
    public void updateGL() throws CameraNotAvailableException {

        if (!mReady.get() || sharedSession == null) {
            return;
        }

        if (mTOFAvailable && mTOFImageReader.frameCount == 0) return;

        // Perform ARCore per-frame update.
        Frame frame = null;
        try {
            frame = sharedSession.update();
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }

        if (frame == null) {
            Log.e(TAG, "frame is null!");
            return;
        }

        Camera camera = frame.getCamera();
        if (camera == null) {
            Log.e(TAG, "camera is null!");
            return;
        }
        if (camera.getTrackingState() != TrackingState.TRACKING) {
            final String trackingState = camera.getTrackingState().toString();
            Log.e(TAG, String.format("Tracking lost! state=%s", trackingState));
            // This will force a new session on the next frame received
            RTABMapLib.postCameraPoseEvent(RTABMapActivity.nativeApplication, 0, 0, 0, 0, 0, 0, 0, 0);
            mActivity.runOnUiThread(new Runnable() {
                public void run() {
                    if (mToast != null && previousAnchorPose != null) {
                        String msg = "Tracking lost! If you are mapping, you will need to relocalize before continuing.";
                        if (mToast.getView() == null || !mToast.getView().isShown()) {
                            mToast.makeText(mActivity.getApplicationContext(), msg, Toast.LENGTH_LONG).show();
                        } else {
                            mToast.setText(msg);
                        }
                        previousAnchorPose = null;
                    }
                }
            });
            return;
        }

        if (frame.getTimestamp() != 0) {
            Pose pose = camera.getPose();

            // Remove ARCore SLAM corrections by integrating pose from previous frame anchor
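            // Filtering idea: between two consecutive frames the anchor should move at a
            // physically plausible speed. A jump faster than mARCoreLocalizationFilteringSpeed
            // is treated as an ARCore relocalization rather than real motion, and is folded
            // into arCoreCorrection so the exported odometry stays continuous.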
            if (previousAnchorPose == null || mARCoreLocalizationFilteringSpeed == 0) {
                odomPose = pose;
            } else {
                float[] t = previousAnchorPose.inverse().compose(pose).getTranslation();
                final double speed = Math.sqrt(t[0]*t[0] + t[1]*t[1] + t[2]*t[2]) / ((double) (frame.getTimestamp() - previousAnchorTimeStamp) / 10e8);
                if (speed >= mARCoreLocalizationFilteringSpeed) {
                    arCoreCorrection = arCoreCorrection.compose(previousAnchorPose).compose(pose.inverse());
                    t = arCoreCorrection.getTranslation();
                    Log.e(TAG, String.format("POTENTIAL TELEPORTATION!!!!!!!!!!!!!! previous anchor moved (speed=%f), new arcorrection: %f %f %f", speed, t[0], t[1], t[2]));

                    t = odomPose.getTranslation();
                    float[] t2 = (arCoreCorrection.compose(pose)).getTranslation();
                    float[] t3 = previousAnchorPose.getTranslation();
                    float[] t4 = pose.getTranslation();
                    Log.e(TAG, String.format("Odom = %f %f %f -> %f %f %f ArCore= %f %f %f -> %f %f %f", t[0], t[1], t[2], t2[0], t2[1], t2[2], t3[0], t3[1], t3[2], t4[0], t4[1], t4[2]));

                    mActivity.runOnUiThread(new Runnable() {
                        public void run() {
                            if (mToast != null) {
                                String msg = String.format("ARCore localization has been suppressed "
                                    + "because of high speed detected (%f m/s) causing a jump! You can change "
                                    + "ARCore localization filtering speed in Settings->Mapping if you are "
                                    + "indeed moving that fast.", speed);
                                if (mToast.getView() == null || !mToast.getView().isShown()) {
                                    mToast.makeText(mActivity.getApplicationContext(), msg, Toast.LENGTH_LONG).show();
                                } else {
                                    mToast.setText(msg);
                                }
                                previousAnchorPose = null;
                            }
                        }
                    });
                }
                odomPose = arCoreCorrection.compose(pose);
            }
            previousAnchorPose = pose;
            previousAnchorTimeStamp = frame.getTimestamp();

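            // Frame.getTimestamp() is in nanoseconds; 10e8 == 1e9, so the stamp below is in seconds.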
            double stamp = (double) frame.getTimestamp() / 10e8;
            if (!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("pose=%f %f %f q=%f %f %f %f stamp=%f", odomPose.tx(), odomPose.ty(), odomPose.tz(), odomPose.qx(), odomPose.qy(), odomPose.qz(), odomPose.qw(), stamp));
            RTABMapLib.postCameraPoseEvent(RTABMapActivity.nativeApplication, odomPose.tx(), odomPose.ty(), odomPose.tz(), odomPose.qx(), odomPose.qy(), odomPose.qz(), odomPose.qw(), stamp);

            CameraIntrinsics intrinsics = camera.getImageIntrinsics();
            try {
                Image image = frame.acquireCameraImage();
                if (!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("frame=%d vs image=%d", frame.getTimestamp(), image.getTimestamp()));
                PointCloud cloud = frame.acquirePointCloud();
                FloatBuffer points = cloud.getPoints();

                if (image.getFormat() != ImageFormat.YUV_420_888) {
                    throw new IllegalArgumentException(
                        "Expected image in YUV_420_888 format, got format " + image.getFormat());
                }

                if (!RTABMapActivity.DISABLE_LOG) {
                    for (int i = 0; i < image.getPlanes().length; ++i) {
                        Log.d(TAG, String.format("Plane[%d] pixel stride = %d, row stride = %d", i, image.getPlanes()[i].getPixelStride(), image.getPlanes()[i].getRowStride()));
                    }
                }

                float[] fl = intrinsics.getFocalLength();
                float[] pp = intrinsics.getPrincipalPoint();
                if (!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("fx=%f fy=%f cx=%f cy=%f", fl[0], fl[1], pp[0], pp[1]));

                ByteBuffer y = image.getPlanes()[0].getBuffer().asReadOnlyBuffer();
                ByteBuffer u = image.getPlanes()[1].getBuffer().asReadOnlyBuffer();
                ByteBuffer v = image.getPlanes()[2].getBuffer().asReadOnlyBuffer();

                if (!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("RGB %dx%d len=%dbytes format=%d stamp=%f",
                    image.getWidth(), image.getHeight(), y.limit(), image.getFormat(), stamp));

                float[] texCoord = new float[8];
                frame.transformCoordinates2d(
                    Coordinates2d.OPENGL_NORMALIZED_DEVICE_COORDINATES,
                    QUAD_COORDS,
                    Coordinates2d.IMAGE_NORMALIZED,
                    texCoord);

                float[] p = new float[16];
                camera.getProjectionMatrix(p, 0, 0.1f, 100.0f);
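                // Projection with near/far clip planes at 0.1 m and 100 m; only the entries
                // p[0], p[5], p[8..11] and p[14] are forwarded to the native side below.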

                float[] viewMatrix = new float[16];
                arCoreCorrection.compose(camera.getDisplayOrientedPose()).inverse().toMatrix(viewMatrix, 0);
                float[] quat = new float[4];
                rotationMatrixToQuaternion(viewMatrix, quat);


                if (mTOFAvailable) {
                    ByteBuffer depth;
                    double depthStamp;
                    synchronized (mTOFImageReader) {
                        depth = mTOFImageReader.depth16_raw;
                        depthStamp = (double) mTOFImageReader.timestamp / 10e8;
                    }

                    if (!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("Depth %dx%d len=%dbytes format=%d stamp=%f",
                        mTOFImageReader.WIDTH, mTOFImageReader.HEIGHT, depth.limit(), ImageFormat.DEPTH16, depthStamp));

                    RTABMapLib.postOdometryEventDepth(
                        RTABMapActivity.nativeApplication,
                        odomPose.tx(), odomPose.ty(), odomPose.tz(), odomPose.qx(), odomPose.qy(), odomPose.qz(), odomPose.qw(),
                        fl[0], fl[1], pp[0], pp[1],
                        depthIntrinsics[0], depthIntrinsics[1], depthIntrinsics[2], depthIntrinsics[3],
                        rgbExtrinsics.tx(), rgbExtrinsics.ty(), rgbExtrinsics.tz(), rgbExtrinsics.qx(), rgbExtrinsics.qy(), rgbExtrinsics.qz(), rgbExtrinsics.qw(),
                        depthExtrinsics.tx(), depthExtrinsics.ty(), depthExtrinsics.tz(), depthExtrinsics.qx(), depthExtrinsics.qy(), depthExtrinsics.qz(), depthExtrinsics.qw(),
                        stamp,
                        depthStamp,
                        y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
                        depth, depth.limit(), mTOFImageReader.WIDTH, mTOFImageReader.HEIGHT, ImageFormat.DEPTH16,
                        points, points.limit() / 4,
                        viewMatrix[12], viewMatrix[13], viewMatrix[14], quat[1], quat[2], quat[3], quat[0],
                        p[0], p[5], p[8], p[9], p[10], p[11], p[14],
                        texCoord[0], texCoord[1], texCoord[2], texCoord[3], texCoord[4], texCoord[5], texCoord[6], texCoord[7]);
                }
                else {
                    RTABMapLib.postOdometryEvent(
                        RTABMapActivity.nativeApplication,
                        odomPose.tx(), odomPose.ty(), odomPose.tz(), odomPose.qx(), odomPose.qy(), odomPose.qz(), odomPose.qw(),
                        fl[0], fl[1], pp[0], pp[1],
                        rgbExtrinsics.tx(), rgbExtrinsics.ty(), rgbExtrinsics.tz(), rgbExtrinsics.qx(), rgbExtrinsics.qy(), rgbExtrinsics.qz(), rgbExtrinsics.qw(),
                        stamp,
                        y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
                        points, points.limit() / 4,
                        viewMatrix[12], viewMatrix[13], viewMatrix[14], quat[1], quat[2], quat[3], quat[0],
                        p[0], p[5], p[8], p[9], p[10], p[11], p[14],
                        texCoord[0], texCoord[1], texCoord[2], texCoord[3], texCoord[4], texCoord[5], texCoord[6], texCoord[7]);
                }

                image.close();
                cloud.close();

            } catch (NotYetAvailableException e) {
                // The camera image is not yet available; skip this frame.
            }
        }
    }

    /********************************************************************************************************************* */
    /*************************************************** End ************************************************************* */
    /********************************************************************************************************************* */

    public ArrayList<String> getResolutions(Context context, String cameraId, int imageFormat) {
        Log.v(TAG, "getResolutions: cameraId:" + cameraId + " imageFormat: " + imageFormat);

        ArrayList<String> output = new ArrayList<String>();
        try {
            CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            for (android.util.Size s : characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(imageFormat)) {
                output.add(s.getWidth() + "x" + s.getHeight());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return output;
    }

}