ARCoreSharedCamera.java
1 package com.introlab.rtabmap;
2 
3 import java.nio.ByteBuffer;
4 import java.nio.FloatBuffer;
5 import java.util.ArrayList;
6 import java.util.Arrays;
7 import java.util.EnumSet;
8 import java.util.List;
9 import java.util.concurrent.atomic.AtomicBoolean;
10 
11 import com.google.ar.core.Anchor;
12 import com.google.ar.core.Camera;
13 import com.google.ar.core.CameraConfig;
14 import com.google.ar.core.CameraConfigFilter;
15 import com.google.ar.core.CameraIntrinsics;
16 import com.google.ar.core.Config;
17 import com.google.ar.core.Coordinates2d;
18 import com.google.ar.core.Frame;
19 import com.google.ar.core.PointCloud;
20 import com.google.ar.core.Pose;
21 import com.google.ar.core.Session;
22 import com.google.ar.core.SharedCamera;
23 import com.google.ar.core.TrackingState;
24 import com.google.ar.core.exceptions.CameraNotAvailableException;
25 import com.google.ar.core.exceptions.NotYetAvailableException;
26 import com.google.ar.core.exceptions.UnavailableException;
27 
28 import android.content.Context;
29 import android.graphics.ImageFormat;
30 import android.hardware.camera2.CameraAccessException;
31 import android.hardware.camera2.CameraCaptureSession;
32 import android.hardware.camera2.CameraCharacteristics;
33 import android.hardware.camera2.CameraDevice;
34 import android.hardware.camera2.CameraManager;
35 import android.hardware.camera2.CameraMetadata;
36 import android.hardware.camera2.CaptureFailure;
37 import android.hardware.camera2.CaptureRequest;
38 import android.hardware.camera2.TotalCaptureResult;
39 import android.media.Image;
40 import android.opengl.GLES20;
41 import android.opengl.GLSurfaceView;
42 import android.os.Handler;
43 import android.os.HandlerThread;
44 import android.support.annotation.NonNull;
45 import android.util.Log;
46 import android.view.Surface;
47 import android.widget.Toast;
48 
49 public class ARCoreSharedCamera {
50 
51  public static final String TAG = ARCoreSharedCamera.class.getSimpleName();
52 
53  private static final float[] QUAD_COORDS =
54  new float[] {
55  -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f,
56  };
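 // Full-screen quad corners in OpenGL normalized device coordinates, used with
 // Frame#transformCoordinates2d() in updateGL() to compute the camera-image texture coordinates.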
57 
58  private static RTABMapActivity mActivity;
59  public ARCoreSharedCamera(RTABMapActivity c, float arCoreLocalizationFilteringSpeed) {
60  mActivity = c;
61  mARCoreLocalizationFilteringSpeed = arCoreLocalizationFilteringSpeed;
62  }
63 
64  // Depth TOF Image.
65  // Default size; overwritten in openCamera() with a supported DEPTH16 resolution (e.g., 240x180 on the Huawei P30 Pro).
66  private int depthWidth = 640;
67  private int depthHeight = 480;
68 
69  // GL Surface used to draw camera preview image.
70  public GLSurfaceView surfaceView;
71 
72  // ARCore session that supports camera sharing.
73  private Session sharedSession;
74 
75  private Pose previousAnchorPose = null;
76  private long previousAnchorTimeStamp = 0;
77  private Pose arCoreCorrection = Pose.IDENTITY;
78  private Pose odomPose = Pose.IDENTITY;
79  private float mARCoreLocalizationFilteringSpeed = 1.0f;
80 
81  // Camera capture session. Used by both non-AR and AR modes.
82  private CameraCaptureSession captureSession;
83 
84  // Reference to the camera system service.
85  private CameraManager cameraManager;
86 
87  // Camera device. Used by both non-AR and AR modes.
88  private CameraDevice cameraDevice;
89 
90  // Looper handler thread.
91  private HandlerThread backgroundThread;
92  // Looper handler.
93  private Handler backgroundHandler;
94 
95  // ARCore shared camera instance, obtained from ARCore session that supports sharing.
96  private SharedCamera sharedCamera;
97 
98  private Toast mToast = null;
99 
100  // Camera ID for the camera used by ARCore.
101  private String cameraId;
102  private String depthCameraId;
103 
106  private float[] depthIntrinsics = null;
107 
108  private AtomicBoolean mReady = new AtomicBoolean(false);
109 
110  // Camera preview capture request builder
111  private CaptureRequest.Builder previewCaptureRequestBuilder;
112 
113  private int cameraTextureId = -1;
114 
115  // Image reader that continuously processes CPU images.
116  private TOF_ImageReader mTOFImageReader = new TOF_ImageReader();
117  private boolean mTOFAvailable = false;
118 
119  ByteBuffer mPreviousDepth = null;
120  double mPreviousDepthStamp = 0.0;
121 
122  public boolean isDepthSupported() {return mTOFAvailable;}
123 
124  public void setToast(Toast toast)
125  {
126  mToast = toast;
127  }
128 
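 // The Camera2 callbacks below are not handed to the framework directly: openCamera() and
 // createCameraPreviewSession() wrap them with SharedCamera#createARDeviceStateCallback() and
 // SharedCamera#createARSessionStateCallback() so that ARCore stays synchronized with the
 // app-owned camera device and capture session.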
129  // Camera device state callback.
130  private final CameraDevice.StateCallback cameraDeviceCallback =
131  new CameraDevice.StateCallback() {
132  @Override
133  public void onOpened(@NonNull CameraDevice cameraDevice) {
134  Log.d(TAG, "Camera device ID " + cameraDevice.getId() + " opened.");
135  ARCoreSharedCamera.this.cameraDevice = cameraDevice;
136  createCameraPreviewSession();
137  }
138 
139  @Override
140  public void onClosed(@NonNull CameraDevice cameraDevice) {
141  Log.d(TAG, "Camera device ID " + cameraDevice.getId() + " closed.");
142  ARCoreSharedCamera.this.cameraDevice = null;
143  }
144 
145  @Override
146  public void onDisconnected(@NonNull CameraDevice cameraDevice) {
147  Log.w(TAG, "Camera device ID " + cameraDevice.getId() + " disconnected.");
148  cameraDevice.close();
149  ARCoreSharedCamera.this.cameraDevice = null;
150  }
151 
152  @Override
153  public void onError(@NonNull CameraDevice cameraDevice, int error) {
154  Log.e(TAG, "Camera device ID " + cameraDevice.getId() + " error " + error);
155  cameraDevice.close();
156  ARCoreSharedCamera.this.cameraDevice = null;
157  }
158  };
159 
160  // Repeating camera capture session state callback.
161  CameraCaptureSession.StateCallback cameraCaptureCallback =
162  new CameraCaptureSession.StateCallback() {
163 
164  // Called when the camera capture session is first configured after the app
165  // is initialized, and again each time the activity is resumed.
166  @Override
167  public void onConfigured(@NonNull CameraCaptureSession session) {
168  Log.d(TAG, "Camera capture session configured.");
169  captureSession = session;
170  setRepeatingCaptureRequest();
171  }
172 
173  @Override
174  public void onSurfacePrepared(
175  @NonNull CameraCaptureSession session, @NonNull Surface surface) {
176  Log.d(TAG, "Camera capture surface prepared.");
177  }
178 
179  @Override
180  public void onReady(@NonNull CameraCaptureSession session) {
181  Log.d(TAG, "Camera capture session ready.");
182  }
183 
184  @Override
185  public void onActive(@NonNull CameraCaptureSession session) {
186  Log.d(TAG, "Camera capture session active.");
187  resumeARCore();
188  }
189 
190  @Override
191  public void onClosed(@NonNull CameraCaptureSession session) {
192  Log.d(TAG, "Camera capture session closed.");
193  }
194 
195  @Override
196  public void onConfigureFailed(@NonNull CameraCaptureSession session) {
197  Log.e(TAG, "Failed to configure camera capture session.");
198  }
199  };
200 
201  // Repeating camera capture session capture callback.
202  private final CameraCaptureSession.CaptureCallback captureSessionCallback =
203  new CameraCaptureSession.CaptureCallback() {
204 
205  @Override
206  public void onCaptureCompleted(
207  @NonNull CameraCaptureSession session,
208  @NonNull CaptureRequest request,
209  @NonNull TotalCaptureResult result) {
210  //Log.i(TAG, "onCaptureCompleted");
211  }
212 
213  //@Override // android 23
214  public void onCaptureBufferLost(
215  @NonNull CameraCaptureSession session,
216  @NonNull CaptureRequest request,
217  @NonNull Surface target,
218  long frameNumber) {
219  Log.e(TAG, "onCaptureBufferLost: " + frameNumber);
220  }
221 
222  @Override
223  public void onCaptureFailed(
224  @NonNull CameraCaptureSession session,
225  @NonNull CaptureRequest request,
226  @NonNull CaptureFailure failure) {
227  Log.e(TAG, "onCaptureFailed: " + failure.getFrameNumber() + " " + failure.getReason());
228  }
229 
230  @Override
231  public void onCaptureSequenceAborted(
232  @NonNull CameraCaptureSession session, int sequenceId) {
233  Log.e(TAG, "onCaptureSequenceAborted: " + sequenceId + " " + session);
234  }
235  };
236 
237  private void resumeARCore() {
238  // Ensure that session is valid before triggering ARCore resume. Handles the case where the user
239  // manually uninstalls ARCore while the app is paused and then resumes.
240  if (sharedSession == null) {
241  return;
242  }
243 
244  try {
245  Log.i(TAG, "Resume ARCore.");
246  // Resume ARCore.
247  sharedSession.resume();
248  // Set capture session callback while in AR mode.
249  setRepeatingCaptureRequest();
250  } catch (CameraNotAvailableException e) {
251  Log.e(TAG, "Failed to resume ARCore session", e);
252  return;
253  }
254 
255  }
256 
257 
258  // Called when starting non-AR mode or switching to non-AR mode.
259  // Also called when app starts in AR mode, or resumes in AR mode.
260  private void setRepeatingCaptureRequest() {
261  try {
262  captureSession.setRepeatingRequest(
263  previewCaptureRequestBuilder.build(), captureSessionCallback, backgroundHandler);
264  } catch (CameraAccessException e) {
265  Log.e(TAG, "Failed to set repeating request", e);
266  }
267  }
268 
269 
270  private void createCameraPreviewSession() {
271  Log.e(TAG, "createCameraPreviewSession: " + "starting camera preview session.");
272  try {
273  // Note that isGlAttached will be set to true in AR mode in onDrawFrame().
274  sharedSession.setCameraTextureName(cameraTextureId);
275 
276  // Create an ARCore compatible capture request using `TEMPLATE_RECORD`.
277  previewCaptureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
278 
279  // Build surfaces list, starting with ARCore provided surfaces.
280  List<Surface> surfaceList = sharedCamera.getArCoreSurfaces();
281  Log.e(TAG, " createCameraPreviewSession: " + "surfaceList: sharedCamera.getArCoreSurfaces(): " + surfaceList.size());
282 
283  // Add a CPU image reader surface. On devices that don't support CPU image access, the image
284  // may arrive significantly later, or not arrive at all.
285  if (mTOFAvailable && cameraId.compareTo(depthCameraId) == 0) surfaceList.add(mTOFImageReader.imageReader.getSurface());
286  // Surface list should now contain three surfaces:
287  // 0. sharedCamera.getSurfaceTexture()
288  // 1. …
289  // 2. depthImageReader.getSurface()
290 
291  // Add ARCore surfaces and CPU image surface targets.
292  for (Surface surface : surfaceList) {
293  previewCaptureRequestBuilder.addTarget(surface);
294  }
295 
296  // Wrap our callback in a shared camera callback.
297  CameraCaptureSession.StateCallback wrappedCallback = sharedCamera.createARSessionStateCallback(cameraCaptureCallback, backgroundHandler);
298 
299  // Create camera capture session for camera preview using ARCore wrapped callback.
300  cameraDevice.createCaptureSession(surfaceList, wrappedCallback, backgroundHandler);
301 
302  mReady.set(true);
303  } catch (CameraAccessException e) {
304  Log.e(TAG, "CameraAccessException", e);
305  }
306  }
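 // Startup sequence: openCamera() -> cameraDeviceCallback.onOpened() -> createCameraPreviewSession()
 // -> cameraCaptureCallback.onConfigured() (start the repeating request) -> onActive() -> resumeARCore().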
307 
308  // Start background handler thread, used to run callbacks without blocking UI thread.
309  private void startBackgroundThread() {
310  backgroundThread = new HandlerThread("sharedCameraBackground");
311  backgroundThread.start();
312  backgroundHandler = new Handler(backgroundThread.getLooper());
313  mTOFImageReader.startBackgroundThread();
314  }
315 
316  // Stop background handler thread.
317  private void stopBackgroundThread() {
318  if (backgroundThread != null) {
319  backgroundThread.quitSafely();
320  try {
321  backgroundThread.join();
322  backgroundThread = null;
323  backgroundHandler = null;
324  } catch (InterruptedException e) {
325  Log.e(TAG, "Interrupted while trying to join background handler thread", e);
326  }
327  }
328  mTOFImageReader.stopBackgroundThread();
329  }
330 
338  private static boolean contains(int[] modes, int mode) {
339  if (modes == null) {
340  return false;
341  }
342  for (int i : modes) {
343  if (i == mode) {
344  return true;
345  }
346  }
347  return false;
348  }
349 
350  // Perform various checks, then open camera device and create CPU image reader.
351  public boolean openCamera() {
352 
353  close();
354 
355  startBackgroundThread();
356 
357  if(cameraTextureId == -1)
358  {
359  int[] textures = new int[1];
360  GLES20.glGenTextures(1, textures, 0);
361  cameraTextureId = textures[0];
362  }
363 
364  Log.v(TAG, "Perform various checks, then open camera device and create CPU image reader.");
365  // Don't open camera if already opened.
366  if (cameraDevice != null) {
367  return false;
368  }
369 
370  if (sharedSession == null) {
371  try {
372  // Create ARCore session that supports camera sharing.
373  sharedSession = new Session(mActivity, EnumSet.of(Session.Feature.SHARED_CAMERA));
374  } catch (UnavailableException e) {
375  Log.e(TAG, "Failed to create ARCore session that supports camera sharing", e);
376  return false;
377  }
378 
379  // First obtain the session handle before getting the list of various camera configs.
380  // Create filter here with desired fps filters.
381  CameraConfigFilter cameraConfigFilter = new CameraConfigFilter(sharedSession);
382  CameraConfig[] cameraConfigs = sharedSession.getSupportedCameraConfigs(cameraConfigFilter).toArray(new CameraConfig[0]);
383  Log.i(TAG, "Size of supported CameraConfigs list is " + cameraConfigs.length);
384 
385  // Determine the highest supported CPU image resolution.
386  int highestResolutionIndex=-1;
387  int highestResolution = 0;
388  for(int i=0; i<cameraConfigs.length; ++i)
389  {
390  Log.i(TAG, "Camera ID: " + cameraConfigs[i].getCameraId());
391  Log.i(TAG, "Resolution: " + cameraConfigs[i].getImageSize().getWidth() + "x" + cameraConfigs[i].getImageSize().getHeight());
392  if(highestResolution == 0 || highestResolution < cameraConfigs[i].getImageSize().getWidth())
393  {
394  highestResolutionIndex = i;
395  highestResolution = cameraConfigs[i].getImageSize().getWidth();
396  }
397  }
398  if(highestResolutionIndex>=0)
399  {
400  //Log.i(TAG, "Setting camera resolution to " + cameraConfigs[highestResolutionIndex].getImageSize().getWidth() + "x" + cameraConfigs[highestResolutionIndex].getImageSize().getHeight());
401  //FIXME: Can we make it work to use HD rgb images? To avoid this error "CaptureRequest contains unconfigured Input/Output Surface!"
402  //sharedSession.setCameraConfig(cameraConfigs[highestResolutionIndex]);
403  }
404 
405  // Configure the ARCore session: fixed focus, blocking updates, and unneeded features disabled.
406  Config config = sharedSession.getConfig();
407  config.setFocusMode(Config.FocusMode.FIXED);
408  config.setUpdateMode(Config.UpdateMode.BLOCKING);
409  config.setPlaneFindingMode(Config.PlaneFindingMode.DISABLED);
410  config.setLightEstimationMode(Config.LightEstimationMode.DISABLED);
411  config.setCloudAnchorMode(Config.CloudAnchorMode.DISABLED);
412  sharedSession.configure(config);
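 // BLOCKING update mode makes Session#update() wait for a new camera frame; plane finding,
 // light estimation and cloud anchors are disabled because this class only needs the camera
 // pose, the camera image and the point cloud.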
413 
414  }
415 
416  // Store the ARCore shared camera reference.
417  sharedCamera = sharedSession.getSharedCamera();
418  // Store the ID of the camera used by ARCore.
419  cameraId = sharedSession.getCameraConfig().getCameraId();
420 
421  Log.d(TAG, "Shared camera ID: " + cameraId);
422 
423  mTOFAvailable = false;
424 
425  // Store a reference to the camera system service.
426  cameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
427 
428  depthCameraId = null;
429  // show all cameras
430  try {
431  // Find a CameraDevice that supports DEPTH16 captures, and configure state.
432  for (String tmpCameraId : cameraManager.getCameraIdList()) {
433  CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(tmpCameraId);
434 
435  Log.i(TAG, "Camera " + tmpCameraId + " extrinsics:");
436 
437  float[] translation = characteristics.get(CameraCharacteristics.LENS_POSE_TRANSLATION);
438  if(translation != null)
439  {
440  Log.i(TAG, String.format("Translation (x,y,z): %f,%f,%f", translation[0], translation[1], translation[2]));
441  }
442  float[] rotation = characteristics.get(CameraCharacteristics.LENS_POSE_ROTATION);
443  if(rotation != null)
444  {
445  Log.i(TAG, String.format("Rotation (qx,qy,qz,qw): %f,%f,%f,%f", rotation[0], rotation[1], rotation[2], rotation[3]));
446  }
447 
448  if(tmpCameraId.compareTo(cameraId)==0 && translation!=null && rotation!=null)
449  {
450  rgbExtrinsics = new Pose(translation, rotation);
451  Log.i(TAG,"Set rgb extrinsics!");
452  }
453 
454  if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) ||
455  characteristics.get(CameraCharacteristics.LENS_FACING) == CameraMetadata.LENS_FACING_FRONT) {
456  continue;
457  }
458  Log.i(TAG, "Camera " + tmpCameraId + " has depth output available");
459 
460  depthCameraId = tmpCameraId;
461  if(translation!=null && rotation != null)
462  {
463  depthExtrinsics = new Pose(translation, rotation);
464  Log.i(TAG,"Set depth extrinsics!");
465  }
466  depthIntrinsics = characteristics.get(CameraCharacteristics.LENS_INTRINSIC_CALIBRATION);
467  Log.i(TAG, String.format("Intrinsics (fx,fy,cx,cy,s): %f,%f,%f,%f,%f",
468  depthIntrinsics[0],
469  depthIntrinsics[1],
470  depthIntrinsics[2],
471  depthIntrinsics[3],
472  depthIntrinsics[4]));
473  }
474  } catch (CameraAccessException e) {
475  e.printStackTrace();
476  }
477  if(rgbExtrinsics == null)
478  {
479  float[] translation = {0,0,0};
480  float[] rotation = {0,0,0,1};
481  rgbExtrinsics = new Pose(translation, rotation);
482  }
483  if(depthExtrinsics == null)
484  {
485  depthExtrinsics = rgbExtrinsics;
486  }
487 
488  if(depthCameraId != null)
489  {
490  ArrayList<String> resolutions = getResolutions(mActivity, depthCameraId, ImageFormat.DEPTH16);
491  if (resolutions != null) {
492  float[] newDepthIntrinsics = null;
493  int largestWidth = 0;
494  for( String temp : resolutions) {
495  Log.i(TAG, "DEPTH16 resolution: " + temp);
496  depthWidth = Integer.parseInt(temp.split("x")[0]);
497  depthHeight = Integer.parseInt(temp.split("x")[1]);
498  if(depthIntrinsics != null)
499  {
500  if(depthIntrinsics[0] != 0)
501  {
502  if(largestWidth == 0 && depthWidth>largestWidth)
503  {
504  largestWidth = depthWidth; // intrinsics should match this resolution
505  }
506  // Samsung Galaxy Note10+: take smallest resolution and match the intrinsics
507  if(depthWidth < largestWidth)
508  {
509  float scale = (float)depthWidth/(float)largestWidth;
510  newDepthIntrinsics = depthIntrinsics.clone();
511  newDepthIntrinsics[0] *= scale;
512  newDepthIntrinsics[1] *= scale;
513  newDepthIntrinsics[2] *= scale;
514  newDepthIntrinsics[3] *= scale;
515  }
516  }
517  else if(depthWidth ==240 && depthHeight==180)
518  {
519  // Huawei P30 Pro: only 240x180 is working
520  break;
521  }
522  }
523  }
524  if (resolutions.size()>0) {
525  mTOFAvailable = true;
526  if(newDepthIntrinsics!=null) {
527  depthIntrinsics = newDepthIntrinsics;
528  }
529  }
530  }
531  }
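 // Note: the code assumes LENS_INTRINSIC_CALIBRATION is reported for the largest DEPTH16
 // resolution and rescales it above when a smaller depth resolution is selected
 // (see the Samsung Galaxy Note10+ case).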
532  Log.i(TAG, "TOF_available: " + mTOFAvailable);
533 
534  // Color CPU Image.
535  // Use the currently configured CPU image size.
536  //Size desiredCPUImageSize = sharedSession.getCameraConfig().getImageSize();
537 
538  if(mTOFAvailable) mTOFImageReader.createImageReader(depthWidth, depthHeight);
539 
540  // When ARCore is running, make sure it also updates our CPU image surface.
541  if (mTOFAvailable && cameraId.compareTo(depthCameraId) == 0) {
542  sharedCamera.setAppSurfaces(this.cameraId, Arrays.asList(mTOFImageReader.imageReader.getSurface()));
543  }
544 
545  try {
546 
547  // Wrap our callback in a shared camera callback.
548  CameraDevice.StateCallback wrappedCallback = sharedCamera.createARDeviceStateCallback(cameraDeviceCallback, backgroundHandler);
549 
550  // Open the camera device using the ARCore wrapped callback.
551  cameraManager.openCamera(cameraId, wrappedCallback, backgroundHandler);
552 
553  if(mTOFAvailable && cameraId.compareTo(depthCameraId) != 0)
554  {
556  }
557 
558  } catch (CameraAccessException e) {
559  Log.e(TAG, "Failed to open camera", e);
560  return false;
561  } catch (IllegalArgumentException e) {
562  Log.e(TAG, "Failed to open camera", e);
563  return false;
564  } catch (SecurityException e) {
565  Log.e(TAG, "Failed to open camera", e);
566  return false;
567  }
568 
569  return true;
570  }
571 
572  public static void rotationMatrixToQuaternion(float[] R, float[] q) {
573  final float m00 = R[0];
574  final float m10 = R[1];
575  final float m20 = R[2];
576  final float m01 = R[4];
577  final float m11 = R[5];
578  final float m21 = R[6];
579  final float m02 = R[8];
580  final float m12 = R[9];
581  final float m22 = R[10];
582 
583  float tr = m00 + m11 + m22;
584 
585  if (tr > 0) {
586  float S = (float) Math.sqrt(tr + 1.0) * 2; // S=4*qw
587  q[0] = 0.25f * S;
588  q[1] = (m21 - m12) / S;
589  q[2] = (m02 - m20) / S;
590  q[3] = (m10 - m01) / S;
591  } else if ((m00 > m11) & (m00 > m22)) {
592  float S = (float) Math.sqrt(1.0 + m00 - m11 - m22) * 2; //
593  // S=4*q[1]
594  q[0] = (m21 - m12) / S;
595  q[1] = 0.25f * S;
596  q[2] = (m01 + m10) / S;
597  q[3] = (m02 + m20) / S;
598  } else if (m11 > m22) {
599  float S = (float) Math.sqrt(1.0 + m11 - m00 - m22) * 2; //
600  // S=4*q[2]
601  q[0] = (m02 - m20) / S;
602  q[1] = (m01 + m10) / S;
603  q[2] = 0.25f * S;
604  q[3] = (m12 + m21) / S;
605  } else {
606  float S = (float) Math.sqrt(1.0 + m22 - m00 - m11) * 2; //
607  // S=4*q[3]
608  q[0] = (m10 - m01) / S;
609  q[1] = (m02 + m20) / S;
610  q[2] = (m12 + m21) / S;
611  q[3] = 0.25f * S;
612  }
613 
614  }
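 // Standard rotation-matrix-to-quaternion conversion operating on the 3x3 rotation part of a
 // column-major 4x4 matrix; the result is stored as q = [w, x, y, z], which is why callers pass
 // quat[1], quat[2], quat[3], quat[0] as qx, qy, qz, qw.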
615 
616 
617  // Close the camera device.
618  public void close() {
619  Log.w(TAG, "close()");
620 
621  if (sharedSession != null) {
622  sharedSession.close();
623  sharedSession = null;
624  }
625 
626  if (captureSession != null) {
627  captureSession.close();
628  captureSession = null;
629  }
630  if (cameraDevice != null) {
631  cameraDevice.close();
632  cameraDevice = null;
633  }
634 
635  mTOFImageReader.close();
636 
637  if(cameraTextureId>=0)
638  {
639  GLES20.glDeleteTextures(1, new int[] {cameraTextureId}, 0);
640  }
641 
642  stopBackgroundThread();
643  }
644 
645  public void setDisplayGeometry(int rotation, int width, int height)
646  {
647  if(sharedSession!=null)
648  sharedSession.setDisplayGeometry(rotation, width, height);
649  }
650 
651  /*************************************************** ONDRAWFRAME ARCORE ************************************************************* */
652  // Draw frame when in AR mode. Called on the GL thread.
653  public void updateGL() throws CameraNotAvailableException {
654 
655  if(!mReady.get() || sharedSession == null)
656  {
657  return;
658  }
659 
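 // When a ToF camera is used, wait for the first depth frame before publishing anything.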
660  if (mTOFAvailable && mTOFImageReader.frameCount == 0) return;
661 
662  // Perform ARCore per-frame update.
663  Frame frame = null;
664  try {
665  frame = sharedSession.update();
666  } catch (Exception e) {
667  e.printStackTrace();
668  return;
669  }
670 
671  Camera camera = null;
672  if (frame != null) {
673  camera = frame.getCamera();
674  }else
675  {
676  Log.e(TAG, "ARCore frame is null!");
677  return;
678  }
679 
680  if (camera == null) {
681  Log.e(TAG, String.format("camera is null!"));
682  return;
683  }
684  if (camera.getTrackingState() != TrackingState.TRACKING) {
685  final String trackingState = camera.getTrackingState().toString();
686  Log.e(TAG, String.format("Tracking lost! state=%s", trackingState));
687  // This will force a new session on the next frame received
689  mActivity.runOnUiThread(new Runnable() {
690  public void run() {
691  if(mToast!=null && previousAnchorPose != null)
692  {
693  String msg = "Tracking lost! If you are mapping, you will need to relocalize before continuing.";
694  if(mToast.getView() == null || !mToast.getView().isShown())
695  {
696  mToast.makeText(mActivity.getApplicationContext(),
697  msg, Toast.LENGTH_LONG).show();
698  }
699  else
700  {
701  mToast.setText(msg);
702  }
703  previousAnchorPose = null;
704  arCoreCorrection = Pose.IDENTITY;
705  }
706  }
707  });
708  return;
709  }
710 
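 // ARCore pose filtering: motion since the previous frame is compared against
 // mARCoreLocalizationFilteringSpeed; a jump faster than that threshold is treated as an ARCore
 // relocalization and its translation is cancelled via arCoreCorrection so that the exported
 // odometry stays continuous.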
711  if (frame.getTimestamp() != 0) {
712  Pose pose = camera.getPose();
713 
714  // Remove ARCore SLAM corrections by integrating pose from previous frame anchor
715  if(previousAnchorPose == null || mARCoreLocalizationFilteringSpeed <= 0)
716  {
717  odomPose = pose;
718  }
719  else
720  {
721  float[] t = previousAnchorPose.inverse().compose(pose).getTranslation();
722  final double speed = Math.sqrt(t[0]*t[0]+t[1]*t[1]+t[2]*t[2])/((double)(frame.getTimestamp()-previousAnchorTimeStamp)/10e8);
723  if(speed >= mARCoreLocalizationFilteringSpeed)
724  {
725  // Only correct the translation to not lose rotation aligned with gravity
726  arCoreCorrection = arCoreCorrection.compose(previousAnchorPose.compose(pose.inverse()).extractTranslation());
727  t = arCoreCorrection.getTranslation();
728  Log.e(TAG, String.format("POTENTIAL TELEPORTATION!!!!!!!!!!!!!! previous anchor moved (speed=%f), new arcorrection: %f %f %f", speed, t[0], t[1], t[2]));
729 
730  t = odomPose.getTranslation();
731  float[] t2 = (arCoreCorrection.compose(pose)).getTranslation();
732  float[] t3 = previousAnchorPose.getTranslation();
733  float[] t4 = pose.getTranslation();
734  Log.e(TAG, String.format("Odom = %f %f %f -> %f %f %f ArCore= %f %f %f -> %f %f %f", t[0], t[1], t[2], t2[0], t2[1], t2[2], t3[0], t3[1], t3[2], t4[0], t4[1], t4[2]));
735 
736  mActivity.runOnUiThread(new Runnable() {
737  public void run() {
738  if(mToast!=null)
739  {
740  String msg = String.format("ARCore localization has been suppressed "
741  + "because of high speed detected (%f m/s) causing a jump! You can change "
742  + "ARCore localization filtering speed in Settings->Mapping if you are "
743  + "indeed moving as fast.", speed);
744  if(mToast.getView() == null || !mToast.getView().isShown())
745  {
746  mToast.makeText(mActivity.getApplicationContext(), msg, Toast.LENGTH_LONG).show();
747  }
748  else
749  {
750  mToast.setText(msg);
751  }
752  }
753  }
754  });
755  }
756  odomPose = arCoreCorrection.compose(pose);
757  }
758  previousAnchorPose = pose;
759  previousAnchorTimeStamp = frame.getTimestamp();
760 
761  double stamp = (double)frame.getTimestamp()/10e8;
762  if(!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("pose=%f %f %f arcore %f %f %f cor= %f %f %f stamp=%f", odomPose.tx(), odomPose.ty(), odomPose.tz(), pose.tx(), pose.ty(), pose.tz(), arCoreCorrection.tx(), arCoreCorrection.ty(), arCoreCorrection.tz(), stamp));
763  RTABMapLib.postCameraPoseEvent(RTABMapActivity.nativeApplication, odomPose.tx(), odomPose.ty(), odomPose.tz(), odomPose.qx(), odomPose.qy(), odomPose.qz(), odomPose.qw(), stamp);
764  CameraIntrinsics intrinsics = camera.getImageIntrinsics();
765  try{
766  Image image = frame.acquireCameraImage();
767  if(!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("frame=%d vs image=%d", frame.getTimestamp(), image.getTimestamp()));
768  PointCloud cloud = frame.acquirePointCloud();
769  FloatBuffer points = cloud.getPoints();
770 
771  if (image.getFormat() != ImageFormat.YUV_420_888) {
772  throw new IllegalArgumentException(
773  "Expected image in YUV_420_888 format, got format " + image.getFormat());
774  }
775 
776  if(!RTABMapActivity.DISABLE_LOG)
777  {
778  for(int i =0;i<image.getPlanes().length;++i)
779  {
780  Log.d(TAG, String.format("Plane[%d] pixel stride = %d, row stride = %d", i, image.getPlanes()[i].getPixelStride(), image.getPlanes()[i].getRowStride()));
781  }
782  }
783 
784  float[] fl = intrinsics.getFocalLength();
785  float[] pp = intrinsics.getPrincipalPoint();
786  if(!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("fx=%f fy=%f cx=%f cy=%f", fl[0], fl[1], pp[0], pp[1]));
787 
788  ByteBuffer y = image.getPlanes()[0].getBuffer().asReadOnlyBuffer();
789  ByteBuffer u = image.getPlanes()[1].getBuffer().asReadOnlyBuffer();
790  ByteBuffer v = image.getPlanes()[2].getBuffer().asReadOnlyBuffer();
791 
792  if(!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("RGB %dx%d len=%dbytes format=%d stamp=%f",
793  image.getWidth(), image.getHeight(), y.limit(), image.getFormat(), stamp));
794 
795  float[] texCoord = new float[8];
796  frame.transformCoordinates2d(
797  Coordinates2d.OPENGL_NORMALIZED_DEVICE_COORDINATES,
798  QUAD_COORDS,
799  Coordinates2d.IMAGE_NORMALIZED,
800  texCoord);
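 // texCoord now maps the full-screen quad corners to normalized camera-image coordinates,
 // accounting for the display rotation and crop; it is forwarded with the odometry event below.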
801 
802  float[] p = new float[16];
803  camera.getProjectionMatrix(p, 0, 0.1f, 100.0f);
804 
805  float[] viewMatrix = new float[16];
806  arCoreCorrection.compose(camera.getDisplayOrientedPose()).inverse().toMatrix(viewMatrix, 0);
807  float[] quat = new float[4];
808  rotationMatrixToQuaternion(viewMatrix, quat);
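 // The corrected, display-oriented camera pose is inverted into a column-major view matrix:
 // elements 12-14 hold the translation and quat (w,x,y,z) the rotation; both are sent below
 // as the view pose of the odometry event.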
809 
810 
811  if(mTOFAvailable)
812  {
813  ByteBuffer depth;
814  double depthStamp;
815  synchronized (mTOFImageReader) {
816  depth = mTOFImageReader.depth16_raw;
817  depthStamp = (double)mTOFImageReader.timestamp/10e8;
818  }
819 
820  if(mPreviousDepth == null)
821  {
822  mPreviousDepth = depth;
823  mPreviousDepthStamp = depthStamp;
824  }
825 
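 // If the newest depth frame is more recent than the RGB frame, the previous depth image is sent
 // instead so that the depth timestamp never leads the RGB timestamp (depthStamp>stamp below).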
826  if(!RTABMapActivity.DISABLE_LOG) Log.d(TAG, String.format("Depth %dx%d len=%dbytes format=%d stamp=%f",
827  mTOFImageReader.WIDTH, mTOFImageReader.HEIGHT, depth.limit(), ImageFormat.DEPTH16, depthStamp));
828 
829  RTABMapLib.postOdometryEventDepth(
830  RTABMapActivity.nativeApplication,
831  odomPose.tx(), odomPose.ty(), odomPose.tz(), odomPose.qx(), odomPose.qy(), odomPose.qz(), odomPose.qw(),
832  fl[0], fl[1], pp[0], pp[1],
833  depthIntrinsics[0], depthIntrinsics[1], depthIntrinsics[2], depthIntrinsics[3],
834  rgbExtrinsics.tx(), rgbExtrinsics.ty(), rgbExtrinsics.tz(), rgbExtrinsics.qx(), rgbExtrinsics.qy(), rgbExtrinsics.qz(), rgbExtrinsics.qw(),
835  depthExtrinsics.tx(), depthExtrinsics.ty(), depthExtrinsics.tz(), depthExtrinsics.qx(), depthExtrinsics.qy(), depthExtrinsics.qz(), depthExtrinsics.qw(),
836  stamp,
837  depthStamp>stamp?mPreviousDepthStamp:depthStamp,
838  y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
839  depthStamp>stamp?mPreviousDepth:depth, depthStamp>stamp?mPreviousDepth.limit():depth.limit(), mTOFImageReader.WIDTH, mTOFImageReader.HEIGHT, ImageFormat.DEPTH16,
840  points, points.limit()/4,
841  viewMatrix[12], viewMatrix[13], viewMatrix[14], quat[1], quat[2], quat[3], quat[0],
842  p[0], p[5], p[8], p[9], p[10], p[11], p[14],
843  texCoord[0],texCoord[1],texCoord[2],texCoord[3],texCoord[4],texCoord[5],texCoord[6],texCoord[7]);
844 
845 
846  mPreviousDepthStamp = depthStamp;
847  mPreviousDepth = depth;
848  }
849  else
850  {
851  RTABMapLib.postOdometryEvent(
852  RTABMapActivity.nativeApplication,
853  odomPose.tx(), odomPose.ty(), odomPose.tz(), odomPose.qx(), odomPose.qy(), odomPose.qz(), odomPose.qw(),
854  fl[0], fl[1], pp[0], pp[1],
855  rgbExtrinsics.tx(), rgbExtrinsics.ty(), rgbExtrinsics.tz(), rgbExtrinsics.qx(), rgbExtrinsics.qy(), rgbExtrinsics.qz(), rgbExtrinsics.qw(),
856  stamp,
857  y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
858  points, points.limit()/4,
859  viewMatrix[12], viewMatrix[13], viewMatrix[14], quat[1], quat[2], quat[3], quat[0],
860  p[0], p[5], p[8], p[9], p[10], p[11], p[14],
861  texCoord[0],texCoord[1],texCoord[2],texCoord[3],texCoord[4],texCoord[5],texCoord[6],texCoord[7]);
862  }
863 
864  image.close();
865  cloud.close();
866 
867  } catch (NotYetAvailableException e) {
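 // Camera image not yet available; skip this frame.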
868 
869  }
870  }
871  }
872 
873  /********************************************************************************************************************* */
874  /*************************************************** End ************************************************************* */
875  /********************************************************************************************************************* */
876 
877 
878  public ArrayList<String> getResolutions (Context context, String cameraId, int imageFormat){
879  Log.v(TAG, "getResolutions: cameraId:" + cameraId + " imageFormat: " + imageFormat);
880 
881  ArrayList<String> output = new ArrayList<String>();
882  try {
883  CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
884  CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
885 
886  for (android.util.Size s : characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(imageFormat)) {
887  output.add(s.getWidth() + "x" + s.getHeight());
888  }
889  } catch (Exception e) {
890  e.printStackTrace();
891  }
892  return output;
893  }
894 
895 }