ARCoreSharedCamera.java
package com.introlab.rtabmap;

import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import com.google.ar.core.Camera;
import com.google.ar.core.CameraIntrinsics;
import com.google.ar.core.Config;
import com.google.ar.core.Frame;
import com.google.ar.core.ImageMetadata;
import com.google.ar.core.PointCloud;
import com.google.ar.core.Pose;
import com.google.ar.core.Session;
import com.google.ar.core.SharedCamera;
import com.google.ar.core.TrackingState;
import com.google.ar.core.exceptions.CameraNotAvailableException;
import com.google.ar.core.exceptions.NotYetAvailableException;
import com.google.ar.core.exceptions.UnavailableException;

import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.cam2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.Surface;

public class ARCoreSharedCamera {

    public static final String TAG = ARCoreSharedCamera.class.getSimpleName();

    private static RTABMapActivity mActivity;

    public static void setActivity(RTABMapActivity c) {
        mActivity = c;
    }
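
    // Typical call sequence for this class, sketched from its public API (the exact
    // call sites live in RTABMapActivity and may differ):
    //   ARCoreSharedCamera.setActivity(activity);  // once, before opening the camera
    //   openCamera();                               // e.g. on resume
    //   updateGL();                                 // every frame, on the GL thread
    //   close();                                    // e.g. on pause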

    // Depth TOF Image.
    // Use 240 * 180 for now, hardcoded for Huawei P30 Pro.
    private static final int DEPTH_WIDTH = 240;
    private static final int DEPTH_HEIGHT = 180;
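    // Each DEPTH16 sample is 16 bits: the 13 low bits encode distance in millimeters,
    // the 3 high bits a confidence value (see android.graphics.ImageFormat.DEPTH16).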

    // GL Surface used to draw camera preview image.
    public GLSurfaceView surfaceView;

    // ARCore session that supports camera sharing.
    private Session sharedSession;

    // Camera capture session. Used by both non-AR and AR modes.
    private CameraCaptureSession captureSession;

    // Reference to the camera system service.
    private CameraManager cameraManager;

    // Camera device. Used by both non-AR and AR modes.
    private CameraDevice cameraDevice;

    // Looper handler thread.
    private HandlerThread backgroundThread;
    // Looper handler.
    private Handler backgroundHandler;

    // ARCore shared camera instance, obtained from ARCore session that supports sharing.
    private SharedCamera sharedCamera;

    // Camera ID for the camera used by ARCore.
    private String cameraId;

    private AtomicBoolean mReady = new AtomicBoolean(false);
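    // Set to true once the preview capture session is configured (see
    // createCameraPreviewSession()); updateGL() returns early while it is false.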

    // Camera preview capture request builder.
    private CaptureRequest.Builder previewCaptureRequestBuilder;

    private int cameraTextureId = -1;

    // Image reader that continuously processes CPU images (TOF depth).
    private TOF_ImageReader mTOFImageReader = new TOF_ImageReader();
    private boolean mTOFAvailable = false;

    public boolean isDepthSupported() { return mTOFAvailable; }

    // Camera device state callback.
    private final CameraDevice.StateCallback cameraDeviceCallback =
            new CameraDevice.StateCallback() {
                @Override
                public void onOpened(@NonNull CameraDevice cameraDevice) {
                    Log.d(TAG, "Camera device ID " + cameraDevice.getId() + " opened.");
                    ARCoreSharedCamera.this.cameraDevice = cameraDevice;
                    createCameraPreviewSession();
                }

                @Override
                public void onClosed(@NonNull CameraDevice cameraDevice) {
                    Log.d(TAG, "Camera device ID " + cameraDevice.getId() + " closed.");
                    ARCoreSharedCamera.this.cameraDevice = null;
                }

                @Override
                public void onDisconnected(@NonNull CameraDevice cameraDevice) {
                    Log.w(TAG, "Camera device ID " + cameraDevice.getId() + " disconnected.");
                    cameraDevice.close();
                    ARCoreSharedCamera.this.cameraDevice = null;
                }

                @Override
                public void onError(@NonNull CameraDevice cameraDevice, int error) {
                    Log.e(TAG, "Camera device ID " + cameraDevice.getId() + " error " + error);
                    cameraDevice.close();
                    ARCoreSharedCamera.this.cameraDevice = null;
                }
            };
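
    // Note: this callback is not handed to the framework directly; openCamera() wraps it
    // with sharedCamera.createARDeviceStateCallback() so that ARCore also receives the
    // camera device events it needs for sharing.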

    // Repeating camera capture session state callback.
    CameraCaptureSession.StateCallback cameraCaptureCallback =
            new CameraCaptureSession.StateCallback() {

                // Called when the camera capture session is first configured after the app
                // is initialized, and again each time the activity is resumed.
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    Log.d(TAG, "Camera capture session configured.");
                    captureSession = session;
                    setRepeatingCaptureRequest();
                }

                @Override
                public void onSurfacePrepared(
                        @NonNull CameraCaptureSession session, @NonNull Surface surface) {
                    Log.d(TAG, "Camera capture surface prepared.");
                }

                @Override
                public void onReady(@NonNull CameraCaptureSession session) {
                    Log.d(TAG, "Camera capture session ready.");
                }

                @Override
                public void onActive(@NonNull CameraCaptureSession session) {
                    Log.d(TAG, "Camera capture session active.");
                    resumeARCore();
                }

                @Override
                public void onClosed(@NonNull CameraCaptureSession session) {
                    Log.d(TAG, "Camera capture session closed.");
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    Log.e(TAG, "Failed to configure camera capture session.");
                }
            };

    // Repeating camera capture session capture callback.
    private final CameraCaptureSession.CaptureCallback captureSessionCallback =
            new CameraCaptureSession.CaptureCallback() {

                @Override
                public void onCaptureCompleted(
                        @NonNull CameraCaptureSession session,
                        @NonNull CaptureRequest request,
                        @NonNull TotalCaptureResult result) {
                    Log.i(TAG, "onCaptureCompleted");
                }

                //@Override // onCaptureBufferLost was added in API level 24
                public void onCaptureBufferLost(
                        @NonNull CameraCaptureSession session,
                        @NonNull CaptureRequest request,
                        @NonNull Surface target,
                        long frameNumber) {
                    Log.e(TAG, "onCaptureBufferLost: " + frameNumber);
                }

                @Override
                public void onCaptureFailed(
                        @NonNull CameraCaptureSession session,
                        @NonNull CaptureRequest request,
                        @NonNull CaptureFailure failure) {
                    Log.e(TAG, "onCaptureFailed: " + failure.getFrameNumber() + " " + failure.getReason());
                }

                @Override
                public void onCaptureSequenceAborted(
                        @NonNull CameraCaptureSession session, int sequenceId) {
                    Log.e(TAG, "onCaptureSequenceAborted: " + sequenceId + " " + session);
                }
            };

    private void resumeARCore() {
        // Ensure that the session is valid before triggering an ARCore resume. Handles the case
        // where the user manually uninstalls ARCore while the app is paused and then resumes.
        if (sharedSession == null) {
            return;
        }

        try {
            Log.i(TAG, "Resume ARCore.");
            // Resume ARCore.
            sharedSession.resume();
            // Set capture session callback while in AR mode.
            sharedCamera.setCaptureCallback(captureSessionCallback, backgroundHandler);
        } catch (CameraNotAvailableException e) {
            Log.e(TAG, "Failed to resume ARCore session", e);
        }
    }

    // Called when starting non-AR mode or switching to non-AR mode.
    // Also called when app starts in AR mode, or resumes in AR mode.
    private void setRepeatingCaptureRequest() {
        try {
            captureSession.setRepeatingRequest(
                    previewCaptureRequestBuilder.build(), captureSessionCallback, backgroundHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "Failed to set repeating request", e);
        }
    }

    private void createCameraPreviewSession() {
        Log.e(TAG, "createCameraPreviewSession: starting camera preview session.");
        try {
            // Note that isGlAttached will be set to true in AR mode in onDrawFrame().
            sharedSession.setCameraTextureName(cameraTextureId);

            // Create an ARCore-compatible capture request using `TEMPLATE_RECORD`.
            previewCaptureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);

            // Build surfaces list, starting with ARCore provided surfaces.
            List<Surface> surfaceList = sharedCamera.getArCoreSurfaces();
            Log.e(TAG, "createCameraPreviewSession: surfaceList: sharedCamera.getArCoreSurfaces(): " + surfaceList.size());

            // Add a CPU image reader surface. On devices that don't support CPU image access, the image
            // may arrive significantly later, or not arrive at all.
            if (mTOFAvailable) surfaceList.add(mTOFImageReader.imageReader.getSurface());
            // Surface list should now contain three surfaces:
            // 0. sharedCamera.getSurfaceTexture()
            // 1. …
            // 2. depthImageReader.getSurface()

            // Add ARCore surfaces and CPU image surface targets.
            for (Surface surface : surfaceList) {
                previewCaptureRequestBuilder.addTarget(surface);
            }

            // Wrap our callback in a shared camera callback.
            CameraCaptureSession.StateCallback wrappedCallback =
                    sharedCamera.createARSessionStateCallback(cameraCaptureCallback, backgroundHandler);

            // Create a camera capture session for camera preview using the ARCore-wrapped callback.
            cameraDevice.createCaptureSession(surfaceList, wrappedCallback, backgroundHandler);

            mReady.set(true);
        } catch (CameraAccessException e) {
            Log.e(TAG, "CameraAccessException", e);
        }
    }

    // Start background handler thread, used to run callbacks without blocking UI thread.
    private void startBackgroundThread() {
        backgroundThread = new HandlerThread("sharedCameraBackground");
        backgroundThread.start();
        backgroundHandler = new Handler(backgroundThread.getLooper());
        mTOFImageReader.startBackgroundThread();
    }

    // Stop background handler thread.
    private void stopBackgroundThread() {
        if (backgroundThread != null) {
            backgroundThread.quitSafely();
            try {
                backgroundThread.join();
                backgroundThread = null;
                backgroundHandler = null;
            } catch (InterruptedException e) {
                Log.e(TAG, "Interrupted while trying to join background handler thread", e);
            }
        }
        mTOFImageReader.stopBackgroundThread();
    }

    private long mPreviousTime = 0;

    // Perform various checks, then open the camera device and create the CPU image reader.
    public boolean openCamera() {

        close();

        startBackgroundThread();

        mPreviousTime = System.currentTimeMillis();

        if (cameraTextureId == -1) {
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            cameraTextureId = textures[0];
        }

        Log.v(TAG, "openCamera: perform various checks, then open camera device and create CPU image reader.");
        // Don't open the camera if it is already opened.
        if (cameraDevice != null) {
            return false;
        }

        if (sharedSession == null) {
            try {
                // Create an ARCore session that supports camera sharing.
                sharedSession = new Session(mActivity, EnumSet.of(Session.Feature.SHARED_CAMERA));
            } catch (UnavailableException e) {
                Log.e(TAG, "Failed to create ARCore session that supports camera sharing", e);
                return false;
            }

            // Use fixed focus and the latest camera image while ARCore is running.
            Config config = sharedSession.getConfig();
            config.setFocusMode(Config.FocusMode.FIXED);
            config.setUpdateMode(Config.UpdateMode.LATEST_CAMERA_IMAGE);
            config.setPlaneFindingMode(Config.PlaneFindingMode.DISABLED);
            config.setLightEstimationMode(Config.LightEstimationMode.DISABLED);
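            // Plane finding and light estimation are not needed here (RTAB-Map does its own
            // mapping from the raw pose and images), so they are disabled, presumably to
            // save CPU.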
            //config.setCloudAnchorMode(Config.CloudAnchorMode.ENABLED);
            sharedSession.configure(config);

        }

        // Store the ARCore shared camera reference.
        sharedCamera = sharedSession.getSharedCamera();
        // Store the ID of the camera used by ARCore.
        cameraId = sharedSession.getCameraConfig().getCameraId();
        initCamera(mActivity, cameraId, 1);
        ArrayList<String> resolutions;

        mTOFAvailable = false;

        resolutions = getResolutions(mActivity, cameraId, ImageFormat.DEPTH16);
        if (resolutions != null) {
            for (String temp : resolutions) {
                Log.e(TAG, "DEPTH16 resolution: " + temp);
            }
            if (resolutions.size() > 0) mTOFAvailable = true;
        }

        // Color CPU Image.
        // Use the currently configured CPU image size.
        //Size desiredCPUImageSize = sharedSession.getCameraConfig().getImageSize();

        if (mTOFAvailable) mTOFImageReader.createImageReader(DEPTH_WIDTH, DEPTH_HEIGHT);

        // When ARCore is running, make sure it also updates our CPU image surface.
        if (mTOFAvailable) {
            sharedCamera.setAppSurfaces(this.cameraId, Arrays.asList(mTOFImageReader.imageReader.getSurface()));
        }

        try {
            // Wrap our callback in a shared camera callback.
            CameraDevice.StateCallback wrappedCallback =
                    sharedCamera.createARDeviceStateCallback(cameraDeviceCallback, backgroundHandler);

            // Store a reference to the camera system service.
            cameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);

            // Get the characteristics for the ARCore camera.
            //CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(this.cameraId);

            // Open the camera device using the ARCore wrapped callback.
            cameraManager.openCamera(cameraId, wrappedCallback, backgroundHandler);

        } catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
            Log.e(TAG, "Failed to open camera", e);
            return false;
        }

        Log.i(TAG, "openCamera: TOF available: " + mTOFAvailable);
        return true;
    }

    // Close the camera device.
    public void close() {

        if (sharedSession != null) {
            sharedSession.pause();
        }

        if (captureSession != null) {
            captureSession.close();
            captureSession = null;
        }
        if (cameraDevice != null) {
            cameraDevice.close();
        }

        if (mTOFImageReader.imageReader != null) {
            mTOFImageReader.imageReader.close();
            mTOFImageReader.imageReader = null;
        }

        if (cameraTextureId >= 0) {
            GLES20.glDeleteTextures(1, new int[] {cameraTextureId}, 0);
        }

        stopBackgroundThread();
    }

    /*************************************************** ONDRAWFRAME ARCORE ************************************************************* */

    // Draw frame when in AR mode. Called on the GL thread.
    public void updateGL() throws CameraNotAvailableException {

        if (!mReady.get()) {
            return;
        }

        // Wait until the TOF image reader has received at least one frame.
        if (mTOFAvailable && mTOFImageReader.frameCount == 0) return;

        // Perform ARCore per-frame update.
        Frame frame = null;
        try {
            frame = sharedSession.update();
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }

        if (frame == null) {
            return;
        }

        Camera camera = frame.getCamera();
        if (camera == null) return;

        // If not tracking, don't draw 3D objects.
        if (camera.getTrackingState() == TrackingState.PAUSED) return;

        if (frame.getTimestamp() != 0) {

            Pose pose = camera.getPose();
            if (!RTABMapActivity.DISABLE_LOG)
                Log.d(TAG, String.format("pose=%f %f %f q=%f %f %f %f",
                        pose.tx(), pose.ty(), pose.tz(), pose.qx(), pose.qy(), pose.qz(), pose.qw()));
            RTABMapLib.postCameraPoseEvent(RTABMapActivity.nativeApplication,
                    pose.tx(), pose.ty(), pose.tz(), pose.qx(), pose.qy(), pose.qz(), pose.qw());

            int rateMs = 100; // send images at most 10 Hz
            if (System.currentTimeMillis() - mPreviousTime < rateMs) {
                return;
            }
            mPreviousTime = System.currentTimeMillis();

            CameraIntrinsics intrinsics = camera.getImageIntrinsics();
            try {
                Image image = frame.acquireCameraImage();
                PointCloud cloud = frame.acquirePointCloud();
                FloatBuffer points = cloud.getPoints();

                if (image.getFormat() != ImageFormat.YUV_420_888) {
                    throw new IllegalArgumentException(
                            "Expected image in YUV_420_888 format, got format " + image.getFormat());
                }

                if (!RTABMapActivity.DISABLE_LOG) {
                    for (int i = 0; i < image.getPlanes().length; ++i) {
                        Log.d(TAG, String.format("Plane[%d] pixel stride = %d, row stride = %d",
                                i, image.getPlanes()[i].getPixelStride(), image.getPlanes()[i].getRowStride()));
                    }
                }

                float[] fl = intrinsics.getFocalLength();
                float[] pp = intrinsics.getPrincipalPoint();
                if (!RTABMapActivity.DISABLE_LOG)
                    Log.d(TAG, String.format("fx=%f fy=%f cx=%f cy=%f", fl[0], fl[1], pp[0], pp[1]));

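                // YUV_420_888 guarantees three planes: Y at full resolution, with U and V
                // subsampled by 2 in each dimension; they are passed to native code separately.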
                ByteBuffer y = image.getPlanes()[0].getBuffer().asReadOnlyBuffer();
                ByteBuffer u = image.getPlanes()[1].getBuffer().asReadOnlyBuffer();
                ByteBuffer v = image.getPlanes()[2].getBuffer().asReadOnlyBuffer();

                double stamp = (double) image.getTimestamp() / 1.0e9; // ns -> s
                if (!RTABMapActivity.DISABLE_LOG)
                    Log.d(TAG, String.format("RGB %dx%d len=%dbytes format=%d stamp=%f",
                            image.getWidth(), image.getHeight(), y.limit(), image.getFormat(), stamp));

                if (mTOFAvailable) {
                    if (!RTABMapActivity.DISABLE_LOG)
                        Log.d(TAG, String.format("Depth %dx%d len=%dbytes format=%d stamp=%f",
                                mTOFImageReader.WIDTH, mTOFImageReader.HEIGHT, mTOFImageReader.depth16_raw.limit(),
                                ImageFormat.DEPTH16, (double) mTOFImageReader.timestamp / 1.0e9));

                    RTABMapLib.postOdometryEvent(RTABMapActivity.nativeApplication,
                            pose.tx(), pose.ty(), pose.tz(), pose.qx(), pose.qy(), pose.qz(), pose.qw(),
                            fl[0], fl[1], pp[0], pp[1], stamp,
                            y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
                            mTOFImageReader.depth16_raw, mTOFImageReader.depth16_raw.limit(),
                            mTOFImageReader.WIDTH, mTOFImageReader.HEIGHT, ImageFormat.DEPTH16,
                            points, points.limit() / 4); // 4 floats per point: x, y, z, confidence
                } else {
                    // No TOF depth available: pass an empty depth buffer.
                    ByteBuffer bb = ByteBuffer.allocate(0);
                    RTABMapLib.postOdometryEvent(RTABMapActivity.nativeApplication,
                            pose.tx(), pose.ty(), pose.tz(), pose.qx(), pose.qy(), pose.qz(), pose.qw(),
                            fl[0], fl[1], pp[0], pp[1], stamp,
                            y, u, v, y.limit(), image.getWidth(), image.getHeight(), image.getFormat(),
                            bb, 0, 0, 0, ImageFormat.DEPTH16,
                            points, points.limit() / 4);
                }

                image.close();
                cloud.close();

            } catch (NotYetAvailableException e) {
                // The camera image is not yet available; skip this frame.
            }
        }
    }

    /********************************************************************************************************************* */
    /*************************************************** End ************************************************************* */
    /********************************************************************************************************************* */

    public ArrayList<String> getResolutions(Context context, String cameraId, int imageFormat) {
        Log.v(TAG, "getResolutions: cameraId:" + cameraId + " imageFormat: " + imageFormat);

        ArrayList<String> output = new ArrayList<String>();
        try {
            CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            for (android.util.Size s : characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(imageFormat)) {
                output.add(s.getWidth() + "x" + s.getHeight());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return output;
    }
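
    // Example (device-dependent, shown only as a sketch): on a phone with a TOF sensor such
    // as the P30 Pro targeted above, getResolutions(context, cameraId, ImageFormat.DEPTH16)
    // would be expected to include "240x180", matching DEPTH_WIDTH x DEPTH_HEIGHT.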

    public void initCamera(Context context, String cameraId, int index) {
        boolean ok = false;
        try {
            int current = 0;
            CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            for (android.util.Size s : characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.DEPTH16)) {
                ok = true;
                if (current == index)
                    break;
                current++;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (!ok) {
            Log.e(TAG, "initCamera: Depth sensor not found!");
        }
    }

}