+ * Example: + *
+ * final Executor executor = new ThreadPoolExecutor(); + * + * public void doSomething() { + * executor.execute(this::doSomethingOnExecutor); + * } + * + * {@literal @}ExecutedBy("executor") + * void doSomethingOnExecutor() { + * // Do something while being executed by the executor + * }+ * + *
+ * This can be used to denote that it is not safe to call this method when not executed by a
+ * specific {@link Executor}, if, for instance, the Executor provides certain guarantees of which
+ * thread the code will run on or guarantees of sequential (non-concurrent) execution.
+ *
+ * @hide
+ */
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.SOURCE)
+public @interface ExecutedBy {
+ String value();
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/camera/Camera2CameraImpl.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/camera/Camera2CameraImpl.java
new file mode 100644
index 000000000..898fd6c89
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/camera/Camera2CameraImpl.java
@@ -0,0 +1,835 @@
+package com.quark.quamera.camera.camera;
+
+import android.annotation.SuppressLint;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.os.Build;
+import android.os.Handler;
+import android.os.SystemClock;
+import android.text.TextUtils;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.lifecycle.MutableLiveData;
+
+import com.quark.quamera.camera.anotaion.ExecutedBy;
+import com.quark.quamera.camera.concurrent.HandlerScheduledExecutorService;
+import com.quark.quamera.camera.sensor.DisplayOrientationDetector;
+import com.quark.quamera.camera.session.ImageCapture;
+import com.quark.quamera.camera.session.InnerImageCaptureCallback;
+import com.quark.quamera.camera.session.RepeatCaptureRequestConfig;
+import com.quark.quamera.camera.session.SessionConfig;
+import com.quark.quamera.camera.session.SingleCaptureConfig;
+import com.quark.quamera.camera.session.SyncCaptureSession;
+import com.quark.quamera.util.CameraLogger;
+import com.quark.quamera.util.CameraShould;
+import com.quark.quamera.util.Preconditions;
+
+import java.util.Collections;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Executor;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.ScheduledFuture;
+
+@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+public class Camera2CameraImpl {
+ public static final String TAG = "AndroidCameraApi";
+
+ private volatile InternalState mState = InternalState.INITIALIZED;
+
+ private final String mCameraId;
+
+ protected CameraDevice mCameraDevice;
+
+ private final Executor mExecutor;
+ private final Handler mHandler;
+
+ private CameraCharacteristics mCameraCharacteristics;
+
+ private final Camera2Info mCamera2Info;
+
+
+ private final MutableLiveData At this state the {@link CameraDevice} should be invalid, but threads should be still
+ * in a valid state. Whenever a camera device is fully closed the camera should return to
+ * this state.
+ *
+ * After an error occurs the camera returns to this state so that the device can be
+ * cleanly reopened.
+ */
+ INITIALIZED,
+ /**
+ * Camera is waiting for the camera to be available to open.
+ *
+ * A camera may enter a pending state if the camera has been stolen by another process
+ * or if the maximum number of available cameras is already open.
+ *
+ * At the end of this state, the camera should move into the OPENING state.
+ */
+ PENDING_OPEN,
+ /**
+ * A transitional state where the camera device is currently opening.
+ *
+ * At the end of this state, the camera should move into either the OPENED or CLOSING
+ * state.
+ */
+ OPENING,
+ /**
+ * A stable state where the camera has been opened.
+ *
+ * During this state the camera device should be valid. It is at this time a valid
+ * capture session can be active. Capture requests should be issued during this state only.
+ */
+ OPENED,
+ /**
+ * A transitional state where the camera device is currently closing.
+ *
+ * At the end of this state, the camera should move into the INITIALIZED state.
+ */
+ CLOSING,
+ /**
+ * A transitional state where the camera was previously closing, but not fully closed before
+ * a call to open was made.
+ *
+ * At the end of this state, the camera should move into one of two states. The OPENING
+ * state if the device becomes fully closed, since it must restart the process of opening a
+ * camera. The OPENED state if the device becomes opened, which can occur if a call to close
+ * had been done during the OPENING state.
+ */
+ REOPENING,
+ /**
+ * A transitional state where the camera will be closing permanently.
+ *
+ * At the end of this state, the camera should move into the RELEASED state.
+ */
+ @Deprecated
+ RELEASING,
+ /**
+ * A stable state where the camera has been permanently closed.
+ *
+ * During this state all resources should be released and all operations on the camera
+ * will do nothing.
+ */
+ @Deprecated
+ RELEASED
+ }
+
+
+ private final CameraManager mCameraManager;
+ private final CameraAbilityCallback mCameraAbilityCallback;
+
+ private SyncCaptureSession mCaptureSession;
+
+ /**
+  * Creates a camera wrapper bound to a single camera id.
+  *
+  * @param cameraManager system camera service used to open/observe the device
+  * @param cameraId      id of the camera this instance controls
+  * @param executor      executor that all camera state transitions run on
+  * @param handler       handler backing the executback; also receives framework callbacks
+  */
+ public Camera2CameraImpl(CameraManager cameraManager, String cameraId, Executor executor, Handler handler) {
+ mExecutor = executor;
+ mHandler = handler;
+ mCameraManager = cameraManager;
+ mCameraId = cameraId;
+ mCameraAbilityCallback = new CameraAbilityCallback();
+ // Register early so a PENDING_OPEN camera is retried as soon as the device
+ // becomes available (see CameraAbilityCallback.onCameraAvailable).
+ mCameraManager.registerAvailabilityCallback(mCameraAbilityCallback, handler);
+ mCamera2Info = new Camera2Info(cameraId);
+ mStateCallback = new CameraStateCallback(new HandlerScheduledExecutorService(mHandler));
+ mCameraPublishStateLD = new MutableLiveData<>();
+ mControl = new Camera2Control(this);
+ mErrorListener = new ComboCameraErrorListener(null);
+
+ }
+
+
+ /**
+  * Tracks availability of this camera id as reported by {@link CameraManager},
+  * and retries a deferred open as soon as the device becomes available.
+  */
+ private class CameraAbilityCallback extends CameraManager.AvailabilityCallback {
+
+ private boolean mIsAvailable = true;
+
+ @Override
+ public void onCameraAvailable(@NonNull String cameraId) {
+ if (!TextUtils.equals(cameraId, mCameraId)) {
+ return;
+ }
+
+ CameraLogger.i(TAG, "Camera2.onCameraAvailable %s", cameraId);
+
+ mIsAvailable = true;
+
+ // If an open was deferred because the device was busy, try again now.
+ if (mState == InternalState.PENDING_OPEN) {
+ openCameraDevices(false);
+ }
+ }
+
+ public boolean isAvailable() {
+ return mIsAvailable;
+ }
+
+ @Override
+ public void onCameraUnavailable(@NonNull String cameraId) {
+ if (!TextUtils.equals(cameraId, mCameraId)) {
+ return;
+ }
+ CameraLogger.e(TAG, "Camera2.onCameraUnavailable %s", cameraId);
+
+
+ // "We take the highest priority anyway" — availability is recorded here but
+ // openCameraDevices deliberately ignores it (see the disabled check there).
+ mIsAvailable = false;
+
+ if (mState == InternalState.PENDING_OPEN) {
+ //TODO determine whether a camera close needs to be triggered here
+ //TODO add metrics for how often the camera is unavailable at startup
+ }
+ }
+ }
+
+ /**
+  * Replaces the active session configuration; runs on the camera executor.
+  * Also rebuilds the error listener from the new config's camera error listener.
+  */
+ public void updateSessionConfig(SessionConfig sessionConfig) {
+ mExecutor.execute(() -> {
+ mSessionConfig = sessionConfig;
+ mErrorListener = new ComboCameraErrorListener(Collections.singletonList(mSessionConfig.getCameraErrorListener()));
+ });
+
+ }
+
+ public static final int ERROR_NONE = 0;
+
+ /**
+  * Receives CameraDevice lifecycle callbacks and drives the InternalState
+  * machine, including scheduling delayed reopen attempts after errors.
+  */
+ private class CameraStateCallback extends CameraDevice.StateCallback {
+
+
+ // Pending reopen task and its cancellation handle; both are non-null only
+ // while a delayed reopen is scheduled.
+ private ScheduledReopen mScheduledReopenRunnable;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ ScheduledFuture> mScheduledReopenHandle;
+
+ private final HandlerScheduledExecutorService mScheduler;
+
+
+ public CameraStateCallback(HandlerScheduledExecutorService scheduler) {
+ mScheduler = scheduler;
+ }
+
+ /**
+  * Device opened: start the capture session, or close immediately if a
+  * close/release was requested while the open was in flight.
+  */
+ @Override
+ public void onOpened(@NonNull CameraDevice camera) {
+ CameraLogger.i(TAG, "CameraDevice.onOpen when %s ", mState);
+ //This is called when the camera is open
+ mCameraDeviceError = ERROR_NONE;
+ mCameraDevice = camera;
+ switch (mState) {
+ case OPENING:
+ case REOPENING:
+ setState(InternalState.OPENED);
+ mCaptureSession = new SyncCaptureSession(Camera2CameraImpl.this, mHandler);
+ mCaptureSession.open(camera, mSessionConfig);
+ break;
+ case RELEASING:
+ case CLOSING:
+ // Close was requested while opening; onClosed() will finish the job.
+ mCameraDevice.close();
+ mCameraDevice = null;
+ break;
+ default:
+ throw new IllegalStateException(
+ "onOpened() should not be possible from state: " + mState);
+ }
+
+ }
+
+ @Override
+ public void onDisconnected(@NonNull CameraDevice camera) {
+ CameraLogger.e(TAG, "CameraDevice.onDisconnected() " + mState);
+
+ // Can be treated the same as camera in use because in both situations the
+ // CameraDevice needs to be closed before it can be safely reopened and used.
+ onError(camera, CameraDevice.StateCallback.ERROR_CAMERA_IN_USE);
+ }
+
+ /**
+  * Device error: record/handle it internally, then notify the registered
+  * error listeners with a human-readable message.
+  */
+ @Override
+ public void onError(@NonNull CameraDevice camera, int error) {
+ CameraLogger.e(TAG, "CameraDevice.onError( %s ) when %s ", CameraLogger.getCameraErrorMessage(error), mState);
+
+ onErrorInternal(camera, error);
+
+ mErrorListener.onError(error, CameraLogger.getCameraErrorMessage(error));
+
+ }
+
+
+ /**
+  * Device fully closed: either finish the close, or — when a reopen was
+  * requested — open again (immediately, or on a delay after an error).
+  */
+ @Override
+ public void onClosed(@NonNull CameraDevice camera) {
+ CameraLogger.i(TAG, "CameraDevice.onClosed");
+ switch (mState) {
+ case CLOSING:
+ case RELEASING:
+ finishClose();
+ break;
+ case REOPENING:
+ if (mCameraDeviceError == ERROR_NONE) {
+ openCameraDevices(false);
+ } else {
+ CameraLogger.e("Camera closed due to error: %s", CameraLogger.getCameraErrorMessage(mCameraDeviceError));
+ scheduleCameraReopen();
+ }
+ break;
+ }
+ }
+
+ /**
+  * Resets the camera reopen attempts monitor. This should be called when the camera open is
+  * not triggered by a scheduled camera reopen, but rather by an explicit request.
+  */
+ @ExecutedBy("mExecutor")
+ void resetReopenMonitor() {
+ mCameraReopenMonitor.reset();
+ }
+
+
+ // Delay long enough to guarantee the app could have been backgrounded.
+ // See ProcessLifecycleProvider for where this delay comes from.
+ static final int REOPEN_DELAY_MS = 700;
+
+ /**
+  * Schedules a delayed reopen attempt if the reopen monitor still allows it.
+  *
+  * @return true if a reopen was scheduled; false if the retry budget is
+  *         exhausted (state falls back to INITIALIZED and listeners are told).
+  */
+ @ExecutedBy("mExecutor")
+ boolean scheduleCameraReopen() {
+ Preconditions.checkState(mScheduledReopenRunnable == null);
+ Preconditions.checkState(mScheduledReopenHandle == null);
+
+ if (mCameraReopenMonitor.canScheduleCameraReopen()) {
+ mScheduledReopenRunnable = new ScheduledReopen(mExecutor);
+ CameraLogger.i(TAG, "Attempting camera re-open in %dms: %s", REOPEN_DELAY_MS, mScheduledReopenRunnable);
+ mScheduledReopenHandle = mScheduler.schedule(mScheduledReopenRunnable, REOPEN_DELAY_MS);
+ return true;
+ } else {
+ CameraLogger.e(TAG,
+ "Camera reopening attempted for "
+ + CameraReopenMonitor.REOPEN_LIMIT_MS
+ + "ms without success.");
+ setState(InternalState.INITIALIZED);
+
+
+ mErrorListener.onError(CameraLogger.ERROR_TRY_REOPEN_ERROR,
+ CameraLogger.getCameraErrorMessage(CameraLogger.ERROR_TRY_REOPEN_ERROR) + ":" + CameraLogger.getCameraErrorMessage(mCameraDeviceError));
+ }
+ return false;
+ }
+
+ /**
+  * Attempts to cancel reopen.
+  *
+  * If successful, it is safe to finish closing the camera via {@link #finishClose()} as
+  * a reopen will only be scheduled after {@link #onClosed(CameraDevice)} has been called.
+  *
+  * @return true if reopen was cancelled. False if no re-open was scheduled.
+  */
+ @ExecutedBy("mExecutor")
+ boolean cancelScheduledReopen() {
+ boolean cancelled = false;
+ if (mScheduledReopenHandle != null) {
+ // A reopen has been scheduled
+ CameraLogger.i(TAG, "Cancelling scheduled re-open: " + mScheduledReopenRunnable);
+
+ // Ensure the runnable doesn't try to open the camera if it has already
+ // been pushed to the executor.
+ mScheduledReopenRunnable.cancel();
+ mScheduledReopenRunnable = null;
+
+ // Un-schedule the runnable in case if hasn't run.
+ mScheduledReopenHandle.cancel(/*mayInterruptIfRunning=*/false);
+ mScheduledReopenHandle = null;
+
+ cancelled = true;
+ }
+
+ return cancelled;
+ }
+
+
+ }
+
+
+ // Should only be called once the camera device is actually closed.
+ @ExecutedBy("mExecutor")
+ private void finishClose() {
+ Preconditions.checkState(mState == InternalState.RELEASING || mState == InternalState.CLOSING);
+ mCameraDevice = null;
+
+ if (mState == InternalState.CLOSING) {
+ // Normal close: tear down the capture session and drop the session config,
+ // then return to INITIALIZED so the camera can be reopened later.
+ if (mCaptureSession != null) {
+ mCaptureSession.forceRelease();
+ mCaptureSession = null;
+ }
+ mSessionConfig = null;
+ setState(InternalState.INITIALIZED);
+ } else {
+ // After a camera is released, it cannot be reopened, so we don't need to listen for
+ // available camera changes.
+ mCameraManager.unregisterAvailabilityCallback(mCameraAbilityCallback);
+
+ setState(InternalState.RELEASED);
+ }
+ }
+
+
+ /**
+  * Requests the camera to open; the actual work runs on {@code mExecutor}.
+  */
+ public void open() {
+ mExecutor.execute(this::openInternal);
+ }
+
+
+ /**
+  * Opens the camera according to the current state; must run on the camera
+  * executor thread.
+  */
+ @ExecutedBy("mExecutor")
+ private void openInternal() {
+ Preconditions.cameraThreadCheck();
+
+
+ // NOTE(review): states other than INITIALIZED/CLOSING are silently ignored
+ // (e.g. open() while already OPENING/OPENED is a no-op) — confirm intended.
+ switch (mState) {
+ case INITIALIZED:
+ openCameraDevices(false);
+ break;
+ case CLOSING:
+ // A close is in flight; mark REOPENING so CameraStateCallback.onClosed
+ // reopens the device once the close completes.
+ setState(InternalState.REOPENING);
+ break;
+ }
+ }
+
+ /**
+  * Actually opens the CameraDevice via CameraManager, handling the various
+  * open-time failure modes.
+  *
+  * @param fromScheduledCameraReopen true when invoked by a scheduled reopen
+  *                                  attempt; false resets the reopen monitor.
+  */
+ @SuppressLint("MissingPermission")
+ private void openCameraDevices(boolean fromScheduledCameraReopen) {
+ Preconditions.cameraThreadCheck();
+
+ if (!fromScheduledCameraReopen) {
+ mStateCallback.resetReopenMonitor();
+ }
+ mStateCallback.cancelScheduledReopen();
+
+
+ /**
+ * As of API level 23, devices for which the AvailabilityCallback#onCameraUnavailable(String) callback has been called due to the device being in use by a lower-priority, background camera API client can still potentially be opened by calling this method when the calling camera API client has a higher priority than the current camera API client using this device.
+ * In general, if the top, foreground activity is running within your application process, your process will be given the highest priority when accessing the camera, and this method will succeed even if the camera device is in use by another camera API client.
+ * Any lower-priority application that loses control of the camera in this way will receive an CameraDevice.StateCallback.onDisconnected(CameraDevice) callback.
+ * Opening the same camera ID twice in the same application will similarly cause the CameraDevice.StateCallback.onDisconnected(CameraDevice) callback being fired for the CameraDevice from the first open call and all ongoing tasks being dropped.
+ */
+ //By Design : false && mCameraAbilityCallback.isAvailable()
+ // NOTE(review): the availability check below is deliberately disabled with
+ // `false &&` so the open is always attempted (relying on the priority rules
+ // documented above) — the PENDING_OPEN branch is currently dead code.
+ if (false && !mCameraAbilityCallback.isAvailable()) {
+ setState(InternalState.PENDING_OPEN);
+ CameraLogger.e(TAG, "camera (%s) is inability", mCameraId);
+ return;
+ }
+
+ setState(InternalState.OPENING);
+
+ try {
+ mCameraManager.openCamera(mCameraId, mStateCallback, mHandler);
+ } catch (CameraAccessException e) {
+ // Camera2 will call the onError() callback with the specific error code that
+ // caused this failure. No need to do anything here.
+ CameraLogger.e(TAG, "open camera error (CameraAccessException) %s ", e.getMessage());
+ } catch (IllegalArgumentException e) {
+ CameraLogger.e(TAG, "open camera error %s ", e.getMessage());
+ String errorMessage = e.getMessage();
+ /**
+ * In the process of a error restart, the camera hardware did not recover so quickly
+ * On Mi8 Android 11 ,devices will throws IllegalArgumentException(SupportCameraApi) when check
+ * SupportCameraApi error rather than callback onError. This causes us to have only one
+ * restart opportunity to take
+ * effect.But this devices need 5s+ to recover.
+ *
+ * see https://cs.android.com/android/platform/superproject/+/master:frameworks/av/services/camera/libcameraservice/CameraService.cpp;drc=master;bpv=0;bpt=1;l=2324?hl=zh-cn
+ */
+ if (errorMessage != null
+ && errorMessage.contains("supportsCameraApi")
+ && errorMessage.contains("Unknown camera ID")
+ && mCameraDeviceError != ERROR_NONE) {
+ if (mStateCallback.scheduleCameraReopen()) {
+ setState(InternalState.REOPENING);
+ } else {
+ setState(InternalState.INITIALIZED);
+ }
+ } else {
+ //Camera has not been open
+ setState(InternalState.INITIALIZED);
+ }
+ } catch (Exception e) {
+ CameraLogger.e(TAG, "open camera error %s ", e.getMessage());
+ //Camera has not been open
+ setState(InternalState.INITIALIZED);
+ }
+ }
+
+
+ /**
+  * Updates the internal state, maps it to the public {@link CameraState}, and
+  * publishes it: synchronously to the immediate observer, and asynchronously
+  * (postValue) to the LiveData observers.
+  */
+ private void setState(InternalState state) {
+ CameraState publicState;
+ switch (state) {
+ case INITIALIZED:
+ publicState = CameraState.CLOSED;
+ break;
+ case PENDING_OPEN:
+ publicState = CameraState.PENDING_OPEN;
+ break;
+ case OPENING:
+ case REOPENING:
+ // Both transitional open states look the same to observers.
+ publicState = CameraState.OPENING;
+ break;
+ case OPENED:
+ publicState = CameraState.OPEN;
+ break;
+ case CLOSING:
+ publicState = CameraState.CLOSING;
+ break;
+ case RELEASING:
+ publicState = CameraState.RELEASING;
+ break;
+ case RELEASED:
+ publicState = CameraState.RELEASED;
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + state);
+
+ }
+ CameraLogger.i(TAG, "camera state change publish_state(%s -> %s) , inner_state(%s -> %s) ",
+ mCameraPublishState, publicState, mState, state);
+ mState = state;
+ mCameraPublishState = publicState;
+ mCameraPublishStateLD.postValue(mCameraPublishState);
+ mStateObserver.notifyStateChange(mCameraPublishState);
+ }
+
+ private final CameraStateObservable mStateObserver = new CameraStateObservable();
+
+ /**
+  * Camera state observable that is updated immediately, on the camera thread
+  * (synchronous notification rather than a main-thread post).
+  */
+ public CameraStateObservable getCameraStateImmediatelyObservable() {
+ return mStateObserver;
+ }
+
+ /**
+ * UI线程更新的相机状态更新
+ */
+ public MutableLiveData
+ * FOV.x = 2 * atan(SENSOR_INFO_PHYSICAL_SIZE.x / (2 * LENS_FOCAL_LENGTH))
+ * FOV.y = 2 * atan(SENSOR_INFO_PHYSICAL_SIZE.y / (2 * LENS_FOCAL_LENGTH))
+ *
+ *
+ * ignore sensor_pixel info and capture crop region first @(- 3 -)
+ */
+ /**
+  * Computes the field of view for every available focal length from the
+  * sensor's physical size (FOV = 2 * atan(sensorSize / (2 * focalLength)))
+  * and stores the sorted results in {@code mFocalLengthInfos}.
+  *
+  * Any failure (e.g. missing characteristics causing an NPE) is reported via
+  * CameraShould.fail and leaves the list cleared.
+  */
+ private void calculateFOV() {
+
+ mFocalLengthInfos.clear();
+
+ try {
+
+ SizeF size = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
+
+ float[] focalLens = mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
+
+ double width = size.getWidth();
+
+ double height = size.getHeight();
+
+
+ boolean firstOne = true;
+ for (float focalLength : focalLens) {
+ FocalLengthInfo lengthInfo = new FocalLengthInfo();
+ lengthInfo.horizontalAngle = (2 * Math.atan(width / (focalLength * 2)));
+ lengthInfo.verticalAngle = (2 * Math.atan(height / (focalLength * 2)));
+ lengthInfo.focalLength = focalLength;
+ // Usually the first reported focal length is the default one.
+ lengthInfo.isDefaultFocal = firstOne;
+ firstOne = false;
+ mFocalLengthInfos.add(lengthInfo);
+ }
+
+ Collections.sort(mFocalLengthInfos);
+
+
+ } catch (Exception e) {
+ CameraShould.fail("", e);
+ }
+
+ }
+
+
+ public List The camera will automatically transition to an {@link #OPENING} state once resources
+ * have become available. Resources are typically made available by other cameras closing.
+ */
+ PENDING_OPEN(/*holdsCameraSlot=*/false),
+ /**
+ * Camera is in the process of opening.
+ *
+ * This is a transient state.
+ */
+ OPENING(/*holdsCameraSlot=*/true),
+ /**
+ * Camera is open and producing (or ready to produce) image data.
+ */
+ OPEN(/*holdsCameraSlot=*/true),
+ /**
+ * Camera is in the process of closing.
+ *
+ * This is a transient state.
+ */
+ CLOSING(/*holdsCameraSlot=*/true),
+ /**
+ * Camera has been closed and should not be producing data.
+ */
+ CLOSED(/*holdsCameraSlot=*/false),
+ /**
+ * Camera is in the process of being released and cannot be reopened.
+ *
+ * This is a transient state. Note that this state holds a camera slot even though the
+ * implementation may not actually hold camera resources.
+ */
+ // TODO: Check if this needs to be split up into multiple RELEASING states to
+ // differentiate between when the camera slot is being held or not.
+ RELEASING(/*holdsCameraSlot=*/true),
+ /**
+ * Camera has been closed and has released all held resources.
+ */
+ RELEASED(/*holdsCameraSlot=*/false);
+
+ private final boolean mHoldsCameraSlot;
+
+ CameraState(boolean holdsCameraSlot) {
+ mHoldsCameraSlot = holdsCameraSlot;
+ }
+
+ /**
+ * Returns whether a camera in this state could be holding on to a camera slot.
+ *
+ * Holding on to a camera slot may preclude other cameras from being open. This is
+ * generally the case when the camera implementation is in the process of opening a
+ * camera, has already opened a camera, or is in the process of closing the camera.
+ */
+ boolean holdsCameraSlot() {
+ return mHoldsCameraSlot;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/camera/CameraStateObservable.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/camera/CameraStateObservable.java
new file mode 100644
index 000000000..6a35bbf6b
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/camera/CameraStateObservable.java
@@ -0,0 +1,64 @@
+package com.quark.quamera.camera.camera;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 2021/6/6
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import java.lang.ref.SoftReference;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+public class CameraStateObservable {
+
+ private final List Since {@link ScheduledExecutorService} implements {@link Executor}, this can also be used
+ * as a simple Executor.
+ */
+/**
+ * Executor that posts every submitted command to the Android main (UI) thread.
+ *
+ * Singleton; obtain via {@link #getInstance()}.
+ */
+public final class MainThreadExecutor implements Executor {
+ private static volatile MainThreadExecutor sInstance;
+
+ private final Handler mMainHandler;
+
+ private MainThreadExecutor() {
+ mMainHandler = new Handler(Looper.getMainLooper());
+ }
+
+ /**
+  * Returns the shared instance, creating it on first use.
+  *
+  * Fix: the original synchronized block assigned a new instance unconditionally,
+  * so two racing threads could each create (and observe) different instances.
+  * Proper double-checked locking re-checks {@code sInstance} under the lock,
+  * which is safe because the field is volatile.
+  */
+ public static MainThreadExecutor getInstance() {
+ if (sInstance == null) {
+ synchronized (MainThreadExecutor.class) {
+ if (sInstance == null) {
+ sInstance = new MainThreadExecutor();
+ }
+ }
+ }
+ return sInstance;
+ }
+
+ /**
+  * Posts {@code command} to the main thread's message queue.
+  */
+ @Override
+ public void execute(Runnable command) {
+ mMainHandler.post(command);
+ }
+
+}
\ No newline at end of file
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/BlockImageAnalyzer.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/BlockImageAnalyzer.java
new file mode 100644
index 000000000..0c25f9f05
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/BlockImageAnalyzer.java
@@ -0,0 +1,11 @@
+package com.quark.quamera.camera.imagereader;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 2021/4/22
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+// Placeholder: package-private class with no implementation yet.
+class BlockImageAnalyzer {
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/DeferrableImageReader.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/DeferrableImageReader.java
new file mode 100644
index 000000000..f70a5dc4c
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/DeferrableImageReader.java
@@ -0,0 +1,216 @@
+package com.quark.quamera.camera.imagereader;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-11-26
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.media.ImageReader;
+import android.os.Build;
+import android.os.Handler;
+
+import com.quark.quamera.camera.camera.CameraSurfaceHelper;
+import com.quark.quamera.camera.sensor.ImageRotationHelper;
+import com.quark.quamera.util.Preconditions;
+
+import java.util.concurrent.Executor;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+/**
+ * ImageReader with deferred initialization.
+ *
+ * Solves the mismatch that an ImageReader's width/height must be sizes the
+ * camera actually supports, while client code only knows the size it expects:
+ * the expected size is recorded up front and the real ImageReader is created
+ * later, once a camera-supported size has been chosen (see
+ * {@link #createAndroidImageReader}).
+ */
+@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+public class DeferrableImageReader {
+
+ // Size requested by the client (may not be camera-supported).
+ private int mExpectWidth;
+
+ private int mExpectHeight;
+
+ // Size actually used once the ImageReader is created.
+ private int mActualWidth;
+
+ private int mActualHeight;
+
+ private final int mMaxImage;
+
+ private int mFormat;
+
+ // Handler the ImageReader delivers onImageAvailable callbacks on.
+ private Handler mSubscriptHandler;
+
+ private ImageReader mImageReader;
+
+ private boolean mNonBlock = false;
+
+ private NoBlockImageAnalyzer mNoBlockImageAnalyzer;
+
+ private ImageRotationHelper mImageRotationHelper;
+
+ // Strategy for picking the camera-supported size closest to the expected one.
+ private CameraSurfaceHelper.ISuggestionCalculation mSizeCalculation = CameraSurfaceHelper.AreaClosestSizeCalculation.getInstance();
+
+
+ /** Builder for {@link DeferrableImageReader}; expect size, format and
+  *  handler-executor are mandatory (checked in {@link #build()}). */
+ public static class Builder {
+
+ private int mExpectWidth;
+
+ private int mExpectHeight;
+
+ private Handler mSubscriptHandler;
+
+ private int mFormat;
+
+ private ImageAnalyzer mImageAnalyzer;
+
+ private Executor mHandlerExecutor;
+
+
+ private CameraSurfaceHelper.ISuggestionCalculation mSizeCalculation = CameraSurfaceHelper.AreaClosestSizeCalculation.getInstance();
+
+ public Builder setSizeCalculation(@NonNull CameraSurfaceHelper.ISuggestionCalculation sizeCalculation) {
+ mSizeCalculation = sizeCalculation;
+ return this;
+ }
+
+ public Builder setExpectWidth(int expectWidth) {
+ mExpectWidth = expectWidth;
+ return this;
+ }
+
+ public Builder setExpectHeight(int expectHeight) {
+ mExpectHeight = expectHeight;
+ return this;
+ }
+
+
+ public Builder setSubscriptHandler(Handler subscriptHandler) {
+ mSubscriptHandler = subscriptHandler;
+ return this;
+
+ }
+
+ public Builder setFormat(int format) {
+ mFormat = format;
+ return this;
+ }
+
+ public Builder setImageAnalyzer(ImageAnalyzer imageAnalyzer) {
+ mImageAnalyzer = imageAnalyzer;
+ return this;
+ }
+
+
+ public Builder setHandlerExecutor(Executor handlerExecutor) {
+ mHandlerExecutor = handlerExecutor;
+ return this;
+ }
+
+ /**
+  * Validates mandatory fields and creates the reader.
+  *
+  * @throws IllegalStateException if expect size, executor or format is unset
+  */
+ public DeferrableImageReader build() {
+ Preconditions.checkState(mExpectWidth != 0);
+ Preconditions.checkState(mExpectHeight != 0);
+ Preconditions.checkState(mHandlerExecutor != null);
+ Preconditions.checkState(mFormat != 0);
+ return new DeferrableImageReader(
+ mExpectWidth, mExpectHeight, mFormat,
+ new NoBlockImageAnalyzer(mHandlerExecutor).setImageAnalyzer(mImageAnalyzer),
+ /**
+  * With the current {@link NoBlockImageAnalyzer} scheme maxImages must be 3:
+  * 1. one image being acquired/received
+  * 2. one cached as the latest pending frame
+  * 3. one currently being consumed
+  */
+ 3,
+ mSubscriptHandler,
+ mSizeCalculation
+ );
+
+ }
+ }
+
+ private DeferrableImageReader(int expectWidth,
+ int expectHeight,
+ int format,
+ NoBlockImageAnalyzer analyzer,
+ int maxImage,
+ Handler subscriptHandler,
+ CameraSurfaceHelper.ISuggestionCalculation calculation
+ ) {
+ mExpectWidth = expectWidth;
+ mExpectHeight = expectHeight;
+ mFormat = format;
+ mMaxImage = maxImage;
+ mSubscriptHandler = subscriptHandler;
+ mNoBlockImageAnalyzer = analyzer;
+ mSizeCalculation = calculation;
+ }
+
+ public NoBlockImageAnalyzer getNoBlockImageAnalyzer() {
+ return mNoBlockImageAnalyzer;
+ }
+
+ /** Stops the analyzer pipeline and drops any buffered images. */
+ public void closePipe() {
+ mNoBlockImageAnalyzer.close();
+ }
+
+ /** Re-enables the analyzer pipeline after {@link #closePipe()}. */
+ public void openPipe() {
+ mNoBlockImageAnalyzer.open();
+ }
+
+
+ public int getFormat() {
+ return mFormat;
+ }
+
+ /**
+  * Creates the real ImageReader once a camera-supported size is known, and
+  * wires the analyzer to receive its frames on {@code mSubscriptHandler}.
+  */
+ public void createAndroidImageReader(int width, int height, ImageRotationHelper imageRotationHelper) {
+ mActualWidth = width;
+ mActualHeight = height;
+ mImageReader = ImageReader.newInstance(width, height, mFormat, mMaxImage);
+ mImageRotationHelper = imageRotationHelper;
+ mNoBlockImageAnalyzer.setImageRotationHelper(imageRotationHelper);
+ mImageReader.setOnImageAvailableListener(mNoBlockImageAnalyzer, mSubscriptHandler);
+ }
+
+
+ public ImageRotationHelper getImageRotationHelper() {
+ return mImageRotationHelper;
+ }
+
+ /** Returns the underlying ImageReader, or null before createAndroidImageReader. */
+ public ImageReader unWrapper() {
+ return mImageReader;
+ }
+
+ public int getExpectWidth() {
+ return mExpectWidth;
+ }
+
+ public int getExpectHeight() {
+ return mExpectHeight;
+ }
+
+ public int getActualWidth() {
+ return mActualWidth;
+ }
+
+ public int getActualHeight() {
+ return mActualHeight;
+ }
+
+ /**
+  * Closes the underlying ImageReader, best-effort.
+  */
+ public void safeClose() {
+ //TODO safeClose is not yet effective; it should only be invoked after all
+ //     Images have finished processing.
+ if (mImageReader != null) {
+ try {
+ mImageReader.close();
+ } catch (Exception e) {
+ // Best-effort close: failures are intentionally ignored.
+ }
+ mImageReader = null;
+ }
+ }
+
+ @NonNull
+ public CameraSurfaceHelper.ISuggestionCalculation getSizeCalculation() {
+ return mSizeCalculation;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/ImageAnalyzer.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/ImageAnalyzer.java
new file mode 100644
index 000000000..e1d87cb58
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/ImageAnalyzer.java
@@ -0,0 +1,14 @@
+package com.quark.quamera.camera.imagereader;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-12-2
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.media.Image;
+
+/**
+ * Consumer of frames delivered by an {@link android.media.ImageReader}.
+ *
+ * Marked {@code @FunctionalInterface} (single abstract method) so it can be
+ * implemented with a lambda; the redundant {@code public} modifier on the
+ * method was dropped (interface members are implicitly public).
+ */
+@FunctionalInterface
+public interface ImageAnalyzer {
+ /**
+  * Analyzes one camera frame. The caller owns the Image and closes it after
+  * this method returns (see NoBlockImageAnalyzer).
+  *
+  * @param image                the frame to analyze
+  * @param cameraSensorRotation camera sensor orientation (presumably degrees — confirm)
+  * @param imageRotation        rotation to apply to the image (presumably degrees — confirm)
+  */
+ void analyze(Image image, int cameraSensorRotation, int imageRotation);
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/NoBlockImageAnalyzer.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/NoBlockImageAnalyzer.java
new file mode 100644
index 000000000..2eae5657c
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/imagereader/NoBlockImageAnalyzer.java
@@ -0,0 +1,167 @@
+package com.quark.quamera.camera.imagereader;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-12-2
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.media.Image;
+import android.media.ImageReader;
+import android.os.Build;
+
+import androidx.annotation.GuardedBy;
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+import com.quark.quamera.camera.sensor.ImageRotationHelper;
+import com.quark.quamera.util.Preconditions;
+
+import java.util.concurrent.Executor;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Non-blocking ImageReader listener: frames are acquired on the reader's
+ * handler thread, but analyzed on {@code mExecutor}. While a frame is being
+ * analyzed, at most one newer frame is cached (older cached frames are
+ * closed), so the producer never blocks behind a slow analyzer.
+ */
+@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+public class NoBlockImageAnalyzer implements ImageReader.OnImageAvailableListener {
+
+ private Executor mExecutor;
+
+ private ImageAnalyzer mImageAnalyzer;
+
+ private ImageRotationHelper mImageRotationHelper;
+
+ public NoBlockImageAnalyzer(@NonNull Executor executor) {
+ Preconditions.checkState(executor != null);
+ mExecutor = executor;
+ }
+
+
+ public void setImageRotationHelper(@NonNull ImageRotationHelper imageRotationHelper) {
+ mImageRotationHelper = imageRotationHelper;
+ }
+
+ // IDEAL: no consumer task active; WORKING: a consumer task owns the pipeline.
+ public enum State {
+ IDEAL,
+ WORKING,
+ }
+
+ public NoBlockImageAnalyzer setImageAnalyzer(ImageAnalyzer imageAnalyzer) {
+ mImageAnalyzer = imageAnalyzer;
+ return this;
+ }
+
+ @GuardedBy("mStateLock")
+ State mState = State.IDEAL;
+
+ final Object mStateLock = new Object();
+
+
+ @GuardedBy("mStateLock")
+ //producer data
+ private Image mCacheImage;
+
+ @GuardedBy("mStateLock")
+ //consumer data
+ private Image mWaitingProcessImg;
+
+ /**
+  * Acquires the latest frame and hands it to the analyzer executor.
+  *
+  * @return true if a frame was acquired and queued/cached; false if there was
+  *         no frame, no reader, or the pipeline is closed.
+  */
+ public boolean analyze(ImageReader reader) {
+ if (reader == null) {
+ return false;
+ }
+
+ //TEST
+ //TEST_RESULT: acquireLatestImage/close are cheap even on low-end devices
+ //             (acquire ~1ms, close ~7ms)
+ Image image = reader.acquireLatestImage();
+
+ if (image == null) {
+ return false;
+ }
+
+ if (mIsClose.get()) {
+ image.close();
+ return false;
+ }
+
+ // Producer side (refactor into a Producer role if this grows more complex)
+ synchronized (mStateLock) {
+ if (mState == State.IDEAL) {
+ mState = State.WORKING;
+ mWaitingProcessImg = image;
+ } else {
+ // Already working: keep only the newest frame as cache.
+ if (mCacheImage != null) {
+ mCacheImage.close();
+ }
+ mCacheImage = image;
+ }
+ }
+
+ // A waiting image may be stuck behind other queued messages and never run,
+ // causing the next frame to be cached too; close() exists to drop images in
+ // such abnormal cases.
+ // NOTE(review): this task is posted even when mState was already WORKING;
+ // with a serial executor the extra task is a harmless no-op, but with a
+ // multi-threaded executor it could reset mState to IDEAL while the first
+ // consumer is still running — confirm mExecutor is serial.
+ mExecutor.execute(() -> {
+
+
+ Image nextImg;
+
+ // Consumer side (refactor into a Consumer role if this grows more complex)
+ synchronized (mStateLock) {
+ nextImg = mWaitingProcessImg;
+ mWaitingProcessImg = null;
+ }
+
+ if (mIsClose.get()) {
+ return;
+ }
+
+ while (nextImg != null) {
+ try {
+ if (mImageAnalyzer != null) {
+ mImageAnalyzer.analyze(
+ nextImg,
+ mImageRotationHelper != null ? mImageRotationHelper.getCameraSensorOrientation() : 0,
+ mImageRotationHelper != null ? mImageRotationHelper.getImageRotation() : 0
+ );
+ }
+ } finally {
+ // The consumer always closes the frame, even if analyze() throws.
+ nextImg.close();
+ }
+ synchronized (mStateLock) {
+ //get next cache
+ nextImg = mCacheImage;
+ mCacheImage = null;
+ }
+ }
+
+ synchronized (mStateLock) {
+ mState = State.IDEAL;
+ }
+ });
+
+ return true;
+ }
+
+ private AtomicBoolean mIsClose = new AtomicBoolean(false);
+
+ /**
+  * Stops accepting frames and closes any buffered images.
+  */
+ public void close() {
+ mIsClose.set(true);
+ synchronized (mStateLock) {
+ if (mCacheImage != null) {
+ mCacheImage.close();
+ mCacheImage = null;
+ }
+ if (mWaitingProcessImg != null) {
+ mWaitingProcessImg.close();
+ mWaitingProcessImg = null;
+ }
+ }
+ }
+
+ /** Re-enables frame acceptance after {@link #close()}. */
+ public void open() {
+ mIsClose.set(false);
+ }
+
+
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ analyze(reader);
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/IPreviewSurfaceProvider.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/IPreviewSurfaceProvider.java
new file mode 100644
index 000000000..d5ee7a1ab
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/IPreviewSurfaceProvider.java
@@ -0,0 +1,41 @@
+package com.quark.quamera.camera.preview;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-12-18
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.view.Surface;
+
+import com.quark.quamera.camera.camera.Camera2CameraImpl;
+import com.quark.quamera.camera.session.CameraCaptureComboCallback;
+
+import androidx.annotation.NonNull;
+import androidx.camera.core.Preview;
+
+/**
+ * Supplies the output {@link Surface} for the camera preview stream.
+ *
+ * <p>All methods have no-op defaults so implementors only override the path
+ * (Camera2 or CameraX) they actually serve.
+ */
+public interface IPreviewSurfaceProvider {
+ /**
+ * Returns a Surface matching the requested size for the Camera2 path,
+ * or {@code null} if this provider cannot serve the request.
+ */
+ default Surface provide(@NonNull SurfaceRequest request) {
+ return null;
+ }
+
+ /** Called when the camera is done with the previously provided Surface. */
+ default void onUseComplete(Surface surface) {
+
+ }
+
+ /**
+ * Only used for CameraX; shared interface.
+ */
+ default Preview.SurfaceProvider providerSurface() {
+ return null;
+ }
+
+ /** Value object describing the preview Surface being requested. */
+ class SurfaceRequest {
+ public int width;
+ public int height;
+ public Camera2CameraImpl camera2Camera;
+ public CameraCaptureComboCallback repeatCaptureCallback;
+
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/IPreviewView.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/IPreviewView.java
new file mode 100644
index 000000000..466723b34
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/IPreviewView.java
@@ -0,0 +1,50 @@
+package com.quark.quamera.camera.preview;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 2021/7/12
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.graphics.RectF;
+import android.util.Rational;
+import android.util.Size;
+
+import androidx.camera.core.FocusMeteringResult;
+
+import com.google.common.util.concurrent.ListenableFuture;
+import com.quark.quamera.render.photo.SnapShotCommand;
+
+public interface IPreviewView {
+
+ void doTakePhotoAnimation();
+
+ int getViewRotation();
+
+ @ViewPort.ScaleType
+ int getScaleType();
+
+ @Deprecated
+ Rational getAspectRatio();
+
+ int getViewHeight();
+
+ int getViewWidth();
+
+ /**
+ * 相机画面显示的区域,这个区域可能不是塞满整一个屏幕的
+ * 夸克的布局方式比较复杂,无法仅仅通过 scaleType 知道相机渲染的位置
+ */
+ RectF getCameraShowRect();
+
+ void updateCameraSurfaceSize(Size size);
+
+ Size getCameraSurfaceSize();
+
+ IPreviewSurfaceProvider getSurfaceProvider();
+
+ void snapshot(final SnapShotCommand snapShotCommand);
+
+ ListenableFuture
+ * This may cause the output to be cropped if the output aspect ratio does not match that of
+ * the {@link ViewPort}.
+ */
+ public static final int FILL_START = 0;
+
+ /**
+ * Generate a crop rect that once applied, it scales the output while maintaining its aspect
+ * ratio, so it fills the entire {@link ViewPort} and center it.
+ *
+ * This may cause the output to be cropped if the output aspect ratio does not match that of
+ * the {@link ViewPort}.
+ */
+ public static final int FILL_CENTER = 1;
+
+ /**
+ * Generate a crop rect that once applied, it scales the output while maintaining its aspect
+ * ratio, so it fills the entire {@link ViewPort}, and align it to the end of the
+ * {@link ViewPort}, which is the bottom right corner in a left-to-right (LTR) layout, or the
+ * bottom left corner in a right-to-left (RTL) layout.
+ *
+ * This may cause the output to be cropped if the output aspect ratio does not match that of
+ * the {@link ViewPort}.
+ */
+ public static final int FILL_END = 2;
+
+ /**
+ * Generate the max possible crop rect ignoring the aspect ratio. For {@link ImageAnalysis}
+ * and {@link ImageCapture}, the output will be an image defined by the crop rect.
+ *
+ * For {@link Preview}, further calculation is needed to fit the crop rect into the
+ * viewfinder. Code sample below is a simplified version assuming {@link Surface}
+ * orientation is the same as the camera sensor orientation, the viewfinder is a
+ * {@link SurfaceView} and the viewfinder's pixel width/height is the same as the size
+ * request by CameraX in {@link SurfaceRequest#getResolution()}. For more complicated
+ * scenarios, please check out the source code of PreviewView in androidx.camera.view artifact.
+ *
+ * First, calculate the transformation to fit the crop rect in the center of the viewfinder:
+ *
+ * Then apply the transformation to the viewfinder:
+ *
+ * This method calculates the crop rect for each use cases. It only thinks in abstract terms
+ * like the original dimension, output rotation and desired crop rect expressed via viewport.
+ * It does not care about the use case types or the device/display rotation.
+ *
+ * @param fullSensorRect The full size of the viewport.
+ * @param viewPortAspectRatio The aspect ratio of the viewport.
+ * @param outputRotationDegrees Clockwise rotation to correct the surfaces to display
+ * rotation.
+ * @param scaleType The scale type to calculate
+ * @param layoutDirection The direction of layout.
+ * @param surfaceOutResolution The resolutions of the UseCases
+ * @return The set of Viewports that should be set for each UseCase
+ */
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ @NonNull
+ public static Rect calculateViewPortRect(
+ @NonNull Rect fullSensorRect,
+ boolean isFrontCamera,
+ @NonNull Rational viewPortAspectRatio,
+ @IntRange(from = 0, to = 359) int outputRotationDegrees,
+ @ViewPort.ScaleType int scaleType,
+ @ViewPort.LayoutDirection int layoutDirection,
+ @NonNull Size surfaceOutResolution) {
+ // A zero-sized sensor rect would make the rect-to-rect mapping degenerate.
+ CameraShould.beTrue(
+ fullSensorRect.width() > 0 && fullSensorRect.height() > 0,
+ "Cannot compute viewport crop rects zero sized sensor rect.");
+
+ CameraLogger.i("ViewPorts", "calculateViewPortRect { \n\t\t fullSensorRect:%s \n\t\t isFrontCamera:%b " +
+ "\n\t\t viewPortAspectRatio:%s \n\t\t outputRotationDegrees:%d \n\t\t scaleType:%d \n\t\t " +
+ "surfaceOutResolution:%s \n}", fullSensorRect, isFrontCamera, viewPortAspectRatio,
+ outputRotationDegrees, scaleType, surfaceOutResolution);
+
+ // The key to calculate the crop rect is that all the crop rect should match to the same
+ // region on camera sensor. This method first calculates the shared camera region, and then
+ // maps it use cases to find out their crop rects.
+
+ // Calculate the mapping between sensor buffer and UseCases, and the sensor rect shared
+ // by all use cases.
+ RectF fullSensorRectF = new RectF(fullSensorRect);
+ RectF sensorIntersectionRect = new RectF(fullSensorRect);
+ // Calculate the transformation from UseCase to sensor.
+ // CENTER fit: the surface is letterboxed into the sensor rect without distortion.
+ Matrix useCaseToSensorTransformation = new Matrix();
+ RectF srcRect = new RectF(0, 0, surfaceOutResolution.getWidth(),
+ surfaceOutResolution.getHeight());
+ useCaseToSensorTransformation.setRectToRect(srcRect, fullSensorRectF,
+ Matrix.ScaleToFit.CENTER);
+// useCaseToSensorTransformations.put(entry.getKey(), useCaseToSensorTransformation);
+
+ // Calculate the UseCase intersection in sensor coordinates.
+ RectF useCaseSensorRect = new RectF();
+ useCaseToSensorTransformation.mapRect(useCaseSensorRect, srcRect);
+ sensorIntersectionRect.intersect(useCaseSensorRect);
+
+ // Crop the shared sensor rect based on viewport parameters.
+ // The aspect ratio must be rotated first so it is expressed in sensor orientation.
+ Rational rotatedViewPortAspectRatio = ImageUtils.getRotatedAspectRatio(
+ outputRotationDegrees, viewPortAspectRatio);
+ RectF viewPortRect = getScaledRect(
+ sensorIntersectionRect, rotatedViewPortAspectRatio, scaleType, isFrontCamera,
+ layoutDirection, outputRotationDegrees);
+
+ // Map the cropped shared sensor rect to UseCase coordinates.
+ RectF useCaseOutputRect = new RectF();
+ Matrix sensorToUseCaseTransformation = new Matrix();
+ // Transform the sensor crop rect to UseCase coordinates.
+ useCaseToSensorTransformation.invert(sensorToUseCaseTransformation);
+ sensorToUseCaseTransformation.mapRect(useCaseOutputRect, viewPortRect);
+ // Rounding here can shift the crop rect by up to half a pixel per edge.
+ Rect outputCropRect = new Rect();
+ useCaseOutputRect.round(outputCropRect);
+ return outputCropRect;
+ }
+
+ /**
+ * Returns the container rect that the given rect fills.
+ *
+ * For FILL types, returns the largest container rect that is smaller than the view port.
+ * The returned rectangle is also required to 1) have the view port's aspect ratio and 2) be
+ * in the surface coordinates.
+ *
+ * For FIT, returns the largest possible rect shared by all use cases.
+ */
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ @SuppressLint("SwitchIntDef")
+ @NonNull
+ public static RectF getScaledRect(
+ @NonNull RectF fittingRect,
+ @NonNull Rational containerAspectRatio,
+ @ViewPort.ScaleType int scaleType,
+ boolean isFrontCamera,
+ @ViewPort.LayoutDirection int layoutDirection,
+ @IntRange(from = 0, to = 359) int rotationDegrees) {
+ if (scaleType == ViewPort.FIT) {
+ // Return the fitting rect if the rect is fully covered by the container.
+ return fittingRect;
+ }
+ // Using Matrix' convenience methods fill the rect into the containing rect with given
+ // aspect ratio.
+ // NOTE: By using the Matrix#setRectToRect, we assume the "start" is always (0, 0) and
+ // the "end" is always (w, h), which is NOT always true depending on rotation, layout
+ // orientation and/or camera lens facing. We need to correct the rect based on rotation and
+ // layout direction.
+ Matrix viewPortToSurfaceTransformation = new Matrix();
+ // Only the ratio of this rect matters; numerator/denominator act as arbitrary w/h.
+ RectF viewPortRect = new RectF(0, 0, containerAspectRatio.getNumerator(),
+ containerAspectRatio.getDenominator());
+ switch (scaleType) {
+ case ViewPort.FILL_CENTER:
+ viewPortToSurfaceTransformation.setRectToRect(
+ viewPortRect, fittingRect, Matrix.ScaleToFit.CENTER);
+ break;
+ case ViewPort.FILL_START:
+ viewPortToSurfaceTransformation.setRectToRect(
+ viewPortRect, fittingRect, Matrix.ScaleToFit.START);
+ break;
+ case ViewPort.FILL_END:
+ viewPortToSurfaceTransformation.setRectToRect(
+ viewPortRect, fittingRect, Matrix.ScaleToFit.END);
+ break;
+ case ViewPort.CENTER_INSIDE:
+ //not crop
+ // FILL from fittingRect to itself is an identity mapping: keep the whole rect.
+ viewPortToSurfaceTransformation.setRectToRect(
+ fittingRect, fittingRect, Matrix.ScaleToFit.FILL);
+ break;
+ default:
+ throw new IllegalStateException("Unexpected scale type: " + scaleType);
+ }
+
+ RectF viewPortRectInSurfaceCoordinates = new RectF();
+ viewPortToSurfaceTransformation.mapRect(viewPortRectInSurfaceCoordinates, viewPortRect);
+
+ // Correct the crop rect based on rotation and layout direction.
+ return correctStartOrEnd(
+ shouldMirrorStartAndEnd(isFrontCamera, layoutDirection),
+ rotationDegrees,
+ fittingRect,
+ viewPortRectInSurfaceCoordinates);
+ }
+
+ /**
+ * Correct viewport based on rotation and layout direction.
+ *
+ * Both rotation and mirroring change the definition of the "start" and "end" in
+ * scale type. For rotation, since the value is clockwise rotation should be applied to the
+ * output buffer, the start/end point should be rotated counterclockwisely. If mirroring is
+ * needed, the start/end point should be mirrored based on the upright direction of the
+ * image.
+ */
+ // Only the four right-angle rotations (0/90/180/270) are valid inputs; any other
+ // value falls through every branch and throws IllegalArgumentException.
+ private static RectF correctStartOrEnd(boolean isMirrored,
+ @IntRange(from = 0, to = 359) int rotationDegrees,
+ RectF containerRect,
+ RectF cropRect) {
+ // For each scenario there is an illustration of the output buffer without correction.
+ // The arrow represents the opposite direction of gravity. The start/end point should
+ // rotate counterclockwisely based on rotationDegrees, and mirror along the line of the
+ // arrow if mirroring is needed.
+
+ //
+ // Start +-----+
+ // | ^ |
+ // +-----+ End
+ //
+ boolean ltrRotation0 = rotationDegrees == 0 && !isMirrored;
+ //
+ // Start +-----+ 90° +-----+ End Mirrored Start +-----+
+ // | ^ | ===> | < | ==> | < |
+ // +-----+ End Start +-----+ +-----+ End
+ //
+ boolean rtlRotation90 = rotationDegrees == 90 && isMirrored;
+ if (ltrRotation0 || rtlRotation90) {
+ return cropRect;
+ }
+
+ //
+ // Start +-----+ Mirrored +-----+ Start
+ // | ^ | ===> | ^ |
+ // +-----+ End End +-----+
+ //
+ boolean rtlRotation0 = rotationDegrees == 0 && isMirrored;
+ //
+ // Start +-----+ 270° +-----+ Start
+ // | ^ | ===> | > |
+ // +-----+ End End +-----+
+ //
+ boolean ltrRotation270 = rotationDegrees == 270 && !isMirrored;
+ if (rtlRotation0 || ltrRotation270) {
+ return flipHorizontally(cropRect, containerRect.centerX());
+ }
+
+ //
+ // Start +-----+ 90° +-----+ End
+ // | ^ | ===> | < |
+ // +-----+ End Start +-----+
+ //
+ boolean ltrRotation90 = rotationDegrees == 90 && !isMirrored;
+ //
+ // Start +-----+ 180° End +-----+ Mirrored +-----+ End
+ // | ^ | ===> | v | ==> | v |
+ // +-----+ End +-----+ Start Start +-----+
+ //
+ boolean rtlRotation180 = rotationDegrees == 180 && isMirrored;
+ if (ltrRotation90 || rtlRotation180) {
+ return flipVertically(cropRect, containerRect.centerY());
+ }
+
+ //
+ // Start +-----+ 180° End +-----+
+ // | ^ | ===> | v |
+ // +-----+ End +-----+ Start
+ //
+ boolean ltrRotation180 = rotationDegrees == 180 && !isMirrored;
+ //
+ // Start +-----+ 270° +-----+ Start Mirrored End +-----+
+ // | ^ | ===> | > | ==> | > |
+ // +-----+ End End +-----+ +-----+ Start
+ //
+ boolean rtlRotation270 = rotationDegrees == 270 && isMirrored;
+ if (ltrRotation180 || rtlRotation270) {
+ // 180° (or mirrored 270°) flips both axes.
+ return flipHorizontally(flipVertically(cropRect, containerRect.centerY()),
+ containerRect.centerX());
+ }
+
+ throw new IllegalArgumentException("Invalid argument: mirrored " + isMirrored + " "
+ + "rotation " + rotationDegrees);
+ }
+
+ /**
+ * Checks if the start/end direction in scale type should be mirrored.
+ *
+ * They should be mirrored if one and only one of the following is true: the front camera is
+ * used or layout direction is RTL.
+ */
+ private static boolean shouldMirrorStartAndEnd(boolean isFrontCamera,
+ @ViewPort.LayoutDirection int layoutDirection) {
+ // Precedence note: '==' binds tighter than '^', so this reads as
+ // isFrontCamera XOR (layoutDirection == RTL) — exactly one of the two
+ // conditions being true means start/end must be mirrored.
+ return isFrontCamera ^ layoutDirection == LayoutDirection.RTL;
+ }
+
+ /** Mirrors {@code original} across the vertical line x = flipLineX (left/right swap to stay well-formed). */
+ private static RectF flipHorizontally(RectF original, float flipLineX) {
+ return new RectF(
+ flipX(original.right, flipLineX),
+ original.top,
+ flipX(original.left, flipLineX),
+ original.bottom);
+ }
+
+ /** Mirrors {@code original} across the horizontal line y = flipLineY (top/bottom swap to stay well-formed). */
+ private static RectF flipVertically(RectF original, float flipLineY) {
+ return new RectF(
+ original.left,
+ flipY(original.bottom, flipLineY),
+ original.right,
+ flipY(original.top, flipLineY));
+ }
+
+ /** Reflects x across flipLineX: result = 2 * flipLineX - x. */
+ private static float flipX(float x, float flipLineX) {
+ return flipLineX + flipLineX - x;
+ }
+
+ /** Reflects y across flipLineY: result = 2 * flipLineY - y. */
+ private static float flipY(float y, float flipLineY) {
+ return flipLineY + flipLineY - y;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/sensor/CameraOrientationUtil.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/sensor/CameraOrientationUtil.java
new file mode 100644
index 000000000..718587a11
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/sensor/CameraOrientationUtil.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camera.sensor;
+
+import android.util.Log;
+import android.view.Surface;
+
+import androidx.annotation.RestrictTo;
+import androidx.annotation.RestrictTo.Scope;
+
+import com.quark.quamera.camera.session.CameraSelector;
+
+/**
+ * Contains utility methods related to camera orientation.
+ *
+ * @hide
+ */
+@RestrictTo(Scope.LIBRARY_GROUP)
+public final class CameraOrientationUtil {
+ private static final String TAG = "CameraOrientationUtil";
+ private static final boolean DEBUG = false;
+
+ // Do not allow instantiation
+ private CameraOrientationUtil() {
+ }
+
+ /**
+ * Calculates the delta between a source rotation and destination rotation.
+ *
+ * A typical use of this method would be calculating the angular difference between the
+ * display orientation (destRotationDegrees) and camera sensor orientation
+ * (sourceRotationDegrees).
+ *
+ * @param destRotationDegrees The destination rotation relative to the device's natural
+ * rotation.
+ * @param sourceRotationDegrees The source rotation relative to the device's natural rotation.
+ * @param isOppositeFacing Whether the source and destination planes are facing opposite
+ * directions.
+ */
+ public static int getRelativeImageRotation(
+ int destRotationDegrees, int sourceRotationDegrees, boolean isOppositeFacing) {
+ int result;
+ if (isOppositeFacing) {
+ // "+ 360" keeps the intermediate difference non-negative before the modulo.
+ result = (sourceRotationDegrees - destRotationDegrees + 360) % 360;
+ } else {
+ result = (sourceRotationDegrees + destRotationDegrees) % 360;
+ }
+ if (DEBUG) {
+ Log.d(
+ TAG,
+ String.format(
+ "getRelativeImageRotation: destRotationDegrees=%s, "
+ + "sourceRotationDegrees=%s, isOppositeFacing=%s, "
+ + "result=%s",
+ destRotationDegrees, sourceRotationDegrees, isOppositeFacing, result));
+ }
+ return result;
+ }
+
+ /**
+ * Converts rotation values enumerated in {@link Surface} to their equivalent in degrees.
+ *
+ * Valid values for the relative rotation are {@link Surface#ROTATION_0}, {@link
+ * Surface#ROTATION_90}, {@link Surface#ROTATION_180}, {@link Surface#ROTATION_270}.
+ *
+ * @param rotationEnum One of the enumerated rotation values from {@link Surface}.
+ * @return The equivalent rotation value in degrees.
+ * @throws IllegalArgumentException If the provided rotation enum is not one of those defined in
+ * {@link Surface}.
+ */
+ public static int surfaceRotationToDegrees(int rotationEnum) {
+ int rotationDegrees;
+ switch (rotationEnum) {
+ case Surface.ROTATION_0:
+ rotationDegrees = 0;
+ break;
+ case Surface.ROTATION_90:
+ rotationDegrees = 90;
+ break;
+ case Surface.ROTATION_180:
+ rotationDegrees = 180;
+ break;
+ case Surface.ROTATION_270:
+ rotationDegrees = 270;
+ break;
+ default:
+ throw new IllegalArgumentException("Unsupported surface rotation: " + rotationEnum);
+ }
+
+ return rotationDegrees;
+ }
+
+ /**
+ * Rotation of the image as visually perceived by the user.
+ * The display rotation is negated for the back camera because its sensor
+ * faces the opposite direction of the display.
+ */
+ public static int getCameraImageRotation(CameraSelector.CameraLenFacing lenFacing, int cameraSensorRotation, int displaySensorRotation) {
+ int rotation = lenFacing == CameraSelector.CameraLenFacing.LEN_FACING_BACK ? (360 - displaySensorRotation)
+ : displaySensorRotation;
+ return (cameraSensorRotation + rotation) % 360;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/sensor/DisplayOrientationDetector.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/sensor/DisplayOrientationDetector.java
new file mode 100644
index 000000000..60a54f7ac
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/sensor/DisplayOrientationDetector.java
@@ -0,0 +1,141 @@
+package com.quark.quamera.camera.sensor;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-12-14
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.content.Context;
+import android.view.OrientationEventListener;
+
+import com.quark.quamera.util.CameraInit;
+import com.quark.quamera.util.CameraLogger;
+import com.quark.quamera.util.CollectionUtil;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * TODO: 目前来看,OrientationEventListener的灵敏度不太够
+ *
+ * TODO: 后续基于磁场,重力传感器,实现灵敏的横竖屏感应
+ */
+public class DisplayOrientationDetector extends OrientationEventListener implements IOrientationDetector {
+
+
+ private final AtomicInteger mDeviceNatureRotation = new AtomicInteger();
+
+ private final List
+ * 0 :竖直屏幕的自然方向
+ * 90 :
+ * 180:
+ * 270:
+ */
+ // Current device display rotation in degrees (0/90/180/270), read from the
+ // orientation detector's atomic holder.
+ public int getDeviceRotation() {
+ return mDisplayOrientationDetector.getDeviceDisplayRotation().get();
+ }
+
+ /**
+ * The angle the raw camera texture must be rotated so the image appears
+ * upright in the user's natural orientation.
+ */
+ public int getImageRotation() {
+ return CameraOrientationUtil.getCameraImageRotation(
+ mLenFacing,
+ mCameraSensorOrientation,
+ mDisplayOrientationDetector.getDeviceDisplayRotation().get()
+ );
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/Camera2CameraCaptureResult.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/Camera2CameraCaptureResult.java
new file mode 100644
index 000000000..02f686b49
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/Camera2CameraCaptureResult.java
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camera.session;
+
+import android.hardware.camera2.CaptureResult;
+import android.os.Build;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+
+import com.quark.quamera.camera.session.CameraCaptureMetaData.AeState;
+import com.quark.quamera.camera.session.CameraCaptureMetaData.AfMode;
+import com.quark.quamera.camera.session.CameraCaptureMetaData.AfState;
+import com.quark.quamera.camera.session.CameraCaptureMetaData.AwbState;
+import com.quark.quamera.camera.session.CameraCaptureMetaData.FlashState;
+
+/**
+ * The camera2 implementation for the capture result of a single image capture.
+ */
+@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+final class Camera2CameraCaptureResult implements CameraCaptureResult {
+ private static final String TAG = "C2CameraCaptureResult";
+
+ // Opaque tag from the originating capture request; may be null.
+ private final Object mTag;
+
+ /**
+ * The actual camera2 {@link CaptureResult}.
+ */
+ private final CaptureResult mCaptureResult;
+
+ Camera2CameraCaptureResult(@Nullable Object tag, CaptureResult captureResult) {
+ mTag = tag;
+ mCaptureResult = captureResult;
+ }
+
+ /**
+ * Converts the camera2 {@link CaptureResult#CONTROL_AF_MODE} to {@link AfMode}.
+ *
+ * @return the {@link AfMode}.
+ */
+ @NonNull
+ @Override
+ public AfMode getAfMode() {
+ Integer mode = mCaptureResult.get(CaptureResult.CONTROL_AF_MODE);
+ if (mode == null) {
+ // Key absent in this result: report UNKNOWN rather than guessing.
+ return AfMode.UNKNOWN;
+ }
+ switch (mode) {
+ case CaptureResult.CONTROL_AF_MODE_OFF:
+ case CaptureResult.CONTROL_AF_MODE_EDOF:
+ return AfMode.OFF;
+ case CaptureResult.CONTROL_AF_MODE_AUTO:
+ case CaptureResult.CONTROL_AF_MODE_MACRO:
+ return AfMode.ON_MANUAL_AUTO;
+ case CaptureResult.CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+ case CaptureResult.CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+ return AfMode.ON_CONTINUOUS_AUTO;
+ default: // fall out
+ }
+ Log.e(TAG, "Undefined af mode: " + mode);
+ return AfMode.UNKNOWN;
+ }
+
+ /**
+ * Converts the camera2 {@link CaptureResult#CONTROL_AF_STATE} to {@link AfState}.
+ *
+ * @return the {@link AfState}.
+ */
+ @NonNull
+ @Override
+ public AfState getAfState() {
+ Integer state = mCaptureResult.get(CaptureResult.CONTROL_AF_STATE);
+ if (state == null) {
+ return AfState.UNKNOWN;
+ }
+ switch (state) {
+ case CaptureResult.CONTROL_AF_STATE_INACTIVE:
+ return AfState.INACTIVE;
+ case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
+ case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
+ case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
+ return AfState.SCANNING;
+ case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
+ return AfState.LOCKED_FOCUSED;
+ case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+ return AfState.LOCKED_NOT_FOCUSED;
+ case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
+ return AfState.FOCUSED;
+ default: // fall out
+ }
+ Log.e(TAG, "Undefined af state: " + state);
+ return AfState.UNKNOWN;
+ }
+
+ /**
+ * Converts the camera2 {@link CaptureResult#CONTROL_AE_STATE} to {@link AeState}.
+ *
+ * @return the {@link AeState}.
+ */
+ @NonNull
+ @Override
+ public AeState getAeState() {
+ Integer state = mCaptureResult.get(CaptureResult.CONTROL_AE_STATE);
+ if (state == null) {
+ return AeState.UNKNOWN;
+ }
+ switch (state) {
+ case CaptureResult.CONTROL_AE_STATE_INACTIVE:
+ return AeState.INACTIVE;
+ case CaptureResult.CONTROL_AE_STATE_SEARCHING:
+ case CaptureResult.CONTROL_AE_STATE_PRECAPTURE:
+ return AeState.SEARCHING;
+ case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED:
+ return AeState.FLASH_REQUIRED;
+ case CaptureResult.CONTROL_AE_STATE_CONVERGED:
+ return AeState.CONVERGED;
+ case CaptureResult.CONTROL_AE_STATE_LOCKED:
+ return AeState.LOCKED;
+ default: // fall out
+ }
+ Log.e(TAG, "Undefined ae state: " + state);
+ return AeState.UNKNOWN;
+ }
+
+ /**
+ * Converts the camera2 {@link CaptureResult#CONTROL_AWB_STATE} to {@link AwbState}.
+ *
+ * @return the {@link AwbState}.
+ */
+ @NonNull
+ @Override
+ public AwbState getAwbState() {
+ Integer state = mCaptureResult.get(CaptureResult.CONTROL_AWB_STATE);
+ if (state == null) {
+ return AwbState.UNKNOWN;
+ }
+ switch (state) {
+ case CaptureResult.CONTROL_AWB_STATE_INACTIVE:
+ return AwbState.INACTIVE;
+ case CaptureResult.CONTROL_AWB_STATE_SEARCHING:
+ return AwbState.METERING;
+ case CaptureResult.CONTROL_AWB_STATE_CONVERGED:
+ return AwbState.CONVERGED;
+ case CaptureResult.CONTROL_AWB_STATE_LOCKED:
+ return AwbState.LOCKED;
+ default: // fall out
+ }
+ Log.e(TAG, "Undefined awb state: " + state);
+ return AwbState.UNKNOWN;
+ }
+
+ /**
+ * Converts the camera2 {@link CaptureResult#FLASH_STATE} to {@link FlashState}.
+ *
+ * @return the {@link FlashState}.
+ */
+ @NonNull
+ @Override
+ public FlashState getFlashState() {
+ Integer state = mCaptureResult.get(CaptureResult.FLASH_STATE);
+ if (state == null) {
+ return FlashState.UNKNOWN;
+ }
+ switch (state) {
+ case CaptureResult.FLASH_STATE_UNAVAILABLE:
+ case CaptureResult.FLASH_STATE_CHARGING:
+ return FlashState.NONE;
+ case CaptureResult.FLASH_STATE_READY:
+ return FlashState.READY;
+ case CaptureResult.FLASH_STATE_FIRED:
+ case CaptureResult.FLASH_STATE_PARTIAL:
+ return FlashState.FIRED;
+ default: // fall out
+ }
+ Log.e(TAG, "Undefined flash state: " + state);
+ return FlashState.UNKNOWN;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public long getTimestamp() {
+ Long timestamp = mCaptureResult.get(CaptureResult.SENSOR_TIMESTAMP);
+ if (timestamp == null) {
+ // -1 is the documented "unavailable" sentinel of CameraCaptureResult#getTimestamp.
+ return -1L;
+ }
+
+ return timestamp;
+ }
+
+ @Override
+ public Object getTag() {
+ return mTag;
+ }
+
+
+ /** Exposes the raw camera2 result for callers needing keys not mapped above. */
+ public CaptureResult getCaptureResult() {
+ return mCaptureResult;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureCallback.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureCallback.java
new file mode 100644
index 000000000..ee388272e
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureCallback.java
@@ -0,0 +1,24 @@
+package com.quark.quamera.camera.session;
+
+import androidx.annotation.NonNull;
+
+/**
+ * Callback for the result of a single capture request. Both methods have
+ * empty default bodies so subclasses override only what they need; exactly
+ * one of the two is expected per request.
+ */
+public abstract class CameraCaptureCallback {
+
+ /**
+ * This method is called when an image capture has fully completed and all the result metadata
+ * is available.
+ *
+ * @param cameraCaptureResult The output metadata from the capture.
+ */
+ public void onCaptureCompleted(@NonNull CameraCaptureResult cameraCaptureResult) {
+ }
+
+ /**
+ * This method is called instead of {@link #onCaptureCompleted} when the camera device failed to
+ * produce a {@link CameraCaptureResult} for the request.
+ *
+ * @param failure The output failure from the capture, including the failure reason.
+ */
+ public void onCaptureFailed(@NonNull CameraCaptureFailure failure) {
+ }
+}
\ No newline at end of file
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureCallbackHandlerWrapper.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureCallbackHandlerWrapper.java
new file mode 100644
index 000000000..c3e89777d
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureCallbackHandlerWrapper.java
@@ -0,0 +1,33 @@
+package com.quark.quamera.camera.session;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-12-19
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.os.Handler;
+
+import androidx.annotation.NonNull;
+
+
+/**
+ * Decorator that marshals every {@link CameraCaptureCallback} invocation onto
+ * the supplied {@link Handler}'s thread via {@code Handler#post}, so the
+ * wrapped callback never runs on the camera's internal thread.
+ */
+public class CameraCaptureCallbackHandlerWrapper extends CameraCaptureCallback {
+ private Handler mHandler;
+ private CameraCaptureCallback mCallback;
+
+ public CameraCaptureCallbackHandlerWrapper(@NonNull Handler handler, @NonNull CameraCaptureCallback callback) {
+ mHandler = handler;
+ mCallback = callback;
+ }
+
+ @Override
+ public void onCaptureCompleted(@NonNull CameraCaptureResult cameraCaptureResult) {
+ mHandler.post(() -> mCallback.onCaptureCompleted(cameraCaptureResult));
+ }
+
+ @Override
+ public void onCaptureFailed(@NonNull CameraCaptureFailure failure) {
+ mHandler.post(() -> mCallback.onCaptureFailed(failure));
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureComboCallback.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureComboCallback.java
new file mode 100644
index 000000000..6dd2d1dd9
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureComboCallback.java
@@ -0,0 +1,39 @@
+package com.quark.quamera.camera.session;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 2021/5/26
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import androidx.annotation.NonNull;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class CameraCaptureComboCallback extends CameraCaptureCallback {
+ private final List In this mode, the lens does not move unless the auto focus trigger action is called.
+ */
+ ON_MANUAL_AUTO,
+
+ /**
+ * AF is continually scanning.
+ *
+ * In this mode, the AF algorithm modifies the lens position continually to attempt to
+ * provide a constantly-in-focus stream.
+ */
+ ON_CONTINUOUS_AUTO
+ }
+
+ /**
+ * Auto focus (AF) state. Values are mapped from camera2
+ * {@code CaptureResult.CONTROL_AF_STATE_*} by Camera2CameraCaptureResult.
+ */
+ public enum AfState {
+
+ /** AF state is currently unknown. */
+ UNKNOWN,
+
+ /** AF is off or not yet has been triggered. */
+ INACTIVE,
+
+ /** AF is performing an AF scan. */
+ SCANNING,
+
+ /** AF currently believes it is in focus. */
+ FOCUSED,
+
+ /** AF believes it is focused correctly and has locked focus. */
+ LOCKED_FOCUSED,
+
+ /** AF has failed to focus and has locked focus. */
+ LOCKED_NOT_FOCUSED
+ }
+
+ /**
+ * Auto exposure (AE) state. Values are mapped from camera2
+ * {@code CaptureResult.CONTROL_AE_STATE_*} by Camera2CameraCaptureResult.
+ */
+ public enum AeState {
+
+ /** AE state is currently unknown. */
+ UNKNOWN,
+
+ /** AE is off or has not yet been triggered. */
+ INACTIVE,
+
+ /** AE is performing an AE search. */
+ SEARCHING,
+
+ /**
+ * AE has a good set of control values, but flash needs to be fired for good quality still
+ * capture.
+ */
+ FLASH_REQUIRED,
+
+ /** AE has a good set of control values for the current scene. */
+ CONVERGED,
+
+ /** AE has been locked. */
+ LOCKED
+ }
+
+ /**
+ * Auto white balance (AWB) state. Values are mapped from camera2
+ * {@code CaptureResult.CONTROL_AWB_STATE_*} by Camera2CameraCaptureResult.
+ */
+ public enum AwbState {
+
+ /** AWB state is currently unknown. */
+ UNKNOWN,
+
+ /** AWB is not in auto mode, or has not yet started metering. */
+ INACTIVE,
+
+ /** AWB is performing AWB metering. */
+ METERING,
+
+ /** AWB has a good set of control values for the current scene. */
+ CONVERGED,
+
+ /** AWB has been locked. */
+ LOCKED
+ }
+
+ /**
+ * Flash state. Values are mapped from camera2
+ * {@code CaptureResult.FLASH_STATE_*} by Camera2CameraCaptureResult.
+ */
+ public enum FlashState {
+
+ /** Flash state is unknown. */
+ UNKNOWN,
+
+ /** Flash is unavailable or not ready to fire. */
+ NONE,
+
+ /** Flash is ready to fire. */
+ READY,
+
+ /** Flash has been fired. */
+ FIRED
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureResult.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureResult.java
new file mode 100644
index 000000000..13407ca0d
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraCaptureResult.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camera.session;
+
+import android.hardware.camera2.CaptureResult;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RestrictTo;
+import androidx.annotation.RestrictTo.Scope;
+
+
+/**
+ * The result of a single image capture.
+ *
+ * @hide
+ */
+@RestrictTo(Scope.LIBRARY_GROUP)
+public interface CameraCaptureResult {
+
+ /**
+ * Returns the current auto focus mode of operation.
+ */
+ @NonNull
+ CameraCaptureMetaData.AfMode getAfMode();
+
+ /**
+ * Returns the current auto focus state.
+ */
+ @NonNull
+ CameraCaptureMetaData.AfState getAfState();
+
+ /**
+ * Returns the current auto exposure state.
+ */
+ @NonNull
+ CameraCaptureMetaData.AeState getAeState();
+
+ /**
+ * Returns the current auto white balance state.
+ */
+ @NonNull
+ CameraCaptureMetaData.AwbState getAwbState();
+
+ /**
+ * Returns the current flash state.
+ */
+ @NonNull
+ CameraCaptureMetaData.FlashState getFlashState();
+
+ /**
+ * Returns the timestamp in nanoseconds.
+ *
+ * If the timestamp was unavailable then it will return {@code -1L}.
+ */
+ long getTimestamp();
+
+ /**
+ * Returns the tag associated with the capture request.
+ */
+ Object getTag();
+
+ CaptureResult getCaptureResult();
+
+
+ /**
+ * An implementation of CameraCaptureResult which always returns default results.
+ */
+ final class EmptyCameraCaptureResult implements CameraCaptureResult {
+
+ public static CameraCaptureResult create() {
+ return new EmptyCameraCaptureResult();
+ }
+
+ @NonNull
+ @Override
+ public CameraCaptureMetaData.AfMode getAfMode() {
+ return CameraCaptureMetaData.AfMode.UNKNOWN;
+ }
+
+ @NonNull
+ @Override
+ public CameraCaptureMetaData.AfState getAfState() {
+ return CameraCaptureMetaData.AfState.UNKNOWN;
+ }
+
+ @NonNull
+ @Override
+ public CameraCaptureMetaData.AeState getAeState() {
+ return CameraCaptureMetaData.AeState.UNKNOWN;
+ }
+
+ @NonNull
+ @Override
+ public CameraCaptureMetaData.AwbState getAwbState() {
+ return CameraCaptureMetaData.AwbState.UNKNOWN;
+ }
+
+ @NonNull
+ @Override
+ public CameraCaptureMetaData.FlashState getFlashState() {
+ return CameraCaptureMetaData.FlashState.UNKNOWN;
+ }
+
+ @Override
+ public long getTimestamp() {
+ return -1L;
+ }
+
+ @Override
+ public Object getTag() {
+ return null;
+ }
+
+ @Override
+ public CaptureResult getCaptureResult() {
+ return null;
+ }
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraSelector.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraSelector.java
new file mode 100644
index 000000000..8bfe2b596
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/CameraSelector.java
@@ -0,0 +1,78 @@
+package com.quark.quamera.camera.session;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-11-18
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import java.util.List;
+
+public class CameraSelector implements ISelector {
+
+ @Override
+ public List The original crop rect is calculated based on camera sensor buffer. On some devices,
+ * the buffer is rotated before being passed to users, in which case the crop rect also
+ * needs additional transformations.
+ *
+ * There are two most common scenarios: 1) exif rotation is 0, or 2) exif rotation
+ * equals output rotation. 1) means the HAL rotated the buffer based on target
+ * rotation. 2) means HAL no-oped on the rotation. Theoretically only 1) needs
+ * additional transformations, but this method is also generic enough to handle all possible
+ * HAL rotations.
+ */
+ @NonNull
+ static Rect getDispatchCropRect(@NonNull Rect surfaceCropRect, int surfaceToOutputDegrees,
+ @NonNull Size dispatchResolution, int dispatchToOutputDegrees) {
+
+
+ CameraLogger.i("ViewPorts", String.format(Locale.CHINA, "getDispatchCropRect surfaceCropRect:%s " +
+ "surfaceToOutputDegrees:%d dispatchResolution:%s dispatchToOutputDegrees:%d", surfaceCropRect,
+ surfaceToOutputDegrees, dispatchResolution, dispatchToOutputDegrees));
+
+
+ // There are 3 coordinate systems: surface, dispatch and output. Surface is where
+ // the original crop rect is defined. We need to figure out what HAL
+ // has done to the buffer (the surface->dispatch mapping) and apply the same
+ // transformation to the crop rect.
+ // The surface->dispatch mapping is calculated by inverting a dispatch->surface mapping.
+
+ Matrix matrix = new Matrix();
+ // Apply the dispatch->surface rotation.
+ matrix.setRotate(dispatchToOutputDegrees - surfaceToOutputDegrees);
+ // Apply the dispatch->surface translation. The translation is calculated by
+ // compensating for the offset caused by the dispatch->surface rotation.
+ float[] vertexes = sizeToVertexes(dispatchResolution);
+ matrix.mapPoints(vertexes);
+ float left = min(vertexes[0], vertexes[2], vertexes[4], vertexes[6]);
+ float top = min(vertexes[1], vertexes[3], vertexes[5], vertexes[7]);
+ matrix.postTranslate(-left, -top);
+ // Inverting the dispatch->surface mapping to get the surface->dispatch mapping.
+ matrix.invert(matrix);
+
+ // Apply the surface->dispatch mapping to surface crop rect.
+ RectF dispatchCropRectF = new RectF();
+ matrix.mapRect(dispatchCropRectF, new RectF(surfaceCropRect));
+ dispatchCropRectF.sort();
+ Rect dispatchCropRect = new Rect();
+ dispatchCropRectF.round(dispatchCropRect);
+ return dispatchCropRect;
+ }
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/InnerImageCaptureCallback.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/InnerImageCaptureCallback.java
new file mode 100644
index 000000000..a996c0fdb
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/InnerImageCaptureCallback.java
@@ -0,0 +1,21 @@
+package com.quark.quamera.camera.session;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 2021/7/13
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.media.Image;
+
+public interface InnerImageCaptureCallback {
+
+
+ void onCaptureStart();
+
+ void onCaptureSuccess(Image image);
+
+ void onError(Exception e);
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/PreviewConfig.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/PreviewConfig.java
new file mode 100644
index 000000000..8d8d91708
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/PreviewConfig.java
@@ -0,0 +1,204 @@
+package com.quark.quamera.camera.session;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-11-24
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import android.util.Range;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+import com.quark.quamera.camera.camera.Camera2Info;
+import com.quark.quamera.camera.camera.CameraSurfaceHelper;
+import com.quark.quamera.camera.imagereader.DeferrableImageReader;
+import com.quark.quamera.camera.preview.IPreviewView;
+import com.quark.quamera.camera.session.config.CameraConfigUtils;
+
+import java.util.List;
+import java.util.concurrent.Executor;
+
+@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+public class PreviewConfig {
+
+ private final int mExceptWidth;
+ private final int mExceptHeight;
+
+ private int mActualWidth;
+ private int mActualHeight;
+
+ private RepeatCaptureRequestConfig mPreviewCaptureConfig;
+
+
+ private CameraSurfaceHelper.ISuggestionCalculation mSuggestionCalculation;
+
+ private @NonNull
+ IPreviewView mPreviewView;
+
+ private List Compression quality of the final JPEG
+ * image. 85-95 is typical usage range. This tag is also used to describe the quality
+ * of the HEIC image capture. Range of valid values: This key is available on all devices. State changes are ignored once the CaptureSession has been closed.
+ */
+ final class StateControlCallback extends CameraCaptureSession.StateCallback {
+ /**
+ * {@inheritDoc}
+ *
+ * Once the {@link CameraCaptureSession} has been configured then the capture request
+ * will be immediately issued.
+ */
+ @Override
+ public void onConfigured(@NonNull CameraCaptureSession session) {
+ synchronized (mStateLock) {
+ switch (mState) {
+ case INITIALIZED:
+ case OPENED:
+ case RELEASED:
+ throw new IllegalStateException(
+ "onConfigured() should not be possible in state: " + mState);
+ case OPENING:
+ if (mCamera.getCameraState() != CameraState.OPEN) {
+ //相机已经关闭了,直接关闭session
+ changeState(State.RELEASED);
+ return;
+ }
+ mState = State.OPENED;
+ changeState(State.OPENED);
+
+ mCameraCaptureSession = session;
+ CameraLogger.i("AndroidCameraApi", "Attempting to send capture request onConfigured");
+ issueRepeatingCaptureRequests(null);
+ break;
+ case RELEASING:
+ changeState(State.RELEASED);
+ session.close();
+ break;
+ }
+ CameraLogger.i(TAG, "CameraCaptureSession.onConfigured() mState=" + mState);
+ }
+ }
+
+ @Override
+ public void onReady(@NonNull CameraCaptureSession session) {
+ synchronized (mStateLock) {
+ switch (mState) {
+ case RELEASING:
+ if (mCameraCaptureSession == null) {
+ // No-op for releasing an unopened session.
+ break;
+ }
+ // The abortCaptures() called in release() has successfully finished.
+ mCameraCaptureSession.close();
+ break;
+ default:
+ }
+ CameraLogger.i(TAG, "CameraCaptureSession.onReady() " + mState);
+ }
+ }
+
+ @Override
+ public void onClosed(@NonNull CameraCaptureSession session) {
+ synchronized (mStateLock) {
+
+ if (mState == State.RELEASED) {
+ // If released then onClosed() has already been called, but it can be ignored
+ // since a session can be forceClosed.
+ return;
+ }
+
+ Log.d(TAG, "CameraCaptureSession.onClosed()");
+
+
+ changeState(State.RELEASED);
+ mCameraCaptureSession = null;
+
+ if (mSessionConfig != null) {
+ mSessionConfig.getPreviewConfig().getPreviewView().getSurfaceProvider().onUseComplete(null);
+ mSessionConfig = null;
+ }
+ }
+ }
+
+ @Override
+ public void onConfigureFailed(@NonNull CameraCaptureSession session) {
+ synchronized (mStateLock) {
+ switch (mState) {
+ case INITIALIZED:
+ case OPENED:
+ case RELEASED:
+ throw new IllegalStateException(
+ "onConfiguredFailed() should not be possible in state: " + mState);
+ case OPENING:
+ case RELEASING:
+ changeState(State.RELEASING);
+ session.close();
+ break;
+ }
+ CameraLogger.i(TAG, "CameraCaptureSession.onConfiguredFailed() " + mState);
+ }
+ }
+ }
+
+
+ public void doRepeatingCaptureAction(@NonNull RepeatCaptureRequestConfig config) {
+ mHandler.post(() -> {
+ switch (mState) {
+ case OPENED:
+ issueRepeatingCaptureRequests(config);
+ break;
+ default:
+ if (config.getCallback() != null) {
+ if (config.getCallbackExecutor() != null) {
+ config.getCallbackExecutor().execute(() -> config.getCallback().onCaptureFailed(new CameraCaptureFailure(CameraCaptureFailure.Reason.ERROR)));
+ } else {
+ config.getCallback().onCaptureFailed(new CameraCaptureFailure(CameraCaptureFailure.Reason.ERROR));
+ }
+ break;
+ }
+ }
+ });
+
+ }
+
+
+ private void issueRepeatingCaptureRequests(RepeatCaptureRequestConfig action) {
+ if (mSessionConfig == null) {
+ CameraLogger.e(TAG, "Skipping issueRepeatingCaptureRequests for no configuration case.");
+ return;
+ }
+
+ try {
+
+ //设置反复捕获数据的请求,这样预览界面就会一直有数据显示
+ List This releases all of the sessions resources and should be called when ready to close the
+ * camera.
+ *
+ * Once a session is released it can no longer be opened again. After the session is released
+ * all method calls on it do nothing.
+ */
+ @ExecutedBy("mHandler")
+ public void release() {
+ synchronized (mStateLock) {
+ CameraLogger.i(Camera2CameraImpl.TAG, "CameraCaptureSession.releaseCaptureSession when (%s) %s ", mState, mCameraCaptureSession);
+ switch (mState) {
+ case OPENED:
+ if (mCameraCaptureSession != null) {
+ mCameraCaptureSession.close();
+ }
+ // Fall through
+ case OPENING:
+ mState = State.RELEASING;
+ // Fall through
+ case RELEASING:
+ break;
+ case INITIALIZED:
+ mState = State.RELEASED;
+ // Fall through
+ case RELEASED:
+ break;
+ }
+ }
+ }
+
+ @ExecutedBy("mHandler")
+ public void forceRelease() {
+ synchronized (mStateLock) {
+ if (mSessionConfig != null) {
+ mSessionConfig.getPreviewConfig()
+ .getPreviewView()
+ .getSurfaceProvider()
+ .onUseComplete(null);
+ if (mSessionConfig.getImageCapture() != null
+ && mSessionConfig.getImageCapture().getDeferrableImageReader() != null) {
+ mSessionConfig.getImageCapture().getDeferrableImageReader().safeClose();
+ }
+ mSessionConfig = null;
+ }
+ }
+ }
+
+ private static CameraCaptureSession.CaptureCallback convert2SystemApiCaptureCallback(Executor executor, CameraCaptureCallback callback) {
+ if (callback == null) {
+ return null;
+ }
+ if (executor == null) {
+ return new CaptureCallbackAdapter(callback);
+ }
+
+
+ return new CaptureCallbackHandlerWrapper(executor, new CaptureCallbackAdapter(callback));
+ }
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/UserCameraSession.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/UserCameraSession.java
new file mode 100644
index 000000000..b6432858f
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/session/UserCameraSession.java
@@ -0,0 +1,109 @@
+package com.quark.quamera.camera.session;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-11-23
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.os.Build;
+
+import com.quark.quamera.camera.camera.Camera2Info;
+import com.quark.quamera.camera.camera.CameraLifeManager;
+import com.quark.quamera.camera.camera.CameraState;
+import com.quark.quamera.camera.session.config.CameraSelectConfig;
+
+import java.util.concurrent.Executor;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.lifecycle.MutableLiveData;
+
+
+/**
+ * 不直接通过open,close的行为来直接使用相机,而是通过抽象窗口Session,使用Session的生命事件来控制相机
+ *
+ * {@link IUserCameraSession#active()} 开启相机
+ * {@link IUserCameraSession#inactive()} 关闭相机
+ */
+@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+public final class UserCameraSession implements IUserCameraSession {
+
+ private State mState = State.INACTIVE;
+
+ private CameraSelector mCameraSelector;
+
+ private final CameraLifeManager mCameraLifeManager;
+
+ private final SessionConfig mSessionConfig;
+
+
+ public UserCameraSession(@NonNull CameraLifeManager cameraLifeManager, @NonNull SessionConfig sessionConfig) {
+ mCameraLifeManager = cameraLifeManager;
+ mSessionConfig = sessionConfig;
+ }
+
+ public UserCameraSession setCameraSelector(CameraSelector cameraSelector) {
+ mCameraSelector = cameraSelector;
+ return this;
+ }
+
+
+ private boolean openCamera() {
+ if (mCameraSelector == null) {
+ return false;
+ }
+ mSessionConfig.setSelectConfig(new CameraSelectConfig(mCameraSelector));
+ return mCameraLifeManager.openCamera(mCameraSelector, mSessionConfig);
+ }
+
+ public Camera2Info getCamera2Info() {
+ return mCameraLifeManager.getCamera2Info();
+ }
+
+ public void enableFlash(boolean enable, CameraCaptureCallback callback, Executor executor) {
+ mCameraLifeManager.enableFlash(enable, callback, executor);
+ }
+
+ public void takePicture(@NonNull SingleCaptureConfig singleCaptureConfig,
+ @NonNull ImageCapture.OnImageCapturedCallback capturedCallback) {
+ mCameraLifeManager.takePicture(singleCaptureConfig, capturedCallback);
+ }
+
+ public void closeCamera() {
+ mCameraLifeManager.closeCamera();
+ }
+
+ public @Nullable
+ MutableLiveData
+ * TODO(b/185272953): instrument test getting attribution tag once the view artifact depends
+ * on a core version that has the fix.
+ */
+ private static Context getApplicationContext(@NonNull Context context) {
+ Context applicationContext = context.getApplicationContext();
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
+ String attributeTag = Api30Impl.getAttributionTag(context);
+
+ if (attributeTag != null) {
+ return Api30Impl.createAttributionContext(applicationContext, attributeTag);
+ }
+ }
+
+ return applicationContext;
+ }
+
+ @NonNull
+ public ListenableFuture {@link ImageCapture} is enabled by default. It has to be enabled before
+ * {@link #takePicture} can be called.
+ *
+ * @see ImageCapture
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public boolean isImageCaptureEnabled() {
+ Threads.checkMainThread();
+ return isUseCaseEnabled(IMAGE_CAPTURE);
+ }
+
+ /**
+ * Gets the flash mode for {@link ImageCapture}.
+ *
+ * @return the flashMode. Value is {@link ImageCapture#FLASH_MODE_AUTO},
+ * {@link ImageCapture#FLASH_MODE_ON}, or {@link ImageCapture#FLASH_MODE_OFF}.
+ * @see ImageCapture
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ @ImageCapture.FlashMode
+ public int getImageCaptureFlashMode() {
+ Threads.checkMainThread();
+ return mImageCapture.getFlashMode();
+ }
+
+ /**
+ * Sets the flash mode for {@link ImageCapture}.
+ *
+ * If not set, the flash mode will default to {@link ImageCapture#FLASH_MODE_OFF}.
+ *
+ * @param flashMode the flash mode for {@link ImageCapture}.
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void setImageCaptureFlashMode(@ImageCapture.FlashMode int flashMode) {
+ Threads.checkMainThread();
+ mImageCapture.setFlashMode(flashMode);
+ }
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void takePicture(
+ @NonNull ImageCapture.OutputFileOptions outputFileOptions,
+ @NonNull Executor executor,
+ @NonNull ImageCapture.OnImageSavedCallback imageSavedCallback,
+ Rect viewPortCropRect) {
+ Threads.checkMainThread();
+ Preconditions.checkState(isCameraInitialized(), CAMERA_NOT_INITIALIZED);
+ Preconditions.checkState(isImageCaptureEnabled(), IMAGE_CAPTURE_DISABLED);
+
+ mImageCapture.setViewPortCropRect(viewPortCropRect);
+ takePicture(outputFileOptions, executor, imageSavedCallback);
+ }
+
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void takePicture(
+ @NonNull ImageCapture.OutputFileOptions outputFileOptions,
+ @NonNull Executor executor,
+ @NonNull ImageCapture.OnImageSavedCallback imageSavedCallback) {
+
+ Threads.checkMainThread();
+ Preconditions.checkState(isCameraInitialized(), CAMERA_NOT_INITIALIZED);
+ Preconditions.checkState(isImageCaptureEnabled(), IMAGE_CAPTURE_DISABLED);
+
+ updateMirroringFlagInOutputFileOptions(outputFileOptions);
+ mImageCapture.takePicture(outputFileOptions, executor, imageSavedCallback);
+ }
+
+ /**
+ * Update {@link ImageCapture.OutputFileOptions} based on config.
+ *
+ * Mirror the output image if front camera is used and if the flag is not set explicitly by
+ * the app.
+ *
+ * @hide
+ */
+ @SuppressLint("RestrictedApi")
+ @VisibleForTesting
+ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+ void updateMirroringFlagInOutputFileOptions(
+ @NonNull ImageCapture.OutputFileOptions outputFileOptions) {
+ if (mCameraSelector.getLensFacing() != null
+ && !outputFileOptions.getMetadata().isReversedHorizontalSet()) {
+ outputFileOptions.getMetadata().setReversedHorizontal(
+ mCameraSelector.getLensFacing() == CameraSelector.LENS_FACING_FRONT);
+ }
+ }
+
+ /**
+ * Captures a new still image for in memory access.
+ *
+ * The listener is responsible for calling {@link ImageProxy#close()} on the returned image.
+ *
+ * @param executor The executor in which the callback methods will be run.
+ * @param callback Callback to be invoked for the newly captured image
+ * @see ImageCapture#takePicture(Executor, ImageCapture.OnImageCapturedCallback)
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void takePicture(
+ @NonNull Executor executor,
+ @NonNull ImageCapture.OnImageCapturedCallback callback) {
+ Threads.checkMainThread();
+ Preconditions.checkState(isCameraInitialized(), CAMERA_NOT_INITIALIZED);
+ Preconditions.checkState(isImageCaptureEnabled(), IMAGE_CAPTURE_DISABLED);
+ mImageCapture.takePicture(executor, callback);
+ }
+
+ /**
+ * Sets the image capture mode.
+ *
+ * Valid capture modes are {@link ImageCapture.CaptureMode#CAPTURE_MODE_MINIMIZE_LATENCY},
+ * which prioritizes latency over image quality, or
+ * {@link ImageCapture.CaptureMode#CAPTURE_MODE_MAXIMIZE_QUALITY},
+ * which prioritizes image quality over latency.
+ *
+ * @param captureMode the requested image capture mode.
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void setImageCaptureMode(@ImageCapture.CaptureMode int captureMode) {
+ Threads.checkMainThread();
+ if (mImageCapture.getCaptureMode() == captureMode) {
+ return;
+ }
+ unbindImageCaptureAndRecreate(captureMode);
+ startCameraAndTrackStates();
+ }
+
+ /**
+ * Returns the image capture mode.
+ *
+ * @see ImageCapture#getCaptureMode()
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public int getImageCaptureMode() {
+ Threads.checkMainThread();
+ return mImageCapture.getCaptureMode();
+ }
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void setImageCaptureTargetSize(@Nullable OutputSize targetSize) {
+ Threads.checkMainThread();
+ if (isOutputSizeEqual(mImageCaptureTargetSize, targetSize)) {
+ return;
+ }
+ mImageCaptureTargetSize = targetSize;
+ unbindImageCaptureAndRecreate(getImageCaptureMode());
+ startCameraAndTrackStates();
+ }
+
+ /**
+ * Returns the intended output size for {@link ImageCapture} set by
+ * {@link #setImageCaptureTargetSize(OutputSize)}, or null if not set.
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ @Nullable
+ public OutputSize getImageCaptureTargetSize() {
+ Threads.checkMainThread();
+ return mImageCaptureTargetSize;
+ }
+
+ /**
+ * Sets the default executor that will be used for {@link ImageCapture} IO tasks.
+ *
+ * This executor will be used for any IO tasks specifically for {@link ImageCapture},
+ * such as {@link #takePicture(ImageCapture.OutputFileOptions, Executor,
+ * ImageCapture.OnImageSavedCallback)}. If no executor is set, then a default Executor
+ * specifically for IO will be used instead.
+ *
+ * @param executor The executor which will be used for IO tasks.
+ * TODO(b/187842789) add @see link for ImageCapture.
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void setImageCaptureIoExecutor(@Nullable Executor executor) {
+ Threads.checkMainThread();
+ if (mImageCaptureIoExecutor == executor) {
+ return;
+ }
+ mImageCaptureIoExecutor = executor;
+ unbindImageCaptureAndRecreate(mImageCapture.getCaptureMode());
+ startCameraAndTrackStates();
+ }
+
+
+ /**
+ * Unbinds {@link ImageCapture} and recreates with the latest parameters.
+ */
+ private void unbindImageCaptureAndRecreate(int imageCaptureMode) {
+ if (isCameraInitialized()) {
+ mCameraProvider.unbind(mImageCapture);
+ }
+ ImageCapture.Builder builder = new ImageCapture.Builder().setCaptureMode(imageCaptureMode);
+ setTargetOutputSize(builder, mImageCaptureTargetSize);
+ if (mImageCaptureIoExecutor != null) {
+ builder.setIoExecutor(mImageCaptureIoExecutor);
+ }
+ mImageCapture = builder.build();
+ }
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public boolean isImageAnalysisEnabled() {
+ Threads.checkMainThread();
+ return isUseCaseEnabled(IMAGE_ANALYSIS);
+ }
+
+ // -----------------
+ // Camera control
+ // -----------------
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void initCameraSelector(@NonNull CameraSelector cameraSelector) {
+ Threads.checkMainThread();
+ if (mCameraSelector == cameraSelector) {
+ return;
+ }
+ mCameraSelector = cameraSelector;
+ }
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void setCameraSelector(@NonNull CameraSelector cameraSelector) {
+ Threads.checkMainThread();
+ if (mCameraSelector == cameraSelector) {
+ return;
+ }
+
+ CameraSelector oldCameraSelector = mCameraSelector;
+ mCameraSelector = cameraSelector;
+
+ if (mCameraProvider == null) {
+ return;
+ }
+ mCameraProvider.unbindAll();
+ startCameraAndTrackStates(() -> mCameraSelector = oldCameraSelector);
+ }
+
+ /**
+ * Checks if the given {@link CameraSelector} can be resolved to a camera.
+ *
+ * Use this method to check if the device has the given camera.
+ *
+ * Only call this method after camera is initialized. e.g. after the
+ * {@link ListenableFuture} from {@link #getInitializationFuture()} is finished. Calling it
+ * prematurely throws {@link IllegalStateException}. Example:
+ *
+ * The default value is {@link CameraSelector#DEFAULT_BACK_CAMERA}.
+ *
+ * @see CameraSelector
+ */
+ @SuppressLint("RestrictedApi")
+ @NonNull
+ @MainThread
+ public CameraSelector getCameraSelector() {
+ Threads.checkMainThread();
+ return mCameraSelector;
+ }
+
+ /**
+ * Returns whether pinch-to-zoom is enabled.
+ *
+ * By default pinch-to-zoom is enabled.
+ *
+ * @return True if pinch-to-zoom is enabled.
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public boolean isPinchToZoomEnabled() {
+ Threads.checkMainThread();
+ return mPinchToZoomEnabled;
+ }
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void setPinchToZoomEnabled(boolean enabled) {
+ Threads.checkMainThread();
+ mPinchToZoomEnabled = enabled;
+ }
+
+
+ @SuppressLint("RestrictedApi")
+ @SuppressWarnings("FutureReturnValueIgnored")
+ public void onPinchToZoom(float pinchToZoomScale) {
+ if (!isCameraAttached()) {
+ Logger.w(TAG, CAMERA_NOT_ATTACHED);
+ return;
+ }
+ if (!mPinchToZoomEnabled) {
+ Logger.d(TAG, "Pinch to zoom disabled.");
+ return;
+ }
+ Logger.d(TAG, "Pinch to zoom with scale: " + pinchToZoomScale);
+
+ ZoomState zoomState = getZoomState().getValue();
+ if (zoomState == null) {
+ return;
+ }
+ float clampedRatio = zoomState.getZoomRatio() * speedUpZoomBy2X(pinchToZoomScale);
+ // Clamp the ratio with the zoom range.
+ clampedRatio = Math.min(Math.max(clampedRatio, zoomState.getMinZoomRatio()),
+ zoomState.getMaxZoomRatio());
+ setZoomRatio(clampedRatio);
+ }
+
+ private float speedUpZoomBy2X(float scaleFactor) {
+ if (scaleFactor > 1f) {
+ return 1.0f + (scaleFactor - 1.0f) * 2;
+ } else {
+ return 1.0f - (1.0f - scaleFactor) * 2;
+ }
+ }
+
+ /**
+ * 点击对焦
+ */
+ @SuppressLint("RestrictedApi")
+ @SuppressWarnings("FutureReturnValueIgnored")
+ public void onTapToFocus(MeteringPointFactory meteringPointFactory, float x, float y) {
+ if (!isCameraAttached()) {
+ Logger.w(TAG, CAMERA_NOT_ATTACHED);
+ return;
+ }
+ if (!mTapToFocusEnabled) {
+ Logger.d(TAG, "Tap to focus disabled. ");
+ return;
+ }
+ Logger.d(TAG, "Tap to focus started: start:" + x + ", " + y);
+ mTapToFocusState.postValue(TAP_TO_FOCUS_STARTED);
+ MeteringPoint afPoint = meteringPointFactory.createPoint(x, y, AF_SIZE);
+ MeteringPoint aePoint = meteringPointFactory.createPoint(x, y, AE_SIZE);
+ Logger.d(TAG, "Tap to focus started: after:" + afPoint.getX() + ", " + afPoint.getY());
+
+ FocusMeteringAction focusMeteringAction = new FocusMeteringAction
+ .Builder(afPoint, FocusMeteringAction.FLAG_AF)
+ .addPoint(aePoint, FocusMeteringAction.FLAG_AE)
+ .build();
+ Futures.addCallback(mCamera.getCameraControl().startFocusAndMetering(focusMeteringAction),
+ new FutureCallback By default tap-to-focus is enabled.
+ *
+ * @return True if tap-to-focus is enabled.
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public boolean isTapToFocusEnabled() {
+ Threads.checkMainThread();
+ return mTapToFocusEnabled;
+ }
+
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public void setTapToFocusEnabled(boolean enabled) {
+ Threads.checkMainThread();
+ mTapToFocusEnabled = enabled;
+ }
+
+
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ @NonNull
+ public LiveData This class transforms the camera output and display it in a PreviewView. The goal is
+ * to transform it in a way so that the entire area of
+ * {@link SurfaceRequest.TransformationInfo#getCropRect()} is 1) visible to end users, and 2)
+ * displayed as large as possible.
+ *
+ * The inputs for the calculation are 1) the dimension of the Surface, 2) the crop rect, 3) the
+ * dimension of the PreviewView and 4) rotation degrees:
+ *
+ * The transformed Surface is how the PreviewView's inner view should behave, to make the
+ * crop rect match the PreviewView.
+ */
+final class PreviewTransformation {
+
+ private static final String TAG = "PreviewTransform";
+
+
+ // SurfaceRequest.getResolution().
+ private Size mResolution;
+ // This represents the area of the Surface that should be visible to end users. The value
+ // is based on TransformationInfo.getCropRect() with possible corrections due to device quirks.
+ private Rect mSurfaceCropRect;
+ // This rect represents the size of the viewport in preview. It's always the same as
+ // TransformationInfo.getCropRect().
+ private Rect mViewportRect;
+ // TransformationInfo.getRotationDegrees().
+ private int mPreviewRotationDegrees;
+ // TransformationInfo.getTargetRotation.
+ private int mTargetRotation;
+ // Whether the preview is using front camera.
+ private boolean mIsFrontCamera;
+
+
+ PreviewTransformation() {
+ }
+
+ /**
+ * Sets the inputs.
+ *
+ * All the values originally come from a {@link SurfaceRequest}.
+ */
+ @SuppressLint({"RestrictedApi", "UnsafeExperimentalUsageError"})
+ void setTransformationInfo(@NonNull SurfaceRequest.TransformationInfo transformationInfo,
+ Size resolution, boolean isFrontCamera) {
+ Logger.d(TAG, "Transformation info set: " + transformationInfo + " " + resolution + " "
+ + isFrontCamera);
+ mSurfaceCropRect = getCorrectedCropRect(transformationInfo.getCropRect());
+ mViewportRect = transformationInfo.getCropRect();
+ mPreviewRotationDegrees = transformationInfo.getRotationDegrees();
+ mTargetRotation = transformationInfo.getTargetRotation();
+ mResolution = resolution;
+ mIsFrontCamera = isFrontCamera;
+ }
+
+ /**
+ * Creates a matrix that makes {@link TextureView}'s rotation matches the
+ * {@link #mTargetRotation}.
+ *
+ * The value should be applied by calling {@link TextureView#setTransform(Matrix)}. Usually
+ * {@link #mTargetRotation} is the display rotation. In that case, this
+ * matrix will just make a {@link TextureView} works like a {@link SurfaceView}. If not, then
+ * it will further correct it to the desired rotation.
+ *
+ * This method is also needed in {@link #createTransformedBitmap} to correct the screenshot.
+ */
+ @SuppressLint("RestrictedApi")
+ @VisibleForTesting
+ Matrix getTextureViewCorrectionMatrix() {
+ Preconditions.checkState(isTransformationInfoReady());
+ RectF surfaceRect = new RectF(0, 0, mResolution.getWidth(), mResolution.getHeight());
+ @SuppressLint("RestrictedApi") int rotationDegrees = -TransformUtils.surfaceRotationToRotationDegrees(mTargetRotation);
+
+ TextureViewRotationQuirk textureViewRotationQuirk =
+ DeviceQuirks.get(TextureViewRotationQuirk.class);
+ if (textureViewRotationQuirk != null) {
+ rotationDegrees += textureViewRotationQuirk.getCorrectionRotation(mIsFrontCamera);
+ }
+ return TransformUtils.getRectToRect(surfaceRect, surfaceRect, rotationDegrees);
+ }
+
+ /**
+ * Calculates the transformation and applies it to the inner view of PreviewView.
+ *
+ * The inner view could be {@link SurfaceView} or a {@link TextureView}.
+ * {@link TextureView} needs a preliminary correction since it doesn't handle the
+ * display rotation.
+ */
+ @SuppressLint("RestrictedApi")
+ void transformView(Size previewViewSize, int layoutDirection, @NonNull View preview) {
+ if (previewViewSize.getHeight() == 0 || previewViewSize.getWidth() == 0) {
+ Logger.w(TAG, "Transform not applied due to PreviewView size: " + previewViewSize);
+ return;
+ }
+ if (!isTransformationInfoReady()) {
+ return;
+ }
+
+ if (preview instanceof TextureView) {
+ // For TextureView, correct the orientation to match the target rotation.
+ ((TextureView) preview).setTransform(getTextureViewCorrectionMatrix());
+ } else {
+ // Logs an error if non-display rotation is used with SurfaceView.
+ Display display = preview.getDisplay();
+ if (display != null && display.getRotation() != mTargetRotation) {
+ Logger.e(TAG, "Non-display rotation not supported with SurfaceView / PERFORMANCE "
+ + "mode.");
+ }
+ }
+
+ RectF surfaceRectInPreviewView = getTransformedSurfaceRect(previewViewSize,
+ layoutDirection);
+ preview.setPivotX(0);
+ preview.setPivotY(0);
+ preview.setScaleX(surfaceRectInPreviewView.width() / mResolution.getWidth());
+ preview.setScaleY(surfaceRectInPreviewView.height() / mResolution.getHeight());
+ preview.setTranslationX(surfaceRectInPreviewView.left - preview.getLeft());
+ preview.setTranslationY(surfaceRectInPreviewView.top - preview.getTop());
+ }
+
+ /**
+ * Gets the transformed {@link Surface} rect in PreviewView coordinates.
+ *
+ * Returns the desired rect of the inner view that, once applied, ensures the only part
+ * visible to end users is the crop rect.
+ *
+ * @return the full surface rect mapped through {@link #getSurfaceToPreviewViewMatrix}.
+ */
+ @SuppressLint("RestrictedApi")
+ private RectF getTransformedSurfaceRect(Size previewViewSize, int layoutDirection) {
+ Preconditions.checkState(isTransformationInfoReady());
+ Matrix surfaceToPreviewView =
+ getSurfaceToPreviewViewMatrix(previewViewSize, layoutDirection);
+ RectF rect = new RectF(0, 0, mResolution.getWidth(), mResolution.getHeight());
+ surfaceToPreviewView.mapRect(rect);
+ return rect;
+ }
+
+ /**
+ * Calculates the transformation from {@link Surface} coordinates to PreviewView
+ * coordinates.
+ *
+ * The calculation is based on making the crop rect to fill or fit the PreviewView.
+ *
+ * @param previewViewSize the size of the PreviewView.
+ * @param layoutDirection LTR/RTL layout direction of the PreviewView.
+ * @return the mapping from surface coordinates to PreviewView coordinates.
+ */
+ @SuppressLint("RestrictedApi")
+ Matrix getSurfaceToPreviewViewMatrix(Size previewViewSize, int layoutDirection) {
+ Preconditions.checkState(isTransformationInfoReady());
+
+ // Get the target of the mapping, the coordinates of the crop rect in PreviewView.
+ RectF previewViewCropRect;
+ if (isViewportAspectRatioMatchPreviewView(previewViewSize)) {
+ // If crop rect has the same aspect ratio as PreviewView, scale the crop rect to fill
+ // the entire PreviewView. This happens if the scale type is FILL_* AND a
+ // PreviewView-based viewport is used.
+ previewViewCropRect = new RectF(0, 0, previewViewSize.getWidth(),
+ previewViewSize.getHeight());
+ } else {
+ // If the aspect ratios don't match, it could be 1) scale type is FIT_*, 2) the
+ // Viewport is not based on the PreviewView or 3) both.
+ previewViewCropRect = getPreviewViewViewportRectForMismatchedAspectRatios(
+ previewViewSize, layoutDirection);
+ }
+ Matrix matrix = TransformUtils.getRectToRect(new RectF(mSurfaceCropRect), previewViewCropRect,
+ mPreviewRotationDegrees);
+ if (mIsFrontCamera) {
+ // SurfaceView/TextureView automatically mirrors the Surface for front camera, which
+ // needs to be compensated by mirroring the Surface around the upright direction of the
+ // output image.
+ if (TransformUtils.is90or270(mPreviewRotationDegrees)) {
+ // If the rotation is 90/270, the Surface should be flipped vertically.
+ // +---+ 90 +---+ 270 +---+
+ // | ^ | --> | < | | > |
+ // +---+ +---+ +---+
+ matrix.preScale(1F, -1F, mSurfaceCropRect.centerX(), mSurfaceCropRect.centerY());
+ } else {
+ // If the rotation is 0/180, the Surface should be flipped horizontally.
+ // +---+ 0 +---+ 180 +---+
+ // | ^ | --> | ^ | | v |
+ // +---+ +---+ +---+
+ matrix.preScale(-1F, 1F, mSurfaceCropRect.centerX(), mSurfaceCropRect.centerY());
+ }
+ }
+ return matrix;
+ }
+
+ /**
+ * Corrects the given Surface crop rect on devices with a known preview-FOV quirk.
+ *
+ * If {@link PreviewOneThirdWiderQuirk} applies, the crop rect is scaled horizontally
+ * around its center by the quirk's factor; otherwise the rect is returned unchanged.
+ */
+ @SuppressLint("RestrictedApi")
+ private Rect getCorrectedCropRect(Rect surfaceCropRect) {
+ PreviewOneThirdWiderQuirk quirk = DeviceQuirks.get(PreviewOneThirdWiderQuirk.class);
+ if (quirk != null) {
+ // Correct crop rect if the device has a quirk.
+ RectF cropRectF = new RectF(surfaceCropRect);
+ Matrix correction = new Matrix();
+ correction.setScale(
+ quirk.getCropRectScaleX(),
+ 1f,
+ surfaceCropRect.centerX(),
+ surfaceCropRect.centerY());
+ correction.mapRect(cropRectF);
+ Rect correctRect = new Rect();
+ cropRectF.round(correctRect);
+ return correctRect;
+ }
+ return surfaceCropRect;
+ }
+
+ /**
+ * Gets the viewport rect in PreviewView coordinates for the case where viewport's
+ * aspect ratio doesn't match PreviewView's aspect ratio.
+ *
+ * When aspect ratios don't match, additional calculation is needed to figure out how to
+ * fit crop rect into the PreviewView.
+ *
+ * @return the viewport rect mapped into PreviewView coordinates, mirrored for RTL layouts.
+ */
+ RectF getPreviewViewViewportRectForMismatchedAspectRatios(Size previewViewSize,
+ int layoutDirection) {
+ RectF previewViewRect = new RectF(0, 0, previewViewSize.getWidth(),
+ previewViewSize.getHeight());
+ Size rotatedViewportSize = getRotatedViewportSize();
+ RectF rotatedViewportRect = new RectF(0, 0, rotatedViewportSize.getWidth(),
+ rotatedViewportSize.getHeight());
+ Matrix matrix = new Matrix();
+ setMatrixRectToRect(matrix, rotatedViewportRect, previewViewRect);
+ matrix.mapRect(rotatedViewportRect);
+ if (layoutDirection == LayoutDirection.RTL) {
+ // RTL layouts mirror the rect around the horizontal center of the PreviewView.
+ return flipHorizontally(rotatedViewportRect, (float) previewViewSize.getWidth() / 2);
+ }
+ return rotatedViewportRect;
+ }
+
+ /**
+ * Set the matrix that maps the source rectangle to the destination rectangle.
+ *
+ * This static method is an extension of {@link Matrix#setRectToRect} with an additional
+ * support for FILL_* types.
+ */
+ private static void setMatrixRectToRect(Matrix matrix, RectF source, RectF destination) {
+ Matrix.ScaleToFit matrixScaleType = Matrix.ScaleToFit.FILL;
+ // TODO: the ScaleType may need to be made configurable later.
+// boolean isFitTypes =
+// scaleType == FIT_CENTER || scaleType == FIT_START || scaleType == FIT_END;
+ boolean isFitTypes = false;
+ if (isFitTypes) {
+ matrix.setRectToRect(source, destination, matrixScaleType);
+ } else {
+ // android.graphics.Matrix doesn't support fill scale types. The workaround is
+ // mapping inversely from destination to source, then invert the matrix.
+ matrix.setRectToRect(destination, source, matrixScaleType);
+ matrix.invert(matrix);
+ }
+ }
+
+ /**
+ * Flips the given rect along a vertical line for RTL layout direction.
+ *
+ * @param original the rect in LTR coordinates.
+ * @param flipLineX the x coordinate of the vertical line to mirror around.
+ */
+ private static RectF flipHorizontally(RectF original, float flipLineX) {
+ return new RectF(
+ flipLineX + flipLineX - original.right,
+ original.top,
+ flipLineX + flipLineX - original.left,
+ original.bottom);
+ }
+
+ /**
+ * Returns viewport size with target rotation applied.
+ *
+ * Width and height are swapped when the preview is rotated by 90° or 270°.
+ */
+ @SuppressLint("RestrictedApi")
+ private Size getRotatedViewportSize() {
+ if (TransformUtils.is90or270(mPreviewRotationDegrees)) {
+ return new Size(mViewportRect.height(), mViewportRect.width());
+ }
+ return new Size(mViewportRect.width(), mViewportRect.height());
+ }
+
+ /**
+ * Checks if the viewport's aspect ratio matches that of the PreviewView.
+ *
+ * Rounding error is tolerated because the viewport rect uses integer coordinates.
+ */
+ @SuppressLint("RestrictedApi")
+ @VisibleForTesting
+ boolean isViewportAspectRatioMatchPreviewView(Size previewViewSize) {
+ // Using viewport rect to check if the viewport is based on the PreviewView.
+ Size rotatedViewportSize = getRotatedViewportSize();
+ return TransformUtils.isAspectRatioMatchingWithRoundingError(
+ previewViewSize, /* isAccurate1= */ true,
+ rotatedViewportSize, /* isAccurate2= */ false);
+ }
+
+ /**
+ * Return the crop rect of the preview surface.
+ *
+ * @return null until the transformation info has been set.
+ */
+ @Nullable
+ Rect getSurfaceCropRect() {
+ return mSurfaceCropRect;
+ }
+
+ /**
+ * Creates a transformed screenshot of PreviewView.
+ *
+ * Creates the transformed {@link Bitmap} by applying the same transformation applied to
+ * the inner view.
+ *
+ * @param original a snapshot of the untransformed inner view.
+ * @param previewViewSize the current size of the PreviewView.
+ * @param layoutDirection LTR/RTL layout direction of the PreviewView.
+ * @return the original bitmap unchanged if transformation info is not ready yet.
+ */
+ Bitmap createTransformedBitmap(@NonNull Bitmap original, Size previewViewSize,
+ int layoutDirection) {
+ if (!isTransformationInfoReady()) {
+ return original;
+ }
+ Matrix textureViewCorrection = getTextureViewCorrectionMatrix();
+ RectF surfaceRectInPreviewView = getTransformedSurfaceRect(previewViewSize,
+ layoutDirection);
+
+ Bitmap transformed = Bitmap.createBitmap(
+ previewViewSize.getWidth(), previewViewSize.getHeight(), original.getConfig());
+ Canvas canvas = new Canvas(transformed);
+
+ // Mirror the view transform: rotation correction, then scale, then translation.
+ Matrix canvasTransform = new Matrix();
+ canvasTransform.postConcat(textureViewCorrection);
+ canvasTransform.postScale(surfaceRectInPreviewView.width() / mResolution.getWidth(),
+ surfaceRectInPreviewView.height() / mResolution.getHeight());
+ canvasTransform.postTranslate(surfaceRectInPreviewView.left, surfaceRectInPreviewView.top);
+
+ canvas.drawBitmap(original, canvasTransform,
+ new Paint(ANTI_ALIAS_FLAG | FILTER_BITMAP_FLAG | DITHER_FLAG));
+ return transformed;
+ }
+
+ /**
+ * Calculates the mapping from a UI touch point (0, 0) - (width, height) to normalized
+ * space (-1, -1) - (1, 1).
+ *
+ * This is used by {@link PreviewViewMeteringPointFactory}.
+ *
+ * @return null if transformation info is not set.
+ */
+ @Nullable
+ Matrix getPreviewViewToNormalizedSurfaceMatrix(Size previewViewSize, int layoutDirection) {
+ if (!isTransformationInfoReady()) {
+ return null;
+ }
+ Matrix matrix = new Matrix();
+
+ // Map PreviewView coordinates to Surface coordinates.
+ getSurfaceToPreviewViewMatrix(previewViewSize, layoutDirection).invert(matrix);
+
+ // Map Surface coordinates to normalized coordinates (-1, -1) - (1, 1).
+ // NOTE(review): the target rect below is (0, 0) - (1, 1), which does not match the
+ // (-1, -1) - (1, 1) range stated in the comments — confirm the intended convention.
+ Matrix normalization = new Matrix();
+ normalization.setRectToRect(
+ new RectF(0, 0, mResolution.getWidth(), mResolution.getHeight()),
+ new RectF(0, 0, 1, 1), Matrix.ScaleToFit.FILL);
+ matrix.postConcat(normalization);
+
+ return matrix;
+ }
+
+ // Ready once setTransformationInfo() has supplied both the crop rect and the resolution.
+ private boolean isTransformationInfoReady() {
+ return mSurfaceCropRect != null && mResolution != null;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/PreviewViewMeteringPointFactory.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/PreviewViewMeteringPointFactory.java
new file mode 100644
index 000000000..db4b46425
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/PreviewViewMeteringPointFactory.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camerax.controller;
+
+import android.graphics.Matrix;
+import android.graphics.PointF;
+import android.os.Build;
+import android.util.Size;
+
+import androidx.annotation.AnyThread;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RestrictTo;
+import androidx.annotation.UiThread;
+import androidx.camera.core.MeteringPointFactory;
+
+
+/**
+ * A {@link MeteringPointFactory} that maps PreviewView touch coordinates to normalized
+ * surface coordinates using the matrix supplied by PreviewTransformation.
+ */
+@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+public class PreviewViewMeteringPointFactory extends MeteringPointFactory {
+
+ // (2, 2) lies outside the normalized output range, marking the point as invalid.
+ static final PointF INVALID_POINT = new PointF(2F, 2F);
+
+ @NonNull
+ private final PreviewTransformation mPreviewTransformation;
+
+ // Cached PreviewView-to-normalized-surface matrix; null while not computable.
+ @GuardedBy("this")
+ @Nullable
+ private Matrix mMatrix;
+
+ PreviewViewMeteringPointFactory(@NonNull PreviewTransformation previewTransformation) {
+ mPreviewTransformation = previewTransformation;
+ }
+
+ @AnyThread
+ @NonNull
+ @Override
+ protected PointF convertPoint(float x, float y) {
+ float[] point = new float[]{x, y};
+ synchronized (this) {
+ if (mMatrix == null) {
+ // Transformation is not ready; report an out-of-range point.
+ return INVALID_POINT;
+ }
+ mMatrix.mapPoints(point);
+ }
+ return new PointF(point[0], point[1]);
+ }
+
+ /**
+ * Recomputes the cached matrix for the given PreviewView size and layout direction.
+ */
+ @UiThread
+ void recalculate(@NonNull Size previewViewSize, int layoutDirection) {
+ synchronized (this) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
+ return;
+ }
+ // An empty view cannot produce a valid mapping; invalidate the cache.
+ if (previewViewSize.getWidth() == 0 || previewViewSize.getHeight() == 0) {
+ mMatrix = null;
+ return;
+ }
+ mMatrix = mPreviewTransformation.getPreviewViewToNormalizedSurfaceMatrix(
+ previewViewSize,
+ layoutDirection);
+ }
+ }
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/RotationReceiver.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/RotationReceiver.java
new file mode 100644
index 000000000..02da6b660
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/RotationReceiver.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camerax.controller;
+
+import android.content.Context;
+import android.view.OrientationEventListener;
+import android.view.Surface;
+
+import androidx.annotation.NonNull;
+
+
+public abstract class RotationReceiver {
+
+ private static final int INVALID_SURFACE_ROTATION = -1;
+
+ // Synthetic access
+ @SuppressWarnings("WeakerAccess")
+ int mRotation = INVALID_SURFACE_ROTATION;
+
+ private final OrientationEventListener mOrientationEventListener;
+
+ /**
+ * Creates a receiver backed by an {@link OrientationEventListener} that buckets the raw
+ * device orientation into {@link Surface} rotation constants.
+ */
+ public RotationReceiver(@NonNull Context context) {
+ mOrientationEventListener = new OrientationEventListener(context) {
+ @Override
+ public void onOrientationChanged(int orientation) {
+ if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
+ // Short-circuit if orientation is unknown. Unknown rotation can't be handled
+ // so it shouldn't be sent.
+ return;
+ }
+
+ // Bucket the orientation (degrees) into the nearest quadrant, each 90° wide
+ // and centered on 0/270/180/90.
+ int newRotation;
+ if (orientation >= 315 || orientation < 45) {
+ newRotation = Surface.ROTATION_0;
+ } else if (orientation >= 225) {
+ newRotation = Surface.ROTATION_90;
+ } else if (orientation >= 135) {
+ newRotation = Surface.ROTATION_180;
+ } else {
+ newRotation = Surface.ROTATION_270;
+ }
+ // Only notify on actual changes to avoid redundant callbacks.
+ if (mRotation != newRotation) {
+ mRotation = newRotation;
+ onRotationChanged(newRotation);
+ }
+ }
+ };
+ }
+
+ /**
+ * Checks if the RotationReceiver can detect orientation changes.
+ *
+ * @return true if the device orientation can be detected.
+ * @see OrientationEventListener#canDetectOrientation()
+ */
+ public boolean canDetectOrientation() {
+ return mOrientationEventListener.canDetectOrientation();
+ }
+
+ /**
+ * Enables the RotationReceiver so it will monitor the sensor and call onRotationChanged when
+ * the device orientation changes.
+ *
+ * By default, the receiver is not enabled. Delegates to the wrapped
+ * {@link OrientationEventListener}.
+ *
+ * @see OrientationEventListener#enable()
+ */
+ public void enable() {
+ mOrientationEventListener.enable();
+ }
+
+ /**
+ * Disables the RotationReceiver. Delegates to the wrapped {@link OrientationEventListener}.
+ *
+ * @see OrientationEventListener#disable()
+ */
+ public void disable() {
+ mOrientationEventListener.disable();
+ }
+
+ /**
+ * Called when the physical rotation of the device changes.
+ *
+ * The rotation is one of the {@link Surface} rotations mapped from orientation
+ * degrees.
+ *
+ * The vertices representation uses a float array to represent a rectangle with arbitrary
+ * rotation and rotation-direction. It could be otherwise represented by a triple of a
+ * {@link RectF}, a rotation degrees integer and a boolean flag for the rotation-direction
+ * (clockwise v.s. counter-clockwise).
+ *
+ * TODO(b/179827713): merge this with {@link androidx.camera.core.internal.utils.ImageUtil}.
+ *
+ * @hide
+ */
+@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+public class TransformUtils {
+
+ // Normalized space (-1, -1) - (1, 1).
+ public static final RectF NORMALIZED_RECT = new RectF(-1, -1, 1, 1);
+
+ private TransformUtils() {
+ }
+
+ /**
+ * Gets the size of the {@link Rect}.
+ *
+ * @return a {@link Size} whose dimensions are the rect's width and height.
+ */
+ @NonNull
+ public static Size rectToSize(@NonNull Rect rect) {
+ return new Size(rect.width(), rect.height());
+ }
+
+ /**
+ * Converts an array of vertices to a {@link RectF}.
+ *
+ * The input is 4 (x, y) pairs; the result is their axis-aligned bounding box.
+ */
+ @NonNull
+ public static RectF verticesToRect(@NonNull float[] vertices) {
+ return new RectF(
+ min(vertices[0], vertices[2], vertices[4], vertices[6]),
+ min(vertices[1], vertices[3], vertices[5], vertices[7]),
+ max(vertices[0], vertices[2], vertices[4], vertices[6]),
+ max(vertices[1], vertices[3], vertices[5], vertices[7])
+ );
+ }
+
+ /**
+ * Returns the max value of the four arguments.
+ */
+ public static float max(float value1, float value2, float value3, float value4) {
+ return Math.max(Math.max(value1, value2), Math.max(value3, value4));
+ }
+
+ /**
+ * Returns the min value of the four arguments.
+ */
+ public static float min(float value1, float value2, float value3, float value4) {
+ return Math.min(Math.min(value1, value2), Math.min(value3, value4));
+ }
+
+ /**
+ * Converts {@link Surface} rotation to rotation degrees: 90, 180, 270 or 0.
+ *
+ * @throws IllegalStateException if the value is not one of the Surface.ROTATION_* constants.
+ */
+ public static int surfaceRotationToRotationDegrees(int rotationValue) {
+ switch (rotationValue) {
+ case Surface.ROTATION_0:
+ return 0;
+ case Surface.ROTATION_90:
+ return 90;
+ case Surface.ROTATION_180:
+ return 180;
+ case Surface.ROTATION_270:
+ return 270;
+ default:
+ throw new IllegalStateException("Unexpected rotation value " + rotationValue);
+ }
+ }
+
+ /**
+ * Returns true if the rotation degrees is 90 or 270.
+ *
+ * @throws IllegalArgumentException if the value is not 0, 90, 180 or 270.
+ */
+ public static boolean is90or270(int rotationDegrees) {
+ if (rotationDegrees == 90 || rotationDegrees == 270) {
+ return true;
+ }
+ if (rotationDegrees == 0 || rotationDegrees == 180) {
+ return false;
+ }
+ throw new IllegalArgumentException("Invalid rotation degrees: " + rotationDegrees);
+ }
+
+ /**
+ * Converts a {@link Size} to a float array of vertices.
+ *
+ * Vertices are listed clockwise from the top-left corner:
+ * (0, 0), (w, 0), (w, h), (0, h).
+ */
+ @NonNull
+ public static float[] sizeToVertices(@NonNull Size size) {
+ return new float[]{0, 0, size.getWidth(), 0, size.getWidth(), size.getHeight(), 0,
+ size.getHeight()};
+ }
+
+ /**
+ * Converts a {@link RectF} defined by top, left, right and bottom to an array of vertices.
+ *
+ * Vertices are listed clockwise from the top-left corner:
+ * (left, top), (right, top), (right, bottom), (left, bottom).
+ */
+ @NonNull
+ public static float[] rectToVertices(@NonNull RectF rectF) {
+ return new float[]{rectF.left, rectF.top, rectF.right, rectF.top, rectF.right, rectF.bottom,
+ rectF.left, rectF.bottom};
+ }
+
+ /**
+ * Checks if aspect ratio matches while tolerating rounding error.
+ *
+ * One example of the usage is comparing the viewport-based crop rect from different use
+ * cases. The crop rect is rounded because pixels are integers, which may introduce an error
+ * when we check if the aspect ratio matches. For example, when PreviewView's
+ * width/height are prime numbers 601x797, the crop rect from other use cases cannot have a
+ * matching aspect ratio even if they are based on the same viewport. This method checks the
+ * aspect ratio while tolerating a rounding error.
+ *
+ * @param size1 the rounded size1
+ * @param isAccurate1 if size1 is accurate. e.g. it's true if it's the PreviewView's
+ * dimension which viewport is based on
+ * @param size2 the rounded size2
+ * @param isAccurate2 if size2 is accurate.
+ */
+ public static boolean isAspectRatioMatchingWithRoundingError(
+ @NonNull Size size1, boolean isAccurate1, @NonNull Size size2, boolean isAccurate2) {
+ // The crop rect coordinates are rounded values. Each value is at most .5 away from their
+ // true values. So the width/height, which is the difference of 2 coordinates, are at most
+ // 1.0 away from their true value.
+ // First figure out the possible range of the aspect ratio's true value.
+ // NOTE(review): a height of 1 makes the upper bound infinite (float division by zero),
+ // which then matches everything — confirm 1-pixel sizes cannot occur here.
+ float ratio1UpperBound;
+ float ratio1LowerBound;
+ if (isAccurate1) {
+ ratio1UpperBound = (float) size1.getWidth() / size1.getHeight();
+ ratio1LowerBound = ratio1UpperBound;
+ } else {
+ ratio1UpperBound = (size1.getWidth() + 1F) / (size1.getHeight() - 1F);
+ ratio1LowerBound = (size1.getWidth() - 1F) / (size1.getHeight() + 1F);
+ }
+ float ratio2UpperBound;
+ float ratio2LowerBound;
+ if (isAccurate2) {
+ ratio2UpperBound = (float) size2.getWidth() / size2.getHeight();
+ ratio2LowerBound = ratio2UpperBound;
+ } else {
+ ratio2UpperBound = (size2.getWidth() + 1F) / (size2.getHeight() - 1F);
+ ratio2LowerBound = (size2.getWidth() - 1F) / (size2.getHeight() + 1F);
+ }
+ // Then we check if the true value range overlaps.
+ return ratio1UpperBound >= ratio2LowerBound && ratio2UpperBound >= ratio1LowerBound;
+ }
+
+ /**
+ * Gets the transform from one {@link Rect} to another with rotation degrees.
+ *
+ * Following is how the source is mapped to the target with a 90° rotation. The rect
+ * {@code (a, b, c, d)} is mapped to {@code (d, a, b, c)}.
+ *
+ *
+ * Device specific quirks depend on device properties, including the manufacturer
+ * ({@link android.os.Build#MANUFACTURER}), model ({@link android.os.Build#MODEL}) and OS
+ * level ({@link android.os.Build.VERSION#SDK_INT}).
+ *
+ * Device specific quirks are lazily loaded, i.e. They are loaded the first time they're needed.
+ */
+@SuppressLint("RestrictedApi")
+public class DeviceQuirks {
+
+ @NonNull
+ private static final Quirks QUIRKS;
+
+ static {
+ QUIRKS = new Quirks(DeviceQuirksLoader.loadQuirks());
+ }
+
+ private DeviceQuirks() {
+ }
+
+ /**
+ * Retrieves a specific device {@link Quirk} instance given its type.
+ *
+ * @param quirkClass The type of device quirk to retrieve.
+ * @return A device {@link Quirk} instance of the provided type, or {@code null} if it isn't
+ * found.
+ */
+ @Nullable
+ public static The symptom is, the preview's FOV is always 1/3 wider than intended. For example, if the
+ * preview Surface is 800x600, it's actually has a FOV of 1066x600 with the same center point,
+ * but squeezed to fit the 800x600 buffer.
+ */
+public class PreviewOneThirdWiderQuirk implements Quirk {
+
+ private static final String SAMSUNG_A3_2017 = "A3Y17LTE"; // b/180121821
+ private static final String SAMSUNG_J5_PRIME = "ON5XELTE"; // b/183329599
+
+ static boolean load() {
+ boolean isSamsungJ5PrimeAndApi26 =
+ SAMSUNG_J5_PRIME.equals(Build.DEVICE.toUpperCase()) && Build.VERSION.SDK_INT >= 26;
+ boolean isSamsungA3 = SAMSUNG_A3_2017.equals(Build.DEVICE.toUpperCase());
+ return isSamsungJ5PrimeAndApi26 || isSamsungA3;
+ }
+
+ /**
+ * The mount that the crop rect needs to be scaled in x.
+ */
+ public float getCropRectScaleX() {
+ return 0.75f;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/SurfaceViewStretchedQuirk.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/SurfaceViewStretchedQuirk.java
new file mode 100644
index 000000000..e796167cd
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/SurfaceViewStretchedQuirk.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camerax.controller.internal.compat.quirk;
+
+import android.os.Build;
+
+import androidx.camera.core.impl.Quirk;
+
+/**
+ * A quirk where SurfaceView is stretched.
+ *
+ * On Samsung Galaxy Z Fold2, transform APIs (e.g. View#setScaleX) do not work as intended.
+ * b/129403806
+ */
+public class SurfaceViewStretchedQuirk implements Quirk {
+
+ // Samsung Galaxy Z Fold2 b/129403806
+ private static final String SAMSUNG = "SAMSUNG";
+ private static final String GALAXY_Z_FOLD_2 = "F2Q";
+
+ /**
+ * Returns true on the Samsung Galaxy Z Fold2, where SurfaceView transforms misbehave.
+ */
+ static boolean load() {
+ // Use locale-independent upper-casing: the default locale (e.g. Turkish) changes how
+ // 'i' is upper-cased and could break these comparisons.
+ return SAMSUNG.equals(Build.MANUFACTURER.toUpperCase(java.util.Locale.ROOT))
+ && GALAXY_Z_FOLD_2.equals(Build.DEVICE.toUpperCase(java.util.Locale.ROOT));
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/TextureViewRotationQuirk.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/TextureViewRotationQuirk.java
new file mode 100644
index 000000000..369cdd370
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/TextureViewRotationQuirk.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camerax.controller.internal.compat.quirk;
+
+import android.os.Build;
+import android.view.TextureView;
+
+import androidx.camera.core.impl.Quirk;
+
+/**
+ * A quirk that requires applying extra rotation on {@link TextureView}
+ *
+ * On certain devices, the rotation of the output is incorrect. One example is b/177561470.
+ * In which case, the extra rotation is needed to correct the output on {@link TextureView}.
+ */
+public class TextureViewRotationQuirk implements Quirk {
+
+ private static final String FAIRPHONE = "Fairphone";
+ private static final String FAIRPHONE_2_MODEL = "FP2";
+
+ static boolean load() {
+ return isFairphone2();
+ }
+
+ /**
+ * Gets correction needed for the given camera.
+ */
+ public int getCorrectionRotation(boolean isFrontCamera) {
+ if (isFairphone2() && isFrontCamera) {
+ // On Fairphone2, the front camera output on TextureView is rotated 180°.
+ // See: b/177561470.
+ return 180;
+ }
+ return 0;
+ }
+
+ private static boolean isFairphone2() {
+ return FAIRPHONE.equalsIgnoreCase(Build.MANUFACTURER)
+ && FAIRPHONE_2_MODEL.equalsIgnoreCase(Build.MODEL);
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/filter/CameraIdLensFacingCameraFilter.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/filter/CameraIdLensFacingCameraFilter.java
new file mode 100644
index 000000000..69f706626
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/filter/CameraIdLensFacingCameraFilter.java
@@ -0,0 +1,53 @@
+package com.quark.quamera.camerax.filter;
+
+import android.annotation.SuppressLint;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import androidx.annotation.NonNull;
+import androidx.camera.core.CameraInfo;
+import androidx.camera.core.CameraSelector;
+import androidx.camera.core.impl.CameraInfoInternal;
+import androidx.camera.core.impl.LensFacingCameraFilter;
+import androidx.core.util.Preconditions;
+
+/**
+ * @author : liujian
+ * @date : 2021/7/30
+ */
+@SuppressLint({"UnsafeExperimentalUsageError", "RestrictedApi", "UnsafeOptInUsageError"})
+public class CameraIdLensFacingCameraFilter extends LensFacingCameraFilter {
+
+ @CameraSelector.LensFacing
+ private final int mLensFacing;
+ @NonNull
+ private final String mCameraId;
+
+ /**
+ * Creates a filter that stores a specific camera id in addition to the lens facing.
+ *
+ * @param lensFacing the lens-facing direction passed to {@link LensFacingCameraFilter}.
+ * @param cameraId the camera id this filter keeps; used by the filter body below.
+ */
+ @SuppressLint("RestrictedApi")
+ public CameraIdLensFacingCameraFilter(int lensFacing, @NonNull String cameraId) {
+ super(lensFacing);
+ this.mLensFacing = lensFacing;
+ this.mCameraId = cameraId;
+ }
+
+ @NonNull
+ @Override
+ public List
+ * By default CameraMetadataNative frees its C++ memory when a GC runs, but not promptly:
+ * native-heap growth does not trigger GC on its own, yet it can still cause an OOM.
+ *
+ * The current approach is implemented via reflection, which carries some compatibility
+ * risk; a GC-plus-memory-watermark approach would be more compatible but likely slower.
+ */
+public class CameraXCaptureResultManager {
+
+ public final static String TAG = "Camera2MemManager";
+ private int mMaxCache = 40;
+ private boolean mEnable = true;
+
+ // Default cache capacity of 40, mirroring the mMaxCache field's initial value.
+ public CameraXCaptureResultManager() {
+ this(40);
+ }
+
+ /**
+ * @param maxCache cache capacity — TODO confirm how mMaxCache is consumed (its usage is
+ * not visible in this chunk).
+ */
+ public CameraXCaptureResultManager(int maxCache) {
+ this.mMaxCache = maxCache;
+ // The whole mechanism can be switched off via the global camera configuration.
+ mEnable = CameraInit.getConfig().enableHighMemoryGC();
+ }
+
+
+ private final Queue
+ * By default CameraMetadataNative frees its C++ memory when a GC runs, but not promptly,
+ * because native-heap growth does not trigger GC on its own — which can lead to OOM.
+ */
+ /**
+ * Immediately releases the native memory held by a capture result's CameraMetadataNative.
+ *
+ * Uses reflection to read the hidden {@code CaptureResult.mResults} field and invoke
+ * {@code CameraMetadataNative.finalize()} on it. Reflection handles are cached in static
+ * fields so the lookup cost is paid only once.
+ *
+ * @return true if the native metadata was released; false when disabled, the input is
+ * null/empty, or reflection failed.
+ */
+ @SuppressLint("RestrictedApi")
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ public boolean releaseCaptureResultNow(Camera2CameraCaptureResult captureResult) {
+ if (!mEnable) {
+ return false;
+ }
+
+ if (captureResult == null || captureResult.getCaptureResult() == null) {
+ return false;
+ }
+ try {
+ // Lazily resolve and cache the reflection handles on first use.
+ if (sCameraMetaDataNativeField == null) {
+ sCameraMetaDataNativeField = CaptureResult.class.getDeclaredField("mResults");
+ sCameraMetaDataNativeField.setAccessible(true);
+ }
+
+ if (sClass == null) {
+ sClass = Class.forName("android.hardware.camera2.impl.CameraMetadataNative");
+ }
+
+ if (sCameraMetaDataCloseMethod == null) {
+ sCameraMetaDataCloseMethod = CameraReflection.findMethod(sClass, "finalize");
+ sCameraMetaDataCloseMethod.setAccessible(true);
+ }
+
+ } catch (NoSuchFieldException e) {
+ e.printStackTrace();
+ } catch (ClassNotFoundException e) {
+ e.printStackTrace();
+ } catch (NoSuchMethodException e) {
+ e.printStackTrace();
+ }
+
+ // NOTE(review): reflection failures above are only printed; callers just observe
+ // "false" here. Consider routing these through the project logger.
+ if (sCameraMetaDataCloseMethod == null || sCameraMetaDataNativeField == null) {
+ return false;
+ }
+
+ try {
+ long timeState = captureResult.getTimestamp();
+ sCameraMetaDataCloseMethod.invoke(sCameraMetaDataNativeField.get(captureResult.getCaptureResult()));
+ Log.d(DefaultCameraRender.TAG, "CaptureFrameHelper.release --- " + timeState);
+ return true;
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return false;
+ }
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/utils/FocalLengthInfo.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/utils/FocalLengthInfo.java
new file mode 100644
index 000000000..0e5204c70
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/utils/FocalLengthInfo.java
@@ -0,0 +1,21 @@
+package com.quark.quamera.camerax.utils;
+
+public class FocalLengthInfo implements Comparable
+ * A GLSurfaceView provides the following features:
+ *
+ * For more information about how to use OpenGL, read the
+ * OpenGL developer guide.
+ * Typically you use GLSurfaceView by subclassing it and overriding one or more of the
+ * View system input event methods. If your application does not need to override event
+ * methods then GLSurfaceView can be used as-is. For the most part
+ * GLSurfaceView behavior is customized by calling "set" methods rather than by subclassing.
+ * For example, unlike a regular View, drawing is delegated to a separate Renderer object which
+ * is registered with the GLSurfaceView
+ * using the {@link #setRenderer(Renderer)} call.
+ *
+ *
+ *
+ *
+ * By default GLSurfaceView chooses a EGLConfig that has an RGB_888 pixel format,
+ * with at least a 16-bit depth buffer and no stencil.
+ *
+ * If you would prefer a different EGLConfig
+ * you can override the default behavior by calling one of the
+ * setEGLConfigChooser methods.
+ *
+ *
+ *
+ *
+ *
+ *
+ * To handle an event you will typically subclass GLSurfaceView and override the
+ * appropriate method, just as you would with any other View. However, when handling
+ * the event, you may need to communicate with the Renderer object
+ * that's running in the rendering thread. You can do this using any
+ * standard Java cross-thread communication mechanism. In addition,
+ * one relatively easy way to communicate with your renderer is
+ * to call
+ * {@link #queueEvent(Runnable)}. For example:
+ *
+ * Wrapping is typically used for debugging purposes.
+ *
+ * The default value is null.
+ * @param glWrapper the new GLWrapper
+ */
+ public void setGLWrapper(GLWrapper glWrapper) {
+ mGLWrapper = glWrapper;
+ }
+
+ /**
+ * Set the debug flags to a new value. The value is
+ * constructed by OR-together zero or more
+ * of the DEBUG_CHECK_* constants. The debug flags take effect
+ * whenever a surface is created. The default value is zero.
+ * @param debugFlags the new debug flags
+ * @see #DEBUG_CHECK_GL_ERROR
+ * @see #DEBUG_LOG_GL_CALLS
+ */
+ public void setDebugFlags(int debugFlags) {
+ mDebugFlags = debugFlags;
+ }
+
+ /**
+ * Get the current value of the debug flags.
+ * @return the current value of the debug flags.
+ */
+ public int getDebugFlags() {
+ return mDebugFlags;
+ }
+
+ /**
+ * Control whether the EGL context is preserved when the GLSurfaceView is paused and
+ * resumed.
+ *
+ * If set to true, then the EGL context may be preserved when the GLSurfaceView is paused.
+ *
+ * Prior to API level 11, whether the EGL context is actually preserved or not
+ * depends upon whether the Android device can support an arbitrary number of
+ * EGL contexts or not. Devices that can only support a limited number of EGL
+ * contexts must release the EGL context in order to allow multiple applications
+ * to share the GPU.
+ *
+ * If set to false, the EGL context will be released when the GLSurfaceView is paused,
+ * and recreated when the GLSurfaceView is resumed.
+ *
+ *
+ * The default is false.
+ *
+ * @param preserveOnPause preserve the EGL context when paused
+ */
+ public void setPreserveEGLContextOnPause(boolean preserveOnPause) {
+ mPreserveEGLContextOnPause = preserveOnPause;
+ }
+
+ /**
+ * @return true if the EGL context will be preserved when paused
+ */
+ public boolean getPreserveEGLContextOnPause() {
+ return mPreserveEGLContextOnPause;
+ }
+
+ /**
+ * Set the renderer associated with this view. Also starts the thread that
+ * will call the renderer, which in turn causes the rendering to start.
+ * This method should be called once and only once in the life-cycle of
+ * a GLSurfaceView.
+ * The following GLSurfaceView methods can only be called before
+ * setRenderer is called:
+ *
+ * The following GLSurfaceView methods can only be called after
+ * setRenderer is called:
+ * If this method is
+ * called, it must be called before {@link #setRenderer(Renderer)}
+ * is called.
+ *
+ * If this method is not called, then by default
+ * a context will be created with no shared context and
+ * with a null attribute list.
+ */
+ public void setEGLContextFactory(EGLContextFactory factory) {
+ checkRenderThreadState();
+ mEGLContextFactory = factory;
+ }
+
+ /**
+ * Install a custom EGLWindowSurfaceFactory.
+ * If this method is
+ * called, it must be called before {@link #setRenderer(Renderer)}
+ * is called.
+ *
+ * If this method is not called, then by default
+ * a window surface will be created with a null attribute list.
+ */
+ public void setEGLWindowSurfaceFactory(EGLWindowSurfaceFactory factory) {
+ checkRenderThreadState();
+ mEGLWindowSurfaceFactory = factory;
+ }
+
+ /**
+ * Install a custom EGLConfigChooser.
+ * If this method is
+ * called, it must be called before {@link #setRenderer(Renderer)}
+ * is called.
+ *
+ * If no setEGLConfigChooser method is called, then by default the
+ * view will choose an EGLConfig that is compatible with the current
+ * android.view.Surface, with a depth buffer depth of
+ * at least 16 bits.
+ * @param configChooser
+ */
+ public void setEGLConfigChooser(EGLConfigChooser configChooser) {
+ checkRenderThreadState();
+ mEGLConfigChooser = configChooser;
+ }
+
+ /**
+ * Install a config chooser which will choose a config
+ * as close to 16-bit RGB as possible, with or without an optional depth
+ * buffer as close to 16-bits as possible.
+ * If this method is
+ * called, it must be called before {@link #setRenderer(Renderer)}
+ * is called.
+ *
+ * If no setEGLConfigChooser method is called, then by default the
+ * view will choose an RGB_888 surface with a depth buffer depth of
+ * at least 16 bits.
+ *
+ * @param needDepth
+ */
+ public void setEGLConfigChooser(boolean needDepth) {
+ setEGLConfigChooser(new SimpleEGLConfigChooser(needDepth));
+ }
+
+ /**
+ * Install a config chooser which will choose a config
+ * with at least the specified depthSize and stencilSize,
+ * and exactly the specified redSize, greenSize, blueSize and alphaSize.
+ * If this method is
+ * called, it must be called before {@link #setRenderer(Renderer)}
+ * is called.
+ *
+ * If no setEGLConfigChooser method is called, then by default the
+ * view will choose an RGB_888 surface with a depth buffer depth of
+ * at least 16 bits.
+ *
+ */
+ public void setEGLConfigChooser(int redSize, int greenSize, int blueSize,
+ int alphaSize, int depthSize, int stencilSize) {
+ setEGLConfigChooser(new ComponentSizeChooser(redSize, greenSize,
+ blueSize, alphaSize, depthSize, stencilSize));
+ }
+
+ /**
+ * Inform the default EGLContextFactory and default EGLConfigChooser
+ * which EGLContext client version to pick.
+ * Use this method to create an OpenGL ES 2.0-compatible context.
+ * Example:
+ * Note: Activities which require OpenGL ES 2.0 should indicate this by
+ * setting &lt;uses-feature android:glEsVersion="0x00020000" /&gt; in the activity's
+ * AndroidManifest.xml file.
+ * If this method is called, it must be called before {@link #setRenderer(Renderer)}
+ * is called.
+ * This method only affects the behavior of the default EGLContexFactory and the
+ * default EGLConfigChooser. If
+ * {@link #setEGLContextFactory(EGLContextFactory)} has been called, then the supplied
+ * EGLContextFactory is responsible for creating an OpenGL ES 2.0-compatible context.
+ * If
+ * {@link #setEGLConfigChooser(EGLConfigChooser)} has been called, then the supplied
+ * EGLConfigChooser is responsible for choosing an OpenGL ES 2.0-compatible config.
+ * @param version The EGLContext client version to choose. Use 2 for OpenGL ES 2.0
+ */
+ public void setEGLContextClientVersion(int version) {
+ checkRenderThreadState();
+ mEGLContextClientVersion = version;
+ }
+
+ /**
+ * Set the rendering mode. When renderMode is
+ * RENDERMODE_CONTINUOUSLY, the renderer is called
+ * repeatedly to re-render the scene. When renderMode
+ * is RENDERMODE_WHEN_DIRTY, the renderer only rendered when the surface
+ * is created, or when {@link #requestRender} is called. Defaults to RENDERMODE_CONTINUOUSLY.
+ *
+ * Using RENDERMODE_WHEN_DIRTY can improve battery life and overall system performance
+ * by allowing the GPU and CPU to idle when the view does not need to be updated.
+ *
+ * This method can only be called after {@link #setRenderer(Renderer)}
+ *
+ * @param renderMode one of the RENDERMODE_X constants
+ * @see #RENDERMODE_CONTINUOUSLY
+ * @see #RENDERMODE_WHEN_DIRTY
+ */
+ public void setRenderMode(int renderMode) {
+ mGLThread.setRenderMode(renderMode);
+ }
+
+ /**
+ * Get the current rendering mode. May be called
+ * from any thread. Must not be called before a renderer has been set.
+ * @return the current rendering mode.
+ * @see #RENDERMODE_CONTINUOUSLY
+ * @see #RENDERMODE_WHEN_DIRTY
+ */
+ public int getRenderMode() {
+ return mGLThread.getRenderMode();
+ }
+
+ /**
+ * Request that the renderer render a frame.
+ * This method is typically used when the render mode has been set to
+ * {@link #RENDERMODE_WHEN_DIRTY}, so that frames are only rendered on demand.
+ * May be called
+ * from any thread. Must not be called before a renderer has been set.
+ */
+ public void requestRender() {
+ mGLThread.requestRender();
+ }
+
+ /**
+ * This method is part of the SurfaceHolder.Callback interface, and is
+ * not normally called or subclassed by clients of GLSurfaceView.
+ */
+ public void surfaceCreated(SurfaceHolder holder) {
+ mGLThread.surfaceCreated();
+ }
+
+ /**
+ * This method is part of the SurfaceHolder.Callback interface, and is
+ * not normally called or subclassed by clients of GLSurfaceView.
+ */
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ // Surface will be destroyed when we return
+ mGLThread.surfaceDestroyed();
+ }
+
+ /**
+ * This method is part of the SurfaceHolder.Callback interface, and is
+ * not normally called or subclassed by clients of GLSurfaceView.
+ */
+ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+ mGLThread.onWindowResize(w, h);
+ }
+
+
+
+
+ /**
+ * This method is part of the SurfaceHolder.Callback2 interface, and is
+ * not normally called or subclassed by clients of GLSurfaceView.
+ */
+ @Override
+ public void surfaceRedrawNeededAsync(SurfaceHolder holder, Runnable finishDrawing) {
+ if (mGLThread != null) {
+ mGLThread.requestRenderAndNotify(finishDrawing);
+ }
+ }
+
+ /**
+ * This method is part of the SurfaceHolder.Callback2 interface, and is
+ * not normally called or subclassed by clients of GLSurfaceView.
+ */
+ @Deprecated
+ @Override
+ public void surfaceRedrawNeeded(SurfaceHolder holder) {
+ // Since we are part of the framework we know only surfaceRedrawNeededAsync
+ // will be called.
+ }
+
+
+ /**
+ * Pause the rendering thread, optionally tearing down the EGL context
+ * depending upon the value of {@link #setPreserveEGLContextOnPause(boolean)}.
+ *
+ * This method should be called when it is no longer desirable for the
+ * GLSurfaceView to continue rendering, such as in response to
+ * {@link android.app.Activity#onStop Activity.onStop}.
+ *
+ * Must not be called before a renderer has been set.
+ */
+ public void onPause() {
+ mGLThread.onPause();
+ }
+
+ /**
+ * Resumes the rendering thread, re-creating the OpenGL context if necessary. It
+ * is the counterpart to {@link #onPause()}.
+ *
+ * This method should typically be called in
+ * {@link android.app.Activity#onStart Activity.onStart}.
+ *
+ * Must not be called before a renderer has been set.
+ */
+ public void onResume() {
+ mGLThread.onResume();
+ }
+
+ /**
+ * Queue a runnable to be run on the GL rendering thread. This can be used
+ * to communicate with the Renderer on the rendering thread.
+ * Must not be called before a renderer has been set.
+ * @param r the runnable to be run on the GL rendering thread.
+ */
+ public void queueEvent(Runnable r) {
+ mGLThread.queueEvent(r);
+ }
+
+ /**
+ * This method is used as part of the View class and is not normally
+ * called or subclassed by clients of GLSurfaceView.
+ */
+ @Override
+ protected void onAttachedToWindow() {
+ super.onAttachedToWindow();
+ if (LOG_ATTACH_DETACH) {
+ Log.d(TAG, "onAttachedToWindow reattach =" + mDetached);
+ }
+ if (mDetached && (mRenderer != null)) {
+ int renderMode = RENDERMODE_CONTINUOUSLY;
+ if (mGLThread != null) {
+ renderMode = mGLThread.getRenderMode();
+ }
+ mGLThread = new GLThread(mThisWeakRef);
+ if (renderMode != RENDERMODE_CONTINUOUSLY) {
+ mGLThread.setRenderMode(renderMode);
+ }
+ mGLThread.start();
+ }
+ mDetached = false;
+ }
+
+ // Added method: lets the main thread block until the GL thread is torn down.
+ // Must NOT be called from the GL thread itself — that would deadlock, since
+ // the GL thread would be waiting for its own exit.
+ public void requestExitAndWait(){
+ if (mGLThread != null) {
+ if(mGLThread.getId() ==Thread.currentThread().getId()){
+ throw new RuntimeException("don't call this from GLThread thread or it is a guaranteed !! deadlock!");
+ }
+ mGLThread.requestExitAndWait();
+ }
+ }
+
+ /**
+  * Part of the View lifecycle; not normally called by clients.
+  * Stops the GL thread synchronously and marks the view detached so
+  * {@code onAttachedToWindow} knows to recreate the thread on re-attach.
+  */
+ @Override
+ protected void onDetachedFromWindow() {
+ if (LOG_ATTACH_DETACH) {
+ Log.d(TAG, "onDetachedFromWindow");
+ }
+ if (mGLThread != null) {
+ // Blocks until the render thread has fully exited and released EGL state.
+ mGLThread.requestExitAndWait();
+ }
+ mDetached = true;
+ super.onDetachedFromWindow();
+ }
+
+ // ----------------------------------------------------------------------
+
+ /**
+ * An interface used to wrap a GL interface.
+ * Typically
+ * used for implementing debugging and tracing on top of the default
+ * GL interface. You would typically use this by creating your own class
+ * that implemented all the GL methods by delegating to another GL instance.
+ * Then you could add your own behavior before or after calling the
+ * delegate. All the GLWrapper would do was instantiate and return the
+ * wrapper GL instance:
+ *
+ * The renderer is responsible for making OpenGL calls to render a frame.
+ *
+ * GLSurfaceView clients typically create their own classes that implement
+ * this interface, and then call {@link AndroidGLSurfaceView#setRenderer} to
+ * register the renderer with the GLSurfaceView.
+ *
+ *
+ * For more information about how to use OpenGL, read the
+ * OpenGL developer guide.
+ *
+ * Called when the rendering thread
+ * starts and whenever the EGL context is lost. The EGL context will typically
+ * be lost when the Android device awakes after going to sleep.
+ *
+ * Since this method is called at the beginning of rendering, as well as
+ * every time the EGL context is lost, this method is a convenient place to put
+ * code to create resources that need to be created when the rendering
+ * starts, and that need to be recreated when the EGL context is lost.
+ * Textures are an example of a resource that you might want to create
+ * here.
+ *
+ * Note that when the EGL context is lost, all OpenGL resources associated
+ * with that context will be automatically deleted. You do not need to call
+ * the corresponding "glDelete" methods such as glDeleteTextures to
+ * manually delete these lost resources.
+ *
+ * @param gl the GL interface. Use
+ * Called after the surface is created and whenever
+ * the OpenGL ES surface size changes.
+ *
+ * Typically you will set your viewport here. If your camera
+ * is fixed then you could also set your projection matrix here:
+ *
+ * This method is responsible for drawing the current frame.
+ *
+ * The implementation of this method typically looks like this:
+ *
+ * This interface must be implemented by clients wishing to call
+ * {@link AndroidGLSurfaceView#setEGLContextFactory(EGLContextFactory)}
+ */
+ public interface EGLContextFactory {
+ EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig);
+ void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context);
+ }
+
+ /**
+  * Default {@link EGLContextFactory}: creates an EGL context for the client
+  * version selected via {@code setEGLContextClientVersion}, or with no
+  * attribute list when the version was left at 0.
+  */
+ private class DefaultContextFactory implements EGLContextFactory {
+ // EGL_CONTEXT_CLIENT_VERSION attribute key (EGL 1.3 / EGL_KHR_create_context).
+ private int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig config) {
+ int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, mEGLContextClientVersion,
+ EGL10.EGL_NONE };
+
+ // Pass the attribute list only when a client version was explicitly chosen.
+ return egl.eglCreateContext(display, config, EGL10.EGL_NO_CONTEXT,
+ mEGLContextClientVersion != 0 ? attrib_list : null);
+ }
+
+ public void destroyContext(EGL10 egl, EGLDisplay display,
+ EGLContext context) {
+ if (!egl.eglDestroyContext(display, context)) {
+ Log.e("DefaultContextFactory", "display:" + display + " context: " + context);
+ if (LOG_THREADS) {
+ Log.i("DefaultContextFactory", "tid=" + Thread.currentThread().getId());
+ }
+ // Fixed message: the failing API is eglDestroyContext (was "eglDestroyContex").
+ EglHelper.throwEglException("eglDestroyContext", egl.eglGetError());
+ }
+ }
+ }
+
+ /**
+ * An interface for customizing the eglCreateWindowSurface and eglDestroySurface calls.
+ *
+ * This interface must be implemented by clients wishing to call
+ * {@link AndroidGLSurfaceView#setEGLWindowSurfaceFactory(EGLWindowSurfaceFactory)}
+ */
+ public interface EGLWindowSurfaceFactory {
+ /**
+ * @return null if the surface cannot be constructed.
+ */
+ EGLSurface createWindowSurface(EGL10 egl, EGLDisplay display, EGLConfig config,
+ Object nativeWindow);
+ void destroySurface(EGL10 egl, EGLDisplay display, EGLSurface surface);
+ }
+
+ /**
+  * Default {@link EGLWindowSurfaceFactory}: creates a window surface for the
+  * given native window with a null attribute list, tolerating the race where
+  * the underlying surface has already been torn down.
+  */
+ private static class DefaultWindowSurfaceFactory implements EGLWindowSurfaceFactory {
+
+ public EGLSurface createWindowSurface(EGL10 egl, EGLDisplay display,
+ EGLConfig config, Object nativeWindow) {
+ EGLSurface result = null;
+ try {
+ result = egl.eglCreateWindowSurface(display, config, nativeWindow, null);
+ } catch (IllegalArgumentException e) {
+ // This exception indicates that the surface flinger surface
+ // is not valid. This can happen if the surface flinger surface has
+ // been torn down, but the application has not yet been
+ // notified via SurfaceHolder.Callback.surfaceDestroyed.
+ // In theory the application should be notified first,
+ // but in practice sometimes it is not. See b/4588890
+ Log.e(TAG, "eglCreateWindowSurface", e);
+ }
+ // May be null when the native window was already invalid (see catch above).
+ return result;
+ }
+
+ public void destroySurface(EGL10 egl, EGLDisplay display,
+ EGLSurface surface) {
+ egl.eglDestroySurface(display, surface);
+ }
+ }
+
+ /**
+ * An interface for choosing an EGLConfig configuration from a list of
+ * potential configurations.
+ *
+ * This interface must be implemented by clients wishing to call
+ * {@link AndroidGLSurfaceView#setEGLConfigChooser(EGLConfigChooser)}
+ */
+ public interface EGLConfigChooser {
+ /**
+ * Choose a configuration from the list. Implementors typically
+ * implement this method by calling
+ * {@link EGL10#eglChooseConfig} and iterating through the results. Please consult the
+ * EGL specification available from The Khronos Group to learn how to call eglChooseConfig.
+ * @param egl the EGL10 for the current display.
+ * @param display the current display.
+ * @return the chosen configuration.
+ */
+ EGLConfig chooseConfig(EGL10 egl, EGLDisplay display);
+ }
+
+ /**
+  * Base chooser: runs the standard two-pass eglChooseConfig query (count,
+  * then fetch) against a caller-supplied attribute spec and delegates the
+  * final pick to the subclass.
+  */
+ private abstract class BaseConfigChooser
+ implements EGLConfigChooser {
+ public BaseConfigChooser(int[] configSpec) {
+ mConfigSpec = filterConfigSpec(configSpec);
+ }
+
+ public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
+ // Pass 1: query only the number of matching configs.
+ int[] num_config = new int[1];
+ if (!egl.eglChooseConfig(display, mConfigSpec, null, 0,
+ num_config)) {
+ throw new IllegalArgumentException("eglChooseConfig failed");
+ }
+
+ int numConfigs = num_config[0];
+
+ if (numConfigs <= 0) {
+ throw new IllegalArgumentException(
+ "No configs match configSpec");
+ }
+
+ // Pass 2: fetch the matching configs into an array of the reported size.
+ EGLConfig[] configs = new EGLConfig[numConfigs];
+ if (!egl.eglChooseConfig(display, mConfigSpec, configs, numConfigs,
+ num_config)) {
+ throw new IllegalArgumentException("eglChooseConfig#2 failed");
+ }
+ EGLConfig config = chooseConfig(egl, display, configs);
+ if (config == null) {
+ throw new IllegalArgumentException("No config chosen");
+ }
+ return config;
+ }
+
+ // Subclass picks the best config from the EGL-reported candidates.
+ abstract EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+ EGLConfig[] configs);
+
+ protected int[] mConfigSpec;
+
+ // Appends EGL_RENDERABLE_TYPE for ES 2/3 contexts by rewriting the spec's
+ // trailing EGL_NONE terminator; assumes the spec is well formed.
+ private int[] filterConfigSpec(int[] configSpec) {
+ if (mEGLContextClientVersion != 2 && mEGLContextClientVersion != 3) {
+ return configSpec;
+ }
+ /* We know none of the subclasses define EGL_RENDERABLE_TYPE.
+ * And we know the configSpec is well formed.
+ */
+ int len = configSpec.length;
+ int[] newConfigSpec = new int[len + 2];
+ System.arraycopy(configSpec, 0, newConfigSpec, 0, len-1);
+ newConfigSpec[len-1] = EGL10.EGL_RENDERABLE_TYPE;
+ if (mEGLContextClientVersion == 2) {
+ newConfigSpec[len] = EGL14.EGL_OPENGL_ES2_BIT; /* EGL_OPENGL_ES2_BIT */
+ } else {
+ newConfigSpec[len] = EGLExt.EGL_OPENGL_ES3_BIT_KHR; /* EGL_OPENGL_ES3_BIT_KHR */
+ }
+ newConfigSpec[len+1] = EGL10.EGL_NONE;
+ return newConfigSpec;
+ }
+ }
+
+ /**
+ * Choose a configuration with exactly the specified r,g,b,a sizes,
+ * and at least the specified depth and stencil sizes.
+ */
+ /**
+  * Choose a configuration with exactly the specified r,g,b,a sizes,
+  * and at least the specified depth and stencil sizes.
+  */
+ private class ComponentSizeChooser extends BaseConfigChooser {
+ public ComponentSizeChooser(int redSize, int greenSize, int blueSize,
+ int alphaSize, int depthSize, int stencilSize) {
+ super(new int[] {
+ EGL10.EGL_RED_SIZE, redSize,
+ EGL10.EGL_GREEN_SIZE, greenSize,
+ EGL10.EGL_BLUE_SIZE, blueSize,
+ EGL10.EGL_ALPHA_SIZE, alphaSize,
+ EGL10.EGL_DEPTH_SIZE, depthSize,
+ EGL10.EGL_STENCIL_SIZE, stencilSize,
+ EGL10.EGL_NONE});
+ // Scratch buffer reused by findConfigAttrib to avoid per-query allocation.
+ mValue = new int[1];
+ mRedSize = redSize;
+ mGreenSize = greenSize;
+ mBlueSize = blueSize;
+ mAlphaSize = alphaSize;
+ mDepthSize = depthSize;
+ mStencilSize = stencilSize;
+ }
+
+ /**
+  * Returns the first config whose depth/stencil meet the minimums and whose
+  * RGBA sizes match exactly, or null if none qualifies.
+  */
+ @Override
+ public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+ EGLConfig[] configs) {
+ for (EGLConfig config : configs) {
+ int d = findConfigAttrib(egl, display, config,
+ EGL10.EGL_DEPTH_SIZE, 0);
+ int s = findConfigAttrib(egl, display, config,
+ EGL10.EGL_STENCIL_SIZE, 0);
+ // Depth/stencil: at-least semantics; color channels: exact match.
+ if ((d >= mDepthSize) && (s >= mStencilSize)) {
+ int r = findConfigAttrib(egl, display, config,
+ EGL10.EGL_RED_SIZE, 0);
+ int g = findConfigAttrib(egl, display, config,
+ EGL10.EGL_GREEN_SIZE, 0);
+ int b = findConfigAttrib(egl, display, config,
+ EGL10.EGL_BLUE_SIZE, 0);
+ int a = findConfigAttrib(egl, display, config,
+ EGL10.EGL_ALPHA_SIZE, 0);
+ if ((r == mRedSize) && (g == mGreenSize)
+ && (b == mBlueSize) && (a == mAlphaSize)) {
+ return config;
+ }
+ }
+ }
+ return null;
+ }
+
+ // Reads one attribute of a config; falls back to defaultValue on failure.
+ private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+ EGLConfig config, int attribute, int defaultValue) {
+
+ if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
+ return mValue[0];
+ }
+ return defaultValue;
+ }
+
+ private int[] mValue;
+ // Subclasses can adjust these values:
+ protected int mRedSize;
+ protected int mGreenSize;
+ protected int mBlueSize;
+ protected int mAlphaSize;
+ protected int mDepthSize;
+ protected int mStencilSize;
+ }
+
+ /**
+ * This class will choose a RGB_888 surface with
+ * or without a depth buffer.
+ *
+ */
+ /**
+  * Convenience chooser: RGB_888 with no alpha, and either a >=16-bit depth
+  * buffer or none at all.
+  */
+ private class SimpleEGLConfigChooser extends ComponentSizeChooser {
+ public SimpleEGLConfigChooser(boolean withDepthBuffer) {
+ super(8, 8, 8, 0, withDepthBuffer ? 16 : 0, 0);
+ }
+ }
+
+ /**
+ * An EGL helper class.
+ */
+
+ private static class EglHelper {
+ public EglHelper(WeakReference
+ * CameraMetadataNative 默认情况下是执行GC的时候,会释放C++内存,但是会不及时,因为Native Heap上涨不会导致GC触发,但是会导致OOM
+ *
+ * 目前的方式通过反射实现,存在一定的兼容性风险;相比之下,GC+内存检测的兼容性好一点,但是性能可能没有那么好
+ */
+public class Camera2CaptureResultManager {
+
+ public final static String TAG = "Camera2MemManager";
+ private int mMaxCache = 40;
+ private boolean mEnable = true;
+
+ public Camera2CaptureResultManager() {
+ this(40);
+ }
+
+ public Camera2CaptureResultManager(int maxCache) {
+ this.mMaxCache = maxCache;
+ mEnable = CameraInit.getConfig().enableHighMemoryGC();
+ }
+
+
+ private final Queue
+ * CameraMetadataNative 默认情况下是执行GC的时候,会释放C++内存,但是会不及时,因为Native Heap上涨不会导致GC触发,但是会导致OOM
+ */
+ /**
+  * Eagerly frees the native memory backing a {@link CameraCaptureResult} by
+  * reflectively invoking CameraMetadataNative#finalize(); the native C++
+  * allocation is otherwise only released when GC finalizes the Java object.
+  *
+  * NOTE(review): mirrors the CameraX-side implementation of the same name;
+  * relies on hidden framework internals ("mResults", finalize()) and tolerates
+  * all reflection failures by returning false.
+  *
+  * @param captureResult result to release; null-safe
+  * @return true if the native metadata was released now, false otherwise
+  */
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ public boolean releaseCaptureResultNow(CameraCaptureResult captureResult) {
+ // Feature-gated by CameraInit config (enableHighMemoryGC).
+ if (!mEnable) {
+ return false;
+ }
+
+ if (captureResult == null || captureResult.getCaptureResult() == null) {
+ return false;
+ }
+ try {
+ // Reflection handles are cached in statics: looked up once per process.
+ if (sCameraMetaDataNativeField == null) {
+ sCameraMetaDataNativeField = CaptureResult.class.getDeclaredField("mResults");
+ sCameraMetaDataNativeField.setAccessible(true);
+ }
+
+ if (sClass == null) {
+ sClass = Class.forName("android.hardware.camera2.impl.CameraMetadataNative");
+ }
+
+ if (sCameraMetaDataCloseMethod == null) {
+ sCameraMetaDataCloseMethod = CameraReflection.findMethod(sClass, "finalize");
+ sCameraMetaDataCloseMethod.setAccessible(true);
+ }
+
+ } catch (NoSuchFieldException e) {
+ e.printStackTrace();
+ } catch (ClassNotFoundException e) {
+ e.printStackTrace();
+ } catch (NoSuchMethodException e) {
+ e.printStackTrace();
+ }
+
+ // Reflection unavailable on this OS build — degrade to normal GC behavior.
+ if (sCameraMetaDataCloseMethod == null || sCameraMetaDataNativeField == null) {
+ return false;
+ }
+
+ try {
+ long timeState = captureResult.getTimestamp();
+ // Invokes CameraMetadataNative.finalize() to free the C++ buffer immediately.
+ sCameraMetaDataCloseMethod.invoke(sCameraMetaDataNativeField.get(captureResult.getCaptureResult()));
+ Log.d(DefaultCameraRender.TAG, "CaptureFrameHelper.release --- " + timeState);
+ return true;
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return false;
+ }
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraInit.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraInit.java
new file mode 100644
index 000000000..25b1909b2
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraInit.java
@@ -0,0 +1,52 @@
+package com.quark.quamera.util;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 2021/4/15
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import androidx.annotation.NonNull;
+
+/**
+ * Process-wide camera environment configuration holder.
+ *
+ * Hosts a single static {@link CameraEvnConfig} that other camera components
+ * read (e.g. the high-memory-GC switch consumed by the capture-result
+ * managers). Call {@link #init} once at startup to override the defaults.
+ */
+public class CameraInit {
+
+
+ // Starts with the built-in defaults; replaced wholesale by init().
+ private static CameraEvnConfig sConfig = defaultConfig();
+
+ private static CameraEvnConfig defaultConfig() {
+ return new CameraEvnConfig()
+ .setDebuggable(false);
+ }
+
+ /**
+  * Mutable, chainable configuration bean ("Evn" spelling kept for
+  * compatibility with existing callers).
+  */
+ public static class CameraEvnConfig {
+ private boolean mDebuggable = false;
+ // Enables eager native-memory release of capture results (on by default).
+ private boolean mEnableHighMemoryGC = true;
+
+ public CameraEvnConfig setDebuggable(boolean debuggable) {
+ mDebuggable = debuggable;
+ return CameraEvnConfig.this;
+ }
+
+ public boolean isDebuggable() {
+ return mDebuggable;
+ }
+
+ public boolean enableHighMemoryGC() {
+ return mEnableHighMemoryGC;
+ }
+
+ public CameraEvnConfig setEnableHighMemoryGC(boolean enableHighMemoryGC) {
+ mEnableHighMemoryGC = enableHighMemoryGC;
+ return CameraEvnConfig.this;
+ }
+ }
+
+ /** Installs the process-wide configuration; not thread-synchronized. */
+ public static void init(@NonNull CameraEvnConfig config) {
+ sConfig = config;
+ }
+
+ public static CameraEvnConfig getConfig() {
+ return sConfig;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraLogger.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraLogger.java
new file mode 100644
index 000000000..59244a744
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraLogger.java
@@ -0,0 +1,129 @@
+package com.quark.quamera.util;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-11-19
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.hardware.camera2.CameraDevice;
+import android.os.Build;
+
+import androidx.annotation.RequiresApi;
+
+
+public class CameraLogger {
+
+ public interface ILogger {
+ void onError(String tag, String message);
+
+ void onInfo(String tag, String message);
+
+ void onTestLongLog(String tag, String message);
+
+ void uploadError(String tag, String message);
+ }
+
+ private static ILogger sLogger;
+
+ public static void setLoggerImp(ILogger logger) {
+ sLogger = logger;
+ }
+
+
+
+ /**
+  * Logs an error through the installed {@link ILogger}, formatting
+  * {@code message} with {@code other} via String.format when arguments are
+  * supplied. No-op when no logger has been set.
+  */
+ public static void e(String tag, String message, Object... other) {
+ try {
+
+ if (sLogger == null) {
+ return;
+ }
+
+ if (other != null) {
+ message = String.format(message, other);
+ }
+
+ sLogger.onError(tag, message);
+ } catch (Exception e) {
+ // Previously an empty catch that silently swallowed formatting/logger
+ // failures; report them the same way the sibling i()/testLongLog()/
+ // uploadError() methods do.
+ CameraShould.fail(message, e);
+ }
+ }
+
+
+ /**
+  * Logs an info message through the installed {@link ILogger}, formatting
+  * {@code message} with {@code other} when arguments are supplied.
+  * No-op when no logger has been set; failures are routed to CameraShould.fail.
+  */
+ public static void i(String tag, String message, Object... other) {
+ try {
+
+ if (sLogger == null) {
+ return;
+ }
+
+ if (other != null) {
+ message = String.format(message, other);
+ }
+
+ sLogger.onInfo(tag, message);
+
+ } catch (Exception e) {
+ CameraShould.fail(message, e);
+ }
+ }
+
+ /**
+  * Verbose log channel intended for general debugging / test builds only.
+  * No-op when no logger has been set.
+  *
+  * NOTE(review): unlike e()/i(), this formats unconditionally without the
+  * {@code other != null} guard — a null varargs array with format specifiers
+  * would throw here (then be reported via CameraShould.fail); confirm intended.
+  */
+ @TestOnly
+ public static void testLongLog(String tag, String message, Object... other) {
+ try {
+ if (sLogger == null) {
+ return;
+ }
+ message = String.format(message, other);
+
+ sLogger.onTestLongLog(tag, message);
+
+ } catch (Exception e) {
+ CameraShould.fail(message, e);
+ }
+ }
+
+ /**
+  * Forwards an error message to the logger's remote-upload channel
+  * (no local formatting). No-op when no logger has been set.
+  */
+ public static void uploadError(String tag, String message) {
+ try {
+ if (sLogger == null) {
+ return;
+ }
+
+ sLogger.uploadError(tag, message);
+
+ } catch (Exception e) {
+ CameraShould.fail(message, e);
+ }
+ }
+
+
+ /**
+  * Maps a camera error code to a human-readable name for logs: the
+  * CameraDevice.StateCallback ERROR_* constants plus the local
+  * ERROR_TRY_REOPEN_ERROR; unknown codes fall through to a generic string.
+  */
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ public static String getCameraErrorMessage(int errorCode) {
+ switch (errorCode) {
+// case Camera2CameraImpl.ERROR_NONE:
+// return "ERROR_NONE";
+ case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
+ return "ERROR_CAMERA_DEVICE";
+ case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
+ return "ERROR_CAMERA_DISABLED";
+ case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
+ return "ERROR_CAMERA_IN_USE";
+ case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
+ return "ERROR_CAMERA_SERVICE";
+ case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
+ return "ERROR_MAX_CAMERAS_IN_USE";
+ case ERROR_TRY_REOPEN_ERROR:
+ return "ERROR_TRY_REOPEN_ERROR_OVER_MAX_TIMES";
+ default: // fall out
+ }
+ return "UNKNOWN ERROR ( " + errorCode + " ) ";
+ }
+
+ public static final int ERROR_TRY_REOPEN_ERROR = -10;
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraReflection.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraReflection.java
new file mode 100644
index 000000000..a390cd1bd
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraReflection.java
@@ -0,0 +1,205 @@
+package com.quark.quamera.util;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Source code from Tinker
+ */
+public final class CameraReflection {
+
+ private CameraReflection() {
+
+ }
+
+ /**
+ * Locates a given field anywhere in the class inheritance hierarchy.
+ *
+ * @param instance an object to search the field from.
+ * @param name field name
+ * @return a field object
+ * @throws NoSuchFieldException if the field cannot be located
+ */
+ static Field findField(Object instance, String name) throws NoSuchFieldException {
+ for (Class> clazz = instance.getClass(); clazz != null; clazz = clazz.getSuperclass()) {
+ try {
+ Field field = clazz.getDeclaredField(name);
+ if (!field.isAccessible()) {
+ field.setAccessible(true);
+ }
+ return field;
+ } catch (NoSuchFieldException e) {
+ // ignore and search next
+ }
+ }
+
+ throw new NoSuchFieldException("Field " + name + " not found in " + instance.getClass());
+ }
+
+ static Field findField(Class> originClazz, String name) throws NoSuchFieldException {
+ for (Class> clazz = originClazz; clazz != null; clazz = clazz.getSuperclass()) {
+ try {
+ Field field = clazz.getDeclaredField(name);
+
+ if (!field.isAccessible()) {
+ field.setAccessible(true);
+ }
+
+ return field;
+ } catch (NoSuchFieldException e) {
+ // ignore and search next
+ }
+ }
+
+ throw new NoSuchFieldException("Field " + name + " not found in " + originClazz);
+ }
+
+
+ /**
+ * Locates a given method anywhere in the class inheritance hierarchy.
+ *
+ * @param clazz a class to search the method from.
+ * @param name method name
+ * @param parameterTypes method parameter types
+ * @return a method object
+ * @throws NoSuchMethodException if the method cannot be located
+ */
+ public static Method findMethod(Class> clazz, String name, Class>... parameterTypes)
+ throws NoSuchMethodException {
+ for (; clazz != null; clazz = clazz.getSuperclass()) {
+ try {
+ Method method = clazz.getDeclaredMethod(name, parameterTypes);
+
+ if (!method.isAccessible()) {
+ method.setAccessible(true);
+ }
+
+ return method;
+ } catch (NoSuchMethodException e) {
+ // ignore and search next
+ }
+ }
+
+ throw new NoSuchMethodException("Method "
+ + name
+ + " with parameters "
+ + Arrays.asList(parameterTypes)
+ + " not found in " + clazz);
+ }
+
+ /**
+ * Locates a given constructor anywhere in the class inheritance hierarchy.
+ *
+ * @param instance an object to search the constructor from.
+ * @param parameterTypes constructor parameter types
+ * @return a constructor object
+ * @throws NoSuchMethodException if the constructor cannot be located
+ */
+ static Constructor> findConstructor(Object instance, Class>... parameterTypes)
+ throws NoSuchMethodException {
+ for (Class> clazz = instance.getClass(); clazz != null; clazz = clazz.getSuperclass()) {
+ try {
+ Constructor> ctor = clazz.getDeclaredConstructor(parameterTypes);
+
+ if (!ctor.isAccessible()) {
+ ctor.setAccessible(true);
+ }
+
+ return ctor;
+ } catch (NoSuchMethodException e) {
+ // ignore and search next
+ }
+ }
+
+ throw new NoSuchMethodException("Constructor"
+ + " with parameters "
+ + Arrays.asList(parameterTypes)
+ + " not found in " + instance.getClass());
+ }
+
+ /**
+ * Locates a given constructor anywhere in the class inheritance hierarchy.
+ *
+ * @param clazz a class to search the method from.
+ * @param parameterTypes constructor parameter types
+ * @return a constructor object
+ * @throws NoSuchMethodException if the constructor cannot be located
+ */
+ static Constructor> findConstructor(Class> clazz, Class>... parameterTypes)
+ throws NoSuchMethodException {
+ for (; clazz != null; clazz = clazz.getSuperclass()) {
+ try {
+ Constructor> ctor = clazz.getDeclaredConstructor(parameterTypes);
+
+ if (!ctor.isAccessible()) {
+ ctor.setAccessible(true);
+ }
+
+ return ctor;
+ } catch (NoSuchMethodException e) {
+ // ignore and search next
+ }
+ }
+
+ throw new NoSuchMethodException("Constructor"
+ + " with parameters "
+ + Arrays.asList(parameterTypes)
+ + " not found in " + clazz);
+ }
+
+ /**
+ * Replace the value of a field containing a non-null array, by a new array containing the
+ * elements of the original array plus the elements of extraElements.
+ *
+ * @param instance the instance whose field is to be modified.
+ * @param fieldName the field to modify.
+ * @param extraElements elements to append at the end of the array.
+ */
+ static void expandFieldArray(Object instance, String fieldName, Object[] extraElements)
+ throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
+ Field jlrField = findField(instance, fieldName);
+
+ Object[] original = (Object[]) jlrField.get(instance);
+ Object[] combined = (Object[]) Array.newInstance(original.getClass().getComponentType(), original.length + extraElements.length);
+
+ // NOTE: changed to copy extraElements first, for patch load first
+
+ System.arraycopy(extraElements, 0, combined, 0, extraElements.length);
+ System.arraycopy(original, 0, combined, extraElements.length, original.length);
+
+ jlrField.set(instance, combined);
+ }
+
+ /**
+ * Replace the value of a field containing a non-null array, by a new array containing the
+ * elements of the original array plus the elements of extraElements.
+ *
+ * @param instance the instance whose field is to be modified.
+ * @param fieldName the field to modify.
+ */
+ static void reduceFieldArray(Object instance, String fieldName, int reduceSize)
+ throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
+ if (reduceSize <= 0) {
+ return;
+ }
+
+ Field jlrField = findField(instance, fieldName);
+
+ Object[] original = (Object[]) jlrField.get(instance);
+ int finalLength = original.length - reduceSize;
+
+ if (finalLength <= 0) {
+ return;
+ }
+
+ Object[] combined = (Object[]) Array.newInstance(original.getClass().getComponentType(), finalLength);
+
+ System.arraycopy(original, reduceSize, combined, 0, finalLength);
+
+ jlrField.set(instance, combined);
+ }
+}
+
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraShould.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraShould.java
new file mode 100644
index 000000000..d15f19c1b
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/CameraShould.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2004 - 2016 UCWeb Inc. All Rights Reserved.
+ * Creation : 2016/5/5
+ * Author : lzm89888@alibaba-inc.com
+ */
+
+package com.quark.quamera.util;
+
+import android.text.TextUtils;
+import android.util.Log;
+
+import java.util.Collection;
+
+
+/**
+ * 一个简单的断言工具
+ */
+public class CameraShould {
+    private static final String TAG = "Should";
+
+
+    private static boolean sShouldThrowError = true;
+
+    /**
+     * Sets whether an {@link AssertionError} is thrown when an assertion fails. When disabled,
+     * failures are only reported via {@link CameraLogger#uploadError}. Typically development
+     * and dogfood builds throw, while release builds only log, for maximum availability.
+     *
+     * @param enable whether to throw {@link AssertionError} on failure
+     */
+    public static void setThrowAssertionError(boolean enable) {
+        sShouldThrowError = enable;
+    }
+
+    public static void notNull(Object obj) {
+        notNull(obj, "notNull assert fail");
+    }
+
+    public static void notNull(Object obj, String msg) {
+        notNullIf(obj, true, msg);
+    }
+
+    /**
+     * Asserts that {@code obj} is non-null when {@code ifCondition} is true.
+     */
+    public static void notNullIf(Object obj, boolean ifCondition) {
+        notNullIf(obj, ifCondition, "notNullIf assert fail");
+    }
+
+    public static void notNullIf(Object obj, boolean ifCondition, String msg) {
+        if (ifCondition && obj == null) {
+            throwAssertionError(msg);
+        }
+    }
+
+    private static void throwAssertionError(String msg) {
+        throwAssertionError(msg, null);
+    }
+
+    private static void throwAssertionError(String msg, Throwable t) {
+        if (sShouldThrowError) {
+            if (t != null) {
+                Log.e("throwAssertionError", msg);
+                // Fix: AssertionError(Throwable) dropped the descriptive message;
+                // keep both the message and the cause.
+                throw new AssertionError(msg, t);
+            } else {
+                throw new AssertionError(msg);
+            }
+        } else {
+            CameraLogger.uploadError(msg, Log.getStackTraceString(t));
+        }
+    }
+
+    public static void beNullIf(Object obj, boolean ifCondition) {
+        beNullIf(obj, ifCondition, "beNullIf assert fail");
+    }
+
+    public static void beNullIf(Object obj, boolean ifCondition, String msg) {
+        if (ifCondition && obj != null) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void notEmpty(CharSequence str) {
+        notEmptyIf(str, true, "notEmpty assert fail");
+    }
+
+    public static void notEmptyIf(CharSequence str, boolean ifCondition) {
+        notEmptyIf(str, ifCondition, "notEmptyIf assert fail");
+    }
+
+    public static void notEmptyIf(CharSequence str, boolean ifCondition, String msg) {
+        if (ifCondition && TextUtils.isEmpty(str)) {
+            throwAssertionError(msg);
+        }
+    }
+
+    // Wildcard instead of raw Collection; element type is irrelevant here.
+    public static void notEmpty(Collection<?> c) {
+        notEmptyIf(c, true, "notEmpty assert fail");
+    }
+
+    public static void notEmptyIf(Collection<?> c, boolean ifCondition, String msg) {
+        if (ifCondition && (c == null || c.isEmpty())) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void beTrue(boolean b) {
+        beTrueIf(b, true);
+    }
+
+    public static void beTrue(boolean b, String msg) {
+        beTrueIf(b, true, msg);
+    }
+
+    /**
+     * Asserts that {@code b} is true when {@code ifCondition} is true.
+     */
+    public static void beTrueIf(boolean b, boolean ifCondition) {
+        beTrueIf(b, ifCondition, "beTrueIf assert fail");
+    }
+
+    public static void beTrueIf(boolean b, boolean ifCondition, String msg) {
+        if (ifCondition && !b) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void beFalse(boolean b, String msg) {
+        beFalseIf(b, true, msg);
+    }
+
+    public static void beFalse(boolean b) {
+        beFalseIf(b, true);
+    }
+
+    public static void beFalseIf(boolean b, boolean ifCondition) {
+        beFalseIf(b, ifCondition, "beFalseIf assert fail");
+    }
+
+    public static void beFalseIf(boolean b, boolean ifCondition, String msg) {
+        if (ifCondition && b) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void fail() {
+        fail("assert fail");
+    }
+
+    public static void fail(String msg) {
+        throwAssertionError(msg);
+    }
+
+    public static void fail(String msg, Throwable t) {
+        throwAssertionError(msg, t);
+    }
+
+    public static void beEqual(int origin, int expect) {
+        if (origin != expect) {
+            throwAssertionError("" + origin + " not equal to " + expect);
+        }
+    }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/CollectionUtil.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/CollectionUtil.java
new file mode 100644
index 000000000..083729a8d
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/CollectionUtil.java
@@ -0,0 +1,63 @@
+package com.quark.quamera.util;
+
+import androidx.annotation.RestrictTo;
+
+import java.util.Collection;
+
+@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+public class CollectionUtil {
+
+ /**
+ * An easy way to test a Collection is empty or not.
+ *
+ * @param container
+ * @return {@code true} if the container is empty
+ */
+ public static boolean isEmpty(Collection> container) {
+ return container == null || container.isEmpty();
+ }
+
+
+ public interface Predicate On some devices, the orientation value in the embedded exif of the captured images may
+ * be 0 but the image buffer data actually is not rotated to upright orientation by HAL. For
+ * these devices, the exif orientation value should not be used for the final output image.
+ *
+ * @param image The captured image object.
+ */
+    @RequiresApi(api = Build.VERSION_CODES.KITKAT)
+    public static boolean shouldUseExifOrientation(@NonNull Image image) {
+        // Only JPEG carries EXIF orientation, and a few known device models report
+        // an orientation of 0 while the buffer is not actually upright.
+        return image.getFormat() == ImageFormat.JPEG
+                && !isHuaweiMate20Lite()
+                && !isHonor9X();
+    }
+
+    /** Huawei Mate 20 Lite — EXIF orientation is not trustworthy on this model. */
+    private static boolean isHuaweiMate20Lite() {
+        return "HUAWEI".equalsIgnoreCase(Build.BRAND)
+                && "SNE-LX1".equalsIgnoreCase(Build.MODEL);
+    }
+
+    /** Honor 9X — EXIF orientation is not trustworthy on this model. */
+    private static boolean isHonor9X() {
+        return "HONOR".equalsIgnoreCase(Build.BRAND)
+                && "STK-LX1".equalsIgnoreCase(Build.MODEL);
+    }
+
+
+    /**
+     * Maps an EXIF orientation constant to a rotation in degrees.
+     *
+     * @return The degree of rotation (eg. 0, 90, 180, 270).
+     */
+    public static int getRotation(int exifRotation) {
+        switch (exifRotation) {
+            case ExifInterface.ORIENTATION_ROTATE_90:
+            case ExifInterface.ORIENTATION_TRANSVERSE:
+                return 90;
+            case ExifInterface.ORIENTATION_ROTATE_180:
+            case ExifInterface.ORIENTATION_FLIP_VERTICAL:
+                return 180;
+            case ExifInterface.ORIENTATION_ROTATE_270:
+            case ExifInterface.ORIENTATION_TRANSPOSE:
+                return 270;
+            default:
+                // ORIENTATION_NORMAL, ORIENTATION_UNDEFINED and
+                // ORIENTATION_FLIP_HORIZONTAL all map to no rotation.
+                return 0;
+        }
+    }
+
+    /**
+     * Returns the aspect ratio adjusted for the given rotation: inverted for 90°/270°,
+     * otherwise an equal-valued copy.
+     */
+    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+    @NonNull
+    public static Rational getRotatedAspectRatio(
+            @IntRange(from = 0, to = 359) int rotationDegrees,
+            @NonNull Rational aspectRatio) {
+        final boolean sideways = rotationDegrees == 90 || rotationDegrees == 270;
+        return sideways
+                ? inverseRational(aspectRatio)
+                : new Rational(aspectRatio.getNumerator(), aspectRatio.getDenominator());
+    }
+
+    /** Swaps numerator and denominator; null passes through untouched. */
+    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+    private static Rational inverseRational(Rational rational) {
+        if (rational == null) {
+            return null;
+        }
+        return new Rational(rational.getDenominator(), rational.getNumerator());
+    }
+
+    /**
+     * Builds a vertex-coordinate transform matrix for the given scale type.
+     *
+     * @param scaleType currently only {@link ViewPort#FILL_CENTER} is fully supported
+     * @param outMatrix receives the resulting 4x4 column-major matrix
+     * @return true on success; false for invalid sizes or unsupported scale types
+     */
+    public static boolean getScaleVertexMatrix(@ViewPort.ScaleType int scaleType, float[] outMatrix,
+                                               int in_width, int in_height, int out_width, int out_height) {
+        if (in_width <= 0
+                || in_height <= 0
+                || out_width <= 0
+                || out_height <= 0) {
+            return false;
+        }
+
+        switch (scaleType) {
+            case ViewPort.FILL_CENTER: {
+                Matrix.setIdentityM(outMatrix, 0);
+                float input_radio = (float) in_height / (float) in_width;
+                float out_radio = (float) out_height / (float) out_width;
+
+                // Vertex coordinate space is [-1, 1], so a plain scale is sufficient.
+                if (input_radio > out_radio) {
+                    Matrix.scaleM(outMatrix, 0, 1f, (float) input_radio / (float) out_radio, 1.0F);
+                } else {
+                    Matrix.scaleM(outMatrix, 0, (float) out_radio / (float) input_radio, 1f, 1.0F);
+                }
+                return true;
+            }
+            case ViewPort.CENTER_INSIDE: {
+                Matrix.setIdentityM(outMatrix, 0);
+                float input_radio = (float) in_height / (float) in_width;
+                float out_radio = (float) out_height / (float) out_width;
+
+                Log.i("A_TAG", "in_height: " + in_height + " in_width: " + in_width + " " +
+                        " input_radio: " + input_radio + " out_height:" + out_height + " out_width:" + out_width + " out_radio: " + out_radio);
+                if (input_radio > out_radio) {
+                    Matrix.scaleM(outMatrix, 0, out_radio / input_radio, 1f, 1.0F);
+                } else {
+                    Matrix.scaleM(outMatrix, 0, 1f, input_radio / out_radio, 1.0F);
+                }
+                break;
+            }
+            default: {
+                CameraShould.fail("not support now");
+            }
+        }
+        // NOTE(review): CENTER_INSIDE computes a matrix but breaks out and returns false
+        // here — looks unintentional; confirm whether callers check the return value.
+        return false;
+    }
+
+    /**
+     * Builds a vertex transform that vertically clips the [-1, 1] vertex space.
+     *
+     * <p>The original doc referred to a {@code heightPercentage} parameter that does not exist;
+     * the actual parameter is {@code marginPercentage}.
+     *
+     * @param outMatrix        receives the resulting 4x4 column-major matrix
+     * @param marginPercentage margin fractions; only indices [1] and [3] are read, and from the
+     *                         translate/scale below they appear to be the top and bottom
+     *                         margins — TODO confirm the index convention with callers
+     * @return always true
+     */
+    public static boolean getClipVertexMatrix(float[] outMatrix, float[] marginPercentage) {
+        Matrix.setIdentityM(outMatrix, 0);
+        Matrix.translateM(outMatrix, 0, 0, marginPercentage[3] - marginPercentage[1], 0.0F);
+        Matrix.scaleM(outMatrix, 0, 1f, 1 - marginPercentage[1] - marginPercentage[3], 1.0F);
+        return true;
+    }
+
+    /**
+     * Maps {@code desRect}, expressed in a destination space of size {@code dstSize}, back into
+     * JPEG-buffer coordinates.
+     *
+     * <p>The transform normalises to [0, 1], undoes the JPEG rotation about the centre (0.5, 0.5),
+     * then scales and translates into {@code jpegRect}. The post-op order is significant.
+     *
+     * @param jpegRect     crop region in JPEG coordinates — assumed non-null; TODO confirm
+     * @param jpegRotation rotation already applied to the JPEG, in degrees
+     * @param desRect      rect to map, in destination coordinates
+     * @param dstSize      size of the destination space
+     * @return {@code desRect} expressed in JPEG coordinates, rounded to integers
+     */
+    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+    public static Rect convert2JpegRect(Rect jpegRect, int jpegRotation, Rect desRect, Size dstSize) {
+        RectF jpegShowClip = new RectF(desRect);
+        android.graphics.Matrix matrix = new android.graphics.Matrix();
+        matrix.postScale(1f / dstSize.getWidth(), 1f / dstSize.getHeight());
+        if (jpegRotation != 0) {
+            matrix.postRotate(-jpegRotation, 0.5f, 0.5f);
+        }
+
+        matrix.postScale(jpegRect.width(), jpegRect.height());
+        matrix.postTranslate(jpegRect.left, jpegRect.top);
+
+        matrix.mapRect(jpegShowClip);
+
+        Rect resultJpegShowClip = new Rect();
+        jpegShowClip.round(resultJpegShowClip);
+
+        return resultJpegShowClip;
+    }
+
+
+    // Scratch objects for rect math. Most are currently unused because the general
+    // surface-to-preview transform path below is commented out.
+    private final static RectF sPreviewScaleRect = new RectF();
+    private final static RectF sCameraSurfaceRect = new RectF();
+    private final static RectF sCameraShowRect = new RectF();
+    private final static android.graphics.Matrix sSurface2PreviewScaleRectTransform = new android.graphics.Matrix();
+
+    /**
+     * Computes, into {@code result}, the on-screen rect in which the camera content is shown.
+     *
+     * <p>Only {@link ViewPort#FILL_CENTER} is implemented. Indices of {@code marginPercentage}
+     * appear to be left/top/right/bottom fractions of the window size — TODO confirm.
+     * NOTE(review): right/bottom are computed as (1 - left - right) * width, i.e. the clipped
+     * width rather than the right-edge coordinate — confirm this is intended.
+     *
+     * @param result            receives the computed rect (unchanged for unsupported types)
+     * @param scaleType         one of {@link ViewPort}'s scale types
+     * @param marginPercentage  margin fractions of the window size
+     * @param cameraSurfaceSize camera surface {width, height}; currently unused
+     * @param windowSize        window {width, height}
+     */
+    public synchronized static void calculateCameraShowRect(RectF result,
+                                                            @ViewPort.ScaleType int scaleType,
+                                                            float[] marginPercentage,
+                                                            int[] cameraSurfaceSize, int[] windowSize) {
+
+
+        //        sPreviewScaleRect.set(0, 0,
+        //                (1 - marginPercentage[0] - marginPercentage[2]) * windowSize[0],
+        //                (1 - marginPercentage[1] - marginPercentage[3]) * windowSize[1]);
+        //
+        //        sCameraSurfaceRect.set(0, 0, cameraSurfaceSize[0], cameraSurfaceSize[1]);
+        //
+        //        sSurface2PreviewScaleRectTransform.reset();
+
+        switch (scaleType) {
+            case ViewPort.CENTER_INSIDE:
+                CameraShould.fail("not support now");
+                break;
+            case ViewPort.FILL_CENTER:
+                result.set(marginPercentage[0] * windowSize[0],
+                        marginPercentage[1] * windowSize[1],
+                        (1 - marginPercentage[0] - marginPercentage[2]) * windowSize[0],
+                        (1 - marginPercentage[1] - marginPercentage[3]) * windowSize[1]
+                );
+                return;
+            default:
+                CameraShould.fail("not support now");
+        }
+
+        //        sSurface2PreviewScaleRectTransform.mapRect(sCameraShowRect, sCameraSurfaceRect);
+
+
+    }
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/MatrixUtils.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/MatrixUtils.java
new file mode 100644
index 000000000..956d00a5c
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/MatrixUtils.java
@@ -0,0 +1,92 @@
+//
+// Source code recreated from a .class file by IntelliJ IDEA
+// (powered by Fernflower decompiler)
+//
+
+package com.quark.quamera.util;
+
+import android.opengl.Matrix;
+import android.os.Build;
+import android.util.Size;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+public class MatrixUtils {
+ public static final int TYPE_FITXY = 0;
+ public static final int TYPE_CENTERCROP = 1;
+ public static final int TYPE_CENTERINSIDE = 2;
+ public static final int TYPE_FITSTART = 3;
+ public static final int TYPE_FITEND = 4;
+
+ private MatrixUtils() {
+ }
+
+ public static void getMatrix(float[] matrix, int type, int imgWidth, int imgHeight, int viewWidth, int viewHeight) {
+ if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) {
+ float[] projection = new float[16];
+ float[] camera = new float[16];
+ if (type == 0) {
+ Matrix.orthoM(projection, 0, -1.0F, 1.0F, -1.0F, 1.0F, 1.0F, 3.0F);
+ Matrix.setLookAtM(camera, 0, 0.0F, 0.0F, 1.0F, 0.0F, 0.0F, 0.0F, 0.0F, 1.0F, 0.0F);
+ Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
+ return;
+ }
+
+ float sWhView = (float) viewWidth / (float) viewHeight;
+ float sWhImg = (float) imgWidth / (float) imgHeight;
+ if (sWhImg > sWhView) {
+ switch (type) {
+ case 1:
+ Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1.0F, 1.0F, 1.0F, 3.0F);
+ break;
+ case 2:
+ Matrix.orthoM(projection, 0, -1.0F, 1.0F, -sWhImg / sWhView, sWhImg / sWhView, 1.0F, 3.0F);
+ break;
+ case 3:
+ Matrix.orthoM(projection, 0, -1.0F, 1.0F, 1.0F - 2.0F * sWhImg / sWhView, 1.0F, 1.0F, 3.0F);
+ break;
+ case 4:
+ Matrix.orthoM(projection, 0, -1.0F, 1.0F, -1.0F, 2.0F * sWhImg / sWhView - 1.0F, 1.0F, 3.0F);
+ }
+ } else {
+ switch (type) {
+ case 1:
+ Matrix.orthoM(projection, 0, -1.0F, 1.0F, -sWhImg / sWhView, sWhImg / sWhView, 1.0F, 3.0F);
+ break;
+ case 2:
+ Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1.0F, 1.0F, 1.0F, 3.0F);
+ break;
+ case 3:
+ Matrix.orthoM(projection, 0, -1.0F, 2.0F * sWhView / sWhImg - 1.0F, -1.0F, 1.0F, 1.0F, 3.0F);
+ break;
+ case 4:
+ Matrix.orthoM(projection, 0, 1.0F - 2.0F * sWhView / sWhImg, 1.0F, -1.0F, 1.0F, 1.0F, 3.0F);
+ }
+ }
+
+ Matrix.setLookAtM(camera, 0, 0.0F, 0.0F, 1.0F, 0.0F, 0.0F, 0.0F, 0.0F, 1.0F, 0.0F);
+ Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
+ }
+
+ }
+
+ public static float[] flip(float[] m, boolean x, boolean y) {
+ if (x || y) {
+ Matrix.scaleM(m, 0, x ? -1.0F : 1.0F, y ? -1.0F : 1.0F, 1.0F);
+ }
+
+ return m;
+ }
+
+ public static float[] flipF(float[] matrix, boolean x, boolean y) {
+ if (x || y) {
+ Matrix.scaleM(matrix, 0, x ? -1.0F : 1.0F, y ? -1.0F : 1.0F, 1.0F);
+ }
+
+ return matrix;
+ }
+
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/OpenGlUtils.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/OpenGlUtils.java
new file mode 100644
index 000000000..97590821c
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/OpenGlUtils.java
@@ -0,0 +1,60 @@
+package com.quark.quamera.util;
+
+import android.content.Context;
+import android.opengl.Matrix;
+
+import androidx.annotation.RestrictTo;
+
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+public class OpenGlUtils {
+
+
+    /**
+     * Reads a raw shader resource into a string, one line at a time, appending '\n'
+     * after each line. Returns whatever was read (possibly empty) on I/O failure.
+     *
+     * @param context context used to resolve the resource
+     * @param shareId raw resource id of the shader source
+     */
+    public static String readRawShaderFile(Context context, int shareId) {
+        // try-with-resources replaces the manual IOUtils.safeClose(); closing the
+        // BufferedReader also closes the underlying InputStream. StringBuilder is
+        // preferred over the synchronized StringBuffer for single-threaded use.
+        StringBuilder sb = new StringBuilder();
+        try (BufferedReader br = new BufferedReader(
+                new InputStreamReader(context.getResources().openRawResource(shareId)))) {
+            String line;
+            while ((line = br.readLine()) != null) {
+                sb.append(line);
+                sb.append("\n");
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        return sb.toString();
+    }
+
+    /** Returns a freshly allocated 4x4 identity matrix. */
+    public static float[] createIdentityMtx() {
+        float[] m = new float[16];
+        Matrix.setIdentityM(m, 0);
+        return m;
+    }
+
+    /**
+     * Returns a direct, native-order FloatBuffer holding a full-screen quad:
+     * 4 vertices of (x, y, z, u, v), positioned at offset 0.
+     */
+    public static FloatBuffer createSquareVtx() {
+        float[] vtx = new float[]{
+                -1.0F, 1.0F, 0.0F, 0.0F, 1.0F, -1.0F, -1.0F, 0.0F, 0.0F, 0.0F, 1.0F, 1.0F, 0.0F, 1.0F, 1.0F, 1.0F, -1.0F, 0.0F, 1.0F, 0.0F};
+        ByteBuffer bb = ByteBuffer.allocateDirect(4 * vtx.length);
+        bb.order(ByteOrder.nativeOrder());
+        FloatBuffer fb = bb.asFloatBuffer();
+        fb.put(vtx);
+        fb.position(0);
+        return fb;
+    }
+
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/Preconditions.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/Preconditions.java
new file mode 100644
index 000000000..1fa874800
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/Preconditions.java
@@ -0,0 +1,46 @@
+package com.quark.quamera.util;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-11-24
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import androidx.annotation.RestrictTo;
+
+//@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+public class Preconditions {
+
+
+    /** Thread id of the camera thread; -1 while unset. See {@link #setCameraThread}. */
+    public static long CAMERA_THREAD_ID = -1;
+
+    public static void setCameraThread(long id) {
+        CAMERA_THREAD_ID = id;
+    }
+
+    /** Fails the assertion helper when {@code state} is false. */
+    public static void checkState(boolean state) {
+        checkState(state, "");
+    }
+
+    public static void checkState(boolean state, String message) {
+        if (state) {
+            return;
+        }
+        CameraShould.fail(message);
+    }
+
+    /** Routes an unexpected exception into the assertion helper. */
+    public static void onException(Exception e) {
+        CameraShould.fail("", e);
+    }
+
+
+    /** Asserts the current thread is the registered camera thread, when one is set. */
+    public static void cameraThreadCheck() {
+        final long expected = CAMERA_THREAD_ID;
+        if (expected != -1 && Thread.currentThread().getId() != expected) {
+            CameraShould.fail("");
+        }
+    }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/SequentialExecutor.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/SequentialExecutor.java
new file mode 100644
index 000000000..9a4fd63c4
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/SequentialExecutor.java
@@ -0,0 +1,57 @@
+package com.quark.quamera.util;
+/*
+ * Copyright (C) 2005-2019 UCWeb Inc. All rights reserved.
+ * Description :
+ *
+ * Creation : 20-11-13
+ * Author : jiaming.wjm@alibaba-inc.com
+ */
+
+import android.os.Handler;
+import android.os.HandlerThread;
+
+import androidx.annotation.RestrictTo;
+
+import java.util.concurrent.Executor;
+
+//@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+public class SequentialExecutor implements Executor {
+
+
+    private HandlerThread mHandlerThread;
+    private Handler mHandler;
+    private final String mName;
+
+    public SequentialExecutor(String name) {
+        mName = name;
+    }
+
+    /** Starts the backing {@link HandlerThread}; must be called before {@link #execute}. */
+    public synchronized void start() {
+        mHandlerThread = new HandlerThread(mName);
+        mHandlerThread.start();
+        mHandler = new Handler(mHandlerThread.getLooper());
+    }
+
+    /** Quits the backing thread; subsequent execute()/getHandler() calls will throw. */
+    public synchronized void stop() {
+        if (mHandlerThread != null) {
+            mHandlerThread.quit();
+            mHandlerThread = null;
+            mHandler = null;
+        }
+    }
+
+    /**
+     * Returns the backing handler.
+     *
+     * <p>Fix: start()/stop()/execute() all guard {@code mHandler} with the instance lock,
+     * but this getter previously read it unsynchronized, so a concurrent start/stop could
+     * yield a stale or torn view. Now synchronized for consistency.
+     *
+     * @throws RuntimeException if {@link #start()} has not been called
+     */
+    public synchronized Handler getHandler() {
+        if (mHandler == null) {
+            throw new RuntimeException("Start First");
+        }
+        return mHandler;
+    }
+
+    @Override
+    public synchronized void execute(Runnable command) {
+        if (mHandler == null) {
+            throw new RuntimeException("Executor (" + mName + ") Start First");
+        }
+        mHandler.post(command);
+    }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/Should.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/Should.java
new file mode 100644
index 000000000..89924a0be
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/Should.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2004 - 2016 UCWeb Inc. All Rights Reserved.
+ * Creation : 2016/5/5
+ * Author : lzm89888@alibaba-inc.com
+ */
+
+package com.quark.quamera.util;
+
+import android.text.TextUtils;
+import android.util.Log;
+
+import androidx.annotation.RestrictTo;
+
+import java.util.Collection;
+
+
+/**
+ * 一个简单的断言工具
+ */
+@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
+public class Should {
+    private static final String TAG = "Should";
+
+    /** Sink for assertion-failure reports when throwing is disabled. */
+    public interface Logger {
+        void log(String message, Throwable tr);
+    }
+
+    public static class DefaultLogger implements Logger {
+        @Override
+        public void log(String message, Throwable tr) {
+            Log.e(TAG, message, tr);
+        }
+    }
+
+    private static boolean sShouldThrowError = true;
+    private static Logger sLogger = new DefaultLogger();
+
+    /**
+     * Sets whether an {@link AssertionError} is thrown when an assertion fails. When disabled,
+     * failures are only written to the {@link Logger}. Typically development and dogfood builds
+     * throw, while release builds only log, for maximum availability.
+     * @param enable whether to throw {@link AssertionError} on failure
+     */
+    public static void setThrowAssertionError(boolean enable) {
+        sShouldThrowError = enable;
+    }
+
+    public static void setLogger(Logger logger) {
+        sLogger = logger;
+    }
+
+    public static void notNull(Object obj) {
+        notNull(obj, "notNull assert fail");
+    }
+
+    public static void notNull(Object obj, String msg) {
+        notNullIf(obj, true, msg);
+    }
+
+    /**
+     * Asserts that {@code obj} is non-null when {@code ifCondition} is true.
+     */
+    public static void notNullIf(Object obj, boolean ifCondition) {
+        notNullIf(obj, ifCondition, "notNullIf assert fail");
+    }
+
+    public static void notNullIf(Object obj, boolean ifCondition, String msg) {
+        if (ifCondition && obj == null) {
+            throwAssertionError(msg);
+        }
+    }
+
+    private static void throwAssertionError(String msg) {
+        throwAssertionError(msg, null);
+    }
+
+    private static void throwAssertionError(String msg, Throwable t) {
+        if (sShouldThrowError) {
+            if (t != null) {
+                sLogger.log(msg, null);
+                // Fix: AssertionError(Throwable) dropped the descriptive message;
+                // keep both the message and the cause.
+                throw new AssertionError(msg, t);
+            } else {
+                throw new AssertionError(msg);
+            }
+        } else {
+            sLogger.log(msg, t != null ? t : new Throwable());
+        }
+    }
+
+    public static void beNullIf(Object obj, boolean ifCondition) {
+        beNullIf(obj, ifCondition, "beNullIf assert fail");
+    }
+
+    public static void beNullIf(Object obj, boolean ifCondition, String msg) {
+        if (ifCondition && obj != null) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void notEmpty(CharSequence str) {
+        notEmptyIf(str, true, "notEmpty assert fail");
+    }
+
+    public static void notEmptyIf(CharSequence str, boolean ifCondition) {
+        notEmptyIf(str, ifCondition, "notEmptyIf assert fail");
+    }
+
+    public static void notEmptyIf(CharSequence str, boolean ifCondition, String msg) {
+        if (ifCondition && TextUtils.isEmpty(str)) {
+            throwAssertionError(msg);
+        }
+    }
+
+    // Wildcard instead of raw Collection; element type is irrelevant here.
+    public static void notEmpty(Collection<?> c) {
+        notEmptyIf(c, true, "notEmpty assert fail");
+    }
+
+    public static void notEmptyIf(Collection<?> c, boolean ifCondition, String msg) {
+        if (ifCondition && (c == null || c.isEmpty())) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void beTrue(boolean b) {
+        beTrueIf(b, true);
+    }
+
+    public static void beTrue(boolean b, String msg) {
+        beTrueIf(b, true, msg);
+    }
+
+    /**
+     * Asserts that {@code b} is true when {@code ifCondition} is true.
+     */
+    public static void beTrueIf(boolean b, boolean ifCondition) {
+        beTrueIf(b, ifCondition, "beTrueIf assert fail");
+    }
+
+    public static void beTrueIf(boolean b, boolean ifCondition, String msg) {
+        if (ifCondition && !b) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void beFalse(boolean b, String msg) {
+        beFalseIf(b, true, msg);
+    }
+
+    public static void beFalse(boolean b) {
+        beFalseIf(b, true);
+    }
+
+    public static void beFalseIf(boolean b, boolean ifCondition) {
+        beFalseIf(b, ifCondition, "beFalseIf assert fail");
+    }
+
+    public static void beFalseIf(boolean b, boolean ifCondition, String msg) {
+        if (ifCondition && b) {
+            throwAssertionError(msg);
+        }
+    }
+
+    public static void fail() {
+        fail("assert fail");
+    }
+
+    public static void fail(String msg) {
+        throwAssertionError(msg);
+    }
+
+    public static void fail(String msg, Throwable t) {
+        throwAssertionError(msg, t);
+    }
+
+    public static void beEqual(int origin, int expect) {
+        // Fix: the previous sShouldThrowError guard here skipped logging entirely when
+        // throwing was disabled; throwAssertionError already honours that flag, matching
+        // the behaviour of every other check in this class.
+        if (origin != expect) {
+            throwAssertionError("" + origin + " not equal to " + expect);
+        }
+    }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/util/TestOnly.java b/mediapipe/render/android/camera/java/com/quark/quamera/util/TestOnly.java
new file mode 100644
index 000000000..32d6e2b38
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/util/TestOnly.java
@@ -0,0 +1,25 @@
+package com.quark.quamera.util;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import static java.lang.annotation.ElementType.ANNOTATION_TYPE;
+import static java.lang.annotation.ElementType.CONSTRUCTOR;
+import static java.lang.annotation.ElementType.FIELD;
+import static java.lang.annotation.ElementType.METHOD;
+import static java.lang.annotation.ElementType.PACKAGE;
+import static java.lang.annotation.ElementType.TYPE;
+
+/**
+ * A method/constructor annotated with TestOnly claims that it should be called from testing code only.
+ * {@code
+ * val transformation = Matrix()
+ * transformation.setRectToRect(
+ * cropRect, new RectF(0, 0, viewFinder.width, viewFinder.height, ScaleToFit.CENTER))
+ * }
+ *
+ * {@code
+ * val transformedRect = RectF(0, 0, viewFinder.width, viewFinder.height)
+ * transformation.mapRect(surfaceRect)
+ * viewFinder.pivotX = 0
+ * viewFinder.pivotY = 0
+ * viewFinder.translationX = transformedRect.left
+ * viewFinder.translationY = transformedRect.top
+ * viewFinder.scaleX = surfaceRect.width/transformedRect.width
+ * viewFinder.scaleY = surfaceRect.height/transformedRect.height
+ * }
+ */
+ public static final int FIT = 3;
+
+
+ public static final int CENTER_INSIDE = 4;
+
+
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/ViewPorts.java b/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/ViewPorts.java
new file mode 100644
index 000000000..ed65aadf7
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camera/preview/ViewPorts.java
@@ -0,0 +1,304 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camera.preview;
+
+import android.annotation.SuppressLint;
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.os.Build;
+import android.util.LayoutDirection;
+import android.util.Rational;
+import android.util.Size;
+
+import androidx.annotation.IntRange;
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+import com.quark.quamera.util.CameraLogger;
+import com.quark.quamera.util.CameraShould;
+import com.quark.quamera.util.ImageUtils;
+
+/**
+ * Utility methods for calculating viewports.
+ */
+public class ViewPorts {
+ // Private constructor: ViewPorts is a static utility holder and must not be instantiated.
+ private ViewPorts() {
+
+ }
+
+ /**
+ * Checks whether the given {@link CameraSelector} can be resolved to a camera on this device.
+ *
+ * @return true if the {@link CameraSelector} can be resolved to a camera.
+ * @throws IllegalStateException if the camera is not initialized.
+ */
+ @SuppressLint("RestrictedApi")
+ @MainThread
+ public boolean hasCamera(@NonNull CameraSelector cameraSelector) {
+ // Must be called on the main thread, with a non-null selector.
+ Threads.checkMainThread();
+ Preconditions.checkNotNull(cameraSelector);
+
+ // The provider is set asynchronously; callers must wait for the
+ // initialization future to complete before querying availability.
+ if (mCameraProvider == null) {
+ throw new IllegalStateException("Camera not initialized. Please wait for "
+ + "the initialization future to finish. See #getInitializationFuture().");
+ }
+
+ try {
+ return mCameraProvider.hasCamera(cameraSelector);
+ } catch (CameraInfoUnavailableException e) {
+ // Treat "camera info unavailable" as "camera absent" rather than crashing the caller.
+ Logger.w(TAG, "Failed to check camera availability", e);
+ return false;
+ }
+ }
+
+ /**
+ * Gets the {@link CameraSelector}.
+ *
+ *
+ * controller.getInitializationFuture().addListener(() -> {
+ * if (controller.hasCamera(cameraSelector)) {
+ * controller.setCameraSelector(cameraSelector);
+ * } else {
+ * // Update UI if the camera is not available.
+ * }
+ * // Attach PreviewView after we know the camera is available.
+ * previewView.setController(controller);
+ * }, ContextCompat.getMainExecutor(requireContext()));
+ *
+ * Source: +-----Surface-----+ Destination: +-----PreviewView----+
+ * | | | |
+ * | +-crop rect-+ | | |
+ * | | | | +--------------------+
+ * | | | |
+ * | | --> | | Rotation: <-----+
+ * | | | | 270°|
+ * | | | | |
+ * | +-----------+ |
+ * +-----------------+
+ *
+ * By mapping the Surface crop rect to match the PreviewView, we have:
+ *
+ * +------transformed Surface-------+
+ * | |
+ * | +----PreviewView-----+ |
+ * | | ^ | |
+ * | | | | |
+ * | +--------------------+ |
+ * | |
+ * +--------------------------------+
+ *
+ *
+ *
+ *
+ *
+ * @see OrientationEventListener#onOrientationChanged(int)
+ */
+ public abstract void onRotationChanged(int rotation);
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/TransformUtils.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/TransformUtils.java
new file mode 100644
index 000000000..1ac010122
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/TransformUtils.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camerax.controller;
+
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.media.ExifInterface;
+import android.util.Size;
+import android.view.Surface;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RestrictTo;
+
+/**
+ * Gets the transform that maps {@code source} onto {@code target}, applying
+ * {@code rotationDegrees} of clockwise rotation in between.
+ *
+ * <p>For reference, the mapping from device orientation to surface rotation is:
+ * <pre>
+ * Orientation degrees Surface rotation
+ * [-45°, 45°) {@link Surface#ROTATION_0}
+ * [45°, 135°) {@link Surface#ROTATION_270}
+ * [135°, 225°) {@link Surface#ROTATION_180}
+ * [225°, 315°) {@link Surface#ROTATION_90}
+ * </pre>
+ *
+ * <p>Example of mapping a source rect to a target rect with a -90° rotation:
+ * <pre>
+ * a----------b d'-----------a'
+ * | source | -90°-> | |
+ * d----------c | target |
+ * | |
+ * c'-----------b'
+ * </pre>
+ */
+ @NonNull
+ public static Matrix getRectToRect(
+ @NonNull RectF source, @NonNull RectF target, int rotationDegrees) {
+ // Step 1: map the source rect onto the normalized space (-1,-1)-(1,1).
+ Matrix matrix = new Matrix();
+ matrix.setRectToRect(source, NORMALIZED_RECT, Matrix.ScaleToFit.FILL);
+ // Step 2: rotate about the center of the normalized space.
+ matrix.postRotate(rotationDegrees);
+ // Step 3: map the normalized space onto the target rect's coordinates.
+ matrix.postConcat(getNormalizedToBuffer(target));
+ return matrix;
+ }
+
+ /**
+ * Gets the transform from a normalized space (-1, -1) - (1, 1) to the given rect.
+ *
+ * @param viewPortRect the destination rect, in buffer coordinates.
+ */
+ @NonNull
+ public static Matrix getNormalizedToBuffer(@NonNull Rect viewPortRect) {
+ // Delegate to the RectF overload; Rect is widened losslessly.
+ return getNormalizedToBuffer(new RectF(viewPortRect));
+ }
+
+ /**
+ * Gets the transform from a normalized space (-1, -1) - (1, 1) to the given rect.
+ *
+ * @param viewPortRect the destination rect, in buffer coordinates.
+ */
+ @NonNull
+ private static Matrix getNormalizedToBuffer(@NonNull RectF viewPortRect) {
+ // FILL stretches both axes independently so the normalized square maps exactly onto the rect.
+ Matrix normalizedToBuffer = new Matrix();
+ normalizedToBuffer.setRectToRect(NORMALIZED_RECT, viewPortRect, Matrix.ScaleToFit.FILL);
+ return normalizedToBuffer;
+ }
+
+ /**
+ * Gets the transform matrix based on exif orientation.
+ *
+ * @param exifOrientation one of the {@link ExifInterface} {@code ORIENTATION_*} constants.
+ * @param width the width of the image, in pixels.
+ * @param height the height of the image, in pixels.
+ */
+ @NonNull
+ public static Matrix getExifTransform(int exifOrientation, int width, int height) {
+ Matrix matrix = new Matrix();
+
+ // Map the bitmap to a normalized space and perform transform. It's more readable, and it
+ // can be tested with Robolectric's ShadowMatrix (Matrix#setPolyToPoly is currently not
+ // shadowed by ShadowMatrix).
+ RectF rect = new RectF(0, 0, width, height);
+ matrix.setRectToRect(rect, NORMALIZED_RECT, Matrix.ScaleToFit.FILL);
+
+ // A flag that checks if the image has been rotated 90/270.
+ boolean isWidthHeightSwapped = false;
+
+ // Transform the normalized space based on exif orientation.
+ switch (exifOrientation) {
+ case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
+ matrix.postScale(-1f, 1f);
+ break;
+ case ExifInterface.ORIENTATION_ROTATE_180:
+ matrix.postRotate(180);
+ break;
+ case ExifInterface.ORIENTATION_FLIP_VERTICAL:
+ matrix.postScale(1f, -1f);
+ break;
+ case ExifInterface.ORIENTATION_TRANSPOSE:
+ // Flipped about top-left <--> bottom-right axis, it can also be represented by
+ // flip horizontally and then rotate 270 degree clockwise.
+ matrix.postScale(-1f, 1f);
+ matrix.postRotate(270);
+ isWidthHeightSwapped = true;
+ break;
+ case ExifInterface.ORIENTATION_ROTATE_90:
+ matrix.postRotate(90);
+ isWidthHeightSwapped = true;
+ break;
+ case ExifInterface.ORIENTATION_TRANSVERSE:
+ // Flipped about top-right <--> bottom left axis, it can also be represented by
+ // flip horizontally and then rotate 90 degree clockwise.
+ matrix.postScale(-1f, 1f);
+ matrix.postRotate(90);
+ isWidthHeightSwapped = true;
+ break;
+ case ExifInterface.ORIENTATION_ROTATE_270:
+ matrix.postRotate(270);
+ isWidthHeightSwapped = true;
+ break;
+ case ExifInterface.ORIENTATION_NORMAL:
+ // Fall-through
+ case ExifInterface.ORIENTATION_UNDEFINED:
+ // Fall-through
+ default:
+ break;
+ }
+
+ // Map the normalized space back to the bitmap coordinates. If the image was rotated
+ // by 90/270, the restored rect has width and height swapped.
+ RectF restoredRect = isWidthHeightSwapped ? new RectF(0, 0, height, width) : rect;
+ Matrix restore = new Matrix();
+ restore.setRectToRect(NORMALIZED_RECT, restoredRect, Matrix.ScaleToFit.FILL);
+ matrix.postConcat(restore);
+
+ return matrix;
+ }
+}
diff --git a/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/DeviceQuirks.java b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/DeviceQuirks.java
new file mode 100644
index 000000000..7b161e858
--- /dev/null
+++ b/mediapipe/render/android/camera/java/com/quark/quamera/camerax/controller/internal/compat/quirk/DeviceQuirks.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.quark.quamera.camerax.controller.internal.compat.quirk;
+
+import android.annotation.SuppressLint;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.camera.core.impl.Quirk;
+import androidx.camera.core.impl.Quirks;
+
+/**
+ * Provider of device specific quirks for the view module, which are used for device specific
+ * workarounds.
+ *
+ *
+ *
+ * Developer Guides
+ * Using GLSurfaceView
+ * Initializing GLSurfaceView
+ * All you have to do to initialize a GLSurfaceView is call {@link #setRenderer(Renderer)}.
+ * However, if desired, you can modify the default behavior of GLSurfaceView by calling one or
+ * more of these methods before calling setRenderer:
+ *
+ *
+ * Specifying the android.view.Surface
+ * By default GLSurfaceView will create a PixelFormat.RGB_888 format surface. If a translucent
+ * surface is required, call getHolder().setFormat(PixelFormat.TRANSLUCENT).
+ * The exact format of a TRANSLUCENT surface is device dependent, but it will be
+ * a 32-bit-per-pixel surface with 8 bits per component.
+ * Choosing an EGL Configuration
+ * A given Android device may support multiple EGLConfig rendering configurations.
+ * The available configurations may differ in how many channels of data are present, as
+ * well as how many bits are allocated to each channel. Therefore, the first thing
+ * GLSurfaceView has to do when starting to render is choose what EGLConfig to use.
+ * Debug Behavior
+ * You can optionally modify the behavior of GLSurfaceView by calling
+ * one or more of the debugging methods {@link #setDebugFlags(int)},
+ * and {@link #setGLWrapper}. These methods may be called before and/or after setRenderer, but
+ * typically they are called before setRenderer so that they take effect immediately.
+ * Setting a Renderer
+ * Finally, you must call {@link #setRenderer} to register a {@link Renderer}.
+ * The renderer is
+ * responsible for doing the actual OpenGL rendering.
+ * Rendering Mode
+ * Once the renderer is set, you can control whether the renderer draws
+ * continuously or on-demand by calling
+ * {@link #setRenderMode}. The default is continuous rendering.
+ * Activity Life-cycle
+ * A GLSurfaceView must be notified when to pause and resume rendering. GLSurfaceView clients
+ * are required to call {@link #onPause()} when the activity stops and
+ * {@link #onResume()} when the activity starts. These calls allow GLSurfaceView to
+ * pause and resume the rendering thread, and also allow GLSurfaceView to release and recreate
+ * the OpenGL display.
+ * Handling events
+ *
+ * class MyGLSurfaceView extends GLSurfaceView {
+ *
+ * private MyRenderer mMyRenderer;
+ *
+ * public void start() {
+ * mMyRenderer = ...;
+ * setRenderer(mMyRenderer);
+ * }
+ *
+ * public boolean onKeyDown(int keyCode, KeyEvent event) {
+ * if (keyCode == KeyEvent.KEYCODE_DPAD_CENTER) {
+ * queueEvent(new Runnable() {
+ * // This method will be called on the rendering
+ * // thread:
+ * public void run() {
+ * mMyRenderer.handleDpadCenter();
+ * }});
+ * return true;
+ * }
+ * return super.onKeyDown(keyCode, event);
+ * }
+ * }
+ *
+ *
+ */
+public class AndroidGLSurfaceView extends SurfaceView implements SurfaceHolder.Callback2 {
+ private final static String TAG = "GLSurfaceView";
+ private final static boolean LOG_ATTACH_DETACH = true;
+ private final static boolean LOG_THREADS = true;
+ private final static boolean LOG_PAUSE_RESUME = true;
+ private final static boolean LOG_SURFACE = true;
+ private final static boolean LOG_RENDERER = false;
+ private final static boolean LOG_RENDERER_DRAW_FRAME = false;
+ private final static boolean LOG_EGL = true;
+ /**
+ * The renderer only renders
+ * when the surface is created, or when {@link #requestRender} is called.
+ *
+ * @see #getRenderMode()
+ * @see #setRenderMode(int)
+ * @see #requestRender()
+ */
+ public final static int RENDERMODE_WHEN_DIRTY = 0;
+ /**
+ * The renderer is called
+ * continuously to re-render the scene.
+ *
+ * @see #getRenderMode()
+ * @see #setRenderMode(int)
+ */
+ public final static int RENDERMODE_CONTINUOUSLY = 1;
+
+ /**
+ * Check glError() after every GL call and throw an exception if glError indicates
+ * that an error has occurred. This can be used to help track down which OpenGL ES call
+ * is causing an error.
+ *
+ * @see #getDebugFlags
+ * @see #setDebugFlags
+ */
+ public final static int DEBUG_CHECK_GL_ERROR = 1;
+
+ /**
+ * Log GL calls to the system log at "verbose" level with tag "GLSurfaceView".
+ *
+ * @see #getDebugFlags
+ * @see #setDebugFlags
+ */
+ public final static int DEBUG_LOG_GL_CALLS = 2;
+
+ /**
+ * Standard View constructor. In order to render something, you
+ * must call {@link #setRenderer} to register a renderer.
+ *
+ * @param context the Android Context this view runs in.
+ */
+ public AndroidGLSurfaceView(Context context) {
+ super(context);
+ init();
+ }
+
+ /**
+ * Standard View constructor, used when inflating from XML. In order to render
+ * something, you must call {@link #setRenderer} to register a renderer.
+ *
+ * @param context the Android Context this view runs in.
+ * @param attrs the XML attribute set the view was inflated with.
+ */
+ public AndroidGLSurfaceView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ init();
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ // Safety net: make sure the render thread is stopped even if the view is
+ // garbage-collected without ever having been detached from a window.
+ try {
+ if (mGLThread != null) {
+ // GLThread may still be running if this view was never
+ // attached to a window.
+ mGLThread.requestExitAndWait();
+ }
+ } finally {
+ super.finalize();
+ }
+ }
+
+ // Shared initialization for all constructors: registers this view as the
+ // SurfaceHolder callback so surface lifecycle events reach the GL machinery.
+ private void init() {
+ // Install a SurfaceHolder.Callback so we get notified when the
+ // underlying surface is created and destroyed
+ SurfaceHolder holder = getHolder();
+ holder.addCallback(this);
+ // setFormat is done by SurfaceView in SDK 2.3 and newer. Uncomment
+ // this statement if back-porting to 2.2 or older:
+ // holder.setFormat(PixelFormat.RGB_565);
+ //
+ // setType is not needed for SDK 2.0 or newer. Uncomment this
+ // statement if back-porting this code to older SDKs.
+ // holder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
+ }
+
+ /**
+ * Set the renderer associated with this view. Also starts the thread that
+ * will call the renderer, which in turn causes the rendering to start.
+ *
+ * <p>This method should be called once and only once in the life-cycle of
+ * this view. Any EGL helper not configured beforehand (config chooser,
+ * context factory, window surface factory) is given a default implementation
+ * here before the GL thread starts.
+ *
+ * @param renderer the renderer to use to perform OpenGL drawing.
+ */
+ public void setRenderer(Renderer renderer) {
+ checkRenderThreadState();
+ if (mEGLConfigChooser == null) {
+ mEGLConfigChooser = new SimpleEGLConfigChooser(true);
+ }
+ if (mEGLContextFactory == null) {
+ mEGLContextFactory = new DefaultContextFactory();
+ }
+ if (mEGLWindowSurfaceFactory == null) {
+ mEGLWindowSurfaceFactory = new DefaultWindowSurfaceFactory();
+ }
+ mRenderer = renderer;
+ mGLThread = new GLThread(mThisWeakRef);
+ mGLThread.start();
+ }
+
+ /**
+ * Install a custom EGLContextFactory.
+ *
+ * public MyView(Context context) {
+ * super(context);
+ * setEGLContextClientVersion(2); // Pick an OpenGL ES 2.0 context.
+ * setRenderer(new MyRenderer());
+ * }
+ *
+ *
+ * class MyGLWrapper implements GLWrapper {
+ * GL wrap(GL gl) {
+ * return new MyGLImplementation(gl);
+ * }
+ * static class MyGLImplementation implements GL,GL10,GL11,... {
+ * ...
+ * }
+ * }
+ *
+ * @see #setGLWrapper(GLWrapper)
+ */
+ public interface GLWrapper {
+ /**
+ * Wraps a gl interface in another gl interface.
+ *
+ * @param gl a GL interface that is to be wrapped.
+ * @return either the input argument or another GL object that wraps the input argument.
+ */
+ GL wrap(GL gl);
+ }
+
+ /**
+ * A generic renderer interface.
+ * Developer Guides
+ * Threading
+ * The renderer will be called on a separate thread, so that rendering
+ * performance is decoupled from the UI thread. Clients typically need to
+ * communicate with the renderer from the UI thread, because that's where
+ * input events are received. Clients can communicate using any of the
+ * standard Java techniques for cross-thread communication, or they can
+ * use the {@link AndroidGLSurfaceView#queueEvent(Runnable)} convenience method.
+ * EGL Context Lost
+ * There are situations where the EGL rendering context will be lost. This
+ * typically happens when device wakes up after going to sleep. When
+ * the EGL context is lost, all OpenGL resources (such as textures) that are
+ * associated with that context will be automatically deleted. In order to
+ * keep rendering correctly, a renderer must recreate any lost resources
+ * that it still needs. The {@link #onSurfaceCreated(GL10, EGLConfig)} method
+ * is a convenient place to do this.
+ *
+ *
+ * @see #setRenderer(Renderer)
+ */
+ public interface Renderer {
+ /**
+ * Called when the surface is created or recreated.
+ * <p>Invoked when the rendering thread starts and whenever the EGL
+ * context is lost, so this is a convenient place to (re)create
+ * OpenGL resources such as textures.
+ *
+ * @param gl the GL interface. Use {@code instanceof} to
+ * test if the interface supports GL11 or higher interfaces.
+ * @param config the EGLConfig of the created surface. Can be used
+ * to create matching pbuffers.
+ */
+ void onSurfaceCreated(GL10 gl, EGLConfig config);
+
+ /**
+ * Called when the surface changed size.
+ * <p>Invoked after the surface is created and whenever the OpenGL ES
+ * surface size changes. Typically used to set the viewport, for example:
+ * <pre class="prettyprint">
+ * void onSurfaceChanged(GL10 gl, int width, int height) {
+ * gl.glViewport(0, 0, width, height);
+ * // for a fixed camera, set the projection too
+ * float ratio = (float) width / height;
+ * gl.glMatrixMode(GL10.GL_PROJECTION);
+ * gl.glLoadIdentity();
+ * gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
+ * }
+ * </pre>
+ *
+ * @param gl the GL interface. Use {@code instanceof} to
+ * test if the interface supports GL11 or higher interfaces.
+ * @param width the new surface width, in pixels.
+ * @param height the new surface height, in pixels.
+ */
+ void onSurfaceChanged(GL10 gl, int width, int height);
+
+ /**
+ * Called to draw the current frame.
+ * <p>Responsible for the actual drawing, for example:
+ * <pre class="prettyprint">
+ * void onDrawFrame(GL10 gl) {
+ * gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
+ * //... other gl calls to render the scene ...
+ * }
+ * </pre>
+ *
+ * @param gl the GL interface. Use {@code instanceof} to
+ * test if the interface supports GL11 or higher interfaces.
+ */
+ void onDrawFrame(GL10 gl);
+
+
+ }
+
+ /**
+ * An interface for customizing the eglCreateContext and eglDestroyContext calls.
+ *