summaryrefslogtreecommitdiff
path: root/core/java
diff options
context:
space:
mode:
authorEino-Ville Talvala <etalvala@google.com>2020-07-13 17:12:42 +0000
committerAndroid (Google) Code Review <android-gerrit@google.com>2020-07-13 17:12:42 +0000
commit79d4a4c181d5c4c8cfca51e83903b96ca1f30829 (patch)
treeb4f6169b515b5ea4f980576ef231354ffcdbe8f8 /core/java
parentd8e4cdc656143772577dd98ba32ce22df347b1b7 (diff)
parent7208d0af87dda3b6484e7a549d51751ae57d9a30 (diff)
Merge "Camera: Remove all camera HALv1 code"
Diffstat (limited to 'core/java')
-rw-r--r--core/java/android/hardware/Camera.java84
-rw-r--r--core/java/android/hardware/camera2/CameraManager.java113
-rw-r--r--core/java/android/hardware/camera2/legacy/BurstHolder.java90
-rw-r--r--core/java/android/hardware/camera2/legacy/CameraDeviceState.java362
-rw-r--r--core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java805
-rw-r--r--core/java/android/hardware/camera2/legacy/CaptureCollector.java673
-rw-r--r--core/java/android/hardware/camera2/legacy/GLThreadManager.java264
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java886
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyExceptionUtils.java138
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java265
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java321
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java1532
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyRequest.java67
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java688
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyResultMapper.java529
-rw-r--r--core/java/android/hardware/camera2/legacy/ParameterUtils.java1099
-rw-r--r--core/java/android/hardware/camera2/legacy/PerfMeasurement.java308
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestHandlerThread.java113
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestHolder.java283
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestQueue.java174
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestThreadManager.java1126
-rw-r--r--core/java/android/hardware/camera2/legacy/SizeAreaComparator.java72
-rw-r--r--core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java882
-rw-r--r--core/java/android/hardware/camera2/legacy/package.html3
-rw-r--r--core/java/android/hardware/camera2/params/StreamConfigurationMap.java5
-rw-r--r--core/java/android/hardware/camera2/utils/SurfaceUtils.java105
26 files changed, 116 insertions, 10871 deletions
diff --git a/core/java/android/hardware/Camera.java b/core/java/android/hardware/Camera.java
index 25279b31b5d1..215990568fbf 100644
--- a/core/java/android/hardware/Camera.java
+++ b/core/java/android/hardware/Camera.java
@@ -249,14 +249,10 @@ public class Camera {
public static final int CAMERA_HAL_API_VERSION_1_0 = 0x100;
/**
- * A constant meaning the normal camera connect/open will be used.
- */
- private static final int CAMERA_HAL_API_VERSION_NORMAL_CONNECT = -2;
-
- /**
- * Used to indicate HAL version un-specified.
+ * Camera HAL device API version 3.0
+ * @hide
*/
- private static final int CAMERA_HAL_API_VERSION_UNSPECIFIED = -1;
+ public static final int CAMERA_HAL_API_VERSION_3_0 = 0x300;
/**
* Hardware face detection. It does not use much CPU.
@@ -427,7 +423,7 @@ public class Camera {
* Creates a new Camera object to access a particular hardware camera with
* given hal API version. If the same camera is opened by other applications
* or the hal API version is not supported by this device, this will throw a
- * RuntimeException.
+ * RuntimeException. As of Android 12, HAL version 1 is no longer supported.
* <p>
* You must call {@link #release()} when you are done using the camera,
* otherwise it will remain locked and be unavailable to other applications.
@@ -463,49 +459,14 @@ public class Camera {
*/
@UnsupportedAppUsage
public static Camera openLegacy(int cameraId, int halVersion) {
- if (halVersion < CAMERA_HAL_API_VERSION_1_0) {
- throw new IllegalArgumentException("Invalid HAL version " + halVersion);
+ if (halVersion < CAMERA_HAL_API_VERSION_3_0) {
+ throw new IllegalArgumentException("Unsupported HAL version " + halVersion);
}
- return new Camera(cameraId, halVersion);
- }
-
- /**
- * Create a legacy camera object.
- *
- * @param cameraId The hardware camera to access, between 0 and
- * {@link #getNumberOfCameras()}-1.
- * @param halVersion The HAL API version this camera device to be opened as.
- */
- private Camera(int cameraId, int halVersion) {
- int err = cameraInitVersion(cameraId, halVersion);
- if (checkInitErrors(err)) {
- if (err == -EACCES) {
- throw new RuntimeException("Fail to connect to camera service");
- } else if (err == -ENODEV) {
- throw new RuntimeException("Camera initialization failed");
- } else if (err == -ENOSYS) {
- throw new RuntimeException("Camera initialization failed because some methods"
- + " are not implemented");
- } else if (err == -EOPNOTSUPP) {
- throw new RuntimeException("Camera initialization failed because the hal"
- + " version is not supported by this device");
- } else if (err == -EINVAL) {
- throw new RuntimeException("Camera initialization failed because the input"
- + " arugments are invalid");
- } else if (err == -EBUSY) {
- throw new RuntimeException("Camera initialization failed because the camera"
- + " device was already opened");
- } else if (err == -EUSERS) {
- throw new RuntimeException("Camera initialization failed because the max"
- + " number of camera devices were already opened");
- }
- // Should never hit this.
- throw new RuntimeException("Unknown camera error");
- }
+ return new Camera(cameraId);
}
- private int cameraInitVersion(int cameraId, int halVersion) {
+ private int cameraInit(int cameraId) {
mShutterCallback = null;
mRawImageCallback = null;
mJpegCallback = null;
@@ -523,35 +484,13 @@ public class Camera {
mEventHandler = null;
}
- return native_setup(new WeakReference<Camera>(this), cameraId, halVersion,
+ return native_setup(new WeakReference<Camera>(this), cameraId,
ActivityThread.currentOpPackageName());
}
- private int cameraInitNormal(int cameraId) {
- return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT);
- }
-
- /**
- * Connect to the camera service using #connectLegacy
- *
- * <p>
- * This acts the same as normal except that it will return
- * the detailed error code if open fails instead of
- * converting everything into {@code NO_INIT}.</p>
- *
- * <p>Intended to use by the camera2 shim only, do <i>not</i> use this for other code.</p>
- *
- * @return a detailed errno error code, or {@code NO_ERROR} on success
- *
- * @hide
- */
- public int cameraInitUnspecified(int cameraId) {
- return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_UNSPECIFIED);
- }
-
/** used by Camera#open, Camera#open(int) */
Camera(int cameraId) {
- int err = cameraInitNormal(cameraId);
+ int err = cameraInit(cameraId);
if (checkInitErrors(err)) {
if (err == -EACCES) {
throw new RuntimeException("Fail to connect to camera service");
@@ -616,8 +555,7 @@ public class Camera {
}
@UnsupportedAppUsage
- private native final int native_setup(Object camera_this, int cameraId, int halVersion,
- String packageName);
+ private native int native_setup(Object cameraThis, int cameraId, String packageName);
private native final void native_release();
diff --git a/core/java/android/hardware/camera2/CameraManager.java b/core/java/android/hardware/camera2/CameraManager.java
index 7f834afd7b30..8469f5f981ed 100644
--- a/core/java/android/hardware/camera2/CameraManager.java
+++ b/core/java/android/hardware/camera2/CameraManager.java
@@ -23,14 +23,11 @@ import android.annotation.RequiresPermission;
import android.annotation.SystemService;
import android.annotation.TestApi;
import android.content.Context;
-import android.hardware.CameraInfo;
import android.hardware.CameraStatus;
import android.hardware.ICameraService;
import android.hardware.ICameraServiceListener;
import android.hardware.camera2.impl.CameraDeviceImpl;
import android.hardware.camera2.impl.CameraMetadataNative;
-import android.hardware.camera2.legacy.CameraDeviceUserShim;
-import android.hardware.camera2.legacy.LegacyMetadataMapper;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.utils.CameraIdAndSessionConfiguration;
import android.hardware.camera2.utils.ConcurrentCameraIdCombination;
@@ -405,10 +402,6 @@ public final class CameraManager {
throw new IllegalArgumentException("No cameras available on device");
}
synchronized (mLock) {
- /*
- * Get the camera characteristics from the camera service directly if it supports it,
- * otherwise get them from the legacy shim instead.
- */
ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
if (cameraService == null) {
throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
@@ -417,34 +410,18 @@ public final class CameraManager {
try {
Size displaySize = getDisplaySize();
- // First check isHiddenPhysicalCamera to avoid supportsCamera2ApiLocked throwing
- // exception in case cameraId is a hidden physical camera.
- if (!isHiddenPhysicalCamera(cameraId) && !supportsCamera2ApiLocked(cameraId)) {
- // Legacy backwards compatibility path; build static info from the camera
- // parameters
- int id = Integer.parseInt(cameraId);
-
- String parameters = cameraService.getLegacyParameters(id);
-
- CameraInfo info = cameraService.getCameraInfo(id);
-
- characteristics = LegacyMetadataMapper.createCharacteristics(parameters, info,
- id, displaySize);
- } else {
- // Normal path: Get the camera characteristics directly from the camera service
- CameraMetadataNative info = cameraService.getCameraCharacteristics(cameraId);
- try {
- info.setCameraId(Integer.parseInt(cameraId));
- } catch (NumberFormatException e) {
- // For external camera, reaching here is expected.
- Log.v(TAG, "Failed to parse camera Id " + cameraId + " to integer");
- }
- boolean hasConcurrentStreams =
- CameraManagerGlobal.get().cameraIdHasConcurrentStreamsLocked(cameraId);
- info.setHasMandatoryConcurrentStreams(hasConcurrentStreams);
- info.setDisplaySize(displaySize);
- characteristics = new CameraCharacteristics(info);
+ CameraMetadataNative info = cameraService.getCameraCharacteristics(cameraId);
+ try {
+ info.setCameraId(Integer.parseInt(cameraId));
+ } catch (NumberFormatException e) {
+ Log.v(TAG, "Failed to parse camera Id " + cameraId + " to integer");
}
+ boolean hasConcurrentStreams =
+ CameraManagerGlobal.get().cameraIdHasConcurrentStreamsLocked(cameraId);
+ info.setHasMandatoryConcurrentStreams(hasConcurrentStreams);
+ info.setDisplaySize(displaySize);
+ characteristics = new CameraCharacteristics(info);
+
} catch (ServiceSpecificException e) {
throwAsPublicException(e);
} catch (RemoteException e) {
@@ -500,30 +477,14 @@ public final class CameraManager {
ICameraDeviceCallbacks callbacks = deviceImpl.getCallbacks();
try {
- if (supportsCamera2ApiLocked(cameraId)) {
- // Use cameraservice's cameradeviceclient implementation for HAL3.2+ devices
- ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
- if (cameraService == null) {
- throw new ServiceSpecificException(
- ICameraService.ERROR_DISCONNECTED,
- "Camera service is currently unavailable");
- }
- cameraUser = cameraService.connectDevice(callbacks, cameraId,
- mContext.getOpPackageName(), mContext.getAttributionTag(), uid);
- } else {
- // Use legacy camera implementation for HAL1 devices
- int id;
- try {
- id = Integer.parseInt(cameraId);
- } catch (NumberFormatException e) {
- throw new IllegalArgumentException("Expected cameraId to be numeric, but it was: "
- + cameraId);
- }
-
- Log.i(TAG, "Using legacy camera HAL.");
- cameraUser = CameraDeviceUserShim.connectBinderShim(callbacks, id,
- getDisplaySize());
+ ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
+ if (cameraService == null) {
+ throw new ServiceSpecificException(
+ ICameraService.ERROR_DISCONNECTED,
+ "Camera service is currently unavailable");
}
+ cameraUser = cameraService.connectDevice(callbacks, cameraId,
+ mContext.getOpPackageName(), mContext.getAttributionTag(), uid);
} catch (ServiceSpecificException e) {
if (e.errorCode == ICameraService.ERROR_DEPRECATED_HAL) {
throw new AssertionError("Should've gone down the shim path");
@@ -1021,44 +982,6 @@ public final class CameraManager {
}
/**
- * Queries the camera service if it supports the camera2 api directly, or needs a shim.
- *
- * @param cameraId a non-{@code null} camera identifier
- * @return {@code false} if the legacy shim needs to be used, {@code true} otherwise.
- */
- private boolean supportsCamera2ApiLocked(String cameraId) {
- return supportsCameraApiLocked(cameraId, API_VERSION_2);
- }
-
- /**
- * Queries the camera service if it supports a camera api directly, or needs a shim.
- *
- * @param cameraId a non-{@code null} camera identifier
- * @param apiVersion the version, i.e. {@code API_VERSION_1} or {@code API_VERSION_2}
- * @return {@code true} if connecting will work for that device version.
- */
- private boolean supportsCameraApiLocked(String cameraId, int apiVersion) {
- /*
- * Possible return values:
- * - NO_ERROR => CameraX API is supported
- * - CAMERA_DEPRECATED_HAL => CameraX API is *not* supported (thrown as an exception)
- * - Remote exception => If the camera service died
- *
- * Anything else is an unexpected error we don't want to recover from.
- */
- try {
- ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
- // If no camera service, no support
- if (cameraService == null) return false;
-
- return cameraService.supportsCameraApi(cameraId, apiVersion);
- } catch (RemoteException e) {
- // Camera service is now down, no support for any API level
- }
- return false;
- }
-
- /**
* Queries the camera service if a cameraId is a hidden physical camera that belongs to a
* logical camera device.
*
diff --git a/core/java/android/hardware/camera2/legacy/BurstHolder.java b/core/java/android/hardware/camera2/legacy/BurstHolder.java
deleted file mode 100644
index 23efe15fc03b..000000000000
--- a/core/java/android/hardware/camera2/legacy/BurstHolder.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.camera2.CaptureRequest;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-/**
- * Immutable container for a burst of capture results.
- */
-public class BurstHolder {
- private static final String TAG = "BurstHolder";
- private final ArrayList<RequestHolder.Builder> mRequestBuilders;
- private final boolean mRepeating;
- private final int mRequestId;
-
- /**
- * Immutable container for a burst of capture results.
- *
- * @param requestId id of the burst request.
- * @param repeating true if this burst is repeating.
- * @param requests the array of {@link CaptureRequest}s for this burst.
- * @param jpegSurfaceIds a {@link Collection} of IDs for the surfaces that have jpeg outputs.
- */
- public BurstHolder(int requestId, boolean repeating, CaptureRequest[] requests,
- Collection<Long> jpegSurfaceIds) {
- mRequestBuilders = new ArrayList<>();
- int i = 0;
- for (CaptureRequest r : requests) {
- mRequestBuilders.add(new RequestHolder.Builder(requestId, /*subsequenceId*/i,
- /*request*/r, repeating, jpegSurfaceIds));
- ++i;
- }
- mRepeating = repeating;
- mRequestId = requestId;
- }
-
- /**
- * Get the id of this request.
- */
- public int getRequestId() {
- return mRequestId;
- }
-
- /**
- * Return true if this repeating.
- */
- public boolean isRepeating() {
- return mRepeating;
- }
-
- /**
- * Return the number of requests in this burst sequence.
- */
- public int getNumberOfRequests() {
- return mRequestBuilders.size();
- }
-
- /**
- * Create a list of {@link RequestHolder} objects encapsulating the requests in this burst.
- *
- * @param frameNumber the starting framenumber for this burst.
- * @return the list of {@link RequestHolder} objects.
- */
- public List<RequestHolder> produceRequestHolders(long frameNumber) {
- ArrayList<RequestHolder> holders = new ArrayList<RequestHolder>();
- int i = 0;
- for (RequestHolder.Builder b : mRequestBuilders) {
- holders.add(b.build(frameNumber + i));
- ++i;
- }
- return holders;
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/CameraDeviceState.java b/core/java/android/hardware/camera2/legacy/CameraDeviceState.java
deleted file mode 100644
index 89ecd5f1ce63..000000000000
--- a/core/java/android/hardware/camera2/legacy/CameraDeviceState.java
+++ /dev/null
@@ -1,362 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.camera2.impl.CameraDeviceImpl;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.os.Handler;
-import android.util.Log;
-
-/**
- * Emulates a the state of a single Camera2 device.
- *
- * <p>
- * This class acts as the state machine for a camera device. Valid state transitions are given
- * in the table below:
- * </p>
- *
- * <ul>
- * <li>{@code UNCONFIGURED -> CONFIGURING}</li>
- * <li>{@code CONFIGURING -> IDLE}</li>
- * <li>{@code IDLE -> CONFIGURING}</li>
- * <li>{@code IDLE -> CAPTURING}</li>
- * <li>{@code IDLE -> IDLE}</li>
- * <li>{@code CAPTURING -> IDLE}</li>
- * <li>{@code ANY -> ERROR}</li>
- * </ul>
- */
-public class CameraDeviceState {
- private static final String TAG = "CameraDeviceState";
- private static final boolean DEBUG = false;
-
- private static final int STATE_ERROR = 0;
- private static final int STATE_UNCONFIGURED = 1;
- private static final int STATE_CONFIGURING = 2;
- private static final int STATE_IDLE = 3;
- private static final int STATE_CAPTURING = 4;
-
- private static final String[] sStateNames = { "ERROR", "UNCONFIGURED", "CONFIGURING", "IDLE",
- "CAPTURING"};
-
- private int mCurrentState = STATE_UNCONFIGURED;
- private int mCurrentError = NO_CAPTURE_ERROR;
-
- private RequestHolder mCurrentRequest = null;
-
- private Handler mCurrentHandler = null;
- private CameraDeviceStateListener mCurrentListener = null;
-
- /**
- * Error code used by {@link #setCaptureStart} and {@link #setCaptureResult} to indicate that no
- * error has occurred.
- */
- public static final int NO_CAPTURE_ERROR = -1;
-
- /**
- * CameraDeviceStateListener callbacks to be called after state transitions.
- */
- public interface CameraDeviceStateListener {
- void onError(int errorCode, Object errorArg, RequestHolder holder);
- void onConfiguring();
- void onIdle();
- void onBusy();
- void onCaptureStarted(RequestHolder holder, long timestamp);
- void onCaptureResult(CameraMetadataNative result, RequestHolder holder);
- void onRequestQueueEmpty();
- void onRepeatingRequestError(long lastFrameNumber, int repeatingRequestId);
- }
-
- /**
- * Transition to the {@code ERROR} state.
- *
- * <p>
- * The device cannot exit the {@code ERROR} state. If the device was not already in the
- * {@code ERROR} state, {@link CameraDeviceStateListener#onError(int, RequestHolder)} will be
- * called.
- * </p>
- *
- * @param error the error to set. Should be one of the error codes defined in
- * {@link CameraDeviceImpl.CameraDeviceCallbacks}.
- */
- public synchronized void setError(int error) {
- mCurrentError = error;
- doStateTransition(STATE_ERROR);
- }
-
- /**
- * Transition to the {@code CONFIGURING} state, or {@code ERROR} if in an invalid state.
- *
- * <p>
- * If the device was not already in the {@code CONFIGURING} state,
- * {@link CameraDeviceStateListener#onConfiguring()} will be called.
- * </p>
- *
- * @return {@code false} if an error has occurred.
- */
- public synchronized boolean setConfiguring() {
- doStateTransition(STATE_CONFIGURING);
- return mCurrentError == NO_CAPTURE_ERROR;
- }
-
- /**
- * Transition to the {@code IDLE} state, or {@code ERROR} if in an invalid state.
- *
- * <p>
- * If the device was not already in the {@code IDLE} state,
- * {@link CameraDeviceStateListener#onIdle()} will be called.
- * </p>
- *
- * @return {@code false} if an error has occurred.
- */
- public synchronized boolean setIdle() {
- doStateTransition(STATE_IDLE);
- return mCurrentError == NO_CAPTURE_ERROR;
- }
-
- /**
- * Transition to the {@code CAPTURING} state, or {@code ERROR} if in an invalid state.
- *
- * <p>
- * If the device was not already in the {@code CAPTURING} state,
- * {@link CameraDeviceStateListener#onCaptureStarted(RequestHolder)} will be called.
- * </p>
- *
- * @param request A {@link RequestHolder} containing the request for the current capture.
- * @param timestamp The timestamp of the capture start in nanoseconds.
- * @param captureError Report a recoverable error for a single request using a valid
- * error code for {@code ICameraDeviceCallbacks}, or
- * {@link #NO_CAPTURE_ERROR}
- * @return {@code false} if an error has occurred.
- */
- public synchronized boolean setCaptureStart(final RequestHolder request, long timestamp,
- int captureError) {
- mCurrentRequest = request;
- doStateTransition(STATE_CAPTURING, timestamp, captureError);
- return mCurrentError == NO_CAPTURE_ERROR;
- }
-
- /**
- * Set the result for a capture.
- *
- * <p>
- * If the device was in the {@code CAPTURING} state,
- * {@link CameraDeviceStateListener#onCaptureResult(CameraMetadataNative, RequestHolder)} will
- * be called with the given result, otherwise this will result in the device transitioning to
- * the {@code ERROR} state,
- * </p>
- *
- * @param request The {@link RequestHolder} request that created this result.
- * @param result The {@link CameraMetadataNative} result to set.
- * @param captureError Report a recoverable error for a single buffer or result using a valid
- * error code for {@code ICameraDeviceCallbacks}, or
- * {@link #NO_CAPTURE_ERROR}.
- * @param captureErrorArg An argument for some error captureError codes.
- * @return {@code false} if an error has occurred.
- */
- public synchronized boolean setCaptureResult(final RequestHolder request,
- final CameraMetadataNative result,
- final int captureError, final Object captureErrorArg) {
- if (mCurrentState != STATE_CAPTURING) {
- Log.e(TAG, "Cannot receive result while in state: " + mCurrentState);
- mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
- doStateTransition(STATE_ERROR);
- return mCurrentError == NO_CAPTURE_ERROR;
- }
-
- if (mCurrentHandler != null && mCurrentListener != null) {
- if (captureError != NO_CAPTURE_ERROR) {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onError(captureError, captureErrorArg, request);
- }
- });
- } else {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onCaptureResult(result, request);
- }
- });
- }
- }
- return mCurrentError == NO_CAPTURE_ERROR;
- }
-
- public synchronized boolean setCaptureResult(final RequestHolder request,
- final CameraMetadataNative result) {
- return setCaptureResult(request, result, NO_CAPTURE_ERROR, /*errorArg*/null);
- }
-
- /**
- * Set repeating request error.
- *
- * <p>Repeating request has been stopped due to an error such as abandoned output surfaces.</p>
- *
- * @param lastFrameNumber Frame number of the last repeating request before it is stopped.
- * @param repeatingRequestId The ID of the repeating request being stopped
- */
- public synchronized void setRepeatingRequestError(final long lastFrameNumber,
- final int repeatingRequestId) {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onRepeatingRequestError(lastFrameNumber, repeatingRequestId);
- }
- });
- }
-
- /**
- * Indicate that request queue (non-repeating) becomes empty.
- *
- * <p> Send notification that all non-repeating requests have been sent to camera device. </p>
- */
- public synchronized void setRequestQueueEmpty() {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onRequestQueueEmpty();
- }
- });
- }
-
- /**
- * Set the listener for state transition callbacks.
- *
- * @param handler handler on which to call the callbacks.
- * @param listener the {@link CameraDeviceStateListener} callbacks to call.
- */
- public synchronized void setCameraDeviceCallbacks(Handler handler,
- CameraDeviceStateListener listener) {
- mCurrentHandler = handler;
- mCurrentListener = listener;
- }
-
- private void doStateTransition(int newState) {
- doStateTransition(newState, /*timestamp*/0, NO_CAPTURE_ERROR);
- }
-
- private void doStateTransition(int newState, final long timestamp, final int error) {
- if (newState != mCurrentState) {
- String stateName = "UNKNOWN";
- if (newState >= 0 && newState < sStateNames.length) {
- stateName = sStateNames[newState];
- }
- Log.i(TAG, "Legacy camera service transitioning to state " + stateName);
- }
-
- // If we transitioned into a non-IDLE/non-ERROR state then mark the device as busy
- if(newState != STATE_ERROR && newState != STATE_IDLE) {
- if (mCurrentState != newState && mCurrentHandler != null &&
- mCurrentListener != null) {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onBusy();
- }
- });
- }
- }
-
- switch(newState) {
- case STATE_ERROR:
- if (mCurrentState != STATE_ERROR && mCurrentHandler != null &&
- mCurrentListener != null) {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onError(mCurrentError, /*errorArg*/null, mCurrentRequest);
- }
- });
- }
- mCurrentState = STATE_ERROR;
- break;
- case STATE_CONFIGURING:
- if (mCurrentState != STATE_UNCONFIGURED && mCurrentState != STATE_IDLE) {
- Log.e(TAG, "Cannot call configure while in state: " + mCurrentState);
- mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
- doStateTransition(STATE_ERROR);
- break;
- }
- if (mCurrentState != STATE_CONFIGURING && mCurrentHandler != null &&
- mCurrentListener != null) {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onConfiguring();
- }
- });
- }
- mCurrentState = STATE_CONFIGURING;
- break;
- case STATE_IDLE:
- if (mCurrentState == STATE_IDLE) {
- break;
- }
-
- if (mCurrentState != STATE_CONFIGURING && mCurrentState != STATE_CAPTURING) {
- Log.e(TAG, "Cannot call idle while in state: " + mCurrentState);
- mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
- doStateTransition(STATE_ERROR);
- break;
- }
-
- if (mCurrentState != STATE_IDLE && mCurrentHandler != null &&
- mCurrentListener != null) {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onIdle();
- }
- });
- }
- mCurrentState = STATE_IDLE;
- break;
- case STATE_CAPTURING:
- if (mCurrentState != STATE_IDLE && mCurrentState != STATE_CAPTURING) {
- Log.e(TAG, "Cannot call capture while in state: " + mCurrentState);
- mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
- doStateTransition(STATE_ERROR);
- break;
- }
-
- if (mCurrentHandler != null && mCurrentListener != null) {
- if (error != NO_CAPTURE_ERROR) {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onError(error, /*errorArg*/null, mCurrentRequest);
- }
- });
- } else {
- mCurrentHandler.post(new Runnable() {
- @Override
- public void run() {
- mCurrentListener.onCaptureStarted(mCurrentRequest, timestamp);
- }
- });
- }
- }
- mCurrentState = STATE_CAPTURING;
- break;
- default:
- throw new IllegalStateException("Transition to unknown state: " + newState);
- }
- }
-
-
-}
diff --git a/core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java b/core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java
deleted file mode 100644
index cf8cab2cbc44..000000000000
--- a/core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java
+++ /dev/null
@@ -1,805 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.ICameraService;
-import android.hardware.Camera;
-import android.hardware.Camera.CameraInfo;
-import android.hardware.camera2.CameraAccessException;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.ICameraDeviceCallbacks;
-import android.hardware.camera2.ICameraDeviceUser;
-import android.hardware.camera2.ICameraOfflineSession;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.hardware.camera2.impl.CaptureResultExtras;
-import android.hardware.camera2.impl.PhysicalCaptureResultInfo;
-import android.hardware.camera2.params.OutputConfiguration;
-import android.hardware.camera2.params.SessionConfiguration;
-import android.hardware.camera2.utils.SubmitInfo;
-import android.os.ConditionVariable;
-import android.os.IBinder;
-import android.os.Looper;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.os.Message;
-import android.os.RemoteException;
-import android.os.ServiceSpecificException;
-import android.util.Log;
-import android.util.Size;
-import android.util.SparseArray;
-import android.view.Surface;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static android.system.OsConstants.EACCES;
-import static android.system.OsConstants.ENODEV;
-
-/**
- * Compatibility implementation of the Camera2 API binder interface.
- *
- * <p>
- * This is intended to be called from the same process as client
- * {@link android.hardware.camera2.CameraDevice}, and wraps a
- * {@link android.hardware.camera2.legacy.LegacyCameraDevice} that emulates Camera2 service using
- * the Camera1 API.
- * </p>
- *
- * <p>
- * Keep up to date with ICameraDeviceUser.aidl.
- * </p>
- */
-@SuppressWarnings("deprecation")
-public class CameraDeviceUserShim implements ICameraDeviceUser {
- private static final String TAG = "CameraDeviceUserShim";
-
- private static final boolean DEBUG = false;
- private static final int OPEN_CAMERA_TIMEOUT_MS = 5000; // 5 sec (same as api1 cts timeout)
-
- private final LegacyCameraDevice mLegacyDevice;
-
- private final Object mConfigureLock = new Object();
- private int mSurfaceIdCounter;
- private boolean mConfiguring;
- private final SparseArray<Surface> mSurfaces;
- private final CameraCharacteristics mCameraCharacteristics;
- private final CameraLooper mCameraInit;
- private final CameraCallbackThread mCameraCallbacks;
-
-
- protected CameraDeviceUserShim(int cameraId, LegacyCameraDevice legacyCamera,
- CameraCharacteristics characteristics, CameraLooper cameraInit,
- CameraCallbackThread cameraCallbacks) {
- mLegacyDevice = legacyCamera;
- mConfiguring = false;
- mSurfaces = new SparseArray<Surface>();
- mCameraCharacteristics = characteristics;
- mCameraInit = cameraInit;
- mCameraCallbacks = cameraCallbacks;
-
- mSurfaceIdCounter = 0;
- }
-
- private static int translateErrorsFromCamera1(int errorCode) {
- if (errorCode == -EACCES) {
- return ICameraService.ERROR_PERMISSION_DENIED;
- }
-
- return errorCode;
- }
-
- /**
- * Create a separate looper/thread for the camera to run on; open the camera.
- *
- * <p>Since the camera automatically latches on to the current thread's looper,
- * it's important that we have our own thread with our own looper to guarantee
- * that the camera callbacks get correctly posted to our own thread.</p>
- */
- private static class CameraLooper implements Runnable, AutoCloseable {
- private final int mCameraId;
- private Looper mLooper;
- private volatile int mInitErrors;
- private final Camera mCamera = Camera.openUninitialized();
- private final ConditionVariable mStartDone = new ConditionVariable();
- private final Thread mThread;
-
- /**
- * Spin up a new thread, immediately open the camera in the background.
- *
- * <p>Use {@link #waitForOpen} to block until the camera is finished opening.</p>
- *
- * @param cameraId numeric camera Id
- *
- * @see #waitForOpen
- */
- public CameraLooper(int cameraId) {
- mCameraId = cameraId;
-
- mThread = new Thread(this, "LegacyCameraLooper");
- mThread.start();
- }
-
- public Camera getCamera() {
- return mCamera;
- }
-
- @Override
- public void run() {
- // Set up a looper to be used by camera.
- Looper.prepare();
-
- // Save the looper so that we can terminate this thread
- // after we are done with it.
- mLooper = Looper.myLooper();
- mInitErrors = mCamera.cameraInitUnspecified(mCameraId);
- mStartDone.open();
- Looper.loop(); // Blocks forever until #close is called.
- }
-
- /**
- * Quit the looper safely; then join until the thread shuts down.
- */
- @Override
- public void close() {
- if (mLooper == null) {
- return;
- }
-
- mLooper.quitSafely();
- try {
- mThread.join();
- } catch (InterruptedException e) {
- throw new AssertionError(e);
- }
-
- mLooper = null;
- }
-
- /**
- * Block until the camera opens; then return its initialization error code (if any).
- *
- * @param timeoutMs timeout in milliseconds
- *
- * @return int error code
- *
- * @throws ServiceSpecificException if the camera open times out with ({@code CAMERA_ERROR})
- */
- public int waitForOpen(int timeoutMs) {
- // Block until the camera is open asynchronously
- if (!mStartDone.block(timeoutMs)) {
- Log.e(TAG, "waitForOpen - Camera failed to open after timeout of "
- + OPEN_CAMERA_TIMEOUT_MS + " ms");
- try {
- mCamera.release();
- } catch (RuntimeException e) {
- Log.e(TAG, "connectBinderShim - Failed to release camera after timeout ", e);
- }
-
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION);
- }
-
- return mInitErrors;
- }
- }
-
- /**
- * A thread to process callbacks to send back to the camera client.
- *
- * <p>This effectively emulates one-way binder semantics when in the same process as the
- * callee.</p>
- */
- private static class CameraCallbackThread implements ICameraDeviceCallbacks {
- private static final int CAMERA_ERROR = 0;
- private static final int CAMERA_IDLE = 1;
- private static final int CAPTURE_STARTED = 2;
- private static final int RESULT_RECEIVED = 3;
- private static final int PREPARED = 4;
- private static final int REPEATING_REQUEST_ERROR = 5;
- private static final int REQUEST_QUEUE_EMPTY = 6;
-
- private final HandlerThread mHandlerThread;
- private Handler mHandler;
-
- private final ICameraDeviceCallbacks mCallbacks;
-
- public CameraCallbackThread(ICameraDeviceCallbacks callbacks) {
- mCallbacks = callbacks;
-
- mHandlerThread = new HandlerThread("LegacyCameraCallback");
- mHandlerThread.start();
- }
-
- public void close() {
- mHandlerThread.quitSafely();
- }
-
- @Override
- public void onDeviceError(final int errorCode, final CaptureResultExtras resultExtras) {
- Message msg = getHandler().obtainMessage(CAMERA_ERROR,
- /*arg1*/ errorCode, /*arg2*/ 0,
- /*obj*/ resultExtras);
- getHandler().sendMessage(msg);
- }
-
- @Override
- public void onDeviceIdle() {
- Message msg = getHandler().obtainMessage(CAMERA_IDLE);
- getHandler().sendMessage(msg);
- }
-
- @Override
- public void onCaptureStarted(final CaptureResultExtras resultExtras, final long timestamp) {
- Message msg = getHandler().obtainMessage(CAPTURE_STARTED,
- /*arg1*/ (int) (timestamp & 0xFFFFFFFFL),
- /*arg2*/ (int) ( (timestamp >> 32) & 0xFFFFFFFFL),
- /*obj*/ resultExtras);
- getHandler().sendMessage(msg);
- }
-
- @Override
- public void onResultReceived(final CameraMetadataNative result,
- final CaptureResultExtras resultExtras,
- PhysicalCaptureResultInfo physicalResults[]) {
- Object[] resultArray = new Object[] { result, resultExtras };
- Message msg = getHandler().obtainMessage(RESULT_RECEIVED,
- /*obj*/ resultArray);
- getHandler().sendMessage(msg);
- }
-
- @Override
- public void onPrepared(int streamId) {
- Message msg = getHandler().obtainMessage(PREPARED,
- /*arg1*/ streamId, /*arg2*/ 0);
- getHandler().sendMessage(msg);
- }
-
- @Override
- public void onRepeatingRequestError(long lastFrameNumber, int repeatingRequestId) {
- Object[] objArray = new Object[] { lastFrameNumber, repeatingRequestId };
- Message msg = getHandler().obtainMessage(REPEATING_REQUEST_ERROR,
- /*obj*/ objArray);
- getHandler().sendMessage(msg);
- }
-
- @Override
- public void onRequestQueueEmpty() {
- Message msg = getHandler().obtainMessage(REQUEST_QUEUE_EMPTY,
- /* arg1 */ 0, /* arg2 */ 0);
- getHandler().sendMessage(msg);
- }
-
- @Override
- public IBinder asBinder() {
- // This is solely intended to be used for in-process binding.
- return null;
- }
-
- private Handler getHandler() {
- if (mHandler == null) {
- mHandler = new CallbackHandler(mHandlerThread.getLooper());
- }
- return mHandler;
- }
-
- private class CallbackHandler extends Handler {
- public CallbackHandler(Looper l) {
- super(l);
- }
-
- @Override
- public void handleMessage(Message msg) {
- try {
- switch (msg.what) {
- case CAMERA_ERROR: {
- int errorCode = msg.arg1;
- CaptureResultExtras resultExtras = (CaptureResultExtras) msg.obj;
- mCallbacks.onDeviceError(errorCode, resultExtras);
- break;
- }
- case CAMERA_IDLE:
- mCallbacks.onDeviceIdle();
- break;
- case CAPTURE_STARTED: {
- long timestamp = msg.arg2 & 0xFFFFFFFFL;
- timestamp = (timestamp << 32) | (msg.arg1 & 0xFFFFFFFFL);
- CaptureResultExtras resultExtras = (CaptureResultExtras) msg.obj;
- mCallbacks.onCaptureStarted(resultExtras, timestamp);
- break;
- }
- case RESULT_RECEIVED: {
- Object[] resultArray = (Object[]) msg.obj;
- CameraMetadataNative result = (CameraMetadataNative) resultArray[0];
- CaptureResultExtras resultExtras = (CaptureResultExtras) resultArray[1];
- mCallbacks.onResultReceived(result, resultExtras,
- new PhysicalCaptureResultInfo[0]);
- break;
- }
- case PREPARED: {
- int streamId = msg.arg1;
- mCallbacks.onPrepared(streamId);
- break;
- }
- case REPEATING_REQUEST_ERROR: {
- Object[] objArray = (Object[]) msg.obj;
- long lastFrameNumber = (Long) objArray[0];
- int repeatingRequestId = (Integer) objArray[1];
- mCallbacks.onRepeatingRequestError(lastFrameNumber, repeatingRequestId);
- break;
- }
- case REQUEST_QUEUE_EMPTY: {
- mCallbacks.onRequestQueueEmpty();
- break;
- }
- default:
- throw new IllegalArgumentException(
- "Unknown callback message " + msg.what);
- }
- } catch (RemoteException e) {
- throw new IllegalStateException(
- "Received remote exception during camera callback " + msg.what, e);
- }
- }
- }
- }
-
- public static CameraDeviceUserShim connectBinderShim(ICameraDeviceCallbacks callbacks,
- int cameraId, Size displaySize) {
- if (DEBUG) {
- Log.d(TAG, "Opening shim Camera device");
- }
-
- /*
- * Put the camera open on a separate thread with its own looper; otherwise
- * if the main thread is used then the callbacks might never get delivered
- * (e.g. in CTS which run its own default looper only after tests)
- */
-
- CameraLooper init = new CameraLooper(cameraId);
-
- CameraCallbackThread threadCallbacks = new CameraCallbackThread(callbacks);
-
- // TODO: Make this async instead of blocking
- int initErrors = init.waitForOpen(OPEN_CAMERA_TIMEOUT_MS);
- Camera legacyCamera = init.getCamera();
-
- // Check errors old HAL initialization
- LegacyExceptionUtils.throwOnServiceError(initErrors);
-
- // Disable shutter sounds (this will work unconditionally) for api2 clients
- legacyCamera.disableShutterSound();
-
- CameraInfo info = new CameraInfo();
- Camera.getCameraInfo(cameraId, info);
-
- Camera.Parameters legacyParameters = null;
- try {
- legacyParameters = legacyCamera.getParameters();
- } catch (RuntimeException e) {
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION,
- "Unable to get initial parameters: " + e.getMessage());
- }
-
- CameraCharacteristics characteristics =
- LegacyMetadataMapper.createCharacteristics(legacyParameters, info, cameraId,
- displaySize);
- LegacyCameraDevice device = new LegacyCameraDevice(
- cameraId, legacyCamera, characteristics, threadCallbacks);
- return new CameraDeviceUserShim(cameraId, device, characteristics, init, threadCallbacks);
- }
-
- @Override
- public void disconnect() {
- if (DEBUG) {
- Log.d(TAG, "disconnect called.");
- }
-
- if (mLegacyDevice.isClosed()) {
- Log.w(TAG, "Cannot disconnect, device has already been closed.");
- }
-
- try {
- mLegacyDevice.close();
- } finally {
- mCameraInit.close();
- mCameraCallbacks.close();
- }
- }
-
- @Override
- public SubmitInfo submitRequest(CaptureRequest request, boolean streaming) {
- if (DEBUG) {
- Log.d(TAG, "submitRequest called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot submit request, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (mConfiguring) {
- String err = "Cannot submit request, configuration change in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- }
- return mLegacyDevice.submitRequest(request, streaming);
- }
-
- @Override
- public SubmitInfo submitRequestList(CaptureRequest[] request, boolean streaming) {
- if (DEBUG) {
- Log.d(TAG, "submitRequestList called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot submit request list, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (mConfiguring) {
- String err = "Cannot submit request, configuration change in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- }
- return mLegacyDevice.submitRequestList(request, streaming);
- }
-
- @Override
- public long cancelRequest(int requestId) {
- if (DEBUG) {
- Log.d(TAG, "cancelRequest called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot cancel request, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (mConfiguring) {
- String err = "Cannot cancel request, configuration change in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- }
- return mLegacyDevice.cancelRequest(requestId);
- }
-
- @Override
- public boolean isSessionConfigurationSupported(SessionConfiguration sessionConfig) {
- if (sessionConfig.getSessionType() != SessionConfiguration.SESSION_REGULAR) {
- Log.e(TAG, "Session type: " + sessionConfig.getSessionType() + " is different from " +
- " regular. Legacy devices support only regular session types!");
- return false;
- }
-
- if (sessionConfig.getInputConfiguration() != null) {
- Log.e(TAG, "Input configuration present, legacy devices do not support this feature!");
- return false;
- }
-
- List<OutputConfiguration> outputConfigs = sessionConfig.getOutputConfigurations();
- if (outputConfigs.isEmpty()) {
- Log.e(TAG, "Empty output configuration list!");
- return false;
- }
-
- SparseArray<Surface> surfaces = new SparseArray<Surface>(outputConfigs.size());
- int idx = 0;
- for (OutputConfiguration outputConfig : outputConfigs) {
- List<Surface> surfaceList = outputConfig.getSurfaces();
- if (surfaceList.isEmpty() || (surfaceList.size() > 1)) {
- Log.e(TAG, "Legacy devices do not support deferred or shared surfaces!");
- return false;
- }
-
- surfaces.put(idx++, outputConfig.getSurface());
- }
-
- int ret = mLegacyDevice.configureOutputs(surfaces, /*validateSurfacesOnly*/true);
-
- return ret == LegacyExceptionUtils.NO_ERROR;
- }
-
- @Override
- public void beginConfigure() {
- if (DEBUG) {
- Log.d(TAG, "beginConfigure called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot begin configure, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (mConfiguring) {
- String err = "Cannot begin configure, configuration change already in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- mConfiguring = true;
- }
- }
-
- @Override
- public int[] endConfigure(int operatingMode, CameraMetadataNative sessionParams) {
- if (DEBUG) {
- Log.d(TAG, "endConfigure called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot end configure, device has been closed.";
- Log.e(TAG, err);
- synchronized(mConfigureLock) {
- mConfiguring = false;
- }
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- if (operatingMode != ICameraDeviceUser.NORMAL_MODE) {
- String err = "LEGACY devices do not support this operating mode";
- Log.e(TAG, err);
- synchronized(mConfigureLock) {
- mConfiguring = false;
- }
- throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
- }
-
- SparseArray<Surface> surfaces = null;
- synchronized(mConfigureLock) {
- if (!mConfiguring) {
- String err = "Cannot end configure, no configuration change in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- if (mSurfaces != null) {
- surfaces = mSurfaces.clone();
- }
- mConfiguring = false;
- }
- mLegacyDevice.configureOutputs(surfaces);
-
- return new int[0]; // Offline mode is not supported
- }
-
- @Override
- public void deleteStream(int streamId) {
- if (DEBUG) {
- Log.d(TAG, "deleteStream called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot delete stream, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (!mConfiguring) {
- String err = "Cannot delete stream, no configuration change in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- int index = mSurfaces.indexOfKey(streamId);
- if (index < 0) {
- String err = "Cannot delete stream, stream id " + streamId + " doesn't exist.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
- }
- mSurfaces.removeAt(index);
- }
- }
-
- @Override
- public int createStream(OutputConfiguration outputConfiguration) {
- if (DEBUG) {
- Log.d(TAG, "createStream called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot create stream, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (!mConfiguring) {
- String err = "Cannot create stream, beginConfigure hasn't been called yet.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- if (outputConfiguration.getRotation() != OutputConfiguration.ROTATION_0) {
- String err = "Cannot create stream, stream rotation is not supported.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
- }
- int id = ++mSurfaceIdCounter;
- mSurfaces.put(id, outputConfiguration.getSurface());
- return id;
- }
- }
-
- @Override
- public void finalizeOutputConfigurations(int steamId, OutputConfiguration config) {
- String err = "Finalizing output configuration is not supported on legacy devices";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
-
- @Override
- public int createInputStream(int width, int height, int format) {
- String err = "Creating input stream is not supported on legacy devices";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
-
- @Override
- public Surface getInputSurface() {
- String err = "Getting input surface is not supported on legacy devices";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
-
- @Override
- public CameraMetadataNative createDefaultRequest(int templateId) {
- if (DEBUG) {
- Log.d(TAG, "createDefaultRequest called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot create default request, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- CameraMetadataNative template;
- try {
- template =
- LegacyMetadataMapper.createRequestTemplate(mCameraCharacteristics, templateId);
- } catch (IllegalArgumentException e) {
- String err = "createDefaultRequest - invalid templateId specified";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
- }
-
- return template;
- }
-
- @Override
- public CameraMetadataNative getCameraInfo() {
- if (DEBUG) {
- Log.d(TAG, "getCameraInfo called.");
- }
- // TODO: implement getCameraInfo.
- Log.e(TAG, "getCameraInfo unimplemented.");
- return null;
- }
-
- @Override
- public void updateOutputConfiguration(int streamId, OutputConfiguration config) {
- // TODO: b/63912484 implement updateOutputConfiguration.
- }
-
- @Override
- public void waitUntilIdle() throws RemoteException {
- if (DEBUG) {
- Log.d(TAG, "waitUntilIdle called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot wait until idle, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (mConfiguring) {
- String err = "Cannot wait until idle, configuration change in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- }
- mLegacyDevice.waitUntilIdle();
- }
-
- @Override
- public long flush() {
- if (DEBUG) {
- Log.d(TAG, "flush called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot flush, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- synchronized(mConfigureLock) {
- if (mConfiguring) {
- String err = "Cannot flush, configuration change in progress.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
- }
- }
- return mLegacyDevice.flush();
- }
-
- public void prepare(int streamId) {
- if (DEBUG) {
- Log.d(TAG, "prepare called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot prepare stream, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- // LEGACY doesn't support actual prepare, just signal success right away
- mCameraCallbacks.onPrepared(streamId);
- }
-
- public void prepare2(int maxCount, int streamId) {
- // We don't support this in LEGACY mode.
- prepare(streamId);
- }
-
- public void tearDown(int streamId) {
- if (DEBUG) {
- Log.d(TAG, "tearDown called.");
- }
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot tear down stream, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- // LEGACY doesn't support actual teardown, so just a no-op
- }
-
- @Override
- public void setCameraAudioRestriction(int mode) {
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot set camera audio restriction, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- mLegacyDevice.setAudioRestriction(mode);
- }
-
- @Override
- public int getGlobalAudioRestriction() {
- if (mLegacyDevice.isClosed()) {
- String err = "Cannot set camera audio restriction, device has been closed.";
- Log.e(TAG, err);
- throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
- }
-
- return mLegacyDevice.getAudioRestriction();
- }
-
- @Override
- public ICameraOfflineSession switchToOffline(ICameraDeviceCallbacks cbs,
- int[] offlineOutputIds) {
- throw new UnsupportedOperationException("Legacy device does not support offline mode");
- }
-
- @Override
- public IBinder asBinder() {
- // This is solely intended to be used for in-process binding.
- return null;
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/CaptureCollector.java b/core/java/android/hardware/camera2/legacy/CaptureCollector.java
deleted file mode 100644
index 113927c4c4b2..000000000000
--- a/core/java/android/hardware/camera2/legacy/CaptureCollector.java
+++ /dev/null
@@ -1,673 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.hardware.camera2.legacy;
-
-import android.hardware.camera2.impl.CameraDeviceImpl;
-import android.util.Log;
-import android.util.MutableLong;
-import android.util.Pair;
-import android.view.Surface;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.ReentrantLock;
-
-/**
- * Collect timestamps and state for each {@link CaptureRequest} as it passes through
- * the Legacy camera pipeline.
- */
-public class CaptureCollector {
- private static final String TAG = "CaptureCollector";
-
- private static final boolean DEBUG = false;
-
- private static final int FLAG_RECEIVED_JPEG = 1;
- private static final int FLAG_RECEIVED_JPEG_TS = 2;
- private static final int FLAG_RECEIVED_PREVIEW = 4;
- private static final int FLAG_RECEIVED_PREVIEW_TS = 8;
- private static final int FLAG_RECEIVED_ALL_JPEG = FLAG_RECEIVED_JPEG | FLAG_RECEIVED_JPEG_TS;
- private static final int FLAG_RECEIVED_ALL_PREVIEW = FLAG_RECEIVED_PREVIEW |
- FLAG_RECEIVED_PREVIEW_TS;
-
- private static final int MAX_JPEGS_IN_FLIGHT = 1;
-
- private class CaptureHolder implements Comparable<CaptureHolder>{
- private final RequestHolder mRequest;
- private final LegacyRequest mLegacy;
- public final boolean needsJpeg;
- public final boolean needsPreview;
-
- private long mTimestamp = 0;
- private int mReceivedFlags = 0;
- private boolean mHasStarted = false;
- private boolean mFailedJpeg = false;
- private boolean mFailedPreview = false;
- private boolean mCompleted = false;
- private boolean mPreviewCompleted = false;
-
- public CaptureHolder(RequestHolder request, LegacyRequest legacyHolder) {
- mRequest = request;
- mLegacy = legacyHolder;
- needsJpeg = request.hasJpegTargets();
- needsPreview = request.hasPreviewTargets();
- }
-
- public boolean isPreviewCompleted() {
- return (mReceivedFlags & FLAG_RECEIVED_ALL_PREVIEW) == FLAG_RECEIVED_ALL_PREVIEW;
- }
-
- public boolean isJpegCompleted() {
- return (mReceivedFlags & FLAG_RECEIVED_ALL_JPEG) == FLAG_RECEIVED_ALL_JPEG;
- }
-
- public boolean isCompleted() {
- return (needsJpeg == isJpegCompleted()) && (needsPreview == isPreviewCompleted());
- }
-
- public void tryComplete() {
- if (!mPreviewCompleted && needsPreview && isPreviewCompleted()) {
- CaptureCollector.this.onPreviewCompleted();
- mPreviewCompleted = true;
- }
-
- if (isCompleted() && !mCompleted) {
- if (mFailedPreview || mFailedJpeg) {
- if (!mHasStarted) {
- // Send a request error if the capture has not yet started.
- mRequest.failRequest();
- CaptureCollector.this.mDeviceState.setCaptureStart(mRequest, mTimestamp,
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_REQUEST);
- } else {
- // Send buffer dropped errors for each pending buffer if the request has
- // started.
- for (Surface targetSurface : mRequest.getRequest().getTargets() ) {
- try {
- if (mRequest.jpegType(targetSurface)) {
- if (mFailedJpeg) {
- CaptureCollector.this.mDeviceState.setCaptureResult(mRequest,
- /*result*/null,
- CameraDeviceImpl.CameraDeviceCallbacks.
- ERROR_CAMERA_BUFFER,
- targetSurface);
- }
- } else {
- // preview buffer
- if (mFailedPreview) {
- CaptureCollector.this.mDeviceState.setCaptureResult(mRequest,
- /*result*/null,
- CameraDeviceImpl.CameraDeviceCallbacks.
- ERROR_CAMERA_BUFFER,
- targetSurface);
- }
- }
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.e(TAG, "Unexpected exception when querying Surface: " + e);
- }
- }
- }
- }
- CaptureCollector.this.onRequestCompleted(CaptureHolder.this);
- mCompleted = true;
- }
- }
-
- public void setJpegTimestamp(long timestamp) {
- if (DEBUG) {
- Log.d(TAG, "setJpegTimestamp - called for request " + mRequest.getRequestId());
- }
- if (!needsJpeg) {
- throw new IllegalStateException(
- "setJpegTimestamp called for capture with no jpeg targets.");
- }
- if (isCompleted()) {
- throw new IllegalStateException(
- "setJpegTimestamp called on already completed request.");
- }
-
- mReceivedFlags |= FLAG_RECEIVED_JPEG_TS;
-
- if (mTimestamp == 0) {
- mTimestamp = timestamp;
- }
-
- if (!mHasStarted) {
- mHasStarted = true;
- CaptureCollector.this.mDeviceState.setCaptureStart(mRequest, mTimestamp,
- CameraDeviceState.NO_CAPTURE_ERROR);
- }
-
- tryComplete();
- }
-
- public void setJpegProduced() {
- if (DEBUG) {
- Log.d(TAG, "setJpegProduced - called for request " + mRequest.getRequestId());
- }
- if (!needsJpeg) {
- throw new IllegalStateException(
- "setJpegProduced called for capture with no jpeg targets.");
- }
- if (isCompleted()) {
- throw new IllegalStateException(
- "setJpegProduced called on already completed request.");
- }
-
- mReceivedFlags |= FLAG_RECEIVED_JPEG;
- tryComplete();
- }
-
- public void setJpegFailed() {
- if (DEBUG) {
- Log.d(TAG, "setJpegFailed - called for request " + mRequest.getRequestId());
- }
- if (!needsJpeg || isJpegCompleted()) {
- return;
- }
- mFailedJpeg = true;
-
- mReceivedFlags |= FLAG_RECEIVED_JPEG;
- mReceivedFlags |= FLAG_RECEIVED_JPEG_TS;
- tryComplete();
- }
-
- public void setPreviewTimestamp(long timestamp) {
- if (DEBUG) {
- Log.d(TAG, "setPreviewTimestamp - called for request " + mRequest.getRequestId());
- }
- if (!needsPreview) {
- throw new IllegalStateException(
- "setPreviewTimestamp called for capture with no preview targets.");
- }
- if (isCompleted()) {
- throw new IllegalStateException(
- "setPreviewTimestamp called on already completed request.");
- }
-
- mReceivedFlags |= FLAG_RECEIVED_PREVIEW_TS;
-
- if (mTimestamp == 0) {
- mTimestamp = timestamp;
- }
-
- if (!needsJpeg) {
- if (!mHasStarted) {
- mHasStarted = true;
- CaptureCollector.this.mDeviceState.setCaptureStart(mRequest, mTimestamp,
- CameraDeviceState.NO_CAPTURE_ERROR);
- }
- }
-
- tryComplete();
- }
-
- public void setPreviewProduced() {
- if (DEBUG) {
- Log.d(TAG, "setPreviewProduced - called for request " + mRequest.getRequestId());
- }
- if (!needsPreview) {
- throw new IllegalStateException(
- "setPreviewProduced called for capture with no preview targets.");
- }
- if (isCompleted()) {
- throw new IllegalStateException(
- "setPreviewProduced called on already completed request.");
- }
-
- mReceivedFlags |= FLAG_RECEIVED_PREVIEW;
- tryComplete();
- }
-
- public void setPreviewFailed() {
- if (DEBUG) {
- Log.d(TAG, "setPreviewFailed - called for request " + mRequest.getRequestId());
- }
- if (!needsPreview || isPreviewCompleted()) {
- return;
- }
- mFailedPreview = true;
-
- mReceivedFlags |= FLAG_RECEIVED_PREVIEW;
- mReceivedFlags |= FLAG_RECEIVED_PREVIEW_TS;
- tryComplete();
- }
-
- // Comparison and equals based on frame number.
- @Override
- public int compareTo(CaptureHolder captureHolder) {
- return (mRequest.getFrameNumber() > captureHolder.mRequest.getFrameNumber()) ? 1 :
- ((mRequest.getFrameNumber() == captureHolder.mRequest.getFrameNumber()) ? 0 :
- -1);
- }
-
- // Comparison and equals based on frame number.
- @Override
- public boolean equals(Object o) {
- return o instanceof CaptureHolder && compareTo((CaptureHolder) o) == 0;
- }
- }
-
- private final TreeSet<CaptureHolder> mActiveRequests;
- private final ArrayDeque<CaptureHolder> mJpegCaptureQueue;
- private final ArrayDeque<CaptureHolder> mJpegProduceQueue;
- private final ArrayDeque<CaptureHolder> mPreviewCaptureQueue;
- private final ArrayDeque<CaptureHolder> mPreviewProduceQueue;
- private final ArrayList<CaptureHolder> mCompletedRequests = new ArrayList<>();
-
- private final ReentrantLock mLock = new ReentrantLock();
- private final Condition mIsEmpty;
- private final Condition mPreviewsEmpty;
- private final Condition mNotFull;
- private final CameraDeviceState mDeviceState;
- private int mInFlight = 0;
- private int mInFlightPreviews = 0;
- private final int mMaxInFlight;
-
- /**
- * Create a new {@link CaptureCollector} that can modify the given {@link CameraDeviceState}.
- *
- * @param maxInFlight max allowed in-flight requests.
- * @param deviceState the {@link CameraDeviceState} to update as requests are processed.
- */
- public CaptureCollector(int maxInFlight, CameraDeviceState deviceState) {
- mMaxInFlight = maxInFlight;
- mJpegCaptureQueue = new ArrayDeque<>(MAX_JPEGS_IN_FLIGHT);
- mJpegProduceQueue = new ArrayDeque<>(MAX_JPEGS_IN_FLIGHT);
- mPreviewCaptureQueue = new ArrayDeque<>(mMaxInFlight);
- mPreviewProduceQueue = new ArrayDeque<>(mMaxInFlight);
- mActiveRequests = new TreeSet<>();
- mIsEmpty = mLock.newCondition();
- mNotFull = mLock.newCondition();
- mPreviewsEmpty = mLock.newCondition();
- mDeviceState = deviceState;
- }
-
- /**
- * Queue a new request.
- *
- * <p>
- * For requests that use the Camera1 API preview output stream, this will block if there are
- * already {@code maxInFlight} requests in progress (until at least one prior request has
- * completed). For requests that use the Camera1 API jpeg callbacks, this will block until
- * all prior requests have been completed to avoid stopping preview for
- * {@link android.hardware.Camera#takePicture} before prior preview requests have been
- * completed.
- * </p>
- * @param holder the {@link RequestHolder} for this request.
- * @param legacy the {@link LegacyRequest} for this request; this will not be mutated.
- * @param timeout a timeout to use for this call.
- * @param unit the units to use for the timeout.
- * @return {@code false} if this method timed out.
- * @throws InterruptedException if this thread is interrupted.
- */
- public boolean queueRequest(RequestHolder holder, LegacyRequest legacy, long timeout,
- TimeUnit unit)
- throws InterruptedException {
- CaptureHolder h = new CaptureHolder(holder, legacy);
- long nanos = unit.toNanos(timeout);
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- if (DEBUG) {
- Log.d(TAG, "queueRequest for request " + holder.getRequestId() +
- " - " + mInFlight + " requests remain in flight.");
- }
-
- if (!(h.needsJpeg || h.needsPreview)) {
- throw new IllegalStateException("Request must target at least one output surface!");
- }
-
- if (h.needsJpeg) {
- // Wait for all current requests to finish before queueing jpeg.
- while (mInFlight > 0) {
- if (nanos <= 0) {
- return false;
- }
- nanos = mIsEmpty.awaitNanos(nanos);
- }
- mJpegCaptureQueue.add(h);
- mJpegProduceQueue.add(h);
- }
- if (h.needsPreview) {
- while (mInFlight >= mMaxInFlight) {
- if (nanos <= 0) {
- return false;
- }
- nanos = mNotFull.awaitNanos(nanos);
- }
- mPreviewCaptureQueue.add(h);
- mPreviewProduceQueue.add(h);
- mInFlightPreviews++;
- }
- mActiveRequests.add(h);
-
- mInFlight++;
- return true;
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Wait all queued requests to complete.
- *
- * @param timeout a timeout to use for this call.
- * @param unit the units to use for the timeout.
- * @return {@code false} if this method timed out.
- * @throws InterruptedException if this thread is interrupted.
- */
- public boolean waitForEmpty(long timeout, TimeUnit unit) throws InterruptedException {
- long nanos = unit.toNanos(timeout);
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- while (mInFlight > 0) {
- if (nanos <= 0) {
- return false;
- }
- nanos = mIsEmpty.awaitNanos(nanos);
- }
- return true;
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Wait all queued requests that use the Camera1 API preview output to complete.
- *
- * @param timeout a timeout to use for this call.
- * @param unit the units to use for the timeout.
- * @return {@code false} if this method timed out.
- * @throws InterruptedException if this thread is interrupted.
- */
- public boolean waitForPreviewsEmpty(long timeout, TimeUnit unit) throws InterruptedException {
- long nanos = unit.toNanos(timeout);
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- while (mInFlightPreviews > 0) {
- if (nanos <= 0) {
- return false;
- }
- nanos = mPreviewsEmpty.awaitNanos(nanos);
- }
- return true;
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Wait for the specified request to be completed (all buffers available).
- *
- * <p>May not wait for the same request more than once, since a successful wait
- * will erase the history of that request.</p>
- *
- * @param holder the {@link RequestHolder} for this request.
- * @param timeout a timeout to use for this call.
- * @param unit the units to use for the timeout.
- * @param timestamp the timestamp of the request will be written out to here, in ns
- *
- * @return {@code false} if this method timed out.
- *
- * @throws InterruptedException if this thread is interrupted.
- */
- public boolean waitForRequestCompleted(RequestHolder holder, long timeout, TimeUnit unit,
- MutableLong timestamp)
- throws InterruptedException {
- long nanos = unit.toNanos(timeout);
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- while (!removeRequestIfCompleted(holder, /*out*/timestamp)) {
- if (nanos <= 0) {
- return false;
- }
- nanos = mNotFull.awaitNanos(nanos);
- }
- return true;
- } finally {
- lock.unlock();
- }
- }
-
- private boolean removeRequestIfCompleted(RequestHolder holder, MutableLong timestamp) {
- int i = 0;
- for (CaptureHolder h : mCompletedRequests) {
- if (h.mRequest.equals(holder)) {
- timestamp.value = h.mTimestamp;
- mCompletedRequests.remove(i);
- return true;
- }
- i++;
- }
-
- return false;
- }
-
- /**
- * Called to alert the {@link CaptureCollector} that the jpeg capture has begun.
- *
- * @param timestamp the time of the jpeg capture.
- * @return the {@link RequestHolder} for the request associated with this capture.
- */
- public RequestHolder jpegCaptured(long timestamp) {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- CaptureHolder h = mJpegCaptureQueue.poll();
- if (h == null) {
- Log.w(TAG, "jpegCaptured called with no jpeg request on queue!");
- return null;
- }
- h.setJpegTimestamp(timestamp);
- return h.mRequest;
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Called to alert the {@link CaptureCollector} that the jpeg capture has completed.
- *
- * @return a pair containing the {@link RequestHolder} and the timestamp of the capture.
- */
- public Pair<RequestHolder, Long> jpegProduced() {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- CaptureHolder h = mJpegProduceQueue.poll();
- if (h == null) {
- Log.w(TAG, "jpegProduced called with no jpeg request on queue!");
- return null;
- }
- h.setJpegProduced();
- return new Pair<>(h.mRequest, h.mTimestamp);
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Check if there are any pending capture requests that use the Camera1 API preview output.
- *
- * @return {@code true} if there are pending preview requests.
- */
- public boolean hasPendingPreviewCaptures() {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- return !mPreviewCaptureQueue.isEmpty();
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Called to alert the {@link CaptureCollector} that the preview capture has begun.
- *
- * @param timestamp the time of the preview capture.
- * @return a pair containing the {@link RequestHolder} and the timestamp of the capture.
- */
- public Pair<RequestHolder, Long> previewCaptured(long timestamp) {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- CaptureHolder h = mPreviewCaptureQueue.poll();
- if (h == null) {
- if (DEBUG) {
- Log.d(TAG, "previewCaptured called with no preview request on queue!");
- }
- return null;
- }
- h.setPreviewTimestamp(timestamp);
- return new Pair<>(h.mRequest, h.mTimestamp);
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Called to alert the {@link CaptureCollector} that the preview capture has completed.
- *
- * @return the {@link RequestHolder} for the request associated with this capture.
- */
- public RequestHolder previewProduced() {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- CaptureHolder h = mPreviewProduceQueue.poll();
- if (h == null) {
- Log.w(TAG, "previewProduced called with no preview request on queue!");
- return null;
- }
- h.setPreviewProduced();
- return h.mRequest;
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Called to alert the {@link CaptureCollector} that the next pending preview capture has failed.
- */
- public void failNextPreview() {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- CaptureHolder h1 = mPreviewCaptureQueue.peek();
- CaptureHolder h2 = mPreviewProduceQueue.peek();
-
- // Find the request with the lowest frame number.
- CaptureHolder h = (h1 == null) ? h2 :
- ((h2 == null) ? h1 :
- ((h1.compareTo(h2) <= 0) ? h1 :
- h2));
-
- if (h != null) {
- mPreviewCaptureQueue.remove(h);
- mPreviewProduceQueue.remove(h);
- mActiveRequests.remove(h);
- h.setPreviewFailed();
- }
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Called to alert the {@link CaptureCollector} that the next pending jpeg capture has failed.
- */
- public void failNextJpeg() {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- CaptureHolder h1 = mJpegCaptureQueue.peek();
- CaptureHolder h2 = mJpegProduceQueue.peek();
-
- // Find the request with the lowest frame number.
- CaptureHolder h = (h1 == null) ? h2 :
- ((h2 == null) ? h1 :
- ((h1.compareTo(h2) <= 0) ? h1 :
- h2));
-
- if (h != null) {
- mJpegCaptureQueue.remove(h);
- mJpegProduceQueue.remove(h);
- mActiveRequests.remove(h);
- h.setJpegFailed();
- }
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Called to alert the {@link CaptureCollector} all pending captures have failed.
- */
- public void failAll() {
- final ReentrantLock lock = this.mLock;
- lock.lock();
- try {
- CaptureHolder h;
- while ((h = mActiveRequests.pollFirst()) != null) {
- h.setPreviewFailed();
- h.setJpegFailed();
- }
- mPreviewCaptureQueue.clear();
- mPreviewProduceQueue.clear();
- mJpegCaptureQueue.clear();
- mJpegProduceQueue.clear();
- } finally {
- lock.unlock();
- }
- }
-
- private void onPreviewCompleted() {
- mInFlightPreviews--;
- if (mInFlightPreviews < 0) {
- throw new IllegalStateException(
- "More preview captures completed than requests queued.");
- }
- if (mInFlightPreviews == 0) {
- mPreviewsEmpty.signalAll();
- }
- }
-
- private void onRequestCompleted(CaptureHolder capture) {
- RequestHolder request = capture.mRequest;
-
- mInFlight--;
- if (DEBUG) {
- Log.d(TAG, "Completed request " + request.getRequestId() +
- ", " + mInFlight + " requests remain in flight.");
- }
- if (mInFlight < 0) {
- throw new IllegalStateException(
- "More captures completed than requests queued.");
- }
-
- mCompletedRequests.add(capture);
- mActiveRequests.remove(capture);
-
- mNotFull.signalAll();
- if (mInFlight == 0) {
- mIsEmpty.signalAll();
- }
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/GLThreadManager.java b/core/java/android/hardware/camera2/legacy/GLThreadManager.java
deleted file mode 100644
index 152d82d5a6da..000000000000
--- a/core/java/android/hardware/camera2/legacy/GLThreadManager.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.SurfaceTexture;
-import android.hardware.camera2.impl.CameraDeviceImpl;
-import android.os.ConditionVariable;
-import android.os.Handler;
-import android.os.Message;
-import android.util.Log;
-import android.util.Pair;
-import android.util.Size;
-import android.view.Surface;
-
-import java.util.Collection;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * GLThreadManager handles the thread used for rendering into the configured output surfaces.
- */
-public class GLThreadManager {
- private final String TAG;
- private static final boolean DEBUG = false;
-
- private static final int MSG_NEW_CONFIGURATION = 1;
- private static final int MSG_NEW_FRAME = 2;
- private static final int MSG_CLEANUP = 3;
- private static final int MSG_DROP_FRAMES = 4;
- private static final int MSG_ALLOW_FRAMES = 5;
-
- private CaptureCollector mCaptureCollector;
-
- private final CameraDeviceState mDeviceState;
-
- private final SurfaceTextureRenderer mTextureRenderer;
-
- private final RequestHandlerThread mGLHandlerThread;
-
- private final RequestThreadManager.FpsCounter mPrevCounter =
- new RequestThreadManager.FpsCounter("GL Preview Producer");
-
- /**
- * Container object for Configure messages.
- */
- private static class ConfigureHolder {
- public final ConditionVariable condition;
- public final Collection<Pair<Surface, Size>> surfaces;
- public final CaptureCollector collector;
-
- public ConfigureHolder(ConditionVariable condition, Collection<Pair<Surface,
- Size>> surfaces, CaptureCollector collector) {
- this.condition = condition;
- this.surfaces = surfaces;
- this.collector = collector;
- }
- }
-
- private final Handler.Callback mGLHandlerCb = new Handler.Callback() {
- private boolean mCleanup = false;
- private boolean mConfigured = false;
- private boolean mDroppingFrames = false;
-
- @SuppressWarnings("unchecked")
- @Override
- public boolean handleMessage(Message msg) {
- if (mCleanup) {
- return true;
- }
- try {
- switch (msg.what) {
- case MSG_NEW_CONFIGURATION:
- ConfigureHolder configure = (ConfigureHolder) msg.obj;
- mTextureRenderer.cleanupEGLContext();
- mTextureRenderer.configureSurfaces(configure.surfaces);
- mCaptureCollector = checkNotNull(configure.collector);
- configure.condition.open();
- mConfigured = true;
- break;
- case MSG_NEW_FRAME:
- if (mDroppingFrames) {
- Log.w(TAG, "Ignoring frame.");
- break;
- }
- if (DEBUG) {
- mPrevCounter.countAndLog();
- }
- if (!mConfigured) {
- Log.e(TAG, "Dropping frame, EGL context not configured!");
- }
- mTextureRenderer.drawIntoSurfaces(mCaptureCollector);
- break;
- case MSG_CLEANUP:
- mTextureRenderer.cleanupEGLContext();
- mCleanup = true;
- mConfigured = false;
- break;
- case MSG_DROP_FRAMES:
- mDroppingFrames = true;
- break;
- case MSG_ALLOW_FRAMES:
- mDroppingFrames = false;
- break;
- case RequestHandlerThread.MSG_POKE_IDLE_HANDLER:
- // OK: Ignore message.
- break;
- default:
- Log.e(TAG, "Unhandled message " + msg.what + " on GLThread.");
- break;
- }
- } catch (Exception e) {
- Log.e(TAG, "Received exception on GL render thread: ", e);
- mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- }
- return true;
- }
- };
-
- /**
- * Create a new GL thread and renderer.
- *
- * @param cameraId the camera id for this thread.
- * @param facing direction the camera is facing.
- * @param state {@link CameraDeviceState} to use for error handling.
- */
- public GLThreadManager(int cameraId, int facing, CameraDeviceState state) {
- mTextureRenderer = new SurfaceTextureRenderer(facing);
- TAG = String.format("CameraDeviceGLThread-%d", cameraId);
- mGLHandlerThread = new RequestHandlerThread(TAG, mGLHandlerCb);
- mDeviceState = state;
- }
-
- /**
- * Start the thread.
- *
- * <p>
- * This must be called before queueing new frames.
- * </p>
- */
- public void start() {
- mGLHandlerThread.start();
- }
-
- /**
- * Wait until the thread has started.
- */
- public void waitUntilStarted() {
- mGLHandlerThread.waitUntilStarted();
- }
-
- /**
- * Quit the thread.
- *
- * <p>
- * No further methods can be called after this.
- * </p>
- */
- public void quit() {
- Handler handler = mGLHandlerThread.getHandler();
- handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
- mGLHandlerThread.quitSafely();
- try {
- mGLHandlerThread.join();
- } catch (InterruptedException e) {
- Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
- mGLHandlerThread.getName(), mGLHandlerThread.getId()));
- }
- }
-
- /**
- * Queue a new call to draw into the surfaces specified in the next available preview
- * request from the {@link CaptureCollector} passed to
- * {@link #setConfigurationAndWait(java.util.Collection, CaptureCollector)};
- */
- public void queueNewFrame() {
- Handler handler = mGLHandlerThread.getHandler();
-
- /**
- * Avoid queuing more than one new frame. If we are not consuming faster than frames
- * are produced, drop frames rather than allowing the queue to back up.
- */
- if (!handler.hasMessages(MSG_NEW_FRAME)) {
- handler.sendMessage(handler.obtainMessage(MSG_NEW_FRAME));
- } else {
- Log.e(TAG, "GLThread dropping frame. Not consuming frames quickly enough!");
- }
- }
-
- /**
- * Configure the GL renderer for the given set of output surfaces, and block until
- * this configuration has been applied.
- *
- * @param surfaces a collection of pairs of {@link android.view.Surface}s and their
- * corresponding sizes to configure.
- * @param collector a {@link CaptureCollector} to retrieve requests from.
- */
- public void setConfigurationAndWait(Collection<Pair<Surface, Size>> surfaces,
- CaptureCollector collector) {
- checkNotNull(collector, "collector must not be null");
- Handler handler = mGLHandlerThread.getHandler();
-
- final ConditionVariable condition = new ConditionVariable(/*closed*/false);
- ConfigureHolder configure = new ConfigureHolder(condition, surfaces, collector);
-
- Message m = handler.obtainMessage(MSG_NEW_CONFIGURATION, /*arg1*/0, /*arg2*/0, configure);
- handler.sendMessage(m);
-
- // Block until configuration applied.
- condition.block();
- }
-
- /**
- * Get the underlying surface to produce frames from.
- *
- * <p>
- * This returns the surface that is drawn into the set of surfaces passed in for each frame.
- * This method should only be called after a call to
- * {@link #setConfigurationAndWait(java.util.Collection)}. Calling this before the first call
- * to {@link #setConfigurationAndWait(java.util.Collection)}, after {@link #quit()}, or
- * concurrently to one of these calls may result in an invalid
- * {@link android.graphics.SurfaceTexture} being returned.
- * </p>
- *
- * @return an {@link android.graphics.SurfaceTexture} to draw to.
- */
- public SurfaceTexture getCurrentSurfaceTexture() {
- return mTextureRenderer.getSurfaceTexture();
- }
-
- /**
- * Ignore any subsequent calls to {@link #queueNewFrame(java.util.Collection)}.
- */
- public void ignoreNewFrames() {
- mGLHandlerThread.getHandler().sendEmptyMessage(MSG_DROP_FRAMES);
- }
-
- /**
- * Wait until no messages are queued.
- */
- public void waitUntilIdle() {
- mGLHandlerThread.waitUntilIdle();
- }
-
- /**
- * Re-enable drawing new frames after a call to {@link #ignoreNewFrames()}.
- */
- public void allowNewFrames() {
- mGLHandlerThread.getHandler().sendEmptyMessage(MSG_ALLOW_FRAMES);
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java b/core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java
deleted file mode 100644
index fdd578c419d8..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java
+++ /dev/null
@@ -1,886 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.ImageFormat;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.impl.CameraDeviceImpl;
-import android.hardware.camera2.impl.CaptureResultExtras;
-import android.hardware.camera2.impl.PhysicalCaptureResultInfo;
-import android.hardware.camera2.ICameraDeviceCallbacks;
-import android.hardware.camera2.params.StreamConfigurationMap;
-import android.hardware.camera2.utils.ArrayUtils;
-import android.hardware.camera2.utils.SubmitInfo;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.os.ConditionVariable;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.os.RemoteException;
-import android.os.ServiceSpecificException;
-import android.util.Log;
-import android.util.Pair;
-import android.util.Size;
-import android.util.SparseArray;
-import android.view.Surface;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-
-import static android.hardware.camera2.legacy.LegacyExceptionUtils.*;
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * This class emulates the functionality of a Camera2 device using a the old Camera class.
- *
- * <p>
- * There are two main components that are used to implement this:
- * - A state machine containing valid Camera2 device states ({@link CameraDeviceState}).
- * - A message-queue based pipeline that manages an old Camera class, and executes capture and
- * configuration requests.
- * </p>
- */
-public class LegacyCameraDevice implements AutoCloseable {
- private final String TAG;
-
- private static final boolean DEBUG = false;
- private final int mCameraId;
- private final CameraCharacteristics mStaticCharacteristics;
- private final ICameraDeviceCallbacks mDeviceCallbacks;
- private final CameraDeviceState mDeviceState = new CameraDeviceState();
- private SparseArray<Surface> mConfiguredSurfaces;
- private boolean mClosed = false;
-
- private final ConditionVariable mIdle = new ConditionVariable(/*open*/true);
-
- private final HandlerThread mResultThread = new HandlerThread("ResultThread");
- private final HandlerThread mCallbackHandlerThread = new HandlerThread("CallbackThread");
- private final Handler mCallbackHandler;
- private final Handler mResultHandler;
- private static final int ILLEGAL_VALUE = -1;
-
- // Keep up to date with values in hardware/libhardware/include/hardware/gralloc.h
- private static final int GRALLOC_USAGE_RENDERSCRIPT = 0x00100000;
- private static final int GRALLOC_USAGE_SW_READ_OFTEN = 0x00000003;
- private static final int GRALLOC_USAGE_HW_TEXTURE = 0x00000100;
- private static final int GRALLOC_USAGE_HW_COMPOSER = 0x00000800;
- private static final int GRALLOC_USAGE_HW_RENDER = 0x00000200;
- private static final int GRALLOC_USAGE_HW_VIDEO_ENCODER = 0x00010000;
-
- public static final int MAX_DIMEN_FOR_ROUNDING = 1920; // maximum allowed width for rounding
-
- // Keep up to date with values in system/core/include/system/window.h
- public static final int NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW = 1;
-
- private CaptureResultExtras getExtrasFromRequest(RequestHolder holder) {
- return getExtrasFromRequest(holder,
- /*errorCode*/CameraDeviceState.NO_CAPTURE_ERROR, /*errorArg*/null);
- }
-
- private CaptureResultExtras getExtrasFromRequest(RequestHolder holder,
- int errorCode, Object errorArg) {
- int errorStreamId = -1;
- if (errorCode == CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_BUFFER) {
- Surface errorTarget = (Surface) errorArg;
- int indexOfTarget = mConfiguredSurfaces.indexOfValue(errorTarget);
- if (indexOfTarget < 0) {
- Log.e(TAG, "Buffer drop error reported for unknown Surface");
- } else {
- errorStreamId = mConfiguredSurfaces.keyAt(indexOfTarget);
- }
- }
- if (holder == null) {
- return new CaptureResultExtras(ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE,
- ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE, null,
- ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE);
- }
- return new CaptureResultExtras(holder.getRequestId(), holder.getSubsequeceId(),
- /*afTriggerId*/0, /*precaptureTriggerId*/0, holder.getFrameNumber(),
- /*partialResultCount*/1, errorStreamId, null, holder.getFrameNumber(), -1, -1);
- }
-
- /**
- * Listener for the camera device state machine. Calls the appropriate
- * {@link ICameraDeviceCallbacks} for each state transition.
- */
- private final CameraDeviceState.CameraDeviceStateListener mStateListener =
- new CameraDeviceState.CameraDeviceStateListener() {
- @Override
- public void onError(final int errorCode, final Object errorArg, final RequestHolder holder) {
- if (DEBUG) {
- Log.d(TAG, "onError called, errorCode = " + errorCode + ", errorArg = " + errorArg);
- }
- switch (errorCode) {
- /*
- * Only be considered idle if we hit a fatal error
- * and no further requests can be processed.
- */
- case CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISCONNECTED:
- case CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_SERVICE:
- case CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE: {
- mIdle.open();
-
- if (DEBUG) {
- Log.d(TAG, "onError - opening idle");
- }
- }
- }
-
- final CaptureResultExtras extras = getExtrasFromRequest(holder, errorCode, errorArg);
- mResultHandler.post(new Runnable() {
- @Override
- public void run() {
- if (DEBUG) {
- Log.d(TAG, "doing onError callback for request " + holder.getRequestId() +
- ", with error code " + errorCode);
- }
- try {
- mDeviceCallbacks.onDeviceError(errorCode, extras);
- } catch (RemoteException e) {
- throw new IllegalStateException(
- "Received remote exception during onCameraError callback: ", e);
- }
- }
- });
- }
-
- @Override
- public void onConfiguring() {
- // Do nothing
- if (DEBUG) {
- Log.d(TAG, "doing onConfiguring callback.");
- }
- }
-
- @Override
- public void onIdle() {
- if (DEBUG) {
- Log.d(TAG, "onIdle called");
- }
-
- mIdle.open();
-
- mResultHandler.post(new Runnable() {
- @Override
- public void run() {
- if (DEBUG) {
- Log.d(TAG, "doing onIdle callback.");
- }
- try {
- mDeviceCallbacks.onDeviceIdle();
- } catch (RemoteException e) {
- throw new IllegalStateException(
- "Received remote exception during onCameraIdle callback: ", e);
- }
- }
- });
- }
-
- @Override
- public void onBusy() {
- mIdle.close();
-
- if (DEBUG) {
- Log.d(TAG, "onBusy called");
- }
- }
-
- @Override
- public void onCaptureStarted(final RequestHolder holder, final long timestamp) {
- final CaptureResultExtras extras = getExtrasFromRequest(holder);
-
- mResultHandler.post(new Runnable() {
- @Override
- public void run() {
- if (DEBUG) {
- Log.d(TAG, "doing onCaptureStarted callback for request " +
- holder.getRequestId());
- }
- try {
- mDeviceCallbacks.onCaptureStarted(extras, timestamp);
- } catch (RemoteException e) {
- throw new IllegalStateException(
- "Received remote exception during onCameraError callback: ", e);
- }
- }
- });
- }
-
- @Override
- public void onRequestQueueEmpty() {
- mResultHandler.post(new Runnable() {
- @Override
- public void run() {
- if (DEBUG) {
- Log.d(TAG, "doing onRequestQueueEmpty callback");
- }
- try {
- mDeviceCallbacks.onRequestQueueEmpty();
- } catch (RemoteException e) {
- throw new IllegalStateException(
- "Received remote exception during onRequestQueueEmpty callback: ",
- e);
- }
- }
- });
- }
-
- @Override
- public void onCaptureResult(final CameraMetadataNative result, final RequestHolder holder) {
- final CaptureResultExtras extras = getExtrasFromRequest(holder);
-
- mResultHandler.post(new Runnable() {
- @Override
- public void run() {
- if (DEBUG) {
- Log.d(TAG, "doing onCaptureResult callback for request " +
- holder.getRequestId());
- }
- try {
- mDeviceCallbacks.onResultReceived(result, extras,
- new PhysicalCaptureResultInfo[0]);
- } catch (RemoteException e) {
- throw new IllegalStateException(
- "Received remote exception during onCameraError callback: ", e);
- }
- }
- });
- }
-
- @Override
- public void onRepeatingRequestError(final long lastFrameNumber,
- final int repeatingRequestId) {
- mResultHandler.post(new Runnable() {
- @Override
- public void run() {
- if (DEBUG) {
- Log.d(TAG, "doing onRepeatingRequestError callback.");
- }
- try {
- mDeviceCallbacks.onRepeatingRequestError(lastFrameNumber,
- repeatingRequestId);
- } catch (RemoteException e) {
- throw new IllegalStateException(
- "Received remote exception during onRepeatingRequestError " +
- "callback: ", e);
- }
- }
- });
- }
- };
-
- private final RequestThreadManager mRequestThreadManager;
-
- /**
- * Check if a given surface uses {@link ImageFormat#YUV_420_888} or format that can be readily
- * converted to this; YV12 and NV21 are the two currently supported formats.
- *
- * @param s the surface to check.
- * @return {@code true} if the surfaces uses {@link ImageFormat#YUV_420_888} or a compatible
- * format.
- */
- static boolean needsConversion(Surface s) throws BufferQueueAbandonedException {
- int nativeType = detectSurfaceType(s);
- return nativeType == ImageFormat.YUV_420_888 || nativeType == ImageFormat.YV12 ||
- nativeType == ImageFormat.NV21;
- }
-
- /**
- * Create a new emulated camera device from a given Camera 1 API camera.
- *
- * <p>
- * The {@link Camera} provided to this constructor must already have been successfully opened,
- * and ownership of the provided camera is passed to this object. No further calls to the
- * camera methods should be made following this constructor.
- * </p>
- *
- * @param cameraId the id of the camera.
- * @param camera an open {@link Camera} device.
- * @param characteristics the static camera characteristics for this camera device
- * @param callbacks {@link ICameraDeviceCallbacks} callbacks to call for Camera2 API operations.
- */
- public LegacyCameraDevice(int cameraId, Camera camera, CameraCharacteristics characteristics,
- ICameraDeviceCallbacks callbacks) {
- mCameraId = cameraId;
- mDeviceCallbacks = callbacks;
- TAG = String.format("CameraDevice-%d-LE", mCameraId);
-
- mResultThread.start();
- mResultHandler = new Handler(mResultThread.getLooper());
- mCallbackHandlerThread.start();
- mCallbackHandler = new Handler(mCallbackHandlerThread.getLooper());
- mDeviceState.setCameraDeviceCallbacks(mCallbackHandler, mStateListener);
- mStaticCharacteristics = characteristics;
- mRequestThreadManager =
- new RequestThreadManager(cameraId, camera, characteristics, mDeviceState);
- mRequestThreadManager.start();
- }
-
- /**
- * Configure the device with a set of output surfaces.
- *
- * <p>Using empty or {@code null} {@code outputs} is the same as unconfiguring.</p>
- *
- * <p>Every surface in {@code outputs} must be non-{@code null}.</p>
- *
- * @param outputs a list of surfaces to set. LegacyCameraDevice will take ownership of this
- * list; it must not be modified by the caller once it's passed in.
- * @return an error code for this binder operation, or {@link NO_ERROR}
- * on success.
- */
- public int configureOutputs(SparseArray<Surface> outputs) {
- return configureOutputs(outputs, /*validateSurfacesOnly*/false);
- }
-
- /**
- * Configure the device with a set of output surfaces.
- *
- * <p>Using empty or {@code null} {@code outputs} is the same as unconfiguring.</p>
- *
- * <p>Every surface in {@code outputs} must be non-{@code null}.</p>
- *
- * @param outputs a list of surfaces to set. LegacyCameraDevice will take ownership of this
- * list; it must not be modified by the caller once it's passed in.
- * @param validateSurfacesOnly If set it will only check whether the outputs are supported
- * and avoid any device configuration.
- * @return an error code for this binder operation, or {@link NO_ERROR}
- * on success.
- * @hide
- */
- public int configureOutputs(SparseArray<Surface> outputs, boolean validateSurfacesOnly) {
- List<Pair<Surface, Size>> sizedSurfaces = new ArrayList<>();
- if (outputs != null) {
- int count = outputs.size();
- for (int i = 0; i < count; i++) {
- Surface output = outputs.valueAt(i);
- if (output == null) {
- Log.e(TAG, "configureOutputs - null outputs are not allowed");
- return BAD_VALUE;
- }
- if (!output.isValid()) {
- Log.e(TAG, "configureOutputs - invalid output surfaces are not allowed");
- return BAD_VALUE;
- }
- StreamConfigurationMap streamConfigurations = mStaticCharacteristics.
- get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
-
- // Validate surface size and format.
- try {
- Size s = getSurfaceSize(output);
- int surfaceType = detectSurfaceType(output);
-
- boolean flexibleConsumer = isFlexibleConsumer(output);
-
- Size[] sizes = streamConfigurations.getOutputSizes(surfaceType);
- if (sizes == null) {
- if (surfaceType == ImageFormat.PRIVATE) {
-
- // YUV_420_888 is always present in LEGACY for all
- // IMPLEMENTATION_DEFINED output sizes, and is publicly visible in the
- // API (i.e. {@code #getOutputSizes} works here).
- sizes = streamConfigurations.getOutputSizes(ImageFormat.YUV_420_888);
- } else if (surfaceType == LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB) {
- sizes = streamConfigurations.getOutputSizes(ImageFormat.JPEG);
- }
- }
-
- if (!ArrayUtils.contains(sizes, s)) {
- if (flexibleConsumer && (s = findClosestSize(s, sizes)) != null) {
- sizedSurfaces.add(new Pair<>(output, s));
- } else {
- String reason = (sizes == null) ? "format is invalid." :
- ("size not in valid set: " + Arrays.toString(sizes));
- Log.e(TAG, String.format("Surface with size (w=%d, h=%d) and format " +
- "0x%x is not valid, %s", s.getWidth(), s.getHeight(),
- surfaceType, reason));
- return BAD_VALUE;
- }
- } else {
- sizedSurfaces.add(new Pair<>(output, s));
- }
- // Lock down the size before configuration
- if (!validateSurfacesOnly) {
- setSurfaceDimens(output, s.getWidth(), s.getHeight());
- }
- } catch (BufferQueueAbandonedException e) {
- Log.e(TAG, "Surface bufferqueue is abandoned, cannot configure as output: ", e);
- return BAD_VALUE;
- }
-
- }
- }
-
- if (validateSurfacesOnly) {
- return LegacyExceptionUtils.NO_ERROR;
- }
-
- boolean success = false;
- if (mDeviceState.setConfiguring()) {
- mRequestThreadManager.configure(sizedSurfaces);
- success = mDeviceState.setIdle();
- }
-
- if (success) {
- mConfiguredSurfaces = outputs;
- } else {
- return LegacyExceptionUtils.INVALID_OPERATION;
- }
- return LegacyExceptionUtils.NO_ERROR;
- }
-
- /**
- * Submit a burst of capture requests.
- *
- * @param requestList a list of capture requests to execute.
- * @param repeating {@code true} if this burst is repeating.
- * @return the submission info, including the new request id, and the last frame number, which
- * contains either the frame number of the last frame that will be returned for this request,
- * or the frame number of the last frame that will be returned for the current repeating
- * request if this burst is set to be repeating.
- */
- public SubmitInfo submitRequestList(CaptureRequest[] requestList, boolean repeating) {
- if (requestList == null || requestList.length == 0) {
- Log.e(TAG, "submitRequestList - Empty/null requests are not allowed");
- throw new ServiceSpecificException(BAD_VALUE,
- "submitRequestList - Empty/null requests are not allowed");
- }
-
- List<Long> surfaceIds;
-
- try {
- surfaceIds = (mConfiguredSurfaces == null) ? new ArrayList<Long>() :
- getSurfaceIds(mConfiguredSurfaces);
- } catch (BufferQueueAbandonedException e) {
- throw new ServiceSpecificException(BAD_VALUE,
- "submitRequestList - configured surface is abandoned.");
- }
-
- // Make sure that there all requests have at least 1 surface; all surfaces are non-null
- for (CaptureRequest request : requestList) {
- if (request.getTargets().isEmpty()) {
- Log.e(TAG, "submitRequestList - "
- + "Each request must have at least one Surface target");
- throw new ServiceSpecificException(BAD_VALUE,
- "submitRequestList - "
- + "Each request must have at least one Surface target");
- }
-
- for (Surface surface : request.getTargets()) {
- if (surface == null) {
- Log.e(TAG, "submitRequestList - Null Surface targets are not allowed");
- throw new ServiceSpecificException(BAD_VALUE,
- "submitRequestList - Null Surface targets are not allowed");
- } else if (mConfiguredSurfaces == null) {
- Log.e(TAG, "submitRequestList - must configure " +
- " device with valid surfaces before submitting requests");
- throw new ServiceSpecificException(INVALID_OPERATION,
- "submitRequestList - must configure " +
- " device with valid surfaces before submitting requests");
- } else if (!containsSurfaceId(surface, surfaceIds)) {
- Log.e(TAG, "submitRequestList - cannot use a surface that wasn't configured");
- throw new ServiceSpecificException(BAD_VALUE,
- "submitRequestList - cannot use a surface that wasn't configured");
- }
- }
- }
-
- // TODO: further validation of request here
- mIdle.close();
- return mRequestThreadManager.submitCaptureRequests(requestList, repeating);
- }
-
- /**
- * Submit a single capture request.
- *
- * @param request the capture request to execute.
- * @param repeating {@code true} if this request is repeating.
- * @return the submission info, including the new request id, and the last frame number, which
- * contains either the frame number of the last frame that will be returned for this request,
- * or the frame number of the last frame that will be returned for the current repeating
- * request if this burst is set to be repeating.
- */
- public SubmitInfo submitRequest(CaptureRequest request, boolean repeating) {
- CaptureRequest[] requestList = { request };
- return submitRequestList(requestList, repeating);
- }
-
- /**
- * Cancel the repeating request with the given request id.
- *
- * @param requestId the request id of the request to cancel.
- * @return the last frame number to be returned from the HAL for the given repeating request, or
- * {@code INVALID_FRAME} if none exists.
- */
- public long cancelRequest(int requestId) {
- return mRequestThreadManager.cancelRepeating(requestId);
- }
-
- /**
- * Block until the {@link ICameraDeviceCallbacks#onCameraIdle()} callback is received.
- */
- public void waitUntilIdle() {
- mIdle.block();
- }
-
- /**
- * Flush any pending requests.
- *
- * @return the last frame number.
- */
- public long flush() {
- long lastFrame = mRequestThreadManager.flush();
- waitUntilIdle();
- return lastFrame;
- }
-
- public void setAudioRestriction(int mode) {
- mRequestThreadManager.setAudioRestriction(mode);
- }
-
- public int getAudioRestriction() {
- return mRequestThreadManager.getAudioRestriction();
- }
-
- /**
- * Return {@code true} if the device has been closed.
- */
- public boolean isClosed() {
- return mClosed;
- }
-
- @Override
- public void close() {
- mRequestThreadManager.quit();
- mCallbackHandlerThread.quitSafely();
- mResultThread.quitSafely();
-
- try {
- mCallbackHandlerThread.join();
- } catch (InterruptedException e) {
- Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
- mCallbackHandlerThread.getName(), mCallbackHandlerThread.getId()));
- }
-
- try {
- mResultThread.join();
- } catch (InterruptedException e) {
- Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
- mResultThread.getName(), mResultThread.getId()));
- }
-
- mClosed = true;
- }
-
- @Override
- protected void finalize() throws Throwable {
- try {
- close();
- } catch (ServiceSpecificException e) {
- Log.e(TAG, "Got error while trying to finalize, ignoring: " + e.getMessage());
- } finally {
- super.finalize();
- }
- }
-
- static long findEuclidDistSquare(Size a, Size b) {
- long d0 = a.getWidth() - b.getWidth();
- long d1 = a.getHeight() - b.getHeight();
- return d0 * d0 + d1 * d1;
- }
-
- // Keep up to date with rounding behavior in
- // frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
- static Size findClosestSize(Size size, Size[] supportedSizes) {
- if (size == null || supportedSizes == null) {
- return null;
- }
- Size bestSize = null;
- for (Size s : supportedSizes) {
- if (s.equals(size)) {
- return size;
- } else if (s.getWidth() <= MAX_DIMEN_FOR_ROUNDING && (bestSize == null ||
- LegacyCameraDevice.findEuclidDistSquare(size, s) <
- LegacyCameraDevice.findEuclidDistSquare(bestSize, s))) {
- bestSize = s;
- }
- }
- return bestSize;
- }
-
- /**
- * Query the surface for its currently configured default buffer size.
- * @param surface a non-{@code null} {@code Surface}
- * @return the width and height of the surface
- *
- * @throws NullPointerException if the {@code surface} was {@code null}
- * @throws BufferQueueAbandonedException if the {@code surface} was invalid
- */
- public static Size getSurfaceSize(Surface surface) throws BufferQueueAbandonedException {
- checkNotNull(surface);
-
- int[] dimens = new int[2];
- LegacyExceptionUtils.throwOnError(nativeDetectSurfaceDimens(surface, /*out*/dimens));
-
- return new Size(dimens[0], dimens[1]);
- }
-
- public static boolean isFlexibleConsumer(Surface output) {
- int usageFlags = detectSurfaceUsageFlags(output);
-
- // Keep up to date with allowed consumer types in
- // frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
- int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT;
- int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN |
- GRALLOC_USAGE_HW_COMPOSER;
- boolean flexibleConsumer = ((usageFlags & disallowedFlags) == 0 &&
- (usageFlags & allowedFlags) != 0);
- return flexibleConsumer;
- }
-
- public static boolean isPreviewConsumer(Surface output) {
- int usageFlags = detectSurfaceUsageFlags(output);
- int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT |
- GRALLOC_USAGE_SW_READ_OFTEN;
- int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
- GRALLOC_USAGE_HW_RENDER;
- boolean previewConsumer = ((usageFlags & disallowedFlags) == 0 &&
- (usageFlags & allowedFlags) != 0);
- int surfaceFormat = ImageFormat.UNKNOWN;
- try {
- surfaceFormat = detectSurfaceType(output);
- } catch(BufferQueueAbandonedException e) {
- throw new IllegalArgumentException("Surface was abandoned", e);
- }
-
- return previewConsumer;
- }
-
- public static boolean isVideoEncoderConsumer(Surface output) {
- int usageFlags = detectSurfaceUsageFlags(output);
- int disallowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
- GRALLOC_USAGE_RENDERSCRIPT | GRALLOC_USAGE_SW_READ_OFTEN;
- int allowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER;
- boolean videoEncoderConsumer = ((usageFlags & disallowedFlags) == 0 &&
- (usageFlags & allowedFlags) != 0);
-
- int surfaceFormat = ImageFormat.UNKNOWN;
- try {
- surfaceFormat = detectSurfaceType(output);
- } catch(BufferQueueAbandonedException e) {
- throw new IllegalArgumentException("Surface was abandoned", e);
- }
-
- return videoEncoderConsumer;
- }
-
- /**
- * Query the surface for its currently configured usage flags
- */
- static int detectSurfaceUsageFlags(Surface surface) {
- checkNotNull(surface);
- return nativeDetectSurfaceUsageFlags(surface);
- }
-
- /**
- * Query the surface for its currently configured format
- */
- public static int detectSurfaceType(Surface surface) throws BufferQueueAbandonedException {
- checkNotNull(surface);
- int surfaceType = nativeDetectSurfaceType(surface);
-
- // TODO: remove this override since the default format should be
- // ImageFormat.PRIVATE. b/9487482
- if ((surfaceType >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
- surfaceType <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
- surfaceType = ImageFormat.PRIVATE;
- }
-
- return LegacyExceptionUtils.throwOnError(surfaceType);
- }
-
- /**
- * Query the surface for its currently configured dataspace
- */
- public static int detectSurfaceDataspace(Surface surface) throws BufferQueueAbandonedException {
- checkNotNull(surface);
- return LegacyExceptionUtils.throwOnError(nativeDetectSurfaceDataspace(surface));
- }
-
- static void connectSurface(Surface surface) throws BufferQueueAbandonedException {
- checkNotNull(surface);
-
- LegacyExceptionUtils.throwOnError(nativeConnectSurface(surface));
- }
-
- static void disconnectSurface(Surface surface) throws BufferQueueAbandonedException {
- if (surface == null) return;
-
- LegacyExceptionUtils.throwOnError(nativeDisconnectSurface(surface));
- }
-
- static void produceFrame(Surface surface, byte[] pixelBuffer, int width,
- int height, int pixelFormat)
- throws BufferQueueAbandonedException {
- checkNotNull(surface);
- checkNotNull(pixelBuffer);
- checkArgumentPositive(width, "width must be positive.");
- checkArgumentPositive(height, "height must be positive.");
-
- LegacyExceptionUtils.throwOnError(nativeProduceFrame(surface, pixelBuffer, width, height,
- pixelFormat));
- }
-
- static void setSurfaceFormat(Surface surface, int pixelFormat)
- throws BufferQueueAbandonedException {
- checkNotNull(surface);
-
- LegacyExceptionUtils.throwOnError(nativeSetSurfaceFormat(surface, pixelFormat));
- }
-
- static void setSurfaceDimens(Surface surface, int width, int height)
- throws BufferQueueAbandonedException {
- checkNotNull(surface);
- checkArgumentPositive(width, "width must be positive.");
- checkArgumentPositive(height, "height must be positive.");
-
- LegacyExceptionUtils.throwOnError(nativeSetSurfaceDimens(surface, width, height));
- }
-
- public static long getSurfaceId(Surface surface) throws BufferQueueAbandonedException {
- checkNotNull(surface);
- try {
- return nativeGetSurfaceId(surface);
- } catch (IllegalArgumentException e) {
- throw new BufferQueueAbandonedException();
- }
- }
-
- static List<Long> getSurfaceIds(SparseArray<Surface> surfaces)
- throws BufferQueueAbandonedException {
- if (surfaces == null) {
- throw new NullPointerException("Null argument surfaces");
- }
- List<Long> surfaceIds = new ArrayList<>();
- int count = surfaces.size();
- for (int i = 0; i < count; i++) {
- long id = getSurfaceId(surfaces.valueAt(i));
- if (id == 0) {
- throw new IllegalStateException(
- "Configured surface had null native GraphicBufferProducer pointer!");
- }
- surfaceIds.add(id);
- }
- return surfaceIds;
- }
-
- static List<Long> getSurfaceIds(Collection<Surface> surfaces)
- throws BufferQueueAbandonedException {
- if (surfaces == null) {
- throw new NullPointerException("Null argument surfaces");
- }
- List<Long> surfaceIds = new ArrayList<>();
- for (Surface s : surfaces) {
- long id = getSurfaceId(s);
- if (id == 0) {
- throw new IllegalStateException(
- "Configured surface had null native GraphicBufferProducer pointer!");
- }
- surfaceIds.add(id);
- }
- return surfaceIds;
- }
-
- static boolean containsSurfaceId(Surface s, Collection<Long> ids) {
- long id = 0;
- try {
- id = getSurfaceId(s);
- } catch (BufferQueueAbandonedException e) {
- // If surface is abandoned, return false.
- return false;
- }
- return ids.contains(id);
- }
-
- static void setSurfaceOrientation(Surface surface, int facing, int sensorOrientation)
- throws BufferQueueAbandonedException {
- checkNotNull(surface);
- LegacyExceptionUtils.throwOnError(nativeSetSurfaceOrientation(surface, facing,
- sensorOrientation));
- }
-
- static Size getTextureSize(SurfaceTexture surfaceTexture)
- throws BufferQueueAbandonedException {
- checkNotNull(surfaceTexture);
-
- int[] dimens = new int[2];
- LegacyExceptionUtils.throwOnError(nativeDetectTextureDimens(surfaceTexture,
- /*out*/dimens));
-
- return new Size(dimens[0], dimens[1]);
- }
-
- static void setNextTimestamp(Surface surface, long timestamp)
- throws BufferQueueAbandonedException {
- checkNotNull(surface);
- LegacyExceptionUtils.throwOnError(nativeSetNextTimestamp(surface, timestamp));
- }
-
- static void setScalingMode(Surface surface, int mode)
- throws BufferQueueAbandonedException {
- checkNotNull(surface);
- LegacyExceptionUtils.throwOnError(nativeSetScalingMode(surface, mode));
- }
-
-
- private static native int nativeDetectSurfaceType(Surface surface);
-
- private static native int nativeDetectSurfaceDataspace(Surface surface);
-
- private static native int nativeDetectSurfaceDimens(Surface surface,
- /*out*/int[/*2*/] dimens);
-
- private static native int nativeConnectSurface(Surface surface);
-
- private static native int nativeProduceFrame(Surface surface, byte[] pixelBuffer, int width,
- int height, int pixelFormat);
-
- private static native int nativeSetSurfaceFormat(Surface surface, int pixelFormat);
-
- private static native int nativeSetSurfaceDimens(Surface surface, int width, int height);
-
- private static native long nativeGetSurfaceId(Surface surface);
-
- private static native int nativeSetSurfaceOrientation(Surface surface, int facing,
- int sensorOrientation);
-
- private static native int nativeDetectTextureDimens(SurfaceTexture surfaceTexture,
- /*out*/int[/*2*/] dimens);
-
- private static native int nativeSetNextTimestamp(Surface surface, long timestamp);
-
- private static native int nativeDetectSurfaceUsageFlags(Surface surface);
-
- private static native int nativeSetScalingMode(Surface surface, int scalingMode);
-
- private static native int nativeDisconnectSurface(Surface surface);
-
- static native int nativeGetJpegFooterSize();
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyExceptionUtils.java b/core/java/android/hardware/camera2/legacy/LegacyExceptionUtils.java
deleted file mode 100644
index 55130c8f2839..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyExceptionUtils.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.ICameraService;
-import android.os.ServiceSpecificException;
-import android.util.AndroidException;
-
-import static android.system.OsConstants.*;
-
-/**
- * Utility class containing exception handling used solely by the compatibility mode shim.
- */
-public class LegacyExceptionUtils {
- private static final String TAG = "LegacyExceptionUtils";
-
- public static final int NO_ERROR = 0;
- public static final int PERMISSION_DENIED = -EPERM;
- public static final int ALREADY_EXISTS = -EEXIST;
- public static final int BAD_VALUE = -EINVAL;
- public static final int DEAD_OBJECT = -ENOSYS;
- public static final int INVALID_OPERATION = -EPIPE;
- public static final int TIMED_OUT = -ETIMEDOUT;
-
- /**
- * Checked exception thrown when a BufferQueue has been abandoned by its consumer.
- */
- public static class BufferQueueAbandonedException extends AndroidException {
- public BufferQueueAbandonedException () {}
-
- public BufferQueueAbandonedException(String name) {
- super(name);
- }
-
- public BufferQueueAbandonedException(String name, Throwable cause) {
- super(name, cause);
- }
-
- public BufferQueueAbandonedException(Exception cause) {
- super(cause);
- }
- }
-
- /**
- * Throw error codes used by legacy device methods as exceptions.
- *
- * <p>Non-negative return values are passed through, negative return values are thrown as
- * exceptions.</p>
- *
- * @param errorFlag error to throw as an exception.
- * @throws {@link BufferQueueAbandonedException} for BAD_VALUE.
- * @throws {@link UnsupportedOperationException} for an unknown negative error code.
- * @return {@code errorFlag} if the value was non-negative, throws otherwise.
- */
- public static int throwOnError(int errorFlag) throws BufferQueueAbandonedException {
- if (errorFlag == NO_ERROR) {
- return NO_ERROR;
- } else if (errorFlag == BAD_VALUE) {
- throw new BufferQueueAbandonedException();
- }
-
- if (errorFlag < 0) {
- throw new UnsupportedOperationException("Unknown error " + errorFlag);
- }
- return errorFlag;
- }
-
- /**
- * Throw error codes returned by the camera service as exceptions.
- *
- * @param errorFlag error to throw as an exception.
- */
- public static void throwOnServiceError(int errorFlag) {
- int errorCode = ICameraService.ERROR_INVALID_OPERATION;
- String errorMsg;
-
- if (errorFlag >= NO_ERROR) {
- return;
- } else if (errorFlag == PERMISSION_DENIED) {
- errorCode = ICameraService.ERROR_PERMISSION_DENIED;
- errorMsg = "Lacking privileges to access camera service";
- } else if (errorFlag == ALREADY_EXISTS) {
- // This should be handled at the call site. Typically this isn't bad,
- // just means we tried to do an operation that already completed.
- return;
- } else if (errorFlag == BAD_VALUE) {
- errorCode = ICameraService.ERROR_ILLEGAL_ARGUMENT;
- errorMsg = "Bad argument passed to camera service";
- } else if (errorFlag == DEAD_OBJECT) {
- errorCode = ICameraService.ERROR_DISCONNECTED;
- errorMsg = "Camera service not available";
- } else if (errorFlag == TIMED_OUT) {
- errorCode = ICameraService.ERROR_INVALID_OPERATION;
- errorMsg = "Operation timed out in camera service";
- } else if (errorFlag == -EACCES) {
- errorCode = ICameraService.ERROR_DISABLED;
- errorMsg = "Camera disabled by policy";
- } else if (errorFlag == -EBUSY) {
- errorCode = ICameraService.ERROR_CAMERA_IN_USE;
- errorMsg = "Camera already in use";
- } else if (errorFlag == -EUSERS) {
- errorCode = ICameraService.ERROR_MAX_CAMERAS_IN_USE;
- errorMsg = "Maximum number of cameras in use";
- } else if (errorFlag == -ENODEV) {
- errorCode = ICameraService.ERROR_DISCONNECTED;
- errorMsg = "Camera device not available";
- } else if (errorFlag == -EOPNOTSUPP) {
- errorCode = ICameraService.ERROR_DEPRECATED_HAL;
- errorMsg = "Deprecated camera HAL does not support this";
- } else if (errorFlag == INVALID_OPERATION) {
- errorCode = ICameraService.ERROR_INVALID_OPERATION;
- errorMsg = "Illegal state encountered in camera service.";
- } else {
- errorCode = ICameraService.ERROR_INVALID_OPERATION;
- errorMsg = "Unknown camera device error " + errorFlag;
- }
-
- throw new ServiceSpecificException(errorCode, errorMsg);
- }
-
- private LegacyExceptionUtils() {
- throw new AssertionError();
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java b/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java
deleted file mode 100644
index b3b4549426f0..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.Rect;
-import android.hardware.Camera;
-import android.hardware.Camera.FaceDetectionListener;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.hardware.camera2.legacy.ParameterUtils.ZoomData;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.CaptureResult;
-import android.hardware.camera2.params.Face;
-import android.hardware.camera2.utils.ListUtils;
-import android.hardware.camera2.utils.ParamsUtils;
-import android.util.Log;
-import android.util.Size;
-
-import com.android.internal.util.ArrayUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static android.hardware.camera2.CaptureRequest.*;
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Map legacy face detect callbacks into face detection results.
- */
-@SuppressWarnings("deprecation")
-public class LegacyFaceDetectMapper {
- private static String TAG = "LegacyFaceDetectMapper";
- private static final boolean DEBUG = false;
-
- private final Camera mCamera;
- /** Is the camera capable of face detection? */
- private final boolean mFaceDetectSupported;
- /** Is the camera is running face detection? */
- private boolean mFaceDetectEnabled = false;
- /** Did the last request say to use SCENE_MODE = FACE_PRIORITY? */
- private boolean mFaceDetectScenePriority = false;
- /** Did the last request enable the face detect mode to ON? */
- private boolean mFaceDetectReporting = false;
-
- /** Synchronize access to all fields */
- private final Object mLock = new Object();
- private Camera.Face[] mFaces;
- private Camera.Face[] mFacesPrev;
- /**
- * Instantiate a new face detect mapper.
- *
- * @param camera a non-{@code null} camera1 device
- * @param characteristics a non-{@code null} camera characteristics for that camera1
- *
- * @throws NullPointerException if any of the args were {@code null}
- */
- public LegacyFaceDetectMapper(Camera camera, CameraCharacteristics characteristics) {
- mCamera = checkNotNull(camera, "camera must not be null");
- checkNotNull(characteristics, "characteristics must not be null");
-
- mFaceDetectSupported = ArrayUtils.contains(
- characteristics.get(
- CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES),
- STATISTICS_FACE_DETECT_MODE_SIMPLE);
-
- if (!mFaceDetectSupported) {
- return;
- }
-
- mCamera.setFaceDetectionListener(new FaceDetectionListener() {
-
- @Override
- public void onFaceDetection(Camera.Face[] faces, Camera camera) {
- int lengthFaces = faces == null ? 0 : faces.length;
- synchronized (mLock) {
- if (mFaceDetectEnabled) {
- mFaces = faces;
- } else if (lengthFaces > 0) {
- // stopFaceDetectMode could race against the requests, print a debug log
- Log.d(TAG,
- "onFaceDetection - Ignored some incoming faces since" +
- "face detection was disabled");
- }
- }
-
- if (DEBUG) {
- Log.v(TAG, "onFaceDetection - read " + lengthFaces + " faces");
- }
- }
- });
- }
-
- /**
- * Process the face detect mode from the capture request into an api1 face detect toggle.
- *
- * <p>This method should be called after the parameters are {@link LegacyRequestMapper mapped}
- * with the request.</p>
- *
- * <p>Callbacks are processed in the background, and the next call to {@link #mapResultTriggers}
- * will have the latest faces detected as reflected by the camera1 callbacks.</p>
- *
- * <p>None of the arguments will be mutated.</p>
- *
- * @param captureRequest a non-{@code null} request
- * @param parameters a non-{@code null} parameters corresponding to this request (read-only)
- */
- public void processFaceDetectMode(CaptureRequest captureRequest,
- Camera.Parameters parameters) {
- checkNotNull(captureRequest, "captureRequest must not be null");
-
- /*
- * statistics.faceDetectMode
- */
- int fdMode = ParamsUtils.getOrDefault(captureRequest, STATISTICS_FACE_DETECT_MODE,
- STATISTICS_FACE_DETECT_MODE_OFF);
-
- if (fdMode != STATISTICS_FACE_DETECT_MODE_OFF && !mFaceDetectSupported) {
- Log.w(TAG,
- "processFaceDetectMode - Ignoring statistics.faceDetectMode; " +
- "face detection is not available");
- return;
- }
-
- /*
- * control.sceneMode
- */
- int sceneMode = ParamsUtils.getOrDefault(captureRequest, CONTROL_SCENE_MODE,
- CONTROL_SCENE_MODE_DISABLED);
- if (sceneMode == CONTROL_SCENE_MODE_FACE_PRIORITY && !mFaceDetectSupported) {
- Log.w(TAG, "processFaceDetectMode - ignoring control.sceneMode == FACE_PRIORITY; " +
- "face detection is not available");
- return;
- }
-
- // Print some warnings out in case the values were wrong
- switch (fdMode) {
- case STATISTICS_FACE_DETECT_MODE_OFF:
- case STATISTICS_FACE_DETECT_MODE_SIMPLE:
- break;
- case STATISTICS_FACE_DETECT_MODE_FULL:
- Log.w(TAG,
- "processFaceDetectMode - statistics.faceDetectMode == FULL unsupported, " +
- "downgrading to SIMPLE");
- break;
- default:
- Log.w(TAG, "processFaceDetectMode - ignoring unknown statistics.faceDetectMode = "
- + fdMode);
- return;
- }
-
- boolean enableFaceDetect = (fdMode != STATISTICS_FACE_DETECT_MODE_OFF)
- || (sceneMode == CONTROL_SCENE_MODE_FACE_PRIORITY);
- synchronized (mLock) {
- // Enable/disable face detection if it's changed since last time
- if (enableFaceDetect != mFaceDetectEnabled) {
- if (enableFaceDetect) {
- mCamera.startFaceDetection();
-
- if (DEBUG) {
- Log.v(TAG, "processFaceDetectMode - start face detection");
- }
- } else {
- mCamera.stopFaceDetection();
-
- if (DEBUG) {
- Log.v(TAG, "processFaceDetectMode - stop face detection");
- }
-
- mFaces = null;
- }
-
- mFaceDetectEnabled = enableFaceDetect;
- mFaceDetectScenePriority = sceneMode == CONTROL_SCENE_MODE_FACE_PRIORITY;
- mFaceDetectReporting = fdMode != STATISTICS_FACE_DETECT_MODE_OFF;
- }
- }
- }
-
- /**
- * Update the {@code result} camera metadata map with the new value for the
- * {@code statistics.faces} and {@code statistics.faceDetectMode}.
- *
- * <p>Face detect callbacks are processed in the background, and each call to
- * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
- *
- * <p>If the scene mode was set to {@code FACE_PRIORITY} but face detection is disabled,
- * the camera will still run face detection in the background, but no faces will be reported
- * in the capture result.</p>
- *
- * @param result a non-{@code null} result
- * @param legacyRequest a non-{@code null} request (read-only)
- */
- public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
- checkNotNull(result, "result must not be null");
- checkNotNull(legacyRequest, "legacyRequest must not be null");
-
- Camera.Face[] faces, previousFaces;
- int fdMode;
- boolean fdScenePriority;
- synchronized (mLock) {
- fdMode = mFaceDetectReporting ?
- STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;
-
- if (mFaceDetectReporting) {
- faces = mFaces;
- } else {
- faces = null;
- }
-
- fdScenePriority = mFaceDetectScenePriority;
-
- previousFaces = mFacesPrev;
- mFacesPrev = faces;
- }
-
- CameraCharacteristics characteristics = legacyRequest.characteristics;
- CaptureRequest request = legacyRequest.captureRequest;
- Size previewSize = legacyRequest.previewSize;
- Camera.Parameters params = legacyRequest.parameters;
-
- Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
- ZoomData zoomData = ParameterUtils.convertToLegacyZoom(activeArray,
- request.get(CaptureRequest.SCALER_CROP_REGION),
- request.get(CaptureRequest.CONTROL_ZOOM_RATIO),
- previewSize, params);
-
- List<Face> convertedFaces = new ArrayList<>();
- if (faces != null) {
- for (Camera.Face face : faces) {
- if (face != null) {
- convertedFaces.add(
- ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
- } else {
- Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
- }
- }
- }
-
- if (DEBUG && previousFaces != faces) { // Log only in verbose and IF the faces changed
- Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
- }
-
- result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
- result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
-
- // Override scene mode with FACE_PRIORITY if the request was using FACE_PRIORITY
- if (fdScenePriority) {
- result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_FACE_PRIORITY);
- }
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java b/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java
deleted file mode 100644
index d33c09eac85d..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java
+++ /dev/null
@@ -1,321 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.Camera;
-import android.hardware.Camera.Parameters;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.CaptureResult;
-import android.hardware.camera2.utils.ParamsUtils;
-import android.util.Log;
-
-import java.util.Objects;
-
-import static android.hardware.camera2.CaptureRequest.*;
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Map capture request data into legacy focus state transitions.
- *
- * <p>This object will asynchronously process auto-focus changes, so no interaction
- * with it is necessary beyond reading the current state and updating with the latest trigger.</p>
- */
-@SuppressWarnings("deprecation")
-public class LegacyFocusStateMapper {
- private static String TAG = "LegacyFocusStateMapper";
- private static final boolean DEBUG = false;
-
- private final Camera mCamera;
-
- private int mAfStatePrevious = CONTROL_AF_STATE_INACTIVE;
- private String mAfModePrevious = null;
-
- /** Guard mAfRun and mAfState */
- private final Object mLock = new Object();
- /** Guard access with mLock */
- private int mAfRun = 0;
- /** Guard access with mLock */
- private int mAfState = CONTROL_AF_STATE_INACTIVE;
-
- /**
- * Instantiate a new focus state mapper.
- *
- * @param camera a non-{@code null} camera1 device
- *
- * @throws NullPointerException if any of the args were {@code null}
- */
- public LegacyFocusStateMapper(Camera camera) {
- mCamera = checkNotNull(camera, "camera must not be null");
- }
-
- /**
- * Process the AF triggers from the request as a camera1 autofocus routine.
- *
- * <p>This method should be called after the parameters are {@link LegacyRequestMapper mapped}
- * with the request.</p>
- *
- * <p>Callbacks are processed in the background, and the next call to {@link #mapResultTriggers}
- * will have the latest AF state as reflected by the camera1 callbacks.</p>
- *
- * <p>None of the arguments will be mutated.</p>
- *
- * @param captureRequest a non-{@code null} request
- * @param parameters a non-{@code null} parameters corresponding to this request (read-only)
- */
- public void processRequestTriggers(CaptureRequest captureRequest,
- Camera.Parameters parameters) {
- checkNotNull(captureRequest, "captureRequest must not be null");
-
- /*
- * control.afTrigger
- */
- int afTrigger = ParamsUtils.getOrDefault(captureRequest, CONTROL_AF_TRIGGER,
- CONTROL_AF_TRIGGER_IDLE);
-
- final String afMode = parameters.getFocusMode();
-
- if (!Objects.equals(mAfModePrevious, afMode)) {
- if (DEBUG) {
- Log.v(TAG, "processRequestTriggers - AF mode switched from " + mAfModePrevious +
- " to " + afMode);
- }
-
- // Switching modes always goes back to INACTIVE; ignore callbacks from previous modes
-
- synchronized (mLock) {
- ++mAfRun;
- mAfState = CONTROL_AF_STATE_INACTIVE;
- }
- mCamera.cancelAutoFocus();
- }
-
- mAfModePrevious = afMode;
-
- // Passive AF Scanning
- {
- final int currentAfRun;
-
- synchronized (mLock) {
- currentAfRun = mAfRun;
- }
-
- Camera.AutoFocusMoveCallback afMoveCallback = new Camera.AutoFocusMoveCallback() {
- @Override
- public void onAutoFocusMoving(boolean start, Camera camera) {
- synchronized (mLock) {
- int latestAfRun = mAfRun;
-
- if (DEBUG) {
- Log.v(TAG,
- "onAutoFocusMoving - start " + start + " latest AF run " +
- latestAfRun + ", last AF run " + currentAfRun
- );
- }
-
- if (currentAfRun != latestAfRun) {
- Log.d(TAG,
- "onAutoFocusMoving - ignoring move callbacks from old af run"
- + currentAfRun
- );
- return;
- }
-
- int newAfState = start ?
- CONTROL_AF_STATE_PASSIVE_SCAN :
- CONTROL_AF_STATE_PASSIVE_FOCUSED;
- // We never send CONTROL_AF_STATE_PASSIVE_UNFOCUSED
-
- switch (afMode) {
- case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
- case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
- break;
- // This callback should never be sent in any other AF mode
- default:
- Log.w(TAG, "onAutoFocus - got unexpected onAutoFocus in mode "
- + afMode);
-
- }
-
- mAfState = newAfState;
- }
- }
- };
-
- // Only set move callback if we can call autofocus.
- switch (afMode) {
- case Parameters.FOCUS_MODE_AUTO:
- case Parameters.FOCUS_MODE_MACRO:
- case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
- case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
- mCamera.setAutoFocusMoveCallback(afMoveCallback);
- }
- }
-
-
- // AF Locking
- switch (afTrigger) {
- case CONTROL_AF_TRIGGER_START:
-
- int afStateAfterStart;
- switch (afMode) {
- case Parameters.FOCUS_MODE_AUTO:
- case Parameters.FOCUS_MODE_MACRO:
- afStateAfterStart = CONTROL_AF_STATE_ACTIVE_SCAN;
- break;
- case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
- case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
- afStateAfterStart = CONTROL_AF_STATE_PASSIVE_SCAN;
- break;
- default:
- // EDOF, INFINITY
- afStateAfterStart = CONTROL_AF_STATE_INACTIVE;
- }
-
- final int currentAfRun;
- synchronized (mLock) {
- currentAfRun = ++mAfRun;
- mAfState = afStateAfterStart;
- }
-
- if (DEBUG) {
- Log.v(TAG, "processRequestTriggers - got AF_TRIGGER_START, " +
- "new AF run is " + currentAfRun);
- }
-
- // Avoid calling autofocus unless we are in a state that supports calling this.
- if (afStateAfterStart == CONTROL_AF_STATE_INACTIVE) {
- break;
- }
-
- mCamera.autoFocus(new Camera.AutoFocusCallback() {
- @Override
- public void onAutoFocus(boolean success, Camera camera) {
- synchronized (mLock) {
- int latestAfRun = mAfRun;
-
- if (DEBUG) {
- Log.v(TAG, "onAutoFocus - success " + success + " latest AF run " +
- latestAfRun + ", last AF run " + currentAfRun);
- }
-
- // Ignore old auto-focus results, since another trigger was requested
- if (latestAfRun != currentAfRun) {
- Log.d(TAG, String.format("onAutoFocus - ignoring AF callback " +
- "(old run %d, new run %d)", currentAfRun, latestAfRun));
-
- return;
- }
-
- int newAfState = success ?
- CONTROL_AF_STATE_FOCUSED_LOCKED :
- CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
-
- switch (afMode) {
- case Parameters.FOCUS_MODE_AUTO:
- case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
- case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
- case Parameters.FOCUS_MODE_MACRO:
- break;
- // This callback should never be sent in any other AF mode
- default:
- Log.w(TAG, "onAutoFocus - got unexpected onAutoFocus in mode "
- + afMode);
-
- }
-
- mAfState = newAfState;
- }
- }
- });
-
- break;
- case CONTROL_AF_TRIGGER_CANCEL:
- synchronized (mLock) {
- int updatedAfRun;
-
- synchronized (mLock) {
- updatedAfRun = ++mAfRun;
- mAfState = CONTROL_AF_STATE_INACTIVE;
- }
-
- mCamera.cancelAutoFocus();
-
- if (DEBUG) {
- Log.v(TAG, "processRequestTriggers - got AF_TRIGGER_CANCEL, " +
- "new AF run is " + updatedAfRun);
- }
- }
-
- break;
- case CONTROL_AF_TRIGGER_IDLE:
- // No action necessary. The callbacks will handle transitions.
- break;
- default:
- Log.w(TAG, "processRequestTriggers - ignoring unknown control.afTrigger = "
- + afTrigger);
- }
- }
-
- /**
- * Update the {@code result} camera metadata map with the new value for the
- * {@code control.afState}.
- *
- * <p>AF callbacks are processed in the background, and each call to {@link #mapResultTriggers}
- * will have the latest AF state as reflected by the camera1 callbacks.</p>
- *
- * @param result a non-{@code null} result
- */
- public void mapResultTriggers(CameraMetadataNative result) {
- checkNotNull(result, "result must not be null");
-
- int newAfState;
- synchronized (mLock) {
- newAfState = mAfState;
- }
-
- if (DEBUG && newAfState != mAfStatePrevious) {
- Log.v(TAG, String.format("mapResultTriggers - afState changed from %s to %s",
- afStateToString(mAfStatePrevious), afStateToString(newAfState)));
- }
-
- result.set(CaptureResult.CONTROL_AF_STATE, newAfState);
-
- mAfStatePrevious = newAfState;
- }
-
- private static String afStateToString(int afState) {
- switch (afState) {
- case CONTROL_AF_STATE_ACTIVE_SCAN:
- return "ACTIVE_SCAN";
- case CONTROL_AF_STATE_FOCUSED_LOCKED:
- return "FOCUSED_LOCKED";
- case CONTROL_AF_STATE_INACTIVE:
- return "INACTIVE";
- case CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
- return "NOT_FOCUSED_LOCKED";
- case CONTROL_AF_STATE_PASSIVE_FOCUSED:
- return "PASSIVE_FOCUSED";
- case CONTROL_AF_STATE_PASSIVE_SCAN:
- return "PASSIVE_SCAN";
- case CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
- return "PASSIVE_UNFOCUSED";
- default :
- return "UNKNOWN(" + afState + ")";
- }
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
deleted file mode 100644
index 362ddfae67bf..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
+++ /dev/null
@@ -1,1532 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.ImageFormat;
-import android.graphics.PixelFormat;
-import android.graphics.Rect;
-import android.hardware.Camera;
-import android.hardware.Camera.CameraInfo;
-import android.hardware.Camera.Parameters;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CameraDevice;
-import android.hardware.camera2.CameraMetadata;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.CaptureResult;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.hardware.camera2.params.MeteringRectangle;
-import android.hardware.camera2.params.StreamConfiguration;
-import android.hardware.camera2.params.StreamConfigurationDuration;
-import android.hardware.camera2.utils.ArrayUtils;
-import android.hardware.camera2.utils.ListUtils;
-import android.hardware.camera2.utils.ParamsUtils;
-import android.util.Log;
-import android.util.Range;
-import android.util.Size;
-import android.util.SizeF;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import static com.android.internal.util.Preconditions.*;
-import static android.hardware.camera2.CameraCharacteristics.*;
-import static android.hardware.camera2.legacy.ParameterUtils.*;
-
-/**
- * Provide legacy-specific implementations of camera2 metadata for legacy devices, such as the
- * camera characteristics.
- */
-@SuppressWarnings("deprecation")
-public class LegacyMetadataMapper {
- private static final String TAG = "LegacyMetadataMapper";
- private static final boolean DEBUG = false;
-
- private static final long NS_PER_MS = 1000000;
-
- // from graphics.h
- public static final int HAL_PIXEL_FORMAT_RGBA_8888 = PixelFormat.RGBA_8888;
- public static final int HAL_PIXEL_FORMAT_BGRA_8888 = 0x5;
- public static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
- public static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
-
- // for metadata
- private static final float LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS = 0.0f;
-
- private static final int REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_RAW = 0; // no raw support
- private static final int REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC = 3; // preview, video, cb
- private static final int REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC_STALL = 1; // 1 jpeg only
- private static final int REQUEST_MAX_NUM_INPUT_STREAMS_COUNT = 0; // no reprocessing
-
- /** Assume 3 HAL1 stages: Exposure, Read-out, Post-Processing */
- private static final int REQUEST_PIPELINE_MAX_DEPTH_HAL1 = 3;
- /** Assume 3 shim stages: Preview input, Split output, Format conversion for output */
- private static final int REQUEST_PIPELINE_MAX_DEPTH_OURS = 3;
- /* TODO: Update above maxDepth values once we do more performance measurements */
-
- // For approximating JPEG stall durations
- private static final long APPROXIMATE_CAPTURE_DELAY_MS = 200; // 200 milliseconds
- private static final long APPROXIMATE_SENSOR_AREA_PX = (1 << 23); // 8 megapixels
- private static final long APPROXIMATE_JPEG_ENCODE_TIME_MS = 600; // 600 milliseconds
-
- static final int UNKNOWN_MODE = -1;
-
- // Maximum difference between a preview size aspect ratio and a jpeg size aspect ratio
- private static final float PREVIEW_ASPECT_RATIO_TOLERANCE = 0.01f;
-
- /*
- * Development hijinks: Lie about not supporting certain capabilities
- *
- * - Unblock some CTS tests from running whose main intent is not the metadata itself
- *
- * TODO: Remove these constants and strip out any code that previously relied on them
- * being set to true.
- */
- static final boolean LIE_ABOUT_AE_STATE = false;
- static final boolean LIE_ABOUT_AE_MAX_REGIONS = false;
- static final boolean LIE_ABOUT_AF = false;
- static final boolean LIE_ABOUT_AF_MAX_REGIONS = false;
- static final boolean LIE_ABOUT_AWB_STATE = false;
- static final boolean LIE_ABOUT_AWB = false;
-
-
- /**
- * Create characteristics for a legacy device by mapping the {@code parameters}
- * and {@code info}
- *
- * @param parameters A non-{@code null} parameters set
- * @param info Camera info with camera facing direction and angle of orientation
- * @param cameraId Current camera Id
- * @param displaySize Device display size
- *
- * @return static camera characteristics for a camera device
- *
- * @throws NullPointerException if any of the args were {@code null}
- */
- public static CameraCharacteristics createCharacteristics(Camera.Parameters parameters,
- CameraInfo info, int cameraId, Size displaySize) {
- checkNotNull(parameters, "parameters must not be null");
- checkNotNull(info, "info must not be null");
-
- String paramStr = parameters.flatten();
- android.hardware.CameraInfo outerInfo = new android.hardware.CameraInfo();
- outerInfo.info = info;
-
- return createCharacteristics(paramStr, outerInfo, cameraId, displaySize);
- }
-
- /**
- * Create characteristics for a legacy device by mapping the {@code parameters}
- * and {@code info}
- *
- * @param parameters A string parseable by {@link Camera.Parameters#unflatten}
- * @param info Camera info with camera facing direction and angle of orientation
- * @param cameraId Current camera id
- * @param displaySize Device display size
- * @return static camera characteristics for a camera device
- *
- * @throws NullPointerException if any of the args were {@code null}
- */
- public static CameraCharacteristics createCharacteristics(String parameters,
- android.hardware.CameraInfo info, int cameraId, Size displaySize) {
- checkNotNull(parameters, "parameters must not be null");
- checkNotNull(info, "info must not be null");
- checkNotNull(info.info, "info.info must not be null");
-
- CameraMetadataNative m = new CameraMetadataNative();
-
- mapCharacteristicsFromInfo(m, info.info);
-
- Camera.Parameters params = Camera.getEmptyParameters();
- params.unflatten(parameters);
- mapCharacteristicsFromParameters(m, params);
-
- if (DEBUG) {
- Log.v(TAG, "createCharacteristics metadata:");
- Log.v(TAG, "--------------------------------------------------- (start)");
- m.dumpToLog();
- Log.v(TAG, "--------------------------------------------------- (end)");
- }
-
- m.setCameraId(cameraId);
- m.setDisplaySize(displaySize);
-
- return new CameraCharacteristics(m);
- }
-
- private static void mapCharacteristicsFromInfo(CameraMetadataNative m, CameraInfo i) {
- m.set(LENS_FACING, i.facing == CameraInfo.CAMERA_FACING_BACK ?
- LENS_FACING_BACK : LENS_FACING_FRONT);
- m.set(SENSOR_ORIENTATION, i.orientation);
- }
-
- private static void mapCharacteristicsFromParameters(CameraMetadataNative m,
- Camera.Parameters p) {
-
- /*
- * colorCorrection.*
- */
- m.set(COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
- new int[] { COLOR_CORRECTION_ABERRATION_MODE_FAST,
- COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY });
- /*
- * control.ae*
- */
- mapControlAe(m, p);
- /*
- * control.af*
- */
- mapControlAf(m, p);
- /*
- * control.awb*
- */
- mapControlAwb(m, p);
- /*
- * control.*
- * - Anything that doesn't have a set of related fields
- */
- mapControlOther(m, p);
- /*
- * lens.*
- */
- mapLens(m, p);
- /*
- * flash.*
- */
- mapFlash(m, p);
- /*
- * jpeg.*
- */
- mapJpeg(m, p);
-
- /*
- * noiseReduction.*
- */
- m.set(NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
- new int[] { NOISE_REDUCTION_MODE_FAST,
- NOISE_REDUCTION_MODE_HIGH_QUALITY});
-
- /*
- * scaler.*
- */
- mapScaler(m, p);
-
- /*
- * sensor.*
- */
- mapSensor(m, p);
-
- /*
- * statistics.*
- */
- mapStatistics(m, p);
-
- /*
- * sync.*
- */
- mapSync(m, p);
-
- /*
- * info.supportedHardwareLevel
- */
- m.set(INFO_SUPPORTED_HARDWARE_LEVEL, INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY);
-
- /*
- * scaler.availableStream*, scaler.available*Durations, sensor.info.maxFrameDuration
- */
- mapScalerStreamConfigs(m, p);
-
- // Order matters below: Put this last so that we can read the metadata set previously
-
- /*
- * request.*
- */
- mapRequest(m, p);
-
- }
-
- private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
-
- ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
- /*
- * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
- * YUV_420_888 cpu callbacks -> use camera1 preview sizes
- * Other preview callbacks (CPU) -> use camera1 preview sizes
- * JPEG still capture -> use camera1 still capture sizes
- *
- * Use platform-internal format constants here, since StreamConfigurationMap does the
- * remapping to public format constants.
- */
- List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
- List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
- /*
- * Work-around for b/17589233:
- * - Some HALs's largest preview size aspect ratio does not match the largest JPEG size AR
- * - This causes a large amount of problems with focus/metering because it's relative to
- * preview, making the difference between the JPEG and preview viewport inaccessible
- * - This boils down to metering or focusing areas being "arbitrarily" cropped
- * in the capture result.
- * - Work-around the HAL limitations by removing all of the largest preview sizes
- * until we get one with the same aspect ratio as the jpeg size.
- */
- {
- SizeAreaComparator areaComparator = new SizeAreaComparator();
-
- // Sort preview to min->max
- Collections.sort(previewSizes, areaComparator);
-
- Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
- float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
-
- if (DEBUG) {
- Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f",
- maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
- }
-
- // Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
- while (!previewSizes.isEmpty()) {
- int index = previewSizes.size() - 1; // max is always at the end
- Camera.Size size = previewSizes.get(index);
-
- float previewAspectRatio = size.width * 1.0f / size.height;
-
- if (Math.abs(jpegAspectRatio - previewAspectRatio) >=
- PREVIEW_ASPECT_RATIO_TOLERANCE) {
- previewSizes.remove(index); // Assume removing from end is O(1)
-
- if (DEBUG) {
- Log.v(TAG, String.format(
- "mapScalerStreamConfigs - removed preview size %dx%d, AR=%f "
- + "was not the same",
- size.width, size.height, previewAspectRatio));
- }
- } else {
- break;
- }
- }
-
- if (previewSizes.isEmpty()) {
- // Fall-back to the original faulty behavior, but at least work
- Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching " +
- "JPEG aspect ratio " + jpegAspectRatio);
- previewSizes = p.getSupportedPreviewSizes();
- }
-
- // Sort again, this time in descending order max->min
- Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
- }
-
- appendStreamConfig(availableStreamConfigs,
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
- appendStreamConfig(availableStreamConfigs,
- ImageFormat.YUV_420_888, previewSizes);
- for (int format : p.getSupportedPreviewFormats()) {
- if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
- appendStreamConfig(availableStreamConfigs, format, previewSizes);
- } else if (DEBUG) {
- /*
- * Do not add any formats unknown to us
- * (since it would fail runtime checks in StreamConfigurationMap)
- */
- Log.v(TAG,
- String.format("mapStreamConfigs - Skipping format %x", format));
- }
- }
-
- appendStreamConfig(availableStreamConfigs,
- HAL_PIXEL_FORMAT_BLOB, p.getSupportedPictureSizes());
- /*
- * scaler.availableStreamConfigurations
- */
- m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
- availableStreamConfigs.toArray(new StreamConfiguration[0]));
-
- /*
- * scaler.availableMinFrameDurations
- */
- // No frame durations available
- m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
-
- StreamConfigurationDuration[] jpegStalls =
- new StreamConfigurationDuration[jpegSizes.size()];
- int i = 0;
- long longestStallDuration = -1;
- for (Camera.Size s : jpegSizes) {
- long stallDuration = calculateJpegStallDuration(s);
- jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width,
- s.height, stallDuration);
- if (longestStallDuration < stallDuration) {
- longestStallDuration = stallDuration;
- }
- }
- /*
- * scaler.availableStallDurations
- */
- // Set stall durations for jpeg, other formats use default stall duration
- m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
-
- /*
- * sensor.info.maxFrameDuration
- */
- m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
- }
-
- @SuppressWarnings({"unchecked"})
- private static void mapControlAe(CameraMetadataNative m, Camera.Parameters p) {
- /*
- * control.aeAvailableAntiBandingModes
- */
- List<String> antiBandingModes = p.getSupportedAntibanding();
- if (antiBandingModes != null && antiBandingModes.size() > 0) { // antibanding is optional
- int[] modes = new int[antiBandingModes.size()];
- int j = 0;
- for (String mode : antiBandingModes) {
- int convertedMode = convertAntiBandingMode(mode);
- if (DEBUG && convertedMode == -1) {
- Log.v(TAG, "Antibanding mode " + ((mode == null) ? "NULL" : mode) +
- " not supported, skipping...");
- } else {
- modes[j++] = convertedMode;
- }
- }
- m.set(CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, Arrays.copyOf(modes, j));
- } else {
- m.set(CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, new int[0]);
- }
-
- /*
- * control.aeAvailableTargetFpsRanges
- */
- {
- List<int[]> fpsRanges = p.getSupportedPreviewFpsRange();
- if (fpsRanges == null) {
- throw new AssertionError("Supported FPS ranges cannot be null.");
- }
- int rangesSize = fpsRanges.size();
- if (rangesSize <= 0) {
- throw new AssertionError("At least one FPS range must be supported.");
- }
- Range<Integer>[] ranges = new Range[rangesSize];
- int i = 0;
- for (int[] r : fpsRanges) {
- ranges[i++] = Range.create(
- (int) Math.floor(r[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] / 1000.0),
- (int) Math.ceil(r[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000.0));
- }
- m.set(CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, ranges);
- }
-
- /*
- * control.aeAvailableModes
- */
- {
- List<String> flashModes = p.getSupportedFlashModes();
-
- String[] flashModeStrings = new String[] {
- Camera.Parameters.FLASH_MODE_OFF,
- Camera.Parameters.FLASH_MODE_AUTO,
- Camera.Parameters.FLASH_MODE_ON,
- Camera.Parameters.FLASH_MODE_RED_EYE,
- // Map these manually
- Camera.Parameters.FLASH_MODE_TORCH,
- };
- int[] flashModeInts = new int[] {
- CONTROL_AE_MODE_ON,
- CONTROL_AE_MODE_ON_AUTO_FLASH,
- CONTROL_AE_MODE_ON_ALWAYS_FLASH,
- CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
- };
- int[] aeAvail = ArrayUtils.convertStringListToIntArray(
- flashModes, flashModeStrings, flashModeInts);
-
- // No flash control -> AE is always on
- if (aeAvail == null || aeAvail.length == 0) {
- aeAvail = new int[] {
- CONTROL_AE_MODE_ON
- };
- }
-
- // Note that AE_MODE_OFF is never available.
- m.set(CONTROL_AE_AVAILABLE_MODES, aeAvail);
- }
-
- /*
- * control.aeCompensationRanges
- */
- {
- int min = p.getMinExposureCompensation();
- int max = p.getMaxExposureCompensation();
-
- m.set(CONTROL_AE_COMPENSATION_RANGE, Range.create(min, max));
- }
-
- /*
- * control.aeCompensationStep
- */
- {
- float step = p.getExposureCompensationStep();
-
- m.set(CONTROL_AE_COMPENSATION_STEP, ParamsUtils.createRational(step));
- }
-
- /*
- * control.aeLockAvailable
- */
- {
- boolean aeLockAvailable = p.isAutoExposureLockSupported();
-
- m.set(CONTROL_AE_LOCK_AVAILABLE, aeLockAvailable);
- }
- }
-
-
- @SuppressWarnings({"unchecked"})
- private static void mapControlAf(CameraMetadataNative m, Camera.Parameters p) {
- /*
- * control.afAvailableModes
- */
- {
- List<String> focusModes = p.getSupportedFocusModes();
-
- String[] focusModeStrings = new String[] {
- Camera.Parameters.FOCUS_MODE_AUTO,
- Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE,
- Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO,
- Camera.Parameters.FOCUS_MODE_EDOF,
- Camera.Parameters.FOCUS_MODE_INFINITY,
- Camera.Parameters.FOCUS_MODE_MACRO,
- Camera.Parameters.FOCUS_MODE_FIXED,
- };
-
- int[] focusModeInts = new int[] {
- CONTROL_AF_MODE_AUTO,
- CONTROL_AF_MODE_CONTINUOUS_PICTURE,
- CONTROL_AF_MODE_CONTINUOUS_VIDEO,
- CONTROL_AF_MODE_EDOF,
- CONTROL_AF_MODE_OFF,
- CONTROL_AF_MODE_MACRO,
- CONTROL_AF_MODE_OFF
- };
-
- List<Integer> afAvail = ArrayUtils.convertStringListToIntList(
- focusModes, focusModeStrings, focusModeInts);
-
- // No AF modes supported? That's unpossible!
- if (afAvail == null || afAvail.size() == 0) {
- Log.w(TAG, "No AF modes supported (HAL bug); defaulting to AF_MODE_OFF only");
- afAvail = new ArrayList<Integer>(/*capacity*/1);
- afAvail.add(CONTROL_AF_MODE_OFF);
- }
-
- m.set(CONTROL_AF_AVAILABLE_MODES, ArrayUtils.toIntArray(afAvail));
-
- if (DEBUG) {
- Log.v(TAG, "mapControlAf - control.afAvailableModes set to " +
- ListUtils.listToString(afAvail));
- }
- }
- }
-
- private static void mapControlAwb(CameraMetadataNative m, Camera.Parameters p) {
- /*
- * control.awbAvailableModes
- */
-
- {
- List<String> wbModes = p.getSupportedWhiteBalance();
-
- String[] wbModeStrings = new String[] {
- Camera.Parameters.WHITE_BALANCE_AUTO ,
- Camera.Parameters.WHITE_BALANCE_INCANDESCENT ,
- Camera.Parameters.WHITE_BALANCE_FLUORESCENT ,
- Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT ,
- Camera.Parameters.WHITE_BALANCE_DAYLIGHT ,
- Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT ,
- Camera.Parameters.WHITE_BALANCE_TWILIGHT ,
- Camera.Parameters.WHITE_BALANCE_SHADE ,
- };
-
- int[] wbModeInts = new int[] {
- CONTROL_AWB_MODE_AUTO,
- CONTROL_AWB_MODE_INCANDESCENT ,
- CONTROL_AWB_MODE_FLUORESCENT ,
- CONTROL_AWB_MODE_WARM_FLUORESCENT ,
- CONTROL_AWB_MODE_DAYLIGHT ,
- CONTROL_AWB_MODE_CLOUDY_DAYLIGHT ,
- CONTROL_AWB_MODE_TWILIGHT ,
- CONTROL_AWB_MODE_SHADE ,
- // Note that CONTROL_AWB_MODE_OFF is unsupported
- };
-
- List<Integer> awbAvail = ArrayUtils.convertStringListToIntList(
- wbModes, wbModeStrings, wbModeInts);
-
- // No AWB modes supported? That's unpossible!
- if (awbAvail == null || awbAvail.size() == 0) {
- Log.w(TAG, "No AWB modes supported (HAL bug); defaulting to AWB_MODE_AUTO only");
- awbAvail = new ArrayList<Integer>(/*capacity*/1);
- awbAvail.add(CONTROL_AWB_MODE_AUTO);
- }
-
- m.set(CONTROL_AWB_AVAILABLE_MODES, ArrayUtils.toIntArray(awbAvail));
-
- if (DEBUG) {
- Log.v(TAG, "mapControlAwb - control.awbAvailableModes set to " +
- ListUtils.listToString(awbAvail));
- }
-
-
- /*
- * control.awbLockAvailable
- */
- {
- boolean awbLockAvailable = p.isAutoWhiteBalanceLockSupported();
-
- m.set(CONTROL_AWB_LOCK_AVAILABLE, awbLockAvailable);
- }
- }
- }
-
- private static void mapControlOther(CameraMetadataNative m, Camera.Parameters p) {
- /*
- * android.control.availableVideoStabilizationModes
- */
- {
- int stabModes[] = p.isVideoStabilizationSupported() ?
- new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF,
- CONTROL_VIDEO_STABILIZATION_MODE_ON } :
- new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF };
-
- m.set(CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, stabModes);
- }
-
- /*
- * android.control.maxRegions
- */
- final int AE = 0, AWB = 1, AF = 2;
-
- int[] maxRegions = new int[3];
- maxRegions[AE] = p.getMaxNumMeteringAreas();
- maxRegions[AWB] = 0; // AWB regions not supported in API1
- maxRegions[AF] = p.getMaxNumFocusAreas();
-
- if (LIE_ABOUT_AE_MAX_REGIONS) {
- maxRegions[AE] = 0;
- }
- if (LIE_ABOUT_AF_MAX_REGIONS) {
- maxRegions[AF] = 0;
- }
-
- m.set(CONTROL_MAX_REGIONS, maxRegions);
-
- /*
- * android.control.availableEffects
- */
- List<String> effectModes = p.getSupportedColorEffects();
- int[] supportedEffectModes = (effectModes == null) ? new int[0] :
- ArrayUtils.convertStringListToIntArray(effectModes, sLegacyEffectMode,
- sEffectModes);
- m.set(CONTROL_AVAILABLE_EFFECTS, supportedEffectModes);
-
- /*
- * android.control.availableSceneModes
- */
- int maxNumDetectedFaces = p.getMaxNumDetectedFaces();
- List<String> sceneModes = p.getSupportedSceneModes();
- List<Integer> supportedSceneModes =
- ArrayUtils.convertStringListToIntList(sceneModes, sLegacySceneModes, sSceneModes);
-
- // Special case where the only scene mode listed is AUTO => no scene mode
- if (sceneModes != null && sceneModes.size() == 1 &&
- sceneModes.get(0).equals(Parameters.SCENE_MODE_AUTO)) {
- supportedSceneModes = null;
- }
-
- boolean sceneModeSupported = true;
- if (supportedSceneModes == null && maxNumDetectedFaces == 0) {
- sceneModeSupported = false;
- }
-
- if (sceneModeSupported) {
- if (supportedSceneModes == null) {
- supportedSceneModes = new ArrayList<Integer>();
- }
- if (maxNumDetectedFaces > 0) { // always supports FACE_PRIORITY when face detecting
- supportedSceneModes.add(CONTROL_SCENE_MODE_FACE_PRIORITY);
- }
- // Remove all DISABLED occurrences
- if (supportedSceneModes.contains(CONTROL_SCENE_MODE_DISABLED)) {
- while(supportedSceneModes.remove(new Integer(CONTROL_SCENE_MODE_DISABLED))) {}
- }
- m.set(CONTROL_AVAILABLE_SCENE_MODES, ArrayUtils.toIntArray(supportedSceneModes));
- } else {
- m.set(CONTROL_AVAILABLE_SCENE_MODES, new int[] {CONTROL_SCENE_MODE_DISABLED});
- }
-
- /*
- * android.control.availableModes
- */
- m.set(CONTROL_AVAILABLE_MODES, sceneModeSupported ?
- new int[] { CONTROL_MODE_AUTO, CONTROL_MODE_USE_SCENE_MODE } :
- new int[] { CONTROL_MODE_AUTO });
- }
-
- private static void mapLens(CameraMetadataNative m, Camera.Parameters p) {
- /*
- * We can tell if the lens is fixed focus;
- * but if it's not, we can't tell the minimum focus distance, so leave it null then.
- */
- if (DEBUG) {
- Log.v(TAG, "mapLens - focus-mode='" + p.getFocusMode() + "'");
- }
-
- if (Camera.Parameters.FOCUS_MODE_FIXED.equals(p.getFocusMode())) {
- /*
- * lens.info.minimumFocusDistance
- */
- m.set(LENS_INFO_MINIMUM_FOCUS_DISTANCE, LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS);
-
- if (DEBUG) {
- Log.v(TAG, "mapLens - lens.info.minimumFocusDistance = 0");
- }
- } else {
- if (DEBUG) {
- Log.v(TAG, "mapLens - lens.info.minimumFocusDistance is unknown");
- }
- }
-
- float[] focalLengths = new float[] { p.getFocalLength() };
- m.set(LENS_INFO_AVAILABLE_FOCAL_LENGTHS, focalLengths);
- }
-
- private static void mapFlash(CameraMetadataNative m, Camera.Parameters p) {
- boolean flashAvailable = false;
- List<String> supportedFlashModes = p.getSupportedFlashModes();
-
- if (supportedFlashModes != null) {
- // If only 'OFF' is available, we don't really have flash support
- flashAvailable = !ListUtils.listElementsEqualTo(
- supportedFlashModes, Camera.Parameters.FLASH_MODE_OFF);
- }
-
- /*
- * flash.info.available
- */
- m.set(FLASH_INFO_AVAILABLE, flashAvailable);
- }
-
- private static void mapJpeg(CameraMetadataNative m, Camera.Parameters p) {
- List<Camera.Size> thumbnailSizes = p.getSupportedJpegThumbnailSizes();
-
- if (thumbnailSizes != null) {
- Size[] sizes = convertSizeListToArray(thumbnailSizes);
- Arrays.sort(sizes, new android.hardware.camera2.utils.SizeAreaComparator());
- m.set(JPEG_AVAILABLE_THUMBNAIL_SIZES, sizes);
- }
- }
-
- private static void mapRequest(CameraMetadataNative m, Parameters p) {
- /*
- * request.availableCapabilities
- */
- int[] capabilities = { REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE };
- m.set(REQUEST_AVAILABLE_CAPABILITIES, capabilities);
-
- /*
- * request.availableCharacteristicsKeys
- */
- {
- // TODO: check if the underlying key is supported before listing a key as available
-
- // Note: We only list public keys. Native HALs should list ALL keys regardless of visibility.
-
- Key<?> availableKeys[] = new Key<?>[] {
- CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES ,
- CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES ,
- CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES ,
- CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES ,
- CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE ,
- CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP ,
- CameraCharacteristics.CONTROL_AE_LOCK_AVAILABLE ,
- CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES ,
- CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS ,
- CameraCharacteristics.CONTROL_AVAILABLE_MODES ,
- CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES ,
- CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES ,
- CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES ,
- CameraCharacteristics.CONTROL_AWB_LOCK_AVAILABLE ,
- CameraCharacteristics.CONTROL_MAX_REGIONS ,
- CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE ,
- CameraCharacteristics.FLASH_INFO_AVAILABLE ,
- CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL ,
- CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES ,
- CameraCharacteristics.LENS_FACING ,
- CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS ,
- CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES ,
- CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES ,
- CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS ,
- CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT ,
- CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH ,
- CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM ,
-// CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP ,
- CameraCharacteristics.SCALER_CROPPING_TYPE ,
- CameraCharacteristics.SENSOR_AVAILABLE_TEST_PATTERN_MODES ,
- CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE ,
- CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE ,
- CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE ,
- CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE ,
- CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE ,
- CameraCharacteristics.SENSOR_ORIENTATION ,
- CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES ,
- CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT ,
- CameraCharacteristics.SYNC_MAX_LATENCY ,
- };
- List<Key<?>> characteristicsKeys = new ArrayList<>(Arrays.asList(availableKeys));
-
- /*
- * Add the conditional keys
- */
- if (m.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE) != null) {
- characteristicsKeys.add(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
- }
-
- m.set(REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
- getTagsForKeys(characteristicsKeys.toArray(new Key<?>[0])));
- }
-
- /*
- * request.availableRequestKeys
- */
- {
- CaptureRequest.Key<?> defaultAvailableKeys[] = new CaptureRequest.Key<?>[] {
- CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
- CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
- CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION,
- CaptureRequest.CONTROL_AE_LOCK,
- CaptureRequest.CONTROL_AE_MODE,
- CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
- CaptureRequest.CONTROL_AF_MODE,
- CaptureRequest.CONTROL_AF_TRIGGER,
- CaptureRequest.CONTROL_AWB_LOCK,
- CaptureRequest.CONTROL_AWB_MODE,
- CaptureRequest.CONTROL_CAPTURE_INTENT,
- CaptureRequest.CONTROL_EFFECT_MODE,
- CaptureRequest.CONTROL_MODE,
- CaptureRequest.CONTROL_SCENE_MODE,
- CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
- CaptureRequest.CONTROL_ZOOM_RATIO,
- CaptureRequest.FLASH_MODE,
- CaptureRequest.JPEG_GPS_COORDINATES,
- CaptureRequest.JPEG_GPS_PROCESSING_METHOD,
- CaptureRequest.JPEG_GPS_TIMESTAMP,
- CaptureRequest.JPEG_ORIENTATION,
- CaptureRequest.JPEG_QUALITY,
- CaptureRequest.JPEG_THUMBNAIL_QUALITY,
- CaptureRequest.JPEG_THUMBNAIL_SIZE,
- CaptureRequest.LENS_FOCAL_LENGTH,
- CaptureRequest.NOISE_REDUCTION_MODE,
- CaptureRequest.SCALER_CROP_REGION,
- CaptureRequest.STATISTICS_FACE_DETECT_MODE,
- };
- ArrayList<CaptureRequest.Key<?>> availableKeys =
- new ArrayList<CaptureRequest.Key<?>>(Arrays.asList(defaultAvailableKeys));
-
- if (p.getMaxNumMeteringAreas() > 0) {
- availableKeys.add(CaptureRequest.CONTROL_AE_REGIONS);
- }
- if (p.getMaxNumFocusAreas() > 0) {
- availableKeys.add(CaptureRequest.CONTROL_AF_REGIONS);
- }
-
- CaptureRequest.Key<?> availableRequestKeys[] =
- new CaptureRequest.Key<?>[availableKeys.size()];
- availableKeys.toArray(availableRequestKeys);
- m.set(REQUEST_AVAILABLE_REQUEST_KEYS, getTagsForKeys(availableRequestKeys));
- }
-
- /*
- * request.availableResultKeys
- */
- {
- CaptureResult.Key<?> defaultAvailableKeys[] = new CaptureResult.Key<?>[] {
- CaptureResult.COLOR_CORRECTION_ABERRATION_MODE ,
- CaptureResult.CONTROL_AE_ANTIBANDING_MODE ,
- CaptureResult.CONTROL_AE_EXPOSURE_COMPENSATION ,
- CaptureResult.CONTROL_AE_LOCK ,
- CaptureResult.CONTROL_AE_MODE ,
- CaptureResult.CONTROL_AF_MODE ,
- CaptureResult.CONTROL_AF_STATE ,
- CaptureResult.CONTROL_AWB_MODE ,
- CaptureResult.CONTROL_AWB_LOCK ,
- CaptureResult.CONTROL_MODE ,
- CaptureResult.CONTROL_ZOOM_RATIO ,
- CaptureResult.FLASH_MODE ,
- CaptureResult.JPEG_GPS_COORDINATES ,
- CaptureResult.JPEG_GPS_PROCESSING_METHOD ,
- CaptureResult.JPEG_GPS_TIMESTAMP ,
- CaptureResult.JPEG_ORIENTATION ,
- CaptureResult.JPEG_QUALITY ,
- CaptureResult.JPEG_THUMBNAIL_QUALITY ,
- CaptureResult.LENS_FOCAL_LENGTH ,
- CaptureResult.NOISE_REDUCTION_MODE ,
- CaptureResult.REQUEST_PIPELINE_DEPTH ,
- CaptureResult.SCALER_CROP_REGION ,
- CaptureResult.SENSOR_TIMESTAMP ,
- CaptureResult.STATISTICS_FACE_DETECT_MODE ,
-// CaptureResult.STATISTICS_FACES ,
- };
- List<CaptureResult.Key<?>> availableKeys =
- new ArrayList<CaptureResult.Key<?>>(Arrays.asList(defaultAvailableKeys));
-
- if (p.getMaxNumMeteringAreas() > 0) {
- availableKeys.add(CaptureResult.CONTROL_AE_REGIONS);
- }
- if (p.getMaxNumFocusAreas() > 0) {
- availableKeys.add(CaptureResult.CONTROL_AF_REGIONS);
- }
-
- CaptureResult.Key<?> availableResultKeys[] =
- new CaptureResult.Key<?>[availableKeys.size()];
- availableKeys.toArray(availableResultKeys);
- m.set(REQUEST_AVAILABLE_RESULT_KEYS, getTagsForKeys(availableResultKeys));
- }
-
- /*
- * request.maxNumOutputStreams
- */
- int[] outputStreams = {
- /* RAW */
- REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_RAW,
- /* Processed & Not-Stalling */
- REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC,
- /* Processed & Stalling */
- REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC_STALL,
- };
- m.set(REQUEST_MAX_NUM_OUTPUT_STREAMS, outputStreams);
-
- /*
- * request.maxNumInputStreams
- */
- m.set(REQUEST_MAX_NUM_INPUT_STREAMS, REQUEST_MAX_NUM_INPUT_STREAMS_COUNT);
-
- /*
- * request.partialResultCount
- */
- m.set(REQUEST_PARTIAL_RESULT_COUNT, 1); // No partial results supported
-
- /*
- * request.pipelineMaxDepth
- */
- m.set(REQUEST_PIPELINE_MAX_DEPTH,
- (byte)(REQUEST_PIPELINE_MAX_DEPTH_HAL1 + REQUEST_PIPELINE_MAX_DEPTH_OURS));
- }
-
- private static void mapScaler(CameraMetadataNative m, Parameters p) {
- /*
- * control.zoomRatioRange
- */
- Range<Float> zoomRatioRange = new Range<Float>(1.0f, ParameterUtils.getMaxZoomRatio(p));
- m.set(CONTROL_ZOOM_RATIO_RANGE, zoomRatioRange);
-
- /*
- * scaler.availableMaxDigitalZoom
- */
- m.set(SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, ParameterUtils.getMaxZoomRatio(p));
-
- /*
- * scaler.croppingType = CENTER_ONLY
- */
- m.set(SCALER_CROPPING_TYPE, SCALER_CROPPING_TYPE_CENTER_ONLY);
- }
-
- private static void mapSensor(CameraMetadataNative m, Parameters p) {
- // Use the largest jpeg size (by area) for both active array and pixel array
- Size largestJpegSize = getLargestSupportedJpegSizeByArea(p);
- /*
- * sensor.info.activeArraySize, and preCorrectionActiveArraySize
- */
- {
- Rect activeArrayRect = ParamsUtils.createRect(largestJpegSize);
- m.set(SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArrayRect);
- m.set(SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, activeArrayRect);
- }
-
- /*
- * sensor.availableTestPatternModes
- */
- {
- // Only "OFF" test pattern mode is available
- m.set(SENSOR_AVAILABLE_TEST_PATTERN_MODES, new int[] { SENSOR_TEST_PATTERN_MODE_OFF });
- }
-
- /*
- * sensor.info.pixelArraySize
- */
- m.set(SENSOR_INFO_PIXEL_ARRAY_SIZE, largestJpegSize);
-
- /*
- * sensor.info.physicalSize
- */
- {
- /*
- * Assume focal length is at infinity focus and that the lens is rectilinear.
- */
- float focalLength = p.getFocalLength(); // in mm
- double angleHor = p.getHorizontalViewAngle() * Math.PI / 180; // to radians
- double angleVer = p.getVerticalViewAngle() * Math.PI / 180; // to radians
-
- float height = (float)Math.abs(2 * focalLength * Math.tan(angleVer / 2));
- float width = (float)Math.abs(2 * focalLength * Math.tan(angleHor / 2));
-
- m.set(SENSOR_INFO_PHYSICAL_SIZE, new SizeF(width, height)); // in mm
- }
-
- /*
- * sensor.info.timestampSource
- */
- {
- m.set(SENSOR_INFO_TIMESTAMP_SOURCE, SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
- }
- }
-
- private static void mapStatistics(CameraMetadataNative m, Parameters p) {
- /*
- * statistics.info.availableFaceDetectModes
- */
- int[] fdModes;
-
- if (p.getMaxNumDetectedFaces() > 0) {
- fdModes = new int[] {
- STATISTICS_FACE_DETECT_MODE_OFF,
- STATISTICS_FACE_DETECT_MODE_SIMPLE
- // FULL is never-listed, since we have no way to query it statically
- };
- } else {
- fdModes = new int[] {
- STATISTICS_FACE_DETECT_MODE_OFF
- };
- }
- m.set(STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, fdModes);
-
- /*
- * statistics.info.maxFaceCount
- */
- m.set(STATISTICS_INFO_MAX_FACE_COUNT, p.getMaxNumDetectedFaces());
- }
-
- private static void mapSync(CameraMetadataNative m, Parameters p) {
- /*
- * sync.maxLatency
- */
- m.set(SYNC_MAX_LATENCY, SYNC_MAX_LATENCY_UNKNOWN);
- }
-
- private static void appendStreamConfig(
- ArrayList<StreamConfiguration> configs, int format, List<Camera.Size> sizes) {
- for (Camera.Size size : sizes) {
- StreamConfiguration config =
- new StreamConfiguration(format, size.width, size.height, /*input*/false);
- configs.add(config);
- }
- }
-
- private final static String[] sLegacySceneModes = {
- Parameters.SCENE_MODE_AUTO,
- Parameters.SCENE_MODE_ACTION,
- Parameters.SCENE_MODE_PORTRAIT,
- Parameters.SCENE_MODE_LANDSCAPE,
- Parameters.SCENE_MODE_NIGHT,
- Parameters.SCENE_MODE_NIGHT_PORTRAIT,
- Parameters.SCENE_MODE_THEATRE,
- Parameters.SCENE_MODE_BEACH,
- Parameters.SCENE_MODE_SNOW,
- Parameters.SCENE_MODE_SUNSET,
- Parameters.SCENE_MODE_STEADYPHOTO,
- Parameters.SCENE_MODE_FIREWORKS,
- Parameters.SCENE_MODE_SPORTS,
- Parameters.SCENE_MODE_PARTY,
- Parameters.SCENE_MODE_CANDLELIGHT,
- Parameters.SCENE_MODE_BARCODE,
- Parameters.SCENE_MODE_HDR,
- };
-
- private final static int[] sSceneModes = {
- CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED,
- CameraCharacteristics.CONTROL_SCENE_MODE_ACTION,
- CameraCharacteristics.CONTROL_SCENE_MODE_PORTRAIT,
- CameraCharacteristics.CONTROL_SCENE_MODE_LANDSCAPE,
- CameraCharacteristics.CONTROL_SCENE_MODE_NIGHT,
- CameraCharacteristics.CONTROL_SCENE_MODE_NIGHT_PORTRAIT,
- CameraCharacteristics.CONTROL_SCENE_MODE_THEATRE,
- CameraCharacteristics.CONTROL_SCENE_MODE_BEACH,
- CameraCharacteristics.CONTROL_SCENE_MODE_SNOW,
- CameraCharacteristics.CONTROL_SCENE_MODE_SUNSET,
- CameraCharacteristics.CONTROL_SCENE_MODE_STEADYPHOTO,
- CameraCharacteristics.CONTROL_SCENE_MODE_FIREWORKS,
- CameraCharacteristics.CONTROL_SCENE_MODE_SPORTS,
- CameraCharacteristics.CONTROL_SCENE_MODE_PARTY,
- CameraCharacteristics.CONTROL_SCENE_MODE_CANDLELIGHT,
- CameraCharacteristics.CONTROL_SCENE_MODE_BARCODE,
- CameraCharacteristics.CONTROL_SCENE_MODE_HDR,
- };
-
- static int convertSceneModeFromLegacy(String mode) {
- if (mode == null) {
- return CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED;
- }
- int index = ArrayUtils.getArrayIndex(sLegacySceneModes, mode);
- if (index < 0) {
- return UNKNOWN_MODE;
- }
- return sSceneModes[index];
- }
-
- static String convertSceneModeToLegacy(int mode) {
- if (mode == CONTROL_SCENE_MODE_FACE_PRIORITY) {
- // OK: Let LegacyFaceDetectMapper handle turning face detection on/off
- return Parameters.SCENE_MODE_AUTO;
- }
-
- int index = ArrayUtils.getArrayIndex(sSceneModes, mode);
- if (index < 0) {
- return null;
- }
- return sLegacySceneModes[index];
- }
-
- private final static String[] sLegacyEffectMode = {
- Parameters.EFFECT_NONE,
- Parameters.EFFECT_MONO,
- Parameters.EFFECT_NEGATIVE,
- Parameters.EFFECT_SOLARIZE,
- Parameters.EFFECT_SEPIA,
- Parameters.EFFECT_POSTERIZE,
- Parameters.EFFECT_WHITEBOARD,
- Parameters.EFFECT_BLACKBOARD,
- Parameters.EFFECT_AQUA,
- };
-
- private final static int[] sEffectModes = {
- CameraCharacteristics.CONTROL_EFFECT_MODE_OFF,
- CameraCharacteristics.CONTROL_EFFECT_MODE_MONO,
- CameraCharacteristics.CONTROL_EFFECT_MODE_NEGATIVE,
- CameraCharacteristics.CONTROL_EFFECT_MODE_SOLARIZE,
- CameraCharacteristics.CONTROL_EFFECT_MODE_SEPIA,
- CameraCharacteristics.CONTROL_EFFECT_MODE_POSTERIZE,
- CameraCharacteristics.CONTROL_EFFECT_MODE_WHITEBOARD,
- CameraCharacteristics.CONTROL_EFFECT_MODE_BLACKBOARD,
- CameraCharacteristics.CONTROL_EFFECT_MODE_AQUA,
- };
-
- static int convertEffectModeFromLegacy(String mode) {
- if (mode == null) {
- return CameraCharacteristics.CONTROL_EFFECT_MODE_OFF;
- }
- int index = ArrayUtils.getArrayIndex(sLegacyEffectMode, mode);
- if (index < 0) {
- return UNKNOWN_MODE;
- }
- return sEffectModes[index];
- }
-
- static String convertEffectModeToLegacy(int mode) {
- int index = ArrayUtils.getArrayIndex(sEffectModes, mode);
- if (index < 0) {
- return null;
- }
- return sLegacyEffectMode[index];
- }
-
- /**
- * Convert the ae antibanding mode from api1 into api2.
- *
- * @param mode the api1 mode, {@code null} is allowed and will return {@code -1}.
- *
- * @return The api2 value, or {@code -1} by default if conversion failed
- */
- private static int convertAntiBandingMode(String mode) {
- if (mode == null) {
- return -1;
- }
-
- switch (mode) {
- case Camera.Parameters.ANTIBANDING_OFF: {
- return CONTROL_AE_ANTIBANDING_MODE_OFF;
- }
- case Camera.Parameters.ANTIBANDING_50HZ: {
- return CONTROL_AE_ANTIBANDING_MODE_50HZ;
- }
- case Camera.Parameters.ANTIBANDING_60HZ: {
- return CONTROL_AE_ANTIBANDING_MODE_60HZ;
- }
- case Camera.Parameters.ANTIBANDING_AUTO: {
- return CONTROL_AE_ANTIBANDING_MODE_AUTO;
- }
- default: {
- Log.w(TAG, "convertAntiBandingMode - Unknown antibanding mode " + mode);
- return -1;
- }
- }
- }
-
- /**
- * Convert the ae antibanding mode from api1 into api2.
- *
- * @param mode the api1 mode, {@code null} is allowed and will return {@code MODE_OFF}.
- *
- * @return The api2 value, or {@code MODE_OFF} by default if conversion failed
- */
- static int convertAntiBandingModeOrDefault(String mode) {
- int antiBandingMode = convertAntiBandingMode(mode);
- if (antiBandingMode == -1) {
- return CONTROL_AE_ANTIBANDING_MODE_OFF;
- }
-
- return antiBandingMode;
- }
-
- private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
- int[] legacyFps = new int[2];
- legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower();
- legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper();
- return legacyFps;
- }
-
- /**
- * Return the stall duration for a given output jpeg size in nanoseconds.
- *
- * <p>An 8mp image is chosen to have a stall duration of 0.8 seconds.</p>
- */
- private static long calculateJpegStallDuration(Camera.Size size) {
- long baseDuration = APPROXIMATE_CAPTURE_DELAY_MS * NS_PER_MS; // 200ms for capture
- long area = size.width * (long) size.height;
- long stallPerArea = APPROXIMATE_JPEG_ENCODE_TIME_MS * NS_PER_MS /
- APPROXIMATE_SENSOR_AREA_PX; // 600ms stall for 8mp
- return baseDuration + area * stallPerArea;
- }
-
- /**
- * Set the legacy parameters using the {@link LegacyRequest legacy request}.
- *
- * <p>The legacy request's parameters are changed as a side effect of calling this
- * method.</p>
- *
- * @param request a non-{@code null} legacy request
- */
- public static void convertRequestMetadata(LegacyRequest request) {
- LegacyRequestMapper.convertRequestMetadata(request);
- }
-
- private static final int[] sAllowedTemplates = {
- CameraDevice.TEMPLATE_PREVIEW,
- CameraDevice.TEMPLATE_STILL_CAPTURE,
- CameraDevice.TEMPLATE_RECORD,
- // Disallowed templates in legacy mode:
- // CameraDevice.TEMPLATE_VIDEO_SNAPSHOT,
- // CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG,
- // CameraDevice.TEMPLATE_MANUAL
- };
-
- /**
- * Create a request template
- *
- * @param c a non-{@code null} camera characteristics for this camera
- * @param templateId a non-negative template ID
- *
- * @return a non-{@code null} request template
- *
- * @throws IllegalArgumentException if {@code templateId} was invalid
- *
- * @see android.hardware.camera2.CameraDevice#TEMPLATE_MANUAL
- */
- public static CameraMetadataNative createRequestTemplate(
- CameraCharacteristics c, int templateId) {
- if (!ArrayUtils.contains(sAllowedTemplates, templateId)) {
- throw new IllegalArgumentException("templateId out of range");
- }
-
- CameraMetadataNative m = new CameraMetadataNative();
-
- /*
- * NOTE: If adding new code here and it needs to query the static info,
- * query the camera characteristics, so we can reuse this for api2 code later
- * to create our own templates in the framework
- */
-
- /*
- * control.*
- */
-
- // control.awbMode
- m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
- // AWB is always unconditionally available in API1 devices
-
- // control.aeAntibandingMode
- m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
-
- // control.aeExposureCompensation
- m.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
-
- // control.aeLock
- m.set(CaptureRequest.CONTROL_AE_LOCK, false);
-
- // control.aePrecaptureTrigger
- m.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
-
- // control.afTrigger
- m.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
-
- // control.awbMode
- m.set(CaptureRequest.CONTROL_AWB_MODE, CONTROL_AWB_MODE_AUTO);
-
- // control.awbLock
- m.set(CaptureRequest.CONTROL_AWB_LOCK, false);
-
- // control.aeRegions, control.awbRegions, control.afRegions
- {
- Rect activeArray = c.get(SENSOR_INFO_ACTIVE_ARRAY_SIZE);
- MeteringRectangle[] activeRegions = new MeteringRectangle[] {
- new MeteringRectangle(/*x*/0, /*y*/0, /*width*/activeArray.width() - 1,
- /*height*/activeArray.height() - 1,/*weight*/0)};
- m.set(CaptureRequest.CONTROL_AE_REGIONS, activeRegions);
- m.set(CaptureRequest.CONTROL_AWB_REGIONS, activeRegions);
- m.set(CaptureRequest.CONTROL_AF_REGIONS, activeRegions);
- }
-
- // control.captureIntent
- {
- int captureIntent;
- switch (templateId) {
- case CameraDevice.TEMPLATE_PREVIEW:
- captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
- break;
- case CameraDevice.TEMPLATE_STILL_CAPTURE:
- captureIntent = CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
- break;
- case CameraDevice.TEMPLATE_RECORD:
- captureIntent = CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
- break;
- default:
- // Can't get anything else since it's guarded by the IAE check
- throw new AssertionError("Impossible; keep in sync with sAllowedTemplates");
- }
- m.set(CaptureRequest.CONTROL_CAPTURE_INTENT, captureIntent);
- }
-
- // control.aeMode
- m.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
- // AE is always unconditionally available in API1 devices
-
- // control.mode
- m.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
-
- // control.afMode
- {
- Float minimumFocusDistance = c.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
-
- int afMode;
- if (minimumFocusDistance != null &&
- minimumFocusDistance == LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS) {
- // Cannot control auto-focus with fixed-focus cameras
- afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
- } else {
- // If a minimum focus distance is reported; the camera must have AF
- afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
-
- if (templateId == CameraDevice.TEMPLATE_RECORD ||
- templateId == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
- if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES),
- CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
- afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO;
- }
- } else if (templateId == CameraDevice.TEMPLATE_PREVIEW ||
- templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
- if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES),
- CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
- afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
- }
- }
- }
-
- if (DEBUG) {
- Log.v(TAG, "createRequestTemplate (templateId=" + templateId + ")," +
- " afMode=" + afMode + ", minimumFocusDistance=" + minimumFocusDistance);
- }
-
- m.set(CaptureRequest.CONTROL_AF_MODE, afMode);
- }
-
- {
- // control.aeTargetFpsRange
- Range<Integer>[] availableFpsRange = c.
- get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
-
- // Pick FPS range with highest max value, tiebreak on higher min value
- Range<Integer> bestRange = availableFpsRange[0];
- for (Range<Integer> r : availableFpsRange) {
- if (bestRange.getUpper() < r.getUpper()) {
- bestRange = r;
- } else if (bestRange.getUpper() == r.getUpper() &&
- bestRange.getLower() < r.getLower()) {
- bestRange = r;
- }
- }
- m.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, bestRange);
- }
-
- // control.sceneMode -- DISABLED is always available
- m.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
-
- // control.zoomRatio -- 1.0
- m.set(CaptureRequest.CONTROL_ZOOM_RATIO, 1.0f);
-
- /*
- * statistics.*
- */
-
- // statistics.faceDetectMode
- m.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_OFF);
-
- /*
- * flash.*
- */
-
- // flash.mode
- m.set(CaptureRequest.FLASH_MODE, FLASH_MODE_OFF);
-
- /*
- * noiseReduction.*
- */
- if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
- m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_HIGH_QUALITY);
- } else {
- m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_FAST);
- }
-
- /*
- * colorCorrection.*
- */
- if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
- m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
- COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
- } else {
- m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
- COLOR_CORRECTION_ABERRATION_MODE_FAST);
- }
-
- /*
- * lens.*
- */
-
- // lens.focalLength
- m.set(CaptureRequest.LENS_FOCAL_LENGTH,
- c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)[0]);
-
- /*
- * jpeg.*
- */
-
- // jpeg.thumbnailSize - set smallest non-zero size if possible
- Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
- m.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, (sizes.length > 1) ? sizes[1] : sizes[0]);
-
- // TODO: map other request template values
- return m;
- }
-
- private static int[] getTagsForKeys(Key<?>[] keys) {
- int[] tags = new int[keys.length];
-
- for (int i = 0; i < keys.length; ++i) {
- tags[i] = keys[i].getNativeKey().getTag();
- }
-
- return tags;
- }
-
- private static int[] getTagsForKeys(CaptureRequest.Key<?>[] keys) {
- int[] tags = new int[keys.length];
-
- for (int i = 0; i < keys.length; ++i) {
- tags[i] = keys[i].getNativeKey().getTag();
- }
-
- return tags;
- }
-
- private static int[] getTagsForKeys(CaptureResult.Key<?>[] keys) {
- int[] tags = new int[keys.length];
-
- for (int i = 0; i < keys.length; ++i) {
- tags[i] = keys[i].getNativeKey().getTag();
- }
-
- return tags;
- }
-
- /**
- * Convert the requested AF mode into its equivalent supported parameter.
- *
- * @param mode {@code CONTROL_AF_MODE}
- * @param supportedFocusModes list of camera1's supported focus modes
- * @return the stringified af mode, or {@code null} if its not supported
- */
- static String convertAfModeToLegacy(int mode, List<String> supportedFocusModes) {
- if (supportedFocusModes == null || supportedFocusModes.isEmpty()) {
- Log.w(TAG, "No focus modes supported; API1 bug");
- return null;
- }
-
- String param = null;
- switch (mode) {
- case CONTROL_AF_MODE_AUTO:
- param = Parameters.FOCUS_MODE_AUTO;
- break;
- case CONTROL_AF_MODE_CONTINUOUS_PICTURE:
- param = Parameters.FOCUS_MODE_CONTINUOUS_PICTURE;
- break;
- case CONTROL_AF_MODE_CONTINUOUS_VIDEO:
- param = Parameters.FOCUS_MODE_CONTINUOUS_VIDEO;
- break;
- case CONTROL_AF_MODE_EDOF:
- param = Parameters.FOCUS_MODE_EDOF;
- break;
- case CONTROL_AF_MODE_MACRO:
- param = Parameters.FOCUS_MODE_MACRO;
- break;
- case CONTROL_AF_MODE_OFF:
- if (supportedFocusModes.contains(Parameters.FOCUS_MODE_FIXED)) {
- param = Parameters.FOCUS_MODE_FIXED;
- } else {
- param = Parameters.FOCUS_MODE_INFINITY;
- }
- }
-
- if (!supportedFocusModes.contains(param)) {
- // Weed out bad user input by setting to the first arbitrary focus mode
- String defaultMode = supportedFocusModes.get(0);
- Log.w(TAG,
- String.format(
- "convertAfModeToLegacy - ignoring unsupported mode %d, " +
- "defaulting to %s", mode, defaultMode));
- param = defaultMode;
- }
-
- return param;
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyRequest.java b/core/java/android/hardware/camera2/legacy/LegacyRequest.java
deleted file mode 100644
index f13ac5c881e0..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyRequest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.Camera;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.util.Size;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Hold important data necessary to build the camera1 parameters up from a capture request.
- */
-public class LegacyRequest {
- /** Immutable characteristics for the camera corresponding to this request */
- public final CameraCharacteristics characteristics;
- /** Immutable capture request, as requested by the user */
- public final CaptureRequest captureRequest;
- /** Immutable api1 preview buffer size at the time of the request */
- public final Size previewSize;
- /** <em>Mutable</em> camera parameters */
- public final Camera.Parameters parameters;
-
- /**
- * Create a new legacy request; the parameters are copied.
- *
- * @param characteristics immutable static camera characteristics for this camera
- * @param captureRequest immutable user-defined capture request
- * @param previewSize immutable internal preview size used for {@link Camera#setPreviewSurface}
- * @param parameters the initial camera1 parameter state; (copied) can be mutated
- */
- public LegacyRequest(CameraCharacteristics characteristics, CaptureRequest captureRequest,
- Size previewSize, Camera.Parameters parameters) {
- this.characteristics = checkNotNull(characteristics, "characteristics must not be null");
- this.captureRequest = checkNotNull(captureRequest, "captureRequest must not be null");
- this.previewSize = checkNotNull(previewSize, "previewSize must not be null");
- checkNotNull(parameters, "parameters must not be null");
-
- this.parameters = Camera.getParametersCopy(parameters);
- }
-
- /**
- * Update the current parameters in-place to be a copy of the new parameters.
- *
- * @param parameters non-{@code null} parameters for api1 camera
- */
- public void setParameters(Camera.Parameters parameters) {
- checkNotNull(parameters, "parameters must not be null");
-
- this.parameters.copyFrom(parameters);
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
deleted file mode 100644
index 3a46379477e9..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
+++ /dev/null
@@ -1,688 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.Rect;
-import android.hardware.Camera;
-import android.hardware.Camera.Parameters;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.params.MeteringRectangle;
-import android.hardware.camera2.utils.ListUtils;
-import android.hardware.camera2.utils.ParamsUtils;
-import android.location.Location;
-import android.util.Log;
-import android.util.Range;
-import android.util.Size;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-import static android.hardware.camera2.CaptureRequest.*;
-
-/**
- * Provide legacy-specific implementations of camera2 CaptureRequest for legacy devices.
- */
-@SuppressWarnings("deprecation")
-public class LegacyRequestMapper {
- private static final String TAG = "LegacyRequestMapper";
- private static final boolean DEBUG = false;
-
- /** Default quality for android.jpeg.quality, android.jpeg.thumbnailQuality */
- private static final byte DEFAULT_JPEG_QUALITY = 85;
-
/**
 * Set the legacy parameters using the {@link LegacyRequest legacy request}.
 *
 * <p>The legacy request's parameters are changed as a side effect of calling this
 * method.</p>
 *
 * <p>Each camera2 request key is translated into the nearest api1
 * {@link Camera.Parameters} setting. Keys that the legacy API cannot express
 * (awbRegions, testPatternMode, non-default aberration/noise-reduction modes, ...)
 * are only logged and otherwise ignored.</p>
 *
 * @param legacyRequest a non-{@code null} legacy request
 */
public static void convertRequestMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;

    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    /*
     * scaler.cropRegion
     */
    ParameterUtils.ZoomData zoomData;
    {
        // Derive the api1 zoom index from cropRegion/zoomRatio; zoomData is reused
        // below for converting metering/focus region coordinates.
        zoomData = ParameterUtils.convertToLegacyZoom(activeArray,
                request.get(SCALER_CROP_REGION),
                request.get(CONTROL_ZOOM_RATIO),
                previewSize,
                params);

        if (params.isZoomSupported()) {
            params.setZoom(zoomData.zoomIndex);
        } else if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
        }
    }

    /*
     * colorCorrection.*
     */
    // colorCorrection.aberrationMode
    {
        int aberrationMode = ParamsUtils.getOrDefault(request,
                COLOR_CORRECTION_ABERRATION_MODE,
                /*defaultValue*/COLOR_CORRECTION_ABERRATION_MODE_FAST);

        // api1 has no aberration-correction setting; modes other than FAST/HQ only
        // produce a warning, nothing is written to params.
        if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST &&
                aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " +
                    "colorCorrection.aberrationMode = " + aberrationMode);
        }
    }

    /*
     * control.ae*
     */
    // control.aeAntibandingMode
    {
        String legacyMode;
        Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
        if (antiBandingMode != null) {
            legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
        } else {
            // No explicit mode requested: pick the first supported mode, in order of
            // preference (AUTO first).
            legacyMode = ListUtils.listSelectFirstFrom(params.getSupportedAntibanding(),
                    new String[] {
                        Parameters.ANTIBANDING_AUTO,
                        Parameters.ANTIBANDING_OFF,
                        Parameters.ANTIBANDING_50HZ,
                        Parameters.ANTIBANDING_60HZ,
                    });
        }

        if (legacyMode != null) {
            params.setAntibanding(legacyMode);
        }
    }

    /*
     * control.aeRegions, afRegions
     */
    {
        // aeRegions
        {
            // Use aeRegions if available, fall back to using awbRegions if present
            MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
            if (request.get(CONTROL_AWB_REGIONS) != null) {
                Log.w(TAG, "convertRequestMetadata - control.awbRegions setting is not " +
                        "supported, ignoring value");
            }
            int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
            List<Camera.Area> meteringAreaList = convertMeteringRegionsToLegacy(
                    activeArray, zoomData, aeRegions, maxNumMeteringAreas,
                    /*regionName*/"AE");

            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumMeteringAreas > 0) {
                params.setMeteringAreas(meteringAreaList);
            }
        }

        // afRegions
        {
            MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
            int maxNumFocusAreas = params.getMaxNumFocusAreas();
            List<Camera.Area> focusAreaList = convertMeteringRegionsToLegacy(
                    activeArray, zoomData, afRegions, maxNumFocusAreas,
                    /*regionName*/"AF");

            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumFocusAreas > 0) {
                params.setFocusAreas(focusAreaList);
            }
        }
    }

    // control.aeTargetFpsRange
    // api1 advertises ranges in units of 1/1000 fps; match the requested range against
    // each supported range after rounding its bounds to whole fps.
    Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
    if (aeFpsRange != null) {
        int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);

        int[] rangeToApply = null;
        for(int[] range : params.getSupportedPreviewFpsRange()) {
            // Round range up/down to integer FPS value
            int intRangeLow = (int) Math.floor(range[0] / 1000.0) * 1000;
            int intRangeHigh = (int) Math.ceil(range[1] / 1000.0) * 1000;
            if (legacyFps[0] == intRangeLow && legacyFps[1] == intRangeHigh) {
                rangeToApply = range;
                break;
            }
        }
        if (rangeToApply != null) {
            params.setPreviewFpsRange(rangeToApply[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                    rangeToApply[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        } else {
            Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
        }
    }

    /*
     * control
     */

    // control.aeExposureCompensation
    {
        Range<Integer> compensationRange =
                characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
        int compensation = ParamsUtils.getOrDefault(request,
                CONTROL_AE_EXPOSURE_COMPENSATION,
                /*defaultValue*/0);

        // Out-of-range values are clamped to 0 (no compensation), not to the nearest bound.
        if (!compensationRange.contains(compensation)) {
            Log.w(TAG,
                    "convertRequestMetadata - control.aeExposureCompensation " +
                    "is out of range, ignoring value");
            compensation = 0;
        }

        params.setExposureCompensation(compensation);
    }

    // control.aeLock
    {
        Boolean aeLock = getIfSupported(request, CONTROL_AE_LOCK, /*defaultValue*/false,
                params.isAutoExposureLockSupported(),
                /*allowedValue*/false);

        if (aeLock != null) {
            params.setAutoExposureLock(aeLock);
        }

        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
        }

        // TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
    }

    // control.aeMode, flash.mode
    mapAeAndFlashMode(request, /*out*/params);

    // control.afMode
    {
        int afMode = ParamsUtils.getOrDefault(request, CONTROL_AF_MODE,
                /*defaultValue*/CONTROL_AF_MODE_OFF);
        String focusMode = LegacyMetadataMapper.convertAfModeToLegacy(afMode,
                params.getSupportedFocusModes());

        if (focusMode != null) {
            params.setFocusMode(focusMode);
        }

        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.afMode "
                    + afMode + " mapped to " + focusMode);
        }
    }

    // control.awbMode
    {
        Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE,
                /*defaultValue*/CONTROL_AWB_MODE_AUTO,
                params.getSupportedWhiteBalance() != null,
                /*allowedValue*/CONTROL_AWB_MODE_AUTO);

        String whiteBalanceMode = null;
        if (awbMode != null) { // null iff AWB is not supported by camera1 api
            whiteBalanceMode = convertAwbModeToLegacy(awbMode);
            params.setWhiteBalance(whiteBalanceMode);
        }

        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.awbMode "
                    + awbMode + " mapped to " + whiteBalanceMode);
        }
    }

    // control.awbLock
    {
        Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/false,
                params.isAutoWhiteBalanceLockSupported(),
                /*allowedValue*/false);

        if (awbLock != null) {
            params.setAutoWhiteBalanceLock(awbLock);
        }

        // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
    }

    // control.captureIntent
    {
        int captureIntent = ParamsUtils.getOrDefault(request,
                CONTROL_CAPTURE_INTENT,
                /*defaultValue*/CONTROL_CAPTURE_INTENT_PREVIEW);

        captureIntent = filterSupportedCaptureIntent(captureIntent);

        // api1 only has a boolean "recording hint"; set it for the two video intents.
        params.setRecordingHint(
                captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD ||
                captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    }

    // control.videoStabilizationMode
    {
        Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE,
                /*defaultValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF,
                params.isVideoStabilizationSupported(),
                /*allowedValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF);

        if (stabMode != null) {
            params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
        }
    }

    // lens.focusDistance
    {
        boolean infinityFocusSupported =
                ListUtils.listContains(params.getSupportedFocusModes(),
                        Parameters.FOCUS_MODE_INFINITY);
        Float focusDistance = getIfSupported(request, LENS_FOCUS_DISTANCE,
                /*defaultValue*/0f, infinityFocusSupported, /*allowedValue*/0f);

        // NOTE(review): this warning interpolates infinityFocusSupported rather than the
        // requested distance value — looks like a leftover; confirm the intended message.
        if (focusDistance == null || focusDistance != 0f) {
            Log.w(TAG,
                    "convertRequestToMetadata - Ignoring android.lens.focusDistance "
                            + infinityFocusSupported + ", only 0.0f is supported");
        }
    }

    // control.sceneMode, control.mode
    {
        // TODO: Map FACE_PRIORITY scene mode to face detection.

        if (params.getSupportedSceneModes() != null) {
            int controlMode = ParamsUtils.getOrDefault(request, CONTROL_MODE,
                    /*defaultValue*/CONTROL_MODE_AUTO);
            String modeToSet;
            switch (controlMode) {
                case CONTROL_MODE_USE_SCENE_MODE: {
                    int sceneMode = ParamsUtils.getOrDefault(request, CONTROL_SCENE_MODE,
                            /*defaultValue*/CONTROL_SCENE_MODE_DISABLED);
                    String legacySceneMode = LegacyMetadataMapper.
                            convertSceneModeToLegacy(sceneMode);
                    if (legacySceneMode != null) {
                        modeToSet = legacySceneMode;
                    } else {
                        modeToSet = Parameters.SCENE_MODE_AUTO;
                        Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
                    }
                    break;
                }
                case CONTROL_MODE_AUTO: {
                    modeToSet = Parameters.SCENE_MODE_AUTO;
                    break;
                }
                default: {
                    Log.w(TAG, "Control mode " + controlMode +
                            " is unsupported, defaulting to AUTO");
                    modeToSet = Parameters.SCENE_MODE_AUTO;
                }
            }
            params.setSceneMode(modeToSet);
        }
    }

    // control.effectMode
    {
        if (params.getSupportedColorEffects() != null) {
            int effectMode = ParamsUtils.getOrDefault(request, CONTROL_EFFECT_MODE,
                    /*defaultValue*/CONTROL_EFFECT_MODE_OFF);
            String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
            if (legacyEffectMode != null) {
                params.setColorEffect(legacyEffectMode);
            } else {
                params.setColorEffect(Parameters.EFFECT_NONE);
                Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
            }
        }
    }

    /*
     * sensor
     */

    // sensor.testPattern
    {
        // api1 has no test-pattern support; anything but OFF is warned about and dropped.
        int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE,
                /*defaultValue*/SENSOR_TEST_PATTERN_MODE_OFF);
        if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
            Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode "
                    + testPatternMode + "; only OFF is supported");
        }
    }

    /*
     * jpeg.*
     */

    // jpeg.gpsLocation
    {
        Location location = request.get(JPEG_GPS_LOCATION);
        if (location != null) {
            if (checkForCompleteGpsData(location)) {
                params.setGpsAltitude(location.getAltitude());
                params.setGpsLatitude(location.getLatitude());
                params.setGpsLongitude(location.getLongitude());
                params.setGpsProcessingMethod(location.getProvider().toUpperCase());
                params.setGpsTimestamp(location.getTime());
            } else {
                Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
            }
        } else {
            // No location in the request: clear any stale GPS EXIF state.
            params.removeGpsData();
        }
    }

    // jpeg.orientation
    {
        // NOTE(review): the getOrDefault default is already (orientation or 0), so this is
        // equivalent to setRotation(orientation != null ? orientation : 0).
        Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
        params.setRotation(ParamsUtils.getOrDefault(request, JPEG_ORIENTATION,
                (orientation == null) ? 0 : orientation));
    }

    // jpeg.quality
    {
        // 0xFF & ... treats the byte quality value as unsigned (0-255).
        params.setJpegQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY,
                DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailQuality
    {
        params.setJpegThumbnailQuality(0xFF & ParamsUtils.getOrDefault(request,
                JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailSize
    {
        List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();

        if (sizes != null && sizes.size() > 0) {
            Size s = request.get(JPEG_THUMBNAIL_SIZE);
            boolean invalidSize = (s == null) ? false : !ParameterUtils.containsSize(sizes,
                    s.getWidth(), s.getHeight());
            if (invalidSize) {
                Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
            }
            if (s == null || invalidSize) {
                // (0,0) = "no thumbnail" in Camera API 1
                params.setJpegThumbnailSize(/*width*/0, /*height*/0);
            } else {
                params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
            }
        }
    }

    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    {
        // api1 has no noise-reduction control; unsupported modes only produce a warning.
        int mode = ParamsUtils.getOrDefault(request,
                NOISE_REDUCTION_MODE,
                /*defaultValue*/NOISE_REDUCTION_MODE_FAST);

        if (mode != NOISE_REDUCTION_MODE_FAST &&
                mode != NOISE_REDUCTION_MODE_HIGH_QUALITY) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " +
                    "noiseReduction.mode = " + mode);
        }
    }
}
-
- private static boolean checkForCompleteGpsData(Location location) {
- return location != null && location.getProvider() != null && location.getTime() != 0;
- }
-
- static int filterSupportedCaptureIntent(int captureIntent) {
- switch (captureIntent) {
- case CONTROL_CAPTURE_INTENT_CUSTOM:
- case CONTROL_CAPTURE_INTENT_PREVIEW:
- case CONTROL_CAPTURE_INTENT_STILL_CAPTURE:
- case CONTROL_CAPTURE_INTENT_VIDEO_RECORD:
- case CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT:
- break;
- case CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG:
- case CONTROL_CAPTURE_INTENT_MANUAL:
- captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
- Log.w(TAG, "Unsupported control.captureIntent value " + captureIntent
- + "; default to PREVIEW");
- default:
- captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
- Log.w(TAG, "Unknown control.captureIntent value " + captureIntent
- + "; default to PREVIEW");
- }
-
- return captureIntent;
- }
-
- private static List<Camera.Area> convertMeteringRegionsToLegacy(
- Rect activeArray, ParameterUtils.ZoomData zoomData,
- MeteringRectangle[] meteringRegions, int maxNumMeteringAreas, String regionName) {
- if (meteringRegions == null || maxNumMeteringAreas <= 0) {
- if (maxNumMeteringAreas > 0) {
- return Arrays.asList(ParameterUtils.CAMERA_AREA_DEFAULT);
- } else {
- return null;
- }
- }
-
- // Add all non-zero weight regions to the list
- List<MeteringRectangle> meteringRectangleList = new ArrayList<>();
- for (MeteringRectangle rect : meteringRegions) {
- if (rect.getMeteringWeight() != MeteringRectangle.METERING_WEIGHT_DONT_CARE) {
- meteringRectangleList.add(rect);
- }
- }
-
- if (meteringRectangleList.size() == 0) {
- Log.w(TAG, "Only received metering rectangles with weight 0.");
- return Arrays.asList(ParameterUtils.CAMERA_AREA_DEFAULT);
- }
-
- // Ignore any regions beyond our maximum supported count
- int countMeteringAreas =
- Math.min(maxNumMeteringAreas, meteringRectangleList.size());
- List<Camera.Area> meteringAreaList = new ArrayList<>(countMeteringAreas);
-
- for (int i = 0; i < countMeteringAreas; ++i) {
- MeteringRectangle rect = meteringRectangleList.get(i);
-
- ParameterUtils.MeteringData meteringData =
- ParameterUtils.convertMeteringRectangleToLegacy(activeArray, rect, zoomData);
- meteringAreaList.add(meteringData.meteringArea);
- }
-
- if (maxNumMeteringAreas < meteringRectangleList.size()) {
- Log.w(TAG,
- "convertMeteringRegionsToLegacy - Too many requested " + regionName +
- " regions, ignoring all beyond the first " + maxNumMeteringAreas);
- }
-
- if (DEBUG) {
- Log.v(TAG, "convertMeteringRegionsToLegacy - " + regionName + " areas = "
- + ParameterUtils.stringFromAreaList(meteringAreaList));
- }
-
- return meteringAreaList;
- }
-
/**
 * Map {@code control.aeMode} and {@code flash.mode} from the request onto the single
 * api1 flash-mode parameter.
 *
 * <p>The requested aeMode takes priority: flash.mode is only consulted when
 * aeMode == ON. If the camera lists no matching api1 flash mode, the request is warned
 * about and the setting is left at OFF (or untouched when even OFF is unsupported).</p>
 *
 * @param r the capture request to read aeMode/flashMode from
 * @param p out-parameter api1 parameters whose flash mode is updated
 */
private static void mapAeAndFlashMode(CaptureRequest r, /*out*/Parameters p) {
    int flashMode = ParamsUtils.getOrDefault(r, FLASH_MODE, FLASH_MODE_OFF);
    int aeMode = ParamsUtils.getOrDefault(r, CONTROL_AE_MODE, CONTROL_AE_MODE_ON);

    List<String> supportedFlashModes = p.getSupportedFlashModes();

    String flashModeSetting = null;

    // Flash is OFF by default, on cameras that support flash
    if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_OFF)) {
        flashModeSetting = Parameters.FLASH_MODE_OFF;
    }

    /*
     * Map all of the control.aeMode* enums, but ignore AE_MODE_OFF since we never support it
     */

    // Ignore flash.mode controls unless aeMode == ON
    if (aeMode == CONTROL_AE_MODE_ON) {
        if (flashMode == FLASH_MODE_TORCH) {
            if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_TORCH)) {
                flashModeSetting = Parameters.FLASH_MODE_TORCH;
            } else {
                Log.w(TAG, "mapAeAndFlashMode - Ignore flash.mode == TORCH;" +
                        "camera does not support it");
            }
        } else if (flashMode == FLASH_MODE_SINGLE) {
            if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_ON)) {
                flashModeSetting = Parameters.FLASH_MODE_ON;
            } else {
                Log.w(TAG, "mapAeAndFlashMode - Ignore flash.mode == SINGLE;" +
                        "camera does not support it");
            }
        } else {
            // Use the default FLASH_MODE_OFF
        }
    } else if (aeMode == CONTROL_AE_MODE_ON_ALWAYS_FLASH) {
        if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_ON)) {
            flashModeSetting = Parameters.FLASH_MODE_ON;
        } else {
            Log.w(TAG, "mapAeAndFlashMode - Ignore control.aeMode == ON_ALWAYS_FLASH;" +
                    "camera does not support it");
        }
    } else if (aeMode == CONTROL_AE_MODE_ON_AUTO_FLASH) {
        if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_AUTO)) {
            flashModeSetting = Parameters.FLASH_MODE_AUTO;
        } else {
            Log.w(TAG, "mapAeAndFlashMode - Ignore control.aeMode == ON_AUTO_FLASH;" +
                    "camera does not support it");
        }
    } else if (aeMode == CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
        if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_RED_EYE)) {
            flashModeSetting = Parameters.FLASH_MODE_RED_EYE;
        } else {
            Log.w(TAG, "mapAeAndFlashMode - Ignore control.aeMode == ON_AUTO_FLASH_REDEYE;"
                    + "camera does not support it");
        }
    } else {
        // Default to aeMode == ON, flash = OFF
    }

    // flashModeSetting stays null (and nothing is written) when the camera supports no
    // flash modes at all and no mapping matched.
    if (flashModeSetting != null) {
        p.setFlashMode(flashModeSetting);
    }

    if (DEBUG) {
        Log.v(TAG,
                "mapAeAndFlashMode - set flash.mode (api1) to " + flashModeSetting
                + ", requested (api2) " + flashMode
                + ", supported (api1) " + ListUtils.listToString(supportedFlashModes));
    }
}
-
- /**
- * Returns null if the anti-banding mode enum is not supported.
- */
- private static String convertAeAntiBandingModeToLegacy(int mode) {
- switch (mode) {
- case CONTROL_AE_ANTIBANDING_MODE_OFF: {
- return Parameters.ANTIBANDING_OFF;
- }
- case CONTROL_AE_ANTIBANDING_MODE_50HZ: {
- return Parameters.ANTIBANDING_50HZ;
- }
- case CONTROL_AE_ANTIBANDING_MODE_60HZ: {
- return Parameters.ANTIBANDING_60HZ;
- }
- case CONTROL_AE_ANTIBANDING_MODE_AUTO: {
- return Parameters.ANTIBANDING_AUTO;
- }
- default: {
- return null;
- }
- }
- }
-
- private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
- int[] legacyFps = new int[2];
- legacyFps[Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower() * 1000;
- legacyFps[Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper() * 1000;
- return legacyFps;
- }
-
- private static String convertAwbModeToLegacy(int mode) {
- switch (mode) {
- case CONTROL_AWB_MODE_AUTO:
- return Camera.Parameters.WHITE_BALANCE_AUTO;
- case CONTROL_AWB_MODE_INCANDESCENT:
- return Camera.Parameters.WHITE_BALANCE_INCANDESCENT;
- case CONTROL_AWB_MODE_FLUORESCENT:
- return Camera.Parameters.WHITE_BALANCE_FLUORESCENT;
- case CONTROL_AWB_MODE_WARM_FLUORESCENT:
- return Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT;
- case CONTROL_AWB_MODE_DAYLIGHT:
- return Camera.Parameters.WHITE_BALANCE_DAYLIGHT;
- case CONTROL_AWB_MODE_CLOUDY_DAYLIGHT:
- return Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT;
- case CONTROL_AWB_MODE_TWILIGHT:
- return Camera.Parameters.WHITE_BALANCE_TWILIGHT;
- case CONTROL_AWB_MODE_SHADE:
- return Parameters.WHITE_BALANCE_SHADE;
- default:
- Log.w(TAG, "convertAwbModeToLegacy - unrecognized control.awbMode" + mode);
- return Camera.Parameters.WHITE_BALANCE_AUTO;
- }
- }
-
-
- /**
- * Return {@code null} if the value is not supported, otherwise return the retrieved key's
- * value from the request (or the default value if it wasn't set).
- *
- * <p>If the fetched value in the request is equivalent to {@code allowedValue},
- * then omit the warning (e.g. turning off AF lock on a camera
- * that always has the AF lock turned off is a silent no-op), but still return {@code null}.</p>
- *
- * <p>Logs a warning to logcat if the key is not supported by api1 camera device.</p.
- */
- private static <T> T getIfSupported(
- CaptureRequest r, CaptureRequest.Key<T> key, T defaultValue, boolean isSupported,
- T allowedValue) {
- T val = ParamsUtils.getOrDefault(r, key, defaultValue);
-
- if (!isSupported) {
- if (!Objects.equals(val, allowedValue)) {
- Log.w(TAG, key.getName() + " is not supported; ignoring requested value " + val);
- }
- return null;
- }
-
- return val;
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
deleted file mode 100644
index 09edf74f0d4c..000000000000
--- a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
+++ /dev/null
@@ -1,529 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.Rect;
-import android.hardware.Camera;
-import android.hardware.Camera.Parameters;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.CaptureResult;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.hardware.camera2.legacy.ParameterUtils.WeightedRectangle;
-import android.hardware.camera2.legacy.ParameterUtils.ZoomData;
-import android.hardware.camera2.params.MeteringRectangle;
-import android.hardware.camera2.utils.ListUtils;
-import android.hardware.camera2.utils.ParamsUtils;
-import android.util.Log;
-import android.util.Size;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static android.hardware.camera2.CaptureResult.*;
-
-/**
- * Provide legacy-specific implementations of camera2 CaptureResult for legacy devices.
- */
-@SuppressWarnings("deprecation")
-public class LegacyResultMapper {
- private static final String TAG = "LegacyResultMapper";
- private static final boolean DEBUG = false;
-
- private LegacyRequest mCachedRequest = null;
- private CameraMetadataNative mCachedResult = null;
-
/**
 * Generate capture result metadata from the legacy camera request.
 *
 * <p>This method caches and reuses the result from the previous call to this method if
 * the {@code parameters} and {@code captureRequest} of the subsequent
 * {@link LegacyRequest} passed to this method have not changed.</p>
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @param timestamp the timestamp to use for this result in nanoseconds.
 *
 * @return {@link CameraMetadataNative} object containing result metadata.
 */
public CameraMetadataNative cachedConvertResultMetadata(
        LegacyRequest legacyRequest, long timestamp) {
    CameraMetadataNative result;
    boolean cached;

    /*
     * Attempt to look up the result from the cache if the parameters haven't changed
     */
    if (mCachedRequest != null &&
            legacyRequest.parameters.same(mCachedRequest.parameters) &&
            legacyRequest.captureRequest.equals(mCachedRequest.captureRequest)) {
        // Cache hit: hand out a fresh copy, never the cached instance itself.
        result = new CameraMetadataNative(mCachedResult);
        cached = true;
    } else {
        result = convertResultMetadata(legacyRequest);
        cached = false;

        // Always cache a *copy* of the metadata result,
        // since api2's client side takes ownership of it after it receives a result
        mCachedRequest = legacyRequest;
        mCachedResult = new CameraMetadataNative(result);
    }

    /*
     * Unconditionally set fields that change in every single frame
     */
    {
        // sensor.timestamp
        result.set(SENSOR_TIMESTAMP, timestamp);
    }

    if (DEBUG) {
        Log.v(TAG, "cachedConvertResultMetadata - cached? " + cached +
                " timestamp = " + timestamp);

        Log.v(TAG, "----- beginning of result dump ------");
        result.dumpToLog();
        Log.v(TAG, "----- end of result dump ------");
    }

    return result;
}
-
/**
 * Generate capture result metadata from the legacy camera request.
 *
 * <p>Builds a fresh {@link CameraMetadataNative} by reading back the api1 parameter state
 * and echoing request keys that have no readable api1 counterpart (jpeg.gpsLocation,
 * jpeg.orientation, noiseReduction.mode, ...).</p>
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @return a {@link CameraMetadataNative} object containing result metadata.
 */
private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;

    CameraMetadataNative result = new CameraMetadataNative();

    Rect activeArraySize = characteristics.get(
            CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    // Zoom/crop state is needed to convert metering/focus areas back into
    // active-array coordinates.
    ZoomData zoomData = ParameterUtils.convertToLegacyZoom(activeArraySize,
            request.get(CaptureRequest.SCALER_CROP_REGION),
            request.get(CaptureRequest.CONTROL_ZOOM_RATIO),
            previewSize, params);

    /*
     * colorCorrection
     */
    // colorCorrection.aberrationMode
    {
        // Echoed from the request; api1 has no readable aberration state.
        result.set(COLOR_CORRECTION_ABERRATION_MODE,
                request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
    }

    /*
     * control
     */

    /*
     * control.ae*
     */
    mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/params);

    /*
     * control.af*
     */
    mapAf(result, activeArraySize, zoomData, /*out*/params);

    /*
     * control.awb*
     */
    mapAwb(result, /*out*/params);

    /*
     * control.captureIntent
     */
    {
        int captureIntent = ParamsUtils.getOrDefault(request,
                CaptureRequest.CONTROL_CAPTURE_INTENT,
                /*defaultValue*/CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);

        // Report the same clamped intent that the request mapper actually applied.
        captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);

        result.set(CONTROL_CAPTURE_INTENT, captureIntent);
    }

    /*
     * control.mode
     */
    {
        int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE,
                CONTROL_MODE_AUTO);
        if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
            result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
        } else {
            result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
        }
    }

    /*
     * control.sceneMode
     */
    {
        String legacySceneMode = params.getSceneMode();
        int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
            // In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
            // the result to say SCENE_MODE == FACE_PRIORITY.
        } else {
            Log.w(TAG, "Unknown scene mode " + legacySceneMode +
                    " returned by camera HAL, setting to disabled.");
            result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
        }
    }

    /*
     * control.effectMode
     */
    {
        String legacyEffectMode = params.getColorEffect();
        int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
        } else {
            Log.w(TAG, "Unknown effect mode " + legacyEffectMode +
                    " returned by camera HAL, setting to off.");
            result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
        }
    }

    // control.videoStabilizationMode
    {
        // ON only when both supported and currently enabled.
        int stabMode =
                (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ?
                    CONTROL_VIDEO_STABILIZATION_MODE_ON :
                    CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
    }

    /*
     * flash
     */
    {
        // flash.mode, flash.state mapped in mapAeAndFlashMode
    }

    /*
     * lens
     */
    // lens.focusDistance
    {
        // Only infinity focus has a known distance (0.0f in camera2 convention).
        if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
            result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
        }
    }

    // lens.focalLength
    result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());

    /*
     * request
     */
    // request.pipelineDepth
    result.set(REQUEST_PIPELINE_DEPTH,
            characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));

    /*
     * scaler
     */
    mapScaler(result, zoomData, /*out*/params);

    /*
     * sensor
     */
    // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
    {
        // Unconditionally no test patterns
        result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
    }

    /*
     * jpeg
     */
    // jpeg.gpsLocation
    result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));

    // jpeg.orientation
    result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));

    // jpeg.quality
    result.set(JPEG_QUALITY, (byte) params.getJpegQuality());

    // jpeg.thumbnailQuality
    result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());

    // jpeg.thumbnailSize
    Camera.Size s = params.getJpegThumbnailSize();
    if (s != null) {
        result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
    } else {
        Log.w(TAG, "Null thumbnail size received from parameters.");
    }

    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));

    return result;
}
-
/**
 * Map AE-related api1 state into the camera2 {@code control.ae*} result keys.
 *
 * @param m out-parameter result metadata to fill
 * @param characteristics static camera characteristics (used by flash-mode mapping)
 * @param request the capture request, used to cross-check the applied aeLock
 * @param activeArray sensor active array, for converting metering-area coordinates
 * @param zoomData current crop/zoom state used in the area conversion
 * @param p api1 parameters to read the AE state from
 *          (NOTE(review): {@code p} is only read here despite the {@code out} tag)
 */
private static void mapAe(CameraMetadataNative m,
        CameraCharacteristics characteristics,
        CaptureRequest request, Rect activeArray, ZoomData zoomData, /*out*/Parameters p) {
    // control.aeAntiBandingMode
    {
        int antiBandingMode = LegacyMetadataMapper.convertAntiBandingModeOrDefault(
                p.getAntibanding());
        m.set(CONTROL_AE_ANTIBANDING_MODE, antiBandingMode);
    }

    // control.aeExposureCompensation
    {
        m.set(CONTROL_AE_EXPOSURE_COMPENSATION, p.getExposureCompensation());
    }

    // control.aeLock
    {
        // Report unlocked when the camera has no lock support at all.
        boolean lock = p.isAutoExposureLockSupported() ? p.getAutoExposureLock() : false;
        m.set(CONTROL_AE_LOCK, lock);
        if (DEBUG) {
            Log.v(TAG,
                    "mapAe - android.control.aeLock = " + lock +
                    ", supported = " + p.isAutoExposureLockSupported());
        }

        // Warn when the applied lock state diverges from what the request asked for.
        Boolean requestLock = request.get(CaptureRequest.CONTROL_AE_LOCK);
        if (requestLock != null && requestLock != lock) {
            Log.w(TAG,
                    "mapAe - android.control.aeLock was requested to " + requestLock +
                    " but resulted in " + lock);
        }
    }

    // control.aeMode, flash.mode, flash.state
    mapAeAndFlashMode(m, characteristics, p);

    // control.aeState
    if (LegacyMetadataMapper.LIE_ABOUT_AE_STATE) {
        // Lie to pass CTS temporarily.
        // TODO: Implement precapture trigger, after which we can report CONVERGED ourselves
        m.set(CONTROL_AE_STATE, CONTROL_AE_STATE_CONVERGED);
    }

    // control.aeRegions
    if (p.getMaxNumMeteringAreas() > 0) {
        if (DEBUG) {
            String meteringAreas = p.get("metering-areas");
            Log.v(TAG, "mapAe - parameter dump; metering-areas: " + meteringAreas);
        }

        MeteringRectangle[] meteringRectArray = getMeteringRectangles(activeArray,
                zoomData, p.getMeteringAreas(), "AE");

        m.set(CONTROL_AE_REGIONS, meteringRectArray);
    }

}
-
- private static void mapAf(CameraMetadataNative m,
- Rect activeArray, ZoomData zoomData, Camera.Parameters p) {
- // control.afMode
- m.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(p.getFocusMode()));
-
- // control.afRegions
- if (p.getMaxNumFocusAreas() > 0) {
- if (DEBUG) {
- String focusAreas = p.get("focus-areas");
- Log.v(TAG, "mapAe - parameter dump; focus-areas: " + focusAreas);
- }
-
- MeteringRectangle[] meteringRectArray = getMeteringRectangles(activeArray,
- zoomData, p.getFocusAreas(), "AF");
-
- m.set(CONTROL_AF_REGIONS, meteringRectArray);
- }
- }
-
- private static void mapAwb(CameraMetadataNative m, Camera.Parameters p) {
- // control.awbLock
- {
- boolean lock = p.isAutoWhiteBalanceLockSupported() ?
- p.getAutoWhiteBalanceLock() : false;
- m.set(CONTROL_AWB_LOCK, lock);
- }
-
- // control.awbMode
- {
- int awbMode = convertLegacyAwbMode(p.getWhiteBalance());
- m.set(CONTROL_AWB_MODE, awbMode);
- }
- }
-
- private static MeteringRectangle[] getMeteringRectangles(Rect activeArray, ZoomData zoomData,
- List<Camera.Area> meteringAreaList, String regionName) {
- List<MeteringRectangle> meteringRectList = new ArrayList<>();
- if (meteringAreaList != null) {
- for (Camera.Area area : meteringAreaList) {
- WeightedRectangle rect =
- ParameterUtils.convertCameraAreaToActiveArrayRectangle(
- activeArray, zoomData, area);
-
- meteringRectList.add(rect.toMetering());
- }
- }
-
- if (DEBUG) {
- Log.v(TAG,
- "Metering rectangles for " + regionName + ": "
- + ListUtils.listToString(meteringRectList));
- }
-
- return meteringRectList.toArray(new MeteringRectangle[0]);
- }
-
- /** Map results for control.aeMode, flash.mode, flash.state */
- private static void mapAeAndFlashMode(CameraMetadataNative m,
- CameraCharacteristics characteristics, Parameters p) {
- // Default: AE mode on but flash never fires
- int flashMode = FLASH_MODE_OFF;
- // If there is no flash on this camera, the state is always unavailable
- // , otherwise it's only known for TORCH/SINGLE modes
- Integer flashState = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)
- ? null : FLASH_STATE_UNAVAILABLE;
- int aeMode = CONTROL_AE_MODE_ON;
-
- String flashModeSetting = p.getFlashMode();
-
- if (flashModeSetting != null) {
- switch (flashModeSetting) {
- case Parameters.FLASH_MODE_OFF:
- break; // ok, using default
- case Parameters.FLASH_MODE_AUTO:
- aeMode = CONTROL_AE_MODE_ON_AUTO_FLASH;
- break;
- case Parameters.FLASH_MODE_ON:
- // flashMode = SINGLE + aeMode = ON is indistinguishable from ON_ALWAYS_FLASH
- flashMode = FLASH_MODE_SINGLE;
- aeMode = CONTROL_AE_MODE_ON_ALWAYS_FLASH;
- flashState = FLASH_STATE_FIRED;
- break;
- case Parameters.FLASH_MODE_RED_EYE:
- aeMode = CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
- break;
- case Parameters.FLASH_MODE_TORCH:
- flashMode = FLASH_MODE_TORCH;
- flashState = FLASH_STATE_FIRED;
- break;
- default:
- Log.w(TAG,
- "mapAeAndFlashMode - Ignoring unknown flash mode " + p.getFlashMode());
- }
- }
-
- // flash.state
- m.set(FLASH_STATE, flashState);
- // flash.mode
- m.set(FLASH_MODE, flashMode);
- // control.aeMode
- m.set(CONTROL_AE_MODE, aeMode);
- }
-
- private static int convertLegacyAfMode(String mode) {
- if (mode == null) {
- Log.w(TAG, "convertLegacyAfMode - no AF mode, default to OFF");
- return CONTROL_AF_MODE_OFF;
- }
-
- switch (mode) {
- case Parameters.FOCUS_MODE_AUTO:
- return CONTROL_AF_MODE_AUTO;
- case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
- return CONTROL_AF_MODE_CONTINUOUS_PICTURE;
- case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
- return CONTROL_AF_MODE_CONTINUOUS_VIDEO;
- case Parameters.FOCUS_MODE_EDOF:
- return CONTROL_AF_MODE_EDOF;
- case Parameters.FOCUS_MODE_MACRO:
- return CONTROL_AF_MODE_MACRO;
- case Parameters.FOCUS_MODE_FIXED:
- return CONTROL_AF_MODE_OFF;
- case Parameters.FOCUS_MODE_INFINITY:
- return CONTROL_AF_MODE_OFF;
- default:
- Log.w(TAG, "convertLegacyAfMode - unknown mode " + mode + " , ignoring");
- return CONTROL_AF_MODE_OFF;
- }
- }
-
- private static int convertLegacyAwbMode(String mode) {
- if (mode == null) {
- // OK: camera1 api may not support changing WB modes; assume AUTO
- return CONTROL_AWB_MODE_AUTO;
- }
-
- switch (mode) {
- case Camera.Parameters.WHITE_BALANCE_AUTO:
- return CONTROL_AWB_MODE_AUTO;
- case Camera.Parameters.WHITE_BALANCE_INCANDESCENT:
- return CONTROL_AWB_MODE_INCANDESCENT;
- case Camera.Parameters.WHITE_BALANCE_FLUORESCENT:
- return CONTROL_AWB_MODE_FLUORESCENT;
- case Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT:
- return CONTROL_AWB_MODE_WARM_FLUORESCENT;
- case Camera.Parameters.WHITE_BALANCE_DAYLIGHT:
- return CONTROL_AWB_MODE_DAYLIGHT;
- case Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT:
- return CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
- case Camera.Parameters.WHITE_BALANCE_TWILIGHT:
- return CONTROL_AWB_MODE_TWILIGHT;
- case Camera.Parameters.WHITE_BALANCE_SHADE:
- return CONTROL_AWB_MODE_SHADE;
- default:
- Log.w(TAG, "convertAwbMode - unrecognized WB mode " + mode);
- return CONTROL_AWB_MODE_AUTO;
- }
- }
-
- /** Map results for scaler.* */
- private static void mapScaler(CameraMetadataNative m,
- ZoomData zoomData,
- /*out*/Parameters p) {
- /*
- * scaler.cropRegion
- */
- {
- m.set(SCALER_CROP_REGION, zoomData.reportedCrop);
- }
-
- /*
- * control.zoomRatio
- */
- {
- m.set(CONTROL_ZOOM_RATIO, zoomData.reportedZoomRatio);
- }
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/ParameterUtils.java b/core/java/android/hardware/camera2/legacy/ParameterUtils.java
deleted file mode 100644
index eb435989e9a0..000000000000
--- a/core/java/android/hardware/camera2/legacy/ParameterUtils.java
+++ /dev/null
@@ -1,1099 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.Matrix;
-import android.graphics.Point;
-import android.graphics.Rect;
-import android.graphics.RectF;
-import android.hardware.Camera;
-import android.hardware.Camera.Area;
-import android.hardware.camera2.params.Face;
-import android.hardware.camera2.params.MeteringRectangle;
-import android.hardware.camera2.utils.ListUtils;
-import android.hardware.camera2.utils.ParamsUtils;
-import android.hardware.camera2.utils.SizeAreaComparator;
-import android.util.Size;
-import android.util.SizeF;
-
-import android.util.Log;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Various utilities for dealing with camera API1 parameters.
- */
-@SuppressWarnings("deprecation")
-public class ParameterUtils {
- /** Upper/left minimal point of a normalized rectangle */
- public static final int NORMALIZED_RECTANGLE_MIN = -1000;
- /** Lower/right maximal point of a normalized rectangle */
- public static final int NORMALIZED_RECTANGLE_MAX = 1000;
- /** The default normalized rectangle spans the entire size of the preview viewport */
- public static final Rect NORMALIZED_RECTANGLE_DEFAULT = new Rect(
- NORMALIZED_RECTANGLE_MIN,
- NORMALIZED_RECTANGLE_MIN,
- NORMALIZED_RECTANGLE_MAX,
- NORMALIZED_RECTANGLE_MAX);
- /** The default normalized area uses the default normalized rectangle with a weight=1 */
- public static final Camera.Area CAMERA_AREA_DEFAULT =
- new Camera.Area(new Rect(NORMALIZED_RECTANGLE_DEFAULT),
- /*weight*/1);
- /** Empty rectangle {@code 0x0+0,0} */
- public static final Rect RECTANGLE_EMPTY =
- new Rect(/*left*/0, /*top*/0, /*right*/0, /*bottom*/0);
-
- private static final double ASPECT_RATIO_TOLERANCE = 0.05f;
-
- /**
- * Calculate effective/reported zoom data from a user-specified crop region.
- */
- public static class ZoomData {
- /** Zoom index used by {@link Camera.Parameters#setZoom} */
- public final int zoomIndex;
- /** Effective crop-region given the zoom index, coordinates relative to active-array */
- public final Rect previewCrop;
- /** Reported crop-region given the zoom index, coordinates relative to active-array */
- public final Rect reportedCrop;
- /** Reported zoom ratio given the zoom index */
- public final float reportedZoomRatio;
-
- public ZoomData(int zoomIndex, Rect previewCrop, Rect reportedCrop,
- float reportedZoomRatio) {
- this.zoomIndex = zoomIndex;
- this.previewCrop = previewCrop;
- this.reportedCrop = reportedCrop;
- this.reportedZoomRatio = reportedZoomRatio;
- }
- }
-
- /**
- * Calculate effective/reported metering data from a user-specified metering region.
- */
- public static class MeteringData {
- /**
- * The metering area scaled to the range of [-1000, 1000].
- * <p>Values outside of this range are clipped to be within the range.</p>
- */
- public final Camera.Area meteringArea;
- /**
- * Effective preview metering region, coordinates relative to active-array.
- *
- * <p>Clipped to fit inside of the (effective) preview crop region.</p>
- */
- public final Rect previewMetering;
- /**
- * Reported metering region, coordinates relative to active-array.
- *
- * <p>Clipped to fit inside of the (reported) resulting crop region.</p>
- */
- public final Rect reportedMetering;
-
- public MeteringData(Area meteringArea, Rect previewMetering, Rect reportedMetering) {
- this.meteringArea = meteringArea;
- this.previewMetering = previewMetering;
- this.reportedMetering = reportedMetering;
- }
- }
-
- /**
- * A weighted rectangle is an arbitrary rectangle (the coordinate system is unknown) with an
- * arbitrary weight.
- *
- * <p>The user of this class must know what the coordinate system ahead of time; it's
- * then possible to convert to a more concrete type such as a metering rectangle or a face.
- * </p>
- *
- * <p>When converting to a more concrete type, out-of-range values are clipped; this prevents
- * possible illegal argument exceptions being thrown at runtime.</p>
- */
- public static class WeightedRectangle {
- /** Arbitrary rectangle (the range is user-defined); never {@code null}. */
- public final Rect rect;
- /** Arbitrary weight (the range is user-defined). */
- public final int weight;
-
- /**
- * Create a new weighted-rectangle from a non-{@code null} rectangle; the {@code weight}
- * can be unbounded.
- */
- public WeightedRectangle(Rect rect, int weight) {
- this.rect = checkNotNull(rect, "rect must not be null");
- this.weight = weight;
- }
-
- /**
- * Convert to a metering rectangle, clipping any of the values to stay within range.
- *
- * <p>If values are clipped, a warning is printed to logcat.</p>
- *
- * @return a new metering rectangle
- */
- public MeteringRectangle toMetering() {
- int weight = clip(this.weight,
- MeteringRectangle.METERING_WEIGHT_MIN,
- MeteringRectangle.METERING_WEIGHT_MAX,
- rect,
- "weight");
-
- int x = clipLower(rect.left, /*lo*/0, rect, "left");
- int y = clipLower(rect.top, /*lo*/0, rect, "top");
- int w = clipLower(rect.width(), /*lo*/0, rect, "width");
- int h = clipLower(rect.height(), /*lo*/0, rect, "height");
-
- return new MeteringRectangle(x, y, w, h, weight);
- }
-
- /**
- * Convert to a face; the rect is considered to be the bounds, and the weight
- * is considered to be the score.
- *
- * <p>If the score is out of range of {@value Face#SCORE_MIN}, {@value Face#SCORE_MAX},
- * the score is clipped first and a warning is printed to logcat.</p>
- *
- * <p>If the id is negative, the id is changed to 0 and a warning is printed to
- * logcat.</p>
- *
- * <p>All other parameters are passed-through as-is.</p>
- *
- * @return a new face with the optional features set
- */
- public Face toFace(
- int id, Point leftEyePosition, Point rightEyePosition, Point mouthPosition) {
- int idSafe = clipLower(id, /*lo*/0, rect, "id");
- int score = clip(weight,
- Face.SCORE_MIN,
- Face.SCORE_MAX,
- rect,
- "score");
-
- return new Face(rect, score, idSafe, leftEyePosition, rightEyePosition, mouthPosition);
- }
-
- /**
- * Convert to a face; the rect is considered to be the bounds, and the weight
- * is considered to be the score.
- *
- * <p>If the score is out of range of {@value Face#SCORE_MIN}, {@value Face#SCORE_MAX},
- * the score is clipped first and a warning is printed to logcat.</p>
- *
- * <p>All other parameters are passed-through as-is.</p>
- *
- * @return a new face without the optional features
- */
- public Face toFace() {
- int score = clip(weight,
- Face.SCORE_MIN,
- Face.SCORE_MAX,
- rect,
- "score");
-
- return new Face(rect, score);
- }
-
- private static int clipLower(int value, int lo, Rect rect, String name) {
- return clip(value, lo, /*hi*/Integer.MAX_VALUE, rect, name);
- }
-
- private static int clip(int value, int lo, int hi, Rect rect, String name) {
- if (value < lo) {
- Log.w(TAG, "toMetering - Rectangle " + rect + " "
- + name + " too small, clip to " + lo);
- value = lo;
- } else if (value > hi) {
- Log.w(TAG, "toMetering - Rectangle " + rect + " "
- + name + " too small, clip to " + hi);
- value = hi;
- }
-
- return value;
- }
- }
-
- private static final String TAG = "ParameterUtils";
- private static final boolean DEBUG = false;
-
- /** getZoomRatios stores zoom ratios in 1/100 increments, e.x. a zoom of 3.2 is 320 */
- private static final int ZOOM_RATIO_MULTIPLIER = 100;
-
- /**
- * Convert a camera API1 size into a util size
- */
- public static Size convertSize(Camera.Size size) {
- checkNotNull(size, "size must not be null");
-
- return new Size(size.width, size.height);
- }
-
- /**
- * Convert a camera API1 list of sizes into a util list of sizes
- */
- public static List<Size> convertSizeList(List<Camera.Size> sizeList) {
- checkNotNull(sizeList, "sizeList must not be null");
-
- List<Size> sizes = new ArrayList<>(sizeList.size());
- for (Camera.Size s : sizeList) {
- sizes.add(new Size(s.width, s.height));
- }
- return sizes;
- }
-
- /**
- * Convert a camera API1 list of sizes into an array of sizes
- */
- public static Size[] convertSizeListToArray(List<Camera.Size> sizeList) {
- checkNotNull(sizeList, "sizeList must not be null");
-
- Size[] array = new Size[sizeList.size()];
- int ctr = 0;
- for (Camera.Size s : sizeList) {
- array[ctr++] = new Size(s.width, s.height);
- }
- return array;
- }
-
- /**
- * Check if the camera API1 list of sizes contains a size with the given dimens.
- */
- public static boolean containsSize(List<Camera.Size> sizeList, int width, int height) {
- checkNotNull(sizeList, "sizeList must not be null");
- for (Camera.Size s : sizeList) {
- if (s.height == height && s.width == width) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Returns the largest supported picture size, as compared by its area.
- */
- public static Size getLargestSupportedJpegSizeByArea(Camera.Parameters params) {
- checkNotNull(params, "params must not be null");
-
- List<Size> supportedJpegSizes = convertSizeList(params.getSupportedPictureSizes());
- return SizeAreaComparator.findLargestByArea(supportedJpegSizes);
- }
-
- /**
- * Convert a camera area into a human-readable string.
- */
- public static String stringFromArea(Camera.Area area) {
- if (area == null) {
- return null;
- } else {
- StringBuilder sb = new StringBuilder();
- Rect r = area.rect;
-
- sb.setLength(0);
- sb.append("(["); sb.append(r.left); sb.append(',');
- sb.append(r.top); sb.append("]["); sb.append(r.right);
- sb.append(','); sb.append(r.bottom); sb.append(']');
-
- sb.append(',');
- sb.append(area.weight);
- sb.append(')');
-
- return sb.toString();
- }
- }
-
- /**
- * Convert a camera area list into a human-readable string
- * @param areaList a list of areas (null is ok)
- */
- public static String stringFromAreaList(List<Camera.Area> areaList) {
- StringBuilder sb = new StringBuilder();
-
- if (areaList == null) {
- return null;
- }
-
- int i = 0;
- for (Camera.Area area : areaList) {
- if (area == null) {
- sb.append("null");
- } else {
- sb.append(stringFromArea(area));
- }
-
- if (i != areaList.size() - 1) {
- sb.append(", ");
- }
-
- i++;
- }
-
- return sb.toString();
- }
-
- /**
- * Calculate the closest zoom index for the user-requested crop region by rounding
- * up to the closest (largest or equal) possible zoom crop.
- *
- * <p>If the requested crop region exceeds the size of the active array, it is
- * shrunk to fit inside of the active array first.</p>
- *
- * <p>Since all api1 camera devices only support a discrete set of zooms, we have
- * to translate the per-pixel-granularity requested crop region into a per-zoom-index
- * granularity.</p>
- *
- * <p>Furthermore, since the zoom index and zoom levels also depends on the field-of-view
- * of the preview, the current preview {@code streamSize} is also used.</p>
- *
- * <p>The calculated crop regions are then written to in-place to {@code reportedCropRegion}
- * and {@code previewCropRegion}, in coordinates relative to the active array.</p>
- *
- * @param params non-{@code null} camera api1 parameters
- * @param activeArray active array dimensions, in sensor space
- * @param streamSize stream size dimensions, in pixels
- * @param cropRegion user-specified crop region, in active array coordinates
- * @param reportedCropRegion (out parameter) what the result for {@code cropRegion} looks like
- * @param previewCropRegion (out parameter) what the visual preview crop is
- * @return
- * the zoom index inclusively between 0 and {@code Parameters#getMaxZoom},
- * where 0 means the camera is not zoomed
- *
- * @throws NullPointerException if any of the args were {@code null}
- */
- public static int getClosestAvailableZoomCrop(
- Camera.Parameters params, Rect activeArray,
- Size streamSize, Rect cropRegion,
- /*out*/
- Rect reportedCropRegion,
- Rect previewCropRegion) {
- checkNotNull(params, "params must not be null");
- checkNotNull(activeArray, "activeArray must not be null");
- checkNotNull(streamSize, "streamSize must not be null");
- checkNotNull(reportedCropRegion, "reportedCropRegion must not be null");
- checkNotNull(previewCropRegion, "previewCropRegion must not be null");
-
- Rect actualCrop = new Rect(cropRegion);
-
- /*
- * Shrink requested crop region to fit inside of the active array size
- */
- if (!actualCrop.intersect(activeArray)) {
- Log.w(TAG, "getClosestAvailableZoomCrop - Crop region out of range; " +
- "setting to active array size");
- actualCrop.set(activeArray);
- }
-
- Rect previewCrop = getPreviewCropRectangleUnzoomed(activeArray, streamSize);
-
- // Make the user-requested crop region the same aspect ratio as the preview stream size
- Rect cropRegionAsPreview =
- shrinkToSameAspectRatioCentered(previewCrop, actualCrop);
-
- if (DEBUG) {
- Log.v(TAG, "getClosestAvailableZoomCrop - actualCrop = " + actualCrop);
- Log.v(TAG,
- "getClosestAvailableZoomCrop - previewCrop = " + previewCrop);
- Log.v(TAG,
- "getClosestAvailableZoomCrop - cropRegionAsPreview = " + cropRegionAsPreview);
- }
-
- /*
- * Iterate all available zoom rectangles and find the closest zoom index
- */
- Rect bestReportedCropRegion = null;
- Rect bestPreviewCropRegion = null;
- int bestZoomIndex = -1;
-
- List<Rect> availableReportedCropRegions =
- getAvailableZoomCropRectangles(params, activeArray);
- List<Rect> availablePreviewCropRegions =
- getAvailablePreviewZoomCropRectangles(params, activeArray, streamSize);
-
- if (DEBUG) {
- Log.v(TAG,
- "getClosestAvailableZoomCrop - availableReportedCropRegions = " +
- ListUtils.listToString(availableReportedCropRegions));
- Log.v(TAG,
- "getClosestAvailableZoomCrop - availablePreviewCropRegions = " +
- ListUtils.listToString(availablePreviewCropRegions));
- }
-
- if (availableReportedCropRegions.size() != availablePreviewCropRegions.size()) {
- throw new AssertionError("available reported/preview crop region size mismatch");
- }
-
- for (int i = 0; i < availableReportedCropRegions.size(); ++i) {
- Rect currentPreviewCropRegion = availablePreviewCropRegions.get(i);
- Rect currentReportedCropRegion = availableReportedCropRegions.get(i);
-
- boolean isBest;
- if (bestZoomIndex == -1) {
- isBest = true;
- } else if (currentPreviewCropRegion.width() >= cropRegionAsPreview.width() &&
- currentPreviewCropRegion.height() >= cropRegionAsPreview.height()) {
- isBest = true;
- } else {
- isBest = false;
- }
-
- // Sizes are sorted largest-to-smallest, so once the available crop is too small,
- // we the rest are too small. Furthermore, this is the final best crop,
- // since its the largest crop that still fits the requested crop
- if (isBest) {
- bestPreviewCropRegion = currentPreviewCropRegion;
- bestReportedCropRegion = currentReportedCropRegion;
- bestZoomIndex = i;
- } else {
- break;
- }
- }
-
- if (bestZoomIndex == -1) {
- // Even in the worst case, we should always at least return 0 here
- throw new AssertionError("Should've found at least one valid zoom index");
- }
-
- // Write the rectangles in-place
- reportedCropRegion.set(bestReportedCropRegion);
- previewCropRegion.set(bestPreviewCropRegion);
-
- return bestZoomIndex;
- }
-
- /**
- * Calculate the effective crop rectangle for this preview viewport;
- * assumes the preview is centered to the sensor and scaled to fit across one of the dimensions
- * without skewing.
- *
- * <p>The preview size must be a subset of the active array size; the resulting
- * rectangle will also be a subset of the active array rectangle.</p>
- *
- * <p>The unzoomed crop rectangle is calculated only.</p>
- *
- * @param activeArray active array dimensions, in sensor space
- * @param previewSize size of the preview buffer render target, in pixels (not in sensor space)
- * @return a rectangle which serves as the preview stream's effective crop region (unzoomed),
- * in sensor space
- *
- * @throws NullPointerException
- * if any of the args were {@code null}
- * @throws IllegalArgumentException
- * if {@code previewSize} is wider or taller than {@code activeArray}
- */
- private static Rect getPreviewCropRectangleUnzoomed(Rect activeArray, Size previewSize) {
- if (previewSize.getWidth() > activeArray.width()) {
- throw new IllegalArgumentException("previewSize must not be wider than activeArray");
- } else if (previewSize.getHeight() > activeArray.height()) {
- throw new IllegalArgumentException("previewSize must not be taller than activeArray");
- }
-
- float aspectRatioArray = activeArray.width() * 1.0f / activeArray.height();
- float aspectRatioPreview = previewSize.getWidth() * 1.0f / previewSize.getHeight();
-
- float cropH, cropW;
- if (Math.abs(aspectRatioPreview - aspectRatioArray) < ASPECT_RATIO_TOLERANCE) {
- cropH = activeArray.height();
- cropW = activeArray.width();
- } else if (aspectRatioPreview < aspectRatioArray) {
- // The new width must be smaller than the height, so scale the width by AR
- cropH = activeArray.height();
- cropW = cropH * aspectRatioPreview;
- } else {
- // The new height must be smaller (or equal) than the width, so scale the height by AR
- cropW = activeArray.width();
- cropH = cropW / aspectRatioPreview;
- }
-
- Matrix translateMatrix = new Matrix();
- RectF cropRect = new RectF(/*left*/0, /*top*/0, cropW, cropH);
-
- // Now center the crop rectangle so its center is in the center of the active array
- translateMatrix.setTranslate(activeArray.exactCenterX(), activeArray.exactCenterY());
- translateMatrix.postTranslate(-cropRect.centerX(), -cropRect.centerY());
-
- translateMatrix.mapRect(/*inout*/cropRect);
-
- // Round the rect corners towards the nearest integer values
- return ParamsUtils.createRect(cropRect);
- }
-
- /**
- * Shrink the {@code shrinkTarget} rectangle to snugly fit inside of {@code reference};
- * the aspect ratio of {@code shrinkTarget} will change to be the same aspect ratio as
- * {@code reference}.
- *
- * <p>At most a single dimension will scale (down). Both dimensions will never be scaled.</p>
- *
- * @param reference the rectangle whose aspect ratio will be used as the new aspect ratio
- * @param shrinkTarget the rectangle which will be scaled down to have a new aspect ratio
- *
- * @return a new rectangle, a subset of {@code shrinkTarget},
- * whose aspect ratio will match that of {@code reference}
- */
- private static Rect shrinkToSameAspectRatioCentered(Rect reference, Rect shrinkTarget) {
- float aspectRatioReference = reference.width() * 1.0f / reference.height();
- float aspectRatioShrinkTarget = shrinkTarget.width() * 1.0f / shrinkTarget.height();
-
- float cropH, cropW;
- if (aspectRatioShrinkTarget < aspectRatioReference) {
- // The new width must be smaller than the height, so scale the width by AR
- cropH = reference.height();
- cropW = cropH * aspectRatioShrinkTarget;
- } else {
- // The new height must be smaller (or equal) than the width, so scale the height by AR
- cropW = reference.width();
- cropH = cropW / aspectRatioShrinkTarget;
- }
-
- Matrix translateMatrix = new Matrix();
- RectF shrunkRect = new RectF(shrinkTarget);
-
- // Scale the rectangle down, but keep its center in the same place as before
- translateMatrix.setScale(cropW / reference.width(), cropH / reference.height(),
- shrinkTarget.exactCenterX(), shrinkTarget.exactCenterY());
-
- translateMatrix.mapRect(/*inout*/shrunkRect);
-
- return ParamsUtils.createRect(shrunkRect);
- }
-
- /**
- * Get the available 'crop' (zoom) rectangles for this camera that will be reported
- * via a {@code CaptureResult} when a zoom is requested.
- *
- * <p>These crops ignores the underlying preview buffer size, and will always be reported
- * the same values regardless of what configuration of outputs is used.</p>
- *
- * <p>When zoom is supported, this will return a list of {@code 1 + #getMaxZoom} size,
- * where each crop rectangle corresponds to a zoom ratio (and is centered at the middle).</p>
- *
- * <p>Each crop rectangle is changed to have the same aspect ratio as {@code streamSize},
- * by shrinking the rectangle if necessary.</p>
- *
- * <p>To get the reported crop region when applying a zoom to the sensor, use {@code streamSize}
- * = {@code activeArray size}.</p>
- *
- * @param params non-{@code null} camera api1 parameters
- * @param activeArray active array dimensions, in sensor space
- * @param streamSize stream size dimensions, in pixels
- *
- * @return a list of available zoom rectangles, sorted from least zoomed to most zoomed
- */
- public static List<Rect> getAvailableZoomCropRectangles(
- Camera.Parameters params, Rect activeArray) {
- checkNotNull(params, "params must not be null");
- checkNotNull(activeArray, "activeArray must not be null");
-
- return getAvailableCropRectangles(params, activeArray, ParamsUtils.createSize(activeArray));
- }
-
- /**
- * Get the available 'crop' (zoom) rectangles for this camera.
- *
- * <p>This is the effective (real) crop that is applied by the camera api1 device
- * when projecting the zoom onto the intermediate preview buffer. Use this when
- * deciding which zoom ratio to apply.</p>
- *
- * <p>When zoom is supported, this will return a list of {@code 1 + #getMaxZoom} size,
- * where each crop rectangle corresponds to a zoom ratio (and is centered at the middle).</p>
- *
- * <p>Each crop rectangle is changed to have the same aspect ratio as {@code streamSize},
- * by shrinking the rectangle if necessary.</p>
- *
- * <p>To get the reported crop region when applying a zoom to the sensor, use {@code streamSize}
- * = {@code activeArray size}.</p>
- *
- * @param params non-{@code null} camera api1 parameters
- * @param activeArray active array dimensions, in sensor space
- * @param streamSize stream size dimensions, in pixels
- *
- * @return a list of available zoom rectangles, sorted from least zoomed to most zoomed
- */
- public static List<Rect> getAvailablePreviewZoomCropRectangles(Camera.Parameters params,
- Rect activeArray, Size previewSize) {
- checkNotNull(params, "params must not be null");
- checkNotNull(activeArray, "activeArray must not be null");
- checkNotNull(previewSize, "previewSize must not be null");
-
- return getAvailableCropRectangles(params, activeArray, previewSize);
- }
-
- /**
- * Get the available 'crop' (zoom) rectangles for this camera.
- *
- * <p>When zoom is supported, this will return a list of {@code 1 + #getMaxZoom} size,
- * where each crop rectangle corresponds to a zoom ratio (and is centered at the middle).</p>
- *
- * <p>Each crop rectangle is changed to have the same aspect ratio as {@code streamSize},
- * by shrinking the rectangle if necessary.</p>
- *
- * <p>To get the reported crop region when applying a zoom to the sensor, use {@code streamSize}
- * = {@code activeArray size}.</p>
- *
- * @param params non-{@code null} camera api1 parameters
- * @param activeArray active array dimensions, in sensor space
- * @param streamSize stream size dimensions, in pixels
- *
- * @return a list of available zoom rectangles, sorted from least zoomed to most zoomed
- */
- private static List<Rect> getAvailableCropRectangles(Camera.Parameters params,
- Rect activeArray, Size streamSize) {
- checkNotNull(params, "params must not be null");
- checkNotNull(activeArray, "activeArray must not be null");
- checkNotNull(streamSize, "streamSize must not be null");
-
- // TODO: change all uses of Rect activeArray to Size activeArray,
- // since we want the crop to be active-array relative, not pixel-array relative
-
- Rect unzoomedStreamCrop = getPreviewCropRectangleUnzoomed(activeArray, streamSize);
-
- if (!params.isZoomSupported()) {
- // Trivial case: No zoom -> only support the full size as the crop region
- return new ArrayList<>(Arrays.asList(unzoomedStreamCrop));
- }
-
- List<Rect> zoomCropRectangles = new ArrayList<>(params.getMaxZoom() + 1);
- Matrix scaleMatrix = new Matrix();
- RectF scaledRect = new RectF();
-
- for (int zoom : params.getZoomRatios()) {
- float shrinkRatio = ZOOM_RATIO_MULTIPLIER * 1.0f / zoom; // normalize to 1.0 and smaller
-
- // set scaledRect to unzoomedStreamCrop
- ParamsUtils.convertRectF(unzoomedStreamCrop, /*out*/scaledRect);
-
- scaleMatrix.setScale(
- shrinkRatio, shrinkRatio,
- activeArray.exactCenterX(),
- activeArray.exactCenterY());
-
- scaleMatrix.mapRect(scaledRect);
-
- Rect intRect = ParamsUtils.createRect(scaledRect);
-
- // Round the rect corners towards the nearest integer values
- zoomCropRectangles.add(intRect);
- }
-
- return zoomCropRectangles;
- }
-
- /**
- * Get the largest possible zoom ratio (normalized to {@code 1.0f} and higher)
- * that the camera can support.
- *
- * <p>If the camera does not support zoom, it always returns {@code 1.0f}.</p>
- *
- * @param params non-{@code null} camera api1 parameters
- * @return normalized max zoom ratio, at least {@code 1.0f}
- */
- public static float getMaxZoomRatio(Camera.Parameters params) {
- if (!params.isZoomSupported()) {
- return 1.0f; // no zoom
- }
-
- List<Integer> zoomRatios = params.getZoomRatios(); // sorted smallest->largest
- int zoom = zoomRatios.get(zoomRatios.size() - 1); // largest zoom ratio
- float zoomRatio = zoom * 1.0f / ZOOM_RATIO_MULTIPLIER; // normalize to 1.0 and smaller
-
- return zoomRatio;
- }
-
- /**
- * Returns the component-wise zoom ratio (each greater or equal than {@code 1.0});
- * largest values means more zoom.
- *
- * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
- * @param cropSize size of the crop/zoom
- *
- * @return {@link SizeF} with width/height being the component-wise zoom ratio
- *
- * @throws NullPointerException if any of the args were {@code null}
- * @throws IllegalArgumentException if any component of {@code cropSize} was {@code 0}
- */
- private static SizeF getZoomRatio(Size activeArraySize, Size cropSize) {
- checkNotNull(activeArraySize, "activeArraySize must not be null");
- checkNotNull(cropSize, "cropSize must not be null");
- checkArgumentPositive(cropSize.getWidth(), "cropSize.width must be positive");
- checkArgumentPositive(cropSize.getHeight(), "cropSize.height must be positive");
-
- float zoomRatioWidth = activeArraySize.getWidth() * 1.0f / cropSize.getWidth();
- float zoomRatioHeight = activeArraySize.getHeight() * 1.0f / cropSize.getHeight();
-
- return new SizeF(zoomRatioWidth, zoomRatioHeight);
- }
-
- /**
- * Convert the user-specified crop region/zoom into zoom data; which can be used
- * to set the parameters to a specific zoom index, or to report back to the user what
- * the actual zoom was, or for other calculations requiring the current preview crop region.
- *
- * <p>None of the parameters are mutated.<p>
- *
- * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
- * @param cropRegion the user-specified crop region
- * @param zoomRatio the user-specified zoom ratio
- * @param previewSize the current preview size (in pixels)
- * @param params the current camera parameters (not mutated)
- *
- * @return the zoom index, and the effective/reported crop regions (relative to active array)
- */
- public static ZoomData convertToLegacyZoom(Rect activeArraySize, Rect
- cropRegion, Float zoomRatio, Size previewSize, Camera.Parameters params) {
- final float FLOAT_EQUAL_THRESHOLD = 0.0001f;
- if (zoomRatio != null &&
- Math.abs(1.0f - zoomRatio) > FLOAT_EQUAL_THRESHOLD) {
- // User uses CONTROL_ZOOM_RATIO to control zoom
- return convertZoomRatio(activeArraySize, zoomRatio, previewSize, params);
- }
-
- return convertScalerCropRegion(activeArraySize, cropRegion, previewSize, params);
- }
-
- /**
- * Convert the user-specified zoom ratio into zoom data; which can be used
- * to set the parameters to a specific zoom index, or to report back to the user what the
- * actual zoom was, or for other calculations requiring the current preview crop region.
- *
- * <p>None of the parameters are mutated.</p>
- *
- * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
- * @param zoomRatio the current zoom ratio
- * @param previewSize the current preview size (in pixels)
- * @param params the current camera parameters (not mutated)
- *
- * @return the zoom index, and the effective/reported crop regions (relative to active array)
- */
- public static ZoomData convertZoomRatio(Rect activeArraySize, float zoomRatio,
- Size previewSize, Camera.Parameters params) {
- if (DEBUG) {
- Log.v(TAG, "convertZoomRatio - user zoom ratio was " + zoomRatio);
- }
-
- List<Rect> availableReportedCropRegions =
- getAvailableZoomCropRectangles(params, activeArraySize);
- List<Rect> availablePreviewCropRegions =
- getAvailablePreviewZoomCropRectangles(params, activeArraySize, previewSize);
- if (availableReportedCropRegions.size() != availablePreviewCropRegions.size()) {
- throw new AssertionError("available reported/preview crop region size mismatch");
- }
-
- // Find the best matched legacy zoom ratio for the requested camera2 zoom ratio.
- int bestZoomIndex = 0;
- Rect reportedCropRegion = new Rect(availableReportedCropRegions.get(0));
- Rect previewCropRegion = new Rect(availablePreviewCropRegions.get(0));
- float reportedZoomRatio = 1.0f;
- if (params.isZoomSupported()) {
- List<Integer> zoomRatios = params.getZoomRatios();
- for (int i = 1; i < zoomRatios.size(); i++) {
- if (zoomRatio * ZOOM_RATIO_MULTIPLIER >= zoomRatios.get(i)) {
- bestZoomIndex = i;
- reportedCropRegion = availableReportedCropRegions.get(i);
- previewCropRegion = availablePreviewCropRegions.get(i);
- reportedZoomRatio = zoomRatios.get(i);
- } else {
- break;
- }
- }
- }
-
- if (DEBUG) {
- Log.v(TAG, "convertZoomRatio - zoom calculated to: " +
- "zoomIndex = " + bestZoomIndex +
- ", reported crop region = " + reportedCropRegion +
- ", preview crop region = " + previewCropRegion +
- ", reported zoom ratio = " + reportedZoomRatio);
- }
-
- return new ZoomData(bestZoomIndex, reportedCropRegion,
- previewCropRegion, reportedZoomRatio);
- }
-
- /**
- * Convert the user-specified crop region into zoom data; which can be used
- * to set the parameters to a specific zoom index, or to report back to the user what the
- * actual zoom was, or for other calculations requiring the current preview crop region.
- *
- * <p>None of the parameters are mutated.</p>
- *
- * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
- * @param cropRegion the user-specified crop region
- * @param previewSize the current preview size (in pixels)
- * @param params the current camera parameters (not mutated)
- *
- * @return the zoom index, and the effective/reported crop regions (relative to active array)
- */
- public static ZoomData convertScalerCropRegion(Rect activeArraySize, Rect
- cropRegion, Size previewSize, Camera.Parameters params) {
- Rect activeArraySizeOnly = new Rect(
- /*left*/0, /*top*/0,
- activeArraySize.width(), activeArraySize.height());
-
- Rect userCropRegion = cropRegion;
-
- if (userCropRegion == null) {
- userCropRegion = activeArraySizeOnly;
- }
-
- if (DEBUG) {
- Log.v(TAG, "convertScalerCropRegion - user crop region was " + userCropRegion);
- }
-
- final Rect reportedCropRegion = new Rect();
- final Rect previewCropRegion = new Rect();
- final int zoomIdx = ParameterUtils.getClosestAvailableZoomCrop(params, activeArraySizeOnly,
- previewSize, userCropRegion,
- /*out*/reportedCropRegion, /*out*/previewCropRegion);
- final float reportedZoomRatio = 1.0f;
-
- if (DEBUG) {
- Log.v(TAG, "convertScalerCropRegion - zoom calculated to: " +
- "zoomIndex = " + zoomIdx +
- ", reported crop region = " + reportedCropRegion +
- ", preview crop region = " + previewCropRegion +
- ", reported zoom ratio = " + reportedZoomRatio);
- }
-
- return new ZoomData(zoomIdx, previewCropRegion, reportedCropRegion, reportedZoomRatio);
- }
-
- /**
- * Calculate the actual/effective/reported normalized rectangle data from a metering
- * rectangle.
- *
- * <p>If any of the rectangles are out-of-range of their intended bounding box,
- * the {@link #RECTANGLE_EMPTY empty rectangle} is substituted instead
- * (with a weight of {@code 0}).</p>
- *
- * <p>The metering rectangle is bound by the crop region (effective/reported respectively).
- * The metering {@link Camera.Area area} is bound by {@code [-1000, 1000]}.</p>
- *
- * <p>No parameters are mutated; returns the new metering data.</p>
- *
- * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
- * @param meteringRect the user-specified metering rectangle
- * @param zoomData the calculated zoom data corresponding to this request
- *
- * @return the metering area, the reported/effective metering rectangles
- */
- public static MeteringData convertMeteringRectangleToLegacy(
- Rect activeArray, MeteringRectangle meteringRect, ZoomData zoomData) {
- Rect previewCrop = zoomData.previewCrop;
-
- float scaleW = (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN) * 1.0f /
- previewCrop.width();
- float scaleH = (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN) * 1.0f /
- previewCrop.height();
-
- Matrix transform = new Matrix();
- // Move the preview crop so that top,left is at (0,0), otherwise after scaling
- // the corner bounds will be outside of [-1000, 1000]
- transform.setTranslate(-previewCrop.left, -previewCrop.top);
- // Scale into [0, 2000] range about the center of the preview
- transform.postScale(scaleW, scaleH);
- // Move so that top left of a typical rect is at [-1000, -1000]
- transform.postTranslate(/*dx*/NORMALIZED_RECTANGLE_MIN, /*dy*/NORMALIZED_RECTANGLE_MIN);
-
- /*
- * Calculate the preview metering region (effective), and the camera1 api
- * normalized metering region.
- */
- Rect normalizedRegionUnbounded = ParamsUtils.mapRect(transform, meteringRect.getRect());
-
- /*
- * Try to intersect normalized area with [-1000, 1000] rectangle; otherwise
- * it's completely out of range
- */
- Rect normalizedIntersected = new Rect(normalizedRegionUnbounded);
-
- Camera.Area meteringArea;
- if (!normalizedIntersected.intersect(NORMALIZED_RECTANGLE_DEFAULT)) {
- Log.w(TAG,
- "convertMeteringRectangleToLegacy - metering rectangle too small, " +
- "no metering will be done");
- normalizedIntersected.set(RECTANGLE_EMPTY);
- meteringArea = new Camera.Area(RECTANGLE_EMPTY,
- MeteringRectangle.METERING_WEIGHT_DONT_CARE);
- } else {
- meteringArea = new Camera.Area(normalizedIntersected,
- meteringRect.getMeteringWeight());
- }
-
- /*
- * Calculate effective preview metering region
- */
- Rect previewMetering = meteringRect.getRect();
- if (!previewMetering.intersect(previewCrop)) {
- previewMetering.set(RECTANGLE_EMPTY);
- }
-
- /*
- * Calculate effective reported metering region
- * - Transform the calculated metering area back into active array space
- * - Clip it to be a subset of the reported crop region
- */
- Rect reportedMetering;
- {
- Camera.Area normalizedAreaUnbounded = new Camera.Area(
- normalizedRegionUnbounded, meteringRect.getMeteringWeight());
- WeightedRectangle reportedMeteringRect = convertCameraAreaToActiveArrayRectangle(
- activeArray, zoomData, normalizedAreaUnbounded, /*usePreviewCrop*/false);
- reportedMetering = reportedMeteringRect.rect;
- }
-
- if (DEBUG) {
- Log.v(TAG, String.format(
- "convertMeteringRectangleToLegacy - activeArray = %s, meteringRect = %s, " +
- "previewCrop = %s, meteringArea = %s, previewMetering = %s, " +
- "reportedMetering = %s, normalizedRegionUnbounded = %s",
- activeArray, meteringRect,
- previewCrop, stringFromArea(meteringArea), previewMetering,
- reportedMetering, normalizedRegionUnbounded));
- }
-
- return new MeteringData(meteringArea, previewMetering, reportedMetering);
- }
-
- /**
- * Convert the normalized camera area from [-1000, 1000] coordinate space
- * into the active array-based coordinate space.
- *
- * <p>Values out of range are clipped to be within the resulting (reported) crop
- * region. It is possible to have values larger than the preview crop.</p>
- *
- * <p>Weights out of range of [0, 1000] are clipped to be within the range.</p>
- *
- * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
- * @param zoomData the calculated zoom data corresponding to this request
- * @param area the normalized camera area
- *
- * @return the weighed rectangle in active array coordinate space, with the weight
- */
- public static WeightedRectangle convertCameraAreaToActiveArrayRectangle(
- Rect activeArray, ZoomData zoomData, Camera.Area area) {
- return convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, area,
- /*usePreviewCrop*/true);
- }
-
- /**
- * Convert an api1 face into an active-array based api2 face.
- *
- * <p>Out-of-ranges scores and ids will be clipped to be within range (with a warning).</p>
- *
- * @param face a non-{@code null} api1 face
- * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
- * @param zoomData the calculated zoom data corresponding to this request
- *
- * @return a non-{@code null} api2 face
- *
- * @throws NullPointerException if the {@code face} was {@code null}
- */
- public static Face convertFaceFromLegacy(Camera.Face face, Rect activeArray,
- ZoomData zoomData) {
- checkNotNull(face, "face must not be null");
-
- Face api2Face;
-
- Camera.Area fakeArea = new Camera.Area(face.rect, /*weight*/1);
-
- WeightedRectangle faceRect =
- convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, fakeArea);
-
- Point leftEye = face.leftEye, rightEye = face.rightEye, mouth = face.mouth;
- if (leftEye != null && rightEye != null && mouth != null && leftEye.x != -2000 &&
- leftEye.y != -2000 && rightEye.x != -2000 && rightEye.y != -2000 &&
- mouth.x != -2000 && mouth.y != -2000) {
- leftEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
- leftEye, /*usePreviewCrop*/true);
- rightEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
- leftEye, /*usePreviewCrop*/true);
- mouth = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
- leftEye, /*usePreviewCrop*/true);
-
- api2Face = faceRect.toFace(face.id, leftEye, rightEye, mouth);
- } else {
- api2Face = faceRect.toFace();
- }
-
- return api2Face;
- }
-
- private static Point convertCameraPointToActiveArrayPoint(
- Rect activeArray, ZoomData zoomData, Point point, boolean usePreviewCrop) {
- Rect pointedRect = new Rect(point.x, point.y, point.x, point.y);
- Camera.Area pointedArea = new Area(pointedRect, /*weight*/1);
-
- WeightedRectangle adjustedRect =
- convertCameraAreaToActiveArrayRectangle(activeArray,
- zoomData, pointedArea, usePreviewCrop);
-
- Point transformedPoint = new Point(adjustedRect.rect.left, adjustedRect.rect.top);
-
- return transformedPoint;
- }
-
- private static WeightedRectangle convertCameraAreaToActiveArrayRectangle(
- Rect activeArray, ZoomData zoomData, Camera.Area area, boolean usePreviewCrop) {
- Rect previewCrop = zoomData.previewCrop;
- Rect reportedCrop = zoomData.reportedCrop;
-
- float scaleW = previewCrop.width() * 1.0f /
- (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN);
- float scaleH = previewCrop.height() * 1.0f /
- (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN);
-
- /*
- * Calculate the reported metering region from the non-intersected normalized region
- * by scaling and translating back into active array-relative coordinates.
- */
- Matrix transform = new Matrix();
-
- // Move top left from (-1000, -1000) to (0, 0)
- transform.setTranslate(/*dx*/NORMALIZED_RECTANGLE_MAX, /*dy*/NORMALIZED_RECTANGLE_MAX);
-
- // Scale from [0, 2000] back into the preview rectangle
- transform.postScale(scaleW, scaleH);
-
- // Move the rect so that the [-1000,-1000] point ends up at the preview [left, top]
- transform.postTranslate(previewCrop.left, previewCrop.top);
-
- Rect cropToIntersectAgainst = usePreviewCrop ? previewCrop : reportedCrop;
-
- // Now apply the transformation backwards to get the reported metering region
- Rect reportedMetering = ParamsUtils.mapRect(transform, area.rect);
- // Intersect it with the crop region, to avoid reporting out-of-bounds
- // metering regions
- if (!reportedMetering.intersect(cropToIntersectAgainst)) {
- reportedMetering.set(RECTANGLE_EMPTY);
- }
-
- int weight = area.weight;
- if (weight < MeteringRectangle.METERING_WEIGHT_MIN) {
- Log.w(TAG,
- "convertCameraAreaToMeteringRectangle - rectangle "
- + stringFromArea(area) + " has too small weight, clip to 0");
- weight = 0;
- }
-
- return new WeightedRectangle(reportedMetering, area.weight);
- }
-
-
- private ParameterUtils() {
- throw new AssertionError();
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/PerfMeasurement.java b/core/java/android/hardware/camera2/legacy/PerfMeasurement.java
deleted file mode 100644
index 53278c7e4f97..000000000000
--- a/core/java/android/hardware/camera2/legacy/PerfMeasurement.java
+++ /dev/null
@@ -1,308 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.os.SystemClock;
-import android.util.Log;
-
-import java.io.BufferedWriter;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.Queue;
-
-/**
- * GPU and CPU performance measurement for the legacy implementation.
- *
- * <p>Measures CPU and GPU processing duration for a set of operations, and dumps
- * the results into a file.</p>
- *
- * <p>Rough usage:
- * <pre>
- * {@code
- * <set up workload>
- * <start long-running workload>
- * mPerfMeasurement.startTimer();
- * ...render a frame...
- * mPerfMeasurement.stopTimer();
- * <end workload>
- * mPerfMeasurement.dumpPerformanceData("/sdcard/my_data.txt");
- * }
- * </pre>
- * </p>
- *
- * <p>All calls to this object must be made within the same thread, and the same GL context.
- * PerfMeasurement cannot be used outside of a GL context. The only exception is
- * dumpPerformanceData, which can be called outside of a valid GL context.</p>
- */
-class PerfMeasurement {
- private static final String TAG = "PerfMeasurement";
-
- public static final int DEFAULT_MAX_QUERIES = 3;
-
- private final long mNativeContext;
-
- private int mCompletedQueryCount = 0;
-
- /**
- * Values for completed measurements
- */
- private ArrayList<Long> mCollectedGpuDurations = new ArrayList<>();
- private ArrayList<Long> mCollectedCpuDurations = new ArrayList<>();
- private ArrayList<Long> mCollectedTimestamps = new ArrayList<>();
-
- /**
- * Values for in-progress measurements (waiting for async GPU results)
- */
- private Queue<Long> mTimestampQueue = new LinkedList<>();
- private Queue<Long> mCpuDurationsQueue = new LinkedList<>();
-
- private long mStartTimeNs;
-
- /**
- * The value returned by {@link #nativeGetNextGlDuration} if no new timing
- * measurement is available since the last call.
- */
- private static final long NO_DURATION_YET = -1l;
-
- /**
- * The value returned by {@link #nativeGetNextGlDuration} if timing failed for
- * the next timing interval
- */
- private static final long FAILED_TIMING = -2l;
-
- /**
- * Create a performance measurement object with a maximum of {@value #DEFAULT_MAX_QUERIES}
- * in-progess queries.
- */
- public PerfMeasurement() {
- mNativeContext = nativeCreateContext(DEFAULT_MAX_QUERIES);
- }
-
- /**
- * Create a performance measurement object with maxQueries as the maximum number of
- * in-progress queries.
- *
- * @param maxQueries maximum in-progress queries, must be larger than 0.
- * @throws IllegalArgumentException if maxQueries is less than 1.
- */
- public PerfMeasurement(int maxQueries) {
- if (maxQueries < 1) throw new IllegalArgumentException("maxQueries is less than 1");
- mNativeContext = nativeCreateContext(maxQueries);
- }
-
- /**
- * Returns true if the Gl timing methods will work, false otherwise.
- *
- * <p>Must be called within a valid GL context.</p>
- */
- public static boolean isGlTimingSupported() {
- return nativeQuerySupport();
- }
-
- /**
- * Dump collected data to file, and clear the stored data.
- *
- * <p>
- * Format is a simple csv-like text file with a header,
- * followed by a 3-column list of values in nanoseconds:
- * <pre>
- * timestamp gpu_duration cpu_duration
- * <long> <long> <long>
- * <long> <long> <long>
- * <long> <long> <long>
- * ....
- * </pre>
- * </p>
- */
- public void dumpPerformanceData(String path) {
- try (BufferedWriter dump = new BufferedWriter(new FileWriter(path))) {
- dump.write("timestamp gpu_duration cpu_duration\n");
- for (int i = 0; i < mCollectedGpuDurations.size(); i++) {
- dump.write(String.format("%d %d %d\n",
- mCollectedTimestamps.get(i),
- mCollectedGpuDurations.get(i),
- mCollectedCpuDurations.get(i)));
- }
- mCollectedTimestamps.clear();
- mCollectedGpuDurations.clear();
- mCollectedCpuDurations.clear();
- } catch (IOException e) {
- Log.e(TAG, "Error writing data dump to " + path + ":" + e);
- }
- }
-
- /**
- * Start a GPU/CPU timing measurement.
- *
- * <p>Call before starting a rendering pass. Only one timing measurement can be active at once,
- * so {@link #stopTimer} must be called before the next call to this method.</p>
- *
- * @throws IllegalStateException if the maximum number of queries are in progress already,
- * or the method is called multiple times in a row, or there is
- * a GPU error.
- */
- public void startTimer() {
- nativeStartGlTimer(mNativeContext);
- mStartTimeNs = SystemClock.elapsedRealtimeNanos();
- }
-
- /**
- * Finish a GPU/CPU timing measurement.
- *
- * <p>Call after finishing all the drawing for a rendering pass. Only one timing measurement can
- * be active at once, so {@link #startTimer} must be called before the next call to this
- * method.</p>
- *
- * @throws IllegalStateException if no GL timer is currently started, or there is a GPU
- * error.
- */
- public void stopTimer() {
- // Complete CPU timing
- long endTimeNs = SystemClock.elapsedRealtimeNanos();
- mCpuDurationsQueue.add(endTimeNs - mStartTimeNs);
- // Complete GL timing
- nativeStopGlTimer(mNativeContext);
-
- // Poll to see if GL timing results have arrived; if so
- // store the results for a frame
- long duration = getNextGlDuration();
- if (duration > 0) {
- mCollectedGpuDurations.add(duration);
- mCollectedTimestamps.add(mTimestampQueue.isEmpty() ?
- NO_DURATION_YET : mTimestampQueue.poll());
- mCollectedCpuDurations.add(mCpuDurationsQueue.isEmpty() ?
- NO_DURATION_YET : mCpuDurationsQueue.poll());
- }
- if (duration == FAILED_TIMING) {
- // Discard timestamp and CPU measurement since GPU measurement failed
- if (!mTimestampQueue.isEmpty()) {
- mTimestampQueue.poll();
- }
- if (!mCpuDurationsQueue.isEmpty()) {
- mCpuDurationsQueue.poll();
- }
- }
- }
-
- /**
- * Add a timestamp to a timing measurement. These are queued up and matched to completed
- * workload measurements as they become available.
- */
- public void addTimestamp(long timestamp) {
- mTimestampQueue.add(timestamp);
- }
-
- /**
- * Get the next available GPU timing measurement.
- *
- * <p>Since the GPU works asynchronously, the results of a single start/stopGlTimer measurement
- * will only be available some time after the {@link #stopTimer} call is made. Poll this method
- * until the result becomes available. If multiple start/endTimer measurements are made in a
- * row, the results will be available in FIFO order.</p>
- *
- * @return The measured duration of the GPU workload for the next pending query, or
- * {@link #NO_DURATION_YET} if no queries are pending or the next pending query has not
- * yet finished, or {@link #FAILED_TIMING} if the GPU was unable to complete the
- * measurement.
- *
- * @throws IllegalStateException If there is a GPU error.
- *
- */
- private long getNextGlDuration() {
- long duration = nativeGetNextGlDuration(mNativeContext);
- if (duration > 0) {
- mCompletedQueryCount++;
- }
- return duration;
- }
-
- /**
- * Returns the number of measurements so far that returned a valid duration
- * measurement.
- */
- public int getCompletedQueryCount() {
- return mCompletedQueryCount;
- }
-
- @Override
- protected void finalize() {
- nativeDeleteContext(mNativeContext);
- }
-
- /**
- * Create a native performance measurement context.
- *
- * @param maxQueryCount maximum in-progress queries; must be >= 1.
- */
- private static native long nativeCreateContext(int maxQueryCount);
-
- /**
- * Delete the native context.
- *
- * <p>Not safe to call more than once.</p>
- */
- private static native void nativeDeleteContext(long contextHandle);
-
- /**
- * Query whether the relevant Gl extensions are available for Gl timing
- */
- private static native boolean nativeQuerySupport();
-
- /**
- * Start a GL timing section.
- *
- * <p>All GL commands between this method and the next {@link #nativeEndGlTimer} will be
- * included in the timing.</p>
- *
- * <p>Must be called from the same thread as calls to {@link #nativeEndGlTimer} and
- * {@link #nativeGetNextGlDuration}.</p>
- *
- * @throws IllegalStateException if a GL error occurs or start is called repeatedly.
- */
- protected static native void nativeStartGlTimer(long contextHandle);
-
- /**
- * Finish a GL timing section.
- *
- * <p>Some time after this call returns, the time the GPU took to
- * execute all work submitted between the latest {@link #nativeStartGlTimer} and
- * this call, will become available from calling {@link #nativeGetNextGlDuration}.</p>
- *
- * <p>Must be called from the same thread as calls to {@link #nativeStartGlTimer} and
- * {@link #nativeGetNextGlDuration}.</p>
- *
- * @throws IllegalStateException if a GL error occurs or stop is called before start
- */
- protected static native void nativeStopGlTimer(long contextHandle);
-
- /**
- * Get the next available GL duration measurement, in nanoseconds.
- *
- * <p>Must be called from the same thread as calls to {@link #nativeStartGlTimer} and
- * {@link #nativeEndGlTimer}.</p>
- *
- * @return the next GL duration measurement, or {@link #NO_DURATION_YET} if
- * no new measurement is available, or {@link #FAILED_TIMING} if timing
- * failed for the next duration measurement.
- * @throws IllegalStateException if a GL error occurs
- */
- protected static native long nativeGetNextGlDuration(long contextHandle);
-
-
-}
diff --git a/core/java/android/hardware/camera2/legacy/RequestHandlerThread.java b/core/java/android/hardware/camera2/legacy/RequestHandlerThread.java
deleted file mode 100644
index e19ebf2d616b..000000000000
--- a/core/java/android/hardware/camera2/legacy/RequestHandlerThread.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.os.ConditionVariable;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.os.Looper;
-import android.os.MessageQueue;
-
-public class RequestHandlerThread extends HandlerThread {
-
- /**
- * Ensure that the MessageQueue's idle handler gets run by poking the message queue;
- * normally if the message queue is already idle, the idle handler won't get invoked.
- *
- * <p>Users of this handler thread should ignore this message.</p>
- */
- public final static int MSG_POKE_IDLE_HANDLER = -1;
-
- private final ConditionVariable mStarted = new ConditionVariable(false);
- private final ConditionVariable mIdle = new ConditionVariable(true);
- private Handler.Callback mCallback;
- private volatile Handler mHandler;
-
- public RequestHandlerThread(String name, Handler.Callback callback) {
- super(name, Thread.MAX_PRIORITY);
- mCallback = callback;
- }
-
- @Override
- protected void onLooperPrepared() {
- mHandler = new Handler(getLooper(), mCallback);
- mStarted.open();
- }
-
- // Blocks until thread has started
- public void waitUntilStarted() {
- mStarted.block();
- }
-
- // May return null if the handler is not set up yet.
- public Handler getHandler() {
- return mHandler;
- }
-
- // Blocks until thread has started
- public Handler waitAndGetHandler() {
- waitUntilStarted();
- return getHandler();
- }
-
- // Atomic multi-type message existence check
- public boolean hasAnyMessages(int[] what) {
- synchronized (mHandler.getLooper().getQueue()) {
- for (int i : what) {
- if (mHandler.hasMessages(i)) {
- return true;
- }
- }
- }
- return false;
- }
-
- // Atomic multi-type message remove
- public void removeMessages(int[] what) {
- synchronized (mHandler.getLooper().getQueue()) {
- for (int i : what) {
- mHandler.removeMessages(i);
- }
- }
- }
-
- private final MessageQueue.IdleHandler mIdleHandler = new MessageQueue.IdleHandler() {
- @Override
- public boolean queueIdle() {
- mIdle.open();
- return false;
- }
- };
-
- // Blocks until thread is idling
- public void waitUntilIdle() {
- Handler handler = waitAndGetHandler();
- MessageQueue queue = handler.getLooper().getQueue();
- if (queue.isIdle()) {
- return;
- }
- mIdle.close();
- queue.addIdleHandler(mIdleHandler);
- // Ensure that the idle handler gets run even if the looper already went idle
- handler.sendEmptyMessage(MSG_POKE_IDLE_HANDLER);
- if (queue.isIdle()) {
- return;
- }
- mIdle.block();
- }
-
-}
diff --git a/core/java/android/hardware/camera2/legacy/RequestHolder.java b/core/java/android/hardware/camera2/legacy/RequestHolder.java
deleted file mode 100644
index 98b761b8a04f..000000000000
--- a/core/java/android/hardware/camera2/legacy/RequestHolder.java
+++ /dev/null
@@ -1,283 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.camera2.CaptureRequest;
-import android.util.Log;
-import android.view.Surface;
-
-import java.util.Collection;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Semi-immutable container for a single capture request and associated information,
- * the only mutable characteristic of this container is whether or not is has been
- * marked as "failed" using {@code #failRequest}.
- */
-public class RequestHolder {
- private static final String TAG = "RequestHolder";
-
- private final boolean mRepeating;
- private final CaptureRequest mRequest;
- private final int mRequestId;
- private final int mSubsequeceId;
- private final long mFrameNumber;
- private final int mNumJpegTargets;
- private final int mNumPreviewTargets;
- private volatile boolean mFailed = false;
- private boolean mOutputAbandoned = false;
-
- private final Collection<Long> mJpegSurfaceIds;
-
- /**
- * A builder class for {@link RequestHolder} objects.
- *
- * <p>
- * This allows per-request queries to be cached for repeating {@link CaptureRequest} objects.
- * </p>
- */
- public final static class Builder {
- private final int mRequestId;
- private final int mSubsequenceId;
- private final CaptureRequest mRequest;
- private final boolean mRepeating;
- private final int mNumJpegTargets;
- private final int mNumPreviewTargets;
- private final Collection<Long> mJpegSurfaceIds;
-
- /**
- * Construct a new {@link Builder} to generate {@link RequestHolder} objects.
- *
- * @param requestId the ID to set in {@link RequestHolder} objects.
- * @param subsequenceId the sequence ID to set in {@link RequestHolder} objects.
- * @param request the original {@link CaptureRequest} to set in {@link RequestHolder}
- * objects.
- * @param repeating {@code true} if the request is repeating.
- */
- public Builder(int requestId, int subsequenceId, CaptureRequest request,
- boolean repeating, Collection<Long> jpegSurfaceIds) {
- checkNotNull(request, "request must not be null");
- mRequestId = requestId;
- mSubsequenceId = subsequenceId;
- mRequest = request;
- mRepeating = repeating;
- mJpegSurfaceIds = jpegSurfaceIds;
- mNumJpegTargets = numJpegTargets(mRequest);
- mNumPreviewTargets = numPreviewTargets(mRequest);
- }
-
- /**
- * Returns true if the given surface requires jpeg buffers.
- *
- * @param s a {@link android.view.Surface} to check.
- * @return true if the surface requires a jpeg buffer.
- */
- private boolean jpegType(Surface s)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- return LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds);
- }
-
- /**
- * Returns true if the given surface requires non-jpeg buffer types.
- *
- * <p>
- * "Jpeg buffer" refers to the buffers returned in the jpeg
- * {@link android.hardware.Camera.PictureCallback}. Non-jpeg buffers are created using a tee
- * of the preview stream drawn to the surface
- * set via {@link android.hardware.Camera#setPreviewDisplay(android.view.SurfaceHolder)} or
- * equivalent methods.
- * </p>
- * @param s a {@link android.view.Surface} to check.
- * @return true if the surface requires a non-jpeg buffer type.
- */
- private boolean previewType(Surface s)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- return !jpegType(s);
- }
-
- /**
- * Returns the number of surfaces targeted by the request that require jpeg buffers.
- */
- private int numJpegTargets(CaptureRequest request) {
- int count = 0;
- for (Surface s : request.getTargets()) {
- try {
- if (jpegType(s)) {
- ++count;
- }
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.d(TAG, "Surface abandoned, skipping...", e);
- }
- }
- return count;
- }
-
- /**
- * Returns the number of surfaces targeted by the request that require non-jpeg buffers.
- */
- private int numPreviewTargets(CaptureRequest request) {
- int count = 0;
- for (Surface s : request.getTargets()) {
- try {
- if (previewType(s)) {
- ++count;
- }
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.d(TAG, "Surface abandoned, skipping...", e);
- }
- }
- return count;
- }
-
- /**
- * Build a new {@link RequestHolder} using with parameters generated from this
- * {@link Builder}.
- *
- * @param frameNumber the {@code framenumber} to generate in the {@link RequestHolder}.
- * @return a {@link RequestHolder} constructed with the {@link Builder}'s parameters.
- */
- public RequestHolder build(long frameNumber) {
- return new RequestHolder(mRequestId, mSubsequenceId, mRequest, mRepeating, frameNumber,
- mNumJpegTargets, mNumPreviewTargets, mJpegSurfaceIds);
- }
- }
-
- private RequestHolder(int requestId, int subsequenceId, CaptureRequest request,
- boolean repeating, long frameNumber, int numJpegTargets,
- int numPreviewTargets, Collection<Long> jpegSurfaceIds) {
- mRepeating = repeating;
- mRequest = request;
- mRequestId = requestId;
- mSubsequeceId = subsequenceId;
- mFrameNumber = frameNumber;
- mNumJpegTargets = numJpegTargets;
- mNumPreviewTargets = numPreviewTargets;
- mJpegSurfaceIds = jpegSurfaceIds;
- }
-
- /**
- * Return the request id for the contained {@link CaptureRequest}.
- */
- public int getRequestId() {
- return mRequestId;
- }
-
- /**
- * Returns true if the contained request is repeating.
- */
- public boolean isRepeating() {
- return mRepeating;
- }
-
- /**
- * Return the subsequence id for this request.
- */
- public int getSubsequeceId() {
- return mSubsequeceId;
- }
-
- /**
- * Returns the frame number for this request.
- */
- public long getFrameNumber() {
- return mFrameNumber;
- }
-
- /**
- * Returns the contained request.
- */
- public CaptureRequest getRequest() {
- return mRequest;
- }
-
- /**
- * Returns a read-only collection of the surfaces targeted by the contained request.
- */
- public Collection<Surface> getHolderTargets() {
- return getRequest().getTargets();
- }
-
- /**
- * Returns true if any of the surfaces targeted by the contained request require jpeg buffers.
- */
- public boolean hasJpegTargets() {
- return mNumJpegTargets > 0;
- }
-
- /**
- * Returns true if any of the surfaces targeted by the contained request require a
- * non-jpeg buffer type.
- */
- public boolean hasPreviewTargets(){
- return mNumPreviewTargets > 0;
- }
-
- /**
- * Return the number of jpeg-type surfaces targeted by this request.
- */
- public int numJpegTargets() {
- return mNumJpegTargets;
- }
-
- /**
- * Return the number of non-jpeg-type surfaces targeted by this request.
- */
- public int numPreviewTargets() {
- return mNumPreviewTargets;
- }
-
- /**
- * Returns true if the given surface requires jpeg buffers.
- *
- * @param s a {@link android.view.Surface} to check.
- * @return true if the surface requires a jpeg buffer.
- */
- public boolean jpegType(Surface s)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- return LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds);
- }
-
- /**
- * Mark this request as failed.
- */
- public void failRequest() {
- Log.w(TAG, "Capture failed for request: " + getRequestId());
- mFailed = true;
- }
-
- /**
- * Return {@code true} if this request failed.
- */
- public boolean requestFailed() {
- return mFailed;
- }
-
- /**
- * Mark at least one of this request's output surfaces is abandoned.
- */
- public void setOutputAbandoned() {
- mOutputAbandoned = true;
- }
-
- /**
- * Return if any of this request's output surface is abandoned.
- */
- public boolean isOutputAbandoned() {
- return mOutputAbandoned;
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/RequestQueue.java b/core/java/android/hardware/camera2/legacy/RequestQueue.java
deleted file mode 100644
index fb444022c6db..000000000000
--- a/core/java/android/hardware/camera2/legacy/RequestQueue.java
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.hardware.camera2.legacy;
-
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.utils.SubmitInfo;
-import android.util.Log;
-
-import java.util.ArrayDeque;
-import java.util.List;
-
-/**
- * A queue of bursts of requests.
- *
- * <p>This queue maintains the count of frames that have been produced, and is thread safe.</p>
- */
-public class RequestQueue {
- private static final String TAG = "RequestQueue";
-
- public static final long INVALID_FRAME = -1;
-
- private BurstHolder mRepeatingRequest = null;
- private final ArrayDeque<BurstHolder> mRequestQueue = new ArrayDeque<BurstHolder>();
-
- private long mCurrentFrameNumber = 0;
- private long mCurrentRepeatingFrameNumber = INVALID_FRAME;
- private int mCurrentRequestId = 0;
- private final List<Long> mJpegSurfaceIds;
-
- public final class RequestQueueEntry {
- private final BurstHolder mBurstHolder;
- private final Long mFrameNumber;
- private final boolean mQueueEmpty;
-
- public BurstHolder getBurstHolder() {
- return mBurstHolder;
- }
- public Long getFrameNumber() {
- return mFrameNumber;
- }
- public boolean isQueueEmpty() {
- return mQueueEmpty;
- }
-
- public RequestQueueEntry(BurstHolder burstHolder, Long frameNumber, boolean queueEmpty) {
- mBurstHolder = burstHolder;
- mFrameNumber = frameNumber;
- mQueueEmpty = queueEmpty;
- }
- }
-
- public RequestQueue(List<Long> jpegSurfaceIds) {
- mJpegSurfaceIds = jpegSurfaceIds;
- }
-
- /**
- * Return and remove the next burst on the queue.
- *
- * <p>If a repeating burst is returned, it will not be removed.</p>
- *
- * @return an entry containing the next burst, the current frame number, and flag about whether
- * request queue becomes empty. Null if no burst exists.
- */
- public synchronized RequestQueueEntry getNext() {
- BurstHolder next = mRequestQueue.poll();
- boolean queueEmptied = (next != null && mRequestQueue.size() == 0);
- if (next == null && mRepeatingRequest != null) {
- next = mRepeatingRequest;
- mCurrentRepeatingFrameNumber = mCurrentFrameNumber +
- next.getNumberOfRequests();
- }
-
- if (next == null) {
- return null;
- }
-
- RequestQueueEntry ret = new RequestQueueEntry(next, mCurrentFrameNumber, queueEmptied);
- mCurrentFrameNumber += next.getNumberOfRequests();
- return ret;
- }
-
- /**
- * Cancel a repeating request.
- *
- * @param requestId the id of the repeating request to cancel.
- * @return the last frame to be returned from the HAL for the given repeating request, or
- * {@code INVALID_FRAME} if none exists.
- */
- public synchronized long stopRepeating(int requestId) {
- long ret = INVALID_FRAME;
- if (mRepeatingRequest != null && mRepeatingRequest.getRequestId() == requestId) {
- mRepeatingRequest = null;
- ret = (mCurrentRepeatingFrameNumber == INVALID_FRAME) ? INVALID_FRAME :
- mCurrentRepeatingFrameNumber - 1;
- mCurrentRepeatingFrameNumber = INVALID_FRAME;
- Log.i(TAG, "Repeating capture request cancelled.");
- } else {
- Log.e(TAG, "cancel failed: no repeating request exists for request id: " + requestId);
- }
- return ret;
- }
-
- /**
- * Cancel a repeating request.
- *
- * @return the last frame to be returned from the HAL for the given repeating request, or
- * {@code INVALID_FRAME} if none exists.
- */
- public synchronized long stopRepeating() {
- if (mRepeatingRequest == null) {
- Log.e(TAG, "cancel failed: no repeating request exists.");
- return INVALID_FRAME;
- }
- return stopRepeating(mRepeatingRequest.getRequestId());
- }
-
- /**
- * Add a the given burst to the queue.
- *
- * <p>If the burst is repeating, replace the current repeating burst.</p>
- *
- * @param requests the burst of requests to add to the queue.
- * @param repeating true if the burst is repeating.
- * @return the submission info, including the new request id, and the last frame number, which
- * contains either the frame number of the last frame that will be returned for this request,
- * or the frame number of the last frame that will be returned for the current repeating
- * request if this burst is set to be repeating.
- */
- public synchronized SubmitInfo submit(CaptureRequest[] requests, boolean repeating) {
- int requestId = mCurrentRequestId++;
- BurstHolder burst = new BurstHolder(requestId, repeating, requests, mJpegSurfaceIds);
- long lastFrame = INVALID_FRAME;
- if (burst.isRepeating()) {
- Log.i(TAG, "Repeating capture request set.");
- if (mRepeatingRequest != null) {
- lastFrame = (mCurrentRepeatingFrameNumber == INVALID_FRAME) ? INVALID_FRAME :
- mCurrentRepeatingFrameNumber - 1;
- }
- mCurrentRepeatingFrameNumber = INVALID_FRAME;
- mRepeatingRequest = burst;
- } else {
- mRequestQueue.offer(burst);
- lastFrame = calculateLastFrame(burst.getRequestId());
- }
- SubmitInfo info = new SubmitInfo(requestId, lastFrame);
- return info;
- }
-
- private long calculateLastFrame(int requestId) {
- long total = mCurrentFrameNumber;
- for (BurstHolder b : mRequestQueue) {
- total += b.getNumberOfRequests();
- if (b.getRequestId() == requestId) {
- return total - 1;
- }
- }
- throw new IllegalStateException(
- "At least one request must be in the queue to calculate frame number");
- }
-
-}
diff --git a/core/java/android/hardware/camera2/legacy/RequestThreadManager.java b/core/java/android/hardware/camera2/legacy/RequestThreadManager.java
deleted file mode 100644
index f9a5029bffaa..000000000000
--- a/core/java/android/hardware/camera2/legacy/RequestThreadManager.java
+++ /dev/null
@@ -1,1126 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.legacy;
-
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.impl.CameraDeviceImpl;
-import android.hardware.camera2.utils.SubmitInfo;
-import android.hardware.camera2.utils.SizeAreaComparator;
-import android.hardware.camera2.impl.CameraMetadataNative;
-import android.os.ConditionVariable;
-import android.os.Handler;
-import android.os.Message;
-import android.os.SystemClock;
-import android.util.Log;
-import android.util.MutableLong;
-import android.util.Pair;
-import android.util.Size;
-import android.view.Surface;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * This class executes requests to the {@link Camera}.
- *
- * <p>
- * The main components of this class are:
- * - A message queue of requests to the {@link Camera}.
- * - A thread that consumes requests to the {@link Camera} and executes them.
- * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
- * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations.
- * </p>
- */
-@SuppressWarnings("deprecation")
-public class RequestThreadManager {
- private final String TAG;
- private final int mCameraId;
- private final RequestHandlerThread mRequestThread;
-
- private static final boolean DEBUG = false;
- // For slightly more spammy messages that will get repeated every frame
- private static final boolean VERBOSE = false;
- private Camera mCamera;
- private final CameraCharacteristics mCharacteristics;
-
- private final CameraDeviceState mDeviceState;
- private final CaptureCollector mCaptureCollector;
- private final LegacyFocusStateMapper mFocusStateMapper;
- private final LegacyFaceDetectMapper mFaceDetectMapper;
-
- private static final int MSG_CONFIGURE_OUTPUTS = 1;
- private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
- private static final int MSG_CLEANUP = 3;
-
- private static final int MAX_IN_FLIGHT_REQUESTS = 2;
-
- private static final int PREVIEW_FRAME_TIMEOUT = 1000; // ms
- private static final int JPEG_FRAME_TIMEOUT = 4000; // ms (same as CTS for API2)
- private static final int REQUEST_COMPLETE_TIMEOUT = JPEG_FRAME_TIMEOUT;
-
- private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
- private boolean mPreviewRunning = false;
-
- private final List<Surface> mPreviewOutputs = new ArrayList<>();
- private final List<Surface> mCallbackOutputs = new ArrayList<>();
- private GLThreadManager mGLThreadManager;
- private SurfaceTexture mPreviewTexture;
- private Camera.Parameters mParams;
-
- private final List<Long> mJpegSurfaceIds = new ArrayList<>();
-
- private Size mIntermediateBufferSize;
-
- private final RequestQueue mRequestQueue = new RequestQueue(mJpegSurfaceIds);
- private LegacyRequest mLastRequest = null;
- private SurfaceTexture mDummyTexture;
- private Surface mDummySurface;
-
- private final Object mIdleLock = new Object();
- private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
- private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");
-
- private final AtomicBoolean mQuit = new AtomicBoolean(false);
-
- // Stuff JPEGs into HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers to get around SW write
- // limitations for (b/17379185).
- private static final boolean USE_BLOB_FORMAT_OVERRIDE = true;
-
- /**
- * Container object for Configure messages.
- */
- private static class ConfigureHolder {
- public final ConditionVariable condition;
- public final Collection<Pair<Surface, Size>> surfaces;
-
- public ConfigureHolder(ConditionVariable condition, Collection<Pair<Surface,
- Size>> surfaces) {
- this.condition = condition;
- this.surfaces = surfaces;
- }
- }
-
- /**
- * Counter class used to calculate and log the current FPS of frame production.
- */
- public static class FpsCounter {
- //TODO: Hook this up to SystTrace?
- private static final String TAG = "FpsCounter";
- private int mFrameCount = 0;
- private long mLastTime = 0;
- private long mLastPrintTime = 0;
- private double mLastFps = 0;
- private final String mStreamType;
- private static final long NANO_PER_SECOND = 1000000000; //ns
-
- public FpsCounter(String streamType) {
- mStreamType = streamType;
- }
-
- public synchronized void countFrame() {
- mFrameCount++;
- long nextTime = SystemClock.elapsedRealtimeNanos();
- if (mLastTime == 0) {
- mLastTime = nextTime;
- }
- if (nextTime > mLastTime + NANO_PER_SECOND) {
- long elapsed = nextTime - mLastTime;
- mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
- mFrameCount = 0;
- mLastTime = nextTime;
- }
- }
-
- public synchronized double checkFps() {
- return mLastFps;
- }
-
- public synchronized void staggeredLog() {
- if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
- mLastPrintTime = mLastTime;
- Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps );
- }
- }
-
- public synchronized void countAndLog() {
- countFrame();
- staggeredLog();
- }
- }
- /**
- * Fake preview for jpeg captures when there is no active preview
- */
- private void createDummySurface() {
- if (mDummyTexture == null || mDummySurface == null) {
- mDummyTexture = new SurfaceTexture(/*ignored*/0);
- // TODO: use smallest default sizes
- mDummyTexture.setDefaultBufferSize(640, 480);
- mDummySurface = new Surface(mDummyTexture);
- }
- }
-
- private final Camera.ErrorCallback mErrorCallback = new Camera.ErrorCallback() {
- @Override
- public void onError(int i, Camera camera) {
- switch(i) {
- case Camera.CAMERA_ERROR_EVICTED: {
- flush();
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISCONNECTED);
- } break;
- case Camera.CAMERA_ERROR_DISABLED: {
- flush();
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISABLED);
- } break;
- default: {
- Log.e(TAG, "Received error " + i + " from the Camera1 ErrorCallback");
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- } break;
- }
- }
- };
-
- private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);
-
- private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
- @Override
- public void onPictureTaken(byte[] data, Camera camera) {
- Log.i(TAG, "Received jpeg.");
- Pair<RequestHolder, Long> captureInfo = mCaptureCollector.jpegProduced();
- if (captureInfo == null || captureInfo.first == null) {
- Log.e(TAG, "Dropping jpeg frame.");
- return;
- }
- RequestHolder holder = captureInfo.first;
- long timestamp = captureInfo.second;
- for (Surface s : holder.getHolderTargets()) {
- try {
- if (LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds)) {
- Log.i(TAG, "Producing jpeg buffer...");
-
- int totalSize = data.length + LegacyCameraDevice.nativeGetJpegFooterSize();
- totalSize = (totalSize + 3) & ~0x3; // round up to nearest octonibble
- LegacyCameraDevice.setNextTimestamp(s, timestamp);
-
- if (USE_BLOB_FORMAT_OVERRIDE) {
- // Override to RGBA_8888 format.
- LegacyCameraDevice.setSurfaceFormat(s,
- LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
-
- int dimen = (int) Math.ceil(Math.sqrt(totalSize));
- dimen = (dimen + 0xf) & ~0xf; // round up to nearest multiple of 16
- LegacyCameraDevice.setSurfaceDimens(s, dimen, dimen);
- LegacyCameraDevice.produceFrame(s, data, dimen, dimen,
- CameraMetadataNative.NATIVE_JPEG_FORMAT);
- } else {
- LegacyCameraDevice.setSurfaceDimens(s, totalSize, /*height*/1);
- LegacyCameraDevice.produceFrame(s, data, totalSize, /*height*/1,
- CameraMetadataNative.NATIVE_JPEG_FORMAT);
- }
- }
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, dropping frame. ", e);
- }
- }
-
- mReceivedJpeg.open();
- }
- };
-
- private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
- @Override
- public void onShutter() {
- mCaptureCollector.jpegCaptured(SystemClock.elapsedRealtimeNanos());
- }
- };
-
- private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
- new SurfaceTexture.OnFrameAvailableListener() {
- @Override
- public void onFrameAvailable(SurfaceTexture surfaceTexture) {
- if (DEBUG) {
- mPrevCounter.countAndLog();
- }
- mGLThreadManager.queueNewFrame();
- }
- };
-
- private void stopPreview() {
- if (VERBOSE) {
- Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning);
- }
- if (mPreviewRunning) {
- mCamera.stopPreview();
- mPreviewRunning = false;
- }
- }
-
- private void startPreview() {
- if (VERBOSE) {
- Log.v(TAG, "startPreview - preview running? " + mPreviewRunning);
- }
- if (!mPreviewRunning) {
- // XX: CameraClient:;startPreview is not getting called after a stop
- mCamera.startPreview();
- mPreviewRunning = true;
- }
- }
-
- private void doJpegCapturePrepare(RequestHolder request) throws IOException {
- if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning);
-
- if (!mPreviewRunning) {
- if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");
-
- createDummySurface();
- mCamera.setPreviewTexture(mDummyTexture);
- startPreview();
- }
- }
-
- private void doJpegCapture(RequestHolder request) {
- if (DEBUG) Log.d(TAG, "doJpegCapturePrepare");
-
- mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
- mPreviewRunning = false;
- }
-
- private void doPreviewCapture(RequestHolder request) throws IOException {
- if (VERBOSE) {
- Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning);
- }
-
- if (mPreviewRunning) {
- return; // Already running
- }
-
- if (mPreviewTexture == null) {
- throw new IllegalStateException(
- "Preview capture called with no preview surfaces configured.");
- }
-
- mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
- mIntermediateBufferSize.getHeight());
- mCamera.setPreviewTexture(mPreviewTexture);
-
- startPreview();
- }
-
- private void disconnectCallbackSurfaces() {
- for (Surface s : mCallbackOutputs) {
- try {
- LegacyCameraDevice.disconnectSurface(s);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.d(TAG, "Surface abandoned, skipping...", e);
- }
- }
- }
-
- private void configureOutputs(Collection<Pair<Surface, Size>> outputs) {
- if (DEBUG) {
- String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
- Log.d(TAG, "configureOutputs with " + outputsStr);
- }
-
- try {
- stopPreview();
- } catch (RuntimeException e) {
- Log.e(TAG, "Received device exception in configure call: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- return;
- }
-
- /*
- * Try to release the previous preview's surface texture earlier if we end up
- * using a different one; this also reduces the likelihood of getting into a deadlock
- * when disconnecting from the old previous texture at a later time.
- */
- try {
- mCamera.setPreviewTexture(/*surfaceTexture*/null);
- } catch (IOException e) {
- Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
- } catch (RuntimeException e) {
- Log.e(TAG, "Received device exception in configure call: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- return;
- }
-
- if (mGLThreadManager != null) {
- mGLThreadManager.waitUntilStarted();
- mGLThreadManager.ignoreNewFrames();
- mGLThreadManager.waitUntilIdle();
- }
- resetJpegSurfaceFormats(mCallbackOutputs);
- disconnectCallbackSurfaces();
-
- mPreviewOutputs.clear();
- mCallbackOutputs.clear();
- mJpegSurfaceIds.clear();
- mPreviewTexture = null;
-
- List<Size> previewOutputSizes = new ArrayList<>();
- List<Size> callbackOutputSizes = new ArrayList<>();
-
- int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
- int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
- if (outputs != null) {
- for (Pair<Surface, Size> outPair : outputs) {
- Surface s = outPair.first;
- Size outSize = outPair.second;
- try {
- int format = LegacyCameraDevice.detectSurfaceType(s);
- LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
- switch (format) {
- case CameraMetadataNative.NATIVE_JPEG_FORMAT:
- if (USE_BLOB_FORMAT_OVERRIDE) {
- // Override to RGBA_8888 format.
- LegacyCameraDevice.setSurfaceFormat(s,
- LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
- }
- mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
- mCallbackOutputs.add(s);
- callbackOutputSizes.add(outSize);
-
- // LegacyCameraDevice is the producer of JPEG output surfaces
- // so LegacyCameraDevice needs to connect to the surfaces.
- LegacyCameraDevice.connectSurface(s);
- break;
- default:
- LegacyCameraDevice.setScalingMode(s, LegacyCameraDevice.
- NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- mPreviewOutputs.add(s);
- previewOutputSizes.add(outSize);
- break;
- }
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, skipping...", e);
- }
- }
- }
- try {
- mParams = mCamera.getParameters();
- } catch (RuntimeException e) {
- Log.e(TAG, "Received device exception: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- return;
- }
-
- List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
- int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
- if (DEBUG) {
- Log.d(TAG, "doPreviewCapture - Selected range [" +
- bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
- bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
- }
- mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
- bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
-
- Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs,
- callbackOutputSizes, mParams);
-
- if (previewOutputSizes.size() > 0) {
-
- Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);
-
- // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
- Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);
-
- Size chosenJpegDimen = (smallestSupportedJpegSize != null) ? smallestSupportedJpegSize
- : largestJpegDimen;
-
- List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(
- mParams.getSupportedPreviewSizes());
-
- // Use smallest preview dimension with same aspect ratio as sensor that is >= than all
- // of the configured output dimensions. If none exists, fall back to using the largest
- // supported preview size.
- long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
- Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
- for (Size s : supportedPreviewSizes) {
- long currArea = s.getWidth() * s.getHeight();
- long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
- if (checkAspectRatiosMatch(chosenJpegDimen, s) && (currArea < bestArea &&
- currArea >= largestOutputArea)) {
- bestPreviewDimen = s;
- }
- }
-
- mIntermediateBufferSize = bestPreviewDimen;
- mParams.setPreviewSize(mIntermediateBufferSize.getWidth(),
- mIntermediateBufferSize.getHeight());
-
- if (DEBUG) {
- Log.d(TAG, "Intermediate buffer selected with dimens: " +
- bestPreviewDimen.toString());
- }
- } else {
- mIntermediateBufferSize = null;
- if (DEBUG) {
- Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
- }
- }
-
- if (smallestSupportedJpegSize != null) {
- /*
- * Set takePicture size to the smallest supported JPEG size large enough
- * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
- */
-
- Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
- mParams.setPictureSize(
- smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
- }
-
- // TODO: Detect and optimize single-output paths here to skip stream teeing.
- if (mGLThreadManager == null) {
- mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
- mGLThreadManager.start();
- }
- mGLThreadManager.waitUntilStarted();
- List<Pair<Surface, Size>> previews = new ArrayList<>();
- Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
- for (Surface p : mPreviewOutputs) {
- previews.add(new Pair<>(p, previewSizeIter.next()));
- }
- mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);
-
- for (Surface p : mPreviewOutputs) {
- try {
- LegacyCameraDevice.setSurfaceOrientation(p, facing, orientation);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.e(TAG, "Surface abandoned, skipping setSurfaceOrientation()", e);
- }
- }
-
- mGLThreadManager.allowNewFrames();
- mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
- if (mPreviewTexture != null) {
- mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
- }
-
- try {
- mCamera.setParameters(mParams);
- } catch (RuntimeException e) {
- Log.e(TAG, "Received device exception while configuring: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
-
- }
- }
-
- private void resetJpegSurfaceFormats(Collection<Surface> surfaces) {
- if (!USE_BLOB_FORMAT_OVERRIDE || surfaces == null) {
- return;
- }
- for(Surface s : surfaces) {
- if (s == null || !s.isValid()) {
- Log.w(TAG, "Jpeg surface is invalid, skipping...");
- continue;
- }
- try {
- LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, skipping...", e);
- }
- }
- }
-
- /**
- * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
- * than all of the configured {@code JPEG} outputs (by both width and height).
- *
- * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
- * still satisfies the above constraint.</p>
- *
- * <p>As a result, the returned size is guaranteed to be usable without needing
- * to upscale any of the outputs. If only one {@code JPEG} surface is used,
- * then no scaling/cropping is necessary between the taken picture and
- * the {@code JPEG} output surface.</p>
- *
- * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
- * @param params api1 parameters (used for reading only)
- *
- * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
- * {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
- * surfaces.
- */
- private Size calculatePictureSize( List<Surface> callbackOutputs,
- List<Size> callbackSizes, Camera.Parameters params) {
- /*
- * Find the largest JPEG size (if any), from the configured outputs:
- * - the api1 picture size should be set to the smallest legal size that's at least as large
- * as the largest configured JPEG size
- */
- if (callbackOutputs.size() != callbackSizes.size()) {
- throw new IllegalStateException("Input collections must be same length");
- }
- List<Size> configuredJpegSizes = new ArrayList<>();
- Iterator<Size> sizeIterator = callbackSizes.iterator();
- for (Surface callbackSurface : callbackOutputs) {
- Size jpegSize = sizeIterator.next();
- if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
- continue; // Ignore non-JPEG callback formats
- }
-
- configuredJpegSizes.add(jpegSize);
- }
- if (!configuredJpegSizes.isEmpty()) {
- /*
- * Find the largest configured JPEG width, and height, independently
- * of the rest.
- *
- * The rest of the JPEG streams can be cropped out of this smallest bounding
- * rectangle.
- */
- int maxConfiguredJpegWidth = -1;
- int maxConfiguredJpegHeight = -1;
- for (Size jpegSize : configuredJpegSizes) {
- maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
- jpegSize.getWidth() : maxConfiguredJpegWidth;
- maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
- jpegSize.getHeight() : maxConfiguredJpegHeight;
- }
- Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
-
- List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(
- params.getSupportedPictureSizes());
-
- /*
- * Find the smallest supported JPEG size that can fit the smallest bounding
- * rectangle for the configured JPEG sizes.
- */
- List<Size> candidateSupportedJpegSizes = new ArrayList<>();
- for (Size supportedJpegSize : supportedJpegSizes) {
- if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
- supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
- candidateSupportedJpegSizes.add(supportedJpegSize);
- }
- }
-
- if (candidateSupportedJpegSizes.isEmpty()) {
- throw new AssertionError(
- "Could not find any supported JPEG sizes large enough to fit " +
- smallestBoundJpegSize);
- }
-
- Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
- new SizeAreaComparator());
-
- if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
- Log.w(TAG,
- String.format(
- "configureOutputs - Will need to crop picture %s into "
- + "smallest bound size %s",
- smallestSupportedJpegSize, smallestBoundJpegSize));
- }
-
- return smallestSupportedJpegSize;
- }
-
- return null;
- }
-
- private static boolean checkAspectRatiosMatch(Size a, Size b) {
- float aAspect = a.getWidth() / (float) a.getHeight();
- float bAspect = b.getWidth() / (float) b.getHeight();
-
- return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
- }
-
- // Calculate the highest FPS range supported
- private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
- if (frameRates.size() == 0) {
- Log.e(TAG, "No supported frame rates returned!");
- return null;
- }
-
- int bestMin = 0;
- int bestMax = 0;
- int bestIndex = 0;
- int index = 0;
- for (int[] rate : frameRates) {
- int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
- int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
- if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
- bestMin = minFps;
- bestMax = maxFps;
- bestIndex = index;
- }
- index++;
- }
-
- return frameRates.get(bestIndex);
- }
-
- private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
- private boolean mCleanup = false;
- private final LegacyResultMapper mMapper = new LegacyResultMapper();
-
- @Override
- public boolean handleMessage(Message msg) {
- if (mCleanup) {
- return true;
- }
-
- if (DEBUG) {
- Log.d(TAG, "Request thread handling message:" + msg.what);
- }
- long startTime = 0;
- if (DEBUG) {
- startTime = SystemClock.elapsedRealtimeNanos();
- }
- switch (msg.what) {
- case MSG_CONFIGURE_OUTPUTS:
- ConfigureHolder config = (ConfigureHolder) msg.obj;
- int sizes = config.surfaces != null ? config.surfaces.size() : 0;
- Log.i(TAG, "Configure outputs: " + sizes + " surfaces configured.");
-
- try {
- boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
- TimeUnit.MILLISECONDS);
- if (!success) {
- Log.e(TAG, "Timed out while queueing configure request.");
- mCaptureCollector.failAll();
- }
- } catch (InterruptedException e) {
- Log.e(TAG, "Interrupted while waiting for requests to complete.");
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- break;
- }
-
- configureOutputs(config.surfaces);
- config.condition.open();
- if (DEBUG) {
- long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
- Log.d(TAG, "Configure took " + totalTime + " ns");
- }
- break;
- case MSG_SUBMIT_CAPTURE_REQUEST:
- Handler handler = RequestThreadManager.this.mRequestThread.getHandler();
- boolean anyRequestOutputAbandoned = false;
-
- // Get the next burst from the request queue.
- RequestQueue.RequestQueueEntry nextBurst = mRequestQueue.getNext();
-
- if (nextBurst == null) {
- // If there are no further requests queued, wait for any currently executing
- // requests to complete, then switch to idle state.
- try {
- boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
- TimeUnit.MILLISECONDS);
- if (!success) {
- Log.e(TAG,
- "Timed out while waiting for prior requests to complete.");
- mCaptureCollector.failAll();
- }
- } catch (InterruptedException e) {
- Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- break;
- }
-
- synchronized (mIdleLock) {
- // Retry the the request queue.
- nextBurst = mRequestQueue.getNext();
-
- // If we still have no queued requests, go idle.
- if (nextBurst == null) {
- mDeviceState.setIdle();
- break;
- }
- }
- }
-
- if (nextBurst != null) {
- // Queue another capture if we did not get the last burst.
- handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
-
- // Check whether capture queue becomes empty
- if (nextBurst.isQueueEmpty()) {
- mDeviceState.setRequestQueueEmpty();
- }
- }
-
- // Complete each request in the burst
- BurstHolder burstHolder = nextBurst.getBurstHolder();
- List<RequestHolder> requests =
- burstHolder.produceRequestHolders(nextBurst.getFrameNumber());
- for (RequestHolder holder : requests) {
- CaptureRequest request = holder.getRequest();
-
- boolean paramsChanged = false;
-
- // Only update parameters if the request has changed
- if (mLastRequest == null || mLastRequest.captureRequest != request) {
-
- // The intermediate buffer is sometimes null, but we always need
- // the Camera1 API configured preview size
- Size previewSize = ParameterUtils.convertSize(mParams.getPreviewSize());
-
- LegacyRequest legacyRequest = new LegacyRequest(mCharacteristics,
- request, previewSize, mParams); // params are copied
-
-
- // Parameters are mutated as a side-effect
- LegacyMetadataMapper.convertRequestMetadata(/*inout*/legacyRequest);
-
- // If the parameters have changed, set them in the Camera1 API.
- if (!mParams.same(legacyRequest.parameters)) {
- try {
- mCamera.setParameters(legacyRequest.parameters);
- } catch (RuntimeException e) {
- // If setting the parameters failed, report a request error to
- // the camera client, and skip any further work for this request
- Log.e(TAG, "Exception while setting camera parameters: ", e);
- holder.failRequest();
- mDeviceState.setCaptureStart(holder, /*timestamp*/0,
- CameraDeviceImpl.CameraDeviceCallbacks.
- ERROR_CAMERA_REQUEST);
- continue;
- }
- paramsChanged = true;
- mParams = legacyRequest.parameters;
- }
-
- mLastRequest = legacyRequest;
- }
-
- try {
- boolean success = mCaptureCollector.queueRequest(holder,
- mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);
-
- if (!success) {
- // Report a request error if we timed out while queuing this.
- Log.e(TAG, "Timed out while queueing capture request.");
- holder.failRequest();
- mDeviceState.setCaptureStart(holder, /*timestamp*/0,
- CameraDeviceImpl.CameraDeviceCallbacks.
- ERROR_CAMERA_REQUEST);
- continue;
- }
-
- // Starting the preview needs to happen before enabling
- // face detection or auto focus
- if (holder.hasPreviewTargets()) {
- doPreviewCapture(holder);
- }
- if (holder.hasJpegTargets()) {
- while(!mCaptureCollector.waitForPreviewsEmpty(PREVIEW_FRAME_TIMEOUT,
- TimeUnit.MILLISECONDS)) {
- // Fail preview requests until the queue is empty.
- Log.e(TAG, "Timed out while waiting for preview requests to " +
- "complete.");
- mCaptureCollector.failNextPreview();
- }
- mReceivedJpeg.close();
- doJpegCapturePrepare(holder);
- }
-
- /*
- * Do all the actions that require a preview to have been started
- */
-
- // Toggle face detection on/off
- // - do this before AF to give AF a chance to use faces
- mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);
-
- // Unconditionally process AF triggers, since they're non-idempotent
- // - must be done after setting the most-up-to-date AF mode
- mFocusStateMapper.processRequestTriggers(request, mParams);
-
- if (holder.hasJpegTargets()) {
- doJpegCapture(holder);
- if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
- Log.e(TAG, "Hit timeout for jpeg callback!");
- mCaptureCollector.failNextJpeg();
- }
- }
-
- } catch (IOException e) {
- Log.e(TAG, "Received device exception during capture call: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- break;
- } catch (InterruptedException e) {
- Log.e(TAG, "Interrupted during capture: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- break;
- } catch (RuntimeException e) {
- Log.e(TAG, "Received device exception during capture call: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- break;
- }
-
- if (paramsChanged) {
- if (DEBUG) {
- Log.d(TAG, "Params changed -- getting new Parameters from HAL.");
- }
- try {
- mParams = mCamera.getParameters();
- } catch (RuntimeException e) {
- Log.e(TAG, "Received device exception: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- break;
- }
-
- // Update parameters to the latest that we think the camera is using
- mLastRequest.setParameters(mParams);
- }
-
- MutableLong timestampMutable = new MutableLong(/*value*/0L);
- try {
- boolean success = mCaptureCollector.waitForRequestCompleted(holder,
- REQUEST_COMPLETE_TIMEOUT, TimeUnit.MILLISECONDS,
- /*out*/timestampMutable);
-
- if (!success) {
- Log.e(TAG, "Timed out while waiting for request to complete.");
- mCaptureCollector.failAll();
- }
- } catch (InterruptedException e) {
- Log.e(TAG, "Interrupted waiting for request completion: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- break;
- }
-
- CameraMetadataNative result = mMapper.cachedConvertResultMetadata(
- mLastRequest, timestampMutable.value);
- /*
- * Order matters: The default result mapper is state-less; the
- * other mappers carry state and may override keys set by the default
- * mapper with their own values.
- */
-
- // Update AF state
- mFocusStateMapper.mapResultTriggers(result);
- // Update face-related results
- mFaceDetectMapper.mapResultFaces(result, mLastRequest);
-
- if (!holder.requestFailed()) {
- mDeviceState.setCaptureResult(holder, result);
- }
-
- if (holder.isOutputAbandoned()) {
- anyRequestOutputAbandoned = true;
- }
- }
-
- // Stop the repeating request if any of its output surfaces is abandoned.
- if (anyRequestOutputAbandoned && burstHolder.isRepeating()) {
- long lastFrameNumber = cancelRepeating(burstHolder.getRequestId());
- if (DEBUG) {
- Log.d(TAG, "Stopped repeating request. Last frame number is " +
- lastFrameNumber);
- }
- if (lastFrameNumber != RequestQueue.INVALID_FRAME) {
- mDeviceState.setRepeatingRequestError(lastFrameNumber,
- burstHolder.getRequestId());
- } else {
- Log.e(TAG, "Repeating request id: " + burstHolder.getRequestId() +
- " already canceled!");
- }
- }
-
- if (DEBUG) {
- long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
- Log.d(TAG, "Capture request took " + totalTime + " ns");
- mRequestCounter.countAndLog();
- }
- break;
- case MSG_CLEANUP:
- mCleanup = true;
- try {
- boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
- TimeUnit.MILLISECONDS);
- if (!success) {
- Log.e(TAG, "Timed out while queueing cleanup request.");
- mCaptureCollector.failAll();
- }
- } catch (InterruptedException e) {
- Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
- mDeviceState.setError(
- CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
- }
- if (mGLThreadManager != null) {
- mGLThreadManager.quit();
- mGLThreadManager = null;
- }
- disconnectCallbackSurfaces();
- if (mCamera != null) {
- mCamera.release();
- mCamera = null;
- }
- break;
- case RequestHandlerThread.MSG_POKE_IDLE_HANDLER:
- // OK: Ignore message.
- break;
- default:
- throw new AssertionError("Unhandled message " + msg.what +
- " on RequestThread.");
- }
- return true;
- }
- };
-
- /**
- * Create a new RequestThreadManager.
- *
- * @param cameraId the id of the camera to use.
- * @param camera an open camera object. The RequestThreadManager takes ownership of this camera
- * object, and is responsible for closing it.
- * @param characteristics the static camera characteristics corresponding to this camera device
- * @param deviceState a {@link CameraDeviceState} state machine.
- */
- public RequestThreadManager(int cameraId, Camera camera, CameraCharacteristics characteristics,
- CameraDeviceState deviceState) {
- mCamera = checkNotNull(camera, "camera must not be null");
- mCameraId = cameraId;
- mCharacteristics = checkNotNull(characteristics, "characteristics must not be null");
- String name = String.format("RequestThread-%d", cameraId);
- TAG = name;
- mDeviceState = checkNotNull(deviceState, "deviceState must not be null");
- mFocusStateMapper = new LegacyFocusStateMapper(mCamera);
- mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics);
- mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState);
- mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
- mCamera.setDetailedErrorCallback(mErrorCallback);
- }
-
- /**
- * Start the request thread.
- */
- public void start() {
- mRequestThread.start();
- }
-
- /**
- * Flush any pending requests.
- *
- * @return the last frame number.
- */
- public long flush() {
- Log.i(TAG, "Flushing all pending requests.");
- long lastFrame = mRequestQueue.stopRepeating();
- mCaptureCollector.failAll();
- return lastFrame;
- }
-
- /**
- * Quit the request thread, and clean up everything.
- */
- public void quit() {
- if (!mQuit.getAndSet(true)) { // Avoid sending messages on dead thread's handler.
- Handler handler = mRequestThread.waitAndGetHandler();
- handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
- mRequestThread.quitSafely();
- try {
- mRequestThread.join();
- } catch (InterruptedException e) {
- Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
- mRequestThread.getName(), mRequestThread.getId()));
- }
- }
- }
-
- /**
- * Submit the given burst of requests to be captured.
- *
- * <p>If the burst is repeating, replace the current repeating burst.</p>
- *
- * @param requests the burst of requests to add to the queue.
- * @param repeating true if the burst is repeating.
- * @return the submission info, including the new request id, and the last frame number, which
- * contains either the frame number of the last frame that will be returned for this request,
- * or the frame number of the last frame that will be returned for the current repeating
- * request if this burst is set to be repeating.
- */
- public SubmitInfo submitCaptureRequests(CaptureRequest[] requests, boolean repeating) {
- Handler handler = mRequestThread.waitAndGetHandler();
- SubmitInfo info;
- synchronized (mIdleLock) {
- info = mRequestQueue.submit(requests, repeating);
- handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
- }
- return info;
- }
-
- /**
- * Cancel a repeating request.
- *
- * @param requestId the id of the repeating request to cancel.
- * @return the last frame to be returned from the HAL for the given repeating request, or
- * {@code INVALID_FRAME} if none exists.
- */
- public long cancelRepeating(int requestId) {
- return mRequestQueue.stopRepeating(requestId);
- }
-
- /**
- * Configure with the current list of output Surfaces.
- *
- * <p>
- * This operation blocks until the configuration is complete.
- * </p>
- *
- * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p>
- *
- * @param outputs a {@link java.util.Collection} of outputs to configure.
- */
- public void configure(Collection<Pair<Surface, Size>> outputs) {
- Handler handler = mRequestThread.waitAndGetHandler();
- final ConditionVariable condition = new ConditionVariable(/*closed*/false);
- ConfigureHolder holder = new ConfigureHolder(condition, outputs);
- handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
- condition.block();
- }
-
- public void setAudioRestriction(int mode) {
- if (mCamera != null) {
- mCamera.setAudioRestriction(mode);
- }
- throw new IllegalStateException("Camera has been released!");
- }
-
- public int getAudioRestriction() {
- if (mCamera != null) {
- return mCamera.getAudioRestriction();
- }
- throw new IllegalStateException("Camera has been released!");
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/SizeAreaComparator.java b/core/java/android/hardware/camera2/legacy/SizeAreaComparator.java
deleted file mode 100644
index 75a5bab94867..000000000000
--- a/core/java/android/hardware/camera2/legacy/SizeAreaComparator.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License
- */
-
-package android.hardware.camera2.legacy;
-
-import android.hardware.Camera;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Comparator for api1 {@link Camera.Size} objects by the area.
- *
- * <p>This comparator totally orders by rectangle area. Tie-breaks on width.</p>
- */
-@SuppressWarnings("deprecation")
-public class SizeAreaComparator implements Comparator<Camera.Size> {
- /**
- * {@inheritDoc}
- */
- @Override
- public int compare(Camera.Size size, Camera.Size size2) {
- checkNotNull(size, "size must not be null");
- checkNotNull(size2, "size2 must not be null");
-
- if (size.equals(size2)) {
- return 0;
- }
-
- long width = size.width;
- long width2 = size2.width;
- long area = width * size.height;
- long area2 = width2 * size2.height;
-
- if (area == area2) {
- return (width > width2) ? 1 : -1;
- }
-
- return (area > area2) ? 1 : -1;
- }
-
- /**
- * Get the largest api1 {@code Camera.Size} from the list by comparing each size's area
- * by each other using {@link SizeAreaComparator}.
- *
- * @param sizes a non-{@code null} list of non-{@code null} sizes
- * @return a non-{@code null} size
- *
- * @throws NullPointerException if {@code sizes} or any elements in it were {@code null}
- */
- public static Camera.Size findLargestByArea(List<Camera.Size> sizes) {
- checkNotNull(sizes, "sizes must not be null");
-
- return Collections.max(sizes, new SizeAreaComparator());
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java b/core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java
deleted file mode 100644
index a4c65aeb1050..000000000000
--- a/core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java
+++ /dev/null
@@ -1,882 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.hardware.camera2.legacy;
-
-import android.graphics.ImageFormat;
-import android.graphics.RectF;
-import android.graphics.SurfaceTexture;
-import android.hardware.camera2.CameraCharacteristics;
-import android.os.Environment;
-import android.opengl.EGL14;
-import android.opengl.EGLConfig;
-import android.opengl.EGLContext;
-import android.opengl.EGLDisplay;
-import android.opengl.EGLSurface;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
-import android.opengl.Matrix;
-import android.util.Log;
-import android.util.Pair;
-import android.util.Size;
-import android.view.Surface;
-import android.os.SystemProperties;
-
-import java.io.File;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.FloatBuffer;
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * A renderer class that manages the GL state, and can draw a frame into a set of output
- * {@link Surface}s.
- */
-public class SurfaceTextureRenderer {
- private static final String TAG = SurfaceTextureRenderer.class.getSimpleName();
- private static final boolean DEBUG = false;
- private static final int EGL_RECORDABLE_ANDROID = 0x3142; // from EGL/eglext.h
- private static final int GL_MATRIX_SIZE = 16;
- private static final int VERTEX_POS_SIZE = 3;
- private static final int VERTEX_UV_SIZE = 2;
- private static final int EGL_COLOR_BITLENGTH = 8;
- private static final int GLES_VERSION = 2;
- private static final int PBUFFER_PIXEL_BYTES = 4;
-
- private static final int FLIP_TYPE_NONE = 0;
- private static final int FLIP_TYPE_HORIZONTAL = 1;
- private static final int FLIP_TYPE_VERTICAL = 2;
- private static final int FLIP_TYPE_BOTH = FLIP_TYPE_HORIZONTAL | FLIP_TYPE_VERTICAL;
-
- private static final DateTimeFormatter LOG_NAME_TIME_FORMATTER =
- DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss", Locale.ROOT);
-
- private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
- private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
- private EGLConfig mConfigs;
-
- private class EGLSurfaceHolder {
- Surface surface;
- EGLSurface eglSurface;
- int width;
- int height;
- }
-
- private List<EGLSurfaceHolder> mSurfaces = new ArrayList<EGLSurfaceHolder>();
- private List<EGLSurfaceHolder> mConversionSurfaces = new ArrayList<EGLSurfaceHolder>();
-
- private ByteBuffer mPBufferPixels;
-
- // Hold this to avoid GC
- private volatile SurfaceTexture mSurfaceTexture;
-
- private static final int FLOAT_SIZE_BYTES = 4;
- private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
- private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
- private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
-
- // Sampling is mirrored across the horizontal axis
- private static final float[] sHorizontalFlipTriangleVertices = {
- // X, Y, Z, U, V
- -1.0f, -1.0f, 0, 1.f, 0.f,
- 1.0f, -1.0f, 0, 0.f, 0.f,
- -1.0f, 1.0f, 0, 1.f, 1.f,
- 1.0f, 1.0f, 0, 0.f, 1.f,
- };
-
- // Sampling is mirrored across the vertical axis
- private static final float[] sVerticalFlipTriangleVertices = {
- // X, Y, Z, U, V
- -1.0f, -1.0f, 0, 0.f, 1.f,
- 1.0f, -1.0f, 0, 1.f, 1.f,
- -1.0f, 1.0f, 0, 0.f, 0.f,
- 1.0f, 1.0f, 0, 1.f, 0.f,
- };
-
- // Sampling is mirrored across the both axes
- private static final float[] sBothFlipTriangleVertices = {
- // X, Y, Z, U, V
- -1.0f, -1.0f, 0, 1.f, 1.f,
- 1.0f, -1.0f, 0, 0.f, 1.f,
- -1.0f, 1.0f, 0, 1.f, 0.f,
- 1.0f, 1.0f, 0, 0.f, 0.f,
- };
-
- // Sampling is 1:1 for a straight copy for the back camera
- private static final float[] sRegularTriangleVertices = {
- // X, Y, Z, U, V
- -1.0f, -1.0f, 0, 0.f, 0.f,
- 1.0f, -1.0f, 0, 1.f, 0.f,
- -1.0f, 1.0f, 0, 0.f, 1.f,
- 1.0f, 1.0f, 0, 1.f, 1.f,
- };
-
- private FloatBuffer mRegularTriangleVertices;
- private FloatBuffer mHorizontalFlipTriangleVertices;
- private FloatBuffer mVerticalFlipTriangleVertices;
- private FloatBuffer mBothFlipTriangleVertices;
- private final int mFacing;
-
- /**
- * As used in this file, this vertex shader maps a unit square to the view, and
- * tells the fragment shader to interpolate over it. Each surface pixel position
- * is mapped to a 2D homogeneous texture coordinate of the form (s, t, 0, 1) with
- * s and t in the inclusive range [0, 1], and the matrix from
- * {@link SurfaceTexture#getTransformMatrix(float[])} is used to map this
- * coordinate to a texture location.
- */
- private static final String VERTEX_SHADER =
- "uniform mat4 uMVPMatrix;\n" +
- "uniform mat4 uSTMatrix;\n" +
- "attribute vec4 aPosition;\n" +
- "attribute vec4 aTextureCoord;\n" +
- "varying vec2 vTextureCoord;\n" +
- "void main() {\n" +
- " gl_Position = uMVPMatrix * aPosition;\n" +
- " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
- "}\n";
-
- /**
- * This fragment shader simply draws the color in the 2D texture at
- * the location from the {@code VERTEX_SHADER}.
- */
- private static final String FRAGMENT_SHADER =
- "#extension GL_OES_EGL_image_external : require\n" +
- "precision mediump float;\n" +
- "varying vec2 vTextureCoord;\n" +
- "uniform samplerExternalOES sTexture;\n" +
- "void main() {\n" +
- " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
- "}\n";
-
- private float[] mMVPMatrix = new float[GL_MATRIX_SIZE];
- private float[] mSTMatrix = new float[GL_MATRIX_SIZE];
-
- private int mProgram;
- private int mTextureID = 0;
- private int muMVPMatrixHandle;
- private int muSTMatrixHandle;
- private int maPositionHandle;
- private int maTextureHandle;
-
- private PerfMeasurement mPerfMeasurer = null;
- private static final String LEGACY_PERF_PROPERTY = "persist.camera.legacy_perf";
-
- public SurfaceTextureRenderer(int facing) {
- mFacing = facing;
-
- mRegularTriangleVertices = ByteBuffer.allocateDirect(sRegularTriangleVertices.length *
- FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
- mRegularTriangleVertices.put(sRegularTriangleVertices).position(0);
-
- mHorizontalFlipTriangleVertices = ByteBuffer.allocateDirect(
- sHorizontalFlipTriangleVertices.length * FLOAT_SIZE_BYTES).
- order(ByteOrder.nativeOrder()).asFloatBuffer();
- mHorizontalFlipTriangleVertices.put(sHorizontalFlipTriangleVertices).position(0);
-
- mVerticalFlipTriangleVertices = ByteBuffer.allocateDirect(
- sVerticalFlipTriangleVertices.length * FLOAT_SIZE_BYTES).
- order(ByteOrder.nativeOrder()).asFloatBuffer();
- mVerticalFlipTriangleVertices.put(sVerticalFlipTriangleVertices).position(0);
-
- mBothFlipTriangleVertices = ByteBuffer.allocateDirect(
- sBothFlipTriangleVertices.length * FLOAT_SIZE_BYTES).
- order(ByteOrder.nativeOrder()).asFloatBuffer();
- mBothFlipTriangleVertices.put(sBothFlipTriangleVertices).position(0);
-
- Matrix.setIdentityM(mSTMatrix, 0);
- }
-
- private int loadShader(int shaderType, String source) {
- int shader = GLES20.glCreateShader(shaderType);
- checkGlError("glCreateShader type=" + shaderType);
- GLES20.glShaderSource(shader, source);
- GLES20.glCompileShader(shader);
- int[] compiled = new int[1];
- GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
- if (compiled[0] == 0) {
- Log.e(TAG, "Could not compile shader " + shaderType + ":");
- Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
- GLES20.glDeleteShader(shader);
- // TODO: handle this more gracefully
- throw new IllegalStateException("Could not compile shader " + shaderType);
- }
- return shader;
- }
-
- private int createProgram(String vertexSource, String fragmentSource) {
- int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
- if (vertexShader == 0) {
- return 0;
- }
- int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
- if (pixelShader == 0) {
- return 0;
- }
-
- int program = GLES20.glCreateProgram();
- checkGlError("glCreateProgram");
- if (program == 0) {
- Log.e(TAG, "Could not create program");
- }
- GLES20.glAttachShader(program, vertexShader);
- checkGlError("glAttachShader");
- GLES20.glAttachShader(program, pixelShader);
- checkGlError("glAttachShader");
- GLES20.glLinkProgram(program);
- int[] linkStatus = new int[1];
- GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
- if (linkStatus[0] != GLES20.GL_TRUE) {
- Log.e(TAG, "Could not link program: ");
- Log.e(TAG, GLES20.glGetProgramInfoLog(program));
- GLES20.glDeleteProgram(program);
- // TODO: handle this more gracefully
- throw new IllegalStateException("Could not link program");
- }
- return program;
- }
-
- private void drawFrame(SurfaceTexture st, int width, int height, int flipType)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- checkGlError("onDrawFrame start");
- st.getTransformMatrix(mSTMatrix);
-
- Matrix.setIdentityM(mMVPMatrix, /*smOffset*/0);
-
- // Find intermediate buffer dimensions
- Size dimens;
- try {
- dimens = LegacyCameraDevice.getTextureSize(st);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- // Should never hit this.
- throw new IllegalStateException("Surface abandoned, skipping drawFrame...", e);
- }
- float texWidth = dimens.getWidth();
- float texHeight = dimens.getHeight();
-
- if (texWidth <= 0 || texHeight <= 0) {
- throw new IllegalStateException("Illegal intermediate texture with dimension of 0");
- }
-
- // Letterbox or pillar-box output dimensions into intermediate dimensions.
- RectF intermediate = new RectF(/*left*/0, /*top*/0, /*right*/texWidth, /*bottom*/texHeight);
- RectF output = new RectF(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
- android.graphics.Matrix boxingXform = new android.graphics.Matrix();
- boxingXform.setRectToRect(output, intermediate, android.graphics.Matrix.ScaleToFit.CENTER);
- boxingXform.mapRect(output);
-
- // Find scaling factor from pillar-boxed/letter-boxed output dimensions to intermediate
- // buffer dimensions.
- float scaleX = intermediate.width() / output.width();
- float scaleY = intermediate.height() / output.height();
-
- // Intermediate texture is implicitly scaled to 'fill' the output dimensions in clip space
- // coordinates in the shader. To avoid stretching, we need to scale the larger dimension
- // of the intermediate buffer so that the output buffer is actually letter-boxed
- // or pillar-boxed into the intermediate buffer after clipping.
- Matrix.scaleM(mMVPMatrix, /*offset*/0, /*x*/scaleX, /*y*/scaleY, /*z*/1);
-
- if (DEBUG) {
- Log.d(TAG, "Scaling factors (S_x = " + scaleX + ",S_y = " + scaleY + ") used for " +
- width + "x" + height + " surface, intermediate buffer size is " + texWidth +
- "x" + texHeight);
- }
-
- // Set viewport to be output buffer dimensions
- GLES20.glViewport(0, 0, width, height);
-
- if (DEBUG) {
- GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
- GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
- }
-
- GLES20.glUseProgram(mProgram);
- checkGlError("glUseProgram");
-
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
-
- FloatBuffer triangleVertices;
- switch(flipType) {
- case FLIP_TYPE_HORIZONTAL:
- triangleVertices = mHorizontalFlipTriangleVertices;
- break;
- case FLIP_TYPE_VERTICAL:
- triangleVertices = mVerticalFlipTriangleVertices;
- break;
- case FLIP_TYPE_BOTH:
- triangleVertices = mBothFlipTriangleVertices;
- break;
- default:
- triangleVertices = mRegularTriangleVertices;
- break;
- }
-
- triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
- GLES20.glVertexAttribPointer(maPositionHandle, VERTEX_POS_SIZE, GLES20.GL_FLOAT,
- /*normalized*/ false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
- checkGlError("glVertexAttribPointer maPosition");
- GLES20.glEnableVertexAttribArray(maPositionHandle);
- checkGlError("glEnableVertexAttribArray maPositionHandle");
-
- triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
- GLES20.glVertexAttribPointer(maTextureHandle, VERTEX_UV_SIZE, GLES20.GL_FLOAT,
- /*normalized*/ false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
- checkGlError("glVertexAttribPointer maTextureHandle");
- GLES20.glEnableVertexAttribArray(maTextureHandle);
- checkGlError("glEnableVertexAttribArray maTextureHandle");
-
- GLES20.glUniformMatrix4fv(muMVPMatrixHandle, /*count*/ 1, /*transpose*/ false, mMVPMatrix,
- /*offset*/ 0);
- GLES20.glUniformMatrix4fv(muSTMatrixHandle, /*count*/ 1, /*transpose*/ false, mSTMatrix,
- /*offset*/ 0);
-
- GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /*offset*/ 0, /*count*/ 4);
- checkGlDrawError("glDrawArrays");
- }
-
- /**
- * Initializes GL state. Call this after the EGL surface has been created and made current.
- */
- private void initializeGLState() {
- mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
- if (mProgram == 0) {
- throw new IllegalStateException("failed creating program");
- }
- maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
- checkGlError("glGetAttribLocation aPosition");
- if (maPositionHandle == -1) {
- throw new IllegalStateException("Could not get attrib location for aPosition");
- }
- maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
- checkGlError("glGetAttribLocation aTextureCoord");
- if (maTextureHandle == -1) {
- throw new IllegalStateException("Could not get attrib location for aTextureCoord");
- }
-
- muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
- checkGlError("glGetUniformLocation uMVPMatrix");
- if (muMVPMatrixHandle == -1) {
- throw new IllegalStateException("Could not get attrib location for uMVPMatrix");
- }
-
- muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
- checkGlError("glGetUniformLocation uSTMatrix");
- if (muSTMatrixHandle == -1) {
- throw new IllegalStateException("Could not get attrib location for uSTMatrix");
- }
-
- int[] textures = new int[1];
- GLES20.glGenTextures(/*n*/ 1, textures, /*offset*/ 0);
-
- mTextureID = textures[0];
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
- checkGlError("glBindTexture mTextureID");
-
- GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
- GLES20.GL_NEAREST);
- GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
- GLES20.GL_LINEAR);
- GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
- GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
- GLES20.GL_CLAMP_TO_EDGE);
- checkGlError("glTexParameter");
- }
-
- private int getTextureId() {
- return mTextureID;
- }
-
- private void clearState() {
- mSurfaces.clear();
- for (EGLSurfaceHolder holder : mConversionSurfaces) {
- try {
- LegacyCameraDevice.disconnectSurface(holder.surface);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, skipping...", e);
- }
- }
- mConversionSurfaces.clear();
- mPBufferPixels = null;
- if (mSurfaceTexture != null) {
- mSurfaceTexture.release();
- }
- mSurfaceTexture = null;
- }
-
- private void configureEGLContext() {
- mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
- if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
- throw new IllegalStateException("No EGL14 display");
- }
- int[] version = new int[2];
- if (!EGL14.eglInitialize(mEGLDisplay, version, /*offset*/ 0, version, /*offset*/ 1)) {
- throw new IllegalStateException("Cannot initialize EGL14");
- }
-
- int[] attribList = {
- EGL14.EGL_RED_SIZE, EGL_COLOR_BITLENGTH,
- EGL14.EGL_GREEN_SIZE, EGL_COLOR_BITLENGTH,
- EGL14.EGL_BLUE_SIZE, EGL_COLOR_BITLENGTH,
- EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
- EGL_RECORDABLE_ANDROID, 1,
- EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT | EGL14.EGL_WINDOW_BIT,
- EGL14.EGL_NONE
- };
- EGLConfig[] configs = new EGLConfig[1];
- int[] numConfigs = new int[1];
- EGL14.eglChooseConfig(mEGLDisplay, attribList, /*offset*/ 0, configs, /*offset*/ 0,
- configs.length, numConfigs, /*offset*/ 0);
- checkEglError("eglCreateContext RGB888+recordable ES2");
- mConfigs = configs[0];
- int[] attrib_list = {
- EGL14.EGL_CONTEXT_CLIENT_VERSION, GLES_VERSION,
- EGL14.EGL_NONE
- };
- mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
- attrib_list, /*offset*/ 0);
- checkEglError("eglCreateContext");
- if(mEGLContext == EGL14.EGL_NO_CONTEXT) {
- throw new IllegalStateException("No EGLContext could be made");
- }
- }
-
- private void configureEGLOutputSurfaces(Collection<EGLSurfaceHolder> surfaces) {
- if (surfaces == null || surfaces.size() == 0) {
- throw new IllegalStateException("No Surfaces were provided to draw to");
- }
- int[] surfaceAttribs = {
- EGL14.EGL_NONE
- };
- for (EGLSurfaceHolder holder : surfaces) {
- holder.eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mConfigs,
- holder.surface, surfaceAttribs, /*offset*/ 0);
- checkEglError("eglCreateWindowSurface");
- }
- }
-
- private void configureEGLPbufferSurfaces(Collection<EGLSurfaceHolder> surfaces) {
- if (surfaces == null || surfaces.size() == 0) {
- throw new IllegalStateException("No Surfaces were provided to draw to");
- }
-
- int maxLength = 0;
- for (EGLSurfaceHolder holder : surfaces) {
- int length = holder.width * holder.height;
- // Find max surface size, ensure PBuffer can hold this many pixels
- maxLength = (length > maxLength) ? length : maxLength;
- int[] surfaceAttribs = {
- EGL14.EGL_WIDTH, holder.width,
- EGL14.EGL_HEIGHT, holder.height,
- EGL14.EGL_NONE
- };
- holder.eglSurface =
- EGL14.eglCreatePbufferSurface(mEGLDisplay, mConfigs, surfaceAttribs, 0);
- checkEglError("eglCreatePbufferSurface");
- }
- mPBufferPixels = ByteBuffer.allocateDirect(maxLength * PBUFFER_PIXEL_BYTES)
- .order(ByteOrder.nativeOrder());
- }
-
- private void releaseEGLContext() {
- if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
- EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
- EGL14.EGL_NO_CONTEXT);
- dumpGlTiming();
- if (mSurfaces != null) {
- for (EGLSurfaceHolder holder : mSurfaces) {
- if (holder.eglSurface != null) {
- EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
- }
- }
- }
- if (mConversionSurfaces != null) {
- for (EGLSurfaceHolder holder : mConversionSurfaces) {
- if (holder.eglSurface != null) {
- EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
- }
- }
- }
- EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
- EGL14.eglReleaseThread();
- EGL14.eglTerminate(mEGLDisplay);
- }
-
- mConfigs = null;
- mEGLDisplay = EGL14.EGL_NO_DISPLAY;
- mEGLContext = EGL14.EGL_NO_CONTEXT;
- clearState();
- }
-
- private void makeCurrent(EGLSurface surface)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- EGL14.eglMakeCurrent(mEGLDisplay, surface, surface, mEGLContext);
- checkEglDrawError("makeCurrent");
- }
-
- private boolean swapBuffers(EGLSurface surface)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- boolean result = EGL14.eglSwapBuffers(mEGLDisplay, surface);
-
- int error = EGL14.eglGetError();
- switch (error) {
- case EGL14.EGL_SUCCESS:
- return result;
-
- // Check for an abandoned buffer queue, or other error conditions out
- // of the user's control.
- //
- // From the EGL 1.4 spec (2013-12-04), Section 3.9.4 Posting Errors:
- //
- // If eglSwapBuffers is called and the native window associated with
- // surface is no longer valid, an EGL_BAD_NATIVE_WINDOW error is
- // generated.
- //
- // We also interpret EGL_BAD_SURFACE as indicating an abandoned
- // surface, even though the EGL spec does not document it as such, for
- // backwards compatibility with older versions of this file.
- case EGL14.EGL_BAD_NATIVE_WINDOW:
- case EGL14.EGL_BAD_SURFACE:
- throw new LegacyExceptionUtils.BufferQueueAbandonedException();
-
- default:
- throw new IllegalStateException(
- "swapBuffers: EGL error: 0x" + Integer.toHexString(error));
- }
- }
-
- private void checkEglDrawError(String msg)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- int error;
- if ((error = EGL14.eglGetError()) == EGL14.EGL_BAD_NATIVE_WINDOW) {
- throw new LegacyExceptionUtils.BufferQueueAbandonedException();
- }
- if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
- throw new IllegalStateException(msg + ": EGL error: 0x" + Integer.toHexString(error));
- }
- }
-
- private void checkEglError(String msg) {
- int error;
- if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
- throw new IllegalStateException(msg + ": EGL error: 0x" + Integer.toHexString(error));
- }
- }
-
- private void checkGlError(String msg) {
- int error;
- while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
- throw new IllegalStateException(
- msg + ": GLES20 error: 0x" + Integer.toHexString(error));
- }
- }
-
- private void checkGlDrawError(String msg)
- throws LegacyExceptionUtils.BufferQueueAbandonedException {
- int error;
- boolean surfaceAbandoned = false;
- boolean glError = false;
- while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
- if (error == GLES20.GL_OUT_OF_MEMORY) {
- surfaceAbandoned = true;
- } else {
- glError = true;
- }
- }
- if (glError) {
- throw new IllegalStateException(
- msg + ": GLES20 error: 0x" + Integer.toHexString(error));
- }
- if (surfaceAbandoned) {
- throw new LegacyExceptionUtils.BufferQueueAbandonedException();
- }
- }
-
- /**
- * Save a measurement dump to disk, in
- * {@code /sdcard/CameraLegacy/durations_<time>_<width1>x<height1>_...txt}
- */
- private void dumpGlTiming() {
- if (mPerfMeasurer == null) return;
-
- File legacyStorageDir = new File(Environment.getExternalStorageDirectory(), "CameraLegacy");
- if (!legacyStorageDir.exists()){
- if (!legacyStorageDir.mkdirs()){
- Log.e(TAG, "Failed to create directory for data dump");
- return;
- }
- }
-
- StringBuilder path = new StringBuilder(legacyStorageDir.getPath());
- path.append(File.separator);
- path.append("durations_");
-
- path.append(formatTimestamp(System.currentTimeMillis()));
- path.append("_S");
- for (EGLSurfaceHolder surface : mSurfaces) {
- path.append(String.format("_%d_%d", surface.width, surface.height));
- }
- path.append("_C");
- for (EGLSurfaceHolder surface : mConversionSurfaces) {
- path.append(String.format("_%d_%d", surface.width, surface.height));
- }
- path.append(".txt");
- mPerfMeasurer.dumpPerformanceData(path.toString());
- }
-
- private static String formatTimestamp(long timeMillis) {
- // This is a replacement for {@link Time#format2445()} that doesn't suffer from Y2038
- // issues.
- Instant instant = Instant.ofEpochMilli(timeMillis);
- ZoneId zoneId = ZoneId.systemDefault();
- LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, zoneId);
- return LOG_NAME_TIME_FORMATTER.format(localDateTime);
- }
-
- private void setupGlTiming() {
- if (PerfMeasurement.isGlTimingSupported()) {
- Log.d(TAG, "Enabling GL performance measurement");
- mPerfMeasurer = new PerfMeasurement();
- } else {
- Log.d(TAG, "GL performance measurement not supported on this device");
- mPerfMeasurer = null;
- }
- }
-
- private void beginGlTiming() {
- if (mPerfMeasurer == null) return;
- mPerfMeasurer.startTimer();
- }
-
- private void addGlTimestamp(long timestamp) {
- if (mPerfMeasurer == null) return;
- mPerfMeasurer.addTimestamp(timestamp);
- }
-
- private void endGlTiming() {
- if (mPerfMeasurer == null) return;
- mPerfMeasurer.stopTimer();
- }
-
- /**
- * Return the surface texture to draw to - this is the texture use to when producing output
- * surface buffers.
- *
- * @return a {@link SurfaceTexture}.
- */
- public SurfaceTexture getSurfaceTexture() {
- return mSurfaceTexture;
- }
-
- /**
- * Set a collection of output {@link Surface}s that can be drawn to.
- *
- * @param surfaces a {@link Collection} of surfaces.
- */
- public void configureSurfaces(Collection<Pair<Surface, Size>> surfaces) {
- releaseEGLContext();
-
- if (surfaces == null || surfaces.size() == 0) {
- Log.w(TAG, "No output surfaces configured for GL drawing.");
- return;
- }
-
- for (Pair<Surface, Size> p : surfaces) {
- Surface s = p.first;
- Size surfaceSize = p.second;
- // If pixel conversions aren't handled by egl, use a pbuffer
- try {
- EGLSurfaceHolder holder = new EGLSurfaceHolder();
- holder.surface = s;
- holder.width = surfaceSize.getWidth();
- holder.height = surfaceSize.getHeight();
- if (LegacyCameraDevice.needsConversion(s)) {
- mConversionSurfaces.add(holder);
- // LegacyCameraDevice is the producer of surfaces if it's not handled by EGL,
- // so LegacyCameraDevice needs to connect to the surfaces.
- LegacyCameraDevice.connectSurface(s);
- } else {
- mSurfaces.add(holder);
- }
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, skipping configuration... ", e);
- }
- }
-
- // Set up egl display
- configureEGLContext();
-
- // Set up regular egl surfaces if needed
- if (mSurfaces.size() > 0) {
- configureEGLOutputSurfaces(mSurfaces);
- }
-
- // Set up pbuffer surface if needed
- if (mConversionSurfaces.size() > 0) {
- configureEGLPbufferSurfaces(mConversionSurfaces);
- }
-
- try {
- makeCurrent((mSurfaces.size() > 0) ? mSurfaces.get(0).eglSurface :
- mConversionSurfaces.get(0).eglSurface);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, skipping configuration... ", e);
- }
-
- initializeGLState();
- mSurfaceTexture = new SurfaceTexture(getTextureId());
-
- // Set up performance tracking if enabled
- if (SystemProperties.getBoolean(LEGACY_PERF_PROPERTY, false)) {
- setupGlTiming();
- }
- }
-
- /**
- * Draw the current buffer in the {@link SurfaceTexture} returned from
- * {@link #getSurfaceTexture()} into the set of target {@link Surface}s
- * in the next request from the given {@link CaptureCollector}, or drop
- * the frame if none is available.
- *
- * <p>
- * Any {@link Surface}s targeted must be a subset of the {@link Surface}s
- * set in the last {@link #configureSurfaces(java.util.Collection)} call.
- * </p>
- *
- * @param targetCollector the surfaces to draw to.
- */
- public void drawIntoSurfaces(CaptureCollector targetCollector) {
- if ((mSurfaces == null || mSurfaces.size() == 0)
- && (mConversionSurfaces == null || mConversionSurfaces.size() == 0)) {
- return;
- }
-
- boolean doTiming = targetCollector.hasPendingPreviewCaptures();
- checkGlError("before updateTexImage");
-
- if (doTiming) {
- beginGlTiming();
- }
-
- mSurfaceTexture.updateTexImage();
-
- long timestamp = mSurfaceTexture.getTimestamp();
-
- Pair<RequestHolder, Long> captureHolder = targetCollector.previewCaptured(timestamp);
-
- // No preview request queued, drop frame.
- if (captureHolder == null) {
- if (DEBUG) {
- Log.d(TAG, "Dropping preview frame.");
- }
- if (doTiming) {
- endGlTiming();
- }
- return;
- }
-
- RequestHolder request = captureHolder.first;
-
- Collection<Surface> targetSurfaces = request.getHolderTargets();
- if (doTiming) {
- addGlTimestamp(timestamp);
- }
-
- List<Long> targetSurfaceIds = new ArrayList();
- try {
- targetSurfaceIds = LegacyCameraDevice.getSurfaceIds(targetSurfaces);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, dropping frame. ", e);
- request.setOutputAbandoned();
- }
-
- for (EGLSurfaceHolder holder : mSurfaces) {
- if (LegacyCameraDevice.containsSurfaceId(holder.surface, targetSurfaceIds)) {
- try{
- LegacyCameraDevice.setSurfaceDimens(holder.surface, holder.width,
- holder.height);
- makeCurrent(holder.eglSurface);
-
- LegacyCameraDevice.setNextTimestamp(holder.surface, captureHolder.second);
- drawFrame(mSurfaceTexture, holder.width, holder.height,
- (mFacing == CameraCharacteristics.LENS_FACING_FRONT) ?
- FLIP_TYPE_HORIZONTAL : FLIP_TYPE_NONE);
- swapBuffers(holder.eglSurface);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, dropping frame. ", e);
- request.setOutputAbandoned();
- }
- }
- }
- for (EGLSurfaceHolder holder : mConversionSurfaces) {
- if (LegacyCameraDevice.containsSurfaceId(holder.surface, targetSurfaceIds)) {
- // glReadPixels reads from the bottom of the buffer, so add an extra vertical flip
- try {
- makeCurrent(holder.eglSurface);
- drawFrame(mSurfaceTexture, holder.width, holder.height,
- (mFacing == CameraCharacteristics.LENS_FACING_FRONT) ?
- FLIP_TYPE_BOTH : FLIP_TYPE_VERTICAL);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- // Should never hit this.
- throw new IllegalStateException("Surface abandoned, skipping drawFrame...", e);
- }
- mPBufferPixels.clear();
- GLES20.glReadPixels(/*x*/ 0, /*y*/ 0, holder.width, holder.height,
- GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPBufferPixels);
- checkGlError("glReadPixels");
-
- try {
- int format = LegacyCameraDevice.detectSurfaceType(holder.surface);
- LegacyCameraDevice.setSurfaceDimens(holder.surface, holder.width,
- holder.height);
- LegacyCameraDevice.setNextTimestamp(holder.surface, captureHolder.second);
- LegacyCameraDevice.produceFrame(holder.surface, mPBufferPixels.array(),
- holder.width, holder.height, format);
- } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
- Log.w(TAG, "Surface abandoned, dropping frame. ", e);
- request.setOutputAbandoned();
- }
- }
- }
- targetCollector.previewProduced();
-
- if (doTiming) {
- endGlTiming();
- }
- }
-
- /**
- * Clean up the current GL context.
- */
- public void cleanupEGLContext() {
- releaseEGLContext();
- }
-
- /**
- * Drop all current GL operations on the floor.
- */
- public void flush() {
- // TODO: implement flush
- Log.e(TAG, "Flush not yet implemented.");
- }
-}
diff --git a/core/java/android/hardware/camera2/legacy/package.html b/core/java/android/hardware/camera2/legacy/package.html
deleted file mode 100644
index db6f78bbf628..000000000000
--- a/core/java/android/hardware/camera2/legacy/package.html
+++ /dev/null
@@ -1,3 +0,0 @@
-<body>
-{@hide}
-</body> \ No newline at end of file
diff --git a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
index c37f9fe2465c..52251ba90b98 100644
--- a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
@@ -24,7 +24,6 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.legacy.LegacyCameraDevice;
import android.hardware.camera2.utils.HashCodeHelpers;
import android.hardware.camera2.utils.SurfaceUtils;
import android.util.Range;
@@ -69,6 +68,8 @@ public final class StreamConfigurationMap {
private static final String TAG = "StreamConfigurationMap";
+ private static final int MAX_DIMEN_FOR_ROUNDING = 1920; // maximum allowed width for rounding
+
/**
* Create a new {@link StreamConfigurationMap}.
*
@@ -568,7 +569,7 @@ public final class StreamConfigurationMap {
if (config.getSize().equals(surfaceSize)) {
return true;
} else if (isFlexible &&
- (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
+ (config.getSize().getWidth() <= MAX_DIMEN_FOR_ROUNDING)) {
return true;
}
}
diff --git a/core/java/android/hardware/camera2/utils/SurfaceUtils.java b/core/java/android/hardware/camera2/utils/SurfaceUtils.java
index abe1372ebde4..35b5c1599070 100644
--- a/core/java/android/hardware/camera2/utils/SurfaceUtils.java
+++ b/core/java/android/hardware/camera2/utils/SurfaceUtils.java
@@ -16,10 +16,14 @@
package android.hardware.camera2.utils;
+import static android.system.OsConstants.EINVAL;
+
+import static com.android.internal.util.Preconditions.checkNotNull;
+
import android.compat.annotation.UnsupportedAppUsage;
import android.graphics.ImageFormat;
-import android.hardware.camera2.legacy.LegacyCameraDevice;
-import android.hardware.camera2.legacy.LegacyExceptionUtils.BufferQueueAbandonedException;
+import android.graphics.PixelFormat;
+import android.hardware.HardwareBuffer;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Range;
import android.util.Size;
@@ -35,6 +39,15 @@ import java.util.List;
*/
public class SurfaceUtils {
+ // Usage flags not yet included in HardwareBuffer
+ private static final int USAGE_RENDERSCRIPT = 0x00100000;
+ private static final int USAGE_HW_COMPOSER = 0x00000800;
+
+ // Image formats not yet included in PixelFormat
+ private static final int BGRA_8888 = 0x5;
+
+ private static final int BAD_VALUE = -EINVAL;
+
/**
* Check if a surface is for preview consumer based on consumer end point Gralloc usage flags.
*
@@ -42,7 +55,17 @@ public class SurfaceUtils {
* @return true if the surface is for preview consumer, false otherwise.
*/
public static boolean isSurfaceForPreview(Surface surface) {
- return LegacyCameraDevice.isPreviewConsumer(surface);
+ checkNotNull(surface);
+ long usageFlags = nativeDetectSurfaceUsageFlags(surface);
+ long disallowedFlags = HardwareBuffer.USAGE_VIDEO_ENCODE | USAGE_RENDERSCRIPT
+ | HardwareBuffer.USAGE_CPU_READ_OFTEN;
+ long allowedFlags = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | USAGE_HW_COMPOSER
+ | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
+ boolean previewConsumer = ((usageFlags & disallowedFlags) == 0
+ && (usageFlags & allowedFlags) != 0);
+ int surfaceFormat = getSurfaceFormat(surface);
+
+ return previewConsumer;
}
/**
@@ -53,7 +76,17 @@ public class SurfaceUtils {
* @return true if the surface is for hardware video encoder consumer, false otherwise.
*/
public static boolean isSurfaceForHwVideoEncoder(Surface surface) {
- return LegacyCameraDevice.isVideoEncoderConsumer(surface);
+ checkNotNull(surface);
+ long usageFlags = nativeDetectSurfaceUsageFlags(surface);
+ long disallowedFlags = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | USAGE_HW_COMPOSER
+ | USAGE_RENDERSCRIPT | HardwareBuffer.USAGE_CPU_READ_OFTEN;
+ long allowedFlags = HardwareBuffer.USAGE_VIDEO_ENCODE;
+ boolean videoEncoderConsumer = ((usageFlags & disallowedFlags) == 0
+ && (usageFlags & allowedFlags) != 0);
+
+ int surfaceFormat = getSurfaceFormat(surface);
+
+ return videoEncoderConsumer;
}
/**
@@ -63,9 +96,10 @@ public class SurfaceUtils {
* @return the native object id of the surface, 0 if surface is not backed by a native object.
*/
public static long getSurfaceId(Surface surface) {
+ checkNotNull(surface);
try {
- return LegacyCameraDevice.getSurfaceId(surface);
- } catch (BufferQueueAbandonedException e) {
+ return nativeGetSurfaceId(surface);
+ } catch (IllegalArgumentException e) {
return 0;
}
}
@@ -80,11 +114,13 @@ public class SurfaceUtils {
*/
@UnsupportedAppUsage
public static Size getSurfaceSize(Surface surface) {
- try {
- return LegacyCameraDevice.getSurfaceSize(surface);
- } catch (BufferQueueAbandonedException e) {
- throw new IllegalArgumentException("Surface was abandoned", e);
- }
+ checkNotNull(surface);
+
+ int[] dimens = new int[2];
+ int errorFlag = nativeDetectSurfaceDimens(surface, /*out*/dimens);
+ if (errorFlag == BAD_VALUE) throw new IllegalArgumentException("Surface was abandoned");
+
+ return new Size(dimens[0], dimens[1]);
}
/**
@@ -96,11 +132,17 @@ public class SurfaceUtils {
* @throws IllegalArgumentException if the surface is already abandoned.
*/
public static int getSurfaceFormat(Surface surface) {
- try {
- return LegacyCameraDevice.detectSurfaceType(surface);
- } catch (BufferQueueAbandonedException e) {
- throw new IllegalArgumentException("Surface was abandoned", e);
+ checkNotNull(surface);
+ int surfaceType = nativeDetectSurfaceType(surface);
+ if (surfaceType == BAD_VALUE) throw new IllegalArgumentException("Surface was abandoned");
+
+ // TODO: remove this override since the default format should be
+ // ImageFormat.PRIVATE. b/9487482
+ if ((surfaceType >= PixelFormat.RGBA_8888
+ && surfaceType <= BGRA_8888)) {
+ surfaceType = ImageFormat.PRIVATE;
}
+ return surfaceType;
}
/**
@@ -112,11 +154,10 @@ public class SurfaceUtils {
* @throws IllegalArgumentException if the surface is already abandoned.
*/
public static int getSurfaceDataspace(Surface surface) {
- try {
- return LegacyCameraDevice.detectSurfaceDataspace(surface);
- } catch (BufferQueueAbandonedException e) {
- throw new IllegalArgumentException("Surface was abandoned", e);
- }
+ checkNotNull(surface);
+ int dataSpace = nativeDetectSurfaceDataspace(surface);
+ if (dataSpace == BAD_VALUE) throw new IllegalArgumentException("Surface was abandoned");
+ return dataSpace;
}
/**
@@ -125,9 +166,21 @@ public class SurfaceUtils {
*
*/
public static boolean isFlexibleConsumer(Surface output) {
- return LegacyCameraDevice.isFlexibleConsumer(output);
+ checkNotNull(output);
+ long usageFlags = nativeDetectSurfaceUsageFlags(output);
+
+ // Keep up to date with allowed consumer types in
+ // frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+ long disallowedFlags = HardwareBuffer.USAGE_VIDEO_ENCODE | USAGE_RENDERSCRIPT;
+ long allowedFlags = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
+ | HardwareBuffer.USAGE_CPU_READ_OFTEN
+ | USAGE_HW_COMPOSER;
+ boolean flexibleConsumer = ((usageFlags & disallowedFlags) == 0
+ && (usageFlags & allowedFlags) != 0);
+ return flexibleConsumer;
}
+
/**
* A high speed output surface can only be preview or hardware encoder surface.
*
@@ -209,4 +262,14 @@ public class SurfaceUtils {
}
}
+ private static native int nativeDetectSurfaceType(Surface surface);
+
+ private static native int nativeDetectSurfaceDataspace(Surface surface);
+
+ private static native long nativeDetectSurfaceUsageFlags(Surface surface);
+
+ private static native int nativeDetectSurfaceDimens(Surface surface,
+ /*out*/int[/*2*/] dimens);
+
+ private static native long nativeGetSurfaceId(Surface surface);
}