From 733341bf0db89c93ee1341ddfca9b0c49731c836 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 29 Jul 2014 18:38:04 -0700 Subject: [PATCH 1/2] camera2: (legacy) Support awb mode, test mode metadata keys Change-Id: Ic013aa820bbea02a662d546eb9f70baa20c0136e --- .../camera2/legacy/LegacyMetadataMapper.java | 71 +++++++++++++++-- .../camera2/legacy/LegacyRequestMapper.java | 56 +++++++++++++ .../camera2/legacy/LegacyResultMapper.java | 79 ++++++++++++++----- 3 files changed, 177 insertions(+), 29 deletions(-) diff --git a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java index 986f9a8538684..711edf4762e76 100644 --- a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java +++ b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java @@ -94,7 +94,7 @@ public class LegacyMetadataMapper { static final boolean LIE_ABOUT_AF = false; static final boolean LIE_ABOUT_AF_MAX_REGIONS = false; static final boolean LIE_ABOUT_AWB_STATE = false; - static final boolean LIE_ABOUT_AWB = true; + static final boolean LIE_ABOUT_AWB = false; /** * Create characteristics for a legacy device by mapping the {@code parameters} @@ -436,8 +436,52 @@ public class LegacyMetadataMapper { } private static void mapControlAwb(CameraMetadataNative m, Camera.Parameters p) { - if (!LIE_ABOUT_AWB) { - throw new AssertionError("Not implemented yet"); + /* + * control.awbAvailableModes + */ + + { + List wbModes = p.getSupportedWhiteBalance(); + + String[] wbModeStrings = new String[] { + Camera.Parameters.WHITE_BALANCE_AUTO , + Camera.Parameters.WHITE_BALANCE_INCANDESCENT , + Camera.Parameters.WHITE_BALANCE_FLUORESCENT , + Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT , + Camera.Parameters.WHITE_BALANCE_DAYLIGHT , + Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT , + Camera.Parameters.WHITE_BALANCE_TWILIGHT , + Camera.Parameters.WHITE_BALANCE_SHADE , + }; + + int[] wbModeInts = new 
int[] { + CONTROL_AWB_MODE_AUTO, + CONTROL_AWB_MODE_INCANDESCENT , + CONTROL_AWB_MODE_FLUORESCENT , + CONTROL_AWB_MODE_WARM_FLUORESCENT , + CONTROL_AWB_MODE_DAYLIGHT , + CONTROL_AWB_MODE_CLOUDY_DAYLIGHT , + CONTROL_AWB_MODE_TWILIGHT , + CONTROL_AWB_MODE_SHADE , + // Note that CONTROL_AWB_MODE_OFF is unsupported + }; + + List awbAvail = ArrayUtils.convertStringListToIntList( + wbModes, wbModeStrings, wbModeInts); + + // No AWB modes supported? That's unpossible! + if (awbAvail == null || awbAvail.size() == 0) { + Log.w(TAG, "No AWB modes supported (HAL bug); defaulting to AWB_MODE_AUTO only"); + awbAvail = new ArrayList(/*capacity*/1); + awbAvail.add(CONTROL_AWB_MODE_AUTO); + } + + m.set(CONTROL_AWB_AVAILABLE_MODES, ArrayUtils.toIntArray(awbAvail)); + + if (VERBOSE) { + Log.v(TAG, "mapControlAwb - control.awbAvailableModes set to " + + ListUtils.listToString(awbAvail)); + } } } @@ -649,6 +693,11 @@ public class LegacyMetadataMapper { */ m.set(REQUEST_MAX_NUM_INPUT_STREAMS, REQUEST_MAX_NUM_INPUT_STREAMS_COUNT); + /* + * request.partialResultCount + */ + m.set(REQUEST_PARTIAL_RESULT_COUNT, 1); // No partial results supported + /* * request.pipelineMaxDepth */ @@ -679,6 +728,14 @@ public class LegacyMetadataMapper { m.set(SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArrayRect); } + /* + * sensor.availableTestPatternModes + */ + { + // Only "OFF" test pattern mode is available + m.set(SENSOR_AVAILABLE_TEST_PATTERN_MODES, new int[] { SENSOR_TEST_PATTERN_MODE_OFF }); + } + /* * sensor.info.pixelArraySize */ @@ -921,11 +978,9 @@ public class LegacyMetadataMapper { * control.* */ - if (LIE_ABOUT_AWB) { - m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO); - } else { - throw new AssertionError("Valid control.awbMode not implemented yet"); - } + // control.awbMode + m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO); + // AWB is always unconditionally available in API1 devices // control.aeAntibandingMode 
m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO); diff --git a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java index dfec90081bdde..a6fe035ca1446 100644 --- a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java +++ b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java @@ -216,6 +216,25 @@ public class LegacyRequestMapper { } } + // control.awbMode + { + Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE, + /*defaultValue*/CONTROL_AWB_MODE_AUTO, + params.getSupportedWhiteBalance() != null, + /*allowedValue*/CONTROL_AWB_MODE_AUTO); + + String whiteBalanceMode = null; + if (awbMode != null) { // null iff AWB is not supported by camera1 api + whiteBalanceMode = convertAwbModeToLegacy(awbMode); + params.setWhiteBalance(whiteBalanceMode); + } + + if (VERBOSE) { + Log.v(TAG, "convertRequestToMetadata - control.awbMode " + + awbMode + " mapped to " + whiteBalanceMode); + } + } + // control.awbLock { Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/false, @@ -294,6 +313,20 @@ public class LegacyRequestMapper { } } } + + /* + * sensor + */ + + // sensor.testPattern + { + int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE, + /*defaultValue*/SENSOR_TEST_PATTERN_MODE_OFF); + if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) { + Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode " + + testPatternMode + "; only OFF is supported"); + } + } } private static List convertMeteringRegionsToLegacy( @@ -445,6 +478,29 @@ public class LegacyRequestMapper { return legacyFps; } + private static String convertAwbModeToLegacy(int mode) { + switch (mode) { + case CONTROL_AWB_MODE_AUTO: + return Camera.Parameters.WHITE_BALANCE_AUTO; + case CONTROL_AWB_MODE_INCANDESCENT: + return Camera.Parameters.WHITE_BALANCE_INCANDESCENT; + case 
CONTROL_AWB_MODE_FLUORESCENT: + return Camera.Parameters.WHITE_BALANCE_FLUORESCENT; + case CONTROL_AWB_MODE_WARM_FLUORESCENT: + return Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT; + case CONTROL_AWB_MODE_DAYLIGHT: + return Camera.Parameters.WHITE_BALANCE_DAYLIGHT; + case CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: + return Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT; + case CONTROL_AWB_MODE_TWILIGHT: + return Camera.Parameters.WHITE_BALANCE_TWILIGHT; + default: + Log.w(TAG, "convertAwbModeToLegacy - unrecognized control.awbMode" + mode); + return Camera.Parameters.WHITE_BALANCE_AUTO; + } + } + + /** * Return {@code null} if the value is not supported, otherwise return the retrieved key's * value from the request (or the default value if it wasn't set). diff --git a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java index 6da5dd0323be4..9eff943ac268f 100644 --- a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java +++ b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java @@ -20,7 +20,6 @@ import android.graphics.Rect; import android.hardware.Camera; import android.hardware.Camera.Parameters; import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.CaptureResult; import android.hardware.camera2.impl.CameraMetadataNative; @@ -133,26 +132,15 @@ public class LegacyResultMapper { */ mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/params); - // control.afMode - result.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(params.getFocusMode())); + /* + * control.af* + */ + mapAf(result, activeArraySize, zoomData, /*out*/params); - // control.awbLock - result.set(CaptureResult.CONTROL_AWB_LOCK, params.getAutoWhiteBalanceLock()); - - // control.awbState - if (LegacyMetadataMapper.LIE_ABOUT_AWB_STATE) { - // Lie to 
pass CTS temporarily. - // TODO: CTS needs to be updated not to query this value - // for LIMITED devices unless its guaranteed to be available. - result.set(CaptureResult.CONTROL_AWB_STATE, - CameraMetadata.CONTROL_AWB_STATE_CONVERGED); - // TODO: Read the awb mode from parameters instead - } - - if (LegacyMetadataMapper.LIE_ABOUT_AWB) { - result.set(CaptureResult.CONTROL_AWB_MODE, - request.get(CaptureRequest.CONTROL_AWB_MODE)); - } + /* + * control.awb* + */ + mapAwb(result, /*out*/params); /* @@ -203,7 +191,7 @@ public class LegacyResultMapper { * flash */ { - // TODO + // flash.mode, flash.state mapped in mapAeAndFlashMode } /* @@ -234,6 +222,11 @@ public class LegacyResultMapper { /* * sensor */ + // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata + { + // Unconditionally no test patterns + result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF); + } // TODO: Remaining result metadata tags conversions. return result; @@ -295,6 +288,13 @@ public class LegacyResultMapper { m.set(CONTROL_AE_REGIONS, meteringRectArray); } + } + + private static void mapAf(CameraMetadataNative m, + Rect activeArray, ZoomData zoomData, Camera.Parameters p) { + // control.afMode + m.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(p.getFocusMode())); + // control.afRegions { if (VERBOSE) { @@ -307,13 +307,21 @@ public class LegacyResultMapper { m.set(CONTROL_AF_REGIONS, meteringRectArray); } + } + private static void mapAwb(CameraMetadataNative m, Camera.Parameters p) { // control.awbLock { boolean lock = p.isAutoWhiteBalanceLockSupported() ? 
p.getAutoWhiteBalanceLock() : false; m.set(CONTROL_AWB_LOCK, lock); } + + // control.awbMode + { + int awbMode = convertLegacyAwbMode(p.getWhiteBalance()); + m.set(CONTROL_AWB_MODE, awbMode); + } } private static MeteringRectangle[] getMeteringRectangles(Rect activeArray, ZoomData zoomData, @@ -412,6 +420,35 @@ public class LegacyResultMapper { } } + private static int convertLegacyAwbMode(String mode) { + if (mode == null) { + // OK: camera1 api may not support changing WB modes; assume AUTO + return CONTROL_AWB_MODE_AUTO; + } + + switch (mode) { + case Camera.Parameters.WHITE_BALANCE_AUTO: + return CONTROL_AWB_MODE_AUTO; + case Camera.Parameters.WHITE_BALANCE_INCANDESCENT: + return CONTROL_AWB_MODE_INCANDESCENT; + case Camera.Parameters.WHITE_BALANCE_FLUORESCENT: + return CONTROL_AWB_MODE_FLUORESCENT; + case Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT: + return CONTROL_AWB_MODE_WARM_FLUORESCENT; + case Camera.Parameters.WHITE_BALANCE_DAYLIGHT: + return CONTROL_AWB_MODE_DAYLIGHT; + case Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT: + return CONTROL_AWB_MODE_CLOUDY_DAYLIGHT; + case Camera.Parameters.WHITE_BALANCE_TWILIGHT: + return CONTROL_AWB_MODE_TWILIGHT; + case Camera.Parameters.WHITE_BALANCE_SHADE: + return CONTROL_AWB_MODE_SHADE; + default: + Log.w(TAG, "convertAwbMode - unrecognized WB mode " + mode); + return CONTROL_AWB_MODE_AUTO; + } + } + /** Map results for scaler.* */ private static void mapScaler(CameraMetadataNative m, ZoomData zoomData, From 0a1ef4dbf39aa3dfae1a91daf972ae3457ce27fe Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 31 Jul 2014 15:53:34 -0700 Subject: [PATCH 2/2] camera2: (LEGACY) Add face detection support and vstab modes Also work-around CTS freezing forever in Camera#setPreviewTexture * Set preview texture to null after unconfiguring (Camera#stopPreview) * Remove all uses of recording hint (Some devices disable face detection unconditionally when recording is not using HDR mode; recording hints will be introduced 
after we add capture intent support) Change-Id: I2f0a80f1d580af214cb0f53abcbf33c7391790f1 --- .../camera2/impl/CameraMetadataNative.java | 86 ++++++- .../legacy/LegacyFaceDetectMapper.java | 231 ++++++++++++++++++ .../legacy/LegacyFocusStateMapper.java | 3 +- .../camera2/legacy/LegacyMetadataMapper.java | 42 ++++ .../camera2/legacy/LegacyRequestMapper.java | 14 +- .../camera2/legacy/LegacyResultMapper.java | 14 +- .../camera2/legacy/ParameterUtils.java | 62 ++++- .../camera2/legacy/RequestThreadManager.java | 70 +++++- 8 files changed, 502 insertions(+), 20 deletions(-) create mode 100644 core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java index dc71a060f91ed..febb015a7fd09 100644 --- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java +++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java @@ -67,6 +67,7 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.HashMap; +import java.util.List; /** * Implementation of camera metadata marshal/unmarshal across Binder to @@ -227,6 +228,7 @@ public class CameraMetadataNative implements Parcelable { private static final String CELLID_PROCESS = "CELLID"; private static final String GPS_PROCESS = "GPS"; + private static final int FACE_LANDMARK_SIZE = 6; private static String translateLocationProviderToProcess(final String provider) { if (provider == null) { @@ -347,7 +349,7 @@ public class CameraMetadataNative implements Parcelable { // Check if key has been overridden to use a wrapper class on the java side. 
GetCommand g = sGetCommandMap.get(key); if (g != null) { - return (T) g.getValue(this, key); + return g.getValue(this, key); } return getBase(key); } @@ -587,9 +589,71 @@ public class CameraMetadataNative implements Parcelable { return availableFormats; } - private Face[] getFaces() { - final int FACE_LANDMARK_SIZE = 6; + private boolean setFaces(Face[] faces) { + if (faces == null) { + return false; + } + int numFaces = faces.length; + + // Detect if all faces are SIMPLE or not; count # of valid faces + boolean fullMode = true; + for (Face face : faces) { + if (face == null) { + numFaces--; + Log.w(TAG, "setFaces - null face detected, skipping"); + continue; + } + + if (face.getId() == Face.ID_UNSUPPORTED) { + fullMode = false; + } + } + + Rect[] faceRectangles = new Rect[numFaces]; + byte[] faceScores = new byte[numFaces]; + int[] faceIds = null; + int[] faceLandmarks = null; + + if (fullMode) { + faceIds = new int[numFaces]; + faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE]; + } + + int i = 0; + for (Face face : faces) { + if (face == null) { + continue; + } + + faceRectangles[i] = face.getBounds(); + faceScores[i] = (byte)face.getScore(); + + if (fullMode) { + faceIds[i] = face.getId(); + + int j = 0; + + faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x; + faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y; + faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x; + faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y; + faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x; + faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y; + } + + i++; + } + + set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles); + set(CaptureResult.STATISTICS_FACE_IDS, faceIds); + set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks); + set(CaptureResult.STATISTICS_FACE_SCORES, faceScores); + + return true; + } + + private Face[] getFaces() 
{ Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE); if (faceDetectMode == null) { Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE"); @@ -653,9 +717,12 @@ public class CameraMetadataNative implements Parcelable { if (faceScores[i] <= Face.SCORE_MAX && faceScores[i] >= Face.SCORE_MIN && faceIds[i] >= 0) { - Point leftEye = new Point(faceLandmarks[i*6], faceLandmarks[i*6+1]); - Point rightEye = new Point(faceLandmarks[i*6+2], faceLandmarks[i*6+3]); - Point mouth = new Point(faceLandmarks[i*6+4], faceLandmarks[i*6+5]); + Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE], + faceLandmarks[i*FACE_LANDMARK_SIZE+1]); + Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2], + faceLandmarks[i*FACE_LANDMARK_SIZE+3]); + Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4], + faceLandmarks[i*FACE_LANDMARK_SIZE+5]); Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i], leftEye, rightEye, mouth); faceList.add(face); @@ -865,6 +932,13 @@ public class CameraMetadataNative implements Parcelable { metadata.setFaceRectangles((Rect[]) value); } }); + sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(), + new SetCommand() { + @Override + public void setValue(CameraMetadataNative metadata, T value) { + metadata.setFaces((Face[])value); + } + }); sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() { @Override public void setValue(CameraMetadataNative metadata, T value) { diff --git a/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java b/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java new file mode 100644 index 0000000000000..1470b70b4234a --- /dev/null +++ b/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java @@ -0,0 +1,231 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera2.legacy; + +import android.graphics.Rect; +import android.hardware.Camera; +import android.hardware.Camera.FaceDetectionListener; +import android.hardware.camera2.impl.CameraMetadataNative; +import android.hardware.camera2.legacy.ParameterUtils.ZoomData; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.params.Face; +import android.hardware.camera2.utils.ListUtils; +import android.hardware.camera2.utils.ParamsUtils; +import android.util.Log; +import android.util.Size; + +import com.android.internal.util.ArrayUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static android.hardware.camera2.CaptureRequest.*; +import static com.android.internal.util.Preconditions.*; + +/** + * Map legacy face detect callbacks into face detection results. + */ +@SuppressWarnings("deprecation") +public class LegacyFaceDetectMapper { + private static String TAG = "LegacyFaceDetectMapper"; + private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE); + + private final Camera mCamera; + private final boolean mFaceDetectSupported; + private boolean mFaceDetectEnabled = false; + + private final Object mLock = new Object(); + private Camera.Face[] mFaces; + private Camera.Face[] mFacesPrev; + /** + * Instantiate a new face detect mapper. 
+ * + * @param camera a non-{@code null} camera1 device + * @param characteristics a non-{@code null} camera characteristics for that camera1 + * + * @throws NullPointerException if any of the args were {@code null} + */ + public LegacyFaceDetectMapper(Camera camera, CameraCharacteristics characteristics) { + mCamera = checkNotNull(camera, "camera must not be null"); + checkNotNull(characteristics, "characteristics must not be null"); + + mFaceDetectSupported = ArrayUtils.contains( + characteristics.get( + CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES), + STATISTICS_FACE_DETECT_MODE_SIMPLE); + + if (!mFaceDetectSupported) { + return; + } + + mCamera.setFaceDetectionListener(new FaceDetectionListener() { + + @Override + public void onFaceDetection(Camera.Face[] faces, Camera camera) { + int lengthFaces = faces == null ? 0 : faces.length; + synchronized (mLock) { + if (mFaceDetectEnabled) { + mFaces = faces; + } else if (lengthFaces > 0) { + // stopFaceDetectMode could race against the requests, print a debug log + Log.d(TAG, + "onFaceDetection - Ignored some incoming faces since" + + "face detection was disabled"); + } + } + + if (VERBOSE) { + Log.v(TAG, "onFaceDetection - read " + lengthFaces + " faces"); + } + } + }); + } + + /** + * Process the face detect mode from the capture request into an api1 face detect toggle. + * + *

This method should be called after the parameters are {@link LegacyRequestMapper mapped} + * with the request.

+ * + *

Callbacks are processed in the background, and the next call to {@link #mapResultTriggers} + * will have the latest faces detected as reflected by the camera1 callbacks.

+ * + *

None of the arguments will be mutated.

+ * + * @param captureRequest a non-{@code null} request + * @param parameters a non-{@code null} parameters corresponding to this request (read-only) + */ + public void processFaceDetectMode(CaptureRequest captureRequest, + Camera.Parameters parameters) { + checkNotNull(captureRequest, "captureRequest must not be null"); + + /* + * statistics.faceDetectMode + */ + int fdMode = ParamsUtils.getOrDefault(captureRequest, STATISTICS_FACE_DETECT_MODE, + STATISTICS_FACE_DETECT_MODE_OFF); + + if (fdMode != STATISTICS_FACE_DETECT_MODE_OFF && !mFaceDetectSupported) { + Log.w(TAG, + "processFaceDetectMode - Ignoring statistics.faceDetectMode; " + + "face detection is not available"); + return; + } + + // Print some warnings out in case the values were wrong + switch (fdMode) { + case STATISTICS_FACE_DETECT_MODE_OFF: + case STATISTICS_FACE_DETECT_MODE_SIMPLE: + break; + case STATISTICS_FACE_DETECT_MODE_FULL: + Log.w(TAG, + "processFaceDetectMode - statistics.faceDetectMode == FULL unsupported, " + + "downgrading to SIMPLE"); + break; + default: + Log.w(TAG, "processFaceDetectMode - ignoring unknown statistics.faceDetectMode = " + + fdMode); + return; + } + + boolean enableFaceDetect = fdMode != STATISTICS_FACE_DETECT_MODE_OFF; + synchronized (mLock) { + // Enable/disable face detection if it's changed since last time + if (enableFaceDetect != mFaceDetectEnabled) { + if (enableFaceDetect) { + mCamera.startFaceDetection(); + + if (VERBOSE) { + Log.v(TAG, "processFaceDetectMode - start face detection"); + } + } else { + mCamera.stopFaceDetection(); + + if (VERBOSE) { + Log.v(TAG, "processFaceDetectMode - stop face detection"); + } + + mFaces = null; + } + + mFaceDetectEnabled = enableFaceDetect; + } + } + } + + /** + * Update the {@code result} camera metadata map with the new value for the + * {@code statistics.faces} and {@code statistics.faceDetectMode}. + * + *

Face detect callbacks are processed in the background, and each call to + * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.

+ * + * @param result a non-{@code null} result + * @param legacyRequest a non-{@code null} request (read-only) + */ + public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) { + checkNotNull(result, "result must not be null"); + checkNotNull(legacyRequest, "legacyRequest must not be null"); + + Camera.Face[] faces, previousFaces; + int fdMode; + synchronized (mLock) { + fdMode = mFaceDetectEnabled ? + STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF; + + if (mFaceDetectEnabled) { + faces = mFaces; + } else { + faces = null; + } + + previousFaces = mFacesPrev; + mFacesPrev = faces; + } + + CameraCharacteristics characteristics = legacyRequest.characteristics; + CaptureRequest request = legacyRequest.captureRequest; + Size previewSize = legacyRequest.previewSize; + Camera.Parameters params = legacyRequest.parameters; + + Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray, + request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params); + + List convertedFaces = new ArrayList<>(); + if (faces != null) { + for (Camera.Face face : faces) { + if (face != null) { + convertedFaces.add( + ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData)); + } else { + Log.w(TAG, "mapResultFaces - read NULL face from camera1 device"); + } + } + } + + if (VERBOSE && previousFaces != faces) { // Log only in verbose and IF the faces changed + Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces)); + } + + result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0])); + result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode); + } +} diff --git a/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java b/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java index e576b43da154b..d0a3a3fd3e682 100644 --- 
a/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java +++ b/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java @@ -247,7 +247,8 @@ public class LegacyFocusStateMapper { // No action necessary. The callbacks will handle transitions. break; default: - Log.w(TAG, "mapTriggers - ignoring unknown control.afTrigger = " + afTrigger); + Log.w(TAG, "processRequestTriggers - ignoring unknown control.afTrigger = " + + afTrigger); } } diff --git a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java index 711edf4762e76..b05508b25824e 100644 --- a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java +++ b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java @@ -203,6 +203,11 @@ public class LegacyMetadataMapper { */ mapSensor(m, p); + /* + * statistics.* + */ + mapStatistics(m, p); + /* * sync.* */ @@ -486,6 +491,18 @@ public class LegacyMetadataMapper { } private static void mapControlOther(CameraMetadataNative m, Camera.Parameters p) { + /* + * android.control.availableVideoStabilizationModes + */ + { + int stabModes[] = p.isVideoStabilizationSupported() ? 
+ new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF, + CONTROL_VIDEO_STABILIZATION_MODE_ON } : + new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF }; + + m.set(CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, stabModes); + } + /* * android.control.maxRegions */ @@ -742,6 +759,31 @@ public class LegacyMetadataMapper { m.set(SENSOR_INFO_PIXEL_ARRAY_SIZE, largestJpegSize); } + private static void mapStatistics(CameraMetadataNative m, Parameters p) { + /* + * statistics.info.availableFaceDetectModes + */ + int[] fdModes; + + if (p.getMaxNumDetectedFaces() > 0) { + fdModes = new int[] { + STATISTICS_FACE_DETECT_MODE_OFF, + STATISTICS_FACE_DETECT_MODE_SIMPLE + // FULL is never-listed, since we have no way to query it statically + }; + } else { + fdModes = new int[] { + STATISTICS_FACE_DETECT_MODE_OFF + }; + } + m.set(STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, fdModes); + + /* + * statistics.info.maxFaceCount + */ + m.set(STATISTICS_INFO_MAX_FACE_COUNT, p.getMaxNumDetectedFaces()); + } + private static void mapSync(CameraMetadataNative m, Parameters p) { /* * sync.maxLatency diff --git a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java index a6fe035ca1446..20f3fd2f7b5b2 100644 --- a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java +++ b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java @@ -150,10 +150,8 @@ public class LegacyRequestMapper { if (supported) { params.setPreviewFpsRange(legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); - params.setRecordingHint(false); } else { Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]"); - params.setRecordingHint(true); } } @@ -248,6 +246,18 @@ public class LegacyRequestMapper { // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported } + // control.videoStabilizationMode + { + Integer stabMode = 
getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE, + /*defaultValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF, + params.isVideoStabilizationSupported(), + /*allowedValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF); + + if (stabMode != null) { + params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON); + } + } + // lens.focusDistance { boolean infinityFocusSupported = diff --git a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java index 9eff943ac268f..a2487f4bd9b85 100644 --- a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java +++ b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java @@ -35,6 +35,9 @@ import java.util.ArrayList; import java.util.List; import static com.android.internal.util.Preconditions.*; +import static android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF; +import static android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON; +import static android.hardware.camera2.CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE; import static android.hardware.camera2.CaptureResult.*; /** @@ -142,7 +145,6 @@ public class LegacyResultMapper { */ mapAwb(result, /*out*/params); - /* * control.mode */ @@ -171,7 +173,6 @@ public class LegacyResultMapper { } } - /* * control.effectMode */ @@ -187,6 +188,15 @@ public class LegacyResultMapper { } } + // control.videoStabilizationMode + { + int stabMode = + (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ? 
+ CONTROL_VIDEO_STABILIZATION_MODE_ON : + CONTROL_VIDEO_STABILIZATION_MODE_OFF; + result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode); + } + /* * flash */ diff --git a/core/java/android/hardware/camera2/legacy/ParameterUtils.java b/core/java/android/hardware/camera2/legacy/ParameterUtils.java index efd12f2896260..385f8440afb2f 100644 --- a/core/java/android/hardware/camera2/legacy/ParameterUtils.java +++ b/core/java/android/hardware/camera2/legacy/ParameterUtils.java @@ -43,6 +43,7 @@ import static com.android.internal.util.Preconditions.*; /** * Various utilities for dealing with camera API1 parameters. */ +@SuppressWarnings("deprecation") public class ParameterUtils { /** Upper/left minimal point of a normalized rectangle */ public static final int NORMALIZED_RECTANGLE_MIN = -1000; @@ -164,19 +165,23 @@ public class ParameterUtils { *

If the score is out of range of {@value Face#SCORE_MIN}, {@value Face#SCORE_MAX}, * the score is clipped first and a warning is printed to logcat.

* + *

If the id is negative, the id is changed to 0 and a warning is printed to + * logcat.

+ * *

All other parameters are passed-through as-is.

* * @return a new face with the optional features set */ public Face toFace( int id, Point leftEyePosition, Point rightEyePosition, Point mouthPosition) { + int idSafe = clipLower(id, /*lo*/0, rect, "id"); int score = clip(weight, Face.SCORE_MIN, Face.SCORE_MAX, rect, "score"); - return new Face(rect, score, id, leftEyePosition, rightEyePosition, mouthPosition); + return new Face(rect, score, idSafe, leftEyePosition, rightEyePosition, mouthPosition); } /** @@ -861,6 +866,61 @@ public class ParameterUtils { /*usePreviewCrop*/true); } + /** + * Convert an api1 face into an active-array based api2 face. + * + *

Out-of-ranges scores and ids will be clipped to be within range (with a warning).

+ * + * @param face a non-{@code null} api1 face + * @param activeArraySize active array size of the sensor (e.g. max jpeg size) + * @param zoomData the calculated zoom data corresponding to this request + * + * @return a non-{@code null} api2 face + * + * @throws NullPointerException if the {@code face} was {@code null} + */ + public static Face convertFaceFromLegacy(Camera.Face face, Rect activeArray, + ZoomData zoomData) { + checkNotNull(face, "face must not be null"); + + Face api2Face; + + Camera.Area fakeArea = new Camera.Area(face.rect, /*weight*/1); + + WeightedRectangle faceRect = + convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, fakeArea); + + Point leftEye = face.leftEye, rightEye = face.rightEye, mouth = face.mouth; + if (leftEye != null && rightEye != null && mouth != null) { + leftEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData, + leftEye, /*usePreviewCrop*/true); + rightEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData, + leftEye, /*usePreviewCrop*/true); + mouth = convertCameraPointToActiveArrayPoint(activeArray, zoomData, + leftEye, /*usePreviewCrop*/true); + + api2Face = faceRect.toFace(face.id, leftEye, rightEye, mouth); + } else { + api2Face = faceRect.toFace(); + } + + return api2Face; + } + + private static Point convertCameraPointToActiveArrayPoint( + Rect activeArray, ZoomData zoomData, Point point, boolean usePreviewCrop) { + Rect pointedRect = new Rect(point.x, point.y, point.x, point.y); + Camera.Area pointedArea = new Area(pointedRect, /*weight*/1); + + WeightedRectangle adjustedRect = + convertCameraAreaToActiveArrayRectangle(activeArray, + zoomData, pointedArea, usePreviewCrop); + + Point transformedPoint = new Point(adjustedRect.rect.left, adjustedRect.rect.top); + + return transformedPoint; + } + private static WeightedRectangle convertCameraAreaToActiveArrayRectangle( Rect activeArray, ZoomData zoomData, Camera.Area area, boolean usePreviewCrop) { Rect previewCrop = 
zoomData.previewCrop; diff --git a/core/java/android/hardware/camera2/legacy/RequestThreadManager.java b/core/java/android/hardware/camera2/legacy/RequestThreadManager.java index c556c3286ae89..2533a28be50db 100644 --- a/core/java/android/hardware/camera2/legacy/RequestThreadManager.java +++ b/core/java/android/hardware/camera2/legacy/RequestThreadManager.java @@ -39,7 +39,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.TimeUnit; import static com.android.internal.util.Preconditions.*; @@ -55,18 +54,23 @@ import static com.android.internal.util.Preconditions.*; * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations. *

*/ +@SuppressWarnings("deprecation") public class RequestThreadManager { private final String TAG; private final int mCameraId; private final RequestHandlerThread mRequestThread; private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG); + // For slightly more spammy messages that will get repeated every frame + private static final boolean VERBOSE = + Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.VERBOSE); private final Camera mCamera; private final CameraCharacteristics mCharacteristics; private final CameraDeviceState mDeviceState; private final CaptureCollector mCaptureCollector; private final LegacyFocusStateMapper mFocusStateMapper; + private final LegacyFaceDetectMapper mFaceDetectMapper; private static final int MSG_CONFIGURE_OUTPUTS = 1; private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2; @@ -219,6 +223,9 @@ public class RequestThreadManager { }; private void stopPreview() { + if (VERBOSE) { + Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning); + } if (mPreviewRunning) { mCamera.stopPreview(); mPreviewRunning = false; @@ -226,14 +233,18 @@ public class RequestThreadManager { } private void startPreview() { + if (VERBOSE) { + Log.v(TAG, "startPreview - preview running? " + mPreviewRunning); + } if (!mPreviewRunning) { + // XX: CameraClient::startPreview is not getting called after a stop mCamera.startPreview(); mPreviewRunning = true; } } - private void doJpegCapture(RequestHolder request) throws IOException { - if (DEBUG) Log.d(TAG, "doJpegCapture"); + private void doJpegCapturePrepare(RequestHolder request) throws IOException { + if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? 
" + mPreviewRunning); if (!mPreviewRunning) { if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface"); @@ -242,11 +253,20 @@ public class RequestThreadManager { mCamera.setPreviewTexture(mDummyTexture); startPreview(); } + } + + private void doJpegCapture(RequestHolder request) { + if (DEBUG) Log.d(TAG, "doJpegCapturePrepare"); + mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback); mPreviewRunning = false; } private void doPreviewCapture(RequestHolder request) throws IOException { + if (VERBOSE) { + Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning); + } + if (mPreviewRunning) { return; // Already running } @@ -264,7 +284,20 @@ public class RequestThreadManager { } private void configureOutputs(Collection outputs) throws IOException { + if (DEBUG) { + String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces"); + + Log.d(TAG, "configureOutputs with " + outputsStr); + } + stopPreview(); + /* + * Try to release the previous preview's surface texture earlier if we end up + * using a different one; this also reduces the likelihood of getting into a deadlock + * when disconnecting from the old previous texture at a later time. 
+ */ + mCamera.setPreviewTexture(/*surfaceTexture*/null); + if (mGLThreadManager != null) { mGLThreadManager.waitUntilStarted(); mGLThreadManager.ignoreNewFrames(); @@ -305,7 +338,6 @@ public class RequestThreadManager { } mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); - mParams.setRecordingHint(true); if (mPreviewOutputs.size() > 0) { List outputSizes = new ArrayList<>(outputs.size()); @@ -613,10 +645,6 @@ public class RequestThreadManager { } } - // Unconditionally process AF triggers, since they're non-idempotent - // - must be done after setting the most-up-to-date AF mode - mFocusStateMapper.processRequestTriggers(request, mParams); - try { boolean success = mCaptureCollector.queueRequest(holder, mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS); @@ -624,6 +652,8 @@ public class RequestThreadManager { if (!success) { Log.e(TAG, "Timed out while queueing capture request."); } + // Starting the preview needs to happen before enabling + // face detection or auto focus if (holder.hasPreviewTargets()) { doPreviewCapture(holder); } @@ -635,12 +665,33 @@ public class RequestThreadManager { Log.e(TAG, "Timed out waiting for prior requests to complete."); } mReceivedJpeg.close(); + doJpegCapturePrepare(holder); + if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) { + // TODO: report error to CameraDevice + Log.e(TAG, "Hit timeout for jpeg callback!"); + } + } + + /* + * Do all the actions that require a preview to have been started + */ + + // Toggle face detection on/off + // - do this before AF to give AF a chance to use faces + mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams); + + // Unconditionally process AF triggers, since they're non-idempotent + // - must be done after setting the most-up-to-date AF mode + mFocusStateMapper.processRequestTriggers(request, mParams); + + if (holder.hasJpegTargets()) { doJpegCapture(holder); if 
(!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) { // TODO: report error to CameraDevice Log.e(TAG, "Hit timeout for jpeg callback!"); } } + } catch (IOException e) { // TODO: report error to CameraDevice throw new IOError(e); @@ -677,6 +728,8 @@ public class RequestThreadManager { mLastRequest, timestampMutable.value); // Update AF state mFocusStateMapper.mapResultTriggers(result); + // Update detected faces list + mFaceDetectMapper.mapResultFaces(result, mLastRequest); mDeviceState.setCaptureResult(holder, result); } @@ -731,6 +784,7 @@ public class RequestThreadManager { TAG = name; mDeviceState = checkNotNull(deviceState, "deviceState must not be null"); mFocusStateMapper = new LegacyFocusStateMapper(mCamera); + mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics); mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState); mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb); }