Merge "camera2: (LEGACY) Add face detection support and vstab modes" into lmp-dev

This commit is contained in:
Igor Murashkin
2014-08-07 20:51:25 +00:00
committed by Android (Google) Code Review
8 changed files with 502 additions and 20 deletions

View File

@@ -67,6 +67,7 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
* Implementation of camera metadata marshal/unmarshal across Binder to
@@ -227,6 +228,7 @@ public class CameraMetadataNative implements Parcelable {
private static final String CELLID_PROCESS = "CELLID";
private static final String GPS_PROCESS = "GPS";
private static final int FACE_LANDMARK_SIZE = 6;
private static String translateLocationProviderToProcess(final String provider) {
if (provider == null) {
@@ -347,7 +349,7 @@ public class CameraMetadataNative implements Parcelable {
// Check if key has been overridden to use a wrapper class on the java side.
GetCommand g = sGetCommandMap.get(key);
if (g != null) {
return (T) g.getValue(this, key);
return g.getValue(this, key);
}
return getBase(key);
}
@@ -587,9 +589,71 @@ public class CameraMetadataNative implements Parcelable {
return availableFormats;
}
private Face[] getFaces() {
final int FACE_LANDMARK_SIZE = 6;
/**
 * Store the list of detected faces into the face-related result metadata keys.
 *
 * <p>Null entries in {@code faces} are skipped (with a warning). Ids and landmarks
 * are only written when every valid face has a supported id (FULL mode); in SIMPLE
 * mode the id/landmark keys are set to {@code null}.</p>
 *
 * @param faces the faces to store, possibly containing {@code null} entries
 * @return {@code false} if {@code faces} was {@code null}, {@code true} otherwise
 */
private boolean setFaces(Face[] faces) {
    // Nothing to write when the caller hands us no array at all.
    if (faces == null) {
        return false;
    }

    // First pass: count usable faces and decide between FULL and SIMPLE mode.
    // FULL requires every valid face to carry a supported id.
    int validCount = 0;
    boolean haveFullData = true;
    for (Face face : faces) {
        if (face == null) {
            Log.w(TAG, "setFaces - null face detected, skipping");
        } else {
            validCount++;
            if (face.getId() == Face.ID_UNSUPPORTED) {
                haveFullData = false;
            }
        }
    }

    Rect[] rectangles = new Rect[validCount];
    byte[] scores = new byte[validCount];
    // Ids/landmarks stay null in SIMPLE mode; the keys are still set below.
    int[] ids = haveFullData ? new int[validCount] : null;
    int[] landmarks = haveFullData ? new int[validCount * FACE_LANDMARK_SIZE] : null;

    // Second pass: flatten the face data into the parallel arrays.
    int index = 0;
    for (Face face : faces) {
        if (face == null) {
            continue;
        }

        rectangles[index] = face.getBounds();
        scores[index] = (byte) face.getScore();

        if (haveFullData) {
            ids[index] = face.getId();
            // Landmarks are packed as (leftEye.x, leftEye.y, rightEye.x,
            // rightEye.y, mouth.x, mouth.y) per face.
            int base = index * FACE_LANDMARK_SIZE;
            landmarks[base] = face.getLeftEyePosition().x;
            landmarks[base + 1] = face.getLeftEyePosition().y;
            landmarks[base + 2] = face.getRightEyePosition().x;
            landmarks[base + 3] = face.getRightEyePosition().y;
            landmarks[base + 4] = face.getMouthPosition().x;
            landmarks[base + 5] = face.getMouthPosition().y;
        }

        index++;
    }

    set(CaptureResult.STATISTICS_FACE_RECTANGLES, rectangles);
    set(CaptureResult.STATISTICS_FACE_IDS, ids);
    set(CaptureResult.STATISTICS_FACE_LANDMARKS, landmarks);
    set(CaptureResult.STATISTICS_FACE_SCORES, scores);

    return true;
}
private Face[] getFaces() {
Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
if (faceDetectMode == null) {
Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
@@ -653,9 +717,12 @@ public class CameraMetadataNative implements Parcelable {
if (faceScores[i] <= Face.SCORE_MAX &&
faceScores[i] >= Face.SCORE_MIN &&
faceIds[i] >= 0) {
Point leftEye = new Point(faceLandmarks[i*6], faceLandmarks[i*6+1]);
Point rightEye = new Point(faceLandmarks[i*6+2], faceLandmarks[i*6+3]);
Point mouth = new Point(faceLandmarks[i*6+4], faceLandmarks[i*6+5]);
Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE],
faceLandmarks[i*FACE_LANDMARK_SIZE+1]);
Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2],
faceLandmarks[i*FACE_LANDMARK_SIZE+3]);
Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4],
faceLandmarks[i*FACE_LANDMARK_SIZE+5]);
Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
leftEye, rightEye, mouth);
faceList.add(face);
@@ -865,6 +932,13 @@ public class CameraMetadataNative implements Parcelable {
metadata.setFaceRectangles((Rect[]) value);
}
});
sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
new SetCommand() {
@Override
public <T> void setValue(CameraMetadataNative metadata, T value) {
metadata.setFaces((Face[])value);
}
});
sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
@Override
public <T> void setValue(CameraMetadataNative metadata, T value) {

View File

@@ -0,0 +1,231 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.hardware.camera2.legacy;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.FaceDetectionListener;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.legacy.ParameterUtils.ZoomData;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.utils.ListUtils;
import android.hardware.camera2.utils.ParamsUtils;
import android.util.Log;
import android.util.Size;
import com.android.internal.util.ArrayUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static android.hardware.camera2.CaptureRequest.*;
import static com.android.internal.util.Preconditions.*;
/**
* Map legacy face detect callbacks into face detection results.
*/
@SuppressWarnings("deprecation")
public class LegacyFaceDetectMapper {
    private static final String TAG = "LegacyFaceDetectMapper";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);

    /** Underlying camera1 device whose face-detect callbacks are being adapted. */
    private final Camera mCamera;
    /** True iff the static metadata lists SIMPLE in availableFaceDetectModes. */
    private final boolean mFaceDetectSupported;
    /** Whether face detection is currently enabled on the camera1 device. Guarded by mLock. */
    private boolean mFaceDetectEnabled = false;
    private final Object mLock = new Object();

    /** Most recent faces delivered by the camera1 callback. Guarded by mLock. */
    private Camera.Face[] mFaces;
    /** Faces reported by the previous call to mapResultFaces. Guarded by mLock. */
    private Camera.Face[] mFacesPrev;

    /**
     * Instantiate a new face detect mapper.
     *
     * @param camera a non-{@code null} camera1 device
     * @param characteristics a non-{@code null} camera characteristics for that camera1
     *
     * @throws NullPointerException if any of the args were {@code null}
     */
    public LegacyFaceDetectMapper(Camera camera, CameraCharacteristics characteristics) {
        mCamera = checkNotNull(camera, "camera must not be null");
        checkNotNull(characteristics, "characteristics must not be null");

        // Face detection is usable iff SIMPLE mode is advertised statically.
        mFaceDetectSupported = ArrayUtils.contains(
                characteristics.get(
                        CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES),
                STATISTICS_FACE_DETECT_MODE_SIMPLE);

        if (!mFaceDetectSupported) {
            // No point installing a listener the device will never invoke.
            return;
        }

        mCamera.setFaceDetectionListener(new FaceDetectionListener() {
            @Override
            public void onFaceDetection(Camera.Face[] faces, Camera camera) {
                int lengthFaces = faces == null ? 0 : faces.length;
                synchronized (mLock) {
                    if (mFaceDetectEnabled) {
                        mFaces = faces;
                    } else if (lengthFaces > 0) {
                        // stopFaceDetectMode could race against the requests, print a debug log
                        // (fixed: the two concatenated literals previously lacked a
                        // separating space, logging "sinceface detection")
                        Log.d(TAG,
                                "onFaceDetection - Ignored some incoming faces since " +
                                "face detection was disabled");
                    }
                }

                if (VERBOSE) {
                    Log.v(TAG, "onFaceDetection - read " + lengthFaces + " faces");
                }
            }
        });
    }

    /**
     * Process the face detect mode from the capture request into an api1 face detect toggle.
     *
     * <p>This method should be called after the parameters are {@link LegacyRequestMapper mapped}
     * with the request.</p>
     *
     * <p>Callbacks are processed in the background, and the next call to {@link #mapResultTriggers}
     * will have the latest faces detected as reflected by the camera1 callbacks.</p>
     *
     * <p>None of the arguments will be mutated.</p>
     *
     * @param captureRequest a non-{@code null} request
     * @param parameters a non-{@code null} parameters corresponding to this request (read-only)
     */
    public void processFaceDetectMode(CaptureRequest captureRequest,
            Camera.Parameters parameters) {
        checkNotNull(captureRequest, "captureRequest must not be null");

        /*
         * statistics.faceDetectMode
         */
        int fdMode = ParamsUtils.getOrDefault(captureRequest, STATISTICS_FACE_DETECT_MODE,
                STATISTICS_FACE_DETECT_MODE_OFF);

        if (fdMode != STATISTICS_FACE_DETECT_MODE_OFF && !mFaceDetectSupported) {
            Log.w(TAG,
                    "processFaceDetectMode - Ignoring statistics.faceDetectMode; " +
                    "face detection is not available");
            return;
        }

        // Print some warnings out in case the values were wrong
        switch (fdMode) {
            case STATISTICS_FACE_DETECT_MODE_OFF:
            case STATISTICS_FACE_DETECT_MODE_SIMPLE:
                break;
            case STATISTICS_FACE_DETECT_MODE_FULL:
                // api1 has no way to request landmarks, so FULL silently degrades
                Log.w(TAG,
                        "processFaceDetectMode - statistics.faceDetectMode == FULL unsupported, " +
                        "downgrading to SIMPLE");
                break;
            default:
                Log.w(TAG, "processFaceDetectMode - ignoring unknown statistics.faceDetectMode = "
                        + fdMode);
                return;
        }

        boolean enableFaceDetect = fdMode != STATISTICS_FACE_DETECT_MODE_OFF;
        synchronized (mLock) {
            // Enable/disable face detection if it's changed since last time
            if (enableFaceDetect != mFaceDetectEnabled) {
                if (enableFaceDetect) {
                    mCamera.startFaceDetection();

                    if (VERBOSE) {
                        Log.v(TAG, "processFaceDetectMode - start face detection");
                    }
                } else {
                    mCamera.stopFaceDetection();

                    if (VERBOSE) {
                        Log.v(TAG, "processFaceDetectMode - stop face detection");
                    }

                    // Drop the stale face list so it is not reported again.
                    mFaces = null;
                }

                mFaceDetectEnabled = enableFaceDetect;
            }
        }
    }

    /**
     * Update the {@code result} camera metadata map with the new value for the
     * {@code statistics.faces} and {@code statistics.faceDetectMode}.
     *
     * <p>Face detect callbacks are processed in the background, and each call to
     * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
     *
     * @param result a non-{@code null} result
     * @param legacyRequest a non-{@code null} request (read-only)
     */
    public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
        checkNotNull(result, "result must not be null");
        checkNotNull(legacyRequest, "legacyRequest must not be null");

        Camera.Face[] faces, previousFaces;
        int fdMode;
        synchronized (mLock) {
            fdMode = mFaceDetectEnabled ?
                    STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;

            if (mFaceDetectEnabled) {
                faces = mFaces;
            } else {
                faces = null;
            }

            previousFaces = mFacesPrev;
            mFacesPrev = faces;
        }

        CameraCharacteristics characteristics = legacyRequest.characteristics;
        CaptureRequest request = legacyRequest.captureRequest;
        Size previewSize = legacyRequest.previewSize;
        Camera.Parameters params = legacyRequest.parameters;

        // Face rects arrive in camera1 preview coordinates; compute the zoom/crop
        // data needed to map them back into active-array coordinates.
        Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray,
                request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);

        List<Face> convertedFaces = new ArrayList<>();
        if (faces != null) {
            for (Camera.Face face : faces) {
                if (face != null) {
                    convertedFaces.add(
                            ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
                } else {
                    Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
                }
            }
        }

        if (VERBOSE && previousFaces != faces) { // Log only in verbose and IF the faces changed
            Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
        }

        result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
        result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
    }
}

View File

@@ -247,7 +247,8 @@ public class LegacyFocusStateMapper {
// No action necessary. The callbacks will handle transitions.
break;
default:
Log.w(TAG, "mapTriggers - ignoring unknown control.afTrigger = " + afTrigger);
Log.w(TAG, "processRequestTriggers - ignoring unknown control.afTrigger = "
+ afTrigger);
}
}

View File

@@ -203,6 +203,11 @@ public class LegacyMetadataMapper {
*/
mapSensor(m, p);
/*
* statistics.*
*/
mapStatistics(m, p);
/*
* sync.*
*/
@@ -486,6 +491,18 @@ public class LegacyMetadataMapper {
}
private static void mapControlOther(CameraMetadataNative m, Camera.Parameters p) {
/*
* android.control.availableVideoStabilizationModes
*/
{
int stabModes[] = p.isVideoStabilizationSupported() ?
new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF,
CONTROL_VIDEO_STABILIZATION_MODE_ON } :
new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF };
m.set(CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, stabModes);
}
/*
* android.control.maxRegions
*/
@@ -742,6 +759,31 @@ public class LegacyMetadataMapper {
m.set(SENSOR_INFO_PIXEL_ARRAY_SIZE, largestJpegSize);
}
/**
 * Map the api1 parameters into the api2 {@code statistics.info.*} static metadata.
 *
 * @param m the metadata map to fill in
 * @param p the camera1 parameters to read the face-detect capability from
 */
private static void mapStatistics(CameraMetadataNative m, Parameters p) {
    /*
     * statistics.info.availableFaceDetectModes
     */
    // OFF is always listed; SIMPLE is added only when the device can detect
    // faces at all. FULL is never listed, since api1 provides no static way
    // to query landmark support.
    boolean hasFaceDetection = p.getMaxNumDetectedFaces() > 0;
    int[] fdModes = hasFaceDetection
            ? new int[] {
                    STATISTICS_FACE_DETECT_MODE_OFF,
                    STATISTICS_FACE_DETECT_MODE_SIMPLE
              }
            : new int[] { STATISTICS_FACE_DETECT_MODE_OFF };
    m.set(STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, fdModes);

    /*
     * statistics.info.maxFaceCount
     */
    m.set(STATISTICS_INFO_MAX_FACE_COUNT, p.getMaxNumDetectedFaces());
}
private static void mapSync(CameraMetadataNative m, Parameters p) {
/*
* sync.maxLatency

View File

@@ -150,10 +150,8 @@ public class LegacyRequestMapper {
if (supported) {
params.setPreviewFpsRange(legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
params.setRecordingHint(false);
} else {
Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
params.setRecordingHint(true);
}
}
@@ -248,6 +246,18 @@ public class LegacyRequestMapper {
// TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
}
// control.videoStabilizationMode
{
Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE,
/*defaultValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF,
params.isVideoStabilizationSupported(),
/*allowedValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF);
if (stabMode != null) {
params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
}
}
// lens.focusDistance
{
boolean infinityFocusSupported =

View File

@@ -35,6 +35,9 @@ import java.util.ArrayList;
import java.util.List;
import static com.android.internal.util.Preconditions.*;
import static android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF;
import static android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
import static android.hardware.camera2.CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE;
import static android.hardware.camera2.CaptureResult.*;
/**
@@ -142,7 +145,6 @@ public class LegacyResultMapper {
*/
mapAwb(result, /*out*/params);
/*
* control.mode
*/
@@ -171,7 +173,6 @@ public class LegacyResultMapper {
}
}
/*
* control.effectMode
*/
@@ -187,6 +188,15 @@ public class LegacyResultMapper {
}
}
// control.videoStabilizationMode
{
int stabMode =
(params.isVideoStabilizationSupported() && params.getVideoStabilization()) ?
CONTROL_VIDEO_STABILIZATION_MODE_ON :
CONTROL_VIDEO_STABILIZATION_MODE_OFF;
result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
}
/*
* flash
*/

View File

@@ -43,6 +43,7 @@ import static com.android.internal.util.Preconditions.*;
/**
* Various utilities for dealing with camera API1 parameters.
*/
@SuppressWarnings("deprecation")
public class ParameterUtils {
/** Upper/left minimal point of a normalized rectangle */
public static final int NORMALIZED_RECTANGLE_MIN = -1000;
@@ -164,19 +165,23 @@ public class ParameterUtils {
* <p>If the score is out of range of {@value Face#SCORE_MIN}, {@value Face#SCORE_MAX},
* the score is clipped first and a warning is printed to logcat.</p>
*
* <p>If the id is negative, the id is changed to 0 and a warning is printed to
* logcat.</p>
*
* <p>All other parameters are passed-through as-is.</p>
*
* @return a new face with the optional features set
*/
/**
 * Convert this weighted rectangle into an api2 face with the given optional features.
 *
 * <p>The score is clipped into [{@code Face.SCORE_MIN}, {@code Face.SCORE_MAX}] and a
 * negative id is clamped to 0; both emit a logcat warning.</p>
 *
 * @return a new face built from this rectangle's bounds and weight
 */
public Face toFace(
        int id, Point leftEyePosition, Point rightEyePosition, Point mouthPosition) {
    // Negative ids are invalid for api2 faces; clamp to 0 with a warning.
    int idSafe = clipLower(id, /*lo*/0, rect, "id");
    int score = clip(weight,
            Face.SCORE_MIN,
            Face.SCORE_MAX,
            rect,
            "score");

    // Fixed: return the clamped idSafe rather than the raw id (the unclamped
    // return statement left over from the old version is removed — two
    // consecutive returns also made the second one unreachable).
    return new Face(rect, score, idSafe, leftEyePosition, rightEyePosition, mouthPosition);
}
/**
@@ -861,6 +866,61 @@ public class ParameterUtils {
/*usePreviewCrop*/true);
}
/**
* Convert an api1 face into an active-array based api2 face.
*
* <p>Out-of-ranges scores and ids will be clipped to be within range (with a warning).</p>
*
* @param face a non-{@code null} api1 face
* @param activeArraySize active array size of the sensor (e.g. max jpeg size)
* @param zoomData the calculated zoom data corresponding to this request
*
* @return a non-{@code null} api2 face
*
* @throws NullPointerException if the {@code face} was {@code null}
*/
public static Face convertFaceFromLegacy(Camera.Face face, Rect activeArray,
        ZoomData zoomData) {
    checkNotNull(face, "face must not be null");

    Face api2Face;

    // Reuse the area->rectangle conversion by wrapping the face rect into a
    // dummy weighted area (the weight itself is not meaningful here).
    Camera.Area fakeArea = new Camera.Area(face.rect, /*weight*/1);

    WeightedRectangle faceRect =
            convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, fakeArea);

    Point leftEye = face.leftEye, rightEye = face.rightEye, mouth = face.mouth;
    if (leftEye != null && rightEye != null && mouth != null) {
        // Bug fix: each landmark must be converted from its OWN camera1
        // coordinate. Previously the (already-converted) left eye point was
        // passed for the right eye and mouth as well, collapsing all three
        // landmarks onto the left-eye position.
        leftEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                leftEye, /*usePreviewCrop*/true);
        rightEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                rightEye, /*usePreviewCrop*/true);
        mouth = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                mouth, /*usePreviewCrop*/true);

        api2Face = faceRect.toFace(face.id, leftEye, rightEye, mouth);
    } else {
        // Landmarks unavailable (SIMPLE-style face): build a rect+score-only face.
        api2Face = faceRect.toFace();
    }

    return api2Face;
}
/**
 * Convert a single camera1 coordinate point into active-array coordinates.
 *
 * <p>The point is modeled as a degenerate (zero-area) rectangle so the existing
 * area-conversion helper can perform the coordinate transform; the converted
 * rectangle's top-left corner is the transformed point.</p>
 */
private static Point convertCameraPointToActiveArrayPoint(
        Rect activeArray, ZoomData zoomData, Point point, boolean usePreviewCrop) {
    Rect degenerateRect = new Rect(point.x, point.y, point.x, point.y);
    WeightedRectangle converted = convertCameraAreaToActiveArrayRectangle(
            activeArray,
            zoomData,
            new Area(degenerateRect, /*weight*/1),
            usePreviewCrop);
    return new Point(converted.rect.left, converted.rect.top);
}
private static WeightedRectangle convertCameraAreaToActiveArrayRectangle(
Rect activeArray, ZoomData zoomData, Camera.Area area, boolean usePreviewCrop) {
Rect previewCrop = zoomData.previewCrop;

View File

@@ -39,7 +39,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;
import static com.android.internal.util.Preconditions.*;
@@ -55,18 +54,23 @@ import static com.android.internal.util.Preconditions.*;
* - An {@link CameraDeviceState} state machine that manages the callbacks for various operations.
* </p>
*/
@SuppressWarnings("deprecation")
public class RequestThreadManager {
private final String TAG;
private final int mCameraId;
private final RequestHandlerThread mRequestThread;
private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
// For slightly more spammy messages that will get repeated every frame
private static final boolean VERBOSE =
Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.VERBOSE);
private final Camera mCamera;
private final CameraCharacteristics mCharacteristics;
private final CameraDeviceState mDeviceState;
private final CaptureCollector mCaptureCollector;
private final LegacyFocusStateMapper mFocusStateMapper;
private final LegacyFaceDetectMapper mFaceDetectMapper;
private static final int MSG_CONFIGURE_OUTPUTS = 1;
private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
@@ -219,6 +223,9 @@ public class RequestThreadManager {
};
private void stopPreview() {
if (VERBOSE) {
Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning);
}
if (mPreviewRunning) {
mCamera.stopPreview();
mPreviewRunning = false;
@@ -226,14 +233,18 @@ public class RequestThreadManager {
}
/**
 * Start the camera1 preview if it is not already running, tracking the
 * running state in {@code mPreviewRunning}.
 */
private void startPreview() {
    if (VERBOSE) {
        Log.v(TAG, "startPreview - preview running? " + mPreviewRunning);
    }

    if (mPreviewRunning) {
        // Already running; calling startPreview again is unnecessary.
        return;
    }

    // XX: CameraClient::startPreview is not getting called after a stop
    mCamera.startPreview();
    mPreviewRunning = true;
}
private void doJpegCapture(RequestHolder request) throws IOException {
if (DEBUG) Log.d(TAG, "doJpegCapture");
private void doJpegCapturePrepare(RequestHolder request) throws IOException {
if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning);
if (!mPreviewRunning) {
if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");
@@ -242,11 +253,20 @@ public class RequestThreadManager {
mCamera.setPreviewTexture(mDummyTexture);
startPreview();
}
}
/**
 * Fire the still capture on the camera1 device.
 *
 * <p>{@link #doJpegCapturePrepare} must run first so that a preview is active;
 * api1 {@code takePicture} stops the preview when the picture is taken, so the
 * running flag is cleared here.</p>
 *
 * @param request the request holder that triggered this capture
 */
private void doJpegCapture(RequestHolder request) {
    // Fixed copy-paste bug: this log previously claimed "doJpegCapturePrepare".
    if (DEBUG) Log.d(TAG, "doJpegCapture");

    mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
    mPreviewRunning = false;
}
private void doPreviewCapture(RequestHolder request) throws IOException {
if (VERBOSE) {
Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning);
}
if (mPreviewRunning) {
return; // Already running
}
@@ -264,7 +284,20 @@ public class RequestThreadManager {
}
private void configureOutputs(Collection<Surface> outputs) throws IOException {
if (DEBUG) {
String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
Log.d(TAG, "configureOutputs with " + outputsStr);
}
stopPreview();
/*
* Try to release the previous preview's surface texture earlier if we end up
* using a different one; this also reduces the likelihood of getting into a deadlock
* when disconnecting from the old previous texture at a later time.
*/
mCamera.setPreviewTexture(/*surfaceTexture*/null);
if (mGLThreadManager != null) {
mGLThreadManager.waitUntilStarted();
mGLThreadManager.ignoreNewFrames();
@@ -305,7 +338,6 @@ public class RequestThreadManager {
}
mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
mParams.setRecordingHint(true);
if (mPreviewOutputs.size() > 0) {
List<Size> outputSizes = new ArrayList<>(outputs.size());
@@ -613,10 +645,6 @@ public class RequestThreadManager {
}
}
// Unconditionally process AF triggers, since they're non-idempotent
// - must be done after setting the most-up-to-date AF mode
mFocusStateMapper.processRequestTriggers(request, mParams);
try {
boolean success = mCaptureCollector.queueRequest(holder,
mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);
@@ -624,6 +652,8 @@ public class RequestThreadManager {
if (!success) {
Log.e(TAG, "Timed out while queueing capture request.");
}
// Starting the preview needs to happen before enabling
// face detection or auto focus
if (holder.hasPreviewTargets()) {
doPreviewCapture(holder);
}
@@ -635,12 +665,33 @@ public class RequestThreadManager {
Log.e(TAG, "Timed out waiting for prior requests to complete.");
}
mReceivedJpeg.close();
doJpegCapturePrepare(holder);
if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
// TODO: report error to CameraDevice
Log.e(TAG, "Hit timeout for jpeg callback!");
}
}
/*
* Do all the actions that require a preview to have been started
*/
// Toggle face detection on/off
// - do this before AF to give AF a chance to use faces
mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);
// Unconditionally process AF triggers, since they're non-idempotent
// - must be done after setting the most-up-to-date AF mode
mFocusStateMapper.processRequestTriggers(request, mParams);
if (holder.hasJpegTargets()) {
doJpegCapture(holder);
if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
// TODO: report error to CameraDevice
Log.e(TAG, "Hit timeout for jpeg callback!");
}
}
} catch (IOException e) {
// TODO: report error to CameraDevice
throw new IOError(e);
@@ -677,6 +728,8 @@ public class RequestThreadManager {
mLastRequest, timestampMutable.value);
// Update AF state
mFocusStateMapper.mapResultTriggers(result);
// Update detected faces list
mFaceDetectMapper.mapResultFaces(result, mLastRequest);
mDeviceState.setCaptureResult(holder, result);
}
@@ -731,6 +784,7 @@ public class RequestThreadManager {
TAG = name;
mDeviceState = checkNotNull(deviceState, "deviceState must not be null");
mFocusStateMapper = new LegacyFocusStateMapper(mCamera);
mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics);
mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState);
mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
}