am b14a1ac0: Merge changes from topic 'remove-high-res' into mnc-dev

* commit 'b14a1ac057879108bd53ab54c10869f7539ecdf8':
  Camera2: Fix StreamConfigurationMap#isOutputSupportedFor for depth
  Camera2: Remove HIGH_RESOLUTION level and modify FULL requirements
This commit is contained in:
Eino-Ville Talvala
2015-06-23 06:20:02 +00:00
committed by Android Git Automerger
13 changed files with 300 additions and 189 deletions

View File

@@ -13500,7 +13500,6 @@ package android.hardware.camera2 {
field public static final int HOT_PIXEL_MODE_HIGH_QUALITY = 2; // 0x2
field public static final int HOT_PIXEL_MODE_OFF = 0; // 0x0
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_FULL = 1; // 0x1
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_HIGH_RESOLUTION = 3; // 0x3
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY = 2; // 0x2
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0; // 0x0
field public static final int LENS_FACING_BACK = 1; // 0x1
@@ -13853,6 +13852,7 @@ package android.hardware.camera2.params {
}
public final class StreamConfigurationMap {
method public android.util.Size[] getHighResolutionOutputSizes(int);
method public android.util.Range<java.lang.Integer>[] getHighSpeedVideoFpsRanges();
method public android.util.Range<java.lang.Integer>[] getHighSpeedVideoFpsRangesFor(android.util.Size);
method public android.util.Size[] getHighSpeedVideoSizes();

View File

@@ -13831,7 +13831,6 @@ package android.hardware.camera2 {
field public static final int HOT_PIXEL_MODE_HIGH_QUALITY = 2; // 0x2
field public static final int HOT_PIXEL_MODE_OFF = 0; // 0x0
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_FULL = 1; // 0x1
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_HIGH_RESOLUTION = 3; // 0x3
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY = 2; // 0x2
field public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0; // 0x0
field public static final int LENS_FACING_BACK = 1; // 0x1
@@ -14184,6 +14183,7 @@ package android.hardware.camera2.params {
}
public final class StreamConfigurationMap {
method public android.util.Size[] getHighResolutionOutputSizes(int);
method public android.util.Range<java.lang.Integer>[] getHighSpeedVideoFpsRanges();
method public android.util.Range<java.lang.Integer>[] getHighSpeedVideoFpsRangesFor(android.util.Size);
method public android.util.Size[] getHighSpeedVideoSizes();

View File

@@ -2679,9 +2679,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* <p>Camera devices will come in three flavors: LEGACY, LIMITED and FULL.</p>
* <p>A FULL device will support below capabilities:</p>
* <ul>
* <li>30fps operation at maximum resolution (== sensor resolution) is preferred, more than
* 20fps is required, for at least uncompressed YUV
* output. ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains BURST_CAPTURE)</li>
* <li>BURST_CAPTURE capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains BURST_CAPTURE)</li>
* <li>Per frame control ({@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} <code>==</code> PER_FRAME_CONTROL)</li>
* <li>Manual sensor control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR)</li>
* <li>Manual post-processing control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
@@ -2689,7 +2687,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* <li>Arbitrary cropping region ({@link CameraCharacteristics#SCALER_CROPPING_TYPE android.scaler.croppingType} <code>==</code> FREEFORM)</li>
* <li>At least 3 processed (but not stalling) format output streams
* ({@link CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_PROC android.request.maxNumOutputProc} <code>&gt;=</code> 3)</li>
* <li>The required stream configuration defined in android.scaler.availableStreamConfigurations</li>
* <li>The required stream configurations defined in android.scaler.availableStreamConfigurations</li>
* <li>The required exposure time range defined in {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</li>
* <li>The required maxFrameDuration defined in {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}</li>
* </ul>
@@ -2709,23 +2707,11 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* post-processing, arbitrary cropping regions, and has relaxed performance constraints.</p>
* <p>Each higher level supports everything the lower level supports
* in this order: FULL <code>&gt;</code> LIMITED <code>&gt;</code> LEGACY.</p>
* <p>A HIGH_RESOLUTION device is equivalent to a FULL device, except that:</p>
* <ul>
* <li>At least one output resolution of 8 megapixels or higher in uncompressed YUV is
* supported at <code>&gt;=</code> 20 fps.</li>
* <li>Maximum-size (sensor resolution) uncompressed YUV is supported at <code>&gt;=</code> 10
* fps.</li>
* <li>For devices that list the RAW capability and support either RAW10 or RAW12 output,
* maximum-resolution RAW10 or RAW12 capture will operate at least at the rate of
* maximum-resolution YUV capture, and at least one supported output resolution of
* 8 megapixels or higher in RAW10 or RAW12 is supported <code>&gt;=</code> 20 fps.</li>
* </ul>
* <p><b>Possible values:</b>
* <ul>
* <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED}</li>
* <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL}</li>
* <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}</li>
* <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_HIGH_RESOLUTION HIGH_RESOLUTION}</li>
* </ul></p>
* <p>This key is available on all devices.</p>
*
@@ -2743,7 +2729,6 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* @see #INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED
* @see #INFO_SUPPORTED_HARDWARE_LEVEL_FULL
* @see #INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
* @see #INFO_SUPPORTED_HARDWARE_LEVEL_HIGH_RESOLUTION
*/
@PublicKey
public static final Key<Integer> INFO_SUPPORTED_HARDWARE_LEVEL =

View File

@@ -531,37 +531,32 @@ public abstract class CameraMetadata<TKey> {
public static final int REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS = 5;
/**
* <p>The camera device supports capturing maximum-resolution
* images at &gt;= 20 frames per second, in at least the
* uncompressed YUV format, when post-processing settings
* are set to FAST.</p>
* <p>More specifically, this means that a size matching the
* camera device's active array size is listed as a
* supported size for the YUV_420_888 format in
* {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}, the minimum frame
* duration for that format and size is &lt;= 1/20 s, and
* the {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges} entry
* lists at least one FPS range where the minimum FPS is</p>
* <blockquote>
* <p>= 1 / minimumFrameDuration for the maximum-size
* YUV_420_888 format.</p>
* </blockquote>
* <p>In addition, the {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} field is
* guaranteed to have a value between 0 and 4, inclusive.
* {@link CameraCharacteristics#CONTROL_AE_LOCK_AVAILABLE android.control.aeLockAvailable} and
* {@link CameraCharacteristics#CONTROL_AWB_LOCK_AVAILABLE android.control.awbLockAvailable} are also guaranteed
* to be <code>true</code> so burst capture with these two locks ON
* yields consistent image output.</p>
* <p>On a camera device that reports the HIGH_RESOLUTION hardware
* level, meaning the device supports very large capture sizes,
* BURST_CAPTURE means that at least 8-megapixel images can be
* captured at <code>&gt;=</code> 20 fps, and maximum-resolution images can be
* captured at <code>&gt;=</code> 10 fps.</p>
* <p>The camera device supports capturing high-resolution images at &gt;= 20 frames per
* second, in at least the uncompressed YUV format, when post-processing settings are set
* to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
* per second. Here, 'high resolution' means at least 8 megapixels, or the maximum
* resolution of the device, whichever is smaller.</p>
* <p>More specifically, this means that a size matching the camera device's active array
* size is listed as a supported size for the {@link android.graphics.ImageFormat#YUV_420_888 } format in either {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } or {@link android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes },
* with a minimum frame duration for that format and size of either &lt;= 1/20 s, or
* &lt;= 1/10 s, respectively; and the {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges} entry
* lists at least one FPS range where the minimum FPS is &gt;= 1 / minimumFrameDuration
* for the maximum-size YUV_420_888 format. If that maximum size is listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes },
* then the list of resolutions for YUV_420_888 from {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } contains at
* least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
* s.</p>
* <p>If the device supports the {@link android.graphics.ImageFormat#RAW10 } or {@link android.graphics.ImageFormat#RAW12 } formats, then those can also be captured
* at the same rate as the maximum-size YUV_420_888 resolution.</p>
* <p>If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
* as for the YUV_420_888 format also apply to the {@link android.graphics.ImageFormat#PRIVATE } format.</p>
* <p>In addition, the {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} field is guaranteed to have a value between 0
* and 4, inclusive. {@link CameraCharacteristics#CONTROL_AE_LOCK_AVAILABLE android.control.aeLockAvailable} and {@link CameraCharacteristics#CONTROL_AWB_LOCK_AVAILABLE android.control.awbLockAvailable}
* are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
* consistent image output.</p>
*
* @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
* @see CameraCharacteristics#CONTROL_AE_LOCK_AVAILABLE
* @see CameraCharacteristics#CONTROL_AWB_LOCK_AVAILABLE
* @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
* @see CameraCharacteristics#SYNC_MAX_LATENCY
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
*/
@@ -954,13 +949,6 @@ public abstract class CameraMetadata<TKey> {
*/
public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY = 2;
/**
* <p>This camera device is capable of supporting advanced imaging applications at full rate,
* and additional high-resolution outputs at lower rates.</p>
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
*/
public static final int INFO_SUPPORTED_HARDWARE_LEVEL_HIGH_RESOLUTION = 3;
//
// Enumeration values for CameraCharacteristics#SYNC_MAX_LATENCY
//

View File

@@ -398,7 +398,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
@Override
public int hashCode() {
return HashCodeHelpers.hashCode(mSettings, mSurfaceSet, mUserTag);
return HashCodeHelpers.hashCodeGeneric(mSettings, mSurfaceSet, mUserTag);
}
public static final Parcelable.Creator<CaptureRequest> CREATOR =

View File

@@ -842,11 +842,19 @@ public class CameraMetadataNative implements Parcelable {
CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
ReprocessFormatsMap inputOutputFormatsMap = getBase(
CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP);
int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
boolean listHighResolution = false;
for (int capability : capabilities) {
if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE) {
listHighResolution = true;
break;
}
}
return new StreamConfigurationMap(
configurations, minFrameDurations, stallDurations,
depthConfigurations, depthMinFrameDurations, depthStallDurations,
highSpeedVideoConfigurations, inputOutputFormatsMap);
highSpeedVideoConfigurations, inputOutputFormatsMap,
listHighResolution);
}
private <T> Integer getMaxRegions(Key<T> key) {

View File

@@ -605,6 +605,14 @@ public class LegacyCameraDevice implements AutoCloseable {
return LegacyExceptionUtils.throwOnError(nativeDetectSurfaceType(surface));
}
/**
* Query the surface for its currently configured dataspace
*/
public static int detectSurfaceDataspace(Surface surface) throws BufferQueueAbandonedException {
checkNotNull(surface);
return LegacyExceptionUtils.throwOnError(nativeDetectSurfaceDataspace(surface));
}
static void configureSurface(Surface surface, int width, int height,
int pixelFormat) throws BufferQueueAbandonedException {
checkNotNull(surface);
@@ -702,6 +710,8 @@ public class LegacyCameraDevice implements AutoCloseable {
private static native int nativeDetectSurfaceType(Surface surface);
private static native int nativeDetectSurfaceDataspace(Surface surface);
private static native int nativeDetectSurfaceDimens(Surface surface,
/*out*/int[/*2*/] dimens);

View File

@@ -22,12 +22,13 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.utils.HashCodeHelpers;
import android.hardware.camera2.utils.SurfaceUtils;
import android.hardware.camera2.legacy.LegacyCameraDevice;
import android.hardware.camera2.legacy.LegacyMetadataMapper;
import android.hardware.camera2.legacy.LegacyExceptionUtils.BufferQueueAbandonedException;
import android.view.Surface;
import android.util.Range;
import android.util.Size;
import android.util.SparseIntArray;
import java.util.Arrays;
import java.util.HashMap;
@@ -79,7 +80,8 @@ public final class StreamConfigurationMap {
* @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
* @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
* camera device does not support high speed video recording
*
* @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
* and thus needs a separate list of slow high-resolution output sizes
* @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
* were {@code null} or any subelements were {@code null}
*
@@ -93,10 +95,12 @@ public final class StreamConfigurationMap {
StreamConfigurationDuration[] depthMinFrameDurations,
StreamConfigurationDuration[] depthStallDurations,
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
ReprocessFormatsMap inputOutputFormatsMap) {
ReprocessFormatsMap inputOutputFormatsMap,
boolean listHighResolution) {
mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
mListHighResolution = listHighResolution;
if (depthConfigurations == null) {
mDepthConfigurations = new StreamConfiguration[0];
@@ -120,15 +124,27 @@ public final class StreamConfigurationMap {
// For each format, track how many sizes there are available to configure
for (StreamConfiguration config : configurations) {
HashMap<Integer, Integer> map = config.isOutput() ? mOutputFormats : mInputFormats;
Integer count = map.get(config.getFormat());
if (count == null) {
count = 0;
int fmt = config.getFormat();
SparseIntArray map = null;
if (config.isOutput()) {
mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
long duration = 0;
if (mListHighResolution) {
for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
if (configurationDuration.getFormat() == fmt &&
configurationDuration.getWidth() == config.getSize().getWidth() &&
configurationDuration.getHeight() == config.getSize().getHeight()) {
duration = configurationDuration.getDuration();
break;
}
}
}
map = duration <= DURATION_20FPS_NS ?
mOutputFormats : mHighResOutputFormats;
} else {
map = mInputFormats;
}
map.put(config.getFormat(), count + 1);
map.put(fmt, map.get(fmt) + 1);
}
// For each depth format, track how many sizes there are available to configure
@@ -138,16 +154,11 @@ public final class StreamConfigurationMap {
continue;
}
Integer count = mDepthOutputFormats.get(config.getFormat());
if (count == null) {
count = 0;
}
mDepthOutputFormats.put(config.getFormat(), count + 1);
mDepthOutputFormats.put(config.getFormat(),
mDepthOutputFormats.get(config.getFormat()) + 1);
}
if (!mOutputFormats.containsKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
if (mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
throw new AssertionError(
"At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
}
@@ -241,7 +252,7 @@ public final class StreamConfigurationMap {
* @return a non-empty array of sizes, or {@code null} if the format was not available.
*/
public Size[] getInputSizes(final int format) {
return getPublicFormatSizes(format, /*output*/false);
return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
}
/**
@@ -274,9 +285,9 @@ public final class StreamConfigurationMap {
int internalFormat = imageFormatToInternal(format);
int dataspace = imageFormatToDataspace(format);
if (dataspace == HAL_DATASPACE_DEPTH) {
return mDepthOutputFormats.containsKey(internalFormat);
return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
} else {
return getFormatsMap(/*output*/true).containsKey(internalFormat);
return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
}
}
@@ -378,27 +389,24 @@ public final class StreamConfigurationMap {
public boolean isOutputSupportedFor(Surface surface) {
checkNotNull(surface, "surface must not be null");
Size surfaceSize;
int surfaceFormat = -1;
try {
surfaceSize = LegacyCameraDevice.getSurfaceSize(surface);
surfaceFormat = LegacyCameraDevice.detectSurfaceType(surface);
} catch(BufferQueueAbandonedException e) {
throw new IllegalArgumentException("Abandoned surface", e);
}
Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
// See if consumer is flexible.
boolean isFlexible = LegacyCameraDevice.isFlexibleConsumer(surface);
boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
// Override RGB formats to IMPLEMENTATION_DEFINED, b/9487482
if ((surfaceFormat >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
surfaceFormat <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
surfaceFormat = LegacyMetadataMapper.HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
surfaceFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
}
for (StreamConfiguration config : mConfigurations) {
StreamConfiguration[] configs =
surfaceDataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations;
for (StreamConfiguration config : configs) {
if (config.getFormat() == surfaceFormat && config.isOutput()) {
// Mathing format, either need exact size match, or a flexible consumer
// Matching format, either need exact size match, or a flexible consumer
// and a size no bigger than MAX_DIMEN_FOR_ROUNDING
if (config.getSize().equals(surfaceSize)) {
return true;
@@ -414,12 +422,12 @@ public final class StreamConfigurationMap {
/**
* Get a list of sizes compatible with {@code klass} to use as an output.
*
* <p>Since some of the supported classes may support additional formats beyond
* <p>Some of the supported classes may support additional formats beyond
* {@link ImageFormat#PRIVATE}; this function only returns
* sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
* supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
* only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
* class .</p>
* class.</p>
*
* <p>If a well-defined format such as {@code NV21} is required, use
* {@link #getOutputSizes(int)} instead.</p>
@@ -444,7 +452,7 @@ public final class StreamConfigurationMap {
}
return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
HAL_DATASPACE_UNKNOWN,/*output*/true);
HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
}
/**
@@ -453,6 +461,14 @@ public final class StreamConfigurationMap {
* <p>The {@code format} should be a supported format (one of the formats returned by
* {@link #getOutputFormats}).</p>
*
* As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
* that support the
* {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
* capability to get a list of high-resolution output sizes that cannot operate at the preferred
* 20fps rate. This means that for some supported formats, this method will return an empty
* list, if all the supported resolutions operate at below 20fps. For devices that do not
* support the BURST_CAPTURE capability, all output resolutions are listed through this method.
*
* @param format an image format from {@link ImageFormat} or {@link PixelFormat}
* @return
* an array of supported sizes,
@@ -463,7 +479,7 @@ public final class StreamConfigurationMap {
* @see #getOutputFormats
*/
public Size[] getOutputSizes(int format) {
return getPublicFormatSizes(format, /*output*/true);
return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
}
/**
@@ -615,6 +631,32 @@ public final class StreamConfigurationMap {
return sizes;
}
/**
* Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
* rate.
*
* <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
* {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
* capability. This does not include the stall duration, so for example, a JPEG or RAW16 output
* resolution with a large stall duration but a minimum frame duration that's above 20 fps will
* still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list are
* still guaranteed to operate at a rate of at least 10 fps, not including stall duration.</p>
*
* <p>For a device that does not support the BURST_CAPTURE capability, this list will be
* {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
* guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
* BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
* fps requirement.</p>
*
* @return an array of supported slower high-resolution sizes, or {@code null} if the
* BURST_CAPTURE capability is not supported
*/
public Size[] getHighResolutionOutputSizes(int format) {
if (!mListHighResolution) return null;
return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
}
/**
* Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
* for the format/size combination (in nanoseconds).
@@ -867,6 +909,7 @@ public final class StreamConfigurationMap {
return Arrays.equals(mConfigurations, other.mConfigurations) &&
Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
Arrays.equals(mStallDurations, other.mStallDurations) &&
Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
Arrays.equals(mHighSpeedVideoConfigurations,
other.mHighSpeedVideoConfigurations);
}
@@ -879,18 +922,31 @@ public final class StreamConfigurationMap {
@Override
public int hashCode() {
// XX: do we care about order?
return HashCodeHelpers.hashCode(
return HashCodeHelpers.hashCodeGeneric(
mConfigurations, mMinFrameDurations,
mStallDurations, mHighSpeedVideoConfigurations);
mStallDurations,
mDepthConfigurations, mHighSpeedVideoConfigurations);
}
// Check that the argument is supported by #getOutputFormats or #getInputFormats
private int checkArgumentFormatSupported(int format, boolean output) {
checkArgumentFormat(format);
int[] formats = output ? getOutputFormats() : getInputFormats();
for (int i = 0; i < formats.length; ++i) {
if (format == formats[i]) {
int internalFormat = imageFormatToInternal(format);
int internalDataspace = imageFormatToDataspace(format);
if (output) {
if (internalDataspace == HAL_DATASPACE_DEPTH) {
if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
return format;
}
} else {
if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
return format;
}
}
} else {
if (mInputFormats.indexOfKey(internalFormat) >= 0) {
return format;
}
}
@@ -1175,7 +1231,7 @@ public final class StreamConfigurationMap {
return formats;
}
private Size[] getPublicFormatSizes(int format, boolean output) {
private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
try {
checkArgumentFormatSupported(format, output);
} catch (IllegalArgumentException e) {
@@ -1185,36 +1241,57 @@ public final class StreamConfigurationMap {
int internalFormat = imageFormatToInternal(format);
int dataspace = imageFormatToDataspace(format);
return getInternalFormatSizes(internalFormat, dataspace, output);
return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
}
private Size[] getInternalFormatSizes(int format, int dataspace, boolean output) {
private Size[] getInternalFormatSizes(int format, int dataspace,
boolean output, boolean highRes) {
SparseIntArray formatsMap =
!output ? mInputFormats :
dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
highRes ? mHighResOutputFormats :
mOutputFormats;
HashMap<Integer, Integer> formatsMap =
(dataspace == HAL_DATASPACE_DEPTH) ? mDepthOutputFormats : getFormatsMap(output);
Integer sizesCount = formatsMap.get(format);
if (sizesCount == null) {
int sizesCount = formatsMap.get(format);
if ( ((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0) ||
(output && dataspace != HAL_DATASPACE_DEPTH && mAllOutputFormats.get(format) == 0)) {
// Only throw if this is really not supported at all
throw new IllegalArgumentException("format not available");
}
int len = sizesCount;
Size[] sizes = new Size[len];
Size[] sizes = new Size[sizesCount];
int sizeIndex = 0;
StreamConfiguration[] configurations =
(dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
for (StreamConfiguration config : configurations) {
if (config.getFormat() == format && config.isOutput() == output) {
int fmt = config.getFormat();
if (fmt == format && config.isOutput() == output) {
if (output) {
// Filter slow high-res output formats; include for
// highRes, remove for !highRes
long duration = 0;
for (int i = 0; i < mMinFrameDurations.length; i++) {
StreamConfigurationDuration d = mMinFrameDurations[i];
if (d.getFormat() == fmt &&
d.getWidth() == config.getSize().getWidth() &&
d.getHeight() == config.getSize().getHeight()) {
duration = d.getDuration();
break;
}
}
if (highRes != (duration > DURATION_20FPS_NS)) {
continue;
}
}
sizes[sizeIndex++] = config.getSize();
}
}
if (sizeIndex != len) {
if (sizeIndex != sizesCount) {
throw new AssertionError(
"Too few sizes (expected " + len + ", actual " + sizeIndex + ")");
"Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
}
return sizes;
@@ -1226,14 +1303,16 @@ public final class StreamConfigurationMap {
int i = 0;
for (int format : getFormatsMap(output).keySet()) {
SparseIntArray map = getFormatsMap(output);
for (int j = 0; j < map.size(); j++) {
int format = map.keyAt(j);
if (format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
formats[i++] = imageFormatToPublic(format);
}
}
if (output) {
for (int format : mDepthOutputFormats.keySet()) {
formats[i++] = depthFormatToPublic(format);
for (int j = 0; j < mDepthOutputFormats.size(); j++) {
formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
}
}
if (formats.length != i) {
@@ -1244,14 +1323,14 @@ public final class StreamConfigurationMap {
}
/** Get the format -> size count map for either output or input formats */
private HashMap<Integer, Integer> getFormatsMap(boolean output) {
return output ? mOutputFormats : mInputFormats;
private SparseIntArray getFormatsMap(boolean output) {
return output ? mAllOutputFormats : mInputFormats;
}
private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
// assume format is already checked, since its internal
if (!arrayContains(getInternalFormatSizes(format, dataspace, /*output*/true), size)) {
if (!isSupportedInternalConfiguration(format, dataspace, size)) {
throw new IllegalArgumentException("size was not supported");
}
@@ -1289,10 +1368,9 @@ public final class StreamConfigurationMap {
/** Count the number of publicly-visible output formats */
private int getPublicFormatCount(boolean output) {
HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
SparseIntArray formatsMap = getFormatsMap(output);
int size = formatsMap.size();
if (formatsMap.containsKey(HAL_PIXEL_FORMAT_RAW_OPAQUE)) {
if (formatsMap.indexOfKey(HAL_PIXEL_FORMAT_RAW_OPAQUE) >= 0) {
size -= 1;
}
if (output) {
@@ -1316,6 +1394,21 @@ public final class StreamConfigurationMap {
return false;
}
private boolean isSupportedInternalConfiguration(int format, int dataspace,
Size size) {
StreamConfiguration[] configurations =
(dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
for (int i = 0; i < configurations.length; i++) {
if (configurations[i].getFormat() == format &&
configurations[i].getSize().equals(size)) {
return true;
}
}
return false;
}
/**
* Return this {@link StreamConfigurationMap} as a string representation.
*
@@ -1351,6 +1444,8 @@ public final class StreamConfigurationMap {
StringBuilder sb = new StringBuilder("StreamConfiguration(");
appendOutputsString(sb);
sb.append(", ");
appendHighResOutputsString(sb);
sb.append(", ");
appendInputsString(sb);
sb.append(", ");
appendValidOutputFormatsForInputString(sb);
@@ -1381,6 +1476,27 @@ public final class StreamConfigurationMap {
sb.append(")");
}
private void appendHighResOutputsString(StringBuilder sb) {
sb.append("HighResolutionOutputs(");
int[] formats = getOutputFormats();
for (int format : formats) {
Size[] sizes = getHighResolutionOutputSizes(format);
if (sizes == null) continue;
for (Size size : sizes) {
long minFrameDuration = getOutputMinFrameDuration(format, size);
long stallDuration = getOutputStallDuration(format, size);
sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
"stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
format, minFrameDuration, stallDuration));
}
}
// Remove the pending ", "
if (sb.charAt(sb.length() - 1) == ' ') {
sb.delete(sb.length() - 2, sb.length());
}
sb.append(")");
}
private void appendInputsString(StringBuilder sb) {
sb.append("Inputs(");
int[] formats = getInputFormats();
@@ -1479,15 +1595,21 @@ public final class StreamConfigurationMap {
}
// from system/core/include/system/graphics.h
private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;
private static final int HAL_DATASPACE_UNKNOWN = 0x0;
private static final int HAL_DATASPACE_JFIF = 0x101;
private static final int HAL_DATASPACE_DEPTH = 0x1000;
private static final long DURATION_20FPS_NS = 50000000L;
/**
* @see #getDurations(int, int)
*/
@@ -1505,15 +1627,20 @@ public final class StreamConfigurationMap {
private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
private final ReprocessFormatsMap mInputOutputFormatsMap;
/** ImageFormat -> num output sizes mapping */
private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mOutputFormats =
new HashMap<Integer, Integer>();
/** ImageFormat -> num input sizes mapping */
private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mInputFormats =
new HashMap<Integer, Integer>();
/** ImageFormat -> num depth output sizes mapping */
private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mDepthOutputFormats =
new HashMap<Integer, Integer>();
private final boolean mListHighResolution;
/** internal format -> num output sizes mapping, not including slow high-res sizes, for
* non-depth dataspaces */
private final SparseIntArray mOutputFormats = new SparseIntArray();
/** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
* dataspaces */
private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
/** internal format -> num output sizes mapping for all non-depth dataspaces */
private final SparseIntArray mAllOutputFormats = new SparseIntArray();
/** internal format -> num input sizes mapping, for input reprocessing formats */
private final SparseIntArray mInputFormats = new SparseIntArray();
/** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
/** High speed video Size -> FPS range count mapping*/
private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
new HashMap<Size, Integer>();
@@ -1522,4 +1649,3 @@ public final class StreamConfigurationMap {
mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
}

View File

@@ -277,7 +277,7 @@ public final class TonemapCurve {
return mHashCode;
}
mHashCode = HashCodeHelpers.hashCode(mRed, mGreen, mBlue);
mHashCode = HashCodeHelpers.hashCodeGeneric(mRed, mGreen, mBlue);
mHashCalculated = true;
return mHashCode;

View File

@@ -30,7 +30,7 @@ public final class HashCodeHelpers {
*
* @return the numeric hash code
*/
public static int hashCode(int[] array) {
public static int hashCode(int... array) {
if (array == null) {
return 0;
}
@@ -60,7 +60,7 @@ public final class HashCodeHelpers {
*
* @return the numeric hash code
*/
public static int hashCode(float[] array) {
public static int hashCode(float... array) {
if (array == null) {
return 0;
}
@@ -83,7 +83,7 @@ public final class HashCodeHelpers {
*
* @return the numeric hash code
*/
public static <T> int hashCode(T[] array) {
public static <T> int hashCodeGeneric(T... array) {
if (array == null) {
return 0;
}
@@ -97,56 +97,4 @@ public final class HashCodeHelpers {
return h;
}
/**
 * Hash a single (possibly null) object; null hashes to 0.
 */
public static <T> int hashCode(T a) {
    if (a == null) {
        return 0;
    }
    return a.hashCode();
}
/**
 * Hash two objects by folding the second into the hash of the first.
 */
public static <T> int hashCode(T a, T b) {
    return accumulate(hashCode(a), b);
}
/**
 * Hash three objects, folding each subsequent element into the running hash.
 */
public static <T> int hashCode(T a, T b, T c) {
    return accumulate(hashCode(a, b), c);
}
/**
 * Hash four objects, folding each subsequent element into the running hash.
 */
public static <T> int hashCode(T a, T b, T c, T d) {
    return accumulate(hashCode(a, b, c), d);
}
/** Fold one more element into a running hash: (h * 31) XOR elementHash. */
private static <T> int accumulate(int h, T next) {
    int x = (next == null) ? 0 : next.hashCode();
    return ((h << 5) - h) ^ x; // (h * 31) XOR x
}
/** Hash a single int via the int-array overload. */
public static int hashCode(int x) {
    return hashCode(new int[] {x});
}
/** Hash two ints via the int-array overload. */
public static int hashCode(int x, int y) {
    return hashCode(new int[] {x, y});
}
/** Hash three ints via the int-array overload. */
public static int hashCode(int x, int y, int z) {
    return hashCode(new int[] {x, y, z});
}
/** Hash four ints via the int-array overload. */
public static int hashCode(int x, int y, int z, int w) {
    return hashCode(new int[] {x, y, z, w});
}
/** Hash five ints via the int-array overload. */
public static int hashCode(int x, int y, int z, int w, int t) {
    return hashCode(new int[] {x, y, z, w, t});
}
}

View File

@@ -79,4 +79,30 @@ public class SurfaceUtils {
throw new IllegalArgumentException("Surface was abandoned", e);
}
}
/**
 * Get the default dataspace of a Surface, as reported by its native window.
 *
 * @param surface The surface to be queried for its dataspace.
 * @return dataspace of the surface.
 *
 * @throws IllegalArgumentException if the surface is already abandoned.
 */
public static int getSurfaceDataspace(Surface surface) {
    try {
        return LegacyCameraDevice.detectSurfaceDataspace(surface);
    } catch (BufferQueueAbandonedException e) {
        // Translate the internal abandonment signal into the documented API exception,
        // preserving the cause for debugging.
        throw new IllegalArgumentException("Surface was abandoned", e);
    }
}
/**
 * Return true if the consumer is one of the consumers that can accept
 * producer overrides of the default dimensions and format.
 *
 * @param output the Surface whose consumer is to be checked.
 * @return {@code true} if the consumer accepts producer overrides.
 */
public static boolean isFlexibleConsumer(Surface output) {
    return LegacyCameraDevice.isFlexibleConsumer(output);
}
}

View File

@@ -350,7 +350,7 @@ public final class Range<T extends Comparable<? super T>> {
*/
@Override
public int hashCode() {
return HashCodeHelpers.hashCode(mLower, mUpper);
return HashCodeHelpers.hashCodeGeneric(mLower, mUpper);
}
private final T mLower;

View File

@@ -436,6 +436,23 @@ static jint LegacyCameraDevice_nativeDetectSurfaceType(JNIEnv* env, jobject thiz
return fmt;
}
// Query the native window behind 'surface' for its default dataspace
// (NATIVE_WINDOW_DEFAULT_DATASPACE).
// Returns the queried dataspace value on success, BAD_VALUE when the native
// window cannot be retrieved, or the query's error status on failure.
// NOTE(review): failure statuses share the jint return channel with valid
// dataspace values -- presumably statuses are negative so callers can
// distinguish them (same convention as nativeDetectSurfaceType); confirm.
static jint LegacyCameraDevice_nativeDetectSurfaceDataspace(JNIEnv* env, jobject thiz, jobject surface) {
ALOGV("nativeDetectSurfaceDataspace");
sp<ANativeWindow> anw;
if ((anw = getNativeWindow(env, surface)) == NULL) {
ALOGE("%s: Could not retrieve native window from surface.", __FUNCTION__);
return BAD_VALUE;
}
// 'fmt' receives the dataspace value despite the format-style name.
int32_t fmt = 0;
status_t err = anw->query(anw.get(), NATIVE_WINDOW_DEFAULT_DATASPACE, &fmt);
if(err != NO_ERROR) {
// strerror(-err) because Android status_t errors are negated errno values.
ALOGE("%s: Error while querying surface dataspace %s (%d).", __FUNCTION__, strerror(-err),
err);
return err;
}
return fmt;
}
static jint LegacyCameraDevice_nativeDetectSurfaceDimens(JNIEnv* env, jobject thiz,
jobject surface, jintArray dimens) {
ALOGV("nativeGetSurfaceDimens");
@@ -717,6 +734,9 @@ static JNINativeMethod gCameraDeviceMethods[] = {
{ "nativeDetectSurfaceType",
"(Landroid/view/Surface;)I",
(void *)LegacyCameraDevice_nativeDetectSurfaceType },
{ "nativeDetectSurfaceDataspace",
"(Landroid/view/Surface;)I",
(void *)LegacyCameraDevice_nativeDetectSurfaceDataspace },
{ "nativeDetectSurfaceDimens",
"(Landroid/view/Surface;[I)I",
(void *)LegacyCameraDevice_nativeDetectSurfaceDimens },