diff --git a/api/current.txt b/api/current.txt
index 473b809bc1694..f92f05f5db65e 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -14121,6 +14121,7 @@ package android.graphics {
ctor public ImageFormat();
method public static int getBitsPerPixel(int);
field public static final int DEPTH16 = 1144402265; // 0x44363159
+ field public static final int DEPTH_JPEG = 1768253795; // 0x69656963
field public static final int DEPTH_POINT_CLOUD = 257; // 0x101
field public static final int FLEX_RGBA_8888 = 42; // 0x2a
field public static final int FLEX_RGB_888 = 41; // 0x29
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index 105ae6815589b..d13f1af2efaaa 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -3720,6 +3720,59 @@ public final class CameraCharacteristics extends CameraMetadata The available dynamic depth dataspace stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream). These are output stream configurations for use with
+ * dataSpace DYNAMIC_DEPTH. The configurations are
+ * listed as Only devices that support depth output for at least
+ * the HAL_PIXEL_FORMAT_Y16 dense depth map along with
+ * HAL_PIXEL_FORMAT_BLOB with the same size or size with
+ * the same aspect ratio can have dynamic depth dataspace
+ * stream configuration. {@link CameraCharacteristics#DEPTH_DEPTH_IS_EXCLUSIVE android.depth.depthIsExclusive} also
+ * needs to be set to FALSE. Optional - This value may be {@code null} on some devices. This lists the minimum frame duration for each
+ * format/size combination for dynamic depth output streams. This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST. When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations). The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output. Units: (format, width, height, ns) x n Optional - This value may be {@code null} on some devices. This lists the maximum stall duration for each
+ * output format/size combination for dynamic depth streams. A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall. All dynamic depth output streams may have a nonzero stall
+ * duration. Units: (format, width, height, ns) x n Optional - This value may be {@code null} on some devices. String containing the ids of the underlying physical cameras. For a logical camera, this is concatenation of all underlying physical camera ids.
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index c527ab4c67304..7877a4d51313c 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -1119,6 +1119,8 @@ public class CameraMetadataNative implements Parcelable {
continue;
}
+ // Dynamic depth streams involve a lot of SW processing and currently cannot be
+ // recommended.
StreamConfigurationMap map = null;
switch (i) {
case RecommendedStreamConfigurationMap.USECASE_PREVIEW:
@@ -1127,28 +1129,44 @@ public class CameraMetadataNative implements Parcelable {
map = new StreamConfigurationMap(scData.streamConfigurationArray,
scData.minDurationArray, scData.stallDurationArray,
/*depthconfiguration*/ null, /*depthminduration*/ null,
- /*depthstallduration*/ null, /*highspeedvideoconfigurations*/ null,
+ /*depthstallduration*/ null,
+ /*dynamicDepthConfigurations*/ null,
+ /*dynamicDepthMinFrameDurations*/ null,
+ /*dynamicDepthStallDurations*/ null,
+ /*highspeedvideoconfigurations*/ null,
/*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
break;
case RecommendedStreamConfigurationMap.USECASE_RECORD:
map = new StreamConfigurationMap(scData.streamConfigurationArray,
scData.minDurationArray, scData.stallDurationArray,
/*depthconfiguration*/ null, /*depthminduration*/ null,
- /*depthstallduration*/ null, highSpeedVideoConfigurations,
+ /*depthstallduration*/ null,
+ /*dynamicDepthConfigurations*/ null,
+ /*dynamicDepthMinFrameDurations*/ null,
+ /*dynamicDepthStallDurations*/ null,
+ highSpeedVideoConfigurations,
/*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
break;
case RecommendedStreamConfigurationMap.USECASE_ZSL:
map = new StreamConfigurationMap(scData.streamConfigurationArray,
scData.minDurationArray, scData.stallDurationArray,
depthScData.streamConfigurationArray, depthScData.minDurationArray,
- depthScData.stallDurationArray, /*highSpeedVideoConfigurations*/ null,
+ depthScData.stallDurationArray,
+ /*dynamicDepthConfigurations*/ null,
+ /*dynamicDepthMinFrameDurations*/ null,
+ /*dynamicDepthStallDurations*/ null,
+ /*highSpeedVideoConfigurations*/ null,
inputOutputFormatsMap, listHighResolution, supportsPrivate[i]);
break;
default:
map = new StreamConfigurationMap(scData.streamConfigurationArray,
scData.minDurationArray, scData.stallDurationArray,
depthScData.streamConfigurationArray, depthScData.minDurationArray,
- depthScData.stallDurationArray, /*highSpeedVideoConfigurations*/ null,
+ depthScData.stallDurationArray,
+ /*dynamicDepthConfigurations*/ null,
+ /*dynamicDepthMinFrameDurations*/ null,
+ /*dynamicDepthStallDurations*/ null,
+ /*highSpeedVideoConfigurations*/ null,
/*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]);
}
@@ -1206,6 +1224,12 @@ public class CameraMetadataNative implements Parcelable {
CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS);
StreamConfigurationDuration[] depthStallDurations = getBase(
CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
+ StreamConfiguration[] dynamicDepthConfigurations = getBase(
+ CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+ StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase(
+ CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS);
+ StreamConfigurationDuration[] dynamicDepthStallDurations = getBase(
+ CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
ReprocessFormatsMap inputOutputFormatsMap = getBase(
@@ -1214,7 +1238,8 @@ public class CameraMetadataNative implements Parcelable {
return new StreamConfigurationMap(
configurations, minFrameDurations, stallDurations,
depthConfigurations, depthMinFrameDurations, depthStallDurations,
- highSpeedVideoConfigurations, inputOutputFormatsMap,
+ dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
+ dynamicDepthStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap,
listHighResolution);
}
diff --git a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
index dd052a8db1d9d..a22e008a65fd9 100644
--- a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
@@ -95,13 +95,17 @@ public final class StreamConfigurationMap {
StreamConfiguration[] depthConfigurations,
StreamConfigurationDuration[] depthMinFrameDurations,
StreamConfigurationDuration[] depthStallDurations,
+ StreamConfiguration[] dynamicDepthConfigurations,
+ StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
+ StreamConfigurationDuration[] dynamicDepthStallDurations,
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
ReprocessFormatsMap inputOutputFormatsMap,
boolean listHighResolution) {
this(configurations, minFrameDurations, stallDurations,
depthConfigurations, depthMinFrameDurations, depthStallDurations,
- highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution,
- /*enforceImplementationDefined*/ true);
+ dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
+ dynamicDepthStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap,
+ listHighResolution, /*enforceImplementationDefined*/ true);
}
/**
@@ -131,6 +135,9 @@ public final class StreamConfigurationMap {
StreamConfiguration[] depthConfigurations,
StreamConfigurationDuration[] depthMinFrameDurations,
StreamConfigurationDuration[] depthStallDurations,
+ StreamConfiguration[] dynamicDepthConfigurations,
+ StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
+ StreamConfigurationDuration[] dynamicDepthStallDurations,
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
ReprocessFormatsMap inputOutputFormatsMap,
boolean listHighResolution,
@@ -163,6 +170,19 @@ public final class StreamConfigurationMap {
"depthStallDurations");
}
+ if (dynamicDepthConfigurations == null) {
+ mDynamicDepthConfigurations = new StreamConfiguration[0];
+ mDynamicDepthMinFrameDurations = new StreamConfigurationDuration[0];
+ mDynamicDepthStallDurations = new StreamConfigurationDuration[0];
+ } else {
+ mDynamicDepthConfigurations = checkArrayElementsNotNull(dynamicDepthConfigurations,
+ "dynamicDepthConfigurations");
+ mDynamicDepthMinFrameDurations = checkArrayElementsNotNull(
+ dynamicDepthMinFrameDurations, "dynamicDepthMinFrameDurations");
+ mDynamicDepthStallDurations = checkArrayElementsNotNull(dynamicDepthStallDurations,
+ "dynamicDepthStallDurations");
+ }
+
if (highSpeedVideoConfigurations == null) {
mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
} else {
@@ -205,6 +225,15 @@ public final class StreamConfigurationMap {
mDepthOutputFormats.put(config.getFormat(),
mDepthOutputFormats.get(config.getFormat()) + 1);
}
+ for (StreamConfiguration config : mDynamicDepthConfigurations) {
+ if (!config.isOutput()) {
+ // Ignoring input configs
+ continue;
+ }
+
+ mDynamicDepthOutputFormats.put(config.getFormat(),
+ mDynamicDepthOutputFormats.get(config.getFormat()) + 1);
+ }
if (configurations != null && enforceImplementationDefined &&
mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
@@ -335,6 +364,8 @@ public final class StreamConfigurationMap {
int dataspace = imageFormatToDataspace(format);
if (dataspace == HAL_DATASPACE_DEPTH) {
return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
+ } else if (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
+ return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0;
} else {
return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
}
@@ -446,7 +477,9 @@ public final class StreamConfigurationMap {
boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
StreamConfiguration[] configs =
- surfaceDataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations;
+ surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
+ surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
+ mConfigurations;
for (StreamConfiguration config : configs) {
if (config.getFormat() == surfaceFormat && config.isOutput()) {
// Matching format, either need exact size match, or a flexible consumer
@@ -479,7 +512,9 @@ public final class StreamConfigurationMap {
int dataspace = imageFormatToDataspace(format);
StreamConfiguration[] configs =
- dataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations;
+ dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
+ dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
+ mConfigurations;
for (StreamConfiguration config : configs) {
if ((config.getFormat() == internalFormat) && config.isOutput() &&
config.getSize().equals(size)) {
@@ -992,6 +1027,12 @@ public final class StreamConfigurationMap {
Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
Arrays.equals(mStallDurations, other.mStallDurations) &&
Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
+ Arrays.equals(mDepthMinFrameDurations, other.mDepthMinFrameDurations) &&
+ Arrays.equals(mDepthStallDurations, other.mDepthStallDurations) &&
+ Arrays.equals(mDynamicDepthConfigurations, other.mDynamicDepthConfigurations) &&
+ Arrays.equals(mDynamicDepthMinFrameDurations,
+ other.mDynamicDepthMinFrameDurations) &&
+ Arrays.equals(mDynamicDepthStallDurations, other.mDynamicDepthStallDurations) &&
Arrays.equals(mHighSpeedVideoConfigurations,
other.mHighSpeedVideoConfigurations);
}
@@ -1005,9 +1046,10 @@ public final class StreamConfigurationMap {
public int hashCode() {
// XX: do we care about order?
return HashCodeHelpers.hashCodeGeneric(
- mConfigurations, mMinFrameDurations,
- mStallDurations,
- mDepthConfigurations, mHighSpeedVideoConfigurations);
+ mConfigurations, mMinFrameDurations, mStallDurations,
+ mDepthConfigurations, mDepthMinFrameDurations, mDepthStallDurations,
+ mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations,
+ mDynamicDepthStallDurations, mHighSpeedVideoConfigurations);
}
// Check that the argument is supported by #getOutputFormats or #getInputFormats
@@ -1022,6 +1064,10 @@ public final class StreamConfigurationMap {
if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
return format;
}
+ } else if (internalDataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
+ if (mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
+ return format;
+ }
} else {
if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
return format;
@@ -1245,6 +1291,7 @@ public final class StreamConfigurationMap {
switch (format) {
case ImageFormat.JPEG:
case ImageFormat.DEPTH_POINT_CLOUD:
+ case ImageFormat.DEPTH_JPEG:
return HAL_PIXEL_FORMAT_BLOB;
case ImageFormat.DEPTH16:
return HAL_PIXEL_FORMAT_Y16;
@@ -1264,6 +1311,7 @@ public final class StreamConfigurationMap {
* (format, width, height, input?) tuples.
JPEG compressed main image along with XMP embedded depth metadata + * following ISO 16684-1:2011(E).
+ */ + public static final int DEPTH_JPEG = 0x69656963; + /** *Multi-plane Android YUV 420 format
* @@ -787,6 +795,7 @@ public class ImageFormat { case PRIVATE: case RAW_DEPTH: case Y8: + case DEPTH_JPEG: return true; } diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java index 8ec0e353ac730..19fca88f52acc 100644 --- a/media/java/android/media/ImageReader.java +++ b/media/java/android/media/ImageReader.java @@ -834,6 +834,7 @@ public class ImageReader implements AutoCloseable { case ImageFormat.JPEG: case ImageFormat.DEPTH_POINT_CLOUD: case ImageFormat.RAW_PRIVATE: + case ImageFormat.DEPTH_JPEG: width = ImageReader.this.getWidth(); break; default: @@ -850,6 +851,7 @@ public class ImageReader implements AutoCloseable { case ImageFormat.JPEG: case ImageFormat.DEPTH_POINT_CLOUD: case ImageFormat.RAW_PRIVATE: + case ImageFormat.DEPTH_JPEG: height = ImageReader.this.getHeight(); break; default: diff --git a/media/java/android/media/ImageUtils.java b/media/java/android/media/ImageUtils.java index 2a0e04ebf0515..b77a884d34126 100644 --- a/media/java/android/media/ImageUtils.java +++ b/media/java/android/media/ImageUtils.java @@ -63,6 +63,7 @@ class ImageUtils { case ImageFormat.DEPTH16: case ImageFormat.DEPTH_POINT_CLOUD: case ImageFormat.RAW_DEPTH: + case ImageFormat.DEPTH_JPEG: return 1; case ImageFormat.PRIVATE: return 0; @@ -192,6 +193,7 @@ class ImageUtils { // 10x compression from RGB_888 case ImageFormat.JPEG: case ImageFormat.DEPTH_POINT_CLOUD: + case ImageFormat.DEPTH_JPEG: estimatedBytePerPixel = 0.3; break; case ImageFormat.Y8: diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestUtils.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestUtils.java index 5ab50925a2680..0340cec7432d4 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestUtils.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestUtils.java @@ -857,7 +857,7 @@ public class 
CameraTestUtils extends Assert { // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer. // Same goes for DEPTH_POINT_CLOUD if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD || - format == ImageFormat.RAW_PRIVATE) { + format == ImageFormat.DEPTH_JPEG || format == ImageFormat.RAW_PRIVATE) { buffer = planes[0].getBuffer(); assertNotNull("Fail to get jpeg or depth ByteBuffer", buffer); data = new byte[buffer.remaining()]; @@ -940,6 +940,7 @@ public class CameraTestUtils extends Assert { case ImageFormat.RAW_PRIVATE: case ImageFormat.DEPTH16: case ImageFormat.DEPTH_POINT_CLOUD: + case ImageFormat.DEPTH_JPEG: assertEquals("JPEG/RAW/depth Images should have one plane", 1, planes.length); break; default: @@ -1363,6 +1364,9 @@ public class CameraTestUtils extends Assert { case ImageFormat.RAW_PRIVATE: validateRawPrivateData(data, width, height, image.getTimestamp(), filePath); break; + case ImageFormat.DEPTH_JPEG: + validateDepthJpegData(data, width, height, format, image.getTimestamp(), filePath); + break; default: throw new UnsupportedOperationException("Unsupported format for validation: " + format); @@ -1528,6 +1532,23 @@ public class CameraTestUtils extends Assert { } + private static void validateDepthJpegData(byte[] depthData, int width, int height, int format, + long ts, String filePath) { + + if (VERBOSE) Log.v(TAG, "Validating depth jpeg data"); + + // Can't validate size since it is variable + + if (DEBUG && filePath != null) { + String fileName = + filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".jpg"; + dumpFile(fileName, depthData); + } + + return; + + } + public static