Add short audio descriptor and encapsulation type in AudioProfile.

The short audio descriptor is defined in section 7 of the HDMI specification
1.4b and is used to describe the audio capabilities over HDMI.

The encapsulation types represent the encapsulation formats that must be
used when sending audio data, in the format associated with the
AudioProfile, to Android.

Bug: 131736540
Bug: 178619392
Test: atest AudioManagerTest
Change-Id: Icc1d02bd1239561f7f3740bf7bd9ecf17fc1420b
This commit is contained in:
jiabin
2021-03-05 06:25:26 +00:00
parent 71cf0b48ce
commit a937979b4d
10 changed files with 329 additions and 10 deletions

View File

@@ -20078,6 +20078,14 @@ package android.media {
method public android.media.AudioAttributes.Builder setUsage(int);
}
public class AudioDescriptor {
method @NonNull public byte[] getDescriptor();
method public int getEncapsulationType();
method public int getStandard();
field public static final int STANDARD_EDID = 1; // 0x1
field public static final int STANDARD_NONE = 0; // 0x0
}
public abstract class AudioDeviceCallback {
ctor public AudioDeviceCallback();
method public void onAudioDevicesAdded(android.media.AudioDeviceInfo[]);
@@ -20086,6 +20094,7 @@ package android.media {
public final class AudioDeviceInfo {
method @NonNull public String getAddress();
method @NonNull public java.util.List<android.media.AudioDescriptor> getAudioDescriptors();
method @NonNull public java.util.List<android.media.AudioProfile> getAudioProfiles();
method @NonNull public int[] getChannelCounts();
method @NonNull public int[] getChannelIndexMasks();
@@ -20540,8 +20549,11 @@ package android.media {
public class AudioProfile {
method @NonNull public int[] getChannelIndexMasks();
method @NonNull public int[] getChannelMasks();
method public int getEncapsulationType();
method public int getFormat();
method @NonNull public int[] getSampleRates();
field public static final int AUDIO_ENCAPSULATION_TYPE_IEC61937 = 1; // 0x1
field public static final int AUDIO_ENCAPSULATION_TYPE_NONE = 0; // 0x0
}
public class AudioRecord implements android.media.AudioRecordingMonitor android.media.AudioRouting android.media.MicrophoneDirection {

View File

@@ -0,0 +1,46 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_MEDIA_EXTRAAUDIODESCRIPTOR_H
#define ANDROID_MEDIA_EXTRAAUDIODESCRIPTOR_H
#include <system/audio.h>
#include <utils/Errors.h>
namespace android {
// keep these values in sync with ExtraAudioDescriptor.java
#define STANDARD_NONE 0
#define STANDARD_EDID 1
static inline status_t audioStandardFromNative(audio_standard_t nStandard, int* standard) {
status_t result = NO_ERROR;
switch (nStandard) {
case AUDIO_STANDARD_NONE:
*standard = STANDARD_NONE;
break;
case AUDIO_STANDARD_EDID:
*standard = STANDARD_EDID;
break;
default:
result = BAD_VALUE;
}
return result;
}
} // namespace android
#endif // ANDROID_MEDIA_EXTRAAUDIODESCRIPTOR_H

View File

@@ -0,0 +1,47 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_MEDIA_AUDIOPROFILE_H
#define ANDROID_MEDIA_AUDIOPROFILE_H
#include <system/audio.h>
#include <utils/Errors.h>
namespace android {
// keep these values in sync with AudioProfile.java
#define ENCAPSULATION_TYPE_NONE 0
#define ENCAPSULATION_TYPE_IEC61937 1
static inline status_t audioEncapsulationTypeFromNative(
audio_encapsulation_type_t nEncapsulationType, int* encapsulationType) {
status_t result = NO_ERROR;
switch (nEncapsulationType) {
case AUDIO_ENCAPSULATION_TYPE_NONE:
*encapsulationType = ENCAPSULATION_TYPE_NONE;
break;
case AUDIO_ENCAPSULATION_TYPE_IEC61937:
*encapsulationType = ENCAPSULATION_TYPE_IEC61937;
break;
default:
result = BAD_VALUE;
}
return result;
}
} // namespace android
#endif // ANDROID_MEDIA_AUDIOPROFILE_H

View File

@@ -34,10 +34,12 @@
#include <system/audio.h>
#include <system/audio_policy.h>
#include "android_media_AudioAttributes.h"
#include "android_media_AudioDescriptor.h"
#include "android_media_AudioDeviceAttributes.h"
#include "android_media_AudioEffectDescriptor.h"
#include "android_media_AudioErrors.h"
#include "android_media_AudioFormat.h"
#include "android_media_AudioProfile.h"
#include "android_media_MicrophoneInfo.h"
// ----------------------------------------------------------------------------
@@ -176,6 +178,9 @@ static struct {
static struct { jmethodID add; } gListMethods;
static jclass gAudioDescriptorClass;
static jmethodID gAudiODescriptorCstor;
//
// JNI Initialization for OpenSLES routing
//
@@ -1217,6 +1222,7 @@ static jint convertAudioPortFromNative(JNIEnv *env, jobject *jAudioPort,
jobject jAudioPortConfig = NULL;
jstring jDeviceName = NULL;
jobject jAudioProfiles = NULL;
jobject jAudioDescriptors = nullptr;
bool useInMask;
ALOGV("convertAudioPortFromNative id %d role %d type %d name %s",
@@ -1293,13 +1299,21 @@ static jint convertAudioPortFromNative(JNIEnv *env, jobject *jAudioPort,
}
}
int encapsulationType;
if (audioEncapsulationTypeFromNative(nAudioPort->audio_profiles[i].encapsulation_type,
&encapsulationType) != NO_ERROR) {
ALOGW("Unknown encapsualtion type for JAVA API: %u",
nAudioPort->audio_profiles[i].encapsulation_type);
continue;
}
ScopedLocalRef<jobject>
jAudioProfile(env,
env->NewObject(gAudioProfileClass, gAudioProfileCstor,
audioFormatFromNative(
nAudioPort->audio_profiles[i].format),
jSamplingRates.get(), jChannelMasks.get(),
jChannelIndexMasks.get()));
jChannelIndexMasks.get(), encapsulationType));
if (jAudioProfile == nullptr) {
jStatus = (jint)AUDIO_JAVA_ERROR;
goto exit;
@@ -1307,6 +1321,42 @@ static jint convertAudioPortFromNative(JNIEnv *env, jobject *jAudioPort,
env->CallBooleanMethod(jAudioProfiles, gArrayListMethods.add, jAudioProfile.get());
}
jAudioDescriptors = env->NewObject(gArrayListClass, gArrayListMethods.cstor);
if (jAudioDescriptors == nullptr) {
jStatus = (jint)AUDIO_JAVA_ERROR;
goto exit;
}
for (size_t i = 0; i < nAudioPort->num_extra_audio_descriptors; ++i) {
const auto &extraAudioDescriptor = nAudioPort->extra_audio_descriptors[i];
ScopedLocalRef<jobject> jAudioDescriptor(env);
if (extraAudioDescriptor.descriptor_length == 0) {
continue;
}
int standard;
if (audioStandardFromNative(extraAudioDescriptor.standard, &standard) != NO_ERROR) {
ALOGW("Unknown standard for JAVA API: %u", extraAudioDescriptor.standard);
continue;
}
int encapsulationType;
if (audioEncapsulationTypeFromNative(extraAudioDescriptor.encapsulation_type,
&encapsulationType) != NO_ERROR) {
ALOGW("Unknown encapsualtion type for JAVA API: %u",
extraAudioDescriptor.encapsulation_type);
continue;
}
ScopedLocalRef<jbyteArray> jDescriptor(env,
env->NewByteArray(
extraAudioDescriptor.descriptor_length));
env->SetByteArrayRegion(jDescriptor.get(), 0, extraAudioDescriptor.descriptor_length,
reinterpret_cast<const jbyte *>(extraAudioDescriptor.descriptor));
jAudioDescriptor =
ScopedLocalRef<jobject>(env,
env->NewObject(gAudioDescriptorClass, gAudiODescriptorCstor,
standard, encapsulationType,
jDescriptor.get()));
env->CallBooleanMethod(jAudioDescriptors, gArrayListMethods.add, jAudioDescriptor.get());
}
// gains
jGains = env->NewObjectArray(nAudioPort->num_gains,
gAudioGainClass, NULL);
@@ -1365,7 +1415,7 @@ static jint convertAudioPortFromNative(JNIEnv *env, jobject *jAudioPort,
*jAudioPort =
env->NewObject(gAudioDevicePortClass, gAudioDevicePortCstor, jHandle, jDeviceName,
jAudioProfiles, jGains, nAudioPort->ext.device.type, jAddress,
jEncapsulationModes, jEncapsulationMetadataTypes);
jEncapsulationModes, jEncapsulationMetadataTypes, jAudioDescriptors);
env->DeleteLocalRef(jAddress);
} else if (nAudioPort->type == AUDIO_PORT_TYPE_MIX) {
ALOGV("convertAudioPortFromNative is a mix");
@@ -1414,6 +1464,9 @@ exit:
if (jAudioPortConfig != NULL) {
env->DeleteLocalRef(jAudioPortConfig);
}
if (jAudioDescriptors != nullptr) {
env->DeleteLocalRef(jAudioDescriptors);
}
return jStatus;
}
@@ -2790,7 +2843,8 @@ int register_android_media_AudioSystem(JNIEnv *env)
gAudioDevicePortCstor =
GetMethodIDOrDie(env, audioDevicePortClass, "<init>",
"(Landroid/media/AudioHandle;Ljava/lang/String;Ljava/util/List;"
"[Landroid/media/AudioGain;ILjava/lang/String;[I[I)V");
"[Landroid/media/AudioGain;ILjava/lang/String;[I[I"
"Ljava/util/List;)V");
// When access AudioPort as AudioDevicePort
gAudioPortFields.mType = GetFieldIDOrDie(env, audioDevicePortClass, "mType", "I");
@@ -2909,7 +2963,11 @@ int register_android_media_AudioSystem(JNIEnv *env)
jclass audioProfileClass = FindClassOrDie(env, "android/media/AudioProfile");
gAudioProfileClass = MakeGlobalRefOrDie(env, audioProfileClass);
gAudioProfileCstor = GetMethodIDOrDie(env, audioProfileClass, "<init>", "(I[I[I[I)V");
gAudioProfileCstor = GetMethodIDOrDie(env, audioProfileClass, "<init>", "(I[I[I[II)V");
jclass audioDescriptorClass = FindClassOrDie(env, "android/media/AudioDescriptor");
gAudioDescriptorClass = MakeGlobalRefOrDie(env, audioDescriptorClass);
gAudiODescriptorCstor = GetMethodIDOrDie(env, audioDescriptorClass, "<init>", "(II[B)V");
AudioSystem::addErrorCallback(android_media_AudioSystem_error_callback);

View File

@@ -0,0 +1,90 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media;
import android.annotation.IntDef;
import android.annotation.NonNull;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* The AudioDescriptor contains the information to describe the audio playback/capture
* capabilities. The capabilities are described by a byte array, which is defined by a
* particular standard. This is used when the format is unrecognized to the platform.
*/
public class AudioDescriptor {
/**
* The audio standard is not specified.
*/
public static final int STANDARD_NONE = 0;
/**
* The Extended Display Identification Data (EDID) standard for a short audio descriptor.
*/
public static final int STANDARD_EDID = 1;
/** @hide */
@IntDef({
STANDARD_NONE,
STANDARD_EDID,
})
@Retention(RetentionPolicy.SOURCE)
public @interface AudioDescriptorStandard {}
private final int mStandard;
private final byte[] mDescriptor;
private final int mEncapsulationType;
AudioDescriptor(int standard, int encapsulationType, @NonNull byte[] descriptor) {
mStandard = standard;
mEncapsulationType = encapsulationType;
mDescriptor = descriptor;
}
/**
* @return the standard that defines audio playback/capture capabilities.
*/
public @AudioDescriptorStandard int getStandard() {
return mStandard;
}
/**
* @return a byte array that describes audio playback/capture capabilities as encoded by the
* standard for this AudioDescriptor.
*/
public @NonNull byte[] getDescriptor() {
return mDescriptor;
}
/**
* The encapsulation type indicates what encapsulation type is required when the framework is
* using this extra audio descriptor for playing to a device exposing this audio profile.
* When encapsulation is required, only playback with {@link android.media.AudioTrack} API is
* supported. But playback with {@link android.media.MediaPlayer} is not.
* When an encapsulation type is required, the {@link AudioFormat} encoding selected when
* creating the {@link AudioTrack} must match the encapsulation type, e.g
* AudioFormat#ENCODING_IEC61937 for AudioProfile.AUDIO_ENCAPSULATION_TYPE_IEC61937.
*
* @return an integer representing the encapsulation type
*
* @see AudioProfile#AUDIO_ENCAPSULATION_TYPE_NONE
* @see AudioProfile#AUDIO_ENCAPSULATION_TYPE_IEC61937
*/
public @AudioProfile.EncapsulationType int getEncapsulationType() {
return mEncapsulationType;
}
}

View File

@@ -518,6 +518,13 @@ public final class AudioDeviceInfo {
return mPort.profiles();
}
/**
* @return A list of {@link AudioDescriptor} supported by the audio devices.
*/
public @NonNull List<AudioDescriptor> getAudioDescriptors() {
return mPort.audioDescriptors();
}
/**
* Returns an array of supported encapsulation modes for the device.
*

View File

@@ -60,10 +60,11 @@ public class AudioDevicePort extends AudioPort {
AudioDevicePort(AudioHandle handle, String deviceName, List<AudioProfile> profiles,
AudioGain[] gains, int type, String address, int[] encapsulationModes,
@AudioTrack.EncapsulationMetadataType int[] encapsulationMetadataTypes) {
@AudioTrack.EncapsulationMetadataType int[] encapsulationMetadataTypes,
List<AudioDescriptor> descriptors) {
super(handle,
AudioManager.isInputDevice(type) ? AudioPort.ROLE_SOURCE : AudioPort.ROLE_SINK,
deviceName, profiles, gains);
deviceName, profiles, gains, descriptors);
mType = type;
mAddress = address;
mEncapsulationModes = encapsulationModes;

View File

@@ -46,7 +46,7 @@ public class AudioMixPort extends AudioPort {
AudioMixPort(AudioHandle handle, int ioHandle, int role, String deviceName,
List<AudioProfile> profiles, AudioGain[] gains) {
super(handle, role, deviceName, profiles, gains);
super(handle, role, deviceName, profiles, gains, null);
mIoHandle = ioHandle;
}

View File

@@ -86,6 +86,7 @@ public class AudioPort {
private final int[] mChannelIndexMasks;
private final int[] mFormats;
private final List<AudioProfile> mProfiles;
private final List<AudioDescriptor> mDescriptors;
@UnsupportedAppUsage
private final AudioGain[] mGains;
@UnsupportedAppUsage
@@ -107,17 +108,21 @@ public class AudioPort {
if (mFormats != null) {
for (int format : mFormats) {
mProfiles.add(new AudioProfile(
format, samplingRates, channelMasks, channelIndexMasks));
format, samplingRates, channelMasks, channelIndexMasks,
AudioProfile.AUDIO_ENCAPSULATION_TYPE_NONE));
}
}
mDescriptors = new ArrayList<>();
}
AudioPort(AudioHandle handle, int role, String name,
List<AudioProfile> profiles, AudioGain[] gains) {
List<AudioProfile> profiles, AudioGain[] gains,
List<AudioDescriptor> descriptors) {
mHandle = handle;
mRole = role;
mName = name;
mProfiles = profiles;
mDescriptors = descriptors;
mGains = gains;
Set<Integer> formats = new HashSet<>();
Set<Integer> samplingRates = new HashSet<>();
@@ -209,6 +214,13 @@ public class AudioPort {
return mProfiles;
}
/**
* Get the list of audio descriptor
*/
public List<AudioDescriptor> audioDescriptors() {
return mDescriptors;
}
/**
* Get the list of gain descriptors
* Empty array if this port does not have gain control

View File

@@ -16,27 +16,55 @@
package android.media;
import android.annotation.IntDef;
import android.annotation.NonNull;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Arrays;
import java.util.stream.Collectors;
/**
* An AudioProfile is specific to an audio format and lists supported sampling rates and
* channel masks for that format. An {@link AudioDeviceInfo} has a list of supported AudioProfiles.
* There can be multiple profiles whose encoding format is the same. This usually happens when
* an encoding format is only supported when it is encapsulated by some particular encapsulation
* types. If there are multiple encapsulation types that can carry this encoding format, they will
* be reported in different audio profiles. The application can choose any of the encapsulation
* types.
*/
public class AudioProfile {
/**
* No encapsulation type is specified.
*/
public static final int AUDIO_ENCAPSULATION_TYPE_NONE = 0;
/**
* Encapsulation format is defined in standard IEC 61937.
*/
public static final int AUDIO_ENCAPSULATION_TYPE_IEC61937 = 1;
/** @hide */
@IntDef({
AUDIO_ENCAPSULATION_TYPE_NONE,
AUDIO_ENCAPSULATION_TYPE_IEC61937,
})
@Retention(RetentionPolicy.SOURCE)
public @interface EncapsulationType {}
private final int mFormat;
private final int[] mSamplingRates;
private final int[] mChannelMasks;
private final int[] mChannelIndexMasks;
private final int mEncapsulationType;
AudioProfile(int format, @NonNull int[] samplingRates, @NonNull int[] channelMasks,
@NonNull int[] channelIndexMasks) {
@NonNull int[] channelIndexMasks,
int encapsulationType) {
mFormat = format;
mSamplingRates = samplingRates;
mChannelMasks = channelMasks;
mChannelIndexMasks = channelIndexMasks;
mEncapsulationType = encapsulationType;
}
/**
@@ -67,6 +95,24 @@ public class AudioProfile {
return mSamplingRates;
}
/**
* The encapsulation type indicates what encapsulation type is required when the framework is
* using this format when playing to a device exposing this audio profile.
* When encapsulation is required, only playback with {@link android.media.AudioTrack} API is
* supported. But playback with {@link android.media.MediaPlayer} is not.
* When an encapsulation type is required, the {@link AudioFormat} encoding selected when
* creating the {@link AudioTrack} must match the encapsulation type, e.g
* AudioFormat.ENCODING_IEC61937 for AUDIO_ENCAPSULATION_TYPE_IEC61937.
*
* @return an integer representing the encapsulation type
*
* @see #AUDIO_ENCAPSULATION_TYPE_NONE
* @see #AUDIO_ENCAPSULATION_TYPE_IEC61937
*/
public @EncapsulationType int getEncapsulationType() {
return mEncapsulationType;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("{");