Merge "audio: add implementation for TV related API" am: f57ff9e7e5

Original change: https://android-review.googlesource.com/c/platform/frameworks/base/+/1562807

MUST ONLY BE SUBMITTED BY AUTOMERGER

Change-Id: I3ecaa15c7d3dfbd38be54065d8dc09ba630c33ea
This commit is contained in:
Andy Hung
2021-01-28 18:20:34 +00:00
committed by Automerger Merge Worker
6 changed files with 262 additions and 39 deletions

View File

@@ -35,7 +35,7 @@ enum {
AUDIO_JAVA_WOULD_BLOCK = -7,
};
static inline jint nativeToJavaStatus(status_t status) {
static constexpr inline jint nativeToJavaStatus(status_t status) {
switch (status) {
case NO_ERROR:
return AUDIO_JAVA_SUCCESS;

View File

@@ -263,18 +263,7 @@ static jint android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject we
return (jint) AUDIO_JAVA_ERROR;
}
// TODO: replace when we land matching AudioTrack::set() in frameworks/av in r or r-tv-dev.
if (tunerConfiguration != nullptr) {
const TunerConfigurationHelper tunerHelper(env, tunerConfiguration);
ALOGE("Error creating AudioTrack: unsupported tuner contentId:%d syncId:%d",
tunerHelper.getContentId(), tunerHelper.getSyncId());
return (jint)AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
}
// TODO: replace when we land matching AudioTrack::set() in frameworks/av in r or r-tv-dev.
if (encapsulationMode != 0 /* ENCAPSULATION_MODE_NONE */) {
ALOGE("Error creating AudioTrack: unsupported encapsulationMode %d", encapsulationMode);
return (jint)AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
}
const TunerConfigurationHelper tunerHelper(env, tunerConfiguration);
jint* nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
if (nSession == NULL) {
@@ -369,6 +358,18 @@ static jint android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject we
offloadInfo.stream_type = AUDIO_STREAM_MUSIC; //required for offload
}
if (encapsulationMode != 0) {
offloadInfo = AUDIO_INFO_INITIALIZER;
offloadInfo.format = format;
offloadInfo.sample_rate = sampleRateInHertz;
offloadInfo.channel_mask = nativeChannelMask;
offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
offloadInfo.encapsulation_mode =
static_cast<audio_encapsulation_mode_t>(encapsulationMode);
offloadInfo.content_id = tunerHelper.getContentId();
offloadInfo.sync_id = tunerHelper.getSyncId();
}
// initialize the native AudioTrack object
status_t status = NO_ERROR;
switch (memoryMode) {
@@ -389,7 +390,8 @@ static jint android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject we
sessionId, // audio session ID
offload ? AudioTrack::TRANSFER_SYNC_NOTIF_CALLBACK
: AudioTrack::TRANSFER_SYNC,
offload ? &offloadInfo : NULL, -1, -1, // default uid, pid values
(offload || encapsulationMode) ? &offloadInfo : NULL, -1,
-1, // default uid, pid values
paa.get());
break;
@@ -1364,8 +1366,7 @@ static jint android_media_AudioTrack_setAudioDescriptionMixLeveldB(JNIEnv *env,
return (jint)AUDIO_JAVA_ERROR;
}
// TODO: replace in r-dev or r-tv-dev with code if HW is able to set audio mix level.
return (jint)AUDIO_JAVA_ERROR;
return nativeToJavaStatus(lpTrack->setAudioDescriptionMixLevel(level));
}
static jint android_media_AudioTrack_getAudioDescriptionMixLeveldB(JNIEnv *env, jobject thiz,
@@ -1381,12 +1382,10 @@ static jint android_media_AudioTrack_getAudioDescriptionMixLeveldB(JNIEnv *env,
return (jint)AUDIO_JAVA_ERROR;
}
// TODO: replace in r-dev or r-tv-dev with code if HW is able to set audio mix level.
// By contract we can return -infinity if unsupported.
*nativeLevel = -std::numeric_limits<float>::infinity();
status_t status = lpTrack->getAudioDescriptionMixLevel(reinterpret_cast<float *>(nativeLevel));
env->ReleasePrimitiveArrayCritical(level, nativeLevel, 0 /* mode */);
nativeLevel = nullptr;
return (jint)AUDIO_JAVA_SUCCESS;
return nativeToJavaStatus(status);
}
static jint android_media_AudioTrack_setDualMonoMode(JNIEnv *env, jobject thiz, jint dualMonoMode) {
@@ -1396,8 +1395,8 @@ static jint android_media_AudioTrack_setDualMonoMode(JNIEnv *env, jobject thiz,
return (jint)AUDIO_JAVA_ERROR;
}
// TODO: replace in r-dev or r-tv-dev with code if HW is able to set audio mix level.
return (jint)AUDIO_JAVA_ERROR;
return nativeToJavaStatus(
lpTrack->setDualMonoMode(static_cast<audio_dual_mono_mode_t>(dualMonoMode)));
}
static jint android_media_AudioTrack_getDualMonoMode(JNIEnv *env, jobject thiz,
@@ -1407,18 +1406,17 @@ static jint android_media_AudioTrack_getDualMonoMode(JNIEnv *env, jobject thiz,
ALOGE("%s: AudioTrack not initialized", __func__);
return (jint)AUDIO_JAVA_ERROR;
}
jfloat *nativeDualMonoMode = (jfloat *)env->GetPrimitiveArrayCritical(dualMonoMode, NULL);
jint *nativeDualMonoMode = (jint *)env->GetPrimitiveArrayCritical(dualMonoMode, NULL);
if (nativeDualMonoMode == nullptr) {
ALOGE("%s: Cannot retrieve dualMonoMode pointer", __func__);
return (jint)AUDIO_JAVA_ERROR;
}
// TODO: replace in r-dev or r-tv-dev with code if HW is able to select dual mono mode.
// By contract we can return DUAL_MONO_MODE_OFF if unsupported.
*nativeDualMonoMode = 0; // DUAL_MONO_MODE_OFF for now.
status_t status = lpTrack->getDualMonoMode(
reinterpret_cast<audio_dual_mono_mode_t *>(nativeDualMonoMode));
env->ReleasePrimitiveArrayCritical(dualMonoMode, nativeDualMonoMode, 0 /* mode */);
nativeDualMonoMode = nullptr;
return (jint)AUDIO_JAVA_SUCCESS;
return nativeToJavaStatus(status);
}
// ----------------------------------------------------------------------------

View File

@@ -5434,8 +5434,12 @@ public class AudioManager {
public boolean setAdditionalOutputDeviceDelay(
@NonNull AudioDeviceInfo device, @IntRange(from = 0) long delayMillis) {
Objects.requireNonNull(device);
// Implement the setter in r-dev or r-tv-dev as needed.
return false;
try {
return getService().setAdditionalOutputDeviceDelay(
new AudioDeviceAttributes(device), delayMillis);
} catch (RemoteException e) {
throw e.rethrowFromSystemServer();
}
}
/**
@@ -5450,8 +5454,11 @@ public class AudioManager {
@IntRange(from = 0)
public long getAdditionalOutputDeviceDelay(@NonNull AudioDeviceInfo device) {
Objects.requireNonNull(device);
// Implement the getter in r-dev or r-tv-dev as needed.
return 0;
try {
return getService().getAdditionalOutputDeviceDelay(new AudioDeviceAttributes(device));
} catch (RemoteException e) {
throw e.rethrowFromSystemServer();
}
}
/**
@@ -5468,8 +5475,12 @@ public class AudioManager {
@IntRange(from = 0)
public long getMaxAdditionalOutputDeviceDelay(@NonNull AudioDeviceInfo device) {
Objects.requireNonNull(device);
// Implement the getter in r-dev or r-tv-dev as needed.
return 0;
try {
return getService().getMaxAdditionalOutputDeviceDelay(
new AudioDeviceAttributes(device));
} catch (RemoteException e) {
throw e.rethrowFromSystemServer();
}
}
/**

View File

@@ -1269,10 +1269,12 @@ public class AudioTrack extends PlayerBase
// native code figure out the minimum buffer size.
if (mMode == MODE_STREAM && mBufferSizeInBytes == 0) {
int bytesPerSample = 1;
try {
bytesPerSample = mFormat.getBytesPerSample(mFormat.getEncoding());
} catch (IllegalArgumentException e) {
// do nothing
if (AudioFormat.isEncodingLinearFrames(mFormat.getEncoding())) {
try {
bytesPerSample = mFormat.getBytesPerSample(mFormat.getEncoding());
} catch (IllegalArgumentException e) {
// do nothing
}
}
mBufferSizeInBytes = mFormat.getChannelCount() * bytesPerSample;
}

View File

@@ -330,4 +330,10 @@ interface IAudioService {
oneway void unregisterCommunicationDeviceDispatcher(
ICommunicationDeviceDispatcher dispatcher);
boolean setAdditionalOutputDeviceDelay(in AudioDeviceAttributes device, long delayMillis);
long getAdditionalOutputDeviceDelay(in AudioDeviceAttributes device);
long getMaxAdditionalOutputDeviceDelay(in AudioDeviceAttributes device);
}

View File

@@ -32,6 +32,7 @@ import static com.android.server.audio.AudioEventLogger.Event.ALOGW;
import android.Manifest;
import android.annotation.IntDef;
import android.annotation.IntRange;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.UserIdInt;
@@ -166,6 +167,7 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
@@ -173,6 +175,7 @@ import java.util.Objects;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
/**
@@ -563,6 +566,117 @@ public class AudioService extends IAudioService.Stub
private boolean mDockAudioMediaEnabled = true;
/**
* RestorableParameters is a thread-safe class used to store a
* first-in first-out history of parameters for replay / restoration.
*
* The idealized implementation of restoration would have a list of setting methods and
* values to be called for restoration. Explicitly managing such setters and
* values would be tedious - a simpler method is to store the values and the
method implicitly by lambda capture (the captured values must be immutable, or a lock
must be held while they are accessed).
*
* We provide queueRestoreWithRemovalIfTrue() to allow
* the caller to provide a BooleanSupplier lambda, which conveniently packages
* the setter and its parameters needed for restoration. If during restoration,
* the BooleanSupplier returns true, it is removed from the mMap.
*
* We provide a setParameters() method as an example helper method.
*/
private static class RestorableParameters {
/**
* Sets a parameter and queues it for restoration if successful.
*
* @param id a string handle associated with this parameter.
* @param parameter the actual parameter string.
* @return the result of AudioSystem.setParameters
*/
public int setParameters(@NonNull String id, @NonNull String parameter) {
Objects.requireNonNull(id, "id must not be null");
Objects.requireNonNull(parameter, "parameter must not be null");
synchronized (mMap) {
final int status = AudioSystem.setParameters(parameter);
if (status == AudioSystem.AUDIO_STATUS_OK) { // Java uses recursive mutexes.
queueRestoreWithRemovalIfTrue(id, () -> { // remove me if set fails.
return AudioSystem.setParameters(parameter) != AudioSystem.AUDIO_STATUS_OK;
});
}
// Implementation detail: We do not mMap.remove(id); on failure.
// A previously queued restore action for this id (if any) stays queued.
return status;
}
}
/**
* Queues a restore method which is executed on restoreAll().
*
* If the supplier is null, the id is removed from the restore map.
*
* Note: When the BooleanSupplier restore method is executed
* during restoreAll, if it returns true, it is removed from the
* restore map.
*
* @param id a unique tag associated with the restore method.
* @param supplier is a BooleanSupplier lambda.
*/
public void queueRestoreWithRemovalIfTrue(
@NonNull String id, @Nullable BooleanSupplier supplier) {
Objects.requireNonNull(id, "id must not be null");
synchronized (mMap) {
if (supplier != null) {
mMap.put(id, supplier); // replaces any previous restore action for this id.
} else {
mMap.remove(id);
}
}
}
/**
* Restores all queued parameters, in first-in first-out order.
*
* During restoration after audioserver death, any BooleanSupplier that returns
* true will be removed from mMap.
*/
public void restoreAll() {
synchronized (mMap) {
// Note: removing from values() also removes from the backing map.
// TODO: Consider catching exceptions?
mMap.values().removeIf(v -> {
return v.getAsBoolean();
});
}
}
/**
* mMap is a LinkedHashMap<Key, Value> of parameters restored by restore().
* The Key is a unique id tag for identification.
* The Value is a lambda expression which returns true if the entry is to
* be removed.
*
* 1) For memory limitation purposes, mMap keeps the latest MAX_ENTRIES
* accessed in the map.
* 2) Parameters are restored in order of queuing, first in first out,
* from earliest to latest.
*/
@GuardedBy("mMap")
private Map</* @NonNull */ String, /* @NonNull */ BooleanSupplier> mMap =
new LinkedHashMap<>() {
// TODO: do we need this memory limitation?
private static final int MAX_ENTRIES = 1000; // limit our memory for now.
@Override
protected boolean removeEldestEntry(Map.Entry eldest) {
// Evict the oldest entry once the cap is exceeded, warning so the
// drop shows up in the log rather than happening silently.
if (size() <= MAX_ENTRIES) return false;
Log.w(TAG, "Parameter map exceeds "
+ MAX_ENTRIES + " removing " + eldest.getKey()); // don't silently remove.
return true;
}
};
}
// We currently have one instance for mRestorableParameters used for
// setAdditionalOutputDeviceDelay(). Other methods requiring restoration could share this
// or use their own instance.
private RestorableParameters mRestorableParameters = new RestorableParameters();
private int mDockState = Intent.EXTRA_DOCK_STATE_UNDOCKED;
// Used when safe volume warning message display is requested by setStreamVolume(). In this
@@ -1095,6 +1209,9 @@ public class AudioService extends IAudioService.Stub
RotationHelper.updateOrientation();
}
// Restore setParameters and other queued setters.
mRestorableParameters.restoreAll();
synchronized (mSettingsLock) {
final int forDock = mDockAudioMediaEnabled ?
AudioSystem.FORCE_ANALOG_DOCK : AudioSystem.FORCE_NONE;
@@ -9303,6 +9420,95 @@ public class AudioService extends IAudioService.Stub
}
}
/**
* @hide
* Sets an additional audio output device delay in milliseconds.
*
* The additional output delay is a request to the output device to
* delay audio presentation (generally with respect to video presentation for better
* synchronization).
* It may not be supported by all output devices,
* and typically increases the audio latency by the amount of additional
* audio delay requested.
*
* If additional audio delay is supported by an audio output device,
* it is expected to be supported for all output streams (and configurations)
* opened on that device.
*
@param device the output device (type and address) the additional delay applies to
* @param delayMillis delay in milliseconds desired. This should be in range of {@code 0}
* to the value returned by {@link #getMaxAdditionalOutputDeviceDelay()}.
* @return true if successful, false if the device does not support output device delay
* or the delay is not in range of {@link #getMaxAdditionalOutputDeviceDelay()}.
*/
@Override
//@RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING)
public boolean setAdditionalOutputDeviceDelay(
@NonNull AudioDeviceAttributes device, @IntRange(from = 0) long delayMillis) {
Objects.requireNonNull(device, "device must not be null");
enforceModifyAudioRoutingPermission();
// The delay is delivered as an AudioSystem parameter keyed by the internal
// device type and address. The key without the delay value doubles as the
// unique id under which the setter is queued for replay after audioserver
// restart (see RestorableParameters) — a later call for the same device
// replaces the earlier queued value rather than adding a duplicate.
final String getterKey = "additional_output_device_delay="
+ AudioDeviceInfo.convertDeviceTypeToInternalDevice(device.getType())
+ "," + device.getAddress(); // "getter" key as an id.
final String setterKey = getterKey + "," + delayMillis; // append the delay for setter
return mRestorableParameters.setParameters(getterKey, setterKey)
== AudioSystem.AUDIO_STATUS_OK;
}
/**
* @hide
* Returns the current additional audio output device delay in milliseconds.
*
@param device the output device (type and address) being queried
* @return the additional output device delay. This is a non-negative number.
* {@code 0} is returned if unsupported.
*/
@Override
@IntRange(from = 0)
public long getAdditionalOutputDeviceDelay(@NonNull AudioDeviceAttributes device) {
    Objects.requireNonNull(device, "device must not be null");
    final String key = "additional_output_device_delay";
    // Query the parameter for this device type/address; the expected reply has
    // the form "<key>=<delayMillis>" (presumably produced by the audio HAL —
    // confirm against the HAL implementation).
    final String reply = AudioSystem.getParameters(
            key + "=" + AudioDeviceInfo.convertDeviceTypeToInternalDevice(device.getType())
            + "," + device.getAddress());
    // By contract, 0 is returned when the device does not support an
    // additional delay.
    long delayMillis = 0;
    if (reply != null) {
        try {
            delayMillis = Long.parseLong(reply.substring(key.length() + 1));
        } catch (IndexOutOfBoundsException | NumberFormatException e) {
            // Guard against a truncated or malformed reply: the previous code
            // caught only NullPointerException, so a reply shorter than the
            // key (StringIndexOutOfBoundsException) or a non-numeric value
            // (NumberFormatException) would have crashed system_server.
            delayMillis = 0;
        }
    }
    return delayMillis;
}
/**
* @hide
* Returns the maximum additional audio output device delay in milliseconds.
*
@param device the output device (type and address) being queried
* @return the maximum output device delay in milliseconds that can be set.
* This is a non-negative number
* representing the additional audio delay supported for the device.
* {@code 0} is returned if unsupported.
*/
@Override
@IntRange(from = 0)
public long getMaxAdditionalOutputDeviceDelay(@NonNull AudioDeviceAttributes device) {
    Objects.requireNonNull(device, "device must not be null");
    final String key = "max_additional_output_device_delay";
    // Query the parameter for this device type/address; the expected reply has
    // the form "<key>=<maxDelayMillis>" (presumably produced by the audio HAL
    // — confirm against the HAL implementation).
    final String reply = AudioSystem.getParameters(
            key + "=" + AudioDeviceInfo.convertDeviceTypeToInternalDevice(device.getType())
            + "," + device.getAddress());
    // By contract, 0 is returned when the device does not support an
    // additional delay.
    long delayMillis = 0;
    if (reply != null) {
        try {
            delayMillis = Long.parseLong(reply.substring(key.length() + 1));
        } catch (IndexOutOfBoundsException | NumberFormatException e) {
            // Guard against a truncated or malformed reply: the previous code
            // caught only NullPointerException, so a reply shorter than the
            // key (StringIndexOutOfBoundsException) or a non-numeric value
            // (NumberFormatException) would have crashed system_server.
            delayMillis = 0;
        }
    }
    return delayMillis;
}
//======================
// misc