Merge "Notification playback synchronized with audio focus"

This commit is contained in:
Jean-Michel Trivi
2017-02-01 18:43:32 +00:00
committed by Android (Google) Code Review
15 changed files with 443 additions and 68 deletions

View File

@@ -22085,6 +22085,7 @@ package android.media {
method public int describeContents();
method public android.media.AudioAttributes getAttributes();
method public java.lang.String getClientId();
method public int getClientUid();
method public int getFlags();
method public int getGainRequest();
method public int getLossReceived();
@@ -24269,10 +24270,12 @@ package android.media {
}
public class PlayerProxy {
method public void pause() throws java.lang.IllegalStateException;
method public void setVolume(float) throws java.lang.IllegalStateException;
method public void start() throws java.lang.IllegalStateException;
method public void stop() throws java.lang.IllegalStateException;
method public void pause();
method public void setPan(float);
method public void setStartDelayMs(int);
method public void setVolume(float);
method public void start();
method public void stop();
}
public final class Rating implements android.os.Parcelable {

View File

@@ -29,9 +29,10 @@ import java.util.Objects;
@SystemApi
public final class AudioFocusInfo implements Parcelable {
private AudioAttributes mAttributes;
private String mClientId;
private String mPackageName;
private final AudioAttributes mAttributes;
private final int mClientUid;
private final String mClientId;
private final String mPackageName;
private int mGainRequest;
private int mLossReceived;
private int mFlags;
@@ -47,9 +48,10 @@ public final class AudioFocusInfo implements Parcelable {
* @param flags
* @hide
*/
public AudioFocusInfo(AudioAttributes aa, String clientId, String packageName,
public AudioFocusInfo(AudioAttributes aa, int clientUid, String clientId, String packageName,
int gainRequest, int lossReceived, int flags) {
mAttributes = aa == null ? new AudioAttributes.Builder().build() : aa;
mClientUid = clientUid;
mClientId = clientId == null ? "" : clientId;
mPackageName = packageName == null ? "" : packageName;
mGainRequest = gainRequest;
@@ -65,6 +67,9 @@ public final class AudioFocusInfo implements Parcelable {
@SystemApi
public AudioAttributes getAttributes() { return mAttributes; }
@SystemApi
public int getClientUid() { return mClientUid; }
@SystemApi
public String getClientId() { return mClientId; }
@@ -111,6 +116,7 @@ public final class AudioFocusInfo implements Parcelable {
@Override
public void writeToParcel(Parcel dest, int flags) {
mAttributes.writeToParcel(dest, flags);
dest.writeInt(mClientUid);
dest.writeString(mClientId);
dest.writeString(mPackageName);
dest.writeInt(mGainRequest);
@@ -121,7 +127,7 @@ public final class AudioFocusInfo implements Parcelable {
@SystemApi
@Override
public int hashCode() {
return Objects.hash(mAttributes, mClientId, mPackageName, mGainRequest, mFlags);
return Objects.hash(mAttributes, mClientUid, mClientId, mPackageName, mGainRequest, mFlags);
}
@SystemApi
@@ -137,6 +143,9 @@ public final class AudioFocusInfo implements Parcelable {
if (!mAttributes.equals(other.mAttributes)) {
return false;
}
if (mClientUid != other.mClientUid) {
return false;
}
if (!mClientId.equals(other.mClientId)) {
return false;
}
@@ -161,6 +170,7 @@ public final class AudioFocusInfo implements Parcelable {
public AudioFocusInfo createFromParcel(Parcel in) {
return new AudioFocusInfo(
AudioAttributes.CREATOR.createFromParcel(in), //AudioAttributes aa
in.readInt(), // int clientUid
in.readString(), //String clientId
in.readString(), //String packageName
in.readInt(), //int gainRequest

View File

@@ -2428,6 +2428,23 @@ public class AudioManager {
}
}
/**
* @hide
* Return the volume ramping time for a sound to be played after the given focus request,
* and to play a sound of the given attributes
* @param focusGain the audio focus gain that was (or will be) requested before playing,
*     e.g. AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK
* @param attr the non-null audio attributes of the sound about to be played
* @return the ramp duration in milliseconds, as reported by the audio service
*/
public int getFocusRampTimeMs(int focusGain, AudioAttributes attr) {
IAudioService service = getService();
try {
// Delegates to AudioService (see IAudioService.getFocusRampTimeMs)
return service.getFocusRampTimeMs(focusGain, attr);
} catch (RemoteException e) {
// Binder failure to system server is fatal by convention
throw e.rethrowFromSystemServer();
}
}
/**
* @hide
* Used internally by telephony package to abandon audio focus, typically after a call or

View File

@@ -1880,6 +1880,26 @@ public class AudioTrack extends PlayerBase
if (mState != STATE_INITIALIZED) {
throw new IllegalStateException("play() called on uninitialized AudioTrack.");
}
//FIXME use lambda to pass startImpl to superclass
final int delay = getStartDelayMs();
if (delay == 0) {
startImpl();
} else {
new Thread() {
public void run() {
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
e.printStackTrace();
}
baseSetStartDelayMs(0);
startImpl();
}
}.start();
}
}
private void startImpl() {
synchronized(mPlayStateLock) {
baseStart();
native_start();

View File

@@ -194,5 +194,7 @@ interface IAudioService {
void disableRingtoneSync();
int getFocusRampTimeMs(in int focusGain, in AudioAttributes attr);
// WARNING: read warning at top of file, it is recommended to add new methods at the end
}

View File

@@ -25,4 +25,6 @@ interface IPlayer {
oneway void pause();
oneway void stop();
oneway void setVolume(float vol);
oneway void setPan(float pan);
oneway void setStartDelayMs(int delayMs);
}

View File

@@ -1245,6 +1245,26 @@ public class MediaPlayer extends PlayerBase
* @throws IllegalStateException if it is called in an invalid state
*/
public void start() throws IllegalStateException {
//FIXME use lambda to pass startImpl to superclass
final int delay = getStartDelayMs();
if (delay == 0) {
startImpl();
} else {
new Thread() {
public void run() {
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
e.printStackTrace();
}
baseSetStartDelayMs(0);
startImpl();
}
}.start();
}
}
private void startImpl() {
baseStart();
stayAwake(true);
_start();

View File

@@ -58,14 +58,17 @@ public abstract class PlayerBase {
// for AppOps
private IAppOpsService mAppOps;
private IAppOpsCallback mAppOpsCallback;
private boolean mHasAppOpsPlayAudio = true;
private final Object mAppOpsLock = new Object();
private boolean mHasAppOpsPlayAudio = true; // sync'd on mLock
private final Object mLock = new Object();
private final int mImplType;
// uniquely identifies the Player Interface throughout the system (P I Id)
private int mPlayerIId;
private int mState;
private int mState; // sync'd on mLock
private int mStartDelayMs = 0; // sync'd on mLock
private float mPanMultiplierL = 1.0f; // sync'd on mLock
private float mPanMultiplierR = 1.0f; // sync'd on mLock
/**
* Constructor. Must be given audio attributes, as they are required for AppOps.
@@ -89,11 +92,13 @@ public abstract class PlayerBase {
IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
mAppOps = IAppOpsService.Stub.asInterface(b);
// initialize mHasAppOpsPlayAudio
updateAppOpsPlayAudio_sync();
synchronized (mLock) {
updateAppOpsPlayAudio_sync();
}
// register a callback to monitor whether the OP_PLAY_AUDIO is still allowed
mAppOpsCallback = new IAppOpsCallback.Stub() {
public void opChanged(int op, int uid, String packageName) {
synchronized (mAppOpsLock) {
synchronized (mLock) {
if (op == AppOpsManager.OP_PLAY_AUDIO) {
updateAppOpsPlayAudio_sync();
}
@@ -130,7 +135,7 @@ public abstract class PlayerBase {
} catch (RemoteException e) {
Log.e(TAG, "Error talking to audio service, STARTED state will not be tracked", e);
}
synchronized (mAppOpsLock) {
synchronized (mLock) {
mAttributes = attr;
updateAppOpsPlayAudio_sync();
}
@@ -139,23 +144,39 @@ public abstract class PlayerBase {
void baseStart() {
if (DEBUG) { Log.v(TAG, "baseStart() piid=" + mPlayerIId); }
try {
mState = AudioPlaybackConfiguration.PLAYER_STATE_STARTED;
getService().playerEvent(mPlayerIId, mState);
synchronized (mLock) {
mState = AudioPlaybackConfiguration.PLAYER_STATE_STARTED;
getService().playerEvent(mPlayerIId, mState);
}
} catch (RemoteException e) {
Log.e(TAG, "Error talking to audio service, STARTED state will not be tracked", e);
}
synchronized (mAppOpsLock) {
synchronized (mLock) {
if (isRestricted_sync()) {
playerSetVolume(true/*muting*/,0, 0);
}
}
}
void baseSetStartDelayMs(int delayMs) {
synchronized(mLock) {
mStartDelayMs = Math.max(delayMs, 0);
}
}
protected int getStartDelayMs() {
synchronized(mLock) {
return mStartDelayMs;
}
}
void basePause() {
if (DEBUG) { Log.v(TAG, "basePause() piid=" + mPlayerIId); }
try {
mState = AudioPlaybackConfiguration.PLAYER_STATE_PAUSED;
getService().playerEvent(mPlayerIId, mState);
synchronized (mLock) {
mState = AudioPlaybackConfiguration.PLAYER_STATE_PAUSED;
getService().playerEvent(mPlayerIId, mState);
}
} catch (RemoteException e) {
Log.e(TAG, "Error talking to audio service, PAUSED state will not be tracked", e);
}
@@ -164,26 +185,45 @@ public abstract class PlayerBase {
void baseStop() {
if (DEBUG) { Log.v(TAG, "baseStop() piid=" + mPlayerIId); }
try {
mState = AudioPlaybackConfiguration.PLAYER_STATE_STOPPED;
getService().playerEvent(mPlayerIId, mState);
synchronized (mLock) {
mState = AudioPlaybackConfiguration.PLAYER_STATE_STOPPED;
getService().playerEvent(mPlayerIId, mState);
}
} catch (RemoteException e) {
Log.e(TAG, "Error talking to audio service, STOPPED state will not be tracked", e);
}
}
void baseSetPan(float pan) {
final float p = Math.min(Math.max(-1.0f, pan), 1.0f);
synchronized (mLock) {
if (p >= 0.0f) {
mPanMultiplierL = 1.0f - p;
mPanMultiplierR = 1.0f;
} else {
mPanMultiplierL = 1.0f;
mPanMultiplierR = 1.0f + p;
}
}
baseSetVolume(mLeftVolume, mRightVolume);
}
void baseSetVolume(float leftVolume, float rightVolume) {
synchronized (mAppOpsLock) {
final boolean hasAppOpsPlayAudio;
synchronized (mLock) {
mLeftVolume = leftVolume;
mRightVolume = rightVolume;
hasAppOpsPlayAudio = mHasAppOpsPlayAudio;
if (isRestricted_sync()) {
return;
}
}
playerSetVolume(false/*muting*/,leftVolume, rightVolume);
playerSetVolume(!hasAppOpsPlayAudio/*muting*/,
leftVolume * mPanMultiplierL, rightVolume * mPanMultiplierR);
}
int baseSetAuxEffectSendLevel(float level) {
synchronized (mAppOpsLock) {
synchronized (mLock) {
mAuxEffectSendLevel = level;
if (isRestricted_sync()) {
return AudioSystem.SUCCESS;
@@ -199,9 +239,11 @@ public abstract class PlayerBase {
void baseRelease() {
if (DEBUG) { Log.v(TAG, "baseRelease() piid=" + mPlayerIId + " state=" + mState); }
try {
if (mState != AudioPlaybackConfiguration.PLAYER_STATE_RELEASED) {
getService().releasePlayer(mPlayerIId);
mState = AudioPlaybackConfiguration.PLAYER_STATE_RELEASED;
synchronized (mLock) {
if (mState != AudioPlaybackConfiguration.PLAYER_STATE_RELEASED) {
getService().releasePlayer(mPlayerIId);
mState = AudioPlaybackConfiguration.PLAYER_STATE_RELEASED;
}
}
} catch (RemoteException e) {
Log.e(TAG, "Error talking to audio service, the player will still be tracked", e);
@@ -215,7 +257,7 @@ public abstract class PlayerBase {
/**
* To be called whenever a condition that might affect audibility of this player is updated.
* Must be called synchronized on mAppOpsLock.
* Must be called synchronized on mLock.
*/
void updateAppOpsPlayAudio_sync() {
boolean oldHasAppOpsPlayAudio = mHasAppOpsPlayAudio;
@@ -237,7 +279,8 @@ public abstract class PlayerBase {
Log.v(TAG, "updateAppOpsPlayAudio: unmuting player, vol=" + mLeftVolume
+ "/" + mRightVolume);
}
playerSetVolume(false/*muting*/, mLeftVolume, mRightVolume);
playerSetVolume(false/*muting*/,
mLeftVolume * mPanMultiplierL, mRightVolume * mPanMultiplierR);
playerSetAuxEffectSendLevel(false/*muting*/, mAuxEffectSendLevel);
} else {
if (DEBUG_APP_OPS) {
@@ -297,6 +340,14 @@ public abstract class PlayerBase {
return sService;
}
/**
* @hide
* @param delayMs
*/
public void setStartDelayMs(int delayMs) {
baseSetStartDelayMs(delayMs);
}
//=====================================================================
// Abstract methods a subclass needs to implement
/**
@@ -335,6 +386,16 @@ public abstract class PlayerBase {
public void setVolume(float vol) {
baseSetVolume(vol, vol);
}
@Override
public void setPan(float pan) {
baseSetPan(pan);
}
@Override
public void setStartDelayMs(int delayMs) {
baseSetStartDelayMs(delayMs);
}
};
//=====================================================================

View File

@@ -52,10 +52,9 @@ public class PlayerProxy {
// Methods matching the IPlayer interface
/**
* @hide
* @throws IllegalStateException
*/
@SystemApi
public void start() throws IllegalStateException {
public void start() {
try {
mConf.getIPlayer().start();
} catch (NullPointerException|RemoteException e) {
@@ -66,10 +65,9 @@ public class PlayerProxy {
/**
* @hide
* @throws IllegalStateException
*/
@SystemApi
public void pause() throws IllegalStateException {
public void pause() {
try {
mConf.getIPlayer().pause();
} catch (NullPointerException|RemoteException e) {
@@ -80,10 +78,9 @@ public class PlayerProxy {
/**
* @hide
* @throws IllegalStateException
*/
@SystemApi
public void stop() throws IllegalStateException {
public void stop() {
try {
mConf.getIPlayer().stop();
} catch (NullPointerException|RemoteException e) {
@@ -94,10 +91,10 @@ public class PlayerProxy {
/**
* @hide
* @throws IllegalStateException
* @param vol
*/
@SystemApi
public void setVolume(float vol) throws IllegalStateException {
public void setVolume(float vol) {
try {
mConf.getIPlayer().setVolume(vol);
} catch (NullPointerException|RemoteException e) {
@@ -106,4 +103,33 @@ public class PlayerProxy {
}
}
/**
* @hide
* @param pan the pan value to set on the proxied player, forwarded as-is to
*     {@code IPlayer.setPan(float)}
* @throws IllegalStateException if the underlying player is absent or its process died
*     (i.e. the player was already released)
*/
@SystemApi
public void setPan(float pan) {
try {
mConf.getIPlayer().setPan(pan);
} catch (NullPointerException|RemoteException e) {
// A null IPlayer or a dead remote both mean the player is gone: surface one error type
throw new IllegalStateException(
"No player to proxy for setPan operation, player already released?", e);
}
}
/**
* @hide
* @param delayMs the start delay in milliseconds to set on the proxied player, forwarded
*     as-is to {@code IPlayer.setStartDelayMs(int)}
* @throws IllegalStateException if the underlying player is absent or its process died
*     (i.e. the player was already released)
*/
@SystemApi
public void setStartDelayMs(int delayMs) {
try {
mConf.getIPlayer().setStartDelayMs(delayMs);
} catch (NullPointerException|RemoteException e) {
// A null IPlayer or a dead remote both mean the player is gone: surface one error type
throw new IllegalStateException(
"No player to proxy for setStartDelayMs operation, player already released?",
e);
}
}
}

View File

@@ -41,7 +41,7 @@ import java.util.LinkedList;
public class NotificationPlayer implements OnCompletionListener, OnErrorListener {
private static final int PLAY = 1;
private static final int STOP = 2;
private static final boolean mDebug = false;
private static final boolean DEBUG = false;
private static final class Command {
int code;
@@ -97,17 +97,18 @@ public class NotificationPlayer implements OnCompletionListener, OnErrorListener
if (!audioManager.isMusicActiveRemotely()) {
synchronized(mQueueAudioFocusLock) {
if (mAudioManagerWithAudioFocus == null) {
if (mDebug) Log.d(mTag, "requesting AudioFocus");
if (DEBUG) Log.d(mTag, "requesting AudioFocus");
int focusGain = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
if (mCmd.looping) {
audioManager.requestAudioFocus(null, mCmd.attributes,
AudioManager.AUDIOFOCUS_GAIN, 0);
} else {
audioManager.requestAudioFocus(null, mCmd.attributes,
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK, 0);
focusGain = AudioManager.AUDIOFOCUS_GAIN;
}
mNotificationRampTimeMs = audioManager.getFocusRampTimeMs(
focusGain, mCmd.attributes);
audioManager.requestAudioFocus(null, mCmd.attributes,
focusGain, 0);
mAudioManagerWithAudioFocus = audioManager;
} else {
if (mDebug) Log.d(mTag, "AudioFocus was previously requested");
if (DEBUG) Log.d(mTag, "AudioFocus was previously requested");
}
}
}
@@ -119,6 +120,9 @@ public class NotificationPlayer implements OnCompletionListener, OnErrorListener
// command are issued, and on which it receives the completion callbacks.
player.setOnCompletionListener(NotificationPlayer.this);
player.setOnErrorListener(NotificationPlayer.this);
if (DEBUG) { Log.d(mTag, "notification will be delayed by "
+ mNotificationRampTimeMs + "ms"); }
player.setStartDelayMs(mNotificationRampTimeMs);
player.start();
if (mPlayer != null) {
mPlayer.release();
@@ -139,7 +143,7 @@ public class NotificationPlayer implements OnCompletionListener, OnErrorListener
// is playing, let it continue until we're done, so there
// is less of a glitch.
try {
if (mDebug) Log.d(mTag, "Starting playback");
if (DEBUG) Log.d(mTag, "Starting playback");
//-----------------------------------
// This is where we deviate from the AsyncPlayer implementation and create the
// MediaPlayer in a new thread with which we're synchronized
@@ -179,17 +183,17 @@ public class NotificationPlayer implements OnCompletionListener, OnErrorListener
Command cmd = null;
synchronized (mCmdQueue) {
if (mDebug) Log.d(mTag, "RemoveFirst");
if (DEBUG) Log.d(mTag, "RemoveFirst");
cmd = mCmdQueue.removeFirst();
}
switch (cmd.code) {
case PLAY:
if (mDebug) Log.d(mTag, "PLAY");
if (DEBUG) Log.d(mTag, "PLAY");
startSound(cmd);
break;
case STOP:
if (mDebug) Log.d(mTag, "STOP");
if (DEBUG) Log.d(mTag, "STOP");
if (mPlayer != null) {
long delay = SystemClock.uptimeMillis() - cmd.requestTime;
if (delay > 1000) {
@@ -232,11 +236,11 @@ public class NotificationPlayer implements OnCompletionListener, OnErrorListener
public void onCompletion(MediaPlayer mp) {
synchronized(mQueueAudioFocusLock) {
if (mAudioManagerWithAudioFocus != null) {
if (mDebug) Log.d(mTag, "onCompletion() abandonning AudioFocus");
if (DEBUG) Log.d(mTag, "onCompletion() abandonning AudioFocus");
mAudioManagerWithAudioFocus.abandonAudioFocus(null);
mAudioManagerWithAudioFocus = null;
} else {
if (mDebug) Log.d(mTag, "onCompletion() no need to abandon AudioFocus");
if (DEBUG) Log.d(mTag, "onCompletion() no need to abandon AudioFocus");
}
}
// if there are no more sounds to play, end the Looper to listen for media completion
@@ -267,6 +271,7 @@ public class NotificationPlayer implements OnCompletionListener, OnErrorListener
private PowerManager.WakeLock mWakeLock;
private final Object mQueueAudioFocusLock = new Object();
private AudioManager mAudioManagerWithAudioFocus; // synchronized on mQueueAudioFocusLock
private int mNotificationRampTimeMs = 0;
// The current state according to the caller. Reality lags behind
// because of the asynchronous nature of this class.

View File

@@ -688,7 +688,7 @@ public class AudioService extends IAudioService.Stub
mSettingsObserver = new SettingsObserver();
createStreamStates();
mMediaFocusControl = new MediaFocusControl(mContext);
mMediaFocusControl = new MediaFocusControl(mContext, mPlaybackMonitor);
readAndSetLowRamDevice();
@@ -5581,6 +5581,10 @@ public class AudioService extends IAudioService.Stub
return mMediaFocusControl.getCurrentAudioFocus();
}
public int getFocusRampTimeMs(int focusGain, AudioAttributes attr) {
return mMediaFocusControl.getFocusRampTimeMs(focusGain, attr);
}
private boolean readCameraSoundForced() {
return SystemProperties.getBoolean("audio.camerasound.force", false) ||
mContext.getResources().getBoolean(

View File

@@ -17,6 +17,7 @@
package com.android.server.audio;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.media.AudioAttributes;
import android.media.AudioFocusInfo;
import android.media.AudioManager;
@@ -47,6 +48,7 @@ public class FocusRequester {
private final String mPackageName;
private final int mCallingUid;
private final MediaFocusControl mFocusController; // never null
/**
* the audio focus gain request that caused the addition of this object in the focus stack.
*/
@@ -61,6 +63,10 @@ public class FocusRequester {
* it never lost focus.
*/
private int mFocusLossReceived;
/**
* whether this focus owner listener was notified when it lost focus
*/
private boolean mFocusLossWasNotified;
/**
* the audio attributes associated with the focus request
*/
@@ -124,6 +130,10 @@ public class FocusRequester {
return mCallingUid == uid;
}
int getClientUid() {
return mCallingUid;
}
String getClientId() {
return mClientId;
}
@@ -195,6 +205,7 @@ public class FocusRequester {
+ " -- gain: " + focusGainToString()
+ " -- flags: " + flagsToString(mGrantFlags)
+ " -- loss: " + focusLossToString()
+ " -- notified: " + mFocusLossWasNotified
+ " -- uid: " + mCallingUid
+ " -- attr: " + mAttributes);
}
@@ -263,9 +274,9 @@ public class FocusRequester {
/**
* Called synchronized on MediaFocusControl.mAudioFocusLock
*/
void handleExternalFocusGain(int focusGain) {
void handleExternalFocusGain(int focusGain, final FocusRequester fr) {
int focusLoss = focusLossForGainRequest(focusGain);
handleFocusLoss(focusLoss);
handleFocusLoss(focusLoss, fr);
}
/**
@@ -273,6 +284,7 @@ public class FocusRequester {
*/
void handleFocusGain(int focusGain) {
try {
final int oldLoss = mFocusLossReceived;
mFocusLossReceived = AudioManager.AUDIOFOCUS_NONE;
mFocusController.notifyExtPolicyFocusGrant_syncAf(toAudioFocusInfo(),
AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
@@ -282,8 +294,13 @@ public class FocusRequester {
Log.v(TAG, "dispatching " + focusChangeToString(focusGain) + " to "
+ mClientId);
}
fd.dispatchAudioFocusChange(focusGain, mClientId);
if (mFocusLossWasNotified) {
fd.dispatchAudioFocusChange(focusGain, mClientId);
} else if (oldLoss == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
mFocusController.unduckPlayers(this);
}
}
mFocusLossWasNotified = false;
} catch (android.os.RemoteException e) {
Log.e(TAG, "Failure to signal gain of audio focus due to: ", e);
}
@@ -292,10 +309,11 @@ public class FocusRequester {
/**
* Called synchronized on MediaFocusControl.mAudioFocusLock
*/
void handleFocusLoss(int focusLoss) {
void handleFocusLoss(int focusLoss, @Nullable final FocusRequester fr) {
try {
if (focusLoss != mFocusLossReceived) {
mFocusLossReceived = focusLoss;
mFocusLossWasNotified = false;
// before dispatching a focus loss, check if the following conditions are met:
// 1/ the framework is not supposed to notify the focus loser on a DUCK loss
// 2/ it is a DUCK loss
@@ -313,6 +331,27 @@ public class FocusRequester {
toAudioFocusInfo(), false /* wasDispatched */);
return;
}
// check enforcement by the framework
boolean handled = false;
if (focusLoss == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK
&& MediaFocusControl.ENFORCE_DUCKING
&& fr != null) {
// candidate for enforcement by the framework
if (fr.mCallingUid != this.mCallingUid) {
handled = mFocusController.duckPlayers(fr, this);
} // else: the focus change is within the same app, so let the dispatching
// happen as if the framework was not involved.
}
if (handled) {
if (DEBUG) {
Log.v(TAG, "NOT dispatching " + focusChangeToString(mFocusLossReceived)
+ " to " + mClientId + ", ducking implemented by framework");
}
return; // with mFocusLossWasNotified = false
}
final IAudioFocusDispatcher fd = mFocusDispatcher;
if (fd != null) {
if (DEBUG) {
@@ -321,6 +360,7 @@ public class FocusRequester {
}
mFocusController.notifyExtPolicyFocusLoss_syncAf(
toAudioFocusInfo(), true /* wasDispatched */);
mFocusLossWasNotified = true;
fd.dispatchAudioFocusChange(mFocusLossReceived, mClientId);
}
}
@@ -330,7 +370,7 @@ public class FocusRequester {
}
AudioFocusInfo toAudioFocusInfo() {
return new AudioFocusInfo(mAttributes, mClientId, mPackageName,
return new AudioFocusInfo(mAttributes, mCallingUid, mClientId, mPackageName,
mFocusGainRequest, mFocusLossReceived, mGrantFlags);
}
}

View File

@@ -40,16 +40,24 @@ import java.text.DateFormat;
* @hide
*
*/
public class MediaFocusControl {
public class MediaFocusControl implements PlayerFocusEnforcer {
private static final String TAG = "MediaFocusControl";
/**
* set to true so the framework enforces ducking itself, without communicating to apps
* that they lost focus.
*/
static final boolean ENFORCE_DUCKING = false;
private final Context mContext;
private final AppOpsManager mAppOps;
private PlayerFocusEnforcer mFocusEnforcer; // never null
protected MediaFocusControl(Context cntxt) {
protected MediaFocusControl(Context cntxt, PlayerFocusEnforcer pfe) {
mContext = cntxt;
mAppOps = (AppOpsManager)mContext.getSystemService(Context.APP_OPS_SERVICE);
mFocusEnforcer = pfe;
}
protected void dump(PrintWriter pw) {
@@ -58,6 +66,17 @@ public class MediaFocusControl {
dumpFocusStack(pw);
}
//=================================================================
// PlayerFocusEnforcer implementation
@Override
public boolean duckPlayers(FocusRequester winner, FocusRequester loser) {
return mFocusEnforcer.duckPlayers(winner, loser);
}
@Override
public void unduckPlayers(FocusRequester winner) {
mFocusEnforcer.unduckPlayers(winner);
}
//==========================================================================================
// AudioFocus
@@ -75,7 +94,7 @@ public class MediaFocusControl {
if (!mFocusStack.empty()) {
// notify the current focus owner it lost focus after removing it from stack
final FocusRequester exFocusOwner = mFocusStack.pop();
exFocusOwner.handleFocusLoss(AudioManager.AUDIOFOCUS_LOSS);
exFocusOwner.handleFocusLoss(AudioManager.AUDIOFOCUS_LOSS, null);
exFocusOwner.release();
}
}
@@ -97,12 +116,12 @@ public class MediaFocusControl {
* Focus is requested, propagate the associated loss throughout the stack.
* @param focusGain the new focus gain that will later be added at the top of the stack
*/
private void propagateFocusLossFromGain_syncAf(int focusGain) {
private void propagateFocusLossFromGain_syncAf(int focusGain, final FocusRequester fr) {
// going through the audio focus stack to signal new focus, traversing order doesn't
// matter as all entries respond to the same external focus gain
Iterator<FocusRequester> stackIterator = mFocusStack.iterator();
while(stackIterator.hasNext()) {
stackIterator.next().handleExternalFocusGain(focusGain);
stackIterator.next().handleExternalFocusGain(focusGain, fr);
}
}
@@ -237,7 +256,7 @@ public class MediaFocusControl {
Log.e(TAG, "No exclusive focus owner found in propagateFocusLossFromGain_syncAf()",
new Exception());
// no exclusive owner, push at top of stack, focus is granted, propagate change
propagateFocusLossFromGain_syncAf(nfr.getGainRequest());
propagateFocusLossFromGain_syncAf(nfr.getGainRequest(), nfr);
mFocusStack.push(nfr);
return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
} else {
@@ -381,6 +400,38 @@ public class MediaFocusControl {
}
}
/**
* Return the volume ramp time expected before playback with the given AudioAttributes would
* start after gaining audio focus.
* @param focusGain the focus gain request that preceded playback; currently unused by this
*     implementation — NOTE(review): confirm whether the ramp should depend on it
* @param attr attributes of the sound about to start playing
* @return time in ms
*/
protected int getFocusRampTimeMs(int focusGain, AudioAttributes attr) {
// Ramp duration is tiered by usage: longer for media/game, shorter for
// notifications and sonification, none for unknown usages.
switch (attr.getUsage()) {
case AudioAttributes.USAGE_MEDIA:
case AudioAttributes.USAGE_GAME:
return 1000;
case AudioAttributes.USAGE_ALARM:
case AudioAttributes.USAGE_NOTIFICATION_RINGTONE:
case AudioAttributes.USAGE_ASSISTANT:
case AudioAttributes.USAGE_ASSISTANCE_ACCESSIBILITY:
case AudioAttributes.USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
return 700;
case AudioAttributes.USAGE_VOICE_COMMUNICATION:
case AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING:
case AudioAttributes.USAGE_NOTIFICATION:
case AudioAttributes.USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
case AudioAttributes.USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
case AudioAttributes.USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
case AudioAttributes.USAGE_NOTIFICATION_EVENT:
case AudioAttributes.USAGE_ASSISTANCE_SONIFICATION:
return 500;
case AudioAttributes.USAGE_UNKNOWN:
default:
return 0;
}
}
/** @see AudioManager#requestAudioFocus(AudioManager.OnAudioFocusChangeListener, int, int, int) */
protected int requestAudioFocus(AudioAttributes aa, int focusChangeHint, IBinder cb,
IAudioFocusDispatcher fd, String clientId, String callingPackageName, int flags) {
@@ -463,7 +514,7 @@ public class MediaFocusControl {
} else {
// propagate the focus change through the stack
if (!mFocusStack.empty()) {
propagateFocusLossFromGain_syncAf(focusChangeHint);
propagateFocusLossFromGain_syncAf(focusChangeHint, nfr);
}
// push focus requester at the top of the audio focus stack

View File

@@ -37,15 +37,16 @@ import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Class to receive and dispatch updates from AudioSystem about recording configurations.
*/
public final class PlaybackActivityMonitor
implements AudioPlaybackConfiguration.PlayerDeathMonitor {
implements AudioPlaybackConfiguration.PlayerDeathMonitor, PlayerFocusEnforcer {
public final static String TAG = "AudioService.PlaybackActivityMonitor";
private final static boolean DEBUG = false;
private final static boolean DEBUG = true;
private ArrayList<PlayMonitorClient> mClients = new ArrayList<PlayMonitorClient>();
// a public client is one that needs an anonymized version of the playback configurations, we
@@ -134,12 +135,18 @@ public final class PlaybackActivityMonitor
}
protected void dump(PrintWriter pw) {
// players
pw.println("\nPlaybackActivityMonitor dump time: "
+ DateFormat.getTimeInstance().format(new Date()));
synchronized(mPlayerLock) {
for (AudioPlaybackConfiguration conf : mPlayers.values()) {
conf.dump(pw);
}
// ducked players
pw.println("\n ducked player piids:");
for (int piid : mDuckedPlayers) {
pw.println(" " + piid);
}
}
}
@@ -211,7 +218,7 @@ public final class PlaybackActivityMonitor
List<AudioPlaybackConfiguration> sysConfigs) {
ArrayList<AudioPlaybackConfiguration> publicConfigs =
new ArrayList<AudioPlaybackConfiguration>();
// only add active anonymized configurations,
// only add active anonymized configurations,
for (AudioPlaybackConfiguration config : sysConfigs) {
if (config.isActive()) {
publicConfigs.add(AudioPlaybackConfiguration.anonymizedCopy(config));
@@ -220,6 +227,82 @@ public final class PlaybackActivityMonitor
return publicConfigs;
}
//=================================================================
// PlayerFocusEnforcer implementation
private final ArrayList<Integer> mDuckedPlayers = new ArrayList<Integer>();
@Override
public boolean duckPlayers(FocusRequester winner, FocusRequester loser) {
if (DEBUG) {
Log.v(TAG, String.format("duckPlayers: uids winner=%d loser=%d",
winner.getClientUid(), loser.getClientUid())); }
synchronized (mPlayerLock) {
if (mPlayers.isEmpty()) {
return true;
}
final Set<Integer> piidSet = mPlayers.keySet();
final Iterator<Integer> piidIterator = piidSet.iterator();
// find which players to duck
while (piidIterator.hasNext()) {
final Integer piid = piidIterator.next();
final AudioPlaybackConfiguration apc = mPlayers.get(piid);
if (!winner.hasSameUid(apc.getClientUid())
&& loser.hasSameUid(apc.getClientUid())
&& apc.getPlayerState() == AudioPlaybackConfiguration.PLAYER_STATE_STARTED)
{
if (mDuckedPlayers.contains(piid)) {
if (DEBUG) { Log.v(TAG, "player " + piid + " already ducked"); }
} else if (apc.getAudioAttributes().getContentType() ==
AudioAttributes.CONTENT_TYPE_SPEECH) {
// the player is speaking, ducking will make the speech unintelligible
// so let the app handle it instead
return false;
} else {
try {
if (DEBUG) { Log.v(TAG, "ducking player " + piid); }
//FIXME just a test before we have VolumeShape
apc.getPlayerProxy().setPan(-1.0f);
mDuckedPlayers.add(piid);
} catch (Exception e) {
Log.e(TAG, "Error ducking player " + piid, e);
// something went wrong trying to duck, so let the app handle it
// instead, it may know things we don't
return false;
}
}
}
}
}
return true;
}
@Override
public void unduckPlayers(FocusRequester winner) {
if (DEBUG) { Log.v(TAG, "unduckPlayers: uids winner=" + winner.getClientUid()); }
synchronized (mPlayerLock) {
if (mDuckedPlayers.isEmpty()) {
return;
}
for (int piid : mDuckedPlayers) {
final AudioPlaybackConfiguration apc = mPlayers.get(piid);
if (apc != null
&& winner.hasSameUid(apc.getClientUid())) {
try {
if (DEBUG) { Log.v(TAG, "unducking player" + piid); }
//FIXME just a test before we have VolumeShape
apc.getPlayerProxy().setPan(0.0f);
mDuckedPlayers.remove(new Integer(piid));
} catch (Exception e) {
Log.e(TAG, "Error unducking player " + piid, e);
}
} else {
Log.e(TAG, "Error unducking player " + piid + ", player not found");
}
}
}
}
//=================================================================
// Track playback activity listeners

View File

@@ -0,0 +1,31 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server.audio;
public interface PlayerFocusEnforcer {
/**
* Ducks the players associated with the "loser" focus owner (i.e. same UID). Returns true if
* at least one active player was found and ducked, false otherwise.
* @param winner the focus owner whose focus gain triggers the ducking
* @param loser the focus owner whose players (same UID) are to be ducked
* @return true if at least one active player was found and ducked, false otherwise
*/
public boolean duckPlayers(FocusRequester winner, FocusRequester loser);

/**
* Restores the players that were ducked on behalf of the given focus winner
* (see {@link #duckPlayers(FocusRequester, FocusRequester)}).
* @param winner the focus owner whose ducked players are to be restored
*/
public void unduckPlayers(FocusRequester winner);
}