Merge "AudioTrack Java constructor with AudioAttributes and AudioFormat"

This commit is contained in:
Jean-Michel Trivi
2014-06-25 14:53:41 +00:00
committed by Android (Google) Code Review
5 changed files with 197 additions and 58 deletions

View File

@@ -40,14 +40,23 @@ using namespace android;
// ----------------------------------------------------------------------------
static const char* const kClassPathName = "android/media/AudioTrack";
static const char* const kAudioAttributesClassPathName = "android/media/AudioAttributes";
struct fields_t {
struct audio_track_fields_t {
// these fields provide access from C++ to the...
jmethodID postNativeEventInJava; //... event post callback method
jfieldID nativeTrackInJavaObj; // stores in Java the native AudioTrack object
jfieldID jniData; // stores in Java additional resources used by the native AudioTrack
jfieldID fieldStreamType; // ... mStreamType field in the AudioTrack Java object
};
static fields_t javaAudioTrackFields;
struct audio_attributes_fields_t {
jfieldID fieldUsage; // AudioAttributes.mUsage
jfieldID fieldContentType; // AudioAttributes.mContentType
jfieldID fieldFlags; // AudioAttributes.mFlags
jfieldID fieldTags; // AudioAttributes.mTags
};
static audio_track_fields_t javaAudioTrackFields;
static audio_attributes_fields_t javaAudioAttrFields;
struct audiotrack_callback_cookie {
jclass audioTrack_class;
@@ -66,12 +75,10 @@ class AudioTrackJniStorage {
sp<MemoryHeapBase> mMemHeap;
sp<MemoryBase> mMemBase;
audiotrack_callback_cookie mCallbackData;
audio_stream_type_t mStreamType;
AudioTrackJniStorage() {
mCallbackData.audioTrack_class = 0;
mCallbackData.audioTrack_ref = 0;
mStreamType = AUDIO_STREAM_DEFAULT;
}
~AudioTrackJniStorage() {
@@ -174,16 +181,21 @@ static sp<AudioTrack> setAudioTrack(JNIEnv* env, jobject thiz, const sp<AudioTra
env->SetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, (jlong)at.get());
return old;
}
// ----------------------------------------------------------------------------
static jint
android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
jint streamType, jint sampleRateInHertz, jint javaChannelMask,
jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession)
{
jobject jaa,
jint sampleRateInHertz, jint javaChannelMask,
jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession) {
ALOGV("sampleRate=%d, audioFormat(from Java)=%d, channel mask=%x, buffSize=%d",
sampleRateInHertz, audioFormat, javaChannelMask, buffSizeInBytes);
if (jaa == 0) {
ALOGE("Error creating AudioTrack: invalid audio attributes");
return (jint) AUDIO_JAVA_ERROR;
}
// Java channel masks don't map directly to the native definition, but it's a simple shift
// to skip the two deprecated channel configurations "default" and "mono".
audio_channel_mask_t nativeChannelMask = ((uint32_t)javaChannelMask) >> 2;
@@ -195,9 +207,6 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
uint32_t channelCount = popcount(nativeChannelMask);
// stream type already checked in Java
audio_stream_type_t atStreamType = (audio_stream_type_t) streamType;
// check the format.
// This function was called from Java, so we compare the format against the Java constants
audio_format_t format = audioFormatToNative(audioFormat);
@@ -251,10 +260,25 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
// create the native AudioTrack object
sp<AudioTrack> lpTrack = new AudioTrack();
audio_attributes_t *paa = NULL;
// read the AudioAttributes values
paa = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
const jstring jtags = (jstring) env->GetObjectField(jaa, javaAudioAttrFields.fieldTags);
const char* tags = env->GetStringUTFChars(jtags, NULL);
// copying array size -1, char array for tags was calloc'd, no need to NULL-terminate it
strncpy(paa->tags, tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
env->ReleaseStringUTFChars(jtags, tags);
paa->usage = (audio_usage_t) env->GetIntField(jaa, javaAudioAttrFields.fieldUsage);
paa->content_type =
(audio_content_type_t) env->GetIntField(jaa, javaAudioAttrFields.fieldContentType);
paa->flags = env->GetIntField(jaa, javaAudioAttrFields.fieldFlags);
ALOGV("AudioTrack_setup for usage=%d content=%d flags=0x%#x tags=%s",
paa->usage, paa->content_type, paa->flags, paa->tags);
// initialize the callback information:
// this data will be passed with every AudioTrack callback
AudioTrackJniStorage* lpJniStorage = new AudioTrackJniStorage();
lpJniStorage->mStreamType = atStreamType;
lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
// we use a weak reference so the AudioTrack object can be garbage collected.
lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
@@ -266,17 +290,21 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
case MODE_STREAM:
status = lpTrack->set(
atStreamType,// stream type
sampleRateInHertz,
format,// word length, PCM
nativeChannelMask,
frameCount,
audio_is_linear_pcm(format) ? AUDIO_OUTPUT_FLAG_NONE : AUDIO_OUTPUT_FLAG_DIRECT,
audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
0,// shared mem
true,// thread can call Java
sessionId);// audio session ID
AUDIO_STREAM_DEFAULT,// stream type
sampleRateInHertz,
format,// word length, PCM
nativeChannelMask,
frameCount,
AUDIO_OUTPUT_FLAG_NONE,
audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
0,// shared mem
true,// thread can call Java
sessionId,// audio session ID
AudioTrack::TRANSFER_DEFAULT, // default transfer mode
NULL, // default offloadInfo
-1, -1, // default uid, pid values
paa);
break;
case MODE_STATIC:
@@ -288,17 +316,21 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
}
status = lpTrack->set(
atStreamType,// stream type
sampleRateInHertz,
format,// word length, PCM
nativeChannelMask,
frameCount,
AUDIO_OUTPUT_FLAG_NONE,
audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
lpJniStorage->mMemBase,// shared mem
true,// thread can call Java
sessionId);// audio session ID
AUDIO_STREAM_DEFAULT,// stream type
sampleRateInHertz,
format,// word length, PCM
nativeChannelMask,
frameCount,
AUDIO_OUTPUT_FLAG_NONE,
audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
lpJniStorage->mMemBase,// shared mem
true,// thread can call Java
sessionId,// audio session ID
AudioTrack::TRANSFER_DEFAULT, // default transfer mode
NULL, // default offloadInfo
-1, -1, // default uid, pid values
paa);
break;
default:
@@ -333,10 +365,21 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
//ALOGV("storing lpJniStorage: %x\n", (long)lpJniStorage);
env->SetLongField(thiz, javaAudioTrackFields.jniData, (jlong)lpJniStorage);
// since we had audio attributes, the stream type was derived from them during the
// creation of the native AudioTrack: push the same value to the Java object
env->SetIntField(thiz, javaAudioTrackFields.fieldStreamType, (jint) lpTrack->streamType());
// audio attributes were copied in AudioTrack creation
free(paa);
paa = NULL;
return (jint) AUDIO_JAVA_SUCCESS;
// failures:
native_init_failure:
if (paa != NULL) {
free(paa);
}
if (nSession != NULL) {
env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
}
@@ -948,7 +991,7 @@ static JNINativeMethod gMethods[] = {
{"native_stop", "()V", (void *)android_media_AudioTrack_stop},
{"native_pause", "()V", (void *)android_media_AudioTrack_pause},
{"native_flush", "()V", (void *)android_media_AudioTrack_flush},
{"native_setup", "(Ljava/lang/Object;IIIIII[I)I",
{"native_setup", "(Ljava/lang/Object;Ljava/lang/Object;IIIII[I)I",
(void *)android_media_AudioTrack_setup},
{"native_finalize", "()V", (void *)android_media_AudioTrack_finalize},
{"native_release", "()V", (void *)android_media_AudioTrack_release},
@@ -992,6 +1035,7 @@ static JNINativeMethod gMethods[] = {
#define JAVA_POSTEVENT_CALLBACK_NAME "postEventFromNative"
#define JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME "mNativeTrackInJavaObj"
#define JAVA_JNIDATA_FIELD_NAME "mJniData"
#define JAVA_STREAMTYPE_FIELD_NAME "mStreamType"
// ----------------------------------------------------------------------------
// preconditions:
@@ -1041,7 +1085,7 @@ int register_android_media_AudioTrack(JNIEnv *env)
ALOGE("Can't find AudioTrack.%s", JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME);
return -1;
}
// jniData;
// jniData
javaAudioTrackFields.jniData = env->GetFieldID(
audioTrackClass,
JAVA_JNIDATA_FIELD_NAME, "J");
@@ -1049,6 +1093,33 @@ int register_android_media_AudioTrack(JNIEnv *env)
ALOGE("Can't find AudioTrack.%s", JAVA_JNIDATA_FIELD_NAME);
return -1;
}
// fieldStreamType
javaAudioTrackFields.fieldStreamType = env->GetFieldID(audioTrackClass,
JAVA_STREAMTYPE_FIELD_NAME, "I");
if (javaAudioTrackFields.fieldStreamType == NULL) {
ALOGE("Can't find AudioTrack.%s", JAVA_STREAMTYPE_FIELD_NAME);
return -1;
}
// Get the AudioAttributes class and fields
jclass audioAttrClass = env->FindClass(kAudioAttributesClassPathName);
if (audioAttrClass == NULL) {
ALOGE("Can't find %s", kAudioAttributesClassPathName);
return -1;
}
jclass audioAttributesClassRef = (jclass)env->NewGlobalRef(audioAttrClass);
javaAudioAttrFields.fieldUsage = env->GetFieldID(audioAttributesClassRef, "mUsage", "I");
javaAudioAttrFields.fieldContentType
= env->GetFieldID(audioAttributesClassRef, "mContentType", "I");
javaAudioAttrFields.fieldFlags = env->GetFieldID(audioAttributesClassRef, "mFlags", "I");
javaAudioAttrFields.fieldTags = env->GetFieldID(audioAttributesClassRef, "mFormattedTags",
"Ljava/lang/String;");
env->DeleteGlobalRef(audioAttributesClassRef);
if (javaAudioAttrFields.fieldUsage == NULL || javaAudioAttrFields.fieldContentType == NULL
|| javaAudioAttrFields.fieldFlags == NULL || javaAudioAttrFields.fieldTags == NULL) {
ALOGE("Can't initialize AudioAttributes fields");
return -1;
}
return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}

View File

@@ -25,6 +25,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
@@ -154,6 +155,7 @@ public final class AudioAttributes implements Parcelable {
private int mContentType = CONTENT_TYPE_UNKNOWN;
private int mFlags = 0x0;
private HashSet<String> mTags;
private String mFormattedTags;
private AudioAttributes() {
}
@@ -241,6 +243,12 @@ public final class AudioAttributes implements Parcelable {
aa.mUsage = mUsage;
aa.mFlags = mFlags;
aa.mTags = (HashSet<String>) mTags.clone();
final Iterator<String> tagIterator = mTags.iterator();
String allTagsInOne = new String();
while (tagIterator.hasNext()) {
allTagsInOne += tagIterator.next() + ";";
}
aa.mFormattedTags = allTagsInOne;
return aa;
}

View File

@@ -227,6 +227,22 @@ public class AudioFormat {
private int mChannelMask;
private int mPropertySetMask;
int getEncoding() {
return mEncoding;
}
int getSampleRate() {
return mSampleRate;
}
int getChannelMask() {
return mChannelMask;
}
int getPropertySetMask() {
return mPropertySetMask;
}
/**
* @hide CANDIDATE FOR PUBLIC API
* Builder class for {@link AudioFormat} objects.

View File

@@ -28,11 +28,13 @@ import java.util.ArrayList;
*/
public class AudioSystem
{
/* These values must be kept in sync with AudioSystem.h */
/* These values must be kept in sync with system/audio.h */
/*
* If these are modified, please also update Settings.System.VOLUME_SETTINGS
* and attrs.xml and AudioManager.java.
*/
/* The default audio stream */
public static final int STREAM_DEFAULT = -1;
/* The audio stream for phone calls */
public static final int STREAM_VOICE_CALL = 0;
/* The audio stream for system sounds */

View File

@@ -21,6 +21,8 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.NioUtils;
import java.util.Iterator;
import java.util.Set;
import android.annotation.IntDef;
import android.app.ActivityThread;
@@ -233,6 +235,8 @@ public class AudioTrack
* {@link AudioManager#STREAM_DTMF}.
*/
private int mStreamType = AudioManager.STREAM_MUSIC;
private final AudioAttributes mAttributes;
/**
* The way audio is consumed by the audio sink, streaming or static.
*/
@@ -349,21 +353,69 @@ public class AudioTrack
int bufferSizeInBytes, int mode, int sessionId)
throws IllegalArgumentException {
// mState already == STATE_UNINITIALIZED
this((new AudioAttributes.Builder())
.setLegacyStreamType(streamType)
.build(),
(new AudioFormat.Builder())
.setChannelMask(channelConfig)
.setEncoding(audioFormat)
.setSampleRate(sampleRateInHz)
.build(),
bufferSizeInBytes,
mode, sessionId);
}
/**
* @hide
* CANDIDATE FOR PUBLIC API
* Constructor with AudioAttributes and AudioFormat
* @param attributes the AudioAttributes describing the use case for this track
* @param format the AudioFormat describing the playback format (encoding, sample rate,
*     channel mask)
* @param bufferSizeInBytes the size in bytes of the internal playback buffer
* @param mode the buffering mode, MODE_STREAM or MODE_STATIC
* @param sessionId the audio session ID this track belongs to
*/
public AudioTrack(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes,
int mode, int sessionId)
throws IllegalArgumentException {
// mState already == STATE_UNINITIALIZED
// remember which looper is associated with the AudioTrack instantiation
Looper looper;
if ((looper = Looper.myLooper()) == null) {
looper = Looper.getMainLooper();
}
mInitializationLooper = looper;
audioParamCheck(streamType, sampleRateInHz, channelConfig, audioFormat, mode);
int rate = 0;
if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE) != 0)
{
rate = format.getSampleRate();
} else {
rate = AudioSystem.getPrimaryOutputSamplingRate();
if (rate <= 0) {
rate = 44100;
}
}
int channelMask = AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0)
{
channelMask = format.getChannelMask();
}
int encoding = AudioFormat.ENCODING_DEFAULT;
if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0) {
encoding = format.getEncoding();
}
audioParamCheck(rate, channelMask, encoding, mode);
mStreamType = AudioSystem.STREAM_DEFAULT;
audioBuffSizeCheck(bufferSizeInBytes);
mInitializationLooper = looper;
IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
mAppOps = IAppOpsService.Stub.asInterface(b);
mAttributes = (new AudioAttributes.Builder(attributes).build());
if (sessionId < 0) {
throw new IllegalArgumentException("Invalid audio session ID: "+sessionId);
}
@@ -371,8 +423,8 @@ public class AudioTrack
int[] session = new int[1];
session[0] = sessionId;
// native initialization
int initResult = native_setup(new WeakReference<AudioTrack>(this),
mStreamType, mSampleRate, mChannels, mAudioFormat,
int initResult = native_setup(new WeakReference<AudioTrack>(this), mAttributes,
mSampleRate, mChannels, mAudioFormat,
mNativeBufferSizeInBytes, mDataLoadMode, session);
if (initResult != SUCCESS) {
loge("Error code "+initResult+" when initializing AudioTrack.");
@@ -401,27 +453,13 @@ public class AudioTrack
// Convenience method for the constructor's parameter checks.
// This is where constructor IllegalArgumentException-s are thrown
// postconditions:
// mStreamType is valid
// mChannelCount is valid
// mChannels is valid
// mAudioFormat is valid
// mSampleRate is valid
// mDataLoadMode is valid
private void audioParamCheck(int streamType, int sampleRateInHz,
private void audioParamCheck(int sampleRateInHz,
int channelConfig, int audioFormat, int mode) {
//--------------
// stream type
if( (streamType != AudioManager.STREAM_ALARM) && (streamType != AudioManager.STREAM_MUSIC)
&& (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM)
&& (streamType != AudioManager.STREAM_VOICE_CALL)
&& (streamType != AudioManager.STREAM_NOTIFICATION)
&& (streamType != AudioManager.STREAM_BLUETOOTH_SCO)
&& (streamType != AudioManager.STREAM_DTMF)) {
throw new IllegalArgumentException("Invalid stream type.");
}
mStreamType = streamType;
//--------------
// sample rate, note these values are subject to change
if ( (sampleRateInHz < 4000) || (sampleRateInHz > 48000) ) {
@@ -1559,8 +1597,12 @@ public class AudioTrack
// Native methods called from the Java side
//--------------------
private native final int native_setup(Object audiotrack_this,
int streamType, int sampleRate, int channelMask, int audioFormat,
// post-condition: mStreamType is overwritten with a value
// that reflects the audio attributes (e.g. an AudioAttributes object with a usage of
// AudioAttributes.USAGE_MEDIA will map to AudioManager.STREAM_MUSIC
private native final int native_setup(Object /*WeakReference<AudioTrack>*/ audiotrack_this,
Object /*AudioAttributes*/ attributes,
int sampleRate, int channelMask, int audioFormat,
int buffSizeInBytes, int mode, int[] sessionId);
private native final void native_finalize();