Merge "synchronized audio beat detection: DO NOT MERGE" into ics-aah

This commit is contained in:
Dake Gu
2012-07-24 19:15:05 -07:00
committed by Android (Google) Code Review
25 changed files with 2354 additions and 63 deletions

View File

@@ -34,6 +34,7 @@ static struct {
{ AID_MEDIA, "media.player" },
{ AID_MEDIA, "media.camera" },
{ AID_MEDIA, "media.audio_policy" },
{ AID_MEDIA, "android.media.IAAHMetaDataService" },
{ AID_DRM, "drm.drmManager" },
{ AID_NFC, "nfc" },
{ AID_RADIO, "radio.phone" },

View File

@@ -0,0 +1,412 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AAHMetaDataServiceJNI"
#include <android_runtime/AndroidRuntime.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>
#include <utils/misc.h>
#include "jni.h"
#include "JNIHelp.h"
#include "IAAHMetaData.h"
// Error codes returned to Java; values must stay in sync with the constants
// declared in MetaDataServiceRtp.java.
enum {
    SUCCESS = 0,
    ERROR = -1,
    ALREADY_EXISTS = -2,
};
namespace android {
// Binder name under which the remote metadata service registers itself.
static const char* kAAHMetaDataServiceBinderName =
        "android.media.IAAHMetaDataService";
// JNI-style name of the Java peer class.
static const char* kAAHMetaDataServiceClassName =
        "android/media/libaah/MetaDataServiceRtp";
// Cached JNI reflection handles, resolved once in jniOnLoad().
static struct {
    jmethodID postEventFromNativeId;  // static postMetaDataFromNative(Object, short, int, byte[])
    jmethodID flushFromNativeId;      // static flushFromNative(Object)
    jfieldID mCookieId;               // int field storing the native pointer
    jclass clazz;                     // global ref to MetaDataServiceRtp
} jnireflect;
// Ensures *array refers to a global-ref jbyteArray of at least `size` bytes,
// reallocating (and re-promoting to a global ref) when the current one is too
// small. On allocation failure *array is left NULL.
static void ensureArraySize(JNIEnv *env, jbyteArray *array, uint32_t size) {
    if (NULL != *array) {
        uint32_t len = env->GetArrayLength(*array);
        if (len >= size)
            return;
        env->DeleteGlobalRef(*array);
        *array = NULL;
    }
    jbyteArray localRef = env->NewByteArray(size);
    if (NULL != localRef) {
        // Promote to global ref.
        *array = (jbyteArray) env->NewGlobalRef(localRef);
        // Release our (now pointless) local ref.
        env->DeleteLocalRef(localRef);
    }
}
// JNIMetaDataService acts as IAAHMetaDataClient, propagates message to java.
// It also starts a background thread that queries and monitors the life cycle
// of IAAHMetaDataService.
// JNIMetaDataService will delete itself when the related java object is
// garbage collected. This might not be important if the java program is using
// a singleton pattern; but it is also safe if the java program creates and
// destroys the object repeatedly.
class JNIMetaDataService : virtual public BnAAHMetaDataClient,
        virtual public android::IBinder::DeathRecipient,
        virtual public Thread {
public:
    JNIMetaDataService();
    // start working, must be called only once during initialize
    bool start(jobject ref);
    // stop thread and unref this object, you should never access the object
    // after calling destroy()
    void destroy();
    // override BnAAHMetaDataClient
    virtual void notify(uint16_t typeId, uint32_t item_len, const void* data);
    virtual void flush();
    // enable / disable the searching service
    void setEnabled(bool e);
    // override Thread
    virtual bool threadLoop();
    // override android::IBinder::DeathRecipient
    virtual void binderDied(const wp<IBinder>& who);
private:
    // Private: the object is ref-counted and deleted only when both strong
    // self-references below have been cleared.
    virtual ~JNIMetaDataService();
    // Keeps the object alive until destroy() is called.
    sp<JNIMetaDataService> self_strongref;
    // Keeps the object alive while the monitor thread runs; cleared at the
    // end of threadLoop().
    sp<JNIMetaDataService> thread_strongref;
    // Global ref to the Java-side weak reference, forwarded to the static
    // Java callbacks.
    jobject metadataservice_ref;
    // Reusable global-ref byte[] used to hand metadata payloads to Java.
    jbyteArray metadata_buffer;
    // `lock` + `cond` guard and signal the flags below.
    Mutex lock;
    Condition cond;
    // True when the remote service binder is unknown or has died.
    volatile bool remote_service_invalid;
    // Set by destroy() to make threadLoop() exit.
    volatile bool exitThread;
    // Toggled from Java via native_enable()/native_disable().
    volatile bool enabled;
};
// IAAHMetaDataClient callback (runs on a binder thread): copies the metadata
// payload into a reusable Java byte[] and forwards it to the static
// postMetaDataFromNative() method of the Java peer.
void JNIMetaDataService::notify(uint16_t typeId, uint32_t item_len,
                                const void* data) {
    LOGV("notify received type=%d item_len=%d", typeId, item_len);
    if (!enabled) {
        return;
    }
    JNIEnv *env = AndroidRuntime::getJNIEnv();
    // ensureArraySize provides some simple optimization of reusing
    // the byte array object. If in the future that different types
    // of metadata hit client, then more sophisticated strategy is needed.
    ensureArraySize(env, &metadata_buffer, item_len);
    if (metadata_buffer) {
        jbyte *nArray = env->GetByteArrayElements(metadata_buffer, NULL);
        memcpy(nArray, data, item_len);
        env->ReleaseByteArrayElements(metadata_buffer, nArray, 0);
    }
    // NOTE(review): if the buffer allocation failed, Java is still invoked
    // with a null array — presumably tolerated by MetaDataServiceRtp; verify.
    env->CallStaticVoidMethod(jnireflect.clazz,
                              jnireflect.postEventFromNativeId,
                              metadataservice_ref, typeId, item_len,
                              metadata_buffer);
}
// IAAHMetaDataClient callback: tells the Java peer to drop queued metadata.
void JNIMetaDataService::flush() {
    if (!enabled) {
        return;
    }
    JNIEnv *env = AndroidRuntime::getJNIEnv();
    env->CallStaticVoidMethod(jnireflect.clazz,
                              jnireflect.flushFromNativeId,
                              metadataservice_ref);
}
// Constructor: starts disabled and disconnected; pins itself alive via
// self_strongref until destroy() is called.
JNIMetaDataService::JNIMetaDataService()
        : metadataservice_ref(NULL),
          metadata_buffer(NULL),
          remote_service_invalid(true),
          exitThread(false),
          enabled(false) {
    // Holds strong reference to myself, because the way that binder works
    // requires to use RefBase, we cannot explicitly delete this object,
    // otherwise, access from service manager might cause segfault.
    // So we hold this reference until destroy() is called.
    // Alternative solution is to create another JNIMetaDataServiceCookie class
    // which holds the strong reference but that adds more memory fragmentation
    self_strongref = this;
}
// Destructor: releases the global refs owned by this object. Runs on
// whichever thread drops the last strong reference.
JNIMetaDataService::~JNIMetaDataService() {
    LOGV("~JNIMetaDataService");
    JNIEnv *env = AndroidRuntime::getJNIEnv();
    if (metadata_buffer) {
        env->DeleteGlobalRef(metadata_buffer);
        metadata_buffer = NULL;
    }
    if (metadataservice_ref) {
        env->DeleteGlobalRef(metadataservice_ref);
        metadataservice_ref = NULL;
    }
}
// Monitor thread body. While enabled, repeatedly looks up the remote
// IAAHMetaDataService, registers this object as its client, and then sleeps
// on `cond` until destroy()/setEnabled()/binderDied() signal a state change.
// Returns false so the Thread framework does not loop again after exit.
bool JNIMetaDataService::threadLoop() {
    LOGV("Enter JNIMetaDataService::threadLoop");
    sp < IServiceManager > sm = defaultServiceManager();
    sp < IBinder > binder;
    sp<IAAHMetaDataService> remote_service;
    lock.lock();
    while (true) {
        if (exitThread) {
            break;
        } else if (remote_service_invalid && enabled) {
            // getService() may block 10s, so we do this not holding lock
            lock.unlock();
            binder = sm->getService(
                    String16(kAAHMetaDataServiceBinderName));
            lock.lock();
            if (binder != NULL) {
                LOGD("found remote %s", kAAHMetaDataServiceBinderName);
                // drop any stale registration before re-registering
                if (remote_service.get()) {
                    remote_service->asBinder()->unlinkToDeath(this);
                    remote_service->removeClient(thread_strongref);
                    remote_service = NULL;
                }
                remote_service = interface_cast < IAAHMetaDataService
                        > (binder);
                remote_service->asBinder()->linkToDeath(this);
                remote_service->addClient(thread_strongref);
                remote_service_invalid = false;
            }
        }
        if (!exitThread && !(remote_service_invalid && enabled)) {
            // if exitThread flag is not set and we are not searching remote
            // service, wait next signal to be triggered either
            // - destroy() being called
            // - enabled or remote_service_invalid changed
            cond.wait(lock);
        }
    }
    // unregister from the remote service before exiting
    if (remote_service.get()) {
        remote_service->removeClient(thread_strongref);
        remote_service->asBinder()->unlinkToDeath(this);
        remote_service = NULL;
    }
    lock.unlock();
    binder = NULL;
    sm = NULL;
    // cleanup the thread reference; this may drop the last strong ref and
    // delete the object (see destroy()).
    thread_strongref = NULL;
    LOGV("Exit JNIMetaDataService::threadLoop");
    return false;
}
// Stores the Java weak-ref global (ownership transfers to this object; it is
// released in the destructor) and launches the monitor thread. Must be
// called exactly once. Returns false if the thread could not be started.
bool JNIMetaDataService::start(jobject ref) {
    metadataservice_ref = ref;
    // now add a strong ref, used in threadLoop()
    thread_strongref = this;
    if (NO_ERROR
            != run("aah_metadataservice_monitor", ANDROID_PRIORITY_NORMAL)) {
        thread_strongref = NULL;
        return false;
    }
    return true;
}
// Asks the monitor thread to exit and drops the self-reference; the object
// is deleted once the thread clears thread_strongref (or immediately if the
// thread never started).
void JNIMetaDataService::destroy() {
    lock.lock();
    exitThread = true;
    lock.unlock();
    cond.signal();
    // unref JNIMetaDataService, JNIMetaDataService will not be deleted for now;
    // it will be deleted when thread exits and cleans thread_strongref.
    self_strongref = NULL;
}
// Turns metadata forwarding on or off. Enabling wakes the monitor thread so
// it can (re)connect to the remote service.
void JNIMetaDataService::setEnabled(bool e) {
    bool sendSignal;
    lock.lock();
    // only a false->true transition needs to wake the monitor thread
    sendSignal = e && !enabled;
    enabled = e;
    lock.unlock();
    if (sendSignal) {
        cond.signal();
    }
}
// DeathRecipient callback: the remote service died; mark it invalid and, if
// currently enabled, wake the monitor thread to search for it again.
void JNIMetaDataService::binderDied(const wp<IBinder>& who) {
    LOGD("remote %s died, re-searching...", kAAHMetaDataServiceBinderName);
    bool sendSignal;
    lock.lock();
    remote_service_invalid = true;
    sendSignal = enabled;
    lock.unlock();
    if (sendSignal) {
        cond.signal();
    }
}
// Called by the Java peer to create and start the native service object.
// Returns SUCCESS or ERROR. The native pointer is written to the Java
// object's mCookie field only after the monitor thread started successfully,
// so a failed setup can never leave a dangling cookie behind (the original
// code set the field before start(), then freed the object on failure,
// leaving mCookie pointing at freed memory).
static jint aahmetadataservice_native_setup(JNIEnv* env, jobject thiz,
                                            jobject weak_this) {
    // operator new aborts/throws on OOM on this platform, so no NULL check.
    JNIMetaDataService* lpJniService = new JNIMetaDataService();
    // we use a weak reference so the java object can be garbage collected.
    jobject ref = env->NewGlobalRef(weak_this);
    if (ref == NULL) {
        LOGE("setup: Error in NewGlobalRef");
        // start() was never called, so the object owns no global ref yet;
        // destroy() just releases the object itself.
        lpJniService->destroy();
        return ERROR;
    }
    LOGV("setup: lpJniService: %p metadataservice_ref %p", lpJniService, ref);
    if (!lpJniService->start(ref)) {
        LOGE("setup: Error in starting JNIMetaDataService");
        // start() already stored `ref` in the object, so destroy() -> dtor
        // releases it; mCookie was never written, so no dangling pointer.
        lpJniService->destroy();
        return ERROR;
    }
    // Publish the native pointer only once the object is fully initialized.
    env->SetIntField(thiz, jnireflect.mCookieId,
                     reinterpret_cast<jint>(lpJniService));
    return SUCCESS;
}
// Reads the native JNIMetaDataService pointer back out of the Java object's
// mCookie int field; NULL after finalize has cleared the field.
inline JNIMetaDataService* get_service(JNIEnv* env, jobject thiz) {
    return reinterpret_cast<JNIMetaDataService*>(env->GetIntField(
            thiz, jnireflect.mCookieId));
}
// called when the java object is garbage collected
static void aahmetadataservice_native_finalize(JNIEnv* env, jobject thiz) {
    JNIMetaDataService* pService = get_service(env, thiz);
    if (pService == NULL) {
        return;
    }
    LOGV("finalize jni object");
    // clean up the service object
    pService->destroy();
    // clear the cookie so any later native call sees a NULL service
    env->SetIntField(thiz, jnireflect.mCookieId, 0);
}
// Java entry point: start forwarding metadata into this object.
static void aahmetadataservice_native_enable(JNIEnv* env, jobject thiz) {
    JNIMetaDataService* pService = get_service(env, thiz);
    if (pService == NULL) {
        LOGD("native service already deleted");
        return;
    }
    pService->setEnabled(true);
}
// Java entry point: stop forwarding metadata into this object.
static void aahmetadataservice_native_disable(JNIEnv* env, jobject thiz) {
    JNIMetaDataService* pService = get_service(env, thiz);
    if (pService == NULL) {
        LOGD("native service already deleted");
        return;
    }
    pService->setEnabled(false);
}
// JNI method table; names/signatures must match MetaDataServiceRtp.java.
static JNINativeMethod kAAHMetaDataServiceMethods[] = {
    { "native_setup", "(Ljava/lang/Object;)I",
            (void *) aahmetadataservice_native_setup },
    { "native_enable", "()V", (void *) aahmetadataservice_native_enable },
    { "native_disable", "()V", (void *) aahmetadataservice_native_disable },
    { "native_finalize", "()V", (void *) aahmetadataservice_native_finalize },
};
// Resolves the Java peer class, registers the native methods, and caches all
// reflection ids used at runtime. Returns the JNI version on success, -1 on
// any failure.
static jint jniOnLoad(JavaVM* vm, void* reserved) {
    LOGV("jniOnLoad");
    JNIEnv* env = NULL;
    if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
        LOGE("ERROR: GetEnv failed\n");
        return -1;
    }
    jclass clazz = env->FindClass(kAAHMetaDataServiceClassName);
    if (!clazz) {
        LOGE("ERROR: FindClass failed\n");
        return -1;
    }
    // promote to a global ref so the cached ids below remain usable
    jnireflect.clazz = (jclass) env->NewGlobalRef(clazz);
    if (env->RegisterNatives(jnireflect.clazz, kAAHMetaDataServiceMethods,
            NELEM(kAAHMetaDataServiceMethods)) < 0) {
        LOGE("ERROR: RegisterNatives failed\n");
        return -1;
    }
    jnireflect.postEventFromNativeId = env->GetStaticMethodID(
            jnireflect.clazz, "postMetaDataFromNative",
            "(Ljava/lang/Object;SI[B)V");
    if (!jnireflect.postEventFromNativeId) {
        LOGE("Can't find %s", "postMetaDataFromNative");
        return -1;
    }
    jnireflect.flushFromNativeId = env->GetStaticMethodID(
            jnireflect.clazz, "flushFromNative",
            "(Ljava/lang/Object;)V");
    if (!jnireflect.flushFromNativeId) {
        LOGE("Can't find %s", "flushFromNative");
        return -1;
    }
    jnireflect.mCookieId = env->GetFieldID(jnireflect.clazz, "mCookie", "I");
    if (!jnireflect.mCookieId) {
        LOGE("Can't find %s", "mCookie");
        return -1;
    }
    return JNI_VERSION_1_4;
}
}  // namespace android
// Library entry point invoked by the runtime on System.loadLibrary().
jint JNI_OnLoad(JavaVM* vm, void* reserved) {
    return android::jniOnLoad(vm, reserved);
}

View File

@@ -9,6 +9,8 @@ LOCAL_MODULE := libaah_rtp
LOCAL_MODULE_TAGS := optional
LOCAL_SRC_FILES := \
aah_audio_algorithm.cpp \
aah_audio_processor.cpp \
aah_decoder_pump.cpp \
aah_rx_player.cpp \
aah_rx_player_core.cpp \
@@ -17,6 +19,8 @@ LOCAL_SRC_FILES := \
aah_tx_group.cpp \
aah_tx_packet.cpp \
aah_tx_player.cpp \
AAHMetaDataService_jni.cpp \
IAAHMetaData.cpp \
utils.cpp
LOCAL_C_INCLUDES := \
@@ -26,6 +30,7 @@ LOCAL_C_INCLUDES := \
frameworks/base/media/libstagefright
LOCAL_SHARED_LIBRARIES := \
libandroid_runtime \
libbinder \
libcommon_time_client \
libcutils \
@@ -39,3 +44,4 @@ LOCAL_LDLIBS := \
include $(BUILD_SHARED_LIBRARY)
include $(call all-makefiles-under,$(LOCAL_PATH))

View File

@@ -0,0 +1,222 @@
/*
**
** Copyright 2012, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
#define LOG_TAG "LibAAH_RTP"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <binder/IServiceManager.h>
#include <utils/RefBase.h>
#include <utils/threads.h>
#include <binder/IInterface.h>
#include <binder/Parcel.h>
#include <media/stagefright/Utils.h>
#include <time.h>
#include "IAAHMetaData.h"
namespace android {
// Transaction codes for IAAHMetaDataClient.
enum {
    NOTIFY = IBinder::FIRST_CALL_TRANSACTION,
    FLUSH = IBinder::FIRST_CALL_TRANSACTION + 1,
};
// Client-side proxy: serializes notify()/flush() into one-way binder
// transactions toward the remote BnAAHMetaDataClient.
class BpAAHMetaDataClient : public BpInterface<IAAHMetaDataClient> {
public:
    BpAAHMetaDataClient(const sp<IBinder>& impl)
            : BpInterface<IAAHMetaDataClient>(impl) {
    }
    // Sends one metadata item; `buf` holds `item_len` bytes which are copied
    // into the parcel. One-way, so the sender never blocks on the receiver.
    virtual void notify(uint16_t typeId, uint32_t item_len, const void* buf) {
        Parcel data, reply;
        data.writeInterfaceToken(IAAHMetaDataClient::getInterfaceDescriptor());
        data.writeInt32((int32_t) typeId);
        data.writeInt32((int32_t) item_len);
        data.write(buf, item_len);
        remote()->transact(NOTIFY, data, &reply, IBinder::FLAG_ONEWAY);
    }
    // Asks the remote client to discard queued metadata. One-way.
    virtual void flush() {
        Parcel data, reply;
        data.writeInterfaceToken(IAAHMetaDataClient::getInterfaceDescriptor());
        remote()->transact(FLUSH, data, &reply, IBinder::FLAG_ONEWAY);
    }
};
IMPLEMENT_META_INTERFACE(AAHMetaDataClient, "android.media.IAAHMetaDataClient");
// ----------------------------------------------------------------------
// Server-side dispatcher: unmarshals NOTIFY/FLUSH transactions and invokes
// the local implementation; everything else goes to BBinder.
status_t BnAAHMetaDataClient::onTransact(uint32_t code, const Parcel& data,
                                         Parcel* reply, uint32_t flags) {
    switch (code) {
        case NOTIFY: {
            CHECK_INTERFACE(IAAHMetaDataClient, data, reply);
            uint16_t typeId = (uint16_t) data.readInt32();
            uint32_t item_len = (uint32_t) data.readInt32();
            // readInplace returns a pointer into the parcel's buffer; it is
            // only valid for the duration of this call.
            const void* buf = data.readInplace(item_len);
            notify(typeId, item_len, buf);
            return NO_ERROR;
        }
        break;
        case FLUSH: {
            CHECK_INTERFACE(IAAHMetaDataClient, data, reply);
            flush();
            return NO_ERROR;
        }
        break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
// Transaction codes for IAAHMetaDataService.
enum {
    ADDCLIENT = IBinder::FIRST_CALL_TRANSACTION,
    REMOVECLIENT = ADDCLIENT + 1,
};
// Client-side proxy for the metadata registry service.
class BpAAHMetaDataService : public BpInterface<IAAHMetaDataService> {
public:
    BpAAHMetaDataService(const sp<IBinder>& impl)
            : BpInterface<IAAHMetaDataService>(impl) {
    }
    // Registers a client for metadata broadcasts. One-way.
    virtual void addClient(const sp<IAAHMetaDataClient>& client) {
        Parcel data, reply;
        data.writeInterfaceToken(IAAHMetaDataService::getInterfaceDescriptor());
        data.writeStrongBinder(client->asBinder());
        remote()->transact(ADDCLIENT, data, &reply, IBinder::FLAG_ONEWAY);
    }
    // Unregisters a previously added client. One-way.
    virtual void removeClient(const sp<IAAHMetaDataClient>& client) {
        Parcel data, reply;
        data.writeInterfaceToken(IAAHMetaDataService::getInterfaceDescriptor());
        data.writeStrongBinder(client->asBinder());
        remote()->transact(REMOVECLIENT, data, &reply, IBinder::FLAG_ONEWAY);
    }
};
IMPLEMENT_META_INTERFACE(AAHMetaDataService, "android.media.IAAHMetaDataService");
// ----------------------------------------------------------------------
// Server-side dispatcher for ADDCLIENT/REMOVECLIENT; unknown codes are
// forwarded to BBinder.
status_t BnAAHMetaDataService::onTransact(uint32_t code, const Parcel& data,
                                          Parcel* reply, uint32_t flags) {
    switch (code) {
        case ADDCLIENT: {
            CHECK_INTERFACE(IAAHMetaDataService, data, reply);
            sp<IAAHMetaDataClient> client = interface_cast < IAAHMetaDataClient
                    > (data.readStrongBinder());
            addClient(client);
            return NO_ERROR;
        }
        break;
        case REMOVECLIENT: {
            CHECK_INTERFACE(IAAHMetaDataService, data, reply);
            sp<IAAHMetaDataClient> client = interface_cast < IAAHMetaDataClient
                    > (data.readStrongBinder());
            removeClient(client);
            return NO_ERROR;
        }
        break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
// Process-wide singleton state, guarded by s_aah_metadata_service_lock.
static bool s_aah_metadata_service_initialized = false;
static sp<AAHMetaDataService> s_aah_metadata_service = NULL;
static Mutex s_aah_metadata_service_lock;
// Lazily creates the singleton and registers it with the service manager
// under IAAHMetaDataService::descriptor. Registration is attempted exactly
// once; if it fails, the returned sp<> stays NULL for the process lifetime.
const sp<AAHMetaDataService>& AAHMetaDataService::getInstance() {
    Mutex::Autolock autolock(&s_aah_metadata_service_lock);
    if (!s_aah_metadata_service_initialized) {
        s_aah_metadata_service = new AAHMetaDataService();
        status_t ret = android::defaultServiceManager()->addService(
                IAAHMetaDataService::descriptor, s_aah_metadata_service);
        if (ret != 0) {
            LOGE("failed to add AAHMetaDataService error code %d", ret);
            s_aah_metadata_service = NULL;
        }
        s_aah_metadata_service_initialized = true;
    }
    return s_aah_metadata_service;
}
AAHMetaDataService::AAHMetaDataService() {
}
// Registers a client and links for death notification so a crashed client
// gets pruned automatically in binderDied(). mClients is a SortedVector, so
// duplicate adds of the same sp<> collapse to one entry.
void AAHMetaDataService::addClient(const sp<IAAHMetaDataClient>& client) {
    Mutex::Autolock lock(mLock);
    IAAHMetaDataClient* obj = client.get();
    LOGV("addClient %p", obj);
    client->asBinder()->linkToDeath(this);
    mClients.add(client);
}
// DeathRecipient callback: removes the client whose binder just died.
// No unlinkToDeath needed — the binder is already gone.
void AAHMetaDataService::binderDied(const wp<IBinder>& who) {
    Mutex::Autolock lock(mLock);
    for (uint32_t i = 0; i < mClients.size(); ++i) {
        const sp<IAAHMetaDataClient>& c = mClients[i];
        if (who == c->asBinder()) {
            LOGD("IAAHMetaDataClient binder Died");
            LOGV("removed died client %p", c.get());
            mClients.removeAt(i);
            return;
        }
    }
}
void AAHMetaDataService::removeClient(const sp<IAAHMetaDataClient>& client) {
IAAHMetaDataClient* obj = client.get();
Mutex::Autolock lock(mLock);
for (uint32_t i = 0; i < mClients.size(); ++i) {
const sp<IAAHMetaDataClient>& c = mClients[i];
if (c->asBinder() == client->asBinder()) {
LOGV("removeClient %p", c.get());
mClients.removeAt(i);
return;
}
}
}
// Delivers one metadata item to every registered client; `data` points to
// `item_len` bytes which each one-way binder transaction copies out.
// (The original declared an unused `buf` local — a dead reinterpret_cast —
// and copied each sp<> per iteration; both removed.)
void AAHMetaDataService::broadcast(uint16_t typeId, uint32_t item_len,
                                   void* data) {
    LOGV("broadcast %d", typeId);
    Mutex::Autolock lock(mLock);
    for (uint32_t i = 0; i < mClients.size(); ++i) {
        // const& avoids a refcount round-trip per client
        const sp<IAAHMetaDataClient>& c = mClients[i];
        LOGV("notify %p", c.get());
        c->notify(typeId, item_len, data);
    }
}
// Tells every registered client to discard any queued metadata.
void AAHMetaDataService::flush() {
    Mutex::Autolock lock(mLock);
    for (uint32_t i = 0; i < mClients.size(); ++i) {
        const sp<IAAHMetaDataClient>& c = mClients[i];
        c->flush();
    }
}
}
;
// namespace android

View File

@@ -0,0 +1,89 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_IAAHMETADATA_H
#define ANDROID_IAAHMETADATA_H
#include <utils/SortedVector.h>
#include <utils/RefBase.h>
#include <utils/String8.h>
#include <utils/threads.h>
#include <binder/IInterface.h>
#include <binder/Parcel.h>
#include "utils.h"
namespace android {
// Callback interface implemented by metadata consumers.
class IAAHMetaDataClient : public IInterface {
public:
    DECLARE_META_INTERFACE (AAHMetaDataClient);
    // Delivers one metadata item of `item_len` bytes.
    virtual void notify(uint16_t typeId, uint32_t item_len,
                        const void* data) = 0;
    // Requests the consumer to drop any queued metadata.
    virtual void flush() = 0;
};
// ----------------------------------------------------------------------------
// Server-side stub for IAAHMetaDataClient.
class BnAAHMetaDataClient : public BnInterface<IAAHMetaDataClient> {
public:
    virtual status_t onTransact(uint32_t code, const Parcel& data,
                                Parcel* reply, uint32_t flags = 0);
};
// ----------------------------------------------------------------------------
// Registry interface: clients subscribe / unsubscribe for metadata
// broadcasts.
class IAAHMetaDataService : public IInterface {
public:
    DECLARE_META_INTERFACE (AAHMetaDataService);
    virtual void addClient(const sp<IAAHMetaDataClient>& client) = 0;
    virtual void removeClient(const sp<IAAHMetaDataClient>& client) = 0;
};
// ----------------------------------------------------------------------------
// Server-side stub for IAAHMetaDataService.
class BnAAHMetaDataService : public BnInterface<IAAHMetaDataService> {
public:
    virtual status_t onTransact(uint32_t code, const Parcel& data,
                                Parcel* reply, uint32_t flags = 0);
};
// ----------------------------------------------------------------------------
// Concrete in-process implementation: keeps the set of registered clients,
// watches each for binder death, and fans metadata items / flushes out to
// all of them. Published to the service manager via getInstance().
class AAHMetaDataService : public BnAAHMetaDataService,
        public android::IBinder::DeathRecipient {
public:
    // Returns the process singleton (NULL sp<> if registration failed).
    static const sp<AAHMetaDataService>& getInstance();
    // Sends one metadata item to every registered client.
    void broadcast(uint16_t typeId, uint32_t item_len, void* data);
    // Tells every registered client to drop queued metadata.
    void flush();
    virtual void addClient(const sp<IAAHMetaDataClient>& client);
    virtual void removeClient(const sp<IAAHMetaDataClient>& client);
    // DeathRecipient: prunes the dead client from mClients.
    virtual void binderDied(const wp<IBinder>& who);
private:
    AAHMetaDataService();
    SortedVector<sp<IAAHMetaDataClient> > mClients;  // guarded by mLock
    Mutex mLock;
};
}
;
// namespace android
#endif // ANDROID_IAAHMETADATA_H

View File

@@ -0,0 +1,291 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "LibAAH_RTP"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <math.h>
#include <stdlib.h>
#include "aah_audio_algorithm.h"
// #define DEBUG_BEAT_VALUE
namespace android {
// magic number, the bar should set higher if kBands is bigger:
// per-band energy ratio above which a band contributes to the beat sum
const float BeatDetectionAlgorithm::kThreshHold = 8;
// minimum summed over-threshold ratio required to declare a beat
const float BeatDetectionAlgorithm::kSumThreshold = 250;
// back trace time 1s (seconds of history kept in the energy train)
const float BeatDetectionAlgorithm::kBacktraceTime = 1;
// we must wait 1 second before switch generate a new sequence number, this is
// to prevent visualizer switches too much (value is in microseconds)
const int64_t BeatDetectionAlgorithm::kBeatInterval = 1000000;
// upper clamp applied to the reported beat value
const float BeatDetectionAlgorithm::kMaxBeatValue = 100000;
// how many beat information will be cached before send out? We group beats
// in one packet to reduce the cost of sending too much packets. The time
// should be shorter than kAAHBufferTimeUs defined in TxPlayer
// The extra latency is introduced by fft, beat algorithm, time transform,
// binder service latency, jni latency, etc. If all these extra latency
// add up too much, then kAAHBufferTimeUs must be increased
const int32_t BeatDetectionAlgorithm::kAAHBeatInfoBufferTimeMS = 250;
// each thread holds a random data structure (state for nrand48)
static __thread unsigned short sRandData[3];
static __thread bool sRandDataInitialized = false;
// Normalizes a raw beat scale for smoothing: anything below 1 reports as 1,
// anything above `threshold` saturates at `threshold`, values in between
// pass through unchanged. (Note: when scale < 1 the result is 1 regardless
// of the threshold, matching the detector's "no beat" floor.)
static inline float normalizeBeatValue(float scale, float threshold) {
    if (scale < 1) {
        return 1;
    }
    return (scale > threshold) ? threshold : scale;
}
// Constructor: seeds the per-thread PRNG on first use and picks a random
// starting beat sequence number.
BeatDetectionAlgorithm::BeatDetectionAlgorithm()
        : mSamplesPerSegment(0),
          mSegments(0),
          mEnergyTrain(NULL),
          mBeatTrain(NULL) {
    if (!sRandDataInitialized) {
        // NOTE(review): sRandData is zero-initialized TLS, so this seeds
        // nrand48 with a fixed all-zero state on every thread — confirm
        // whether a time-derived seed was intended.
        seed48(sRandData);
        sRandDataInitialized = true;
    }
    mBeatSequenceNumber = nrand48(sRandData);
}
BeatDetectionAlgorithm::~BeatDetectionAlgorithm() {
    cleanup();
}
// Sizes the per-band history buffers for the given FFT segment size and
// sample rate. Returns false when samples_per_seg cannot be split into
// kBands bands holding an even number of samples each. Re-initializing with
// an identical configuration is a no-op that preserves history.
bool BeatDetectionAlgorithm::initialize(uint32_t samples_per_seg,
                                        uint32_t sample_rates) {
    LOGV("initialize algorithm samples_per_seg %d sample_rates %d",
         samples_per_seg, sample_rates);
    // number of segments covering kBacktraceTime seconds of audio
    uint32_t segments = (uint32_t)(
            sample_rates / samples_per_seg * kBacktraceTime);
    if (mSamplesPerSegment == samples_per_seg && mSegments == segments) {
        return true;
    }
    uint32_t samplesPerBand = samples_per_seg / kBands;
    if (samplesPerBand * kBands != samples_per_seg) {
        LOGE("%s samples per segment not divided evenly by bands",
             __PRETTY_FUNCTION__);
        return false;
    }
    // samples are consumed as (real, imaginary) pairs in process(), so each
    // band must hold an even count
    if (samplesPerBand & 1) {
        LOGE("%s each band must contain even number of samples",
             __PRETTY_FUNCTION__);
        return false;
    }
    cleanup();
    mSamplesPerSegment = samples_per_seg;
    mSegments = segments;
    mSamplesPerBand = samplesPerBand;
    mTrainMatrixSize = kSearchBands * mSegments;
    mEnergyTrain = new uint64_t[mTrainMatrixSize];
    mBeatTrain = new float[mTrainMatrixSize];
    if (!mEnergyTrain || !mBeatTrain) {
        LOGE("%s failed allocating memory", __PRETTY_FUNCTION__);
        return false;
    }
    flush();
    return true;
}
// Feeds one FFT segment into the beat detector.
//   ts              - media timestamp of the segment, in microseconds
//   fft             - FFT output; each 32-bit entry packs a (real, imaginary)
//                     pair of int16 values and is read as two 16-bit samples
//   samples_per_seg - number of 32-bit fft entries; must equal the value
//                     passed to initialize()
// Updates the rolling per-band energy averages, derives a beat value from
// the largest energy ratio, and queues an AudioBeatInfo for collectMetaData().
void BeatDetectionAlgorithm::process(int64_t ts, int32_t* fft,
                                     uint32_t samples_per_seg) {
    CHECK(samples_per_seg == mSamplesPerSegment);
    if (mSegments == 0) {
        return;
    }
    // access fft array as 16bits
    int16_t* segmentFt = (int16_t*)fft;
    float maxNewEnergyScale = 0;
    int maxBeatIdx = -1;
    float sum = 0;
    for (int i = 0, trainIndexForBand = 0;
            i < kBandEnd - kBandStart;
            i++, trainIndexForBand += mSegments) {
        uint64_t newEnergy = 0;
        // mSamplesPerBand is already validated to be even in initialize()
        uint32_t startSample = (kBandStart + i) * mSamplesPerBand;
        for (uint32_t li = startSample;
                li < startSample + mSamplesPerBand;
                li += 2) {
            // Widen to 64 bits BEFORE squaring: for two full-scale int16
            // inputs the sum of the squares reaches 2^31, which overflows
            // signed 32-bit arithmetic (UB) in the previous
            // (int32_t)a * (int32_t)a + (int32_t)b * (int32_t)b form.
            int64_t re = segmentFt[li];
            int64_t im = segmentFt[li + 1];
            newEnergy += (uint64_t)(re * re + im * im);
        }
        // mean energy per complex sample in this band
        newEnergy = newEnergy / (mSamplesPerBand >> 1);
        if (mEnergyTrainFilled) {
            // update beat train: ratio of this segment's energy against the
            // band's rolling one-second average
            float newEnergyScale = (float) newEnergy
                    / ((double) mEnergyTrainSum[i] / (double) mSegments);
            mBeatTrain[trainIndexForBand + mBeatTrainIdx] = newEnergyScale;
            if (isnan(newEnergyScale) || isinf(newEnergyScale)
                    || newEnergyScale > maxNewEnergyScale) {
                maxNewEnergyScale = newEnergyScale;
                maxBeatIdx = i;
            }
            if (newEnergyScale > kThreshHold) {
                sum += newEnergyScale;
            }
        }
        // Update the energy train and energy sum
        mEnergyTrainSum[i] -= mEnergyTrain[trainIndexForBand + mEnergyTrainIdx];
        mEnergyTrain[trainIndexForBand + mEnergyTrainIdx] = newEnergy;
        mEnergyTrainSum[i] += mEnergyTrain[trainIndexForBand + mEnergyTrainIdx];
    }
    // clamp non-finite / runaway ratios to the maximum reportable value
    if (isnan(maxNewEnergyScale) || isinf(maxNewEnergyScale)
            || maxNewEnergyScale > kMaxBeatValue) {
        maxNewEnergyScale = kMaxBeatValue;
    }
    bool beat = false;
    // declare a beat at most once per kBeatInterval
    if (sum >= kSumThreshold /*&& maxNewEnergyScale > kThreshHold*/
            && (mBeatLastTs == -1 || (ts - mBeatLastTs) > kBeatInterval)) {
        mBeatLastTs = ts;
        mBeatSequenceNumber++;
        beat = true;
        LOGV("BEAT!!!! %d %f", mBeatSequenceNumber, maxNewEnergyScale);
    }
    mBeatValue = maxNewEnergyScale;
    // exponential smoothing: 70% history, 30% new value clamped to [1, 30]
    mBeatValueSmoothed = mBeatValueSmoothed * 0.7
            + normalizeBeatValue(mBeatValue, 30) * 0.3;
    AudioBeatInfo beatInfo(ts, mBeatValue, mBeatValueSmoothed,
                           mBeatSequenceNumber);
    // allowing overwrite existing item in the queue if we didn't send out
    // data in time: lost beats is very unlikely to happen
    mBeatInfoQueue.writeAllowOverflow(beatInfo);
#ifdef DEBUG_BEAT_VALUE
    char debugstr[256];
    uint32_t i;
    for (i = 0; i < mBeatValue && i < sizeof(debugstr) - 1; i++) {
        debugstr[i] = beat ? 'B' : '*';
    }
    debugstr[i] = 0;
    LOGD("%lld %lld %f %f %s", mBeatLastTs, ts, mBeatValue, sum, debugstr);
#endif
    // advance the circular indices; the beat train lags until the energy
    // train has accumulated its first full second of history
    mEnergyTrainIdx = mEnergyTrainIdx + 1;
    if (mEnergyTrainIdx == mSegments) {
        mEnergyTrainIdx = 0;
        mEnergyTrainFilled = true;
    }
    if (mEnergyTrainFilled) {
        mBeatTrainIdx = mBeatTrainIdx + 1;
        if (mBeatTrainIdx == mSegments) {
            mBeatTrainIdx = 0;
        }
    }
}
// Releases the history buffers allocated in initialize(). Safe to call
// repeatedly; initialize() must run again before process().
void BeatDetectionAlgorithm::cleanup() {
    // Both trains are allocated with new[], so they must be released with
    // delete[] — the previous plain `delete` was undefined behavior.
    // delete[] on NULL is a no-op, so no guard is needed.
    delete[] mEnergyTrain;
    mEnergyTrain = NULL;
    delete[] mBeatTrain;
    mBeatTrain = NULL;
}
// TRTP metadata block carrying up to kBeatQueueLen AudioBeatInfo records.
class TRTPMetaDataBeat : public TRTPMetaDataBlock {
public:
    TRTPMetaDataBeat()
            : TRTPMetaDataBlock(kMetaDataBeat, 0) {}
    // Copies `beats` records out of `beatInfo`; the caller keeps ownership
    // of the source array.
    TRTPMetaDataBeat(uint16_t beats,
                     AudioBeatInfo* beatInfo)
            : TRTPMetaDataBlock(kMetaDataBeat, calculateItemLength(beats))
            , mCount(beats)
    {
        memcpy(&beatInfos, beatInfo, beats * sizeof(AudioBeatInfo) );
    }
    // Wire size: 2-byte count plus kItemLength bytes per beat record.
    static inline uint32_t calculateItemLength(uint16_t beats) {
        return 2 + BeatDetectionAlgorithm::kItemLength * beats;
    }
    virtual ~TRTPMetaDataBeat() {}
    // Serializes the block head, the count, then
    // (ts, beatValue, smoothedBeatValue, sequenceNumber) per record;
    // advances `buf` past the written bytes.
    virtual void write(uint8_t*& buf) const;
    uint16_t mCount;  // number of valid entries in beatInfos
    struct AudioBeatInfo beatInfos[BeatDetectionAlgorithm::kBeatQueueLen];
};
void TRTPMetaDataBeat::write(uint8_t*& buf) const {
    writeBlockHead(buf);
    TRTPPacket::writeU16(buf, mCount);
    for (uint16_t i = 0; i < mCount; i++) {
        // field order/sizes must total kItemLength (8 + 4 + 4 + 4 = 20)
        TRTPPacket::writeU64(buf, beatInfos[i].ts);
        TRTPPacket::writeFloat(buf, beatInfos[i].beatValue);
        TRTPPacket::writeFloat(buf, beatInfos[i].smoothedBeatValue);
        TRTPPacket::writeU32(buf, beatInfos[i].sequenceNumber);
    }
}
// Drains queued beat records into a freshly allocated TRTPMetaDataBeat.
// Unless `flushOut` is set, requires roughly kAAHBeatInfoBufferTimeMS worth
// of records to be queued first, batching beats to reduce packet overhead.
// Returns NULL when there is nothing (or not yet enough) to send; the
// caller owns the returned block.
TRTPMetaDataBlock* BeatDetectionAlgorithm::collectMetaData(bool flushOut) {
    AudioBeatInfo beatInfo[kBeatQueueLen];
    uint32_t min_read;
    if (flushOut) {
        min_read = 0;
    } else {
        // mSegments covers kBacktraceTime (1s), so this converts the buffer
        // time in milliseconds to a segment count, capped at the queue size
        min_read = mSegments * kAAHBeatInfoBufferTimeMS / 1000;
        if (min_read > kBeatQueueLen) {
            min_read = kBeatQueueLen;
        }
    }
    int beats = mBeatInfoQueue.readBulk(beatInfo, min_read,
                                        kBeatQueueLen);
    if (beats > 0) {
        uint32_t privateSize = TRTPMetaDataBeat::calculateItemLength(beats);
        if (privateSize > 0xffff) {
            LOGE("metadata packet too big");
            return NULL;
        }
        return new TRTPMetaDataBeat(beats, beatInfo);
    } else {
        return NULL;
    }
}
// Resets all detector state (history, indices, smoothed values) while
// keeping the allocated buffers. No-op when initialize() has not
// (successfully) run yet.
void BeatDetectionAlgorithm::flush() {
    if (mEnergyTrain == NULL || mBeatTrain == NULL) {
        return;
    }
    mEnergyTrainIdx = 0;
    mBeatTrainIdx = 0;
    mEnergyTrainFilled = false;
    mBeatValue = 0;
    mBeatValueSmoothed = 0;
    mBeatLastTs = -1;
    memset(mEnergyTrain, 0, mTrainMatrixSize * sizeof(uint64_t));
    // IEEE745: all zero bytes generates 0.0f
    memset(mBeatTrain, 0, mTrainMatrixSize * sizeof(float));
    memset(&mEnergyTrainSum, 0, sizeof(mEnergyTrainSum));
}
} // namespace android

View File

@@ -0,0 +1,145 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __AAH_AUDIO_ALGORITHM_H__
#define __AAH_AUDIO_ALGORITHM_H__
#include <utils/RefBase.h>
#include <utils.h>
#include "aah_tx_packet.h"
namespace android {
// Abstract interface for an audio-analysis stage that is fed FFT segments
// and periodically drained into TRTP metadata blocks.
class AudioAlgorithm : public virtual RefBase {
public:
    explicit AudioAlgorithm() {}
    // Prepares internal state for the given segment size / sample rate;
    // returns false if the configuration is unsupported.
    virtual bool initialize(uint32_t samples_per_seg, uint32_t samplerate) = 0;
    // Consumes one FFT segment stamped with media time `ts`.
    virtual void process(int64_t ts, int32_t* fft,
                         uint32_t samples_per_seg) = 0;
    // Drops accumulated history (e.g. on seek / discontinuity).
    virtual void flush() = 0;
    // Releases buffers allocated by initialize().
    virtual void cleanup() = 0;
    // Drains pending results into a new block; NULL when nothing to send.
    // When `flushOut` is true, emits whatever is buffered regardless of the
    // usual batching threshold. Caller owns the returned block.
    virtual TRTPMetaDataBlock* collectMetaData(bool flushOut) = 0;
    virtual ~AudioAlgorithm() {}
private:
    DISALLOW_EVIL_CONSTRUCTORS (AudioAlgorithm);
};
// One snapshot of the beat detector's output for a single FFT segment.
// Kept a plain aggregate of trivially-copyable fields so instances can be
// block-copied (memcpy) when packed into a metadata payload.
struct AudioBeatInfo {
    // Default: zero timestamp, no beat energy, sequence number 0.
    AudioBeatInfo()
            : ts(0),
              beatValue(0),
              smoothedBeatValue(0),
              sequenceNumber(0) {
    }
    // Fully specified snapshot.
    AudioBeatInfo(int64_t t, float b, float sb, uint32_t s)
            : ts(t),
              beatValue(b),
              smoothedBeatValue(sb),
              sequenceNumber(s) {
    }
    int64_t ts;               // media timestamp of the segment
    float beatValue;          // instantaneous beat strength
    float smoothedBeatValue;  // low-pass filtered beat strength
    uint32_t sequenceNumber;  // bumps once per detected beat
};
// Detects beats by comparing each frequency band's instantaneous energy
// against its rolling one-second average; emits batched AudioBeatInfo
// records as TRTPMetaDataBeat blocks via collectMetaData().
class BeatDetectionAlgorithm : public virtual AudioAlgorithm {
public:
    // serialized bytes per AudioBeatInfo record (u64 + float + float + u32)
    static const int kItemLength = 20;
    explicit BeatDetectionAlgorithm();
    virtual bool initialize(uint32_t samples_per_seg, uint32_t samplerate);
    // each 32 bits fft value consists of real part on high 16 bits
    // and imaginary part on low 16 bits
    virtual void process(int64_t ts, int32_t* fft, uint32_t samples_per_seg);
    virtual void flush();
    virtual void cleanup();
    virtual TRTPMetaDataBlock* collectMetaData(bool flushOut);
    virtual ~BeatDetectionAlgorithm();
protected:
    // =======================
    // constant definition
    // =======================
    // divide frequency domain to kBands
    static const int32_t kBands = 128;
    // we search from kBandStart(inclusive) to kBandEnd (exclusive)
    static const int32_t kBandStart = 0;
    static const int32_t kBandEnd = 64;
    static const int32_t kSearchBands = kBandEnd - kBandStart;
    // magic number, the bar should set higher if kBands is bigger
    static const float kThreshHold;
    static const float kSumThreshold;
    static const float kBacktraceTime;
    static const int64_t kBeatInterval;
    static const float kMaxBeatValue;
    static const int32_t kAAHBeatInfoBufferTimeMS;
    // 128 maximum beat allowed, this is roughly 3 seconds data for 44KHZ, 1024
    // fft samples per segment
    static const uint32_t kBeatQueueLen = 128;
    uint32_t mSamplesPerSegment;  // 32-bit fft entries per segment
    uint32_t mSegments;           // segments per kBacktraceTime of audio
    uint32_t mSamplesPerBand;     // mSamplesPerSegment / kBands (even)
    // =======================
    // Energy train
    // =======================
    // circular energy value buffer for each BAND, each maintains one second
    uint32_t mEnergyTrainIdx;
    uint64_t* mEnergyTrain; // 2d array, size is kSearchBands * mSegments
    uint32_t mTrainMatrixSize; // kSearchBands * mSegments
    // sum of last second energy for each sub band
    uint64_t mEnergyTrainSum[kSearchBands];
    // if energy train has been filled for 1 second
    bool mEnergyTrainFilled;
    // =======================
    // Beat train
    // =======================
    // beat value train buffer for each BAND
    // It's not necessary keep a train now, we may need it for detecting peak
    float* mBeatTrain; // 2d array of kSearchBands * mSegments
    uint32_t mBeatTrainIdx;
    // =======================
    // Energy extraction stuff passed to outside
    // There is multi thread issue, but not critical.
    // So we not using synchronized or other mechanism
    // =======================
    float mBeatValue;
    float mBeatValueSmoothed;
    CircularArray<AudioBeatInfo, kBeatQueueLen> mBeatInfoQueue;
    uint32_t mBeatSequenceNumber;
    int64_t mBeatLastTs;
    friend class TRTPMetaDataBeat;
private:
    DISALLOW_EVIL_CONSTRUCTORS (BeatDetectionAlgorithm);
};
} // namespace android
#endif // __AAH_AUDIO_ALGORITHM_H__

View File

@@ -0,0 +1,224 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "LibAAH_RTP"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <common_time/cc_helper.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/stagefright/Utils.h>
#include <utils/Timers.h>
#include <utils/threads.h>
#include <stdio.h>
#include "aah_audio_processor.h"
extern void fixed_fft_real(int n, int32_t *v);
#define CLIP(x, low, high) ((x) < (low) ? (low) : ((x) > (high) ? (high) : (x)))
#define SAMPLES_TO_TS(samples, sample_rate) \
( static_cast<int64_t>(samples) * 1000000 / (sample_rate) )
// fill mono audio data into 16 bits workspace
// Pack mono audio into the FFT workspace: each 32-bit output word holds two
// consecutive samples, normalized to 16 bits, first sample in the high half.
// An odd trailing sample (if any) is dropped.
template<int BITS, typename SRCTYPE>
static inline void fillWorkspaceOneChannel(int32_t* dst, SRCTYPE* src,
                                           uint32_t to_fill) {
    const uint32_t pairs = to_fill >> 1;
    for (uint32_t p = 0; p < pairs; ++p) {
        int32_t hi = src[2 * p];
        int32_t lo = src[2 * p + 1];
        // BITS is a template constant, so this branch folds at compile time.
        if (BITS < 16) {
            hi = (hi << (16 - BITS)) & 0xffff;
            lo = (lo << (16 - BITS)) & 0xffff;
        } else {
            hi = (hi >> (BITS - 16)) & 0xffff;
            lo = (lo >> (BITS - 16)) & 0xffff;
        }
        dst[p] = (hi << 16) | lo;
    }
}
// fill stereo audio data into 16 bits workspace, left/right are mixed
// Pack interleaved stereo audio into the FFT workspace.  Every L/R frame is
// mixed by summing the two channels (hence the 15- rather than 16-bit
// normalization shift), and two mixed frames are packed per 32-bit word,
// first frame in the high half.  Consumes four source samples per word.
template<int BITS, typename SRCTYPE>
static inline void fillWorkspaceTwoChannel(int32_t* dst, SRCTYPE* src,
                                           uint32_t to_fill) {
    for (uint32_t idx = 0; idx + 3 < to_fill; idx += 4) {
        int32_t mixA = static_cast<int32_t>(src[idx]) + src[idx + 1];
        int32_t mixB = static_cast<int32_t>(src[idx + 2]) + src[idx + 3];
        // BITS is a template constant, so this branch folds at compile time.
        if (BITS < 16) {
            mixA = (mixA << (15 - BITS)) & 0xffff;
            mixB = (mixB << (15 - BITS)) & 0xffff;
        } else {
            mixA = (mixA >> (BITS - 15)) & 0xffff;
            mixB = (mixB >> (BITS - 15)) & 0xffff;
        }
        *dst++ = (mixA << 16) | mixB;
    }
}
// Dispatch to the mono or stereo workspace filler based on channel count.
// Returns false (after logging) for unsupported channel configurations.
template<int BITS, typename SRCTYPE>
static inline bool fillWorkspace(int32_t* dst, SRCTYPE* src,
                                 uint32_t to_fill, int32_t channels) {
    if (channels == 1) {
        fillWorkspaceOneChannel<BITS, SRCTYPE>(dst, src, to_fill);
        return true;
    }
    if (channels == 2) {
        fillWorkspaceTwoChannel<BITS, SRCTYPE>(dst, src, to_fill);
        return true;
    }
    LOGE("Unsupported channel %d", channels);
    return false;
}
namespace android {
// Construct the processing pump; no FFT samples are buffered yet
// (filled_ == 0) and no algorithm is attached until setAlgorithm().
AAH_AudioProcessor::AAH_AudioProcessor(OMXClient& omx)
    : AAH_DecoderPumpBase(omx),
      filled_(0) {
}
AAH_AudioProcessor::~AAH_AudioProcessor() {
    // Nothing to release here; the base class destructor calls shutdown().
}
// Attach the analysis algorithm that queueToSink() will feed.  No locking
// is performed here; callers are expected to set it before decoding starts.
void AAH_AudioProcessor::setAlgorithm(const sp<AudioAlgorithm>& processor) {
    processor_ = processor;
}
// Return the currently attached algorithm (may be a NULL sp).
const sp<AudioAlgorithm>& AAH_AudioProcessor::getAlgorithm() {
    return processor_;
}
// Shut down the underlying decode pump; the algorithm itself is torn down
// via stopAndCleanupSink() on the pump's work thread.
status_t AAH_AudioProcessor::shutdown_l() {
    status_t ret = AAH_DecoderPumpBase::shutdown_l();
    LOGV("Shutdown AAH_AudioProcessor");
    return ret;
}
// Accumulate decoded PCM into the FFT workspace; every time kFFTSize frames
// have been gathered, run the fixed-point FFT and hand the spectrum to the
// attached AudioAlgorithm with the media timestamp of the chunk's first
// sample.  Samples without a PTS are dropped.
void AAH_AudioProcessor::queueToSink(MediaBuffer* decoded_frames) {
    sp<MetaData> meta;
    int64_t ts;

    // Fetch the metadata and make sure the sample has a timestamp.  We
    // cannot process samples which are missing PTSs.
    meta = decoded_frames->meta_data();
    if ((meta == NULL) || (!meta->findInt64(kKeyTime, &ts))) {
        LOGV("Decoded sample missing timestamp, cannot process.");
        return;
    }

    // Nothing useful can be done without an attached algorithm; guard the
    // dereference the same way flush() does.
    if (processor_ == NULL) {
        return;
    }

    if (!processor_->initialize(kFFTSize, format_sample_rate_)) {
        return;
    }

    uint8_t* decoded_data = reinterpret_cast<uint8_t*>(decoded_frames->data());
    uint32_t decoded_amt = decoded_frames->range_length();
    decoded_data += decoded_frames->range_offset();

    // timestamp for the current workspace start position is calculated by
    // current ts minus filled samples.
    int64_t start_ts = ts - SAMPLES_TO_TS(filled_, format_sample_rate_);

    // following code is an excerpt of system visualizer, the differences are
    // in three places in order to get a more accurate output fft value
    // - full 16 bits are kept comparing to dynamic shifting in system
    //   visualizer
    // - full audio stream are processed unlike the "sparse" sampling in
    //   system visualizer
    // - system visualizer uses a weird dynamic shifting down of output fft
    //   values, we output full 16 bits
    uint32_t sampleBytes = 2;  // android system assumes 16bits for now
    uint32_t frameBytes = sampleBytes * format_channels_;
    int loopcount = 0;  // how many fft chunks have been sent to algorithm
    while (decoded_amt >= frameBytes * 2) {  // at least two samples
        // Frames available in the remaining payload.  (Renamed from
        // 'decoded_frames', which shadowed the MediaBuffer* parameter.)
        uint32_t frames_avail = decoded_amt / frameBytes;
        frames_avail &= (~1);  // only handle even frame counts
        uint32_t to_fill = MIN(kFFTSize - filled_, frames_avail);
        uint32_t to_fill_bytes = to_fill * frameBytes;

        // workspace is array of 32bits integer, each 32bits has two samples.
        // The integer order in CPU register is "S1 00 S2 00" from high to
        // low.  In memory, the workspace layout depends on endian order.
        // In another word, memory layout is different on different endian
        // system, but when they are read into CPU 32bits register, they are
        // the same order to perform arithmetic and bitwise operations.
        // For details see fixedfft.cpp
        int32_t* dst = workspace_ + (filled_ >> 1);

        // NOTE(fix): the fillWorkspace helpers consume their length argument
        // as a count of raw interleaved samples (the stereo filler reads 4
        // source samples per output word), while to_fill counts frames.  The
        // old code passed the frame count, so for 2-channel input only half
        // of each chunk was mixed and the workspace offset accounting left
        // stale words behind.  Pass the raw sample count instead; for mono
        // (format_channels_ == 1) this is unchanged.
        uint32_t to_fill_samples = to_fill * format_channels_;
        switch (sampleBytes) {
            case 2:
                if (!fillWorkspace<16, int16_t>(
                        dst, reinterpret_cast<int16_t*>(decoded_data),
                        to_fill_samples, format_channels_)) {
                    return;
                }
                break;
            case 1:
                if (!fillWorkspace<8, int8_t>(
                        dst, reinterpret_cast<int8_t*>(decoded_data),
                        to_fill_samples, format_channels_)) {
                    return;
                }
                break;
            default:
                LOGE("Unsupported sample size %d", sampleBytes);
                return;
        }
        decoded_data += to_fill_bytes;
        decoded_amt -= to_fill_bytes;
        filled_ += to_fill;
        if (filled_ == kFFTSize) {
            // workspace_ is full, calculate fft
            fixed_fft_real(kFFTSize >> 1, workspace_);
            // now workspace_ contains 16 bits fft values
            processor_->process(
                    start_ts + SAMPLES_TO_TS((kFFTSize) * loopcount,
                                             format_sample_rate_),
                    workspace_, kFFTSize);
            // open business for next chunk of kFFTSize samples
            filled_ = 0;
            loopcount++;
        }
    }
}
void AAH_AudioProcessor::stopAndCleanupSink() {
processor_->cleanup();
}
// Discard the partially filled FFT workspace and reset the attached
// algorithm's state (if one is attached).
void AAH_AudioProcessor::flush() {
    filled_ = 0;
    if (processor_ == NULL) {
        return;
    }
    processor_->flush();
}
} // namespace android

View File

@@ -0,0 +1,53 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __AAH_AUDIO_PROCESSOR_H__
#define __AAH_AUDIO_PROCESSOR_H__
#include "aah_decoder_pump.h"
#include "aah_audio_algorithm.h"
namespace android {
// decode audio, calculate fft and invoke AudioAlgorithm
class AAH_AudioProcessor : public AAH_DecoderPumpBase {
public:
    explicit AAH_AudioProcessor(OMXClient& omx);
    // Drop the partially filled FFT workspace and flush the attached
    // algorithm (if any).
    void flush();
    // Attach / fetch the analysis algorithm driven by queueToSink().
    void setAlgorithm(const sp<AudioAlgorithm>& processor);
    const sp<AudioAlgorithm>& getAlgorithm();
    virtual ~AAH_AudioProcessor();
private:
    // fft array size must be 2^n
    static const uint32_t kFFTSize = (1 << 10);
    // number of frames accumulated in workspace_ so far
    uint32_t filled_;
    // packed 16-bit sample pairs, handed to fixed_fft_real() when full
    int32_t workspace_[kFFTSize >> 1];
    // algorithm fed with FFT output; may be NULL until setAlgorithm()
    sp<AudioAlgorithm> processor_;
    // AAH_DecoderPumpBase sink hooks: accumulate decoded PCM / tear down
    virtual void queueToSink(MediaBuffer* decoded_sample);
    virtual void stopAndCleanupSink();
    virtual status_t shutdown_l();
    DISALLOW_EVIL_CONSTRUCTORS (AAH_AudioProcessor);
};
} // namespace android
#endif // __AAH_AUDIO_PROCESSOR_H__

View File

@@ -40,24 +40,17 @@ static const long long kLongDecodeErrorThreshold = 1000000ll;
static const uint32_t kMaxLongErrorsBeforeFatal = 3;
static const uint32_t kMaxErrorsBeforeFatal = 60;
AAH_DecoderPump::AAH_DecoderPump(OMXClient& omx)
AAH_DecoderPumpBase::AAH_DecoderPumpBase(OMXClient& omx)
: omx_(omx)
, thread_status_(OK)
, renderer_(NULL)
, last_queued_pts_valid_(false)
, last_queued_pts_(0)
, last_ts_transform_valid_(false)
, last_left_volume_(1.0f)
, last_right_volume_(1.0f)
, last_stream_type_(AUDIO_STREAM_DEFAULT) {
, thread_status_(OK) {
thread_ = new ThreadWrapper(this);
}
AAH_DecoderPump::~AAH_DecoderPump() {
AAH_DecoderPumpBase::~AAH_DecoderPumpBase() {
shutdown();
}
status_t AAH_DecoderPump::initCheck() {
status_t AAH_DecoderPumpBase::initCheck() {
if (thread_ == NULL) {
LOGE("Failed to allocate thread");
return NO_MEMORY;
@@ -66,7 +59,7 @@ status_t AAH_DecoderPump::initCheck() {
return OK;
}
status_t AAH_DecoderPump::queueForDecode(MediaBuffer* buf) {
status_t AAH_DecoderPumpBase::queueForDecode(MediaBuffer* buf) {
if (NULL == buf) {
return BAD_VALUE;
}
@@ -85,7 +78,7 @@ status_t AAH_DecoderPump::queueForDecode(MediaBuffer* buf) {
return OK;
}
void AAH_DecoderPump::queueToRenderer(MediaBuffer* decoded_sample) {
void AAH_DecoderPump::queueToSink(MediaBuffer* decoded_sample) {
Mutex::Autolock lock(&render_lock_);
sp<MetaData> meta;
int64_t ts;
@@ -173,7 +166,7 @@ void AAH_DecoderPump::queueToRenderer(MediaBuffer* decoded_sample) {
}
}
void AAH_DecoderPump::stopAndCleanupRenderer() {
void AAH_DecoderPump::stopAndCleanupSink() {
if (NULL == renderer_) {
return;
}
@@ -297,7 +290,7 @@ bool AAH_DecoderPump::isAboutToUnderflow(int64_t threshold) {
return ((tt_now + threshold - last_queued_pts_tt) > 0);
}
void* AAH_DecoderPump::workThread() {
void* AAH_DecoderPumpBase::workThread() {
// No need to lock when accessing decoder_ from the thread. The
// implementation of init and shutdown ensure that other threads never touch
// decoder_ while the work thread is running.
@@ -367,7 +360,7 @@ void* AAH_DecoderPump::workThread() {
}
// If the format has actually changed, destroy our current renderer
// so that a new one can be created during queueToRenderer with the
// so that a new one can be created during queueToSink with the
// proper format.
//
// TODO : In order to transition seamlessly, we should change this
@@ -375,7 +368,7 @@ void* AAH_DecoderPump::workThread() {
// we destroy it. We can still create a new renderer, the timed
// nature of the renderer should ensure a seamless splice.
if (formatActuallyChanged)
stopAndCleanupRenderer();
stopAndCleanupSink();
res = OK;
}
@@ -445,17 +438,17 @@ void* AAH_DecoderPump::workThread() {
consecutive_errors = 0;
consecutive_long_errors = 0;
queueToRenderer(bufOut);
queueToSink(bufOut);
bufOut->release();
}
decoder_->stop();
stopAndCleanupRenderer();
stopAndCleanupSink();
return NULL;
}
status_t AAH_DecoderPump::init(sp<MetaData> params) {
status_t AAH_DecoderPumpBase::init(sp<MetaData> params) {
Mutex::Autolock lock(&init_lock_);
if (decoder_ != NULL) {
@@ -511,12 +504,12 @@ bailout:
return OK;
}
status_t AAH_DecoderPump::shutdown() {
status_t AAH_DecoderPumpBase::shutdown() {
Mutex::Autolock lock(&init_lock_);
return shutdown_l();
}
status_t AAH_DecoderPump::shutdown_l() {
status_t AAH_DecoderPumpBase::shutdown_l() {
thread_->requestExit();
thread_cond_.signal();
thread_->requestExitAndWait();
@@ -527,10 +520,6 @@ status_t AAH_DecoderPump::shutdown_l() {
}
in_queue_.clear();
last_queued_pts_valid_ = false;
last_ts_transform_valid_ = false;
last_left_volume_ = 1.0f;
last_right_volume_ = 1.0f;
thread_status_ = OK;
decoder_ = NULL;
@@ -539,7 +528,7 @@ status_t AAH_DecoderPump::shutdown_l() {
return OK;
}
status_t AAH_DecoderPump::read(MediaBuffer **buffer,
status_t AAH_DecoderPumpBase::read(MediaBuffer **buffer,
const ReadOptions *options) {
if (!buffer) {
return BAD_VALUE;
@@ -566,15 +555,35 @@ status_t AAH_DecoderPump::read(MediaBuffer **buffer,
return (NULL == *buffer) ? INVALID_OPERATION : OK;
}
AAH_DecoderPump::ThreadWrapper::ThreadWrapper(AAH_DecoderPump* owner)
AAH_DecoderPumpBase::ThreadWrapper::ThreadWrapper(AAH_DecoderPumpBase* owner)
: Thread(false /* canCallJava*/ )
, owner_(owner) {
}
bool AAH_DecoderPump::ThreadWrapper::threadLoop() {
bool AAH_DecoderPumpBase::ThreadWrapper::threadLoop() {
CHECK(NULL != owner_);
owner_->workThread();
return false;
}
AAH_DecoderPump::AAH_DecoderPump(OMXClient& omx)
: AAH_DecoderPumpBase(omx)
, renderer_(NULL)
, last_queued_pts_valid_(false)
, last_queued_pts_(0)
, last_ts_transform_valid_(false)
, last_left_volume_(1.0f)
, last_right_volume_(1.0f)
, last_stream_type_(AUDIO_STREAM_DEFAULT) {
}
status_t AAH_DecoderPump::shutdown_l() {
status_t ret = AAH_DecoderPumpBase::shutdown_l();
last_queued_pts_valid_ = false;
last_ts_transform_valid_ = false;
last_left_volume_ = 1.0f;
last_right_volume_ = 1.0f;
return ret;
}
} // namespace android

View File

@@ -31,9 +31,11 @@ class MetaData;
class OMXClient;
class TimedAudioTrack;
class AAH_DecoderPump : public MediaSource {
// omx decoder wrapper, how to process the output buffer is to be defined in sub
// classes such as android audio render or an audio processor
class AAH_DecoderPumpBase : public MediaSource {
public:
explicit AAH_DecoderPump(OMXClient& omx);
explicit AAH_DecoderPumpBase(OMXClient& omx);
status_t initCheck();
status_t queueForDecode(MediaBuffer* buf);
@@ -41,10 +43,6 @@ class AAH_DecoderPump : public MediaSource {
status_t init(sp<MetaData> params);
status_t shutdown();
void setRenderTSTransform(const LinearTransform& trans);
void setRenderVolume(float left, float right);
void setRenderStreamType(int stream_type);
bool isAboutToUnderflow(int64_t threshold);
bool getStatus() const { return thread_status_; }
// MediaSource methods
@@ -55,29 +53,32 @@ class AAH_DecoderPump : public MediaSource {
const ReadOptions *options);
protected:
virtual ~AAH_DecoderPump();
virtual ~AAH_DecoderPumpBase();
virtual status_t shutdown_l();
sp<MetaData> format_;
int32_t format_channels_;
int32_t format_sample_rate_;
CCHelper cc_helper_;
private:
class ThreadWrapper : public Thread {
public:
friend class AAH_DecoderPump;
explicit ThreadWrapper(AAH_DecoderPump* owner);
friend class AAH_DecoderPumpBase;
explicit ThreadWrapper(AAH_DecoderPumpBase* owner);
private:
virtual bool threadLoop();
AAH_DecoderPump* owner_;
AAH_DecoderPumpBase* owner_;
DISALLOW_EVIL_CONSTRUCTORS(ThreadWrapper);
};
void* workThread();
virtual status_t shutdown_l();
void queueToRenderer(MediaBuffer* decoded_sample);
void stopAndCleanupRenderer();
sp<MetaData> format_;
int32_t format_channels_;
int32_t format_sample_rate_;
virtual void queueToSink(MediaBuffer* decoded_sample) = 0;
virtual void stopAndCleanupSink() = 0;
sp<MediaSource> decoder_;
OMXClient& omx_;
@@ -88,8 +89,26 @@ class AAH_DecoderPump : public MediaSource {
Mutex thread_lock_;
status_t thread_status_;
// protected by the thread_lock_
typedef List<MediaBuffer*> MBQueue;
MBQueue in_queue_;
DISALLOW_EVIL_CONSTRUCTORS(AAH_DecoderPumpBase);
};
// decode audio and write to TimeAudioTrack
class AAH_DecoderPump : public AAH_DecoderPumpBase {
public:
explicit AAH_DecoderPump(OMXClient& omx);
void setRenderTSTransform(const LinearTransform& trans);
void setRenderVolume(float left, float right);
void setRenderStreamType(int stream_type);
bool isAboutToUnderflow(int64_t threshold);
private:
Mutex render_lock_;
TimedAudioTrack* renderer_;
bool last_queued_pts_valid_;
int64_t last_queued_pts_;
bool last_ts_transform_valid_;
@@ -97,11 +116,10 @@ class AAH_DecoderPump : public MediaSource {
float last_left_volume_;
float last_right_volume_;
int last_stream_type_;
CCHelper cc_helper_;
// protected by the thread_lock_
typedef List<MediaBuffer*> MBQueue;
MBQueue in_queue_;
virtual void queueToSink(MediaBuffer* decoded_sample);
virtual void stopAndCleanupSink();
virtual status_t shutdown_l();
DISALLOW_EVIL_CONSTRUCTORS(AAH_DecoderPump);
};

View File

@@ -30,6 +30,7 @@
#include <utils/threads.h>
#include "aah_decoder_pump.h"
#include "IAAHMetaData.h"
#include "utils.h"
namespace android {
@@ -192,6 +193,8 @@ class AAH_RXPlayer : public MediaPlayerHWInterface {
int32_t ts_lower);
void processPayloadCont (uint8_t* buf,
uint32_t amt);
void processMetaData(uint8_t* buf, uint32_t amt);
void flushMetaDataService();
void processTSTransform(const LinearTransform& trans);
bool isAboutToUnderflow();
@@ -260,9 +263,14 @@ class AAH_RXPlayer : public MediaPlayerHWInterface {
uint8_t audio_volume_remote_;
int audio_stream_type_;
MediaToSystemTransform mMetaDataTsTransform;
static const int64_t kAboutToUnderflowThreshold;
static const int kInactivityTimeoutMsec;
// singleton of the IBinder service provider for metadata
sp<AAHMetaDataService> aah_metadata_service_;
DISALLOW_EVIL_CONSTRUCTORS(Substream);
};
@@ -278,6 +286,8 @@ class AAH_RXPlayer : public MediaPlayerHWInterface {
bool processRX(PacketBuffer* pb);
void processRingBuffer();
void processCommandPacket(PacketBuffer* pb);
void processMetaDataPacket(uint8_t program_id,
PacketBuffer* pb);
bool processGaps();
bool processRetransmitNAK(const uint8_t* data, size_t amt);
void setGapStatus(GapStatus status);

View File

@@ -595,6 +595,8 @@ void AAH_RXPlayer::processRingBuffer() {
goto process_next_packet;
}
uint32_t program_id = (ssrc >> 5) & 0x1F;
// Is there a timestamp transformation present on this packet? If
// so, extract it and pass it to the appropriate substreams.
if (trtp_flags & 0x02) {
@@ -613,7 +615,6 @@ void AAH_RXPlayer::processRingBuffer() {
trans.a_to_b_denom = U32_AT(data + offset + 12);
foundTrans = true;
uint32_t program_id = (ssrc >> 5) & 0x1F;
for (size_t i = 0; i < substreams_.size(); ++i) {
sp<Substream> iter = substreams_.valueAt(i);
CHECK(iter != NULL);
@@ -630,6 +631,10 @@ void AAH_RXPlayer::processRingBuffer() {
if (4 == payload_type) {
processCommandPacket(pb);
goto process_next_packet;
} else if (5 == payload_type) {
// if it's MetaDataPacket, send to associated substream
processMetaDataPacket(program_id, pb);
goto process_next_packet;
}
}
@@ -738,6 +743,7 @@ void AAH_RXPlayer::processCommandPacket(PacketBuffer* pb) {
for (size_t i = 0; i < substreams_.size(); ++i) {
const sp<Substream>& stream = substreams_.valueAt(i);
if (stream->getProgramID() == program_id) {
stream->flushMetaDataService();
stream->clearInactivityTimeout();
}
}
@@ -795,6 +801,50 @@ void AAH_RXPlayer::processCommandPacket(PacketBuffer* pb) {
cleanoutExpiredSubstreams();
}
void AAH_RXPlayer::processMetaDataPacket(uint8_t program_id, PacketBuffer* pb) {
CHECK(NULL != pb);
uint8_t* data = pb->data_;
ssize_t amt = pb->length_;
// verify that this packet meets the minimum length of a metadata packet
if (amt < 22) {
return;
}
uint8_t trtp_version = data[12];
uint8_t trtp_flags = data[13] & 0xF;
if (1 != trtp_version) {
LOGV("Dropping packet, bad trtp version %hhu", trtp_version);
return;
}
// calculate the start of the metadata payload
ssize_t offset = 18;
if (trtp_flags & 0x01) {
// timestamp is present (4 bytes)
offset += 4;
// we don't sent timestamp in metadata packet header
// however the content of metadata may contain timestamp
}
if (trtp_flags & 0x02) {
// transform is present (24 bytes)
offset += 24;
// ignore for now, we don't sent transform in metadata packet
}
for (size_t i = 0; i < substreams_.size(); ++i) {
sp<Substream> iter = substreams_.valueAt(i);
CHECK(iter != NULL);
if (iter->getProgramID() == program_id) {
iter->processMetaData(data + offset, amt - offset);
}
}
}
bool AAH_RXPlayer::processGaps() {
// Deal with the current gap situation. Specifically...
//

View File

@@ -28,8 +28,7 @@
#include "aah_rx_player.h"
#include "aah_tx_packet.h"
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#include "aah_audio_algorithm.h"
namespace android {
@@ -57,6 +56,8 @@ AAH_RXPlayer::Substream::Substream(uint32_t ssrc, OMXClient& omx) {
LOGE("%s failed to initialize decoder pump!", __PRETTY_FUNCTION__);
}
aah_metadata_service_ = AAHMetaDataService::getInstance();
// cleanupBufferInProgress will reset most of the internal state variables.
// Just need to make sure that buffer_in_progress_ is NULL before calling.
cleanupBufferInProgress();
@@ -642,6 +643,9 @@ void AAH_RXPlayer::Substream::processTSTransform(const LinearTransform& trans) {
if (decoder_ != NULL) {
decoder_->setRenderTSTransform(trans);
}
if (aah_metadata_service_ != NULL) {
mMetaDataTsTransform.setMediaToCommonTransform(trans);
}
}
void AAH_RXPlayer::Substream::signalEOS() {
@@ -732,4 +736,44 @@ void AAH_RXPlayer::Substream::applyVolume() {
}
}
void AAH_RXPlayer::Substream::processMetaData(uint8_t* data, uint32_t amt) {
if (aah_metadata_service_ == NULL) {
return;
}
uint32_t offset = 0;
// the packet must contain 4 bytes of TRTPMetaDataBlock payload
// beyond the TRTP header
while (offset <= amt - 4) {
uint16_t typeId = U16_AT(data + offset);
uint16_t item_length = U16_AT(data + offset + 2);
offset += 4;
if (offset <= amt - item_length) {
if (kMetaDataBeat == typeId) {
uint16_t count = U16_AT(data + offset);
uint8_t* buf = data + offset + 2;
mMetaDataTsTransform.prepareCommonToSystem();
for (int i = 0, c = count * BeatDetectionAlgorithm::kItemLength;
i < c;
i += BeatDetectionAlgorithm::kItemLength) {
uint8_t* ptr = buf + i;
int64_t ts = static_cast<int64_t>(U64_AT(ptr));
mMetaDataTsTransform.mediaToSystem(&ts);
TRTPPacket::writeU64(ptr, ts);
}
}
// TODO: in future, we may pass programID to java layer to identify
// the song; the java layer has no knowledge of ID at this moment
aah_metadata_service_->broadcast(typeId, item_length,
data + offset);
}
offset += item_length;
}
}
void AAH_RXPlayer::Substream::flushMetaDataService() {
if (aah_metadata_service_ != NULL) {
aah_metadata_service_->flush();
}
}
} // namespace android

View File

@@ -19,8 +19,7 @@
#include <arpa/inet.h>
#include <string.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/foundation/ADebug.h>
#include "aah_tx_packet.h"
@@ -190,6 +189,12 @@ void TRTPPacket::writeU64(uint8_t*& buf, uint64_t val) {
buf += 8;
}
void TRTPPacket::writeFloat(uint8_t*& buf, float val) {
uint32_t intBits = *(reinterpret_cast<uint32_t *>(&val));
*reinterpret_cast<uint32_t*>(buf) = htonl(intBits);
buf += 4;
}
void TRTPPacket::writeTRTPHeader(uint8_t*& buf,
bool isFirstFragment,
int totalPacketLen) {
@@ -381,4 +386,58 @@ bool TRTPActiveProgramUpdatePacket::pack() {
return true;
}
void TRTPMetaDataBlock::writeBlockHead(uint8_t*& buf) const {
TRTPPacket::writeU16(buf, mTypeId);
TRTPPacket::writeU16(buf, mItemLength);
}
TRTPMetaDataPacket::~TRTPMetaDataPacket() {
for (size_t i=0; i < mBlocks.size(); i++) {
delete mBlocks[i];
}
}
void TRTPMetaDataPacket::append(TRTPMetaDataBlock* block) {
mBlocks.add(block);
}
bool TRTPMetaDataPacket::pack() {
if (mIsPacked) {
return false;
}
// Active program update packets contain a list of block, each block
// is 2 bytes typeId, 2 bytes item_length and item_length buffer
int packetLen = kRTPHeaderLen + TRTPHeaderLen();
for (size_t i = 0; i < mBlocks.size(); i++) {
const TRTPMetaDataBlock* block = mBlocks[i];
packetLen += 4 + block->mItemLength;
}
// compare to maximum UDP payload length
// (IP header 20, UDP header 8)
// TODO: split by MTU (normally 1500)
if (packetLen > (0xffff - 28)) {
return false;
}
mPacket = new uint8_t[packetLen];
if (!mPacket) {
return false;
}
mPacketLen = packetLen;
uint8_t* cur = mPacket;
writeTRTPHeader(cur, true, packetLen);
for (size_t i = 0; i < mBlocks.size(); i++) {
const TRTPMetaDataBlock* block = mBlocks[i];
block->write(cur);
}
mIsPacked = true;
return true;
}
} // namespace android

View File

@@ -20,6 +20,7 @@
#include <utils/LinearTransform.h>
#include <utils/RefBase.h>
#include <utils/Timers.h>
#include <utils/Vector.h>
namespace android {
@@ -30,6 +31,7 @@ class TRTPPacket : public RefBase {
kHeaderTypeVideo = 2,
kHeaderTypeSubpicture = 3,
kHeaderTypeControl = 4,
kHeaderTypeMetaData = 5,
};
TRTPPacket(TRTPHeaderType headerType)
@@ -86,6 +88,12 @@ class TRTPPacket : public RefBase {
static const uint32_t kCNC_LeaveGroupID = 'Tlgp';
static const uint32_t kCNC_NakJoinGroupID = 'Tngp';
static void writeU8(uint8_t*& buf, uint8_t val);
static void writeU16(uint8_t*& buf, uint16_t val);
static void writeU32(uint8_t*& buf, uint32_t val);
static void writeU64(uint8_t*& buf, uint64_t val);
static void writeFloat(uint8_t*& buf, float val);
protected:
static const int kRTPHeaderLen = 12;
virtual int TRTPHeaderLen() const;
@@ -94,11 +102,6 @@ class TRTPPacket : public RefBase {
bool isFirstFragment,
int totalPacketLen);
void writeU8(uint8_t*& buf, uint8_t val);
void writeU16(uint8_t*& buf, uint16_t val);
void writeU32(uint8_t*& buf, uint32_t val);
void writeU64(uint8_t*& buf, uint64_t val);
bool mIsPacked;
uint8_t mVersion;
@@ -215,6 +218,42 @@ class TRTPActiveProgramUpdatePacket : public TRTPControlPacket {
uint8_t mProgramIDs[kMaxProgramIDs];
};
enum TRTPMetaDataTypeID {
kMetaDataNone = 0,
kMetaDataBeat = 1,
};
class TRTPMetaDataBlock {
public:
TRTPMetaDataBlock(TRTPMetaDataTypeID typeId, uint16_t item_length)
: mTypeId(typeId)
, mItemLength(item_length) {}
void writeBlockHead(uint8_t*& buf) const;
virtual void write(uint8_t*& buf) const = 0;
virtual ~TRTPMetaDataBlock() {}
TRTPMetaDataTypeID mTypeId;
uint16_t mItemLength;
};
// TRTPMetaDataPacket contains multiple TRTPMetaDataBlocks of different types
class TRTPMetaDataPacket : public TRTPPacket {
public:
TRTPMetaDataPacket()
: TRTPPacket(kHeaderTypeMetaData) {}
void append(TRTPMetaDataBlock* block);
virtual bool pack();
virtual ~TRTPMetaDataPacket();
protected:
Vector<TRTPMetaDataBlock*> mBlocks;
};
} // namespace android
#endif // __AAH_TX_PLAYER_H__

View File

@@ -27,7 +27,6 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <utils/Timers.h>
@@ -114,6 +113,10 @@ AAH_TXPlayer::~AAH_TXPlayer() {
if (mQueueStarted) {
mQueue.stop();
}
if (mAudioProcessor != NULL) {
mAudioProcessor->shutdown();
}
omx_.disconnect();
reset();
}
@@ -140,7 +143,7 @@ status_t AAH_TXPlayer::initCheck() {
return res;
}
return OK;
return omx_.connect();
}
status_t AAH_TXPlayer::setDataSource(
@@ -486,6 +489,25 @@ void AAH_TXPlayer::onPrepareAsyncEvent() {
return;
}
if (mAudioProcessor == NULL) {
mAudioProcessor = new AAH_AudioProcessor(omx_);
if (mAudioProcessor == NULL || OK != mAudioProcessor->initCheck()) {
LOGE("%s failed to initialize audio processor",
__PRETTY_FUNCTION__);
mAudioProcessor = NULL;
abortPrepare_l(err);
return;
}
}
if (mEnergyProcessor == NULL) {
mEnergyProcessor = new BeatDetectionAlgorithm();
if (mEnergyProcessor == NULL) {
LOGE("%s failed to initialize audio processor",
__PRETTY_FUNCTION__);
}
mAudioProcessor->setAlgorithm(mEnergyProcessor);
}
mFlags |= PREPARING_CONNECTED;
if (mCachedSource != NULL) {
@@ -642,6 +664,10 @@ void AAH_TXPlayer::sendEOS_l() {
void AAH_TXPlayer::sendFlush_l() {
if (mAAH_TXGroup != NULL) {
if (mEnergyProcessor != NULL) {
mEnergyProcessor->flush();
}
sendMetaPacket_l(true);
sp<TRTPControlPacket> packet = new TRTPControlPacket();
if (packet != NULL) {
packet->setCommandID(TRTPControlPacket::kCommandFlush);
@@ -1349,8 +1375,26 @@ void AAH_TXPlayer::onPumpAudio() {
}
}
mediaBuffer->release();
if (mAudioProcessor != NULL) {
// FIXME init() should detect metadata change
status_t res = mAudioProcessor->init(mAudioFormat);
if (OK != res) {
LOGE("Failed to init decoder (res = %d)", res);
cleanupDecoder();
mediaBuffer->release();
} else {
// decoder_pump steals the reference, so no need to call release()
res = mAudioProcessor->queueForDecode(mediaBuffer);
if (OK != res) {
LOGE("Failed in queueForDecode (res = %d)", res);
cleanupDecoder();
mediaBuffer->release();
}
}
sendMetaPacket_l(false);
}
bailout:
mLastQueuedMediaTimePTSValid = true;
mLastQueuedMediaTimePTS = mediaTimeUs;
}
@@ -1371,6 +1415,20 @@ void AAH_TXPlayer::onPumpAudio() {
}
}
void AAH_TXPlayer::sendMetaPacket_l(bool flushOut) {
// collect metadata from different components and send TRTPMetaDataPacket
// currently only support beat processor
if (mEnergyProcessor == NULL) {
return;
}
TRTPMetaDataBlock* block = mEnergyProcessor->collectMetaData(flushOut);
if (block != NULL) {
sp<TRTPMetaDataPacket> packet = new TRTPMetaDataPacket();
packet->append(block);
sendPacket_l(packet);
}
}
void AAH_TXPlayer::sendPacket_l(const sp<TRTPPacket>& packet) {
CHECK(mAAH_TXGroup != NULL);
CHECK(packet != NULL);
@@ -1378,4 +1436,11 @@ void AAH_TXPlayer::sendPacket_l(const sp<TRTPPacket>& packet) {
mAAH_TXGroup->sendPacket(packet);
}
void AAH_TXPlayer::cleanupDecoder() {
if (mAudioProcessor != NULL) {
mAudioProcessor->shutdown();
mAudioProcessor = NULL;
}
}
} // namespace android

View File

@@ -24,11 +24,16 @@
#include <media/MediaPlayerInterface.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXClient.h>
#include <utils/LinearTransform.h>
#include <utils/String8.h>
#include <utils/threads.h>
#include "aah_tx_group.h"
#include "aah_audio_processor.h"
namespace android {
@@ -109,6 +114,7 @@ class AAH_TXPlayer : public MediaPlayerHWInterface {
void updateClockTransform_l(bool pause);
void sendEOS_l();
void sendFlush_l();
void sendMetaPacket_l(bool flushOut);
void sendTSUpdateNop_l();
void cancelPlayerEvents(bool keepBufferingGoing = false);
void reset_l();
@@ -122,9 +128,14 @@ class AAH_TXPlayer : public MediaPlayerHWInterface {
void onBufferingUpdate();
void onPumpAudio();
void sendPacket_l(const sp<TRTPPacket>& packet);
void cleanupDecoder();
Mutex mLock;
OMXClient omx_;
sp<AAH_AudioProcessor> mAudioProcessor;
sp<AudioAlgorithm> mEnergyProcessor;
TimedEventQueue mQueue;
bool mQueueStarted;

View File

@@ -0,0 +1,28 @@
# Copyright 2012, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

# builds the separate AAHMetaDataService.jar as part of 'make dist'
LOCAL_MODULE := AAHMetaDataService
LOCAL_MODULE_TAGS := optional
# Build against the public SDK (API level 8) so the jar depends only on
# public platform APIs.
LOCAL_SDK_VERSION := 8
# layers.txt declares package-layering rules enforced at build time.
LOCAL_JAVA_LAYERS_FILE := layers.txt
# Compile all Java sources and generate bindings for all .aidl files.
LOCAL_SRC_FILES := \
    $(call all-java-files-under, src) \
    $(call all-Iaidl-files-under, src)

include $(BUILD_STATIC_JAVA_LIBRARY)

View File

@@ -0,0 +1,8 @@
# Dependency file for use by layers.py
# These packages are high-level and no other packages may import them.
android.media.libaah
# There are no legacy packages, but if there were, they would be listed like this.
# They would be allowed to freely import and be imported.
# -com.example.some.legacy.package

View File

@@ -0,0 +1,57 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media.libaah;
import android.util.Log;
/**
 * Callback interface for receiving beat metadata extracted from the
 * currently playing track.
 */
public interface BeatListener {

    /**
     * Value holder for a single beat record.  Fields are volatile so a
     * reader on another thread observes whole-field updates.
     */
    public static class BeatInfo {
        public volatile long timestamp;
        public volatile float beatValue;
        public volatile float smoothedBeatValue;
        public volatile int sequenceNumber;

        public BeatInfo(long t, float b, float sb, int s) {
            this.timestamp = t;
            this.beatValue = b;
            this.smoothedBeatValue = sb;
            this.sequenceNumber = s;
        }

        /** Creates a record with all fields zeroed. */
        public BeatInfo() {
            this(0, 0, 0, 0);
        }

        /** Copy constructor; see {@link #copyFrom}. */
        public BeatInfo(BeatInfo other) {
            copyFrom(other);
        }

        /** Overwrites every field of this record with {@code other}'s. */
        public void copyFrom(BeatInfo other) {
            this.timestamp = other.timestamp;
            this.beatValue = other.beatValue;
            this.smoothedBeatValue = other.smoothedBeatValue;
            this.sequenceNumber = other.sequenceNumber;
        }
    };

    /** Delivers {@code count} valid entries from {@code info}. */
    void onBeat(short count, BeatInfo[] info);

    /** Signals that previously delivered beats should be discarded. */
    void onFlush();
}

View File

@@ -0,0 +1,125 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media.libaah;
import android.util.Log;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import java.lang.ref.WeakReference;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
import java.util.ArrayList;
/**
* The MetaDataService class enables application to retrieve metadata e.g.
* beat information of current played track
*/
public class MetaDataService {
    protected final static String TAG = "AAHMetaData-JAVA";

    // Registered beat listeners; every access is synchronized on "this".
    protected List<BeatListener> mBeatListeners;

    // Scratch BeatInfo array reused across processBeat() calls to avoid
    // per-packet allocation; grown on demand, never shrunk.
    // NOTE(review): read/written without synchronization -- assumes all
    // metadata callbacks arrive on a single native thread; confirm.
    protected BeatListener.BeatInfo[] mCachedBeats;

    // Metadata item type id for beat data (must match the native sender).
    protected static final int TYPEID_BEAT = 1;

    // Serialized size of one beat record in bytes:
    // long timestamp (8) + float beatValue (4) +
    // float smoothedBeatValue (4) + int sequenceNumber (4).
    public static final int BEAT_FIXED_LENGTH = 20;

    protected MetaDataService() {
        mBeatListeners = new ArrayList<BeatListener>();
        mCachedBeats = null;
    }

    /**
     * Creates the concrete service implementation.  The RTP-backed subclass
     * is loaded reflectively so this base class carries no compile-time
     * dependency on it.
     */
    public static MetaDataService create()
            throws ClassNotFoundException, IllegalAccessException, InstantiationException {
        Class cls = Class.forName("android.media.libaah.MetaDataServiceRtp");
        return (MetaDataService) cls.newInstance();
    }

    /**
     * release native resources, it's suggested to call this instead of relying
     * on java garbage collection
     */
    public void release() {
    }

    // Starts metadata delivery; no-op in the base class.
    public void enable() {
    }

    // Stops metadata delivery; no-op in the base class.
    public void disable() {
    }

    // Registers a listener; duplicates are ignored.
    public synchronized void addBeatListener(BeatListener aahBeatListener) {
        if (!mBeatListeners.contains(aahBeatListener)) {
            mBeatListeners.add(aahBeatListener);
        }
    }

    public synchronized void removeBeatListener(BeatListener aahBeatListener) {
        mBeatListeners.remove(aahBeatListener);
    }

    /**
     * Parses one beat metadata item and dispatches it to all listeners.
     * Wire format: a big-endian short record count followed by "count"
     * records of BEAT_FIXED_LENGTH bytes each.  Items whose advertised
     * count disagrees with item_len are silently dropped.
     */
    protected void processBeat(int item_len, byte[] buffer) {
        if (buffer == null) {
            return;
        }
        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer, 0, item_len);
        // buffer is in network order (big endian)
        byteBuffer.order(ByteOrder.BIG_ENDIAN);
        short count = byteBuffer.getShort();
        if (count * BEAT_FIXED_LENGTH + 2 != item_len ) {
            // malformed item: count and payload size disagree
            return;
        }
        if (mCachedBeats == null || mCachedBeats.length < count) {
            // Grow the scratch array, carrying over already-allocated entries.
            BeatListener.BeatInfo[] beats = new BeatListener.BeatInfo[count];
            int i = 0;
            if (mCachedBeats != null) {
                for (; i < mCachedBeats.length; i++) {
                    beats[i] = mCachedBeats[i];
                }
            }
            for (; i < count; i++) {
                beats[i] = new BeatListener.BeatInfo();
            }
            mCachedBeats = beats;
        }
        for (int idx = 0; idx < count; idx++) {
            mCachedBeats[idx].timestamp = byteBuffer.getLong();
            mCachedBeats[idx].beatValue = byteBuffer.getFloat();
            mCachedBeats[idx].smoothedBeatValue = byteBuffer.getFloat();
            mCachedBeats[idx].sequenceNumber = byteBuffer.getInt();
        }
        synchronized (this) {
            for (int i = 0, c = mBeatListeners.size(); i < c; i++) {
                // Only the first "count" entries of mCachedBeats are valid.
                mBeatListeners.get(i).onBeat(count, mCachedBeats);
            }
        }
    }

    // Tells every listener that previously delivered beats are stale.
    protected void flush() {
        synchronized (this) {
            for (int i = 0, c = mBeatListeners.size(); i < c; i++) {
                mBeatListeners.get(i).onFlush();
            }
        }
    }
}

View File

@@ -0,0 +1,163 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media.libaah;
import android.util.Log;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import java.lang.ref.WeakReference;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
/*
* The implementation reads data from libaah_rtp
*/
public class MetaDataServiceRtp extends MetaDataService {
    static {
        System.loadLibrary("aah_rtp");
    }

    private final static String TAG = "AAHMetaData-JAVA";

    /**
     * State of a MetaDataService object that was not successfully
     * initialized upon creation, or that has already been released.
     */
    private static final int STATE_UNINITIALIZED = 0;

    /**
     * State of a MetaDataService object that is ready to be used.
     */
    private static final int STATE_INITIALIZED = 1;

    // -------------------------------------------------------------------------
    // Member variables
    // --------------------

    /**
     * Indicates the state of the MetaDataService instance; guarded by "this".
     */
    private int mState = STATE_UNINITIALIZED;

    // Opaque handle owned by the native layer (set/read only through JNI).
    private int mCookie;

    /*
     * Successful operation.
     */
    private static final int SUCCESS = 0;
    /*
     * Unspecified error.
     */
    private static final int ERROR = -1;
    /*
     * Internal operation status. Not returned by any method.
     */
    private static final int ALREADY_EXISTS = -2;
    /*
     * Operation failed due to bad object initialization.
     */
    private static final int ERROR_NO_INIT = -3;
    /*
     * Operation failed due to bad parameter value.
     */
    private static final int ERROR_BAD_VALUE = -4;
    /*
     * Operation failed because it was requested in wrong state.
     */
    private static final int ERROR_INVALID_OPERATION = -5;
    /*
     * Operation failed due to lack of memory.
     */
    private static final int ERROR_NO_MEMORY = -6;
    /*
     * Operation failed due to dead remote object.
     */
    private static final int ERROR_DEAD_OBJECT = -7;

    /*
     * only called by MetaDataService.create()
     */
    MetaDataServiceRtp() {
        mState = STATE_UNINITIALIZED;
        // native initialization
        int result = native_setup(new WeakReference<MetaDataServiceRtp>(this));
        if (result != SUCCESS) {
            Log.e(TAG, "Error code " + result + " when initializing.");
        }
        // Mark initialized even on failure so release()/finalize() still give
        // the native layer one chance to tear down partially-built state.
        // NOTE(review): confirm native_setup leaves anything to clean up when
        // it fails; if not, this should only be set when result == SUCCESS.
        mState = STATE_INITIALIZED;
    }

    @Override
    protected void finalize() {
        releaseNative();
    }

    /** Releases native resources; preferred over relying on finalization. */
    @Override
    public void release() {
        releaseNative();
    }

    /**
     * Idempotent native teardown.  Both release() and the GC's finalize()
     * funnel through here so native_finalize() runs at most once.  The
     * original code called native_finalize() unconditionally from both
     * paths, so an explicit release() followed by garbage collection
     * invoked it twice -- a potential native double-free.
     */
    private void releaseNative() {
        boolean doRelease;
        synchronized (this) {
            doRelease = (mState == STATE_INITIALIZED);
            mState = STATE_UNINITIALIZED;
        }
        if (doRelease) {
            native_finalize();
        }
    }

    /** Starts metadata delivery from the native RTP stack. */
    @Override
    public void enable() {
        native_enable();
    }

    /** Stops metadata delivery from the native RTP stack. */
    @Override
    public void disable() {
        native_disable();
    }

    private native final int native_setup(Object metadataservice_this);
    private native final void native_enable();
    private native final void native_disable();
    private native final void native_finalize();

    // ---------------------------------------------------------
    // Java methods called from the native side
    // --------------------

    /**
     * Routes one metadata item from native code to the target service
     * instance (held weakly so the JNI layer cannot keep it alive).
     */
    @SuppressWarnings("unused")
    private static void postMetaDataFromNative(Object s_ref,
            short type, int item_len, byte[] buffer) {
        MetaDataService service =
                (MetaDataService) ((WeakReference) s_ref).get();
        if (service == null) {
            return;
        }
        switch (type) {
        case TYPEID_BEAT:
            service.processBeat(item_len, buffer);
            break;
        default:
            Log.w(TAG, "unknown type metadata type " + type);
            break;
        }
    }

    /** Forwards a flush notification from native code to the service. */
    @SuppressWarnings("unused")
    private static void flushFromNative(Object s_ref) {
        MetaDataService service =
                (MetaDataService) ((WeakReference) s_ref).get();
        if (service == null) {
            return;
        }
        service.flush();
    }
}

View File

@@ -13,11 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <utils/SystemClock.h>
#include "utils.h"
namespace android {
// check ICommonTime every 60 seconds, common to local difference
// shouldn't drift a lot
#define CHECK_CC_INTERNAL 60000
void Timeout::setTimeout(int msec) {
if (msec < 0) {
mSystemEndTime = 0;
@@ -46,4 +51,61 @@ int Timeout::msecTillTimeout(nsecs_t nowTime) {
return static_cast<int>(delta);
}
// Builds the static part of the common->system mapping: a slope converting
// common-time ticks to milliseconds (1000 / common clock frequency).  The
// zero points are filled in lazily by getCommonToSystem(); mLastTs == -1
// marks "never anchored".
// NOTE(review): the status of getCommonFreq() is ignored -- if the call
// fails, mCommonFreq (and thus the denominator) is unspecified.  Confirm the
// common time service is guaranteed to be available at construction time.
CommonToSystemTransform::CommonToSystemTransform()
    : mLastTs(-1) {
    mCCHelper.getCommonFreq(&mCommonFreq);
    mCommonToSystem.a_to_b_numer = 1000;
    mCommonToSystem.a_to_b_denom = mCommonFreq;
    // Reduce to lowest terms so the transform avoids overflow in use.
    LinearTransform::reduce(&mCommonToSystem.a_to_b_numer,
                            &mCommonToSystem.a_to_b_denom);
}
// Returns the common->system transform, re-anchoring its zero points from
// the common time service at most once per CHECK_CC_INTERNAL msec (the
// common to local difference should not drift much between refreshes).
const LinearTransform& CommonToSystemTransform::getCommonToSystem() {
    int64_t st = elapsedRealtime();
    if (mLastTs == -1 || st - mLastTs > CHECK_CC_INTERNAL) {
        int64_t ct;
        // Only adopt the new anchor when the fetch succeeds.  The original
        // code ignored the status and used ct uninitialized on failure; now
        // we keep the previous mapping and retry on the next call.
        if (mCCHelper.getCommonTime(&ct) == OK) {
            mCommonToSystem.a_zero = ct;
            mCommonToSystem.b_zero = st;
            mLastTs = st;
        }
    }
    return mCommonToSystem;
}
// Starts with no media->common mapping installed; mediaToSystem() returns
// false until setMediaToCommonTransform() is called.
MediaToSystemTransform::MediaToSystemTransform()
    : mMediaToCommonValid(false) {
}
// Snapshots the (possibly re-anchored) common->system transform so that
// subsequent mediaToSystem() calls all use one consistent mapping.
// Uses assignment instead of the original memcpy: LinearTransform is a
// plain struct, so assignment copies the same bytes while staying type-safe
// and immune to sizeof mismatches.
void MediaToSystemTransform::prepareCommonToSystem() {
    mCommonToSystem = mCommonToSystemTrans.getCommonToSystem();
}
// Installs the media->common mapping and marks it usable so that
// mediaToSystem() can start converting timestamps.
void MediaToSystemTransform::setMediaToCommonTransform(
        const LinearTransform& t) {
    mMediaToCommon = t;
    mMediaToCommonValid = true;
}
// Converts *ts from media time to system time via the common timeline.
// Returns false -- leaving *ts untouched -- if no media->common mapping has
// been set or either transform step fails.
bool MediaToSystemTransform::mediaToSystem(int64_t* ts) {
    if (!mMediaToCommonValid) {
        return false;
    }

    // TODO: this is not efficient, we could combine two transform into one
    // during prepareCommonToSystem() and setMediaToCommonTransform()
    int64_t common_time;
    if (!mMediaToCommon.doForwardTransform(*ts, &common_time)) {
        return false;
    }

    int64_t system_time;
    if (!mCommonToSystem.doForwardTransform(common_time, &system_time)) {
        return false;
    }

    *ts = system_time;
    return true;
}
} // namespace android

View File

@@ -23,7 +23,13 @@
#include <netinet/in.h>
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <utils/Timers.h>
#include <utils/threads.h>
#include <utils/LinearTransform.h>
#include <common_time/cc_helper.h>
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define IP_PRINTF_HELPER(a) ((a >> 24) & 0xFF), ((a >> 16) & 0xFF), \
((a >> 8) & 0xFF), (a & 0xFF)
@@ -97,6 +103,100 @@ inline void clearEventFD(int fd) {
}
}
// Thread-safe, fixed-capacity circular (ring) array holding up to LEN
// elements of type T.  Every operation acquires the internal mutex, so
// concurrent producers and consumers are safe; elements are copied by value.
template<typename T, uint32_t LEN>
class CircularArray {
  public:
    CircularArray()
        : mReadIndex(0)
        , mWriteIndex(0)
        , mLength(0) {}

    // Appends t.  Returns false (dropping t) when the array is full.
    bool write(const T& t) {
        Mutex::Autolock autolock(&mLock);
        if (mLength < LEN) {
            mData[mWriteIndex] = t;
            mWriteIndex = (mWriteIndex + 1) % LEN;
            mLength++;
            return true;
        }
        return false;
    }

    // Appends t unconditionally; when full, the oldest element is
    // overwritten (the read index is advanced past it).
    void writeAllowOverflow(const T& t) {
        Mutex::Autolock autolock(&mLock);
        mData[mWriteIndex] = t;
        mWriteIndex = (mWriteIndex + 1) % LEN;
        if (mLength < LEN) {
            mLength++;
        } else {
            mReadIndex = (mReadIndex + 1) % LEN;
        }
    }

    // Pops the oldest element into *t.  Returns false when empty.
    bool read(T* t) {
        CHECK(t != NULL);
        Mutex::Autolock autolock(&mLock);
        if (mLength > 0) {
            *t = mData[mReadIndex];
            mReadIndex = (mReadIndex + 1) % LEN;
            mLength--;
            return true;
        }
        return false;
    }

    // Pops up to count elements into t; no minimum-availability threshold.
    uint32_t readBulk(T* t, uint32_t count) {
        return readBulk(t, 0, count);
    }

    // Pops up to count elements into t, but only if at least mincount
    // elements are available (and mincount <= count); otherwise pops
    // nothing and returns 0.  Returns the number of elements copied.
    uint32_t readBulk(T* t, uint32_t mincount, uint32_t count) {
        CHECK(t != NULL);
        Mutex::Autolock autolock(&mLock);
        if (mincount > count) {
            // illegal argument
            return 0;
        }
        if (mincount > mLength) {
            // not enough items
            return 0;
        }
        uint32_t i;
        for (i = 0; i < count && mLength; i++) {
            *t = mData[mReadIndex];
            mReadIndex = (mReadIndex + 1) % LEN;
            mLength--;
            t++;
        }
        return i;
    }

  private:
    Mutex mLock;              // guards every field below
    T mData[LEN];             // backing storage
    uint32_t mReadIndex;      // index of the oldest element
    uint32_t mWriteIndex;     // index of the next write slot
    uint32_t mLength;         // number of valid elements (0..LEN)
};
// Maintains a cached LinearTransform mapping common (shared media clock)
// time to local system time in milliseconds.  The slope is fixed at
// construction from the common clock frequency; the zero anchors are
// refreshed lazily by getCommonToSystem().
class CommonToSystemTransform {
  public:
    CommonToSystemTransform();
    // Returns the transform, periodically re-anchoring it against the
    // common time service (see CHECK_CC_INTERNAL in the implementation).
    const LinearTransform& getCommonToSystem();

  private:
    LinearTransform mCommonToSystem;  // cached common->system transform
    uint64_t mCommonFreq;             // common clock frequency (ticks/sec)
    CCHelper mCCHelper;               // access to the common time service
    int64_t mLastTs;                  // system time of last re-anchor; -1 = never
};
// Composes a media->common transform (supplied by the caller) with a
// common->system transform (snapshotted via prepareCommonToSystem()) to
// convert media timestamps into local system time.
class MediaToSystemTransform {
  public:
    MediaToSystemTransform();
    // Installs the media->common mapping; until called, mediaToSystem()
    // always fails.
    void setMediaToCommonTransform(const LinearTransform&);
    // Snapshots the current common->system mapping for subsequent
    // conversions.
    void prepareCommonToSystem();
    // Converts *ts in place from media time to system time; returns false
    // (leaving *ts unchanged) if no mapping is set or a transform fails.
    bool mediaToSystem(int64_t* ts);

  private:
    bool mMediaToCommonValid;                    // set by setMediaToCommonTransform()
    LinearTransform mMediaToCommon;              // media -> common mapping
    LinearTransform mCommonToSystem;             // snapshot taken by prepareCommonToSystem()
    CommonToSystemTransform mCommonToSystemTrans; // source of common->system snapshots
};
} // namespace android
#endif // __UTILS_H__