Music visualizer support hack.

This currently assumes 44.1 kHz stereo audio (it won't crash on other formats, but it won't give correct results for them either). It links statically with libspeex to obtain FFT data, which increases the size of libmedia by about 45 kB.
This commit is contained in:
Marco Nelissen
2009-09-20 10:42:13 -07:00
parent 52cde7279b
commit c39d2e3c01
9 changed files with 243 additions and 4 deletions

View File

@@ -43,6 +43,10 @@ public:
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
virtual sp<IOMX> createOMX() = 0;
// Take a peek at currently playing audio, for visualization purposes.
// This returns a buffer of 16 bit mono PCM data, or NULL if no visualization buffer is currently available.
virtual sp<IMemory> snoop() = 0;
};
// ----------------------------------------------------------------------------

View File

@@ -153,6 +153,7 @@ public:
void notify(int msg, int ext1, int ext2);
static sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
static sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
static int snoop(short *data, int len, int kind);
status_t invoke(const Parcel& request, Parcel *reply);
status_t setMetadataFilter(const Parcel& filter);
status_t getMetadata(bool update_only, bool apply_filter, Parcel *metadata);

View File

@@ -1494,4 +1494,9 @@ public class MediaPlayer
}
private OnInfoListener mOnInfoListener;
/**
* @hide
*/
public native static int snoop(short [] outData, int kind);
}

View File

@@ -594,6 +594,18 @@ android_media_MediaPlayer_native_finalize(JNIEnv *env, jobject thiz)
android_media_MediaPlayer_release(env, thiz);
}
// JNI bridge for MediaPlayer.snoop(short[], int): fills 'data' with either
// PCM samples (kind == 0) or FFT data (kind == 1) and returns the number of
// samples written, which may be 0.
static jint
android_media_MediaPlayer_snoop(JNIEnv* env, jobject thiz, jobject data, jint kind) {
    // Query the array length BEFORE entering the critical region: the JNI
    // spec forbids calling any other JNI function between
    // GetPrimitiveArrayCritical and ReleasePrimitiveArrayCritical.
    jsize len = env->GetArrayLength((jarray)data);
    int ret = 0;
    jshort* ar = (jshort*)env->GetPrimitiveArrayCritical((jarray)data, 0);
    if (ar) {
        // NOTE(review): MediaPlayer::snoop() performs a binder transaction,
        // and blocking inside a critical region can stall the GC; consider
        // Get/ReleaseShortArrayElements instead — TODO confirm.
        ret = MediaPlayer::snoop(ar, len, kind);
        env->ReleasePrimitiveArrayCritical((jarray)data, ar, 0);
    }
    return ret;
}
// ----------------------------------------------------------------------------
static JNINativeMethod gMethods[] = {
@@ -624,6 +636,7 @@ static JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_MediaPlayer_native_init},
{"native_setup", "(Ljava/lang/Object;)V", (void *)android_media_MediaPlayer_native_setup},
{"native_finalize", "()V", (void *)android_media_MediaPlayer_native_finalize},
{"snoop", "([SI)I", (void *)android_media_MediaPlayer_snoop},
};
static const char* const kClassPathName = "android/media/MediaPlayer";

View File

@@ -37,8 +37,12 @@ LOCAL_SHARED_LIBRARIES += libdl
endif
LOCAL_C_INCLUDES := \
$(JNI_H_INCLUDE) \
$(call include-path-for, graphics corecg) \
$(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include
$(JNI_H_INCLUDE) \
$(call include-path-for, graphics corecg) \
$(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \
external/speex/include \
external/speex/libspeex
LOCAL_STATIC_LIBRARIES := libspeex
include $(BUILD_SHARED_LIBRARY)

View File

@@ -36,6 +36,7 @@ enum {
CREATE_MEDIA_RECORDER,
CREATE_METADATA_RETRIEVER,
CREATE_OMX,
SNOOP
};
class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@@ -114,6 +115,14 @@ public:
return interface_cast<IMemory>(reply.readStrongBinder());
}
// Proxy side of the visualization hook: ask the media player service for
// the most recent visualization buffer (16 bit mono PCM), or NULL when
// none is available.
virtual sp<IMemory> snoop()
{
    Parcel request, response;
    request.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
    remote()->transact(SNOOP, request, &response);
    sp<IBinder> binder = response.readStrongBinder();
    return interface_cast<IMemory>(binder);
}
virtual sp<IOMX> createOMX() {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@@ -178,6 +187,12 @@ status_t BnMediaPlayerService::onTransact(
reply->writeStrongBinder(player->asBinder());
return NO_ERROR;
} break;
case SNOOP: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<IMemory> snooped_audio = snoop();
reply->writeStrongBinder(snooped_audio->asBinder());
return NO_ERROR;
} break;
case CREATE_MEDIA_RECORDER: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
pid_t pid = data.readInt32();

View File

@@ -690,4 +690,61 @@ MediaPlayer::DeathNotifier::~DeathNotifier()
}
extern "C" {
#define FLOATING_POINT 1
#include "fftwrap.h"
}
static void *ffttable = NULL;
// peeks at the audio data and fills 'data' with the requested kind
// (currently kind=0 returns mono 16 bit PCM data, and kind=1 returns
// 256 point FFT data). Return value is number of samples returned,
// which may be 0.
/*static*/ int MediaPlayer::snoop(short* data, int len, int kind) {
    // Robustness: nothing to do for a missing or empty destination buffer.
    if (data == NULL || len <= 0) {
        return 0;
    }
    sp<IMemory> p;
    const sp<IMediaPlayerService>& service = getMediaPlayerService();
    if (service != 0) {
        // Take a peek at the waveform. The returned data consists of 16 bit mono PCM data.
        p = service->snoop();

        if (p == NULL) {
            return 0;
        }

        if (kind == 0) { // return waveform data
            int plen = p->size();
            len *= 2; // number of shorts -> number of bytes
            short *src = (short*) p->pointer();
            if (plen > len) {
                plen = len;
            }
            memcpy(data, src, plen);
            return plen / sizeof(short); // return number of samples
        } else if (kind == 1) { // return 256 point FFT data
            // Guard against a snoop buffer smaller than the FFT input size:
            // reading 512 shorts from a shorter buffer would overrun it.
            if (p->size() < 512 * sizeof(short)) {
                return 0;
            }
            // TODO: use a more efficient FFT
            // Right now this uses the speex library, which is compiled to do a float FFT
            if (!ffttable) {
                // Cached for the life of the process; never freed.
                ffttable = spx_fft_init(512);
            }
            short *usrc = (short*) p->pointer();
            float fsrc[512];
            for (int i = 0; i < 512; i++)
                fsrc[i] = usrc[i];
            float fdst[512];
            spx_fft_float(ffttable, fsrc, fdst);
            if (len > 512) {
                len = 512;
            }
            len /= 2; // only half the output data is valid
            for (int i = 0; i < len; i++)
                data[i] = fdst[i];
            return len;
        }
    } else {
        LOGE("Unable to locate media service");
    }
    return 0;
}
}; // namespace android

View File

@@ -41,6 +41,7 @@
#include <binder/MemoryBase.h>
#include <utils/Errors.h> // for status_t
#include <utils/String8.h>
#include <utils/SystemClock.h>
#include <utils/Vector.h>
#include <cutils/properties.h>
@@ -1185,6 +1186,117 @@ Exit:
return mem;
}
/*
* Avert your eyes, ugly hack ahead.
* The following is to support music visualizations.
*/
static const int NUMVIZBUF = 32;      // number of buffers in the visualization ring
static const int VIZBUFFRAMES = 1024; // mono samples stored per buffer
// Total playback time spanned by the ring, assuming 44100 Hz (see file header).
static const int TOTALBUFTIMEMSEC = NUMVIZBUF * VIZBUFFRAMES * 1000 / 44100;
static bool gotMem = false;              // true once the ring has been allocated
static sp<MemoryBase> mem[NUMVIZBUF];    // the visualization buffers themselves
static uint64_t timeStamp[NUMVIZBUF];    // estimated play time for each buffer
static uint64_t lastReadTime;            // last time a client requested a buffer
static uint64_t lastWriteTime;           // last time the audio output stored buffers
static int writeIdx = 0;                 // next ring slot to be written
// Lazily allocate the ring of visualization buffers on first use.
// Safe to call repeatedly; does nothing once the buffers exist.
static void allocVizBufs() {
    if (gotMem) {
        return;
    }
    for (int idx = 0; idx < NUMVIZBUF; idx++) {
        sp<MemoryHeapBase> heap = new MemoryHeapBase(VIZBUFFRAMES * 2, 0, "snooper");
        mem[idx] = new MemoryBase(heap, 0, heap->getSize());
        timeStamp[idx] = 0;
    }
    gotMem = true;
}
/*
 * Get a buffer of audio data that is about to be played.
 * We don't synchronize this because in practice the writer
 * is ahead of the reader, and even if we did happen to catch
 * a buffer while it's being written, it's just a visualization,
 * so no harm done.
 */
static sp<MemoryBase> getVizBuffer() {
    allocVizBufs();

    lastReadTime = uptimeMillis() + 100; // account for renderer delay (we shouldn't be doing this here)

    // if there is no recent buffer (yet), just return empty handed
    if (lastWriteTime + TOTALBUFTIMEMSEC < lastReadTime) {
        //LOGI("@@@@ no audio data to look at yet");
        return NULL;
    }

    // Pick the buffer whose timestamp is closest to the current read time.
    int closestIdx = -1;
    // uint64_t so a large delta isn't truncated when stored below
    // (the old uint32_t could wrap and select the wrong buffer).
    uint64_t closestTime = 0x7ffffff;

    for (int i = 0; i < NUMVIZBUF; i++) {
        uint64_t tsi = timeStamp[i];
        uint64_t diff = tsi > lastReadTime ? tsi - lastReadTime : lastReadTime - tsi;
        if (diff < closestTime) {
            closestIdx = i;
            closestTime = diff;
        }
    }

    if (closestIdx >= 0) {
        //LOGI("@@@ return buffer %d, %d/%d", closestIdx, uint32_t(lastReadTime), uint32_t(timeStamp[closestIdx]));
        return mem[closestIdx];
    }

    // we won't get here, since we either bailed out early, or got a buffer
    LOGD("Didn't expect to be here");
    return NULL;
}
// Copy one VIZBUFFRAMES-sized chunk of 16 bit stereo audio into the current
// ring slot, downmixed to mono, and stamp it with its estimated play time.
// 'len' is unused: the caller guarantees a full chunk is available.
static void storeVizBuf(const void *data, int len, uint64_t time) {
    const short *in = (const short*)data;
    short *out = (short*)mem[writeIdx]->pointer();

    int frames = VIZBUFFRAMES;
    while (frames-- > 0) {
        // Degrade quality by mixing to mono and clearing the lowest 3 bits.
        // This should still be good enough for a visualization
        int mono = (int(in[0]) + int(in[1])) >> 1;
        *out++ = mono & ~0x7;
        in += 2;
    }

    timeStamp[writeIdx] = time;
    writeIdx = (writeIdx + 1) % NUMVIZBUF; // advance and wrap the ring index
}
// Slice an outgoing 16 bit stereo buffer into VIZBUFFRAMES-frame chunks and
// store each one with the (approximate) real time at which it will be heard.
// Assumes 44100 Hz stereo (see file header). Any trailing partial chunk is
// dropped. (The unused 'startTime' local was removed.)
static void makeVizBuffers(const char *data, int len, uint64_t time) {
    allocVizBufs();

    const int frameSize = 4; // 16 bit stereo sample is 4 bytes
    while (len >= VIZBUFFRAMES * frameSize) {
        storeVizBuf(data, len, time);
        data += VIZBUFFRAMES * frameSize;
        len -= VIZBUFFRAMES * frameSize;
        time += 1000 * VIZBUFFRAMES / 44100;
    }
}
// Binder entry point for visualization support: hand back the most recently
// queued visualization buffer (16 bit mono PCM), or NULL if none is recent.
sp<IMemory> MediaPlayerService::snoop()
{
    return getVizBuffer();
}
#undef LOG_TAG
#define LOG_TAG "AudioSink"
MediaPlayerService::AudioOutput::AudioOutput()
@@ -1196,6 +1308,7 @@ MediaPlayerService::AudioOutput::AudioOutput()
mRightVolume = 1.0;
mLatency = 0;
mMsecsPerFrame = 0;
mNumFramesWritten = 0;
setMinBufferCount();
}
@@ -1327,6 +1440,7 @@ void MediaPlayerService::AudioOutput::start()
if (mTrack) {
mTrack->setVolume(mLeftVolume, mRightVolume);
mTrack->start();
mTrack->getPosition(&mNumFramesWritten);
}
}
@@ -1335,7 +1449,29 @@ ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)
LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
//LOGV("write(%p, %u)", buffer, size);
if (mTrack) return mTrack->write(buffer, size);
if (mTrack) {
// Only make visualization buffers if anyone recently requested visualization data
uint64_t now = uptimeMillis();
if (lastReadTime + TOTALBUFTIMEMSEC >= now) {
// Based on the current play counter, the number of frames written and
// the current real time we can calculate the approximate real start
// time of the buffer we're about to write.
uint32_t pos;
mTrack->getPosition(&pos);
// we're writing ahead by this many frames:
int ahead = mNumFramesWritten - pos;
//LOGI("@@@ written: %d, playpos: %d, latency: %d", mNumFramesWritten, pos, mTrack->latency());
// which is this many milliseconds, assuming 44100 Hz:
ahead /= 44;
makeVizBuffers((const char*)buffer, size, now + ahead + mTrack->latency());
lastWriteTime = now;
}
ssize_t ret = mTrack->write(buffer, size);
mNumFramesWritten += ret / 4; // assume 16 bit stereo
return ret;
}
return NO_INIT;
}
@@ -1343,6 +1479,7 @@ void MediaPlayerService::AudioOutput::stop()
{
LOGV("stop");
if (mTrack) mTrack->stop();
lastWriteTime = 0;
}
void MediaPlayerService::AudioOutput::flush()

View File

@@ -112,6 +112,8 @@ class MediaPlayerService : public BnMediaPlayerService
static bool mIsOnEmulator;
static int mMinBufferCount; // 12 for emulator; otherwise 4
public: // visualization hack support
uint32_t mNumFramesWritten;
};
class AudioCache : public MediaPlayerBase::AudioSink
@@ -180,6 +182,7 @@ public:
virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length);
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
virtual sp<IMemory> snoop();
virtual sp<IOMX> createOMX();
virtual status_t dump(int fd, const Vector<String16>& args);