Merge "OpenSL-based audio support for BootAnimation" into nyc-mr1-dev

This commit is contained in:
Geoffrey Pitsch
2016-07-12 17:29:55 +00:00
committed by Android (Google) Code Review
8 changed files with 393 additions and 416 deletions

View File

@@ -3,14 +3,16 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
bootanimation_main.cpp \
AudioPlayer.cpp \
audioplay.cpp \
BootAnimation.cpp
LOCAL_CFLAGS += -DGL_GLEXT_PROTOTYPES -DEGL_EGLEXT_PROTOTYPES
LOCAL_CFLAGS += -Wall -Werror -Wunused -Wunreachable-code
LOCAL_C_INCLUDES += external/tinyalsa/include
LOCAL_C_INCLUDES += \
external/tinyalsa/include \
frameworks/wilhelm/include
LOCAL_SHARED_LIBRARIES := \
libcutils \
@@ -23,6 +25,7 @@ LOCAL_SHARED_LIBRARIES := \
libEGL \
libGLESv1_CM \
libgui \
libOpenSLES \
libtinyalsa
LOCAL_MODULE:= bootanimation
@@ -33,4 +36,8 @@ ifdef TARGET_32_BIT_SURFACEFLINGER
LOCAL_32_BIT_ONLY := true
endif
# get asserts to work
APP_OPTIM := debug
LOCAL_CFLAGS += -UNDEBUG
include $(BUILD_EXECUTABLE)

View File

@@ -1,313 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "BootAnim_AudioPlayer"
#include "AudioPlayer.h"
#include <androidfw/ZipFileRO.h>
#include <tinyalsa/asoundlib.h>
#include <utils/Log.h>
#include <utils/String8.h>
#define ID_RIFF 0x46464952
#define ID_WAVE 0x45564157
#define ID_FMT 0x20746d66
#define ID_DATA 0x61746164
// Maximum line length for audio_conf.txt
// We only accept lines less than this length to avoid overflows using sscanf()
#define MAX_LINE_LENGTH 1024
// On-disk layout of a RIFF/WAVE file header (fields are little-endian,
// matched against the ID_* fourcc constants above).
struct riff_wave_header {
uint32_t riff_id;
uint32_t riff_sz;
uint32_t wave_id;
};
// Generic chunk header that precedes every chunk inside the WAVE payload.
struct chunk_header {
uint32_t id;
uint32_t sz;
};
// Body of the "fmt " chunk: PCM format description used to configure ALSA.
struct chunk_fmt {
uint16_t audio_format;
uint16_t num_channels;
uint32_t sample_rate;
uint32_t byte_rate;
uint16_t block_align;
uint16_t bits_per_sample;
};
namespace android {
// Construct an unconfigured player: card/device stay -1 (invalid) until
// init() successfully parses audio_conf.txt; init() returns false otherwise.
AudioPlayer::AudioPlayer()
: mCard(-1),
mDevice(-1),
mPeriodSize(0),
mPeriodCount(0),
mCurrentFile(NULL)
{
}
// Thread base class handles join/teardown; nothing extra to release here.
AudioPlayer::~AudioPlayer() {
}
// Apply a space-separated list of values to the named ALSA mixer control.
// Int/bool controls are set one value per control index; enum controls are
// set by string. Returns false only when the mixer handle or the named
// control is unavailable; per-value set failures are logged and skipped.
static bool setMixerValue(struct mixer* mixer, const char* name, const char* values)
{
if (!mixer) {
ALOGE("no mixer in setMixerValue");
return false;
}
struct mixer_ctl *ctl = mixer_get_ctl_by_name(mixer, name);
if (!ctl) {
ALOGE("mixer_get_ctl_by_name failed for %s", name);
return false;
}
enum mixer_ctl_type type = mixer_ctl_get_type(ctl);
int numValues = mixer_ctl_get_num_values(ctl);
int intValue;
// %s below is safe only because init() rejects lines >= MAX_LINE_LENGTH,
// so `values` can never overflow this buffer.
char stringValue[MAX_LINE_LENGTH];
// Consume one whitespace-separated token per control index.
for (int i = 0; i < numValues && values; i++) {
// strip leading space
while (*values == ' ') values++;
if (*values == 0) break;
switch (type) {
case MIXER_CTL_TYPE_BOOL:
case MIXER_CTL_TYPE_INT:
if (sscanf(values, "%d", &intValue) == 1) {
if (mixer_ctl_set_value(ctl, i, intValue) != 0) {
ALOGE("mixer_ctl_set_value failed for %s %d", name, intValue);
}
} else {
ALOGE("Could not parse %s as int for %s", values, name);
}
break;
case MIXER_CTL_TYPE_ENUM:
if (sscanf(values, "%s", stringValue) == 1) {
if (mixer_ctl_set_enum_by_string(ctl, stringValue) != 0) {
ALOGE("mixer_ctl_set_enum_by_string failed for %s %s", name, stringValue);
}
} else {
ALOGE("Could not parse %s as enum for %s", values, name);
}
break;
default:
ALOGE("unsupported mixer type %d for %s", type, name);
break;
}
// Advance past the current token to the next separator.
values = strchr(values, ' ');
}
return true;
}
/*
 * Parse the audio configuration file.
 * The file is named audio_conf.txt and must begin with the following header:
 *
 * card=<ALSA card number>
 * device=<ALSA device number>
 * period_size=<period size>
 * period_count=<period count>
 *
 * This header is followed by zero or more mixer settings, each with the format:
 * mixer "<name>" = <value list>
 * Since mixer names can contain spaces, the name must be enclosed in double quotes.
 * The values in the value list can be integers, booleans (represented by 0 or 1)
 * or strings for enum values.
 */
// Returns true only if both card= and device= were found. Mixer settings that
// appear before the card= line are silently ignored (mixer is still NULL then,
// and setMixerValue rejects a NULL mixer).
bool AudioPlayer::init(const char* config)
{
int tempInt;
struct mixer* mixer = NULL;
char name[MAX_LINE_LENGTH];
// Process the config one newline-terminated line at a time; a trailing
// line without '\n' is ignored.
for (;;) {
const char* endl = strstr(config, "\n");
if (!endl) break;
String8 line(config, endl - config);
if (line.length() >= MAX_LINE_LENGTH) {
ALOGE("Line too long in audio_conf.txt");
return false;
}
const char* l = line.string();
if (sscanf(l, "card=%d", &tempInt) == 1) {
ALOGD("card=%d", tempInt);
mCard = tempInt;
// The mixer for this card is opened immediately so that subsequent
// "mixer ..." lines can be applied as they are parsed.
mixer = mixer_open(mCard);
if (!mixer) {
ALOGE("could not open mixer for card %d", mCard);
return false;
}
} else if (sscanf(l, "device=%d", &tempInt) == 1) {
ALOGD("device=%d", tempInt);
mDevice = tempInt;
} else if (sscanf(l, "period_size=%d", &tempInt) == 1) {
ALOGD("period_size=%d", tempInt);
mPeriodSize = tempInt;
} else if (sscanf(l, "period_count=%d", &tempInt) == 1) {
ALOGD("period_count=%d", tempInt);
mPeriodCount = tempInt;
} else if (sscanf(l, "mixer \"%[0-9a-zA-Z _]s\"", name) == 1) {
// Values follow the '=' after the quoted control name.
const char* values = strchr(l, '=');
if (values) {
values++; // skip '='
ALOGD("name: \"%s\" = %s", name, values);
setMixerValue(mixer, name, values);
} else {
ALOGE("values missing for name: \"%s\"", name);
}
}
config = ++endl;
}
// NOTE(review): mixer may still be NULL here if no card= line was seen;
// assumes tinyalsa's mixer_close tolerates NULL — confirm for the
// tinyalsa revision in this tree.
mixer_close(mixer);
if (mCard >= 0 && mDevice >= 0) {
return true;
}
return false;
}
// Start asynchronous playback of a memory-mapped WAV file. Takes ownership
// of fileMap (threadLoop deletes it when done). Any clip already playing is
// stopped first by tearing down the previous playback thread.
void AudioPlayer::playFile(FileMap* fileMap) {
// stop any currently playing sound
requestExitAndWait();
mCurrentFile = fileMap;
run("bootanim audio", PRIORITY_URGENT_AUDIO);
}
// Playback thread body: parses mCurrentFile as a RIFF/WAVE file, opens the
// configured ALSA PCM device, and streams the data chunk in buffer-sized
// writes. Always returns false so the thread runs exactly once per playFile().
// Exits early (via `exit:`) on any parse/open/write error or a pending exit
// request; cleanup closes the PCM and frees the file mapping.
bool AudioPlayer::threadLoop()
{
struct pcm_config config;
struct pcm *pcm = NULL;
bool moreChunks = true;
const struct chunk_fmt* chunkFmt = NULL;
int bufferSize;
const uint8_t* wavData;
size_t wavLength;
const struct riff_wave_header* wavHeader;
if (mCurrentFile == NULL) {
ALOGE("mCurrentFile is NULL");
return false;
}
wavData = (const uint8_t *)mCurrentFile->getDataPtr();
if (!wavData) {
ALOGE("Could not access WAV file data");
goto exit;
}
wavLength = mCurrentFile->getDataLength();
wavHeader = (const struct riff_wave_header *)wavData;
if (wavLength < sizeof(*wavHeader) || (wavHeader->riff_id != ID_RIFF) ||
(wavHeader->wave_id != ID_WAVE)) {
ALOGE("Error: audio file is not a riff/wave file\n");
goto exit;
}
wavData += sizeof(*wavHeader);
wavLength -= sizeof(*wavHeader);
// Scan chunks until the "data" chunk is found; remember the "fmt " chunk.
do {
const struct chunk_header* chunkHeader = (const struct chunk_header*)wavData;
if (wavLength < sizeof(*chunkHeader)) {
ALOGE("EOF reading chunk headers");
goto exit;
}
wavData += sizeof(*chunkHeader);
wavLength -= sizeof(*chunkHeader);
// NOTE(review): chunkHeader->sz is not validated against wavLength;
// a corrupt size larger than the remaining mapping underflows the
// unsigned wavLength and would read past the file mapping below.
switch (chunkHeader->id) {
case ID_FMT:
chunkFmt = (const struct chunk_fmt *)wavData;
wavData += chunkHeader->sz;
wavLength -= chunkHeader->sz;
break;
case ID_DATA:
/* Stop looking for chunks */
moreChunks = 0;
break;
default:
/* Unknown chunk, skip bytes */
wavData += chunkHeader->sz;
wavLength -= chunkHeader->sz;
}
} while (moreChunks);
if (!chunkFmt) {
ALOGE("format not found in WAV file");
goto exit;
}
// Configure the ALSA stream from the WAV format plus the period settings
// read from audio_conf.txt by init().
memset(&config, 0, sizeof(config));
config.channels = chunkFmt->num_channels;
config.rate = chunkFmt->sample_rate;
config.period_size = mPeriodSize;
config.period_count = mPeriodCount;
config.start_threshold = mPeriodSize / 4;
config.stop_threshold = INT_MAX;
config.avail_min = config.start_threshold;
if (chunkFmt->bits_per_sample != 16) {
ALOGE("only 16 bit WAV files are supported");
goto exit;
}
config.format = PCM_FORMAT_S16_LE;
pcm = pcm_open(mCard, mDevice, PCM_OUT, &config);
if (!pcm || !pcm_is_ready(pcm)) {
ALOGE("Unable to open PCM device (%s)\n", pcm_get_error(pcm));
goto exit;
}
bufferSize = pcm_frames_to_bytes(pcm, pcm_get_buffer_size(pcm));
// Stream the data chunk; bail out promptly if an exit was requested.
while (wavLength > 0) {
if (exitPending()) goto exit;
size_t count = bufferSize;
if (count > wavLength)
count = wavLength;
if (pcm_write(pcm, wavData, count)) {
ALOGE("pcm_write failed (%s)", pcm_get_error(pcm));
goto exit;
}
wavData += count;
wavLength -= count;
}
exit:
if (pcm)
pcm_close(pcm);
// The player owns the mapping handed over by playFile().
delete mCurrentFile;
mCurrentFile = NULL;
return false;
}
} // namespace android

View File

@@ -1,48 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _BOOTANIMATION_AUDIOPLAYER_H
#define _BOOTANIMATION_AUDIOPLAYER_H
#include <utils/Thread.h>
#include <utils/FileMap.h>
namespace android {
// One-shot WAV playback helper for the boot animation. Configure via
// init() (parses audio_conf.txt), then playFile() spawns a thread that
// streams the clip to ALSA and frees the mapping when done.
class AudioPlayer : public Thread
{
public:
AudioPlayer();
virtual ~AudioPlayer();
// Parse audio_conf.txt contents; returns false unless card= and device=
// are both present.
bool init(const char* config);
// Takes ownership of fileMap; stops any clip already playing.
void playFile(FileMap* fileMap);
private:
virtual bool threadLoop();
private:
int mCard; // ALSA card to use
int mDevice; // ALSA device to use
int mPeriodSize;
int mPeriodCount;
FileMap* mCurrentFile; // clip being played; owned, freed by threadLoop
};
} // namespace android
#endif // _BOOTANIMATION_AUDIOPLAYER_H

View File

@@ -58,7 +58,7 @@
#include <EGL/eglext.h>
#include "BootAnimation.h"
#include "AudioPlayer.h"
#include "audioplay.h"
namespace android {
@@ -106,9 +106,7 @@ void BootAnimation::binderDied(const wp<IBinder>&)
// might be blocked on a condition variable that will never be updated.
kill( getpid(), SIGKILL );
requestExit();
if (mAudioPlayer != NULL) {
mAudioPlayer->requestExit();
}
audioplay::destroy();
}
status_t BootAnimation::initTexture(Texture* texture, AssetManager& assets,
@@ -400,9 +398,6 @@ void BootAnimation::checkExit() {
int exitnow = atoi(value);
if (exitnow) {
requestExit();
if (mAudioPlayer != NULL) {
mAudioPlayer->requestExit();
}
}
}
@@ -524,16 +519,6 @@ bool BootAnimation::parseAnimationDesc(Animation& animation)
}
char const* s = desString.string();
// Create and initialize an AudioPlayer if we have an audio_conf.txt file
String8 audioConf;
if (readFile(animation.zip, "audio_conf.txt", audioConf)) {
mAudioPlayer = new AudioPlayer;
if (!mAudioPlayer->init(audioConf.string())) {
ALOGE("mAudioPlayer.init failed");
mAudioPlayer = NULL;
}
}
// Parse the description file
for (;;) {
const char* endl = strstr(s, "\n");
@@ -564,7 +549,7 @@ bool BootAnimation::parseAnimationDesc(Animation& animation)
part.pause = pause;
part.path = path;
part.clockPosY = clockPosY;
part.audioFile = NULL;
part.audioData = NULL;
part.animation = NULL;
if (!parseColor(color, part.backgroundColor)) {
ALOGE("> invalid color '#%s'", color);
@@ -580,7 +565,7 @@ bool BootAnimation::parseAnimationDesc(Animation& animation)
part.playUntilComplete = false;
part.count = 1;
part.pause = 0;
part.audioFile = NULL;
part.audioData = NULL;
part.animation = loadAnimation(String8(SYSTEM_BOOTANIMATION_FILE));
if (part.animation != NULL)
animation.parts.add(part);
@@ -601,6 +586,7 @@ bool BootAnimation::preloadZip(Animation& animation)
return false;
}
bool hasAudio = false;
ZipEntryRO entry;
char name[ANIM_ENTRY_NAME_MAX];
while ((entry = zip->nextEntry(cookie)) != NULL) {
@@ -624,8 +610,10 @@ bool BootAnimation::preloadZip(Animation& animation)
if (map) {
Animation::Part& part(animation.parts.editItemAt(j));
if (leaf == "audio.wav") {
hasAudio = true;
// a part may have at most one audio file
part.audioFile = map;
part.audioData = (uint8_t *)map->getDataPtr();
part.audioLength = map->getDataLength();
} else if (leaf == "trim.txt") {
part.trimData.setTo((char const*)map->getDataPtr(),
map->getDataLength());
@@ -640,6 +628,8 @@ bool BootAnimation::preloadZip(Animation& animation)
part.frames.add(frame);
}
}
} else {
ALOGE("bootanimation.zip is compressed; must be only stored");
}
}
}
@@ -673,6 +663,12 @@ bool BootAnimation::preloadZip(Animation& animation)
}
}
// Create and initialize audioplay if there is a wav file in any of the animations.
if (hasAudio) {
ALOGD("found audio.wav, creating playback engine");
audioplay::create();
}
zip->endIteration(cookie);
return true;
@@ -777,8 +773,9 @@ bool BootAnimation::playAnimation(const Animation& animation)
break;
// only play audio file the first time we animate the part
if (r == 0 && mAudioPlayer != NULL && part.audioFile) {
mAudioPlayer->playFile(part.audioFile);
if (r == 0 && part.audioData) {
ALOGD("playing clip for part%d, size=%d", (int) i, part.audioLength);
audioplay::playClip(part.audioData, part.audioLength);
}
glClearColor(
@@ -865,6 +862,11 @@ bool BootAnimation::playAnimation(const Animation& animation)
}
}
}
// we've finally played everything we're going to play
audioplay::setPlaying(false);
audioplay::destroy();
return true;
}

View File

@@ -30,7 +30,6 @@ class SkBitmap;
namespace android {
class AudioPlayer;
class Surface;
class SurfaceComposerClient;
class SurfaceControl;
@@ -98,7 +97,8 @@ private:
SortedVector<Frame> frames;
bool playUntilComplete;
float backgroundColor[3];
FileMap* audioFile;
uint8_t* audioData;
int audioLength;
Animation* animation;
};
int fps;
@@ -124,7 +124,6 @@ private:
void checkExit();
sp<SurfaceComposerClient> mSession;
sp<AudioPlayer> mAudioPlayer;
AssetManager mAssets;
Texture mAndroid[2];
Texture mClock;

View File

@@ -67,34 +67,8 @@ If the file is not present, each frame is assumed to be the same size as the ani
## audio.wav
Each part may optionally play a `wav` sample when it starts. To enable this for an animation,
you must also include an `audio_conf.txt` file in the ZIP archive. Its format is as follows:
card=<ALSA card number>
device=<ALSA device number>
period_size=<period size>
period_count=<period count>
This header is followed by zero or more mixer settings, each with the format:
mixer "<name>" = <value list>
Here's an example `audio_conf.txt` from Shamu:
card=0
device=15
period_size=1024
period_count=4
mixer "QUAT_MI2S_RX Audio Mixer MultiMedia5" = 1
mixer "Playback Channel Map" = 0 220 157 195 0 0 0 0
mixer "QUAT_MI2S_RX Channels" = Two
mixer "BOOST_STUB Right Mixer right" = 1
mixer "BOOST_STUB Left Mixer left" = 1
mixer "Compress Playback 9 Volume" = 80 80
You will probably need to get these mixer names and values out of `audio_platform_info.xml`
and `mixer_paths.xml` for your device.
Each part may optionally play a `wav` sample when it starts. To enable this, add a file
with the name `audio.wav` in the part directory.
## exiting

View File

@@ -0,0 +1,321 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// cribbed from samples/native-audio
#include "audioplay.h"
#define CHATTY ALOGD
#include <assert.h>
#include <string.h>
#include <utils/Log.h>
// for native audio
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
namespace audioplay {
namespace {
// engine interfaces
// File-scope OpenSL ES state: one engine, one output mix, and one lazily
// created buffer-queue player shared by all clips. Lifetime is
// create() -> playClip()* -> destroy(); no locking is visible here, so
// callers are expected to use these from a single thread.
static SLObjectItf engineObject = NULL;
static SLEngineItf engineEngine;
// output mix interfaces
static SLObjectItf outputMixObject = NULL;
// buffer queue player interfaces
static SLObjectItf bqPlayerObject = NULL;
static SLPlayItf bqPlayerPlay;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
static SLMuteSoloItf bqPlayerMuteSolo;
static SLVolumeItf bqPlayerVolume;
// pointer and size of the next player buffer to enqueue, and number of remaining buffers
static const uint8_t* nextBuffer;
static unsigned nextSize;
// RIFF/WAVE fourcc constants, little-endian ("RIFF", "WAVE", "fmt ", "data").
static const uint32_t ID_RIFF = 0x46464952;
static const uint32_t ID_WAVE = 0x45564157;
static const uint32_t ID_FMT = 0x20746d66;
static const uint32_t ID_DATA = 0x61746164;
struct RiffWaveHeader {
uint32_t riff_id;
uint32_t riff_sz;
uint32_t wave_id;
};
struct ChunkHeader {
uint32_t id;
uint32_t sz;
};
// Body of the "fmt " chunk; feeds SLDataFormat_PCM in
// createBufferQueueAudioPlayer().
struct ChunkFormat {
uint16_t audio_format;
uint16_t num_channels;
uint32_t sample_rate;
uint32_t byte_rate;
uint16_t block_align;
uint16_t bits_per_sample;
};
// this callback handler is called every time a buffer finishes playing
// Runs on an OpenSL ES internal thread; only flips the play state off,
// since exactly one buffer is enqueued per clip (see playClip).
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
(void)bq;
(void)context;
assert(bq == bqPlayerBufferQueue);
assert(NULL == context);
audioplay::setPlaying(false);
}
// True once both the engine and the buffer-queue player objects exist,
// i.e. create() has run and playClip() has built the player.
bool hasPlayer() {
    if (engineObject == NULL) {
        return false;
    }
    return bqPlayerObject != NULL;
}
// create the engine and output mix objects
// Populates engineObject/engineEngine/outputMixObject. Failures are caught
// only by assert(), so in NDEBUG builds an SL error here is silently ignored
// and later calls rely on the NULL checks in hasPlayer()/destroy().
void createEngine() {
SLresult result;
// create engine
result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// realize the engine
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// get the engine interface, which is needed in order to create other objects
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// create output mix, with environmental reverb specified as a non-required interface
const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
const SLboolean req[1] = {SL_BOOLEAN_FALSE};
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// realize the output mix
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
(void)result;
}
// create buffer queue audio player
void createBufferQueueAudioPlayer(const ChunkFormat* chunkFormat) {
SLresult result;
// configure audio source
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 1};
SLDataFormat_PCM format_pcm = {
SL_DATAFORMAT_PCM,
chunkFormat->num_channels,
chunkFormat->sample_rate * 1000, // convert to milliHz
chunkFormat->bits_per_sample,
16,
SL_SPEAKER_FRONT_CENTER,
SL_BYTEORDER_LITTLEENDIAN
};
SLDataSource audioSrc = {&loc_bufq, &format_pcm};
// configure audio sink
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
// create audio player
const SLInterfaceID ids[2] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
2, ids, req);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// realize the player
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// get the play interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// get the buffer queue interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
&bqPlayerBufferQueue);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// register callback on the buffer queue
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
assert(SL_RESULT_SUCCESS == result);
(void)result;
#if 0 // mute/solo is not supported for sources that are known to be mono, as this is
// get the mute/solo interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_MUTESOLO, &bqPlayerMuteSolo);
assert(SL_RESULT_SUCCESS == result);
(void)result;
#endif
// get the volume interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);
assert(SL_RESULT_SUCCESS == result);
(void)result;
// set the player's state to playing
audioplay::setPlaying(true);
CHATTY("Created buffer queue player: %p", bqPlayerBufferQueue);
}
} // namespace
// Initialize the shared OpenSL ES engine and output mix. Call once before
// playClip(); the buffer-queue player itself is created lazily from the
// first clip's WAV format.
void create() {
createEngine();
}
bool playClip(const uint8_t* buf, int size) {
// Parse the WAV header
nextBuffer = buf;
nextSize = size;
const RiffWaveHeader* wavHeader = (const RiffWaveHeader*)buf;
if (nextSize < sizeof(*wavHeader) || (wavHeader->riff_id != ID_RIFF) ||
(wavHeader->wave_id != ID_WAVE)) {
ALOGE("Error: audio file is not a riff/wave file\n");
return false;
}
nextBuffer += sizeof(*wavHeader);
nextSize -= sizeof(*wavHeader);
const ChunkFormat* chunkFormat = nullptr;
while (true) {
const ChunkHeader* chunkHeader = (const ChunkHeader*)nextBuffer;
if (nextSize < sizeof(*chunkHeader)) {
ALOGE("EOF reading chunk headers");
return false;
}
nextBuffer += sizeof(*chunkHeader);
nextSize -= sizeof(*chunkHeader);
bool endLoop = false;
switch (chunkHeader->id) {
case ID_FMT:
chunkFormat = (const ChunkFormat*)nextBuffer;
nextBuffer += chunkHeader->sz;
nextSize -= chunkHeader->sz;
break;
case ID_DATA:
/* Stop looking for chunks */
endLoop = true;
break;
default:
/* Unknown chunk, skip bytes */
nextBuffer += chunkHeader->sz;
nextSize -= chunkHeader->sz;
}
if (endLoop) {
break;
}
}
if (!chunkFormat) {
ALOGE("format not found in WAV file");
return false;
}
// If this is the first clip, create the buffer based on this WAV's header.
// We assume all future clips with be in the same format.
if (bqPlayerBufferQueue == nullptr) {
createBufferQueueAudioPlayer(chunkFormat);
}
assert(bqPlayerBufferQueue != nullptr);
assert(buf != nullptr);
if (!hasPlayer()) {
ALOGD("cannot play clip %p without a player", buf);
return false;
}
CHATTY("playClip on player %p: buf=%p size=%d", bqPlayerBufferQueue, buf, size);
if (nextSize > 0) {
// here we only enqueue one buffer because it is a long clip,
// but for streaming playback we would typically enqueue at least 2 buffers to start
SLresult result;
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
if (SL_RESULT_SUCCESS != result) {
return false;
}
audioplay::setPlaying(true);
}
return true;
}
// set the playing state for the buffer queue audio player
void setPlaying(bool isPlaying) {
if (!hasPlayer()) return;
SLresult result;
if (NULL != bqPlayerPlay) {
// set the player's state
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay,
isPlaying ? SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_STOPPED);
assert(SL_RESULT_SUCCESS == result);
(void)result;
}
}
// Tear down player, output mix, and engine (in that order) and NULL every
// cached interface so hasPlayer()/setPlaying() become no-ops. Safe to call
// repeatedly or before create(): each step is guarded by a NULL check.
void destroy() {
// destroy buffer queue audio player object, and invalidate all associated interfaces
if (bqPlayerObject != NULL) {
CHATTY("destroying audio player");
(*bqPlayerObject)->Destroy(bqPlayerObject);
bqPlayerObject = NULL;
bqPlayerPlay = NULL;
bqPlayerBufferQueue = NULL;
bqPlayerMuteSolo = NULL;
bqPlayerVolume = NULL;
}
// destroy output mix object, and invalidate all associated interfaces
if (outputMixObject != NULL) {
(*outputMixObject)->Destroy(outputMixObject);
outputMixObject = NULL;
}
// destroy engine object, and invalidate all associated interfaces
if (engineObject != NULL) {
CHATTY("destroying audio engine");
(*engineObject)->Destroy(engineObject);
engineObject = NULL;
engineEngine = NULL;
}
}
} // namespace audioplay

View File

@@ -0,0 +1,35 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef AUDIOPLAY_H_
#define AUDIOPLAY_H_
#include <string.h>
namespace audioplay {
// One-time setup of the OpenSL ES engine; call before playClip().
void create();
// Play a WAV pointed to by buf. All clips are assumed to be in the same format.
// playClip should not be called while a clip is still playing.
// buf must outlive playback: the player enqueues a pointer into it.
bool playClip(const uint8_t* buf, int size);
// Start/stop the player; no-op if create()/playClip() haven't run.
void setPlaying(bool isPlaying);
// Release all OpenSL ES objects; safe to call more than once.
void destroy();
}
#endif // AUDIOPLAY_H_