Merge "Audio/video sync during recording (second part)" into gingerbread

This commit is contained in:
James Dong
2010-06-22 10:05:02 -07:00
committed by Android (Google) Code Review
10 changed files with 112 additions and 42 deletions

View File

@@ -56,6 +56,7 @@ private:
int64_t mTotalReadTimeUs;
int64_t mTotalReadBytes;
int64_t mTotalReads;
int64_t mStartTimeUs;
MediaBufferGroup *mGroup;

View File

@@ -60,11 +60,14 @@ private:
List<sp<IMemory> > mFramesBeingEncoded;
List<int64_t> mFrameTimes;
int64_t mStartTimeUs;
int64_t mFirstFrameTimeUs;
int64_t mLastFrameTimestampUs;
int32_t mNumFramesReceived;
int32_t mNumFramesEncoded;
int32_t mNumFramesDropped;
int32_t mNumGlitches;
int64_t mGlitchDurationThresholdUs;
bool mCollectStats;
bool mStarted;
@@ -74,6 +77,7 @@ private:
int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
CameraSource(const CameraSource &);
CameraSource &operator=(const CameraSource &);

View File

@@ -78,8 +78,8 @@ private:
List<off_t> mBoxes;
void setStartTimestamp(int64_t timeUs);
int64_t getStartTimestamp(); // Not const
void setStartTimestampUs(int64_t timeUs);
int64_t getStartTimestampUs(); // Not const
status_t startTracks();
void lock();

View File

@@ -77,6 +77,12 @@ status_t AudioSource::start(MetaData *params) {
&& (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
mCollectStats = true;
}
mStartTimeUs = 0;
int64_t startTimeUs;
if (params && params->findInt64(kKeyTime, &startTimeUs)) {
mStartTimeUs = startTimeUs;
}
status_t err = mRecord->start();
if (err == OK) {
@@ -132,21 +138,22 @@ status_t AudioSource::read(
uint32_t numFramesRecorded;
mRecord->getPosition(&numFramesRecorded);
int64_t latency = mRecord->latency() * 1000;
uint32_t sampleRate = mRecord->getSampleRate();
int64_t timestampUs = (1000000LL * numFramesRecorded) / sampleRate - latency;
LOGV("latency: %lld, sample rate: %d, timestamp: %lld",
latency, sampleRate, timestampUs);
buffer->meta_data()->setInt64(kKeyTime, timestampUs);
int64_t readTime = systemTime() / 1000;
if (numFramesRecorded == 0) {
// Initial delay
if (mStartTimeUs > 0) {
mStartTimeUs = readTime - mStartTimeUs;
} else {
mStartTimeUs += latency;
}
}
ssize_t n = 0;
if (mCollectStats) {
struct timeval tv_start, tv_end;
gettimeofday(&tv_start, NULL);
n = mRecord->read(buffer->data(), buffer->size());
gettimeofday(&tv_end, NULL);
mTotalReadTimeUs += ((1000000LL * (tv_end.tv_sec - tv_start.tv_sec))
+ (tv_end.tv_usec - tv_start.tv_usec));
int64_t endTime = systemTime() / 1000;
mTotalReadTimeUs += (endTime - readTime);
if (n >= 0) {
mTotalReadBytes += n;
}
@@ -161,6 +168,12 @@ status_t AudioSource::read(
return (status_t)n;
}
uint32_t sampleRate = mRecord->getSampleRate();
int64_t timestampUs = (1000000LL * numFramesRecorded) / sampleRate + mStartTimeUs;
buffer->meta_data()->setInt64(kKeyTime, timestampUs);
LOGV("initial delay: %lld, sample rate: %d, timestamp: %lld",
mStartTimeUs, sampleRate, timestampUs);
buffer->set_range(0, n);
*out = buffer;

View File

@@ -123,6 +123,8 @@ CameraSource::CameraSource(const sp<Camera> &camera)
mNumFramesReceived(0),
mNumFramesEncoded(0),
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
mCollectStats(false),
mStarted(false) {
@@ -136,6 +138,13 @@ CameraSource::CameraSource(const sp<Camera> &camera)
CameraParameters params(s);
params.getPreviewSize(&width, &height);
// Calculate glitch duration threshold based on frame rate
int32_t frameRate = params.getPreviewFrameRate();
int64_t glitchDurationUs = (1000000LL / frameRate);
if (glitchDurationUs > mGlitchDurationThresholdUs) {
mGlitchDurationThresholdUs = glitchDurationUs;
}
const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
CHECK(colorFormatStr != NULL);
int32_t colorFormat = getColorFormat(colorFormatStr);
@@ -161,8 +170,7 @@ CameraSource::~CameraSource() {
}
}
status_t CameraSource::start(MetaData *) {
LOGV("start");
status_t CameraSource::start(MetaData *meta) {
CHECK(!mStarted);
char value[PROPERTY_VALUE_MAX];
@@ -171,6 +179,12 @@ status_t CameraSource::start(MetaData *) {
mCollectStats = true;
}
mStartTimeUs = 0;
int64_t startTimeUs;
if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
mStartTimeUs = startTimeUs;
}
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(new CameraSourceListener(this));
CHECK_EQ(OK, mCamera->startRecording());
@@ -222,16 +236,19 @@ sp<MetaData> CameraSource::getFormat() {
return mMeta;
}
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->releaseRecordingFrame(frame);
IPCThreadState::self()->restoreCallingIdentity(token);
}
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
LOGV("signalBufferReturned: %p", buffer->data());
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->releaseRecordingFrame((*it));
IPCThreadState::self()->restoreCallingIdentity(token);
releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
buffer->setObserver(0);
@@ -285,22 +302,41 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
Mutex::Autolock autoLock(mLock);
if (!mStarted) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->releaseRecordingFrame(data);
IPCThreadState::self()->restoreCallingIdentity(token);
releaseOneRecordingFrame(data);
++mNumFramesReceived;
++mNumFramesDropped;
return;
}
if (mNumFramesReceived > 0 &&
timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
if (mNumGlitches % 10 == 0) { // Don't spam the log
LOGW("Long delay detected in video recording");
}
++mNumGlitches;
}
mLastFrameTimestampUs = timestampUs;
if (mNumFramesReceived == 0) {
mFirstFrameTimeUs = timestampUs;
// Initial delay
if (mStartTimeUs > 0) {
if (timestampUs < mStartTimeUs) {
// Frame was captured before recording was started
// Drop it without updating the statistical data.
releaseOneRecordingFrame(data);
return;
}
mStartTimeUs = timestampUs - mStartTimeUs;
}
}
++mNumFramesReceived;
mFramesReceived.push_back(data);
mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs);
int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
mFrameTimes.push_back(timeUs);
LOGV("initial delay: %lld, current time stamp: %lld",
mStartTimeUs, timeUs);
mFrameAvailableCondition.signal();
}

View File

@@ -41,7 +41,7 @@ public:
Track(MPEG4Writer *owner, const sp<MediaSource> &source);
~Track();
status_t start();
status_t start(int64_t startTimeUs);
void stop();
void pause();
bool reachedEOS();
@@ -161,9 +161,10 @@ status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {
}
status_t MPEG4Writer::startTracks() {
int64_t startTimeUs = systemTime() / 1000;
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
status_t err = (*it)->start();
status_t err = (*it)->start(startTimeUs);
if (err != OK) {
for (List<Track *>::iterator it2 = mTracks.begin();
@@ -182,6 +183,7 @@ status_t MPEG4Writer::start() {
return UNKNOWN_ERROR;
}
mStartTimestampUs = 0;
if (mStarted) {
if (mPaused) {
mPaused = false;
@@ -190,7 +192,6 @@ status_t MPEG4Writer::start() {
return OK;
}
mStartTimestampUs = 0;
mStreamableFile = true;
mWriteMoovBoxToMemory = false;
mMoovBoxBuffer = NULL;
@@ -541,17 +542,19 @@ bool MPEG4Writer::reachedEOS() {
return allDone;
}
void MPEG4Writer::setStartTimestamp(int64_t timeUs) {
LOGI("setStartTimestamp: %lld", timeUs);
void MPEG4Writer::setStartTimestampUs(int64_t timeUs) {
LOGI("setStartTimestampUs: %lld", timeUs);
CHECK(timeUs >= 0);
Mutex::Autolock autoLock(mLock);
if (mStartTimestampUs != 0) {
return; // Sorry, too late
if (mStartTimestampUs == 0 ||
(mStartTimestampUs > 0 && mStartTimestampUs > timeUs)) {
mStartTimestampUs = timeUs;
LOGI("Earliest track starting time: %lld", mStartTimestampUs);
}
mStartTimestampUs = timeUs;
}
int64_t MPEG4Writer::getStartTimestamp() {
LOGI("getStartTimestamp: %lld", mStartTimestampUs);
int64_t MPEG4Writer::getStartTimestampUs() {
LOGI("getStartTimestampUs: %lld", mStartTimestampUs);
Mutex::Autolock autoLock(mLock);
return mStartTimestampUs;
}
@@ -584,14 +587,16 @@ MPEG4Writer::Track::~Track() {
}
}
status_t MPEG4Writer::Track::start() {
status_t MPEG4Writer::Track::start(int64_t startTimeUs) {
if (!mDone && mPaused) {
mPaused = false;
mResumed = true;
return OK;
}
status_t err = mSource->start();
sp<MetaData> meta = new MetaData;
meta->setInt64(kKeyTime, startTimeUs);
status_t err = mSource->start(meta.get());
if (err != OK) {
mDone = mReachedEOS = true;
return err;
@@ -933,8 +938,8 @@ void MPEG4Writer::Track::threadEntry() {
////////////////////////////////////////////////////////////////////////////////
if (mSampleInfos.empty()) {
mOwner->setStartTimestamp(timestampUs);
mStartTimestampUs = (timestampUs - mOwner->getStartTimestamp());
mStartTimestampUs = timestampUs;
mOwner->setStartTimestampUs(mStartTimestampUs);
}
if (mResumed) {
@@ -1188,13 +1193,16 @@ void MPEG4Writer::Track::writeTrackHeader(int32_t trackID) {
}
mOwner->endBox(); // tkhd
if (mStartTimestampUs != 0) {
int64_t moovStartTimeUs = mOwner->getStartTimestampUs();
if (mStartTimestampUs != moovStartTimeUs) {
mOwner->beginBox("edts");
mOwner->writeInt32(0); // version=0, flags=0
mOwner->beginBox("elst");
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt32(1); // a single entry
mOwner->writeInt32(mStartTimestampUs / 1000); // edit duration
int64_t durationMs =
(mStartTimestampUs - moovStartTimeUs) / 1000;
mOwner->writeInt32(durationMs); // edit duration
mOwner->writeInt32(-1); // empty edit box to signal starting time offset
mOwner->writeInt32(1); // x1 rate
mOwner->endBox();

View File

@@ -2538,7 +2538,7 @@ void OMXCodec::clearCodecSpecificData() {
mCodecSpecificDataIndex = 0;
}
status_t OMXCodec::start(MetaData *) {
status_t OMXCodec::start(MetaData *meta) {
Mutex::Autolock autoLock(mLock);
if (mState != LOADED) {
@@ -2549,6 +2549,14 @@ status_t OMXCodec::start(MetaData *) {
if (mQuirks & kWantsNALFragments) {
params->setInt32(kKeyWantsNALFragments, true);
}
if (meta) {
int64_t startTimeUs = 0;
int64_t timeUs;
if (meta->findInt64(kKeyTime, &timeUs)) {
startTimeUs = timeUs;
}
params->setInt64(kKeyTime, startTimeUs);
}
status_t err = mSource->start(params.get());
if (err != OK) {

View File

@@ -140,7 +140,7 @@ status_t AACEncoder::start(MetaData *params) {
CHECK_EQ(OK, initCheck());
mFrameCount = 0;
mSource->start();
mSource->start(params);
mStarted = true;

View File

@@ -79,7 +79,7 @@ status_t AMRNBEncoder::start(MetaData *params) {
&mEncState, &mSidState, false /* dtx_enable */),
0);
mSource->start();
mSource->start(params);
mAnchorTimeUs = 0;
mNumFramesOutput = 0;

View File

@@ -134,7 +134,7 @@ status_t AMRWBEncoder::start(MetaData *params) {
CHECK_EQ(OK, initCheck());
mNumFramesOutput = 0;
mSource->start();
mSource->start(params);
mStarted = true;