Merge "Make sure that key frame is generated for timelapse video recording if there are at least two input video frames from camera source." into honeycomb

This commit is contained in:
James Dong
2011-01-26 14:43:59 -08:00
committed by Android (Google) Code Review
6 changed files with 35 additions and 4 deletions

View File

@@ -158,6 +158,7 @@ protected:
int32_t mNumFramesReceived;
int64_t mLastFrameTimestampUs;
bool mStarted;
int32_t mNumFramesEncoded;
CameraSource(const sp<ICamera>& camera, int32_t cameraId,
Size videoSize, int32_t frameRate,
@@ -189,7 +190,6 @@ private:
List<int64_t> mFrameTimes;
int64_t mFirstFrameTimeUs;
int32_t mNumFramesEncoded;
int32_t mNumFramesDropped;
int32_t mNumGlitches;
int64_t mGlitchDurationThresholdUs;

View File

@@ -47,6 +47,9 @@ struct OMXCodec : public MediaSource,
// Store meta data in video buffers
kStoreMetaDataInVideoBuffers = 32,
// Only submit one input buffer at one time.
kOnlySubmitOneInputBufferAtOneTime = 64,
};
static sp<MediaSource> Create(
const sp<IOMX> &omx,
@@ -192,6 +195,7 @@ private:
Condition mBufferFilled;
bool mIsMetaDataStoredInVideoBuffers;
bool mOnlySubmitOneBufferAtOneTime;
OMXCodec(const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks,
bool isEncoder, const char *mime, const char *componentName,

View File

@@ -1239,6 +1239,14 @@ status_t StagefrightRecorder::setupVideoEncoder(
encoder_flags |= OMXCodec::kHardwareCodecsOnly;
encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
}
// Do not wait for all the input buffers to become available.
// This gives timelapse video recording a faster response in
// receiving output from the video encoder component.
if (mCaptureTimeLapse) {
encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
}
sp<MediaSource> encoder = OMXCodec::Create(
client.interface(), enc_meta,
true /* createEncoder */, cameraSource,

View File

@@ -147,8 +147,8 @@ CameraSource::CameraSource(
mNumFramesReceived(0),
mLastFrameTimestampUs(0),
mStarted(false),
mFirstFrameTimeUs(0),
mNumFramesEncoded(0),
mFirstFrameTimeUs(0),
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),

View File

@@ -491,7 +491,10 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
}
}
if (*timestampUs <
// Workaround to bypass the first 2 input frames for skipping.
// The first 2 output frames from the encoder are: decoder specific info and
// the compressed video frame data for the first input video frame.
if (mNumFramesEncoded >= 1 && *timestampUs <
(mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
// Skip all frames from last encoded frame until
// sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.

View File

@@ -522,6 +522,12 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
if (flags & kStoreMetaDataInVideoBuffers) {
mIsMetaDataStoredInVideoBuffers = true;
}
mOnlySubmitOneBufferAtOneTime = false;
if (flags & kOnlySubmitOneInputBufferAtOneTime) {
mOnlySubmitOneBufferAtOneTime = true;
}
if (!(flags & kIgnoreCodecSpecificData)) {
uint32_t type;
const void *data;
@@ -2610,7 +2616,17 @@ void OMXCodec::drainInputBuffers() {
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
for (size_t i = 0; i < buffers->size(); ++i) {
if (!drainInputBuffer(&buffers->editItemAt(i))) {
BufferInfo *info = &buffers->editItemAt(i);
if (info->mStatus != OWNED_BY_US) {
continue;
}
if (!drainInputBuffer(info)) {
break;
}
if (mOnlySubmitOneBufferAtOneTime) {
break;
}
}