Make camera source ready for handling meta-data video buffers.

bug - 3042125

Change-Id: I877b265c6bf8e0593121c8d5a95ae5599cdc6fb9
James Dong
2010-10-18 21:42:27 -07:00
parent 382f7a140e
commit ab79d1febc
3 changed files with 105 additions and 12 deletions

View File

@@ -56,6 +56,14 @@ public:
* @param frameRate the target frames per second
* @param surface the preview surface for display where preview
* frames are sent to
* @param storeMetaDataInVideoBuffers true to request the camera
* source to store meta data in video buffers; false to
* request the camera source to store real YUV frame data
* in the video buffers. The camera source may not support
* storing meta data in video buffers; if so, a request
* to do that will NOT be honored. To find out whether
* meta data is actually being stored in video buffers
* during recording, call isMetaDataStoredInVideoBuffers().
*
* @return NULL on error.
*/
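
For illustration only (not part of this change), a minimal caller-side sketch of the new parameter; it assumes a camera proxy and preview surface have already been obtained elsewhere, and the size and frame rate values are arbitrary:

    // Sketch: request that the camera store meta data in its video buffers,
    // then check whether the request was actually honored.
    sp<ICamera> camera;    // assumed to be obtained from the camera service
    sp<Surface> surface;   // assumed to be the preview surface

    Size videoSize;
    videoSize.width = 1280;
    videoSize.height = 720;

    CameraSource *source = CameraSource::CreateFromCamera(
            camera, 0 /* cameraId */, videoSize, 30 /* frameRate */,
            surface, true /* storeMetaDataInVideoBuffers */);

    if (source == NULL) {
        // Creation failed (see @return above).
    } else if (!source->isMetaDataStoredInVideoBuffers()) {
        // The camera does not support the feature; the source silently
        // falls back to storing real YUV frame data in the video buffers.
    }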
@@ -63,12 +71,15 @@ public:
int32_t cameraId,
Size videoSize,
int32_t frameRate,
const sp<Surface>& surface);
const sp<Surface>& surface,
bool storeMetaDataInVideoBuffers = false);
virtual ~CameraSource();
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
/**
* Check whether a CameraSource object is properly initialized.
@@ -87,8 +98,43 @@ public:
*/
virtual sp<MetaData> getFormat();
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
/**
* Retrieve the total number of video buffers available from
* this source.
*
* This method is useful if these video buffers are used
* for passing video frame data to other media components,
* such as OMX video encoders, in order to eliminate the
* memcpy of the data.
*
* @return the total number of video buffers. Returns 0 to
* indicate that this source does not make the video
* buffer information available.
*/
size_t getNumberOfVideoBuffers() const;
/**
* Retrieve the individual video buffer available from
* this source.
*
* @param index the index corresponding to the video buffer.
* Valid range of the index is [0, n], where n =
* getNumberOfVideoBuffers() - 1.
*
* @return the video buffer corresponding to the given index.
* If index is out of range, 0 should be returned.
*/
sp<IMemory> getVideoBuffer(size_t index) const;
/**
* Tell whether this camera source stores meta data or real YUV
* frame data in video buffers.
*
* @return true if meta data is stored in the video
* buffers; false if real YUV data is stored in
* the video buffers.
*/
bool isMetaDataStoredInVideoBuffers() const;
virtual void signalBufferReturned(MediaBuffer* buffer);
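
For illustration only (not part of this change), how a consumer might use the two new buffer queries; registerBufferWithEncoder() below is a hypothetical placeholder for whatever registration mechanism the downstream component provides:

    // Sketch: enumerate the camera-owned video buffers so a downstream
    // component (e.g. an OMX encoder) can reference them directly and
    // avoid a memcpy of the frame data.
    size_t n = source->getNumberOfVideoBuffers();
    if (n == 0) {
        // The source does not expose its video buffers; copy frame data instead.
    } else {
        for (size_t i = 0; i < n; ++i) {
            sp<IMemory> mem = source->getVideoBuffer(i);
            if (mem != 0) {                      // 0 is returned for an invalid index
                registerBufferWithEncoder(mem);  // hypothetical registration hook
            }
        }
    }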
@@ -115,7 +161,8 @@ protected:
CameraSource(const sp<ICamera>& camera, int32_t cameraId,
Size videoSize, int32_t frameRate,
const sp<Surface>& surface);
const sp<Surface>& surface,
bool storeMetaDataInVideoBuffers);
virtual void startCameraRecording();
virtual void stopCameraRecording();
@@ -147,13 +194,15 @@ private:
int32_t mNumGlitches;
int64_t mGlitchDurationThresholdUs;
bool mCollectStats;
bool mIsMetaDataStoredInVideoBuffers;
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
status_t init(const sp<ICamera>& camera, int32_t cameraId,
Size videoSize, int32_t frameRate);
Size videoSize, int32_t frameRate,
bool storeMetaDataInVideoBuffers);
status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId);
status_t isCameraColorFormatSupported(const CameraParameters& params);
status_t configureCamera(CameraParameters* params,

View File

@@ -115,7 +115,7 @@ CameraSource *CameraSource::Create() {
size.height = -1;
sp<ICamera> camera;
return new CameraSource(camera, 0, size, -1, NULL);
return new CameraSource(camera, 0, size, -1, NULL, false);
}
// static
@@ -124,10 +124,12 @@ CameraSource *CameraSource::CreateFromCamera(
int32_t cameraId,
Size videoSize,
int32_t frameRate,
const sp<Surface>& surface) {
const sp<Surface>& surface,
bool storeMetaDataInVideoBuffers) {
CameraSource *source = new CameraSource(camera, cameraId,
videoSize, frameRate, surface);
videoSize, frameRate, surface,
storeMetaDataInVideoBuffers);
if (source != NULL) {
if (source->initCheck() != OK) {
@@ -143,7 +145,8 @@ CameraSource::CameraSource(
int32_t cameraId,
Size videoSize,
int32_t frameRate,
const sp<Surface>& surface)
const sp<Surface>& surface,
bool storeMetaDataInVideoBuffers)
: mCameraFlags(0),
mVideoFrameRate(-1),
mCamera(0),
@@ -161,7 +164,9 @@ CameraSource::CameraSource(
mVideoSize.width = -1;
mVideoSize.height = -1;
mInitCheck = init(camera, cameraId, videoSize, frameRate);
mInitCheck = init(camera, cameraId,
videoSize, frameRate,
storeMetaDataInVideoBuffers);
}
status_t CameraSource::initCheck() const {
@@ -411,13 +416,19 @@ status_t CameraSource::checkFrameRate(
* width and height settings by the camera
* @param frameRate the target frame rate in frames per second.
* if it is -1, use the current camera frame rate setting.
* @param storeMetaDataInVideoBuffers request to store meta
* data or real YUV data in video buffers. A request to
* store meta data in video buffers may not be honored
* if the source does not support this feature.
*
* @return OK if no error.
*/
status_t CameraSource::init(
const sp<ICamera>& camera,
int32_t cameraId,
Size videoSize,
int32_t frameRate) {
int32_t frameRate,
bool storeMetaDataInVideoBuffers) {
status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
@@ -452,6 +463,12 @@ status_t CameraSource::init(
// check earlier by calling mCamera->setParameters().
CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
mIsMetaDataStoredInVideoBuffers = false;
if (storeMetaDataInVideoBuffers &&
OK == mCamera->storeMetaDataInBuffers(true)) {
mIsMetaDataStoredInVideoBuffers = true;
}
/*
* mCamera->startRecording() signals camera hal to make
* available the video buffers (for instance, allocation
@@ -722,4 +739,31 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
mFrameAvailableCondition.signal();
}
size_t CameraSource::getNumberOfVideoBuffers() const {
LOGV("getNumberOfVideoBuffers");
size_t nBuffers = 0;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
if (mInitCheck == OK && mCamera != 0) {
nBuffers = mCamera->getNumberOfVideoBuffers();
}
IPCThreadState::self()->restoreCallingIdentity(token);
return nBuffers;
}
sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
LOGV("getVideoBuffer: %d", index);
sp<IMemory> buffer = 0;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
if (mInitCheck == OK && mCamera != 0) {
buffer = mCamera->getVideoBuffer(index);
}
IPCThreadState::self()->restoreCallingIdentity(token);
return buffer;
}
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
LOGV("isMetaDataStoredInVideoBuffers");
return mIsMetaDataStoredInVideoBuffers;
}
} // namespace android
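
For illustration only (not part of this change), a sketch of the consumer-side read loop; the meta data layout is camera-HAL specific and only meaningful to a matching encoder, so both branches are left abstract:

    // Sketch: the payload of each MediaBuffer returned by read() is either
    // an opaque meta data blob or a real YUV frame, depending on what the
    // camera agreed to when the source was created.
    bool metaDataMode = source->isMetaDataStoredInVideoBuffers();

    MediaBuffer *buffer;
    while (source->read(&buffer) == OK) {
        if (metaDataMode) {
            // Hand the opaque meta data to an encoder that knows how to
            // dereference it; do not interpret it here.
        } else {
            // buffer->data() + buffer->range_offset() points at real YUV
            // frame data of buffer->range_length() bytes.
        }
        buffer->release();
    }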

View File

@@ -66,7 +66,7 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
int32_t videoFrameRate,
const sp<Surface>& surface,
int64_t timeBetweenTimeLapseFrameCaptureUs)
: CameraSource(camera, cameraId, videoSize, videoFrameRate, surface),
: CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
mLastTimeLapseFrameRealTimestampUs(0),