Merge change Ib64cab41 into eclair-mr2

* changes:
  Split the ColorConverter off SoftwareRenderer, metadata support in stagefright.
This commit is contained in:
Android (Google) Code Review
2009-10-09 11:44:25 -04:00
12 changed files with 643 additions and 287 deletions

View File

@@ -0,0 +1,65 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef COLOR_CONVERTER_H_
#define COLOR_CONVERTER_H_
#include <sys/types.h>
#include <OMX_Video.h>
namespace android {
// Converts one video frame from a decoder's native YUV layout into
// RGB565 for display or thumbnailing.  Construct with the source and
// destination OMX color formats, verify the pair with isValid(), then
// call convert() once per frame.  Instances are not copyable.
struct ColorConverter {
ColorConverter(OMX_COLOR_FORMATTYPE from, OMX_COLOR_FORMATTYPE to);
~ColorConverter();
// Returns true only for the format pairs the private convert*()
// methods below implement (destination must be RGB565).
bool isValid() const;
// Converts width x height pixels from srcBits to dstBits.
// srcSkip/dstSkip are per-row byte strides of the respective buffers
// (0 lets the implementation treat the data as tightly packed).
void convert(
size_t width, size_t height,
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip);
private:
OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
// Lazily-built clamp table for the fixed-point YUV->RGB math;
// owned by this object, freed in the destructor.
uint8_t *mClip;
uint8_t *initClip();
void convertCbYCrY(
size_t width, size_t height,
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip);
void convertYUV420Planar(
size_t width, size_t height,
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip);
void convertQCOMYUV420SemiPlanar(
size_t width, size_t height,
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip);
// Copying is disallowed (declared, never defined).
ColorConverter(const ColorConverter &);
ColorConverter &operator=(const ColorConverter &);
};
} // namespace android
#endif // COLOR_CONVERTER_H_

View File

@@ -18,7 +18,7 @@
#define SOFTWARE_RENDERER_H_
#include <OMX_Video.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/VideoRenderer.h>
#include <utils/RefBase.h>
@@ -41,13 +41,8 @@ public:
const void *data, size_t size, void *platformPrivate);
private:
uint8_t *initClip();
void renderCbYCrY(const void *data, size_t size);
void renderYUV420Planar(const void *data, size_t size);
void renderQCOMYUV420SemiPlanar(const void *data, size_t size);
OMX_COLOR_FORMATTYPE mColorFormat;
ColorConverter mConverter;
sp<ISurface> mISurface;
size_t mDisplayWidth, mDisplayHeight;
size_t mDecodedWidth, mDecodedHeight;
@@ -55,8 +50,6 @@ private:
sp<MemoryHeapBase> mMemoryHeap;
int mIndex;
uint8_t *mClip;
SoftwareRenderer(const SoftwareRenderer &);
SoftwareRenderer &operator=(const SoftwareRenderer &);
};

View File

@@ -18,8 +18,9 @@ LOCAL_SRC_FILES:= \
ifeq ($(BUILD_WITH_FULL_STAGEFRIGHT),true)
LOCAL_SRC_FILES += \
StagefrightPlayer.cpp
LOCAL_SRC_FILES += \
StagefrightPlayer.cpp \
StagefrightMetadataRetriever.cpp
LOCAL_CFLAGS += -DBUILD_WITH_FULL_STAGEFRIGHT=1

View File

@@ -37,6 +37,7 @@
#include "VorbisMetadataRetriever.h"
#include "MidiMetadataRetriever.h"
#include "MetadataRetrieverClient.h"
#include "StagefrightMetadataRetriever.h"
namespace android {
@@ -105,9 +106,15 @@ static sp<MediaMetadataRetrieverBase> createRetriever(player_type playerType)
LOGV("create midi metadata retriever");
p = new MidiMetadataRetriever();
break;
#if BUILD_WITH_FULL_STAGEFRIGHT
case STAGEFRIGHT_PLAYER:
LOGV("create StagefrightMetadataRetriever");
p = new StagefrightMetadataRetriever;
break;
#endif
default:
// TODO:
// support for STAGEFRIGHT_PLAYER and TEST_PLAYER
// support for TEST_PLAYER
LOGE("player type %d is not supported", playerType);
break;
}

View File

@@ -0,0 +1,194 @@
/*
**
** Copyright 2009, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "StagefrightMetadataRetriever"
#include <utils/Log.h>
#include "StagefrightMetadataRetriever.h"
#include <media/stagefright/CachingDataSource.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/HTTPDataSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MmapSource.h>
#include <media/stagefright/OMXCodec.h>
namespace android {
// Registers the default data-source sniffers and connects to the OMX
// codec service; aborts (CHECK_EQ) if the connection fails.
StagefrightMetadataRetriever::StagefrightMetadataRetriever() {
LOGV("StagefrightMetadataRetriever()");
DataSource::RegisterDefaultSniffers();
CHECK_EQ(mClient.connect(), OK);
}
// Releases the OMX service connection; mExtractor is an sp<> and
// cleans itself up.
StagefrightMetadataRetriever::~StagefrightMetadataRetriever() {
LOGV("~StagefrightMetadataRetriever()");
mClient.disconnect();
}
// Opens the given uri as a DataSource and builds a MediaExtractor on
// top of it.  "http://" URLs get a caching HTTP source; "file://" URLs
// and bare paths are memory-mapped.  Returns ERROR_IO when the file
// cannot be mapped, UNKNOWN_ERROR when no extractor recognizes the
// content, OK otherwise.
status_t StagefrightMetadataRetriever::setDataSource(const char *uri) {
    LOGV("setDataSource(%s)", uri);

    sp<DataSource> source;
    if (!strncasecmp("http://", uri, 7)) {
        source = new HTTPDataSource(uri);
        source = new CachingDataSource(source, 64 * 1024, 10);
    } else {
        // A "file://" prefix and a bare path both map the file; strip
        // the scheme when present.
        const char *path =
            !strncasecmp("file://", uri, 7) ? uri + 7 : uri;

        sp<MmapSource> fileSource = new MmapSource(path);
        if (fileSource->InitCheck() != OK) {
            return ERROR_IO;
        }
        source = fileSource;
    }

    mExtractor = MediaExtractor::Create(source);

    return mExtractor.get() != NULL ? OK : UNKNOWN_ERROR;
}
// Wraps an already-open file descriptor (offset/length delimit the
// mmap'ed range) and builds a MediaExtractor on top of it.
// Returns OK on success, UNKNOWN_ERROR when no extractor recognizes
// the content.
status_t StagefrightMetadataRetriever::setDataSource(
int fd, int64_t offset, int64_t length) {
LOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
mExtractor = MediaExtractor::Create(
new MmapSource(fd, offset, length));
// Propagate extractor-creation failure instead of unconditionally
// returning OK, matching the uri-based overload's contract; callers
// otherwise only discover the failure later in captureFrame().
return mExtractor.get() != NULL ? OK : UNKNOWN_ERROR;
}
// Decodes one frame of the first video track and returns it as an
// RGB565 VideoFrame (2 bytes/pixel).  Returns NULL when no source is
// set, no video track exists, or the track/decoder cannot be
// instantiated or fails to decode.  The caller takes ownership of the
// returned VideoFrame and its mData buffer.
VideoFrame *StagefrightMetadataRetriever::captureFrame() {
LOGV("captureFrame");
if (mExtractor.get() == NULL) {
LOGE("no extractor.");
return NULL;
}
// Pick the first track whose MIME type starts with "video/".
size_t n = mExtractor->countTracks();
size_t i;
for (i = 0; i < n; ++i) {
sp<MetaData> meta = mExtractor->getTrackMetaData(i);
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
if (!strncasecmp(mime, "video/", 6)) {
break;
}
}
if (i == n) {
LOGE("no video track found.");
return NULL;
}
sp<MediaSource> source = mExtractor->getTrack(i);
if (source.get() == NULL) {
LOGE("unable to instantiate video track.");
return NULL;
}
sp<MetaData> meta = source->getFormat();
// Instantiate a decoder for the track (third arg presumably selects
// decode vs. encode — confirm against OMXCodec::Create's signature).
sp<MediaSource> decoder =
OMXCodec::Create(
mClient.interface(), meta, false, source);
if (decoder.get() == NULL) {
LOGE("unable to instantiate video decoder.");
return NULL;
}
decoder->start();
// A single read: the first returned buffer is used as the captured
// frame.  NOTE(review): assumes the first output buffer is a
// displayable frame — confirm for codecs that emit config data first.
MediaBuffer *buffer;
status_t err = decoder->read(&buffer);
if (err != OK) {
CHECK_EQ(buffer, NULL);
LOGE("decoding frame failed.");
decoder->stop();
return NULL;
}
LOGI("successfully decoded video frame.");
// Output dimensions/color format come from the decoder's output
// format, not the container metadata.
meta = decoder->getFormat();
int32_t width, height;
CHECK(meta->findInt32(kKeyWidth, &width));
CHECK(meta->findInt32(kKeyHeight, &height));
VideoFrame *frame = new VideoFrame;
frame->mWidth = width;
frame->mHeight = height;
frame->mDisplayWidth = width;
frame->mDisplayHeight = height;
// RGB565: 2 bytes per pixel.
frame->mSize = width * height * 2;
frame->mData = new uint8_t[frame->mSize];
int32_t srcFormat;
CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));
// Convert the decoder's YUV output to RGB565; CHECK aborts on an
// unsupported color format rather than failing gracefully.
ColorConverter converter(
(OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
CHECK(converter.isValid());
converter.convert(
width, height,
(const uint8_t *)buffer->data() + buffer->range_offset(),
0,
frame->mData, width * 2);
buffer->release();
buffer = NULL;
decoder->stop();
return frame;
}
// Stub: album art extraction is not implemented in this revision.
MediaAlbumArt *StagefrightMetadataRetriever::extractAlbumArt() {
LOGV("extractAlbumArt (extractor: %s)", mExtractor.get() != NULL ? "YES" : "NO");
return NULL;
}
// Stub: metadata key extraction is not implemented in this revision.
const char *StagefrightMetadataRetriever::extractMetadata(int keyCode) {
LOGV("extractMetadata %d (extractor: %s)",
keyCode, mExtractor.get() != NULL ? "YES" : "NO");
return NULL;
}
} // namespace android

View File

@@ -0,0 +1,53 @@
/*
**
** Copyright 2009, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
#ifndef STAGEFRIGHT_METADATA_RETRIEVER_H_
#define STAGEFRIGHT_METADATA_RETRIEVER_H_
#include <media/MediaMetadataRetrieverInterface.h>
#include <media/stagefright/OMXClient.h>
namespace android {
class MediaExtractor;
// Stagefright-backed implementation of MediaMetadataRetrieverInterface:
// opens a media source, and can decode a single video frame via an OMX
// codec (captureFrame).  extractAlbumArt/extractMetadata are stubs in
// this revision and always return NULL.  Instances are not copyable.
struct StagefrightMetadataRetriever : public MediaMetadataRetrieverInterface {
StagefrightMetadataRetriever();
virtual ~StagefrightMetadataRetriever();
// Accepts "file://" and "http://" URLs, or a bare filesystem path.
virtual status_t setDataSource(const char *url);
// Wraps an already-open file descriptor (mmap'ed range).
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
// Decodes one frame of the first video track; caller owns the result.
virtual VideoFrame *captureFrame();
virtual MediaAlbumArt *extractAlbumArt();
virtual const char *extractMetadata(int keyCode);
private:
OMXClient mClient;     // connection to the OMX codec service
sp<MediaExtractor> mExtractor;  // set by setDataSource()
// Copying is disallowed (declared, never defined).
StagefrightMetadataRetriever(const StagefrightMetadataRetriever &);
StagefrightMetadataRetriever &operator=(
const StagefrightMetadataRetriever &);
};
} // namespace android
#endif // STAGEFRIGHT_METADATA_RETRIEVER_H_

View File

@@ -34,7 +34,10 @@ MmapSource::MmapSource(const char *filename)
mBase(NULL),
mSize(0) {
LOGV("MmapSource '%s'", filename);
CHECK(mFd >= 0);
if (mFd < 0) {
return;
}
off_t size = lseek(mFd, 0, SEEK_END);
mSize = (size_t)size;

View File

@@ -204,7 +204,7 @@ sp<OMXCodec> OMXCodec::Create(
status_t err = omx->allocate_node(componentName, &node);
if (err == OK) {
LOGI("Successfully allocated OMX node '%s'", componentName);
LOGV("Successfully allocated OMX node '%s'", componentName);
break;
}
}
@@ -321,9 +321,10 @@ sp<OMXCodec> OMXCodec::Create(
size -= length;
}
LOGI("AVC profile = %d (%s), level = %d",
LOGV("AVC profile = %d (%s), level = %d",
(int)profile, AVCProfileToString(profile), (int)level / 10);
#if 0
if (!strcmp(componentName, "OMX.TI.Video.Decoder")
&& (profile != kAVCProfileBaseline || level > 39)) {
// This stream exceeds the decoder's capabilities.
@@ -331,6 +332,7 @@ sp<OMXCodec> OMXCodec::Create(
LOGE("Profile and/or level exceed the decoder's capabilities.");
return NULL;
}
#endif
}
if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime)) {
@@ -440,7 +442,7 @@ status_t OMXCodec::setVideoPortFormatType(
// CHECK_EQ(format.nIndex, index);
#if 1
CODEC_LOGI("portIndex: %ld, index: %ld, eCompressionFormat=%d eColorFormat=%d",
CODEC_LOGV("portIndex: %ld, index: %ld, eCompressionFormat=%d eColorFormat=%d",
portIndex,
index, format.eCompressionFormat, format.eColorFormat);
#endif
@@ -473,7 +475,7 @@ status_t OMXCodec::setVideoPortFormatType(
return UNKNOWN_ERROR;
}
CODEC_LOGI("found a match.");
CODEC_LOGV("found a match.");
status_t err = mOMX->set_parameter(
mNode, OMX_IndexParamVideoPortFormat,
&format, sizeof(format));
@@ -483,7 +485,7 @@ status_t OMXCodec::setVideoPortFormatType(
void OMXCodec::setVideoInputFormat(
const char *mime, OMX_U32 width, OMX_U32 height) {
CODEC_LOGI("setVideoInputFormat width=%ld, height=%ld", width, height);
CODEC_LOGV("setVideoInputFormat width=%ld, height=%ld", width, height);
OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
@@ -543,7 +545,7 @@ void OMXCodec::setVideoInputFormat(
CHECK_EQ(err, OK);
def.nBufferSize = (width * height * 2); // (width * height * 3) / 2;
CODEC_LOGI("Setting nBufferSize = %ld", def.nBufferSize);
CODEC_LOGV("Setting nBufferSize = %ld", def.nBufferSize);
CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
@@ -559,7 +561,7 @@ void OMXCodec::setVideoInputFormat(
void OMXCodec::setVideoOutputFormat(
const char *mime, OMX_U32 width, OMX_U32 height) {
CODEC_LOGI("setVideoOutputFormat width=%ld, height=%ld", width, height);
CODEC_LOGV("setVideoOutputFormat width=%ld, height=%ld", width, height);
OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {

View File

@@ -10,6 +10,7 @@ LOCAL_C_INCLUDES += $(TOP)/hardware/ti/omap3/liboverlay
LOCAL_C_INCLUDES += $(JNI_H_INCLUDE)
LOCAL_SRC_FILES:= \
ColorConverter.cpp \
OMX.cpp \
QComHardwareRenderer.cpp \
SoftwareRenderer.cpp \

View File

@@ -0,0 +1,297 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/MediaDebug.h>
namespace android {
static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
// Records the source/destination formats; the clip table is built
// lazily on first conversion (see initClip()).
ColorConverter::ColorConverter(
OMX_COLOR_FORMATTYPE from, OMX_COLOR_FORMATTYPE to)
: mSrcFormat(from),
mDstFormat(to),
mClip(NULL) {
}
// Frees the lazily-allocated clip table (delete[] NULL is a no-op).
ColorConverter::~ColorConverter() {
delete[] mClip;
mClip = NULL;
}
// Reports whether this (source, destination) format pair is one the
// private convert*() methods implement.
bool ColorConverter::isValid() const {
    // Only RGB565 output is supported.
    if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
        return false;
    }

    // Exactly three input layouts have converters.
    return mSrcFormat == OMX_COLOR_FormatYUV420Planar
        || mSrcFormat == OMX_COLOR_FormatCbYCrY
        || mSrcFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar;
}
// Dispatches one frame to the converter matching mSrcFormat.
// Aborts via CHECK on an unsupported pair; callers are expected to
// have verified isValid() first.
void ColorConverter::convert(
        size_t width, size_t height,
        const void *srcBits, size_t srcSkip,
        void *dstBits, size_t dstSkip) {
    CHECK_EQ(mDstFormat, OMX_COLOR_Format16bitRGB565);

    if (mSrcFormat == OMX_COLOR_FormatYUV420Planar) {
        convertYUV420Planar(
                width, height, srcBits, srcSkip, dstBits, dstSkip);
    } else if (mSrcFormat == OMX_COLOR_FormatCbYCrY) {
        convertCbYCrY(
                width, height, srcBits, srcSkip, dstBits, dstSkip);
    } else if (mSrcFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar) {
        convertQCOMYUV420SemiPlanar(
                width, height, srcBits, srcSkip, dstBits, dstSkip);
    } else {
        CHECK(!"Should not be here. Unknown color conversion.");
    }
}
// Converts interleaved 4:2:2 CbYCrY (per pixel pair: Cb Y1 Cr Y2) to
// RGB565.  Uses fixed-point BT.601-style coefficients scaled by 256
// (see the commented derivation in convertYUV420Planar).  Two output
// pixels are packed per 32-bit store, so width is assumed even and
// dstSkip must be a 4-byte-aligned stride >= width*2.
void ColorConverter::convertCbYCrY(
size_t width, size_t height,
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip) {
CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
CHECK(dstSkip >= width * 2);
CHECK((dstSkip & 3) == 0);
uint8_t *kAdjustedClip = initClip();
uint32_t *dst_ptr = (uint32_t *)dstBits;
const uint8_t *src = (const uint8_t *)srcBits;
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; x += 2) {
// Byte order within a pixel pair: Cb, Y1, Cr, Y2.
signed y1 = (signed)src[2 * x + 1] - 16;
signed y2 = (signed)src[2 * x + 3] - 16;
signed u = (signed)src[2 * x] - 128;
signed v = (signed)src[2 * x + 2] - 128;
// Per-pair chroma products, shared by both luma samples.
signed u_b = u * 517;
signed u_g = -u * 100;
signed v_g = -v * 208;
signed v_r = v * 409;
signed tmp1 = y1 * 298;
signed b1 = (tmp1 + u_b) / 256;
signed g1 = (tmp1 + v_g + u_g) / 256;
signed r1 = (tmp1 + v_r) / 256;
signed tmp2 = y2 * 298;
signed b2 = (tmp2 + u_b) / 256;
signed g2 = (tmp2 + v_g + u_g) / 256;
signed r2 = (tmp2 + v_r) / 256;
// Clamp via the biased lookup table, then pack 5-6-5.
uint32_t rgb1 =
((kAdjustedClip[r1] >> 3) << 11)
| ((kAdjustedClip[g1] >> 2) << 5)
| (kAdjustedClip[b1] >> 3);
uint32_t rgb2 =
((kAdjustedClip[r2] >> 3) << 11)
| ((kAdjustedClip[g2] >> 2) << 5)
| (kAdjustedClip[b2] >> 3);
// Little-endian pair store: rgb1 in the low halfword.
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
src += width * 2;
dst_ptr += dstSkip / 4;
}
}
// Converts planar 4:2:0 YUV (full Y plane followed by quarter-size U
// then V planes) to RGB565.  Fixed-point BT.601-style math, scaled by
// 256; the derivation and clip-range analysis are in the comments
// below.  Width is assumed even (pixels processed in pairs); dstSkip
// must be a 4-byte-aligned stride >= width*2.
void ColorConverter::convertYUV420Planar(
size_t width, size_t height,
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip) {
CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
CHECK(dstSkip >= width * 2);
CHECK((dstSkip & 3) == 0);
uint8_t *kAdjustedClip = initClip();
uint32_t *dst_ptr = (uint32_t *)dstBits;
// Plane layout: Y (width*height), U (w/2 * h/2), V (w/2 * h/2).
const uint8_t *src_y = (const uint8_t *)srcBits;
const uint8_t *src_u =
(const uint8_t *)src_y + width * height;
const uint8_t *src_v =
(const uint8_t *)src_u + (width / 2) * (height / 2);
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; x += 2) {
// B = 1.164 * (Y - 16) + 2.018 * (U - 128)
// G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
// R = 1.164 * (Y - 16) + 1.596 * (V - 128)
// B = 298/256 * (Y - 16) + 517/256 * (U - 128)
// G = .................. - 208/256 * (V - 128) - 100/256 * (U - 128)
// R = .................. + 409/256 * (V - 128)
// min_B = (298 * (- 16) + 517 * (- 128)) / 256 = -277
// min_G = (298 * (- 16) - 208 * (255 - 128) - 100 * (255 - 128)) / 256 = -172
// min_R = (298 * (- 16) + 409 * (- 128)) / 256 = -223
// max_B = (298 * (255 - 16) + 517 * (255 - 128)) / 256 = 534
// max_G = (298 * (255 - 16) - 208 * (- 128) - 100 * (- 128)) / 256 = 432
// max_R = (298 * (255 - 16) + 409 * (255 - 128)) / 256 = 481
// clip range -278 .. 535
signed y1 = (signed)src_y[x] - 16;
signed y2 = (signed)src_y[x + 1] - 16;
// One chroma sample covers each 2x2 pixel block.
signed u = (signed)src_u[x / 2] - 128;
signed v = (signed)src_v[x / 2] - 128;
signed u_b = u * 517;
signed u_g = -u * 100;
signed v_g = -v * 208;
signed v_r = v * 409;
signed tmp1 = y1 * 298;
signed b1 = (tmp1 + u_b) / 256;
signed g1 = (tmp1 + v_g + u_g) / 256;
signed r1 = (tmp1 + v_r) / 256;
signed tmp2 = y2 * 298;
signed b2 = (tmp2 + u_b) / 256;
signed g2 = (tmp2 + v_g + u_g) / 256;
signed r2 = (tmp2 + v_r) / 256;
// Clamp via the biased lookup table, then pack 5-6-5.
uint32_t rgb1 =
((kAdjustedClip[r1] >> 3) << 11)
| ((kAdjustedClip[g1] >> 2) << 5)
| (kAdjustedClip[b1] >> 3);
uint32_t rgb2 =
((kAdjustedClip[r2] >> 3) << 11)
| ((kAdjustedClip[g2] >> 2) << 5)
| (kAdjustedClip[b2] >> 3);
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
src_y += width;
// Chroma rows advance every other luma row (4:2:0 subsampling).
if (y & 1) {
src_u += width / 2;
src_v += width / 2;
}
dst_ptr += dstSkip / 4;
}
}
// Converts QCOM 4:2:0 semi-planar output (full Y plane followed by an
// interleaved chroma plane) to RGB565.  Same fixed-point math as the
// planar path.  NOTE(review): the 565 packing below puts the "b"
// result in the high 5-bit field and "r" in the low one — the reverse
// of the other converters.  This presumably compensates for the QCOM
// buffer being YVU (Cr first) while the locals are still named u/v;
// confirm against the QCOM format definition before "fixing" it.
void ColorConverter::convertQCOMYUV420SemiPlanar(
size_t width, size_t height,
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip) {
CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
CHECK(dstSkip >= width * 2);
CHECK((dstSkip & 3) == 0);
uint8_t *kAdjustedClip = initClip();
uint32_t *dst_ptr = (uint32_t *)dstBits;
const uint8_t *src_y = (const uint8_t *)srcBits;
// Interleaved chroma plane directly follows the Y plane.
const uint8_t *src_u =
(const uint8_t *)src_y + width * height;
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; x += 2) {
signed y1 = (signed)src_y[x] - 16;
signed y2 = (signed)src_y[x + 1] - 16;
// Chroma pair shared by the 2x2 block; x & ~1 rounds x down
// to the pair boundary.
signed u = (signed)src_u[x & ~1] - 128;
signed v = (signed)src_u[(x & ~1) + 1] - 128;
signed u_b = u * 517;
signed u_g = -u * 100;
signed v_g = -v * 208;
signed v_r = v * 409;
signed tmp1 = y1 * 298;
signed b1 = (tmp1 + u_b) / 256;
signed g1 = (tmp1 + v_g + u_g) / 256;
signed r1 = (tmp1 + v_r) / 256;
signed tmp2 = y2 * 298;
signed b2 = (tmp2 + u_b) / 256;
signed g2 = (tmp2 + v_g + u_g) / 256;
signed r2 = (tmp2 + v_r) / 256;
// See NOTE(review) above: b/r fields are swapped relative to
// the other convert*() methods.
uint32_t rgb1 =
((kAdjustedClip[b1] >> 3) << 11)
| ((kAdjustedClip[g1] >> 2) << 5)
| (kAdjustedClip[r1] >> 3);
uint32_t rgb2 =
((kAdjustedClip[b2] >> 3) << 11)
| ((kAdjustedClip[g2] >> 2) << 5)
| (kAdjustedClip[r2] >> 3);
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
src_y += width;
// The interleaved chroma row advances every other luma row.
if (y & 1) {
src_u += width;
}
dst_ptr += dstSkip / 4;
}
}
// Lazily builds a clamp table spanning every intermediate value the
// fixed-point YUV->RGB math can produce (see the range derivation in
// convertYUV420Planar), and returns a pointer biased so callers may
// index it directly with any value in [kClipMin, kClipMax].
uint8_t *ColorConverter::initClip() {
    static const signed kClipMin = -278;
    static const signed kClipMax = 535;

    if (mClip == NULL) {
        const size_t tableSize = (size_t)(kClipMax - kClipMin + 1);
        mClip = new uint8_t[tableSize];

        for (size_t j = 0; j < tableSize; ++j) {
            const signed value = (signed)j + kClipMin;
            mClip[j] =
                value < 0 ? 0 : (value > 255 ? 255 : (uint8_t)value);
        }
    }

    // Biased base pointer: &mClip[-kClipMin] + i is valid for all
    // i in [kClipMin, kClipMax].
    return &mClip[-kClipMin];
}
} // namespace android

View File

@@ -283,7 +283,7 @@ status_t OMX::allocate_node(const char *name, node_id *node) {
&handle, const_cast<char *>(name), meta, &kCallbacks);
if (err != OMX_ErrorNone) {
LOGE("FAILED to allocate omx component '%s'", name);
LOGV("FAILED to allocate omx component '%s'", name);
delete meta;
meta = NULL;

View File

@@ -24,14 +24,13 @@
namespace android {
#define QCOM_YUV 0
SoftwareRenderer::SoftwareRenderer(
OMX_COLOR_FORMATTYPE colorFormat,
const sp<ISurface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight)
: mColorFormat(colorFormat),
mConverter(colorFormat, OMX_COLOR_Format16bitRGB565),
mISurface(surface),
mDisplayWidth(displayWidth),
mDisplayHeight(displayHeight),
@@ -39,12 +38,12 @@ SoftwareRenderer::SoftwareRenderer(
mDecodedHeight(decodedHeight),
mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565
mMemoryHeap(new MemoryHeapBase(2 * mFrameSize)),
mIndex(0),
mClip(NULL) {
mIndex(0) {
CHECK(mISurface.get() != NULL);
CHECK(mDecodedWidth > 0);
CHECK(mDecodedHeight > 0);
CHECK(mMemoryHeap->heapID() >= 0);
CHECK(mConverter.isValid());
ISurface::BufferHeap bufferHeap(
mDisplayWidth, mDisplayHeight,
@@ -58,278 +57,19 @@ SoftwareRenderer::SoftwareRenderer(
SoftwareRenderer::~SoftwareRenderer() {
mISurface->unregisterBuffers();
delete[] mClip;
mClip = NULL;
}
void SoftwareRenderer::render(
const void *data, size_t size, void *platformPrivate) {
static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
switch (mColorFormat) {
case OMX_COLOR_FormatYUV420Planar:
return renderYUV420Planar(data, size);
case OMX_COLOR_FormatCbYCrY:
return renderCbYCrY(data, size);
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
return renderQCOMYUV420SemiPlanar(data, size);
default:
{
LOGW("Cannot render color format %ld", mColorFormat);
break;
}
}
}
void SoftwareRenderer::renderYUV420Planar(
const void *data, size_t size) {
if (size != (mDecodedHeight * mDecodedWidth * 3) / 2) {
LOGE("size is %d, expected %d",
size, (mDecodedHeight * mDecodedWidth * 3) / 2);
}
CHECK(size >= (mDecodedWidth * mDecodedHeight * 3) / 2);
uint8_t *kAdjustedClip = initClip();
size_t offset = mIndex * mFrameSize;
void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
uint32_t *dst_ptr = (uint32_t *)dst;
const uint8_t *src_y = (const uint8_t *)data;
const uint8_t *src_u =
(const uint8_t *)src_y + mDecodedWidth * mDecodedHeight;
#if !QCOM_YUV
const uint8_t *src_v =
(const uint8_t *)src_u + (mDecodedWidth / 2) * (mDecodedHeight / 2);
#endif
for (size_t y = 0; y < mDecodedHeight; ++y) {
for (size_t x = 0; x < mDecodedWidth; x += 2) {
// B = 1.164 * (Y - 16) + 2.018 * (U - 128)
// G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
// R = 1.164 * (Y - 16) + 1.596 * (V - 128)
// B = 298/256 * (Y - 16) + 517/256 * (U - 128)
// G = .................. - 208/256 * (V - 128) - 100/256 * (U - 128)
// R = .................. + 409/256 * (V - 128)
// min_B = (298 * (- 16) + 517 * (- 128)) / 256 = -277
// min_G = (298 * (- 16) - 208 * (255 - 128) - 100 * (255 - 128)) / 256 = -172
// min_R = (298 * (- 16) + 409 * (- 128)) / 256 = -223
// max_B = (298 * (255 - 16) + 517 * (255 - 128)) / 256 = 534
// max_G = (298 * (255 - 16) - 208 * (- 128) - 100 * (- 128)) / 256 = 432
// max_R = (298 * (255 - 16) + 409 * (255 - 128)) / 256 = 481
// clip range -278 .. 535
signed y1 = (signed)src_y[x] - 16;
signed y2 = (signed)src_y[x + 1] - 16;
#if QCOM_YUV
signed u = (signed)src_u[x & ~1] - 128;
signed v = (signed)src_u[(x & ~1) + 1] - 128;
#else
signed u = (signed)src_u[x / 2] - 128;
signed v = (signed)src_v[x / 2] - 128;
#endif
signed u_b = u * 517;
signed u_g = -u * 100;
signed v_g = -v * 208;
signed v_r = v * 409;
signed tmp1 = y1 * 298;
signed b1 = (tmp1 + u_b) / 256;
signed g1 = (tmp1 + v_g + u_g) / 256;
signed r1 = (tmp1 + v_r) / 256;
signed tmp2 = y2 * 298;
signed b2 = (tmp2 + u_b) / 256;
signed g2 = (tmp2 + v_g + u_g) / 256;
signed r2 = (tmp2 + v_r) / 256;
uint32_t rgb1 =
((kAdjustedClip[r1] >> 3) << 11)
| ((kAdjustedClip[g1] >> 2) << 5)
| (kAdjustedClip[b1] >> 3);
uint32_t rgb2 =
((kAdjustedClip[r2] >> 3) << 11)
| ((kAdjustedClip[g2] >> 2) << 5)
| (kAdjustedClip[b2] >> 3);
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
src_y += mDecodedWidth;
if (y & 1) {
#if QCOM_YUV
src_u += mDecodedWidth;
#else
src_u += mDecodedWidth / 2;
src_v += mDecodedWidth / 2;
#endif
}
dst_ptr += mDecodedWidth / 2;
}
mConverter.convert(
mDecodedWidth, mDecodedHeight,
data, 0, dst, 2 * mDecodedWidth);
mISurface->postBuffer(offset);
mIndex = 1 - mIndex;
}
// Converts one interleaved 4:2:2 CbYCrY frame to RGB565 into the
// current half of the double-buffered memory heap, then posts it to
// the surface.  Same fixed-point math as ColorConverter::convertCbYCrY.
void SoftwareRenderer::renderCbYCrY(
const void *data, size_t size) {
// A size mismatch is logged but tolerated as long as the buffer is
// at least one full frame.
if (size != (mDecodedHeight * mDecodedWidth * 2)) {
LOGE("size is %d, expected %d",
size, (mDecodedHeight * mDecodedWidth * 2));
}
CHECK(size >= (mDecodedWidth * mDecodedHeight * 2));
uint8_t *kAdjustedClip = initClip();
// Alternate between the two frame-sized halves of the heap.
size_t offset = mIndex * mFrameSize;
void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
uint32_t *dst_ptr = (uint32_t *)dst;
const uint8_t *src = (const uint8_t *)data;
for (size_t y = 0; y < mDecodedHeight; ++y) {
for (size_t x = 0; x < mDecodedWidth; x += 2) {
// Byte order within a pixel pair: Cb, Y1, Cr, Y2.
signed y1 = (signed)src[2 * x + 1] - 16;
signed y2 = (signed)src[2 * x + 3] - 16;
signed u = (signed)src[2 * x] - 128;
signed v = (signed)src[2 * x + 2] - 128;
signed u_b = u * 517;
signed u_g = -u * 100;
signed v_g = -v * 208;
signed v_r = v * 409;
signed tmp1 = y1 * 298;
signed b1 = (tmp1 + u_b) / 256;
signed g1 = (tmp1 + v_g + u_g) / 256;
signed r1 = (tmp1 + v_r) / 256;
signed tmp2 = y2 * 298;
signed b2 = (tmp2 + u_b) / 256;
signed g2 = (tmp2 + v_g + u_g) / 256;
signed r2 = (tmp2 + v_r) / 256;
// Clamp via the biased table, pack 5-6-5, store two pixels.
uint32_t rgb1 =
((kAdjustedClip[r1] >> 3) << 11)
| ((kAdjustedClip[g1] >> 2) << 5)
| (kAdjustedClip[b1] >> 3);
uint32_t rgb2 =
((kAdjustedClip[r2] >> 3) << 11)
| ((kAdjustedClip[g2] >> 2) << 5)
| (kAdjustedClip[b2] >> 3);
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
src += mDecodedWidth * 2;
dst_ptr += mDecodedWidth / 2;
}
mISurface->postBuffer(offset);
// Flip to the other half of the heap for the next frame.
mIndex = 1 - mIndex;
}
// Converts one QCOM 4:2:0 semi-planar frame (Y plane + interleaved
// chroma plane) to RGB565 into the current half of the double-buffered
// heap, then posts it.  NOTE(review): as in
// ColorConverter::convertQCOMYUV420SemiPlanar, the 565 packing swaps
// the b/r fields relative to the other render paths — presumably to
// compensate for YVU (Cr-first) chroma ordering; confirm before changing.
void SoftwareRenderer::renderQCOMYUV420SemiPlanar(
const void *data, size_t size) {
// A size mismatch is logged but tolerated as long as the buffer is
// at least one full frame.
if (size != (mDecodedHeight * mDecodedWidth * 3) / 2) {
LOGE("size is %d, expected %d",
size, (mDecodedHeight * mDecodedWidth * 3) / 2);
}
CHECK(size >= (mDecodedWidth * mDecodedHeight * 3) / 2);
uint8_t *kAdjustedClip = initClip();
size_t offset = mIndex * mFrameSize;
void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
uint32_t *dst_ptr = (uint32_t *)dst;
const uint8_t *src_y = (const uint8_t *)data;
const uint8_t *src_u =
(const uint8_t *)src_y + mDecodedWidth * mDecodedHeight;
for (size_t y = 0; y < mDecodedHeight; ++y) {
for (size_t x = 0; x < mDecodedWidth; x += 2) {
signed y1 = (signed)src_y[x] - 16;
signed y2 = (signed)src_y[x + 1] - 16;
// Chroma pair shared by the 2x2 block.
signed u = (signed)src_u[x & ~1] - 128;
signed v = (signed)src_u[(x & ~1) + 1] - 128;
signed u_b = u * 517;
signed u_g = -u * 100;
signed v_g = -v * 208;
signed v_r = v * 409;
signed tmp1 = y1 * 298;
signed b1 = (tmp1 + u_b) / 256;
signed g1 = (tmp1 + v_g + u_g) / 256;
signed r1 = (tmp1 + v_r) / 256;
signed tmp2 = y2 * 298;
signed b2 = (tmp2 + u_b) / 256;
signed g2 = (tmp2 + v_g + u_g) / 256;
signed r2 = (tmp2 + v_r) / 256;
uint32_t rgb1 =
((kAdjustedClip[b1] >> 3) << 11)
| ((kAdjustedClip[g1] >> 2) << 5)
| (kAdjustedClip[r1] >> 3);
uint32_t rgb2 =
((kAdjustedClip[b2] >> 3) << 11)
| ((kAdjustedClip[g2] >> 2) << 5)
| (kAdjustedClip[r2] >> 3);
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
src_y += mDecodedWidth;
// Interleaved chroma row advances every other luma row.
if (y & 1) {
src_u += mDecodedWidth;
}
dst_ptr += mDecodedWidth / 2;
}
mISurface->postBuffer(offset);
mIndex = 1 - mIndex;
}
// Lazily builds the same biased clamp table as ColorConverter::initClip:
// indices in [kClipMin, kClipMax] map to values clamped to [0, 255].
uint8_t *SoftwareRenderer::initClip() {
static const signed kClipMin = -278;
static const signed kClipMax = 535;
if (mClip == NULL) {
mClip = new uint8_t[kClipMax - kClipMin + 1];
for (signed i = kClipMin; i <= kClipMax; ++i) {
mClip[i - kClipMin] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i;
}
}
// Biased pointer: valid for any index in [kClipMin, kClipMax].
return &mClip[-kClipMin];
}
} // namespace android