am fd499830: Merge change I595c2d60 into eclair-mr2

Merge commit 'fd49983009e9e68de5d3f7f5c93689c8da1a9b6c' into eclair-mr2-plus-aosp

* commit 'fd49983009e9e68de5d3f7f5c93689c8da1a9b6c':
  Initial checkin of software AVC video decoder based on PV source code.
This commit is contained in:
Andreas Huber
2009-12-11 08:43:01 -08:00
committed by Android Git Automerger
38 changed files with 16372 additions and 18 deletions

View File

@@ -56,10 +56,14 @@ ifeq ($(BUILD_WITH_FULL_STAGEFRIGHT),true)
LOCAL_STATIC_LIBRARIES := \ LOCAL_STATIC_LIBRARIES := \
libstagefright_aacdec \ libstagefright_aacdec \
libstagefright_amrnbdec \ libstagefright_amrnbdec \
libstagefright_amrnb_common \
libstagefright_amrwbdec \ libstagefright_amrwbdec \
libstagefright_avcdec \
libstagefright_mp3dec libstagefright_mp3dec
LOCAL_SHARED_LIBRARIES += \
libstagefright_amrnb_common \
libstagefright_avc_common
endif endif
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true) ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)

View File

@@ -22,6 +22,7 @@
#include "include/AACDecoder.h" #include "include/AACDecoder.h"
#include "include/AMRNBDecoder.h" #include "include/AMRNBDecoder.h"
#include "include/AMRWBDecoder.h" #include "include/AMRWBDecoder.h"
#include "include/AVCDecoder.h"
#include "include/MP3Decoder.h" #include "include/MP3Decoder.h"
#endif #endif
@@ -300,6 +301,9 @@ sp<MediaSource> OMXCodec::Create(
return new AMRWBDecoder(source); return new AMRWBDecoder(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
return new MP3Decoder(source); return new MP3Decoder(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
&& (flags & kPreferSoftwareCodecs)) {
return new AVCDecoder(source);
} }
#endif #endif

View File

@@ -149,10 +149,6 @@ LOCAL_CFLAGS := -DAAC_PLUS -DHQ_SBR -DPARAMETRICSTEREO -DOSCL_IMPORT_REF= -DOSCL
LOCAL_C_INCLUDES := frameworks/base/media/libstagefright/include LOCAL_C_INCLUDES := frameworks/base/media/libstagefright/include
LOCAL_SHARED_LIBRARIES := \
libstagefright \
libutils
LOCAL_MODULE := libstagefright_aacdec LOCAL_MODULE := libstagefright_aacdec
include $(BUILD_STATIC_LIBRARY) include $(BUILD_STATIC_LIBRARY)

View File

@@ -69,6 +69,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \ LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF=
LOCAL_PRELINK_MODULE:= false
LOCAL_MODULE := libstagefright_amrnb_common LOCAL_MODULE := libstagefright_amrnb_common
include $(BUILD_STATIC_LIBRARY) include $(BUILD_SHARED_LIBRARY)

View File

@@ -49,10 +49,6 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \ LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
LOCAL_SHARED_LIBRARIES := \
libstagefright \
libutils
LOCAL_MODULE := libstagefright_amrnbdec LOCAL_MODULE := libstagefright_amrnbdec
include $(BUILD_STATIC_LIBRARY) include $(BUILD_STATIC_LIBRARY)

View File

@@ -51,10 +51,6 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \ LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
LOCAL_SHARED_LIBRARIES := \
libstagefright \
libutils
LOCAL_MODULE := libstagefright_amrwbdec LOCAL_MODULE := libstagefright_amrwbdec
include $(BUILD_STATIC_LIBRARY) include $(BUILD_STATIC_LIBRARY)

View File

@@ -0,0 +1,4 @@
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
include $(call all-makefiles-under,$(LOCAL_PATH))

View File

@@ -0,0 +1,21 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
src/deblock.cpp \
src/dpb.cpp \
src/fmo.cpp \
src/mb_access.cpp \
src/reflist.cpp
LOCAL_MODULE := libstagefright_avc_common
LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF=
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
$(LOCAL_PATH)/include
LOCAL_PRELINK_MODULE:= false
include $(BUILD_SHARED_LIBRARY)

View File

@@ -0,0 +1,14 @@
#ifndef AVC_TYPES_H_
#define AVC_TYPES_H_
#include <stdint.h>
/* Fixed-width integer shorthands used throughout the PV AVC codec sources.
   They simply alias the C99 <stdint.h> exact-width types. */
typedef uint8_t uint8;
typedef uint16_t uint16;
typedef int16_t int16;
typedef uint32_t uint32;
typedef int32_t int32;
/* Generic unsigned int; width is platform-dependent (at least 16 bits,
   in practice 32 on Android targets). */
typedef unsigned int uint;
#endif  // AVC_TYPES_H_

View File

@@ -0,0 +1,274 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains common type definitions and enumerations used by AVC encoder
and decoder libraries which are exposed to the users.
@publishedAll
*/
#ifndef AVCAPI_COMMON_H_INCLUDED
#define AVCAPI_COMMON_H_INCLUDED
#include "avc_types.h"
#define PV_MEMORY_POOL
/**
This is common return status.
@publishedAll
*/
typedef enum
{
AVC_NO_BUFFER = -2,
AVC_MEMORY_FAIL = -1,
AVC_FAIL = 0,
AVC_SUCCESS = 1,
AVC_PICTURE_OUTPUT_READY = 2
} AVCStatus;
/**
This enumeration is for profiles. The value follows the profile_idc in sequence
parameter set rbsp. See Annex A.
@publishedAll
*/
typedef enum
{
AVC_BASELINE = 66,
AVC_MAIN = 77,
AVC_EXTENDED = 88,
AVC_HIGH = 100,
AVC_HIGH10 = 110,
AVC_HIGH422 = 122,
AVC_HIGH444 = 144
} AVCProfile;
/**
This enumeration is for levels. The value follows the level_idc in sequence
parameter set rbsp. See Annex A.
@publishedAll
*/
typedef enum
{
AVC_LEVEL_AUTO = 0,
AVC_LEVEL1_B = 9,
AVC_LEVEL1 = 10,
AVC_LEVEL1_1 = 11,
AVC_LEVEL1_2 = 12,
AVC_LEVEL1_3 = 13,
AVC_LEVEL2 = 20,
AVC_LEVEL2_1 = 21,
AVC_LEVEL2_2 = 22,
AVC_LEVEL3 = 30,
AVC_LEVEL3_1 = 31,
AVC_LEVEL3_2 = 32,
AVC_LEVEL4 = 40,
AVC_LEVEL4_1 = 41,
AVC_LEVEL4_2 = 42,
AVC_LEVEL5 = 50,
AVC_LEVEL5_1 = 51
} AVCLevel;
/**
This enumeration follows Table 7-1 for NAL unit type codes.
This may go to avccommon_api.h later (external common).
@publishedAll
*/
typedef enum
{
AVC_NALTYPE_SLICE = 1, /* non-IDR non-data partition */
AVC_NALTYPE_DPA = 2, /* data partition A */
AVC_NALTYPE_DPB = 3, /* data partition B */
AVC_NALTYPE_DPC = 4, /* data partition C */
AVC_NALTYPE_IDR = 5, /* IDR NAL */
AVC_NALTYPE_SEI = 6, /* supplemental enhancement info */
AVC_NALTYPE_SPS = 7, /* sequence parameter set */
AVC_NALTYPE_PPS = 8, /* picture parameter set */
AVC_NALTYPE_AUD = 9, /* access unit delimiter */
AVC_NALTYPE_EOSEQ = 10, /* end of sequence */
AVC_NALTYPE_EOSTREAM = 11, /* end of stream */
AVC_NALTYPE_FILL = 12 /* filler data */
} AVCNalUnitType;
/**
This enumeration specifies debug logging type.
This may go to avccommon_api.h later (external common).
@publishedAll
*/
typedef enum
{
AVC_LOGTYPE_ERROR = 0,
AVC_LOGTYPE_WARNING = 1,
AVC_LOGTYPE_INFO = 2
} AVCLogType;
/**
This enumerates the status of certain flags.
@publishedAll
*/
typedef enum
{
AVC_OFF = 0,
AVC_ON = 1
} AVCFlag;
/**
This structure contains input information.
Note, this structure is identical to AVCDecOutput for now.
*/
typedef struct tagAVCFrameIO
{
/** A unique identification number for a particular instance of this structure.
To remain unchanged by the application between the time when it is given to the
library and the time when the library returns it back. */
uint32 id;
/** Array of pointers to Y,Cb,Cr content in 4:2:0 format. For AVC decoding,
this memory is allocated by the AVC decoder library. For AVC encoding, only the
memory for original unencoded frame is allocated by the application. Internal
memory is also allocated by the AVC encoder library. */
uint8 *YCbCr[3];
/** In/Out: Coded width of the luma component, it has to be multiple of 16. */
int pitch;
/** In/Out: Coded height of the luma component, must be multiple of 16. */
int height;
/** In/Out: Display width, less than pitch */
int clip_width;
/** In/Out: Display height, less than height */
int clip_height;
/** Input: Origin of the display area [0]=>row, [1]=>column */
int clip_origin[2];
/** Output: Frame number in de/encoding order (not necessary)*/
uint32 coding_order;
/** Output: Frame number in displaying order (this may or may not be associated with the POC at all!!!). */
uint32 disp_order;
/** In/Out: Flag for use for reference or not. */
uint is_reference;
/** In/Out: Coding timestamp in msec (not display timestamp) */
uint32 coding_timestamp;
/* there could be something else here such as format, DON (decoding order number)
if available thru SEI, etc. */
} AVCFrameIO;
/** CALLBACK FUNCTION TO BE IMPLEMENTED BY APPLICATION */
/** In AVCDecControls structure, userData is a pointer to an object with the following
member functions.
*/
/** @brief Decoded picture buffers (DPB) must be allocated or re-allocated before an
IDR frame is decoded. If PV_MEMORY_POOL is not defined, AVC lib will allocate DPB
internally which cannot be shared with the application. In that case, this function
will not be called.
@param userData The same value of userData in AVCHandle object.
@param frame_size_in_mbs The size of each frame in number of macroblocks.
@param num_frames The number of frames in DPB.
@return 1 for success, 0 for fail (cannot allocate DPB)
*/
typedef int (*FunctionType_DPBAlloc)(void *userData, uint frame_size_in_mbs, uint num_buffers);
/** @brief AVC library calls this function to reserve a memory of one frame from the DPB.
Once reserved, this frame shall not be deleted or over-written by the app.
@param userData The same value of userData in AVCHandle object.
@param indx Index of a frame in DPB (AVC library keeps track of the index).
@param yuv The address of the yuv pointer returned to the AVC lib.
@return 1 for success, 0 for fail (no frames available to bind).
*/
typedef int (*FunctionType_FrameBind)(void *userData, int indx, uint8 **yuv);
/** @brief AVC library calls this function once a bound frame is not needed for decoding
operation (falls out of the sliding window, or marked unused for reference).
@param userData The same value of userData in AVCHandle object.
@param indx Index of frame to be unbound (AVC library keeps track of the index).
@return none.
NOTE(review): "FuctionType" (missing 'n') is a misspelling in the original PV API;
it is part of the public type name and must not be "fixed" without updating all users. */
typedef void (*FuctionType_FrameUnbind)(void *userData, int);
/** Pointer to malloc function for general memory allocation, so that application can keep track of
memory usage.
\param "size" "Size of requested memory in bytes."
\param "attribute" "Some value specifying types, priority, etc. of the memory."
\return "The address of the allocated memory casted to int"
*/
typedef int (*FunctionType_Malloc)(void *userData, int32 size, int attribute);
/** Function pointer to free
\param "mem" "Pointer to the memory to be freed casted to int"
\return "void"
*/
typedef void (*FunctionType_Free)(void *userData, int mem);
/** Debug logging information is returned to the application thru this function.
\param "type" "Type of logging message, see definition of AVCLogType."
\param "string1" "Logging message."
\param "string2" "To be defined."
*/
typedef void (*FunctionType_DebugLog)(uint32 *userData, AVCLogType type, char *string1, int val1, int val2);
/**
This structure has to be allocated and maintained by the user of the library.
This structure is used as a handle to the library object.
*/
typedef struct tagAVCHandle
{
/** A pointer to the internal data structure. Users have to make sure that this value
is NULL at the beginning.
*/
void *AVCObject;
/** A pointer to user object which has the following member functions used for
callback purpose. !!! */
void *userData;
/** Pointers to functions implemented by the users of AVC library */
FunctionType_DPBAlloc CBAVC_DPBAlloc;
FunctionType_FrameBind CBAVC_FrameBind;
FuctionType_FrameUnbind CBAVC_FrameUnbind;
FunctionType_Malloc CBAVC_Malloc;
FunctionType_Free CBAVC_Free;
FunctionType_DebugLog CBAVC_DebugLog;
/** Flag to enable debugging */
uint32 debugEnable;
} AVCHandle;
/* Compiled out to a no-op unless PVDEBUGMSG_LOG is defined at build time. */
#ifdef PVDEBUGMSG_LOG
#define DEBUG_LOG(a,b,c,d,e) CBAVC_DebugLog(a,b,c,d,e)
#else
#define DEBUG_LOG(a,b,c,d,e)
#endif
#endif /* AVCAPI_COMMON_H_INCLUDED */

View File

@@ -0,0 +1,882 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains common code shared between AVC decoder and AVC encoder for
internal use only.
@publishedAll
*/
#ifndef AVCINT_COMMON_H_INCLUDED
#define AVCINT_COMMON_H_INCLUDED
#ifndef AVCAPI_COMMON_H_INCLUDED
#include "avcapi_common.h"
#endif
#ifndef TRUE
#define TRUE 1
#define FALSE 0
#endif
/**
Mathematic functions defined in subclause 5.7.
Can be replaced with assembly instructions for speedup.
@publishedAll
*/
#define AVC_ABS(x) (((x)<0)? -(x) : (x))
#define AVC_SIGN(x) (((x)<0)? -1 : 1)
#define AVC_SIGN0(x) (((x)<0)? -1 : (((x)>0) ? 1 : 0))
#define AVC_MAX(x,y) ((x)>(y)? (x):(y))
#define AVC_MIN(x,y) ((x)<(y)? (x):(y))
/* Median of three values, per the Median() function in subclause 5.7. */
#define AVC_MEDIAN(A,B,C) ((A) > (B) ? ((A) < (C) ? (A) : (B) > (C) ? (B) : (C)): (B) < (C) ? (B) : (C) > (A) ? (C) : (A))
#define AVC_CLIP3(a,b,x) (AVC_MAX(a,AVC_MIN(x,b))) /* clip x between a and b */
#define AVC_CLIP(x) AVC_CLIP3(0,255,x)
#define AVC_FLOOR(x) ((int)(x))
#define AVC_RASTER_SCAN(x,y,n) ((x)+(y)*(n))
#define AVC_ROUND(x) (AVC_SIGN(x)*AVC_FLOOR(AVC_ABS(x)+0.5))
#define AVC_INVERSE_RASTER_SCAN(a,b,c,d,e) (((e)==0)? (((a)%((d)/(b)))*(b)): (((a)/((d)/(b)))*(c)))
/* a:block address, b:block width, c:block height, d:total_width, e:x or y coordinate */
#define DEFAULT_ATTR 0 /* default memory attribute */
#define FAST_MEM_ATTR 1 /* fast memory attribute */
/* This section is for definition of constants. */
#define MB_SIZE 16
#define BLOCK_SIZE 4
#define EMULATION_PREVENTION_THREE_BYTE 0x3
#define NUM_PIXELS_IN_MB (24*16)
#define NUM_BLKS_IN_MB 24
#define AVCNumI4PredMode 9
#define AVCNumI16PredMode 4
#define AVCNumIChromaMode 4
/* constants used in the structures below */
#define MAXIMUMVALUEOFcpb_cnt 32 /* used in HRDParams */
#define MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE 255 /* used in SeqParamSet */
#define MAX_NUM_SLICE_GROUP 8 /* used in PicParamSet */
#define MAX_REF_PIC_LIST_REORDERING 32 /* 32 is maximum according to Annex A, SliceHeader */
#define MAX_DEC_REF_PIC_MARKING 64 /* 64 is the maximum possible given the max num ref pictures to 31. */
#define MAX_FS (16+1) /* pre-defined size of frame store array */
#define MAX_LEVEL_IDX 15 /* only 15 levels defined for now */
#define MAX_REF_PIC_LIST 33 /* max size of the RefPicList0 and RefPicList1 */
/**
Architectural related macros.
@publishedAll
*/
#ifdef USE_PRED_BLOCK
#define MB_BASED_DEBLOCK
#endif
/**
Picture type, PV created.
@publishedAll
*/
typedef enum
{
AVC_FRAME = 3
} AVCPictureType;
/**
This slice type follows Table 7-3. The bottom 5 items may not be needed.
@publishedAll
*/
typedef enum
{
AVC_P_SLICE = 0,
AVC_B_SLICE = 1,
AVC_I_SLICE = 2,
AVC_SP_SLICE = 3,
AVC_SI_SLICE = 4,
AVC_P_ALL_SLICE = 5,
AVC_B_ALL_SLICE = 6,
AVC_I_ALL_SLICE = 7,
AVC_SP_ALL_SLICE = 8,
AVC_SI_ALL_SLICE = 9
} AVCSliceType;
/**
Types of the macroblock and partition. PV Created.
@publishedAll
*/
typedef enum
{
/* intra */
AVC_I4,
AVC_I16,
AVC_I_PCM,
AVC_SI4,
/* inter for both P and B*/
AVC_BDirect16,
AVC_P16,
AVC_P16x8,
AVC_P8x16,
AVC_P8,
AVC_P8ref0,
AVC_SKIP
} AVCMBMode;
/**
Enumeration for sub-macroblock mode, interpreted from sub_mb_type.
@publishedAll
*/
typedef enum
{
/* for sub-partition mode */
AVC_BDirect8,
AVC_8x8,
AVC_8x4,
AVC_4x8,
AVC_4x4
} AVCSubMBMode;
/**
Mode of prediction of partition or sub-partition. PV Created.
Do not change the order!!! Used in table look-up mode prediction in
vlc.c.
@publishedAll
*/
typedef enum
{
AVC_Pred_L0 = 0,
AVC_Pred_L1,
AVC_BiPred,
AVC_Direct
} AVCPredMode;
/**
Mode of intra 4x4 prediction. Table 8-2
@publishedAll
*/
typedef enum
{
AVC_I4_Vertical = 0,
AVC_I4_Horizontal,
AVC_I4_DC,
AVC_I4_Diagonal_Down_Left,
AVC_I4_Diagonal_Down_Right,
AVC_I4_Vertical_Right,
AVC_I4_Horizontal_Down,
AVC_I4_Vertical_Left,
AVC_I4_Horizontal_Up
} AVCIntra4x4PredMode;
/**
Mode of intra 16x16 prediction. Table 8-3
@publishedAll
*/
typedef enum
{
AVC_I16_Vertical = 0,
AVC_I16_Horizontal,
AVC_I16_DC,
AVC_I16_Plane
} AVCIntra16x16PredMode;
/**
Mode of intra chroma prediction. Table 8-4
@publishedAll
*/
typedef enum
{
AVC_IC_DC = 0,
AVC_IC_Horizontal,
AVC_IC_Vertical,
AVC_IC_Plane
} AVCIntraChromaPredMode;
/**
Type of residual going to residual_block_cavlc function, PV created.
@publishedAll
*/
typedef enum
{
AVC_Luma,
AVC_Intra16DC,
AVC_Intra16AC,
AVC_ChromaDC,
AVC_ChromaAC
} AVCResidualType;
/**
This structure contains HRD (hypothetical reference decoder) parameters as
specified in Annex E.
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagHRDParams
{
uint cpb_cnt_minus1; /* ue(v), range 0..31 */
uint bit_rate_scale; /* u(4) */
uint cpb_size_scale; /* u(4) */
uint32 bit_rate_value_minus1[MAXIMUMVALUEOFcpb_cnt];/* ue(v), range 0..2^32-2 */
uint32 cpb_size_value_minus1[MAXIMUMVALUEOFcpb_cnt]; /* ue(v), range 0..2^32-2 */
uint cbr_flag[MAXIMUMVALUEOFcpb_cnt]; /* u(1) */
uint initial_cpb_removal_delay_length_minus1; /* u(5), default 23 */
uint cpb_removal_delay_length_minus1; /* u(5), default 23 */
uint dpb_output_delay_length_minus1; /* u(5), default 23 */
uint time_offset_length; /* u(5), default 24 */
} AVCHRDParams;
/**
This structure contains VUI parameters as specified in Annex E.
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagVUIParam
{
uint aspect_ratio_info_present_flag; /* u(1) */
uint aspect_ratio_idc; /* u(8), table E-1 */
uint sar_width; /* u(16) */
uint sar_height; /* u(16) */
uint overscan_info_present_flag; /* u(1) */
uint overscan_appropriate_flag; /* u(1) */
uint video_signal_type_present_flag; /* u(1) */
uint video_format; /* u(3), Table E-2, default 5, unspecified */
uint video_full_range_flag; /* u(1) */
uint colour_description_present_flag; /* u(1) */
uint colour_primaries; /* u(8), Table E-3, default 2, unspecified */
uint transfer_characteristics; /* u(8), Table E-4, default 2, unspecified */
uint matrix_coefficients; /* u(8), Table E-5, default 2, unspecified */
uint chroma_location_info_present_flag; /* u(1) */
uint chroma_sample_loc_type_top_field; /* ue(v), Fig. E-1, range 0..5, default 0 */
uint chroma_sample_loc_type_bottom_field; /* ue(v) */
uint timing_info_present_flag; /* u(1) */
uint num_units_in_tick; /* u(32), must be > 0 */
uint time_scale; /* u(32), must be > 0 */
uint fixed_frame_rate_flag; /* u(1), Eq. C-13 */
uint nal_hrd_parameters_present_flag; /* u(1) */
AVCHRDParams nal_hrd_parameters; /* hrd_parameters */
uint vcl_hrd_parameters_present_flag; /* u(1) */
AVCHRDParams vcl_hrd_parameters; /* hrd_parameters */
/* if ((nal_hrd_parameters_present_flag || (vcl_hrd_parameters_present_flag)) */
uint low_delay_hrd_flag; /* u(1) */
uint pic_struct_present_flag;
uint bitstream_restriction_flag; /* u(1) */
uint motion_vectors_over_pic_boundaries_flag; /* u(1) */
uint max_bytes_per_pic_denom; /* ue(v), default 2 */
uint max_bits_per_mb_denom; /* ue(v), range 0..16, default 1 */
uint log2_max_mv_length_vertical; /* ue(v), range 0..16, default 16 */
uint log2_max_mv_length_horizontal; /* ue(v), range 0..16, default 16 */
uint max_dec_frame_reordering; /* ue(v) */
uint max_dec_frame_buffering; /* ue(v) */
} AVCVUIParams;
/**
This structure contains information in a sequence parameter set NAL.
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagSeqParamSet
{
uint Valid; /* indicates the parameter set is valid */
uint profile_idc; /* u(8) */
uint constrained_set0_flag; /* u(1) */
uint constrained_set1_flag; /* u(1) */
uint constrained_set2_flag; /* u(1) */
uint constrained_set3_flag; /* u(1) */
uint level_idc; /* u(8) */
uint seq_parameter_set_id; /* ue(v), range 0..31 */
uint log2_max_frame_num_minus4; /* ue(v), range 0..12 */
uint pic_order_cnt_type; /* ue(v), range 0..2 */
/* if( pic_order_cnt_type == 0 ) */
uint log2_max_pic_order_cnt_lsb_minus4; /* ue(v), range 0..12 */
/* else if( pic_order_cnt_type == 1 ) */
uint delta_pic_order_always_zero_flag; /* u(1) */
int32 offset_for_non_ref_pic; /* se(v) */
int32 offset_for_top_to_bottom_field; /* se(v) */
uint num_ref_frames_in_pic_order_cnt_cycle; /* ue(v) , range 0..255 */
/* for( i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++ ) */
int32 offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; /* se(v) */
uint num_ref_frames; /* ue(v), range 0..16 */
uint gaps_in_frame_num_value_allowed_flag; /* u(1) */
uint pic_width_in_mbs_minus1; /* ue(v) */
uint pic_height_in_map_units_minus1; /* ue(v) */
uint frame_mbs_only_flag; /* u(1) */
/* if( !frame_mbs_only_flag ) */
uint mb_adaptive_frame_field_flag; /* u(1) */
uint direct_8x8_inference_flag; /* u(1), must be 1 when frame_mbs_only_flag is 0 */
uint frame_cropping_flag; /* u(1) */
/* if( frame_cropping_flag) */
uint frame_crop_left_offset; /* ue(v) */
uint frame_crop_right_offset; /* ue(v) */
uint frame_crop_top_offset; /* ue(v) */
uint frame_crop_bottom_offset; /* ue(v) */
uint vui_parameters_present_flag; /* u(1) */
// uint nal_hrd_parameters_present_flag;
// uint vcl_hrd_parameters_present_flag;
// AVCHRDParams *nal_hrd_parameters;
// AVCHRDParams *vcl_hrd_parameters;
AVCVUIParams vui_parameters; /* AVCVUIParam */
} AVCSeqParamSet;
/**
This structure contains information in a picture parameter set NAL.
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagPicParamSet
{
uint pic_parameter_set_id; /* ue(v), range 0..255 */
uint seq_parameter_set_id; /* ue(v), range 0..31 */
uint entropy_coding_mode_flag; /* u(1) */
uint pic_order_present_flag; /* u(1) */
uint num_slice_groups_minus1; /* ue(v), range in Annex A */
/* if( num_slice_groups_minus1 > 0) */
uint slice_group_map_type; /* ue(v), range 0..6 */
/* if( slice_group_map_type = = 0 ) */
/* for(0:1:num_slice_groups_minus1) */
uint run_length_minus1[MAX_NUM_SLICE_GROUP]; /* ue(v) */
/* else if( slice_group_map_type = = 2 ) */
/* for(0:1:num_slice_groups_minus1-1) */
uint top_left[MAX_NUM_SLICE_GROUP-1]; /* ue(v) */
uint bottom_right[MAX_NUM_SLICE_GROUP-1]; /* ue(v) */
/* else if( slice_group_map_type = = 3 || 4 || 5 */
uint slice_group_change_direction_flag; /* u(1) */
uint slice_group_change_rate_minus1; /* ue(v) */
/* else if( slice_group_map_type = = 6 ) */
uint pic_size_in_map_units_minus1; /* ue(v) */
/* for(0:1:pic_size_in_map_units_minus1) */
uint *slice_group_id; /* complete MBAmap u(v) */
uint num_ref_idx_l0_active_minus1; /* ue(v), range 0..31 */
uint num_ref_idx_l1_active_minus1; /* ue(v), range 0..31 */
uint weighted_pred_flag; /* u(1) */
uint weighted_bipred_idc; /* u(2), range 0..2 */
int pic_init_qp_minus26; /* se(v), range -26..25 */
int pic_init_qs_minus26; /* se(v), range -26..25 */
int chroma_qp_index_offset; /* se(v), range -12..12 */
uint deblocking_filter_control_present_flag; /* u(1) */
uint constrained_intra_pred_flag; /* u(1) */
uint redundant_pic_cnt_present_flag; /* u(1) */
} AVCPicParamSet;
/**
This structure contains slice header information.
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagSliceHeader
{
uint first_mb_in_slice; /* ue(v) */
AVCSliceType slice_type; /* ue(v), Table 7-3, range 0..9 */
uint pic_parameter_set_id; /* ue(v), range 0..255 */
uint frame_num; /* u(v), see log2max_frame_num_minus4 */
/* if( !frame_mbs_only_flag) */
uint field_pic_flag; /* u(1) */
/* if(field_pic_flag) */
uint bottom_field_flag; /* u(1) */
/* if(nal_unit_type == 5) */
uint idr_pic_id; /* ue(v), range 0..65535 */
/* if(pic_order_cnt_type==0) */
uint pic_order_cnt_lsb; /* u(v), range 0..MaxPicOrderCntLsb-1 */
/* if(pic_order_present_flag && !field_pic_flag) */
int32 delta_pic_order_cnt_bottom; /* se(v) */
/* if(pic_order_cnt_type==1 && !delta_pic_order_always_zero_flag) */
/* if(pic_order_present_flag && !field_pic_flag) */
int32 delta_pic_order_cnt[2];
/* if(redundant_pic_cnt_present_flag) */
uint redundant_pic_cnt; /* ue(v), range 0..127 */
/* if(slice_type == B) */
uint direct_spatial_mv_pred_flag; /* u(1) */
/* if(slice_type == P || slice_type==SP || slice_type==B) */
uint num_ref_idx_active_override_flag; /* u(1) */
/* if(num_ref_idx_active_override_flag) */
uint num_ref_idx_l0_active_minus1; /* ue(v) */
/* if(slice_type == B) */
uint num_ref_idx_l1_active_minus1; /* ue(v) */
/* ref_pic_list_reordering() */
uint ref_pic_list_reordering_flag_l0; /* u(1) */
uint reordering_of_pic_nums_idc_l0[MAX_REF_PIC_LIST_REORDERING]; /* ue(v), range 0..3 */
uint abs_diff_pic_num_minus1_l0[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
uint long_term_pic_num_l0[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
uint ref_pic_list_reordering_flag_l1; /* u(1) */
uint reordering_of_pic_nums_idc_l1[MAX_REF_PIC_LIST_REORDERING]; /* ue(v), range 0..3 */
uint abs_diff_pic_num_minus1_l1[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
uint long_term_pic_num_l1[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
/* end ref_pic_list_reordering() */
/* if(nal_ref_idc!=0) */
/* dec_ref_pic_marking() */
uint no_output_of_prior_pics_flag; /* u(1) */
uint long_term_reference_flag; /* u(1) */
uint adaptive_ref_pic_marking_mode_flag; /* u(1) */
uint memory_management_control_operation[MAX_DEC_REF_PIC_MARKING]; /* ue(v), range 0..6 */
uint difference_of_pic_nums_minus1[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
uint long_term_pic_num[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
uint long_term_frame_idx[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
uint max_long_term_frame_idx_plus1[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
/* end dec_ref_pic_marking() */
/* if(entropy_coding_mode_flag && slice_type!=I && slice_type!=SI) */
uint cabac_init_idc; /* ue(v), range 0..2 */
int slice_qp_delta; /* se(v), range 0..51 */
/* if(slice_type==SP || slice_type==SI) */
/* if(slice_type==SP) */
uint sp_for_switch_flag; /* u(1) */
int slice_qs_delta; /* se(v) */
/* if(deblocking_filter_control_present_flag)*/
uint disable_deblocking_filter_idc; /* ue(v), range 0..2 */
/* if(disable_deblocking_filter_idc!=1) */
int slice_alpha_c0_offset_div2; /* se(v), range -6..6, default 0 */
int slice_beta_offset_div_2; /* se(v), range -6..6, default 0 */
/* if(num_slice_groups_minus1>0 && slice_group_map_type>=3 && slice_group_map_type<=5)*/
uint slice_group_change_cycle; /* u(v), use ceil(log2(PicSizeInMapUnits/SliceGroupChangeRate + 1)) bits*/
} AVCSliceHeader;
/**
This struct contains information about the neighboring pixel.
@publishedAll
*/
typedef struct tagPixPos
{
int available;
int mb_addr; /* macroblock address of the current pixel, see below */
int x; /* x,y positions of current pixel relative to the macroblock mb_addr */
int y;
int pos_x; /* x,y positions of current pixel relative to the picture. */
int pos_y;
} AVCPixelPos;
/* Availability flags for the neighboring macroblocks of the current MB.
   NOTE(review): the original field comments here were copy-pasted from
   AVCPixelPos and described the wrong fields; the names indicate
   availability of the left / top / top-right neighbors — confirm against
   the code that fills this struct. */
typedef struct tagNeighborAvailability
{
int left; /* left neighbor available */
int top; /* top neighbor available */
int top_right; /* top-right neighbor available */
} AVCNeighborAvailability;
/**
This structure contains picture data and related information necessary to be used as
reference frame.
@publishedAll
*/
typedef struct tagPictureData
{
uint16 RefIdx; /* index used for reference frame */
uint8 *Sl; /* derived from base_dpb in AVCFrameStore */
uint8 *Scb; /* for complementary fields, YUV are interlaced */
uint8 *Scr; /* Sl of top_field and bottom_fields will be one line apart and the
stride will be 2 times the width. */
/* For non-complementary field, the above still applies. A special
output formatting is required. */
/* Then, necessary variables that need to be stored */
AVCPictureType picType; /* frame, top-field or bot-field */
/*bool*/
uint isReference;
/*bool*/
uint isLongTerm;
int PicOrderCnt;
int PicNum;
int LongTermPicNum;
int width; /* how many pixel per line */
int height;/* how many line */
int pitch; /* how many pixel between the line */
uint padded; /* flag for being padded */
} AVCPictureData;
/**
This structure contains information for frame storage.
@publishedAll
*/
typedef struct tagFrameStore
{
uint8 *base_dpb; /* base pointer for the YCbCr */
int IsReference; /* 0=not used for ref; 1=top used; 2=bottom used; 3=both fields (or frame) used */
int IsLongTerm; /* 0=not used for ref; 1=top used; 2=bottom used; 3=both fields (or frame) used */
/* if IsLongTerm is true, IsReference can be ignored. */
/* if IsReference is true, IsLongterm will be checked for short-term or long-term. */
/* IsUsed must be true to enable the validity of IsReference and IsLongTerm */
int IsOutputted; /* has it been outputted via AVCDecGetOutput API, then don't output it again,
wait until it is returned. */
AVCPictureData frame;
int FrameNum;
int FrameNumWrap;
int LongTermFrameIdx;
int PicOrderCnt; /* of the frame, smaller of the 2 fields */
} AVCFrameStore;
/**
This structure maintains the actual memory for the decoded picture buffer (DPB) which is
allocated at the beginning according to profile/level.
Once decoded_picture_buffer is allocated, Sl,Scb,Scr in
AVCPictureData structure just point to the address in decoded_picture_buffer.
used_size maintains the used space.
NOTE:: In order to maintain contiguous memory space, memory equal to a single frame is
assigned at a time. Two opposite fields reside in the same frame memory.
|-------|---|---|---|xxx|-------|xxx|---|-------| decoded_picture_buffer
frame top bot top frame bot frame
0 1 1 2 3 4 5
bot 2 and top 4 do not exist, the memory is not used.
@publishedAll
*/
typedef struct tagDecPicBuffer
{
uint8 *decoded_picture_buffer; /* actual memory */
uint32 dpb_size; /* size of dpb in bytes */
uint32 used_size; /* used size */
struct tagFrameStore *fs[MAX_FS]; /* list of frame stored, actual buffer */
int num_fs; /* size of fs */
} AVCDecPicBuffer;
/**
This structure contains macroblock related variables.
@publishedAll
*/
typedef struct tagMacroblock
{
AVCIntraChromaPredMode intra_chroma_pred_mode; /* ue(v) */
int32 mvL0[16]; /* motion vectors, 16 bit packed (x,y) per element */
int32 mvL1[16];
int16 ref_idx_L0[4];
int16 ref_idx_L1[4];
uint16 RefIdx[4]; /* ref index, has value of AVCPictureData->RefIdx */
/* stored data */
/*bool*/
uint mb_intra; /* intra flag */
/*bool*/
uint mb_bottom_field;
AVCMBMode mbMode; /* type of MB prediction */
AVCSubMBMode subMbMode[4]; /* for each 8x8 partition */
uint CBP; /* CodedBlockPattern */
AVCIntra16x16PredMode i16Mode; /* Intra16x16PredMode */
AVCIntra4x4PredMode i4Mode[16]; /* Intra4x4PredMode, in raster scan order */
int NumMbPart; /* number of partition */
AVCPredMode MBPartPredMode[4][4]; /* prediction mode [MBPartIndx][subMBPartIndx] */
int MbPartWidth;
int MbPartHeight;
int NumSubMbPart[4]; /* for each 8x8 partition */
int SubMbPartWidth[4]; /* for each 8x8 partition */
int SubMbPartHeight[4]; /* for each 8x8 partition */
uint8 nz_coeff[NUM_BLKS_IN_MB]; /* [blk_y][blk_x], Chroma is [4..5][0...3], see predict_nnz() function */
int QPy; /* Luma QP */
int QPc; /* Chroma QP */
int QSc; /* Chroma QP S-picture */
int slice_id; // MC slice
} AVCMacroblock;
/**
This structure contains common internal variables between the encoder and decoder
such that some functions can be shared among them.
@publishedAll
*/
typedef struct tagCommonObj
{
    /* put these 2 up here to make sure they are word-aligned */
    int16 block[NUM_PIXELS_IN_MB];  /* for transformed residue coefficient */
    uint8 *pred_block;   /* pointer to prediction block, could point to a frame */
#ifdef USE_PRED_BLOCK
    uint8 pred[688];    /* for prediction */
    /* Luma [0-399], Cb [400-543], Cr[544-687] */
#endif
    int pred_pitch; /* either equal to 20 or to frame pitch */
    /* temporary buffers for intra prediction */
    /* these variables should remain inside fast RAM */
#ifdef MB_BASED_DEBLOCK
    uint8 *intra_pred_top;  /* a row of pixel for intra prediction */
    uint8 intra_pred_left[17];  /* a column of pixel for intra prediction */
    uint8 *intra_pred_top_cb;
    uint8 intra_pred_left_cb[9];
    uint8 *intra_pred_top_cr;
    uint8 intra_pred_left_cr[9];
#endif
    /* pointer to the prediction area for intra prediction */
    uint8 *pintra_pred_top;     /* pointer to the top intra prediction value */
    uint8 *pintra_pred_left;    /* pointer to the left intra prediction value */
    uint8 intra_pred_topleft;   /* the [-1,-1] neighboring pixel */
    uint8 *pintra_pred_top_cb;
    uint8 *pintra_pred_left_cb;
    uint8 intra_pred_topleft_cb;
    uint8 *pintra_pred_top_cr;
    uint8 *pintra_pred_left_cr;
    uint8 intra_pred_topleft_cr;
    int QPy;        /* current luma quantization parameter */
    int QPc;        /* current chroma quantization parameter */
    int QPy_div_6;  /* cached QPy/6, used for dequant table lookup */
    int QPy_mod_6;  /* cached QPy%6 */
    int QPc_div_6;  /* cached QPc/6 */
    int QPc_mod_6;  /* cached QPc%6 */
    /**** nal_unit ******/
    /* previously in AVCNALUnit format */
    uint NumBytesInRBSP;
    int forbidden_bit;
    int nal_ref_idc;
    AVCNalUnitType nal_unit_type;
    AVCNalUnitType prev_nal_unit_type;
    /*bool*/
    uint slice_data_partitioning; /* flag when nal_unit_type is between 2 and 4 */
    /**** ******** ******/
    AVCSliceType slice_type;
    AVCDecPicBuffer *decPicBuf; /* decoded picture buffer */
    AVCSeqParamSet *currSeqParams; /* the currently used one */
    AVCPicParamSet *currPicParams; /* the currently used one */
    uint seq_parameter_set_id;
    /* slice header */
    AVCSliceHeader *sliceHdr;   /* slice header param syntax variables */
    AVCPictureData *currPic; /* pointer to current picture */
    AVCFrameStore *currFS; /* pointer to current frame store */
    AVCPictureType currPicType; /* frame, top-field or bot-field */
    /*bool*/
    uint newPic; /* flag for new picture */
    uint newSlice; /* flag for new slice */
    AVCPictureData *prevRefPic; /* pointer to previous picture */
    AVCMacroblock *mblock; /* array of macroblocks covering entire picture */
    AVCMacroblock *currMB; /* pointer to current macroblock */
    uint mbNum; /* number of current MB */
    int mb_x;  /* x-coordinate of the current mbNum */
    int mb_y;  /* y-coordinate of the current mbNum */
    /* For internal operation, scratch memory for MV, prediction, transform, etc.*/
    uint32 cbp4x4; /* each bit represent nonzero 4x4 block in reverse raster scan order */
    /* starting from luma, Cb and Cr, lsb toward msb */
    int mvd_l0[4][4][2]; /*[mbPartIdx][subMbPartIdx][compIdx], se(v) */
    int mvd_l1[4][4][2]; /*[mbPartIdx][subMbPartIdx][compIdx], se(v) */
    int mbAddrA, mbAddrB, mbAddrC, mbAddrD; /* address of neighboring MBs */
    /*bool*/
    uint mbAvailA, mbAvailB, mbAvailC, mbAvailD; /* availability */
    /*bool*/
    uint intraAvailA, intraAvailB, intraAvailC, intraAvailD; /* for intra mode */
    /***********************************************/
    /* The following variables are defined in the draft. */
    /* They may need to be stored in PictureData structure and used for reference. */
    /* In that case, just move or copy it to AVCDecPictureData structure. */
    int padded_size;        /* size of extra padding to a frame */
    uint MaxFrameNum;   /*2^(log2_max_frame_num_minus4+4), range 0.. 2^16-1 */
    uint MaxPicOrderCntLsb; /*2^(log2_max_pic_order_cnt_lsb_minus4+4), 0..2^16-1 */
    uint PicWidthInMbs; /*pic_width_in_mbs_minus1+1 */
    uint PicWidthInSamplesL; /* PicWidthInMbs*16 */
    uint PicWidthInSamplesC; /* PicWidthInMbs*8 */
    uint PicHeightInMapUnits; /* pic_height_in_map_units_minus1+1 */
    uint PicSizeInMapUnits; /* PicWidthInMbs*PicHeightInMapUnits */
    uint FrameHeightInMbs; /*(2-frame_mbs_only_flag)*PicHeightInMapUnits */
    uint SliceGroupChangeRate; /* slice_group_change_rate_minus1 + 1 */
    /* access unit */
    uint primary_pic_type;  /* u(3), Table 7-2, kinda informative only */
    /* slice data partition */
    uint slice_id;  /* ue(v) */
    uint UnusedShortTermFrameNum;
    uint PrevRefFrameNum;
    uint MbaffFrameFlag; /* (mb_adaptive_frame_field_flag && !field_pic_flag) */
    uint PicHeightInMbs; /* FrameHeightInMbs/(1+field_pic_flag) */
    int PicHeightInSamplesL; /* PicHeightInMbs*16 */
    int PicHeightInSamplesC; /* PicHeightInMbs*8 */
    uint PicSizeInMbs; /* PicWidthInMbs*PicHeightInMbs */
    uint level_idc;
    int numMBs;     /* number of MBs in the picture */
    uint MaxPicNum;
    uint CurrPicNum;
    int QSy; /* 26+pic_init_qp_minus26+slice_qs_delta */
    int FilterOffsetA;  /* deblocking filter alpha offset from slice header */
    int FilterOffsetB;  /* deblocking filter beta offset from slice header */
    uint MapUnitsInSliceGroup0; /* Min(slice_group_change_cycle*SliceGroupChangeRate,PicSizeInMapUnits) */
    /* dec_ref_pic_marking */
    int MaxLongTermFrameIdx;
    int LongTermFrameIdx;
    /* POC related variables */
    /*bool*/
    uint mem_mgr_ctrl_eq_5;  /* if memory_management_control_operation equal to 5 flag */
    int PicOrderCnt;
    int BottomFieldOrderCnt, TopFieldOrderCnt;
    /* POC mode 0 */
    int prevPicOrderCntMsb;
    uint prevPicOrderCntLsb;
    int PicOrderCntMsb;
    /* POC mode 1 */
    int prevFrameNumOffset, FrameNumOffset;
    uint prevFrameNum;
    int absFrameNum;
    int picOrderCntCycleCnt, frameNumInPicOrderCntCycle;
    int expectedDeltaPerPicOrderCntCycle;
    int expectedPicOrderCnt;
    /* FMO */
    int *MbToSliceGroupMap; /* to be re-calculated at the beginning of each slice */
    /* ref pic list */
    AVCPictureData *RefPicList0[MAX_REF_PIC_LIST]; /* list 0 */
    AVCPictureData *RefPicList1[MAX_REF_PIC_LIST]; /* list 1 */
    AVCFrameStore *refFrameList0ShortTerm[32];
    AVCFrameStore *refFrameList1ShortTerm[32];
    AVCFrameStore *refFrameListLongTerm[32];
    int refList0Size;   /* number of valid entries in RefPicList0 */
    int refList1Size;   /* number of valid entries in RefPicList1 */
    /* slice data semantics*/
    int mb_skip_run;    /* ue(v) */
    /*uint mb_skip_flag;*/ /* ae(v) */
    /* uint end_of_slice_flag;*//* ae(v) */
    /***********************************************/
    /* function pointers */
    int (*is_short_ref)(AVCPictureData *s);
    int (*is_long_ref)(AVCPictureData *s);
} AVCCommonObj;
/**
Commonly used constant arrays.
@publishedAll
*/
/**
Zigzag scan from 1-D to 2-D. */
const static uint8 ZZ_SCAN[16] = {0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15};
/* Zigzag scan from 1-D to 2-D output to block[24][16]. */
const static uint8 ZZ_SCAN_BLOCK[16] = {0, 1, 16, 32, 17, 2, 3, 18, 33, 48, 49, 34, 19, 35, 50, 51};
/**
From zigzag to raster for luma DC value */
const static uint8 ZIGZAG2RASTERDC[16] = {0, 4, 64, 128, 68, 8, 12, 72, 132, 192, 196, 136, 76, 140, 200, 204};
/**
Mapping from coding scan block indx to raster scan block index */
const static int blkIdx2blkX[16] = {0, 1, 0, 1, 2, 3, 2, 3, 0, 1, 0, 1, 2, 3, 2, 3};
const static int blkIdx2blkY[16] = {0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3};
/** from [blk8indx][blk4indx] to raster scan index */
const static int blkIdx2blkXY[4][4] = {{0, 1, 4, 5}, {2, 3, 6, 7}, {8, 9, 12, 13}, {10, 11, 14, 15}};
/**
Availability of the neighboring top-right block relative to the current block. */
const static int BlkTopRight[16] = {2, 2, 2, 3, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0};
/**
Table 8-13 Specification of QPc as a function of qPI. */
const static uint8 mapQPi2QPc[52] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
                                     21, 22, 23, 24, 25, 26, 27, 28, 29, 29, 30, 31, 32, 32, 33, 34, 34, 35, 35, 36, 36,
                                     37, 37, 37, 38, 38, 38, 39, 39, 39, 39
                                    };
/**
See 8.5.5 equation (8-252 and 8-253) the definition of v matrix. */
/* in zigzag scan */
const static int dequant_coefres[6][16] =
{
    {10, 13, 13, 10, 16, 10, 13, 13, 13, 13, 16, 10, 16, 13, 13, 16},
    {11, 14, 14, 11, 18, 11, 14, 14, 14, 14, 18, 11, 18, 14, 14, 18},
    {13, 16, 16, 13, 20, 13, 16, 16, 16, 16, 20, 13, 20, 16, 16, 20},
    {14, 18, 18, 14, 23, 14, 18, 18, 18, 18, 23, 14, 23, 18, 18, 23},
    {16, 20, 20, 16, 25, 16, 20, 20, 20, 20, 25, 16, 25, 20, 20, 25},
    {18, 23, 23, 18, 29, 18, 23, 23, 23, 23, 29, 18, 29, 23, 23, 29}
};
/**
From jm7.6 block.c. (in zigzag scan) */
const static int quant_coef[6][16] =
{
    {13107, 8066, 8066, 13107, 5243, 13107, 8066, 8066, 8066, 8066, 5243, 13107, 5243, 8066, 8066, 5243},
    {11916, 7490, 7490, 11916, 4660, 11916, 7490, 7490, 7490, 7490, 4660, 11916, 4660, 7490, 7490, 4660},
    {10082, 6554, 6554, 10082, 4194, 10082, 6554, 6554, 6554, 6554, 4194, 10082, 4194, 6554, 6554, 4194},
    {9362, 5825, 5825, 9362, 3647, 9362, 5825, 5825, 5825, 5825, 3647, 9362, 3647, 5825, 5825, 3647},
    {8192, 5243, 5243, 8192, 3355, 8192, 5243, 5243, 5243, 5243, 3355, 8192, 3355, 5243, 5243, 3355},
    {7282, 4559, 4559, 7282, 2893, 7282, 4559, 4559, 4559, 4559, 2893, 7282, 2893, 4559, 4559, 2893}
};
/**
Convert scan from raster scan order to block decoding order and
from block decoding order to raster scan order. Same table!!!
*/
const static uint8 ras2dec[16] = {0, 1, 4, 5, 2, 3, 6, 7, 8, 9, 12, 13, 10, 11, 14, 15};
/* mapping from level_idc to index map */
/* NOTE(review): array has 61 elements but only 60 initializers; element [60] is
   implicitly 0 (same index as level 10). Callers must validate level_idc before
   indexing; 255 marks invalid level_idc values. */
const static uint8 mapLev2Idx[61] = {255, 255, 255, 255, 255, 255, 255, 255, 255, 1,
                                     0, 1, 2, 3, 255, 255, 255, 255, 255, 255,
                                     4, 5, 6, 255, 255, 255, 255, 255, 255, 255,
                                     7, 8, 9, 255, 255, 255, 255, 255, 255, 255,
                                     10, 11, 12, 255, 255, 255, 255, 255, 255, 255,
                                     13, 14, 255, 255, 255, 255, 255, 255, 255, 255
                                    };
/* map back from index to Level IDC */
const static uint8 mapIdx2Lev[MAX_LEVEL_IDX] = {10, 11, 12, 13, 20, 21, 22, 30, 31, 32, 40, 41, 42, 50, 51};
/**
from the index map to the MaxDPB value times 2 */
const static int32 MaxDPBX2[MAX_LEVEL_IDX] = {297, 675, 1782, 1782, 1782, 3564, 6075, 6075,
        13500, 15360, 24576, 24576, 24576, 82620, 138240
                                             };
/* map index to the max frame size */
const static int MaxFS[MAX_LEVEL_IDX] = {99, 396, 396, 396, 396, 792, 1620, 1620, 3600, 5120,
        8192, 8192, 8192, 22080, 36864
                                        };
/* map index to max MB processing rate */
const static int32 MaxMBPS[MAX_LEVEL_IDX] = {1485, 3000, 6000, 11880, 11880, 19800, 20250, 40500,
        108000, 216000, 245760, 245760, 491520, 589824, 983040
                                            };
/* map index to max video bit rate */
const static uint32 MaxBR[MAX_LEVEL_IDX] = {64, 192, 384, 768, 2000, 4000, 4000, 10000, 14000, 20000,
        20000, 50000, 50000, 135000, 240000
                                           };
/* map index to max CPB size */
const static uint32 MaxCPB[MAX_LEVEL_IDX] = {175, 500, 1000, 2000, 2000, 4000, 4000, 10000, 14000,
        20000, 25000, 62500, 62500, 135000, 240000
                                            };
/* map index to max vertical MV range */
const static int MaxVmvR[MAX_LEVEL_IDX] = {64, 128, 128, 128, 128, 256, 256, 256, 512, 512, 512, 512, 512, 512, 512};
#endif /* AVCINT_COMMON_H_INCLUDED */

View File

@@ -0,0 +1,555 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains declarations of internal functions for common encoder/decoder library.
@publishedAll
*/
#ifndef AVCCOMMON_LIB_H_INCLUDED
#define AVCCOMMON_LIB_H_INCLUDED
#ifndef AVCINT_COMMON_H_INCLUDED
#include "avcint_common.h"
#endif
/*----------- deblock.c --------------*/
/**
This function performs conditional deblocking on a complete picture.
\param "video"  "Pointer to AVCCommonObj."
\return "AVC_SUCCESS for success and AVC_FAIL otherwise."
*/
OSCL_IMPORT_REF AVCStatus DeblockPicture(AVCCommonObj *video);
/**
This function performs MB-based deblocking when MB_BASED_DEBLOCK
is defined at compile time.
\param "video"  "Pointer to AVCCommonObj."
\return "void."
*/
void MBInLoopDeblock(AVCCommonObj *video);
/*---------- dpb.c --------------------*/
/**
This function is called every time a new sequence is detected.
\param "avcHandle" "Pointer to AVCHandle."
\param "video" "Pointer to AVCCommonObj."
\param "padding" "Flag specifying whether padding in luma component is needed (used for encoding)."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus   AVCConfigureSequence(AVCHandle *avcHandle, AVCCommonObj *video, bool padding);
/**
This function allocates and initializes the decoded picture buffer structure based on
the profile and level for the first sequence parameter set. Currently,
it does not allow changing in profile/level for subsequent SPS.
\param "avcHandle" "Pointer to AVCHandle."
\param "video" "Pointer to AVCCommonObj."
\param "FrameHeightInMbs" "Height of the frame in the unit of MBs."
\param "PicWidthInMbs" "Width of the picture in the unit of MBs."
\param "padding"    "Flag specifying whether padding in luma component is needed (used for encoding)."
\return "AVC_SUCCESS or AVC_FAIL."
*/
AVCStatus InitDPB(AVCHandle *avcHandle, AVCCommonObj *video, int FrameHeightInMbs, int PicWidthInMbs, bool padding);
/**
This function frees the DPB memory.
\param "avcHandle" "Pointer to AVCHandle."
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus CleanUpDPB(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function finds an empty frame in the decoded picture buffer to be used for the
current picture, initializes the corresponding picture structure with Sl, Scb, Scr,
width, height and pitch.
\param "avcHandle" "Pointer to the main handle object."
\param "video"  "Pointer to AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus DPBInitBuffer(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function initializes the current picture selected by DPBInitBuffer with the
given picture number. (NOTE(review): original comment duplicated DPBInitBuffer's
description; confirm exact behavior against dpb.c.)
\param "video"  "Pointer to AVCCommonObj."
\param "CurrPicNum" "Current picture number (only used in decoder)."
\return "void."
*/
OSCL_IMPORT_REF void DPBInitPic(AVCCommonObj *video, int CurrPicNum);
/**
This function releases the current frame back to the available pool for skipped frame after encoding.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\return "void."
*/
OSCL_IMPORT_REF void DPBReleaseCurrentFrame(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function performs decoded reference picture marking process and store the current picture to the
corresponding frame storage in the decoded picture buffer.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus StorePictureInDPB(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function perform sliding window operation on the reference picture lists, see subclause 8.2.5.3.
It removes short-term ref frames with smallest FrameNumWrap from the reference list.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\return "AVC_SUCCESS or AVC_FAIL (contradicting values or scenario as in the Note in the draft)."
*/
AVCStatus sliding_window_process(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb);
/**
This function perform adaptive memory marking operation on the reference picture lists,
see subclause 8.2.5.4. It calls other functions for specific operations.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "sliceHdr"   "Pointer to the AVCSliceHeader."
\return "AVC_SUCCESS or AVC_FAIL (contradicting values or scenario as in the Note in the draft)."
*/
AVCStatus adaptive_memory_marking(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, AVCSliceHeader *sliceHdr);
/**
This function performs memory management control operation 1, marking a short-term picture
as unused for reference. See subclause 8.2.5.4.1.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "difference_of_pic_nums_minus1"  "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp1(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, int difference_of_pic_nums_minus1);
/**
This function performs memory management control operation 2, marking a long-term picture
as unused for reference. See subclause 8.2.5.4.2.
\param "avcHandle" "Pointer to the main handle object."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "long_term_pic_num"  "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp2(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, int long_term_pic_num);
/**
This function performs memory management control operation 3, assigning a LongTermFrameIdx to
a short-term reference picture. See subclause 8.2.5.4.3.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "difference_of_pic_nums_minus1"  "From the syntax in dec_ref_pic_marking()."
\param "long_term_frame_idx"  "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp3(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint difference_of_pic_nums_minus1,
                   uint long_term_frame_idx);
/**
This function performs memory management control operation 4, getting new MaxLongTermFrameIdx.
See subclause 8.2.5.4.4.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "max_long_term_frame_idx_plus1"  "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp4(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint max_long_term_frame_idx_plus1);
/**
This function performs memory management control operation 5, marking all reference pictures
as unused for reference and set MaxLongTermFrameIdx to no long-term frame indices.
See subclause 8.2.5.4.5.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
*/
void MemMgrCtrlOp5(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb);
/**
This function performs memory management control operation 6, assigning a long-term frame index
to the current picture. See subclause 8.2.5.4.6.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "long_term_frame_idx"  "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp6(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint long_term_frame_idx);
/**
This function marks a long-term ref frame with a specific frame index as unused for reference.
\param "avcHandle" "Pointer to the main handle object."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "long_term_frame_idx"  "To look for"
*/
void unmark_long_term_frame_for_reference_by_frame_idx(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint long_term_frame_idx);
/**
This function marks a long-term ref field with a specific frame index as unused for reference except
a frame that contains a picture with picNumX.
\param "video" "Pointer to the AVCCommonObj."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "long_term_frame_indx"  "To look for."
\param "picNumX" "To look for."
*/
void unmark_long_term_field_for_reference_by_frame_idx(AVCCommonObj *video, AVCDecPicBuffer *dpb, uint long_term_frame_indx, int picNumX);
/**
This function marks the frame store at the given index as unused for reference.
\param "avcHandle" "Pointer to the main handle object."
\param "dpb"  "Pointer to the AVCDecPicBuffer."
\param "idx"  "Index into dpb->fs of the frame store to be unmarked."
*/
void unmark_for_reference(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint idx);
/* Rebuilds the reference lists after marking operations have changed frame states. */
void update_ref_list(AVCDecPicBuffer *dpb);
/*---------- fmo.c --------------*/
/**
This function initializes flexible macroblock reordering.
\param "video"  "Pointer to AVCCommonObj."
\return "AVC_SUCCESS for success and AVC_FAIL otherwise."
*/
OSCL_IMPORT_REF AVCStatus FMOInit(AVCCommonObj *video);
/**
This function fills up an array that maps Map unit to the slice group
following the interleaved slice group map type.
\param "mapUnitToSliceGroupMap"  "Array of slice group mapping."
\param "run_length_minus1"  "Array of the run-length."
\param "num_slice_groups_minus1"  "Number of slice group minus 1."
\param "PicSizeInMapUnits"  "Size of the picture in number Map units."
\return "Void."
*/
void FmoGenerateType0MapUnitMap(int *mapUnitToSliceGroupMap, uint *run_length_minus1, uint num_slice_groups_minus1, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the dispersed slice group map type.
\param "mapUnitToSliceGroupMap"  "Array of slice group mapping."
\param "PicWidthInMbs"  "Width of the luma picture in macroblock unit."
\param "num_slice_groups_minus1"  "Number of slice group minus 1."
\param "PicSizeInMapUnits"  "Size of the picture in number Map units."
\return "Void."
*/
void FmoGenerateType1MapUnitMap(int *mapUnitToSliceGroupMap, int PicWidthInMbs, uint num_slice_groups_minus1, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the foreground with left-over slice group map type.
\param "pps"  "Pointer to AVCPicParamSet structure."
\param "mapUnitToSliceGroupMap"  "Array of slice group mapping."
\param "PicWidthInMbs"  "Width of the luma picture in macroblock unit."
\param "num_slice_groups_minus1"  "Number of slice group minus 1."
\param "PicSizeInMapUnits"  "Size of the picture in number Map units."
\return "Void."
*/
void FmoGenerateType2MapUnitMap(AVCPicParamSet *pps, int *mapUnitToSliceGroupMap, int PicWidthInMbs,
                                uint num_slice_groups_minus1, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the box-out slice group map type.
\param "video"  "Pointer to AVCCommonObj structure."
\param "pps"  "Pointer to AVCPicParamSet structure."
\param "mapUnitToSliceGroupMap"  "Array of slice group mapping."
\param "PicWidthInMbs"  "Width of the luma picture in macroblock unit."
\return "Void."
*/
void FmoGenerateType3MapUnitMap(AVCCommonObj *video, AVCPicParamSet* pps, int *mapUnitToSliceGroupMap,
                                int PicWidthInMbs);
/**
This function fills up an array that maps Map unit to the slice group
following the raster scan slice group map type.
\param "mapUnitToSliceGroupMap"  "Array of slice group mapping."
\param "MapUnitsInSliceGroup0"  "Derived in subclause 7.4.3."
\param "slice_group_change_direction_flag"  "A value from the slice header."
\param "PicSizeInMapUnits"  "Size of the picture in number Map units."
\return "void"
*/
void FmoGenerateType4MapUnitMap(int *mapUnitToSliceGroupMap, int MapUnitsInSliceGroup0,
                                int slice_group_change_direction_flag, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following wipe slice group map type.
\param "mapUnitsToSliceGroupMap"  "Array of slice group mapping."
\param "video"  "Pointer to AVCCommonObj structure."
\param "slice_group_change_direction_flag"  "A value from the slice header."
\param "PicSizeInMapUnits"  "Size of the picture in number Map units."
\return "void"
*/
void FmoGenerateType5MapUnitMap(int *mapUnitsToSliceGroupMap, AVCCommonObj *video,
                                int slice_group_change_direction_flag, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the explicit slice group map type.
\param "mapUnitsToSliceGroupMap"  "Array of slice group mapping."
\param "slice_group_id"  "Array of slice_group_id from AVCPicParamSet structure."
\param "PicSizeInMapUnits"  "Size of the picture in number Map units."
\return "void"
*/
void FmoGenerateType6MapUnitMap(int *mapUnitsToSliceGroupMap, int *slice_group_id, uint PicSizeInMapUnits);
/*------------- itrans.c --------------*/
/**
This function performs transformation of the Intra16x16DC value according to
subclause 8.5.6.
\param "block"  "Pointer to the video->block[0][0][0]."
\param "Qq"  "Luma quantization parameter divided by 6 (presumably QPy/6 -- confirm against caller)."
\param "Rq"  "Luma quantization parameter modulo 6 (presumably QPy%6 -- confirm against caller)."
\return "void."
*/
void Intra16DCTrans(int16 *block, int Qq, int Rq);
/**
This function performs transformation of a 4x4 block according to
subclause 8.5.8.
\param "block"  "Pointer to the origin of transform coefficient area."
\param "pred"   "Pointer to the origin of predicted area."
\param "cur"    "Pointer to the origin of the output area."
\param "width"  "Pitch of cur."
\return "void."
*/
void itrans(int16 *block, uint8 *pred, uint8 *cur, int width);
/**
This function is the same one as itrans except for chroma.
\param "block"  "Pointer to the origin of transform coefficient area."
\param "pred"   "Pointer to the origin of predicted area."
\param "cur"    "Pointer to the origin of the output area."
\param "width"  "Pitch of cur."
\return "void."
*/
void ictrans(int16 *block, uint8 *pred, uint8 *cur, int width);
/**
This function performs transformation of the DCChroma value according to
subclause 8.5.7.
\param "block"  "Pointer to the video->block[0][0][0]."
\param "Qq"  "Chroma quantization parameter divided by 6 (presumably QPc/6 -- confirm against caller)."
\param "Rq"  "Chroma quantization parameter modulo 6 (presumably QPc%6 -- confirm against caller)."
\return "void."
*/
void ChromaDCTrans(int16 *block, int Qq, int Rq);
/**
This function copies a block from pred to cur.
\param "pred"   "Pointer to prediction block."
\param "cur"    "Pointer to the current YUV block."
\param "width"  "Pitch of cur memory."
\param "pred_pitch" "Pitch for pred memory."
\return "void."
*/
void copy_block(uint8 *pred, uint8 *cur, int width, int pred_pitch);
/*--------- mb_access.c ----------------*/
/**
This function initializes the neighboring information before start macroblock decoding.
\param "video"  "Pointer to AVCCommonObj."
\param "mbNum"  "The current macroblock index."
\return "void"
*/
OSCL_IMPORT_REF void InitNeighborAvailability(AVCCommonObj *video, int mbNum);
/**
This function checks whether the requested neighboring macroblock is available.
\param "mblock"  "Pointer to the array of macroblocks covering the picture
                  (presumably slice_id is read to determine slice membership -- confirm in mb_access.c)."
\param "PicSizeInMbs"  "Size of the picture in number of MBs."
\param "mbAddr"  "Neighboring macroblock index to check."
\param "currMbAddr" "Current macroblock index."
\return "TRUE if the neighboring MB is available, FALSE otherwise."
*/
bool mb_is_available(AVCMacroblock *mblock, uint PicSizeInMbs, int mbAddr, int currMbAddr);
/**
This function performs prediction of the nonzero coefficient for a luma block (i,j).
\param "video"  "Pointer to AVCCommonObj."
\param "i"  "Block index, horizontal."
\param "j"  "Block index, vertical."
\return "Predicted number of nonzero coefficient."
*/
OSCL_IMPORT_REF int predict_nnz(AVCCommonObj *video, int i, int j);
/**
This function performs prediction of the nonzero coefficient for a chroma block (i,j).
\param "video"  "Pointer to AVCCommonObj."
\param "i"  "Block index, horizontal."
\param "j"  "Block index, vertical."
\return "Predicted number of nonzero coefficient."
*/
OSCL_IMPORT_REF int predict_nnz_chroma(AVCCommonObj *video, int i, int j);
/**
This function calculates the predicted motion vectors for the current macroblock.
\param "video"  "Pointer to AVCCommonObj."
\param "encFlag"    "Boolean whether this function is used by encoder or decoder."
\return "void."
*/
OSCL_IMPORT_REF void GetMotionVectorPredictor(AVCCommonObj *video, int encFlag);
/*---------- reflist.c -----------------*/
/**
This function initializes reference picture list used in INTER prediction
at the beginning of each slice decoding. See subclause 8.2.4.
\param "video" "Pointer to AVCCommonObj."
\return "void"
Output is video->RefPicList0, video->RefPicList1, video->refList0Size and video->refList1Size.
*/
OSCL_IMPORT_REF void RefListInit(AVCCommonObj *video);
/**
This function generates picture list from frame list. Used when current picture is field.
see subclause 8.2.4.2.5.
\param "video" "Pointer to AVCCommonObj."
\param "IsL1" "Is L1 list?"
\param "long_term" "Is long-term prediction?"
\return "void"
*/
void GenPicListFromFrameList(AVCCommonObj *video, int IsL1, int long_term);
/**
This function performs reference picture list reordering according to the
ref_pic_list_reordering() syntax. See subclause 8.2.4.3.
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL"
Output is video->RefPicList0, video->RefPicList1, video->refList0Size and video->refList1Size.
*/
OSCL_IMPORT_REF AVCStatus ReOrderList(AVCCommonObj *video);
/**
This function performs reference picture list reordering according to the
ref_pic_list_reordering() syntax regardless of list 0 or list 1. See subclause 8.2.4.3.
\param "video" "Pointer to AVCCommonObj."
\param "isL1" "Is list 1 or not."
\return "AVC_SUCCESS or AVC_FAIL"
Output is video->RefPicList0 and video->refList0Size or video->RefPicList1 and video->refList1Size.
*/
AVCStatus ReorderRefPicList(AVCCommonObj *video, int isL1);
/**
This function performs reordering process of reference picture list for short-term pictures.
See subclause 8.2.4.3.1.
\param "video" "Pointer to AVCCommonObj."
\param "picNumLX" "picNumLX of an entry in the reference list."
\param "refIdxLX" "Pointer to the current entry index in the reference."
\param "isL1" "Is list 1 or not."
\return "AVC_SUCCESS or AVC_FAIL"
*/
AVCStatus ReorderShortTerm(AVCCommonObj *video, int picNumLX, int *refIdxLX, int isL1);
/**
This function performs reordering process of reference picture list for long-term pictures.
See subclause 8.2.4.3.2.
\param "video" "Pointer to AVCCommonObj."
\param "LongTermPicNum" "LongTermPicNum of an entry in the reference list."
\param "refIdxLX" "Pointer to the current entry index in the reference."
\param "isL1" "Is list 1 or not."
\return "AVC_SUCCESS or AVC_FAIL"
*/
AVCStatus ReorderLongTerm(AVCCommonObj *video, int LongTermPicNum, int *refIdxLX, int isL1);
/**
This function gets the pictures in DPB according to the PicNum.
\param "video" "Pointer to AVCCommonObj."
\param "picNum" "PicNum of the picture we are looking for."
\return "Pointer to the AVCPictureData or NULL if not found"
*/
AVCPictureData* GetShortTermPic(AVCCommonObj *video, int picNum);
/**
This function gets the pictures in DPB according to the LongtermPicNum.
\param "video" "Pointer to AVCCommonObj."
\param "LongtermPicNum" "LongtermPicNum of the picture we are looking for."
\return "Pointer to the AVCPictureData."
*/
AVCPictureData* GetLongTermPic(AVCCommonObj *video, int LongtermPicNum);
/**
This function indicates whether the picture is used for short-term reference or not.
\param "s" "Pointer to AVCPictureData."
\return "1 if it is used for short-term, 0 otherwise."
*/
int is_short_ref(AVCPictureData *s);
/**
This function indicates whether the picture is used for long-term reference or not.
\param "s" "Pointer to AVCPictureData."
\return "1 if it is used for long-term, 0 otherwise."
*/
int is_long_ref(AVCPictureData *s);
/**
This function sorts array of pointers to AVCPictureData in descending order of
the PicNum value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByPicNum(AVCPictureData *data[], int num);
/**
This function sorts array of pointers to AVCPictureData in ascending order of
the PicNum value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByPicNumLongTerm(AVCPictureData *data[], int num);
/**
This function sorts array of pointers to AVCFrameStore in descending order of
the FrameNumWrap value.
\param "data" "Array of pointers to AVCFrameStore."
\param "num" "Size of the array."
\return "void"
*/
void SortFrameByFrameNumWrap(AVCFrameStore *data[], int num);
/**
This function sorts array of pointers to AVCFrameStore in ascending order of
the LongTermFrameIdx value.
\param "data" "Array of pointers to AVCFrameStore."
\param "num" "Size of the array."
\return "void"
*/
void SortFrameByLTFrameIdx(AVCFrameStore *data[], int num);
/**
This function sorts array of pointers to AVCPictureData in descending order of
the PicOrderCnt value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByPOC(AVCPictureData *data[], int num, int descending);
/**
This function sorts array of pointers to AVCPictureData in ascending order of
the LongTermPicNum value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByLTPicNum(AVCPictureData *data[], int num);
/**
This function sorts array of pointers to AVCFrameStore by the PicOrderCnt
value, in descending or ascending order as selected by the descending flag.
\param "data" "Array of pointers to AVCFrameStore."
\param "num" "Size of the array."
\param "descending" "Nonzero to sort in descending order, zero for ascending."
\return "void"
*/
void SortFrameByPOC(AVCFrameStore *data[], int num, int descending);
#endif /* _AVCCOMMON_LIB_H_ */

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,724 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avclib_common.h"
#define DPB_MEM_ATTR 0
/* Allocate and initialize the frame stores of the decoded picture buffer (DPB).
   The number of frame stores is derived from the level (MaxDPBX2 table), then
   grown, if needed, to num_ref_frames + 1 so there is always room for the frame
   currently being decoded.
   \param "avcHandle" "Handle providing the CBAVC_Malloc/CBAVC_Free callbacks."
   \param "video" "Common decoder/encoder state; decPicBuf is (re)initialized."
   \param "FrameHeightInMbs" "Frame height in macroblocks."
   \param "PicWidthInMbs" "Picture width in macroblocks."
   \param "padding" "When true, each frame gets a one-MB border on all sides."
   \return "AVC_SUCCESS, or AVC_MEMORY_FAIL on allocation failure." */
AVCStatus InitDPB(AVCHandle *avcHandle, AVCCommonObj *video, int FrameHeightInMbs, int PicWidthInMbs, bool padding)
{
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int level, framesize, num_fs;
    void *userData = avcHandle->userData;
#ifndef PV_MEMORY_POOL
    uint32 addr;
#endif
    uint16 refIdx = 0;
    level = video->currSeqParams->level_idc;
    /* reset all frame-store slots before (re)allocation */
    for (num_fs = 0; num_fs < MAX_FS; num_fs++)
    {
        dpb->fs[num_fs] = NULL;
    }
    /* 4:2:0 frame size in bytes: (MBs << 7) * 3 == MBs * 384 */
    framesize = (int)(((FrameHeightInMbs * PicWidthInMbs) << 7) * 3);
    if (padding)
    {
        /* extra bytes needed for the one-MB border around the frame */
        video->padded_size = (int)((((FrameHeightInMbs + 2) * (PicWidthInMbs + 2)) << 7) * 3) - framesize;
    }
    else
    {
        video->padded_size = 0;
    }
#ifndef PV_MEMORY_POOL
    /* release any buffer left over from a previous configuration */
    if (dpb->decoded_picture_buffer)
    {
        avcHandle->CBAVC_Free(userData, (int)dpb->decoded_picture_buffer);
        dpb->decoded_picture_buffer = NULL;
    }
#endif
    /* need to allocate one extra frame for current frame, DPB only defines for reference frames */
    dpb->num_fs = (uint32)(MaxDPBX2[mapLev2Idx[level]] << 2) / (3 * FrameHeightInMbs * PicWidthInMbs) + 1;
    if (dpb->num_fs > MAX_FS)
    {
        dpb->num_fs = MAX_FS;
    }
    /* guarantee room for num_ref_frames references plus the current frame */
    if (video->currSeqParams->num_ref_frames + 1 > (uint32)dpb->num_fs)
    {
        dpb->num_fs = video->currSeqParams->num_ref_frames + 1;
    }
    dpb->dpb_size = dpb->num_fs * (framesize + video->padded_size);
//  dpb->dpb_size = (uint32)MaxDPBX2[mapLev2Idx[level]]*512 + framesize;
#ifndef PV_MEMORY_POOL
    dpb->decoded_picture_buffer = (uint8*) avcHandle->CBAVC_Malloc(userData, dpb->dpb_size, 100/*DPB_MEM_ATTR*/);
    if (dpb->decoded_picture_buffer == NULL || dpb->decoded_picture_buffer&0x3) // not word aligned
        return AVC_MEMORY_FAIL;
#endif
    dpb->used_size = 0;
    num_fs = 0;
    while (num_fs < dpb->num_fs)
    {
        /* fs is an array pointers to AVCDecPicture */
        dpb->fs[num_fs] = (AVCFrameStore*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCFrameStore), 101/*DEFAULT_ATTR*/);
        if (dpb->fs[num_fs] == NULL)
        {
            return AVC_MEMORY_FAIL;
        }
#ifndef PV_MEMORY_POOL
        /* assign the actual memory for Sl, Scb, Scr */
        dpb->fs[num_fs]->base_dpb = dpb->decoded_picture_buffer + dpb->used_size;
#endif
        dpb->fs[num_fs]->IsReference = 0;
        dpb->fs[num_fs]->IsLongTerm = 0;
        dpb->fs[num_fs]->IsOutputted = 3; /* 3 == outputted and unreferenced, i.e. free */
        dpb->fs[num_fs]->frame.RefIdx = refIdx++; /* this value will remain unchanged through out the encoding session */
        dpb->fs[num_fs]->frame.picType = AVC_FRAME;
        dpb->fs[num_fs]->frame.isLongTerm = 0;
        dpb->fs[num_fs]->frame.isReference = 0;
        video->RefPicList0[num_fs] = &(dpb->fs[num_fs]->frame);
        dpb->fs[num_fs]->frame.padded = 0;
        dpb->used_size += (framesize + video->padded_size);
        num_fs++;
    }
    return AVC_SUCCESS;
}
/* (Re)configure sequence-level buffers when a new SPS is activated.
   If the picture size or level changed, all reference frames are flushed and
   the DPB, macroblock array, MB_BASED_DEBLOCK intra-prediction line buffers
   and the MB-to-slice-group map are re-allocated.
   \param "avcHandle" "Handle providing memory/DPB callbacks."
   \param "video" "Common decoder/encoder state (currSeqParams must be set)."
   \param "padding" "Passed through to InitDPB to request a padded DPB."
   \return "AVC_SUCCESS, or AVC_FAIL on any allocation failure." */
OSCL_EXPORT_REF AVCStatus AVCConfigureSequence(AVCHandle *avcHandle, AVCCommonObj *video, bool padding)
{
    void *userData = avcHandle->userData;
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int framesize, ii; /* size of one frame */
    uint PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs, PicSizeInMapUnits;
    uint num_fs;
    /* derived variables from SPS */
    PicWidthInMbs = video->currSeqParams->pic_width_in_mbs_minus1 + 1;
    PicHeightInMapUnits = video->currSeqParams->pic_height_in_map_units_minus1 + 1 ;
    FrameHeightInMbs = (2 - video->currSeqParams->frame_mbs_only_flag) * PicHeightInMapUnits ;
    PicSizeInMapUnits = PicWidthInMbs * PicHeightInMapUnits ;
    /* only re-allocate when the dimensions or the level actually changed */
    if (video->PicSizeInMapUnits != PicSizeInMapUnits || video->currSeqParams->level_idc != video->level_idc)
    {
        /* make sure you mark all the frames as unused for reference for flushing*/
        for (ii = 0; ii < dpb->num_fs; ii++)
        {
            dpb->fs[ii]->IsReference = 0;
            dpb->fs[ii]->IsOutputted |= 0x02;
        }
        /* level-derived frame-store count, plus one for the current frame */
        num_fs = (uint32)(MaxDPBX2[(uint32)mapLev2Idx[video->currSeqParams->level_idc]] << 2) / (3 * PicSizeInMapUnits) + 1;
        if (num_fs >= MAX_FS)
        {
            num_fs = MAX_FS;
        }
#ifdef PV_MEMORY_POOL
        if (padding)
        {
            /* request room for the picture plus the one-MB border */
            avcHandle->CBAVC_DPBAlloc(avcHandle->userData,
                                      PicSizeInMapUnits + ((PicWidthInMbs + 2) << 1) + (PicHeightInMapUnits << 1), num_fs);
        }
        else
        {
            avcHandle->CBAVC_DPBAlloc(avcHandle->userData, PicSizeInMapUnits, num_fs);
        }
#endif
        CleanUpDPB(avcHandle, video);
        if (InitDPB(avcHandle, video, FrameHeightInMbs, PicWidthInMbs, padding) != AVC_SUCCESS)
        {
            return AVC_FAIL;
        }
        /* Allocate video->mblock upto PicSizeInMbs and populate the structure such as the neighboring MB pointers. */
        framesize = (FrameHeightInMbs * PicWidthInMbs);
        if (video->mblock)
        {
            avcHandle->CBAVC_Free(userData, (uint32)video->mblock);
            video->mblock = NULL;
        }
        video->mblock = (AVCMacroblock*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCMacroblock) * framesize, DEFAULT_ATTR);
        if (video->mblock == NULL)
        {
            return AVC_FAIL;
        }
        /* -1 slice_id == macroblock not yet decoded (see neighbor availability checks) */
        for (ii = 0; ii < framesize; ii++)
        {
            video->mblock[ii].slice_id = -1;
        }
        /* Allocate memory for intra prediction */
#ifdef MB_BASED_DEBLOCK
        video->intra_pred_top = (uint8*) avcHandle->CBAVC_Malloc(userData, PicWidthInMbs << 4, FAST_MEM_ATTR);
        if (video->intra_pred_top == NULL)
        {
            return AVC_FAIL;
        }
        video->intra_pred_top_cb = (uint8*) avcHandle->CBAVC_Malloc(userData, PicWidthInMbs << 3, FAST_MEM_ATTR);
        if (video->intra_pred_top_cb == NULL)
        {
            return AVC_FAIL;
        }
        video->intra_pred_top_cr = (uint8*) avcHandle->CBAVC_Malloc(userData, PicWidthInMbs << 3, FAST_MEM_ATTR);
        if (video->intra_pred_top_cr == NULL)
        {
            return AVC_FAIL;
        }
#endif
        /* Allocate slice group MAP map */
        if (video->MbToSliceGroupMap)
        {
            avcHandle->CBAVC_Free(userData, (uint32)video->MbToSliceGroupMap);
            video->MbToSliceGroupMap = NULL;
        }
        video->MbToSliceGroupMap = (int*) avcHandle->CBAVC_Malloc(userData, sizeof(uint) * PicSizeInMapUnits * 2, 7/*DEFAULT_ATTR*/);
        if (video->MbToSliceGroupMap == NULL)
        {
            return AVC_FAIL;
        }
        /* remember the configuration so the next SPS can be compared against it */
        video->PicSizeInMapUnits = PicSizeInMapUnits;
        video->level_idc = video->currSeqParams->level_idc;
    }
    return AVC_SUCCESS;
}
/* Free every frame store of the decoded picture buffer (and, without
   PV_MEMORY_POOL, the pixel buffer itself) and reset the DPB bookkeeping.
   Always returns AVC_SUCCESS. */
OSCL_EXPORT_REF AVCStatus CleanUpDPB(AVCHandle *avcHandle, AVCCommonObj *video)
{
    AVCDecPicBuffer *dpb = video->decPicBuf;
    void *userData = avcHandle->userData;
    int idx;

    for (idx = 0; idx < MAX_FS; idx++)
    {
        if (dpb->fs[idx] == NULL)
        {
            continue;
        }
        avcHandle->CBAVC_Free(userData, (int)dpb->fs[idx]);
        dpb->fs[idx] = NULL;
    }
#ifndef PV_MEMORY_POOL
    if (dpb->decoded_picture_buffer)
    {
        avcHandle->CBAVC_Free(userData, (int)dpb->decoded_picture_buffer);
        dpb->decoded_picture_buffer = NULL;
    }
#endif
    dpb->used_size = 0;
    dpb->dpb_size = 0;
    return AVC_SUCCESS;
}
/* Claim a free frame store for the picture about to be decoded.
   A store is free when it is not referenced (IsReference == 0) and has been
   fully outputted (IsOutputted == 3).  See also the frame_num restriction in
   page 59 of JVT-I1010.doc.
   \return "AVC_SUCCESS with video->currFS set; AVC_PICTURE_OUTPUT_READY when
            no store is free; AVC_NO_BUFFER if the pool bind fails." */
OSCL_EXPORT_REF AVCStatus DPBInitBuffer(AVCHandle *avcHandle, AVCCommonObj *video)
{
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int idx, bindStatus;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        /* skip stores still referenced or not yet outputted */
        if (dpb->fs[idx]->IsReference != 0 || dpb->fs[idx]->IsOutputted != 3)
        {
            continue;
        }
        video->currFS = dpb->fs[idx];
#ifdef PV_MEMORY_POOL
        bindStatus = avcHandle->CBAVC_FrameBind(avcHandle->userData, idx, &(video->currFS->base_dpb));
        if (bindStatus == AVC_FAIL)
        {
            return AVC_NO_BUFFER; /* this should not happen */
        }
#endif
        break;
    }
    if (idx == dpb->num_fs)
    {
        return AVC_PICTURE_OUTPUT_READY; /* no empty frame available */
    }
    return AVC_SUCCESS;
}
/* Initialize the current frame store and the currPic pixel pointers for the
   picture about to be decoded.  Must be called after the slice header and POC
   have been decoded and after DPBInitBuffer has claimed a frame store.
   \param "video" "Common object; currFS/currPic are initialized."
   \param "CurrPicNum" "Becomes the picture's PicNum and FrameNumWrap." */
OSCL_EXPORT_REF void DPBInitPic(AVCCommonObj *video, int CurrPicNum)
{
    int offset = 0;
    int offsetc = 0;
    int luma_framesize;
    /* this part has to be set here, assuming that slice header and POC have been decoded. */
    /* used in GetOutput API */
    video->currFS->PicOrderCnt = video->PicOrderCnt;
    video->currFS->FrameNum = video->sliceHdr->frame_num;
    video->currFS->FrameNumWrap = CurrPicNum;    // MC_FIX
    /* initialize everything to zero */
    video->currFS->IsOutputted = 0;
    video->currFS->IsReference = 0;
    video->currFS->IsLongTerm = 0;
    video->currFS->frame.isReference = FALSE;
    video->currFS->frame.isLongTerm = FALSE;
    /* initialize the pixel pointer to NULL */
    video->currFS->frame.Sl = video->currFS->frame.Scb = video->currFS->frame.Scr = NULL;
    /* determine video->currPic */
    /* assign dbp->base_dpb to fs[i]->frame.Sl, Scb, Scr .*/
    /* For PicSizeInMbs, see DecodeSliceHeader() */
    video->currPic = &(video->currFS->frame);
    video->currPic->padded = 0; // reset this flag to not-padded
    if (video->padded_size)
    {
        /* skip the 16-sample top and left luma border inside the padded
           buffer (padded pitch is PicWidthInSamplesL + 32); chroma offsets
           are a quarter of the luma offset plus a 4-sample left border */
        offset = ((video->PicWidthInSamplesL + 32) << 4) + 16; // offset to the origin
        offsetc = (offset >> 2) + 4;
        /* padded luma plane size: (H+2)*(W+2) MBs * 256 samples */
        luma_framesize = (int)((((video->FrameHeightInMbs + 2) * (video->PicWidthInMbs + 2)) << 8));
    }
    else
        luma_framesize = video->PicSizeInMbs << 8;
    /* lay out Sl / Scb / Scr consecutively within the frame store's buffer */
    video->currPic->Sl = video->currFS->base_dpb + offset;
    video->currPic->Scb = video->currFS->base_dpb + luma_framesize + offsetc;
    video->currPic->Scr = video->currPic->Scb + (luma_framesize >> 2);
    video->currPic->pitch = video->PicWidthInSamplesL + (video->padded_size == 0 ? 0 : 32);
    video->currPic->height = video->PicHeightInSamplesL;
    video->currPic->width = video->PicWidthInSamplesL;
    video->currPic->PicNum = CurrPicNum;
}
/* Return the current frame's buffer to the pool without retaining it
   (used to release a skipped frame after encoding). */
OSCL_EXPORT_REF void DPBReleaseCurrentFrame(AVCHandle *avcHandle, AVCCommonObj *video)
{
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int idx;

    video->currFS->IsOutputted = 3; /* mark the buffer as free for reuse */

#ifdef PV_MEMORY_POOL /* for non-memory pool, no need to do anything */
    /* locate the current frame store's index and unbind it from the pool */
    for (idx = dpb->num_fs - 1; idx >= 0; idx--)
    {
        if (dpb->fs[idx] == video->currFS)
        {
            avcHandle->CBAVC_FrameUnbind(avcHandle->userData, idx);
            break;
        }
    }
#endif
    return ;
}
/* see subclause 8.2.5.1 */
/* Decoded reference picture marking: updates the reference status of the
   current picture and of the frames already in the DPB, per subclause 8.2.5.1.
   On IDR, all other frames are unmarked; otherwise either the sliding-window
   or the adaptive (MMCO) marking process is run.
   IsOutputted bit 1 means "no longer needed for reference"; a store whose
   IsOutputted reaches 3 can be recycled (see DPBInitBuffer).
   \return "AVC_SUCCESS, or AVC_FAIL when marking fails or the number of
            reference frames exceeds num_ref_frames." */
OSCL_EXPORT_REF AVCStatus StorePictureInDPB(AVCHandle *avcHandle, AVCCommonObj *video)
{
    AVCStatus status;
    AVCDecPicBuffer *dpb = video->decPicBuf;
    AVCSliceHeader *sliceHdr = video->sliceHdr;
    int ii, num_ref;
    /* number 1 of 8.2.5.1, we handle gaps in frame_num differently without using the memory */
    /* to be done!!!! */
    /* number 3 of 8.2.5.1 */
    if (video->nal_unit_type == AVC_NALTYPE_IDR)
    {
        /* an IDR invalidates every previously decoded reference frame */
        for (ii = 0; ii < dpb->num_fs; ii++)
        {
            if (dpb->fs[ii] != video->currFS) /* not current frame */
            {
                dpb->fs[ii]->IsReference = 0; /* mark as unused for reference */
                dpb->fs[ii]->IsLongTerm = 0; /* but still used until output */
                dpb->fs[ii]->IsOutputted |= 0x02;
#ifdef PV_MEMORY_POOL
                if (dpb->fs[ii]->IsOutputted == 3)
                {
                    avcHandle->CBAVC_FrameUnbind(avcHandle->userData, ii);
                }
#endif
            }
        }
        video->currPic->isReference = TRUE;
        video->currFS->IsReference = 3;
        if (sliceHdr->long_term_reference_flag == 0)
        {
            video->currPic->isLongTerm = FALSE;
            video->currFS->IsLongTerm = 0;
            video->MaxLongTermFrameIdx = -1; /* "no long-term frame indices" */
        }
        else
        {
            /* the IDR picture itself becomes long-term with index 0 */
            video->currPic->isLongTerm = TRUE;
            video->currFS->IsLongTerm = 3;
            video->currFS->LongTermFrameIdx = 0;
            video->MaxLongTermFrameIdx = 0;
        }
        if (sliceHdr->no_output_of_prior_pics_flag)
        {
            /* discard prior pictures without outputting them */
            for (ii = 0; ii < dpb->num_fs; ii++)
            {
                if (dpb->fs[ii] != video->currFS) /* not current frame */
                {
                    dpb->fs[ii]->IsOutputted = 3;
#ifdef PV_MEMORY_POOL
                    avcHandle->CBAVC_FrameUnbind(avcHandle->userData, ii);
#endif
                }
            }
        }
        video->mem_mgr_ctrl_eq_5 = TRUE; /* flush reference frames MC_FIX */
    }
    else
    {
        if (video->currPic->isReference == TRUE)
        {
            if (sliceHdr->adaptive_ref_pic_marking_mode_flag == 0)
            {
                status = sliding_window_process(avcHandle, video, dpb); /* we may have to do this after adaptive_memory_marking */
            }
            else
            {
                status = adaptive_memory_marking(avcHandle, video, dpb, sliceHdr);
            }
            if (status != AVC_SUCCESS)
            {
                return status;
            }
        }
    }
    /* number 4 of 8.2.5.1 */
    /* This basically says every frame must be at least used for short-term ref. */
    /* Need to be revisited!!! */
    /* look at insert_picture_in_dpb() */
    if (video->nal_unit_type != AVC_NALTYPE_IDR && video->currPic->isLongTerm == FALSE)
    {
        if (video->currPic->isReference)
        {
            video->currFS->IsReference = 3;
        }
        else
        {
            video->currFS->IsReference = 0;
        }
        video->currFS->IsLongTerm = 0;
    }
    /* check if number of reference frames doesn't exceed num_ref_frames */
    num_ref = 0;
    for (ii = 0; ii < dpb->num_fs; ii++)
    {
        if (dpb->fs[ii]->IsReference)
        {
            num_ref++;
        }
    }
    if (num_ref > (int)video->currSeqParams->num_ref_frames)
    {
        return AVC_FAIL; /* out of range */
    }
    return AVC_SUCCESS;
}
/* Sliding-window decoded reference picture marking: while the DPB already
   holds num_ref_frames reference frames (current frame excluded from the
   count), evict the short-term reference frame with the smallest FrameNumWrap.
   \return "AVC_SUCCESS, or AVC_FAIL if no short-term frame is left to evict." */
AVCStatus sliding_window_process(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb)
{
    int ii, numShortTerm, numLongTerm;
    int32 MinFrameNumWrap;
    int MinIdx;
    /* count existing short-term and long-term reference frames */
    numShortTerm = 0;
    numLongTerm = 0;
    for (ii = 0; ii < dpb->num_fs; ii++)
    {
        if (dpb->fs[ii] != video->currFS) /* do not count the current frame */
        {
            if (dpb->fs[ii]->IsLongTerm)
            {
                numLongTerm++;
            }
            else if (dpb->fs[ii]->IsReference)
            {
                numShortTerm++;
            }
        }
    }
    while (numShortTerm + numLongTerm >= (int)video->currSeqParams->num_ref_frames)
    {
        /* get the short-term ref frame with the smallest FrameNumWrap */
        /* (this doesn't work for all I-slice clip since PicOrderCnt will not be initialized) */
        MinFrameNumWrap = 0x7FFFFFFF;
        MinIdx = -1;
        for (ii = 0; ii < dpb->num_fs; ii++)
        {
            if (dpb->fs[ii]->IsReference && !dpb->fs[ii]->IsLongTerm)
            {
                if (dpb->fs[ii]->FrameNumWrap < MinFrameNumWrap)
                {
                    MinFrameNumWrap = dpb->fs[ii]->FrameNumWrap;
                    MinIdx = ii;
                }
            }
        }
        if (MinIdx < 0) /* no short-term frame to evict; something wrong, impossible */
        {
            return AVC_FAIL;
        }
        /* mark the frame with smallest FrameNumWrap to be unused for reference */
        dpb->fs[MinIdx]->IsReference = 0;
        dpb->fs[MinIdx]->IsLongTerm = 0;
        dpb->fs[MinIdx]->frame.isReference = FALSE;
        dpb->fs[MinIdx]->frame.isLongTerm = FALSE;
        dpb->fs[MinIdx]->IsOutputted |= 0x02;
#ifdef PV_MEMORY_POOL
        if (dpb->fs[MinIdx]->IsOutputted == 3) /* also already outputted => recycle */
        {
            avcHandle->CBAVC_FrameUnbind(avcHandle->userData, MinIdx);
        }
#endif
        numShortTerm--;
    }
    return AVC_SUCCESS;
}
/* see subclause 8.2.5.4: adaptive memory control decoded reference picture
   marking, driven by the MMCO list parsed into the slice header.
   Returns AVC_FAIL when MAX_DEC_REF_PIC_MARKING operations were processed
   without hitting the terminating 0, AVC_SUCCESS otherwise. */
AVCStatus adaptive_memory_marking(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, AVCSliceHeader *sliceHdr)
{
    int opIdx;
    uint mmco;

    for (opIdx = 0; opIdx < MAX_DEC_REF_PIC_MARKING; opIdx++)
    {
        mmco = sliceHdr->memory_management_control_operation[opIdx];
        if (mmco == 0) /* end of the MMCO list */
        {
            return AVC_SUCCESS;
        }
        switch (mmco)
        {
            case 1: /* unmark a short-term picture */
                MemMgrCtrlOp1(avcHandle, video, dpb, sliceHdr->difference_of_pic_nums_minus1[opIdx]);
                // update_ref_list(dpb);
                break;
            case 2: /* unmark a long-term picture */
                MemMgrCtrlOp2(avcHandle, dpb, sliceHdr->long_term_pic_num[opIdx]);
                break;
            case 3: /* turn a short-term picture into a long-term one */
                MemMgrCtrlOp3(avcHandle, video, dpb, sliceHdr->difference_of_pic_nums_minus1[opIdx], sliceHdr->long_term_frame_idx[opIdx]);
                break;
            case 4: /* set MaxLongTermFrameIdx */
                MemMgrCtrlOp4(avcHandle, video, dpb, sliceHdr->max_long_term_frame_idx_plus1[opIdx]);
                break;
            case 5: /* flush all reference pictures */
                MemMgrCtrlOp5(avcHandle, video, dpb);
                video->currFS->FrameNum = 0;
                video->currFS->PicOrderCnt = 0;
                break;
            case 6: /* assign a long-term index to the current picture */
                MemMgrCtrlOp6(avcHandle, video, dpb, sliceHdr->long_term_frame_idx[opIdx]);
                break;
        }
    }
    return AVC_FAIL; /* exceed the limit */
}
/* see subclause 8.2.5.4.1: mark a short-term picture as "unused for reference" */
void MemMgrCtrlOp1(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, int difference_of_pic_nums_minus1)
{
    int targetPicNum, idx;
    AVCFrameStore *fs;

    /* PicNum of the picture to be unmarked */
    targetPicNum = video->CurrPicNum - (difference_of_pic_nums_minus1 + 1);

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        fs = dpb->fs[idx];
        if ((fs->IsReference == 3) && (fs->IsLongTerm == 0) && (fs->frame.PicNum == targetPicNum))
        {
            unmark_for_reference(avcHandle, dpb, idx);
            return ;
        }
    }
    return ;
}
/* see subclause 8.2.5.4.2: mark a long-term picture as "unused for reference" */
void MemMgrCtrlOp2(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, int long_term_pic_num)
{
    int idx;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        if ((dpb->fs[idx]->IsLongTerm == 3) &&
                (dpb->fs[idx]->frame.LongTermPicNum == long_term_pic_num))
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
    }
}
/* see subclause 8.2.5.4.3: assign LongTermFrameIdx to a short-term reference
   picture, converting it into a long-term reference */
void MemMgrCtrlOp3(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint difference_of_pic_nums_minus1,
                   uint long_term_frame_idx)
{
    int targetPicNum, idx;
    AVCFrameStore *fs;

    targetPicNum = video->CurrPicNum - (difference_of_pic_nums_minus1 + 1);

    /* first free whichever frame currently holds this long-term index */
    unmark_long_term_frame_for_reference_by_frame_idx(avcHandle, dpb, long_term_frame_idx);

    /* then make the short-term picture with PicNum == targetPicNum long-term */
    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        fs = dpb->fs[idx];
        if ((fs->IsReference == 3) && (fs->frame.isLongTerm == FALSE) && (fs->frame.PicNum == targetPicNum))
        {
            fs->LongTermFrameIdx = long_term_frame_idx;
            fs->frame.LongTermPicNum = long_term_frame_idx;
            fs->frame.isLongTerm = TRUE;
            fs->IsLongTerm = 3;
            return;
        }
    }
}
/* see subclause 8.2.5.4.4: update MaxLongTermFrameIdx and unmark long-term
   frames whose index now exceeds it */
void MemMgrCtrlOp4(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint max_long_term_frame_idx_plus1)
{
    int idx;

    video->MaxLongTermFrameIdx = max_long_term_frame_idx_plus1 - 1;

    /* drop long-term frames (other than the current one) above the new limit */
    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        if (dpb->fs[idx]->IsLongTerm && (dpb->fs[idx] != video->currFS) &&
                (dpb->fs[idx]->LongTermFrameIdx > video->MaxLongTermFrameIdx))
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
    }
}
/* see subclause 8.2.5.4.5: mark all reference pictures as "unused for
   reference" and set MaxLongTermFrameIdx to "no long-term frame indices" */
void MemMgrCtrlOp5(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb)
{
    int idx;

    video->MaxLongTermFrameIdx = -1;
    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        if (dpb->fs[idx] != video->currFS) /* the current frame keeps its marking */ // MC_FIX
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
    }
    video->mem_mgr_ctrl_eq_5 = TRUE;
}
/* see subclause 8.2.5.4.6: assign a long-term frame index to the current picture */
void MemMgrCtrlOp6(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint long_term_frame_idx)
{
    /* release whichever frame currently owns this long-term index */
    unmark_long_term_frame_for_reference_by_frame_idx(avcHandle, dpb, long_term_frame_idx);

    video->currFS->LongTermFrameIdx = long_term_frame_idx;
    video->currFS->IsLongTerm = 3;
    video->currFS->IsReference = 3;
    video->currPic->isLongTerm = TRUE;
    video->currPic->isReference = TRUE;
}
/* Clear all reference markings on the frame store at dpb->fs[idx] and flag it
   as no longer needed for reference (IsOutputted bit 1); when it has also been
   outputted (IsOutputted == 3) the pool buffer is unbound. */
void unmark_for_reference(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint idx)
{
    AVCFrameStore *frameStore = dpb->fs[idx];

    frameStore->frame.isReference = FALSE;
    frameStore->frame.isLongTerm = FALSE;
    frameStore->IsReference = 0;
    frameStore->IsLongTerm = 0;
    frameStore->IsOutputted |= 0x02;
#ifdef PV_MEMORY_POOL
    if (frameStore->IsOutputted == 3)
    {
        avcHandle->CBAVC_FrameUnbind(avcHandle->userData, idx);
    }
#endif
    return ;
}
/* Unmark every long-term frame whose LongTermFrameIdx matches the given index. */
void unmark_long_term_frame_for_reference_by_frame_idx(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint long_term_frame_idx)
{
    int idx;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        if (dpb->fs[idx]->IsLongTerm == 0)
        {
            continue;
        }
        if (dpb->fs[idx]->LongTermFrameIdx == (int)long_term_frame_idx)
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
    }
}

View File

@@ -0,0 +1,249 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include <string.h>
#include "avclib_common.h"
/* see subclause 8.2.2 Decoding process for macroblock to slice group map:
   (re)generate video->MbToSliceGroupMap from the active PPS.
   Returns AVC_FAIL for an out-of-range slice_group_map_type. */
OSCL_EXPORT_REF AVCStatus FMOInit(AVCCommonObj *video)
{
    AVCPicParamSet *pps = video->currPicParams;
    int *sliceGroupMap = video->MbToSliceGroupMap;
    int mapUnits = video->PicSizeInMapUnits;
    int picWidth = video->PicWidthInMbs;

    if (pps->num_slice_groups_minus1 == 0)
    {
        /* a single slice group: every map unit belongs to group 0 */
        memset(video->MbToSliceGroupMap, 0, video->PicSizeInMapUnits*sizeof(uint));
        return AVC_SUCCESS;
    }
    switch (pps->slice_group_map_type)
    {
        case 0: /* interleaved */
            FmoGenerateType0MapUnitMap(sliceGroupMap, pps->run_length_minus1, pps->num_slice_groups_minus1, mapUnits);
            break;
        case 1: /* dispersed */
            FmoGenerateType1MapUnitMap(sliceGroupMap, picWidth, pps->num_slice_groups_minus1, mapUnits);
            break;
        case 2: /* foreground with left-over */
            FmoGenerateType2MapUnitMap(pps, sliceGroupMap, picWidth, pps->num_slice_groups_minus1, mapUnits);
            break;
        case 3: /* box-out */
            FmoGenerateType3MapUnitMap(video, pps, sliceGroupMap, picWidth);
            break;
        case 4: /* raster scan */
            FmoGenerateType4MapUnitMap(sliceGroupMap, video->MapUnitsInSliceGroup0, pps->slice_group_change_direction_flag, mapUnits);
            break;
        case 5: /* wipe */
            FmoGenerateType5MapUnitMap(sliceGroupMap, video, pps->slice_group_change_direction_flag, mapUnits);
            break;
        case 6: /* explicit */
            FmoGenerateType6MapUnitMap(sliceGroupMap, (int*)pps->slice_group_id, mapUnits);
            break;
        default:
            return AVC_FAIL; /* out of range, shouldn't come this far */
    }
    return AVC_SUCCESS;
}
/* see subclause 8.2.2.1 interleaved slice group map type*/
/* Fill the map with runs of consecutive slice-group ids: run iGroup has
   length run_length_minus1[iGroup] + 1, and the groups are cycled through
   repeatedly until the whole picture is covered.  Note that the for-loop
   advances i by the run length in its increment expression, so i always
   points at the start of the next run. */
void FmoGenerateType0MapUnitMap(int *mapUnitToSliceGroupMap, uint *run_length_minus1, uint num_slice_groups_minus1, uint PicSizeInMapUnits)
{
    uint iGroup, j;
    uint i = 0;
    do
    {
        for (iGroup = 0;
                (iGroup <= num_slice_groups_minus1) && (i < PicSizeInMapUnits);
                i += run_length_minus1[iGroup++] + 1)
        {
            /* label this run, clipped to the end of the picture */
            for (j = 0; j <= run_length_minus1[ iGroup ] && i + j < PicSizeInMapUnits; j++)
                mapUnitToSliceGroupMap[i+j] = iGroup;
        }
    }
    while (i < PicSizeInMapUnits);
}
/* see subclause 8.2.2.2 dispersed slice group map type*/
void FmoGenerateType1MapUnitMap(int *mapUnitToSliceGroupMap, int PicWidthInMbs, uint num_slice_groups_minus1, uint PicSizeInMapUnits)
{
uint i;
for (i = 0; i < PicSizeInMapUnits; i++)
{
mapUnitToSliceGroupMap[i] = ((i % PicWidthInMbs) + (((i / PicWidthInMbs) * (num_slice_groups_minus1 + 1)) / 2))
% (num_slice_groups_minus1 + 1);
}
}
/* see subclause 8.2.2.3 foreground with left-over slice group map type:
   each group is a rectangle given by top_left/bottom_right in the PPS;
   everything not covered falls into the last ("left-over") group */
void FmoGenerateType2MapUnitMap(AVCPicParamSet *pps, int *mapUnitToSliceGroupMap, int PicWidthInMbs,
                                uint num_slice_groups_minus1, uint PicSizeInMapUnits)
{
    int group;
    uint unit, x, y;
    uint top, left, bottom, right;

    /* background: everything starts in the left-over group */
    for (unit = 0; unit < PicSizeInMapUnits; unit++)
    {
        mapUnitToSliceGroupMap[unit] = num_slice_groups_minus1;
    }

    /* paint the rectangles from highest to lowest group id so that the
       lowest-numbered group wins where rectangles overlap */
    for (group = num_slice_groups_minus1 - 1; group >= 0; group--)
    {
        top    = pps->top_left[group] / PicWidthInMbs;
        left   = pps->top_left[group] % PicWidthInMbs;
        bottom = pps->bottom_right[group] / PicWidthInMbs;
        right  = pps->bottom_right[group] % PicWidthInMbs;
        for (y = top; y <= bottom; y++)
        {
            for (x = left; x <= right; x++)
            {
                mapUnitToSliceGroupMap[y * PicWidthInMbs + x] = group;
            }
        }
    }
}
/* see subclause 8.2.2.4 box-out slice group map type */
/* follow the text rather than the JM, it's quite different. */
/* Group 0 grows as a rectangular box spiraling out from the picture center;
   everything else stays in group 1.  xDir/yDir encode the current walking
   direction and the four bounds track the box grown so far. */
void FmoGenerateType3MapUnitMap(AVCCommonObj *video, AVCPicParamSet* pps, int *mapUnitToSliceGroupMap,
                                int PicWidthInMbs)
{
    uint i, k;
    int leftBound, topBound, rightBound, bottomBound;
    int x, y, xDir, yDir;
    int mapUnitVacant;
    uint PicSizeInMapUnits = video->PicSizeInMapUnits;
    uint MapUnitsInSliceGroup0 = video->MapUnitsInSliceGroup0;
    /* initially every unit belongs to group 1 */
    for (i = 0; i < PicSizeInMapUnits; i++)
    {
        mapUnitToSliceGroupMap[ i ] = 1;
    }
    /* start at (or next to) the picture center, per the direction flag */
    x = (PicWidthInMbs - pps->slice_group_change_direction_flag) / 2;
    y = (video->PicHeightInMapUnits - pps->slice_group_change_direction_flag) / 2;
    leftBound = x;
    topBound = y;
    rightBound = x;
    bottomBound = y;
    xDir = pps->slice_group_change_direction_flag - 1;
    yDir = pps->slice_group_change_direction_flag;
    /* k counts units claimed for group 0; it advances only when the visited
       unit was still vacant (mapUnitVacant is 0 or 1) */
    for (k = 0; k < MapUnitsInSliceGroup0; k += mapUnitVacant)
    {
        mapUnitVacant = (mapUnitToSliceGroupMap[ y * PicWidthInMbs + x ] == 1);
        if (mapUnitVacant)
        {
            mapUnitToSliceGroupMap[ y * PicWidthInMbs + x ] = 0;
        }
        /* when a box edge is reached, grow that edge (clamped to the picture)
           and turn; otherwise keep walking in the current direction */
        if (xDir == -1 && x == leftBound)
        {
            leftBound = AVC_MAX(leftBound - 1, 0);
            x = leftBound;
            xDir = 0;
            yDir = 2 * pps->slice_group_change_direction_flag - 1;
        }
        else if (xDir == 1 && x == rightBound)
        {
            rightBound = AVC_MIN(rightBound + 1, (int)PicWidthInMbs - 1);
            x = rightBound;
            xDir = 0;
            yDir = 1 - 2 * pps->slice_group_change_direction_flag;
        }
        else if (yDir == -1 && y == topBound)
        {
            topBound = AVC_MAX(topBound - 1, 0);
            y = topBound;
            xDir = 1 - 2 * pps->slice_group_change_direction_flag;
            yDir = 0;
        }
        else if (yDir == 1 && y == bottomBound)
        {
            bottomBound = AVC_MIN(bottomBound + 1, (int)video->PicHeightInMapUnits - 1);
            y = bottomBound;
            xDir = 2 * pps->slice_group_change_direction_flag - 1;
            yDir = 0;
        }
        else
        {
            x = x + xDir;
            y = y + yDir;
        }
    }
}
/* see subclause 8.2.2.5 raster scan slice group map types */
void FmoGenerateType4MapUnitMap(int *mapUnitToSliceGroupMap, int MapUnitsInSliceGroup0, int slice_group_change_direction_flag, uint PicSizeInMapUnits)
{
uint sizeOfUpperLeftGroup = slice_group_change_direction_flag ? (PicSizeInMapUnits - MapUnitsInSliceGroup0) : MapUnitsInSliceGroup0;
uint i;
for (i = 0; i < PicSizeInMapUnits; i++)
if (i < sizeOfUpperLeftGroup)
mapUnitToSliceGroupMap[ i ] = 1 - slice_group_change_direction_flag;
else
mapUnitToSliceGroupMap[ i ] = slice_group_change_direction_flag;
}
/* see subclause 8.2.2.6, wipe slice group map type: the picture is walked
   column by column (top to bottom within each column); the first
   upperLeftSize units visited form one group and the rest the other,
   with the direction flag selecting which group comes first */
void FmoGenerateType5MapUnitMap(int *mapUnitToSliceGroupMap, AVCCommonObj *video,
                                int slice_group_change_direction_flag, uint PicSizeInMapUnits)
{
    int picWidth = video->PicWidthInMbs;
    int picHeight = video->PicHeightInMapUnits;
    int MapUnitsInSliceGroup0 = video->MapUnitsInSliceGroup0;
    int upperLeftSize;
    int col, row, visited = 0;
    int firstGroup = 1 - slice_group_change_direction_flag;
    int secondGroup = slice_group_change_direction_flag;

    if (slice_group_change_direction_flag)
    {
        upperLeftSize = PicSizeInMapUnits - MapUnitsInSliceGroup0;
    }
    else
    {
        upperLeftSize = MapUnitsInSliceGroup0;
    }

    for (col = 0; col < picWidth; col++)
    {
        for (row = 0; row < picHeight; row++)
        {
            if (visited++ < upperLeftSize)
            {
                mapUnitToSliceGroupMap[row * picWidth + col] = firstGroup;
            }
            else
            {
                mapUnitToSliceGroupMap[row * picWidth + col] = secondGroup;
            }
        }
    }
}
/* see subclause 8.2.2.7, explicit slice group map: the map is transmitted
   verbatim in the PPS, so it is simply copied over */
void FmoGenerateType6MapUnitMap(int *mapUnitToSliceGroupMap, int *slice_group_id, uint PicSizeInMapUnits)
{
    memcpy(mapUnitToSliceGroupMap, slice_group_id, PicSizeInMapUnits * sizeof(int));
}

View File

@@ -0,0 +1,471 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include <string.h>
#include "avclib_common.h"
/* Compute the addresses and availability of the four neighboring macroblocks
   (A = left, B = top, C = top-right, D = top-left) of macroblock mbNum.
   Frame mode only; intra-availability calculation is postponed.  A neighbor
   is available only when it lies inside the picture and belongs to the same
   slice as the current macroblock. */
OSCL_EXPORT_REF void InitNeighborAvailability(AVCCommonObj *video, int mbNum)
{
    int picWidth = video->PicWidthInMbs;
    int currSliceId = video->currMB->slice_id;
    AVCMacroblock *mblock = video->mblock;

    video->mbAddrA = mbNum - 1;            /* left      */
    video->mbAddrB = mbNum - picWidth;     /* top       */
    video->mbAddrC = mbNum - picWidth + 1; /* top-right */
    video->mbAddrD = mbNum - picWidth - 1; /* top-left  */

    video->mbAvailA = video->mbAvailB = video->mbAvailC = video->mbAvailD = 0;

    if (video->mb_x != 0) /* not in the first column */
    {
        video->mbAvailA = (mblock[video->mbAddrA].slice_id == currSliceId);
        if (video->mb_y != 0)
        {
            video->mbAvailD = (mblock[video->mbAddrD].slice_id == currSliceId);
        }
    }
    if (video->mb_y != 0) /* not in the first row */
    {
        video->mbAvailB = (mblock[video->mbAddrB].slice_id == currSliceId);
        if (video->mb_x < (picWidth - 1))
        {
            video->mbAvailC = (mblock[video->mbAddrC].slice_id == currSliceId);
        }
    }
    return ;
}
/* A macroblock is available as a neighbor only when its address lies inside
   the picture and it belongs to the same slice as the current macroblock. */
bool mb_is_available(AVCMacroblock *mblock, uint PicSizeInMbs, int mbAddr, int currMbAddr)
{
    if (mbAddr < 0 || mbAddr >= (int)PicSizeInMbs)
    {
        return FALSE; /* outside the picture */
    }
    return (mblock[mbAddr].slice_id == mblock[currMbAddr].slice_id) ? TRUE : FALSE;
}
/* Predict the number of nonzero coefficients (nC) for the luma 4x4 block at
   position (i,j) inside the current macroblock, from the left and top
   neighboring 4x4 blocks.  When both neighbors exist the prediction is their
   rounded-up average; with one neighbor it is that neighbor's count; with
   none it is 0.  (Frame decoding only, see the 1/19/04 notes in history.) */
OSCL_EXPORT_REF int predict_nnz(AVCCommonObj *video, int i, int j)
{
    int total = 0;
    int numNeighbors = 0;
    AVCMacroblock *neighborMB;

    /* left 4x4 block: inside the current MB when i > 0, otherwise in MB A */
    if (i)
    {
        total = video->currMB->nz_coeff[(j << 2) + i - 1];
        numNeighbors = 1;
    }
    else if (video->mbAvailA)
    {
        neighborMB = video->mblock + video->mbAddrA;
        total = neighborMB->nz_coeff[(j << 2) + 3];
        numNeighbors = 1;
    }

    /* top 4x4 block: inside the current MB when j > 0, otherwise in MB B */
    if (j)
    {
        total += video->currMB->nz_coeff[((j - 1) << 2) + i];
        numNeighbors++;
    }
    else if (video->mbAvailB)
    {
        neighborMB = video->mblock + video->mbAddrB;
        total += neighborMB->nz_coeff[12 + i];
        numNeighbors++;
    }

    /* rounded-up average when both neighbors contributed */
    if (numNeighbors == 2)
    {
        total = (total + 1) >> 1;
    }
    return total;
}
/* Predict the number of nonzero coefficients (nC) for the chroma 4x4 block at
   position (i,j), from the left and top neighboring 4x4 chroma blocks.  Same
   averaging rule as predict_nnz, with chroma-specific nz_coeff indexing.
   (Frame decoding only, see the 1/19/04 notes in history.) */
OSCL_EXPORT_REF int predict_nnz_chroma(AVCCommonObj *video, int i, int j)
{
    int total = 0;
    int numNeighbors = 0;
    AVCMacroblock *neighborMB;

    /* left block: odd i means the left neighbor is inside the current MB */
    if (i & 1)
    {
        total = video->currMB->nz_coeff[(j << 2) + i - 1];
        numNeighbors = 1;
    }
    else if (video->mbAvailA)
    {
        neighborMB = video->mblock + video->mbAddrA;
        total = neighborMB->nz_coeff[(j << 2) + i + 1];
        numNeighbors = 1;
    }

    /* top block: odd j means the top neighbor is inside the current MB */
    if (j & 1)
    {
        total += video->currMB->nz_coeff[((j - 1) << 2) + i];
        numNeighbors++;
    }
    else if (video->mbAvailB)
    {
        neighborMB = video->mblock + video->mbAddrB;
        total += neighborMB->nz_coeff[20 + i];
        numNeighbors++;
    }

    /* rounded-up average when both neighbors contributed */
    if (numNeighbors == 2)
    {
        total = (total + 1) >> 1;
    }
    return total;
}
/* Derive the motion vector predictor for every partition/sub-partition of
   the current macroblock (median of neighbors A=left, B=top, C=top-right,
   with D=top-left as fallback for C, plus the 16x8/8x16 special cases).
   encFlag != 0 (encoder): writes the residual MVD into video->mvd_l0 from
   the MVs already stored in currMB->mvL0.
   encFlag == 0 (decoder): reconstructs currMB->mvL0 by adding the decoded
   MVD in video->mvd_l0 to the predictor and replicating it over all 4x4
   blocks of the sub-partition. */
OSCL_EXPORT_REF void GetMotionVectorPredictor(AVCCommonObj *video, int encFlag)
{
    AVCMacroblock *currMB = video->currMB;
    AVCMacroblock *MB_A, *MB_B, *MB_C, *MB_D;
    int block_x, block_y, block_x_1, block_y_1, new_block_x;
    int mbPartIdx, subMbPartIdx, offset_indx;
    int16 *mv, pmv_x, pmv_y;
    int nmSubMbHeight, nmSubMbWidth, mbPartIdx_X, mbPartIdx_Y;
    int avail_a, avail_b, avail_c;
    /* bitmask keyed by (block_y<<2)+new_block_x: set where the above-right
       4x4 neighbor lies inside the current macroblock */
    const static uint32 C = 0x5750;
    int i, j, offset_MbPart_indx, refIdxLXA, refIdxLXB, refIdxLXC = 0, curr_ref_idx;
    int pmv_A_x, pmv_B_x, pmv_C_x = 0, pmv_A_y, pmv_B_y, pmv_C_y = 0;

    /* we have to take care of Intra/skip blocks somewhere, i.e. set MV to 0 and set ref to -1! */
    /* we have to populate refIdx as well */

    MB_A = &video->mblock[video->mbAddrA];
    MB_B = &video->mblock[video->mbAddrB];

    if (currMB->mbMode == AVC_SKIP /* && !encFlag */) /* only for decoder */
    {
        /* P_Skip: ref_idx is 0 everywhere; MV is zero when a neighbor is
           missing or a neighbor has ref 0 with zero MV, otherwise it falls
           through to the normal predictor derivation below with MVD = 0. */
        currMB->ref_idx_L0[0] = currMB->ref_idx_L0[1] = currMB->ref_idx_L0[2] = currMB->ref_idx_L0[3] = 0;
        if (video->mbAvailA && video->mbAvailB)
        {
            if ((MB_A->ref_idx_L0[1] == 0 && MB_A->mvL0[3] == 0) ||
                    (MB_B->ref_idx_L0[2] == 0 && MB_B->mvL0[12] == 0))
            {
                memset(currMB->mvL0, 0, sizeof(int32)*16);
                return;
            }
        }
        else
        {
            memset(currMB->mvL0, 0, sizeof(int32)*16);
            return;
        }
        video->mvd_l0[0][0][0] = 0;
        video->mvd_l0[0][0][1] = 0;
    }

    MB_C = &video->mblock[video->mbAddrC];
    MB_D = &video->mblock[video->mbAddrD];

    offset_MbPart_indx = 0;
    for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
    {
        offset_indx = 0;
        nmSubMbHeight = currMB->SubMbPartHeight[mbPartIdx] >> 2;
        nmSubMbWidth = currMB->SubMbPartWidth[mbPartIdx] >> 2;
        mbPartIdx_X = ((mbPartIdx + offset_MbPart_indx) & 1) << 1;
        mbPartIdx_Y = (mbPartIdx + offset_MbPart_indx) & 2;

        for (subMbPartIdx = 0; subMbPartIdx < currMB->NumSubMbPart[mbPartIdx]; subMbPartIdx++)
        {
            /* (block_x, block_y): 4x4-block coordinates of this
               sub-partition's top-left corner within the MB */
            block_x = mbPartIdx_X + ((subMbPartIdx + offset_indx) & 1);
            block_y = mbPartIdx_Y + (((subMbPartIdx + offset_indx) >> 1) & 1);

            block_x_1 = block_x - 1;
            block_y_1 = block_y - 1;
            refIdxLXA = refIdxLXB = refIdxLXC = -1;
            pmv_A_x = pmv_A_y = pmv_B_x = pmv_B_y = pmv_C_x = pmv_C_y = 0;

            /* neighbor A: 4x4 block to the left, either inside the current
               MB or the rightmost column of MB A */
            if (block_x)
            {
                avail_a = 1;
                refIdxLXA = currMB->ref_idx_L0[(block_y & 2) + (block_x_1 >> 1)];
                mv = (int16*)(currMB->mvL0 + (block_y << 2) + block_x_1);
                pmv_A_x = *mv++;
                pmv_A_y = *mv;
            }
            else
            {
                avail_a = video->mbAvailA;
                if (avail_a)
                {
                    refIdxLXA = MB_A->ref_idx_L0[(block_y & 2) + 1];
                    mv = (int16*)(MB_A->mvL0 + (block_y << 2) + 3);
                    pmv_A_x = *mv++;
                    pmv_A_y = *mv;
                }
            }

            /* neighbor B: 4x4 block above, either inside the current MB or
               the bottom row of MB B */
            if (block_y)
            {
                avail_b = 1;
                refIdxLXB = currMB->ref_idx_L0[(block_y_1 & 2) + (block_x >> 1)];
                mv = (int16*)(currMB->mvL0 + (block_y_1 << 2) + block_x);
                pmv_B_x = *mv++;
                pmv_B_y = *mv;
            }
            else
            {
                avail_b = video->mbAvailB;
                if (avail_b)
                {
                    refIdxLXB = MB_B->ref_idx_L0[2 + (block_x >> 1)];
                    mv = (int16*)(MB_B->mvL0 + 12 + block_x);
                    pmv_B_x = *mv++;
                    pmv_B_y = *mv;
                }
            }

            /* neighbor C: above-right of the sub-partition's rightmost
               column; falls back to D (above-left) when C is unavailable */
            new_block_x = block_x + (currMB->SubMbPartWidth[mbPartIdx] >> 2) - 1;
            avail_c = (C >> ((block_y << 2) + new_block_x)) & 0x1;
            if (avail_c)
            {
                /* it guaranteed that block_y > 0 && new_block_x<3 ) */
                refIdxLXC = currMB->ref_idx_L0[(block_y_1 & 2) + ((new_block_x+1) >> 1)];
                mv = (int16*)(currMB->mvL0 + (block_y_1 << 2) + (new_block_x + 1));
                pmv_C_x = *mv++;
                pmv_C_y = *mv;
            }
            else
            {
                if (block_y == 0 && new_block_x < 3)
                {
                    /* C is in MB B (top) */
                    avail_c = video->mbAvailB;
                    if (avail_c)
                    {
                        refIdxLXC = MB_B->ref_idx_L0[2 + ((new_block_x+1)>>1)];
                        mv = (int16*)(MB_B->mvL0 + 12 + (new_block_x + 1));
                        pmv_C_x = *mv++;
                        pmv_C_y = *mv;
                    }
                }
                else if (block_y == 0 && new_block_x == 3)
                {
                    /* C is in MB C (top-right) */
                    avail_c = video->mbAvailC;
                    if (avail_c)
                    {
                        refIdxLXC = MB_C->ref_idx_L0[2];
                        mv = (int16*)(MB_C->mvL0 + 12);
                        pmv_C_x = *mv++;
                        pmv_C_y = *mv;
                    }
                }
                if (avail_c == 0)
                { /* check D */
                    if (block_x && block_y)
                    {
                        avail_c = 1;
                        refIdxLXC = currMB->ref_idx_L0[(block_y_1 & 2) + (block_x_1 >> 1)];
                        mv = (int16*)(currMB->mvL0 + (block_y_1 << 2) + block_x_1);
                        pmv_C_x = *mv++;
                        pmv_C_y = *mv;
                    }
                    else if (block_y)
                    {
                        avail_c = video->mbAvailA;
                        if (avail_c)
                        {
                            refIdxLXC = MB_A->ref_idx_L0[(block_y_1 & 2) + 1];
                            mv = (int16*)(MB_A->mvL0 + (block_y_1 << 2) + 3);
                            pmv_C_x = *mv++;
                            pmv_C_y = *mv;
                        }
                    }
                    else if (block_x)
                    {
                        avail_c = video->mbAvailB;
                        if (avail_c)
                        {
                            refIdxLXC = MB_B->ref_idx_L0[2 + (block_x_1 >> 1)];
                            mv = (int16*)(MB_B->mvL0 + 12 + block_x_1);
                            pmv_C_x = *mv++;
                            pmv_C_y = *mv;
                        }
                    }
                    else
                    {
                        avail_c = video->mbAvailD;
                        if (avail_c)
                        {
                            refIdxLXC = MB_D->ref_idx_L0[3];
                            mv = (int16*)(MB_D->mvL0 + 15);
                            pmv_C_x = *mv++;
                            pmv_C_y = *mv;
                        }
                    }
                }
            }

            offset_indx = currMB->SubMbPartWidth[mbPartIdx] >> 3;

            curr_ref_idx = currMB->ref_idx_L0[(block_y & 2) + (block_x >> 1)];

            /* predictor selection: lone-A case, unique-ref-match case,
               otherwise component-wise median of A, B, C */
            if (avail_a && !(avail_b || avail_c))
            {
                pmv_x = pmv_A_x;
                pmv_y = pmv_A_y;
            }
            else if (((curr_ref_idx == refIdxLXA) + (curr_ref_idx == refIdxLXB) + (curr_ref_idx == refIdxLXC)) == 1)
            {
                if (curr_ref_idx == refIdxLXA)
                {
                    pmv_x = pmv_A_x;
                    pmv_y = pmv_A_y;
                }
                else if (curr_ref_idx == refIdxLXB)
                {
                    pmv_x = pmv_B_x;
                    pmv_y = pmv_B_y;
                }
                else
                {
                    pmv_x = pmv_C_x;
                    pmv_y = pmv_C_y;
                }
            }
            else
            {
                pmv_x = AVC_MEDIAN(pmv_A_x, pmv_B_x, pmv_C_x);
                pmv_y = AVC_MEDIAN(pmv_A_y, pmv_B_y, pmv_C_y);
            }

            /* overwrite if special case */
            /* 16x8 partitions prefer B (top) / A (bottom); 8x16 prefer
               A (left) / C (right) when the reference index matches */
            if (currMB->NumMbPart == 2)
            {
                if (currMB->MbPartWidth == 16)
                {
                    if (mbPartIdx == 0)
                    {
                        if (refIdxLXB == curr_ref_idx)
                        {
                            pmv_x = pmv_B_x;
                            pmv_y = pmv_B_y;
                        }
                    }
                    else if (refIdxLXA == curr_ref_idx)
                    {
                        pmv_x = pmv_A_x;
                        pmv_y = pmv_A_y;
                    }
                }
                else
                {
                    if (mbPartIdx == 0)
                    {
                        if (refIdxLXA == curr_ref_idx)
                        {
                            pmv_x = pmv_A_x;
                            pmv_y = pmv_A_y;
                        }
                    }
                    else if (refIdxLXC == curr_ref_idx)
                    {
                        pmv_x = pmv_C_x;
                        pmv_y = pmv_C_y;
                    }
                }
            }

            mv = (int16*)(currMB->mvL0 + block_x + (block_y << 2));

            if (encFlag) /* calculate residual MV video->mvd_l0 */
            {
                video->mvd_l0[mbPartIdx][subMbPartIdx][0] = *mv++ - pmv_x;
                video->mvd_l0[mbPartIdx][subMbPartIdx][1] = *mv++ - pmv_y;
            }
            else /* calculate original MV currMB->mvL0 */
            {
                pmv_x += video->mvd_l0[mbPartIdx][subMbPartIdx][0];
                pmv_y += video->mvd_l0[mbPartIdx][subMbPartIdx][1];

                /* replicate the reconstructed MV across every 4x4 block
                   covered by this sub-partition */
                for (i = 0; i < nmSubMbHeight; i++)
                {
                    for (j = 0; j < nmSubMbWidth; j++)
                    {
                        *mv++ = pmv_x;
                        *mv++ = pmv_y;
                    }
                    mv += (8 - (j << 1));   /* advance to next 4x4 row */
                }
            }
        }
        offset_MbPart_indx = currMB->MbPartWidth >> 4;
    }
}

View File

@@ -0,0 +1,596 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avclib_common.h"
/** see subclause 8.2.4 Decoding process for reference picture lists construction. */
/* Initialize the reference picture lists for the current slice; see
   subclause 8.2.4 (decoding process for reference picture lists
   construction). For I slices only FrameNumWrap/PicNum are refreshed so
   that sliding-window marking still works; for P slices list 0 is built
   from short-term references (PicNum descending) followed by long-term
   references (LongTermPicNum ascending), then clipped to
   num_ref_idx_l0_active_minus1 + 1. */
OSCL_EXPORT_REF void RefListInit(AVCCommonObj *video)
{
    AVCSliceHeader *sliceHdr = video->sliceHdr;
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int slice_type = video->slice_type;
    int i, list0idx;
    AVCPictureData *tmp_s;

    list0idx = 0;

    if (slice_type == AVC_I_SLICE)
    {
        video->refList0Size = 0;
        video->refList1Size = 0;

        /* we still have to calculate FrameNumWrap to make sure that all I-slice clip
        can perform sliding_window_operation properly. */

        for (i = 0; i < dpb->num_fs; i++)
        {
            if ((dpb->fs[i]->IsReference == 3) && (!dpb->fs[i]->IsLongTerm))
            {
                /* subclause 8.2.4.1 Decoding process for picture numbers. */
                if (dpb->fs[i]->FrameNum > (int)sliceHdr->frame_num)
                {
                    /* frame_num wrapped around: map into negative range */
                    dpb->fs[i]->FrameNumWrap = dpb->fs[i]->FrameNum - video->MaxFrameNum;
                }
                else
                {
                    dpb->fs[i]->FrameNumWrap = dpb->fs[i]->FrameNum;
                }
                dpb->fs[i]->frame.PicNum = dpb->fs[i]->FrameNumWrap;
            }
        }

        return ;
    }

    if (slice_type == AVC_P_SLICE)
    {
        /* Calculate FrameNumWrap and PicNum */
        for (i = 0; i < dpb->num_fs; i++)
        {
            if ((dpb->fs[i]->IsReference == 3) && (!dpb->fs[i]->IsLongTerm))
            {
                /* subclause 8.2.4.1 Decoding process for picture numbers. */
                if (dpb->fs[i]->FrameNum > (int)sliceHdr->frame_num)
                {
                    dpb->fs[i]->FrameNumWrap = dpb->fs[i]->FrameNum - video->MaxFrameNum;
                }
                else
                {
                    dpb->fs[i]->FrameNumWrap = dpb->fs[i]->FrameNum;
                }
                dpb->fs[i]->frame.PicNum = dpb->fs[i]->FrameNumWrap;
                video->RefPicList0[list0idx++] = &(dpb->fs[i]->frame);
            }
        }

        if (list0idx == 0)
        {
            /* no short-term reference at all: force frame store 0 to be a
               reference so the list is never empty */
            dpb->fs[0]->IsReference = 3;
            video->RefPicList0[0] = &(dpb->fs[0]->frame);
            list0idx = 1;
        }
        /* order list 0 by PicNum from max to min, see subclause 8.2.4.2.1 */
        SortPicByPicNum(video->RefPicList0, list0idx);
        video->refList0Size = list0idx;

        /* long term handling */
        for (i = 0; i < dpb->num_fs; i++)
        {
            if (dpb->fs[i]->IsLongTerm == 3)
            {
                /* subclause 8.2.4.1 Decoding process for picture numbers. */
                dpb->fs[i]->frame.LongTermPicNum = dpb->fs[i]->LongTermFrameIdx;
                video->RefPicList0[list0idx++] = &(dpb->fs[i]->frame);
            }
        }

        /* order PicNum from min to max, see subclause 8.2.4.2.1 */
        SortPicByPicNumLongTerm(&(video->RefPicList0[video->refList0Size]), list0idx - video->refList0Size);
        video->refList0Size = list0idx;

        video->refList1Size = 0;
    }

    /* NOTE(review): the identical-lists swap below is the last paragraph of
       subclause 8.2.4.2.4, which applies to B slices; with only I/P slices
       handled above refList1Size is always 0 here, so this block looks
       unreachable — presumably retained from B-slice support. */
    if ((video->refList0Size == video->refList1Size) && (video->refList0Size > 1))
    {
        /* check if lists are identical, if yes swap first two elements of listX[1] */
        /* last paragraph of subclause 8.2.4.2.4 */
        for (i = 0; i < video->refList0Size; i++)
        {
            if (video->RefPicList0[i] != video->RefPicList1[i])
            {
                break;
            }
        }
        if (i == video->refList0Size)
        {
            tmp_s = video->RefPicList1[0];
            video->RefPicList1[0] = video->RefPicList1[1];
            video->RefPicList1[1] = tmp_s;
        }
    }

    /* set max size */
    video->refList0Size = AVC_MIN(video->refList0Size, (int)video->sliceHdr->num_ref_idx_l0_active_minus1 + 1);
    video->refList1Size = AVC_MIN(video->refList1Size, (int)video->sliceHdr->num_ref_idx_l1_active_minus1 + 1);

    return ;
}
/* see subclause 8.2.4.3 */
/* Apply slice-header reference picture list reordering commands; see
   subclause 8.2.4.3. I slices carry no list, so they pass through.
   Returns AVC_FAIL if reordering fails or leaves list 0 empty. */
OSCL_EXPORT_REF AVCStatus ReOrderList(AVCCommonObj *video)
{
    AVCSliceHeader *sliceHdr = video->sliceHdr;
    AVCStatus status = AVC_SUCCESS;

    if (video->slice_type == AVC_I_SLICE)
    {
        return status;  /* nothing to reorder for intra slices */
    }

    if (sliceHdr->ref_pic_list_reordering_flag_l0)
    {
        status = ReorderRefPicList(video, 0);
        if (status != AVC_SUCCESS)
        {
            return status;
        }
    }

    /* an empty list 0 after reordering is a bitstream error */
    return (video->refList0Size == 0) ? AVC_FAIL : status;
}
/* Re-order one reference picture list (list 0 when isL1 == 0, list 1
   otherwise) according to the reordering commands decoded into the slice
   header; see subclause 8.2.4.3.
   Returns AVC_SUCCESS, or AVC_FAIL on a malformed/unterminated command
   list or a failed short/long-term reorder.
   Fix: the original loop read remapping_of_pic_nums_idc[i] in the loop
   condition BEFORE checking i >= MAX_REF_PIC_LIST_REORDERING, so an
   unterminated command list caused an out-of-bounds read. The bound is now
   checked first; an unterminated list still returns AVC_FAIL as before. */
AVCStatus ReorderRefPicList(AVCCommonObj *video, int isL1)
{
    AVCSliceHeader *sliceHdr = video->sliceHdr;
    AVCStatus status;
    int *list_size;
    int num_ref_idx_lX_active_minus1;
    uint *remapping_of_pic_nums_idc;
    int *abs_diff_pic_num_minus1;
    int *long_term_pic_idx;
    int i;
    int maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, picNumLX;
    int refIdxLX = 0;
    void* tmp;

    /* bind to the list-0 or list-1 state and its slice-header syntax arrays */
    if (!isL1) /* list 0 */
    {
        list_size = &(video->refList0Size);
        num_ref_idx_lX_active_minus1 = sliceHdr->num_ref_idx_l0_active_minus1;
        remapping_of_pic_nums_idc = sliceHdr->reordering_of_pic_nums_idc_l0;
        tmp = (void*)sliceHdr->abs_diff_pic_num_minus1_l0;
        abs_diff_pic_num_minus1 = (int*) tmp;
        tmp = (void*)sliceHdr->long_term_pic_num_l0;
        long_term_pic_idx = (int*) tmp;
    }
    else
    {
        list_size = &(video->refList1Size);
        num_ref_idx_lX_active_minus1 = sliceHdr->num_ref_idx_l1_active_minus1;
        remapping_of_pic_nums_idc = sliceHdr->reordering_of_pic_nums_idc_l1;
        tmp = (void*) sliceHdr->abs_diff_pic_num_minus1_l1;
        abs_diff_pic_num_minus1 = (int*) tmp;
        tmp = (void*) sliceHdr->long_term_pic_num_l1;
        long_term_pic_idx = (int*)tmp;
    }

    maxPicNum = video->MaxPicNum;
    currPicNum = video->CurrPicNum;

    picNumLXPred = currPicNum; /* initial value */

    for (i = 0; i < MAX_REF_PIC_LIST_REORDERING; i++)
    {
        if (remapping_of_pic_nums_idc[i] == 3)
        {
            break;  /* end-of-commands marker */
        }
        if (remapping_of_pic_nums_idc[i] > 3)
        {
            return AVC_FAIL; /* out of range */
        }
        /* see subclause 8.2.4.3.1 */
        if (remapping_of_pic_nums_idc[i] < 2)
        {
            /* short-term reorder: abs_diff_pic_num_minus1 is subtracted
               (idc == 0) or added (idc == 1), modulo maxPicNum */
            if (remapping_of_pic_nums_idc[i] == 0)
            {
                if (picNumLXPred - (abs_diff_pic_num_minus1[i] + 1) < 0)
                    picNumLXNoWrap = picNumLXPred - (abs_diff_pic_num_minus1[i] + 1) + maxPicNum;
                else
                    picNumLXNoWrap = picNumLXPred - (abs_diff_pic_num_minus1[i] + 1);
            }
            else /* (remapping_of_pic_nums_idc[i] == 1) */
            {
                if (picNumLXPred + (abs_diff_pic_num_minus1[i] + 1) >= maxPicNum)
                    picNumLXNoWrap = picNumLXPred + (abs_diff_pic_num_minus1[i] + 1) - maxPicNum;
                else
                    picNumLXNoWrap = picNumLXPred + (abs_diff_pic_num_minus1[i] + 1);
            }

            picNumLXPred = picNumLXNoWrap; /* prediction for the next one */

            if (picNumLXNoWrap > currPicNum)
                picNumLX = picNumLXNoWrap - maxPicNum;
            else
                picNumLX = picNumLXNoWrap;

            status = ReorderShortTerm(video, picNumLX, &refIdxLX, isL1);
            if (status != AVC_SUCCESS)
            {
                return status;
            }
        }
        else /* (remapping_of_pic_nums_idc[i] == 2), subclause 8.2.4.3.2 */
        {
            status = ReorderLongTerm(video, long_term_pic_idx[i], &refIdxLX, isL1);
            if (status != AVC_SUCCESS)
            {
                return status;
            }
        }
    }

    if (i == MAX_REF_PIC_LIST_REORDERING)
    {
        /* command list not terminated by idc == 3 within bounds */
        return AVC_FAIL;
    }

    /* that's a definition */
    *list_size = num_ref_idx_lX_active_minus1 + 1;

    return AVC_SUCCESS;
}
/* see subclause 8.2.4.3.1 */
/* Place the short-term picture with picture number picNumLX at position
   *refIdxLX of the chosen list and compact the remainder; see subclause
   8.2.4.3.1. *refIdxLX is advanced past the inserted entry. Returns
   AVC_FAIL if the picture is not in the DPB or the active list length
   would exceed MAX_REF_PIC_LIST. */
AVCStatus ReorderShortTerm(AVCCommonObj *video, int picNumLX, int *refIdxLX, int isL1)
{
    int cIdx, nIdx;
    int num_ref_idx_lX_active_minus1;
    AVCPictureData *picLX, **RefPicListX;

    if (!isL1) /* list 0 */
    {
        RefPicListX = video->RefPicList0;
        num_ref_idx_lX_active_minus1 = video->sliceHdr->num_ref_idx_l0_active_minus1;
    }
    else
    {
        RefPicListX = video->RefPicList1;
        num_ref_idx_lX_active_minus1 = video->sliceHdr->num_ref_idx_l1_active_minus1;
    }

    picLX = GetShortTermPic(video, picNumLX);

    if (picLX == NULL)
    {
        return AVC_FAIL;
    }
    /* Note RefPicListX has to access element number num_ref_idx_lX_active */
    /* There could be access violation here. */
    if (num_ref_idx_lX_active_minus1 + 1 >= MAX_REF_PIC_LIST)
    {
        return AVC_FAIL;
    }

    /* shift entries right to open a slot at *refIdxLX */
    for (cIdx = num_ref_idx_lX_active_minus1 + 1; cIdx > *refIdxLX; cIdx--)
    {
        RefPicListX[ cIdx ] = RefPicListX[ cIdx - 1];
    }

    RefPicListX[(*refIdxLX)++ ] = picLX;

    /* compact the tail, dropping the duplicate of the inserted picture */
    nIdx = *refIdxLX;

    for (cIdx = *refIdxLX; cIdx <= num_ref_idx_lX_active_minus1 + 1; cIdx++)
    {
        if (RefPicListX[ cIdx ])
        {
            if ((RefPicListX[ cIdx ]->isLongTerm) || ((int)RefPicListX[ cIdx ]->PicNum != picNumLX))
            {
                RefPicListX[ nIdx++ ] = RefPicListX[ cIdx ];
            }
        }
    }

    return AVC_SUCCESS;
}
/* see subclause 8.2.4.3.2 */
/* Place the long-term picture with number LongTermPicNum at position
   *refIdxLX of the chosen list and compact the remainder; see subclause
   8.2.4.3.2. *refIdxLX is advanced past the inserted entry. Returns
   AVC_FAIL if the picture is not in the DPB or the active list length
   would exceed MAX_REF_PIC_LIST.
   Fix: the compaction loop now skips NULL entries, matching the guard in
   the sibling ReorderShortTerm — the original dereferenced
   RefPicListX[cIdx] unconditionally and could crash on a sparse list. */
AVCStatus ReorderLongTerm(AVCCommonObj *video, int LongTermPicNum, int *refIdxLX, int isL1)
{
    AVCPictureData **RefPicListX;
    int num_ref_idx_lX_active_minus1;
    int cIdx, nIdx;
    AVCPictureData *picLX;

    if (!isL1) /* list 0 */
    {
        RefPicListX = video->RefPicList0;
        num_ref_idx_lX_active_minus1 = video->sliceHdr->num_ref_idx_l0_active_minus1;
    }
    else
    {
        RefPicListX = video->RefPicList1;
        num_ref_idx_lX_active_minus1 = video->sliceHdr->num_ref_idx_l1_active_minus1;
    }

    picLX = GetLongTermPic(video, LongTermPicNum);

    if (picLX == NULL)
    {
        return AVC_FAIL;
    }
    /* Note RefPicListX has to access element number num_ref_idx_lX_active */
    /* There could be access violation here. */
    if (num_ref_idx_lX_active_minus1 + 1 >= MAX_REF_PIC_LIST)
    {
        return AVC_FAIL;
    }

    /* shift entries right to open a slot at *refIdxLX */
    for (cIdx = num_ref_idx_lX_active_minus1 + 1; cIdx > *refIdxLX; cIdx--)
    {
        RefPicListX[ cIdx ] = RefPicListX[ cIdx - 1];
    }

    RefPicListX[(*refIdxLX)++ ] = picLX;

    /* compact the tail, dropping the duplicate of the inserted picture;
       NULL entries are skipped (consistent with ReorderShortTerm) */
    nIdx = *refIdxLX;

    for (cIdx = *refIdxLX; cIdx <= num_ref_idx_lX_active_minus1 + 1; cIdx++)
    {
        if (RefPicListX[ cIdx ])
        {
            if ((!RefPicListX[ cIdx ]->isLongTerm) || ((int)RefPicListX[ cIdx ]->LongTermPicNum != LongTermPicNum))
            {
                RefPicListX[ nIdx++ ] = RefPicListX[ cIdx ];
            }
        }
    }

    return AVC_SUCCESS;
}
/* Look up the short-term reference picture with the given PicNum in the
   decoded picture buffer; returns NULL if no such picture exists. */
AVCPictureData* GetShortTermPic(AVCCommonObj *video, int picNum)
{
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int idx;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        if (dpb->fs[idx]->IsReference != 3)
        {
            continue;   /* not a (complete) reference frame */
        }
        if ((dpb->fs[idx]->frame.isLongTerm == FALSE) && (dpb->fs[idx]->frame.PicNum == picNum))
        {
            return &(dpb->fs[idx]->frame);
        }
    }

    return NULL;
}
/* Look up the long-term reference picture with the given LongTermPicNum in
   the decoded picture buffer; returns NULL if no such picture exists. */
AVCPictureData* GetLongTermPic(AVCCommonObj *video, int LongtermPicNum)
{
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int idx;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        if (dpb->fs[idx]->IsReference != 3)
        {
            continue;   /* not a (complete) reference frame */
        }
        if ((dpb->fs[idx]->frame.isLongTerm == TRUE) && (dpb->fs[idx]->frame.LongTermPicNum == LongtermPicNum))
        {
            return &(dpb->fs[idx]->frame);
        }
    }

    return NULL;
}
/* True iff the picture is a short-term reference. */
int is_short_ref(AVCPictureData *s)
{
    if (!s->isReference)
    {
        return 0;
    }
    return s->isLongTerm ? 0 : 1;
}
/* True iff the picture is a long-term reference. */
int is_long_ref(AVCPictureData *s)
{
    if (!s->isReference)
    {
        return 0;
    }
    return s->isLongTerm ? 1 : 0;
}
/* sort by PicNum, descending order */
/* In-place exchange sort of pictures by PicNum, descending (subclause
   8.2.4.2.1 list-0 short-term ordering). Lists are tiny, so the O(n^2)
   exchange sort is fine. */
void SortPicByPicNum(AVCPictureData *data[], int num)
{
    int outer, inner;

    for (outer = 0; outer < num - 1; outer++)
    {
        for (inner = outer + 1; inner < num; inner++)
        {
            if (data[inner]->PicNum > data[outer]->PicNum)
            {
                AVCPictureData *swap = data[outer];
                data[outer] = data[inner];
                data[inner] = swap;
            }
        }
    }
}
/* sort by PicNum, ascending order */
/* In-place exchange sort of pictures by LongTermPicNum, ascending
   (subclause 8.2.4.2.1 long-term ordering). */
void SortPicByPicNumLongTerm(AVCPictureData *data[], int num)
{
    int outer, inner;

    for (outer = 0; outer < num - 1; outer++)
    {
        for (inner = outer + 1; inner < num; inner++)
        {
            if (data[inner]->LongTermPicNum < data[outer]->LongTermPicNum)
            {
                AVCPictureData *swap = data[outer];
                data[outer] = data[inner];
                data[inner] = swap;
            }
        }
    }
}
/* sort by FrameNumWrap, descending order */
/* In-place exchange sort of frame stores by FrameNumWrap, descending. */
void SortFrameByFrameNumWrap(AVCFrameStore *data[], int num)
{
    int outer, inner;

    for (outer = 0; outer < num - 1; outer++)
    {
        for (inner = outer + 1; inner < num; inner++)
        {
            if (data[inner]->FrameNumWrap > data[outer]->FrameNumWrap)
            {
                AVCFrameStore *swap = data[outer];
                data[outer] = data[inner];
                data[inner] = swap;
            }
        }
    }
}
/* sort frames by LongTermFrameIdx, ascending order */
/* In-place exchange sort of frame stores by LongTermFrameIdx, ascending. */
void SortFrameByLTFrameIdx(AVCFrameStore *data[], int num)
{
    int outer, inner;

    for (outer = 0; outer < num - 1; outer++)
    {
        for (inner = outer + 1; inner < num; inner++)
        {
            if (data[inner]->LongTermFrameIdx < data[outer]->LongTermFrameIdx)
            {
                AVCFrameStore *swap = data[outer];
                data[outer] = data[inner];
                data[inner] = swap;
            }
        }
    }
}
/* sort PictureData by POC in descending order */
/* In-place exchange sort of pictures by PicOrderCnt; descending != 0 sorts
   largest-first, otherwise smallest-first. */
void SortPicByPOC(AVCPictureData *data[], int num, int descending)
{
    int outer, inner;

    for (outer = 0; outer < num - 1; outer++)
    {
        for (inner = outer + 1; inner < num; inner++)
        {
            /* pick the comparison direction per the 'descending' flag */
            int out_of_order = descending
                ? (data[inner]->PicOrderCnt > data[outer]->PicOrderCnt)
                : (data[inner]->PicOrderCnt < data[outer]->PicOrderCnt);
            if (out_of_order)
            {
                AVCPictureData *swap = data[outer];
                data[outer] = data[inner];
                data[inner] = swap;
            }
        }
    }
}
/* sort PictureData by LongTermPicNum in ascending order */
/* In-place exchange sort of pictures by LongTermPicNum, ascending. */
void SortPicByLTPicNum(AVCPictureData *data[], int num)
{
    int outer, inner;

    for (outer = 0; outer < num - 1; outer++)
    {
        for (inner = outer + 1; inner < num; inner++)
        {
            if (data[inner]->LongTermPicNum < data[outer]->LongTermPicNum)
            {
                AVCPictureData *swap = data[outer];
                data[outer] = data[inner];
                data[inner] = swap;
            }
        }
    }
}
/* sort by PicOrderCnt, descending order */
/* In-place exchange sort of frame stores by PicOrderCnt; descending != 0
   sorts largest-first, otherwise smallest-first. */
void SortFrameByPOC(AVCFrameStore *data[], int num, int descending)
{
    int outer, inner;

    for (outer = 0; outer < num - 1; outer++)
    {
        for (inner = outer + 1; inner < num; inner++)
        {
            int out_of_order = descending
                ? (data[inner]->PicOrderCnt > data[outer]->PicOrderCnt)
                : (data[inner]->PicOrderCnt < data[outer]->PicOrderCnt);
            if (out_of_order)
            {
                AVCFrameStore *swap = data[outer];
                data[outer] = data[inner];
                data[inner] = swap;
            }
        }
    }
}

View File

@@ -0,0 +1,439 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "AVCDecoder.h"
#include "avcdec_api.h"
#include "avcdec_int.h"
#include <OMX_Component.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
namespace android {
// Allocation callback handed to the PV AVC core via CBAVC_Malloc. The core
// traffics in int32_t "pointers", so the malloc() result is squeezed into
// 32 bits here. NOTE(review): this only round-trips on a 32-bit process —
// verify before building for a 64-bit target. 'userData' and 'attrs' are
// unused. A failed malloc() returns 0 (NULL) to the core.
static int32_t Malloc(void *userData, int32_t size, int32_t attrs) {
    return reinterpret_cast<int32_t>(malloc(size));
}
// Deallocation callback for CBAVC_Free; reverses the int32_t cast made in
// Malloc() above (same 32-bit-process caveat). 'userData' is unused.
static void Free(void *userData, int32_t ptr) {
    free(reinterpret_cast<void *>(ptr));
}
// Construct a software AVC decoder wrapping 'source'. Registers this
// object's static trampolines as the PV core's DPB-alloc / frame-bind /
// frame-unbind / malloc / free callbacks, and seeds the output format with
// raw YUV420 planar at the dimensions advertised by the source (updated
// later when an SPS is decoded).
AVCDecoder::AVCDecoder(const sp<MediaSource> &source)
    : mSource(source),
      mStarted(false),
      mHandle(new tagAVCHandle),
      mInputBuffer(NULL),
      mAnchorTimeUs(0),
      mNumSamplesOutput(0) {
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;           // round-tripped back in the static callbacks
    mHandle->CBAVC_DPBAlloc = ActivateSPSWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrame;
    mHandle->CBAVC_Malloc = Malloc;
    mHandle->CBAVC_Free = Free;

    mFormat = new MetaData;
    mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);

    // Dimensions from the container; may be revised on AVC_NALTYPE_SPS.
    int32_t width, height;
    CHECK(mSource->getFormat()->findInt32(kKeyWidth, &width));
    CHECK(mSource->getFormat()->findInt32(kKeyHeight, &height));
    mFormat->setInt32(kKeyWidth, width);
    mFormat->setInt32(kKeyHeight, height);
    mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
    mFormat->setCString(kKeyDecoderComponent, "AVCDecoder");
}
// Tear down the decoder; stops it first if the caller never did.
AVCDecoder::~AVCDecoder() {
    if (mStarted) {
        stop();
    }

    delete mHandle;
    mHandle = NULL;
}
// Start the decoder: pull the avcC box (AVCDecoderConfigurationRecord,
// ISO/IEC 14496-15) from the source format if present, queue its SPS/PPS
// blobs as codec-specific input, and start the source in NAL-fragment
// mode. Always returns OK.
status_t AVCDecoder::start(MetaData *) {
    CHECK(!mStarted);

    uint32_t type;
    const void *data;
    size_t size;
    if (mSource->getFormat()->findData(kKeyAVCC, &type, &data, &size)) {
        // Parse the AVCDecoderConfigurationRecord

        const uint8_t *ptr = (const uint8_t *)data;

        CHECK(size >= 7);
        CHECK_EQ(ptr[0], 1);  // configurationVersion == 1
        // NOTE(review): profile/level/lengthSize are parsed but currently
        // unused; kept for documentation of the record layout.
        uint8_t profile = ptr[1];
        uint8_t level = ptr[3];

        // There is decodable content out there that fails the following
        // assertion, let's be lenient for now...
        // CHECK((ptr[4] >> 2) == 0x3f);  // reserved

        size_t lengthSize = 1 + (ptr[4] & 3);

        // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
        // violates it...
        // CHECK((ptr[5] >> 5) == 7);  // reserved

        size_t numSeqParameterSets = ptr[5] & 31;

        ptr += 6;
        size -= 6;

        // Each SPS: 16-bit big-endian length followed by the NAL payload.
        for (size_t i = 0; i < numSeqParameterSets; ++i) {
            CHECK(size >= 2);
            size_t length = U16_AT(ptr);

            ptr += 2;
            size -= 2;

            CHECK(size >= length);

            addCodecSpecificData(ptr, length);

            ptr += length;
            size -= length;
        }

        CHECK(size >= 1);
        size_t numPictureParameterSets = *ptr;
        ++ptr;
        --size;

        // Each PPS: same 16-bit-length framing as the SPS entries.
        for (size_t i = 0; i < numPictureParameterSets; ++i) {
            CHECK(size >= 2);
            size_t length = U16_AT(ptr);

            ptr += 2;
            size -= 2;

            CHECK(size >= length);

            addCodecSpecificData(ptr, length);

            ptr += length;
            size -= length;
        }
    }

    // Ask the source for individual NAL units rather than whole access units.
    sp<MetaData> params = new MetaData;
    params->setInt32(kKeyWantsNALFragments, true);
    mSource->start(params.get());

    mAnchorTimeUs = 0;
    mNumSamplesOutput = 0;
    mStarted = true;

    return OK;
}
void AVCDecoder::addCodecSpecificData(const uint8_t *data, size_t size) {
MediaBuffer *buffer = new MediaBuffer(size);
memcpy(buffer->data(), data, size);
buffer->set_range(0, size);
mCodecSpecificData.push(buffer);
}
// Stop the decoder: drop any unconsumed codec-specific buffers and the
// pending input buffer, stop the source, and release the output frames.
status_t AVCDecoder::stop() {
    CHECK(mStarted);

    size_t count = mCodecSpecificData.size();
    for (size_t idx = 0; idx < count; ++idx) {
        (*mCodecSpecificData.editItemAt(idx)).release();
    }
    mCodecSpecificData.clear();

    if (mInputBuffer != NULL) {
        mInputBuffer->release();
        mInputBuffer = NULL;
    }

    mSource->stop();
    releaseFrames();

    mStarted = false;

    return OK;
}
// Return the output format; it is updated in place when an SPS changes
// the picture dimensions.
sp<MetaData> AVCDecoder::getFormat() {
    sp<MetaData> format = mFormat;
    return format;
}
// Decode one NAL unit and possibly produce one output frame.
// Input comes from the codec-specific queue first, then from the source.
// Outcomes by NAL type:
//   SPS   -> update output dimensions (with cropping) and return
//            INFO_FORMAT_CHANGED;
//   PPS/SEI -> consume and return an empty MediaBuffer;
//   slice/IDR -> on AVCDEC_PICTURE_OUTPUT_READY return the bound output
//            frame (input buffer intentionally kept until the core is
//            done with it), on AVCDEC_PICTURE_READY return an empty
//            buffer, otherwise UNKNOWN_ERROR.
status_t AVCDecoder::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    if (mInputBuffer == NULL) {
        LOGV("fetching new input buffer.");

        if (!mCodecSpecificData.isEmpty()) {
            // Feed queued SPS/PPS blobs before any real access units.
            mInputBuffer = mCodecSpecificData.editItemAt(0);
            mCodecSpecificData.removeAt(0);
        } else {
            // Skip zero-length buffers from the source.
            for (;;) {
                status_t err = mSource->read(&mInputBuffer);
                if (err != OK) {
                    return err;
                }

                if (mInputBuffer->range_length() > 0) {
                    break;
                }

                mInputBuffer->release();
                mInputBuffer = NULL;
            }
        }
    }

    const uint8_t *inPtr =
        (const uint8_t *)mInputBuffer->data() + mInputBuffer->range_offset();

    int nalType;
    int nalRefIdc;
    AVCDec_Status res =
        PVAVCDecGetNALType(
                const_cast<uint8_t *>(inPtr), mInputBuffer->range_length(),
                &nalType, &nalRefIdc);

    if (res != AVCDEC_SUCCESS) {
        mInputBuffer->release();
        mInputBuffer = NULL;

        return UNKNOWN_ERROR;
    }

    switch (nalType) {
        case AVC_NALTYPE_SPS:
        {
            res = PVAVCDecSeqParamSet(
                    mHandle, const_cast<uint8_t *>(inPtr),
                    mInputBuffer->range_length());

            if (res != AVCDEC_SUCCESS) {
                mInputBuffer->release();
                mInputBuffer = NULL;

                return UNKNOWN_ERROR;
            }

            AVCDecObject *pDecVid = (AVCDecObject *)mHandle->AVCObject;

            // Coded size in macroblock units, then reduced by the SPS
            // frame-cropping rectangle (offsets are in 2-pixel units for
            // frame MBs, 4-pixel vertically for field coding).
            int32_t width =
                (pDecVid->seqParams[0]->pic_width_in_mbs_minus1 + 1) * 16;

            int32_t height =
                (pDecVid->seqParams[0]->pic_height_in_map_units_minus1 + 1) * 16;

            int32_t crop_left, crop_right, crop_top, crop_bottom;
            if (pDecVid->seqParams[0]->frame_cropping_flag)
            {
                crop_left = 2 * pDecVid->seqParams[0]->frame_crop_left_offset;
                crop_right =
                    width - (2 * pDecVid->seqParams[0]->frame_crop_right_offset + 1);

                if (pDecVid->seqParams[0]->frame_mbs_only_flag)
                {
                    crop_top = 2 * pDecVid->seqParams[0]->frame_crop_top_offset;
                    crop_bottom =
                        height -
                        (2 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
                }
                else
                {
                    crop_top = 4 * pDecVid->seqParams[0]->frame_crop_top_offset;
                    crop_bottom =
                        height -
                        (4 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
                }
            } else {
                crop_bottom = height - 1;
                crop_right = width - 1;
                crop_top = crop_left = 0;
            }

            mFormat->setInt32(kKeyWidth, crop_right - crop_left + 1);
            mFormat->setInt32(kKeyHeight, crop_bottom - crop_top + 1);

            mInputBuffer->release();
            mInputBuffer = NULL;

            return INFO_FORMAT_CHANGED;
        }

        case AVC_NALTYPE_PPS:
        {
            res = PVAVCDecPicParamSet(
                    mHandle, const_cast<uint8_t *>(inPtr),
                    mInputBuffer->range_length());

            mInputBuffer->release();
            mInputBuffer = NULL;

            if (res != AVCDEC_SUCCESS) {
                return UNKNOWN_ERROR;
            }

            // No frame produced; hand back an empty buffer.
            *out = new MediaBuffer(0);

            return OK;
        }

        case AVC_NALTYPE_SLICE:
        case AVC_NALTYPE_IDR:
        {
            res = PVAVCDecodeSlice(
                    mHandle, const_cast<uint8_t *>(inPtr),
                    mInputBuffer->range_length());

            if (res == AVCDEC_PICTURE_OUTPUT_READY) {
                // A displayable picture is available in one of the frame
                // buffers we handed to the core via bindFrame().
                int32_t index;
                int32_t Release;
                AVCFrameIO Output;
                Output.YCbCr[0] = Output.YCbCr[1] = Output.YCbCr[2] = NULL;
                CHECK_EQ(PVAVCDecGetOutput(
                            mHandle, &index, &Release, &Output),
                         AVCDEC_SUCCESS);

                CHECK(index >= 0);
                CHECK(index < (int32_t)mFrames.size());

                *out = mFrames.editItemAt(index);
                (*out)->set_range(0, (*out)->size());
                (*out)->add_ref();

                // Do _not_ release input buffer yet.

                return OK;
            }

            mInputBuffer->release();
            mInputBuffer = NULL;

            if (res == AVCDEC_PICTURE_READY) {
                // Picture decoded but not yet ready for output.
                *out = new MediaBuffer(0);
                return OK;
            } else {
                return UNKNOWN_ERROR;
            }
        }

        case AVC_NALTYPE_SEI:
        {
            res = PVAVCDecodeSlice(
                    mHandle, const_cast<uint8_t *>(inPtr),
                    mInputBuffer->range_length());

            mInputBuffer->release();
            mInputBuffer = NULL;

            if (res != AVCDEC_SUCCESS) {
                return UNKNOWN_ERROR;
            }

            *out = new MediaBuffer(0);

            return OK;
        }

        default:
        {
            LOGE("Should not be here, unknown nalType %d", nalType);
            CHECK(!"Should not be here");
            break;
        }
    }

    mInputBuffer->release();
    mInputBuffer = NULL;

    return UNKNOWN_ERROR;
}
// static
int32_t AVCDecoder::ActivateSPSWrapper(
void *userData, unsigned int sizeInMbs, unsigned int numBuffers) {
return static_cast<AVCDecoder *>(userData)->activateSPS(sizeInMbs, numBuffers);
}
// static
int32_t AVCDecoder::BindFrameWrapper(
void *userData, int32_t index, uint8_t **yuv) {
return static_cast<AVCDecoder *>(userData)->bindFrame(index, yuv);
}
// static
void AVCDecoder::UnbindFrame(void *userData, int32_t index) {
}
// DPB-allocation callback body: allocate 'numBuffers' output frame
// buffers of sizeInMbs * 384 bytes each (sizeInMbs << 7 * 3 — consistent
// with one 16x16 YUV420 macroblock: 256 luma + 128 chroma bytes).
// Returns 1 (success) to the PV core.
int32_t AVCDecoder::activateSPS(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mFrames.isEmpty());

    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i < numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        // Observe returns so buffers can outlive read() via add_ref().
        buffer->setObserver(this);

        mFrames.push(buffer);
    }

    return 1;
}
// Frame-bind callback body: hand the core the YUV storage of frame
// 'index' and stamp that frame with the current input buffer's timestamp.
// Returns 1 (success) to the PV core.
int32_t AVCDecoder::bindFrame(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t)mFrames.size());

    CHECK(mInputBuffer != NULL);
    int64_t timeUs;
    CHECK(mInputBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
    mFrames[index]->meta_data()->setInt64(kKeyTime, timeUs);

    *yuv = (uint8_t *)mFrames[index]->data();

    return 1;
}
void AVCDecoder::releaseFrames() {
for (size_t i = 0; i < mFrames.size(); ++i) {
MediaBuffer *buffer = mFrames.editItemAt(i);
buffer->setObserver(NULL);
buffer->release();
}
mFrames.clear();
}
// MediaBufferObserver hook: intentionally empty — returned frame buffers
// are simply kept in mFrames for reuse by the core.
void AVCDecoder::signalBufferReturned(MediaBuffer *buffer) {
}
} // namespace android

View File

@@ -0,0 +1,27 @@
# Build the software AVC (H.264) decoder as a static library for
# stagefright: the AVCDecoder glue plus the PV decoder core sources.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)

LOCAL_SRC_FILES := \
 	AVCDecoder.cpp \
 	src/avcdec_api.cpp \
 	src/avc_bitstream.cpp \
 	src/header.cpp \
 	src/itrans.cpp \
 	src/pred_inter.cpp \
 	src/pred_intra.cpp \
 	src/residual.cpp \
 	src/slice.cpp \
 	src/vlc.cpp

LOCAL_MODULE := libstagefright_avcdec

# ../common/include supplies the shared AVC common-library headers;
# the OpenMAX headers come from opencore's khronos snapshot.
LOCAL_C_INCLUDES := \
	$(LOCAL_PATH)/src \
 	$(LOCAL_PATH)/include \
 	$(LOCAL_PATH)/../common/include \
 	$(TOP)/frameworks/base/media/libstagefright/include \
 	$(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include

# Neutralize the OSCL import/export macros used by the PV sources.
LOCAL_CFLAGS := -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=

include $(BUILD_STATIC_LIBRARY)

View File

@@ -0,0 +1,200 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains application function interfaces to the AVC decoder library
and necessary type defitionitions and enumerations.
@publishedAll
*/
#ifndef _AVCDEC_API_H_
#define _AVCDEC_API_H_
#include "avcapi_common.h"
/**
This enumeration is used for the status returned from the library interface.
*/
typedef enum
{
    /**
    The followings are fail with details. Their values are negative.
    */
    AVCDEC_NO_DATA = -4,
    AVCDEC_PACKET_LOSS = -3,
    /**
    Fail information
    */
    AVCDEC_NO_BUFFER = -2, /* no output picture buffer available */
    AVCDEC_MEMORY_FAIL = -1, /* memory allocation failed */
    AVCDEC_FAIL = 0,
    /**
    Generic success value
    */
    AVCDEC_SUCCESS = 1,
    AVCDEC_PICTURE_OUTPUT_READY = 2, /* a decoded picture can be fetched via PVAVCDecGetOutput */
    AVCDEC_PICTURE_READY = 3, /* picture decoded, no output to fetch yet */
    /**
    The followings are success with warnings. Their values are positive integers.
    */
    AVCDEC_NO_NEXT_SC = 4, /* first start code found but second missing (partial NAL) */
    AVCDEC_REDUNDANT_FRAME = 5,
    AVCDEC_CONCEALED_FRAME = 6 /* detect and conceal the error */
} AVCDec_Status;
/**
This structure contains sequence parameters information.
*/
typedef struct tagAVCDecSPSInfo
{
    int FrameWidth;        /* coded frame width (presumably in pixels, before cropping) */
    int FrameHeight;       /* coded frame height (presumably in pixels, before cropping) */
    uint frame_only_flag;  /* nonzero when the stream is frame-coded only (no fields) */
    int frame_crop_left;   /* display cropping rectangle from the SPS */
    int frame_crop_right;
    int frame_crop_top;
    int frame_crop_bottom;
} AVCDecSPSInfo;
#ifdef __cplusplus
extern "C"
{
#endif
/** THE FOLLOWINGS ARE APIS */
/**
This function parses one NAL unit from byte stream format input according to Annex B.
\param "bitstream" "Pointer to the bitstream buffer."
\param "nal_unit" "Point to pointer and the location of the start of the first NAL unit
found in bitstream."
\param "size" "As input, the pointer to the size of bitstream in bytes. As output,
the value is changed to be the size of the found NAL unit."
\return "AVCDEC_SUCCESS if success, AVCDEC_FAIL if no first start code is found, AVCDEC_NO_NEXT_SC if
the first start code is found, but the second start code is missing (potential partial NAL)."
*/
OSCL_IMPORT_REF AVCDec_Status PVAVCAnnexBGetNALUnit(uint8 *bitstream, uint8 **nal_unit, int *size);
/**
This function sniffs the nal_unit_type such that users can call corresponding APIs.
\param "bitstream" "Pointer to the beginning of a NAL unit (start with forbidden_zero_bit, etc.)."
\param "size" "size of the bitstream (NumBytesInNALunit + 1)."
\param "nal_unit_type" "Pointer to the return value of nal unit type."
\return "AVCDEC_SUCCESS if success, AVCDEC_FAIL otherwise."
*/
OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetNALType(uint8 *bitstream, int size, int *nal_type, int *nal_ref_idc);
/**
This function decodes the sequence parameters set, initializes related parameters and
allocates memory (reference frames list), must also be compliant with Annex A.
It is equivalent to decode VOL header of MPEG4.
\param "avcHandle" "Handle to the AVC decoder library object."
\param "nal_unit" "Pointer to the buffer containing single NAL unit.
The content will change due to EBSP-to-RBSP conversion."
\param "nal_size" "size of the bitstream NumBytesInNALunit."
\return "AVCDEC_SUCCESS if success,
AVCDEC_FAIL if profile and level is not supported,
AVCDEC_MEMORY_FAIL if memory allocations return null."
*/
OSCL_IMPORT_REF AVCDec_Status PVAVCDecSeqParamSet(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
/**
This function returns sequence parameters such as dimension and field flag of the most recently
decoded SPS. More can be added later or grouped together into a structure. This API can be called
after PVAVCInitSequence. If no sequence parameter has been decoded yet, it will return AVCDEC_FAIL.
\param "avcHandle" "Handle to the AVC decoder library object."
\param "seqInfo" "Pointer to the AVCDecSeqParamInfo structure."
\return "AVCDEC_SUCCESS if success and AVCDEC_FAIL if fail."
\note "This API can be combined with PVAVCInitSequence if wanted to be consistent with m4vdec lib."
*/
OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetSeqInfo(AVCHandle *avcHandle, AVCDecSPSInfo *seqInfo);
/**
This function decodes the picture parameters set and initializes related parameters. Note that
the PPS may not be present for every picture.
\param "avcHandle" "Handle to the AVC decoder library object."
\param "nal_unit" "Pointer to the buffer containing single NAL unit.
The content will change due to EBSP-to-RBSP conversion."
\param "nal_size" "size of the bitstream NumBytesInNALunit."
\return "AVCDEC_SUCCESS if success, AVCDEC_FAIL if profile and level is not supported."
*/
OSCL_IMPORT_REF AVCDec_Status PVAVCDecPicParamSet(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
/**
This function decodes one NAL unit of bitstream. The type of nal unit is one of the
followings, 1, 5. (for now, no data partitioning, type 2,3,4).
\param "avcHandle" "Handle to the AVC decoder library object."
\param "nal_unit" "Pointer to the buffer containing a single or partial NAL unit.
The content will change due to EBSP-to-RBSP conversion."
\param "buf_size" "Size of the buffer (less than or equal nal_size)."
\param "nal_size" "size of the current NAL unit NumBytesInNALunit."
\return "AVCDEC_PICTURE_READY for success and an output is ready,
AVCDEC_SUCCESS for success but no output is ready,
AVCDEC_PACKET_LOSS is GetData returns AVCDEC_PACKET_LOSS,
AVCDEC_FAIL if syntax error is detected,
AVCDEC_MEMORY_FAIL if memory is corrupted.
AVCDEC_NO_BUFFER if no frame memory to write to (users need to get output and/or return picture).
AVCDEC_REDUNDANT_FRAME if error has been detected in the primary picture and redundant picture is available,
AVCDEC_CONCEALED_FRAME if error has been detected and decoder has concealed it."
*/
OSCL_IMPORT_REF AVCDec_Status PVAVCDecSEI(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
OSCL_IMPORT_REF AVCDec_Status PVAVCDecodeSlice(AVCHandle *avcHandle, uint8 *buffer, int buf_size);
/**
Check the availability of the decoded picture in decoding order (frame_num).
The AVCFrameIO also provide displaying order information such that the application
can re-order the frame for display. A picture can be retrieved only once.
\param "avcHandle" "Handle to the AVC decoder library object."
\param "output" "Pointer to the AVCOutput structure. Note that decoder library will
not re-used the pixel memory in this structure until it has been returned
thru PVAVCReleaseOutput API."
\return "AVCDEC_SUCCESS for success, AVCDEC_FAIL if no picture is available to be displayed,
AVCDEC_PICTURE_READY if there is another picture to be displayed."
*/
OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetOutput(AVCHandle *avcHandle, int *indx, int *release_flag, AVCFrameIO *output);
/**
This function resets the decoder and expects to see the next IDR slice.
\param "avcHandle" "Handle to the AVC decoder library object."
*/
OSCL_IMPORT_REF void PVAVCDecReset(AVCHandle *avcHandle);
/**
This function performs clean up operation including memory deallocation.
\param "avcHandle" "Handle to the AVC decoder library object."
*/
OSCL_IMPORT_REF void PVAVCCleanUpDecoder(AVCHandle *avcHandle);
//AVCDec_Status EBSPtoRBSP(uint8 *nal_unit,int *size);
/** CALLBACK FUNCTION TO BE IMPLEMENTED BY APPLICATION */
/** In AVCHandle structure, userData is a pointer to an object with the following
member functions.
*/
AVCDec_Status CBAVCDec_GetData(uint32 *userData, unsigned char **buffer, unsigned int *size);
#ifdef __cplusplus
}
#endif
#endif /* _AVCDEC_API_H_ */

View File

@@ -0,0 +1,49 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#ifndef PVAVCDECODER_H_INCLUDED
#define PVAVCDECODER_H_INCLUDED
#ifndef PVAVCDECODERINTERFACE_H_INCLUDED
#include "pvavcdecoderinterface.h"
#endif
// AVC video decoder
// AVC video decoder: concrete PVAVCDecoderInterface implementation that
// wraps the C decoder library through the opaque handle iAVCHandle.
class PVAVCDecoder : public PVAVCDecoderInterface
{
    public:
        virtual ~PVAVCDecoder();

        // Two-phase construction entry point (presumably allocates an
        // instance and runs Construct() — confirm in the .cpp).
        static PVAVCDecoder* New(void);

        // PVAVCDecoderInterface implementation; see pvavcdecoderinterface.h
        // for the callback-type meanings.
        virtual bool InitAVCDecoder(FunctionType_SPS, FunctionType_Alloc, FunctionType_Unbind,
                                    FunctionType_Malloc, FunctionType_Free, void *);
        virtual void CleanUpAVCDecoder(void);
        virtual void ResetAVCDecoder(void);
        virtual int32 DecodeSPS(uint8 *bitstream, int32 buffer_size);
        virtual int32 DecodePPS(uint8 *bitstream, int32 buffer_size);
        virtual int32 DecodeAVCSlice(uint8 *bitstream, int32 *buffer_size);
        virtual bool GetDecOutput(int *indx, int *release);
        virtual void GetVideoDimensions(int32 *width, int32 *height, int32 *top, int32 *left, int32 *bottom, int32 *right);

        // Memory hooks handed to the C library. NOTE(review): AVC_Free
        // receives the address as an int — a 32-bit pointer assumption.
        int AVC_Malloc(int32 size, int attribute);
        void AVC_Free(int mem);

    private:
        PVAVCDecoder();
        bool Construct(void);

        void *iAVCHandle;  // opaque handle to the C decoder library state
};
#endif

View File

@@ -0,0 +1,43 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#ifndef PVAVCDECODER_FACTORY_H_INCLUDED
#define PVAVCDECODER_FACTORY_H_INCLUDED
class PVAVCDecoderInterface;
class PVAVCDecoderFactory
{
    public:
        /**
         * Creates an instance of a PVAVCDecoder. If the creation fails, this function will leave.
         *
         * @returns A pointer to an instance of PVAVCDecoder as PVAVCDecoderInterface reference or leaves if instantiation fails
         **/
        OSCL_IMPORT_REF static PVAVCDecoderInterface* CreatePVAVCDecoder(void);

        /**
         * Deletes an instance of PVAVCDecoder and reclaims all allocated resources.
         *
         * @param aVideoDec The PVAVCDecoder instance to be deleted
         * @returns true if deletion succeeded, false otherwise
         **/
        OSCL_IMPORT_REF static bool DeletePVAVCDecoder(PVAVCDecoderInterface* aVideoDec);
};
#endif

View File

@@ -0,0 +1,48 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#ifndef PVAVCDECODERINTERFACE_H_INCLUDED
#define PVAVCDECODERINTERFACE_H_INCLUDED
// Application-supplied callback types passed to InitAVCDecoder().
// Semantics are defined by the implementation; the notes below are
// inferred from the parameter lists — TODO confirm against the .cpp.
typedef void (*FunctionType_Unbind)(void *, int);           // release/unbind a frame buffer by index
typedef int (*FunctionType_Alloc)(void *, int, uint8 **);   // obtain a frame buffer pointer
typedef int (*FunctionType_SPS)(void *, uint, uint);        // notify a new sequence (two uint params, presumably dimensions)
typedef int (*FunctionType_Malloc)(void *, int32, int);     // allocate memory; returns an int handle/address
typedef void(*FunctionType_Free)(void *, int);              // free memory obtained via FunctionType_Malloc
// PVAVCDecoderInterface pure virtual interface class
// Pure virtual interface to the AVC video decoder; implemented by
// PVAVCDecoder and instantiated via PVAVCDecoderFactory.
class PVAVCDecoderInterface
{
    public:
        virtual ~PVAVCDecoderInterface() {};

        // Initializes the decoder with the application callbacks
        // (SPS notification, frame-buffer alloc/unbind, malloc/free)
        // and an opaque user pointer passed back to each callback.
        virtual bool InitAVCDecoder(FunctionType_SPS, FunctionType_Alloc, FunctionType_Unbind,
                                    FunctionType_Malloc, FunctionType_Free, void *) = 0;
        virtual void CleanUpAVCDecoder(void) = 0;
        virtual void ResetAVCDecoder(void) = 0;

        // Decode entry points for SPS / PPS / slice NAL units; int32
        // return values carry library status codes (see implementation).
        virtual int32 DecodeSPS(uint8 *bitstream, int32 buffer_size) = 0;
        virtual int32 DecodePPS(uint8 *bitstream, int32 buffer_size) = 0;
        virtual int32 DecodeAVCSlice(uint8 *bitstream, int32 *buffer_size) = 0;

        // Output retrieval: index of the decoded picture and a flag telling
        // whether the buffer may be released.
        virtual bool GetDecOutput(int *indx, int *release) = 0;
        virtual void GetVideoDimensions(int32 *width, int32 *height, int32 *top, int32 *left, int32 *bottom, int32 *right) = 0;
//        virtual int AVC_Malloc(int32 size, int attribute);
//        virtual void AVC_Free(int mem);
};
#endif // PVAVCDECODERINTERFACE_H_INCLUDED

View File

@@ -0,0 +1,276 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avcdec_bitstream.h"
/* Swapping may not be needed anymore since we read one byte at a time and perform
EBSP to RBSP conversion in bitstream. */
/* Byte-swap helper; per the note above, likely unused now that bytes are
   read one at a time during EBSP-to-RBSP conversion. */
#ifdef LITTLE_ENDIAN
#if (WORD_SIZE==32)  /* this can be replaced with assembly instructions */
#define SWAP_BYTES(x) ((((x)&0xFF)<<24) | (((x)&0xFF00)<<8) | (((x)&0xFF0000)>>8) | (((x)&0xFF000000)>>24))
#else /* for 16-bit */
#define SWAP_BYTES(x) ((((x)&0xFF)<<8) | (((x)&0xFF00)>>8))
#endif
#else
#define SWAP_BYTES(x) (x)
#endif

/* array for trailing bit pattern as function of number of bits */
/* the first one is unused. */
/* trailing_bits[n] is a 1 followed by (n-1) zeros; compared by
   more_rbsp_data() against the last bits of the NAL to detect the
   rbsp_trailing_bits() pattern. */
const static uint8 trailing_bits[9] = {0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80};
/* ======================================================================== */
/* Function : BitstreamInit() */
/* Date : 11/4/2003 */
/* Purpose : Populate bitstream structure with bitstream buffer and size */
/* it also initializes internal data */
/* In/out : */
/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if failed. */
/* Modified : */
/* ======================================================================== */
/* |--------|--------|----~~~~~-----|---------|---------|---------|
^ ^read_pos ^data_end_pos
bitstreamBuffer <--------->
current_word
|xxxxxxxxxxxxx----| = current_word 32 or 16 bits
<------------>
bit_left
======================================================================== */
/* ======================================================================== */
/* Function : BitstreamNextWord() */
/* Date : 12/4/2003 */
/* Purpose : Read up to machine word. */
/* In/out : */
/* Return : Next word with emulation prevention code removed. Everything
in the bitstream structure got modified except current_word */
/* Modified : */
/* ======================================================================== */
/* Bind a NAL-unit buffer to the bitstream reader and reset all read state.
   The buffer is first converted in place from EBSP to RBSP (emulation
   prevention bytes removed), which shrinks `size`. Always returns
   AVCDEC_SUCCESS. */
AVCDec_Status BitstreamInit(AVCDecBitstream *stream, uint8 *buffer, int size)
{
    /* Strip emulation prevention bytes; `size` is updated in place. */
    EBSPtoRBSP(buffer, &size);

    /* Attach the (now RBSP) buffer. */
    stream->bitstreamBuffer = buffer;
    stream->nal_size = size;
    stream->data_end_pos = size;
    stream->read_pos = 0;

    /* Empty the two-word bit cache and the consumed-bit counter. */
    stream->curr_word = 0;
    stream->next_word = 0;
    stream->incnt = 0;
    stream->incnt_next = 0;
    stream->bitcnt = 0;

    return AVCDEC_SUCCESS;
}
/* ======================================================================== */
/* Function : AVC_BitstreamFillCache() */
/* Date : 1/1/2005 */
/* Purpose : Read up to machine word. */
/* In/out : */
/* Return : Read in 4 bytes of input data */
/* Modified : */
/* ======================================================================== */
/* Refill the two-word bit cache: top up curr_word from next_word, then read
   up to 4 new bytes (packed MSB-first) from the buffer into next_word.
   Near the end of the buffer the remaining 1-3 bytes are packed by hand and
   the cache is effectively zero-padded. Always returns AVCDEC_SUCCESS. */
AVCDec_Status AVC_BitstreamFillCache(AVCDecBitstream *stream)
{
    uint8 *bitstreamBuffer = stream->bitstreamBuffer;
    uint8 *v;
    int num_bits, i;
    /* Move cached bits from next_word into the vacated top of curr_word. */
    stream->curr_word |= (stream->next_word >> stream->incnt); // stream->incnt cannot be 32
    stream->next_word <<= (31 - stream->incnt);  /* two-step shift avoids an   */
    stream->next_word <<= 1;                     /* undefined shift by 32 bits */
    num_bits = stream->incnt_next + stream->incnt;
    if (num_bits >= 32)
    {
        /* next_word alone refilled curr_word completely; no buffer read. */
        stream->incnt_next -= (32 - stream->incnt);
        stream->incnt = 32;
        return AVCDEC_SUCCESS;
    }
    /* this check can be removed if there is additional extra 4 bytes at the end of the bitstream */
    v = bitstreamBuffer + stream->read_pos;
    if (stream->read_pos > stream->data_end_pos - 4)
    {
        /* Fewer than 4 bytes left in the buffer. */
        if (stream->data_end_pos <= stream->read_pos)
        {
            /* Nothing left at all: keep whatever bits remain cached. */
            stream->incnt = num_bits;
            stream->incnt_next = 0;
            return AVCDEC_SUCCESS;
        }
        /* Pack the 1-3 tail bytes MSB-first into next_word. */
        stream->next_word = 0;
        for (i = 0; i < stream->data_end_pos - stream->read_pos; i++)
        {
            stream->next_word |= (v[i] << ((3 - i) << 3));
        }
        stream->read_pos = stream->data_end_pos;
        stream->curr_word |= (stream->next_word >> num_bits); // this is safe
        stream->next_word <<= (31 - num_bits);
        stream->next_word <<= 1;
        num_bits = i << 3;  /* number of bits just read from the buffer */
        stream->incnt += stream->incnt_next;
        /* Split the newly read bits between curr_word and next_word. */
        stream->incnt_next = num_bits - (32 - stream->incnt);
        if (stream->incnt_next < 0)
        {
            stream->incnt += num_bits;
            stream->incnt_next = 0;
        }
        else
        {
            stream->incnt = 32;
        }
        return AVCDEC_SUCCESS;
    }
    /* Common case: read a full big-endian 32-bit word and advance. */
    stream->next_word = ((uint32)v[0] << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
    stream->read_pos += 4;
    stream->curr_word |= (stream->next_word >> num_bits); // this is safe
    stream->next_word <<= (31 - num_bits);
    stream->next_word <<= 1;
    stream->incnt_next += stream->incnt;
    stream->incnt = 32;
    return AVCDEC_SUCCESS;
}
/* ======================================================================== */
/* Function : BitstreamReadBits() */
/* Date : 11/4/2003 */
/* Purpose : Read up to machine word. */
/* In/out : */
/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits */
/* is greater than the word-size, AVCDEC_PACKET_LOSS or */
/* AVCDEC_NO_DATA if callback to get data fails. */
/* Modified : */
/* ======================================================================== */
/* Read nBits from the stream into *code and advance the read position.
   NOTE(review): nBits == 0 would shift curr_word by 32 — undefined
   behavior; callers are expected to pass 1..32. No end-of-stream error is
   reported: past the buffer end, zero bits from the cache padding are
   returned (see AVC_BitstreamFillCache). Always returns AVCDEC_SUCCESS. */
AVCDec_Status BitstreamReadBits(AVCDecBitstream *stream, int nBits, uint *code)
{
    if (stream->incnt < nBits)
    {
        /* frame-based decoding */
        AVC_BitstreamFillCache(stream);
    }
    /* The requested bits sit at the top (MSB side) of curr_word. */
    *code = stream->curr_word >> (32 - nBits);
    BitstreamFlushBits(stream, nBits);
    return AVCDEC_SUCCESS;
}
/* ======================================================================== */
/* Function : BitstreamShowBits() */
/* Date : 11/4/2003 */
/* Purpose : Show up to machine word without advancing the pointer. */
/* In/out : */
/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits */
/* is greater than the word-size, AVCDEC_NO_DATA if it needs */
/* to callback to get data. */
/* Modified : */
/* ======================================================================== */
/* Peek nBits into *code without advancing the read position. Same as
   BitstreamReadBits minus the flush; the cache may still be refilled as a
   side effect. nBits must be 1..32 (nBits == 0 would shift by 32 — UB).
   Always returns AVCDEC_SUCCESS. */
AVCDec_Status BitstreamShowBits(AVCDecBitstream *stream, int nBits, uint *code)
{
    if (stream->incnt < nBits)
    {
        /* frame-based decoding */
        AVC_BitstreamFillCache(stream);
    }
    *code = stream->curr_word >> (32 - nBits);
    return AVCDEC_SUCCESS;
}
/* ======================================================================== */
/* Function : BitstreamRead1Bit() */
/* Date : 11/4/2003 */
/* Purpose : Read 1 bit from the bitstream. */
/* In/out : */
/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits */
/* is greater than the word-size, AVCDEC_PACKET_LOSS or */
/* AVCDEC_NO_DATA if callback to get data fails. */
/* Modified : */
/* ======================================================================== */
/* Read a single bit into *code and advance the read position.
   Specialized fast path of BitstreamReadBits for nBits == 1.
   Always returns AVCDEC_SUCCESS. */
AVCDec_Status BitstreamRead1Bit(AVCDecBitstream *stream, uint *code)
{
    if (stream->incnt < 1)
    {
        /* frame-based decoding */
        AVC_BitstreamFillCache(stream);
    }
    /* The next bit is the MSB of curr_word. */
    *code = stream->curr_word >> 31;
    BitstreamFlushBits(stream, 1);
    return AVCDEC_SUCCESS;
}
/* Skip bits so that the read position lands on the next byte boundary.
   Always returns AVCDEC_SUCCESS. */
AVCDec_Status BitstreamByteAlign(AVCDecBitstream *stream)
{
    uint n_stuffed;
    /* Bits needed to reach the next byte boundary (0..7). */
    n_stuffed = (8 - (stream->bitcnt & 0x7)) & 0x7; /* 07/05/01 */
    /* Tentatively account for the skipped bits. */
    stream->bitcnt += n_stuffed;
    stream->incnt -= n_stuffed;
    if (stream->incnt < 0)
    {
        /* Fewer bits were cached than skipped: roll bitcnt back by the
           deficit and leave the cache empty. */
        stream->bitcnt += stream->incnt;
        stream->incnt = 0;
    }
    stream->curr_word <<= n_stuffed;
    return AVCDEC_SUCCESS;
}
/* check whether there are more RBSP data. */
/* ignore the emulation prevention code, assume it has been taken out. */
/* Check whether RBSP payload bits remain before rbsp_trailing_bits().
   The emulation prevention bytes are assumed to have been removed already.
   Returns TRUE while payload data remains, FALSE once only the trailing-bit
   pattern (or nothing) is left. */
bool more_rbsp_data(AVCDecBitstream *stream)
{
    int bits_remaining;
    uint pattern;

    /* Bytes still unread in the buffer always mean more payload. */
    if (stream->read_pos < stream->nal_size)
    {
        return TRUE;
    }

    bits_remaining = stream->incnt_next + stream->incnt;
    if (bits_remaining <= 0)
    {
        return FALSE;  /* cache exhausted as well */
    }

    if (bits_remaining <= 8)
    {
        /* Peek the final bits; a lone 1 followed by zeros is exactly the
           rbsp_trailing_bits() pattern, i.e. no payload left. */
        BitstreamShowBits(stream, bits_remaining, &pattern);
        if (pattern == trailing_bits[bits_remaining])
        {
            return FALSE;
        }
    }

    return TRUE;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,125 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains bitstream related functions.
@publishedAll
*/
#ifndef _AVCDEC_BITSTREAM_H_
#define _AVCDEC_BITSTREAM_H_
#include "avcdec_lib.h"
#define WORD_SIZE 32 /* this can vary, default to 32 bit for now */
#ifndef __cplusplus
#define AVC_GETDATA(x,y) userData->AVC_GetData(x,y)
#endif
#ifdef __cplusplus
extern "C"
{
#endif
#define BitstreamFlushBits(A,B) {(A)->bitcnt += (B); (A)->incnt -= (B); (A)->curr_word <<= (B);}
AVCDec_Status AVC_BitstreamFillCache(AVCDecBitstream *stream);
/**
This function populates bitstream structure.
\param "stream" "Pointer to bitstream structure."
\param "buffer" "Pointer to the bitstream buffer."
\param "size" "Size of the buffer."
\param "nal_size" "Size of the NAL unit."
\param "resetall" "Flag for reset everything."
\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL for fail."
*/
AVCDec_Status BitstreamInit(AVCDecBitstream *stream, uint8 *buffer, int size);
/**
This function reads next aligned word and remove the emulation prevention code
if necessary.
\param "stream" "Pointer to bitstream structure."
\return "Next word."
*/
uint BitstreamNextWord(AVCDecBitstream *stream);
/**
This function reads nBits bits from the current position and advance the pointer.
\param "stream" "Pointer to bitstream structure."
\param "nBits" "Number of bits to be read."
\param "code" "Point to the read value."
\return "AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits
is greater than the word-size, AVCDEC_PACKET_LOSS or
AVCDEC_NO_DATA if callback to get data fails."
*/
AVCDec_Status BitstreamReadBits(AVCDecBitstream *stream, int nBits, uint *code);
/**
This function shows nBits bits from the current position without advancing the pointer.
\param "stream" "Pointer to bitstream structure."
\param "nBits" "Number of bits to be read."
\param "code" "Point to the read value."
\return "AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits
is greater than the word-size, AVCDEC_NO_DATA if it needs
to callback to get data."
*/
AVCDec_Status BitstreamShowBits(AVCDecBitstream *stream, int nBits, uint *code);
/**
This function flushes nBits bits from the current position.
\param "stream" "Pointer to bitstream structure."
\param "nBits" "Number of bits to be read."
\return "AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits
is greater than the word-size It will not call back to get
more data. Users should call BitstreamShowBits to determine
how much they want to flush."
*/
/**
This function read 1 bit from the current position and advance the pointer.
\param "stream" "Pointer to bitstream structure."
\param "nBits" "Number of bits to be read."
\param "code" "Point to the read value."
\return "AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits
is greater than the word-size, AVCDEC_PACKET_LOSS or
AVCDEC_NO_DATA if callback to get data fails."
*/
AVCDec_Status BitstreamRead1Bit(AVCDecBitstream *stream, uint *code);
/**
This function checks whether the current bit position is byte-aligned or not.
\param "stream" "Pointer to the bitstream structure."
\return "TRUE if byte-aligned, FALSE otherwise."
*/
bool byte_aligned(AVCDecBitstream *stream);
AVCDec_Status BitstreamByteAlign(AVCDecBitstream *stream);
/**
This function checks whether there are more RBSP data before the trailing bits.
\param "stream" "Pointer to the bitstream structure."
\return "TRUE if yes, FALSE otherwise."
*/
bool more_rbsp_data(AVCDecBitstream *stream);
#ifdef __cplusplus
}
#endif /* __cplusplus */
#endif /* _AVCDEC_BITSTREAM_H_ */

View File

@@ -0,0 +1,88 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains application function interfaces to the AVC decoder library
and necessary type definitions and enumerations.
Naming convention for variables:
lower_case_with_under_line is syntax element in subclause 7.2 and 7.3
noUnderLine or NoUnderLine is derived variables defined somewhere else in the draft
or introduced by this decoder library.
@publishedAll
*/
#ifndef _AVCDEC_INT_H_
#define _AVCDEC_INT_H_
#include "avcint_common.h"
#include "avcdec_api.h"
/**
Bitstream structure contains bitstream related parameters such as the pointer
to the buffer, the current byte position and bit position.
@publishedAll
*/
typedef struct tagDecBitstream
{
    uint8 *bitstreamBuffer; /* pointer to buffer memory */
    int nal_size; /* size of the current NAL unit */
    int data_end_pos; /* bitstreamBuffer size in bytes */
    int read_pos; /* next position to read from bitstreamBuffer */
    uint curr_word; /* byte-swapped (MSB left) current word read from buffer */
    int bit_left; /* number of bit left in current_word (NOTE(review): the reader in avcdec_bitstream.c tracks incnt instead — this field appears unused there; confirm) */
    uint next_word; /* in case for old data in previous buffer hasn't been flushed. */
    int incnt; /* bit left in the prev_word */
    int incnt_next; /* number of valid bits held in next_word */
    int bitcnt; /* running count of bits consumed from this NAL */
    void *userData; /* opaque application pointer for data callbacks */
} AVCDecBitstream;
/**
This structure is the main object for AVC decoder library providing access to all
global variables. It is allocated at PVAVCInitDecoder and freed at PVAVCCleanUpDecoder.
@publishedAll
*/
typedef struct tagDecObject
{
    AVCCommonObj *common; /* state shared with the common (enc/dec) library */
    AVCDecBitstream *bitstream; /* for current NAL */
    /* sequence parameter set */
    AVCSeqParamSet *seqParams[32]; /* Array of pointers, get allocated at arrival of new seq_id */
    /* picture parameter set */
    AVCPicParamSet *picParams[256]; /* Array of pointers to picture param set structures */
    /* For internal operation, scratch memory for MV, prediction, transform, etc.*/
    uint ref_idx_l0[4]; /* [mbPartIdx], te(v) */
    uint ref_idx_l1[4]; /* list-1 reference indices, same layout as ref_idx_l0 */
    /* function pointers */
    /* residual decoding hook, selected per entropy-coding mode (presumably
       CAVLC vs. CABAC — confirm where it is assigned) */
    AVCDec_Status(*residual_block)(struct tagDecObject*, int, int,
        int *, int *, int *);
    /* Application control data */
    AVCHandle *avcHandle; /* handle supplied by the application at init */
    void (*AVC_DebugLog)(AVCLogType type, char *string1, char *string2); /* optional debug logger */
    /*bool*/
    uint debugEnable; /* nonzero enables debug logging */
} AVCDecObject;
#endif /* _AVCDEC_INT_H_ */

View File

@@ -0,0 +1,555 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains declarations of internal functions for AVC decoder library.
@publishedAll
*/
#ifndef _AVCDEC_LIB_H_
#define _AVCDEC_LIB_H_
#include "avclib_common.h"
#include "avcdec_int.h"
/*----------- avcdec_api.c -------------*/
/**
This function takes out the emulation prevention bytes from the input to create the RBSP.
The result is written over the input bitstream.
\param "nal_unit" "(I/O) Pointer to the input buffer."
\param "size" "(I/O) Pointer to the size of the input/output buffer."
\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
*/
AVCDec_Status EBSPtoRBSP(uint8 *nal_unit, int *size);
/*------------- pred_intra.c ---------------*/
/**
This function is the main entry point to intra prediction operation on a
macroblock.
\param "video" "Pointer to AVCCommonObj."
*/
AVCStatus IntraMBPrediction(AVCCommonObj *video);
void SaveNeighborForIntraPred(AVCCommonObj *video, int offset);
AVCStatus Intra_4x4(AVCCommonObj *video, int component, int SubBlock_indx, uint8 *comp);
void Intra_4x4_Vertical(AVCCommonObj *video, int block_offset);
void Intra_4x4_Horizontal(AVCCommonObj *video, int pitch, int block_offset);
void Intra_4x4_DC(AVCCommonObj *video, int pitch, int block_offset, AVCNeighborAvailability *availability);
void Intra_4x4_Down_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability);
void Intra_4x4_Diagonal_Down_Right(AVCCommonObj *video, int pitch, int block_offset);
void Intra_4x4_Diagonal_Vertical_Right(AVCCommonObj *video, int pitch, int block_offset);
void Intra_4x4_Diagonal_Horizontal_Down(AVCCommonObj *video, int pitch, int block_offset);
void Intra_4x4_Vertical_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability);
void Intra_4x4_Horizontal_Up(AVCCommonObj *video, int pitch, int block_offset);
void Intra_16x16_Vertical(AVCCommonObj *video);
void Intra_16x16_Horizontal(AVCCommonObj *video, int pitch);
void Intra_16x16_DC(AVCCommonObj *video, int pitch);
void Intra_16x16_Plane(AVCCommonObj *video, int pitch);
void Intra_Chroma_DC(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
void Intra_Chroma_Horizontal(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
void Intra_Chroma_Vertical(AVCCommonObj *video, uint8 *predCb, uint8 *predCr);
void Intra_Chroma_Plane(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
/*------------ pred_inter.c ---------------*/
/**
This function is the main entrance to inter prediction operation for
a macroblock. For decoding, this function also calls inverse transform and
compensation.
\param "video" "Pointer to AVCCommonObj."
\return "void"
*/
void InterMBPrediction(AVCCommonObj *video);
/**
This function is called for luma motion compensation.
\param "ref" "Pointer to the origin of a reference luma."
\param "picwidth" "Width of the picture."
\param "picheight" "Height of the picture."
\param "x_pos" "X-coordinate of the predicted block in quarter pel resolution."
\param "y_pos" "Y-coordinate of the predicted block in quarter pel resolution."
\param "pred" "Pointer to the output predicted block."
\param "pred_pitch" "Width of pred."
\param "blkwidth" "Width of the current partition."
\param "blkheight" "Height of the current partition."
\return "void"
*/
void LumaMotionComp(uint8 *ref, int picwidth, int picheight,
int x_pos, int y_pos,
uint8 *pred, int pred_pitch,
int blkwidth, int blkheight);
/**
Functions below are special cases for luma motion compensation.
LumaFullPelMC is for full pixel motion compensation.
LumaBorderMC is for interpolation in only one dimension.
LumaCrossMC is for interpolation in one dimension and half point in the other dimension.
LumaDiagonalMC is for interpolation in diagonal direction.
\param "ref" "Pointer to the origin of a reference luma."
\param "picwidth" "Width of the picture."
\param "picheight" "Height of the picture."
\param "x_pos" "X-coordinate of the predicted block in full pel resolution."
\param "y_pos" "Y-coordinate of the predicted block in full pel resolution."
\param "dx" "Fraction of x_pos in quarter pel."
\param "dy" "Fraction of y_pos in quarter pel."
\param "curr" "Pointer to the current partition in the current picture."
\param "residue" "Pointer to the current partition for the residue block."
\param "blkwidth" "Width of the current partition."
\param "blkheight" "Height of the current partition."
\return "void"
*/
void CreatePad(uint8 *ref, int picwidth, int picheight, int x_pos, int y_pos,
uint8 *out, int blkwidth, int blkheight);
void FullPelMC(uint8 *in, int inwidth, uint8 *out, int outpitch,
int blkwidth, int blkheight);
void HorzInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
int blkwidth, int blkheight, int dx);
void HorzInterp2MC(int *in, int inpitch, uint8 *out, int outpitch,
int blkwidth, int blkheight, int dx);
void HorzInterp3MC(uint8 *in, int inpitch, int *out, int outpitch,
int blkwidth, int blkheight);
void VertInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
int blkwidth, int blkheight, int dy);
void VertInterp2MC(uint8 *in, int inpitch, int *out, int outpitch,
int blkwidth, int blkheight);
void VertInterp3MC(int *in, int inpitch, uint8 *out, int outpitch,
int blkwidth, int blkheight, int dy);
void DiagonalInterpMC(uint8 *in1, uint8 *in2, int inpitch,
uint8 *out, int outpitch,
int blkwidth, int blkheight);
void ChromaMotionComp(uint8 *ref, int picwidth, int picheight,
int x_pos, int y_pos, uint8 *pred, int pred_pitch,
int blkwidth, int blkheight);
void ChromaFullPelMC(uint8 *in, int inpitch, uint8 *out, int outpitch,
int blkwidth, int blkheight) ;
void ChromaBorderMC(uint8 *ref, int picwidth, int dx, int dy,
uint8 *pred, int pred_pitch, int blkwidth, int blkheight);
void ChromaDiagonalMC(uint8 *ref, int picwidth, int dx, int dy,
uint8 *pred, int pred_pitch, int blkwidth, int blkheight);
void ChromaFullPelMCOutside(uint8 *ref, uint8 *pred, int pred_pitch,
int blkwidth, int blkheight, int x_inc,
int y_inc0, int y_inc1, int x_mid, int y_mid);
void ChromaBorderMCOutside(uint8 *ref, int picwidth, int dx, int dy,
uint8 *pred, int pred_pitch, int blkwidth, int blkheight,
int x_inc, int z_inc, int y_inc0, int y_inc1, int x_mid, int y_mid);
void ChromaDiagonalMCOutside(uint8 *ref, int picwidth,
int dx, int dy, uint8 *pred, int pred_pitch,
int blkwidth, int blkheight, int x_inc, int z_inc,
int y_inc0, int y_inc1, int x_mid, int y_mid);
void ChromaDiagonalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
uint8 *pOut, int predPitch, int blkwidth, int blkheight);
void ChromaHorizontalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
uint8 *pOut, int predPitch, int blkwidth, int blkheight);
void ChromaVerticalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
uint8 *pOut, int predPitch, int blkwidth, int blkheight);
void ChromaFullMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
uint8 *pOut, int predPitch, int blkwidth, int blkheight);
void ChromaVerticalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
uint8 *pOut, int predPitch, int blkwidth, int blkheight);
void ChromaHorizontalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
uint8 *pOut, int predPitch, int blkwidth, int blkheight);
void ChromaDiagonalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
uint8 *pOut, int predPitch, int blkwidth, int blkheight);
/*----------- slice.c ---------------*/
/**
This function performs the main decoding loop for slice data including
INTRA/INTER prediction, transform and quantization and compensation.
See decode_frame_slice() in JM.
\param "video" "Pointer to AVCDecObject."
\return "AVCDEC_SUCCESS for success, AVCDEC_PICTURE_READY for end-of-picture and AVCDEC_FAIL otherwise."
*/
AVCDec_Status DecodeSlice(AVCDecObject *video);
AVCDec_Status ConcealSlice(AVCDecObject *decvid, int mbnum_start, int mbnum_end);
/**
This function performs the decoding of one macroblock.
\param "video" "Pointer to AVCDecObject."
\param "prevMbSkipped" "A value derived in 7.3.4."
\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
*/
AVCDec_Status DecodeMB(AVCDecObject *video);
/**
This function performs macroblock prediction type decoding as in subclause 7.3.5.1.
\param "video" "Pointer to AVCCommonObj."
\param "currMB" "Pointer to the current macroblock."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
*/
AVCDec_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
/**
This function performs sub-macroblock prediction type decoding as in subclause 7.3.5.2.
\param "video" "Pointer to AVCCommonObj."
\param "currMB" "Pointer to the current macroblock."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
*/
AVCDec_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
/**
This function interprets the mb_type and sets the necessary information
in the macroblock structure when the slice type is AVC_I_SLICE.
\param "mblock" "Pointer to current AVCMacroblock."
\param "mb_type" "From the syntax bitstream."
\return "void"
*/
void InterpretMBModeI(AVCMacroblock *mblock, uint mb_type);
/**
This function interprets the mb_type and sets the necessary information
in the macroblock structure when the slice type is AVC_P_SLICE.
\param "mblock" "Pointer to current AVCMacroblock."
\param "mb_type" "From the syntax bitstream."
\return "void"
*/
void InterpretMBModeP(AVCMacroblock *mblock, uint mb_type);
/**
This function interprets the mb_type and sets the necessary information
in the macroblock structure when the slice type is AVC_B_SLICE.
\param "mblock" "Pointer to current AVCMacroblock."
\param "mb_type" "From the syntax bitstream."
\return "void"
*/
void InterpretMBModeB(AVCMacroblock *mblock, uint mb_type);
/**
This function interprets the mb_type and sets the necessary information
in the macroblock structure when the slice type is AVC_SI_SLICE.
\param "mblock" "Pointer to current AVCMacroblock."
\param "mb_type" "From the syntax bitstream."
\return "void"
*/
void InterpretMBModeSI(AVCMacroblock *mblock, uint mb_type);
/**
This function interprets the sub_mb_type and sets the necessary information
in the macroblock structure when the slice type is AVC_P_SLICE.
\param "mblock" "Pointer to current AVCMacroblock."
\param "sub_mb_type" "From the syntax bitstream."
\return "void"
*/
void InterpretSubMBModeP(AVCMacroblock *mblock, uint *sub_mb_type);
/**
This function interprets the sub_mb_type and sets the necessary information
in the macroblock structure when the slice type is AVC_B_SLICE.
\param "mblock" "Pointer to current AVCMacroblock."
\param "sub_mb_type" "From the syntax bitstream."
\return "void"
*/
void InterpretSubMBModeB(AVCMacroblock *mblock, uint *sub_mb_type);
/**
This function decodes the Intra4x4 prediction mode from neighboring information
and from the decoded syntax.
\param "video" "Pointer to AVCCommonObj."
\param "currMB" "Pointer to current macroblock."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status DecodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
/*----------- vlc.c -------------------*/
/**
This function reads and decodes Exp-Golomb codes.
\param "bitstream" "Pointer to AVCDecBitstream."
\param "codeNum" "Pointer to the value of the codeNum."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status ue_v(AVCDecBitstream *bitstream, uint *codeNum);
/**
This function reads and decodes signed Exp-Golomb codes.
\param "bitstream" "Pointer to AVCDecBitstream."
\param "value" "Pointer to syntax element value."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status se_v(AVCDecBitstream *bitstream, int *value);
/**
This function reads and decodes signed Exp-Golomb codes for
32 bit codeword.
\param "bitstream" "Pointer to AVCDecBitstream."
\param "value" "Pointer to syntax element value."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status se_v32bit(AVCDecBitstream *bitstream, int32 *value);
/**
This function reads and decodes truncated Exp-Golomb codes.
\param "bitstream" "Pointer to AVCDecBitstream."
\param "value" "Pointer to syntax element value."
\param "range" "Range of the value as input to determine the algorithm."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status te_v(AVCDecBitstream *bitstream, uint *value, uint range);
/**
This function parses an Exp-Golomb code from the bitstream.
\param "bitstream" "Pointer to AVCDecBitstream."
\param "leadingZeros" "Pointer to the number of leading zeros."
\param "infobits" "Pointer to the value after leading zeros and the first one.
The total number of bits read is 2*leadingZeros + 1."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status GetEGBitstring(AVCDecBitstream *bitstream, int *leadingZeros, int *infobits);
/**
This function parses an Exp-Golomb code from the bitstream for 32-bit codewords.
\param "bitstream" "Pointer to AVCDecBitstream."
\param "leadingZeros" "Pointer to the number of leading zeros."
\param "infobits" "Pointer to the value after leading zeros and the first one.
The total number of bits read is 2*leadingZeros + 1."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status GetEGBitstring32bit(AVCDecBitstream *bitstream, int *leadingZeros, uint32 *infobits);
/**
This function performs CAVLC decoding of the CBP (coded block pattern) of a macroblock
by calling ue_v() and then mapping the codeNum to the corresponding CBP value.
\param "currMB" "Pointer to the current AVCMacroblock structure."
\param "stream" "Pointer to the AVCDecBitstream."
\return "void"
*/
AVCDec_Status DecodeCBP(AVCMacroblock *currMB, AVCDecBitstream *stream);
/**
This function decodes the syntax for trailing ones and total coefficient.
Subject to optimization.
\param "stream" "Pointer to the AVCDecBitstream."
\param "TrailingOnes" "Pointer to the trailing one variable output."
\param "TotalCoeff" "Pointer to the total coefficient variable output."
\param "nC" "Context for number of nonzero coefficient (prediction context)."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status ce_TotalCoeffTrailingOnes(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff, int nC);
/**
This function decodes the syntax for trailing ones and total coefficient for
chroma DC block. Subject to optimization.
\param "stream" "Pointer to the AVCDecBitstream."
\param "TrailingOnes" "Pointer to the trailing one variable output."
\param "TotalCoeff" "Pointer to the total coefficient variable output."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status ce_TotalCoeffTrailingOnesChromaDC(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff);
/**
This function decodes a VLC table with two outputs.
\param "stream" "Pointer to the AVCDecBitstream."
\param "lentab" "Table for code length."
\param "codtab" "Table for code value."
\param "tabwidth" "Width of the table or alphabet size of the first output."
\param "tabheight" "Height of the table or alphabet size of the second output."
\param "code1" "Pointer to the first output."
\param "code2" "Pointer to the second output."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status code_from_bitstream_2d(AVCDecBitstream *stream, int *lentab, int *codtab, int tabwidth,
int tabheight, int *code1, int *code2);
/**
This function decodes the level_prefix VLC value as in Table 9-6.
\param "stream" "Pointer to the AVCDecBitstream."
\param "code" "Pointer to the output."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status ce_LevelPrefix(AVCDecBitstream *stream, uint *code);
/**
This function decodes total_zeros VLC syntax as in Table 9-7 and 9-8.
\param "stream" "Pointer to the AVCDecBitstream."
\param "code" "Pointer to the output."
\param "TotalCoeff" "Context parameter."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status ce_TotalZeros(AVCDecBitstream *stream, int *code, int TotalCoeff);
/**
This function decodes total_zeros VLC syntax for chroma DC as in Table 9-9.
\param "stream" "Pointer to the AVCDecBitstream."
\param "code" "Pointer to the output."
\param "TotalCoeff" "Context parameter."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status ce_TotalZerosChromaDC(AVCDecBitstream *stream, int *code, int TotalCoeff);
/**
This function decodes run_before VLC syntax as in Table 9-10.
\param "stream" "Pointer to the AVCDecBitstream."
\param "code" "Pointer to the output."
\param "zeroLeft" "Context parameter."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status ce_RunBefore(AVCDecBitstream *stream, int *code, int zeroLeft);
/*----------- header.c -------------------*/
/**
This function parses vui_parameters.
\param "decvid" "Pointer to AVCDecObject."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status vui_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCSeqParamSet *currSPS);
AVCDec_Status sei_payload(AVCDecObject *decvid, AVCDecBitstream *stream, uint payloadType, uint payloadSize);
AVCDec_Status buffering_period(AVCDecObject *decvid, AVCDecBitstream *stream);
AVCDec_Status pic_timing(AVCDecObject *decvid, AVCDecBitstream *stream);
AVCDec_Status recovery_point(AVCDecObject *decvid, AVCDecBitstream *stream);
AVCDec_Status dec_ref_pic_marking_repetition(AVCDecObject *decvid, AVCDecBitstream *stream);
AVCDec_Status motion_constrained_slice_group_set(AVCDecObject *decvid, AVCDecBitstream *stream);
/**
This function parses hrd_parameters.
\param "decvid" "Pointer to AVCDecObject."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status hrd_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCHRDParams *HRDParam);
/**
This function decodes the syntax in sequence parameter set slice and fill up the AVCSeqParamSet
structure.
\param "decvid" "Pointer to AVCDecObject."
\param "video" "Pointer to AVCCommonObj."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status DecodeSPS(AVCDecObject *decvid, AVCDecBitstream *stream);
/**
This function decodes the syntax in picture parameter set and fill up the AVCPicParamSet
structure.
\param "decvid" "Pointer to AVCDecObject."
\param "video" "Pointer to AVCCommonObj."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
*/
AVCDec_Status DecodePPS(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream);
AVCDec_Status DecodeSEI(AVCDecObject *decvid, AVCDecBitstream *stream);
/**
This function decodes slice header, calls related functions such as
reference picture list reordering, prediction weight table, decode ref marking.
See FirstPartOfSliceHeader() and RestOfSliceHeader() in JM.
\param "decvid" "Pointer to AVCDecObject."
\param "video" "Pointer to AVCCommonObj."
\param "stream" "Pointer to AVCDecBitstream."
\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
*/
AVCDec_Status DecodeSliceHeader(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream);
/**
This function performs the necessary operations to create dummy frames when
there is a gap in frame_num.
\param "video" "Pointer to AVCCommonObj."
\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
*/
AVCDec_Status fill_frame_num_gap(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function decodes ref_pic_list_reordering related syntax and fill up the AVCSliceHeader
structure.
\param "video" "Pointer to AVCCommonObj."
\param "stream" "Pointer to AVCDecBitstream."
\param "sliceHdr" "Pointer to AVCSliceHdr."
\param "slice_type" "Value of slice_type - 5 if greater than 5."
\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
*/
AVCDec_Status ref_pic_list_reordering(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr, int slice_type);
/**
This function decodes dec_ref_pic_marking related syntax and fill up the AVCSliceHeader
structure.
\param "video" "Pointer to AVCCommonObj."
\param "stream" "Pointer to AVCDecBitstream."
\param "sliceHdr" "Pointer to AVCSliceHdr."
\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
*/
AVCDec_Status dec_ref_pic_marking(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr);
/**
This function performs POC related operation prior to decoding a picture
\param "video" "Pointer to AVCCommonObj."
\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
See also PostPOC() for initialization of some variables.
*/
AVCDec_Status DecodePOC(AVCCommonObj *video);
/*------------ residual.c ------------------*/
/**
This function decodes the intra pcm data and fill it in the corresponding location
on the current picture.
\param "video" "Pointer to AVCCommonObj."
\param "stream" "Pointer to AVCDecBitstream."
*/
AVCDec_Status DecodeIntraPCM(AVCCommonObj *video, AVCDecBitstream *stream);
/**
This function performs residual syntax decoding as well as quantization and transformation of
the decoded coefficients. See subclause 7.3.5.3.
\param "video" "Pointer to AVCDecObject."
\param "currMB" "Pointer to current macroblock."
*/
AVCDec_Status residual(AVCDecObject *video, AVCMacroblock *currMB);
/**
This function performs CAVLC syntax decoding to get the run and level information of the coefficients.
\param "video" "Pointer to AVCDecObject."
\param "type" "One of AVCResidualType for a particular 4x4 block."
\param "bx" "Horizontal block index."
\param "by" "Vertical block index."
\param "level" "Pointer to array of level for output."
\param "run" "Pointer to array of run for output."
\param "numcoeff" "Pointer to the total number of nonzero coefficients."
\return "AVCDEC_SUCCESS for success."
*/
AVCDec_Status residual_block_cavlc(AVCDecObject *video, int nC, int maxNumCoeff,
int *level, int *run, int *numcoeff);
#endif /* _AVCDEC_LIB_H_ */

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,307 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avclib_common.h"
/* Input coefficients occupy the first 16 elements of block;
   output is written to the locations specified in Figure 8-6. */
/* subclause 8.5.6: inverse 4x4 Hadamard transform and scaling of the
   Intra16x16 luma DC coefficients.  Qq = QPy/6, Rq = QPy%6. */
void Intra16DCTrans(int16 *block, int Qq, int Rq)
{
    int r0, r1, r2, r3;     /* butterfly intermediates */
    int pass, rounding;
    int16 *ptr = block;
    int scale = dequant_coefres[Rq][0];

    /* first pass: 4-point Hadamard butterflies over the packed rows
       (elements 0, 4, 8, 12 of each group; group stride 64) */
    for (pass = 0; pass < 4; pass++)
    {
        r0 = ptr[0] + ptr[4];
        r1 = ptr[0] - ptr[4];
        r2 = ptr[8] + ptr[12];
        r3 = ptr[8] - ptr[12];
        ptr[0] = r0 + r2;
        ptr[4] = r0 - r2;
        ptr[8] = r1 - r3;
        ptr[12] = r1 + r3;
        ptr += 64;
    }

    ptr = block;
    if (Qq >= 2) /* this way should be faster than JM */
    {            /* they use (((m4*scale)<<(QPy/6))+2)>>2 for both cases. */
        /* left-shift form: dequant fully absorbed into a shift */
        Qq -= 2;
        for (pass = 0; pass < 4; pass++)
        {
            r0 = ptr[0] + ptr[64];
            r1 = ptr[0] - ptr[64];
            r2 = ptr[128] + ptr[192];
            r3 = ptr[128] - ptr[192];
            ptr[0] = ((r0 + r2) * scale) << Qq;
            ptr[64] = ((r0 - r2) * scale) << Qq;
            ptr[128] = ((r1 - r3) * scale) << Qq;
            ptr[192] = ((r1 + r3) * scale) << Qq;
            ptr += 4;
        }
    }
    else
    {
        /* right-shift form with rounding for small QP */
        Qq = 2 - Qq;
        rounding = 1 << (Qq - 1);
        for (pass = 0; pass < 4; pass++)
        {
            r0 = ptr[0] + ptr[64];
            r1 = ptr[0] - ptr[64];
            r2 = ptr[128] + ptr[192];
            r3 = ptr[128] - ptr[192];
            ptr[0] = (((r0 + r2) * scale + rounding) >> Qq);
            ptr[64] = (((r0 - r2) * scale + rounding) >> Qq);
            ptr[128] = (((r1 - r3) * scale + rounding) >> Qq);
            ptr[192] = (((r1 + r3) * scale + rounding) >> Qq);
            ptr += 4;
        }
    }
}
/* see subclause 8.5.8 */
/*
Inverse 4x4 integer transform plus reconstruction for one residual block:
horizontal butterfly pass, vertical butterfly pass, rounding (+32, >>6),
addition of the prediction, and clipping to [0,255].
The residual block lives inside the macroblock coefficient array with a
row stride of 16 int16 elements.  Reconstructed samples go to 'cur' with
row pitch 'width'.  When USE_PRED_BLOCK is defined the prediction is read
from 'pred' with a row stride of 20 (see the pred[20]/pred[40]/pred[60]
offsets); otherwise 'cur' already holds the prediction and is updated in
place and 'pred' is unused.
*/
void itrans(int16 *block, uint8 *pred, uint8 *cur, int width)
{
    int e0, e1, e2, e3; /* note, at every step of the calculation, these values */
    /* shall never exceed 16bit sign value, but we don't check */
    int i; /* to save the cycles. */
    int16 *inout;

    inout = block;
    /* pass 1: transform each of the 4 rows in place (row stride 16) */
    for (i = 4; i > 0; i--)
    {
        e0 = inout[0] + inout[2];
        e1 = inout[0] - inout[2];
        e2 = (inout[1] >> 1) - inout[3];
        e3 = inout[1] + (inout[3] >> 1);
        inout[0] = e0 + e3;
        inout[1] = e1 + e2;
        inout[2] = e1 - e2;
        inout[3] = e0 - e3;
        inout += 16;
    }
    /* pass 2: transform one column per iteration, then round,
       scale down, add the prediction and clip */
    for (i = 4; i > 0; i--)
    {
        e0 = block[0] + block[32];
        e1 = block[0] - block[32];
        e2 = (block[16] >> 1) - block[48];
        e3 = block[16] + (block[48] >> 1);
        e0 += e3;
        e3 = (e0 - (e3 << 1)); /* e0-e3 */
        e1 += e2;
        e2 = (e1 - (e2 << 1)); /* e1-e2 */
        /* +32 provides rounding for the >>6 below */
        e0 += 32;
        e1 += 32;
        e2 += 32;
        e3 += 32;
#ifdef USE_PRED_BLOCK
        e0 = pred[0] + (e0 >> 6);
        if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
        e1 = pred[20] + (e1 >> 6);
        if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
        e2 = pred[40] + (e2 >> 6);
        if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
        e3 = pred[60] + (e3 >> 6);
        if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
        /* write the 4 samples of this column, then step to the next one */
        *cur = e0;
        *(cur += width) = e1;
        *(cur += width) = e2;
        cur[width] = e3;
        cur -= (width << 1);
        cur++;
        pred++;
#else
        OSCL_UNUSED_ARG(pred);
        /* prediction already in 'cur': read, add residual, clip, write back */
        e0 = *cur + (e0 >> 6);
        if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
        *cur = e0;
        e1 = *(cur += width) + (e1 >> 6);
        if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
        *cur = e1;
        e2 = *(cur += width) + (e2 >> 6);
        if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
        *cur = e2;
        e3 = cur[width] + (e3 >> 6);
        if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
        cur[width] = e3;
        cur -= (width << 1);
        cur++;
#endif
        block++;
    }
    return ;
}
/* see subclause 8.5.8 */
/*
Same inverse 4x4 transform + reconstruction as itrans() above, but with
prediction-buffer row stride 12 instead of 20 (see the pred[12]/pred[24]/
pred[36] offsets) when USE_PRED_BLOCK is defined — presumably the
chroma-sized prediction block (stride 12 matches the chroma pitch used in
DecodeIntraPCM); confirm against callers.  Without USE_PRED_BLOCK, 'cur'
holds the prediction and is reconstructed in place and 'pred' is unused.
*/
void ictrans(int16 *block, uint8 *pred, uint8 *cur, int width)
{
    int e0, e1, e2, e3; /* note, at every step of the calculation, these values */
    /* shall never exceed 16bit sign value, but we don't check */
    int i; /* to save the cycles. */
    int16 *inout;

    inout = block;
    /* pass 1: transform each of the 4 rows in place (row stride 16) */
    for (i = 4; i > 0; i--)
    {
        e0 = inout[0] + inout[2];
        e1 = inout[0] - inout[2];
        e2 = (inout[1] >> 1) - inout[3];
        e3 = inout[1] + (inout[3] >> 1);
        inout[0] = e0 + e3;
        inout[1] = e1 + e2;
        inout[2] = e1 - e2;
        inout[3] = e0 - e3;
        inout += 16;
    }
    /* pass 2: transform one column per iteration, then round,
       scale down, add the prediction and clip */
    for (i = 4; i > 0; i--)
    {
        e0 = block[0] + block[32];
        e1 = block[0] - block[32];
        e2 = (block[16] >> 1) - block[48];
        e3 = block[16] + (block[48] >> 1);
        e0 += e3;
        e3 = (e0 - (e3 << 1)); /* e0-e3 */
        e1 += e2;
        e2 = (e1 - (e2 << 1)); /* e1-e2 */
        /* +32 provides rounding for the >>6 below */
        e0 += 32;
        e1 += 32;
        e2 += 32;
        e3 += 32;
#ifdef USE_PRED_BLOCK
        e0 = pred[0] + (e0 >> 6);
        if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
        e1 = pred[12] + (e1 >> 6);
        if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
        e2 = pred[24] + (e2 >> 6);
        if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
        e3 = pred[36] + (e3 >> 6);
        if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
        /* write the 4 samples of this column, then step to the next one */
        *cur = e0;
        *(cur += width) = e1;
        *(cur += width) = e2;
        cur[width] = e3;
        cur -= (width << 1);
        cur++;
        pred++;
#else
        OSCL_UNUSED_ARG(pred);
        /* prediction already in 'cur': read, add residual, clip, write back */
        e0 = *cur + (e0 >> 6);
        if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
        *cur = e0;
        e1 = *(cur += width) + (e1 >> 6);
        if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
        *cur = e1;
        e2 = *(cur += width) + (e2 >> 6);
        if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
        *cur = e2;
        e3 = cur[width] + (e3 >> 6);
        if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
        cur[width] = e3;
        cur -= (width << 1);
        cur++;
#endif
        block++;
    }
    return ;
}
/* subclause 8.5.7: 2x2 inverse Hadamard transform and scaling of the
   four chroma DC coefficients, which live at block offsets 0, 4, 64
   and 68.  Qq = QPc/6, Rq = QPc%6. */
void ChromaDCTrans(int16 *block, int Qq, int Rq)
{
    int sum0, diff0, sum1, diff1;
    int dc00, dc01, dc10, dc11;
    int scale = dequant_coefres[Rq][0];

    /* 2x2 butterfly: sums/differences along each row, ... */
    sum0  = block[0] + block[4];
    diff0 = block[0] - block[4];
    sum1  = block[64] + block[68];
    diff1 = block[64] - block[68];

    /* ... then along each column */
    dc00 = sum0 + sum1;
    dc01 = diff0 + diff1;
    dc10 = sum0 - sum1;
    dc11 = diff0 - diff1;

    if (Qq >= 1)
    {
        /* dequant folds into a left shift */
        Qq -= 1;
        block[0]  = (dc00 * scale) << Qq;
        block[4]  = (dc01 * scale) << Qq;
        block[64] = (dc10 * scale) << Qq;
        block[68] = (dc11 * scale) << Qq;
    }
    else
    {
        /* Qq == 0: scale then halve */
        block[0]  = (dc00 * scale) >> 1;
        block[4]  = (dc01 * scale) >> 1;
        block[64] = (dc10 * scale) >> 1;
        block[68] = (dc11 * scale) >> 1;
    }
}
/* Copy a 4x4 block of 8-bit samples from 'pred' (row stride pred_pitch)
   to 'cur' (row stride width), one 32-bit word per row.
   NOTE(review): the word-sized loads/stores rely on both pointers being
   4-byte aligned — confirm at the call sites, as in the original code. */
void copy_block(uint8 *pred, uint8 *cur, int width, int pred_pitch)
{
    int row;

    for (row = 3; row > 0; row--)
    {
        *((uint32*)cur) = *((uint32*)pred);
        pred += pred_pitch;
        cur += width;
    }
    /* last row: no pointer advance needed */
    *((uint32*)cur) = *((uint32*)pred);
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,523 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include <string.h>
#include "avcdec_lib.h"
#include "avcdec_bitstream.h"
AVCDec_Status DecodeIntraPCM(AVCCommonObj *video, AVCDecBitstream *stream)
{
AVCDec_Status status;
int j;
int mb_x, mb_y, offset1;
uint8 *pDst;
uint32 byte0, byte1;
int pitch;
mb_x = video->mb_x;
mb_y = video->mb_y;
#ifdef USE_PRED_BLOCK
pDst = video->pred_block + 84;
pitch = 20;
#else
offset1 = (mb_x << 4) + (mb_y << 4) * video->PicWidthInSamplesL;
pDst = video->currPic->Sl + offset1;
pitch = video->currPic->pitch;
#endif
/* at this point bitstream is byte-aligned */
j = 16;
while (j > 0)
{
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)pDst) = byte0;
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)(pDst + 4)) = byte0;
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)(pDst + 8)) = byte0;
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)(pDst + 12)) = byte0;
j--;
pDst += pitch;
if (status != AVCDEC_SUCCESS) /* check only once per line */
return status;
}
#ifdef USE_PRED_BLOCK
pDst = video->pred_block + 452;
pitch = 12;
#else
offset1 = (offset1 >> 2) + (mb_x << 2);
pDst = video->currPic->Scb + offset1;
pitch >>= 1;
#endif
j = 8;
while (j > 0)
{
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)pDst) = byte0;
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)(pDst + 4)) = byte0;
j--;
pDst += pitch;
if (status != AVCDEC_SUCCESS) /* check only once per line */
return status;
}
#ifdef USE_PRED_BLOCK
pDst = video->pred_block + 596;
pitch = 12;
#else
pDst = video->currPic->Scr + offset1;
#endif
j = 8;
while (j > 0)
{
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)pDst) = byte0;
status = BitstreamReadBits(stream, 8, (uint*) & byte0);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 8);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 16);
status = BitstreamReadBits(stream, 8, (uint*) & byte1);
byte0 |= (byte1 << 24);
*((uint32*)(pDst + 4)) = byte0;
j--;
pDst += pitch;
if (status != AVCDEC_SUCCESS) /* check only once per line */
return status;
}
#ifdef MB_BASED_DEBLOCK
SaveNeighborForIntraPred(video, offset1);
#endif
return AVCDEC_SUCCESS;
}
/* see subclause 7.3.5.3 and readCBPandCoeffsFromNAL() in JM*/
/*
Decode all residual data for one macroblock: the Intra16x16 luma DC block
(when mbMode == AVC_I16), the luma 4x4 AC/level blocks, the chroma DC
blocks and the chroma AC blocks.  Coefficients are CAVLC-decoded via
decvid->residual_block, placed into video->block in raster order through
the zigzag table, and dequantized; the DC blocks additionally get their
inverse Hadamard transforms here.  Side outputs: currMB->nz_coeff (per
4x4 block nonzero-coefficient counts) and video->cbp4x4 (per-block coded
flags).  Returns AVCDEC_FAIL when a decoded run pushes the coefficient
index out of range, AVCDEC_SUCCESS otherwise.
*/
AVCDec_Status residual(AVCDecObject *decvid, AVCMacroblock *currMB)
{
    AVCCommonObj *video = decvid->common;
    int16 *block;
    int level[16], run[16], numcoeff; /* output from residual_block_cavlc */
    int block_x, i, j, k, idx, iCbCr;
    int mbPartIdx, subMbPartIdx, mbPartIdx_X, mbPartIdx_Y;
    int nC, maxNumCoeff = 16;
    int coeffNum, start_scan = 0;
    uint8 *zz_scan;
    int Rq, Qq;
    uint32 cbp4x4 = 0;
    /* in 8.5.4, it only says if it's field macroblock. */
    zz_scan = (uint8*) ZZ_SCAN_BLOCK;
    /* see 8.5.8 for the initialization of these values */
    Qq = video->QPy_div_6;
    Rq = video->QPy_mod_6;
    memset(video->block, 0, sizeof(int16)*NUM_PIXELS_IN_MB);
    if (currMB->mbMode == AVC_I16)
    {
        /* Intra16x16: decode the separate luma DC block first */
        nC = predict_nnz(video, 0, 0);
        decvid->residual_block(decvid, nC, 16, level, run, &numcoeff);
        /* then performs zigzag and transform */
        block = video->block;
        coeffNum = -1;
        for (i = numcoeff - 1; i >= 0; i--)
        {
            coeffNum += run[i] + 1;
            if (coeffNum > 15)
            {
                return AVCDEC_FAIL;
            }
            /* <<2 maps the DC scan position to its 4x4-block DC slot */
            idx = zz_scan[coeffNum] << 2;
            /* idx = ((idx>>2)<<6) + ((idx&3)<<2); */
            block[idx] = level[i];
        }
        /* inverse transform on Intra16x16DCLevel */
        if (numcoeff)
        {
            Intra16DCTrans(block, Qq, Rq);
            cbp4x4 = 0xFFFF;
        }
        /* the 4x4 blocks below then carry AC coefficients only (1..15) */
        maxNumCoeff = 15;
        start_scan = 1;
    }
    memset(currMB->nz_coeff, 0, sizeof(uint8)*24);
    /* luma: 4 8x8 partitions, each with 4 4x4 sub-blocks */
    for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
    {
        mbPartIdx_X = (mbPartIdx & 1) << 1;
        mbPartIdx_Y = mbPartIdx & -2;   /* 0,0,2,2: top/bottom partition row */
        if (currMB->CBP&(1 << mbPartIdx))
        {
            for (subMbPartIdx = 0; subMbPartIdx < 4; subMbPartIdx++)
            {
                i = mbPartIdx_X + (subMbPartIdx & 1); // check this
                j = mbPartIdx_Y + (subMbPartIdx >> 1);
                block = video->block + (j << 6) + (i << 2); //
                nC = predict_nnz(video, i, j);
                decvid->residual_block(decvid, nC, maxNumCoeff, level, run, &numcoeff);
                /* convert to raster scan and quantize*/
                /* Note: for P mb in SP slice and SI mb in SI slice,
                the quantization cannot be done here.
                block[idx] should be assigned with level[k].
                itrans will be done after the prediction.
                There will be transformation on the predicted value,
                then addition with block[idx], then this quantization
                and transform.*/
                coeffNum = -1 + start_scan;
                for (k = numcoeff - 1; k >= 0; k--)
                {
                    coeffNum += run[k] + 1;
                    if (coeffNum > 15)
                    {
                        return AVCDEC_FAIL;
                    }
                    idx = zz_scan[coeffNum];
                    block[idx] = (level[k] * dequant_coefres[Rq][coeffNum]) << Qq ;
                }
                currMB->nz_coeff[(j<<2)+i] = numcoeff;
                if (numcoeff)
                {
                    cbp4x4 |= (1 << ((j << 2) + i));
                }
            }
        }
    }
    /* switch to the chroma quantization parameters */
    Qq = video->QPc_div_6;
    Rq = video->QPc_mod_6;
    if (currMB->CBP & (3 << 4)) /* chroma DC residual present */
    {
        for (iCbCr = 0; iCbCr < 2; iCbCr++)
        {
            /* nC = -1 selects the chroma-DC VLC tables */
            decvid->residual_block(decvid, -1, 4, level, run, &numcoeff);
            block = video->block + 256 + (iCbCr << 3);
            coeffNum = -1;
            for (i = numcoeff - 1; i >= 0; i--)
            {
                coeffNum += run[i] + 1;
                if (coeffNum > 3)
                {
                    return AVCDEC_FAIL;
                }
                /* scatter the 2x2 DC values to the sub-block DC slots */
                block[(coeffNum>>1)*64 + (coeffNum&1)*4] = level[i];
            }
            /* inverse transform on chroma DC */
            /* for P in SP and SI in SI, this function can't be done here,
               must do prediction transform/quant first. */
            if (numcoeff)
            {
                ChromaDCTrans(block, Qq, Rq);
                cbp4x4 |= (iCbCr ? 0xcc0000 : 0x330000);
            }
        }
    }
    if (currMB->CBP & (2 << 4))
    {
        /* chroma AC blocks (coefficients 1..15 of each 4x4 block) */
        for (block_x = 0; block_x < 4; block_x += 2) /* for iCbCr */
        {
            for (j = 4; j < 6; j++) /* for each block inside Cb or Cr */
            {
                for (i = block_x; i < block_x + 2; i++)
                {
                    block = video->block + (j << 6) + (i << 2);
                    nC = predict_nnz_chroma(video, i, j);
                    decvid->residual_block(decvid, nC, 15, level, run, &numcoeff);
                    /* convert to raster scan and quantize */
                    /* for P MB in SP slice and SI MB in SI slice,
                       the dequant and transform cannot be done here.
                       It needs the prediction values. */
                    coeffNum = 0;   /* AC scan starts at position 1 */
                    for (k = numcoeff - 1; k >= 0; k--)
                    {
                        coeffNum += run[k] + 1;
                        if (coeffNum > 15)
                        {
                            return AVCDEC_FAIL;
                        }
                        idx = zz_scan[coeffNum];
                        block[idx] = (level[k] * dequant_coefres[Rq][coeffNum]) << Qq;
                    }
                    /* then transform */
                    // itrans(block); /* transform */
                    currMB->nz_coeff[(j<<2)+i] = numcoeff; //
                    if (numcoeff)
                    {
                        cbp4x4 |= (1 << ((j << 2) + i));
                    }
                }
            }
        }
    }
    video->cbp4x4 = cbp4x4;
    return AVCDEC_SUCCESS;
}
/* see subclause 7.3.5.3.1 and 9.2 and readCoeff4x4_CAVLC() in JM */
/*
 * Parse one CAVLC-coded residual block from the bitstream.
 *
 * decvid       - decoder object (supplies the bitstream)
 * nC           - context for the coeff_token VLC table selection;
 *                nC < 0 selects the chroma-DC table
 * maxNumCoeff  - maximum number of coefficients this block may carry
 *                (e.g. 16 for luma 4x4, 4 for chroma DC, 15 for AC)
 * level[]      - output: nonzero levels, highest-frequency first
 * run[]        - output: run of zeros preceding each level (same order)
 * numcoeff     - output: total number of nonzero coefficients
 *
 * Returns AVCDEC_SUCCESS; caller performs the inverse zig-zag placement.
 */
AVCDec_Status residual_block_cavlc(AVCDecObject *decvid, int nC, int maxNumCoeff,
                                   int *level, int *run, int *numcoeff)
{
    int i, j;
    int TrailingOnes, TotalCoeff;
    AVCDecBitstream *stream = decvid->bitstream;
    int suffixLength;
    uint trailing_ones_sign_flag, level_prefix, level_suffix;
    int levelCode, levelSuffixSize, zerosLeft;
    int run_before;

    /* coeff_token: joint code for TotalCoeff and TrailingOnes (Table 9-5) */
    if (nC >= 0)
    {
        ce_TotalCoeffTrailingOnes(stream, &TrailingOnes, &TotalCoeff, nC);
    }
    else
    {
        /* nC < 0 means chroma DC block: dedicated VLC table */
        ce_TotalCoeffTrailingOnesChromaDC(stream, &TrailingOnes, &TotalCoeff);
    }
    *numcoeff = TotalCoeff;
    /* This part is done quite differently in ReadCoef4x4_CAVLC() */
    if (TotalCoeff == 0)
    {
        return AVCDEC_SUCCESS;
    }
    if (TrailingOnes) /* keep reading the sign of those trailing ones */
    {
        /* instead of reading one bit at a time, read the whole thing at once */
        BitstreamReadBits(stream, TrailingOnes, &trailing_ones_sign_flag);
        trailing_ones_sign_flag <<= 1;
        for (i = 0; i < TrailingOnes; i++)
        {
            /* sign bit set -> level is -1, clear -> +1 (1 - 2 or 1 - 0) */
            level[i] = 1 - ((trailing_ones_sign_flag >> (TrailingOnes - i - 1)) & 2);
        }
    }
    i = TrailingOnes;
    suffixLength = 1;
    /* first non-trailing-one level uses special prefix handling (subclause 9.2.2) */
    if (TotalCoeff > TrailingOnes)
    {
        ce_LevelPrefix(stream, &level_prefix);
        if (TotalCoeff < 11 || TrailingOnes == 3)
        {
            /* initial suffixLength is effectively 0 in this case */
            if (level_prefix < 14)
            {
//                levelSuffixSize = 0;
                levelCode = level_prefix;
            }
            else if (level_prefix == 14)
            {
//                levelSuffixSize = 4;
                BitstreamReadBits(stream, 4, &level_suffix);
                levelCode = 14 + level_suffix;
            }
            else /* if (level_prefix == 15) */
            {
//                levelSuffixSize = 12;
                BitstreamReadBits(stream, 12, &level_suffix);
                levelCode = 30 + level_suffix;
            }
        }
        else
        {
            /*  suffixLength = 1; */
            if (level_prefix < 15)
            {
                levelSuffixSize = suffixLength;
            }
            else
            {
                levelSuffixSize = 12;
            }
            BitstreamReadBits(stream, levelSuffixSize, &level_suffix);
            levelCode = (level_prefix << 1) + level_suffix;
        }
        /* with fewer than 3 trailing ones, |level| >= 2, so bias the code */
        if (TrailingOnes < 3)
        {
            levelCode += 2;
        }
        level[i] = (levelCode + 2) >> 1;
        /* adapt the suffix length for subsequent levels */
        if (level[i] > 3)
        {
            suffixLength = 2;
        }
        /* odd levelCode encodes a negative level */
        if (levelCode & 1)
        {
            level[i] = -level[i];
        }
        i++;
    }
    /* remaining levels: standard prefix/suffix decode with adaptive suffixLength */
    for (j = TotalCoeff - i; j > 0 ; j--)
    {
        ce_LevelPrefix(stream, &level_prefix);
        if (level_prefix < 15)
        {
            levelSuffixSize = suffixLength;
        }
        else
        {
            levelSuffixSize = 12;
        }
        BitstreamReadBits(stream, levelSuffixSize, &level_suffix);
        levelCode = (level_prefix << suffixLength) + level_suffix;
        level[i] = (levelCode >> 1) + 1;
        /* grow suffixLength (capped at 6) as magnitudes increase */
        if (level[i] > (3 << (suffixLength - 1)) && suffixLength < 6)
        {
            suffixLength++;
        }
        if (levelCode & 1)
        {
            level[i] = -level[i];
        }
        i++;
    }
    /* total_zeros: only present when the block is not completely full */
    if (TotalCoeff < maxNumCoeff)
    {
        if (nC >= 0)
        {
            ce_TotalZeros(stream, &zerosLeft, TotalCoeff);
        }
        else
        {
            ce_TotalZerosChromaDC(stream, &zerosLeft, TotalCoeff);
        }
    }
    else
    {
        zerosLeft = 0;
    }
    /* run_before for all but the last coefficient (subclause 9.2.3) */
    for (i = 0; i < TotalCoeff - 1; i++)
    {
        if (zerosLeft > 0)
        {
            ce_RunBefore(stream, &run_before, zerosLeft);
            run[i] = run_before;
        }
        else
        {
            run[i] = 0;
            zerosLeft = 0; // could be negative under error conditions
        }
        zerosLeft = zerosLeft - run[i];
    }
    /* clamp instead of failing on a corrupt stream; last run absorbs the rest */
    if (zerosLeft < 0)
    {
        zerosLeft = 0;
//      return AVCDEC_FAIL;
    }
    run[TotalCoeff-1] = zerosLeft;
    /* leave the inverse zigzag scan part for the caller */
    return AVCDEC_SUCCESS;
}

View File

@@ -0,0 +1,772 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/* Note for optimization: syntax decoding or operations related to B_SLICE should be
commented out by macro definition or function pointers. */
#include <string.h>
#include "avcdec_lib.h"
#include "avcdec_bitstream.h"
const static int mbPart2raster[3][4] = {{0, 0, 0, 0}, {1, 1, 0, 0}, {1, 0, 1, 0}};
/* decode_frame_slice() */
/* decode_one_slice() */
/*
 * Decode all macroblocks belonging to the current slice (subclause 7.3.4).
 * Walks the MBs of the slice's slice group, decoding each via DecodeMB(),
 * optionally deblocking per-MB, and concealing any gap between the previous
 * slice's last MB and this slice's first MB.
 *
 * Returns AVCDEC_PICTURE_READY when the last MB of the picture has been
 * decoded, AVCDEC_SUCCESS when the slice ended mid-picture, AVCDEC_FAIL on
 * a malformed stream.
 */
AVCDec_Status DecodeSlice(AVCDecObject *decvid)
{
    AVCDec_Status status;
    AVCCommonObj *video = decvid->common;
    AVCSliceHeader *sliceHdr = video->sliceHdr;
    AVCMacroblock *currMB ;
    AVCDecBitstream *stream = decvid->bitstream;
    uint slice_group_id;
    uint CurrMbAddr, moreDataFlag;
    /* set the first mb in slice */
    CurrMbAddr = sliceHdr->first_mb_in_slice;
    slice_group_id = video->MbToSliceGroupMap[CurrMbAddr];
    /* a gap between the last decoded MB and this slice's first MB means MBs
       were lost; conceal them (only attempted for constrained_set1 streams) */
    if ((CurrMbAddr && (CurrMbAddr != (uint)(video->mbNum + 1))) && video->currSeqParams->constrained_set1_flag == 1)
    {
        ConcealSlice(decvid, video->mbNum, CurrMbAddr);
    }
    moreDataFlag = 1;
    video->mb_skip_run = -1;  /* -1 = mb_skip_run not yet read for this slice */
    /* while loop , see subclause 7.3.4 */
    do
    {
        if (CurrMbAddr >= video->PicSizeInMbs)
        {
            return AVCDEC_FAIL;
        }
        currMB = video->currMB = &(video->mblock[CurrMbAddr]);
        video->mbNum = CurrMbAddr;
        currMB->slice_id = video->slice_id;  // slice
        /* we can remove this check if we don't support Mbaff. */
        /* we can wrap below into an initMB() function which will also
           do necessary reset of macroblock related parameters. */
        video->mb_x = CurrMbAddr % video->PicWidthInMbs;
        video->mb_y = CurrMbAddr / video->PicWidthInMbs;
        /* check the availability of neighboring macroblocks */
        InitNeighborAvailability(video, CurrMbAddr);
        /* read_macroblock and decode_one_macroblock() */
        status = DecodeMB(decvid);
        if (status != AVCDEC_SUCCESS)
        {
            return status;
        }
#ifdef MB_BASED_DEBLOCK
        if (video->currPicParams->num_slice_groups_minus1 == 0)
        {
            MBInLoopDeblock(video); /* MB-based deblocking */
        }
        else /* this mode cannot be used if the number of slice group is not one. */
        {
            return AVCDEC_FAIL;
        }
#endif
        video->numMBs--;
        moreDataFlag = more_rbsp_data(stream);
        /* go to next MB: skip over MBs belonging to other slice groups */
        while (++CurrMbAddr < video->PicSizeInMbs && video->MbToSliceGroupMap[CurrMbAddr] != (int)slice_group_id)
        {
        }
    }
    while ((moreDataFlag && video->numMBs > 0) || video->mb_skip_run > 0); /* even if no more data, but last few MBs are skipped */
    if (video->numMBs == 0)
    {
        /* whole picture decoded */
        video->newPic = TRUE;
        video->mbNum = 0;  // _Conceal
        return AVCDEC_PICTURE_READY;
    }
    return AVCDEC_SUCCESS;
}
/* read MB mode and motion vectors */
/* perform Intra/Inter prediction and residue */
/* update video->mb_skip_run */
/*
 * Decode a single macroblock: parse its mode (handling P-slice skip runs),
 * prediction info, QP delta and residual, then run intra or inter prediction.
 * Returns AVCDEC_SUCCESS or propagates the first parsing failure.
 */
AVCDec_Status DecodeMB(AVCDecObject *decvid)
{
    AVCDec_Status status;
    AVCCommonObj *video = decvid->common;
    AVCDecBitstream *stream = decvid->bitstream;
    AVCMacroblock *currMB = video->currMB;
    uint mb_type;
    int slice_type = video->slice_type;
    int temp;
    currMB->QPy = video->QPy;
    currMB->QPc = video->QPc;
    if (slice_type == AVC_P_SLICE)
    {
        /* mb_skip_run < 0 means the run count has not been read yet */
        if (video->mb_skip_run < 0)
        {
            ue_v(stream, (uint *)&(video->mb_skip_run));
        }
        if (video->mb_skip_run == 0)
        {
            /* this will not handle the case where the slice ends with a mb_skip_run == 0 and no following MB data */
            ue_v(stream, &mb_type);
            if (mb_type > 30)
            {
                return AVCDEC_FAIL;
            }
            InterpretMBModeP(currMB, mb_type);
            video->mb_skip_run = -1;  /* force re-read before the next MB */
        }
        else
        {
            /* see subclause 7.4.4 for more details on how
               mb_field_decoding_flag is derived in case of skipped MB */
            /* skipped MB: synthesize a P_Skip 16x16 MB with no residual */
            currMB->mb_intra = FALSE;
            currMB->mbMode = AVC_SKIP;
            currMB->MbPartWidth = currMB->MbPartHeight = 16;
            currMB->NumMbPart = 1;
            currMB->NumSubMbPart[0] = currMB->NumSubMbPart[1] =
                currMB->NumSubMbPart[2] = currMB->NumSubMbPart[3] = 1; //
            currMB->SubMbPartWidth[0] = currMB->SubMbPartWidth[1] =
                currMB->SubMbPartWidth[2] = currMB->SubMbPartWidth[3] = currMB->MbPartWidth;
            currMB->SubMbPartHeight[0] = currMB->SubMbPartHeight[1] =
                currMB->SubMbPartHeight[2] = currMB->SubMbPartHeight[3] = currMB->MbPartHeight;
            memset(currMB->nz_coeff, 0, sizeof(uint8)*NUM_BLKS_IN_MB);
            currMB->CBP = 0;
            video->cbp4x4 = 0;
            /* for skipped MB, always look at the first entry in RefPicList */
            currMB->RefIdx[0] = currMB->RefIdx[1] =
                currMB->RefIdx[2] = currMB->RefIdx[3] = video->RefPicList0[0]->RefIdx;
            InterMBPrediction(video);
            video->mb_skip_run--;
            return AVCDEC_SUCCESS;
        }
    }
    else
    {
        /* Then decode mode and MV */
        ue_v(stream, &mb_type);
        if (mb_type > 25)
        {
            return AVCDEC_FAIL;
        }
        InterpretMBModeI(currMB, mb_type);
    }
    if (currMB->mbMode != AVC_I_PCM)
    {
        /* P8/P8ref0 use per-8x8 sub-partition prediction syntax */
        if (currMB->mbMode == AVC_P8 || currMB->mbMode == AVC_P8ref0)
        {
            status = sub_mb_pred(video, currMB, stream);
        }
        else
        {
            status = mb_pred(video, currMB, stream) ;
        }
        if (status != AVCDEC_SUCCESS)
        {
            return status;
        }
        if (currMB->mbMode != AVC_I16)
        {
            /* decode coded_block_pattern */
            status = DecodeCBP(currMB, stream);
            if (status != AVCDEC_SUCCESS)
            {
                return status;
            }
        }
        /* mb_qp_delta is present whenever there is any residual */
        if (currMB->CBP > 0 || currMB->mbMode == AVC_I16)
        {
            se_v(stream, &temp);
            if (temp)
            {
                /* wrap QPy into [0,51]: temp*79>>12 approximates temp/52 */
                temp += (video->QPy + 52);
                currMB->QPy = video->QPy = temp - 52 * (temp * 79 >> 12);
                if (currMB->QPy > 51 || currMB->QPy < 0)
                {
                    /* clamp instead of failing on out-of-range QP */
                    video->QPy = AVC_CLIP3(0, 51, video->QPy);
//                  return AVCDEC_FAIL;
                }
                video->QPy_div_6 = (video->QPy * 43) >> 8;  /* fast /6 */
                video->QPy_mod_6 = video->QPy - 6 * video->QPy_div_6;
                currMB->QPc = video->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->QPy + video->currPicParams->chroma_qp_index_offset)];
                video->QPc_div_6 = (video->QPc * 43) >> 8;
                video->QPc_mod_6 = video->QPc - 6 * video->QPc_div_6;
            }
        }
        /* decode residue and inverse transform */
        status = residual(decvid, currMB);
        if (status != AVCDEC_SUCCESS)
        {
            return status;
        }
    }
    else
    {
        /* I_PCM: raw samples follow on a byte boundary */
        if (stream->bitcnt & 7)
        {
            BitstreamByteAlign(stream);
        }
        /* decode pcm_byte[i] */
        DecodeIntraPCM(video, stream);
        currMB->QPy = 0; /* necessary for deblocking */ // _OPTIMIZE
        currMB->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->currPicParams->chroma_qp_index_offset)];
        /* default values, don't know if really needed */
        currMB->CBP = 0x3F;
        video->cbp4x4 = 0xFFFF;
        currMB->mb_intra = TRUE;
        memset(currMB->nz_coeff, 16, sizeof(uint8)*NUM_BLKS_IN_MB);
        return AVCDEC_SUCCESS;
    }
    /* do Intra/Inter prediction, together with the residue compensation */
    /* This part should be common between the skip and no-skip */
    if (currMB->mbMode == AVC_I4 || currMB->mbMode == AVC_I16)
    {
        IntraMBPrediction(video);
    }
    else
    {
        InterMBPrediction(video);
    }
    return AVCDEC_SUCCESS;
}
/* see subclause 7.3.5.1 */
/*
 * Decode the prediction portion of a non-sub-partitioned macroblock:
 * intra MBs get neighbor-availability derivation (+ intra 4x4 modes and
 * intra_chroma_pred_mode), inter MBs get L0 reference indices and mvd_l0.
 * This decoder handles L0 prediction only; no L1 syntax is parsed here.
 * Returns AVCDEC_FAIL on out-of-range syntax values.
 *
 * FIX(review): removed a dead store of num_ref_idx_l1_active_minus1 into
 * max_ref_idx -- the value was never read after that point.
 */
AVCDec_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
{
    int mbPartIdx;
    AVCSliceHeader *sliceHdr = video->sliceHdr;
    uint max_ref_idx;
    const int *temp_0;
    int16 *temp_1;
    uint code;

    if (currMB->mbMode == AVC_I4 || currMB->mbMode == AVC_I16)
    {
        /* derive which neighbors may feed intra prediction */
        video->intraAvailA = video->intraAvailB = video->intraAvailC = video->intraAvailD = 0;
        if (!video->currPicParams->constrained_intra_pred_flag)
        {
            video->intraAvailA = video->mbAvailA;
            video->intraAvailB = video->mbAvailB;
            video->intraAvailC = video->mbAvailC;
            video->intraAvailD = video->mbAvailD;
        }
        else
        {
            /* constrained intra pred: only intra-coded neighbors are usable */
            if (video->mbAvailA)
            {
                video->intraAvailA = video->mblock[video->mbAddrA].mb_intra;
            }
            if (video->mbAvailB)
            {
                video->intraAvailB = video->mblock[video->mbAddrB].mb_intra;
            }
            if (video->mbAvailC)
            {
                video->intraAvailC = video->mblock[video->mbAddrC].mb_intra;
            }
            if (video->mbAvailD)
            {
                video->intraAvailD = video->mblock[video->mbAddrD].mb_intra;
            }
        }
        if (currMB->mbMode == AVC_I4)
        {
            /* perform prediction to get the actual intra 4x4 pred mode */
            DecodeIntra4x4Mode(video, currMB, stream);
            /* output will be in currMB->i4Mode[4][4] */
        }
        ue_v(stream, &code);
        if (code > 3)
        {
            return AVCDEC_FAIL; /* out of range */
        }
        currMB->intra_chroma_pred_mode = (AVCIntraChromaPredMode)code;
    }
    else
    {
        memset(currMB->ref_idx_L0, 0, sizeof(int16)*4);
        /* see subclause 7.4.5.1 for the range of ref_idx_lX */
        // max_ref_idx = sliceHdr->num_ref_idx_l0_active_minus1;
        max_ref_idx = video->refList0Size - 1;
        /* decode ref index for L0; only present with more than one ref pic */
        if (sliceHdr->num_ref_idx_l0_active_minus1 > 0)
        {
            for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
            {
                te_v(stream, &code, max_ref_idx);
                if (code > max_ref_idx)
                {
                    return AVCDEC_FAIL;
                }
                currMB->ref_idx_L0[mbPartIdx] = code;
            }
        }
        /* populate ref_idx_L0: expand per-partition indices into all 4 slots,
           writing backwards so earlier entries are still the original values */
        temp_0 = &mbPart2raster[currMB->mbMode-AVC_P16][0];
        temp_1 = &currMB->ref_idx_L0[3];
        *temp_1-- = currMB->ref_idx_L0[*temp_0++];
        *temp_1-- = currMB->ref_idx_L0[*temp_0++];
        *temp_1-- = currMB->ref_idx_L0[*temp_0++];
        *temp_1-- = currMB->ref_idx_L0[*temp_0++];
        /* Global reference index, these values are used in deblock */
        currMB->RefIdx[0] = video->RefPicList0[currMB->ref_idx_L0[0]]->RefIdx;
        currMB->RefIdx[1] = video->RefPicList0[currMB->ref_idx_L0[1]]->RefIdx;
        currMB->RefIdx[2] = video->RefPicList0[currMB->ref_idx_L0[2]]->RefIdx;
        currMB->RefIdx[3] = video->RefPicList0[currMB->ref_idx_L0[3]]->RefIdx;
        /* decode mvd_l0 (one x/y pair per MB partition) */
        for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
        {
            se_v(stream, &(video->mvd_l0[mbPartIdx][0][0]));
            se_v(stream, &(video->mvd_l0[mbPartIdx][0][1]));
        }
    }
    return AVCDEC_SUCCESS;
}
/* see subclause 7.3.5.2 */
/*
 * Decode the prediction portion of a P_8x8 / P_8x8ref0 macroblock:
 * four sub_mb_type codes, optional L0 reference indices (skipped for
 * P_8x8ref0, which implies ref 0), and mvd_l0 per sub-partition.
 * Returns AVCDEC_FAIL on out-of-range syntax values.
 *
 * FIX(review): removed a dead store of num_ref_idx_l1_active_minus1 into
 * max_ref_idx -- the value was never read after that point.
 */
AVCDec_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
{
    int mbPartIdx, subMbPartIdx;
    AVCSliceHeader *sliceHdr = video->sliceHdr;
    uint max_ref_idx;
    uint sub_mb_type[4];
    uint code;

    memset(currMB->ref_idx_L0, 0, sizeof(int16)*4);
    for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
    {
        ue_v(stream, &(sub_mb_type[mbPartIdx]));
        if (sub_mb_type[mbPartIdx] > 3)
        {
            return AVCDEC_FAIL;
        }
    }
    /* assign values to currMB->NumSubMbPart[], SubMbPartWidth/Height[] */
    InterpretSubMBModeP(currMB, sub_mb_type);
    /* see subclause 7.4.5.1 for the range of ref_idx_lX */
    // max_ref_idx = sliceHdr->num_ref_idx_l0_active_minus1;
    max_ref_idx = video->refList0Size - 1;
    if (sliceHdr->num_ref_idx_l0_active_minus1 > 0 && currMB->mbMode != AVC_P8ref0)
    {
        for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
        {
            te_v(stream, &code, max_ref_idx);
            if (code > max_ref_idx)
            {
                return AVCDEC_FAIL;
            }
            currMB->ref_idx_L0[mbPartIdx] = code;
        }
    }
    for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
    {
        /* one mvd x/y pair per sub-partition of this 8x8 block */
        for (subMbPartIdx = 0; subMbPartIdx < currMB->NumSubMbPart[mbPartIdx]; subMbPartIdx++)
        {
            se_v(stream, &(video->mvd_l0[mbPartIdx][subMbPartIdx][0]));
            se_v(stream, &(video->mvd_l0[mbPartIdx][subMbPartIdx][1]));
        }
        /* used in deblocking */
        currMB->RefIdx[mbPartIdx] = video->RefPicList0[currMB->ref_idx_L0[mbPartIdx]]->RefIdx;
    }
    return AVCDEC_SUCCESS;
}
/* Fill in macroblock mode fields for an intra mb_type (I-slice numbering):
   0 -> I_4x4, 1..24 -> I_16x16 (pred mode + CBP packed into mb_type),
   25+ -> I_PCM. */
void InterpretMBModeI(AVCMacroblock *mblock, uint mb_type)
{
    mblock->NumMbPart = 1;
    mblock->mb_intra = TRUE;

    if (mb_type == 0)   /* I_4x4 */
    {
        mblock->mbMode = AVC_I4;
        return;
    }
    if (mb_type >= 25)  /* I_PCM */
    {
        mblock->mbMode = AVC_I_PCM;
        return;
    }

    /* I_16x16: mb_type-1 in 0..23 packs pred mode (low 2 bits) and CBP */
    {
        uint idx = mb_type - 1;
        mblock->mbMode = AVC_I16;
        mblock->i16Mode = (AVCIntra16x16PredMode)(idx & 0x3);
        if (idx >= 12)
        {
            /* upper half of the range: all four luma 8x8 blocks coded */
            mblock->CBP = (((idx - 12) >> 2) << 4) | 0x0F;
        }
        else
        {
            mblock->CBP = (idx >> 2) << 4;
        }
    }
    return;
}
/* Fill in macroblock mode/partition fields for a P-slice mb_type.
   mb_type 0..4 are the inter modes; 5 and above map to the intra table. */
void InterpretMBModeP(AVCMacroblock *mblock, uint mb_type)
{
    const static AVCMBMode kModeTab[5] = {AVC_P16, AVC_P16x8, AVC_P8x16, AVC_P8, AVC_P8ref0};
    const static int kPartW[5] = {16, 16, 8, 8, 8};
    const static int kPartH[5] = {16, 8, 16, 8, 8};
    const static int kNumPart[5] = {1, 2, 2, 4, 4};
    int k;

    mblock->mb_intra = FALSE;

    if (mb_type >= 5)
    {
        /* intra MB inside a P slice: reuse the intra interpretation */
        InterpretMBModeI(mblock, mb_type - 5);
        /* set MV and Ref_Idx codes of Intra blocks in P-slices */
        memset(mblock->mvL0, 0, sizeof(int32)*16);
        mblock->ref_idx_L0[0] = mblock->ref_idx_L0[1] =
            mblock->ref_idx_L0[2] = mblock->ref_idx_L0[3] = -1;
        return;
    }

    mblock->mbMode = kModeTab[mb_type];
    mblock->MbPartWidth = kPartW[mb_type];
    mblock->MbPartHeight = kPartH[mb_type];
    mblock->NumMbPart = kNumPart[mb_type];
    /* inter modes here carry no sub-partitions: one sub-part per partition,
       sized like the partition itself */
    for (k = 0; k < 4; k++)
    {
        mblock->NumSubMbPart[k] = 1;
        mblock->SubMbPartWidth[k] = mblock->MbPartWidth;
        mblock->SubMbPartHeight[k] = mblock->MbPartHeight;
    }
    return;
}
/* Fill in macroblock mode/partition fields for a B-slice mb_type.
   mb_type 0..22 are the inter/direct modes; 23 and above map to the
   intra table. Per-partition prediction modes come from lookup tables. */
void InterpretMBModeB(AVCMacroblock *mblock, uint mb_type)
{
    const static int kPartW[23] = {8, 16, 16, 16, 16, 8, 16, 8, 16, 8,
                                   16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 8
                                  };
    const static int kPartH[23] = {8, 16, 16, 16, 8, 16, 8, 16, 8,
                                   16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8
                                  };
    /* see enum AVCMBType declaration */
    const static AVCMBMode kModeTab[23] = {AVC_BDirect16, AVC_P16, AVC_P16, AVC_P16,
                                           AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16,
                                           AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16,
                                           AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P8
                                          };
    const static int kPredMode0[23] = {3, 0, 1, 2, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 2, 2, 2, 2, -1};
    const static int kPredMode1[23] = { -1, -1, -1, -1, 0, 0, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 0, 0, 1, 1, 2, 2, -1};
    const static int kNumPart[23] = { -1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4};

    mblock->mb_intra = FALSE;

    if (mb_type >= 23)
    {
        /* intra MB inside a B slice */
        InterpretMBModeI(mblock, mb_type - 23);
        return;
    }

    mblock->mbMode = kModeTab[mb_type];
    mblock->NumMbPart = kNumPart[mb_type];
    mblock->MBPartPredMode[0][0] = (AVCPredMode)kPredMode0[mb_type];
    if (mblock->NumMbPart > 1)
    {
        mblock->MBPartPredMode[1][0] = (AVCPredMode)kPredMode1[mb_type];
    }
    mblock->MbPartWidth = kPartW[mb_type];
    mblock->MbPartHeight = kPartH[mb_type];
    return;
}
/* Fill in macroblock mode fields for an SI-slice mb_type:
   0 -> SI_4x4; any other value reuses the intra table shifted by one. */
void InterpretMBModeSI(AVCMacroblock *mblock, uint mb_type)
{
    mblock->mb_intra = TRUE;

    if (mb_type != 0)
    {
        InterpretMBModeI(mblock, mb_type - 1);
        return;
    }

    mblock->mbMode = AVC_SI4;
    /* remaining fields are not applicable for SI_4x4 */
    return;
}
/* input is the four decoded sub_mb_type values (one per 8x8 partition) */
/* Fill in sub-partition counts and sizes for a P_8x8/P_8x8ref0 MB:
   sub_mb_type 0..3 = 8x8, 8x4, 4x8, 4x4. */
void InterpretSubMBModeP(AVCMacroblock *mblock, uint *sub_mb_type)
{
    const static int kSubW[4] = {8, 8, 4, 4};
    const static int kSubH[4] = {8, 4, 8, 4};
    const static int kSubN[4] = {1, 2, 2, 4};
    int part;

    for (part = 0; part < 4; part++)
    {
        int t = (int)sub_mb_type[part];
        mblock->NumSubMbPart[part] = kSubN[t];
        mblock->SubMbPartWidth[part] = kSubW[t];
        mblock->SubMbPartHeight[part] = kSubH[t];
    }
    return;
}
/* input is mblock->sub_mb_type[] */
/*
 * Fill in sub-partition info for a B-slice 8x8 MB from the four decoded
 * sub_mb_type values (H.264 Table 7-18): sub mode, count, size, and the
 * per-4x4-block prediction mode (0=L0, 1=L1, 2=Bi, 3=Direct).
 *
 * FIX(review): map2numSubPart had only 12 initializers for 13 entries --
 * the leading value for B_Direct_8x8 (4 sub-parts, matching the 4x4
 * width/height entries) was missing, which shifted every count by one and
 * left sub_mb_type 12 with an implicit 0.
 */
void InterpretSubMBModeB(AVCMacroblock *mblock, uint *sub_mb_type)
{
    int i, j, sub_type;
    /* see enum AVCMBType declaration */
    const static AVCSubMBMode map2subMbMode[13] = {AVC_BDirect8, AVC_8x8, AVC_8x8,
            AVC_8x8, AVC_8x4, AVC_4x8, AVC_8x4, AVC_4x8, AVC_8x4, AVC_4x8, AVC_4x4, AVC_4x4, AVC_4x4
                                                  };
    const static int map2subPartWidth[13] = {4, 8, 8, 8, 8, 4, 8, 4, 8, 4, 4, 4, 4};
    const static int map2subPartHeight[13] = {4, 8, 8, 8, 4, 8, 4, 8, 4, 8, 4, 4, 4};
    const static int map2numSubPart[13] = {4, 1, 1, 1, 2, 2, 2, 2, 2, 2, 4, 4, 4};
    const static int map2predMode[13] = {3, 0, 1, 2, 0, 0, 1, 1, 2, 2, 0, 1, 2};

    for (i = 0; i < 4 ; i++)
    {
        sub_type = (int) sub_mb_type[i];
        mblock->subMbMode[i] = map2subMbMode[sub_type];
        mblock->NumSubMbPart[i] = map2numSubPart[sub_type];
        mblock->SubMbPartWidth[i] = map2subPartWidth[sub_type];
        mblock->SubMbPartHeight[i] = map2subPartHeight[sub_type];
        for (j = 0; j < 4; j++)
        {
            mblock->MBPartPredMode[i][j] = (AVCPredMode)map2predMode[sub_type];
        }
    }
    return;
}
/* see subclause 8.3.1 */
/*
 * Decode the 16 intra 4x4 prediction modes of an I_4x4 macroblock.
 * For each 4x4 block (walked in the 8x8-then-zigzag order used by the
 * standard), the predicted mode is min(modeA, modeB) of the left/top
 * neighbor blocks (DC when a neighbor is unavailable or not I4); the
 * bitstream either confirms the prediction (flag set) or supplies a
 * 3-bit remapped mode. Results land in currMB->i4Mode[16].
 */
AVCDec_Status DecodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
{
    int intra4x4PredModeA = 0, intra4x4PredModeB = 0, predIntra4x4PredMode = 0;
    int component, SubBlock_indx, block_x, block_y;
    int dcOnlyPredictionFlag;
    uint prev_intra4x4_pred_mode_flag[16];
    int rem_intra4x4_pred_mode[16];
    int bindx = 0;
    for (component = 0; component < 4; component++) /* partition index */
    {
        /* top-left 4x4 coordinate (in block units) of this 8x8 partition */
        block_x = ((component & 1) << 1);
        block_y = ((component >> 1) << 1);
        for (SubBlock_indx = 0; SubBlock_indx < 4; SubBlock_indx++) /* sub-partition index */
        {
            BitstreamRead1Bit(stream, &(prev_intra4x4_pred_mode_flag[bindx]));
            if (!prev_intra4x4_pred_mode_flag[bindx])
            {
                /* only read the remapped mode when prediction is not used */
                BitstreamReadBits(stream, 3, (uint*)&(rem_intra4x4_pred_mode[bindx]));
            }
            dcOnlyPredictionFlag = 0;
            /* mode of the block to the left (A) */
            if (block_x > 0)
            {
                intra4x4PredModeA = currMB->i4Mode[(block_y << 2) + block_x - 1 ];
            }
            else
            {
                /* left neighbor is in the adjacent MB */
                if (video->intraAvailA)
                {
                    if (video->mblock[video->mbAddrA].mbMode == AVC_I4)
                    {
                        intra4x4PredModeA = video->mblock[video->mbAddrA].i4Mode[(block_y << 2) + 3];
                    }
                    else
                    {
                        intra4x4PredModeA = AVC_I4_DC;
                    }
                }
                else
                {
                    dcOnlyPredictionFlag = 1;
                }
            }
            /* mode of the block above (B) */
            if (block_y > 0)
            {
                intra4x4PredModeB = currMB->i4Mode[((block_y-1) << 2) + block_x];
            }
            else
            {
                /* top neighbor is in the adjacent MB */
                if (video->intraAvailB)
                {
                    if (video->mblock[video->mbAddrB].mbMode == AVC_I4)
                    {
                        intra4x4PredModeB = video->mblock[video->mbAddrB].i4Mode[(3 << 2) + block_x];
                    }
                    else
                    {
                        intra4x4PredModeB = AVC_I4_DC;
                    }
                }
                else
                {
                    dcOnlyPredictionFlag = 1;
                }
            }
            /* any unavailable neighbor forces the DC prediction */
            if (dcOnlyPredictionFlag)
            {
                intra4x4PredModeA = intra4x4PredModeB = AVC_I4_DC;
            }
            predIntra4x4PredMode = AVC_MIN(intra4x4PredModeA, intra4x4PredModeB);
            if (prev_intra4x4_pred_mode_flag[bindx])
            {
                currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)predIntra4x4PredMode;
            }
            else
            {
                /* rem value skips over the predicted mode (subclause 8.3.1.1) */
                if (rem_intra4x4_pred_mode[bindx] < predIntra4x4PredMode)
                {
                    currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)rem_intra4x4_pred_mode[bindx];
                }
                else
                {
                    currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)(rem_intra4x4_pred_mode[bindx] + 1);
                }
            }
            bindx++;
            /* zigzag within the 8x8: right, down-left, right */
            block_y += (SubBlock_indx & 1) ;
            block_x += (1 - 2 * (SubBlock_indx & 1)) ;
        }
    }
    return AVCDEC_SUCCESS;
}
/*
 * Conceal lost macroblocks [mbnum_start, mbnum_end) by decoding each as a
 * skipped MB: zero residual, 16x16 partition, ref index 0, then running
 * inter prediction (effectively copying from the first reference picture).
 * Fails if no reference picture is available (e.g. loss on the first frame).
 */
AVCDec_Status ConcealSlice(AVCDecObject *decvid, int mbnum_start, int mbnum_end)
{
    AVCCommonObj *video = decvid->common;
    AVCMacroblock *currMB ;
    int CurrMbAddr;

    /* cannot conceal without a reference picture to copy from */
    if (video->RefPicList0[0] == NULL)
    {
        return AVCDEC_FAIL;
    }
    for (CurrMbAddr = mbnum_start; CurrMbAddr < mbnum_end; CurrMbAddr++)
    {
        currMB = video->currMB = &(video->mblock[CurrMbAddr]);
        video->mbNum = CurrMbAddr;
        currMB->slice_id = video->slice_id++;  // slice
        /* we can remove this check if we don't support Mbaff. */
        /* we can wrap below into an initMB() function which will also
           do necessary reset of macroblock related parameters. */
        video->mb_x = CurrMbAddr % video->PicWidthInMbs;
        video->mb_y = CurrMbAddr / video->PicWidthInMbs;
        /* check the availability of neighboring macroblocks */
        InitNeighborAvailability(video, CurrMbAddr);
        /* fabricate a skipped P MB: single 16x16 partition, no coefficients */
        currMB->mb_intra = FALSE;
        currMB->mbMode = AVC_SKIP;
        currMB->MbPartWidth = currMB->MbPartHeight = 16;
        currMB->NumMbPart = 1;
        currMB->NumSubMbPart[0] = currMB->NumSubMbPart[1] =
            currMB->NumSubMbPart[2] = currMB->NumSubMbPart[3] = 1;
        currMB->SubMbPartWidth[0] = currMB->SubMbPartWidth[1] =
            currMB->SubMbPartWidth[2] = currMB->SubMbPartWidth[3] = currMB->MbPartWidth;
        currMB->SubMbPartHeight[0] = currMB->SubMbPartHeight[1] =
            currMB->SubMbPartHeight[2] = currMB->SubMbPartHeight[3] = currMB->MbPartHeight;
        /* mid-range QP for deblocking of the concealed MB */
        currMB->QPy = 26;
        currMB->QPc = 26;
        memset(currMB->nz_coeff, 0, sizeof(uint8)*NUM_BLKS_IN_MB);
        currMB->CBP = 0;
        video->cbp4x4 = 0;
        /* for skipped MB, always look at the first entry in RefPicList */
        currMB->RefIdx[0] = currMB->RefIdx[1] =
            currMB->RefIdx[2] = currMB->RefIdx[3] = video->RefPicList0[0]->RefIdx;
        InterMBPrediction(video);
        video->numMBs--;
    }
    return AVCDEC_SUCCESS;
}

View File

@@ -0,0 +1,815 @@
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avcdec_lib.h"
#include "avcdec_bitstream.h"
//#define PV_ARM_V5
/* PV_CLZ(A,B): count leading zeros of the 16-bit-justified value B, adding
   the count to A. The ARM v5 variant uses the CLZ instruction (minus 16 to
   compensate for the 32-bit register); the portable variant shifts until
   bit 15 is set. B must be nonzero (callers OR in a 1 bit). */
#ifdef PV_ARM_V5
#define PV_CLZ(A,B) __asm{CLZ (A),(B)} \
    A -= 16;
#else
#define PV_CLZ(A,B) while (((B) & 0x8000) == 0) {(B) <<=1; A++;}
#endif
/* PV_NO_CLZ selects the table-free coeff_token decoding path; the VLC
   tables guarded by #ifndef PV_NO_CLZ below are compiled out. */
#define PV_NO_CLZ
#ifndef PV_NO_CLZ
typedef struct tagVLCNumCoeffTrail
{
int trailing;
int total_coeff;
int length;
} VLCNumCoeffTrail;
typedef struct tagShiftOffset
{
int shift;
int offset;
} ShiftOffset;
const VLCNumCoeffTrail NumCoeffTrailOnes[3][67] =
{
{{0, 0, 1}, {1, 1, 2}, {2, 2, 3}, {1, 2, 6}, {0, 1, 6}, {3, 3, 5}, {3, 3, 5}, {3, 5, 7},
{2, 3, 7}, {3, 4, 6}, {3, 4, 6}, {3, 6, 8}, {2, 4, 8}, {1, 3, 8}, {0, 2, 8}, {3, 7, 9},
{2, 5, 9}, {1, 4, 9}, {0, 3, 9}, {3, 8, 10}, {2, 6, 10}, {1, 5, 10}, {0, 4, 10}, {3, 9, 11},
{2, 7, 11}, {1, 6, 11}, {0, 5, 11}, {0, 8, 13}, {2, 9, 13}, {1, 8, 13}, {0, 7, 13}, {3, 10, 13},
{2, 8, 13}, {1, 7, 13}, {0, 6, 13}, {3, 12, 14}, {2, 11, 14}, {1, 10, 14}, {0, 10, 14}, {3, 11, 14},
{2, 10, 14}, {1, 9, 14}, {0, 9, 14}, {3, 14, 15}, {2, 13, 15}, {1, 12, 15}, {0, 12, 15}, {3, 13, 15},
{2, 12, 15}, {1, 11, 15}, {0, 11, 15}, {3, 16, 16}, {2, 15, 16}, {1, 15, 16}, {0, 14, 16}, {3, 15, 16},
{2, 14, 16}, {1, 14, 16}, {0, 13, 16}, {0, 16, 16}, {2, 16, 16}, {1, 16, 16}, {0, 15, 16}, {1, 13, 15},
{ -1, -1, -1}, { -1, -1, -1}, { -1, -1, -1}},
{{1, 1, 2}, {0, 0, 2}, {3, 4, 4}, {3, 3, 4}, {2, 2, 3}, {2, 2, 3}, {3, 6, 6}, {2, 3, 6},
{1, 3, 6}, {0, 1, 6}, {3, 5, 5}, {3, 5, 5}, {1, 2, 5}, {1, 2, 5}, {3, 7, 6}, {2, 4, 6},
{1, 4, 6}, {0, 2, 6}, {3, 8, 7}, {2, 5, 7}, {1, 5, 7}, {0, 3, 7}, {0, 5, 8}, {2, 6, 8},
{1, 6, 8}, {0, 4, 8}, {3, 9, 9}, {2, 7, 9}, {1, 7, 9}, {0, 6, 9}, {3, 11, 11}, {2, 9, 11},
{1, 9, 11}, {0, 8, 11}, {3, 10, 11}, {2, 8, 11}, {1, 8, 11}, {0, 7, 11}, {0, 11, 12}, {2, 11, 12},
{1, 11, 12}, {0, 10, 12}, {3, 12, 12}, {2, 10, 12}, {1, 10, 12}, {0, 9, 12}, {3, 14, 13}, {2, 13, 13},
{1, 13, 13}, {0, 13, 13}, {3, 13, 13}, {2, 12, 13}, {1, 12, 13}, {0, 12, 13}, {1, 15, 14}, {0, 15, 14},
{2, 15, 14}, {1, 14, 14}, {2, 14, 13}, {2, 14, 13}, {0, 14, 13}, {0, 14, 13}, {3, 16, 14}, {2, 16, 14},
{1, 16, 14}, {0, 16, 14}, {3, 15, 13}},
{{3, 7, 4}, {3, 6, 4}, {3, 5, 4}, {3, 4, 4}, {3, 3, 4}, {2, 2, 4}, {1, 1, 4}, {0, 0, 4},
{1, 5, 5}, {2, 5, 5}, {1, 4, 5}, {2, 4, 5}, {1, 3, 5}, {3, 8, 5}, {2, 3, 5}, {1, 2, 5},
{0, 3, 6}, {2, 7, 6}, {1, 7, 6}, {0, 2, 6}, {3, 9, 6}, {2, 6, 6}, {1, 6, 6}, {0, 1, 6},
{0, 7, 7}, {0, 6, 7}, {2, 9, 7}, {0, 5, 7}, {3, 10, 7}, {2, 8, 7}, {1, 8, 7}, {0, 4, 7},
{3, 12, 8}, {2, 11, 8}, {1, 10, 8}, {0, 9, 8}, {3, 11, 8}, {2, 10, 8}, {1, 9, 8}, {0, 8, 8},
{0, 12, 9}, {2, 13, 9}, {1, 12, 9}, {0, 11, 9}, {3, 13, 9}, {2, 12, 9}, {1, 11, 9}, {0, 10, 9},
{1, 15, 10}, {0, 14, 10}, {3, 14, 10}, {2, 14, 10}, {1, 14, 10}, {0, 13, 10}, {1, 13, 9}, {1, 13, 9},
{1, 16, 10}, {0, 15, 10}, {3, 15, 10}, {2, 15, 10}, {3, 16, 10}, {2, 16, 10}, {0, 16, 10}, { -1, -1, -1},
{ -1, -1, -1}, { -1, -1, -1}, { -1, -1, -1}}
};
const ShiftOffset NumCoeffTrailOnes_indx[3][15] =
{
{{15, -1}, {14, 0}, {13, 1}, {10, -1}, {9, 3}, {8, 7}, {7, 11}, {6, 15},
{5, 19}, {3, 19}, {2, 27}, {1, 35}, {0, 43}, {0, 55}, {1, 62}},
{{14, -2}, {12, -2}, {10, -2}, {10, 10}, {9, 14}, {8, 18}, {7, 22}, {5, 22},
{4, 30}, {3, 38}, {2, 46}, {2, 58}, {3, 65}, {16, 0}, {16, 0}},
{{12, -8}, {11, 0}, {10, 8}, {9, 16}, {8, 24}, {7, 32}, {6, 40}, {6, 52},
{6, 58}, {6, 61}, {16, 0}, {16, 0}, {16, 0}, {16, 0}, {16, 0}}
};
const static int nC_table[8] = {0, 0, 1, 1, 2, 2, 2, 2};
#endif
/**
See algorithm in subclause 9.1, Table 9-1, Table 9-2.
Decode an unsigned Exp-Golomb code ue(v) into *codeNum.
Fast path: peek 16 bits and decode codes up to 15 bits (prefix <= 7 zeros)
without a second bitstream access; longer codes fall back to a full read. */
AVCDec_Status ue_v(AVCDecBitstream *bitstream, uint *codeNum)
{
    uint temp, tmp_cnt;
    int leading_zeros = 0;
    BitstreamShowBits(bitstream, 16, &temp);
    tmp_cnt = temp | 0x1;  /* guarantee a set bit so PV_CLZ terminates */
    PV_CLZ(leading_zeros, tmp_cnt)
    if (leading_zeros < 8)
    {
        /* whole codeword is inside the 16 peeked bits */
        *codeNum = (temp >> (15 - (leading_zeros << 1))) - 1;
        BitstreamFlushBits(bitstream, (leading_zeros << 1) + 1);
    }
    else
    {
        /* long codeword: read prefix+suffix (2*lz+1 bits) directly */
        BitstreamReadBits(bitstream, (leading_zeros << 1) + 1, &temp);
        *codeNum = temp - 1;
    }
    return AVCDEC_SUCCESS;
}
/**
See subclause 9.1.1, Table 9-3.
Decode a signed Exp-Golomb code se(v) into *value: the raw codeword value
(codeNum+1) is halved for the magnitude and its lsb selects the sign
(even codeNum -> negative, per Table 9-3). */
AVCDec_Status se_v(AVCDecBitstream *bitstream, int *value)
{
    uint temp, tmp_cnt;
    int leading_zeros = 0;
    BitstreamShowBits(bitstream, 16, &temp);
    tmp_cnt = temp | 0x1;  /* guarantee a set bit so PV_CLZ terminates */
    PV_CLZ(leading_zeros, tmp_cnt)
    if (leading_zeros < 8)
    {
        /* whole codeword is inside the 16 peeked bits; temp = codeNum+1 */
        temp >>= (15 - (leading_zeros << 1));
        BitstreamFlushBits(bitstream, (leading_zeros << 1) + 1);
    }
    else
    {
        BitstreamReadBits(bitstream, (leading_zeros << 1) + 1, &temp);
    }
    *value = temp >> 1;
    if (temp & 0x01) // lsb is signed bit
        *value = -(*value);
//  leading_zeros = temp >> 1;
//  *value = leading_zeros - (leading_zeros*2*(temp&1));
    return AVCDEC_SUCCESS;
}
/*
 * Decode a signed Exp-Golomb code whose value may need a full 32 bits
 * (used for a few SPS/PPS syntax elements). codeNum is rebuilt from the
 * prefix length and info bits, then mapped to a signed value per
 * subclause 9.1.1 (even codeNum -> negative).
 */
AVCDec_Status se_v32bit(AVCDecBitstream *bitstream, int32 *value)
{
    int leadingZeros;
    uint32 infobits;
    uint32 codeNum;
    if (AVCDEC_SUCCESS != GetEGBitstring32bit(bitstream, &leadingZeros, &infobits))
        return AVCDEC_FAIL;
    codeNum = (1 << leadingZeros) - 1 + infobits;
    *value = (codeNum + 1) / 2;
    if ((codeNum & 0x01) == 0) // lsb is signed bit
        *value = -(*value);
    return AVCDEC_SUCCESS;
}
/* Decode a truncated Exp-Golomb code te(v) (subclause 9.1):
   when the value range allows only 0 or 1, the code is a single
   inverted bit; otherwise it is a regular ue(v). */
AVCDec_Status te_v(AVCDecBitstream *bitstream, uint *value, uint range)
{
    if (range <= 1)
    {
        uint bit;
        BitstreamRead1Bit(bitstream, &bit);
        *value = 1 - bit;  /* single-bit code is inverted */
    }
    else
    {
        ue_v(bitstream, value);
    }
    return AVCDEC_SUCCESS;
}
/* This function is only used for syntax with range from -2^31 to 2^31-1 */
/* only a few of them in the SPS and PPS */
/*
 * Read a (possibly long) Exp-Golomb codeword one piece at a time:
 * counts the leading zero bits up to the terminating 1, then reads that
 * many info bits. Splits the info read in two on 16-bit `uint` targets.
 */
AVCDec_Status GetEGBitstring32bit(AVCDecBitstream *bitstream, int *leadingZeros, uint32 *infobits)
{
    int bit_value;
    uint info_temp;
    *leadingZeros = 0;
    /* scan bit-by-bit until the prefix-terminating 1 */
    BitstreamRead1Bit(bitstream, (uint*)&bit_value);
    while (!bit_value)
    {
        (*leadingZeros)++;
        BitstreamRead1Bit(bitstream, (uint*)&bit_value);
    }
    if (*leadingZeros > 0)
    {
        if (sizeof(uint) == 4) /* 32 bit machine */
        {
            BitstreamReadBits(bitstream, *leadingZeros, (uint*)&info_temp);
            *infobits = (uint32)info_temp;
        }
        else if (sizeof(uint) == 2) /* 16 bit machine */
        {
            /* read in two chunks since a single read caps at 16 bits */
            *infobits = 0;
            if (*leadingZeros > 16)
            {
                BitstreamReadBits(bitstream, 16, (uint*)&info_temp);
                (*leadingZeros) -= 16;
                *infobits = ((uint32)info_temp) << (*leadingZeros);
            }
            BitstreamReadBits(bitstream, *leadingZeros, (uint*)&info_temp);
            *infobits |= (uint32)info_temp ;
        }
    }
    else
        *infobits = 0;
    return AVCDEC_SUCCESS;
}
/* see Table 9-4 assignment of codeNum to values of coded_block_pattern. */
const static uint8 MapCBP[48][2] =
{
{47, 0}, {31, 16}, {15, 1}, { 0, 2}, {23, 4}, {27, 8}, {29, 32}, {30, 3}, { 7, 5}, {11, 10}, {13, 12}, {14, 15},
{39, 47}, {43, 7}, {45, 11}, {46, 13}, {16, 14}, { 3, 6}, { 5, 9}, {10, 31}, {12, 35}, {19, 37}, {21, 42}, {26, 44},
{28, 33}, {35, 34}, {37, 36}, {42, 40}, {44, 39}, { 1, 43}, { 2, 45}, { 4, 46}, { 8, 17}, {17, 18}, {18, 20}, {20, 24},
{24, 19}, { 6, 21}, { 9, 26}, {22, 28}, {25, 23}, {32, 27}, {33, 29}, {34, 30}, {36, 22}, {40, 25}, {38, 38}, {41, 41},
};
/* Decode coded_block_pattern (subclause 7.4.5, Table 9-4): read the ue(v)
   codeNum and map it through MapCBP, using the intra-4x4 column for I4 MBs
   and the inter column otherwise. Stores the result in currMB->CBP. */
AVCDec_Status DecodeCBP(AVCMacroblock *currMB, AVCDecBitstream *stream)
{
    uint codeNum;
    int column;

    ue_v(stream, &codeNum);
    if (codeNum >= 48)
    {
        return AVCDEC_FAIL;  /* Table 9-4 only defines codeNum 0..47 */
    }

    /* column 0 applies to intra 4x4 MBs, column 1 to all other modes */
    column = (currMB->mbMode == AVC_I4) ? 0 : 1;
    currMB->CBP = MapCBP[codeNum][column];
    return AVCDEC_SUCCESS;
}
/* TO BE OPTIMIZED !!!!! */
/* Decode coeff_token for a 4x4 residual block (spec Table 9-5), producing
 * TotalCoeff (number of nonzero transform coefficients, 0..16) and
 * TrailingOnes (number of trailing +/-1 coefficients, 0..3).
 * nC is the predicted nonzero-coefficient count from neighboring blocks and
 * selects one of the VLC tables; for nC >= 8 a 6-bit fixed-length code is
 * used instead.  Always returns AVCDEC_SUCCESS; out-of-range values are
 * clamped (see "_ERROR" below). */
AVCDec_Status ce_TotalCoeffTrailingOnes(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff, int nC)
{
#ifdef PV_NO_CLZ
    /* PV_NO_CLZ build: direct lookup tables for targets without a
     * count-leading-zeros instruction.  Each row is
     * {TrailingOnes, TotalCoeff, code length in bits}; rows marked
     * "error" pad index ranges that no valid bitstream reaches. */
    const static uint8 TotCofNTrail1[75][3] = {{0, 0, 16}/*error */, {0, 0, 16}/*error */, {1, 13, 15}, {1, 13, 15}, {0, 16, 16}, {2, 16, 16}, {1, 16, 16}, {0, 15, 16},
        {3, 16, 16}, {2, 15, 16}, {1, 15, 16}, {0, 14, 16}, {3, 15, 16}, {2, 14, 16}, {1, 14, 16}, {0, 13, 16},
        {3, 14, 15}, {2, 13, 15}, {1, 12, 15}, {0, 12, 15}, {3, 13, 15}, {2, 12, 15}, {1, 11, 15}, {0, 11, 15},
        {3, 12, 14}, {2, 11, 14}, {1, 10, 14}, {0, 10, 14}, {3, 11, 14}, {2, 10, 14}, {1, 9, 14}, {0, 9, 14},
        {0, 8, 13}, {2, 9, 13}, {1, 8, 13}, {0, 7, 13}, {3, 10, 13}, {2, 8, 13}, {1, 7, 13}, {0, 6, 13},
        {3, 9, 11}, {2, 7, 11}, {1, 6, 11}, {0, 5, 11}, {3, 8, 10},
        {2, 6, 10}, {1, 5, 10}, {0, 4, 10}, {3, 7, 9}, {2, 5, 9}, {1, 4, 9}, {0, 3, 9}, {3, 6, 8},
        {2, 4, 8}, {1, 3, 8}, {0, 2, 8}, {3, 5, 7}, {2, 3, 7}, {3, 4, 6}, {3, 4, 6}, {1, 2, 6},
        {1, 2, 6}, {0, 1, 6}, {0, 1, 6}, {3, 3, 5}, {3, 3, 5}, {3, 3, 5}, {3, 3, 5}, {2, 2, 3},
        {1, 1, 2}, {1, 1, 2}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}
    };
    const static uint8 TotCofNTrail2[84][3] = {{0, 0, 14 /* error */}, {0, 0, 14/*error */}, {3, 15, 13}, {3, 15, 13}, {3, 16, 14}, {2, 16, 14}, {1, 16, 14}, {0, 16, 14},
        {1, 15, 14}, {0, 15, 14}, {2, 15, 14}, {1, 14, 14}, {2, 14, 13}, {2, 14, 13}, {0, 14, 13}, {0, 14, 13},
        {3, 14, 13}, {2, 13, 13}, {1, 13, 13}, {0, 13, 13}, {3, 13, 13}, {2, 12, 13}, {1, 12, 13}, {0, 12, 13},
        {0, 11, 12}, {2, 11, 12}, {1, 11, 12}, {0, 10, 12}, {3, 12, 12}, {2, 10, 12}, {1, 10, 12}, {0, 9, 12},
        {3, 11, 11}, {2, 9, 11}, {1, 9, 11}, {0, 8, 11}, {3, 10, 11}, {2, 8, 11}, {1, 8, 11}, {0, 7, 11},
        {3, 9, 9}, {2, 7, 9}, {1, 7, 9}, {0, 6, 9}, {0, 5, 8}, {0, 5, 8}, {2, 6, 8}, {2, 6, 8},
        {1, 6, 8}, {1, 6, 8}, {0, 4, 8}, {0, 4, 8}, {3, 8, 7}, {2, 5, 7}, {1, 5, 7}, {0, 3, 7},
        {3, 7, 6}, {3, 7, 6}, {2, 4, 6}, {2, 4, 6}, {1, 4, 6}, {1, 4, 6}, {0, 2, 6}, {0, 2, 6},
        {3, 6, 6}, {2, 3, 6}, {1, 3, 6}, {0, 1, 6}, {3, 5, 5}, {3, 5, 5}, {1, 2, 5}, {1, 2, 5},
        {3, 4, 4}, {3, 3, 4}, {2, 2, 3}, {2, 2, 3}, {1, 1, 2}, {1, 1, 2}, {1, 1, 2}, {1, 1, 2},
        {0, 0, 2}, {0, 0, 2}, {0, 0, 2}, {0, 0, 2}
    };
    const static uint8 TotCofNTrail3[64][3] = {{0, 0, 10/*error*/}, {0, 16, 10}, {3, 16, 10}, {2, 16, 10}, {1, 16, 10}, {0, 15, 10}, {3, 15, 10},
        {2, 15, 10}, {1, 15, 10}, {0, 14, 10}, {3, 14, 10}, {2, 14, 10}, {1, 14, 10}, {0, 13, 10}, {1, 13, 9},
        {1, 13, 9}, {0, 12, 9}, {2, 13, 9}, {1, 12, 9}, {0, 11, 9}, {3, 13, 9}, {2, 12, 9}, {1, 11, 9},
        {0, 10, 9}, {3, 12, 8}, {2, 11, 8}, {1, 10, 8}, {0, 9, 8}, {3, 11, 8}, {2, 10, 8}, {1, 9, 8},
        {0, 8, 8}, {0, 7, 7}, {0, 6, 7}, {2, 9, 7}, {0, 5, 7}, {3, 10, 7}, {2, 8, 7}, {1, 8, 7},
        {0, 4, 7}, {0, 3, 6}, {2, 7, 6}, {1, 7, 6}, {0, 2, 6}, {3, 9, 6}, {2, 6, 6}, {1, 6, 6},
        {0, 1, 6}, {1, 5, 5}, {2, 5, 5}, {1, 4, 5}, {2, 4, 5}, {1, 3, 5}, {3, 8, 5}, {2, 3, 5},
        {1, 2, 5}, {3, 7, 4}, {3, 6, 4}, {3, 5, 4}, {3, 4, 4}, {3, 3, 4}, {2, 2, 4}, {1, 1, 4},
        {0, 0, 4}
    };
#endif
    uint code;
#ifdef PV_NO_CLZ
    uint8 *pcode;
    if (nC < 2)
    {
        /* Peek 16 bits; the magnitude of the peeked value encodes the
         * leading-zero count, so each threshold below selects the table
         * slice that holds codewords of one length. */
        BitstreamShowBits(stream, 16, &code);
        if (code >= 8192)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>13)+65+2][0]);
        }
        else if (code >= 2048)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>9)+50+2][0]);
        }
        else if (code >= 1024)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>8)+46+2][0]);
        }
        else if (code >= 512)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>7)+42+2][0]);
        }
        else if (code >= 256)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>6)+38+2][0]);
        }
        else if (code >= 128)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>5)+34+2][0]);
        }
        else if (code >= 64)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>3)+22+2][0]);
        }
        else if (code >= 32)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>2)+14+2][0]);
        }
        else if (code >= 16)
        {
            pcode = (uint8*) & (TotCofNTrail1[(code>>1)+6+2][0]);
        }
        else
        {
            pcode = (uint8*) & (TotCofNTrail1[(code-2)+2][0]);
        }
        *TrailingOnes = pcode[0];
        *TotalCoeff = pcode[1];
        /* Consume only the actual codeword length stored in the table. */
        BitstreamFlushBits(stream, pcode[2]);
    }
    else if (nC < 4)
    {
        /* Second VLC table: codewords are at most 14 bits long. */
        BitstreamShowBits(stream, 14, &code);
        if (code >= 4096)
        {
            pcode = (uint8*) & (TotCofNTrail2[(code>>10)+66+2][0]);
        }
        else if (code >= 2048)
        {
            pcode = (uint8*) & (TotCofNTrail2[(code>>8)+54+2][0]);
        }
        else if (code >= 512)
        {
            pcode = (uint8*) & (TotCofNTrail2[(code>>7)+46+2][0]);
        }
        else if (code >= 128)
        {
            pcode = (uint8*) & (TotCofNTrail2[(code>>5)+34+2][0]);
        }
        else if (code >= 64)
        {
            pcode = (uint8*) & (TotCofNTrail2[(code>>3)+22+2][0]);
        }
        else if (code >= 32)
        {
            pcode = (uint8*) & (TotCofNTrail2[(code>>2)+14+2][0]);
        }
        else if (code >= 16)
        {
            pcode = (uint8*) & (TotCofNTrail2[(code>>1)+6+2][0]);
        }
        else
        {
            pcode = (uint8*) & (TotCofNTrail2[code-2+2][0]);
        }
        *TrailingOnes = pcode[0];
        *TotalCoeff = pcode[1];
        BitstreamFlushBits(stream, pcode[2]);
    }
    else if (nC < 8)
    {
        /* Third VLC table: codewords are at most 10 bits long. */
        BitstreamShowBits(stream, 10, &code);
        if (code >= 512)
        {
            pcode = (uint8*) & (TotCofNTrail3[(code>>6)+47+1][0]);
        }
        else if (code >= 256)
        {
            pcode = (uint8*) & (TotCofNTrail3[(code>>5)+39+1][0]);
        }
        else if (code >= 128)
        {
            pcode = (uint8*) & (TotCofNTrail3[(code>>4)+31+1][0]);
        }
        else if (code >= 64)
        {
            pcode = (uint8*) & (TotCofNTrail3[(code>>3)+23+1][0]);
        }
        else if (code >= 32)
        {
            pcode = (uint8*) & (TotCofNTrail3[(code>>2)+15+1][0]);
        }
        else if (code >= 16)
        {
            pcode = (uint8*) & (TotCofNTrail3[(code>>1)+7+1][0]);
        }
        else
        {
            pcode = (uint8*) & (TotCofNTrail3[code-1+1][0]);
        }
        *TrailingOnes = pcode[0];
        *TotalCoeff = pcode[1];
        BitstreamFlushBits(stream, pcode[2]);
    }
    else
    {
        /* read 6 bit FLC */
        /* nC >= 8: coeff_token is a fixed 6-bit code,
         * TotalCoeff = (code >> 2) + 1, TrailingOnes = code & 3. */
        BitstreamReadBits(stream, 6, &code);
        *TrailingOnes = code & 3;
        *TotalCoeff = (code >> 2) + 1;
        if (*TotalCoeff > 16)
        {
            *TotalCoeff = 16; // _ERROR
        }
        if (code == 3)
        {
            /* codeword 000011 is the special "zero coefficients" case */
            *TrailingOnes = 0;
            (*TotalCoeff)--;
        }
    }
#else
    /* CLZ build: a single count-leading-zeros on the peeked 16 bits plus a
     * per-table {shift, offset} pair replaces the threshold cascade above. */
    const VLCNumCoeffTrail *ptr;
    const ShiftOffset *ptr_indx;
    uint temp, leading_zeros = 0;
    if (nC < 8)
    {
        BitstreamShowBits(stream, 16, &code);
        temp = code | 1;   /* force a 1 bit so CLZ is bounded at 16 */
        PV_CLZ(leading_zeros, temp)
        temp = nC_table[nC];   /* map nC to the VLC table index */
        ptr_indx = &NumCoeffTrailOnes_indx[temp][leading_zeros];
        ptr = &NumCoeffTrailOnes[temp][(code >> ptr_indx->shift) + ptr_indx->offset];
        *TrailingOnes = ptr->trailing;
        *TotalCoeff = ptr->total_coeff;
        BitstreamFlushBits(stream, ptr->length);
    }
    else
    {
        /* read 6 bit FLC */
        /* Same fixed-length fallback as the PV_NO_CLZ build. */
        BitstreamReadBits(stream, 6, &code);
        *TrailingOnes = code & 3;
        *TotalCoeff = (code >> 2) + 1;
        if (*TotalCoeff > 16)
        {
            *TotalCoeff = 16; // _ERROR
        }
        if (code == 3)
        {
            /* codeword 000011 is the special "zero coefficients" case */
            *TrailingOnes = 0;
            (*TotalCoeff)--;
        }
    }
#endif
    return AVCDEC_SUCCESS;
}
/* Decode coeff_token for a chroma DC 2x2 block (the "nC == -1" VLC of
 * Table 9-5): yields TotalCoeff (0..4) and TrailingOnes (0..3).
 * Returns the status of the 8-bit peek; the flush status is not checked. */
AVCDec_Status ce_TotalCoeffTrailingOnesChromaDC(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff)
{
    /* Rows are {TrailingOnes, TotalCoeff, code length in bits}. */
    const static uint8 TotCofNTrail5[21][3] =
    {
        {3, 4, 7}, {3, 4, 7}, {2, 4, 8}, {1, 4, 8}, {2, 3, 7}, {2, 3, 7}, {1, 3, 7},
        {1, 3, 7}, {0, 4, 6}, {0, 3, 6}, {0, 2, 6}, {3, 3, 6}, {1, 2, 6}, {0, 1, 6},
        {2, 2, 3}, {0, 0, 2}, {0, 0, 2}, {1, 1, 1}, {1, 1, 1}, {1, 1, 1}, {1, 1, 1}
    };
    uint bits;
    int row;
    AVCDec_Status status;

    /* Peek 8 bits; the value's magnitude selects the table slice that
     * holds codewords of the matching length. */
    status = BitstreamShowBits(stream, 8, &bits);
    if (bits >= 32)
    {
        row = (int)(bits >> 5) + 13;
    }
    else if (bits >= 8)
    {
        row = (int)(bits >> 2) + 6;
    }
    else
    {
        row = (int)bits;
    }
    *TrailingOnes = TotCofNTrail5[row][0];
    *TotalCoeff = TotCofNTrail5[row][1];
    /* Consume only the real codeword length. */
    BitstreamFlushBits(stream, TotCofNTrail5[row][2]);
    return status;
}
/* Decode level_prefix (Table 9-6): the number of zero bits preceding the
 * first '1' in the next (up to) 16 bits.  Consumes prefix + terminator. */
AVCDec_Status ce_LevelPrefix(AVCDecBitstream *stream, uint *code)
{
    uint window;
    uint zeros = 0;

    BitstreamShowBits(stream, 16, &window);
    window |= 1;   /* guarantee a set bit so the CLZ result is bounded */
    PV_CLZ(zeros, window)
    BitstreamFlushBits(stream, zeros + 1);   /* zeros plus the '1' bit */
    *code = zeros;
    return AVCDEC_SUCCESS;
}
/* see Table 9-7 and 9-8 */
/* Decode total_zeros for a 4x4 block: the number of zeros located before
 * the last nonzero coefficient, coded with a different VLC table for each
 * TotalCoeff value (1..15).  *code receives the decoded total_zeros.
 * Each table row below is {total_zeros value, code length in bits}. */
AVCDec_Status ce_TotalZeros(AVCDecBitstream *stream, int *code, int TotalCoeff)
{
    const static uint8 TotZero1[28][2] = {{15, 9}, {14, 9}, {13, 9}, {12, 8},
        {12, 8}, {11, 8}, {11, 8}, {10, 7}, {9, 7}, {8, 6}, {8, 6}, {7, 6}, {7, 6}, {6, 5}, {6, 5},
        {6, 5}, {6, 5}, {5, 5}, {5, 5}, {5, 5}, {5, 5}, {4, 4}, {3, 4},
        {2, 3}, {2, 3}, {1, 3}, {1, 3}, {0, 1}
    };
    /* Tables for TotalCoeff == 2 and == 3 share one array. */
    const static uint8 TotZero2n3[2][18][2] = {{{14, 6}, {13, 6}, {12, 6}, {11, 6},
        {10, 5}, {10, 5}, {9, 5}, {9, 5}, {8, 4}, {7, 4}, {6, 4}, {5, 4}, {4, 3}, {4, 3},
        {3, 3}, {2, 3}, {1, 3}, {0, 3}},
        /*const static uint8 TotZero3[18][2]=*/{{13, 6}, {11, 6}, {12, 5}, {12, 5}, {10, 5},
        {10, 5}, {9, 5}, {9, 5}, {8, 4}, {5, 4}, {4, 4}, {0, 4}, {7, 3}, {7, 3}, {6, 3}, {3, 3},
        {2, 3}, {1, 3}}
    };
    const static uint8 TotZero4[17][2] = {{12, 5}, {11, 5}, {10, 5}, {0, 5}, {9, 4},
        {9, 4}, {7, 4}, {7, 4}, {3, 4}, {3, 4}, {2, 4}, {2, 4}, {8, 3}, {6, 3}, {5, 3}, {4, 3}, {1, 3}
    };
    const static uint8 TotZero5[13][2] = {{11, 5}, {9, 5}, {10, 4}, {8, 4}, {2, 4},
        {1, 4}, {0, 4}, {7, 3}, {7, 3}, {6, 3}, {5, 3}, {4, 3}, {3, 3}
    };
    /* Tables for TotalCoeff == 6..10 share one array (rows padded with 0s). */
    const static uint8 TotZero6to10[5][15][2] = {{{10, 6}, {0, 6}, {1, 5}, {1, 5}, {8, 4},
        {8, 4}, {8, 4}, {8, 4}, {9, 3}, {7, 3}, {6, 3}, {5, 3}, {4, 3}, {3, 3}, {2, 3}},
        /*const static uint8 TotZero7[15][2]=*/{{9, 6}, {0, 6}, {1, 5}, {1, 5}, {7, 4},
        {7, 4}, {7, 4}, {7, 4}, {8, 3}, {6, 3}, {4, 3}, {3, 3}, {2, 3}, {5, 2}, {5, 2}},
        /*const static uint8 TotZero8[15][2]=*/{{8, 6}, {0, 6}, {2, 5}, {2, 5}, {1, 4},
        {1, 4}, {1, 4}, {1, 4}, {7, 3}, {6, 3}, {3, 3}, {5, 2}, {5, 2}, {4, 2}, {4, 2}},
        /*const static uint8 TotZero9[15][2]=*/{{1, 6}, {0, 6}, {7, 5}, {7, 5}, {2, 4},
        {2, 4}, {2, 4}, {2, 4}, {5, 3}, {6, 2}, {6, 2}, {4, 2}, {4, 2}, {3, 2}, {3, 2}},
        /*const static uint8 TotZero10[11][2]=*/{{1, 5}, {0, 5}, {6, 4}, {6, 4}, {2, 3},
        {2, 3}, {2, 3}, {2, 3}, {5, 2}, {4, 2}, {3, 2}, {0, 0}, {0, 0}, {0, 0}, {0, 0}}
    };
    const static uint8 TotZero11[7][2] = {{0, 4}, {1, 4}, {2, 3}, {2, 3}, {3, 3}, {5, 3}, {4, 1}};
    /* TotalCoeff == 12..15: small tables indexed by position of the first
     * '1' bit in the peeked window (see the final else branch). */
    const static uint8 TotZero12to15[4][5][2] =
    {
        {{3, 1}, {2, 2}, {4, 3}, {1, 4}, {0, 4}},
        {{2, 1}, {3, 2}, {1, 3}, {0, 3}, {0, 0}},
        {{2, 1}, {1, 2}, {0, 2}, {0, 0}, {0, 0}},
        {{1, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}}
    };
    uint temp, mask;
    int indx;
    uint8 *pcode;
    /* In every branch: peek enough bits for the longest codeword of the
     * chosen table, bucket by magnitude (i.e. by leading-zero count) to
     * locate the entry, then flush only the entry's real length. */
    if (TotalCoeff == 1)
    {
        BitstreamShowBits(stream, 9, &temp);
        if (temp >= 256)
        {
            pcode = (uint8*) & (TotZero1[27][0]);
        }
        else if (temp >= 64)
        {
            pcode = (uint8*) & (TotZero1[(temp>>5)+19][0]);
        }
        else if (temp >= 8)
        {
            pcode = (uint8*) & (TotZero1[(temp>>2)+5][0]);
        }
        else
        {
            pcode = (uint8*) & (TotZero1[temp-1][0]);
        }
    }
    else if (TotalCoeff == 2 || TotalCoeff == 3)
    {
        BitstreamShowBits(stream, 6, &temp);
        if (temp >= 32)
        {
            pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][(temp>>3)+10][0]);
        }
        else if (temp >= 8)
        {
            pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][(temp>>2)+6][0]);
        }
        else
        {
            pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][temp][0]);
        }
    }
    else if (TotalCoeff == 4)
    {
        BitstreamShowBits(stream, 5, &temp);
        if (temp >= 12)
        {
            pcode = (uint8*) & (TotZero4[(temp>>2)+9][0]);
        }
        else
        {
            pcode = (uint8*) & (TotZero4[temp][0]);
        }
    }
    else if (TotalCoeff == 5)
    {
        BitstreamShowBits(stream, 5, &temp);
        if (temp >= 16)
        {
            pcode = (uint8*) & (TotZero5[(temp>>2)+5][0]);
        }
        else if (temp >= 2)
        {
            pcode = (uint8*) & (TotZero5[(temp>>1)+1][0]);
        }
        else
        {
            pcode = (uint8*) & (TotZero5[temp][0]);
        }
    }
    else if (TotalCoeff >= 6 && TotalCoeff <= 10)
    {
        /* Max codeword length is 6 bits for TotalCoeff 6..9, 5 bits for 10. */
        if (TotalCoeff == 10)
        {
            BitstreamShowBits(stream, 5, &temp);
        }
        else
        {
            BitstreamShowBits(stream, 6, &temp);
        }
        if (temp >= 8)
        {
            pcode = (uint8*) & (TotZero6to10[TotalCoeff-6][(temp>>3)+7][0]);
        }
        else
        {
            pcode = (uint8*) & (TotZero6to10[TotalCoeff-6][temp][0]);
        }
    }
    else if (TotalCoeff == 11)
    {
        BitstreamShowBits(stream, 4, &temp);
        if (temp >= 8)
        {
            pcode = (uint8*) & (TotZero11[6][0]);
        }
        else if (temp >= 4)
        {
            pcode = (uint8*) & (TotZero11[(temp>>1)+2][0]);
        }
        else
        {
            pcode = (uint8*) & (TotZero11[temp][0]);
        }
    }
    else
    {
        /* TotalCoeff 12..15: codes are a run of zeros ended by a '1' (or
         * all zeros); the position of the first '1' bit is the table index. */
        BitstreamShowBits(stream, (16 - TotalCoeff), &temp);
        mask = 1 << (15 - TotalCoeff);
        indx = 0;
        while ((temp&mask) == 0 && indx < (16 - TotalCoeff)) /* search location of 1 bit */
        {
            mask >>= 1;
            indx++;
        }
        pcode = (uint8*) & (TotZero12to15[TotalCoeff-12][indx]);
    }
    *code = pcode[0];
    BitstreamFlushBits(stream, pcode[1]);
    return AVCDEC_SUCCESS;
}
/* Decode total_zeros for a chroma DC 2x2 block (Table 9-9).  TotalCoeff is
 * 1..3; a 3-bit peek fully indexes each table row {value, code length}. */
AVCDec_Status ce_TotalZerosChromaDC(AVCDecBitstream *stream, int *code, int TotalCoeff)
{
    const static uint8 TotZeroChrom1to3[3][8][2] =
    {
        {{3, 3}, {2, 3}, {1, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
        {{2, 2}, {2, 2}, {1, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
        {{1, 1}, {1, 1}, {1, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
    };
    uint bits;
    const uint8 *entry;

    BitstreamShowBits(stream, 3, &bits);
    entry = TotZeroChrom1to3[TotalCoeff-1][bits];
    *code = entry[0];
    /* Consume only the real codeword length. */
    BitstreamFlushBits(stream, entry[1]);
    return AVCDEC_SUCCESS;
}
/* Decode run_before (Table 9-10): the number of zeros preceding the current
 * nonzero coefficient, given how many zeros remain (zerosLeft >= 1).
 * zerosLeft <= 6 uses per-count VLC tables; zerosLeft > 6 uses a shared
 * 3-bit prefix with an escape for runs of 7 or more. */
AVCDec_Status ce_RunBefore(AVCDecBitstream *stream, int *code, int zerosLeft)
{
    const static int codlen[6] = {1, 2, 2, 3, 3, 3}; /* num bits to read */
    /* Rows are {run_before value, code length}; 0-length rows are padding. */
    const static uint8 RunBeforeTab[6][8][2] = {{{1, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
        {{2, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
        {{3, 2}, {2, 2}, {1, 2}, {0, 2}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
        {{4, 3}, {3, 3}, {2, 2}, {2, 2}, {1, 2}, {1, 2}, {0, 2}, {0, 2}},
        {{5, 3}, {4, 3}, {3, 3}, {2, 3}, {1, 2}, {1, 2}, {0, 2}, {0, 2}},
        {{1, 3}, {2, 3}, {4, 3}, {3, 3}, {6, 3}, {5, 3}, {0, 2}, {0, 2}}
    };
    uint bits;

    if (zerosLeft <= 6)
    {
        const uint8 *entry;

        BitstreamShowBits(stream, codlen[zerosLeft-1], &bits);
        entry = RunBeforeTab[zerosLeft-1][bits];
        *code = entry[0];
        BitstreamFlushBits(stream, entry[1]);
    }
    else
    {
        BitstreamReadBits(stream, 3, &bits);
        if (bits != 0)
        {
            /* Non-zero 3-bit prefix maps directly to run_before 1..6. */
            *code = 7 - (int)bits;
        }
        else
        {
            /* Escape: run >= 7 is coded as extra zeros before a '1'. */
            int zeros = 0;

            BitstreamShowBits(stream, 9, &bits);
            bits <<= 7;
            bits |= 1;   /* bound the CLZ below */
            PV_CLZ(zeros, bits)
            *code = 7 + zeros;
            BitstreamFlushBits(stream, zeros + 1);
        }
    }
    return AVCDEC_SUCCESS;
}

View File

@@ -0,0 +1,9 @@
THIS IS NOT A GRANT OF PATENT RIGHTS.
Google makes no representation or warranty that the codecs for which
source code is made available hereunder are unencumbered by
third-party patents. Those intending to use this source code in
hardware or software products are advised that implementations of
these codecs, including in open source software or shareware, may
require patent licenses from the relevant patent holders.

View File

@@ -51,10 +51,6 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \ LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_UNUSED_ARG=
LOCAL_SHARED_LIBRARIES := \
libstagefright \
libutils
LOCAL_MODULE := libstagefright_mp3dec LOCAL_MODULE := libstagefright_mp3dec
include $(BUILD_STATIC_LIBRARY) include $(BUILD_STATIC_LIBRARY)

View File

@@ -0,0 +1,86 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef AVC_DECODER_H_
#define AVC_DECODER_H_
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <utils/Vector.h>
struct tagAVCHandle;
namespace android {
struct MediaBufferGroup;
// Software H.264/AVC video decoder wrapped as a stagefright MediaSource.
// Pulls encoded access units from mSource and produces decoded frames via
// read().  Also observes MediaBuffer returns so decoded frames can be
// recycled (see signalBufferReturned / releaseFrames).
struct AVCDecoder : public MediaSource,
    public MediaBufferObserver {
    AVCDecoder(const sp<MediaSource> &source);

    virtual status_t start(MetaData *params);
    virtual status_t stop();

    virtual sp<MetaData> getFormat();

    virtual status_t read(
        MediaBuffer **buffer, const ReadOptions *options);

    // MediaBufferObserver: called when a consumer releases a decoded frame.
    virtual void signalBufferReturned(MediaBuffer *buffer);

protected:
    virtual ~AVCDecoder();

private:
    sp<MediaSource> mSource;           // upstream supplier of encoded data
    bool mStarted;
    sp<MetaData> mFormat;              // output format advertised by getFormat()
    Vector<MediaBuffer *> mCodecSpecificData;  // SPS/PPS blobs fed to the codec
    tagAVCHandle *mHandle;             // opaque handle of the underlying AVC core
    Vector<MediaBuffer *> mFrames;     // decoded-frame buffers bound to the codec
    MediaBuffer *mInputBuffer;
    int64_t mAnchorTimeUs;
    int64_t mNumSamplesOutput;

    void addCodecSpecificData(const uint8_t *data, size_t size);

    // Static trampolines: the C decoder core calls back through these with
    // 'userData' pointing at the AVCDecoder instance.
    static int32_t ActivateSPSWrapper(
        void *userData, unsigned int sizeInMbs, unsigned int numBuffers);
    static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv);
    static void UnbindFrame(void *userData, int32_t index);

    int32_t activateSPS(
        unsigned int sizeInMbs, unsigned int numBuffers);
    int32_t bindFrame(int32_t index, uint8_t **yuv);
    void releaseFrames();

    // Not copyable.
    AVCDecoder(const AVCDecoder &);
    AVCDecoder &operator=(const AVCDecoder &);
};
} // namespace android
#endif // AVC_DECODER_H_