Code refactoring of macros related to reference frame numbers

We have renamed the following macros to avoid naming confusion:

REFS_PER_FRAME --> INTER_REFS_PER_FRAME
(= ALTREF_FRAME - LAST_FRAME + 1)
MAX_REF_FRAMES --> TOTAL_REFS_PER_FRAME
(= ALTREF_FRAME - INTRA_FRAME + 1)

INTER_REFS_PER_FRAME specifies the maximum number of reference frames
that each Inter frame may use.
TOTAL_REFS_PER_FRAME equals INTER_REFS_PER_FRAME + 1, as it
additionally counts INTRA_FRAME.
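
For illustration only (not part of this patch): a minimal, standalone
sketch of how the two macros relate under CONFIG_EXT_REFS. The
frame-index values are taken from the diff below, and INTRA_FRAME is
assumed to be 0, as it is defined elsewhere in the codebase.

  #include <stdio.h>

  /* Reference frame indices under CONFIG_EXT_REFS (see the diff below). */
  #define INTRA_FRAME 0
  #define LAST_FRAME 1
  #define LAST2_FRAME 2
  #define LAST3_FRAME 3
  #define GOLDEN_FRAME 4
  #define BWDREF_FRAME 5
  #define ALTREF_FRAME 6

  /* Inter references only: LAST_FRAME .. ALTREF_FRAME. */
  #define INTER_REFS_PER_FRAME (ALTREF_FRAME - LAST_FRAME + 1)
  /* All reference types, i.e. the above plus INTRA_FRAME. */
  #define TOTAL_REFS_PER_FRAME (ALTREF_FRAME - INTRA_FRAME + 1)

  int main(void) {
    printf("INTER_REFS_PER_FRAME = %d\n", INTER_REFS_PER_FRAME); /* 6 */
    printf("TOTAL_REFS_PER_FRAME = %d\n", TOTAL_REFS_PER_FRAME); /* 7 */
    return 0;
  }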

Further, on the encoder side, REF_FRAMES specifies the maximum number
of reference frames that the encoder may store, so REF_FRAMES is
usually larger than INTER_REFS_PER_FRAME. For example, in the ext-refs
experiment, REF_FRAMES == 8, which allows the encoder to store up to 8
reference frames in its buffer, whereas INTER_REFS_PER_FRAME == 6,
which allows each Inter frame to use at most 6 of those 8 buffered
frames as its references. Hence, to keep open the possibility of
storing more reference frames in future patches, we modified a couple
of array sizes to accommodate the case where the number of buffered
reference frames is not equal to the number of references actually
used by each Inter frame.
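
Again for illustration only: a hypothetical, standalone sketch of how
the three counts diverge at the encoder when ext-refs is enabled, and
which count sizes which kind of array. The array names below merely
echo fields touched in this patch; they are not the encoder's actual
declarations.

  #include <stdio.h>

  #define REF_FRAMES_LOG2 3
  #define REF_FRAMES (1 << REF_FRAMES_LOG2)  /* 8: frames the encoder may buffer */
  #define INTER_REFS_PER_FRAME 6             /* refs one Inter frame may use */
  #define TOTAL_REFS_PER_FRAME (INTER_REFS_PER_FRAME + 1) /* + INTRA_FRAME */

  int main(void) {
    /* Indexed by reference type (INTRA_FRAME plus the 6 inter types):
     * sized TOTAL_REFS_PER_FRAME. */
    int ref_deltas[TOTAL_REFS_PER_FRAME] = { 0 };
    /* The references one Inter frame actually uses: sized
     * INTER_REFS_PER_FRAME. */
    int frame_refs[INTER_REFS_PER_FRAME] = { 0 };
    /* Buffer-pool bookkeeping: sized REF_FRAMES, which may exceed both of
     * the above. */
    int ref_frame_map[REF_FRAMES] = { 0 };

    printf("buffered = %d, usable per Inter frame = %d, ref types = %d\n",
           (int)(sizeof(ref_frame_map) / sizeof(ref_frame_map[0])),
           (int)(sizeof(frame_refs) / sizeof(frame_refs[0])),
           (int)(sizeof(ref_deltas) / sizeof(ref_deltas[0])));
    return 0;
  }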

Change-Id: I19e42ef608946cc76ebfd3e965a05f4b9b93a0b3
Author: Zoe Liu
Date: 2016-08-03 14:46:43 -07:00
Parent: 108df24d2a
Commit: 1af28f0230
17 changed files with 147 additions and 155 deletions


@ -394,22 +394,20 @@ typedef TX_SIZE TXFM_CONTEXT;
#define LAST_FRAME 1
#if CONFIG_EXT_REFS
#define LAST2_FRAME 2
#define LAST3_FRAME 3
#define GOLDEN_FRAME 4
#define BWDREF_FRAME 5
#define ALTREF_FRAME 6
#define MAX_REF_FRAMES 7
#define LAST_REF_FRAMES (LAST3_FRAME - LAST_FRAME + 1)
#else
#define GOLDEN_FRAME 2
#define ALTREF_FRAME 3
#define MAX_REF_FRAMES 4
#endif // CONFIG_EXT_REFS
#define INTER_REFS_PER_FRAME (ALTREF_FRAME - LAST_FRAME + 1)
#define TOTAL_REFS_PER_FRAME (ALTREF_FRAME - INTRA_FRAME + 1)
#define FWD_REFS (GOLDEN_FRAME - LAST_FRAME + 1)
#define FWD_RF_OFFSET(ref) (ref - LAST_FRAME)
#if CONFIG_EXT_REFS
@ -418,15 +416,15 @@ typedef TX_SIZE TXFM_CONTEXT;
#else
#define BWD_REFS 1
#define BWD_RF_OFFSET(ref) (ref - ALTREF_FRAME)
#endif
#endif // CONFIG_EXT_REFS
#define SINGLE_REFS (FWD_REFS + BWD_REFS)
#define COMP_REFS (FWD_REFS * BWD_REFS)
#if CONFIG_REF_MV
#define MODE_CTX_REF_FRAMES (MAX_REF_FRAMES + COMP_REFS)
#define MODE_CTX_REF_FRAMES (TOTAL_REFS_PER_FRAME + COMP_REFS)
#else
#define MODE_CTX_REF_FRAMES MAX_REF_FRAMES
#define MODE_CTX_REF_FRAMES TOTAL_REFS_PER_FRAME
#endif
#if CONFIG_SUPERTX


@ -300,7 +300,7 @@ void vp10_loop_filter_frame_init(VP10_COMMON *cm, int default_filt_lvl) {
const int intra_lvl = lvl_seg + lf->ref_deltas[INTRA_FRAME] * scale;
lfi->lvl[seg_id][INTRA_FRAME][0] = clamp(intra_lvl, 0, MAX_LOOP_FILTER);
for (ref = LAST_FRAME; ref < MAX_REF_FRAMES; ++ref) {
for (ref = LAST_FRAME; ref < TOTAL_REFS_PER_FRAME; ++ref) {
for (mode = 0; mode < MAX_MODE_LF_DELTAS; ++mode) {
const int inter_lvl = lvl_seg + lf->ref_deltas[ref] * scale
+ lf->mode_deltas[mode] * scale;


@ -46,8 +46,8 @@ struct loopfilter {
// 0 = Intra, Last, Last2+Last3(CONFIG_EXT_REFS),
// GF, BRF(CONFIG_EXT_REFS), ARF
signed char ref_deltas[MAX_REF_FRAMES];
signed char last_ref_deltas[MAX_REF_FRAMES];
signed char ref_deltas[TOTAL_REFS_PER_FRAME];
signed char last_ref_deltas[TOTAL_REFS_PER_FRAME];
// 0 = ZERO_MV, MV
signed char mode_deltas[MAX_MODE_LF_DELTAS];
@ -64,7 +64,7 @@ typedef struct {
typedef struct {
loop_filter_thresh lfthr[MAX_LOOP_FILTER + 1];
uint8_t lvl[MAX_SEGMENTS][MAX_REF_FRAMES][MAX_MODE_LF_DELTAS];
uint8_t lvl[MAX_SEGMENTS][TOTAL_REFS_PER_FRAME][MAX_MODE_LF_DELTAS];
} loop_filter_info_n;
// This structure holds bit masks for all 8x8 blocks in a 64x64 region.


@ -262,7 +262,7 @@ static INLINE int vp10_nmv_ctx(const uint8_t ref_mv_count,
static INLINE int8_t vp10_ref_frame_type(const MV_REFERENCE_FRAME *const rf) {
if (rf[1] > INTRA_FRAME) {
return MAX_REF_FRAMES + FWD_RF_OFFSET(rf[0]) +
return TOTAL_REFS_PER_FRAME + FWD_RF_OFFSET(rf[0]) +
BWD_RF_OFFSET(rf[1]) * FWD_REFS;
}
@ -288,13 +288,14 @@ static MV_REFERENCE_FRAME ref_frame_map[COMP_REFS][2] = {
static INLINE void vp10_set_ref_frame(MV_REFERENCE_FRAME *rf,
int8_t ref_frame_type) {
if (ref_frame_type >= MAX_REF_FRAMES) {
rf[0] = ref_frame_map[ref_frame_type - MAX_REF_FRAMES][0];
rf[1] = ref_frame_map[ref_frame_type - MAX_REF_FRAMES][1];
if (ref_frame_type >= TOTAL_REFS_PER_FRAME) {
rf[0] = ref_frame_map[ref_frame_type - TOTAL_REFS_PER_FRAME][0];
rf[1] = ref_frame_map[ref_frame_type - TOTAL_REFS_PER_FRAME][1];
} else {
rf[0] = ref_frame_type;
rf[1] = NONE;
assert(ref_frame_type > INTRA_FRAME && ref_frame_type < MAX_REF_FRAMES);
assert(ref_frame_type > INTRA_FRAME &&
ref_frame_type < TOTAL_REFS_PER_FRAME);
}
}


@ -30,8 +30,6 @@
extern "C" {
#endif
#define REFS_PER_FRAME (ALTREF_FRAME - LAST_FRAME + 1)
#define REF_FRAMES_LOG2 3
#define REF_FRAMES (1 << REF_FRAMES_LOG2)
@ -162,8 +160,8 @@ typedef struct VP10Common {
// TODO(jkoleszar): could expand active_ref_idx to 4, with 0 as intra, and
// roll new_fb_idx into it.
// Each frame can reference REFS_PER_FRAME buffers
RefBuffer frame_refs[REFS_PER_FRAME];
// Each Inter frame can reference INTER_REFS_PER_FRAME buffers
RefBuffer frame_refs[INTER_REFS_PER_FRAME];
int new_fb_idx;
@ -268,7 +266,7 @@ typedef struct VP10Common {
// a frame decode
REFRESH_FRAME_CONTEXT_MODE refresh_frame_context;
int ref_frame_sign_bias[MAX_REF_FRAMES]; /* Two state 0, 1 */
int ref_frame_sign_bias[TOTAL_REFS_PER_FRAME]; /* Two state 0, 1 */
struct loopfilter lf;
struct segmentation seg;
@ -344,7 +342,7 @@ typedef struct VP10Common {
// each keyframe and not used afterwards
vpx_prob kf_y_prob[INTRA_MODES][INTRA_MODES][INTRA_MODES - 1];
#if CONFIG_GLOBAL_MOTION
Global_Motion_Params global_motion[MAX_REF_FRAMES];
Global_Motion_Params global_motion[TOTAL_REFS_PER_FRAME];
#endif
BLOCK_SIZE sb_size; // Size of the superblock used for this frame


@ -49,7 +49,7 @@ static int is_compound_reference_allowed(const VP10_COMMON *cm) {
int i;
if (frame_is_intra_only(cm))
return 0;
for (i = 1; i < REFS_PER_FRAME; ++i)
for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
return 1;
@ -2066,7 +2066,7 @@ static void setup_loopfilter(VP10_COMMON *cm,
if (lf->mode_ref_delta_update) {
int i;
for (i = 0; i < MAX_REF_FRAMES; i++)
for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
if (vpx_rb_read_bit(rb))
lf->ref_deltas[i] = vpx_rb_read_inv_signed_literal(rb, 6);
@ -2247,7 +2247,7 @@ static void setup_frame_size_with_refs(VP10_COMMON *cm,
int found = 0, i;
int has_valid_ref_frame = 0;
BufferPool *const pool = cm->buffer_pool;
for (i = 0; i < REFS_PER_FRAME; ++i) {
for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
if (vpx_rb_read_bit(rb)) {
YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
width = buf->y_crop_width;
@ -2270,7 +2270,7 @@ static void setup_frame_size_with_refs(VP10_COMMON *cm,
// Check to make sure at least one of frames that this frame references
// has valid dimensions.
for (i = 0; i < REFS_PER_FRAME; ++i) {
for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
RefBuffer *const ref_frame = &cm->frame_refs[i];
has_valid_ref_frame |= valid_ref_frame_size(ref_frame->buf->y_crop_width,
ref_frame->buf->y_crop_height,
@ -2279,7 +2279,7 @@ static void setup_frame_size_with_refs(VP10_COMMON *cm,
if (!has_valid_ref_frame)
vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
"Referenced frame has invalid size");
for (i = 0; i < REFS_PER_FRAME; ++i) {
for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
RefBuffer *const ref_frame = &cm->frame_refs[i];
if (!valid_ref_frame_img_fmt(
ref_frame->buf->bit_depth,
@ -3199,7 +3199,7 @@ static size_t read_uncompressed_header(VP10Decoder *pbi,
read_bitdepth_colorspace_sampling(cm, rb);
pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
for (i = 0; i < REFS_PER_FRAME; ++i) {
for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
cm->frame_refs[i].idx = INVALID_IDX;
cm->frame_refs[i].buf = NULL;
}
@ -3256,7 +3256,7 @@ static size_t read_uncompressed_header(VP10Decoder *pbi,
}
#endif // CONFIG_EXT_REFS
for (i = 0; i < REFS_PER_FRAME; ++i) {
for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
const int ref = vpx_rb_read_literal(rb, REF_FRAMES_LOG2);
const int idx = cm->ref_frame_map[ref];
RefBuffer *const ref_frame = &cm->frame_refs[i];
@ -3270,7 +3270,7 @@ static size_t read_uncompressed_header(VP10Decoder *pbi,
cm->allow_high_precision_mv = vpx_rb_read_bit(rb);
cm->interp_filter = read_interp_filter(rb);
for (i = 0; i < REFS_PER_FRAME; ++i) {
for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
RefBuffer *const ref_buf = &cm->frame_refs[i];
#if CONFIG_VP9_HIGHBITDEPTH
vp10_setup_scale_factors_for_frame(&ref_buf->sf,


@ -301,7 +301,7 @@ static void swap_frame_buffers(VP10Decoder *pbi) {
}
// Invalidate these references until the next frame starts.
for (ref_index = 0; ref_index < REFS_PER_FRAME; ref_index++) {
for (ref_index = 0; ref_index < INTER_REFS_PER_FRAME; ref_index++) {
cm->frame_refs[ref_index].idx = INVALID_IDX;
cm->frame_refs[ref_index].buf = NULL;
}


@ -155,13 +155,13 @@ static INLINE int dec_is_ref_frame_buf(VP10Decoder *const pbi,
RefCntBuffer *frame_buf) {
VP10_COMMON *const cm = &pbi->common;
int i;
for (i = 0; i < REFS_PER_FRAME; ++i) {
for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
RefBuffer *const ref_frame = &cm->frame_refs[i];
if (ref_frame->idx == INVALID_IDX) continue;
if (frame_buf == &cm->buffer_pool->frame_bufs[ref_frame->idx])
break;
}
return (i < REFS_PER_FRAME);
return (i < INTER_REFS_PER_FRAME);
}
#endif // CONFIG_EXT_REFS


@ -180,7 +180,7 @@ void vp10_frameworker_copy_context(VPxWorker *const dst_worker,
(MAX_LOOP_FILTER + 1) * sizeof(loop_filter_thresh));
dst_cm->lf.last_sharpness_level = src_cm->lf.sharpness_level;
dst_cm->lf.filter_level = src_cm->lf.filter_level;
memcpy(dst_cm->lf.ref_deltas, src_cm->lf.ref_deltas, MAX_REF_FRAMES);
memcpy(dst_cm->lf.ref_deltas, src_cm->lf.ref_deltas, TOTAL_REFS_PER_FRAME);
memcpy(dst_cm->lf.mode_deltas, src_cm->lf.mode_deltas, MAX_MODE_LF_DELTAS);
dst_cm->seg = src_cm->seg;
memcpy(dst_cm->frame_contexts, src_cm->frame_contexts,


@ -2473,7 +2473,7 @@ static void encode_loopfilter(VP10_COMMON *cm,
if (lf->mode_ref_delta_enabled) {
vpx_wb_write_bit(wb, lf->mode_ref_delta_update);
if (lf->mode_ref_delta_update) {
for (i = 0; i < MAX_REF_FRAMES; i++) {
for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
const int delta = lf->ref_deltas[i];
const int changed = delta != lf->last_ref_deltas[i];
vpx_wb_write_bit(wb, changed);


@ -103,12 +103,12 @@ struct macroblock {
BLOCK_SIZE min_partition_size;
BLOCK_SIZE max_partition_size;
int mv_best_ref_index[MAX_REF_FRAMES];
unsigned int max_mv_context[MAX_REF_FRAMES];
int mv_best_ref_index[TOTAL_REFS_PER_FRAME];
unsigned int max_mv_context[TOTAL_REFS_PER_FRAME];
unsigned int source_variance;
unsigned int recon_variance;
unsigned int pred_sse[MAX_REF_FRAMES];
int pred_mv_sad[MAX_REF_FRAMES];
unsigned int pred_sse[TOTAL_REFS_PER_FRAME];
int pred_mv_sad[TOTAL_REFS_PER_FRAME];
#if CONFIG_REF_MV
int *nmvjointcost;
@ -160,7 +160,7 @@ struct macroblock {
int use_lp32x32fdct;
// Used to store sub partition's choices.
MV pred_mv[MAX_REF_FRAMES];
MV pred_mv[TOTAL_REFS_PER_FRAME];
// Store the best motion vector during motion search
int_mv best_mv;


@ -60,7 +60,7 @@ typedef struct {
// motion vector cache for adaptive motion search control in partition
// search loop
MV pred_mv[MAX_REF_FRAMES];
MV pred_mv[TOTAL_REFS_PER_FRAME];
INTERP_FILTER pred_interp_filter;
#if CONFIG_EXT_PARTITION_TYPES
PARTITION_TYPE partition;


@ -451,7 +451,7 @@ static void dealloc_compressor_data(VP10_COMP *cpi) {
cpi->active_map.map = NULL;
// Free up-sampled reference buffers.
for (i = 0; i < MAX_REF_FRAMES; i++)
for (i = 0; i < (REF_FRAMES + 1); i++)
vpx_free_frame_buffer(&cpi->upsampled_ref_bufs[i].buf);
vp10_free_ref_frame_buffers(cm->buffer_pool);
@ -2377,7 +2377,7 @@ static void cal_nmvsadcosts_hp(int *mvsadcost[2]) {
static INLINE void init_upsampled_ref_frame_bufs(VP10_COMP *cpi) {
int i;
for (i = 0; i < MAX_REF_FRAMES; ++i) {
for (i = 0; i < (REF_FRAMES + 1); ++i) {
cpi->upsampled_ref_bufs[i].ref_count = 0;
cpi->upsampled_ref_idx[i] = INVALID_IDX;
}
@ -2953,7 +2953,7 @@ static void generate_psnr_packet(VP10_COMP *cpi) {
}
int vp10_use_as_reference(VP10_COMP *cpi, int ref_frame_flags) {
if (ref_frame_flags > ((1 << REFS_PER_FRAME) - 1))
if (ref_frame_flags > ((1 << INTER_REFS_PER_FRAME) - 1))
return -1;
cpi->ref_frame_flags = ref_frame_flags;
@ -3297,7 +3297,7 @@ static int recode_loop_test(VP10_COMP *cpi,
static INLINE int get_free_upsampled_ref_buf(EncRefCntBuffer *ubufs) {
int i;
for (i = 0; i < MAX_REF_FRAMES; i++) {
for (i = 0; i < (REF_FRAMES + 1); i++) {
if (!ubufs[i].ref_count) {
return i;
}
@ -3710,7 +3710,7 @@ static INLINE void alloc_frame_mvs(VP10_COMMON *const cm,
void vp10_scale_references(VP10_COMP *cpi) {
VP10_COMMON *cm = &cpi->common;
MV_REFERENCE_FRAME ref_frame;
const VPX_REFFRAME ref_mask[REFS_PER_FRAME] = {
const VPX_REFFRAME ref_mask[INTER_REFS_PER_FRAME] = {
VPX_LAST_FLAG,
#if CONFIG_EXT_REFS
VPX_LAST2_FLAG,
@ -3835,7 +3835,7 @@ static void release_scaled_references(VP10_COMP *cpi) {
if (cpi->oxcf.pass == 0) {
// Only release scaled references under certain conditions:
// if reference will be updated, or if scaled reference has same resolution.
int refresh[REFS_PER_FRAME];
int refresh[INTER_REFS_PER_FRAME];
refresh[0] = (cpi->refresh_last_frame) ? 1 : 0;
#if CONFIG_EXT_REFS
refresh[1] = refresh[2] = 0;
@ -3860,7 +3860,7 @@ static void release_scaled_references(VP10_COMP *cpi) {
}
}
} else {
for (i = 0; i < MAX_REF_FRAMES; ++i) {
for (i = 0; i < TOTAL_REFS_PER_FRAME; ++i) {
const int idx = cpi->scaled_ref_idx[i];
RefCntBuffer *const buf = idx != INVALID_IDX ?
&cm->buffer_pool->frame_bufs[idx] : NULL;
@ -4669,7 +4669,7 @@ static void set_arf_sign_bias(VP10_COMP *cpi) {
static int setup_interp_filter_search_mask(VP10_COMP *cpi) {
INTERP_FILTER ifilter;
int ref_total[MAX_REF_FRAMES] = {0};
int ref_total[TOTAL_REFS_PER_FRAME] = {0};
MV_REFERENCE_FRAME ref;
int mask = 0;
if (cpi->common.last_frame_type == KEY_FRAME ||
@ -5642,7 +5642,7 @@ int vp10_get_compressed_data(VP10_COMP *cpi, unsigned int *frame_flags,
}
if (cpi->oxcf.pass != 0 || frame_is_intra_only(cm) == 1) {
for (i = 0; i < MAX_REF_FRAMES; ++i)
for (i = 0; i < TOTAL_REFS_PER_FRAME; ++i)
cpi->scaled_ref_idx[i] = INVALID_IDX;
}


@ -62,7 +62,7 @@ typedef struct {
unsigned char *last_frame_seg_map_copy;
// 0 = Intra, Last, GF, ARF
signed char last_ref_lf_deltas[MAX_REF_FRAMES];
signed char last_ref_lf_deltas[TOTAL_REFS_PER_FRAME];
// 0 = ZERO_MV, MV
signed char last_mode_lf_deltas[MAX_MODE_LF_DELTAS];
@ -360,13 +360,16 @@ typedef struct VP10_COMP {
YV12_BUFFER_CONFIG scaled_last_source;
// Up-sampled reference buffers
EncRefCntBuffer upsampled_ref_bufs[MAX_REF_FRAMES];
int upsampled_ref_idx[MAX_REF_FRAMES];
// NOTE(zoeliu): It is needed to allocate sufficient space to the up-sampled
// reference buffers, which should include the up-sampled version of all the
// possibly stored references plus the currently coded frame itself.
EncRefCntBuffer upsampled_ref_bufs[REF_FRAMES + 1];
int upsampled_ref_idx[REF_FRAMES + 1];
// For a still frame, this flag is set to 1 to skip partition search.
int partition_search_skippable_frame;
int scaled_ref_idx[MAX_REF_FRAMES];
int scaled_ref_idx[TOTAL_REFS_PER_FRAME];
#if CONFIG_EXT_REFS
int lst_fb_idxes[LAST_REF_FRAMES];
#else
@ -424,7 +427,10 @@ typedef struct VP10_COMP {
RATE_CONTROL rc;
double framerate;
int interp_filter_selected[MAX_REF_FRAMES][SWITCHABLE];
// NOTE(zoeliu): Any inter frame allows maximum of REF_FRAMES inter
// references; Plus the currently coded frame itself, it is needed to allocate
// sufficient space to the size of the maximum possible number of frames.
int interp_filter_selected[REF_FRAMES + 1][SWITCHABLE];
struct vpx_codec_pkt_list *output_pkt_list;
@ -622,7 +628,7 @@ typedef struct VP10_COMP {
int is_arf_filter_off;
#endif // CONFIG_EXT_REFS
#if CONFIG_GLOBAL_MOTION
int global_motion_used[MAX_REF_FRAMES];
int global_motion_used[TOTAL_REFS_PER_FRAME];
#endif
} VP10_COMP;


@ -22,7 +22,7 @@ typedef struct {
int_mv mv;
PREDICTION_MODE mode;
} m;
} ref[MAX_REF_FRAMES];
} ref[TOTAL_REFS_PER_FRAME];
} MBGRAPH_MB_STATS;
typedef struct {


@ -365,7 +365,7 @@ typedef struct RD_OPT {
int threshes[MAX_SEGMENTS][BLOCK_SIZES][MAX_MODES];
int64_t prediction_type_threshes[MAX_REF_FRAMES][REFERENCE_MODES];
int64_t prediction_type_threshes[TOTAL_REFS_PER_FRAME][REFERENCE_MODES];
int RDMULT;
int RDDIV;


@ -4258,16 +4258,15 @@ static int cost_mv_ref(const VP10_COMP *cpi, PREDICTION_MODE mode,
#endif
}
static int set_and_cost_bmi_mvs(VP10_COMP *cpi, MACROBLOCK *x, MACROBLOCKD *xd,
int i,
PREDICTION_MODE mode, int_mv this_mv[2],
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES],
int_mv seg_mvs[MAX_REF_FRAMES],
static int set_and_cost_bmi_mvs(
VP10_COMP *cpi, MACROBLOCK *x, MACROBLOCKD *xd, int i,
PREDICTION_MODE mode, int_mv this_mv[2],
int_mv frame_mv[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME],
int_mv seg_mvs[TOTAL_REFS_PER_FRAME],
#if CONFIG_EXT_INTER
int_mv compound_seg_newmvs[2],
int_mv compound_seg_newmvs[2],
#endif // CONFIG_EXT_INTER
int_mv *best_ref_mv[2], const int *mvjcost,
int *mvcost[2]) {
int_mv *best_ref_mv[2], const int *mvjcost, int *mvcost[2]) {
MODE_INFO *const mic = xd->mi[0];
const MB_MODE_INFO *const mbmi = &mic->mbmi;
const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
@ -4610,11 +4609,11 @@ static INLINE void mi_buf_restore(MACROBLOCK *x, struct buf_2d orig_src,
// Check if NEARESTMV/NEARMV/ZEROMV is the cheapest way encode zero motion.
// TODO(aconverse): Find out if this is still productive then clean up or remove
static int check_best_zero_mv(
const VP10_COMP *cpi, const int16_t mode_context[MAX_REF_FRAMES],
const VP10_COMP *cpi, const int16_t mode_context[TOTAL_REFS_PER_FRAME],
#if CONFIG_REF_MV && CONFIG_EXT_INTER
const int16_t compound_mode_context[MAX_REF_FRAMES],
const int16_t compound_mode_context[TOTAL_REFS_PER_FRAME],
#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES], int this_mode,
int_mv frame_mv[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME], int this_mode,
const MV_REFERENCE_FRAME ref_frames[2],
const BLOCK_SIZE bsize, int block) {
@ -4724,7 +4723,7 @@ static void joint_motion_search(VP10_COMP *cpi, MACROBLOCK *x,
#if CONFIG_EXT_INTER
int_mv* ref_mv_sub8x8[2],
#endif
int_mv single_newmv[MAX_REF_FRAMES],
int_mv single_newmv[TOTAL_REFS_PER_FRAME],
int *rate_mv,
const int block) {
const VP10_COMMON *const cm = &cpi->common;
@ -4974,22 +4973,18 @@ static void joint_motion_search(VP10_COMP *cpi, MACROBLOCK *x,
}
}
static int64_t rd_pick_best_sub8x8_mode(VP10_COMP *cpi, MACROBLOCK *x,
int_mv *best_ref_mv,
int_mv *second_best_ref_mv,
int64_t best_rd, int *returntotrate,
int *returnyrate,
int64_t *returndistortion,
int *skippable, int64_t *psse,
int mvthresh,
static int64_t rd_pick_best_sub8x8_mode(
VP10_COMP *cpi, MACROBLOCK *x,
int_mv *best_ref_mv, int_mv *second_best_ref_mv,
int64_t best_rd, int *returntotrate, int *returnyrate,
int64_t *returndistortion, int *skippable, int64_t *psse, int mvthresh,
#if CONFIG_EXT_INTER
int_mv seg_mvs[4][2][MAX_REF_FRAMES],
int_mv compound_seg_newmvs[4][2],
int_mv seg_mvs[4][2][TOTAL_REFS_PER_FRAME],
int_mv compound_seg_newmvs[4][2],
#else
int_mv seg_mvs[4][MAX_REF_FRAMES],
int_mv seg_mvs[4][TOTAL_REFS_PER_FRAME],
#endif // CONFIG_EXT_INTER
BEST_SEG_INFO *bsi_buf, int filter_idx,
int mi_row, int mi_col) {
BEST_SEG_INFO *bsi_buf, int filter_idx, int mi_row, int mi_col) {
BEST_SEG_INFO *bsi = bsi_buf + filter_idx;
#if CONFIG_REF_MV
int_mv tmp_ref_mv[2];
@ -5060,7 +5055,7 @@ static int64_t rd_pick_best_sub8x8_mode(VP10_COMP *cpi, MACROBLOCK *x,
// TODO(jingning,rbultje): rewrite the rate-distortion optimization
// loop for 4x4/4x8/8x4 block coding. to be replaced with new rd loop
int_mv mode_mv[MB_MODE_COUNT][2];
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES];
int_mv frame_mv[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME];
PREDICTION_MODE mode_selected = ZEROMV;
int64_t best_rd = INT64_MAX;
const int i = idy * 2 + idx;
@ -5794,8 +5789,9 @@ static void estimate_ref_frame_costs(const VP10_COMMON *cm,
int seg_ref_active = segfeature_active(&cm->seg, segment_id,
SEG_LVL_REF_FRAME);
if (seg_ref_active) {
memset(ref_costs_single, 0, MAX_REF_FRAMES * sizeof(*ref_costs_single));
memset(ref_costs_comp, 0, MAX_REF_FRAMES * sizeof(*ref_costs_comp));
memset(ref_costs_single, 0,
TOTAL_REFS_PER_FRAME * sizeof(*ref_costs_single));
memset(ref_costs_comp, 0, TOTAL_REFS_PER_FRAME * sizeof(*ref_costs_comp));
*comp_mode_p = 128;
} else {
vpx_prob intra_inter_p = vp10_get_intra_inter_prob(cm, xd);
@ -5947,9 +5943,9 @@ static void setup_buffer_inter(
MV_REFERENCE_FRAME ref_frame,
BLOCK_SIZE block_size,
int mi_row, int mi_col,
int_mv frame_nearest_mv[MAX_REF_FRAMES],
int_mv frame_near_mv[MAX_REF_FRAMES],
struct buf_2d yv12_mb[MAX_REF_FRAMES][MAX_MB_PLANE]) {
int_mv frame_nearest_mv[TOTAL_REFS_PER_FRAME],
int_mv frame_near_mv[TOTAL_REFS_PER_FRAME],
struct buf_2d yv12_mb[TOTAL_REFS_PER_FRAME][MAX_MB_PLANE]) {
const VP10_COMMON *cm = &cpi->common;
const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, ref_frame);
MACROBLOCKD *const xd = &x->e_mbd;
@ -6528,7 +6524,7 @@ static void do_masked_motion_search_indexed(VP10_COMP *cpi, MACROBLOCK *x,
static int discount_newmv_test(const VP10_COMP *cpi,
int this_mode,
int_mv this_mv,
int_mv (*mode_mv)[MAX_REF_FRAMES],
int_mv (*mode_mv)[TOTAL_REFS_PER_FRAME],
int ref_frame) {
return (!cpi->rc.is_src_frame_alt_ref &&
(this_mode == NEWMV) &&
@ -6608,14 +6604,10 @@ static int estimate_wedge_sign(const VP10_COMP *cpi,
#endif // CONFIG_EXT_INTER
#if !CONFIG_DUAL_FILTER
static INTERP_FILTER predict_interp_filter(const VP10_COMP *cpi,
const MACROBLOCK *x,
const BLOCK_SIZE bsize,
const int mi_row,
const int mi_col,
INTERP_FILTER
(*single_filter)[MAX_REF_FRAMES]
) {
static INTERP_FILTER predict_interp_filter(
const VP10_COMP *cpi, const MACROBLOCK *x, const BLOCK_SIZE bsize,
const int mi_row, const int mi_col,
INTERP_FILTER (*single_filter)[TOTAL_REFS_PER_FRAME]) {
INTERP_FILTER best_filter = SWITCHABLE;
const VP10_COMMON *cm = &cpi->common;
const MACROBLOCKD *xd = &x->e_mbd;
@ -6918,33 +6910,27 @@ static int64_t pick_interintra_wedge(const VP10_COMP *const cpi,
}
#endif // CONFIG_EXT_INTER
static int64_t handle_inter_mode(VP10_COMP *cpi, MACROBLOCK *x,
BLOCK_SIZE bsize,
int *rate2, int64_t *distortion,
int *skippable,
int *rate_y, int *rate_uv,
int *disable_skip,
int_mv (*mode_mv)[MAX_REF_FRAMES],
int mi_row, int mi_col,
static int64_t handle_inter_mode(
VP10_COMP *cpi, MACROBLOCK *x, BLOCK_SIZE bsize, int *rate2,
int64_t *distortion, int *skippable, int *rate_y, int *rate_uv,
int *disable_skip, int_mv (*mode_mv)[TOTAL_REFS_PER_FRAME],
int mi_row, int mi_col,
#if CONFIG_OBMC
uint8_t *dst_buf1[3], int dst_stride1[3],
uint8_t *dst_buf2[3], int dst_stride2[3],
const int32_t *const wsrc,
const int32_t *const mask2d,
uint8_t *dst_buf1[3], int dst_stride1[3],
uint8_t *dst_buf2[3], int dst_stride2[3],
const int32_t *const wsrc, const int32_t *const mask2d,
#endif // CONFIG_OBMC
#if CONFIG_EXT_INTER
int_mv single_newmvs[2][MAX_REF_FRAMES],
int single_newmvs_rate[2][MAX_REF_FRAMES],
int *compmode_interintra_cost,
int *compmode_wedge_cost,
int64_t (*const modelled_rd)[MAX_REF_FRAMES],
int_mv single_newmvs[2][TOTAL_REFS_PER_FRAME],
int single_newmvs_rate[2][TOTAL_REFS_PER_FRAME],
int *compmode_interintra_cost, int *compmode_wedge_cost,
int64_t (*const modelled_rd)[TOTAL_REFS_PER_FRAME],
#else
int_mv single_newmv[MAX_REF_FRAMES],
int_mv single_newmv[TOTAL_REFS_PER_FRAME],
#endif // CONFIG_EXT_INTER
INTERP_FILTER (*single_filter)[MAX_REF_FRAMES],
int (*single_skippable)[MAX_REF_FRAMES],
int64_t *psse,
const int64_t ref_best_rd) {
INTERP_FILTER (*single_filter)[TOTAL_REFS_PER_FRAME],
int (*single_skippable)[TOTAL_REFS_PER_FRAME],
int64_t *psse, const int64_t ref_best_rd) {
VP10_COMMON *cm = &cpi->common;
MACROBLOCKD *xd = &x->e_mbd;
MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
@ -6960,7 +6946,7 @@ static int64_t handle_inter_mode(VP10_COMP *cpi, MACROBLOCK *x,
#if CONFIG_EXT_INTER
const int bw = 4 * num_4x4_blocks_wide_lookup[bsize];
int mv_idx = (this_mode == NEWFROMNEARMV) ? 1 : 0;
int_mv single_newmv[MAX_REF_FRAMES];
int_mv single_newmv[TOTAL_REFS_PER_FRAME];
const unsigned int *const interintra_mode_cost =
cpi->interintra_mode_cost[size_group_lookup[bsize]];
const int is_comp_interintra_pred = (mbmi->ref_frame[1] == INTRA_FRAME);
@ -8553,18 +8539,18 @@ void vp10_rd_pick_inter_mode_sb(VP10_COMP *cpi,
MV_REFERENCE_FRAME ref_frame, second_ref_frame;
unsigned char segment_id = mbmi->segment_id;
int comp_pred, i, k;
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES];
struct buf_2d yv12_mb[MAX_REF_FRAMES][MAX_MB_PLANE];
int_mv frame_mv[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME];
struct buf_2d yv12_mb[TOTAL_REFS_PER_FRAME][MAX_MB_PLANE];
#if CONFIG_EXT_INTER
int_mv single_newmvs[2][MAX_REF_FRAMES] = { { { 0 } }, { { 0 } } };
int single_newmvs_rate[2][MAX_REF_FRAMES] = { { 0 }, { 0 } };
int64_t modelled_rd[MB_MODE_COUNT][MAX_REF_FRAMES];
int_mv single_newmvs[2][TOTAL_REFS_PER_FRAME] = { { { 0 } }, { { 0 } } };
int single_newmvs_rate[2][TOTAL_REFS_PER_FRAME] = { { 0 }, { 0 } };
int64_t modelled_rd[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME];
#else
int_mv single_newmv[MAX_REF_FRAMES] = { { 0 } };
int_mv single_newmv[TOTAL_REFS_PER_FRAME] = { { 0 } };
#endif // CONFIG_EXT_INTER
INTERP_FILTER single_inter_filter[MB_MODE_COUNT][MAX_REF_FRAMES];
int single_skippable[MB_MODE_COUNT][MAX_REF_FRAMES];
static const int flag_list[REFS_PER_FRAME + 1] = {
INTERP_FILTER single_inter_filter[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME];
int single_skippable[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME];
static const int flag_list[TOTAL_REFS_PER_FRAME] = {
0,
VPX_LAST_FLAG,
#if CONFIG_EXT_REFS
@ -8584,7 +8570,8 @@ void vp10_rd_pick_inter_mode_sb(VP10_COMP *cpi,
MB_MODE_INFO best_mbmode;
int best_mode_skippable = 0;
int midx, best_mode_index = -1;
unsigned int ref_costs_single[MAX_REF_FRAMES], ref_costs_comp[MAX_REF_FRAMES];
unsigned int ref_costs_single[TOTAL_REFS_PER_FRAME];
unsigned int ref_costs_comp[TOTAL_REFS_PER_FRAME];
vpx_prob comp_mode_p;
int64_t best_intra_rd = INT64_MAX;
unsigned int best_pred_sse = UINT_MAX;
@ -8608,11 +8595,11 @@ void vp10_rd_pick_inter_mode_sb(VP10_COMP *cpi,
int best_skip2 = 0;
uint8_t ref_frame_skip_mask[2] = { 0 };
#if CONFIG_EXT_INTER
uint32_t mode_skip_mask[MAX_REF_FRAMES] = { 0 };
uint32_t mode_skip_mask[TOTAL_REFS_PER_FRAME] = { 0 };
MV_REFERENCE_FRAME best_single_inter_ref = LAST_FRAME;
int64_t best_single_inter_rd = INT64_MAX;
#else
uint16_t mode_skip_mask[MAX_REF_FRAMES] = { 0 };
uint16_t mode_skip_mask[TOTAL_REFS_PER_FRAME] = { 0 };
#endif // CONFIG_EXT_INTER
int mode_skip_start = sf->mode_skip_start + 1;
const int *const rd_threshes = rd_opt->threshes[segment_id][bsize];
@ -8688,10 +8675,10 @@ void vp10_rd_pick_inter_mode_sb(VP10_COMP *cpi,
best_pred_rd[i] = INT64_MAX;
for (i = 0; i < TX_SIZES; i++)
rate_uv_intra[i] = INT_MAX;
for (i = 0; i < MAX_REF_FRAMES; ++i)
for (i = 0; i < TOTAL_REFS_PER_FRAME; ++i)
x->pred_sse[i] = INT_MAX;
for (i = 0; i < MB_MODE_COUNT; ++i) {
for (k = 0; k < MAX_REF_FRAMES; ++k) {
for (k = 0; k < TOTAL_REFS_PER_FRAME; ++k) {
single_inter_filter[i][k] = SWITCHABLE;
single_skippable[i][k] = 0;
}
@ -8882,7 +8869,7 @@ void vp10_rd_pick_inter_mode_sb(VP10_COMP *cpi,
#if CONFIG_EXT_INTER
for (i = 0 ; i < MB_MODE_COUNT ; ++i)
for (ref_frame = 0; ref_frame < MAX_REF_FRAMES; ++ref_frame)
for (ref_frame = 0; ref_frame < TOTAL_REFS_PER_FRAME; ++ref_frame)
modelled_rd[i][ref_frame] = INT64_MAX;
#endif // CONFIG_EXT_INTER
@ -8965,7 +8952,7 @@ void vp10_rd_pick_inter_mode_sb(VP10_COMP *cpi,
#endif // CONFIG_EXT_REFS
break;
case NONE:
case MAX_REF_FRAMES:
case TOTAL_REFS_PER_FRAME:
assert(0 && "Invalid Reference frame");
break;
}
@ -9373,20 +9360,20 @@ void vp10_rd_pick_inter_mode_sb(VP10_COMP *cpi,
if (!mv_check_bounds(x, &cur_mv.as_mv)) {
INTERP_FILTER dummy_single_inter_filter[MB_MODE_COUNT]
[MAX_REF_FRAMES] =
[TOTAL_REFS_PER_FRAME] =
{ { 0 } };
int dummy_single_skippable[MB_MODE_COUNT][MAX_REF_FRAMES] =
int dummy_single_skippable[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME] =
{ { 0 } };
int dummy_disable_skip = 0;
#if CONFIG_EXT_INTER
int_mv dummy_single_newmvs[2][MAX_REF_FRAMES] =
int_mv dummy_single_newmvs[2][TOTAL_REFS_PER_FRAME] =
{ { { 0 } }, { { 0 } } };
int dummy_single_newmvs_rate[2][MAX_REF_FRAMES] =
int dummy_single_newmvs_rate[2][TOTAL_REFS_PER_FRAME] =
{ { 0 }, { 0 } };
int dummy_compmode_interintra_cost = 0;
int dummy_compmode_wedge_cost = 0;
#else
int_mv dummy_single_newmv[MAX_REF_FRAMES] = { { 0 } };
int_mv dummy_single_newmv[TOTAL_REFS_PER_FRAME] = { { 0 } };
#endif
frame_mv[NEARMV][ref_frame] = cur_mv;
@ -10159,7 +10146,8 @@ void vp10_rd_pick_inter_mode_sb_seg_skip(VP10_COMP *cpi,
const int comp_pred = 0;
int i;
int64_t best_pred_diff[REFERENCE_MODES];
unsigned int ref_costs_single[MAX_REF_FRAMES], ref_costs_comp[MAX_REF_FRAMES];
unsigned int ref_costs_single[TOTAL_REFS_PER_FRAME];
unsigned int ref_costs_comp[TOTAL_REFS_PER_FRAME];
vpx_prob comp_mode_p;
INTERP_FILTER best_filter = SWITCHABLE;
int64_t this_rd = INT64_MAX;
@ -10169,9 +10157,9 @@ void vp10_rd_pick_inter_mode_sb_seg_skip(VP10_COMP *cpi,
estimate_ref_frame_costs(cm, xd, segment_id, ref_costs_single, ref_costs_comp,
&comp_mode_p);
for (i = 0; i < MAX_REF_FRAMES; ++i)
for (i = 0; i < TOTAL_REFS_PER_FRAME; ++i)
x->pred_sse[i] = INT_MAX;
for (i = LAST_FRAME; i < MAX_REF_FRAMES; ++i)
for (i = LAST_FRAME; i < TOTAL_REFS_PER_FRAME; ++i)
x->pred_mv_sad[i] = INT_MAX;
rd_cost->rate = INT_MAX;
@ -10298,9 +10286,9 @@ void vp10_rd_pick_inter_mode_sub8x8(struct VP10_COMP *cpi,
MV_REFERENCE_FRAME ref_frame, second_ref_frame;
unsigned char segment_id = mbmi->segment_id;
int comp_pred, i;
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES];
struct buf_2d yv12_mb[MAX_REF_FRAMES][MAX_MB_PLANE];
static const int flag_list[REFS_PER_FRAME + 1] = {
int_mv frame_mv[MB_MODE_COUNT][TOTAL_REFS_PER_FRAME];
struct buf_2d yv12_mb[TOTAL_REFS_PER_FRAME][MAX_MB_PLANE];
static const int flag_list[TOTAL_REFS_PER_FRAME] = {
0,
VPX_LAST_FLAG,
#if CONFIG_EXT_REFS
@ -10319,7 +10307,8 @@ void vp10_rd_pick_inter_mode_sub8x8(struct VP10_COMP *cpi,
int64_t best_pred_rd[REFERENCE_MODES];
MB_MODE_INFO best_mbmode;
int ref_index, best_ref_index = 0;
unsigned int ref_costs_single[MAX_REF_FRAMES], ref_costs_comp[MAX_REF_FRAMES];
unsigned int ref_costs_single[TOTAL_REFS_PER_FRAME];
unsigned int ref_costs_comp[TOTAL_REFS_PER_FRAME];
vpx_prob comp_mode_p;
#if CONFIG_DUAL_FILTER
INTERP_FILTER tmp_best_filter[4] = { 0 };
@ -10333,9 +10322,9 @@ void vp10_rd_pick_inter_mode_sub8x8(struct VP10_COMP *cpi,
const int intra_cost_penalty = vp10_get_intra_cost_penalty(
cm->base_qindex, cm->y_dc_delta_q, cm->bit_depth);
#if CONFIG_EXT_INTER
int_mv seg_mvs[4][2][MAX_REF_FRAMES];
int_mv seg_mvs[4][2][TOTAL_REFS_PER_FRAME];
#else
int_mv seg_mvs[4][MAX_REF_FRAMES];
int_mv seg_mvs[4][TOTAL_REFS_PER_FRAME];
#endif // CONFIG_EXT_INTER
b_mode_info best_bmodes[4];
int best_skip2 = 0;
@ -10366,10 +10355,10 @@ void vp10_rd_pick_inter_mode_sub8x8(struct VP10_COMP *cpi,
int k;
for (k = 0; k < 2; k++)
for (j = 0; j < MAX_REF_FRAMES; j++)
for (j = 0; j < TOTAL_REFS_PER_FRAME; j++)
seg_mvs[i][k][j].as_int = INVALID_MV;
#else
for (j = 0; j < MAX_REF_FRAMES; j++)
for (j = 0; j < TOTAL_REFS_PER_FRAME; j++)
seg_mvs[i][j].as_int = INVALID_MV;
#endif // CONFIG_EXT_INTER
}
@ -10493,7 +10482,7 @@ void vp10_rd_pick_inter_mode_sub8x8(struct VP10_COMP *cpi,
#endif // CONFIG_EXT_REFS
break;
case NONE:
case MAX_REF_FRAMES:
case TOTAL_REFS_PER_FRAME:
assert(0 && "Invalid Reference frame");
break;
}