Further changes to mv reference code.

Some further changes to, and refactoring of, the mv
reference code and the selection of the center point
for searches. The main aim is to avoid passing so many
different local copies of the same values around.

Some placeholder comments.

Change-Id: I309f10ffe9a9cde7663e7eae19eb594371c8d055
Paul Wilkins 2012-12-05 16:23:38 +00:00
Parent d1356faeb8
Commit d124465975
9 changed files with 101 additions and 156 deletions
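
Taken together, the change drops the best_mv out-parameter from vp9_find_best_ref_mvs() and has callers read the best candidate from the front of the sorted list they already hold (mbmi->ref_mvs[ref_frame][0]). Below is a minimal, self-contained sketch of that calling convention; the types, names and function body are simplified stand-ins for illustration, not the real libvpx code:

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for libvpx's MV / int_mv types (illustration only). */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

#define MAX_MV_REF_CANDIDATES 4

/* Old shape (schematic): the function also copied the winner out through a
 * best_mv pointer.  New shape: once the candidate list is ranked best-first,
 * the winner is simply mvlist[0], so only nearest/near are still returned. */
static void find_best_ref_mvs(int_mv *mvlist, int n,
                              int_mv *nearest, int_mv *near_mv) {
  /* Assume mvlist has already been sorted best-first at this point. */
  nearest->as_int = (n > 0) ? mvlist[0].as_int : 0;
  near_mv->as_int = (n > 1) ? mvlist[1].as_int : 0;
}

int main(void) {
  int_mv ref_mvs[MAX_MV_REF_CANDIDATES] = {{0}};
  int_mv nearest, near_mv, best_mv;
  ref_mvs[0].as_mv.row = 4;  ref_mvs[0].as_mv.col = -2;
  ref_mvs[1].as_mv.row = 12; ref_mvs[1].as_mv.col = 6;

  find_best_ref_mvs(ref_mvs, 2, &nearest, &near_mv);
  best_mv = ref_mvs[0];  /* callers now read the best candidate directly */
  printf("best mv: %d,%d\n", best_mv.as_mv.row, best_mv.as_mv.col);
  return 0;
}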

View file

@@ -127,7 +127,6 @@ void vp9_find_best_ref_mvs(MACROBLOCKD *xd,
unsigned char *ref_y_buffer,
int ref_y_stride,
int_mv *mvlist,
int_mv *best_mv,
int_mv *nearest,
int_mv *near) {
int i, j;
@@ -144,7 +143,7 @@ void vp9_find_best_ref_mvs(MACROBLOCKD *xd,
int zero_seen = FALSE;
// Default all to 0,0 if nothing else available
best_mv->as_int = nearest->as_int = near->as_int = 0;
nearest->as_int = near->as_int = 0;
vpx_memset(sorted_mvs, 0, sizeof(sorted_mvs));
#if CONFIG_SUBPELREFMV
@@ -272,9 +271,6 @@ void vp9_find_best_ref_mvs(MACROBLOCKD *xd,
clamp_mv2(&sorted_mvs[i], xd);
}
// Set the best mv to the first entry in the sorted list
best_mv->as_int = sorted_mvs[0].as_int;
// Provided that there are non zero vectors available there will not
// be more than one 0,0 entry in the sorted list.
// The best ref mv is always set to the first entry (which gave the best
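
The lines removed here are the old explicit best_mv = sorted_mvs[0] copy; with the list written back best-first, callers can index entry 0 themselves. The surviving context also shows each sorted candidate being clamped (clamp_mv2) before use. As a reminder of what that clamp amounts to, a small self-contained sketch follows; the bounds structure is a made-up stand-in for the edge limits the real MACROBLOCKD carries:

#include <stdint.h>

/* Simplified stand-ins for the mv types (illustration only). */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

/* Made-up container for the edge limits; the real code derives them from the
 * MACROBLOCKD's distance to the frame borders. */
typedef struct {
  int mv_row_min, mv_row_max;
  int mv_col_min, mv_col_max;
} MvBounds;

static int16_t clamp16(int v, int lo, int hi) {
  return (int16_t)(v < lo ? lo : (v > hi ? hi : v));
}

/* Keep a candidate inside the legal window, in the spirit of clamp_mv2. */
void clamp_candidate(int_mv *mv, const MvBounds *b) {
  mv->as_mv.row = clamp16(mv->as_mv.row, b->mv_row_min, b->mv_row_max);
  mv->as_mv.col = clamp16(mv->as_mv.col, b->mv_col_min, b->mv_col_max);
}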

View file

@@ -25,7 +25,6 @@ void vp9_find_best_ref_mvs(MACROBLOCKD *xd,
unsigned char *ref_y_buffer,
int ref_y_stride,
int_mv *mvlist,
int_mv *best_mv,
int_mv *nearest,
int_mv *near);

View file

@@ -765,10 +765,10 @@ static void read_mb_modes_mv(VP9D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
int recon_y_stride, recon_yoffset;
int recon_uv_stride, recon_uvoffset;
MV_REFERENCE_FRAME ref_frame = mbmi->ref_frame;
{
int ref_fb_idx;
MV_REFERENCE_FRAME ref_frame = mbmi->ref_frame;
/* Select the appropriate reference frame for this MB */
if (ref_frame == LAST_FRAME)
@@ -801,10 +801,13 @@ static void read_mb_modes_mv(VP9D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
xd->pre.y_buffer,
recon_y_stride,
mbmi->ref_mvs[ref_frame],
&best_mv, &nearest, &nearby);
&nearest, &nearby);
vp9_mv_ref_probs(&pbi->common, mv_ref_p,
mbmi->mb_mode_context[ref_frame]);
best_mv = mbmi->ref_mvs[ref_frame][0];
#ifdef DEC_DEBUG
if (dec_debug)
printf("[D %d %d] %d %d %d %d\n", ref_frame,
@@ -826,7 +829,7 @@ static void read_mb_modes_mv(VP9D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
mbmi->mode = read_mv_ref(bc, mv_ref_p);
vp9_accum_mv_refs(&pbi->common, mbmi->mode,
mbmi->mb_mode_context[mbmi->ref_frame]);
mbmi->mb_mode_context[ref_frame]);
}
#if CONFIG_PRED_FILTER
@@ -890,9 +893,9 @@ static void read_mb_modes_mv(VP9D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
xd->second_pre.y_buffer,
recon_y_stride,
mbmi->ref_mvs[mbmi->second_ref_frame],
&best_mv_second,
&nearest_second,
&nearby_second);
best_mv_second = mbmi->ref_mvs[mbmi->second_ref_frame][0];
}
} else {
@@ -925,6 +928,29 @@ static void read_mb_modes_mv(VP9D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
#endif
}
#if CONFIG_NEW_MVREF
// if ((mbmi->mode == NEWMV) || (mbmi->mode == SPLITMV))
if (mbmi->mode == NEWMV) {
int best_index;
MV_REFERENCE_FRAME ref_frame = mbmi->ref_frame;
// Decode the index of the choice.
best_index =
vp9_read_mv_ref_id(bc, xd->mb_mv_ref_id_probs[ref_frame]);
best_mv.as_int = mbmi->ref_mvs[ref_frame][best_index].as_int;
if (mbmi->second_ref_frame > 0) {
ref_frame = mbmi->second_ref_frame;
// Decode the index of the choice.
best_index =
vp9_read_mv_ref_id(bc, xd->mb_mv_ref_id_probs[ref_frame]);
best_mv_second.as_int = mbmi->ref_mvs[ref_frame][best_index].as_int;
}
}
#endif
mbmi->uv_mode = DC_PRED;
switch (mbmi->mode) {
case SPLITMV: {
@@ -1081,19 +1107,6 @@ static void read_mb_modes_mv(VP9D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
case NEWMV:
#if CONFIG_NEW_MVREF
{
int best_index;
MV_REFERENCE_FRAME ref_frame = mbmi->ref_frame;
// Encode the index of the choice.
best_index =
vp9_read_mv_ref_id(bc, xd->mb_mv_ref_id_probs[ref_frame]);
best_mv.as_int = mbmi->ref_mvs[ref_frame][best_index].as_int;
}
#endif
read_nmv(bc, &mv->as_mv, &best_mv.as_mv, nmvc);
read_nmv_fp(bc, &mv->as_mv, &best_mv.as_mv, nmvc,
xd->allow_high_precision_mv);
@@ -1115,18 +1128,6 @@ static void read_mb_modes_mv(VP9D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
mb_to_bottom_edge);
if (mbmi->second_ref_frame > 0) {
#if CONFIG_NEW_MVREF
{
int best_index;
MV_REFERENCE_FRAME ref_frame = mbmi->second_ref_frame;
// Encode the index of the choice.
best_index =
vp9_read_mv_ref_id(bc, xd->mb_mv_ref_id_probs[ref_frame]);
best_mv_second.as_int = mbmi->ref_mvs[ref_frame][best_index].as_int;
}
#endif
read_nmv(bc, &mbmi->mv[1].as_mv, &best_mv_second.as_mv, nmvc);
read_nmv_fp(bc, &mbmi->mv[1].as_mv, &best_mv_second.as_mv, nmvc,
xd->allow_high_precision_mv);
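
On the decoder side, the defaults for best_mv and best_mv_second now come straight from mbmi->ref_mvs[...] once the candidate lists are built, and the CONFIG_NEW_MVREF index read is hoisted out of the per-mode switch into a single block that runs as soon as the mode is known. A rough sketch of the resulting flow is below; the types, the reader callback and the probability handling are simplified stand-ins rather than the real vp9_read_mv_ref_id interface:

#include <stdint.h>

/* Simplified stand-ins; none of this is the real decoder state. */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

enum { NEWMV = 0 };                               /* stand-in mode id */
enum { MAX_REF_FRAMES = 4, MAX_MV_REFS = 4 };

typedef struct {
  int mode;
  int ref_frame;
  int second_ref_frame;                           /* <= 0 means unused */
  int_mv ref_mvs[MAX_REF_FRAMES][MAX_MV_REFS];
} MbInfo;

/* Stand-in for reading a coded candidate index from the bitstream. */
typedef int (*read_index_fn)(void *bool_decoder);

void choose_best_mvs(const MbInfo *mbmi, void *bc, read_index_fn read_index,
                     int_mv *best_mv, int_mv *best_mv_second) {
  /* Default: the first (best ranked) candidate of each list. */
  *best_mv = mbmi->ref_mvs[mbmi->ref_frame][0];
  if (mbmi->second_ref_frame > 0)
    *best_mv_second = mbmi->ref_mvs[mbmi->second_ref_frame][0];

  /* NEWMV may override the default with an explicitly coded index. */
  if (mbmi->mode == NEWMV) {
    int idx = read_index(bc);
    *best_mv = mbmi->ref_mvs[mbmi->ref_frame][idx];
    if (mbmi->second_ref_frame > 0) {
      idx = read_index(bc);
      *best_mv_second = mbmi->ref_mvs[mbmi->second_ref_frame][idx];
    }
  }
}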

View file

@@ -956,21 +956,14 @@ static void pack_inter_mode_mvs(VP9_COMP *const cpi, vp9_writer *const bc) {
pc->fc.uv_mode_prob[mode]);
}
} else {
int_mv best_mv, best_second_mv;
vp9_prob mv_ref_p [VP9_MVREFS - 1];
{
best_mv.as_int = mi->ref_mvs[rf][0].as_int;
vp9_mv_ref_probs(&cpi->common, mv_ref_p, mi->mb_mode_context[rf]);
vp9_mv_ref_probs(&cpi->common, mv_ref_p, mi->mb_mode_context[rf]);
// #ifdef ENTROPY_STATS
// accum_mv_refs(mode, ct);
// #endif
}
#ifdef ENTROPY_STATS
accum_mv_refs(mode, ct);
active_section = 3;
#endif
@@ -1012,13 +1005,6 @@ static void pack_inter_mode_mvs(VP9_COMP *const cpi, vp9_writer *const bc) {
}
}
if (mi->second_ref_frame > 0 &&
(mode == NEWMV || mode == SPLITMV)) {
best_second_mv.as_int =
mi->ref_mvs[mi->second_ref_frame][0].as_int;
}
// does the feature use compound prediction or not
// (if not specified at the frame/segment level)
if (cpi->common.comp_pred_mode == HYBRID_PREDICTION) {
@@ -1047,64 +1033,37 @@ static void pack_inter_mode_mvs(VP9_COMP *const cpi, vp9_writer *const bc) {
}
#endif
#if CONFIG_NEW_MVREF
// if ((mode == NEWMV) || (mode == SPLITMV)) {
if (mode == NEWMV) {
// Encode the index of the choice.
vp9_write_mv_ref_id(bc,
xd->mb_mv_ref_id_probs[rf], mi->best_index);
cpi->best_ref_index_counts[rf][mi->best_index]++;
if (mi->second_ref_frame > 0) {
// Encode the index of the choice.
vp9_write_mv_ref_id(
bc, xd->mb_mv_ref_id_probs[mi->second_ref_frame],
mi->best_second_index);
cpi->best_ref_index_counts[mi->second_ref_frame]
[mi->best_second_index]++;
}
}
#endif
{
switch (mode) { /* new, split require MVs */
case NEWMV:
#ifdef ENTROPY_STATS
active_section = 5;
#endif
#if CONFIG_NEW_MVREF
{
unsigned int best_index;
// Choose the best mv reference
/*
best_index = pick_best_mv_ref(x, rf, mi->mv[0],
mi->ref_mvs[rf], &best_mv);
assert(best_index == mi->best_index);
assert(best_mv.as_int == mi->best_mv.as_int);
*/
best_index = mi->best_index;
best_mv.as_int = mi->best_mv.as_int;
// Encode the index of the choice.
vp9_write_mv_ref_id(bc,
xd->mb_mv_ref_id_probs[rf], best_index);
cpi->best_ref_index_counts[rf][best_index]++;
}
#endif
write_nmv(bc, &mi->mv[0].as_mv, &best_mv,
write_nmv(bc, &mi->mv[0].as_mv, &mi->best_mv,
(const nmv_context*) nmvc,
xd->allow_high_precision_mv);
if (mi->second_ref_frame > 0) {
#if CONFIG_NEW_MVREF
unsigned int best_index;
sec_ref_frame = mi->second_ref_frame;
/*
best_index =
pick_best_mv_ref(x, sec_ref_frame, mi->mv[1],
mi->ref_mvs[sec_ref_frame],
&best_second_mv);
assert(best_index == mi->best_second_index);
assert(best_second_mv.as_int == mi->best_second_mv.as_int);
*/
best_index = mi->best_second_index;
best_second_mv.as_int = mi->best_second_mv.as_int;
// Encode the index of the choice.
vp9_write_mv_ref_id(bc,
xd->mb_mv_ref_id_probs[sec_ref_frame],
best_index);
cpi->best_ref_index_counts[sec_ref_frame][best_index]++;
#endif
write_nmv(bc, &mi->mv[1].as_mv, &best_second_mv,
write_nmv(bc, &mi->mv[1].as_mv, &mi->best_second_mv,
(const nmv_context*) nmvc,
xd->allow_high_precision_mv);
}
@@ -1148,14 +1107,14 @@ static void pack_inter_mode_mvs(VP9_COMP *const cpi, vp9_writer *const bc) {
#ifdef ENTROPY_STATS
active_section = 11;
#endif
write_nmv(bc, &blockmv.as_mv, &best_mv,
write_nmv(bc, &blockmv.as_mv, &mi->best_mv,
(const nmv_context*) nmvc,
xd->allow_high_precision_mv);
if (mi->second_ref_frame > 0) {
write_nmv(bc,
&cpi->mb.partition_info->bmi[j].second_mv.as_mv,
&best_second_mv,
&mi->best_second_mv,
(const nmv_context*) nmvc,
xd->allow_high_precision_mv);
}
@@ -1167,10 +1126,6 @@ static void pack_inter_mode_mvs(VP9_COMP *const cpi, vp9_writer *const bc) {
break;
}
}
/* This is not required if the counts in cpi are consistent with the
* final packing pass */
// if (!cpi->dummy_packing)
// vp9_update_nmv_count(cpi, x, &best_mv, &best_second_mv);
}
if (((rf == INTRA_FRAME && mode <= I8X8_PRED) ||
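
The writer side mirrors this: pack_inter_mode_mvs() no longer re-derives the best reference at packing time (the commented-out pick_best_mv_ref calls and asserts are gone) and instead replays mi->best_index and mi->best_mv, which the RD search stored earlier. A schematic of that decide-during-RD, replay-during-packing split follows; every type and helper name in it is invented for illustration:

#include <stdint.h>

/* Simplified stand-ins; every name below is invented for illustration. */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

typedef struct {
  int best_index;   /* reference-mv index chosen during the RD search */
  int_mv best_mv;   /* the candidate that index refers to             */
} MbCodingChoice;

typedef struct { int pos; } Writer;               /* stand-in bool coder */

void write_mv_ref_id(Writer *w, int index) { (void)w; (void)index; }
void write_mv_residual(Writer *w, const MV *mv, const MV *ref) {
  (void)w; (void)mv; (void)ref;
}

/* RD search: pick the reference candidate once and remember the decision. */
void rd_choose_ref(MbCodingChoice *mi, const int_mv *candidates, int chosen) {
  mi->best_index = chosen;
  mi->best_mv = candidates[chosen];
}

/* Bitstream packing: replay the stored decision instead of re-deriving it. */
void pack_newmv(Writer *w, const MbCodingChoice *mi, const int_mv *mv) {
  write_mv_ref_id(w, mi->best_index);
  write_mv_residual(w, &mv->as_mv, &mi->best_mv.as_mv);
}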

View file

@@ -135,6 +135,8 @@ typedef struct macroblock {
int *mb_norm_activity_ptr;
signed int act_zbin_adj;
int mv_best_ref_index[MAX_REF_FRAMES];
int nmvjointcost[MV_JOINTS];
int nmvcosts[2][MV_VALS];
int *nmvcost[2];

View file

@@ -422,6 +422,7 @@ static unsigned int pick_best_mv_ref(MACROBLOCK *x,
}
}
// best_index = x->mv_best_ref_index[ref_frame];
best_ref->as_int = mv_ref_list[best_index].as_int;
return best_index;
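
The placeholder comment added here (// best_index = x->mv_best_ref_index[ref_frame];) hints at a possible follow-up: once mv_pred() has recorded the winning index, pick_best_mv_ref() could simply look it up instead of rescoring the candidates. The sketch below is speculative and uses simplified stand-in types; it is not what this commit implements:

#include <stdint.h>

/* Simplified stand-in types (illustration only). */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

enum { MAX_REF_FRAMES = 4 };

typedef struct {
  int mv_best_ref_index[MAX_REF_FRAMES];   /* filled earlier by mv_pred() */
} Macroblock;

/* Possible future form hinted at by the placeholder: reuse the recorded
 * index rather than scoring the candidate list a second time. */
unsigned int pick_best_mv_ref_sketch(const Macroblock *x, int ref_frame,
                                     const int_mv *mv_ref_list,
                                     int_mv *best_ref) {
  unsigned int best_index = (unsigned int)x->mv_best_ref_index[ref_frame];
  best_ref->as_int = mv_ref_list[best_index].as_int;
  return best_index;
}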

View file

@@ -668,7 +668,6 @@ void vp9_set_speed_features(VP9_COMP *cpi) {
sf->first_step = 0;
sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
sf->improved_mv_pred = 1;
// default thresholds to 0
for (i = 0; i < MAX_MODES; i++)

View file

@@ -359,7 +359,6 @@ typedef struct {
int first_step;
int optimize_coefficients;
int no_skip_block4x4_search;
int improved_mv_pred;
int search_best_filter;
} SPEED_FEATURES;

View file

@@ -2916,7 +2916,7 @@ static int rd_pick_best_mbsegmentation(VP9_COMP *cpi, MACROBLOCK *x,
static void mv_pred(VP9_COMP *cpi, MACROBLOCK *x,
unsigned char *ref_y_buffer, int ref_y_stride,
int_mv *mvp, int ref_frame, enum BlockSize block_size ) {
int ref_frame, enum BlockSize block_size ) {
MACROBLOCKD *xd = &x->e_mbd;
MB_MODE_INFO *mbmi = &xd->mode_info_context->mbmi;
int_mv this_mv;
@@ -2956,9 +2956,8 @@ static void mv_pred(VP9_COMP *cpi, MACROBLOCK *x,
}
}
// Return the mv that had the best sad for use in the motion search.
mvp->as_int = mbmi->ref_mvs[ref_frame][best_index].as_int;
clamp_mv2(mvp, xd);
// Note the index of the mv that worked best in the reference list.
x->mv_best_ref_index[ref_frame] = best_index;
}
static void set_i8x8_block_modes(MACROBLOCK *x, int modes[2][4]) {
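
mv_pred() keeps its job of trying the top few candidates with a cheap distortion check, but rather than copying the winner out through the removed mvp pointer it now only records the winning index in the new x->mv_best_ref_index[ref_frame] field; the vector itself is re-read from mbmi->ref_mvs wherever it is needed. A compact sketch of that pattern, with the SAD comparison abstracted into a callback (a stand-in, not the real fn_ptr machinery):

#include <limits.h>
#include <stdint.h>

/* Simplified stand-in types (illustration only). */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

enum { MAX_REF_FRAMES = 4 };

typedef struct {
  int mv_best_ref_index[MAX_REF_FRAMES];   /* new field, filled by mv_pred */
} Macroblock;

/* Stand-in cost callback: the real code compares SADs against the reference
 * plane given by ref_y_buffer / ref_y_stride. */
typedef unsigned int (*candidate_cost_fn)(const int_mv *mv, void *ctx);

/* Score each candidate and remember only the index of the cheapest one;
 * the vector itself stays in the mbmi->ref_mvs list. */
void mv_pred_sketch(Macroblock *x, int ref_frame,
                    const int_mv *ref_mvs, int num_candidates,
                    candidate_cost_fn cost, void *ctx) {
  unsigned int best_cost = UINT_MAX;
  int best_index = 0;
  int i;
  for (i = 0; i < num_candidates; ++i) {
    unsigned int c = cost(&ref_mvs[i], ctx);
    if (c < best_cost) {
      best_cost = c;
      best_index = i;
    }
  }
  /* Note the index of the mv that worked best in the reference list. */
  x->mv_best_ref_index[ref_frame] = best_index;
}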
@@ -3140,9 +3139,6 @@ static void store_coding_context(MACROBLOCK *x, PICK_MODE_CONTEXT *ctx,
ctx->best_ref_mv.as_int = ref_mv->as_int;
ctx->second_best_ref_mv.as_int = second_ref_mv->as_int;
// ctx[mb_index].rddiv = x->rddiv;
// ctx[mb_index].rdmult = x->rdmult;
ctx->single_pred_diff = comp_pred_diff[SINGLE_PREDICTION_ONLY];
ctx->comp_pred_diff = comp_pred_diff[COMP_PREDICTION_ONLY];
ctx->hybrid_pred_diff = comp_pred_diff[HYBRID_PREDICTION];
@@ -3185,8 +3181,6 @@ static void setup_buffer_inter(VP9_COMP *cpi, MACROBLOCK *x,
int recon_yoffset, int recon_uvoffset,
int_mv frame_nearest_mv[MAX_REF_FRAMES],
int_mv frame_near_mv[MAX_REF_FRAMES],
int_mv frame_best_ref_mv[MAX_REF_FRAMES],
int_mv mv_search_ref[MAX_REF_FRAMES],
int frame_mdcounts[4][4],
unsigned char *y_buffer[4],
unsigned char *u_buffer[4],
@@ -3210,7 +3204,6 @@ static void setup_buffer_inter(VP9_COMP *cpi, MACROBLOCK *x,
vp9_find_best_ref_mvs(xd, y_buffer[frame_type],
yv12->y_stride,
mbmi->ref_mvs[frame_type],
&frame_best_ref_mv[frame_type],
&frame_nearest_mv[frame_type],
&frame_near_mv[frame_type]);
@@ -3218,7 +3211,7 @@ static void setup_buffer_inter(VP9_COMP *cpi, MACROBLOCK *x,
// Further refinement that is encode side only to test the top few candidates
// in full and choose the best as the centre point for subsequent searches.
mv_pred(cpi, x, y_buffer[frame_type], yv12->y_stride,
&mv_search_ref[frame_type], frame_type, block_size);
frame_type, block_size);
#if CONFIG_NEW_MVREF
// TODO(paulwilkins): Final choice of which of the best 4 candidates from
@@ -3240,9 +3233,8 @@ static int64_t handle_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
int *rate_uv, int *distortion_uv,
int *mode_excluded, int *disable_skip,
int recon_yoffset, int mode_index,
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES],
int_mv frame_best_ref_mv[MAX_REF_FRAMES],
int_mv mv_search_ref[MAX_REF_FRAMES]) {
int_mv frame_mv[MB_MODE_COUNT]
[MAX_REF_FRAMES]) {
VP9_COMMON *cm = &cpi->common;
MACROBLOCKD *xd = &x->e_mbd;
MB_MODE_INFO *mbmi = &xd->mode_info_context->mbmi;
@@ -3258,20 +3250,25 @@ static int64_t handle_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
int refs[2] = { mbmi->ref_frame,
(mbmi->second_ref_frame < 0 ? 0 : mbmi->second_ref_frame) };
int_mv cur_mv[2];
int_mv ref_mv[2];
int64_t this_rd = 0;
switch (this_mode) {
case NEWMV:
ref_mv[0] = mbmi->ref_mvs[refs[0]][0];
ref_mv[1] = mbmi->ref_mvs[refs[1]][0];
// ref_mv[0] = mbmi->ref_mvs[refs[0]][x->mv_best_ref_index[refs[0]]];
// ref_mv[1] = mbmi->ref_mvs[refs[1]][x->mv_best_ref_index[refs[1]]];
if (is_comp_pred) {
if (frame_mv[NEWMV][refs[0]].as_int == INVALID_MV ||
frame_mv[NEWMV][refs[1]].as_int == INVALID_MV)
return INT64_MAX;
*rate2 += vp9_mv_bit_cost(&frame_mv[NEWMV][refs[0]],
&frame_best_ref_mv[refs[0]],
&ref_mv[0],
x->nmvjointcost, x->mvcost, 96,
x->e_mbd.allow_high_precision_mv);
*rate2 += vp9_mv_bit_cost(&frame_mv[NEWMV][refs[1]],
&frame_best_ref_mv[refs[1]],
&ref_mv[1],
x->nmvjointcost, x->mvcost, 96,
x->e_mbd.allow_high_precision_mv);
} else {
@@ -3286,10 +3283,15 @@ static int64_t handle_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
int tmp_row_min = x->mv_row_min;
int tmp_row_max = x->mv_row_max;
vp9_clamp_mv_min_max(x, &frame_best_ref_mv[refs[0]]);
vp9_clamp_mv_min_max(x, &ref_mv[0]);
mvp_full.as_mv.col = mv_search_ref[mbmi->ref_frame].as_mv.col >> 3;
mvp_full.as_mv.row = mv_search_ref[mbmi->ref_frame].as_mv.row >> 3;
mvp_full.as_int =
mbmi->ref_mvs[refs[0]][x->mv_best_ref_index[refs[0]]].as_int;
mvp_full.as_mv.col >>= 3;
mvp_full.as_mv.row >>= 3;
if (mvp_full.as_int != mvp_full.as_int) {
mvp_full.as_int = mvp_full.as_int;
}
// adjust search range according to sr from mv prediction
step_param = MAX(step_param, sr);
@@ -3300,7 +3302,7 @@ static int64_t handle_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
bestsme = vp9_full_pixel_diamond(cpi, x, b, d, &mvp_full, step_param,
sadpb, further_steps, 1,
&cpi->fn_ptr[block_size],
&frame_best_ref_mv[refs[0]], &tmp_mv);
&ref_mv[0], &tmp_mv);
x->mv_col_min = tmp_col_min;
x->mv_col_max = tmp_col_max;
@@ -3311,7 +3313,7 @@ static int64_t handle_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
int dis; /* TODO: use dis in distortion calculation later. */
unsigned int sse;
cpi->find_fractional_mv_step(x, b, d, &tmp_mv,
&frame_best_ref_mv[refs[0]],
&ref_mv[0],
x->errorperbit,
&cpi->fn_ptr[block_size],
x->nmvjointcost, x->mvcost,
@@ -3321,7 +3323,7 @@ static int64_t handle_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
frame_mv[NEWMV][refs[0]].as_int = d->bmi.as_mv.first.as_int;
// Add the new motion vector cost to our rolling cost variable
*rate2 += vp9_mv_bit_cost(&tmp_mv, &frame_best_ref_mv[refs[0]],
*rate2 += vp9_mv_bit_cost(&tmp_mv, &ref_mv[0],
x->nmvjointcost, x->mvcost,
96, xd->allow_high_precision_mv);
}
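
handle_inter_mode() loses the frame_best_ref_mv and mv_search_ref arrays and rebuilds what it needs locally: ref_mv[i] comes from mbmi->ref_mvs[refs[i]][0] for rate costing, while the full-pixel search centre is the candidate indexed by x->mv_best_ref_index, shifted from 1/8-pel to full-pel units. A reduced sketch of just that setup step; the struct layouts and the helper name are placeholders:

#include <stdint.h>

/* Simplified stand-in types (illustration only). */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

enum { MAX_REF_FRAMES = 4, MAX_MV_REFS = 4 };

typedef struct {
  int ref_frame;
  int second_ref_frame;                    /* < 0 means single prediction */
  int_mv ref_mvs[MAX_REF_FRAMES][MAX_MV_REFS];
} MbModeInfo;

typedef struct {
  int mv_best_ref_index[MAX_REF_FRAMES];
} Macroblock;

/* Rebuild the rate-costing references and the full-pixel search centre on
 * the spot instead of threading pre-built per-frame arrays through calls. */
void setup_newmv_search(const Macroblock *x, const MbModeInfo *mbmi,
                        int_mv ref_mv[2], int_mv *mvp_full) {
  const int refs[2] = { mbmi->ref_frame,
                        mbmi->second_ref_frame < 0 ? 0
                                                   : mbmi->second_ref_frame };
  ref_mv[0] = mbmi->ref_mvs[refs[0]][0];
  ref_mv[1] = mbmi->ref_mvs[refs[1]][0];

  /* Centre the diamond search on the candidate mv_pred() preferred,
   * converted from 1/8-pel to full-pel units. */
  *mvp_full = mbmi->ref_mvs[refs[0]][x->mv_best_ref_index[refs[0]]];
  mvp_full->as_mv.col >>= 3;
  mvp_full->as_mv.row >>= 3;
}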
@@ -3554,8 +3556,6 @@ static void rd_pick_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
int saddone = 0;
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES];
int_mv frame_best_ref_mv[MAX_REF_FRAMES];
int_mv mv_search_ref[MAX_REF_FRAMES];
int frame_mdcounts[4][4];
unsigned char *y_buffer[4], *u_buffer[4], *v_buffer[4];
@@ -3586,25 +3586,22 @@ static void rd_pick_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
if (cpi->ref_frame_flags & VP9_LAST_FLAG) {
setup_buffer_inter(cpi, x, cpi->common.lst_fb_idx, LAST_FRAME,
BLOCK_16X16, recon_yoffset, recon_uvoffset,
frame_mv[NEARESTMV], frame_mv[NEARMV], frame_best_ref_mv,
mv_search_ref, frame_mdcounts,
y_buffer, u_buffer, v_buffer);
frame_mv[NEARESTMV], frame_mv[NEARMV],
frame_mdcounts, y_buffer, u_buffer, v_buffer);
}
if (cpi->ref_frame_flags & VP9_GOLD_FLAG) {
setup_buffer_inter(cpi, x, cpi->common.gld_fb_idx, GOLDEN_FRAME,
BLOCK_16X16, recon_yoffset, recon_uvoffset,
frame_mv[NEARESTMV], frame_mv[NEARMV], frame_best_ref_mv,
mv_search_ref, frame_mdcounts,
y_buffer, u_buffer, v_buffer);
frame_mv[NEARESTMV], frame_mv[NEARMV],
frame_mdcounts, y_buffer, u_buffer, v_buffer);
}
if (cpi->ref_frame_flags & VP9_ALT_FLAG) {
setup_buffer_inter(cpi, x, cpi->common.alt_fb_idx, ALTREF_FRAME,
BLOCK_16X16, recon_yoffset, recon_uvoffset,
frame_mv[NEARESTMV], frame_mv[NEARMV], frame_best_ref_mv,
mv_search_ref, frame_mdcounts,
y_buffer, u_buffer, v_buffer);
frame_mv[NEARESTMV], frame_mv[NEARMV],
frame_mdcounts, y_buffer, u_buffer, v_buffer);
}
*returnintra = INT64_MAX;
@@ -3720,7 +3717,7 @@ static void rd_pick_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
xd->pre.y_buffer = y_buffer[ref];
xd->pre.u_buffer = u_buffer[ref];
xd->pre.v_buffer = v_buffer[ref];
best_ref_mv = frame_best_ref_mv[ref];
best_ref_mv = mbmi->ref_mvs[ref][0];
vpx_memcpy(mdcounts, frame_mdcounts[ref], sizeof(mdcounts));
}
@@ -3730,7 +3727,7 @@ static void rd_pick_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
xd->second_pre.y_buffer = y_buffer[ref];
xd->second_pre.u_buffer = u_buffer[ref];
xd->second_pre.v_buffer = v_buffer[ref];
second_best_ref_mv = frame_best_ref_mv[ref];
second_best_ref_mv = mbmi->ref_mvs[ref][0];
}
// Experimental code. Special case for gf and arf zeromv modes.
@@ -3982,8 +3979,7 @@ static void rd_pick_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
&rate_y, &distortion,
&rate_uv, &distortion_uv,
&mode_excluded, &disable_skip, recon_yoffset,
mode_index, frame_mv, frame_best_ref_mv,
mv_search_ref);
mode_index, frame_mv);
if (this_rd == INT64_MAX)
continue;
}
@@ -4299,9 +4295,9 @@ static void rd_pick_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
end:
store_coding_context(
x, &x->mb_context[xd->mb_index], best_mode_index, &best_partition,
&frame_best_ref_mv[xd->mode_info_context->mbmi.ref_frame],
&frame_best_ref_mv[xd->mode_info_context->mbmi.second_ref_frame < 0 ?
0 : xd->mode_info_context->mbmi.second_ref_frame],
&mbmi->ref_mvs[mbmi->ref_frame][0],
&mbmi->ref_mvs[mbmi->second_ref_frame < 0
? 0 : mbmi->second_ref_frame][0],
best_pred_diff, best_txfm_diff);
}
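
Both RD pickers now hand store_coding_context() pointers directly into mbmi->ref_mvs, folding the compound-prediction guard into the index (a second_ref_frame below zero falls back to list 0). A tiny sketch of that selection, again with simplified stand-in types:

#include <stdint.h>

/* Simplified stand-in types (illustration only). */
typedef struct { int16_t row, col; } MV;
typedef union { uint32_t as_int; MV as_mv; } int_mv;

enum { MAX_REF_FRAMES = 4, MAX_MV_REFS = 4 };

typedef struct {
  int ref_frame;
  int second_ref_frame;                    /* < 0 means single prediction */
  int_mv ref_mvs[MAX_REF_FRAMES][MAX_MV_REFS];
} MbModeInfo;

typedef struct { int_mv best_ref_mv, second_best_ref_mv; } ModeContext;

/* Record the chosen reference mvs straight from the candidate lists; a
 * missing second reference simply falls back to list 0, as in the diff. */
void store_ref_mvs(ModeContext *ctx, const MbModeInfo *mbmi) {
  const int second = mbmi->second_ref_frame < 0 ? 0 : mbmi->second_ref_frame;
  ctx->best_ref_mv = mbmi->ref_mvs[mbmi->ref_frame][0];
  ctx->second_best_ref_mv = mbmi->ref_mvs[second][0];
}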
@@ -4504,8 +4500,6 @@ int64_t vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
unsigned char segment_id = xd->mode_info_context->mbmi.segment_id;
int comp_pred, i;
int_mv frame_mv[MB_MODE_COUNT][MAX_REF_FRAMES];
int_mv frame_best_ref_mv[MAX_REF_FRAMES];
int_mv mv_search_ref[MAX_REF_FRAMES];
int frame_mdcounts[4][4];
unsigned char *y_buffer[4];
unsigned char *u_buffer[4];
@@ -4557,8 +4551,8 @@ int64_t vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
if (cpi->ref_frame_flags & flag_list[ref_frame]) {
setup_buffer_inter(cpi, x, idx_list[ref_frame], ref_frame, BLOCK_32X32,
recon_yoffset, recon_uvoffset, frame_mv[NEARESTMV],
frame_mv[NEARMV], frame_best_ref_mv, mv_search_ref,
frame_mdcounts, y_buffer, u_buffer, v_buffer);
frame_mv[NEARMV], frame_mdcounts,
y_buffer, u_buffer, v_buffer);
}
frame_mv[NEWMV][ref_frame].as_int = INVALID_MV;
frame_mv[ZEROMV][ref_frame].as_int = 0;
@@ -4750,8 +4744,7 @@ int64_t vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
&rate_y, &distortion_y,
&rate_uv, &distortion_uv,
&mode_excluded, &disable_skip, recon_yoffset,
mode_index, frame_mv, frame_best_ref_mv,
mv_search_ref);
mode_index, frame_mv);
if (this_rd == INT64_MAX)
continue;
}
@@ -4996,9 +4989,9 @@ int64_t vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
end:
store_coding_context(x, &x->sb_context[0], best_mode_index, NULL,
&frame_best_ref_mv[mbmi->ref_frame],
&frame_best_ref_mv[mbmi->second_ref_frame < 0 ?
0 : mbmi->second_ref_frame],
&mbmi->ref_mvs[mbmi->ref_frame][0],
&mbmi->ref_mvs[mbmi->second_ref_frame < 0
? 0 : mbmi->second_ref_frame][0],
best_pred_diff, best_txfm_diff);
return best_rd;