Merge remote branch 'origin/master' into experimental
Change-Id: I6d6692418eecf54e23e00a08394b0b37d6e7682b
Commit 2fa7fe66c4
@@ -70,9 +70,9 @@ int vp8_alloc_frame_buffers(VP8_COMMON *oci, int width, int height)
     for (i = 0; i < NUM_YV12_BUFFERS; i++)
     {
-        oci->fb_idx_ref_cnt[0] = 0;
-
-        if (vp8_yv12_alloc_frame_buffer(&oci->yv12_fb[i], width, height, VP8BORDERINPIXELS) < 0)
+        oci->fb_idx_ref_cnt[0] = 0;
+        oci->yv12_fb[i].flags = 0;
+        if (vp8_yv12_alloc_frame_buffer(&oci->yv12_fb[i], width, height, VP8BORDERINPIXELS) < 0)
         {
             vp8_de_alloc_frame_buffers(oci);
             return 1;
@@ -33,11 +33,11 @@ typedef enum
     SUBMVREF_LEFT_ABOVE_ZED
 } sumvfref_t;
 
-int vp8_mv_cont(const MV *l, const MV *a)
+int vp8_mv_cont(const int_mv *l, const int_mv *a)
 {
-    int lez = (l->row == 0 && l->col == 0);
-    int aez = (a->row == 0 && a->col == 0);
-    int lea = (l->row == a->row && l->col == a->col);
+    int lez = (l->as_int == 0);
+    int aez = (a->as_int == 0);
+    int lea = (l->as_int == a->as_int);
 
     if (lea && lez)
         return SUBMVREF_LEFT_ABOVE_ZED;
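Note (illustrative, not part of this change): vp8_mv_cont now takes int_mv rather than MV because int_mv overlays the two 16-bit motion-vector components with a single 32-bit word, so one integer compare replaces the per-component row/col compares above. A minimal standalone sketch of that idea, with stand-in type names rather than the project's own headers:

    #include <stdio.h>

    /* Stand-ins for libvpx's MV / int_mv pair (illustrative only). */
    typedef struct { short row, col; } mv_t;
    typedef union  { unsigned int as_int; mv_t as_mv; } packed_mv_t;

    /* A single 32-bit compare answers "is this the zero vector?" and
     * "are these two vectors identical?" in one operation.           */
    static int is_zero(const packed_mv_t *m) { return m->as_int == 0; }
    static int same(const packed_mv_t *a, const packed_mv_t *b) { return a->as_int == b->as_int; }

    int main(void)
    {
        packed_mv_t l = { 0 }, a = { 0 };
        l.as_mv.row = 4;  l.as_mv.col = -2;
        a.as_mv.row = 4;  a.as_mv.col = -2;
        printf("zero=%d same=%d\n", is_zero(&l), same(&l, &a));
        return 0;
    }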
@@ -25,7 +25,7 @@ extern const int vp8_mbsplit_count [VP8_NUMMBSPLITS]; /* # of subsets */
 
 extern const vp8_prob vp8_mbsplit_probs [VP8_NUMMBSPLITS-1];
 
-extern int vp8_mv_cont(const MV *l, const MV *a);
+extern int vp8_mv_cont(const int_mv *l, const int_mv *a);
 #define SUBMVREF_COUNT 5
 extern const vp8_prob vp8_sub_mv_ref_prob2 [SUBMVREF_COUNT][VP8_SUBMVREFS-1];
@@ -362,7 +362,7 @@ static void read_mb_modes_mv(VP8D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi,
 
                     leftmv.as_int = left_block_mv(mi, k);
                     abovemv.as_int = above_block_mv(mi, k, mis);
-                    mv_contz = vp8_mv_cont(&(leftmv.as_mv), &(abovemv.as_mv));
+                    mv_contz = vp8_mv_cont(&leftmv, &abovemv);
 
                     switch (bmi.mode = (B_PREDICTION_MODE) sub_mv_ref(bc, vp8_sub_mv_ref_prob2 [mv_contz])) /*pc->fc.sub_mv_ref_prob))*/
                     {
@@ -1020,7 +1020,7 @@ static void pack_inter_mode_mvs(VP8_COMP *const cpi)
                         assert(0);
                     leftmv.as_int = left_block_mv(m, k);
                     abovemv.as_int = above_block_mv(m, k, mis);
-                    mv_contz = vp8_mv_cont(&(leftmv.as_mv), &(abovemv.as_mv));
+                    mv_contz = vp8_mv_cont(&leftmv, &abovemv);
 
                     write_sub_mv_ref(w, b->mode, vp8_sub_mv_ref_prob2 [mv_contz]); //pc->fc.sub_mv_ref_prob);
@@ -814,7 +814,7 @@ int vp8_find_best_half_pixel_step(MACROBLOCK *mb, BLOCK *b, BLOCKD *d,
     {\
         if (thissad < bestsad)\
         {\
-            thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);\
+            thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);\
             if (thissad < bestsad)\
             {\
                 bestsad = thissad;\
@@ -841,7 +841,7 @@ int vp8_hex_search
     int_mv *ref_mv,
     int_mv *best_mv,
     int search_param,
-    int error_per_bit,
+    int sad_per_bit,
     int *num00,
     const vp8_variance_fn_ptr_t *vfp,
     int *mvsadcost[2],
@@ -875,7 +875,7 @@ int vp8_hex_search
     this_offset = base_offset + (br * (d->pre_stride)) + bc;
     this_mv.as_mv.row = br;
     this_mv.as_mv.col = bc;
-    bestsad = vfp->sdf( what, what_stride, this_offset, in_what_stride, 0x7fffffff) + mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+    bestsad = vfp->sdf( what, what_stride, this_offset, in_what_stride, 0x7fffffff) + mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
     // hex search
     //j=0
@@ -998,7 +998,7 @@ cal_neighbors:
     this_mv.as_mv.col = bc<<3;
 
     this_offset = (unsigned char *)(*(d->base_pre) + d->pre + (br * (in_what_stride)) + bc);
-    return vfp->vf(what, what_stride, this_offset, in_what_stride, &bestsad) + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit) ;
+    return vfp->vf(what, what_stride, this_offset, in_what_stride, &bestsad) + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit) ;
 }
 #undef CHECK_BOUNDS
 #undef CHECK_POINT
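Note (illustrative, not part of this change): across these motion-search hunks the single error_per_bit argument is split in two. The SAD-domain search stages now weight the motion-vector rate with the sad_per_bit parameter (via mvsad_err_cost), while the final variance-based return value uses x->errorperbit taken from the MACROBLOCK (via mv_err_cost). A rough, self-contained sketch of that two-weight shape, assuming the usual "table rate scaled by a per-bit multiplier" cost model; the names and rounding below are illustrative, not libvpx's exact code:

    /* rate_q8 stands in for the mvcost[]/mvsadcost[] table lookup,
     * expressed in 1/256th-of-a-bit units (an assumption here).     */
    static int weighted_mv_cost(int rate_q8, int per_bit)
    {
        return (rate_q8 * per_bit + 128) >> 8;   /* scale and round */
    }

    /* SAD stage (integer-pel search):  cost = sad + weighted_mv_cost(rate_q8, sad_per_bit);
     * variance stage (final decision): cost = var + weighted_mv_cost(rate_q8, errorperbit); */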
@@ -1012,7 +1012,7 @@ int vp8_diamond_search_sad
     int_mv *ref_mv,
     int_mv *best_mv,
     int search_param,
-    int error_per_bit,
+    int sad_per_bit,
     int *num00,
     vp8_variance_fn_ptr_t *fn_ptr,
     int *mvcost[2],
@@ -1062,7 +1062,7 @@ int vp8_diamond_search_sad
         (ref_row > x->mv_row_min) && (ref_row < x->mv_row_max))
     {
         // Check the starting position
-        bestsad = fn_ptr->sdf(what, what_stride, in_what, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, error_per_bit);
+        bestsad = fn_ptr->sdf(what, what_stride, in_what, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, sad_per_bit);
     }
 
     // search_param determines the length of the initial step and hence the number of iterations
@@ -1091,7 +1091,7 @@ int vp8_diamond_search_sad
             {
                 this_mv.as_mv.row = this_row_offset;
                 this_mv.as_mv.col = this_col_offset;
-                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                 if (thissad < bestsad)
                 {
@@ -1122,7 +1122,7 @@ int vp8_diamond_search_sad
         return INT_MAX;
 
     return fn_ptr->vf(what, what_stride, best_address, in_what_stride, (unsigned int *)(&thissad))
-        + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit);
+        + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit);
 }
 
 int vp8_diamond_search_sadx4
@@ -1133,7 +1133,7 @@ int vp8_diamond_search_sadx4
     int_mv *ref_mv,
     int_mv *best_mv,
     int search_param,
-    int error_per_bit,
+    int sad_per_bit,
     int *num00,
     vp8_variance_fn_ptr_t *fn_ptr,
     int *mvcost[2],
@@ -1182,7 +1182,7 @@ int vp8_diamond_search_sadx4
         (ref_row > x->mv_row_min) && (ref_row < x->mv_row_max))
     {
         // Check the starting position
-        bestsad = fn_ptr->sdf(what, what_stride, in_what, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, error_per_bit);
+        bestsad = fn_ptr->sdf(what, what_stride, in_what, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, sad_per_bit);
     }
 
     // search_param determines the length of the initial step and hence the number of iterations
@@ -1222,7 +1222,7 @@ int vp8_diamond_search_sadx4
                 {
                     this_mv.as_mv.row = best_mv->as_mv.row + ss[i].mv.row;
                     this_mv.as_mv.col = best_mv->as_mv.col + ss[i].mv.col;
-                    sad_array[t] += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                    sad_array[t] += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                     if (sad_array[t] < bestsad)
                     {
@@ -1251,7 +1251,7 @@ int vp8_diamond_search_sadx4
             {
                 this_mv.as_mv.row = this_row_offset;
                 this_mv.as_mv.col = this_col_offset;
-                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                 if (thissad < bestsad)
                 {
@@ -1282,11 +1282,11 @@ int vp8_diamond_search_sadx4
         return INT_MAX;
 
     return fn_ptr->vf(what, what_stride, best_address, in_what_stride, (unsigned int *)(&thissad))
-        + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit);
+        + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit);
 }
 
 int vp8_full_search_sad(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
-                        int error_per_bit, int distance,
+                        int sad_per_bit, int distance,
                         vp8_variance_fn_ptr_t *fn_ptr, int *mvcost[2],
                         int_mv *center_mv)
 {
@@ -1331,7 +1331,7 @@ int vp8_full_search_sad(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
         // Baseline value at the centre
 
         //bestsad = fn_ptr->sf( what,what_stride,bestaddress,in_what_stride) + (int)sqrt(mv_err_cost(ref_mv,ref_mv, mvcost,error_per_bit*14));
-        bestsad = fn_ptr->sdf(what, what_stride, bestaddress, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, error_per_bit);
+        bestsad = fn_ptr->sdf(what, what_stride, bestaddress, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, sad_per_bit);
     }
 
     // Apply further limits to prevent us looking using vectors that stretch beyiond the UMV border
@@ -1357,7 +1357,7 @@ int vp8_full_search_sad(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
             thissad = fn_ptr->sdf(what, what_stride, check_here , in_what_stride, bestsad);
 
             this_mv.as_mv.col = c;
-            thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+            thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
             if (thissad < bestsad)
             {
@@ -1376,13 +1376,13 @@ int vp8_full_search_sad(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
 
     if (bestsad < INT_MAX)
         return fn_ptr->vf(what, what_stride, bestaddress, in_what_stride, (unsigned int *)(&thissad))
-            + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit);
+            + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit);
     else
         return INT_MAX;
 }
 
 int vp8_full_search_sadx3(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
-                          int error_per_bit, int distance,
+                          int sad_per_bit, int distance,
                           vp8_variance_fn_ptr_t *fn_ptr, int *mvcost[2],
                           int_mv *center_mv)
 {
@@ -1427,7 +1427,7 @@ int vp8_full_search_sadx3(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
         (ref_row > x->mv_row_min) && (ref_row < x->mv_row_max))
     {
         // Baseline value at the centre
-        bestsad = fn_ptr->sdf(what, what_stride, bestaddress, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, error_per_bit);
+        bestsad = fn_ptr->sdf(what, what_stride, bestaddress, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, sad_per_bit);
     }
 
     // Apply further limits to prevent us looking using vectors that stretch beyiond the UMV border
@@ -1462,7 +1462,7 @@ int vp8_full_search_sadx3(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
             if (thissad < bestsad)
             {
                 this_mv.as_mv.col = c;
-                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                 if (thissad < bestsad)
                 {
@@ -1485,7 +1485,7 @@ int vp8_full_search_sadx3(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
             if (thissad < bestsad)
             {
                 this_mv.as_mv.col = c;
-                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                 if (thissad < bestsad)
                 {
@@ -1507,13 +1507,13 @@ int vp8_full_search_sadx3(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
 
     if (bestsad < INT_MAX)
         return fn_ptr->vf(what, what_stride, bestaddress, in_what_stride, (unsigned int *)(&thissad))
-            + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit);
+            + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit);
     else
         return INT_MAX;
 }
 
 int vp8_full_search_sadx8(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
-                          int error_per_bit, int distance,
+                          int sad_per_bit, int distance,
                           vp8_variance_fn_ptr_t *fn_ptr, int *mvcost[2],
                           int_mv *center_mv)
 {
@@ -1559,7 +1559,7 @@ int vp8_full_search_sadx8(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
         (ref_row > x->mv_row_min) && (ref_row < x->mv_row_max))
     {
         // Baseline value at the centre
-        bestsad = fn_ptr->sdf(what, what_stride, bestaddress, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, error_per_bit);
+        bestsad = fn_ptr->sdf(what, what_stride, bestaddress, in_what_stride, 0x7fffffff) + mvsad_err_cost(best_mv, &fcenter_mv, mvsadcost, sad_per_bit);
     }
 
     // Apply further limits to prevent us looking using vectors that stretch beyiond the UMV border
@@ -1594,7 +1594,7 @@ int vp8_full_search_sadx8(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
             if (thissad < bestsad)
             {
                 this_mv.as_mv.col = c;
-                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                 if (thissad < bestsad)
                 {
@@ -1623,7 +1623,7 @@ int vp8_full_search_sadx8(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
             if (thissad < bestsad)
             {
                 this_mv.as_mv.col = c;
-                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                 if (thissad < bestsad)
                 {
@@ -1646,7 +1646,7 @@ int vp8_full_search_sadx8(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
             if (thissad < bestsad)
             {
                 this_mv.as_mv.col = c;
-                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, error_per_bit);
+                thissad += mvsad_err_cost(&this_mv, &fcenter_mv, mvsadcost, sad_per_bit);
 
                 if (thissad < bestsad)
                 {
@@ -1667,7 +1667,7 @@ int vp8_full_search_sadx8(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
 
     if (bestsad < INT_MAX)
         return fn_ptr->vf(what, what_stride, bestaddress, in_what_stride, (unsigned int *)(&thissad))
-            + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit);
+            + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit);
     else
         return INT_MAX;
 }
@@ -1744,7 +1744,7 @@ int vp8_refining_search_sad(MACROBLOCK *x, BLOCK *b, BLOCKD *d, int_mv *ref_mv,
 
     if (bestsad < INT_MAX)
         return fn_ptr->vf(what, what_stride, best_address, in_what_stride, (unsigned int *)(&thissad))
-            + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit);
+            + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit);
     else
         return INT_MAX;
 }
@@ -1857,7 +1857,7 @@ int vp8_refining_search_sadx4(MACROBLOCK *x, BLOCK *b, BLOCKD *d,
 
     if (bestsad < INT_MAX)
         return fn_ptr->vf(what, what_stride, best_address, in_what_stride, (unsigned int *)(&thissad))
-            + mv_err_cost(&this_mv, center_mv, mvcost, error_per_bit);
+            + mv_err_cost(&this_mv, center_mv, mvcost, x->errorperbit);
     else
         return INT_MAX;
 }
@@ -63,7 +63,7 @@ extern fractional_mv_step_fp vp8_skip_fractional_mv_step;
     BLOCK *b, \
     BLOCKD *d, \
     int_mv *ref_mv, \
-    int error_per_bit, \
+    int sad_per_bit, \
     int distance, \
     vp8_variance_fn_ptr_t *fn_ptr, \
     int *mvcost[2], \
@@ -77,7 +77,7 @@ extern fractional_mv_step_fp vp8_skip_fractional_mv_step;
     BLOCK *b, \
     BLOCKD *d, \
     int_mv *ref_mv, \
-    int error_per_bit, \
+    int sad_per_bit, \
     int distance, \
     vp8_variance_fn_ptr_t *fn_ptr, \
     int *mvcost[2], \
@@ -93,7 +93,7 @@ extern fractional_mv_step_fp vp8_skip_fractional_mv_step;
     int_mv *ref_mv, \
     int_mv *best_mv, \
     int search_param, \
-    int error_per_bit, \
+    int sad_per_bit, \
     int *num00, \
     vp8_variance_fn_ptr_t *fn_ptr, \
     int *mvcost[2], \
@@ -2720,13 +2720,10 @@ static void resize_key_frame(VP8_COMP *cpi)
 }
 
 
-static void update_alt_ref_frame_and_stats(VP8_COMP *cpi)
+static void update_alt_ref_frame_stats(VP8_COMP *cpi)
 {
     VP8_COMMON *cm = &cpi->common;
 
-    // Update the golden frame buffer
-    vp8_yv12_copy_frame_ptr(cm->frame_to_show, &cm->yv12_fb[cm->alt_fb_idx]);
-
     // Select an interval before next GF or altref
     if (!cpi->auto_gold)
         cpi->frames_till_gf_update_due = cpi->goldfreq;
@@ -2759,19 +2756,13 @@ static void update_alt_ref_frame_and_stats(VP8_COMP *cpi)
 
 
 }
-static void update_golden_frame_and_stats(VP8_COMP *cpi)
+static void update_golden_frame_stats(VP8_COMP *cpi)
 {
     VP8_COMMON *cm = &cpi->common;
 
-    // Update the Golden frame reconstruction buffer if signalled and the GF usage counts.
+    // Update the Golden frame usage counts.
     if (cm->refresh_golden_frame)
     {
-        if (cm->frame_type != KEY_FRAME)
-        {
-            // Update the golden frame buffer
-            vp8_yv12_copy_frame_ptr(cm->frame_to_show, &cm->yv12_fb[cm->gld_fb_idx]);
-        }
-
         // Select an interval before next GF
         if (!cpi->auto_gold)
             cpi->frames_till_gf_update_due = cpi->goldfreq;
@@ -3157,6 +3148,85 @@ static BOOL recode_loop_test( VP8_COMP *cpi,
     return force_recode;
 }
 
+void update_reference_frames(VP8_COMMON *cm)
+{
+    YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
+
+    // At this point the new frame has been encoded.
+    // If any buffer copy / swapping is signaled it should be done here.
+
+    if (cm->frame_type == KEY_FRAME)
+    {
+        yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FLAG | VP8_ALT_FLAG ;
+
+        yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
+        yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
+
+        cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
+    }
+    else /* For non key frames */
+    {
+        if (cm->refresh_alt_ref_frame)
+        {
+            assert(!cm->copy_buffer_to_arf);
+
+            cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALT_FLAG;
+            cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
+            cm->alt_fb_idx = cm->new_fb_idx;
+        }
+        else if (cm->copy_buffer_to_arf)
+        {
+            assert(!(cm->copy_buffer_to_arf & ~0x3));
+
+            if (cm->copy_buffer_to_arf == 1)
+            {
+                yv12_fb[cm->lst_fb_idx].flags |= VP8_ALT_FLAG;
+                yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
+                cm->alt_fb_idx = cm->lst_fb_idx;
+            }
+            else /* if (cm->copy_buffer_to_arf == 2) */
+            {
+                yv12_fb[cm->gld_fb_idx].flags |= VP8_ALT_FLAG;
+                yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
+                cm->alt_fb_idx = cm->gld_fb_idx;
+            }
+        }
+
+        if (cm->refresh_golden_frame)
+        {
+            assert(!cm->copy_buffer_to_gf);
+
+            cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FLAG;
+            cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
+            cm->gld_fb_idx = cm->new_fb_idx;
+        }
+        else if (cm->copy_buffer_to_gf)
+        {
+            assert(!(cm->copy_buffer_to_arf & ~0x3));
+
+            if (cm->copy_buffer_to_gf == 1)
+            {
+                yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FLAG;
+                yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
+                cm->gld_fb_idx = cm->lst_fb_idx;
+            }
+            else /* if (cm->copy_buffer_to_gf == 2) */
+            {
+                yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FLAG;
+                yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
+                cm->gld_fb_idx = cm->alt_fb_idx;
+            }
+        }
+    }
+
+    if (cm->refresh_last_frame)
+    {
+        cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FLAG;
+        cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FLAG;
+        cm->lst_fb_idx = cm->new_fb_idx;
+    }
+}
+
 void loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
 {
     if (cm->no_lpf)
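Note (illustrative, not part of this change): the new update_reference_frames() keeps, in each YV12 buffer's flags word, a record of which reference roles (last / golden / alt-ref) currently point at that buffer, and retargets lst_fb_idx / gld_fb_idx / alt_fb_idx instead of copying pixel data between buffers. A small self-contained sketch of the invariant this bookkeeping maintains -- each role flag set on exactly one buffer, a buffer with flags == 0 being free; the flag values and struct below are assumptions for illustration:

    #include <assert.h>

    #define SKETCH_LAST_FLAG 1
    #define SKETCH_GOLD_FLAG 2
    #define SKETCH_ALT_FLAG  4

    struct sketch_buf { int flags; };

    /* Every reference role should be claimed by exactly one buffer. */
    static void check_reference_invariant(const struct sketch_buf *fb, int n)
    {
        int last = 0, gold = 0, alt = 0, i;

        for (i = 0; i < n; i++)
        {
            last += (fb[i].flags & SKETCH_LAST_FLAG) != 0;
            gold += (fb[i].flags & SKETCH_GOLD_FLAG) != 0;
            alt  += (fb[i].flags & SKETCH_ALT_FLAG)  != 0;
        }

        assert(last == 1 && gold == 1 && alt == 1);
    }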
@@ -3195,50 +3265,6 @@ void loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
 
     vp8_yv12_extend_frame_borders_ptr(cm->frame_to_show);
 
-    {
-        YV12_BUFFER_CONFIG *lst_yv12 = &cm->yv12_fb[cm->lst_fb_idx];
-        YV12_BUFFER_CONFIG *new_yv12 = &cm->yv12_fb[cm->new_fb_idx];
-        YV12_BUFFER_CONFIG *gld_yv12 = &cm->yv12_fb[cm->gld_fb_idx];
-        YV12_BUFFER_CONFIG *alt_yv12 = &cm->yv12_fb[cm->alt_fb_idx];
-        // At this point the new frame has been encoded.
-        // If any buffer copy / swapping is signaled it should be done here.
-        if (cm->frame_type == KEY_FRAME)
-        {
-            vp8_yv12_copy_frame_ptr(cm->frame_to_show, gld_yv12);
-            vp8_yv12_copy_frame_ptr(cm->frame_to_show, alt_yv12);
-        }
-        else // For non key frames
-        {
-            // Code to copy between reference buffers
-            if (cm->copy_buffer_to_arf)
-            {
-                if (cm->copy_buffer_to_arf == 1)
-                {
-                    if (cm->refresh_last_frame)
-                        // We copy new_frame here because last and new buffers will already have been swapped if cm->refresh_last_frame is set.
-                        vp8_yv12_copy_frame_ptr(new_yv12, alt_yv12);
-                    else
-                        vp8_yv12_copy_frame_ptr(lst_yv12, alt_yv12);
-                }
-                else if (cm->copy_buffer_to_arf == 2)
-                    vp8_yv12_copy_frame_ptr(gld_yv12, alt_yv12);
-            }
-
-            if (cm->copy_buffer_to_gf)
-            {
-                if (cm->copy_buffer_to_gf == 1)
-                {
-                    if (cm->refresh_last_frame)
-                        // We copy new_frame here because last and new buffers will already have been swapped if cm->refresh_last_frame is set.
-                        vp8_yv12_copy_frame_ptr(new_yv12, gld_yv12);
-                    else
-                        vp8_yv12_copy_frame_ptr(lst_yv12, gld_yv12);
-                }
-                else if (cm->copy_buffer_to_gf == 2)
-                    vp8_yv12_copy_frame_ptr(alt_yv12, gld_yv12);
-            }
-        }
-    }
 }
 
 static void encode_frame_to_data_rate
@@ -4153,21 +4179,15 @@ static void encode_frame_to_data_rate
     }
 #endif
 
-    // For inter frames the current default behaviour is that when cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
-    // This is purely an encoder descision at present.
+    // For inter frames the current default behavior is that when
+    // cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
+    // This is purely an encoder decision at present.
     if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
         cm->copy_buffer_to_arf = 2;
     else
        cm->copy_buffer_to_arf = 0;
 
-    if (cm->refresh_last_frame)
-    {
-        vp8_swap_yv12_buffer(&cm->yv12_fb[cm->lst_fb_idx], &cm->yv12_fb[cm->new_fb_idx]);
-        cm->frame_to_show = &cm->yv12_fb[cm->lst_fb_idx];
-    }
-    else
-        cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
-
+    cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
 
 #if CONFIG_MULTITHREAD
     if (cpi->b_multi_threaded)
@@ -4180,6 +4200,8 @@ static void encode_frame_to_data_rate
         loopfilter_frame(cpi, cm);
     }
 
+    update_reference_frames(cm);
+
     if (cpi->oxcf.error_resilient_mode == 1)
     {
         cm->refresh_entropy_probs = 0;
@@ -4204,7 +4226,7 @@ static void encode_frame_to_data_rate
 
     /* Move storing frame_type out of the above loop since it is also
      * needed in motion search besides loopfilter */
-    cm->last_frame_type = cm->frame_type;
+    cm->last_frame_type = cm->frame_type;
 
     // Update rate control heuristics
     cpi->total_byte_count += (*size);
@@ -4454,26 +4476,14 @@ static void encode_frame_to_data_rate
     cpi->ref_frame_flags &= ~VP8_ALT_FLAG;
 
 
-    if (cpi->oxcf.error_resilient_mode)
-    {
-        if (cm->frame_type != KEY_FRAME)
-        {
-            // Is this an alternate reference update
-            if (cm->refresh_alt_ref_frame)
-                vp8_yv12_copy_frame_ptr(cm->frame_to_show, &cm->yv12_fb[cm->alt_fb_idx]);
-
-            if (cm->refresh_golden_frame)
-                vp8_yv12_copy_frame_ptr(cm->frame_to_show, &cm->yv12_fb[cm->gld_fb_idx]);
-        }
-    }
-    else
+    if (!cpi->oxcf.error_resilient_mode)
     {
         if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
-            // Update the alternate reference frame and stats as appropriate.
-            update_alt_ref_frame_and_stats(cpi);
+            // Update the alternate reference frame stats as appropriate.
+            update_alt_ref_frame_stats(cpi);
         else
-            // Update the Golden frame and golden frame and stats as appropriate.
-            update_golden_frame_and_stats(cpi);
+            // Update the Golden frame stats as appropriate.
+            update_golden_frame_stats(cpi);
     }
 
     if (cm->frame_type == KEY_FRAME)
@@ -4843,7 +4853,20 @@ int vp8_get_compressed_data(VP8_PTR ptr, unsigned int *frame_flags, unsigned lon
     }
 
 #endif
+    /* find a free buffer for the new frame */
+    {
+        int i = 0;
+        for(; i < NUM_YV12_BUFFERS; i++)
+        {
+            if(!cm->yv12_fb[i].flags)
+            {
+                cm->new_fb_idx = i;
+                break;
+            }
+        }
+
+        assert(i < NUM_YV12_BUFFERS );
+    }
 #if !(CONFIG_REALTIME_ONLY)
 
     if (cpi->pass == 1)
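Note (illustrative, not part of this change): together with update_reference_frames(), this scan gives each frame buffer a simple lifecycle: the encoder picks a buffer whose flags are zero as new_fb_idx, encodes into it, and the later flag/index updates decide which reference roles it takes over. A toy, self-contained version of the free-buffer pick (the pool type and names are illustrative):

    /* Illustrative buffer-pool element; only the flags word matters here. */
    struct pool_buf { int flags; };

    /* Return the first buffer no reference role points at, or -1 if all are
     * claimed; the real code asserts the search always succeeds because
     * there are more buffers than reference roles.                          */
    static int find_free_buffer(const struct pool_buf *fb, int n)
    {
        int i;

        for (i = 0; i < n; i++)
            if (fb[i].flags == 0)
                return i;

        return -1;
    }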
@@ -59,6 +59,7 @@ extern "C"
         YUV_TYPE clrtype;
 
         int corrupted;
+        int flags;
     } YV12_BUFFER_CONFIG;
 
     int vp8_yv12_alloc_frame_buffer(YV12_BUFFER_CONFIG *ybf, int width, int height, int border);