vp9: Move consec_zero_mv from cyclic refresh to cpi struct.

So it can be used even when aq-mode=3 is not enabled.
Also cleans up some code in the places where it's used.

No change in behavior.

Change-Id: Ib6b265308dbd483f691200da9a0be4da4b380dbc
This commit is contained in:
Marco 2016-04-21 14:00:26 -07:00
Родитель d179b784f9
Коммит adf8533cee
8 изменённых файлов: 72 добавлений и 52 удалений

Просмотреть файл

@ -23,7 +23,6 @@
CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
size_t last_coded_q_map_size;
size_t consec_zero_mv_size;
CYCLIC_REFRESH *const cr = vpx_calloc(1, sizeof(*cr));
if (cr == NULL)
return NULL;
@ -41,21 +40,12 @@ CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
}
assert(MAXQ <= 255);
memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);
consec_zero_mv_size = mi_rows * mi_cols * sizeof(*cr->consec_zero_mv);
cr->consec_zero_mv = vpx_malloc(consec_zero_mv_size);
if (cr->consec_zero_mv == NULL) {
vp9_cyclic_refresh_free(cr);
return NULL;
}
memset(cr->consec_zero_mv, 0, consec_zero_mv_size);
return cr;
}
// Release all buffers owned by the cyclic refresh state, then the struct
// itself. NOTE(review): assumes vpx_free(NULL) is a no-op like free(NULL),
// so a partially-initialized struct (e.g. from a failed alloc) is safe to
// pass here — confirm against vpx_mem.
void vp9_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
  vpx_free(cr->map);
  vpx_free(cr->last_coded_q_map);
  vpx_free(cr->consec_zero_mv);
  vpx_free(cr);
}
@ -245,7 +235,6 @@ void vp9_cyclic_refresh_update_sb_postencode(VP9_COMP *const cpi,
BLOCK_SIZE bsize) {
const VP9_COMMON *const cm = &cpi->common;
CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
MV mv = mi->mv[0].as_mv;
const int bw = num_8x8_blocks_wide_lookup[bsize];
const int bh = num_8x8_blocks_high_lookup[bsize];
const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
@ -269,15 +258,8 @@ void vp9_cyclic_refresh_update_sb_postencode(VP9_COMP *const cpi,
clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id],
0, MAXQ),
cr->last_coded_q_map[map_offset]);
// Update the consecutive zero/low_mv count.
if (is_inter_block(mi) && (abs(mv.row) < 8 && abs(mv.col) < 8)) {
if (cr->consec_zero_mv[map_offset] < 255)
cr->consec_zero_mv[map_offset]++;
} else {
cr->consec_zero_mv[map_offset] = 0;
}
}
}
}
// Update the actual number of blocks that were applied the segment delta q.
@ -442,7 +424,7 @@ static void cyclic_refresh_update_map(VP9_COMP *const cpi) {
if (cr->map[bl_index2] == 0) {
count_tot++;
if (cr->last_coded_q_map[bl_index2] > qindex_thresh ||
cr->consec_zero_mv[bl_index2] < consec_zero_mv_thresh) {
cpi->consec_zero_mv[bl_index2] < consec_zero_mv_thresh) {
sum_map++;
count_sel++;
}
@ -545,8 +527,6 @@ void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
if (cm->frame_type == KEY_FRAME) {
memset(cr->last_coded_q_map, MAXQ,
cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
memset(cr->consec_zero_mv, 0,
cm->mi_rows * cm->mi_cols * sizeof(*cr->consec_zero_mv));
cr->sb_index = 0;
}
return;
@ -621,7 +601,6 @@ void vp9_cyclic_refresh_reset_resize(VP9_COMP *const cpi) {
CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
memset(cr->map, 0, cm->mi_rows * cm->mi_cols);
memset(cr->last_coded_q_map, MAXQ, cm->mi_rows * cm->mi_cols);
memset(cr->consec_zero_mv, 0, cm->mi_rows * cm->mi_cols);
cr->sb_index = 0;
cpi->refresh_golden_frame = 1;
cpi->refresh_alt_ref_frame = 1;

Просмотреть файл

@ -53,8 +53,6 @@ struct CYCLIC_REFRESH {
signed char *map;
// Map of the last q a block was coded at.
uint8_t *last_coded_q_map;
// Count of how many consecutive times a block uses ZEROMV for encoding.
uint8_t *consec_zero_mv;
// Thresholds applied to the projected rate/distortion of the coding block,
// when deciding whether block should be refreshed.
int64_t thresh_rate_sb;

Просмотреть файл

@ -341,7 +341,6 @@ void vp9_denoiser_denoise(VP9_COMP *cpi, MACROBLOCK *mb,
// consec_zeromv when current block has small/zero motion).
consec_zeromv = 0;
if (motion_level == 0) {
CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
VP9_COMMON * const cm = &cpi->common;
int j, i;
// Loop through the 8x8 sub-blocks.
@ -354,7 +353,7 @@ void vp9_denoiser_denoise(VP9_COMP *cpi, MACROBLOCK *mb,
for (i = 0; i < ymis; i++) {
for (j = 0; j < xmis; j++) {
int bl_index = block_index + i * cm->mi_cols + j;
consec_zeromv = VPXMIN(cr->consec_zero_mv[bl_index], consec_zeromv);
consec_zeromv = VPXMIN(cpi->consec_zero_mv[bl_index], consec_zeromv);
// No need to keep checking 8x8 blocks if any of the sub-blocks
// has small consec_zeromv (since threshold for no_skin based on
// zero/small motion in skin detection is high, i.e, > 4).

Просмотреть файл

@ -780,7 +780,6 @@ static int choose_partitioning(VP9_COMP *cpi,
#if !CONFIG_VP9_HIGHBITDEPTH
if (cpi->use_skin_detection && !low_res && (mi_col >= 8 &&
mi_col + 8 < cm->mi_cols && mi_row >= 8 && mi_row + 8 < cm->mi_rows)) {
CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
int bl_index1, bl_index2, bl_index3;
int num_16x16_skin = 0;
int num_16x16_nonskin = 0;
@ -803,10 +802,10 @@ static int choose_partitioning(VP9_COMP *cpi,
bl_index1 = bl_index + 1;
bl_index2 = bl_index + cm->mi_cols;
bl_index3 = bl_index2 + 1;
consec_zeromv = VPXMIN(cr->consec_zero_mv[bl_index],
VPXMIN(cr->consec_zero_mv[bl_index1],
VPXMIN(cr->consec_zero_mv[bl_index2],
cr->consec_zero_mv[bl_index3])));
consec_zeromv = VPXMIN(cpi->consec_zero_mv[bl_index],
VPXMIN(cpi->consec_zero_mv[bl_index1],
VPXMIN(cpi->consec_zero_mv[bl_index2],
cpi->consec_zero_mv[bl_index3])));
is_skin = vp9_compute_skin_block(ysignal,
usignal,
vsignal,
@ -4281,6 +4280,33 @@ static void sum_intra_stats(FRAME_COUNTS *counts, const MODE_INFO *mi) {
++counts->uv_mode[y_mode][uv_mode];
}
// Update cpi->consec_zero_mv: a per-8x8-block count of how many consecutive
// times a block has been coded as a skipped inter block with small motion
// (both MV components have abs value < 8). The counter saturates at 255 and
// is reset to 0 when the block is coded as skipped inter with large motion.
// The counts are consumed by noise estimation, skin detection and the
// cyclic-refresh map update elsewhere in the encoder.
static void update_zeromv_cnt(VP9_COMP *const cpi,
                              const MODE_INFO *const mi,
                              int mi_row, int mi_col,
                              BLOCK_SIZE bsize) {
  const VP9_COMMON *const cm = &cpi->common;
  const MV mv = mi->mv[0].as_mv;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  // Clamp the covered 8x8 grid to the frame boundary for partial blocks.
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int x, y;
  // The mode info is fixed for the whole coding block, so these tests are
  // loop-invariant: hoist them out of the per-8x8 loop.
  if (is_inter_block(mi) && mi->skip &&
      mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
    const int low_motion = abs(mv.row) < 8 && abs(mv.col) < 8;
    for (y = 0; y < ymis; y++)
      for (x = 0; x < xmis; x++) {
        const int map_offset = block_index + y * cm->mi_cols + x;
        if (low_motion) {
          if (cpi->consec_zero_mv[map_offset] < 255)
            cpi->consec_zero_mv[map_offset]++;
        } else {
          cpi->consec_zero_mv[map_offset] = 0;
        }
      }
  }
}
static void encode_superblock(VP9_COMP *cpi, ThreadData *td,
TOKENEXTRA **t, int output_enabled,
int mi_row, int mi_col, BLOCK_SIZE bsize,
@ -4361,5 +4387,7 @@ static void encode_superblock(VP9_COMP *cpi, ThreadData *td,
++td->counts->tx.tx_totals[get_uv_tx_size(mi, &xd->plane[1])];
if (cm->seg.enabled && cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
vp9_cyclic_refresh_update_sb_postencode(cpi, mi, mi_row, mi_col, bsize);
if (cpi->oxcf.pass == 0 && cpi->svc.temporal_layer_id == 0)
update_zeromv_cnt(cpi, mi, mi_row, mi_col, bsize);
}
}

Просмотреть файл

@ -375,6 +375,9 @@ static void dealloc_compressor_data(VP9_COMP *cpi) {
vpx_free(cpi->active_map.map);
cpi->active_map.map = NULL;
vpx_free(cpi->consec_zero_mv);
cpi->consec_zero_mv = NULL;
vp9_free_ref_frame_buffers(cm->buffer_pool);
#if CONFIG_VP9_POSTPROC
vp9_free_postproc_buffers(cm);
@ -1549,9 +1552,12 @@ void vp9_change_config(struct VP9_COMP *cpi, const VP9EncoderConfig *oxcf) {
update_frame_size(cpi);
if ((last_w != cpi->oxcf.width || last_h != cpi->oxcf.height) &&
cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
vp9_cyclic_refresh_reset_resize(cpi);
if (last_w != cpi->oxcf.width || last_h != cpi->oxcf.height) {
memset(cpi->consec_zero_mv, 0,
cm->mi_rows * cm->mi_cols * sizeof(*cpi->consec_zero_mv));
if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
vp9_cyclic_refresh_reset_resize(cpi);
}
if ((cpi->svc.number_temporal_layers > 1 &&
cpi->oxcf.rc_mode == VPX_CBR) ||
@ -1698,6 +1704,10 @@ VP9_COMP *vp9_create_compressor(VP9EncoderConfig *oxcf,
realloc_segmentation_maps(cpi);
CHECK_MEM_ERROR(cm, cpi->consec_zero_mv,
vpx_calloc(cm->mi_rows * cm->mi_cols,
sizeof(*cpi->consec_zero_mv)));
CHECK_MEM_ERROR(cm, cpi->nmvcosts[0],
vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts[0])));
CHECK_MEM_ERROR(cm, cpi->nmvcosts[1],
@ -3430,6 +3440,12 @@ static void encode_without_recode_loop(VP9_COMP *cpi,
cpi->unscaled_last_source,
&cpi->scaled_last_source,
(cpi->oxcf.pass == 0));
if (cm->frame_type == KEY_FRAME || cpi->resize_pending != 0) {
memset(cpi->consec_zero_mv, 0,
cm->mi_rows * cm->mi_cols * sizeof(*cpi->consec_zero_mv));
}
vp9_update_noise_estimate(cpi);
if (cpi->oxcf.pass == 0 &&

Просмотреть файл

@ -499,6 +499,9 @@ typedef struct VP9_COMP {
NOISE_ESTIMATE noise_estimate;
// Count of how many consecutive times a block uses small/zero MV for encoding.
uint8_t *consec_zero_mv;
// VAR_BASED_PARTITION thresholds
// 0 - threshold_64x64; 1 - threshold_32x32;
// 2 - threshold_16x16; 3 - vbp_threshold_8x8;

Просмотреть файл

@ -40,11 +40,9 @@ void vp9_noise_estimate_init(NOISE_ESTIMATE *const ne,
}
int enable_noise_estimation(VP9_COMP *const cpi) {
// Enable noise estimation if denoising is on (and cyclic refresh, since
// noise estimate is currently using a struct defined in cyclic refresh).
// Enable noise estimation if denoising is on.
#if CONFIG_VP9_TEMPORAL_DENOISING
if (cpi->oxcf.noise_sensitivity > 0 &&
cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
if (cpi->oxcf.noise_sensitivity > 0)
return 1;
#endif
// Only allow noise estimate under certain encoding mode.
@ -101,7 +99,6 @@ NOISE_LEVEL vp9_noise_estimate_extract_level(NOISE_ESTIMATE *const ne) {
void vp9_update_noise_estimate(VP9_COMP *const cpi) {
const VP9_COMMON *const cm = &cpi->common;
CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
NOISE_ESTIMATE *const ne = &cpi->noise_estimate;
// Estimate of noise level every frame_period frames.
int frame_period = 10;
@ -153,7 +150,7 @@ void vp9_update_noise_estimate(VP9_COMP *const cpi) {
for (mi_row = 0; mi_row < cm->mi_rows; mi_row++) {
for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
int bl_index = mi_row * cm->mi_cols + mi_col;
if (cr->consec_zero_mv[bl_index] > thresh_consec_zeromv)
if (cpi->consec_zero_mv[bl_index] > thresh_consec_zeromv)
num_low_motion++;
}
}
@ -173,10 +170,10 @@ void vp9_update_noise_estimate(VP9_COMP *const cpi) {
// been encoded as zero/low motion x (= thresh_consec_zeromv) frames
// in a row. consec_zero_mv[] defined for 8x8 blocks, so consider all
// 4 sub-blocks for 16x16 block. Also, avoid skin blocks.
int consec_zeromv = VPXMIN(cr->consec_zero_mv[bl_index],
VPXMIN(cr->consec_zero_mv[bl_index1],
VPXMIN(cr->consec_zero_mv[bl_index2],
cr->consec_zero_mv[bl_index3])));
int consec_zeromv = VPXMIN(cpi->consec_zero_mv[bl_index],
VPXMIN(cpi->consec_zero_mv[bl_index1],
VPXMIN(cpi->consec_zero_mv[bl_index2],
cpi->consec_zero_mv[bl_index3])));
int is_skin = vp9_compute_skin_block(src_y,
src_u,
src_v,
@ -186,10 +183,10 @@ void vp9_update_noise_estimate(VP9_COMP *const cpi) {
consec_zeromv,
0);
if (frame_low_motion &&
cr->consec_zero_mv[bl_index] > thresh_consec_zeromv &&
cr->consec_zero_mv[bl_index1] > thresh_consec_zeromv &&
cr->consec_zero_mv[bl_index2] > thresh_consec_zeromv &&
cr->consec_zero_mv[bl_index3] > thresh_consec_zeromv &&
cpi->consec_zero_mv[bl_index] > thresh_consec_zeromv &&
cpi->consec_zero_mv[bl_index1] > thresh_consec_zeromv &&
cpi->consec_zero_mv[bl_index2] > thresh_consec_zeromv &&
cpi->consec_zero_mv[bl_index3] > thresh_consec_zeromv &&
!is_skin) {
// Compute variance.
unsigned int sse;

Просмотреть файл

@ -327,12 +327,12 @@ void vp9_restore_layer_context(VP9_COMP *const cpi) {
CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
signed char *temp = cr->map;
uint8_t *temp2 = cr->last_coded_q_map;
uint8_t *temp3 = cr->consec_zero_mv;
uint8_t *temp3 = cpi->consec_zero_mv;
cr->map = lc->map;
lc->map = temp;
cr->last_coded_q_map = lc->last_coded_q_map;
lc->last_coded_q_map = temp2;
cr->consec_zero_mv = lc->consec_zero_mv;
cpi->consec_zero_mv = lc->consec_zero_mv;
lc->consec_zero_mv = temp3;
cr->sb_index = lc->sb_index;
}
@ -360,8 +360,8 @@ void vp9_save_layer_context(VP9_COMP *const cpi) {
cr->map = temp;
lc->last_coded_q_map = cr->last_coded_q_map;
cr->last_coded_q_map = temp2;
lc->consec_zero_mv = cr->consec_zero_mv;
cr->consec_zero_mv = temp3;
lc->consec_zero_mv = cpi->consec_zero_mv;
cpi->consec_zero_mv = temp3;
lc->sb_index = cr->sb_index;
}
}