From 7b1ec7a918bb2789912165054353246ce67b8ea9 Mon Sep 17 00:00:00 2001 From: Zoe Liu Date: Wed, 24 May 2017 22:28:24 -0700 Subject: [PATCH] Add encoder/decoder support for var-refs Check the availability of the reference frames at the frame level at both encoder and decoder, and if a reference frame is not available for a specific video frame, remove the signaling of such reference frame info at the block level. This patch adds the consideration of the bit saving inside the RD optimization loop. Change-Id: I4c22f1b843b21c7d2b47e118c99c3ad615a3d4e4 --- av1/common/blockd.h | 5 ++ av1/common/onyxc_int.h | 18 ++++++ av1/decoder/decodeframe.c | 39 ++++++++++++ av1/decoder/decodemv.c | 100 +++++++++++++++++++++++++++--- av1/encoder/bitstream.c | 97 ++++++++++++++++++++++------- av1/encoder/encodeframe.c | 4 +- av1/encoder/encoder.c | 42 +++++++++++++ av1/encoder/rdopt.c | 124 +++++++++++++++++++++++++++++--------- 8 files changed, 366 insertions(+), 63 deletions(-) diff --git a/av1/common/blockd.h b/av1/common/blockd.h index 3f0f812a5..e134548fa 100644 --- a/av1/common/blockd.h +++ b/av1/common/blockd.h @@ -567,6 +567,11 @@ typedef struct RefBuffer { int idx; YV12_BUFFER_CONFIG *buf; struct scale_factors sf; +#if CONFIG_VAR_REFS + // TODO(zoeliu): To evaluate whether "is_valid" is needed or the use of it can + // be simply replaced by checking the "idx". + int is_valid; +#endif // CONFIG_VAR_REFS } RefBuffer; #if CONFIG_ADAPT_SCAN diff --git a/av1/common/onyxc_int.h b/av1/common/onyxc_int.h index fb4b50354..f1e3a2e86 100644 --- a/av1/common/onyxc_int.h +++ b/av1/common/onyxc_int.h @@ -505,6 +505,24 @@ static INLINE void ref_cnt_fb(RefCntBuffer *bufs, int *idx, int new_idx) { bufs[new_idx].ref_count++; } +#if CONFIG_VAR_REFS +#define LAST_IS_VALID(cm) ((cm)->frame_refs[LAST_FRAME - 1].is_valid) +#define LAST2_IS_VALID(cm) ((cm)->frame_refs[LAST2_FRAME - 1].is_valid) +#define LAST3_IS_VALID(cm) ((cm)->frame_refs[LAST3_FRAME - 1].is_valid) +#define GOLDEN_IS_VALID(cm) ((cm)->frame_refs[GOLDEN_FRAME - 1].is_valid) +#define BWDREF_IS_VALID(cm) ((cm)->frame_refs[BWDREF_FRAME - 1].is_valid) +#define ALTREF_IS_VALID(cm) ((cm)->frame_refs[ALTREF_FRAME - 1].is_valid) + +#define L_OR_L2(cm) (LAST_IS_VALID(cm) || LAST2_IS_VALID(cm)) +#define L_AND_L2(cm) (LAST_IS_VALID(cm) && LAST2_IS_VALID(cm)) + +#define L3_OR_G(cm) (LAST3_IS_VALID(cm) || GOLDEN_IS_VALID(cm)) +#define L3_AND_G(cm) (LAST3_IS_VALID(cm) && GOLDEN_IS_VALID(cm)) + +#define BWD_OR_ALT(cm) (BWDREF_IS_VALID(cm) || ALTREF_IS_VALID(cm)) +#define BWD_AND_ALT(cm) (BWDREF_IS_VALID(cm) && ALTREF_IS_VALID(cm)) +#endif // CONFIG_VAR_REFS + static INLINE int mi_cols_aligned_to_sb(const AV1_COMMON *cm) { return ALIGN_POWER_OF_TWO(cm->mi_cols, cm->mib_size_log2); } diff --git a/av1/decoder/decodeframe.c b/av1/decoder/decodeframe.c index 4e02df490..42947f6f4 100644 --- a/av1/decoder/decodeframe.c +++ b/av1/decoder/decodeframe.c @@ -4103,6 +4103,38 @@ static void read_compound_tools(AV1_COMMON *cm, } #endif // CONFIG_EXT_INTER +#if CONFIG_VAR_REFS +static void check_valid_ref_frames(AV1_COMMON *cm) { + MV_REFERENCE_FRAME ref_frame; + // TODO(zoeliu): To handle ALTREF_FRAME the same way as do with other + // reference frames: Current encoder invalid ALTREF when ALTREF + // is the same as LAST, but invalid all the other references + // when they are the same as ALTREF. 
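+  //
+  // In short: a reference whose buffer index duplicates that of an earlier,
+  // already-valid reference is marked invalid, except that when ALTREF_FRAME
+  // duplicates a reference other than LAST_FRAME, the earlier reference is
+  // invalidated instead and ALTREF_FRAME is kept.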
+ for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) { + RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME]; + + if (ref_buf->idx != INVALID_IDX) { + ref_buf->is_valid = 1; + + MV_REFERENCE_FRAME ref; + for (ref = LAST_FRAME; ref < ref_frame; ++ref) { + RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME]; + if (buf->is_valid && buf->idx == ref_buf->idx) { + if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) { + ref_buf->is_valid = 0; + break; + } else { + buf->is_valid = 0; + } + } + } + } else { + ref_buf->is_valid = 0; + } + } +} +#endif // CONFIG_VAR_REFS + static size_t read_uncompressed_header(AV1Decoder *pbi, struct aom_read_bit_buffer *rb) { AV1_COMMON *const cm = &pbi->common; @@ -4234,6 +4266,9 @@ static size_t read_uncompressed_header(AV1Decoder *pbi, for (i = 0; i < INTER_REFS_PER_FRAME; ++i) { cm->frame_refs[i].idx = INVALID_IDX; cm->frame_refs[i].buf = NULL; +#if CONFIG_VAR_REFS + cm->frame_refs[i].is_valid = 0; +#endif // CONFIG_VAR_REFS } setup_frame_size(cm, rb); @@ -4329,6 +4364,10 @@ static size_t read_uncompressed_header(AV1Decoder *pbi, #endif } +#if CONFIG_VAR_REFS + check_valid_ref_frames(cm); +#endif // CONFIG_VAR_REFS + #if CONFIG_FRAME_SIZE if (cm->error_resilient_mode == 0) { setup_frame_size_with_refs(cm, rb); diff --git a/av1/decoder/decodemv.c b/av1/decoder/decodemv.c index 8acc06a44..657eb6e43 100644 --- a/av1/decoder/decodemv.c +++ b/av1/decoder/decodemv.c @@ -1260,65 +1260,145 @@ static void read_ref_frames(AV1_COMMON *const cm, MACROBLOCKD *const xd, const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref]; #endif // CONFIG_EXT_REFS #endif - const int ctx = av1_get_pred_context_comp_ref_p(cm, xd); + const int ctx = av1_get_pred_context_comp_ref_p(cm, xd); +#if CONFIG_VAR_REFS + int bit; + // Test need to explicitly code (L,L2) vs (L3,G) branch node in tree + if (L_OR_L2(cm) && L3_OR_G(cm)) + bit = aom_read(r, fc->comp_ref_prob[ctx][0], ACCT_STR); + else + bit = L3_OR_G(cm); +#else // !CONFIG_VAR_REFS const int bit = aom_read(r, fc->comp_ref_prob[ctx][0], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->comp_ref[ctx][0][bit]; #if CONFIG_EXT_REFS // Decode forward references. if (!bit) { const int ctx1 = av1_get_pred_context_comp_ref_p1(cm, xd); +#if CONFIG_VAR_REFS + int bit1; + // Test need to explicitly code (L) vs (L2) branch node in tree + if (L_AND_L2(cm)) + bit1 = aom_read(r, fc->comp_ref_prob[ctx1][1], ACCT_STR); + else + bit1 = LAST_IS_VALID(cm); +#else // !CONFIG_VAR_REFS const int bit1 = aom_read(r, fc->comp_ref_prob[ctx1][1], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->comp_ref[ctx1][1][bit1]; ref_frame[!idx] = cm->comp_fwd_ref[bit1 ? 0 : 1]; } else { const int ctx2 = av1_get_pred_context_comp_ref_p2(cm, xd); +#if CONFIG_VAR_REFS + int bit2; + // Test need to explicitly code (L3) vs (G) branch node in tree + if (L3_AND_G(cm)) + bit2 = aom_read(r, fc->comp_ref_prob[ctx2][2], ACCT_STR); + else + bit2 = GOLDEN_IS_VALID(cm); +#else // !CONFIG_VAR_REFS const int bit2 = aom_read(r, fc->comp_ref_prob[ctx2][2], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->comp_ref[ctx2][2][bit2]; ref_frame[!idx] = cm->comp_fwd_ref[bit2 ? 3 : 2]; } // Decode backward references. 
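+    // As with the forward references above, the (BWD) vs (ALT) bit below is
+    // read only when both references are valid; otherwise it is inferred
+    // from which of the two is available.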
- { - const int ctx_bwd = av1_get_pred_context_comp_bwdref_p(cm, xd); - const int bit_bwd = - aom_read(r, fc->comp_bwdref_prob[ctx_bwd][0], ACCT_STR); - if (counts) ++counts->comp_bwdref[ctx_bwd][0][bit_bwd]; - ref_frame[idx] = cm->comp_bwd_ref[bit_bwd]; - } -#else + const int ctx_bwd = av1_get_pred_context_comp_bwdref_p(cm, xd); +#if CONFIG_VAR_REFS + int bit_bwd; + // Test need to explicitly code (BWD) vs (ALT) branch node in tree + if (BWD_AND_ALT(cm)) + bit_bwd = aom_read(r, fc->comp_bwdref_prob[ctx_bwd][0], ACCT_STR); + else + bit_bwd = ALTREF_IS_VALID(cm); +#else // !CONFIG_VAR_REFS + const int bit_bwd = + aom_read(r, fc->comp_bwdref_prob[ctx_bwd][0], ACCT_STR); +#endif // CONFIG_VAR_REFS + if (counts) ++counts->comp_bwdref[ctx_bwd][0][bit_bwd]; + ref_frame[idx] = cm->comp_bwd_ref[bit_bwd]; +#else // !CONFIG_EXT_REFS ref_frame[!idx] = cm->comp_var_ref[bit]; ref_frame[idx] = cm->comp_fixed_ref; #endif // CONFIG_EXT_REFS } else if (mode == SINGLE_REFERENCE) { #if CONFIG_EXT_REFS const int ctx0 = av1_get_pred_context_single_ref_p1(xd); +#if CONFIG_VAR_REFS + int bit0; + // Test need to explicitly code (L,L2,L3,G) vs (BWD,ALT) branch node in + // tree + if ((L_OR_L2(cm) || L3_OR_G(cm)) && BWD_OR_ALT(cm)) + bit0 = aom_read(r, fc->single_ref_prob[ctx0][0], ACCT_STR); + else + bit0 = BWD_OR_ALT(cm); +#else // !CONFIG_VAR_REFS const int bit0 = aom_read(r, fc->single_ref_prob[ctx0][0], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->single_ref[ctx0][0][bit0]; if (bit0) { const int ctx1 = av1_get_pred_context_single_ref_p2(xd); +#if CONFIG_VAR_REFS + int bit1; + // Test need to explicitly code (BWD) vs (ALT) branch node in tree + if (BWD_AND_ALT(cm)) + bit1 = aom_read(r, fc->single_ref_prob[ctx1][1], ACCT_STR); + else + bit1 = ALTREF_IS_VALID(cm); +#else // !CONFIG_VAR_REFS const int bit1 = aom_read(r, fc->single_ref_prob[ctx1][1], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->single_ref[ctx1][1][bit1]; ref_frame[0] = bit1 ? ALTREF_FRAME : BWDREF_FRAME; } else { const int ctx2 = av1_get_pred_context_single_ref_p3(xd); +#if CONFIG_VAR_REFS + int bit2; + // Test need to explicitly code (L,L2) vs (L3,G) branch node in tree + if (L_OR_L2(cm) && L3_OR_G(cm)) + bit2 = aom_read(r, fc->single_ref_prob[ctx2][2], ACCT_STR); + else + bit2 = L3_OR_G(cm); +#else // !CONFIG_VAR_REFS const int bit2 = aom_read(r, fc->single_ref_prob[ctx2][2], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->single_ref[ctx2][2][bit2]; if (bit2) { const int ctx4 = av1_get_pred_context_single_ref_p5(xd); +#if CONFIG_VAR_REFS + int bit4; + // Test need to explicitly code (L3) vs (G) branch node in tree + if (L3_AND_G(cm)) + bit4 = aom_read(r, fc->single_ref_prob[ctx4][4], ACCT_STR); + else + bit4 = GOLDEN_IS_VALID(cm); +#else // !CONFIG_VAR_REFS const int bit4 = aom_read(r, fc->single_ref_prob[ctx4][4], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->single_ref[ctx4][4][bit4]; ref_frame[0] = bit4 ? GOLDEN_FRAME : LAST3_FRAME; } else { const int ctx3 = av1_get_pred_context_single_ref_p4(xd); +#if CONFIG_VAR_REFS + int bit3; + // Test need to explicitly code (L) vs (L2) branch node in tree + if (L_AND_L2(cm)) + bit3 = aom_read(r, fc->single_ref_prob[ctx3][3], ACCT_STR); + else + bit3 = LAST2_IS_VALID(cm); +#else // !CONFIG_VAR_REFS const int bit3 = aom_read(r, fc->single_ref_prob[ctx3][3], ACCT_STR); +#endif // CONFIG_VAR_REFS if (counts) ++counts->single_ref[ctx3][3][bit3]; ref_frame[0] = bit3 ? 
LAST2_FRAME : LAST_FRAME; } } -#else +#else // !CONFIG_EXT_REFS const int ctx0 = av1_get_pred_context_single_ref_p1(xd); const int bit0 = aom_read(r, fc->single_ref_prob[ctx0][0], ACCT_STR); if (counts) ++counts->single_ref[ctx0][0][bit0]; diff --git a/av1/encoder/bitstream.c b/av1/encoder/bitstream.c index 814f0fb0e..c44867be8 100644 --- a/av1/encoder/bitstream.c +++ b/av1/encoder/bitstream.c @@ -1165,46 +1165,101 @@ static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd, #if CONFIG_EXT_REFS const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME || mbmi->ref_frame[0] == LAST3_FRAME); - const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME; -#else // CONFIG_EXT_REFS - const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME; -#endif // CONFIG_EXT_REFS +#if CONFIG_VAR_REFS + // Test need to explicitly code (L,L2) vs (L3,G) branch node in tree + if (L_OR_L2(cm) && L3_OR_G(cm)) +#endif // CONFIG_VAR_REFS + aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd)); - aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd)); - -#if CONFIG_EXT_REFS if (!bit) { - const int bit1 = mbmi->ref_frame[0] == LAST_FRAME; - aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd)); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L) vs (L2) branch node in tree + if (L_AND_L2(cm)) { +#endif // CONFIG_VAR_REFS + const int bit1 = mbmi->ref_frame[0] == LAST_FRAME; + aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd)); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS } else { - const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME; - aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd)); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L3) vs (G) branch node in tree + if (L3_AND_G(cm)) { +#endif // CONFIG_VAR_REFS + const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME; + aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd)); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS } - aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd)); + +#if CONFIG_VAR_REFS + // Test need to explicitly code (BWD) vs (ALT) branch node in tree + if (BWD_AND_ALT(cm)) { +#endif // CONFIG_VAR_REFS + const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME; + aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd)); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS + +#else // !CONFIG_EXT_REFS + const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME; + aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd)); #endif // CONFIG_EXT_REFS } else { #if CONFIG_EXT_REFS const int bit0 = (mbmi->ref_frame[0] == ALTREF_FRAME || mbmi->ref_frame[0] == BWDREF_FRAME); - aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd)); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L,L2,L3,G) vs (BWD,ALT) branch node in + // tree + if ((L_OR_L2(cm) || L3_OR_G(cm)) && BWD_OR_ALT(cm)) +#endif // CONFIG_VAR_REFS + aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd)); if (bit0) { - const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME; - aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd)); +#if CONFIG_VAR_REFS + // Test need to explicitly code (BWD) vs (ALT) branch node in tree + if (BWD_AND_ALT(cm)) { +#endif // CONFIG_VAR_REFS + const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME; + aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd)); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS } else { const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME || mbmi->ref_frame[0] == GOLDEN_FRAME); - aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd)); +#if CONFIG_VAR_REFS + // Test need 
to explicitly code (L,L2) vs (L3,G) branch node in tree + if (L_OR_L2(cm) && L3_OR_G(cm)) +#endif // CONFIG_VAR_REFS + aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd)); if (!bit2) { - const int bit3 = mbmi->ref_frame[0] != LAST_FRAME; - aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd)); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L) vs (L2) branch node in tree + if (L_AND_L2(cm)) { +#endif // CONFIG_VAR_REFS + const int bit3 = mbmi->ref_frame[0] != LAST_FRAME; + aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd)); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS } else { - const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME; - aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd)); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L3) vs (G) branch node in tree + if (L3_AND_G(cm)) { +#endif // CONFIG_VAR_REFS + const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME; + aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd)); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS } } -#else // CONFIG_EXT_REFS +#else // !CONFIG_EXT_REFS const int bit0 = mbmi->ref_frame[0] != LAST_FRAME; aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd)); diff --git a/av1/encoder/encodeframe.c b/av1/encoder/encodeframe.c index 5e1f95aed..c8b8ec0f5 100644 --- a/av1/encoder/encodeframe.c +++ b/av1/encoder/encodeframe.c @@ -1585,7 +1585,7 @@ static void update_stats(const AV1_COMMON *const cm, ThreadData *td, int mi_row, counts->comp_bwdref[av1_get_pred_context_comp_bwdref_p(cm, xd)][0] [ref1 == ALTREF_FRAME]++; -#else +#else // !CONFIG_EXT_REFS counts->comp_ref[av1_get_pred_context_comp_ref_p(cm, xd)][0] [ref0 == GOLDEN_FRAME]++; #endif // CONFIG_EXT_REFS @@ -1609,7 +1609,7 @@ static void update_stats(const AV1_COMMON *const cm, ThreadData *td, int mi_row, [ref0 != LAST3_FRAME]++; } } -#else +#else // !CONFIG_EXT_REFS counts->single_ref[av1_get_pred_context_single_ref_p1(xd)][0] [ref0 != LAST_FRAME]++; if (ref0 != LAST_FRAME) { diff --git a/av1/encoder/encoder.c b/av1/encoder/encoder.c index fba5c58ef..f27928b7f 100644 --- a/av1/encoder/encoder.c +++ b/av1/encoder/encoder.c @@ -3068,6 +3068,42 @@ static INLINE void shift_last_ref_frames(AV1_COMP *cpi) { } #endif // CONFIG_EXT_REFS +#if CONFIG_VAR_REFS +static void enc_check_valid_ref_frames(AV1_COMP *const cpi) { + AV1_COMMON *const cm = &cpi->common; + MV_REFERENCE_FRAME ref_frame; + + // TODO(zoeliu): To handle ALTREF_FRAME the same way as do with other + // reference frames. Current encoder invalid ALTREF when ALTREF + // is the same as LAST, but invalid all the other references + // when they are the same as ALTREF. 
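+  //
+  // This mirrors check_valid_ref_frames() on the decoder side, with the
+  // buffer indices obtained through get_ref_frame_buf_idx(): any reference
+  // sharing a buffer with an earlier valid reference is invalidated, subject
+  // to the ALTREF_FRAME exception noted above.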
+ for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) { + int ref_buf_idx = get_ref_frame_buf_idx(cpi, ref_frame); + RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME]; + + if (ref_buf_idx != INVALID_IDX) { + ref_buf->is_valid = 1; + + MV_REFERENCE_FRAME ref; + for (ref = LAST_FRAME; ref < ref_frame; ++ref) { + int buf_idx = get_ref_frame_buf_idx(cpi, ref); + RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME]; + if (buf->is_valid && buf_idx == ref_buf_idx) { + if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) { + ref_buf->is_valid = 0; + break; + } else { + buf->is_valid = 0; + } + } + } + } else { + ref_buf->is_valid = 0; + } + } +} +#endif // CONFIG_VAR_REFS + void av1_update_reference_frames(AV1_COMP *cpi) { AV1_COMMON *const cm = &cpi->common; BufferPool *const pool = cm->buffer_pool; @@ -3858,6 +3894,12 @@ static void set_frame_size(AV1_COMP *cpi, int width, int height) { ref_buf->buf = NULL; } } + +#if CONFIG_VAR_REFS + // Check duplicate reference frames + enc_check_valid_ref_frames(cpi); +#endif // CONFIG_VAR_REFS + #if CONFIG_INTRABC #if CONFIG_HIGHBITDEPTH av1_setup_scale_factors_for_frame(&xd->sf_identity, cm->width, cm->height, diff --git a/av1/encoder/rdopt.c b/av1/encoder/rdopt.c index c31acede1..32ed8df86 100644 --- a/av1/encoder/rdopt.c +++ b/av1/encoder/rdopt.c @@ -5459,27 +5459,63 @@ static void estimate_ref_frame_costs(const AV1_COMMON *cm, ref_costs_single[ALTREF_FRAME] = base_cost; #if CONFIG_EXT_REFS - ref_costs_single[LAST_FRAME] += av1_cost_bit(ref_single_p1, 0); - ref_costs_single[LAST2_FRAME] += av1_cost_bit(ref_single_p1, 0); - ref_costs_single[LAST3_FRAME] += av1_cost_bit(ref_single_p1, 0); - ref_costs_single[GOLDEN_FRAME] += av1_cost_bit(ref_single_p1, 0); - ref_costs_single[BWDREF_FRAME] += av1_cost_bit(ref_single_p1, 1); - ref_costs_single[ALTREF_FRAME] += av1_cost_bit(ref_single_p1, 1); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L,L2,L3,G) vs (BWD,ALT) branch node in + // tree + if ((L_OR_L2(cm) || L3_OR_G(cm)) && BWD_OR_ALT(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_single[LAST_FRAME] += av1_cost_bit(ref_single_p1, 0); + ref_costs_single[LAST2_FRAME] += av1_cost_bit(ref_single_p1, 0); + ref_costs_single[LAST3_FRAME] += av1_cost_bit(ref_single_p1, 0); + ref_costs_single[GOLDEN_FRAME] += av1_cost_bit(ref_single_p1, 0); + ref_costs_single[BWDREF_FRAME] += av1_cost_bit(ref_single_p1, 1); + ref_costs_single[ALTREF_FRAME] += av1_cost_bit(ref_single_p1, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS - ref_costs_single[LAST_FRAME] += av1_cost_bit(ref_single_p3, 0); - ref_costs_single[LAST2_FRAME] += av1_cost_bit(ref_single_p3, 0); - ref_costs_single[LAST3_FRAME] += av1_cost_bit(ref_single_p3, 1); - ref_costs_single[GOLDEN_FRAME] += av1_cost_bit(ref_single_p3, 1); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L,L2) vs (L3,G) branch node in tree + if (L_OR_L2(cm) && L3_OR_G(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_single[LAST_FRAME] += av1_cost_bit(ref_single_p3, 0); + ref_costs_single[LAST2_FRAME] += av1_cost_bit(ref_single_p3, 0); + ref_costs_single[LAST3_FRAME] += av1_cost_bit(ref_single_p3, 1); + ref_costs_single[GOLDEN_FRAME] += av1_cost_bit(ref_single_p3, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS - ref_costs_single[BWDREF_FRAME] += av1_cost_bit(ref_single_p2, 0); - ref_costs_single[ALTREF_FRAME] += av1_cost_bit(ref_single_p2, 1); +#if CONFIG_VAR_REFS + // Test need to explicitly code (BWD) vs (ALT) branch node in tree + if (BWD_AND_ALT(cm)) { +#endif // 
CONFIG_VAR_REFS + ref_costs_single[BWDREF_FRAME] += av1_cost_bit(ref_single_p2, 0); + ref_costs_single[ALTREF_FRAME] += av1_cost_bit(ref_single_p2, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS - ref_costs_single[LAST_FRAME] += av1_cost_bit(ref_single_p4, 0); - ref_costs_single[LAST2_FRAME] += av1_cost_bit(ref_single_p4, 1); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L) vs (L2) branch node in tree + if (L_AND_L2(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_single[LAST_FRAME] += av1_cost_bit(ref_single_p4, 0); + ref_costs_single[LAST2_FRAME] += av1_cost_bit(ref_single_p4, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS - ref_costs_single[LAST3_FRAME] += av1_cost_bit(ref_single_p5, 0); - ref_costs_single[GOLDEN_FRAME] += av1_cost_bit(ref_single_p5, 1); -#else +#if CONFIG_VAR_REFS + // Test need to explicitly code (L3) vs (G) branch node in tree + if (L3_AND_G(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_single[LAST3_FRAME] += av1_cost_bit(ref_single_p5, 0); + ref_costs_single[GOLDEN_FRAME] += av1_cost_bit(ref_single_p5, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS +#else // !CONFIG_EXT_REFS ref_costs_single[LAST_FRAME] += av1_cost_bit(ref_single_p1, 0); ref_costs_single[GOLDEN_FRAME] += av1_cost_bit(ref_single_p1, 1); ref_costs_single[ALTREF_FRAME] += av1_cost_bit(ref_single_p1, 1); @@ -5519,22 +5555,50 @@ static void estimate_ref_frame_costs(const AV1_COMMON *cm, #endif // CONFIG_EXT_REFS #if CONFIG_EXT_REFS - ref_costs_comp[LAST_FRAME] += av1_cost_bit(ref_comp_p, 0); - ref_costs_comp[LAST2_FRAME] += av1_cost_bit(ref_comp_p, 0); - ref_costs_comp[LAST3_FRAME] += av1_cost_bit(ref_comp_p, 1); - ref_costs_comp[GOLDEN_FRAME] += av1_cost_bit(ref_comp_p, 1); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L,L2) vs (L3,G) branch node in tree + if (L_OR_L2(cm) && L3_OR_G(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_comp[LAST_FRAME] += av1_cost_bit(ref_comp_p, 0); + ref_costs_comp[LAST2_FRAME] += av1_cost_bit(ref_comp_p, 0); + ref_costs_comp[LAST3_FRAME] += av1_cost_bit(ref_comp_p, 1); + ref_costs_comp[GOLDEN_FRAME] += av1_cost_bit(ref_comp_p, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS - ref_costs_comp[LAST_FRAME] += av1_cost_bit(ref_comp_p1, 1); - ref_costs_comp[LAST2_FRAME] += av1_cost_bit(ref_comp_p1, 0); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L) vs (L2) branch node in tree + if (L_AND_L2(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_comp[LAST_FRAME] += av1_cost_bit(ref_comp_p1, 1); + ref_costs_comp[LAST2_FRAME] += av1_cost_bit(ref_comp_p1, 0); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS - ref_costs_comp[LAST3_FRAME] += av1_cost_bit(ref_comp_p2, 0); - ref_costs_comp[GOLDEN_FRAME] += av1_cost_bit(ref_comp_p2, 1); +#if CONFIG_VAR_REFS + // Test need to explicitly code (L3) vs (G) branch node in tree + if (L3_AND_G(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_comp[LAST3_FRAME] += av1_cost_bit(ref_comp_p2, 0); + ref_costs_comp[GOLDEN_FRAME] += av1_cost_bit(ref_comp_p2, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS - // NOTE(zoeliu): BWDREF and ALTREF each add an extra cost by coding 1 - // more bit. - ref_costs_comp[BWDREF_FRAME] += av1_cost_bit(bwdref_comp_p, 0); - ref_costs_comp[ALTREF_FRAME] += av1_cost_bit(bwdref_comp_p, 1); -#else +// NOTE(zoeliu): BWDREF and ALTREF each add an extra cost by coding 1 +// more bit. 
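+// When only one of BWDREF and ALTREF is valid, this bit is not signaled,
+// so its cost is skipped below.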
+#if CONFIG_VAR_REFS + // Test need to explicitly code (BWD) vs (ALT) branch node in tree + if (BWD_AND_ALT(cm)) { +#endif // CONFIG_VAR_REFS + ref_costs_comp[BWDREF_FRAME] += av1_cost_bit(bwdref_comp_p, 0); + ref_costs_comp[ALTREF_FRAME] += av1_cost_bit(bwdref_comp_p, 1); +#if CONFIG_VAR_REFS + } +#endif // CONFIG_VAR_REFS +#else // !CONFIG_EXT_REFS ref_costs_comp[LAST_FRAME] += av1_cost_bit(ref_comp_p, 0); ref_costs_comp[GOLDEN_FRAME] += av1_cost_bit(ref_comp_p, 1); #endif // CONFIG_EXT_REFS
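
For reference, the block-level rule that the changes above implement can be
reduced to a minimal sketch. This is an illustration only, not part of the
patch; read_or_infer_ref_bit(), read_bit, clear_side_valid and set_side_valid
are hypothetical names standing in for aom_read() with the relevant
probability and for the *_IS_VALID()/L_OR_L2()-style checks defined in
onyxc_int.h.

static int read_or_infer_ref_bit(int clear_side_valid, int set_side_valid,
                                 int (*read_bit)(void)) {
  // Both subtrees of this reference-frame tree node are reachable: the
  // branch bit is coded explicitly.
  if (clear_side_valid && set_side_valid) return read_bit();
  // Only one subtree is reachable: the bit is implied and never appears in
  // the bitstream. It is 1 exactly when the subtree selected by bit == 1
  // (e.g. (L3,G) at the top compound-ref node, or ALTREF at the BWD/ALT
  // node) is the available one.
  return set_side_valid;
}

The (BWD) vs (ALT) node in read_ref_frames(), for example, corresponds to
read_or_infer_ref_bit(BWDREF_IS_VALID(cm), ALTREF_IS_VALID(cm), ...). The
writer side in write_ref_frames() and the rate estimate in
estimate_ref_frame_costs() gate aom_write() and av1_cost_bit() on the same
conditions, so the encoder neither spends nor accounts for bits that the
decoder will not read.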