Make encoder buffer allocation dynamic
Frame buffers are now allocated dynamically, on demand. Entries in the reference frame map, cm->ref_frame_map, may now be set to -1 (INVALID_IDX) to indicate that there is no valid reference buffer in that "slot". All slots in the reference frame map are now initialized to the empty state (-1) and each buffer is initialized to have a reference count of 0.

Change-Id: Id1afe98de98db4ae8b2dfefed7889c3b28c68582
This commit is contained in:
Parent
87d1a488ed
Commit
3807dd82ab
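Illustrative sketch (not part of this change): the pattern the commit moves the encoder to, written against simplified stand-in types rather than the real VP9_COMMON/RefCntBuffer structures. Every ref_frame_map slot starts out empty (INVALID_IDX), every pool buffer starts with ref_count 0, and any lookup that maps a reference frame to a buffer has to tolerate an empty slot and return NULL instead of indexing the pool, which is the same guard the new get_ref_frame_buf_idx()/get_ref_frame_buffer() helpers in the diff below implement.

/* Illustrative sketch only -- FakeRefBuf/FakeCommon are simplified stand-ins,
 * not the real libvpx types. */
#include <stddef.h>

#define INVALID_IDX -1  /* mirrors the new define in the header hunk below */
#define REF_FRAMES 8
#define FRAME_BUFFERS 12

typedef struct { int ref_count; } FakeRefBuf;

typedef struct {
  int ref_frame_map[REF_FRAMES];      /* slot -> buffer index, or INVALID_IDX */
  FakeRefBuf frame_bufs[FRAME_BUFFERS];
} FakeCommon;

/* All slots start empty and all buffers start unreferenced. */
static void init_empty(FakeCommon *cm) {
  int i;
  for (i = 0; i < REF_FRAMES; ++i) cm->ref_frame_map[i] = INVALID_IDX;
  for (i = 0; i < FRAME_BUFFERS; ++i) cm->frame_bufs[i].ref_count = 0;
}

/* Map slot -> buffer, or NULL while the slot is still empty. */
static FakeRefBuf *lookup_ref(FakeCommon *cm, int map_idx) {
  const int buf_idx =
      (map_idx != INVALID_IDX) ? cm->ref_frame_map[map_idx] : INVALID_IDX;
  return (buf_idx != INVALID_IDX) ? &cm->frame_bufs[buf_idx] : NULL;
}

Callers that previously assumed every slot held a live buffer now have to handle the NULL/INVALID_IDX case, which is what most of the added assert() and NULL checks in the diff below are doing.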
@@ -141,68 +141,6 @@ int vp9_alloc_context_buffers(VP9_COMMON *cm, int width, int height) {
   return 1;
 }
 
-static void init_frame_bufs(VP9_COMMON *cm) {
-  BufferPool *const pool = cm->buffer_pool;
-  int i;
-
-  cm->new_fb_idx = FRAME_BUFFERS - 1;
-  pool->frame_bufs[cm->new_fb_idx].ref_count = 1;
-
-  for (i = 0; i < REF_FRAMES; ++i) {
-    cm->ref_frame_map[i] = i;
-    pool->frame_bufs[i].ref_count = 1;
-  }
-}
-
-int vp9_alloc_ref_frame_buffers(VP9_COMMON *cm, int width, int height) {
-  int i;
-  const int ss_x = cm->subsampling_x;
-  const int ss_y = cm->subsampling_y;
-
-  vp9_free_ref_frame_buffers(cm);
-
-  for (i = 0; i < FRAME_BUFFERS; ++i) {
-    BufferPool *const pool = cm->buffer_pool;
-    pool->frame_bufs[i].ref_count = 0;
-    if (vp9_alloc_frame_buffer(&pool->frame_bufs[i].buf, width, height,
-                               ss_x, ss_y,
-#if CONFIG_VP9_HIGHBITDEPTH
-                               cm->use_highbitdepth,
-#endif
-                               VP9_ENC_BORDER_IN_PIXELS,
-                               cm->byte_alignment) < 0)
-      goto fail;
-    if (pool->frame_bufs[i].mvs == NULL) {
-      pool->frame_bufs[i].mvs =
-          (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
-                               sizeof(*pool->frame_bufs[i].mvs));
-      if (pool->frame_bufs[i].mvs == NULL)
-        goto fail;
-
-      pool->frame_bufs[i].mi_rows = cm->mi_rows;
-      pool->frame_bufs[i].mi_cols = cm->mi_cols;
-    }
-  }
-
-  init_frame_bufs(cm);
-
-#if CONFIG_VP9_POSTPROC
-  if (vp9_alloc_frame_buffer(&cm->post_proc_buffer, width, height, ss_x, ss_y,
-#if CONFIG_VP9_HIGHBITDEPTH
-                             cm->use_highbitdepth,
-#endif
-                             VP9_ENC_BORDER_IN_PIXELS,
-                             cm->byte_alignment) < 0)
-    goto fail;
-#endif
-
-  return 0;
-
- fail:
-  vp9_free_ref_frame_buffers(cm);
-  return 1;
-}
-
 void vp9_remove_common(VP9_COMMON *cm) {
   vp9_free_ref_frame_buffers(cm);
   vp9_free_context_buffers(cm);
@@ -12,6 +12,8 @@
 #ifndef VP9_COMMON_VP9_ALLOCCOMMON_H_
 #define VP9_COMMON_VP9_ALLOCCOMMON_H_
 
+#define INVALID_IDX -1  // Invalid buffer index.
+
 #ifdef __cplusplus
 extern "C" {
 #endif
@@ -24,7 +26,6 @@ int vp9_alloc_context_buffers(struct VP9Common *cm, int width, int height);
 void vp9_init_context_buffers(struct VP9Common *cm);
 void vp9_free_context_buffers(struct VP9Common *cm);
 
-int vp9_alloc_ref_frame_buffers(struct VP9Common *cm, int width, int height);
 void vp9_free_ref_frame_buffers(struct VP9Common *cm);
 
 int vp9_alloc_state_buffers(struct VP9Common *cm, int width, int height);
@@ -1362,7 +1362,7 @@ static size_t read_uncompressed_header(VP9Decoder *pbi,
     pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
 
     for (i = 0; i < REFS_PER_FRAME; ++i) {
-      cm->frame_refs[i].idx = -1;
+      cm->frame_refs[i].idx = INVALID_IDX;
       cm->frame_refs[i].buf = NULL;
     }
 
@@ -989,8 +989,6 @@ static void write_frame_size_with_refs(VP9_COMP *cpi,
   MV_REFERENCE_FRAME ref_frame;
   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
     YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);
-    found = cm->width == cfg->y_crop_width &&
-            cm->height == cfg->y_crop_height;
 
     // Set "found" to 0 for temporal svc and for spatial svc key frame
     if (cpi->use_svc &&
@@ -1003,6 +1001,9 @@ static void write_frame_size_with_refs(VP9_COMP *cpi,
          cpi->svc.layer_context[0].frames_from_key_frame <
          cpi->svc.number_temporal_layers + 1))) {
       found = 0;
+    } else if (cfg != NULL) {
+      found = cm->width == cfg->y_crop_width &&
+              cm->height == cfg->y_crop_height;
     }
     vp9_wb_write_bit(wb, found);
     if (found) {
@@ -1114,7 +1115,8 @@ static void write_uncompressed_header(VP9_COMP *cpi,
     MV_REFERENCE_FRAME ref_frame;
     vp9_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
     for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
-      vp9_wb_write_literal(wb, get_ref_frame_idx(cpi, ref_frame),
+      assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
+      vp9_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
                            REF_FRAMES_LOG2);
       vp9_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
     }
@@ -144,12 +144,14 @@ static unsigned int get_sby_perpixel_diff_variance(VP9_COMP *cpi,
                                                    const struct buf_2d *ref,
                                                    int mi_row, int mi_col,
                                                    BLOCK_SIZE bs) {
+  unsigned int sse, var;
+  uint8_t *last_y;
   const YV12_BUFFER_CONFIG *last = get_ref_frame_buffer(cpi, LAST_FRAME);
-  const uint8_t* last_y = &last->y_buffer[mi_row * MI_SIZE * last->y_stride +
-                                          mi_col * MI_SIZE];
-  unsigned int sse;
-  const unsigned int var = cpi->fn_ptr[bs].vf(ref->buf, ref->stride,
-                                              last_y, last->y_stride, &sse);
+
+  assert(last != NULL);
+  last_y =
+      &last->y_buffer[mi_row * MI_SIZE * last->y_stride + mi_col * MI_SIZE];
+  var = cpi->fn_ptr[bs].vf(ref->buf, ref->stride, last_y, last->y_stride, &sse);
   return ROUND_POWER_OF_TWO(var, num_pels_log2_lookup[bs]);
 }
 
@@ -670,7 +672,7 @@ static unsigned int motion_estimation(VP9_COMP *cpi, MACROBLOCK *x,
 #endif
 
 // This function chooses partitioning based on the variance between source and
-// reconstructed last, where variance is computed for downs-sampled inputs.
+// reconstructed last, where variance is computed for down-sampled inputs.
 static void choose_partitioning(VP9_COMP *cpi,
                                 const TileInfo *const tile,
                                 MACROBLOCK *x,
@@ -685,7 +687,6 @@ static void choose_partitioning(VP9_COMP *cpi,
   int sp;
   int dp;
   int pixels_wide = 64, pixels_high = 64;
-  const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, LAST_FRAME);
 
   // Always use 4x4 partition for key frame.
   const int is_key_frame = (cm->frame_type == KEY_FRAME);
@@ -716,6 +717,8 @@ static void choose_partitioning(VP9_COMP *cpi,
 #if GLOBAL_MOTION
     unsigned int y_sse;
 #endif
+    const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, LAST_FRAME);
+    assert(yv12 != NULL);
     vp9_setup_pre_planes(xd, 0, yv12, mi_row, mi_col,
                          &cm->frame_refs[LAST_FRAME - 1].sf);
     mbmi->ref_frame[0] = LAST_FRAME;
@@ -4179,6 +4182,7 @@ static void encode_superblock(VP9_COMP *cpi, ThreadData *td,
     for (ref = 0; ref < 1 + is_compound; ++ref) {
       YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi,
                                                      mbmi->ref_frame[ref]);
+      assert(cfg != NULL);
       vp9_setup_pre_planes(xd, ref, cfg, mi_row, mi_col,
                            &xd->block_refs[ref]->sf);
     }
@@ -483,6 +483,7 @@ static void alloc_raw_frame_buffers(VP9_COMP *cpi) {
     vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                        "Failed to allocate lag buffers");
 
+  // TODO(agrange) Check if ARF is enabled and skip allocation if not.
   if (vp9_realloc_frame_buffer(&cpi->alt_ref_buffer,
                                oxcf->width, oxcf->height,
                                cm->subsampling_x, cm->subsampling_y,
@@ -495,13 +496,6 @@ static void alloc_raw_frame_buffers(VP9_COMP *cpi) {
                        "Failed to allocate altref buffer");
 }
 
-static void alloc_ref_frame_buffers(VP9_COMP *cpi) {
-  VP9_COMMON *const cm = &cpi->common;
-  if (vp9_alloc_ref_frame_buffers(cm, cm->width, cm->height))
-    vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
-                       "Failed to allocate frame buffers");
-}
-
 static void alloc_util_frame_buffers(VP9_COMP *cpi) {
   VP9_COMMON *const cm = &cpi->common;
   if (vp9_realloc_frame_buffer(&cpi->last_frame_uf,
@@ -2483,6 +2477,21 @@ static void loopfilter_frame(VP9_COMP *cpi, VP9_COMMON *cm) {
   vp9_extend_frame_inner_borders(cm->frame_to_show);
 }
 
+static INLINE void alloc_frame_mvs(const VP9_COMMON *cm,
+                                   int buffer_idx) {
+  RefCntBuffer *const new_fb_ptr = &cm->buffer_pool->frame_bufs[buffer_idx];
+  if (new_fb_ptr->mvs == NULL ||
+      new_fb_ptr->mi_rows < cm->mi_rows ||
+      new_fb_ptr->mi_cols < cm->mi_cols) {
+    vpx_free(new_fb_ptr->mvs);
+    new_fb_ptr->mvs =
+      (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
+                           sizeof(*new_fb_ptr->mvs));
+    new_fb_ptr->mi_rows = cm->mi_rows;
+    new_fb_ptr->mi_cols = cm->mi_cols;
+  }
+}
+
 void vp9_scale_references(VP9_COMP *cpi) {
   VP9_COMMON *cm = &cpi->common;
   MV_REFERENCE_FRAME ref_frame;
@@ -2491,13 +2500,19 @@ void vp9_scale_references(VP9_COMP *cpi) {
   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
     // Need to convert from VP9_REFFRAME to index into ref_mask (subtract 1).
     if (cpi->ref_frame_flags & ref_mask[ref_frame - 1]) {
-      const int idx = cm->ref_frame_map[get_ref_frame_idx(cpi, ref_frame)];
-      BufferPool *const pool = cm->buffer_pool;
-      const YV12_BUFFER_CONFIG *const ref = &pool->frame_bufs[idx].buf;
+      BufferPool *const pool = cm->buffer_pool;
+      const YV12_BUFFER_CONFIG *const ref = get_ref_frame_buffer(cpi,
+                                                                 ref_frame);
+
+      if (ref == NULL) {
+        cpi->scaled_ref_idx[ref_frame - 1] = INVALID_IDX;
+        continue;
+      }
 
 #if CONFIG_VP9_HIGHBITDEPTH
       if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
         const int new_fb = get_free_fb(cm);
+        RefCntBuffer *const new_fb_ptr = &pool->frame_bufs[new_fb];
         cm->cur_frame = &pool->frame_bufs[new_fb];
         vp9_realloc_frame_buffer(&pool->frame_bufs[new_fb].buf,
                                  cm->width, cm->height,
@@ -2505,35 +2520,28 @@ void vp9_scale_references(VP9_COMP *cpi) {
                                  cm->use_highbitdepth,
                                  VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
                                  NULL, NULL, NULL);
-        scale_and_extend_frame(ref, &pool->frame_bufs[new_fb].buf,
-                               (int)cm->bit_depth);
+        scale_and_extend_frame(ref, &new_fb_ptr->buf, (int)cm->bit_depth);
 #else
       if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
         const int new_fb = get_free_fb(cm);
-        vp9_realloc_frame_buffer(&pool->frame_bufs[new_fb].buf,
+        RefCntBuffer *const new_fb_ptr = &pool->frame_bufs[new_fb];
+        vp9_realloc_frame_buffer(&new_fb_ptr->buf,
                                  cm->width, cm->height,
                                  cm->subsampling_x, cm->subsampling_y,
                                  VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
                                  NULL, NULL, NULL);
-        scale_and_extend_frame(ref, &pool->frame_bufs[new_fb].buf);
+        scale_and_extend_frame(ref, &new_fb_ptr->buf);
 #endif  // CONFIG_VP9_HIGHBITDEPTH
         cpi->scaled_ref_idx[ref_frame - 1] = new_fb;
-        if (pool->frame_bufs[new_fb].mvs == NULL ||
-            pool->frame_bufs[new_fb].mi_rows < cm->mi_rows ||
-            pool->frame_bufs[new_fb].mi_cols < cm->mi_cols) {
-          vpx_free(pool->frame_bufs[new_fb].mvs);
-          pool->frame_bufs[new_fb].mvs =
-              (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
-                                   sizeof(*pool->frame_bufs[new_fb].mvs));
-          pool->frame_bufs[new_fb].mi_rows = cm->mi_rows;
-          pool->frame_bufs[new_fb].mi_cols = cm->mi_cols;
-        }
 
+        alloc_frame_mvs(cm, new_fb);
       } else {
-        cpi->scaled_ref_idx[ref_frame - 1] = idx;
-        ++pool->frame_bufs[idx].ref_count;
+        const int buf_idx = get_ref_frame_buf_idx(cpi, ref_frame);
+        cpi->scaled_ref_idx[ref_frame - 1] = buf_idx;
+        ++pool->frame_bufs[buf_idx].ref_count;
       }
     } else {
-      cpi->scaled_ref_idx[ref_frame - 1] = INVALID_REF_BUFFER_IDX;
+      cpi->scaled_ref_idx[ref_frame - 1] = INVALID_IDX;
     }
   }
 }
@@ -2543,11 +2551,11 @@ static void release_scaled_references(VP9_COMP *cpi) {
   int i;
   for (i = 0; i < MAX_REF_FRAMES; ++i) {
     const int idx = cpi->scaled_ref_idx[i];
-    RefCntBuffer *const buf = idx != INVALID_REF_BUFFER_IDX ?
+    RefCntBuffer *const buf = idx != INVALID_IDX ?
         &cm->buffer_pool->frame_bufs[idx] : NULL;
     if (buf != NULL) {
       --buf->ref_count;
-      cpi->scaled_ref_idx[i] = INVALID_REF_BUFFER_IDX;
+      cpi->scaled_ref_idx[i] = INVALID_IDX;
     }
   }
 }
@@ -2751,6 +2759,8 @@ void set_frame_size(VP9_COMP *cpi) {
     vp9_set_target_rate(cpi);
   }
 
+  alloc_frame_mvs(cm, cm->new_fb_idx);
+
   // Reset the frame pointers to the current frame size.
   vp9_realloc_frame_buffer(get_frame_new_buffer(cm),
                            cm->width, cm->height,
@@ -2765,24 +2775,30 @@ void set_frame_size(VP9_COMP *cpi) {
   init_motion_estimation(cpi);
 
   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
-    const int idx = cm->ref_frame_map[get_ref_frame_idx(cpi, ref_frame)];
-    YV12_BUFFER_CONFIG *const buf = &cm->buffer_pool->frame_bufs[idx].buf;
     RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - 1];
-    ref_buf->buf = buf;
-    ref_buf->idx = idx;
+    const int buf_idx = get_ref_frame_buf_idx(cpi, ref_frame);
+
+    ref_buf->idx = buf_idx;
+
+    if (buf_idx != INVALID_IDX) {
+      YV12_BUFFER_CONFIG *const buf = &cm->buffer_pool->frame_bufs[buf_idx].buf;
+      ref_buf->buf = buf;
 #if CONFIG_VP9_HIGHBITDEPTH
-    vp9_setup_scale_factors_for_frame(&ref_buf->sf,
-                                      buf->y_crop_width, buf->y_crop_height,
-                                      cm->width, cm->height,
-                                      (buf->flags & YV12_FLAG_HIGHBITDEPTH) ?
-                                          1 : 0);
+      vp9_setup_scale_factors_for_frame(&ref_buf->sf,
+                                        buf->y_crop_width, buf->y_crop_height,
+                                        cm->width, cm->height,
+                                        (buf->flags & YV12_FLAG_HIGHBITDEPTH) ?
+                                            1 : 0);
 #else
-    vp9_setup_scale_factors_for_frame(&ref_buf->sf,
-                                      buf->y_crop_width, buf->y_crop_height,
-                                      cm->width, cm->height);
+      vp9_setup_scale_factors_for_frame(&ref_buf->sf,
+                                        buf->y_crop_width, buf->y_crop_height,
+                                        cm->width, cm->height);
 #endif  // CONFIG_VP9_HIGHBITDEPTH
-    if (vp9_is_scaled(&ref_buf->sf))
-      vp9_extend_frame_borders(buf);
+      if (vp9_is_scaled(&ref_buf->sf))
+        vp9_extend_frame_borders(buf);
+    } else {
+      ref_buf->buf = NULL;
+    }
   }
 
   set_ref_ptrs(cm, xd, LAST_FRAME, LAST_FRAME);
@@ -3448,6 +3464,16 @@ static void Pass2Encode(VP9_COMP *cpi, size_t *size,
   vp9_twopass_postencode_update(cpi);
 }
 
+static void init_ref_frame_bufs(VP9_COMMON *cm) {
+  int i;
+  BufferPool *const pool = cm->buffer_pool;
+  cm->new_fb_idx = INVALID_IDX;
+  for (i = 0; i < REF_FRAMES; ++i) {
+    cm->ref_frame_map[i] = INVALID_IDX;
+    pool->frame_bufs[i].ref_count = 0;
+  }
+}
+
 static void check_initial_width(VP9_COMP *cpi,
 #if CONFIG_VP9_HIGHBITDEPTH
                                 int use_highbitdepth,
@@ -3468,7 +3494,7 @@ static void check_initial_width(VP9_COMP *cpi,
 #endif
 
     alloc_raw_frame_buffers(cpi);
-    alloc_ref_frame_buffers(cpi);
+    init_ref_frame_bufs(cm);
     alloc_util_frame_buffers(cpi);
 
     init_motion_estimation(cpi);  // TODO(agrange) This can be removed.
@@ -3793,8 +3819,14 @@ int vp9_get_compressed_data(VP9_COMP *cpi, unsigned int *frame_flags,
 
   // Find a free buffer for the new frame, releasing the reference previously
   // held.
-  pool->frame_bufs[cm->new_fb_idx].ref_count--;
+  if (cm->new_fb_idx != INVALID_IDX) {
+    --pool->frame_bufs[cm->new_fb_idx].ref_count;
+  }
   cm->new_fb_idx = get_free_fb(cm);
+
+  if (cm->new_fb_idx == INVALID_IDX)
+    return -1;
+
   cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];
 
   if (!cpi->use_svc && cpi->multi_arf_allowed) {
@@ -3821,7 +3853,7 @@ int vp9_get_compressed_data(VP9_COMP *cpi, unsigned int *frame_flags,
   }
 
   for (i = 0; i < MAX_REF_FRAMES; ++i)
-    cpi->scaled_ref_idx[i] = INVALID_REF_BUFFER_IDX;
+    cpi->scaled_ref_idx[i] = INVALID_IDX;
 
   if (oxcf->pass == 1 &&
       (!cpi->use_svc || is_two_pass_svc(cpi))) {
@@ -3907,8 +3939,18 @@ int vp9_get_compressed_data(VP9_COMP *cpi, unsigned int *frame_flags,
       PSNR_STATS psnr2;
       double frame_ssim2 = 0, weight = 0;
 #if CONFIG_VP9_POSTPROC
+      // TODO(agrange) Add resizing of post-proc buffer in here when the
+      // encoder is changed to use on-demand buffer allocation.
+      if (vp9_alloc_frame_buffer(&cm->post_proc_buffer,
+                                 recon->y_crop_width, recon->y_crop_height,
+                                 cm->subsampling_x, cm->subsampling_y,
+#if CONFIG_VP9_HIGHBITDEPTH
+                                 cm->use_highbitdepth,
+#endif
+                                 VP9_ENC_BORDER_IN_PIXELS,
+                                 cm->byte_alignment) < 0) {
+        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
+                           "Failed to allocate post processing buffer");
+      }
+
       vp9_deblock(cm->frame_to_show, &cm->post_proc_buffer,
                   cm->lf.filter_level * 10 / 6);
 #endif
 
@@ -17,6 +17,7 @@
 #include "vpx/internal/vpx_codec_internal.h"
 #include "vpx/vp8cx.h"
 
+#include "vp9/common/vp9_alloccommon.h"
 #include "vp9/common/vp9_ppflags.h"
 #include "vp9/common/vp9_entropymode.h"
 #include "vp9/common/vp9_thread_common.h"
@@ -47,7 +48,6 @@ extern "C" {
 #endif
 
 #define DEFAULT_GF_INTERVAL 10
-#define INVALID_REF_BUFFER_IDX -1  // Marks an invalid reference buffer id.
 
 typedef struct {
   int nmvjointcost[MV_JOINTS];
@@ -517,8 +517,8 @@ static INLINE int frame_is_kf_gf_arf(const VP9_COMP *cpi) {
          (cpi->refresh_golden_frame && !cpi->rc.is_src_frame_alt_ref);
 }
 
-static INLINE int get_ref_frame_idx(const VP9_COMP *cpi,
-                                    MV_REFERENCE_FRAME ref_frame) {
+static INLINE int get_ref_frame_map_idx(const VP9_COMP *cpi,
+                                        MV_REFERENCE_FRAME ref_frame) {
   if (ref_frame == LAST_FRAME) {
     return cpi->lst_fb_idx;
   } else if (ref_frame == GOLDEN_FRAME) {
@@ -528,12 +528,19 @@ static INLINE int get_ref_frame_idx(const VP9_COMP *cpi,
   }
 }
 
+static INLINE int get_ref_frame_buf_idx(const VP9_COMP *const cpi,
+                                        int ref_frame) {
+  const VP9_COMMON *const cm = &cpi->common;
+  const int map_idx = get_ref_frame_map_idx(cpi, ref_frame);
+  return (map_idx != INVALID_IDX) ? cm->ref_frame_map[map_idx] : INVALID_IDX;
+}
+
 static INLINE YV12_BUFFER_CONFIG *get_ref_frame_buffer(
     VP9_COMP *cpi, MV_REFERENCE_FRAME ref_frame) {
   VP9_COMMON *const cm = &cpi->common;
-  BufferPool *const pool = cm->buffer_pool;
-  return &pool->frame_bufs[cm->ref_frame_map[get_ref_frame_idx(cpi, ref_frame)]]
-      .buf;
+  const int buf_idx = get_ref_frame_buf_idx(cpi, ref_frame);
+  return
+      buf_idx != INVALID_IDX ? &cm->buffer_pool->frame_bufs[buf_idx].buf : NULL;
 }
 
 static INLINE int get_token_alloc(int mb_rows, int mb_cols) {
@@ -66,12 +66,6 @@
 unsigned int arf_count = 0;
 #endif
 
-static void swap_yv12(YV12_BUFFER_CONFIG *a, YV12_BUFFER_CONFIG *b) {
-  YV12_BUFFER_CONFIG temp = *a;
-  *a = *b;
-  *b = temp;
-}
-
 // Resets the first pass file to the given position using a relative seek from
 // the current position.
 static void reset_fpf_position(TWO_PASS *p,
@@ -465,12 +459,6 @@ void vp9_first_pass(VP9_COMP *cpi, const struct lookahead_entry *source) {
   int i;
 
   int recon_yoffset, recon_uvoffset;
-  YV12_BUFFER_CONFIG *const lst_yv12 = get_ref_frame_buffer(cpi, LAST_FRAME);
-  YV12_BUFFER_CONFIG *gld_yv12 = get_ref_frame_buffer(cpi, GOLDEN_FRAME);
-  YV12_BUFFER_CONFIG *const new_yv12 = get_frame_new_buffer(cm);
-  int recon_y_stride = lst_yv12->y_stride;
-  int recon_uv_stride = lst_yv12->uv_stride;
-  int uv_mb_height = 16 >> (lst_yv12->y_height > lst_yv12->uv_height);
   int64_t intra_error = 0;
   int64_t coded_error = 0;
   int64_t sr_coded_error = 0;
@@ -488,11 +476,26 @@ void vp9_first_pass(VP9_COMP *cpi, const struct lookahead_entry *source) {
   MV lastmv = {0, 0};
   TWO_PASS *twopass = &cpi->twopass;
   const MV zero_mv = {0, 0};
+  int recon_y_stride, recon_uv_stride, uv_mb_height;
+
+  YV12_BUFFER_CONFIG *const lst_yv12 = get_ref_frame_buffer(cpi, LAST_FRAME);
+  YV12_BUFFER_CONFIG *gld_yv12 = get_ref_frame_buffer(cpi, GOLDEN_FRAME);
+  YV12_BUFFER_CONFIG *const new_yv12 = get_frame_new_buffer(cm);
   const YV12_BUFFER_CONFIG *first_ref_buf = lst_yv12;
 
   LAYER_CONTEXT *const lc = is_two_pass_svc(cpi) ?
         &cpi->svc.layer_context[cpi->svc.spatial_layer_id] : NULL;
   double intra_factor;
   double brightness_factor;
+  BufferPool *const pool = cm->buffer_pool;
+
+  // First pass code requires valid last and new frame buffers.
+  assert(new_yv12 != NULL);
+  assert((lc != NULL) || frame_is_intra_only(cm) || (lst_yv12 != NULL));
+
+  recon_y_stride = new_yv12->y_stride;
+  recon_uv_stride = new_yv12->uv_stride;
+  uv_mb_height = 16 >> (new_yv12->y_height > new_yv12->uv_height);
+
 #if CONFIG_FP_MB_STATS
   if (cpi->use_fp_mb_stats) {
@@ -537,13 +540,10 @@ void vp9_first_pass(VP9_COMP *cpi, const struct lookahead_entry *source) {
   }
 
   if (cpi->ref_frame_flags & VP9_GOLD_FLAG) {
-    BufferPool *const pool = cm->buffer_pool;
-    const int ref_idx =
-        cm->ref_frame_map[get_ref_frame_idx(cpi, GOLDEN_FRAME)];
-    const int scaled_idx = cpi->scaled_ref_idx[GOLDEN_FRAME - 1];
-
-    gld_yv12 = (scaled_idx != ref_idx) ? &pool->frame_bufs[scaled_idx].buf :
-               get_ref_frame_buffer(cpi, GOLDEN_FRAME);
+    gld_yv12 = vp9_get_scaled_ref_frame(cpi, GOLDEN_FRAME);
+    if (gld_yv12 == NULL) {
+      gld_yv12 = get_ref_frame_buffer(cpi, GOLDEN_FRAME);
+    }
   } else {
     gld_yv12 = NULL;
   }
@@ -563,9 +563,12 @@ void vp9_first_pass(VP9_COMP *cpi, const struct lookahead_entry *source) {
   vp9_setup_block_planes(&x->e_mbd, cm->subsampling_x, cm->subsampling_y);
 
   vp9_setup_src_planes(x, cpi->Source, 0, 0);
-  vp9_setup_pre_planes(xd, 0, first_ref_buf, 0, 0, NULL);
   vp9_setup_dst_planes(xd->plane, new_yv12, 0, 0);
 
+  if (!frame_is_intra_only(cm)) {
+    vp9_setup_pre_planes(xd, 0, first_ref_buf, 0, 0, NULL);
+  }
+
   xd->mi = cm->mi;
   xd->mi[0].src_mi = &xd->mi[0];
 
@@ -1020,7 +1023,8 @@ void vp9_first_pass(VP9_COMP *cpi, const struct lookahead_entry *source) {
           ((twopass->this_frame_stats.intra_error /
             DOUBLE_DIVIDE_CHECK(twopass->this_frame_stats.coded_error)) > 2.0))) {
       if (gld_yv12 != NULL) {
-        vp8_yv12_copy_frame(lst_yv12, gld_yv12);
+        ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->gld_fb_idx],
+                   cm->ref_frame_map[cpi->lst_fb_idx]);
       }
       twopass->sr_update_lag = 1;
     } else {
@@ -1032,14 +1036,17 @@ void vp9_first_pass(VP9_COMP *cpi, const struct lookahead_entry *source) {
   if (lc != NULL) {
     vp9_update_reference_frames(cpi);
   } else {
-    // Swap frame pointers so last frame refers to the frame we just compressed.
-    swap_yv12(lst_yv12, new_yv12);
+    // The frame we just compressed now becomes the last frame.
+    ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->lst_fb_idx],
+               cm->new_fb_idx);
   }
 
   // Special case for the first frame. Copy into the GF buffer as a second
   // reference.
-  if (cm->current_video_frame == 0 && gld_yv12 != NULL && lc == NULL) {
-    vp8_yv12_copy_frame(lst_yv12, gld_yv12);
+  if (cm->current_video_frame == 0 && cpi->gld_fb_idx != INVALID_IDX &&
+      lc == NULL) {
+    ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->gld_fb_idx],
+               cm->ref_frame_map[cpi->lst_fb_idx]);
   }
 
   // Use this to see what the first pass reconstruction looks like.
@@ -376,6 +376,8 @@ void vp9_update_mbgraph_stats(VP9_COMP *cpi) {
   int i, n_frames = vp9_lookahead_depth(cpi->lookahead);
   YV12_BUFFER_CONFIG *golden_ref = get_ref_frame_buffer(cpi, GOLDEN_FRAME);
 
+  assert(golden_ref != NULL);
+
   // we need to look ahead beyond where the ARF transitions into
   // being a GF - so exit if we don't look ahead beyond that
   if (n_frames <= cpi->rc.frames_till_gf_update_due)
@@ -689,12 +689,13 @@ void vp9_pick_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
 #endif
 
   for (ref_frame = LAST_FRAME; ref_frame <= GOLDEN_FRAME; ++ref_frame) {
+    const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, ref_frame);
+
     x->pred_mv_sad[ref_frame] = INT_MAX;
     frame_mv[NEWMV][ref_frame].as_int = INVALID_MV;
     frame_mv[ZEROMV][ref_frame].as_int = 0;
 
-    if (cpi->ref_frame_flags & flag_list[ref_frame]) {
-      const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, ref_frame);
+    if ((cpi->ref_frame_flags & flag_list[ref_frame]) && (yv12 != NULL)) {
       int_mv *const candidates = mbmi->ref_mvs[ref_frame];
       const struct scale_factors *const sf = &cm->frame_refs[ref_frame - 1].sf;
 
@@ -1076,11 +1077,11 @@ void vp9_pick_inter_mode_sub8x8(VP9_COMP *cpi, MACROBLOCK *x,
   ctx->pred_pixel_ready = 0;
 
   for (ref_frame = LAST_FRAME; ref_frame <= GOLDEN_FRAME; ++ref_frame) {
+    const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, ref_frame);
     int_mv dummy_mv[2];
     x->pred_mv_sad[ref_frame] = INT_MAX;
 
-    if (cpi->ref_frame_flags & flag_list[ref_frame]) {
-      const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, ref_frame);
+    if ((cpi->ref_frame_flags & flag_list[ref_frame]) && (yv12 != NULL)) {
       int_mv *const candidates = mbmi->ref_mvs[ref_frame];
       const struct scale_factors *const sf =
           &cm->frame_refs[ref_frame - 1].sf;
@@ -532,13 +532,14 @@ int16_t* vp9_raster_block_offset_int16(BLOCK_SIZE plane_bsize,
   return base + vp9_raster_block_offset(plane_bsize, raster_block, stride);
 }
 
-const YV12_BUFFER_CONFIG *vp9_get_scaled_ref_frame(const VP9_COMP *cpi,
-                                                   int ref_frame) {
+YV12_BUFFER_CONFIG *vp9_get_scaled_ref_frame(const VP9_COMP *cpi,
+                                             int ref_frame) {
   const VP9_COMMON *const cm = &cpi->common;
-  const int ref_idx = cm->ref_frame_map[get_ref_frame_idx(cpi, ref_frame)];
   const int scaled_idx = cpi->scaled_ref_idx[ref_frame - 1];
-  return (scaled_idx != ref_idx) ?
-      &cm->buffer_pool->frame_bufs[scaled_idx].buf : NULL;
+  const int ref_idx = get_ref_frame_buf_idx(cpi, ref_frame);
+  return
+      (scaled_idx != ref_idx && scaled_idx != INVALID_IDX) ?
+          &cm->buffer_pool->frame_bufs[scaled_idx].buf : NULL;
 }
 
 int vp9_get_switchable_rate(const VP9_COMP *cpi, const MACROBLOCKD *const xd) {
@@ -147,8 +147,8 @@ int vp9_raster_block_offset(BLOCK_SIZE plane_bsize,
 int16_t* vp9_raster_block_offset_int16(BLOCK_SIZE plane_bsize,
                                        int raster_block, int16_t *base);
 
-const YV12_BUFFER_CONFIG *vp9_get_scaled_ref_frame(const struct VP9_COMP *cpi,
-                                                   int ref_frame);
+YV12_BUFFER_CONFIG *vp9_get_scaled_ref_frame(const struct VP9_COMP *cpi,
+                                             int ref_frame);
 
 void vp9_init_me_luts();
 
@@ -2022,6 +2022,8 @@ static void setup_buffer_inter(VP9_COMP *cpi, MACROBLOCK *x,
   int_mv *const candidates = mi->mbmi.ref_mvs[ref_frame];
   const struct scale_factors *const sf = &cm->frame_refs[ref_frame - 1].sf;
 
+  assert(yv12 != NULL);
+
   // TODO(jkoleszar): Is the UV buffer ever used here? If so, need to make this
   // use the UV scaling factors.
   vp9_setup_pred_block(xd, yv12_mb[ref_frame], yv12, mi_row, mi_col, sf, sf);
@@ -2912,6 +2914,7 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi,
   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
     x->pred_mv_sad[ref_frame] = INT_MAX;
     if (cpi->ref_frame_flags & flag_list[ref_frame]) {
+      assert(get_ref_frame_buffer(cpi, ref_frame) != NULL);
       setup_buffer_inter(cpi, x, tile_info, ref_frame, bsize, mi_row, mi_col,
                          frame_mv[NEARESTMV], frame_mv[NEARMV], yv12_mb);
     }
@@ -4238,4 +4241,3 @@ void vp9_rd_pick_inter_mode_sub8x8(VP9_COMP *cpi,
   store_coding_context(x, ctx, best_ref_index,
                        best_pred_diff, best_tx_diff, best_filter_diff, 0);
 }
-
@@ -91,8 +91,8 @@ void vp9_init_layer_context(VP9_COMP *const cpi) {
       if (oxcf->ss_enable_auto_arf[layer])
         lc->alt_ref_idx = alt_ref_idx++;
       else
-        lc->alt_ref_idx = -1;
-      lc->gold_ref_idx = -1;
+        lc->alt_ref_idx = INVALID_IDX;
+      lc->gold_ref_idx = INVALID_IDX;
     }
 
     lrc->buffer_level = oxcf->starting_buffer_level_ms *
@@ -278,7 +278,7 @@ static void init_buffer_callbacks(vpx_codec_alg_priv_t *ctx) {
     VP9_COMMON *const cm = &frame_worker_data->pbi->common;
     BufferPool *const pool = cm->buffer_pool;
 
-    cm->new_fb_idx = -1;
+    cm->new_fb_idx = INVALID_IDX;
     cm->byte_alignment = ctx->byte_alignment;
 
     if (ctx->get_ext_fb_cb != NULL && ctx->release_ext_fb_cb != NULL) {