From 3f7656cc23dfce0d6a9849a0460626258579f647 Mon Sep 17 00:00:00 2001
From: Marco
Date: Tue, 23 Jun 2015 08:34:06 -0700
Subject: [PATCH] Limit cyclic refresh on steady background blocks.

Use the existing QP condition for limiting cyclic refresh, and add an
additional condition that the block has been encoded with zero/small
motion for x frames in a row (where x is at least several times the
refresh period). The additional condition only affects non-screen
content mode.

This helps to improve visual stability for noisy input, where on steady
background areas the application of delta_qp may lead to encoding the
noise.

Also use the true skip (taken after encoding) to update the last coded
QP.

Change-Id: I234a1128d017d284cf767fdb58ef6c59d809f679
---
 vp9/encoder/vp9_aq_cyclicrefresh.c | 53 ++++++++++++++++++++++++++----
 vp9/encoder/vp9_aq_cyclicrefresh.h |  7 ++++
 vp9/encoder/vp9_encodeframe.c      |  2 ++
 3 files changed, 56 insertions(+), 6 deletions(-)

diff --git a/vp9/encoder/vp9_aq_cyclicrefresh.c b/vp9/encoder/vp9_aq_cyclicrefresh.c
index bb1e17956..968dad26b 100644
--- a/vp9/encoder/vp9_aq_cyclicrefresh.c
+++ b/vp9/encoder/vp9_aq_cyclicrefresh.c
@@ -23,6 +23,7 @@ CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
   size_t last_coded_q_map_size;
+  size_t consec_zero_mv_size;
   CYCLIC_REFRESH *const cr = vpx_calloc(1, sizeof(*cr));
   if (cr == NULL) return NULL;
@@ -41,12 +42,20 @@ CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
   assert(MAXQ <= 255);
   memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);
+  consec_zero_mv_size = mi_rows * mi_cols * sizeof(*cr->consec_zero_mv);
+  cr->consec_zero_mv = vpx_malloc(consec_zero_mv_size);
+  if (cr->consec_zero_mv == NULL) {
+    vpx_free(cr);
+    return NULL;
+  }
+  memset(cr->consec_zero_mv, 0, consec_zero_mv_size);
   return cr;
 }
 
 void vp9_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
   vpx_free(cr->map);
   vpx_free(cr->last_coded_q_map);
+  vpx_free(cr->consec_zero_mv);
   vpx_free(cr);
 }
@@ -228,22 +237,48 @@ void vp9_cyclic_refresh_update_segment(VP9_COMP *const cpi,
       int map_offset = block_index + y * cm->mi_cols + x;
       cr->map[map_offset] = new_map_value;
       cpi->segmentation_map[map_offset] = mbmi->segment_id;
+    }
+}
+
+void vp9_cyclic_refresh_update_sb_postencode(VP9_COMP *const cpi,
+                                             const MB_MODE_INFO *const mbmi,
+                                             int mi_row, int mi_col,
+                                             BLOCK_SIZE bsize) {
+  const VP9_COMMON *const cm = &cpi->common;
+  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
+  MV mv = mbmi->mv[0].as_mv;
+  const int bw = num_8x8_blocks_wide_lookup[bsize];
+  const int bh = num_8x8_blocks_high_lookup[bsize];
+  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
+  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
+  const int block_index = mi_row * cm->mi_cols + mi_col;
+  int x, y;
+  for (y = 0; y < ymis; y++)
+    for (x = 0; x < xmis; x++) {
+      int map_offset = block_index + y * cm->mi_cols + x;
       // Inter skip blocks were clearly not coded at the current qindex, so
       // don't update the map for them. For cases where motion is non-zero or
       // the reference frame isn't the previous frame, the previous value in
       // the map for this spatial location is not entirely correct.
-      if ((!is_inter_block(mbmi) || !skip) &&
+      if ((!is_inter_block(mbmi) || !mbmi->skip) &&
           mbmi->segment_id <= CR_SEGMENT_ID_BOOST2) {
         cr->last_coded_q_map[map_offset] = clamp(
             cm->base_qindex + cr->qindex_delta[mbmi->segment_id], 0, MAXQ);
-      } else if (is_inter_block(mbmi) && skip &&
+      } else if (is_inter_block(mbmi) && mbmi->skip &&
                  mbmi->segment_id <= CR_SEGMENT_ID_BOOST2) {
         cr->last_coded_q_map[map_offset] = VPXMIN(
             clamp(cm->base_qindex + cr->qindex_delta[mbmi->segment_id], 0, MAXQ),
             cr->last_coded_q_map[map_offset]);
       }
+      // Update the consecutive zero/low_mv count.
+      if (is_inter_block(mbmi) && (abs(mv.row) < 8 && abs(mv.col) < 8)) {
+        if (cr->consec_zero_mv[map_offset] < 255)
+          cr->consec_zero_mv[map_offset]++;
+      } else {
+        cr->consec_zero_mv[map_offset] = 0;
+      }
     }
 }
 
 // Update the actual number of blocks that were applied the segment delta q.
@@ -380,9 +415,10 @@ static void cyclic_refresh_update_map(VP9_COMP *const cpi) {
     int mi_row = sb_row_index * MI_BLOCK_SIZE;
     int mi_col = sb_col_index * MI_BLOCK_SIZE;
     int qindex_thresh =
-        cpi->oxcf.content == VP9E_CONTENT_SCREEN
-            ? vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2, cm->base_qindex)
-            : 0;
+        vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2, cm->base_qindex);
+    int consec_zero_mv_thresh =
+        cpi->oxcf.content == VP9E_CONTENT_SCREEN ? 0
+            : 10 * (100 / cr->percent_refresh);
     assert(mi_row >= 0 && mi_row < cm->mi_rows);
     assert(mi_col >= 0 && mi_col < cm->mi_cols);
     bl_index = mi_row * cm->mi_cols + mi_col;
@@ -398,7 +434,8 @@ static void cyclic_refresh_update_map(VP9_COMP *const cpi) {
         // for possible boost/refresh (segment 1). The segment id may get
         // reset to 0 later if block gets coded anything other than ZEROMV.
         if (cr->map[bl_index2] == 0) {
-          if (cr->last_coded_q_map[bl_index2] > qindex_thresh)
+          if (cr->last_coded_q_map[bl_index2] > qindex_thresh ||
+              cr->consec_zero_mv[bl_index2] < consec_zero_mv_thresh)
             sum_map++;
         } else if (cr->map[bl_index2] < 0) {
           cr->map[bl_index2]++;
@@ -475,6 +512,8 @@ void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
     if (cm->frame_type == KEY_FRAME) {
       memset(cr->last_coded_q_map, MAXQ,
              cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
+      memset(cr->consec_zero_mv, 0,
+             cm->mi_rows * cm->mi_cols * sizeof(*cr->consec_zero_mv));
       cr->sb_index = 0;
     }
     return;
   }
@@ -544,6 +583,8 @@ void vp9_cyclic_refresh_reset_resize(VP9_COMP *const cpi) {
   const VP9_COMMON *const cm = &cpi->common;
   CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
   memset(cr->map, 0, cm->mi_rows * cm->mi_cols);
+  memset(cr->last_coded_q_map, MAXQ, cm->mi_rows * cm->mi_cols);
+  memset(cr->consec_zero_mv, 0, cm->mi_rows * cm->mi_cols);
   cr->sb_index = 0;
   cpi->refresh_golden_frame = 1;
 }
diff --git a/vp9/encoder/vp9_aq_cyclicrefresh.h b/vp9/encoder/vp9_aq_cyclicrefresh.h
index 7da1f94cf..839ce6df4 100644
--- a/vp9/encoder/vp9_aq_cyclicrefresh.h
+++ b/vp9/encoder/vp9_aq_cyclicrefresh.h
@@ -51,6 +51,8 @@ struct CYCLIC_REFRESH {
   signed char *map;
   // Map of the last q a block was coded at.
   uint8_t *last_coded_q_map;
+  // Count on how many consecutive times a block uses ZEROMV for encoding.
+  uint8_t *consec_zero_mv;
   // Thresholds applied to the projected rate/distortion of the coding block,
   // when deciding whether block should be refreshed.
   int64_t thresh_rate_sb;
@@ -92,6 +94,11 @@ void vp9_cyclic_refresh_update_segment(struct VP9_COMP *const cpi,
                                        int mi_row, int mi_col,
                                        BLOCK_SIZE bsize, int64_t rate,
                                        int64_t dist, int skip);
+void vp9_cyclic_refresh_update_sb_postencode(struct VP9_COMP *const cpi,
+                                             const MB_MODE_INFO *const mbmi,
+                                             int mi_row, int mi_col,
+                                             BLOCK_SIZE bsize);
+
 // Update the segmentation map, and related quantities: cyclic refresh map,
 // refresh sb_index, and target number of blocks to be refreshed.
 void vp9_cyclic_refresh_update__map(struct VP9_COMP *const cpi);
diff --git a/vp9/encoder/vp9_encodeframe.c b/vp9/encoder/vp9_encodeframe.c
index 4a4301e85..1c4f35a53 100644
--- a/vp9/encoder/vp9_encodeframe.c
+++ b/vp9/encoder/vp9_encodeframe.c
@@ -4213,5 +4213,7 @@ static void encode_superblock(VP9_COMP *cpi, ThreadData *td,
     }
     ++td->counts->tx.tx_totals[mbmi->tx_size];
     ++td->counts->tx.tx_totals[get_uv_tx_size(mbmi, &xd->plane[1])];
+    if (cm->seg.enabled && cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
+      vp9_cyclic_refresh_update_sb_postencode(cpi, mbmi, mi_row, mi_col, bsize);
   }
 }
-- 
2.40.0
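Note (not part of the patch): for readers skimming the diff, the gating added in
cyclic_refresh_update_map can be summarized with the minimal standalone sketch
below. The helper name is_refresh_candidate and the simplified scalar inputs are
assumptions made for illustration; in the encoder the values come from
cr->last_coded_q_map, cr->consec_zero_mv, cr->percent_refresh and
vp9_get_qindex() for the CR_SEGMENT_ID_BOOST2 segment.

#include <stdio.h>

// Sketch of the refresh-candidate condition after this patch: a block whose
// cyclic-refresh map entry is 0 is counted toward refresh only if it was last
// coded above the boost-segment qindex, OR it has not been static (zero/small
// motion) for roughly ten refresh periods. For screen content the motion
// condition is disabled (threshold 0), as in the patch.
static int is_refresh_candidate(int last_coded_q, int qindex_thresh,
                                int consec_zero_mv, int percent_refresh,
                                int is_screen_content) {
  const int consec_zero_mv_thresh =
      is_screen_content ? 0 : 10 * (100 / percent_refresh);
  return last_coded_q > qindex_thresh ||
         consec_zero_mv < consec_zero_mv_thresh;
}

int main(void) {
  // With percent_refresh = 10 the refresh period is 100 / 10 = 10 frames, so a
  // block must have been static for about 100 frames before refresh is skipped.
  printf("steady noisy background: %d\n",
         is_refresh_candidate(30, 40, 200, 10, 0));  // 0: left alone
  printf("recently moving block:   %d\n",
         is_refresh_candidate(30, 40, 5, 10, 0));    // 1: still refreshed
  return 0;
}

This is the mechanism the commit message describes: only blocks that have shown
zero/small motion for many consecutive frames (several refresh periods) and were
already coded at a low enough Q are excluded, which keeps the delta-QP boost from
re-encoding noise on steady background areas.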