#endif // CONFIG_MULTI_REF
#if CONFIG_NEW_QUANT
-#define QUANT_PROFILES 3
-#define DEFAULT_DQ 0
+#define QUANT_PROFILES 2
+
+#if QUANT_PROFILES > 1
+static INLINE int switchable_dq_profile_used(BLOCK_SIZE bsize) {
+ return bsize >= BLOCK_16X16;
+}
+#define Q_THRESHOLD_MIN 0
+#define Q_THRESHOLD_MAX 1000
+#endif // QUANT_PROFILES > 1
#endif // CONFIG_NEW_QUANT
typedef enum {
};
#endif // CONFIG_COPY_MODE
+#if CONFIG_NEW_QUANT
+// Coding tree and default probabilities for the per-block dq-profile index.
+// Tree shape depends on how many profiles are compiled in.
+#if QUANT_PROFILES == 2
+const vp9_tree_index vp9_dq_profile_tree[TREE_SIZE(QUANT_PROFILES)] = {
+ -0, -1
+};
+static const vp9_prob default_dq_profile_prob[QUANT_PROFILES - 1] = {
+ 240
+};
+
+#elif QUANT_PROFILES == 3
+const vp9_tree_index vp9_dq_profile_tree[TREE_SIZE(QUANT_PROFILES)] = {
+ -0, 2,
+ -1, -2
+};
+static const vp9_prob default_dq_profile_prob[QUANT_PROFILES - 1] = {
+ 240, 128
+};
+#endif // QUANT_PROFILES == 2 / QUANT_PROFILES == 3
+#endif // CONFIG_NEW_QUANT
+
#if CONFIG_TX64X64
void tx_counts_to_branch_counts_64x64(const unsigned int *tx_count_64x64p,
unsigned int (*ct_64x64p)[2]) {
#if CONFIG_WEDGE_PARTITION
vp9_copy(fc->wedge_interinter_prob, default_wedge_interinter_prob);
#endif // CONFIG_WEDGE_PARTITION
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ vp9_copy(fc->dq_profile_prob, default_dq_profile_prob);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
const vp9_tree_index vp9_switchable_interp_tree
adapt_prob(pre_fc->palette_uv_enabled_prob[i],
counts->uv_palette_enabled[i]);
#endif // CONFIG_PALETTE
+
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ adapt_probs(vp9_dq_profile_tree, pre_fc->dq_profile_prob,
+ counts->dq_profile, fc->dq_profile_prob);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
static void set_default_lf_deltas(struct loopfilter *lf) {
#if CONFIG_GLOBAL_MOTION
vp9_prob global_motion_types_prob[GLOBAL_MOTION_TYPES - 1];
#endif // CONFIG_GLOBAL_MOTION
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ vp9_prob dq_profile_prob[QUANT_PROFILES - 1];
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
} FRAME_CONTEXT;
typedef struct {
#if CONFIG_GLOBAL_MOTION
unsigned int global_motion_types[GLOBAL_MOTION_TYPES];
#endif // CONFIG_GLOBAL_MOTION
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ unsigned int dq_profile[QUANT_PROFILES];
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
} FRAME_COUNTS;
extern const vp9_prob vp9_kf_uv_mode_prob[INTRA_MODES][INTRA_MODES - 1];
[TREE_SIZE(INTER_COMPOUND_MODES)];
#endif // CONFIG_NEW_INTER
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+extern const vp9_tree_index vp9_dq_profile_tree[TREE_SIZE(QUANT_PROFILES)];
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
void vp9_setup_past_independence(struct VP9Common *cm);
#if CONFIG_ROW_TILE
void vp9_dec_setup_past_independence(struct VP9Common *cm,
#define FRAME_CONTEXTS_LOG2 2
#define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2)
-#if CONFIG_NEW_QUANT
-#define QUANT_PROFILES 3
-#endif // CONFIG_NEW_QUANT
-
extern const struct {
PARTITION_CONTEXT above;
PARTITION_CONTEXT left;
#endif // CONFIG_TX_SKIP
};
-static const uint8_t vp9_nuq_knots_mid[COEF_BANDS][NUQ_KNOTS] = {
- {84, 124, 128}, // dc, band 0
- {84, 124, 128}, // band 1
- {84, 124, 128}, // band 2
- {86, 124, 128}, // band 3
- {86, 124, 128}, // band 4
- {86, 124, 128}, // band 5
+// Quantizer knot points per dq profile and coefficient band, expressed as
+// Q7 fractions of the quantization step (see the "* q + 64 >> 7" scaling
+// where the knots are consumed). All profiles currently share identical
+// knot values; they differ only in dead-zone offsets below.
+static const uint8_t vp9_nuq_knots[QUANT_PROFILES][COEF_BANDS][NUQ_KNOTS] = {
+ {
+ {86, 122, 128}, // dc, band 0
+ {86, 122, 128}, // band 1
+ {86, 122, 128}, // band 2
+ {88, 122, 128}, // band 3
+ {88, 122, 128}, // band 4
+ {88, 122, 128}, // band 5
#if CONFIG_TX_SKIP
- {84, 124, 128}, // band 6
+ {86, 122, 128}, // band 6
#endif // CONFIG_TX_SKIP
+ },
+#if QUANT_PROFILES > 1
+ {
+ {86, 122, 128}, // dc, band 0
+ {86, 122, 128}, // band 1
+ {86, 122, 128}, // band 2
+ {88, 122, 128}, // band 3
+ {88, 122, 128}, // band 4
+ {88, 122, 128}, // band 5
+#if CONFIG_TX_SKIP
+ {86, 122, 128}, // band 6
+#endif // CONFIG_TX_SKIP
+ },
+#if QUANT_PROFILES > 2
+ {
+ {86, 122, 128}, // dc, band 0
+ {86, 122, 128}, // band 1
+ {86, 122, 128}, // band 2
+ {88, 122, 128}, // band 3
+ {88, 122, 128}, // band 4
+ {88, 122, 128}, // band 5
+#if CONFIG_TX_SKIP
+ {86, 122, 128}, // band 6
+#endif // CONFIG_TX_SKIP
+ }
+#endif // QUANT_PROFILES > 2
+#endif // QUANT_PROFILES > 1
};
-static const uint8_t vp9_nuq_doff_lossless[COEF_BANDS] = { 0, 0, 0, 0, 0, 0
+// Dead-zone offsets are all zero in the lossless path.
+static const uint8_t vp9_nuq_doff_lossless[COEF_BANDS] = { 0, 0, 0, 0, 0, 0,
#if CONFIG_TX_SKIP
- , 0
+ 0
#endif // CONFIG_TX_SKIP
};
-static const uint8_t vp9_nuq_doff_low[COEF_BANDS] = { 5, 13, 14, 19, 20, 21
+// Per-profile dead-zone offsets by coefficient band (lossy path); this is
+// what actually distinguishes the dq profiles, replacing the former
+// separate low/mid/high tables.
+static const uint8_t vp9_nuq_doff[QUANT_PROFILES][COEF_BANDS] = {
+ { 8, 15, 16, 22, 23, 24, // dq_off_index = 0
#if CONFIG_TX_SKIP
- , 8
+ 8
#endif // CONFIG_TX_SKIP
-};
-static const uint8_t vp9_nuq_doff_mid[COEF_BANDS] = { 8, 16, 17, 22, 23, 24
+ },
+#if QUANT_PROFILES > 1
+ { 6, 12, 13, 16, 17, 18, // dq_off_index = 1
#if CONFIG_TX_SKIP
- , 8
+ 8
#endif // CONFIG_TX_SKIP
-};
-static const uint8_t vp9_nuq_doff_high[COEF_BANDS] = { 41, 49, 50, 55, 56, 57
+ },
+#if QUANT_PROFILES > 2
+ { 10, 18, 19, 23, 25, 26, // dq_off_index = 2
#if CONFIG_TX_SKIP
- , 8
+ 8
#endif // CONFIG_TX_SKIP
+ }
+#endif // QUANT_PROFILES > 2
+#endif // QUANT_PROFILES > 1
};
// Allow different quantization profiles in different q ranges,
// to enable entropy-constraints in scalar quantization.
-static const uint8_t *get_nuq_knots(int lossless, int band) {
+// Selects the knot row for this band: the lossless table, or the row of
+// the per-profile table indexed by dq_off_index.
+static const uint8_t *get_nuq_knots(int lossless, int band, int dq_off_index) {
if (lossless)
return vp9_nuq_knots_lossless[band];
else
- return vp9_nuq_knots_mid[band];
+ return vp9_nuq_knots[dq_off_index][band];
}
+// Returns the dead-zone offset for (band, dq_off_index); the old chained
+// if/else over per-profile tables collapses into one 2-D table lookup.
static INLINE int16_t quant_to_doff_fixed(int lossless, int band,
int dq_off_index) {
if (lossless)
return vp9_nuq_doff_lossless[band];
- else if (!dq_off_index) // dq_off_index == 0
- return vp9_nuq_doff_mid[band];
- else if (dq_off_index == 1)
- return vp9_nuq_doff_low[band];
- else // dq_off_index == 2
- return vp9_nuq_doff_high[band];
+ else
+ return vp9_nuq_doff[dq_off_index][band];
}
static INLINE void get_cumbins_nuq(int q, int lossless, int band,
- tran_low_t *cumbins) {
- const uint8_t *knots = get_nuq_knots(lossless, band);
+ tran_low_t *cumbins, int dq_off_index) {
+ const uint8_t *knots = get_nuq_knots(lossless, band, dq_off_index);
int16_t cumknots[NUQ_KNOTS];
int i;
cumknots[0] = knots[0];
void vp9_get_dequant_val_nuq(int q, int lossless, int band,
tran_low_t *dq, tran_low_t *cumbins,
int dq_off_index) {
- const uint8_t *knots = get_nuq_knots(lossless, band);
+ const uint8_t *knots = get_nuq_knots(lossless, band, dq_off_index);
tran_low_t cumbins_[NUQ_KNOTS], *cumbins_ptr;
tran_low_t doff;
int i;
cumbins_ptr = (cumbins ? cumbins : cumbins_);
- get_cumbins_nuq(q, lossless, band, cumbins_ptr);
+ get_cumbins_nuq(q, lossless, band, cumbins_ptr, dq_off_index);
dq[0] = 0;
for (i = 1; i < NUQ_KNOTS; ++i) {
const int16_t qstep = (knots[i] * q + 64) >> 7;
vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+// Reads forward updates of the dq-profile tree probabilities (one per
+// internal tree node) from the compressed header.
+static void read_dq_profile_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
+ int i;
+ for (i = 0; i < QUANT_PROFILES - 1; ++i)
+ vp9_diff_update_prob(r, &fc->dq_profile_prob[i]);
+}
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
int i, j;
for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
BLOCK_SIZE bsize, int mi_row, int mi_col,
#if CONFIG_EXT_TX
int txfm,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_off_index,
#endif
int skip) {
const int bw = num_8x8_blocks_wide_lookup[bsize];
xd->mi[y * cm->mi_stride + x].mbmi.skip = skip;
#if CONFIG_EXT_TX
xd->mi[y * cm->mi_stride + x].mbmi.ext_txfrm = txfm;
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ xd->mi[y * cm->mi_stride + x].mbmi.dq_off_index = dq_off_index;
#endif
}
}
#if CONFIG_SUPERTX
if (!supertx_enabled) {
#endif
- if (less8x8)
- bsize = BLOCK_8X8;
+ if (less8x8)
+ bsize = BLOCK_8X8;
- if (mbmi->skip) {
- reset_skip_context(xd, bsize);
- } else {
- if (cm->seg.enabled) {
- setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
- cm->base_qindex));
+ if (mbmi->skip) {
+ reset_skip_context(xd, bsize);
+ } else {
+ if (cm->seg.enabled) {
+ setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
+ cm->base_qindex));
+ }
}
- }
- if (!is_inter_block(mbmi)
+ if (!is_inter_block(mbmi)
#if CONFIG_INTRABC
- && !is_intrabc_mode(mbmi->mode)
+ && !is_intrabc_mode(mbmi->mode)
#endif // CONFIG_INTRABC
- ) {
- struct intra_args arg = { cm, xd, r };
- vp9_foreach_transformed_block(xd, bsize,
- predict_and_reconstruct_intra_block, &arg);
- } else {
- // Prediction
- vp9_dec_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
+ ) {
+ struct intra_args arg = { cm, xd, r };
+ vp9_foreach_transformed_block(xd, bsize,
+ predict_and_reconstruct_intra_block, &arg);
+ } else {
+ // Prediction
+ vp9_dec_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
- // Reconstruction
- if (!mbmi->skip) {
- int eobtotal = 0;
- struct inter_args arg = { cm, xd, r, &eobtotal };
+ // Reconstruction
+ if (!mbmi->skip) {
+ int eobtotal = 0;
+ struct inter_args arg = { cm, xd, r, &eobtotal };
- vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
+ vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
#if CONFIG_BITSTREAM_FIXES
#else
- if (!less8x8 && eobtotal == 0)
- mbmi->skip = 1; // skip loopfilter
+ if (!less8x8 && eobtotal == 0)
+ mbmi->skip = 1; // skip loopfilter
#endif
+ }
}
- }
#if CONFIG_SUPERTX
}
#endif
#if CONFIG_EXT_TX
int txfm = NORM;
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_off_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#endif // CONFIG_SUPERTX
if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
}
}
#endif // CONFIG_EXT_TX
+ /*
+ printf("D[%d/%d, %d %d] sb_type %d skip %d}\n", cm->current_video_frame, cm->show_frame,
+ mi_row, mi_col, bsize, skip);
+ */
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(bsize) && !skip &&
+ !vp9_segfeature_active(
+ &cm->seg, xd->mi[0].mbmi.segment_id, SEG_LVL_SKIP)) {
+ dq_off_index = vp9_read_dq_profile(cm, r);
+ } else {
+ dq_off_index = 0;
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
#endif // CONFIG_SUPERTX
if (subsize < BLOCK_8X8) {
#if CONFIG_EXT_TX
xd->mi[0].mbmi.ext_txfrm = txfm;
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ xd->mi[0].mbmi.dq_off_index = dq_off_index;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
if (!(subsize < BLOCK_8X8) && eobtotal == 0)
skip = 1;
#if CONFIG_EXT_TX
txfm,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_off_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
skip);
}
#endif // CONFIG_SUPERTX
read_inter_compound_mode_probs(fc, &r);
#endif // CONFIG_NEW_INTER
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ read_dq_profile_probs(fc, &r);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
if (cm->interp_filter == SWITCHABLE)
read_switchable_interp_probs(fc, &r);
}
#endif // CONFIG_NEW_INTER
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+// Decodes one dq-profile symbol with the frame-context probabilities.
+// Counts are accumulated for backward adaptation, but skipped in
+// frame-parallel decoding mode (where adaptation is disabled).
+int vp9_read_dq_profile(VP9_COMMON *cm, vp9_reader *r) {
+ const int dq_profile = vp9_read_tree(r, vp9_dq_profile_tree,
+ cm->fc.dq_profile_prob);
+ if (!cm->frame_parallel_decoding_mode) {
+ ++cm->counts.dq_profile[dq_profile];
+ }
+ return dq_profile;
+}
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
static PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, vp9_reader *r,
int ctx) {
const int mode = vp9_read_tree(r, vp9_inter_mode_tree,
#if CONFIG_COPY_MODE
static COPY_MODE read_copy_mode(VP9_COMMON *cm, vp9_reader *r,
int num_candidate, int ctx) {
- COPY_MODE mode;
+ COPY_MODE mode = 0;
switch (num_candidate) {
case 0:
int_mv dv_ref;
#endif // CONFIG_INTRABC
-#if CONFIG_NEW_QUANT
- mbmi->dq_off_index = DEFAULT_DQ;
-#endif // CONFIG_NEW_QUANT
-
mbmi->segment_id = read_intra_segment_id(cm, xd, mi_row, mi_col, r);
#if CONFIG_MISC_ENTROPY
mbmi->skip = 0;
#else // CONFIG_SR_MODE
mbmi->tx_size = read_tx_size(cm, xd, cm->tx_mode, bsize, 1, r);
#endif // CONFIG_SR_MODE
-#endif
+#endif // CONFIG_PALETTE
+
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN && cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(mbmi->sb_type) &&
+ !mbmi->skip &&
+ !vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
+ mbmi->dq_off_index = vp9_read_dq_profile(cm, r);
+ } else {
+ mbmi->dq_off_index = 0;
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
mbmi->ref_frame[0] = INTRA_FRAME;
mbmi->ref_frame[1] = NONE;
(void) supertx_enabled;
#endif
-#if CONFIG_NEW_QUANT
- mbmi->dq_off_index = DEFAULT_DQ;
-#endif // CONFIG_NEW_QUANT
-
mbmi->mv[0].as_int = 0;
mbmi->mv[1].as_int = 0;
mbmi->mode = NEARESTMV;
mbmi->skip = skip_backup;
mbmi->copy_mode = copy_mode_backup;
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (!(cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(mbmi->sb_type) &&
+ !vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)))
+ mbmi->dq_off_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
#endif // CONFIG_COPY_MODE
#endif // CONFIG_SR_MODE
#endif // CONFIG_PALETTE
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(mbmi->sb_type) &&
+#if CONFIG_SUPERTX
+ !supertx_enabled &&
+#endif
+ !mbmi->skip &&
+ !vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
+#if CONFIG_COPY_MODE
+ if (mbmi->copy_mode == NOREF)
+#endif // CONFIG_COPY_MODE
+ mbmi->dq_off_index = vp9_read_dq_profile(cm, r);
+ } else {
+ mbmi->dq_off_index = 0;
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
#if CONFIG_EXT_TX
if (inter_block &&
#if !CONFIG_WAVELETS
#endif
#endif
int mi_row, int mi_col, vp9_reader *r);
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+int vp9_read_dq_profile(VP9_COMMON *cm, vp9_reader *r);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#ifdef __cplusplus
} // extern "C"
#if CONFIG_GLOBAL_MOTION
static struct vp9_token global_motion_types_encodings[GLOBAL_MOTION_TYPES];
#endif // CONFIG_GLOBAL_MOTION
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+static struct vp9_token dq_profile_encodings[QUANT_PROFILES];
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_SUPERTX
static int vp9_check_supertx(VP9_COMMON *cm, int mi_row, int mi_col,
vp9_tokens_from_tree(global_motion_types_encodings,
vp9_global_motion_types_tree);
#endif // CONFIG_GLOBAL_MOTION
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ vp9_tokens_from_tree(dq_profile_encodings, vp9_dq_profile_tree);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
static void write_intra_mode(vp9_writer *w, PREDICTION_MODE mode,
vp9_cond_prob_diff_update(w, &cm->fc.skip_probs[k], cm->counts.skip[k]);
}
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+// Writes the dq-profile index of one block, mirroring vp9_read_dq_profile
+// on the decoder side.
+static void write_dq_profile(const VP9_COMMON *cm, int dq_profile,
+ vp9_writer *w) {
+ vp9_write_token(w, vp9_dq_profile_tree, cm->fc.dq_profile_prob,
+ &dq_profile_encodings[dq_profile]);
+}
+
+// Emits conditional updates of the dq-profile probabilities into the
+// compressed header, driven by the counts gathered during encoding.
+static void update_dq_profile_probs(VP9_COMMON *cm, vp9_writer *w) {
+ prob_diff_update(vp9_dq_profile_tree,
+ cm->fc.dq_profile_prob,
+ cm->counts.dq_profile, QUANT_PROFILES, w);
+}
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
#if CONFIG_SR_MODE
#if SR_USE_MULTI_F
static int write_sr_usfilter(const VP9_COMMON *cm, const MACROBLOCKD *xd,
if (!supertx_enabled) {
#endif
#if CONFIG_COPY_MODE
- if (mbmi->copy_mode == NOREF)
+ if (mbmi->copy_mode == NOREF)
#endif
- if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
- vp9_write(w, is_inter, vp9_get_intra_inter_prob(cm, xd));
+ if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
+ vp9_write(w, is_inter, vp9_get_intra_inter_prob(cm, xd));
#if CONFIG_MISC_ENTROPY
- skip = write_skip(cm, xd, segment_id, mi, is_inter, w);
+ skip = write_skip(cm, xd, segment_id, mi, is_inter, w);
#endif
#if CONFIG_SUPERTX
(skip || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
write_selected_tx_size(cm, xd, mbmi->tx_size, bsize, w);
}
+
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN && cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(mbmi->sb_type) &&
+#if CONFIG_SUPERTX
+ !supertx_enabled &&
+#endif // CONFIG_SUPERTX
+ !mbmi->skip &&
+ !vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
+#if CONFIG_COPY_MODE
+ if (mbmi->copy_mode == NOREF)
+#endif // CONFIG_COPY_MODE
+ write_dq_profile(cm, mbmi->dq_off_index, w);
+ } else {
+ assert(mbmi->dq_off_index == 0);
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
#if CONFIG_EXT_TX
if (is_inter &&
#if !CONFIG_WAVELETS
write_selected_tx_size(cm, xd, mbmi->tx_size, bsize, w);
}
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN && cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(mbmi->sb_type) &&
+ !mbmi->skip &&
+ !vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
+ write_dq_profile(cm, mbmi->dq_off_index, w);
+ } else {
+ assert(mbmi->dq_off_index == 0);
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
#if CONFIG_TX_SKIP
if (bsize >= BLOCK_8X8) {
int q_idx = vp9_get_qindex(seg, mbmi->segment_id, cm->base_qindex);
&ext_tx_encodings[xd->mi[0].mbmi.ext_txfrm]);
#endif // CONFIG_WAVELETS
#endif // CONFIG_EXT_TX
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (!xd->mi[0].mbmi.skip &&
+ cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(bsize) &&
+ !vp9_segfeature_active(
+ &cm->seg, xd->mi[0].mbmi.segment_id, SEG_LVL_SKIP)) {
+ write_dq_profile(cm, xd->mi[0].mbmi.dq_off_index, w);
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
}
#endif // CONFIG_SUPERTX
update_inter_compound_mode_probs(cm, &header_bc);
#endif // CONFIG_NEW_INTER
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ update_dq_profile_probs(cm, &header_bc);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
if (cm->interp_filter == SWITCHABLE)
update_switchable_interp_probs(cm, &header_bc);
#if CONFIG_EXT_TX
EXT_TX_TYPE *best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int *dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
PC_TREE *pc_tree);
#endif // CONFIG_SUPERTX
#if CONFIG_EXT_TX
int best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
TX_SIZE supertx_size) {
MACROBLOCK *const x = &cpi->mb;
#if CONFIG_EXT_TX
ctx->mic.mbmi.ext_txfrm = best_tx;
#endif // CONFIG_EXT_TX
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ ctx->mic.mbmi.dq_off_index = dq_index;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_TX_SKIP
ctx->mic.mbmi.tx_skip[0] = 0;
ctx->mic.mbmi.tx_skip[1] = 0;
#if CONFIG_EXT_TX
int best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
TX_SIZE supertx_size, PC_TREE *pc_tree) {
VP9_COMMON *const cm = &cpi->common;
int bsl = b_width_log2_lookup[bsize], hbs = (1 << bsl) / 4;
update_supertx_param(cpi, &pc_tree->none,
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
break;
update_supertx_param(cpi, &pc_tree->vertical[0],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
if (mi_col + hbs < cm->mi_cols && bsize > BLOCK_8X8)
update_supertx_param(cpi, &pc_tree->vertical[1],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
break;
update_supertx_param(cpi, &pc_tree->horizontal[0],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
if (mi_row + hbs < cm->mi_rows && bsize > BLOCK_8X8)
update_supertx_param(cpi, &pc_tree->horizontal[1],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
break;
update_supertx_param(cpi, pc_tree->leaf_split[0],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
} else {
update_supertx_param_sb(cpi, mi_row, mi_col, subsize,
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size, pc_tree->split[0]);
update_supertx_param_sb(cpi, mi_row, mi_col + hbs, subsize,
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size, pc_tree->split[1]);
update_supertx_param_sb(cpi, mi_row + hbs, mi_col, subsize,
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size, pc_tree->split[2]);
update_supertx_param_sb(cpi, mi_row + hbs, mi_col + hbs, subsize,
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size, pc_tree->split[3]);
}
update_supertx_param(cpi, &pc_tree->horizontala[i],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
break;
update_supertx_param(cpi, &pc_tree->horizontalb[i],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
break;
update_supertx_param(cpi, &pc_tree->verticala[i],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
break;
update_supertx_param(cpi, &pc_tree->verticalb[i],
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size);
break;
cm->counts.supertx
[partition_supertx_context_lookup[partition]][supertx_size][1]++;
cm->counts.supertx_size[supertx_size]++;
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(bsize) &&
+ !xd->mi[0].mbmi.skip &&
+ !vp9_segfeature_active(&cm->seg, xd->mi[0].mbmi.segment_id,
+ SEG_LVL_SKIP)) {
+ ++cm->counts.dq_profile[xd->mi[0].mbmi.dq_off_index];
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_EXT_TX
#if CONFIG_WAVELETS
if (!xd->mi[0].mbmi.skip)
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
#if CONFIG_EXT_TX
&best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ &dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
update_supertx_param_sb(cpi, mi_row, mi_col, bsize,
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size, pc_tree);
}
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
#if CONFIG_EXT_TX
&best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ &dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
update_supertx_param_sb(cpi, mi_row, mi_col, bsize,
#if CONFIG_EXT_TX
best_tx,
+#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
#endif
supertx_size, pc_tree);
}
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
#if CONFIG_EXT_TX
&best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ &dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
#if CONFIG_EXT_TX
best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
supertx_size, pc_tree);
}
}
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
#if CONFIG_EXT_TX
&best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ &dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
#if CONFIG_EXT_TX
best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
supertx_size, pc_tree);
}
}
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
#if CONFIG_EXT_TX
&best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ &dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
#if CONFIG_EXT_TX
best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
supertx_size, pc_tree);
}
}
const int mi_width = num_8x8_blocks_wide_lookup[bsize];
const int mi_height = num_8x8_blocks_high_lookup[bsize];
-#if CONFIG_NEW_QUANT
- mbmi->dq_off_index = DEFAULT_DQ;
-#endif // CONFIG_NEW_QUANT
-
x->skip_recode = !x->select_tx_size && mbmi->sb_type >= BLOCK_8X8 &&
cpi->oxcf.aq_mode != COMPLEXITY_AQ &&
cpi->oxcf.aq_mode != CYCLIC_REFRESH_AQ &&
rows * cols * sizeof(xd->plane[1].color_index_map[0]));
}
}
-#endif // CONFIG_PALETTE
-#if CONFIG_PALETTE
if (frame_is_intra_only(cm) && output_enabled && bsize >= BLOCK_8X8) {
cm->palette_blocks_signalled++;
if (mbmi->palette_enabled[0])
vp9_encode_sb(x, MAX(bsize, BLOCK_8X8));
vp9_tokenize_sb(cpi, t, !output_enabled, MAX(bsize, BLOCK_8X8));
}
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ // This is not strictly required, but is a good practice.
+ // If you remove this, the assert in vp9_bitstream.c needs to be removed also.
+ if (mbmi->skip)
+ mbmi->dq_off_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_INTRABC
if (frame_is_intra_only(cm) && output_enabled && bsize >= BLOCK_8X8) {
++cm->counts.ext_tx[mbmi->tx_size][mbmi->ext_txfrm];
}
#endif // CONFIG_EXT_TX
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(mbmi->sb_type) &&
+#if CONFIG_COPY_MODE
+ (frame_is_intra_only(cm) || mbmi->copy_mode == NOREF) &&
+#endif // CONFIG_COPY_MODE
+ !mbmi->skip &&
+ !vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
+ ++cm->counts.dq_profile[mbmi->dq_off_index];
+ }
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_TX_SKIP
if (bsize >= BLOCK_8X8) {
int q_idx = vp9_get_qindex(&cm->seg, mbmi->segment_id, cm->base_qindex);
#if CONFIG_EXT_TX
EXT_TX_TYPE *best_tx,
#endif
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int *dq_index,
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
PC_TREE *pc_tree) {
VP9_COMMON *const cm = &cpi->common;
MACROBLOCK *const x = &cpi->mb;
#endif
update_state_sb_supertx(cpi, tile, mi_row, mi_col, bsize, 0, pc_tree);
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ xd->mi[0].mbmi.dq_off_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm),
mi_row, mi_col);
for (plane = 0; plane < MAX_MB_PLANE; plane++) {
x->skip = skip_tx;
xd->mi[0].mbmi.ext_txfrm = best_tx_nostx;
#endif // CONFIG_EXT_TX
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ *dq_index = 0;
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
#endif // CONFIG_SUPERTX
const int16_t *dequant_ptr = pd->dequant;
#endif // CONFIG_TX_SKIP
#if CONFIG_NEW_QUANT
- int dq = xd->mi->mbmi.dq_off_index;
#if CONFIG_TX_SKIP
const int use_rect_quant = is_rect_quant_used(&xd->mi[0].src_mi->mbmi, plane);
#endif // CONFIG_TX_SKIP
int plane;
mbmi->skip = 1;
- if (x->skip)
+ if (x->skip) {
+ mbmi->tx_size = max_txsize_lookup[bsize];
return;
+ }
for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
if (!x->skip_recode)
// Pick the loop filter level for the frame.
loopfilter_frame(cpi, cm);
- // printf("Bilateral level: %d\n", cm->lf.bilateral_level);
-
// build the bitstream
#if CONFIG_ROW_TILE
if (vp9_pack_bitstream(cpi, dest, size, 1) < 0) {
[PALETTE_COLORS];
#endif // CONFIG_PALETTE
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ int dq_profile_costs[QUANT_PROFILES];
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+
PICK_MODE_CONTEXT *leaf_tree;
PC_TREE *pc_tree;
PC_TREE *pc_root;
vp9_cost_tokens(cpi->palette_uv_color_costs[i][j],
fc->palette_uv_color_prob[i][j], vp9_palette_color_tree);
#endif // CONFIG_PALETTE
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ vp9_cost_tokens(cpi->dq_profile_costs, fc->dq_profile_prob,
+ vp9_dq_profile_tree);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
static void fill_token_costs(vp9_coeff_cost *c,
palette_enabled_prob[bsize - BLOCK_8X8][palette_ctx], 0);
#endif
this_rd = RDCOST(x->rdmult, x->rddiv, this_rate, this_distortion);
-
if (this_rd < best_rd) {
mode_selected = mode;
#if CONFIG_FILTERINTRA
}
}
}
+#if CONFIG_NEW_QUANT
+ mic->mbmi.dq_off_index = 0;
+#if QUANT_PROFILES > 1
+ if (cpi->common.base_qindex > Q_THRESHOLD_MIN &&
+ cpi->common.base_qindex < Q_THRESHOLD_MAX &&
+ !xd->lossless && switchable_dq_profile_used(bsize)) {
+ int64_t local_tx_cache[TX_MODES];
+ int i;
+ int best_dq = -1;
+ for (i = 0; i < QUANT_PROFILES; i++) {
+ mic->mbmi.dq_off_index = i;
+ super_block_yrd(cpi, x, &this_rate_tokenonly, &this_distortion,
+ &s, NULL, bsize, local_tx_cache, INT64_MAX);
+ this_rate = this_rate_tokenonly + bmode_costs[mic->mbmi.mode] +
+ cpi->dq_profile_costs[i];
+ this_rd = RDCOST(x->rdmult, x->rddiv, this_rate, this_distortion);
+ if (this_rd < best_rd || best_dq == -1) {
+ best_dq = i;
+ best_rd = this_rd;
+ }
+ }
+ mic->mbmi.dq_off_index = best_dq;
+ *rate = this_rate;
+ *rate_tokenonly = this_rate_tokenonly;
+ *distortion = this_distortion;
+ }
+#endif // QUANT_PROFILES > 1
+#endif // CONFIG_NEW_QUANT
#if CONFIG_TX_SKIP
#if CONFIG_FILTERINTRA
#endif // CONFIG_FILTERINTRA
}
#endif // CONFIG_PALETTE
-
return best_rd;
}
}
#endif // CONFIG_EXT_TX
+#if CONFIG_NEW_QUANT
+ mbmi->dq_off_index = 0;
+#if QUANT_PROFILES > 1
+ // Choose the best dq_index
+ if (cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ !xd->lossless && switchable_dq_profile_used(bsize)) {
+ int64_t rdcost_dq;
+ int rate_y_dq;
+ int64_t distortion_y_dq;
+ int dummy;
+ int64_t best_rdcost_dq = INT64_MAX;
+ int best_dq = -1;
+ for (i = 0; i < QUANT_PROFILES; i++) {
+ mbmi->dq_off_index = i;
+ super_block_yrd(cpi, x, &rate_y_dq, &distortion_y_dq, &dummy, psse,
+ bsize, txfm_cache, INT64_MAX);
+ assert(rate_y_dq != INT_MAX);
+ assert(rate_y_dq >= 0);
+ rate_y_dq += cpi->dq_profile_costs[i];
+ rdcost_dq = RDCOST(x->rdmult, x->rddiv, rate_y_dq, distortion_y_dq);
+ rdcost_dq = MIN(rdcost_dq, RDCOST(x->rdmult, x->rddiv, 0, *psse));
+ assert(rdcost_dq >= 0);
+ if (rdcost_dq < best_rdcost_dq || best_dq == -1) {
+ best_dq = i;
+ best_rdcost_dq = rdcost_dq;
+ }
+ }
+ mbmi->dq_off_index = best_dq;
+ }
+#endif // QUANT_PROFILES > 1
+#endif // CONFIG_NEW_QUANT
+
// Y cost and distortion
super_block_yrd(cpi, x, rate_y, &distortion_y, &skippable_y, psse,
bsize, txfm_cache, ref_best_rd);
if (this_mode != DC_PRED && this_mode != TM_PRED)
rate2 += intra_cost_penalty;
distortion2 = distortion_y + distortion_uv;
+
+#if CONFIG_NEW_QUANT
+ mbmi->dq_off_index = 0;
+#if QUANT_PROFILES > 1
+ if (cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ !xd->lossless && switchable_dq_profile_used(bsize)) {
+ int64_t rdcost_dq;
+ int rate_y_dq;
+ int64_t distortion_y_dq;
+ int dummy;
+ int64_t best_rdcost_dq = INT64_MAX;
+ int best_dq = -1;
+ for (i = 0; i < QUANT_PROFILES; i++) {
+ mbmi->dq_off_index = i;
+ super_block_yrd(cpi, x, &rate_y_dq, &distortion_y_dq, &dummy,
+ NULL, bsize, tx_cache, INT64_MAX);
+ assert(rate_y_dq != INT_MAX);
+ assert(rate_y_dq >= 0);
+ rate_y_dq += cpi->dq_profile_costs[i];
+ rdcost_dq = RDCOST(x->rdmult, x->rddiv, rate_y_dq, distortion_y_dq);
+ assert(rdcost_dq >= 0);
+ if (rdcost_dq < best_rdcost_dq || best_dq == -1) {
+ best_dq = i;
+ best_rdcost_dq = rdcost_dq;
+ }
+ }
+ mbmi->dq_off_index = best_dq;
+ }
+#endif // QUANT_PROFILES > 1
+#endif // CONFIG_NEW_QUANT
+
} else {
#if CONFIG_INTERINTRA
if (second_ref_frame == INTRA_FRAME) {
mbmi->tx_skip[0] = 0;
mbmi->tx_skip[1] = 0;
#endif // CONFIG_TX_SKIP
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (!(cm->base_qindex > Q_THRESHOLD_MIN &&
+ cm->base_qindex < Q_THRESHOLD_MAX &&
+ switchable_dq_profile_used(mbmi->sb_type) &&
+ !vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)))
+ mbmi->dq_off_index = 0;
+#endif
x->skip = 0;
set_ref_ptrs(cm, xd, mbmi->ref_frame[0], mbmi->ref_frame[1]);
for (i = 0; i < MAX_MB_PLANE; i++) {
rate2 += rate_copy_mode;
this_rd = RDCOST(x->rdmult, x->rddiv, rate2, distortion2);
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (this_skip2 && mbmi->dq_off_index > 0)
+ mbmi->dq_off_index = 0;
+#endif
+
if (this_rd < best_rd) {
rd_cost->rate = rate2;
rd_cost->dist = distortion2;
*mbmi = best_mbmode;
if (mbmi->copy_mode != NOREF) {
+#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
+ if (best_skip2)
+ assert(mbmi->dq_off_index == 0);
+#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
x->skip = best_skip2;
ctx->skip = x->skip;
ctx->skippable = best_mode_skippable;
vp9_zero(best_tx_diff);
}
#endif // CONFIG_COPY_MODE
+
#if CONFIG_PALETTE
if (bsize >= BLOCK_8X8 && cpi->common.allow_palette_mode &&
!is_inter_block(mbmi)) {