Remove unnecessary transform type lookups and unused codes.

Store the selected TX_TYPE directly in MB_MODE_INFO (tx_type replaces
ext_txfrm), drop the EXT_TX_TYPE enum (NORM/ALT1..ALT16), the
ext_tx_to_txtype[] lookup table, the unused get_tx_type_large() helper
and the GET_EXT_TX_TREE/GET_EXT_TX_ENCODINGS macros, and rename the
related tree, probability tables, counters and cost arrays from
*ext_tx* to *tx_type* in both the encoder and decoder.

Change-Id: Ib52d26690468996b1501b419d919643c8ea5ecaa
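
For context, a minimal standalone sketch (not part of the patch) of the
encoder-side pattern this change enables: the winning transform type is kept
directly in mbmi->tx_type, and GET_TX_TYPES() restricts TX_32X32 and larger to
DCT_DCT. The reduced TX_SIZE/TX_TYPE enums and the rd_cost callback below are
assumed local stand-ins for the real vp10 definitions and txfm_rd_in_plane().

/* Sketch only: reduced stand-ins for the vp10 enums. The TX_TYPE ordering
 * mirrors the entries referenced by vp10_tx_type_tree in this change. */
#include <stdint.h>

typedef enum { TX_4X4, TX_8X8, TX_16X16, TX_32X32, TX_SIZES } TX_SIZE;

typedef enum {
  DCT_DCT, ADST_DCT, DCT_ADST, ADST_ADST,
  FLIPADST_DCT, DCT_FLIPADST, FLIPADST_FLIPADST,
  ADST_FLIPADST, FLIPADST_ADST,
  DST_DCT, DCT_DST, DST_ADST, ADST_DST,
  DST_FLIPADST, FLIPADST_DST, DST_DST, IDTX,
  TX_TYPES
} TX_TYPE;

/* Same shape as the new GET_TX_TYPES(): TX_32X32 and larger use DCT_DCT only. */
#define GET_TX_TYPES(tx_size) ((tx_size) >= TX_32X32 ? 1 : TX_TYPES)

/* Hypothetical RD callback standing in for txfm_rd_in_plane(). */
typedef int64_t (*tx_rd_fn)(TX_TYPE tx_type, void *ctx);

TX_TYPE pick_tx_type(TX_SIZE tx_size, tx_rd_fn rd_cost, void *ctx) {
  TX_TYPE best_tx_type = DCT_DCT;
  int64_t best_rd = INT64_MAX;
  int t;
  for (t = DCT_DCT; t < GET_TX_TYPES(tx_size); ++t) {
    const int64_t this_rd = rd_cost((TX_TYPE)t, ctx);
    if (this_rd < best_rd) {
      best_rd = this_rd;
      best_tx_type = (TX_TYPE)t;
    }
  }
  /* The winner is written straight to mbmi->tx_type; no ext_tx_to_txtype[]
   * lookup is needed at reconstruction time. */
  return best_tx_type;
}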
#define MAX_MB_PLANE 3
+#if CONFIG_EXT_TX
+#define GET_TX_TYPES(tx_size) \
+ ((tx_size) >= TX_32X32 ? 1 : TX_TYPES)
+#endif // CONFIG_EXT_TX
+
typedef enum {
KEY_FRAME = 0,
INTER_FRAME = 1,
INTERP_FILTER interp_filter;
MV_REFERENCE_FRAME ref_frame[2];
#if CONFIG_EXT_TX
- EXT_TX_TYPE ext_txfrm;
-#endif
+ TX_TYPE tx_type;
+#endif // CONFIG_EXT_TX
// TODO(slavarnway): Delete and use bmi[3].as_mv[] instead.
int_mv mv[2];
ADST_ADST, // TM
};
-#if CONFIG_EXT_TX
-#define GET_EXT_TX_TYPES(tx_size) \
- ((tx_size) >= TX_32X32 ? 1 : EXT_TX_TYPES)
-#define GET_EXT_TX_TREE(tx_size) \
- ((tx_size) >= TX_32X32 ? NULL : vp10_ext_tx_tree)
-#define GET_EXT_TX_ENCODINGS(tx_size) \
- ((tx_size) >= TX_32X32 ? NULL : ext_tx_encodings)
-
-static TX_TYPE ext_tx_to_txtype[EXT_TX_TYPES] = {
- DCT_DCT,
- ADST_DCT,
- DCT_ADST,
- ADST_ADST,
- FLIPADST_DCT,
- DCT_FLIPADST,
- FLIPADST_FLIPADST,
- ADST_FLIPADST,
- FLIPADST_ADST,
- DST_DCT,
- DCT_DST,
- DST_ADST,
- ADST_DST,
- DST_FLIPADST,
- FLIPADST_DST,
- DST_DST,
- IDTX,
-};
-#endif // CONFIG_EXT_TX
-
-static INLINE TX_TYPE get_tx_type_large(PLANE_TYPE plane_type,
- const MACROBLOCKD *xd) {
- (void) plane_type;
- (void) xd;
- return DCT_DCT;
-}
-
static INLINE TX_TYPE get_tx_type(PLANE_TYPE plane_type,
const MACROBLOCKD *xd,
int block_idx, TX_SIZE tx_size) {
return DCT_DCT;
if (mbmi->sb_type >= BLOCK_8X8) {
if (plane_type == PLANE_TYPE_Y || is_inter_block(mbmi))
- return ext_tx_to_txtype[mbmi->ext_txfrm];
+ return mbmi->tx_type;
}
if (is_inter_block(mbmi))
};
#if CONFIG_EXT_TX
-const vpx_tree_index vp10_ext_tx_tree[TREE_SIZE(EXT_TX_TYPES)] = {
- -ALT16, 2,
- -NORM, 4,
- -ALT15, 6,
+const vpx_tree_index vp10_tx_type_tree[TREE_SIZE(TX_TYPES)] = {
+ -IDTX, 2,
+ -DCT_DCT, 4,
+ -DST_DST, 6,
8, 18,
10, 12,
- -ALT9, -ALT10,
+ -DST_DCT, -DCT_DST,
14, 16,
- -ALT1, -ALT2,
- -ALT4, -ALT5,
+ -ADST_DCT, -DCT_ADST,
+ -FLIPADST_DCT, -DCT_FLIPADST,
20, 26,
22, 24,
- -ALT11, -ALT12,
- -ALT13, -ALT14,
+ -DST_ADST, -ADST_DST,
+ -DST_FLIPADST, -FLIPADST_DST,
28, 30,
- -ALT3, -ALT6,
- -ALT7, -ALT8
+ -ADST_ADST, -FLIPADST_FLIPADST,
+ -ADST_FLIPADST, -FLIPADST_ADST
};
static const vpx_prob
-default_inter_ext_tx_prob[EXT_TX_SIZES][EXT_TX_TYPES - 1] = {
+default_inter_tx_type_prob[EXT_TX_SIZES][TX_TYPES - 1] = {
{ 12, 112, 16, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128 },
{ 12, 112, 16, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
};
static const vpx_prob
-default_intra_ext_tx_prob[EXT_TX_SIZES][INTRA_MODES][EXT_TX_TYPES - 1] = {
+default_intra_tx_type_prob[EXT_TX_SIZES][INTRA_MODES][TX_TYPES - 1] = {
{
{ 8, 11, 24, 112, 87, 137, 127, 134,
128, 86, 128, 124, 125, 133, 176, 123, },
vp10_copy(fc->skip_probs, default_skip_probs);
vp10_copy(fc->inter_mode_probs, default_inter_mode_probs);
#if CONFIG_EXT_TX
- vp10_copy(fc->inter_ext_tx_prob, default_inter_ext_tx_prob);
- vp10_copy(fc->intra_ext_tx_prob, default_intra_ext_tx_prob);
+ vp10_copy(fc->inter_tx_type_prob, default_inter_tx_type_prob);
+ vp10_copy(fc->intra_tx_type_prob, default_intra_tx_type_prob);
#endif // CONFIG_EXT_TX
}
#if CONFIG_EXT_TX
for (i = TX_4X4; i <= TX_16X16; ++i) {
- vpx_tree_merge_probs(vp10_ext_tx_tree, pre_fc->inter_ext_tx_prob[i],
- counts->inter_ext_tx[i], fc->inter_ext_tx_prob[i]);
+ vpx_tree_merge_probs(vp10_tx_type_tree, pre_fc->inter_tx_type_prob[i],
+ counts->inter_tx_type[i], fc->inter_tx_type_prob[i]);
for (j = 0; j < INTRA_MODES; ++j)
- vpx_tree_merge_probs(vp10_ext_tx_tree, pre_fc->intra_ext_tx_prob[i][j],
- counts->intra_ext_tx[i][j],
- fc->intra_ext_tx_prob[i][j]);
+ vpx_tree_merge_probs(vp10_tx_type_tree, pre_fc->intra_tx_type_prob[i][j],
+ counts->intra_tx_type[i][j],
+ fc->intra_tx_type_prob[i][j]);
}
#endif // CONFIG_EXT_TX
}
vpx_prob skip_probs[SKIP_CONTEXTS];
nmv_context nmvc;
#if CONFIG_EXT_TX
- vpx_prob inter_ext_tx_prob[EXT_TX_SIZES][EXT_TX_TYPES - 1];
- vpx_prob intra_ext_tx_prob[EXT_TX_SIZES][INTRA_MODES][EXT_TX_TYPES - 1];
+ vpx_prob inter_tx_type_prob[EXT_TX_SIZES][TX_TYPES - 1];
+ vpx_prob intra_tx_type_prob[EXT_TX_SIZES][INTRA_MODES][TX_TYPES - 1];
#endif // CONFIG_EXT_TX
int initialized;
} FRAME_CONTEXT;
unsigned int skip[SKIP_CONTEXTS][2];
nmv_context_counts mv;
#if CONFIG_EXT_TX
- unsigned int inter_ext_tx[EXT_TX_SIZES][EXT_TX_TYPES];
- unsigned int intra_ext_tx[EXT_TX_SIZES][INTRA_MODES][EXT_TX_TYPES];
+ unsigned int inter_tx_type[EXT_TX_SIZES][TX_TYPES];
+ unsigned int intra_tx_type[EXT_TX_SIZES][INTRA_MODES][TX_TYPES];
#endif // CONFIG_EXT_TX
} FRAME_COUNTS;
unsigned int (*ct_8x8p)[2]);
#if CONFIG_EXT_TX
-extern const vpx_tree_index vp10_ext_tx_tree[TREE_SIZE(EXT_TX_TYPES)];
+extern const vpx_tree_index vp10_tx_type_tree[TREE_SIZE(TX_TYPES)];
#endif // CONFIG_EXT_TX
#ifdef __cplusplus
#if CONFIG_EXT_TX
#define EXT_TX_SIZES 3 // number of sizes that use extended transforms
-typedef enum {
- NORM = 0,
- ALT1 = 1,
- ALT2 = 2,
- ALT3 = 3,
- ALT4 = 4,
- ALT5 = 5,
- ALT6 = 6,
- ALT7 = 7,
- ALT8 = 8,
- ALT9 = 9,
- ALT10 = 10,
- ALT11 = 11,
- ALT12 = 12,
- ALT13 = 13,
- ALT14 = 14,
- ALT15 = 15,
- ALT16 = 16,
- EXT_TX_TYPES
-} EXT_TX_TYPE;
#endif // CONFIG_EXT_TX
typedef enum {
int i, j, k;
if (vpx_read(r, GROUP_DIFF_UPDATE_PROB)) {
for (i = TX_4X4; i <= TX_16X16; ++i)
- for (j = 0; j < EXT_TX_TYPES - 1; ++j)
- vp10_diff_update_prob(r, &fc->inter_ext_tx_prob[i][j]);
+ for (j = 0; j < TX_TYPES - 1; ++j)
+ vp10_diff_update_prob(r, &fc->inter_tx_type_prob[i][j]);
}
if (vpx_read(r, GROUP_DIFF_UPDATE_PROB)) {
for (i = TX_4X4; i <= TX_16X16; ++i)
for (j = 0; j < INTRA_MODES; ++j)
- for (k = 0; k < EXT_TX_TYPES - 1; ++k)
- vp10_diff_update_prob(r, &fc->intra_ext_tx_prob[i][j][k]);
+ for (k = 0; k < TX_TYPES - 1; ++k)
+ vp10_diff_update_prob(r, &fc->intra_tx_type_prob[i][j][k]);
}
}
#endif // CONFIG_EXT_TX
if (mbmi->tx_size <= TX_16X16 && cm->base_qindex > 0 &&
mbmi->sb_type >= BLOCK_8X8 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
- mbmi->ext_txfrm =
- vpx_read_tree(r, vp10_ext_tx_tree,
- cm->fc->intra_ext_tx_prob[mbmi->tx_size][mbmi->mode]);
+ mbmi->tx_type =
+ vpx_read_tree(r, vp10_tx_type_tree,
+ cm->fc->intra_tx_type_prob[mbmi->tx_size][mbmi->mode]);
} else {
- mbmi->ext_txfrm = NORM;
+ mbmi->tx_type = DCT_DCT;
}
#endif // CONFIG_EXT_TX
}
FRAME_COUNTS *counts = xd->counts;
if (inter_block)
- mbmi->ext_txfrm =
- vpx_read_tree(r, vp10_ext_tx_tree,
- cm->fc->inter_ext_tx_prob[mbmi->tx_size]);
+ mbmi->tx_type =
+ vpx_read_tree(r, vp10_tx_type_tree,
+ cm->fc->inter_tx_type_prob[mbmi->tx_size]);
else
- mbmi->ext_txfrm =
- vpx_read_tree(r, vp10_ext_tx_tree,
- cm->fc->intra_ext_tx_prob[mbmi->tx_size][mbmi->mode]);
+ mbmi->tx_type = vpx_read_tree(r, vp10_tx_type_tree,
+ cm->fc->intra_tx_type_prob
+ [mbmi->tx_size][mbmi->mode]);
if (counts) {
if (inter_block)
- ++counts->inter_ext_tx[mbmi->tx_size][mbmi->ext_txfrm];
+ ++counts->inter_tx_type[mbmi->tx_size][mbmi->tx_type];
else
- ++counts->intra_ext_tx[mbmi->tx_size][mbmi->mode][mbmi->ext_txfrm];
+ ++counts->intra_tx_type[mbmi->tx_size][mbmi->mode][mbmi->tx_type];
}
} else {
- mbmi->ext_txfrm = NORM;
+ mbmi->tx_type = DCT_DCT;
}
#endif // CONFIG_EXT_TX
}
{{2, 2}, {6, 3}, {0, 1}, {7, 3}};
#if CONFIG_EXT_TX
-static struct vp10_token ext_tx_encodings[EXT_TX_TYPES];
+static struct vp10_token tx_type_encodings[TX_TYPES];
#endif // CONFIG_EXT_TX
void vp10_encode_token_init() {
#if CONFIG_EXT_TX
- vp10_tokens_from_tree(ext_tx_encodings, vp10_ext_tx_tree);
+ vp10_tokens_from_tree(tx_type_encodings, vp10_tx_type_tree);
#endif // CONFIG_EXT_TX
}
int do_update = 0;
for (i = TX_4X4; i <= TX_16X16; ++i) {
savings += prob_diff_update_savings(
- vp10_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
- cm->counts.inter_ext_tx[i], EXT_TX_TYPES);
+ vp10_tx_type_tree, cm->fc->inter_tx_type_prob[i],
+ cm->counts.inter_tx_type[i], TX_TYPES);
}
do_update = savings > savings_thresh;
vpx_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
if (do_update) {
for (i = TX_4X4; i <= TX_16X16; ++i) {
- prob_diff_update(vp10_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
- cm->counts.inter_ext_tx[i], EXT_TX_TYPES, w);
+ prob_diff_update(vp10_tx_type_tree, cm->fc->inter_tx_type_prob[i],
+ cm->counts.inter_tx_type[i], TX_TYPES, w);
}
}
for (i = TX_4X4; i <= TX_16X16; ++i)
for (j = 0; j < INTRA_MODES; ++j)
savings += prob_diff_update_savings(
- vp10_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
- cm->counts.intra_ext_tx[i][j], EXT_TX_TYPES);
+ vp10_tx_type_tree, cm->fc->intra_tx_type_prob[i][j],
+ cm->counts.intra_tx_type[i][j], TX_TYPES);
do_update = savings > savings_thresh;
vpx_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
if (do_update) {
for (i = TX_4X4; i <= TX_16X16; ++i)
for (j = 0; j < INTRA_MODES; ++j)
- prob_diff_update(vp10_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
- cm->counts.intra_ext_tx[i][j], EXT_TX_TYPES, w);
+ prob_diff_update(vp10_tx_type_tree, cm->fc->intra_tx_type_prob[i][j],
+ cm->counts.intra_tx_type[i][j], TX_TYPES, w);
}
}
#endif // CONFIG_EXT_TX
bsize >= BLOCK_8X8 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
if (is_inter)
- vp10_write_token(w, vp10_ext_tx_tree,
- cm->fc->inter_ext_tx_prob[mbmi->tx_size],
- &ext_tx_encodings[mbmi->ext_txfrm]);
+ vp10_write_token(w, vp10_tx_type_tree,
+ cm->fc->inter_tx_type_prob[mbmi->tx_size],
+ &tx_type_encodings[mbmi->tx_type]);
else
- vp10_write_token(w, vp10_ext_tx_tree,
- cm->fc->intra_ext_tx_prob[mbmi->tx_size][mbmi->mode],
- &ext_tx_encodings[mbmi->ext_txfrm]);
+ vp10_write_token(w, vp10_tx_type_tree,
+ cm->fc->intra_tx_type_prob[mbmi->tx_size][mbmi->mode],
+ &tx_type_encodings[mbmi->tx_type]);
}
#endif // CONFIG_EXT_TX
}
if (mbmi->tx_size <= TX_16X16 && cm->base_qindex > 0 &&
bsize >= BLOCK_8X8 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
- vp10_write_token(w, vp10_ext_tx_tree,
- cm->fc->intra_ext_tx_prob[mbmi->tx_size][mbmi->mode],
- &ext_tx_encodings[mbmi->ext_txfrm]);
+ vp10_write_token(w, vp10_tx_type_tree,
+ cm->fc->intra_tx_type_prob[mbmi->tx_size][mbmi->mode],
+ &tx_type_encodings[mbmi->tx_type]);
}
#endif // CONFIG_EXT_TX
}
bsize >= BLOCK_8X8 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
if (is_inter_block(mbmi))
- ++td->counts->inter_ext_tx[mbmi->tx_size][mbmi->ext_txfrm];
+ ++td->counts->inter_tx_type[mbmi->tx_size][mbmi->tx_type];
else
- ++td->counts->intra_ext_tx[mbmi->tx_size][mbmi->mode][mbmi->ext_txfrm];
+ ++td->counts->intra_tx_type[mbmi->tx_size][mbmi->mode][mbmi->tx_type];
}
#endif // CONFIG_EXT_TX
}
int switchable_interp_costs[SWITCHABLE_FILTER_CONTEXTS][SWITCHABLE_FILTERS];
int partition_cost[PARTITION_CONTEXTS][PARTITION_TYPES];
#if CONFIG_EXT_TX
- int inter_ext_tx_costs[EXT_TX_SIZES][EXT_TX_TYPES];
- int intra_ext_tx_costs[EXT_TX_SIZES][INTRA_MODES][EXT_TX_TYPES];
+ int inter_tx_type_costs[EXT_TX_SIZES][TX_TYPES];
+ int intra_tx_type_costs[EXT_TX_SIZES][INTRA_MODES][TX_TYPES];
#endif // CONFIG_EXT_TX
int multi_arf_allowed;
fc->switchable_interp_prob[i], vp10_switchable_interp_tree);
#if CONFIG_EXT_TX
for (i = TX_4X4; i <= TX_16X16; ++i) {
- vp10_cost_tokens(cpi->inter_ext_tx_costs[i], fc->inter_ext_tx_prob[i],
- vp10_ext_tx_tree);
+ vp10_cost_tokens(cpi->inter_tx_type_costs[i], fc->inter_tx_type_prob[i],
+ vp10_tx_type_tree);
for (j = 0; j < INTRA_MODES; ++j)
- vp10_cost_tokens(cpi->intra_ext_tx_costs[i][j],
- fc->intra_ext_tx_prob[i][j], vp10_ext_tx_tree);
+ vp10_cost_tokens(cpi->intra_tx_type_costs[i][j],
+ fc->intra_tx_type_prob[i][j], vp10_tx_type_tree);
}
#endif // CONFIG_EXT_TX
}
MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
#if CONFIG_EXT_TX
- int tx_type, best_tx_type = NORM;
+ int tx_type, best_tx_type = DCT_DCT;
int r, s;
int64_t d, psse, this_rd, best_rd = INT64_MAX;
vpx_prob skip_prob = vp10_get_skip_prob(cm, xd);
mbmi->tx_size = VPXMIN(max_tx_size, largest_tx_size);
#if CONFIG_EXT_TX
if (is_inter_block(mbmi) && bs >= BLOCK_8X8 && !xd->lossless) {
- for (tx_type = NORM; tx_type < EXT_TX_TYPES - 1; ++tx_type) {
- if (mbmi->ext_txfrm >= ALT11 && mbmi->ext_txfrm < ALT16 &&
- best_tx_type == NORM) {
- tx_type = ALT16 - 1;
+ for (tx_type = DCT_DCT; tx_type < TX_TYPES - 1; ++tx_type) {
+ if (mbmi->tx_type >= DST_ADST && mbmi->tx_type < IDTX &&
+ best_tx_type == DCT_DCT) {
+ tx_type = IDTX - 1;
continue;
}
- if (tx_type >= GET_EXT_TX_TYPES(mbmi->tx_size))
+ if (tx_type >= GET_TX_TYPES(mbmi->tx_size))
continue;
- mbmi->ext_txfrm = tx_type;
+ mbmi->tx_type = tx_type;
txfm_rd_in_plane(x, &r, &d, &s,
&psse, ref_best_rd, 0, bs, mbmi->tx_size,
cpi->sf.use_fast_coef_costing);
continue;
if (mbmi->tx_size <= TX_16X16) {
if (is_inter_block(mbmi))
- r += cpi->inter_ext_tx_costs[mbmi->tx_size][mbmi->ext_txfrm];
+ r += cpi->inter_tx_type_costs[mbmi->tx_size][mbmi->tx_type];
else
- r += cpi->intra_ext_tx_costs[mbmi->tx_size]
- [mbmi->mode][mbmi->ext_txfrm];
+ r += cpi->intra_tx_type_costs[mbmi->tx_size]
+ [mbmi->mode][mbmi->tx_type];
}
if (s)
if (is_inter_block(mbmi) && !xd->lossless && !s)
this_rd = VPXMIN(this_rd, RDCOST(x->rdmult, x->rddiv, s1, psse));
- if (this_rd < ((best_tx_type == NORM) ? ext_tx_th : 1) * best_rd) {
+ if (this_rd < ((best_tx_type == DCT_DCT) ? ext_tx_th : 1) * best_rd) {
best_rd = this_rd;
- best_tx_type = mbmi->ext_txfrm;
+ best_tx_type = mbmi->tx_type;
}
}
}
- mbmi->ext_txfrm = best_tx_type;
+ mbmi->tx_type = best_tx_type;
#endif // CONFIG_EXT_TX
txfm_rd_in_plane(x, rate, distortion, skip,
if (bs >= BLOCK_8X8 && mbmi->tx_size <= TX_16X16 &&
!xd->lossless && *rate != INT_MAX) {
if (is_inter_block(mbmi))
- *rate += cpi->inter_ext_tx_costs[mbmi->tx_size][mbmi->ext_txfrm];
+ *rate += cpi->inter_tx_type_costs[mbmi->tx_size][mbmi->tx_type];
else
- *rate += cpi->intra_ext_tx_costs[mbmi->tx_size]
- [mbmi->mode][mbmi->ext_txfrm];
+ *rate += cpi->intra_tx_type_costs[mbmi->tx_size]
+ [mbmi->mode][mbmi->tx_type];
}
#endif // CONFIG_EXT_TX
}
int start_tx, end_tx;
const int tx_select = cm->tx_mode == TX_MODE_SELECT;
#if CONFIG_EXT_TX
- int tx_type, best_tx_type = NORM;
+ int tx_type, best_tx_type = DCT_DCT;
int start_tx_type, end_tx_type;
#endif // CONFIG_EXT_TX
*psse = INT64_MAX;
#if CONFIG_EXT_TX
- start_tx_type = NORM;
+ start_tx_type = DCT_DCT;
if (bs >= BLOCK_8X8 && !xd->lossless)
- end_tx_type = EXT_TX_TYPES - 1;
+ end_tx_type = TX_TYPES - 1;
else
- end_tx_type = NORM;
+ end_tx_type = DCT_DCT;
for (tx_type = start_tx_type; tx_type <= end_tx_type; ++tx_type) {
- mbmi->ext_txfrm = tx_type;
+ mbmi->tx_type = tx_type;
// TODO(huisu): clean up the logic.
- if (mbmi->ext_txfrm >= ALT11 && mbmi->ext_txfrm < ALT16 &&
- best_tx_type == NORM) {
- tx_type = ALT16 - 1;
+ if (mbmi->tx_type >= DST_ADST && mbmi->tx_type < IDTX &&
+ best_tx_type == DCT_DCT) {
+ tx_type = IDTX - 1;
continue;
}
#endif // CONFIG_EXT_TX
int r_tx_size = 0;
#if CONFIG_EXT_TX
- if (mbmi->ext_txfrm >= GET_EXT_TX_TYPES(n))
+ if (mbmi->tx_type >= GET_TX_TYPES(n))
continue;
#endif // CONFIG_EXT_TX
#if CONFIG_EXT_TX
if (bs >= BLOCK_8X8 && !xd->lossless && r != INT_MAX && n < TX_32X32) {
if (is_inter_block(mbmi))
- r += cpi->inter_ext_tx_costs[n][mbmi->ext_txfrm];
+ r += cpi->inter_tx_type_costs[n][mbmi->tx_type];
else
- r += cpi->intra_ext_tx_costs[n][mbmi->mode][mbmi->ext_txfrm];
+ r += cpi->intra_tx_type_costs[n][mbmi->mode][mbmi->tx_type];
}
#endif // CONFIG_EXT_TX
last_rd = rd;
#if CONFIG_EXT_TX
- if (rd < (is_inter_block(mbmi) && best_tx_type == NORM ? ext_tx_th : 1) *
+ if (rd <
+ (is_inter_block(mbmi) && best_tx_type == DCT_DCT ? ext_tx_th : 1) *
best_rd) {
#else
if (rd < best_rd) {
*skip = s;
*psse = sse;
#if CONFIG_EXT_TX
- best_tx_type = mbmi->ext_txfrm;
+ best_tx_type = mbmi->tx_type;
#endif // CONFIG_EXT_TX
}
}
mbmi->tx_size = best_tx;
#if CONFIG_EXT_TX
- mbmi->ext_txfrm = best_tx_type;
+ mbmi->tx_type = best_tx_type;
txfm_rd_in_plane(x, &r, &d, &s,
&sse, ref_best_rd, 0, bs, best_tx,
cpi->sf.use_fast_coef_costing);
int64_t this_distortion, this_rd;
TX_SIZE best_tx = TX_4X4;
#if CONFIG_EXT_TX
- EXT_TX_TYPE best_tx_type = NORM;
+ TX_TYPE best_tx_type = DCT_DCT;
#endif // CONFIG_EXT_TX
int *bmode_costs;
const MODE_INFO *above_mi = xd->above_mi;
best_rd = this_rd;
best_tx = mic->mbmi.tx_size;
#if CONFIG_EXT_TX
- best_tx_type = mic->mbmi.ext_txfrm;
+ best_tx_type = mic->mbmi.tx_type;
#endif // CONFIG_EXT_TX
*rate = this_rate;
*rate_tokenonly = this_rate_tokenonly;
mic->mbmi.mode = mode_selected;
mic->mbmi.tx_size = best_tx;
#if CONFIG_EXT_TX
- mic->mbmi.ext_txfrm = best_tx_type;
+ mic->mbmi.tx_type = best_tx_type;
#endif // CONFIG_EXT_TX
return best_rd;