struct vp9_token_struct vp9_kf_bmode_encodings[VP9_KF_BINTRAMODES];
struct vp9_token_struct vp9_ymode_encodings[VP9_YMODES];
#if CONFIG_SUPERBLOCKS
-struct vp9_token_struct vp9_sb_kf_ymode_encodings [VP9_I32X32_MODES];
+struct vp9_token_struct vp9_sb_ymode_encodings[VP9_I32X32_MODES];
+struct vp9_token_struct vp9_sb_kf_ymode_encodings[VP9_I32X32_MODES];
#endif
struct vp9_token_struct vp9_kf_ymode_encodings[VP9_YMODES];
struct vp9_token_struct vp9_uv_mode_encodings[VP9_UV_MODES];
vp9_tree_probs_from_distribution(VP9_YMODES, vp9_ymode_encodings,
vp9_ymode_tree, x->fc.ymode_prob,
bct, y_mode_cts, 256, 1);
+#if CONFIG_SUPERBLOCKS
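+  // Default superblock ymode probabilities are derived from the same
+  // y_mode_cts counts, using the superblock mode tree.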
+ vp9_tree_probs_from_distribution(VP9_I32X32_MODES, vp9_sb_ymode_encodings,
+ vp9_sb_ymode_tree, x->fc.sb_ymode_prob,
+ bct, y_mode_cts, 256, 1);
+#endif
{
int i;
for (i = 0; i < 8; i++) {
#if CONFIG_SUPERBLOCKS
vp9_tree_probs_from_distribution(VP9_I32X32_MODES,
vp9_sb_kf_ymode_encodings,
- vp9_sb_ymode_tree,
+ vp9_sb_kf_ymode_tree,
x->sb_kf_ymode_prob[i], bct,
kf_y_mode_cts[i], 256, 1);
#endif
vp9_tokens_from_tree(vp9_ymode_encodings, vp9_ymode_tree);
vp9_tokens_from_tree(vp9_kf_ymode_encodings, vp9_kf_ymode_tree);
#if CONFIG_SUPERBLOCKS
- vp9_tokens_from_tree(vp9_sb_kf_ymode_encodings, vp9_sb_ymode_tree);
+ vp9_tokens_from_tree(vp9_sb_ymode_encodings, vp9_sb_ymode_tree);
+ vp9_tokens_from_tree(vp9_sb_kf_ymode_encodings, vp9_sb_kf_ymode_tree);
#endif
vp9_tokens_from_tree(vp9_uv_mode_encodings, vp9_uv_mode_tree);
vp9_tokens_from_tree(vp9_i8x8_mode_encodings, vp9_i8x8_mode_tree);
factor = (MVREF_MAX_UPDATE_FACTOR * count / MVREF_COUNT_SAT);
this_prob = (pc->fc.vp9_mode_contexts[j][i] * (256 - factor) +
this_prob * factor + 128) >> 8;
- this_prob = this_prob ? (this_prob < 255 ? this_prob : 255) : 1;
- mode_context[j][i] = this_prob;
+ mode_context[j][i] = clip_prob(this_prob);
}
}
}
int i, t, count, factor;
unsigned int branch_ct[32][2];
vp9_prob ymode_probs[VP9_YMODES - 1];
+#if CONFIG_SUPERBLOCKS
+ vp9_prob sb_ymode_probs[VP9_I32X32_MODES - 1];
+#endif
vp9_prob uvmode_probs[VP9_UV_MODES - 1];
vp9_prob bmode_probs[VP9_NKF_BINTRAMODES - 1];
vp9_prob i8x8_mode_probs[VP9_I8X8_MODES - 1];
factor = (MODE_MAX_UPDATE_FACTOR * count / MODE_COUNT_SAT);
prob = ((int)cm->fc.pre_ymode_prob[t] * (256 - factor) +
(int)ymode_probs[t] * factor + 128) >> 8;
- if (prob <= 0) cm->fc.ymode_prob[t] = 1;
- else if (prob > 255) cm->fc.ymode_prob[t] = 255;
- else cm->fc.ymode_prob[t] = prob;
+ cm->fc.ymode_prob[t] = clip_prob(prob);
+ }
+#if CONFIG_SUPERBLOCKS
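+  // Adapt the superblock ymode probabilities with the same blend of
+  // pre-frame and per-frame estimates used for ymode above.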
+ vp9_tree_probs_from_distribution(VP9_I32X32_MODES,
+ vp9_sb_ymode_encodings, vp9_sb_ymode_tree,
+ sb_ymode_probs, branch_ct,
+ cm->fc.sb_ymode_counts,
+ 256, 1);
+ for (t = 0; t < VP9_I32X32_MODES - 1; ++t) {
+ int prob;
+ count = branch_ct[t][0] + branch_ct[t][1];
+ count = count > MODE_COUNT_SAT ? MODE_COUNT_SAT : count;
+ factor = (MODE_MAX_UPDATE_FACTOR * count / MODE_COUNT_SAT);
+ prob = ((int)cm->fc.pre_sb_ymode_prob[t] * (256 - factor) +
+ (int)sb_ymode_probs[t] * factor + 128) >> 8;
+ cm->fc.sb_ymode_prob[t] = clip_prob(prob);
}
+#endif
for (i = 0; i < VP9_YMODES; ++i) {
vp9_tree_probs_from_distribution(VP9_UV_MODES, vp9_uv_mode_encodings,
vp9_uv_mode_tree, uvmode_probs, branch_ct,
factor = (MODE_MAX_UPDATE_FACTOR * count / MODE_COUNT_SAT);
prob = ((int)cm->fc.pre_uv_mode_prob[i][t] * (256 - factor) +
(int)uvmode_probs[t] * factor + 128) >> 8;
- if (prob <= 0) cm->fc.uv_mode_prob[i][t] = 1;
- else if (prob > 255) cm->fc.uv_mode_prob[i][t] = 255;
- else cm->fc.uv_mode_prob[i][t] = prob;
+ cm->fc.uv_mode_prob[i][t] = clip_prob(prob);
}
}
vp9_tree_probs_from_distribution(VP9_NKF_BINTRAMODES, vp9_bmode_encodings,
factor = (MODE_MAX_UPDATE_FACTOR * count / MODE_COUNT_SAT);
prob = ((int)cm->fc.pre_bmode_prob[t] * (256 - factor) +
(int)bmode_probs[t] * factor + 128) >> 8;
- if (prob <= 0) cm->fc.bmode_prob[t] = 1;
- else if (prob > 255) cm->fc.bmode_prob[t] = 255;
- else cm->fc.bmode_prob[t] = prob;
+ cm->fc.bmode_prob[t] = clip_prob(prob);
}
vp9_tree_probs_from_distribution(VP9_I8X8_MODES, vp9_i8x8_mode_encodings,
vp9_i8x8_mode_tree, i8x8_mode_probs,
factor = (MODE_MAX_UPDATE_FACTOR * count / MODE_COUNT_SAT);
prob = ((int)cm->fc.pre_i8x8_mode_prob[t] * (256 - factor) +
(int)i8x8_mode_probs[t] * factor + 128) >> 8;
- if (prob <= 0) cm->fc.i8x8_mode_prob[t] = 1;
- else if (prob > 255) cm->fc.i8x8_mode_prob[t] = 255;
- else cm->fc.i8x8_mode_prob[t] = prob;
+ cm->fc.i8x8_mode_prob[t] = clip_prob(prob);
}
for (i = 0; i < SUBMVREF_COUNT; ++i) {
vp9_tree_probs_from_distribution(VP9_SUBMVREFS,
factor = (MODE_MAX_UPDATE_FACTOR * count / MODE_COUNT_SAT);
prob = ((int)cm->fc.pre_sub_mv_ref_prob[i][t] * (256 - factor) +
(int)sub_mv_ref_probs[t] * factor + 128) >> 8;
- if (prob <= 0) cm->fc.sub_mv_ref_prob[i][t] = 1;
- else if (prob > 255) cm->fc.sub_mv_ref_prob[i][t] = 255;
- else cm->fc.sub_mv_ref_prob[i][t] = prob;
+ cm->fc.sub_mv_ref_prob[i][t] = clip_prob(prob);
}
}
vp9_tree_probs_from_distribution(VP9_NUMMBSPLITS, vp9_mbsplit_encodings,
factor = (MODE_MAX_UPDATE_FACTOR * count / MODE_COUNT_SAT);
prob = ((int)cm->fc.pre_mbsplit_prob[t] * (256 - factor) +
(int)mbsplit_probs[t] * factor + 128) >> 8;
- if (prob <= 0) cm->fc.mbsplit_prob[t] = 1;
- else if (prob > 255) cm->fc.mbsplit_prob[t] = 255;
- else cm->fc.mbsplit_prob[t] = prob;
+ cm->fc.mbsplit_prob[t] = clip_prob(prob);
}
#if CONFIG_COMP_INTERINTRA_PRED
if (cm->use_interintra) {
extern const vp9_tree_index vp9_kf_ymode_tree[];
extern const vp9_tree_index vp9_uv_mode_tree[];
#define vp9_sb_ymode_tree vp9_uv_mode_tree
+#define vp9_sb_kf_ymode_tree vp9_uv_mode_tree
extern const vp9_tree_index vp9_i8x8_mode_tree[];
extern const vp9_tree_index vp9_mbsplit_tree[];
extern const vp9_tree_index vp9_mv_ref_tree[];
extern struct vp9_token_struct vp9_bmode_encodings[VP9_NKF_BINTRAMODES];
extern struct vp9_token_struct vp9_kf_bmode_encodings[VP9_KF_BINTRAMODES];
extern struct vp9_token_struct vp9_ymode_encodings[VP9_YMODES];
+extern struct vp9_token_struct vp9_sb_ymode_encodings[VP9_I32X32_MODES];
extern struct vp9_token_struct vp9_sb_kf_ymode_encodings[VP9_I32X32_MODES];
extern struct vp9_token_struct vp9_kf_ymode_encodings[VP9_YMODES];
extern struct vp9_token_struct vp9_i8x8_mode_encodings[VP9_I8X8_MODES];
typedef struct frame_contexts {
vp9_prob bmode_prob[VP9_NKF_BINTRAMODES - 1];
vp9_prob ymode_prob[VP9_YMODES - 1]; /* interframe intra mode probs */
+#if CONFIG_SUPERBLOCKS
+ vp9_prob sb_ymode_prob[VP9_I32X32_MODES - 1];
+#endif
vp9_prob uv_mode_prob[VP9_YMODES][VP9_UV_MODES - 1];
vp9_prob i8x8_mode_prob[VP9_I8X8_MODES - 1];
vp9_prob sub_mv_ref_prob[SUBMVREF_COUNT][VP9_SUBMVREFS - 1];
nmv_context pre_nmvc;
vp9_prob pre_bmode_prob[VP9_NKF_BINTRAMODES - 1];
vp9_prob pre_ymode_prob[VP9_YMODES - 1]; /* interframe intra mode probs */
+#if CONFIG_SUPERBLOCKS
+ vp9_prob pre_sb_ymode_prob[VP9_I32X32_MODES - 1];
+#endif
vp9_prob pre_uv_mode_prob[VP9_YMODES][VP9_UV_MODES - 1];
vp9_prob pre_i8x8_mode_prob[VP9_I8X8_MODES - 1];
vp9_prob pre_sub_mv_ref_prob[SUBMVREF_COUNT][VP9_SUBMVREFS - 1];
vp9_prob pre_mbsplit_prob[VP9_NUMMBSPLITS - 1];
unsigned int bmode_counts[VP9_NKF_BINTRAMODES];
unsigned int ymode_counts[VP9_YMODES]; /* interframe intra mode counts */
+#if CONFIG_SUPERBLOCKS
+ unsigned int sb_ymode_counts[VP9_I32X32_MODES];
+#endif
unsigned int uv_mode_counts[VP9_YMODES][VP9_UV_MODES];
unsigned int i8x8_mode_counts[VP9_I8X8_MODES]; /* interframe intra counts */
unsigned int sub_mv_ref_counts[SUBMVREF_COUNT][VP9_SUBMVREFS];
int Round
);
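+// Clamp a probability value to the legal range [1, 255].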
+static __inline int clip_prob(int p) {
+ if (p > 255)
+ return 255;
+ else if (p < 1)
+ return 1;
+ return p;
+}
+
vp9_prob vp9_bin_prob_from_distribution(const unsigned int counts[2]);
#endif
}
#if CONFIG_SUPERBLOCKS
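+// Decode a 32x32 (superblock) intra luma mode via the superblock mode tree.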
+static int read_sb_ymode(vp9_reader *bc, const vp9_prob *p) {
+ return treed_read(bc, vp9_sb_ymode_tree, p);
+}
+
static int read_kf_sb_ymode(vp9_reader *bc, const vp9_prob *p) {
return treed_read(bc, vp9_uv_mode_tree, p);
}
} while (++i < VP9_YMODES - 1);
}
+#if CONFIG_SUPERBLOCKS
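+  // If present in the bitstream, read updated superblock ymode probabilities.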
+ if (vp9_read_bit(bc)) {
+ int i = 0;
+
+ do {
+ cm->fc.sb_ymode_prob[i] = (vp9_prob) vp9_read_literal(bc, 8);
+ } while (++i < VP9_I32X32_MODES - 1);
+ }
+#endif
+
#if CONFIG_NEW_MVREF
// Temporary default probabilities for encoding the MV ref id signal
vpx_memset(xd->mb_mv_ref_id_probs, 192, sizeof(xd->mb_mv_ref_id_probs));
/* required for left and above block mv */
mbmi->mv[0].as_int = 0;
- if (vp9_segfeature_active(xd, mbmi->segment_id, SEG_LVL_MODE))
+ if (vp9_segfeature_active(xd, mbmi->segment_id, SEG_LVL_MODE)) {
mbmi->mode = (MB_PREDICTION_MODE)
vp9_get_segdata(xd, mbmi->segment_id, SEG_LVL_MODE);
- else {
- // FIXME write using SB mode tree
+#if CONFIG_SUPERBLOCKS
+ } else if (mbmi->encoded_as_sb) {
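+    // Superblocks use the dedicated 32x32 mode set and tree.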
+ mbmi->mode = (MB_PREDICTION_MODE)
+ read_sb_ymode(bc, pbi->common.fc.sb_ymode_prob);
+ pbi->common.fc.sb_ymode_counts[mbmi->mode]++;
+#endif
+ } else {
mbmi->mode = (MB_PREDICTION_MODE)
read_ymode(bc, pbi->common.fc.ymode_prob);
pbi->common.fc.ymode_counts[mbmi->mode]++;
vp9_copy(pbi->common.fc.pre_hybrid_coef_probs_16x16,
pbi->common.fc.hybrid_coef_probs_16x16);
vp9_copy(pbi->common.fc.pre_ymode_prob, pbi->common.fc.ymode_prob);
+#if CONFIG_SUPERBLOCKS
+ vp9_copy(pbi->common.fc.pre_sb_ymode_prob, pbi->common.fc.sb_ymode_prob);
+#endif
vp9_copy(pbi->common.fc.pre_uv_mode_prob, pbi->common.fc.uv_mode_prob);
vp9_copy(pbi->common.fc.pre_bmode_prob, pbi->common.fc.bmode_prob);
vp9_copy(pbi->common.fc.pre_i8x8_mode_prob, pbi->common.fc.i8x8_mode_prob);
vp9_zero(pbi->common.fc.coef_counts_16x16);
vp9_zero(pbi->common.fc.hybrid_coef_counts_16x16);
vp9_zero(pbi->common.fc.ymode_counts);
+#if CONFIG_SUPERBLOCKS
+ vp9_zero(pbi->common.fc.sb_ymode_counts);
+#endif
vp9_zero(pbi->common.fc.uv_mode_counts);
vp9_zero(pbi->common.fc.bmode_counts);
vp9_zero(pbi->common.fc.i8x8_mode_counts);
bc, VP9_YMODES, vp9_ymode_encodings, vp9_ymode_tree,
Pnew, cm->fc.ymode_prob, bct, (unsigned int *)cpi->ymode_count
);
+#if CONFIG_SUPERBLOCKS
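+  // Also signal probability updates for the superblock ymode tree.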
+ update_mode(bc, VP9_I32X32_MODES, vp9_sb_ymode_encodings,
+ vp9_sb_ymode_tree, Pnew, cm->fc.sb_ymode_prob, bct,
+ (unsigned int *)cpi->sb_ymode_count);
+#endif
}
}
if (den <= 0)
return 128;
p = (num * 255 + (den >> 1)) / den;
- if (p > 255)
- return 255;
- else if (p < 1)
- return 1;
- return p;
+ return clip_prob(p);
}
static int get_binary_prob(int n0, int n1) {
}
#if CONFIG_SUPERBLOCKS
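+// Write a 32x32 (superblock) intra luma mode using the superblock mode tree.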
+static void write_sb_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
+ write_token(bc, vp9_sb_ymode_tree, p, vp9_sb_ymode_encodings + m);
+}
+
static void sb_kfwrite_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
write_token(bc, vp9_uv_mode_tree, p, vp9_sb_kf_ymode_encodings + m);
}
active_section = 6;
#endif
- // TODO(rbultje) write using SB tree structure
-
if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)) {
+#if CONFIG_SUPERBLOCKS
+ if (m->mbmi.encoded_as_sb)
+ write_sb_ymode(bc, mode, pc->fc.sb_ymode_prob);
+ else
+#endif
write_ymode(bc, mode, pc->fc.ymode_prob);
}
vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_8x8, cpi->common.fc.hybrid_coef_probs_8x8);
vp9_copy(cpi->common.fc.pre_coef_probs_16x16, cpi->common.fc.coef_probs_16x16);
vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_16x16, cpi->common.fc.hybrid_coef_probs_16x16);
+#if CONFIG_SUPERBLOCKS
+ vp9_copy(cpi->common.fc.pre_sb_ymode_prob, cpi->common.fc.sb_ymode_prob);
+#endif
vp9_copy(cpi->common.fc.pre_ymode_prob, cpi->common.fc.ymode_prob);
vp9_copy(cpi->common.fc.pre_uv_mode_prob, cpi->common.fc.uv_mode_prob);
vp9_copy(cpi->common.fc.pre_bmode_prob, cpi->common.fc.bmode_prob);
}
}
- if (output_enabled && cm->frame_type == KEY_FRAME) {
- sum_intra_stats(cpi, x);
- }
-
if (!x->skip) {
#ifdef ENC_DEBUG
if (enc_debug) {
if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) {
vp9_build_intra_predictors_sby_s(&x->e_mbd);
vp9_build_intra_predictors_sbuv_s(&x->e_mbd);
+ sum_intra_stats(cpi, x);
} else {
int ref_fb_idx;
}
}
- if (cm->frame_type == KEY_FRAME) {
- sum_intra_stats(cpi, x);
- }
-
xd->mode_info_context = mi;
update_sb_skip_coeff_state(cpi, x, ta, tl, tp, t, skip);
if (cm->txfm_mode == TX_MODE_SELECT &&
vp9_cost_tokens((int *)c->mb.inter_bmode_costs,
x->fc.sub_mv_ref_prob[0], vp9_sub_mv_ref_tree);
+ // TODO(rbultje) separate tables for superblock costing?
vp9_cost_tokens(c->mb.mbmode_cost[1], x->fc.ymode_prob, vp9_ymode_tree);
vp9_cost_tokens(c->mb.mbmode_cost[0],
x->kf_ymode_prob[c->common.kf_ymode_probs_index],
cpi->hybrid_coef_counts_16x16);
vp9_adapt_coef_probs(&cpi->common);
if (cpi->common.frame_type != KEY_FRAME) {
+#if CONFIG_SUPERBLOCKS
+ vp9_copy(cpi->common.fc.sb_ymode_counts, cpi->sb_ymode_count);
+#endif
vp9_copy(cpi->common.fc.ymode_counts, cpi->ymode_count);
vp9_copy(cpi->common.fc.uv_mode_counts, cpi->y_uv_mode_count);
vp9_copy(cpi->common.fc.bmode_counts, cpi->bmode_count);
vp9_prob hybrid_coef_probs_16x16[BLOCK_TYPES_16X16]
[COEF_BANDS][PREV_COEF_CONTEXTS][ENTROPY_NODES];
+#if CONFIG_SUPERBLOCKS
+ vp9_prob sb_ymode_prob[VP9_I32X32_MODES - 1];
+#endif
vp9_prob ymode_prob[VP9_YMODES - 1]; /* interframe intra mode probs */
vp9_prob uv_mode_prob[VP9_YMODES][VP9_UV_MODES - 1];
vp9_prob bmode_prob[VP9_NKF_BINTRAMODES - 1];
vp9_copy(cc->mode_context_a, cm->fc.mode_context_a);
vp9_copy(cc->ymode_prob, cm->fc.ymode_prob);
+#if CONFIG_SUPERBLOCKS
+ vp9_copy(cc->sb_ymode_prob, cm->fc.sb_ymode_prob);
+#endif
vp9_copy(cc->bmode_prob, cm->fc.bmode_prob);
vp9_copy(cc->uv_mode_prob, cm->fc.uv_mode_prob);
vp9_copy(cc->i8x8_mode_prob, cm->fc.i8x8_mode_prob);
vp9_copy(cm->fc.mode_context_a, cc->mode_context_a);
vp9_copy(cm->fc.ymode_prob, cc->ymode_prob);
+#if CONFIG_SUPERBLOCKS
+ vp9_copy(cm->fc.sb_ymode_prob, cc->sb_ymode_prob);
+#endif
vp9_copy(cm->fc.bmode_prob, cc->bmode_prob);
vp9_copy(cm->fc.i8x8_mode_prob, cc->i8x8_mode_prob);
vp9_copy(cm->fc.uv_mode_prob, cc->uv_mode_prob);
int64_t best_intra16_rd = INT64_MAX;
int best_intra16_mode = DC_PRED, best_intra16_uv_mode = DC_PRED;
#endif
+ int rate_uv_4x4, rate_uv_8x8, rate_uv_tokenonly_4x4, rate_uv_tokenonly_8x8;
+ int dist_uv_4x4, dist_uv_8x8, uv_skip_4x4, uv_skip_8x8;
+ MB_PREDICTION_MODE mode_uv_4x4, mode_uv_8x8;
x->skip = 0;
xd->mode_info_context->mbmi.segment_id = segment_id;
frame_mv[ZEROMV][ref_frame].as_int = 0;
}
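+  // Pre-compute the best UV intra mode and its rate/distortion for both
+  // transform sizes, so the intra path in the mode loop can reuse them.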
+ mbmi->mode = DC_PRED;
+ if (cm->txfm_mode == ONLY_4X4 || cm->txfm_mode == TX_MODE_SELECT) {
+ mbmi->txfm_size = TX_4X4;
+ rd_pick_intra_sbuv_mode(cpi, x, &rate_uv_4x4, &rate_uv_tokenonly_4x4,
+ &dist_uv_4x4, &uv_skip_4x4);
+ mode_uv_4x4 = mbmi->uv_mode;
+ }
+ if (cm->txfm_mode != ONLY_4X4) {
+ mbmi->txfm_size = TX_8X8;
+ rd_pick_intra_sbuv_mode(cpi, x, &rate_uv_8x8, &rate_uv_tokenonly_8x8,
+ &dist_uv_8x8, &uv_skip_8x8);
+ mode_uv_8x8 = mbmi->uv_mode;
+ }
+
for (mode_index = 0; mode_index < MAX_MODES; mode_index++) {
- int mode_excluded;
+ int mode_excluded = 0;
int64_t this_rd = INT64_MAX;
int disable_skip = 0;
int other_cost = 0;
this_mode = vp9_mode_order[mode_index].mode;
ref_frame = vp9_mode_order[mode_index].ref_frame;
+ assert(ref_frame == INTRA_FRAME ||
+ (cpi->ref_frame_flags & flag_list[ref_frame]));
mbmi->ref_frame = ref_frame;
comp_pred = vp9_mode_order[mode_index].second_ref_frame > INTRA_FRAME;
mbmi->mode = this_mode;
if (!(cpi->ref_frame_flags & flag_list[ref_frame]))
continue;
- // not yet supported or not superblocky
- // TODO(rbultje): support intra coding
- if (ref_frame == INTRA_FRAME || this_mode == SPLITMV ||
+ // TODO(debargha): intra/inter encoding at SB level
+  if (this_mode == I8X8_PRED || this_mode == B_PRED ||
+      this_mode == SPLITMV ||
vp9_mode_order[mode_index].second_ref_frame == INTRA_FRAME)
continue;
mode_excluded = cm->comp_pred_mode == SINGLE_PREDICTION_ONLY;
} else {
mbmi->second_ref_frame = NONE;
- mode_excluded = cm->comp_pred_mode == COMP_PREDICTION_ONLY;
+ if (ref_frame != INTRA_FRAME)
+ mode_excluded = cm->comp_pred_mode == COMP_PREDICTION_ONLY;
}
xd->pre.y_buffer = y_buffer[ref_frame];
}
}
- this_rd = handle_inter_mode(cpi, x, BLOCK_32X32,
- &saddone, near_sadidx, mdcounts, txfm_cache,
- &rate2, &distortion2, &skippable,
- &compmode_cost,
+ if (ref_frame == INTRA_FRAME) {
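+        // Intra 32x32: build the superblock predictors, compute luma RD, and
+        // add the pre-computed UV rate/distortion for this transform size.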
+ vp9_build_intra_predictors_sby_s(xd);
+ super_block_yrd(cpi, x, &rate_y, &distortion_y,
+ IF_RTCD(&cpi->rtcd), &skippable, txfm_cache);
+ if (mbmi->txfm_size == TX_4X4) {
+ rate_uv = rate_uv_4x4;
+ distortion_uv = dist_uv_4x4;
+ skippable = skippable && uv_skip_4x4;
+ mbmi->uv_mode = mode_uv_4x4;
+ } else {
+ rate_uv = rate_uv_8x8;
+ distortion_uv = dist_uv_8x8;
+ skippable = skippable && uv_skip_8x8;
+ mbmi->uv_mode = mode_uv_8x8;
+ }
+
+ rate2 = rate_y + x->mbmode_cost[cm->frame_type][mbmi->mode] + rate_uv;
+ distortion2 = distortion_y + distortion_uv;
+ } else {
+ this_rd = handle_inter_mode(cpi, x, BLOCK_32X32,
+ &saddone, near_sadidx, mdcounts, txfm_cache,
+ &rate2, &distortion2, &skippable,
+ &compmode_cost,
#if CONFIG_COMP_INTERINTRA_PRED
- &compmode_interintra_cost,
+ &compmode_interintra_cost,
#endif
- &rate_y, &distortion_y,
- &rate_uv, &distortion_uv,
- &mode_excluded, &disable_skip, recon_yoffset,
- mode_index, frame_mv, frame_best_ref_mv);
- if (this_rd == INT64_MAX)
- continue;
+ &rate_y, &distortion_y,
+ &rate_uv, &distortion_uv,
+ &mode_excluded, &disable_skip, recon_yoffset,
+ mode_index, frame_mv, frame_best_ref_mv);
+ if (this_rd == INT64_MAX)
+ continue;
+ }
if (cpi->common.comp_pred_mode == HYBRID_PREDICTION) {
rate2 += compmode_cost;
// Note index of best mode so far
best_mode_index = mode_index;
-#if 0
if (this_mode <= B_PRED) {
- xd->mode_info_context->mbmi.uv_mode = uv_intra_mode_8x8;
/* required for left and above block mv */
- xd->mode_info_context->mbmi.mv.as_int = 0;
+ mbmi->mv[0].as_int = 0;
}
-#endif
other_cost += ref_costs[xd->mode_info_context->mbmi.ref_frame];