220, 220, 200, 200, 180, 128, 30, 220, 30,
};
+static const vpx_prob default_drl_prob[DRL_MODE_CONTEXTS] = {
+ 128, 128, 128,
+};
+
#if CONFIG_EXT_INTER
static const vpx_prob default_new2mv_prob = 180;
#endif
vp10_copy(fc->newmv_prob, default_newmv_prob);
vp10_copy(fc->zeromv_prob, default_zeromv_prob);
vp10_copy(fc->refmv_prob, default_refmv_prob);
+ vp10_copy(fc->drl_prob0, default_drl_prob);
+ vp10_copy(fc->drl_prob1, default_drl_prob);
#if CONFIG_EXT_INTER
fc->new2mv_prob = default_new2mv_prob;
#endif // CONFIG_EXT_INTER
fc->refmv_prob[i] = mode_mv_merge_probs(pre_fc->refmv_prob[i],
counts->refmv_mode[i]);
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ fc->drl_prob0[i] = mode_mv_merge_probs(pre_fc->drl_prob0[i],
+ counts->drl_mode0[i]);
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ fc->drl_prob1[i] = mode_mv_merge_probs(pre_fc->drl_prob1[i],
+ counts->drl_mode1[i]);
+
#if CONFIG_EXT_INTER
fc->new2mv_prob = mode_mv_merge_probs(pre_fc->new2mv_prob,
counts->new2mv_mode);
vpx_prob newmv_prob[NEWMV_MODE_CONTEXTS];
vpx_prob zeromv_prob[ZEROMV_MODE_CONTEXTS];
vpx_prob refmv_prob[REFMV_MODE_CONTEXTS];
+ vpx_prob drl_prob0[DRL_MODE_CONTEXTS];
+ vpx_prob drl_prob1[DRL_MODE_CONTEXTS];
+
#if CONFIG_EXT_INTER
vpx_prob new2mv_prob;
#endif // CONFIG_EXT_INTER
unsigned int newmv_mode[NEWMV_MODE_CONTEXTS][2];
unsigned int zeromv_mode[ZEROMV_MODE_CONTEXTS][2];
unsigned int refmv_mode[REFMV_MODE_CONTEXTS][2];
+ unsigned int drl_mode0[DRL_MODE_CONTEXTS][2];
+ unsigned int drl_mode1[DRL_MODE_CONTEXTS][2];
#if CONFIG_EXT_INTER
unsigned int new2mv_mode[2];
#endif // CONFIG_EXT_INTER
#define NEWMV_MODE_CONTEXTS 7
#define ZEROMV_MODE_CONTEXTS 2
#define REFMV_MODE_CONTEXTS 9
+#define DRL_MODE_CONTEXTS 3
#define ZEROMV_OFFSET 3
#define REFMV_OFFSET 4
#define MAX_MV_REF_CANDIDATES 2
#if CONFIG_REF_MV
#define MAX_REF_MV_STACK_SIZE 16
+#define REF_CAT_LEVEL 160
#endif
#define INTRA_INTER_CONTEXTS 4
nearest_refmv_count = *refmv_count;
+ for (idx = 0; idx < nearest_refmv_count; ++idx)
+ ref_mv_stack[idx].weight += REF_CAT_LEVEL;
+
if (prev_frame_mvs_base && cm->show_frame && cm->last_show_frame
&& rf[1] == NONE) {
int ref;
else
return mode_context[rf[0]];
}
+
+// Returns the coding context (0..2) for the dynamic reference list (DRL)
+// index bit selecting among ref_mv_stack candidates at and after
+// ref_idx + 1.  The context is derived from whether the next two stack
+// entries carry a "nearest" weight (>= REF_CAT_LEVEL; nearest candidates
+// have REF_CAT_LEVEL added to their weight when the stack is built).
+// The stack is weight-sorted, so weight[ref_idx+1] < REF_CAT_LEVEL with
+// weight[ref_idx+2] >= REF_CAT_LEVEL cannot occur (asserted below).
+//
+// NOTE(review): the previous comparisons used strict '>' on the high side,
+// so a weight exactly equal to REF_CAT_LEVEL matched none of the three
+// cases and hit assert(0) (silently returning 0 in release builds).
+// Using '>=' makes the cases exhaustive; encoder and decoder share this
+// function, so the derived contexts stay bitstream-consistent.
+static INLINE uint8_t vp10_drl_ctx(const CANDIDATE_MV *ref_mv_stack,
+                                   int ref_idx) {
+  if (ref_mv_stack[ref_idx + 1].weight >= REF_CAT_LEVEL &&
+      ref_mv_stack[ref_idx + 2].weight >= REF_CAT_LEVEL)
+    return 0;
+
+  if (ref_mv_stack[ref_idx + 1].weight >= REF_CAT_LEVEL &&
+      ref_mv_stack[ref_idx + 2].weight < REF_CAT_LEVEL)
+    return 1;
+
+  if (ref_mv_stack[ref_idx + 1].weight < REF_CAT_LEVEL &&
+      ref_mv_stack[ref_idx + 2].weight < REF_CAT_LEVEL)
+    return 2;
+
+  assert(0);
+  return 0;
+}
#endif
typedef void (*find_mv_refs_sync)(void *const data, int mi_row);
for (j = 0; j < 2; ++j)
cm->counts.refmv_mode[i][j] += counts->refmv_mode[i][j];
+
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ for (j = 0; j < 2; ++j)
+ cm->counts.drl_mode0[i][j] += counts->drl_mode0[i][j];
+
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ for (j = 0; j < 2; ++j)
+ cm->counts.drl_mode1[i][j] += counts->drl_mode1[i][j];
+
#if CONFIG_EXT_INTER
for (j = 0; j < 2; ++j)
cm->counts.new2mv_mode[j] += counts->new2mv_mode[j];
vp10_diff_update_prob(r, &fc->zeromv_prob[i]);
for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
vp10_diff_update_prob(r, &fc->refmv_prob[i]);
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ vp10_diff_update_prob(r, &fc->drl_prob0[i]);
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ vp10_diff_update_prob(r, &fc->drl_prob1[i]);
#if CONFIG_EXT_INTER
vp10_diff_update_prob(r, &fc->new2mv_prob);
#endif // CONFIG_EXT_INTER
#endif
}
+#if CONFIG_REF_MV
+// Decodes the dynamic reference list (DRL) index for a NEARMV block.
+// Reads up to two arithmetic-coded bits: the first (drl_prob0[ctx])
+// selects between ref_mv_idx 0 and ref_mv_idx > 0; the second
+// (drl_prob1[ctx]), read only when more than three candidates exist and
+// the first bit was set, selects between ref_mv_idx 1 and 2.  Decoder-side
+// counts are accumulated (when xd->counts is non-NULL) so the
+// probabilities can be backward-adapted in lockstep with the encoder.
+static void read_drl_idx(const VP10_COMMON *cm,
+                         MACROBLOCKD *xd,
+                         MB_MODE_INFO *mbmi,
+                         vpx_reader *r) {
+  uint8_t ref_frame_type = vp10_ref_frame_type(mbmi->ref_frame);
+  mbmi->ref_mv_idx = 0;
+
+  // The DRL index is only signalled when more than two candidates are in
+  // the reference MV stack; otherwise index 0 is implied.
+  if (xd->ref_mv_count[ref_frame_type] > 2) {
+    uint8_t drl0_ctx = vp10_drl_ctx(xd->ref_mv_stack[ref_frame_type], 0);
+    vpx_prob drl0_prob = cm->fc->drl_prob0[drl0_ctx];
+    if (vpx_read(r, drl0_prob)) {
+      mbmi->ref_mv_idx = 1;
+      if (xd->counts)
+        ++xd->counts->drl_mode0[drl0_ctx][1];
+      // A second bit distinguishes index 1 from 2, but only when a fourth
+      // candidate exists to choose from.
+      if (xd->ref_mv_count[ref_frame_type] > 3) {
+        uint8_t drl1_ctx =
+            vp10_drl_ctx(xd->ref_mv_stack[ref_frame_type], 1);
+        vpx_prob drl1_prob = cm->fc->drl_prob1[drl1_ctx];
+        if (vpx_read(r, drl1_prob)) {
+          mbmi->ref_mv_idx = 2;
+          if (xd->counts)
+            ++xd->counts->drl_mode1[drl1_ctx][1];
+
+          return;
+        }
+
+        if (xd->counts)
+          ++xd->counts->drl_mode1[drl1_ctx][0];
+      }
+      return;
+    }
+
+    if (xd->counts)
+      ++xd->counts->drl_mode0[drl0_ctx][0];
+  }
+}
+#endif
+
#if CONFIG_EXT_INTER
static PREDICTION_MODE read_inter_compound_mode(VP10_COMMON *cm,
MACROBLOCKD *xd,
#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
r, mode_ctx);
#if CONFIG_REF_MV
- if (mbmi->mode == NEARMV) {
- uint8_t ref_frame_type = vp10_ref_frame_type(mbmi->ref_frame);
- if (xd->ref_mv_count[ref_frame_type] > 2) {
- if (vpx_read_bit(r)) {
- mbmi->ref_mv_idx = 1;
- if (xd->ref_mv_count[ref_frame_type] > 3)
- if (vpx_read_bit(r))
- mbmi->ref_mv_idx = 2;
- }
- }
- }
+ if (mbmi->mode == NEARMV)
+ read_drl_idx(cm, xd, mbmi, r);
#endif
}
}
#endif
}
+#if CONFIG_REF_MV
+// Encodes the dynamic reference list (DRL) index for a NEARMV block,
+// mirroring read_drl_idx: one bit signalling ref_mv_idx != 0, then, when a
+// fourth candidate exists and the first bit was set, one bit signalling
+// ref_mv_idx != 1.  Nothing is written with two or fewer candidates.
+static void write_drl_idx(const VP10_COMMON *cm,
+                          const MB_MODE_INFO *mbmi,
+                          const MB_MODE_INFO_EXT *mbmi_ext,
+                          vpx_writer *w) {
+  const uint8_t ref_type = vp10_ref_frame_type(mbmi->ref_frame);
+
+  if (mbmi_ext->ref_mv_count[ref_type] <= 2)
+    return;
+
+  {
+    const uint8_t ctx0 = vp10_drl_ctx(mbmi_ext->ref_mv_stack[ref_type], 0);
+    vpx_write(w, mbmi->ref_mv_idx != 0, cm->fc->drl_prob0[ctx0]);
+  }
+
+  if (mbmi_ext->ref_mv_count[ref_type] > 3 && mbmi->ref_mv_idx > 0) {
+    const uint8_t ctx1 = vp10_drl_ctx(mbmi_ext->ref_mv_stack[ref_type], 1);
+    vpx_write(w, mbmi->ref_mv_idx != 1, cm->fc->drl_prob1[ctx1]);
+  }
+}
+#endif
+
#if CONFIG_EXT_INTER
static void write_inter_compound_mode(VP10_COMMON *cm, vpx_writer *w,
PREDICTION_MODE mode,
for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
vp10_cond_prob_diff_update(w, &cm->fc->refmv_prob[i],
counts->refmv_mode[i]);
-
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ vp10_cond_prob_diff_update(w, &cm->fc->drl_prob0[i],
+ counts->drl_mode0[i]);
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
+ vp10_cond_prob_diff_update(w, &cm->fc->drl_prob1[i],
+ counts->drl_mode1[i]);
#if CONFIG_EXT_INTER
vp10_cond_prob_diff_update(w, &cm->fc->new2mv_prob, counts->new2mv_mode);
#endif // CONFIG_EXT_INTER
mode_ctx);
#if CONFIG_REF_MV
- if (mode == NEARMV) {
- uint8_t ref_frame_type = vp10_ref_frame_type(mbmi->ref_frame);
- if (mbmi_ext->ref_mv_count[ref_frame_type] > 2) {
- vpx_write_bit(w, mbmi->ref_mv_idx != 0);
- if (mbmi_ext->ref_mv_count[ref_frame_type] > 3 &&
- mbmi->ref_mv_idx > 0)
- vpx_write_bit(w, mbmi->ref_mv_idx != 1);
- }
- }
+ if (mode == NEARMV)
+ write_drl_idx(cm, mbmi, mbmi_ext, w);
#endif
}
}
has_second_ref(mbmi),
#endif // CONFIG_EXT_INTER
mode_ctx);
+
+ if (mode == NEARMV) {
+ uint8_t ref_frame_type = vp10_ref_frame_type(mbmi->ref_frame);
+ if (mbmi_ext->ref_mv_count[ref_frame_type] > 2) {
+ uint8_t drl0_ctx =
+ vp10_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], 0);
+ if (mbmi->ref_mv_idx == 0)
+ ++counts->drl_mode0[drl0_ctx][0];
+ else
+ ++counts->drl_mode0[drl0_ctx][1];
+
+ if (mbmi_ext->ref_mv_count[ref_frame_type] > 3 &&
+ mbmi->ref_mv_idx > 0) {
+ uint8_t drl1_ctx =
+ vp10_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], 1);
+ if (mbmi->ref_mv_idx == 1)
+ ++counts->drl_mode1[drl1_ctx][0];
+ else
+ ++counts->drl_mode1[drl1_ctx][1];
+ }
+ }
+ }
#if CONFIG_EXT_INTER
}
#endif // CONFIG_EXT_INTER
int newmv_mode_cost[NEWMV_MODE_CONTEXTS][2];
int zeromv_mode_cost[ZEROMV_MODE_CONTEXTS][2];
int refmv_mode_cost[REFMV_MODE_CONTEXTS][2];
+ int drl_mode_cost0[DRL_MODE_CONTEXTS][2];
+ int drl_mode_cost1[DRL_MODE_CONTEXTS][2];
#if CONFIG_EXT_INTER
int new2mv_mode_cost[2];
#endif // CONFIG_EXT_INTER
cpi->refmv_mode_cost[i][0] = vp10_cost_bit(cm->fc->refmv_prob[i], 0);
cpi->refmv_mode_cost[i][1] = vp10_cost_bit(cm->fc->refmv_prob[i], 1);
}
+
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i) {
+ cpi->drl_mode_cost0[i][0] = vp10_cost_bit(cm->fc->drl_prob0[i], 0);
+ cpi->drl_mode_cost0[i][1] = vp10_cost_bit(cm->fc->drl_prob0[i], 1);
+ }
+
+ for (i = 0; i < DRL_MODE_CONTEXTS; ++i) {
+ cpi->drl_mode_cost1[i][0] = vp10_cost_bit(cm->fc->drl_prob1[i], 0);
+ cpi->drl_mode_cost1[i][1] = vp10_cost_bit(cm->fc->drl_prob1[i], 1);
+ }
#if CONFIG_EXT_INTER
cpi->new2mv_mode_cost[0] = vp10_cost_bit(cm->fc->new2mv_prob, 0);
cpi->new2mv_mode_cost[1] = vp10_cost_bit(cm->fc->new2mv_prob, 1);
int ref_idx;
int ref_set = VPXMIN(2, mbmi_ext->ref_mv_count[ref_frame_type] - 2);
- rate2 += vp10_cost_bit(128, 0);
+ uint8_t drl0_ctx =
+ vp10_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], 0);
+ rate2 += cpi->drl_mode_cost0[drl0_ctx][0];
if (this_rd < INT64_MAX) {
if (RDCOST(x->rdmult, x->rddiv, rate_y + rate_uv, distortion2) <
dummy_filter_cache);
}
- tmp_rate += vp10_cost_bit(128, 1);
- if (mbmi_ext->ref_mv_count[ref_frame_type] > 3)
- tmp_rate += vp10_cost_bit(128, ref_idx);
+ tmp_rate += cpi->drl_mode_cost0[drl0_ctx][1];
+
+ if (mbmi_ext->ref_mv_count[ref_frame_type] > 3) {
+ uint8_t drl1_ctx =
+ vp10_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], 1);
+ tmp_rate += cpi->drl_mode_cost1[drl1_ctx][ref_idx];
+ }
if (tmp_alt_rd < INT64_MAX) {
if (RDCOST(x->rdmult, x->rddiv,