#define NONE -1
#define INTRA_FRAME 0
#define LAST_FRAME 1
+
#if CONFIG_EXT_REFS
+
#define LAST2_FRAME 2
#define LAST3_FRAME 3
#define LAST4_FRAME 4
#define ALTREF_FRAME 6
#define MAX_REF_FRAMES 7
#define LAST_REF_FRAMES (LAST4_FRAME - LAST_FRAME + 1)
-#else
+
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
+
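+// Under CONFIG_BIDIR_PRED a second backward reference, BWDREF_FRAME, is
+// inserted between GOLDEN_FRAME and ALTREF_FRAME below, so the reference
+// count (including INTRA_FRAME) grows from 4 to 5.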
+#define GOLDEN_FRAME 2
+#define BWDREF_FRAME 3
+#define ALTREF_FRAME 4
+#define MAX_REF_FRAMES 5
+
+#else // CONFIG_BIDIR_PRED
+
#define GOLDEN_FRAME 2
#define ALTREF_FRAME 3
#define MAX_REF_FRAMES 4
+
+#endif // CONFIG_BIDIR_PRED
+
#endif // CONFIG_EXT_REFS
typedef int8_t MV_REFERENCE_FRAME;
239, 183, 119, 96, 41
};
-static const vpx_prob default_comp_ref_p[REF_CONTEXTS][COMP_REFS - 1] = {
+
#if CONFIG_EXT_REFS
+static const vpx_prob default_comp_ref_p[REF_CONTEXTS][COMP_REFS - 1] = {
// TODO(zoeliu): To adjust the initial prob values.
{ 33, 16, 16, 16 },
{ 77, 74, 74, 74 },
{ 142, 142, 142, 142 },
{ 172, 170, 170, 170 },
{ 238, 247, 247, 247 }
-#else
+};
+
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
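+// With bidirectional compound prediction, the compound reference pair is
+// coded with two independent probability sets: comp_ref_p selects the
+// forward reference (FWD_REFS - 1 probabilities per context) and
+// comp_bwdref_p selects the backward reference (BWD_REFS - 1 per context).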
+// TODO(zoeliu): To adjust the initial prob values.
+static const vpx_prob default_comp_ref_p[REF_CONTEXTS][FWD_REFS - 1] = {
+// { 50 }, { 126 }, { 123 }, { 221 }, { 226 }
+ { 33 }, { 77 }, { 142 }, { 172 }, { 238 }
+};
+static const vpx_prob default_comp_bwdref_p[REF_CONTEXTS][BWD_REFS - 1] = {
+ { 16 }, { 74 }, { 142 }, { 170 }, { 247 }
+};
+#else // CONFIG_BIDIR_PRED
+static const vpx_prob default_comp_ref_p[REF_CONTEXTS][COMP_REFS - 1] = {
{ 50 }, { 126 }, { 123 }, { 221 }, { 226 }
-#endif // CONFIG_EXT_REFS
};
+#endif // CONFIG_BIDIR_PRED
+
+#endif // CONFIG_EXT_REFS
static const vpx_prob default_single_ref_p[REF_CONTEXTS][SINGLE_REFS - 1] = {
#if CONFIG_EXT_REFS
{ 142, 142, 142, 142, 142 },
{ 172, 170, 170, 170, 170 },
{ 238, 247, 247, 247, 247 }
-#else
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ { 33, 16, 16 },
+ { 77, 74, 74 },
+ { 142, 142, 142 },
+ { 172, 170, 170 },
+ { 238, 247, 247 }
+#else // CONFIG_BIDIR_PRED
{ 33, 16 },
{ 77, 74 },
{ 142, 142 },
{ 172, 170 },
{ 238, 247 }
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
};
vp10_copy(fc->intra_inter_prob, default_intra_inter_p);
vp10_copy(fc->comp_inter_prob, default_comp_inter_p);
vp10_copy(fc->comp_ref_prob, default_comp_ref_p);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ vp10_copy(fc->comp_bwdref_prob, default_comp_bwdref_p);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
vp10_copy(fc->single_ref_prob, default_single_ref_p);
vp10_copy(fc->tx_size_probs, default_tx_size_prob);
#if CONFIG_VAR_TX
for (i = 0; i < COMP_INTER_CONTEXTS; i++)
fc->comp_inter_prob[i] = vp10_mode_mv_merge_probs(
pre_fc->comp_inter_prob[i], counts->comp_inter[i]);
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ for (i = 0; i < REF_CONTEXTS; i++)
+ for (j = 0; j < (FWD_REFS - 1); j++)
+ fc->comp_ref_prob[i][j] = mode_mv_merge_probs(
+ pre_fc->comp_ref_prob[i][j], counts->comp_ref[i][j]);
+ for (i = 0; i < REF_CONTEXTS; i++)
+ for (j = 0; j < (BWD_REFS - 1); j++)
+ fc->comp_bwdref_prob[i][j] = mode_mv_merge_probs(
+ pre_fc->comp_bwdref_prob[i][j], counts->comp_bwdref[i][j]);
+#else
for (i = 0; i < REF_CONTEXTS; i++)
for (j = 0; j < (COMP_REFS - 1); j++)
- fc->comp_ref_prob[i][j] = vp10_mode_mv_merge_probs(
+ fc->comp_ref_prob[i][j] = mode_mv_merge_probs(
pre_fc->comp_ref_prob[i][j], counts->comp_ref[i][j]);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
for (i = 0; i < REF_CONTEXTS; i++)
for (j = 0; j < (SINGLE_REFS - 1); j++)
fc->single_ref_prob[i][j] = vp10_mode_mv_merge_probs(
vpx_prob intra_inter_prob[INTRA_INTER_CONTEXTS];
vpx_prob comp_inter_prob[COMP_INTER_CONTEXTS];
vpx_prob single_ref_prob[REF_CONTEXTS][SINGLE_REFS-1];
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ vpx_prob comp_ref_prob[REF_CONTEXTS][FWD_REFS-1];
+ vpx_prob comp_bwdref_prob[REF_CONTEXTS][BWD_REFS-1];
+#else
vpx_prob comp_ref_prob[REF_CONTEXTS][COMP_REFS-1];
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
vpx_prob tx_size_probs[TX_SIZES - 1][TX_SIZE_CONTEXTS][TX_SIZES - 1];
#if CONFIG_VAR_TX
vpx_prob txfm_partition_prob[TXFM_PARTITION_CONTEXTS];
unsigned int intra_inter[INTRA_INTER_CONTEXTS][2];
unsigned int comp_inter[COMP_INTER_CONTEXTS][2];
unsigned int single_ref[REF_CONTEXTS][SINGLE_REFS-1][2];
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ unsigned int comp_ref[REF_CONTEXTS][FWD_REFS-1][2];
+ unsigned int comp_bwdref[REF_CONTEXTS][BWD_REFS-1][2];
+#else
unsigned int comp_ref[REF_CONTEXTS][COMP_REFS-1][2];
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
unsigned int tx_size_totals[TX_SIZES];
unsigned int tx_size[TX_SIZES - 1][TX_SIZE_CONTEXTS][TX_SIZES];
#if CONFIG_VAR_TX
VP9_GOLD_FLAG = 1 << 4,
VP9_ALT_FLAG = 1 << 5,
VP9_REFFRAME_ALL = (1 << 6) - 1
-#else
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
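+  // With CONFIG_BIDIR_PRED the per-frame reference flags take four bits:
+  // GOLDEN at bit 1, BWDREF at bit 2 and ALTREF at bit 3 (VP9_LAST_FLAG is
+  // assumed to remain at bit 0), hence VP9_REFFRAME_ALL = 0xF.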
+ VP9_GOLD_FLAG = 1 << 1,
+ VP9_BWD_FLAG = 1 << 2,
+ VP9_ALT_FLAG = 1 << 3,
+ VP9_REFFRAME_ALL = (1 << 4) - 1
+#else // CONFIG_BIDIR_PRED
VP9_GOLD_FLAG = 1 << 1,
VP9_ALT_FLAG = 1 << 2,
VP9_REFFRAME_ALL = (1 << 3) - 1
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
} VP9_REFFRAME;
#endif
#if CONFIG_EXT_REFS
+
#define SINGLE_REFS 6
#define COMP_REFS 5
-#else
+
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
+#define FWD_REFS 2
+#define BWD_REFS 2
+#define SINGLE_REFS (FWD_REFS + BWD_REFS)
+#define COMP_REFS (FWD_REFS * BWD_REFS)
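+// i.e. SINGLE_REFS = 4 ({LAST, GOLDEN, BWDREF, ALTREF}) and COMP_REFS = 4,
+// pairing each of the 2 forward references with each of the 2 backward ones.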
+
+#else // CONFIG_BIDIR_PRED
+
#define SINGLE_REFS 3
#define COMP_REFS 2
+#endif // CONFIG_BIDIR_PRED
+
#endif // CONFIG_EXT_REFS
#if CONFIG_SUPERTX
uint8_t mode_ref_delta_update;
// 0 = Intra, Last, Last2+Last3+LAST4(CONFIG_EXT_REFS),
- // GF, ARF
+ // GF, BRF(CONFIG_BIDIR_PRED), ARF
signed char ref_deltas[MAX_REF_FRAMES];
signed char last_ref_deltas[MAX_REF_FRAMES];
int show_frame;
int last_show_frame;
int show_existing_frame;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  int is_reference_frame;  // Whether this frame is used as a reference
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
// Flag signaling that the frame is encoded using only INTRA modes.
uint8_t intra_only;
int frame_parallel_decode; // frame-based threading.
// Context probabilities for reference frame prediction
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ MV_REFERENCE_FRAME comp_fwd_ref[FWD_REFS];
+ MV_REFERENCE_FRAME comp_bwd_ref[BWD_REFS];
+#else
MV_REFERENCE_FRAME comp_fixed_ref;
MV_REFERENCE_FRAME comp_var_ref[COMP_REFS];
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
REFERENCE_MODE reference_mode;
FRAME_CONTEXT *fc; /* this frame entropy */
}
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
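+// True iff |ref_frame| is one of the two backward compound references
+// (cm->comp_bwd_ref[] holds BWDREF_FRAME and ALTREF_FRAME, as set up in
+// setup_compound_reference_mode()).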
+#define CHECK_COMP_BWD_REF(ref_frame) \
+ (((ref_frame) == cm->comp_bwd_ref[0]) || ((ref_frame) == cm->comp_bwd_ref[1]))
+
+int vp10_get_reference_mode_context(const VP10_COMMON *cm,
+ const MACROBLOCKD *xd) {
+ int ctx;
+ const MB_MODE_INFO *const above_mbmi = xd->above_mbmi;
+ const MB_MODE_INFO *const left_mbmi = xd->left_mbmi;
+ const int has_above = xd->up_available;
+ const int has_left = xd->left_available;
+ // Note:
+ // The mode info data structure has a one element border above and to the
+ // left of the entries corresponding to real macroblocks.
+ // The prediction flags in these dummy entries are initialized to 0.
+ if (has_above && has_left) { // both edges available
+ if (!has_second_ref(above_mbmi) && !has_second_ref(left_mbmi))
+ // neither edge uses comp pred (0/1)
+ ctx = CHECK_COMP_BWD_REF(above_mbmi->ref_frame[0]) ^
+ CHECK_COMP_BWD_REF(left_mbmi->ref_frame[0]);
+ else if (!has_second_ref(above_mbmi))
+ // one of two edges uses comp pred (2/3)
+ ctx = 2 + (CHECK_COMP_BWD_REF(above_mbmi->ref_frame[0]) ||
+ !is_inter_block(above_mbmi));
+ else if (!has_second_ref(left_mbmi))
+ // one of two edges uses comp pred (2/3)
+ ctx = 2 + (CHECK_COMP_BWD_REF(left_mbmi->ref_frame[0]) ||
+ !is_inter_block(left_mbmi));
+ else // both edges use comp pred (4)
+ ctx = 4;
+ } else if (has_above || has_left) { // one edge available
+ const MB_MODE_INFO *edge_mbmi = has_above ? above_mbmi : left_mbmi;
+
+ if (!has_second_ref(edge_mbmi))
+ // edge does not use comp pred (0/1)
+ ctx = CHECK_COMP_BWD_REF(edge_mbmi->ref_frame[0]);
+ else
+ // edge uses comp pred (3)
+ ctx = 3;
+ } else { // no edges available (1)
+ ctx = 1;
+ }
+ assert(ctx >= 0 && ctx < COMP_INTER_CONTEXTS);
+ return ctx;
+}
+
+#else // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
int vp10_get_reference_mode_context(const VP10_COMMON *cm,
- const MACROBLOCKD *xd) {
+ const MACROBLOCKD *xd) {
int ctx;
const MB_MODE_INFO *const above_mbmi = xd->above_mbmi;
const MB_MODE_INFO *const left_mbmi = xd->left_mbmi;
return ctx;
}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
#if CONFIG_EXT_REFS
// TODO(zoeliu): Future work will be conducted to optimize the context design
#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+
+// Returns a context number for the given MB prediction signal
+int vp10_get_pred_context_comp_ref_p(const VP10_COMMON *cm,
+ const MACROBLOCKD *xd) {
+ int pred_context;
+ const MB_MODE_INFO *const above_mbmi = xd->above_mbmi;
+ const MB_MODE_INFO *const left_mbmi = xd->left_mbmi;
+ const int above_in_image = xd->up_available;
+ const int left_in_image = xd->left_available;
+
+ // Note:
+ // The mode info data structure has a one element border above and to the
+ // left of the entries corresponding to real macroblocks.
+ // The prediction flags in these dummy entries are initialized to 0.
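+  // The backward reference's sign bias selects which ref_frame[] slot holds
+  // the backward reference in a compound block; the forward reference then
+  // occupies the complementary slot.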
+ const int bwd_ref_sign_idx = cm->ref_frame_sign_bias[cm->comp_bwd_ref[0]];
+ const int fwd_ref_sign_idx = !bwd_ref_sign_idx;
+
+ if (above_in_image && left_in_image) { // both edges available
+ const int above_intra = !is_inter_block(above_mbmi);
+ const int left_intra = !is_inter_block(left_mbmi);
+
+ if (above_intra && left_intra) { // intra/intra (2)
+ pred_context = 2;
+ } else if (above_intra || left_intra) { // intra/inter
+ const MB_MODE_INFO *edge_mbmi = above_intra ? left_mbmi : above_mbmi;
+
+ if (!has_second_ref(edge_mbmi)) // single pred (1/3)
+ pred_context = 1 + 2 * (edge_mbmi->ref_frame[0] != cm->comp_fwd_ref[1]);
+ else // comp pred (1/3)
+ pred_context = 1 + 2 * (edge_mbmi->ref_frame[fwd_ref_sign_idx]
+ != cm->comp_fwd_ref[1]);
+ } else { // inter/inter
+ const int l_sg = !has_second_ref(left_mbmi);
+ const int a_sg = !has_second_ref(above_mbmi);
+ const MV_REFERENCE_FRAME frfa = a_sg ?
+ above_mbmi->ref_frame[0] : above_mbmi->ref_frame[fwd_ref_sign_idx];
+ const MV_REFERENCE_FRAME frfl = l_sg ?
+ left_mbmi->ref_frame[0] : left_mbmi->ref_frame[fwd_ref_sign_idx];
+
+ if (frfa == frfl && frfa == cm->comp_fwd_ref[1]) {
+ pred_context = 0;
+ } else if (l_sg && a_sg) { // single/single
+ if ((frfa != frfl) &&
+ (frfa != cm->comp_fwd_ref[1]) && (frfl != cm->comp_fwd_ref[1]))
+ pred_context = 4;
+ else if (frfa == frfl)
+ pred_context = 3;
+ else
+ pred_context = 1;
+ } else if (l_sg || a_sg) { // single/comp
+ const MV_REFERENCE_FRAME frfc = l_sg ? frfa : frfl;
+ const MV_REFERENCE_FRAME rfs = a_sg ? frfa : frfl;
+ if (frfc == cm->comp_fwd_ref[1] && rfs != cm->comp_fwd_ref[1])
+ pred_context = 1;
+ else if (rfs == cm->comp_fwd_ref[1] && frfc != cm->comp_fwd_ref[1])
+ pred_context = 2;
+ else
+ pred_context = 4;
+ } else if (frfa == frfl) { // comp/comp
+ pred_context = 4;
+ } else {
+ pred_context = 2;
+ }
+ }
+ } else if (above_in_image || left_in_image) { // one edge available
+ const MB_MODE_INFO *edge_mbmi = above_in_image ? above_mbmi : left_mbmi;
+
+ if (!is_inter_block(edge_mbmi)) {
+ pred_context = 2;
+ } else {
+ if (has_second_ref(edge_mbmi))
+ pred_context = 4 * (edge_mbmi->ref_frame[fwd_ref_sign_idx]
+ != cm->comp_fwd_ref[1]);
+ else
+ pred_context = 3 * (edge_mbmi->ref_frame[0] != cm->comp_fwd_ref[1]);
+ }
+ } else { // no edges available (2)
+ pred_context = 2;
+ }
+ assert(pred_context >= 0 && pred_context < REF_CONTEXTS);
+
+ return pred_context;
+}
+
+// Returns a context number for the given MB prediction signal
+int vp10_get_pred_context_comp_bwdref_p(const VP10_COMMON *cm,
+ const MACROBLOCKD *xd) {
+ int pred_context;
+ const MB_MODE_INFO *const above_mbmi = xd->above_mbmi;
+ const MB_MODE_INFO *const left_mbmi = xd->left_mbmi;
+ const int above_in_image = xd->up_available;
+ const int left_in_image = xd->left_available;
+
+ // Note:
+ // The mode info data structure has a one element border above and to the
+ // left of the entries corresponding to real macroblocks.
+ // The prediction flags in these dummy entries are initialized to 0.
+ const int bwd_ref_sign_idx = cm->ref_frame_sign_bias[cm->comp_bwd_ref[0]];
+ const int fwd_ref_sign_idx = !bwd_ref_sign_idx;
+
+ if (above_in_image && left_in_image) { // both edges available
+ const int above_intra = !is_inter_block(above_mbmi);
+ const int left_intra = !is_inter_block(left_mbmi);
+
+ if (above_intra && left_intra) { // intra/intra (2)
+ pred_context = 2;
+ } else if (above_intra || left_intra) { // intra/inter
+ const MB_MODE_INFO *edge_mbmi = above_intra ? left_mbmi : above_mbmi;
+
+ if (!has_second_ref(edge_mbmi)) // single pred (1/3)
+ pred_context = 1 + 2 * (edge_mbmi->ref_frame[1] != cm->comp_bwd_ref[1]);
+ else // comp pred (1/3)
+ pred_context = 1 + 2 * (edge_mbmi->ref_frame[bwd_ref_sign_idx]
+ != cm->comp_bwd_ref[1]);
+ } else { // inter/inter
+ const int l_comp = has_second_ref(left_mbmi);
+ const int a_comp = has_second_ref(above_mbmi);
+
+ const MV_REFERENCE_FRAME l_brf = l_comp ?
+ left_mbmi->ref_frame[bwd_ref_sign_idx] : NONE;
+ const MV_REFERENCE_FRAME a_brf = a_comp ?
+ above_mbmi->ref_frame[bwd_ref_sign_idx] : NONE;
+
+ const MV_REFERENCE_FRAME l_frf = !l_comp ?
+ left_mbmi->ref_frame[0] : left_mbmi->ref_frame[fwd_ref_sign_idx];
+ const MV_REFERENCE_FRAME a_frf = !a_comp ?
+ above_mbmi->ref_frame[0] : above_mbmi->ref_frame[fwd_ref_sign_idx];
+
+ if (l_comp && a_comp) {
+ if (l_brf == a_brf && l_brf == cm->comp_bwd_ref[1]) {
+ pred_context = 0;
+ } else if (l_brf == cm->comp_bwd_ref[1] ||
+ a_brf == cm->comp_bwd_ref[1]) {
+ pred_context = 1;
+ } else {
+ // NOTE: Backward ref should be either BWDREF or ALTREF.
+ assert(l_brf == a_brf && l_brf != cm->comp_bwd_ref[1]);
+ pred_context = 3;
+ }
+ } else if (!l_comp && !a_comp) {
+ if (l_frf == a_frf && l_frf == cm->comp_bwd_ref[1]) {
+ pred_context = 0;
+ } else if (l_frf == cm->comp_bwd_ref[1] ||
+ a_frf == cm->comp_bwd_ref[1]) {
+ pred_context = 1;
+ } else if (l_frf == a_frf) {
+ pred_context = 3;
+ } else {
+ assert(l_frf != a_frf &&
+ l_frf != cm->comp_bwd_ref[1] && a_frf != cm->comp_bwd_ref[1]);
+ pred_context = 4;
+ }
+ } else {
+ assert((l_comp && !a_comp) || (!l_comp && a_comp));
+
+ if ((l_comp && l_brf == cm->comp_bwd_ref[1] &&
+ a_frf == cm->comp_bwd_ref[1]) ||
+ (a_comp && a_brf == cm->comp_bwd_ref[1] &&
+ l_frf == cm->comp_bwd_ref[1])) {
+ pred_context = 1;
+ } else if ((l_comp && l_brf == cm->comp_bwd_ref[1]) ||
+ (a_comp && a_brf == cm->comp_bwd_ref[1]) ||
+ (!l_comp && l_frf == cm->comp_bwd_ref[1]) ||
+ (!a_comp && a_frf == cm->comp_bwd_ref[1])) {
+ pred_context = 2;
+ } else {
+ pred_context = 4;
+ }
+ }
+ }
+ } else if (above_in_image || left_in_image) { // one edge available
+ const MB_MODE_INFO *edge_mbmi = above_in_image ? above_mbmi : left_mbmi;
+
+ if (!is_inter_block(edge_mbmi)) {
+ pred_context = 2;
+ } else {
+ if (has_second_ref(edge_mbmi)) {
+ pred_context = 4 * (edge_mbmi->ref_frame[bwd_ref_sign_idx]
+ != cm->comp_bwd_ref[1]);
+ } else {
+ pred_context = 3 * (edge_mbmi->ref_frame[0] != cm->comp_bwd_ref[1]);
+ }
+ }
+ } else { // no edges available (2)
+ pred_context = 2;
+ }
+ assert(pred_context >= 0 && pred_context < REF_CONTEXTS);
+
+ return pred_context;
+}
+
+#else // CONFIG_BIDIR_PRED
+
// Returns a context number for the given MB prediction signal
int vp10_get_pred_context_comp_ref_p(const VP10_COMMON *cm,
const MACROBLOCKD *xd) {
return pred_context;
}
+#endif // CONFIG_BIDIR_PRED
+
#endif // CONFIG_EXT_REFS
#if CONFIG_EXT_REFS
if (rfs == GOLDEN_FRAME)
pred_context = 3 + (crf1 == GOLDEN_FRAME || crf2 == GOLDEN_FRAME);
- else if (rfs == ALTREF_FRAME)
+ else if (rfs != GOLDEN_FRAME && rfs != LAST_FRAME)
pred_context = crf1 == GOLDEN_FRAME || crf2 == GOLDEN_FRAME;
else
pred_context = 1 + 2 * (crf1 == GOLDEN_FRAME || crf2 == GOLDEN_FRAME);
: above0;
pred_context = 4 * (edge0 == GOLDEN_FRAME);
} else {
- pred_context = 2 * (above0 == GOLDEN_FRAME) +
- 2 * (left0 == GOLDEN_FRAME);
+ pred_context =
+ 2 * (above0 == GOLDEN_FRAME) + 2 * (left0 == GOLDEN_FRAME);
}
}
}
return pred_context;
}
+#if CONFIG_BIDIR_PRED
+
+#define CHECK_BWDREF_OR_ALTREF(ref_frame) \
+  (((ref_frame) == BWDREF_FRAME) || ((ref_frame) == ALTREF_FRAME))
+// Context for the bit that signals whether the single reference is
+// ALTREF_FRAME or BWDREF_FRAME, given that it must be one of the two.
+//
+// NOTE(zoeliu): This is the probability that ref_frame[0] is ALTREF_FRAME,
+// conditioned on it being either ALTREF_FRAME or BWDREF_FRAME.
+int vp10_get_pred_context_single_ref_p3(const MACROBLOCKD *xd) {
+ int pred_context;
+ const MB_MODE_INFO *const above_mbmi = xd->above_mbmi;
+ const MB_MODE_INFO *const left_mbmi = xd->left_mbmi;
+ const int has_above = xd->up_available;
+ const int has_left = xd->left_available;
+
+ // Note:
+ // The mode info data structure has a one element border above and to the
+  // left of the entries corresponding to real macroblocks.
+  // The prediction flags in these dummy entries are initialized to 0.
+ if (has_above && has_left) { // both edges available
+ const int above_intra = !is_inter_block(above_mbmi);
+ const int left_intra = !is_inter_block(left_mbmi);
+
+ if (above_intra && left_intra) { // intra/intra
+ pred_context = 2;
+ } else if (above_intra || left_intra) { // intra/inter or inter/intra
+ const MB_MODE_INFO *edge_mbmi = above_intra ? left_mbmi : above_mbmi;
+ if (!has_second_ref(edge_mbmi)) {
+ if (!CHECK_BWDREF_OR_ALTREF(edge_mbmi->ref_frame[0]))
+ pred_context = 3;
+ else
+ pred_context = 4 * (edge_mbmi->ref_frame[0] == BWDREF_FRAME);
+ } else {
+ pred_context = 1 +
+ 2 * (edge_mbmi->ref_frame[0] == BWDREF_FRAME ||
+ edge_mbmi->ref_frame[1] == BWDREF_FRAME);
+ }
+ } else { // inter/inter
+ const int above_has_second = has_second_ref(above_mbmi);
+ const int left_has_second = has_second_ref(left_mbmi);
+ const MV_REFERENCE_FRAME above0 = above_mbmi->ref_frame[0];
+ const MV_REFERENCE_FRAME above1 = above_mbmi->ref_frame[1];
+ const MV_REFERENCE_FRAME left0 = left_mbmi->ref_frame[0];
+ const MV_REFERENCE_FRAME left1 = left_mbmi->ref_frame[1];
+
+ if (above_has_second && left_has_second) {
+ if (above0 == left0 && above1 == left1)
+ pred_context =
+ 3 * (above0 == BWDREF_FRAME || above1 == BWDREF_FRAME ||
+ left0 == BWDREF_FRAME || left1 == BWDREF_FRAME);
+ else
+ pred_context = 2;
+ } else if (above_has_second || left_has_second) {
+ const MV_REFERENCE_FRAME srf = !above_has_second ? above0 : left0;
+ const MV_REFERENCE_FRAME crf0 = above_has_second ? above0 : left0;
+ const MV_REFERENCE_FRAME crf1 = above_has_second ? above1 : left1;
+
+ if (srf == BWDREF_FRAME)
+          pred_context = 3 + (crf0 == BWDREF_FRAME || crf1 == BWDREF_FRAME);
+ else if (srf == ALTREF_FRAME)
+ pred_context = (crf0 == BWDREF_FRAME || crf1 == BWDREF_FRAME);
+ else
+ pred_context = 1 + 2 * (crf0 == BWDREF_FRAME || crf1 == BWDREF_FRAME);
+ } else {
+ if (!CHECK_BWDREF_OR_ALTREF(above0) &&
+ !CHECK_BWDREF_OR_ALTREF(left0)) {
+ pred_context = 2 + (above0 == left0);
+ } else if (!CHECK_BWDREF_OR_ALTREF(above0) ||
+ !CHECK_BWDREF_OR_ALTREF(left0)) {
+ const MV_REFERENCE_FRAME edge0 =
+ !CHECK_BWDREF_OR_ALTREF(above0) ? left0 : above0;
+ pred_context = 4 * (edge0 == BWDREF_FRAME);
+ } else {
+ pred_context =
+ 2 * (above0 == BWDREF_FRAME) + 2 * (left0 == BWDREF_FRAME);
+ }
+ }
+ }
+ } else if (has_above || has_left) { // one edge available
+ const MB_MODE_INFO *edge_mbmi = has_above ? above_mbmi : left_mbmi;
+
+ if (!is_inter_block(edge_mbmi) ||
+ (!CHECK_BWDREF_OR_ALTREF(edge_mbmi->ref_frame[0]) &&
+ !has_second_ref(edge_mbmi)))
+ pred_context = 2;
+ else if (!has_second_ref(edge_mbmi))
+ pred_context = 4 * (edge_mbmi->ref_frame[0] == BWDREF_FRAME);
+ else
+ pred_context = 3 * (edge_mbmi->ref_frame[0] == BWDREF_FRAME ||
+ edge_mbmi->ref_frame[1] == BWDREF_FRAME);
+ } else { // no edges available (2)
+ pred_context = 2;
+ }
+
+ assert(pred_context >= 0 && pred_context < REF_CONTEXTS);
+ return pred_context;
+}
+
+#endif // CONFIG_BIDIR_PRED
+
#endif // CONFIG_EXT_REFS
const MACROBLOCKD *xd);
static INLINE vpx_prob vp10_get_reference_mode_prob(const VP10_COMMON *cm,
- const MACROBLOCKD *xd) {
+ const MACROBLOCKD *xd) {
return cm->fc->comp_inter_prob[vp10_get_reference_mode_context(cm, xd)];
}
const int pred_context = vp10_get_pred_context_comp_ref_p3(cm, xd);
return cm->fc->comp_ref_prob[pred_context][3];
}
+
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
+int vp10_get_pred_context_comp_bwdref_p(const VP10_COMMON *cm,
+ const MACROBLOCKD *xd);
+
+static INLINE vpx_prob vp10_get_pred_prob_comp_bwdref_p(const VP10_COMMON *cm,
+ const MACROBLOCKD *xd) {
+ const int pred_context = vp10_get_pred_context_comp_bwdref_p(cm, xd);
+ return cm->fc->comp_bwdref_prob[pred_context][0];
+}
+#endif // CONFIG_BIDIR_PRED
+
#endif // CONFIG_EXT_REFS
int vp10_get_pred_context_single_ref_p1(const MACROBLOCKD *xd);
return cm->fc->single_ref_prob[vp10_get_pred_context_single_ref_p2(xd)][1];
}
-#if CONFIG_EXT_REFS
+#if CONFIG_EXT_REFS || CONFIG_BIDIR_PRED
int vp10_get_pred_context_single_ref_p3(const MACROBLOCKD *xd);
static INLINE vpx_prob vp10_get_pred_prob_single_ref_p3(const VP10_COMMON *cm,
const MACROBLOCKD *xd) {
return cm->fc->single_ref_prob[vp10_get_pred_context_single_ref_p3(xd)][2];
}
+#endif  // CONFIG_EXT_REFS || CONFIG_BIDIR_PRED
+#if CONFIG_EXT_REFS
int vp10_get_pred_context_single_ref_p4(const MACROBLOCKD *xd);
static INLINE vpx_prob vp10_get_pred_prob_single_ref_p4(const VP10_COMMON *cm,
}
static void setup_compound_reference_mode(VP10_COMMON *cm) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
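+  // Under BIDIR_PRED the compound candidates are fixed rather than derived
+  // from the sign biases: LAST and GOLDEN serve as the forward references,
+  // BWDREF and ALTREF as the backward references.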
+ cm->comp_fwd_ref[0] = LAST_FRAME;
+ cm->comp_fwd_ref[1] = GOLDEN_FRAME;
+ cm->comp_bwd_ref[0] = BWDREF_FRAME;
+ cm->comp_bwd_ref[1] = ALTREF_FRAME;
+
+#else // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (cm->ref_frame_sign_bias[LAST_FRAME] ==
cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
cm->comp_fixed_ref = ALTREF_FRAME;
cm->comp_var_ref[2] = LAST3_FRAME;
cm->comp_var_ref[3] = LAST4_FRAME;
cm->comp_var_ref[4] = GOLDEN_FRAME;
-#else
+#else // CONFIG_EXT_REFS
cm->comp_var_ref[1] = GOLDEN_FRAME;
#endif // CONFIG_EXT_REFS
} else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
cm->comp_var_ref[0] = GOLDEN_FRAME;
cm->comp_var_ref[1] = ALTREF_FRAME;
}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
if (cm->reference_mode != SINGLE_REFERENCE) {
for (i = 0; i < REF_CONTEXTS; ++i) {
- for (j = 0; j < (COMP_REFS - 1); ++j) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ for (j = 0; j < (FWD_REFS - 1); ++j)
vp10_diff_update_prob(r, &fc->comp_ref_prob[i][j]);
- }
+ for (j = 0; j < (BWD_REFS - 1); ++j)
+ vp10_diff_update_prob(r, &fc->comp_bwdref_prob[i][j]);
+#else
+ for (j = 0; j < (COMP_REFS - 1); ++j)
+ vp10_diff_update_prob(r, &fc->comp_ref_prob[i][j]);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
}
}
cm->last_frame_type = cm->frame_type;
cm->last_intra_only = cm->intra_only;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  // NOTE: By default, every coded frame is treated as a reference frame.
+ cm->is_reference_frame = 1;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (vpx_rb_read_literal(rb, 2) != VP9_FRAME_MARKER)
vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
"Invalid frame marker");
#endif
cm->show_existing_frame = vpx_rb_read_bit(rb);
+
if (cm->show_existing_frame) {
// Show an existing frame directly.
const int frame_to_show = cm->ref_frame_map[vpx_rb_read_literal(rb, 3)];
+
lock_buffer_pool(pool);
if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
unlock_buffer_pool(pool);
"Buffer %d does not contain a decoded frame",
frame_to_show);
}
-
ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
unlock_buffer_pool(pool);
- pbi->refresh_frame_flags = 0;
+
cm->lf.filter_level = 0;
cm->show_frame = 1;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ // NOTE(zoeliu): The existing frame to show is adopted as a reference frame.
+ pbi->refresh_frame_flags = vpx_rb_read_literal(rb, REF_FRAMES);
+
+ for (i = 0; i < REFS_PER_FRAME; ++i) {
+ const int ref = vpx_rb_read_literal(rb, REF_FRAMES_LOG2);
+ const int idx = cm->ref_frame_map[ref];
+ RefBuffer *const ref_frame = &cm->frame_refs[i];
+ ref_frame->idx = idx;
+ ref_frame->buf = &frame_bufs[idx].buf;
+ cm->ref_frame_sign_bias[LAST_FRAME + i] = vpx_rb_read_bit(rb);
+ }
+
+ for (i = 0; i < REFS_PER_FRAME; ++i) {
+ RefBuffer *const ref_buf = &cm->frame_refs[i];
+#if CONFIG_VP9_HIGHBITDEPTH
+ vp10_setup_scale_factors_for_frame(&ref_buf->sf,
+ ref_buf->buf->y_crop_width,
+ ref_buf->buf->y_crop_height,
+ cm->width, cm->height,
+ cm->use_highbitdepth);
+#else // CONFIG_VP9_HIGHBITDEPTH
+ vp10_setup_scale_factors_for_frame(&ref_buf->sf,
+ ref_buf->buf->y_crop_width,
+ ref_buf->buf->y_crop_height,
+ cm->width, cm->height);
+#endif // CONFIG_VP9_HIGHBITDEPTH
+ }
+
+ // Generate next_ref_frame_map.
+ lock_buffer_pool(pool);
+ for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
+ if (mask & 1) {
+ cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
+ ++frame_bufs[cm->new_fb_idx].ref_count;
+ } else {
+ cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
+ }
+ // Current thread holds the reference frame.
+ if (cm->ref_frame_map[ref_index] >= 0)
+ ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
+ ++ref_index;
+ }
+
+ for (; ref_index < REF_FRAMES; ++ref_index) {
+ cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
+ // Current thread holds the reference frame.
+ if (cm->ref_frame_map[ref_index] >= 0)
+ ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
+ }
+ unlock_buffer_pool(pool);
+ pbi->hold_ref_buf = 1;
+#else
+ pbi->refresh_frame_flags = 0;
if (cm->frame_parallel_decode) {
for (i = 0; i < REF_FRAMES; ++i)
cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
return 0;
}
}
} else if (pbi->need_resync != 1) { /* Skip if need resync */
pbi->refresh_frame_flags = vpx_rb_read_literal(rb, REF_FRAMES);
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (!pbi->refresh_frame_flags) {
+ // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
+ // will not be used as a reference
+ cm->is_reference_frame = 0;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
for (i = 0; i < REFS_PER_FRAME; ++i) {
const int ref = vpx_rb_read_literal(rb, REF_FRAMES_LOG2);
const int idx = cm->ref_frame_map[ref];
if (cm->reference_mode != SINGLE_REFERENCE)
setup_compound_reference_mode(cm);
+
read_frame_reference_mode_probs(cm, &r);
for (j = 0; j < BLOCK_SIZE_GROUPS; j++)
sizeof(cm->counts.comp_ref)));
assert(!memcmp(&cm->counts.tx_size, &zero_counts.tx_size,
sizeof(cm->counts.tx_size)));
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
+ sizeof(cm->counts.comp_bwdref)));
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
#if CONFIG_REF_MV
assert(!memcmp(&cm->counts.mv[0], &zero_counts.mv[0],
if (!first_partition_size) {
// showing a frame directly
- *p_data_end = data + (cm->profile <= PROFILE_2 ? 1 : 2);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cm->show_existing_frame)
+ *p_data_end = data + vpx_rb_bytes_read(&rb);
+ else
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ *p_data_end = data + (cm->profile <= PROFILE_2 ? 1 : 2);
+
return;
}
const REFERENCE_MODE mode = read_block_reference_mode(cm, xd, r);
// FIXME(rbultje) I'm pretty sure this breaks segmentation ref frame coding
if (mode == COMPOUND_REFERENCE) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ const int idx = cm->ref_frame_sign_bias[cm->comp_bwd_ref[0]];
+#else
const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref];
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
const int ctx = vp10_get_pred_context_comp_ref_p(cm, xd);
const int bit = vp10_read(r, fc->comp_ref_prob[ctx][0]);
if (counts)
++counts->comp_ref[ctx][0][bit];
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
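+      // Compound decode under BIDIR_PRED: the comp_ref bit read above picks
+      // the forward reference (LAST vs GOLDEN), and a second bit coded with
+      // comp_bwdref_prob picks the backward reference (BWDREF vs ALTREF).
+      // |idx| (the backward reference's sign bias) routes each one into its
+      // ref_frame[] slot.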
+ ref_frame[!idx] = cm->comp_fwd_ref[bit];
+ {
+ const int ctx1 = vp10_get_pred_context_comp_bwdref_p(cm, xd);
+        const int bit1 = vp10_read(r, fc->comp_bwdref_prob[ctx1][0]);
+ if (counts)
+ ++counts->comp_bwdref[ctx1][0][bit1];
+ ref_frame[idx] = cm->comp_bwd_ref[bit1];
+ }
+
+#else // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
ref_frame[idx] = cm->comp_fixed_ref;
#if CONFIG_EXT_REFS
ref_frame[!idx] = cm->comp_var_ref[4];
}
}
-#else
+#else // CONFIG_EXT_REFS
ref_frame[!idx] = cm->comp_var_ref[bit];
#endif // CONFIG_EXT_REFS
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
} else if (mode == SINGLE_REFERENCE) {
#if CONFIG_EXT_REFS
const int ctx0 = vp10_get_pred_context_single_ref_p1(xd);
ref_frame[0] = bit3 ? LAST2_FRAME : LAST_FRAME;
}
}
-#else
+#else // CONFIG_EXT_REFS
const int ctx0 = vp10_get_pred_context_single_ref_p1(xd);
const int bit0 = vp10_read(r, fc->single_ref_prob[ctx0][0]);
if (counts)
const int bit1 = vp10_read(r, fc->single_ref_prob[ctx1][1]);
if (counts)
++counts->single_ref[ctx1][1][bit1];
+#if CONFIG_BIDIR_PRED
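+      // Single-reference tree under BIDIR_PRED: bit0 separates LAST from the
+      // rest, bit1 separates GOLDEN from {BWDREF, ALTREF}, and bit2 picks
+      // between BWDREF and ALTREF.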
+ if (bit1) {
+ const int ctx2 = vp10_get_pred_context_single_ref_p3(xd);
+        const int bit2 = vp10_read(r, fc->single_ref_prob[ctx2][2]);
+ if (counts)
+ ++counts->single_ref[ctx2][2][bit2];
+ ref_frame[0] = bit2 ? ALTREF_FRAME : BWDREF_FRAME;
+ } else {
+ ref_frame[0] = GOLDEN_FRAME;
+ }
+#else // CONFIG_BIDIR_PRED
ref_frame[0] = bit1 ? ALTREF_FRAME : GOLDEN_FRAME;
+#endif // CONFIG_BIDIR_PRED
} else {
ref_frame[0] = LAST_FRAME;
}
// #else // CONFIG_EXT_REFS
// cpi->gld_fb_idx = 1;
// cpi->alt_fb_idx = 2;
+
+  // TODO(zoeliu): To revisit the following code and reconsider what
+  // assumptions we may make about the reference frame buffer virtual indexes.
if (ref_frame_flag == VP9_LAST_FLAG) {
idx = cm->ref_frame_map[0];
#if CONFIG_EXT_REFS
idx = cm->ref_frame_map[4];
} else if (ref_frame_flag == VP9_ALT_FLAG) {
idx = cm->ref_frame_map[5];
-#else
+#else // CONFIG_EXT_REFS
} else if (ref_frame_flag == VP9_GOLD_FLAG) {
idx = cm->ref_frame_map[1];
+#if CONFIG_BIDIR_PRED
+ } else if (ref_frame_flag == VP9_BWD_FLAG) {
+ idx = cm->ref_frame_map[2];
+ } else if (ref_frame_flag == VP9_ALT_FLAG) {
+ idx = cm->ref_frame_map[3];
+#else // CONFIG_BIDIR_PRED
} else if (ref_frame_flag == VP9_ALT_FLAG) {
idx = cm->ref_frame_map[2];
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
} else {
vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
}
// Current thread releases the holding of reference frame.
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ for (; ref_index < REF_FRAMES; ++ref_index) {
+ const int old_idx = cm->ref_frame_map[ref_index];
+ decrease_ref_count(old_idx, frame_bufs, pool);
+ cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
+ }
+#else
for (; ref_index < REF_FRAMES && !cm->show_existing_frame; ++ref_index) {
const int old_idx = cm->ref_frame_map[ref_index];
decrease_ref_count(old_idx, frame_bufs, pool);
cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
unlock_buffer_pool(pool);
pbi->hold_ref_buf = 0;
cm->frame_to_show = get_frame_new_buffer(cm);
+ // TODO(zoeliu): To fix the ref frame buffer update for the scenario of
+  //               cm->frame_parallel_decode == 1
if (!cm->frame_parallel_decode || !cm->show_frame) {
lock_buffer_pool(pool);
--frame_bufs[cm->new_fb_idx].ref_count;
}
// Invalidate these references until the next frame starts.
- for (ref_index = 0; ref_index < REFS_PER_FRAME; ref_index++)
- cm->frame_refs[ref_index].idx = -1;
+ for (ref_index = 0; ref_index < REFS_PER_FRAME; ref_index++) {
+ cm->frame_refs[ref_index].idx = INVALID_IDX;
+ cm->frame_refs[ref_index].buf = NULL;
+ }
}
int vp10_receive_compressed_data(VP10Decoder *pbi,
pbi->ready_for_new_data = 0;
+ // Find a free buffer for the new frame, releasing the reference previously
+ // held.
+
// Check if the previous frame was a frame without any references to it.
// Release frame buffer if not decoding in frame parallel mode.
if (!cm->frame_parallel_decode && cm->new_fb_idx >= 0
&& frame_bufs[cm->new_fb_idx].ref_count == 0)
pool->release_fb_cb(pool->cb_priv,
&frame_bufs[cm->new_fb_idx].raw_frame_buffer);
+
// Find a free frame buffer. Return error if can not find any.
cm->new_fb_idx = get_free_fb(cm);
if (cm->new_fb_idx == INVALID_IDX)
}
// Current thread releases the holding of reference frame.
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ for (; ref_index < REF_FRAMES; ++ref_index) {
+ const int old_idx = cm->ref_frame_map[ref_index];
+ decrease_ref_count(old_idx, frame_bufs, pool);
+ }
+#else
for (; ref_index < REF_FRAMES && !cm->show_existing_frame; ++ref_index) {
const int old_idx = cm->ref_frame_map[ref_index];
decrease_ref_count(old_idx, frame_bufs, pool);
}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
pbi->hold_ref_buf = 0;
}
// Release current frame.
if (!cm->show_existing_frame) {
cm->last_show_frame = cm->show_frame;
- cm->prev_frame = cm->cur_frame;
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+    // NOTE: prev_frame should not refer to a frame that is not used as a
+    // reference.
+ if (cm->is_reference_frame)
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cm->prev_frame = cm->cur_frame;
+
if (cm->seg.enabled && !cm->frame_parallel_decode)
vp10_swap_current_and_last_seg_map(cm);
}
#if CONFIG_VP9_HIGHBITDEPTH
dst_cm->use_highbitdepth = src_cm->use_highbitdepth;
#endif
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ // TODO(zoeliu): To handle parallel decoding
+ assert(0);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
dst_cm->prev_frame = src_cm->show_existing_frame ?
src_cm->prev_frame : src_cm->cur_frame;
dst_cm->last_width = !src_cm->show_existing_frame ?
const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
mbmi->ref_frame[0] == LAST3_FRAME ||
mbmi->ref_frame[0] == LAST4_FRAME);
-#else
+#else // CONFIG_EXT_REFS
const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME;
+#if CONFIG_BIDIR_PRED
+ const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
vp10_write(w, bit, vp10_get_pred_prob_comp_ref_p(cm, xd));
vp10_write(w, bit3, vp10_get_pred_prob_comp_ref_p3(cm, xd));
}
}
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+      vp10_write(w, bit_bwd, vp10_get_pred_prob_comp_bwdref_p(cm, xd));
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
} else {
#if CONFIG_EXT_REFS
vp10_write(w, bit4, vp10_get_pred_prob_single_ref_p5(cm, xd));
}
}
-#else
+#else // CONFIG_EXT_REFS
const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
vp10_write(w, bit0, vp10_get_pred_prob_single_ref_p1(cm, xd));
if (bit0) {
const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
vp10_write(w, bit1, vp10_get_pred_prob_single_ref_p2(cm, xd));
+#if CONFIG_BIDIR_PRED
+ if (bit1) {
+ const int bit2 = mbmi->ref_frame[0] != BWDREF_FRAME;
+ vp10_write(w, bit2, vp10_get_pred_prob_single_ref_p3(cm, xd));
+ }
+#endif // CONFIG_BIDIR_PRED
}
#endif // CONFIG_EXT_REFS
}
// up if they are scaled. vp10_is_interp_needed is in turn needed by
// write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
-#endif // CONFIG_EXT_INTER
+#endif // CONFIG_EXT_INTERP
+#if 0
+ // NOTE(zoeliu): For debug
+ if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
+ const PREDICTION_MODE mode = m->mbmi.mode;
+ const int segment_id = m->mbmi.segment_id;
+ const BLOCK_SIZE bsize = m->mbmi.sb_type;
+
+ // For sub8x8, simply dump out the first sub8x8 block info
+ const PREDICTION_MODE b_mode =
+ (bsize < BLOCK_8X8) ? m->bmi[0].as_mode : -1;
+    const int mv_x = (bsize < BLOCK_8X8) ?
+        m->bmi[0].as_mv[0].as_mv.col : m->mbmi.mv[0].as_mv.col;
+    const int mv_y = (bsize < BLOCK_8X8) ?
+        m->bmi[0].as_mv[0].as_mv.row : m->mbmi.mv[0].as_mv.row;
+
+ printf("Before pack_inter_mode_mvs(): "
+ "Frame=%d, (mi_row,mi_col)=(%d,%d), "
+ "mode=%d, segment_id=%d, bsize=%d, b_mode=%d, "
+ "mv[0]=(%d, %d), ref[0]=%d, ref[1]=%d\n",
+ cm->current_video_frame, mi_row, mi_col,
+ mode, segment_id, bsize, b_mode, mv_x, mv_y,
+ m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
+ }
+#endif // 0
pack_inter_mode_mvs(cpi, m,
#if CONFIG_SUPERTX
supertx_enabled,
// LAST4_FRAME.
refresh_mask |= (cpi->refresh_last_frame <<
cpi->lst_fb_idxes[LAST4_FRAME - LAST_FRAME]);
-#else
+#else // CONFIG_EXT_REFS
refresh_mask |= (cpi->refresh_last_frame << cpi->lst_fb_idx);
+
+#if CONFIG_BIDIR_PRED
+ refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
if (vp10_preserve_existing_gf(cpi)) {
write_profile(cm->profile, wb);
- vpx_wb_write_bit(wb, 0); // show_existing_frame
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  // NOTE: By default, every coded frame is treated as a reference frame.
+ cm->is_reference_frame = 1;
+
+ if (cm->show_existing_frame) {
+ RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
+ const int frame_to_show =
+ cm->ref_frame_map[cpi->existing_fb_idx_to_show];
+
+ if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
+ vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
+ "Buffer %d does not contain a reconstructed frame",
+ frame_to_show);
+ }
+ ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
+
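+    // Header layout when directly showing an existing frame: the
+    // show_existing_frame bit, the 3-bit buffer index to show, the
+    // REF_FRAMES-bit refresh mask, then for each active reference its
+    // frame-map index and sign-bias bit, mirroring the decoder-side parse.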
+ vpx_wb_write_bit(wb, 1); // show_existing_frame
+ vpx_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
+
+ cpi->refresh_frame_mask = get_refresh_mask(cpi);
+ vpx_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
+ {
+ MV_REFERENCE_FRAME ref_frame;
+ for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
+ assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
+ vpx_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
+ REF_FRAMES_LOG2);
+ // TODO(zoeliu): To further explore whether sign bias bits are needed.
+ vpx_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
+ }
+ }
+
+ return;
+ } else {
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ vpx_wb_write_bit(wb, 0); // show_existing_frame
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
vpx_wb_write_bit(wb, cm->frame_type);
vpx_wb_write_bit(wb, cm->show_frame);
vpx_wb_write_bit(wb, cm->error_resilient_mode);
}
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_frame_mask = get_refresh_mask(cpi);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (cm->intra_only) {
write_sync_code(wb);
write_bitdepth_colorspace_sampling(cm, wb);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ vpx_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
+#else
vpx_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
write_frame_size(cm, wb);
} else {
MV_REFERENCE_FRAME ref_frame;
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ vpx_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
+#else
vpx_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (!cpi->refresh_frame_mask) {
+ // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
+ // will not be used as a reference
+ cm->is_reference_frame = 0;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
vpx_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
cm->tx_mode = TX_4X4;
else
write_txfm_mode(cm->tx_mode, wb);
+
if (cpi->allow_comp_inter_inter) {
const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;
if (cm->reference_mode != SINGLE_REFERENCE) {
for (i = 0; i < REF_CONTEXTS; i++) {
- for (j = 0; j < (COMP_REFS - 1); j ++) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ for (j = 0; j < (FWD_REFS - 1); j++) {
+ vp10_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
+ counts->comp_ref[i][j]);
+ }
+ for (j = 0; j < (BWD_REFS - 1); j++) {
+ vp10_cond_prob_diff_update(header_bc, &fc->comp_bwdref_prob[i][j],
+ counts->comp_bwdref[i][j]);
+ }
+#else // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ for (j = 0; j < (COMP_REFS - 1); j++) {
vp10_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
counts->comp_ref[i][j]);
}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
}
// Write the uncompressed header
write_uncompressed_header(cpi, &wb);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cm->show_existing_frame) {
+ *size = vpx_wb_bytes_written(&wb);
+ return;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
// We do not know these in advance. Output placeholder bit.
saved_wb = wb;
// Write tile size magnitudes
YV12_BUFFER_CONFIG src,
FRAME_TYPE frame_type,
int refresh_last_frame,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ int refresh_bwd_ref_frame,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
int refresh_alt_ref_frame,
int refresh_golden_frame) {
if (frame_type == KEY_FRAME) {
swap_frame_buffer(&denoiser->running_avg_y[LAST_FRAME],
&denoiser->running_avg_y[INTRA_FRAME]);
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (refresh_bwd_ref_frame) {
+ swap_frame_buffer(&denoiser->running_avg_y[BWDREF_FRAME],
+ &denoiser->running_avg_y[INTRA_FRAME]);
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
void vp10_denoiser_reset_frame_stats(PICK_MODE_CONTEXT *ctx) {
YV12_BUFFER_CONFIG src,
FRAME_TYPE frame_type,
int refresh_last_frame,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ int refresh_bwd_ref_frame,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
int refresh_alt_ref_frame,
int refresh_golden_frame);
// the reference frame counts used to work out probabilities.
if (inter_block) {
const MV_REFERENCE_FRAME ref0 = mbmi->ref_frame[0];
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ const MV_REFERENCE_FRAME ref1 = mbmi->ref_frame[1];
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
if (cm->reference_mode == REFERENCE_MODE_SELECT)
counts->comp_inter[vp10_get_reference_mode_context(cm, xd)]
[has_second_ref(mbmi)]++;
[ref0 == LAST3_FRAME]++;
}
}
-#else
+#else // CONFIG_EXT_REFS
counts->comp_ref[vp10_get_pred_context_comp_ref_p(cm, xd)][0]
[ref0 == GOLDEN_FRAME]++;
+#if CONFIG_BIDIR_PRED
+ counts->comp_bwdref[vp10_get_pred_context_comp_bwdref_p(cm, xd)][0]
+ [ref1 == ALTREF_FRAME]++;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
} else {
#if CONFIG_EXT_REFS
[ref0 != LAST3_FRAME]++;
}
}
-#else
+#else // CONFIG_EXT_REFS
counts->single_ref[vp10_get_pred_context_single_ref_p1(xd)][0]
[ref0 != LAST_FRAME]++;
- if (ref0 != LAST_FRAME)
+ if (ref0 != LAST_FRAME) {
counts->single_ref[vp10_get_pred_context_single_ref_p2(xd)][1]
[ref0 != GOLDEN_FRAME]++;
+#if CONFIG_BIDIR_PRED
+ if (ref0 != GOLDEN_FRAME) {
+ counts->single_ref[vp10_get_pred_context_single_ref_p3(xd)][2]
+ [ref0 != BWDREF_FRAME]++;
+ }
+#endif // CONFIG_BIDIR_PRED
+ }
#endif // CONFIG_EXT_REFS
}
!!(ref_flags & VP9_LAST2_FLAG) +
!!(ref_flags & VP9_LAST3_FLAG) +
!!(ref_flags & VP9_LAST4_FLAG) +
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ !!(ref_flags & VP9_BWD_FLAG) +
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
!!(ref_flags & VP9_ALT_FLAG)) >= 2;
}
else if (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)
return GOLDEN_FRAME;
else
- // TODO(zoeliu): TO investigate whether a frame_type other than
+ // TODO(zoeliu): To investigate whether a frame_type other than
// INTRA/ALTREF/GOLDEN/LAST needs to be specified seperately.
return LAST_FRAME;
}
cm->height == cm->last_height &&
!cm->intra_only &&
cm->last_show_frame;
+
// Special case: set prev_mi to NULL when the previous mode info
// context cannot be used.
cm->prev_mi = cm->use_prev_frame_mvs ?
cpi->allow_comp_inter_inter = 0;
} else {
cpi->allow_comp_inter_inter = 1;
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cm->comp_fwd_ref[0] = LAST_FRAME;
+ cm->comp_fwd_ref[1] = GOLDEN_FRAME;
+ cm->comp_bwd_ref[0] = BWDREF_FRAME;
+ cm->comp_bwd_ref[1] = ALTREF_FRAME;
+#else // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cm->comp_fixed_ref = ALTREF_FRAME;
cm->comp_var_ref[0] = LAST_FRAME;
#if CONFIG_EXT_REFS
cm->comp_var_ref[2] = LAST3_FRAME;
cm->comp_var_ref[3] = LAST4_FRAME;
cm->comp_var_ref[4] = GOLDEN_FRAME;
-#else
+#else // CONFIG_EXT_REFS
cm->comp_var_ref[1] = GOLDEN_FRAME;
#endif // CONFIG_EXT_REFS
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
} else {
cpi->allow_comp_inter_inter = 0;
// that for subsequent frames.
// It does the same analysis for transform size selection also.
//
- // TODO(zoeliu): TO investigate whether a frame_type other than
+ // TODO(zoeliu): To investigate whether a frame_type other than
// INTRA/ALTREF/GOLDEN/LAST needs to be specified seperately.
const MV_REFERENCE_FRAME frame_type = get_frame_type(cpi);
int64_t *const mode_thrs = rd_opt->prediction_type_threshes[frame_type];
cpi->lookahead = vp10_lookahead_init(oxcf->width, oxcf->height,
cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
- cm->use_highbitdepth,
+ cm->use_highbitdepth,
#endif
- oxcf->lag_in_frames);
+ oxcf->lag_in_frames);
if (!cpi->lookahead)
vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
"Failed to allocate lag buffers");
cpi->lst_fb_idxes[fb_idx] = fb_idx;
cpi->gld_fb_idx = LAST_REF_FRAMES;
cpi->alt_fb_idx = cpi->gld_fb_idx + 1;
-#else
+#else // CONFIG_EXT_REFS
cpi->lst_fb_idx = 0;
cpi->gld_fb_idx = 1;
+#if CONFIG_BIDIR_PRED
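+  // Virtual reference buffer indexes under BIDIR_PRED:
+  // 0 = LAST, 1 = GOLDEN, 2 = BWDREF, 3 = ALTREF.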
+ cpi->bwd_fb_idx = 2;
+ cpi->alt_fb_idx = 3;
+#else // CONFIG_BIDIR_PRED
cpi->alt_fb_idx = 2;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
}
cpi->refresh_golden_frame = 0;
cpi->refresh_last_frame = 1;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cm->refresh_frame_context =
(oxcf->error_resilient_mode || oxcf->frame_parallel_decoding_mode) ?
cpi->alt_ref_source = NULL;
rc->is_src_frame_alt_ref = 0;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ rc->is_bwd_ref_frame = 0;
+ rc->is_last_nonref_frame = 0;
+ rc->is_nonref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
#if 0
// Experimental RD Code
cpi->frame_distortion = 0;
return cpi;
}
+
#define SNPRINT(H, T) \
snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T))
cpi->ext_refresh_frame_flags_pending = 1;
}
-static YV12_BUFFER_CONFIG *get_vp10_ref_frame_buffer(VP10_COMP *cpi,
- VP9_REFFRAME ref_frame_flag) {
+static YV12_BUFFER_CONFIG *get_vp10_ref_frame_buffer(
+ VP10_COMP *cpi, VP9_REFFRAME ref_frame_flag) {
MV_REFERENCE_FRAME ref_frame = NONE;
if (ref_frame_flag == VP9_LAST_FLAG)
ref_frame = LAST_FRAME;
#endif // CONFIG_EXT_REFS
else if (ref_frame_flag == VP9_GOLD_FLAG)
ref_frame = GOLDEN_FRAME;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ else if (ref_frame_flag == VP9_BWD_FLAG)
+ ref_frame = BWDREF_FRAME;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
else if (ref_frame_flag == VP9_ALT_FLAG)
ref_frame = ALTREF_FRAME;
if (cm->show_frame)
cpi->last_show_frame_buf_idx = cm->new_fb_idx;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ // TODO(zoeliu): To remove the reference buffer update for the
+ // show_existing_frame==1 case.
+#if 0
+ if (cpi->rc.is_last_nonref_frame) {
+ // NOTE: After the encoding of the LAST_NONREF_FRAME, the flag of
+ // show_existing_frame will be set, to notify the decoder to show the
+ // coded BWDREF_FRAME. During the handling of the show_existing_frame,
+ // no update will be conducted on the reference frame buffer.
+ // Following is to get the BWDREF_FRAME to show to be taken as the
+ // LAST_FRAME, preparing for the encoding of the next BWDREF_FRAME.
+ cpi->lst_fb_idx = cpi->bwd_fb_idx;
+ return;
+ }
+#endif // 0
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (use_upsampled_ref) {
- // Up-sample the current encoded frame.
- RefCntBuffer *bufs = pool->frame_bufs;
- const YV12_BUFFER_CONFIG *const ref = &bufs[cm->new_fb_idx].buf;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cm->show_existing_frame) {
+ new_uidx = cpi->upsampled_ref_idx[cpi->existing_fb_idx_to_show];
+ // TODO(zoeliu): Once following is confirmed, remove it.
+ assert(cpi->upsampled_ref_bufs[new_uidx].ref_count > 0);
+ } else {
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ // Up-sample the current encoded frame.
+ RefCntBuffer *bufs = pool->frame_bufs;
+ const YV12_BUFFER_CONFIG *const ref = &bufs[cm->new_fb_idx].buf;
- new_uidx = upsample_ref_frame(cpi, ref);
+ new_uidx = upsample_ref_frame(cpi, ref);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
// At this point the new frame has been encoded.
if (cm->frame_type == KEY_FRAME) {
ref_cnt_fb(pool->frame_bufs,
&cm->ref_frame_map[cpi->gld_fb_idx], cm->new_fb_idx);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ ref_cnt_fb(pool->frame_bufs,
+ &cm->ref_frame_map[cpi->bwd_fb_idx], cm->new_fb_idx);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
ref_cnt_fb(pool->frame_bufs,
&cm->ref_frame_map[cpi->alt_fb_idx], cm->new_fb_idx);
if (use_upsampled_ref) {
uref_cnt_fb(cpi->upsampled_ref_bufs,
&cpi->upsampled_ref_idx[cpi->gld_fb_idx], new_uidx);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ uref_cnt_fb(cpi->upsampled_ref_bufs,
+ &cpi->upsampled_ref_idx[cpi->bwd_fb_idx], new_uidx);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
uref_cnt_fb(cpi->upsampled_ref_bufs,
&cpi->upsampled_ref_idx[cpi->alt_fb_idx], new_uidx);
}
tmp = cpi->alt_fb_idx;
cpi->alt_fb_idx = cpi->gld_fb_idx;
cpi->gld_fb_idx = tmp;
+
+ // TODO(zoeliu): Do we need to copy cpi->interp_filter_selected[0] over to
+ // cpi->interp_filter_selected[GOLDEN_FRAME]?
} else { /* For non key/golden frames */
if (cpi->refresh_alt_ref_frame) {
int arf_idx = cpi->alt_fb_idx;
cpi->interp_filter_selected[ALTREF_FRAME],
sizeof(cpi->interp_filter_selected[ALTREF_FRAME]));
}
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cpi->refresh_bwd_ref_frame) {
+ ref_cnt_fb(pool->frame_bufs,
+ &cm->ref_frame_map[cpi->bwd_fb_idx], cm->new_fb_idx);
+ if (use_upsampled_ref)
+ uref_cnt_fb(cpi->upsampled_ref_bufs,
+ &cpi->upsampled_ref_idx[cpi->bwd_fb_idx], new_uidx);
+
+ memcpy(cpi->interp_filter_selected[BWDREF_FRAME],
+ cpi->interp_filter_selected[0],
+ sizeof(cpi->interp_filter_selected[0]));
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
if (cpi->refresh_last_frame) {
sizeof(cpi->interp_filter_selected[0]));
}
#else // CONFIG_EXT_REFS
- ref_cnt_fb(pool->frame_bufs,
- &cm->ref_frame_map[cpi->lst_fb_idx], cm->new_fb_idx);
- if (use_upsampled_ref)
- uref_cnt_fb(cpi->upsampled_ref_bufs,
- &cpi->upsampled_ref_idx[cpi->lst_fb_idx], new_uidx);
+#if CONFIG_BIDIR_PRED
+  // TODO(zoeliu): To remove the reference buffer update for the
+  // show_existing_frame==1 case; instead, move the reference buffer update to
+  // the previously coded frame, i.e. the last non-reference frame. In that
+  // case, no bit should be set in the refresh mask, but the virtual ref-idx
+  // should be updated and written to the bitstream accordingly, as the
+  // virtual ref-idx for LAST_FRAME and BWDREF_FRAME should be switched, i.e.
+  // cpi->lst_fb_idx and cpi->bwd_fb_idx should be switched.
+ if (cm->show_existing_frame) {
+ ref_cnt_fb(pool->frame_bufs,
+ &cm->ref_frame_map[cpi->lst_fb_idx], cm->new_fb_idx);
+
+ if (use_upsampled_ref)
+ uref_cnt_fb(cpi->upsampled_ref_bufs,
+ &cpi->upsampled_ref_idx[cpi->lst_fb_idx], new_uidx);
+
+ // NOTE(zoeliu): OVERLAY should not be the last non-reference frame.
+ assert(!cpi->rc.is_src_frame_alt_ref);
- if (!cpi->rc.is_src_frame_alt_ref) {
memcpy(cpi->interp_filter_selected[LAST_FRAME],
- cpi->interp_filter_selected[0],
- sizeof(cpi->interp_filter_selected[0]));
+ cpi->interp_filter_selected[BWDREF_FRAME],
+ sizeof(cpi->interp_filter_selected[BWDREF_FRAME]));
+ } else {
+#endif // CONFIG_BIDIR_PRED
+ ref_cnt_fb(pool->frame_bufs,
+ &cm->ref_frame_map[cpi->lst_fb_idx], cm->new_fb_idx);
+ if (use_upsampled_ref)
+ uref_cnt_fb(cpi->upsampled_ref_bufs,
+ &cpi->upsampled_ref_idx[cpi->lst_fb_idx], new_uidx);
+ if (!cpi->rc.is_src_frame_alt_ref) {
+ memcpy(cpi->interp_filter_selected[LAST_FRAME],
+ cpi->interp_filter_selected[0],
+ sizeof(cpi->interp_filter_selected[0]));
+ }
+#if CONFIG_BIDIR_PRED
}
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
}
*cpi->Source,
cpi->common.frame_type,
cpi->refresh_last_frame,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame,
cpi->refresh_golden_frame);
}
VP9_LAST4_FLAG,
#endif // CONFIG_EXT_REFS
VP9_GOLD_FLAG,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ VP9_BWD_FLAG,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
VP9_ALT_FLAG
};
refresh[1] = refresh[2] = refresh[3] = 0;
refresh[4] = (cpi->refresh_golden_frame) ? 1 : 0;
refresh[5] = (cpi->refresh_alt_ref_frame) ? 1 : 0;
-#else
+#else // CONFIG_EXT_REFS
refresh[1] = (cpi->refresh_golden_frame) ? 1 : 0;
+#if CONFIG_BIDIR_PRED
+ refresh[2] = (cpi->refresh_bwd_ref_frame) ? 1 : 0;
+ refresh[3] = (cpi->refresh_alt_ref_frame) ? 1 : 0;
+#else // CONFIG_BIDIR_PRED
refresh[2] = (cpi->refresh_alt_ref_frame) ? 1 : 0;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
const int idx = cpi->scaled_ref_idx[i - 1];
// after a key/intra-only frame.
cpi->max_mv_magnitude = max_mv_def;
} else {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ // TODO(zoeliu): Maybe we should leave it the same as base.
+ if (cm->show_frame || cpi->rc.is_bwd_ref_frame) {
+#else
if (cm->show_frame) {
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
// Allow mv_steps to correspond to twice the max mv magnitude found
// in the previous frame, capped by the default max_mv_magnitude based
// on resolution.
// There has been a change in frame size.
vp10_set_size_literal(cpi, oxcf->scaled_frame_width,
- oxcf->scaled_frame_height);
+ oxcf->scaled_frame_height);
}
if (oxcf->pass == 0 &&
}
cpi->Source = vp10_scale_if_required(cm, cpi->un_scaled_source,
- &cpi->scaled_source);
+ &cpi->scaled_source);
if (cpi->unscaled_last_source != NULL)
cpi->Last_Source = vp10_scale_if_required(cm, cpi->unscaled_last_source,
map[cpi->lst_fb_idxes[3]] == map[cpi->lst_fb_idxes[1]];
const int last4_is_last3 =
map[cpi->lst_fb_idxes[3]] == map[cpi->lst_fb_idxes[2]];
- const int gld_is_last4 = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idxes[3]];
+
const int last4_is_alt = map[cpi->alt_fb_idx] == map[cpi->lst_fb_idxes[3]];
-#else
+ const int gld_is_last4 = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idxes[3]];
+#else // CONFIG_EXT_REFS
const int gld_is_last = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idx];
+#if CONFIG_BIDIR_PRED
+ const int bwd_is_last = map[cpi->bwd_fb_idx] == map[cpi->lst_fb_idx];
+#endif // CONFIG_BIDIR_PRED
const int alt_is_last = map[cpi->alt_fb_idx] == map[cpi->lst_fb_idx];
#endif // CONFIG_EXT_REFS
const int gld_is_alt = map[cpi->gld_fb_idx] == map[cpi->alt_fb_idx];
int flags = VP9_REFFRAME_ALL;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (!cpi->rc.is_bwd_ref_frame)
+ flags &= ~VP9_BWD_FLAG;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (gld_is_alt || gld_is_last)
flags &= ~VP9_GOLD_FLAG;
if (gld_is_last4 || gld_is_last3 || gld_is_last2)
flags &= ~VP9_GOLD_FLAG;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ if (bwd_is_last && (flags & VP9_BWD_FLAG))
+ flags &= ~VP9_BWD_FLAG;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
return flags;
(cpi->rc.source_alt_ref_active && !cpi->refresh_alt_ref_frame);
}
cm->ref_frame_sign_bias[ALTREF_FRAME] = arf_sign_bias;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cm->ref_frame_sign_bias[BWDREF_FRAME] = cm->ref_frame_sign_bias[ALTREF_FRAME];
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
}
static int setup_interp_filter_search_mask(VP10_COMP *cpi) {
(ref_total[GOLDEN_FRAME] == 0 ||
cpi->interp_filter_selected[GOLDEN_FRAME][ifilter] * 50
< ref_total[GOLDEN_FRAME]) &&
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ (ref_total[BWDREF_FRAME] == 0 ||
+ cpi->interp_filter_selected[BWDREF_FRAME][ifilter] * 50
+ < ref_total[BWDREF_FRAME]) &&
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
(ref_total[ALTREF_FRAME] == 0 ||
cpi->interp_filter_selected[ALTREF_FRAME][ifilter] * 50
< ref_total[ALTREF_FRAME]))
return mask;
}
+#define DUMP_RECON_FRAMES 0
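+// NOTE: Set DUMP_RECON_FRAMES to 1 to dump the filtered reconstructed frames
+// to /tmp/enc_filtered_recon.yuv for offline inspection.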
+
+#if DUMP_RECON_FRAMES == 1
+// NOTE(zoeliu): For debug - Output the filtered reconstructed video.
+static void dump_filtered_recon_frames(VP10_COMP *cpi) {
+ VP10_COMMON *const cm = &cpi->common;
+ const YV12_BUFFER_CONFIG *recon_buf = cm->frame_to_show;
+ int h;
+ char file_name[256] = "/tmp/enc_filtered_recon.yuv";
+ FILE *f_recon = NULL;
+
+ if (recon_buf == NULL || !cm->show_frame) {
+    printf("Frame %d is not ready or not shown; nothing to dump.\n",
+ cm->current_video_frame);
+ return;
+ }
+
+ if (cm->current_video_frame == 0) {
+ if ((f_recon = fopen(file_name, "wb")) == NULL) {
+ printf("Unable to open file %s to write.\n", file_name);
+ return;
+ }
+ } else {
+ if ((f_recon = fopen(file_name, "ab")) == NULL) {
+ printf("Unable to open file %s to append.\n", file_name);
+ return;
+ }
+ }
+ printf("\nFrame=%5d, encode_update_type[%5d]=%1d, show_existing_frame=%d, "
+ "y_stride=%4d, uv_stride=%4d, width=%4d, height=%4d\n",
+ cm->current_video_frame, cpi->twopass.gf_group.index,
+ cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index],
+ cm->show_existing_frame,
+ recon_buf->y_stride, recon_buf->uv_stride, cm->width, cm->height);
+
+ // --- Y ---
+ for (h = 0; h < cm->height; ++h) {
+ fwrite(&recon_buf->y_buffer[h*recon_buf->y_stride],
+ 1, cm->width, f_recon);
+ }
+ // --- U ---
+ for (h = 0; h < (cm->height >> 1); ++h) {
+ fwrite(&recon_buf->u_buffer[h*recon_buf->uv_stride],
+ 1, (cm->width >> 1), f_recon);
+ }
+ // --- V ---
+ for (h = 0; h < (cm->height >> 1); ++h) {
+ fwrite(&recon_buf->v_buffer[h*recon_buf->uv_stride],
+ 1, (cm->width >> 1), f_recon);
+ }
+
+ fclose(f_recon);
+}
+#endif // DUMP_RECON_FRAMES
+
static void encode_frame_to_data_rate(VP10_COMP *cpi,
size_t *size,
uint8_t *dest,
// Set the arf sign bias for this frame.
set_arf_sign_bias(cpi);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cm->show_existing_frame) {
+ // NOTE(zoeliu): In BIDIR_PRED, the existing frame to show is the current
+ // BWDREF_FRAME in the reference frame buffer.
+
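+    // No new frame gets encoded on this path: a minimal bitstream is packed
+    // for the frame to show, LAST_FRAME is refreshed with the displayed
+    // buffer, and the frame counters are advanced before the early return
+    // below.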
+ cm->frame_type = INTER_FRAME;
+ cm->show_frame = 1;
+ cpi->frame_flags = *frame_flags;
+
+ cpi->refresh_last_frame = 1;
+ cpi->refresh_golden_frame = 0;
+ cpi->refresh_bwd_ref_frame = 0;
+ cpi->refresh_alt_ref_frame = 0;
+
+ cpi->rc.is_bwd_ref_frame = 0;
+ cpi->rc.is_last_nonref_frame = 0;
+ cpi->rc.is_nonref_frame = 0;
+
+ // Build the bitstream
+ vp10_pack_bitstream(cpi, dest, size);
+
+ // Set up frame to show to get ready for stats collection.
+ cm->frame_to_show = get_frame_new_buffer(cm);
+
+ // Update the LAST_FRAME in the reference frame buffer.
+ vp10_update_reference_frames(cpi);
+
+ cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
+ cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
+ cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
+
+ *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
+
+#if DUMP_RECON_FRAMES == 1
+ // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
+ dump_filtered_recon_frames(cpi);
+#endif // DUMP_RECON_FRAMES
+
+ // Update the frame type
+ cm->last_frame_type = cm->frame_type;
+
+ cm->last_width = cm->width;
+ cm->last_height = cm->height;
+
+ ++cm->current_video_frame;
+
+ return;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
// Set default state for segment based loop filter update flags.
cm->lf.mode_ref_delta_update = 0;
vp10_write_yuv_frame_420(&cpi->denoiser.running_avg_y[INTRA_FRAME],
yuv_denoised_file);
}
-#endif
-#endif
+#endif // OUTPUT_YUV_DENOISED
+#endif // CONFIG_VP9_TEMPORAL_DENOISING
+
#ifdef OUTPUT_YUV_SKINMAP
if (cpi->common.current_video_frame > 1) {
vp10_compute_skin_map(cpi, yuv_skinmap_file);
}
-#endif
+#endif // OUTPUT_YUV_SKINMAP
// Special case code to reduce pulsing when key frames are forced at a
// fixed interval. Note the reconstruction error if it is the frame before
cm->frame_to_show->render_width = cm->render_width;
cm->frame_to_show->render_height = cm->render_height;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ // TODO(zoeliu): For non-ref frames, loop filtering may need to be turned
+ // off.
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
// Pick the loop filter level for the frame.
loopfilter_frame(cpi, cm);
- // build the bitstream
+ // Build the bitstream
vp10_pack_bitstream(cpi, dest, size);
+#if DUMP_RECON_FRAMES == 1
+ // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
+ if (cm->show_frame)
+ dump_filtered_recon_frames(cpi);
+#endif // DUMP_RECON_FRAMES
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cpi->rc.is_last_nonref_frame) {
+    // NOTE: If the current frame is a LAST_NONREF_FRAME, the BWDREF_FRAME
+    //       needs to be shown next.
+ cpi->existing_fb_idx_to_show = cpi->bwd_fb_idx;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (cm->seg.update_map)
update_reference_segmentation_map(cpi);
if (frame_is_intra_only(cm) == 0) {
release_scaled_references(cpi);
}
+
vp10_update_reference_frames(cpi);
for (t = TX_4X4; t <= TX_32X32; t++)
else
cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cpi->refresh_bwd_ref_frame == 1)
+ cpi->frame_flags |= FRAMEFLAGS_BWDREF;
+ else
+ cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
cpi->ref_frame_flags = get_ref_frame_flags(cpi);
#if CONFIG_EXT_REFS
if (!cm->show_existing_frame)
cm->last_show_frame = cm->show_frame;
+#if 0
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  if ((cm->show_frame &&
+       !(cpi->rc.is_nonref_frame || cpi->rc.is_last_nonref_frame)) ||
+      cpi->rc.is_bwd_ref_frame) {
+ vp10_swap_mi_and_prev_mi(cm);
+ }
+ if (cm->show_frame || cpi->rc.is_bwd_ref_frame) {
+ // Don't increment frame counters if this was an altref buffer
+ // update not a real frame
+ ++cm->current_video_frame;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+#endif // 0
+
if (cm->show_frame) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+    // TODO(zoeliu): We may only need to swap mi and prev_mi for those frames
+    //               that are used as reference.
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
vp10_swap_mi_and_prev_mi(cm);
// Don't increment frame counters if this was an altref buffer
// update not a real frame
++cm->current_video_frame;
}
- cm->prev_frame = cm->cur_frame;
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  // NOTE: prev_frame should never point to a frame that is not used as a
+  //       reference.
+ if (cm->is_reference_frame)
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cm->prev_frame = cm->cur_frame;
}
static void Pass0Encode(VP10_COMP *cpi, size_t *size, uint8_t *dest,
cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED;
encode_frame_to_data_rate(cpi, size, dest, frame_flags);
- vp10_twopass_postencode_update(cpi);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (!cpi->common.show_existing_frame)
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ vp10_twopass_postencode_update(cpi);
}
static void init_ref_frame_bufs(VP10_COMMON *cm) {
return cm->frame_type == KEY_FRAME ||
cpi->refresh_last_frame ||
cpi->refresh_golden_frame ||
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame ||
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame ||
!cm->error_resilient_mode ||
cm->lf.mode_ref_delta_update ||
return arf_src_index;
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
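+// Returns the lookahead offset of the source frame to be encoded as the next
+// BWDREF_FRAME, or 0 if no backward reference frame is due.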
+static int get_brf_src_index(VP10_COMP *cpi) {
+ int brf_src_index = 0;
+ const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
+
+  // TODO(zoeliu): Add a check on the -bwd_ref command-line setup flag.
+ if (gf_group->bidir_pred_enabled[gf_group->index]) {
+ if (cpi->oxcf.pass == 2) {
+ if (gf_group->update_type[gf_group->index] == BRF_UPDATE)
+ brf_src_index = gf_group->brf_src_offset[gf_group->index];
+ } else {
+ // TODO(zoeliu): To re-visit the setup for this scenario
+ brf_src_index = BIDIR_PRED_PERIOD - 1;
+ }
+ }
+
+ return brf_src_index;
+}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
static void check_src_altref(VP10_COMP *cpi,
const struct lookahead_entry *source) {
RATE_CONTROL *const rc = &cpi->rc;
struct lookahead_entry *last_source = NULL;
struct lookahead_entry *source = NULL;
int arf_src_index;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ int brf_src_index;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
int i;
vpx_usec_timer_start(&cmptimer);
cpi->refresh_last_frame = 1;
cpi->refresh_golden_frame = 0;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame = 0;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (oxcf->pass == 2 && cm->show_existing_frame) {
+    // Manage the source buffer and flush out the source frame that has
+    // already been coded; also prepare for the PSNR calculation if needed.
+ if ((source = vp10_lookahead_pop(cpi->lookahead, flush)) == NULL) {
+ *size = 0;
+ return -1;
+ }
+ cpi->Source = &source->img;
+
+    // TODO(zoeliu): Determine whether the frame rate needs to be adjusted
+    //               here.
+ *time_stamp = source->ts_start;
+ *time_end = source->ts_end;
+
+ // Find a free buffer for the new frame, releasing the reference previously
+ // held.
+ if (cm->new_fb_idx != INVALID_IDX) {
+ --pool->frame_bufs[cm->new_fb_idx].ref_count;
+ }
+ cm->new_fb_idx = get_free_fb(cm);
+
+ if (cm->new_fb_idx == INVALID_IDX)
+ return -1;
+
+ // Clear down mmx registers
+ vpx_clear_system_state();
+
+ // Start with a 0 size frame.
+ *size = 0;
+
+ Pass2Encode(cpi, size, dest, frame_flags);
+
+ if (cpi->b_calculate_psnr)
+ generate_psnr_packet(cpi);
+
+#if CONFIG_INTERNAL_STATS
+ compute_internal_stats(cpi);
+ cpi->bytes += (int)(*size);
+#endif // CONFIG_INTERNAL_STATS
+
+ // Clear down mmx registers
+ vpx_clear_system_state();
+
+ cm->show_existing_frame = 0;
+
+ return 0;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
// Should we encode an arf frame.
arf_src_index = get_arf_src_index(cpi);
-
if (arf_src_index) {
for (i = 0; i <= arf_src_index; ++i) {
struct lookahead_entry *e = vp10_lookahead_peek(cpi->lookahead, i);
rc->source_alt_ref_pending = 0;
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ rc->is_bwd_ref_frame = 0;
+ brf_src_index = get_brf_src_index(cpi);
+  // TODO(zoeliu): Handle the case where alt-ref is disabled; currently
+  // bwd-ref works only when alt-ref is on.
+ if (brf_src_index) {
+ assert(brf_src_index <= rc->frames_to_key);
+ if ((source = vp10_lookahead_peek(cpi->lookahead, brf_src_index)) != NULL) {
+ cm->show_frame = 0;
+ cm->intra_only = 0;
+
+ cpi->refresh_bwd_ref_frame = 1;
+ cpi->refresh_last_frame = 0;
+ cpi->refresh_golden_frame = 0;
+ cpi->refresh_alt_ref_frame = 0;
+
+ rc->is_bwd_ref_frame = 1;
+ }
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (!source) {
// Get last frame source.
if (cm->current_video_frame > 0) {
vpx_clear_system_state();
// adjust frame rates based on timestamps given
- if (cm->show_frame) {
+ if (cm->show_frame)
adjust_frame_rate(cpi, source);
- }
// Find a free buffer for the new frame, releasing the reference previously
// held.
compute_internal_stats(cpi);
cpi->bytes += (int)(*size);
}
-#endif
+#endif // CONFIG_INTERNAL_STATS
+
vpx_clear_system_state();
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cpi->rc.is_last_nonref_frame) {
+    // NOTE(zoeliu): If the current frame is a last non-reference frame, the
+    // BWDREF_FRAME needs to be shown next.
+ cpi->rc.is_last_nonref_frame = 0;
+ cm->show_existing_frame = 1;
+ } else {
+ cm->show_existing_frame = 0;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
return 0;
}
FRAME_CONTEXT fc;
} CODING_CONTEXT;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
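+// Length (in frames) of a bidirectional prediction period: one BWDREF_FRAME
+// coded ahead of display order, followed by non-reference frames that may
+// predict from it. Currently only a period of 2 is supported.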
+#define BIDIR_PRED_PERIOD 2
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
typedef enum {
// encode_breakout is disabled.
typedef enum {
FRAMEFLAGS_KEY = 1 << 0,
FRAMEFLAGS_GOLDEN = 1 << 1,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ FRAMEFLAGS_BWDREF = 1 << 2,
+ FRAMEFLAGS_ALTREF = 1 << 3,
+#else
FRAMEFLAGS_ALTREF = 1 << 2,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
} FRAMETYPE_FLAGS;
typedef enum {
// ----------------------------------------------------------------
int enable_auto_arf;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ int enable_auto_brf; // (b)ackward (r)ef (f)rame
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
int encode_breakout; // early breakout : for video conf recommend 800
int scaled_ref_idx[MAX_REF_FRAMES];
#if CONFIG_EXT_REFS
int lst_fb_idxes[LAST_REF_FRAMES];
-#else
+#else // CONFIG_EXT_REFS
int lst_fb_idx;
#endif // CONFIG_EXT_REFS
int gld_fb_idx;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  int bwd_fb_idx;  // BWDREF_FRAME
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
int alt_fb_idx;
int last_show_frame_buf_idx; // last show frame buffer index
int refresh_last_frame;
int refresh_golden_frame;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ int refresh_bwd_ref_frame;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
int refresh_alt_ref_frame;
int ext_refresh_frame_flags_pending;
#if CONFIG_ANS
struct BufAnsCoder buf_ans;
#endif
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ int refresh_frame_mask;
+ int existing_fb_idx_to_show;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
} VP10_COMP;
void vp10_initialize_enc(void);
#endif // CONFIG_EXT_REFS
else if (ref_frame == GOLDEN_FRAME)
return cpi->gld_fb_idx;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ else if (ref_frame == BWDREF_FRAME)
+ return cpi->bwd_fb_idx;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
else
return cpi->alt_fb_idx;
}
cpi->oxcf.enable_auto_arf;
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+static INLINE int is_bwdref_enabled(const VP10_COMP *const cpi) {
+ // NOTE(zoeliu): The enabling of backward prediction depends on the alt_ref
+ // period, and will be off when the alt_ref period is not sufficiently large.
+ return cpi->oxcf.mode != REALTIME && cpi->oxcf.lag_in_frames > 0;
+  // TODO(zoeliu): Also condition this on cpi->oxcf.enable_auto_brf and
+  // cpi->rc.bidir_pred_enabled.
+}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
static INLINE void set_ref_ptrs(VP10_COMMON *cm, MACROBLOCKD *xd,
MV_REFERENCE_FRAME ref0,
MV_REFERENCE_FRAME ref1) {
GF_GROUP *const gf_group = &twopass->gf_group;
FIRSTPASS_STATS frame_stats;
int i;
- int frame_index = 1;
+ int frame_index = 0;
int target_frame_size;
int key_frame;
const int max_bits = frame_max_bits(&cpi->rc, &cpi->oxcf);
int mid_boost_bits = 0;
int mid_frame_idx;
unsigned char arf_buffer_indices[MAX_ACTIVE_ARFS];
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
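+  // Position of the current frame within its bidirectional prediction period.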
+ int bidir_pred_frame_index = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
key_frame = cpi->common.frame_type == KEY_FRAME;
// is also the golden frame.
if (!key_frame) {
if (rc->source_alt_ref_active) {
- gf_group->update_type[0] = OVERLAY_UPDATE;
- gf_group->rf_level[0] = INTER_NORMAL;
- gf_group->bit_allocation[0] = 0;
+ gf_group->update_type[frame_index] = OVERLAY_UPDATE;
+ gf_group->rf_level[frame_index] = INTER_NORMAL;
+ gf_group->bit_allocation[frame_index] = 0;
} else {
- gf_group->update_type[0] = GF_UPDATE;
- gf_group->rf_level[0] = GF_ARF_STD;
- gf_group->bit_allocation[0] = gf_arf_bits;
+ gf_group->update_type[frame_index] = GF_UPDATE;
+ gf_group->rf_level[frame_index] = GF_ARF_STD;
+ gf_group->bit_allocation[frame_index] = gf_arf_bits;
}
- gf_group->arf_update_idx[0] = arf_buffer_indices[0];
- gf_group->arf_ref_idx[0] = arf_buffer_indices[0];
+ gf_group->arf_update_idx[frame_index] = arf_buffer_indices[0];
+ gf_group->arf_ref_idx[frame_index] = arf_buffer_indices[0];
// Step over the golden frame / overlay frame
if (EOF == input_stats(twopass, &frame_stats))
return;
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ gf_group->bidir_pred_enabled[frame_index] = 0;
+ gf_group->brf_src_offset[frame_index] = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
// Deduct the boost bits for arf (or gf if it is not a key frame)
// from the group total.
if (rc->source_alt_ref_pending || !key_frame)
total_group_bits -= gf_arf_bits;
+ frame_index++;
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ bidir_pred_frame_index++;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
// Store the bits to spend on the ARF if there is one.
if (rc->source_alt_ref_pending) {
gf_group->update_type[frame_index] = ARF_UPDATE;
gf_group->arf_ref_idx[frame_index] =
arf_buffer_indices[cpi->multi_arf_last_grp_enabled &&
rc->source_alt_ref_active];
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ gf_group->bidir_pred_enabled[frame_index] = 0;
+ gf_group->brf_src_offset[frame_index] = 0;
+ // NOTE: "bidir_pred_frame_index" stays unchanged for ARF_UPDATE frames.
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
++frame_index;
if (cpi->multi_arf_enabled) {
target_frame_size = clamp(target_frame_size, 0,
VPXMIN(max_bits, (int)total_group_bits));
- gf_group->update_type[frame_index] = LF_UPDATE;
- gf_group->rf_level[frame_index] = INTER_NORMAL;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+    // TODO(zoeliu): Currently only BIDIR_PRED_PERIOD == 2 is supported.
+ assert(BIDIR_PRED_PERIOD == 2);
+ // NOTE: BIDIR_PRED is only enabled when its interval is strictly
+ // less than the GOLDEN_FRAME group interval.
+ // TODO(zoeliu): Currently BIDIR_PRED is only enabled when alt-ref is on.
+ if (rc->source_alt_ref_pending && BIDIR_PRED_PERIOD <
+ (rc->baseline_gf_interval - rc->source_alt_ref_pending)) {
+ if (bidir_pred_frame_index == 1) {
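+        // At the start of a bidir period, code a BWDREF_FRAME only if its
+        // source frame (curr_brf_src_offset frames ahead) still falls inside
+        // the current GF group; otherwise fall back to a regular LF_UPDATE.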
+ const int curr_brf_src_offset = BIDIR_PRED_PERIOD - 1;
+ if ((i + curr_brf_src_offset) >=
+ (rc->baseline_gf_interval - rc->source_alt_ref_pending)) {
+ gf_group->update_type[frame_index] = LF_UPDATE;
+ gf_group->bidir_pred_enabled[frame_index] = 0;
+ gf_group->brf_src_offset[frame_index] = 0;
+ } else {
+ gf_group->update_type[frame_index] = BRF_UPDATE;
+ gf_group->bidir_pred_enabled[frame_index] = 1;
+ gf_group->brf_src_offset[frame_index] = curr_brf_src_offset;
+ }
+ } else if (bidir_pred_frame_index == BIDIR_PRED_PERIOD) {
+ gf_group->update_type[frame_index] = LASTNRF_UPDATE;
+ gf_group->bidir_pred_enabled[frame_index] = 1;
+ gf_group->brf_src_offset[frame_index] = 0;
+ // Reset the bidir_pred index.
+ bidir_pred_frame_index = 0;
+ } else {
+ gf_group->update_type[frame_index] = NRF_UPDATE;
+ gf_group->bidir_pred_enabled[frame_index] = 1;
+ gf_group->brf_src_offset[frame_index] = 0;
+ }
+
+ bidir_pred_frame_index++;
+ } else {
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ gf_group->update_type[frame_index] = LF_UPDATE;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ gf_group->bidir_pred_enabled[frame_index] = 0;
+ gf_group->brf_src_offset[frame_index] = 0;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (gf_group->update_type[frame_index] == BRF_UPDATE) {
+      // Boost the bits allocated to the BWDREF_FRAME.
+      // TODO(zoeliu): Consider using GF_ARF_LOW for the rf_level here.
+ gf_group->rf_level[frame_index] = INTER_HIGH;
+ gf_group->bit_allocation[frame_index] =
+ target_frame_size + (target_frame_size >> 2);
+ } else if (gf_group->update_type[frame_index] == LASTNRF_UPDATE) {
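+      // A last non-reference frame only gets about half of the nominal
+      // per-frame target, since it is never used as a reference.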
+ gf_group->rf_level[frame_index] = INTER_NORMAL;
+ gf_group->bit_allocation[frame_index] =
+ VPXMAX(0, target_frame_size - (target_frame_size >> 1));
+ } else {
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ gf_group->rf_level[frame_index] = INTER_NORMAL;
+ gf_group->bit_allocation[frame_index] = target_frame_size;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
- gf_group->bit_allocation[frame_index] = target_frame_size;
++frame_index;
}
gf_group->update_type[frame_index] = GF_UPDATE;
gf_group->rf_level[frame_index] = GF_ARF_STD;
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ gf_group->bidir_pred_enabled[frame_index] = 0;
+ gf_group->brf_src_offset[frame_index] = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
// Note whether multi-arf was enabled this group for next time.
cpi->multi_arf_last_grp_enabled = cpi->multi_arf_enabled;
int int_lbq =
(int)(vp10_convert_qindex_to_q(rc->last_boosted_qindex,
cpi->common.bit_depth));
+
active_min_gf_interval = rc->min_gf_interval + VPXMIN(2, int_max_q / 200);
if (active_min_gf_interval > rc->max_gf_interval)
active_min_gf_interval = rc->max_gf_interval;
TWO_PASS *const twopass = &cpi->twopass;
cpi->rc.is_src_frame_alt_ref = 0;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->rc.is_bwd_ref_frame = 0;
+ cpi->rc.is_last_nonref_frame = 0;
+ cpi->rc.is_nonref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
switch (twopass->gf_group.update_type[twopass->gf_group.index]) {
case KF_UPDATE:
cpi->refresh_last_frame = 1;
cpi->refresh_golden_frame = 1;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame = 1;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame = 1;
break;
+
case LF_UPDATE:
cpi->refresh_last_frame = 1;
cpi->refresh_golden_frame = 0;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame = 0;
break;
+
case GF_UPDATE:
cpi->refresh_last_frame = 1;
cpi->refresh_golden_frame = 1;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame = 0;
break;
+
case OVERLAY_UPDATE:
cpi->refresh_last_frame = 0;
cpi->refresh_golden_frame = 1;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame = 0;
cpi->rc.is_src_frame_alt_ref = 1;
break;
+
case ARF_UPDATE:
cpi->refresh_last_frame = 0;
cpi->refresh_golden_frame = 0;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ cpi->refresh_bwd_ref_frame = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
cpi->refresh_alt_ref_frame = 1;
break;
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ case BRF_UPDATE:
+ cpi->refresh_last_frame = 0;
+ cpi->refresh_golden_frame = 0;
+ cpi->refresh_bwd_ref_frame = 1;
+ cpi->refresh_alt_ref_frame = 0;
+ cpi->rc.is_bwd_ref_frame = 1;
+ break;
+
+    // TODO(zoeliu): When BIDIR_PRED and EXT_REFS start to work together, both
+    // LASTNRF and NRF may be treated as one of the LAST reference frames.
+
+ case LASTNRF_UPDATE:
+ cpi->refresh_last_frame = 0;
+ cpi->refresh_golden_frame = 0;
+ cpi->refresh_bwd_ref_frame = 0;
+ cpi->refresh_alt_ref_frame = 0;
+ cpi->rc.is_last_nonref_frame = 1;
+ break;
+
+ case NRF_UPDATE:
+ cpi->refresh_last_frame = 0;
+ cpi->refresh_golden_frame = 0;
+ cpi->refresh_bwd_ref_frame = 0;
+ cpi->refresh_alt_ref_frame = 0;
+ cpi->rc.is_nonref_frame = 1;
+ break;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
default:
assert(0);
break;
rc->last_q[KEY_FRAME] = (tmp_q + cpi->oxcf.best_allowed_q) / 2;
rc->avg_frame_qindex[KEY_FRAME] = rc->last_q[KEY_FRAME];
}
+
vp10_zero(this_frame);
if (EOF == input_stats(twopass, &this_frame))
return;
GF_UPDATE = 2,
ARF_UPDATE = 3,
OVERLAY_UPDATE = 4,
+#if CONFIG_BIDIR_PRED
+ BRF_UPDATE = 5, // Backward Reference Frame
+  // Among the NRFs within a BIDIR_PRED period, only the last one needs to get
+  // LAST_FRAME updated; the others require no reference update at all.
+ LASTNRF_UPDATE = 6, // Last Non-Reference Frame
+ NRF_UPDATE = 7, // Non-Reference Frame, but not the last one
+ FRAME_UPDATE_TYPES = 8
+#else
FRAME_UPDATE_TYPES = 5
+#endif // CONFIG_BIDIR_PRED
} FRAME_UPDATE_TYPE;
#define FC_ANIMATION_THRESH 0.15
unsigned char arf_src_offset[(MAX_LAG_BUFFERS * 2) + 1];
unsigned char arf_update_idx[(MAX_LAG_BUFFERS * 2) + 1];
unsigned char arf_ref_idx[(MAX_LAG_BUFFERS * 2) + 1];
+#if CONFIG_BIDIR_PRED
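+  // Per-frame lookahead offset of the source to be coded as a BWDREF_FRAME
+  // (0 if none), and whether bidirectional prediction is active for the frame.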
+ unsigned char brf_src_offset[(MAX_LAG_BUFFERS * 2) + 1];
+ unsigned char bidir_pred_enabled[(MAX_LAG_BUFFERS * 2) + 1];
+#endif // CONFIG_BIDIR_PRED
int bit_allocation[(MAX_LAG_BUFFERS * 2) + 1];
} GF_GROUP;
struct lookahead_ctx *vp10_lookahead_init(unsigned int width,
- unsigned int height,
- unsigned int subsampling_x,
- unsigned int subsampling_y,
+ unsigned int height,
+ unsigned int subsampling_x,
+ unsigned int subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
- int use_highbitdepth,
+ int use_highbitdepth,
#endif
- unsigned int depth) {
+ unsigned int depth) {
struct lookahead_ctx *ctx = NULL;
// Clamp the lookahead queue depth
RATE_CONTROL *const rc = &cpi->rc;
// Non-viewable frames are a special case and are treated as pure overhead.
- if (!cm->show_frame) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (!cm->show_frame && !rc->is_bwd_ref_frame)
+#else
+ if (!cm->show_frame)
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
rc->bits_off_target -= encoded_frame_size;
- } else {
+ else
rc->bits_off_target += rc->avg_frame_bandwidth - encoded_frame_size;
- }
// Clip the buffer level to the maximum specified buffer size.
rc->bits_off_target = VPXMIN(rc->bits_off_target, rc->maximum_buffer_size);
int vp10_frame_type_qdelta(const VP10_COMP *cpi, int rf_level, int q) {
static const double rate_factor_deltas[RATE_FACTOR_LEVELS] = {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
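+  // With bidirectional prediction, BWDREF frames are coded at INTER_HIGH and
+  // thus receive a larger rate-factor delta than regular INTER_NORMAL frames.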
+ 0.80, // INTER_NORMAL
+ 1.25, // INTER_HIGH
+#else
1.00, // INTER_NORMAL
1.00, // INTER_HIGH
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
1.50, // GF_ARF_LOW
1.75, // GF_ARF_STD
2.00, // KF_STD
}
}
- // Keep record of last boosted (KF/KF/ARF) Q value.
+ // Keep record of last boosted (KF/GF/ARF) Q value.
// If the current frame is coded at a lower Q then we also update it.
// If all mbs in this group are skipped only update if the Q value is
// better than that already stored.
// Actual bits spent
rc->total_actual_bits += rc->projected_frame_size;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ rc->total_target_bits += (cm->show_frame || rc->is_bwd_ref_frame) ?
+ rc->avg_frame_bandwidth : 0;
+#else
rc->total_target_bits += cm->show_frame ? rc->avg_frame_bandwidth : 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
rc->total_target_vs_actual = rc->total_actual_bits - rc->total_target_bits;
if (cm->frame_type == KEY_FRAME)
rc->frames_since_key = 0;
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ if (cm->show_frame || rc->is_bwd_ref_frame) {
+#else
if (cm->show_frame) {
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
rc->frames_since_key++;
rc->frames_to_key--;
}
int source_alt_ref_active;
int is_src_frame_alt_ref;
+#if CONFIG_BIDIR_PRED
+  // NOTE: Different frame types may be allocated different numbers of bits,
+  //       aiming at the best overall RD performance.
+ int is_bwd_ref_frame;
+ int is_last_nonref_frame;
+ int is_nonref_frame;
+#endif // CONFIG_BIDIR_PRED
+
int avg_frame_bandwidth; // Average frame size target for clip
int min_frame_bandwidth; // Minimum allocation used for any frame
int max_frame_bandwidth; // Maximum burst rate allowed for a frame.
8, 8, 4, 4, 2, 2, 1, 0
};
static const int rd_frame_type_factor[FRAME_UPDATE_TYPES] = {
- 128, 144, 128, 128, 144
+ 128, 144, 128, 128, 144,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  // TODO(zoeliu): To further adjust the following factor values.
+ 128, 128, 128
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
};
int vp10_compute_rd_mult(const VP10_COMP *cpi, int qindex) {
rd->thresh_mult[THR_NEARESTL2] = 300;
rd->thresh_mult[THR_NEARESTL3] = 300;
rd->thresh_mult[THR_NEARESTL4] = 300;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_NEARESTB] = 300;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_NEARESTG] = 300;
rd->thresh_mult[THR_NEARESTA] = 300;
rd->thresh_mult[THR_NEARESTL2] = 0;
rd->thresh_mult[THR_NEARESTL3] = 0;
rd->thresh_mult[THR_NEARESTL4] = 0;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_NEARESTB] = 0;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_NEARESTG] = 0;
rd->thresh_mult[THR_NEARESTA] = 0;
rd->thresh_mult[THR_NEWL2] += 1000;
rd->thresh_mult[THR_NEWL3] += 1000;
rd->thresh_mult[THR_NEWL4] += 1000;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_NEWB] += 1000;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_NEWA] += 1000;
rd->thresh_mult[THR_NEWG] += 1000;
rd->thresh_mult[THR_NEARL2] += 1000;
rd->thresh_mult[THR_NEARL3] += 1000;
rd->thresh_mult[THR_NEARL4] += 1000;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_NEARB] += 1000;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_NEARA] += 1000;
rd->thresh_mult[THR_NEARG] += 1000;
rd->thresh_mult[THR_NEWFROMNEARL2] += 1000;
rd->thresh_mult[THR_NEWFROMNEARL3] += 1000;
rd->thresh_mult[THR_NEWFROMNEARL4] += 1000;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_NEWFROMNEARB] += 1000;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_NEWFROMNEARG] += 1000;
rd->thresh_mult[THR_NEWFROMNEARA] += 1000;
rd->thresh_mult[THR_ZEROL2] += 2000;
rd->thresh_mult[THR_ZEROL3] += 2000;
rd->thresh_mult[THR_ZEROL4] += 2000;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_ZEROB] += 2000;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_ZEROG] += 2000;
rd->thresh_mult[THR_ZEROA] += 2000;
rd->thresh_mult[THR_COMP_NEW_NEARL4A] += 1700;
rd->thresh_mult[THR_COMP_NEW_NEWL4A] += 2000;
rd->thresh_mult[THR_COMP_ZERO_ZEROL4A] += 2500;
+
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_COMP_NEAREST_NEARESTLB] += 1000;
+ rd->thresh_mult[THR_COMP_NEAREST_NEARESTGB] += 1000;
+
+ rd->thresh_mult[THR_COMP_NEAREST_NEARLB] += 1200;
+ rd->thresh_mult[THR_COMP_NEAREST_NEARGB] += 1200;
+ rd->thresh_mult[THR_COMP_NEAR_NEARESTLB] += 1200;
+ rd->thresh_mult[THR_COMP_NEAR_NEARESTGB] += 1200;
+
+ rd->thresh_mult[THR_COMP_NEAREST_NEWLB] += 1500;
+ rd->thresh_mult[THR_COMP_NEAREST_NEWGB] += 1500;
+ rd->thresh_mult[THR_COMP_NEW_NEARESTLB] += 1500;
+ rd->thresh_mult[THR_COMP_NEW_NEARESTGB] += 1500;
+
+ rd->thresh_mult[THR_COMP_NEAR_NEWLB] += 1700;
+ rd->thresh_mult[THR_COMP_NEAR_NEWGB] += 1700;
+ rd->thresh_mult[THR_COMP_NEW_NEARLB] += 1700;
+ rd->thresh_mult[THR_COMP_NEW_NEARGB] += 1700;
+
+ rd->thresh_mult[THR_COMP_NEW_NEWLB] += 2000;
+ rd->thresh_mult[THR_COMP_NEW_NEWGB] += 2000;
+
+ rd->thresh_mult[THR_COMP_ZERO_ZEROLB] += 2500;
+ rd->thresh_mult[THR_COMP_ZERO_ZEROGB] += 2500;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
-#else
+
+#else // CONFIG_EXT_INTER
+
rd->thresh_mult[THR_COMP_NEARESTLA] += 1000;
#if CONFIG_EXT_REFS
rd->thresh_mult[THR_COMP_NEARESTL2A] += 1000;
rd->thresh_mult[THR_COMP_NEARESTL3A] += 1000;
rd->thresh_mult[THR_COMP_NEARESTL4A] += 1000;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_COMP_NEARESTLB] += 1000;
+ rd->thresh_mult[THR_COMP_NEARESTGB] += 1000;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_COMP_NEARESTGA] += 1000;
rd->thresh_mult[THR_COMP_NEARLA] += 1500;
rd->thresh_mult[THR_COMP_NEWLA] += 2000;
- rd->thresh_mult[THR_COMP_NEARGA] += 1500;
- rd->thresh_mult[THR_COMP_NEWGA] += 2000;
#if CONFIG_EXT_REFS
rd->thresh_mult[THR_COMP_NEARL2A] += 1500;
rd->thresh_mult[THR_COMP_NEWL2A] += 2000;
rd->thresh_mult[THR_COMP_NEWL3A] += 2000;
rd->thresh_mult[THR_COMP_NEARL4A] += 1500;
rd->thresh_mult[THR_COMP_NEWL4A] += 2000;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_COMP_NEARLB] += 1500;
+ rd->thresh_mult[THR_COMP_NEARGB] += 1500;
+ rd->thresh_mult[THR_COMP_NEWLB] += 2000;
+ rd->thresh_mult[THR_COMP_NEWGB] += 2000;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
+ rd->thresh_mult[THR_COMP_NEARGA] += 1500;
+ rd->thresh_mult[THR_COMP_NEWGA] += 2000;
rd->thresh_mult[THR_COMP_ZEROLA] += 2500;
#if CONFIG_EXT_REFS
rd->thresh_mult[THR_COMP_ZEROL2A] += 2500;
rd->thresh_mult[THR_COMP_ZEROL3A] += 2500;
rd->thresh_mult[THR_COMP_ZEROL4A] += 2500;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ rd->thresh_mult[THR_COMP_ZEROLB] += 2500;
+ rd->thresh_mult[THR_COMP_ZEROGB] += 2500;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
rd->thresh_mult[THR_COMP_ZEROGA] += 2500;
+
#endif // CONFIG_EXT_INTER
rd->thresh_mult[THR_H_PRED] += 2000;
#if CONFIG_EXT_REFS
{2500, 2500, 2500, 2500, 2500, 2500, 4500, 4500, 4500, 4500, 4500, 2500},
{2000, 2000, 2000, 2000, 2000, 2000, 4000, 4000, 4000, 4000, 4000, 2000}
-#else
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {2500, 2500, 2500, 2500, 4500, 4500, 4500, 4500, 2500},
+ {2000, 2000, 2000, 2000, 4000, 4000, 4000, 4000, 2000}
+#else // CONFIG_BIDIR_PRED
{2500, 2500, 2500, 4500, 4500, 2500},
{2000, 2000, 2000, 4000, 4000, 2000}
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
};
RD_OPT *const rd = &cpi->rd;
#define INVALID_MV 0x80008000
#if CONFIG_EXT_REFS
+
#if CONFIG_EXT_INTER
#define MAX_MODES 114
-#else
+#else // CONFIG_EXT_INTER
#define MAX_MODES 54
#endif // CONFIG_EXT_INTER
-#else
+
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
+
+#if CONFIG_EXT_INTER
+#define MAX_MODES 80
+#else // CONFIG_EXT_INTER
+#define MAX_MODES 42
+#endif // CONFIG_EXT_INTER
+
+#else // CONFIG_BIDIR_PRED
+
#if CONFIG_EXT_INTER
#define MAX_MODES 57
-#else
+#else // CONFIG_EXT_INTER
#define MAX_MODES 30
#endif // CONFIG_EXT_INTER
+
+#endif // CONFIG_BIDIR_PRED
+
#endif // CONFIG_EXT_REFS
#if CONFIG_EXT_REFS
#define MAX_REFS 12
-#else
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+#define MAX_REFS 9
+#else // CONFIG_BIDIR_PRED
#define MAX_REFS 6
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
#define RD_THRESH_MAX_FACT 64
THR_NEARESTL2,
THR_NEARESTL3,
THR_NEARESTL4,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_NEARESTB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_NEARESTA,
THR_NEARESTG,
THR_NEWL2,
THR_NEWL3,
THR_NEWL4,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_NEWB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_NEWA,
THR_NEWG,
THR_NEARL2,
THR_NEARL3,
THR_NEARL4,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_NEARB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_NEARA,
THR_NEARG,
THR_NEWFROMNEARL2,
THR_NEWFROMNEARL3,
THR_NEWFROMNEARL4,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_NEWFROMNEARB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_NEWFROMNEARA,
THR_NEWFROMNEARG,
THR_ZEROL2,
THR_ZEROL3,
THR_ZEROL4,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_ZEROB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_ZEROG,
THR_ZEROA,
THR_COMP_NEAREST_NEARESTL2A,
THR_COMP_NEAREST_NEARESTL3A,
THR_COMP_NEAREST_NEARESTL4A,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_COMP_NEAREST_NEARESTLB,
+ THR_COMP_NEAREST_NEARESTGB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_COMP_NEAREST_NEARESTGA,
#else // CONFIG_EXT_INTER
THR_COMP_NEARESTL2A,
THR_COMP_NEARESTL3A,
THR_COMP_NEARESTL4A,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_COMP_NEARESTLB,
+ THR_COMP_NEARESTGB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_COMP_NEARESTGA,
#endif // CONFIG_EXT_INTER
THR_COMP_NEAR_NEWL4A,
THR_COMP_NEW_NEWL4A,
THR_COMP_ZERO_ZEROL4A,
+
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
+ THR_COMP_NEAR_NEARESTLB,
+ THR_COMP_NEAR_NEARESTGB,
+ THR_COMP_NEAREST_NEARLB,
+ THR_COMP_NEAREST_NEARGB,
+ THR_COMP_NEW_NEARESTLB,
+ THR_COMP_NEW_NEARESTGB,
+ THR_COMP_NEAREST_NEWLB,
+ THR_COMP_NEAREST_NEWGB,
+ THR_COMP_NEW_NEARLB,
+ THR_COMP_NEW_NEARGB,
+ THR_COMP_NEAR_NEWLB,
+ THR_COMP_NEAR_NEWGB,
+ THR_COMP_NEW_NEWLB,
+ THR_COMP_NEW_NEWGB,
+ THR_COMP_ZERO_ZEROLB,
+ THR_COMP_ZERO_ZEROGB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
-#else
+
+#else // CONFIG_EXT_INTER
+
THR_COMP_NEARLA,
THR_COMP_NEWLA,
#if CONFIG_EXT_REFS
THR_COMP_NEWL3A,
THR_COMP_NEARL4A,
THR_COMP_NEWL4A,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_COMP_NEARLB,
+ THR_COMP_NEWLB,
+ THR_COMP_NEARGB,
+ THR_COMP_NEWGB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_COMP_NEARGA,
THR_COMP_NEWGA,
THR_COMP_ZEROL2A,
THR_COMP_ZEROL3A,
THR_COMP_ZEROL4A,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_COMP_ZEROLB,
+ THR_COMP_ZEROGB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_COMP_ZEROGA,
#endif // CONFIG_EXT_INTER
THR_LAST2,
THR_LAST3,
THR_LAST4,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_BWDR,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_GOLD,
THR_ALTR,
+
THR_COMP_LA,
#if CONFIG_EXT_REFS
THR_COMP_L2A,
THR_COMP_L3A,
THR_COMP_L4A,
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ THR_COMP_LB,
+ THR_COMP_GB,
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
THR_COMP_GA,
THR_INTRA,
(1 << LAST2_FRAME) | (1 << INTRA_FRAME) | \
(1 << LAST3_FRAME) | (1 << LAST4_FRAME))
-#else
+#else // CONFIG_EXT_REFS
+
+#if CONFIG_BIDIR_PRED
+
+#define LAST_FRAME_MODE_MASK ((1 << GOLDEN_FRAME) | (1 << ALTREF_FRAME) | \
+ (1 << BWDREF_FRAME) | (1 << INTRA_FRAME))
+#define GOLDEN_FRAME_MODE_MASK ((1 << LAST_FRAME) | (1 << ALTREF_FRAME) | \
+ (1 << BWDREF_FRAME) | (1 << INTRA_FRAME))
+#define BWD_REF_MODE_MASK ((1 << LAST_FRAME) | (1 << GOLDEN_FRAME) | \
+ (1 << ALTREF_FRAME) | (1 << INTRA_FRAME))
+// TODO(zoeliu): To rename the following to ALTREF_MODE_MASK
+#define ALT_REF_MODE_MASK ((1 << LAST_FRAME) | (1 << GOLDEN_FRAME) | \
+ (1 << BWDREF_FRAME) | (1 << INTRA_FRAME))
+
+#else // CONFIG_BIDIR_PRED
#define LAST_FRAME_MODE_MASK ((1 << GOLDEN_FRAME) | (1 << ALTREF_FRAME) | \
(1 << INTRA_FRAME))
#define ALT_REF_MODE_MASK ((1 << LAST_FRAME) | (1 << GOLDEN_FRAME) | \
(1 << INTRA_FRAME))
+#endif // CONFIG_BIDIR_PRED
+
#endif // CONFIG_EXT_REFS
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
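+// NOTE: With BIDIR_PRED, either BWDREF_FRAME or ALTREF_FRAME may serve as the
+// second (backward) reference, so both bits are included in the mask.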
+#define SECOND_REF_FRAME_MASK ((1 << ALTREF_FRAME) | (1 << BWDREF_FRAME) | \
+ 0x01)
+#else
#define SECOND_REF_FRAME_MASK ((1 << ALTREF_FRAME) | 0x01)
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
#define MIN_EARLY_TERM_INDEX 3
#define NEW_MV_DISCOUNT_FACTOR 8
{NEARESTMV, {LAST2_FRAME, NONE}},
{NEARESTMV, {LAST3_FRAME, NONE}},
{NEARESTMV, {LAST4_FRAME, NONE}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEARESTMV, {BWDREF_FRAME, NONE}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{NEARESTMV, {ALTREF_FRAME, NONE}},
{NEARESTMV, {GOLDEN_FRAME, NONE}},
{NEWMV, {LAST2_FRAME, NONE}},
{NEWMV, {LAST3_FRAME, NONE}},
{NEWMV, {LAST4_FRAME, NONE}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEWMV, {BWDREF_FRAME, NONE}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{NEWMV, {ALTREF_FRAME, NONE}},
{NEWMV, {GOLDEN_FRAME, NONE}},
{NEARMV, {LAST2_FRAME, NONE}},
{NEARMV, {LAST3_FRAME, NONE}},
{NEARMV, {LAST4_FRAME, NONE}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEARMV, {BWDREF_FRAME, NONE}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{NEARMV, {ALTREF_FRAME, NONE}},
{NEARMV, {GOLDEN_FRAME, NONE}},
{NEWFROMNEARMV, {LAST2_FRAME, NONE}},
{NEWFROMNEARMV, {LAST3_FRAME, NONE}},
{NEWFROMNEARMV, {LAST4_FRAME, NONE}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEWFROMNEARMV, {BWDREF_FRAME, NONE}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{NEWFROMNEARMV, {ALTREF_FRAME, NONE}},
{NEWFROMNEARMV, {GOLDEN_FRAME, NONE}},
{ZEROMV, {LAST2_FRAME, NONE}},
{ZEROMV, {LAST3_FRAME, NONE}},
{ZEROMV, {LAST4_FRAME, NONE}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {ZEROMV, {BWDREF_FRAME, NONE}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{ZEROMV, {GOLDEN_FRAME, NONE}},
{ZEROMV, {ALTREF_FRAME, NONE}},
+  // TODO(zoeliu): May need to reconsider the order of the modes to check.
+
#if CONFIG_EXT_INTER
{NEAREST_NEARESTMV, {LAST_FRAME, ALTREF_FRAME}},
#if CONFIG_EXT_REFS
{NEAREST_NEARESTMV, {LAST2_FRAME, ALTREF_FRAME}},
{NEAREST_NEARESTMV, {LAST3_FRAME, ALTREF_FRAME}},
{NEAREST_NEARESTMV, {LAST4_FRAME, ALTREF_FRAME}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEAREST_NEARESTMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEAREST_NEARESTMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{NEAREST_NEARESTMV, {GOLDEN_FRAME, ALTREF_FRAME}},
#else // CONFIG_EXT_INTER
{NEARESTMV, {LAST2_FRAME, ALTREF_FRAME}},
{NEARESTMV, {LAST3_FRAME, ALTREF_FRAME}},
{NEARESTMV, {LAST4_FRAME, ALTREF_FRAME}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEARESTMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEARESTMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{NEARESTMV, {GOLDEN_FRAME, ALTREF_FRAME}},
#endif // CONFIG_EXT_INTER
{NEAREST_NEARMV, {GOLDEN_FRAME, ALTREF_FRAME}},
{NEAR_NEARMV, {LAST_FRAME, ALTREF_FRAME}},
{NEAR_NEARMV, {GOLDEN_FRAME, ALTREF_FRAME}},
- {NEW_NEARESTMV, {LAST_FRAME, ALTREF_FRAME}},
- {NEW_NEARESTMV, {GOLDEN_FRAME, ALTREF_FRAME}},
- {NEAREST_NEWMV, {LAST_FRAME, ALTREF_FRAME}},
- {NEAREST_NEWMV, {GOLDEN_FRAME, ALTREF_FRAME}},
- {NEW_NEARMV, {LAST_FRAME, ALTREF_FRAME}},
- {NEW_NEARMV, {GOLDEN_FRAME, ALTREF_FRAME}},
- {NEAR_NEWMV, {LAST_FRAME, ALTREF_FRAME}},
- {NEAR_NEWMV, {GOLDEN_FRAME, ALTREF_FRAME}},
+ {NEW_NEARESTMV, {LAST_FRAME, ALTREF_FRAME}},
+ {NEW_NEARESTMV, {GOLDEN_FRAME, ALTREF_FRAME}},
+ {NEAREST_NEWMV, {LAST_FRAME, ALTREF_FRAME}},
+ {NEAREST_NEWMV, {GOLDEN_FRAME, ALTREF_FRAME}},
+ {NEW_NEARMV, {LAST_FRAME, ALTREF_FRAME}},
+ {NEW_NEARMV, {GOLDEN_FRAME, ALTREF_FRAME}},
+ {NEAR_NEWMV, {LAST_FRAME, ALTREF_FRAME}},
+ {NEAR_NEWMV, {GOLDEN_FRAME, ALTREF_FRAME}},
{NEW_NEWMV, {LAST_FRAME, ALTREF_FRAME}},
{NEW_NEWMV, {GOLDEN_FRAME, ALTREF_FRAME}},
{ZERO_ZEROMV, {LAST_FRAME, ALTREF_FRAME}},
{NEAR_NEWMV, {LAST4_FRAME, ALTREF_FRAME}},
{NEW_NEWMV, {LAST4_FRAME, ALTREF_FRAME}},
{ZERO_ZEROMV, {LAST4_FRAME, ALTREF_FRAME}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEAR_NEARESTMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEAR_NEARESTMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {NEAREST_NEARMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEAREST_NEARMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {NEW_NEARESTMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEW_NEARESTMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {NEAREST_NEWMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEAREST_NEWMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {NEW_NEARMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEW_NEARMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {NEAR_NEWMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEAR_NEWMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {NEW_NEWMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEW_NEWMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {ZERO_ZEROMV, {LAST_FRAME, BWDREF_FRAME}},
+ {ZERO_ZEROMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
-#else
+
+#else // CONFIG_EXT_INTER
+
{NEARMV, {LAST_FRAME, ALTREF_FRAME}},
{NEWMV, {LAST_FRAME, ALTREF_FRAME}},
#if CONFIG_EXT_REFS
{NEWMV, {LAST3_FRAME, ALTREF_FRAME}},
{NEARMV, {LAST4_FRAME, ALTREF_FRAME}},
{NEWMV, {LAST4_FRAME, ALTREF_FRAME}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {NEARMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEWMV, {LAST_FRAME, BWDREF_FRAME}},
+ {NEARMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+ {NEWMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{NEARMV, {GOLDEN_FRAME, ALTREF_FRAME}},
{NEWMV, {GOLDEN_FRAME, ALTREF_FRAME}},
{ZEROMV, {LAST3_FRAME, ALTREF_FRAME}},
{ZEROMV, {LAST2_FRAME, ALTREF_FRAME}},
{ZEROMV, {LAST4_FRAME, ALTREF_FRAME}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {ZEROMV, {LAST_FRAME, BWDREF_FRAME}},
+ {ZEROMV, {GOLDEN_FRAME, BWDREF_FRAME}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{ZEROMV, {GOLDEN_FRAME, ALTREF_FRAME}},
#endif // CONFIG_EXT_INTER
{{LAST2_FRAME, NONE}},
{{LAST3_FRAME, NONE}},
{{LAST4_FRAME, NONE}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {{BWDREF_FRAME, NONE}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{{GOLDEN_FRAME, NONE}},
{{ALTREF_FRAME, NONE}},
+
{{LAST_FRAME, ALTREF_FRAME}},
#if CONFIG_EXT_REFS
{{LAST2_FRAME, ALTREF_FRAME}},
{{LAST3_FRAME, ALTREF_FRAME}},
{{LAST4_FRAME, ALTREF_FRAME}},
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ {{LAST_FRAME, BWDREF_FRAME}},
+ {{GOLDEN_FRAME, BWDREF_FRAME}},
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
{{GOLDEN_FRAME, ALTREF_FRAME}},
{{INTRA_FRAME, NONE}},
if (cm->reference_mode != COMPOUND_REFERENCE) {
vpx_prob ref_single_p1 = vp10_get_pred_prob_single_ref_p1(cm, xd);
vpx_prob ref_single_p2 = vp10_get_pred_prob_single_ref_p2(cm, xd);
-#if CONFIG_EXT_REFS
+#if CONFIG_EXT_REFS || CONFIG_BIDIR_PRED
vpx_prob ref_single_p3 = vp10_get_pred_prob_single_ref_p3(cm, xd);
+#endif // CONFIG_EXT_REFS || CONFIG_BIDIR_PRED
+#if CONFIG_EXT_REFS
vpx_prob ref_single_p4 = vp10_get_pred_prob_single_ref_p4(cm, xd);
vpx_prob ref_single_p5 = vp10_get_pred_prob_single_ref_p5(cm, xd);
#endif // CONFIG_EXT_REFS
ref_costs_single[LAST2_FRAME] =
ref_costs_single[LAST3_FRAME] =
ref_costs_single[LAST4_FRAME] =
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ ref_costs_single[BWDREF_FRAME] =
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
ref_costs_single[GOLDEN_FRAME] =
ref_costs_single[ALTREF_FRAME] = base_cost;
ref_costs_single[LAST2_FRAME] += vp10_cost_bit(ref_single_p4, 1);
ref_costs_single[LAST3_FRAME] += vp10_cost_bit(ref_single_p5, 0);
ref_costs_single[LAST4_FRAME] += vp10_cost_bit(ref_single_p5, 1);
-#else
+#else // CONFIG_EXT_REFS
ref_costs_single[LAST_FRAME] += vp10_cost_bit(ref_single_p1, 0);
ref_costs_single[GOLDEN_FRAME] += vp10_cost_bit(ref_single_p1, 1);
- ref_costs_single[ALTREF_FRAME] += vp10_cost_bit(ref_single_p1, 1);
ref_costs_single[GOLDEN_FRAME] += vp10_cost_bit(ref_single_p2, 0);
+ ref_costs_single[ALTREF_FRAME] += vp10_cost_bit(ref_single_p1, 1);
ref_costs_single[ALTREF_FRAME] += vp10_cost_bit(ref_single_p2, 1);
+#if CONFIG_BIDIR_PRED
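+    // Single-reference coding tree with BIDIR_PRED: p1 separates LAST from
+    // {GOLDEN, BWDREF, ALTREF}, p2 separates GOLDEN from {BWDREF, ALTREF},
+    // and p3 separates BWDREF (0) from ALTREF (1).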
+ ref_costs_single[ALTREF_FRAME] += vp10_cost_bit(ref_single_p3, 1);
+ ref_costs_single[BWDREF_FRAME] += vp10_cost_bit(ref_single_p1, 1);
+ ref_costs_single[BWDREF_FRAME] += vp10_cost_bit(ref_single_p2, 1);
+ ref_costs_single[BWDREF_FRAME] += vp10_cost_bit(ref_single_p3, 0);
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
} else {
ref_costs_single[LAST_FRAME] = 512;
ref_costs_single[LAST2_FRAME] = 512;
ref_costs_single[LAST3_FRAME] = 512;
ref_costs_single[LAST4_FRAME] = 512;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ ref_costs_single[BWDREF_FRAME] = 512;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
ref_costs_single[GOLDEN_FRAME] = 512;
ref_costs_single[ALTREF_FRAME] = 512;
vpx_prob ref_comp_p1 = vp10_get_pred_prob_comp_ref_p1(cm, xd);
vpx_prob ref_comp_p2 = vp10_get_pred_prob_comp_ref_p2(cm, xd);
vpx_prob ref_comp_p3 = vp10_get_pred_prob_comp_ref_p3(cm, xd);
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ vpx_prob bwdref_comp_p = vp10_get_pred_prob_comp_bwdref_p(cm, xd);
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
+
unsigned int base_cost = vp10_cost_bit(intra_inter_p, 1);
ref_costs_comp[LAST_FRAME] =
#endif // CONFIG_EXT_REFS
ref_costs_comp[GOLDEN_FRAME] = base_cost;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ // NOTE(zoeliu): BWDREF and ALTREF each add an extra cost by coding 1
+ // more bit.
+ ref_costs_comp[BWDREF_FRAME] = ref_costs_comp[ALTREF_FRAME] = 0;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
#if CONFIG_EXT_REFS
ref_costs_comp[LAST_FRAME] += vp10_cost_bit(ref_comp_p, 0);
ref_costs_comp[LAST2_FRAME] += vp10_cost_bit(ref_comp_p, 0);
ref_costs_comp[LAST3_FRAME] += vp10_cost_bit(ref_comp_p3, 1);
ref_costs_comp[LAST4_FRAME] += vp10_cost_bit(ref_comp_p3, 0);
-#else
+#else // CONFIG_EXT_REFS
ref_costs_comp[LAST_FRAME] += vp10_cost_bit(ref_comp_p, 0);
ref_costs_comp[GOLDEN_FRAME] += vp10_cost_bit(ref_comp_p, 1);
+#if CONFIG_BIDIR_PRED
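+    // For compound prediction, bwdref_comp_p codes which backward reference
+    // is used: BWDREF_FRAME (0) or ALTREF_FRAME (1).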
+ ref_costs_comp[BWDREF_FRAME] += vp10_cost_bit(bwdref_comp_p, 0);
+ ref_costs_comp[ALTREF_FRAME] += vp10_cost_bit(bwdref_comp_p, 1);
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
} else {
ref_costs_comp[LAST_FRAME] = 512;
ref_costs_comp[LAST2_FRAME] = 512;
ref_costs_comp[LAST3_FRAME] = 512;
ref_costs_comp[LAST4_FRAME] = 512;
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ ref_costs_comp[BWDREF_FRAME] = 512;
+ ref_costs_comp[ALTREF_FRAME] = 512;
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
ref_costs_comp[GOLDEN_FRAME] = 512;
}
VP9_LAST4_FLAG,
#endif // CONFIG_EXT_REFS
VP9_GOLD_FLAG,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ VP9_BWD_FLAG,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
VP9_ALT_FLAG
};
int64_t best_rd = best_rd_so_far;
// Skip checking missing references in both single and compound reference
// modes. Note that a mode will be skipped iff both reference frames
// are masked out.
- ref_frame_skip_mask[0] |= (1 << ref_frame);
- ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
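+        // If only one of the backward references (BWDREF or ALTREF) is
+        // missing, mask out just that frame as a second reference rather
+        // than disabling all compound modes.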
+ if (ref_frame == BWDREF_FRAME || ref_frame == ALTREF_FRAME) {
+ ref_frame_skip_mask[0] |= (1 << ref_frame);
+ ref_frame_skip_mask[1] |= ((1 << ref_frame) | 0x01);
+ } else {
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ ref_frame_skip_mask[0] |= (1 << ref_frame);
+ ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
} else {
for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
// Skip fixed mv modes for poor references
(1 << LAST2_FRAME) |
(1 << LAST3_FRAME) |
(1 << LAST4_FRAME) |
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ (1 << BWDREF_FRAME) |
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
(1 << GOLDEN_FRAME);
ref_frame_skip_mask[1] = SECOND_REF_FRAME_MASK;
+    // TODO(zoeliu): To further explore whether the following needs to be done
+    //               for BWDREF_FRAME as well.
mode_skip_mask[ALTREF_FRAME] = ~INTER_NEAREST_NEAR_ZERO;
if (frame_mv[NEARMV][ALTREF_FRAME].as_int != 0)
mode_skip_mask[ALTREF_FRAME] |= (1 << NEARMV);
ref_frame_skip_mask[0] |= GOLDEN_FRAME_MODE_MASK;
ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
break;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ case BWDREF_FRAME:
+ ref_frame_skip_mask[0] |= BWD_REF_MODE_MASK;
+ ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
+ break;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
case ALTREF_FRAME:
ref_frame_skip_mask[0] |= ALT_REF_MODE_MASK;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
break;
case NONE:
case MAX_REF_FRAMES:
comp_pred = second_ref_frame > INTRA_FRAME;
if (comp_pred) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+      // TODO(zoeliu): To further justify whether the following is needed.
+ if (cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index]
+ != LASTNRF_UPDATE && second_ref_frame == BWDREF_FRAME) {
+ continue;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
if (!cpi->allow_comp_inter_inter)
continue;
VP9_LAST4_FLAG,
#endif // CONFIG_EXT_REFS
VP9_GOLD_FLAG,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ VP9_BWD_FLAG,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
VP9_ALT_FLAG
};
int64_t best_rd = best_rd_so_far;
(1 << LAST2_FRAME) |
(1 << LAST3_FRAME) |
(1 << LAST4_FRAME) |
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ (1 << BWDREF_FRAME) |
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
(1 << ALTREF_FRAME);
ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
(1 << LAST2_FRAME) |
(1 << LAST3_FRAME) |
(1 << LAST4_FRAME) |
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ (1 << BWDREF_FRAME) |
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
(1 << ALTREF_FRAME);
ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
break;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ case BWDREF_FRAME:
+ ref_frame_skip_mask[0] |= (1 << LAST_FRAME) |
+ (1 << GOLDEN_FRAME) |
+ (1 << ALTREF_FRAME);
+ ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
+ break;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
case ALTREF_FRAME:
ref_frame_skip_mask[0] |= (1 << GOLDEN_FRAME) |
#if CONFIG_EXT_REFS
(1 << LAST2_FRAME) |
(1 << LAST3_FRAME) |
(1 << LAST4_FRAME) |
+#else // CONFIG_EXT_REFS
+#if CONFIG_BIDIR_PRED
+ (1 << BWDREF_FRAME) |
+#endif // CONFIG_BIDIR_PRED
#endif // CONFIG_EXT_REFS
(1 << LAST_FRAME);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ ref_frame_skip_mask[1] |= SECOND_REF_FRAME_MASK;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
break;
case NONE:
case MAX_REF_FRAMES:
comp_pred = second_ref_frame > INTRA_FRAME;
if (comp_pred) {
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+      // TODO(zoeliu): To further justify whether the following is needed.
+ if (cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index]
+ != LASTNRF_UPDATE && second_ref_frame == BWDREF_FRAME) {
+ continue;
+ }
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
if (!cpi->allow_comp_inter_inter)
continue;
if (!(cpi->ref_frame_flags & flag_list[second_ref_frame]))
struct vp10_extracfg {
int cpu_used; // available cpu percentage in 1/16
unsigned int enable_auto_alt_ref;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ unsigned int enable_auto_bwd_ref;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
unsigned int noise_sensitivity;
unsigned int sharpness;
unsigned int static_thresh;
static struct vp10_extracfg default_extra_cfg = {
0, // cpu_used
1, // enable_auto_alt_ref
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ 0, // enable_auto_bwd_ref
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
0, // noise_sensitivity
0, // sharpness
0, // static_thresh
"or kf_max_dist instead.");
RANGE_CHECK(extra_cfg, enable_auto_alt_ref, 0, 2);
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ RANGE_CHECK(extra_cfg, enable_auto_bwd_ref, 0, 2);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
RANGE_CHECK(extra_cfg, cpu_used, -8, 8);
RANGE_CHECK_HI(extra_cfg, noise_sensitivity, 6);
RANGE_CHECK(extra_cfg, superblock_size,
oxcf->speed = abs(extra_cfg->cpu_used);
oxcf->encode_breakout = extra_cfg->static_thresh;
oxcf->enable_auto_arf = extra_cfg->enable_auto_alt_ref;
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ oxcf->enable_auto_brf = extra_cfg->enable_auto_bwd_ref;
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
oxcf->noise_sensitivity = extra_cfg->noise_sensitivity;
oxcf->sharpness = extra_cfg->sharpness;
return update_extra_cfg(ctx, &extra_cfg);
}
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+static vpx_codec_err_t ctrl_set_enable_auto_bwd_ref(vpx_codec_alg_priv_t *ctx,
+ va_list args) {
+ struct vp10_extracfg extra_cfg = ctx->extra_cfg;
+ extra_cfg.enable_auto_bwd_ref = CAST(VP8E_SET_ENABLEAUTOBWDREF, args);
+ return update_extra_cfg(ctx, &extra_cfg);
+}
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
static vpx_codec_err_t ctrl_set_noise_sensitivity(vpx_codec_alg_priv_t *ctx,
va_list args) {
struct vp10_extracfg extra_cfg = ctx->extra_cfg;
if (res == VPX_CODEC_OK) {
// There's no codec control for multiple alt-refs so check the encoder
// instance for its status to determine the compressed data size.
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ data_sz = ctx->cfg.g_w * ctx->cfg.g_h * get_image_bps(img);
+#else
data_sz = ctx->cfg.g_w * ctx->cfg.g_h * get_image_bps(img) / 8 *
(cpi->multi_arf_allowed ? 8 : 2);
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
if (data_sz < 4096)
data_sz = 4096;
if (ctx->cx_data == NULL || ctx->cx_data_sz < data_sz) {
{VP8E_SET_SCALEMODE, ctrl_set_scale_mode},
{VP8E_SET_CPUUSED, ctrl_set_cpuused},
{VP8E_SET_ENABLEAUTOALTREF, ctrl_set_enable_auto_alt_ref},
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+ {VP8E_SET_ENABLEAUTOBWDREF, ctrl_set_enable_auto_bwd_ref},
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
{VP8E_SET_SHARPNESS, ctrl_set_sharpness},
{VP8E_SET_STATIC_THRESHOLD, ctrl_set_static_thresh},
{VP9E_SET_TILE_COLUMNS, ctrl_set_tile_columns},
*/
VP8E_SET_ENABLEAUTOALTREF,
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+  /*!\brief Codec control function to enable the automatic setting and use of
+   * bwd-pred frames.
+ *
+ * Supported in codecs: VP10
+ */
+ VP8E_SET_ENABLEAUTOBWDREF,
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
/*!\brief control function to set noise sensitivity
*
* 0: off, 1: OnYOnly, 2: OnYUV,
#define VPX_CTRL_VP8E_SET_CPUUSED
VPX_CTRL_USE_TYPE(VP8E_SET_ENABLEAUTOALTREF, unsigned int)
#define VPX_CTRL_VP8E_SET_ENABLEAUTOALTREF
+
+#if !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+VPX_CTRL_USE_TYPE(VP8E_SET_ENABLEAUTOBWDREF, unsigned int)
+#define VPX_CTRL_VP8E_SET_ENABLEAUTOBWDREF
+#endif // !CONFIG_EXT_REFS && CONFIG_BIDIR_PRED
+
VPX_CTRL_USE_TYPE(VP8E_SET_NOISE_SENSITIVITY, unsigned int)
#define VPX_CTRL_VP8E_SET_NOISE_SENSITIVITY
VPX_CTRL_USE_TYPE(VP8E_SET_SHARPNESS, unsigned int)