}
static INLINE int get_ref_frame_buf_idx(const VP10_COMP *const cpi,
- int ref_frame) {
+ MV_REFERENCE_FRAME ref_frame) {
const VP10_COMMON *const cm = &cpi->common;
const int map_idx = get_ref_frame_map_idx(cpi, ref_frame);
return (map_idx != INVALID_IDX) ? cm->ref_frame_map[map_idx] : INVALID_IDX;
buf_idx != INVALID_IDX ? &cm->buffer_pool->frame_bufs[buf_idx].buf : NULL;
}
+static INLINE const YV12_BUFFER_CONFIG *get_upsampled_ref(
+ VP10_COMP *cpi, const MV_REFERENCE_FRAME ref_frame) {
+ // Use up-sampled reference frames.
+ const int buf_idx =
+ cpi->upsampled_ref_idx[get_ref_frame_map_idx(cpi, ref_frame)];
+ return &cpi->upsampled_ref_bufs[buf_idx].buf;
+}
+
static INLINE unsigned int get_token_alloc(int mb_rows, int mb_cols) {
// TODO(JBB): double check we can't exceed this token count if we have a
// 32x32 transform crossing a boundary at a multiple of 16.
int use_fast_coef_casting);
#endif // CONFIG_SUPERTX
// NOTE: get_upsampled_ref() now lives above get_token_alloc(); its previous
// implementation here (which selected among lst_fb_idx(es)/gld_fb_idx/
// alt_fb_idx by hand) was removed in favor of deriving the slot from
// get_ref_frame_map_idx(). Keeping the old body would redefine the function.

#if CONFIG_OBMC
void calc_target_weighted_pred(VP10_COMMON *cm,
MACROBLOCK *x,