#include "vpx_mem/vpx_mem.h"
#include "onyxd_int.h"
-/* For skip_recon_mb(), add vp8_build_intra_predictors_mby_s(MACROBLOCKD *x) and
- * vp8_build_intra_predictors_mbuv_s(MACROBLOCKD *x).
- */
-
-void vp8mt_build_intra_predictors_mby(VP8D_COMP *pbi, MACROBLOCKD *x, int mb_row, int mb_col)
-{
- unsigned char *yabove_row; /* = x->dst.y_buffer - x->dst.y_stride; */
- unsigned char *yleft_col;
- unsigned char yleft_buf[16];
- unsigned char ytop_left; /* = yabove_row[-1]; */
- unsigned char *ypred_ptr = x->predictor;
- int r, c, i;
-
- if (pbi->common.filter_level)
- {
- yabove_row = pbi->mt_yabove_row[mb_row] + mb_col*16 +32;
- yleft_col = pbi->mt_yleft_col[mb_row];
- } else
- {
- yabove_row = x->dst.y_buffer - x->dst.y_stride;
-
- for (i = 0; i < 16; i++)
- yleft_buf[i] = x->dst.y_buffer [i* x->dst.y_stride -1];
- yleft_col = yleft_buf;
- }
-
- ytop_left = yabove_row[-1];
-
- /* for Y */
- switch (x->mode_info_context->mbmi.mode)
- {
- case DC_PRED:
- {
- int expected_dc;
- int i;
- int shift;
- int average = 0;
-
-
- if (x->up_available || x->left_available)
- {
- if (x->up_available)
- {
- for (i = 0; i < 16; i++)
- {
- average += yabove_row[i];
- }
- }
-
- if (x->left_available)
- {
-
- for (i = 0; i < 16; i++)
- {
- average += yleft_col[i];
- }
-
- }
-
-
-
- shift = 3 + x->up_available + x->left_available;
- expected_dc = (average + (1 << (shift - 1))) >> shift;
- }
- else
- {
- expected_dc = 128;
- }
-
- vpx_memset(ypred_ptr, expected_dc, 256);
- }
- break;
- case V_PRED:
- {
-
- for (r = 0; r < 16; r++)
- {
-
- ((int *)ypred_ptr)[0] = ((int *)yabove_row)[0];
- ((int *)ypred_ptr)[1] = ((int *)yabove_row)[1];
- ((int *)ypred_ptr)[2] = ((int *)yabove_row)[2];
- ((int *)ypred_ptr)[3] = ((int *)yabove_row)[3];
- ypred_ptr += 16;
- }
- }
- break;
- case H_PRED:
- {
-
- for (r = 0; r < 16; r++)
- {
-
- vpx_memset(ypred_ptr, yleft_col[r], 16);
- ypred_ptr += 16;
- }
-
- }
- break;
- case TM_PRED:
- {
-
- for (r = 0; r < 16; r++)
- {
- for (c = 0; c < 16; c++)
- {
- int pred = yleft_col[r] + yabove_row[ c] - ytop_left;
-
- if (pred < 0)
- pred = 0;
-
- if (pred > 255)
- pred = 255;
-
- ypred_ptr[c] = pred;
- }
-
- ypred_ptr += 16;
- }
-
- }
- break;
- case B_PRED:
- case NEARESTMV:
- case NEARMV:
- case ZEROMV:
- case NEWMV:
- case SPLITMV:
- case MB_MODE_COUNT:
- break;
- }
-}
-
void vp8mt_build_intra_predictors_mby_s(VP8D_COMP *pbi, MACROBLOCKD *x, int mb_row, int mb_col)
{
unsigned char *yabove_row; /* = x->dst.y_buffer - x->dst.y_stride; */
}
}
-void vp8mt_build_intra_predictors_mbuv(VP8D_COMP *pbi, MACROBLOCKD *x, int mb_row, int mb_col)
-{
- unsigned char *uabove_row; /* = x->dst.u_buffer - x->dst.uv_stride; */
- unsigned char *uleft_col; /*[16];*/
- unsigned char uleft_buf[8];
- unsigned char utop_left; /* = uabove_row[-1]; */
- unsigned char *vabove_row; /* = x->dst.v_buffer - x->dst.uv_stride; */
- unsigned char *vleft_col; /*[20];*/
- unsigned char vleft_buf[8];
- unsigned char vtop_left; /* = vabove_row[-1]; */
- unsigned char *upred_ptr = &x->predictor[256];
- unsigned char *vpred_ptr = &x->predictor[320];
- int i, j;
-
- if (pbi->common.filter_level)
- {
- uabove_row = pbi->mt_uabove_row[mb_row] + mb_col*8 +16;
- vabove_row = pbi->mt_vabove_row[mb_row] + mb_col*8 +16;
- uleft_col = pbi->mt_uleft_col[mb_row];
- vleft_col = pbi->mt_vleft_col[mb_row];
- } else
- {
- uabove_row = x->dst.u_buffer - x->dst.uv_stride;
- vabove_row = x->dst.v_buffer - x->dst.uv_stride;
-
- for (i = 0; i < 8; i++)
- {
- uleft_buf[i] = x->dst.u_buffer [i* x->dst.uv_stride -1];
- vleft_buf[i] = x->dst.v_buffer [i* x->dst.uv_stride -1];
- }
- uleft_col = uleft_buf;
- vleft_col = vleft_buf;
- }
- utop_left = uabove_row[-1];
- vtop_left = vabove_row[-1];
-
- switch (x->mode_info_context->mbmi.uv_mode)
- {
- case DC_PRED:
- {
- int expected_udc;
- int expected_vdc;
- int i;
- int shift;
- int Uaverage = 0;
- int Vaverage = 0;
-
- if (x->up_available)
- {
- for (i = 0; i < 8; i++)
- {
- Uaverage += uabove_row[i];
- Vaverage += vabove_row[i];
- }
- }
-
- if (x->left_available)
- {
- for (i = 0; i < 8; i++)
- {
- Uaverage += uleft_col[i];
- Vaverage += vleft_col[i];
- }
- }
-
- if (!x->up_available && !x->left_available)
- {
- expected_udc = 128;
- expected_vdc = 128;
- }
- else
- {
- shift = 2 + x->up_available + x->left_available;
- expected_udc = (Uaverage + (1 << (shift - 1))) >> shift;
- expected_vdc = (Vaverage + (1 << (shift - 1))) >> shift;
- }
-
-
- vpx_memset(upred_ptr, expected_udc, 64);
- vpx_memset(vpred_ptr, expected_vdc, 64);
-
-
- }
- break;
- case V_PRED:
- {
- int i;
-
- for (i = 0; i < 8; i++)
- {
- vpx_memcpy(upred_ptr, uabove_row, 8);
- vpx_memcpy(vpred_ptr, vabove_row, 8);
- upred_ptr += 8;
- vpred_ptr += 8;
- }
-
- }
- break;
- case H_PRED:
- {
- int i;
-
- for (i = 0; i < 8; i++)
- {
- vpx_memset(upred_ptr, uleft_col[i], 8);
- vpx_memset(vpred_ptr, vleft_col[i], 8);
- upred_ptr += 8;
- vpred_ptr += 8;
- }
- }
-
- break;
- case TM_PRED:
- {
- int i;
-
- for (i = 0; i < 8; i++)
- {
- for (j = 0; j < 8; j++)
- {
- int predu = uleft_col[i] + uabove_row[j] - utop_left;
- int predv = vleft_col[i] + vabove_row[j] - vtop_left;
-
- if (predu < 0)
- predu = 0;
-
- if (predu > 255)
- predu = 255;
-
- if (predv < 0)
- predv = 0;
-
- if (predv > 255)
- predv = 255;
-
- upred_ptr[j] = predu;
- vpred_ptr[j] = predv;
- }
-
- upred_ptr += 8;
- vpred_ptr += 8;
- }
-
- }
- break;
- case B_PRED:
- case NEARESTMV:
- case NEARMV:
- case ZEROMV:
- case NEWMV:
- case SPLITMV:
- case MB_MODE_COUNT:
- break;
- }
-}
-
-void vp8mt_build_intra_predictors_mbuv_s(VP8D_COMP *pbi, MACROBLOCKD *x, int mb_row, int mb_col)
-{
- unsigned char *uabove_row; /* = x->dst.u_buffer - x->dst.uv_stride; */
- unsigned char *uleft_col; /*[16];*/
- unsigned char uleft_buf[8];
- unsigned char utop_left; /* = uabove_row[-1]; */
- unsigned char *vabove_row; /* = x->dst.v_buffer - x->dst.uv_stride; */
- unsigned char *vleft_col; /*[20];*/
- unsigned char vleft_buf[8];
- unsigned char vtop_left; /* = vabove_row[-1]; */
- unsigned char *upred_ptr = x->dst.u_buffer; /*&x->predictor[256];*/
- unsigned char *vpred_ptr = x->dst.v_buffer; /*&x->predictor[320];*/
- int uv_stride = x->dst.uv_stride;
- int i, j;
-
- if (pbi->common.filter_level)
- {
- uabove_row = pbi->mt_uabove_row[mb_row] + mb_col*8 +16;
- vabove_row = pbi->mt_vabove_row[mb_row] + mb_col*8 +16;
- uleft_col = pbi->mt_uleft_col[mb_row];
- vleft_col = pbi->mt_vleft_col[mb_row];
- } else
- {
- uabove_row = x->dst.u_buffer - x->dst.uv_stride;
- vabove_row = x->dst.v_buffer - x->dst.uv_stride;
-
- for (i = 0; i < 8; i++)
- {
- uleft_buf[i] = x->dst.u_buffer [i* x->dst.uv_stride -1];
- vleft_buf[i] = x->dst.v_buffer [i* x->dst.uv_stride -1];
- }
- uleft_col = uleft_buf;
- vleft_col = vleft_buf;
- }
- utop_left = uabove_row[-1];
- vtop_left = vabove_row[-1];
-
- switch (x->mode_info_context->mbmi.uv_mode)
- {
- case DC_PRED:
- {
- int expected_udc;
- int expected_vdc;
- int i;
- int shift;
- int Uaverage = 0;
- int Vaverage = 0;
-
- if (x->up_available)
- {
- for (i = 0; i < 8; i++)
- {
- Uaverage += uabove_row[i];
- Vaverage += vabove_row[i];
- }
- }
-
- if (x->left_available)
- {
- for (i = 0; i < 8; i++)
- {
- Uaverage += uleft_col[i];
- Vaverage += vleft_col[i];
- }
- }
-
- if (!x->up_available && !x->left_available)
- {
- expected_udc = 128;
- expected_vdc = 128;
- }
- else
- {
- shift = 2 + x->up_available + x->left_available;
- expected_udc = (Uaverage + (1 << (shift - 1))) >> shift;
- expected_vdc = (Vaverage + (1 << (shift - 1))) >> shift;
- }
-
-
- /*vpx_memset(upred_ptr,expected_udc,64);
- vpx_memset(vpred_ptr,expected_vdc,64);*/
- for (i = 0; i < 8; i++)
- {
- vpx_memset(upred_ptr, expected_udc, 8);
- vpx_memset(vpred_ptr, expected_vdc, 8);
- upred_ptr += uv_stride; /*8;*/
- vpred_ptr += uv_stride; /*8;*/
- }
- }
- break;
- case V_PRED:
- {
- int i;
-
- for (i = 0; i < 8; i++)
- {
- vpx_memcpy(upred_ptr, uabove_row, 8);
- vpx_memcpy(vpred_ptr, vabove_row, 8);
- upred_ptr += uv_stride; /*8;*/
- vpred_ptr += uv_stride; /*8;*/
- }
-
- }
- break;
- case H_PRED:
- {
- int i;
-
- for (i = 0; i < 8; i++)
- {
- vpx_memset(upred_ptr, uleft_col[i], 8);
- vpx_memset(vpred_ptr, vleft_col[i], 8);
- upred_ptr += uv_stride; /*8;*/
- vpred_ptr += uv_stride; /*8;*/
- }
- }
-
- break;
- case TM_PRED:
- {
- int i;
-
- for (i = 0; i < 8; i++)
- {
- for (j = 0; j < 8; j++)
- {
- int predu = uleft_col[i] + uabove_row[j] - utop_left;
- int predv = vleft_col[i] + vabove_row[j] - vtop_left;
-
- if (predu < 0)
- predu = 0;
-
- if (predu > 255)
- predu = 255;
-
- if (predv < 0)
- predv = 0;
-
- if (predv > 255)
- predv = 255;
-
- upred_ptr[j] = predu;
- vpred_ptr[j] = predv;
- }
-
- upred_ptr += uv_stride; /*8;*/
- vpred_ptr += uv_stride; /*8;*/
- }
-
- }
- break;
- case B_PRED:
- case NEARESTMV:
- case NEARMV:
- case ZEROMV:
- case NEWMV:
- case SPLITMV:
- case MB_MODE_COUNT:
- break;
- }
-}
-
-
void vp8mt_predict_intra4x4(VP8D_COMP *pbi,
MACROBLOCKD *xd,
int b_mode,
}
else if (!vp8dx_bool_error(xd->current_bc))
{
+ int eobtotal;
eobtotal = vp8_decode_mb_tokens(pbi, xd);
- }
-
- eobtotal |= (xd->mode_info_context->mbmi.mode == B_PRED ||
- xd->mode_info_context->mbmi.mode == SPLITMV);
- if (!eobtotal && !vp8dx_bool_error(xd->current_bc))
- {
- /* Special case: Force the loopfilter to skip when eobtotal and
- * mb_skip_coeff are zero.
- * */
- xd->mode_info_context->mbmi.mb_skip_coeff = 1;
- /*mt_skip_recon_mb(pbi, xd, mb_row, mb_col);*/
- if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
- {
- vp8mt_build_intra_predictors_mbuv_s(pbi, xd, mb_row, mb_col);
- vp8mt_build_intra_predictors_mby_s(pbi, xd, mb_row, mb_col);
- }
- else
- {
- vp8_build_inter16x16_predictors_mb(xd, xd->dst.y_buffer,
- xd->dst.u_buffer, xd->dst.v_buffer,
- xd->dst.y_stride, xd->dst.uv_stride);
- }
- return;
+ /* Special case: Force the loopfilter to skip when eobtotal is zero */
+ xd->mode_info_context->mbmi.mb_skip_coeff = (eobtotal==0);
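+ /* Skipped macroblocks fall through to the common prediction path below. */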
}
if (xd->segmentation_enabled)
/* do prediction */
if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
{
- vp8mt_build_intra_predictors_mbuv_s(pbi, xd, mb_row, mb_col);
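+ /* Prediction is written directly into the destination buffers. With the
+  * loopfilter enabled, the above row and left column come from the copies
+  * saved before loop filtering (mt_*above_row / mt_*left_col); the saved
+  * left column is contiguous, hence the left stride of 1 passed below.
+  */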
+ if (pbi->common.filter_level)
+ {
+ unsigned char *uabove_row;
+ unsigned char *vabove_row;
+ unsigned char *uleft_col;
+ unsigned char *vleft_col;
+ uabove_row = pbi->mt_uabove_row[mb_row] + mb_col*8 +16;
+ vabove_row = pbi->mt_vabove_row[mb_row] + mb_col*8 +16;
+ uleft_col = pbi->mt_uleft_col[mb_row];
+ vleft_col = pbi->mt_vleft_col[mb_row];
+ vp8_build_intra_predictors_mbuv_s(xd, uabove_row,
+ vabove_row,
+ uleft_col,
+ vleft_col,
+ 1,
+ xd->dst.u_buffer, xd->dst.v_buffer);
+ }
+ else
+ {
+ vp8_build_intra_predictors_mbuv_s(xd,
+ xd->dst.u_buffer - xd->dst.uv_stride,
+ xd->dst.v_buffer - xd->dst.uv_stride,
+ xd->dst.u_buffer - 1,
+ xd->dst.v_buffer - 1,
+ xd->dst.uv_stride,
+ xd->dst.u_buffer, xd->dst.v_buffer);
+ }
if (xd->mode_info_context->mbmi.mode != B_PRED)
{