/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#include "vpx_config.h"
#include "vp8_rtcd.h"
#include "./vpx_scale_rtcd.h"
#include "onyxd_int.h"
#include "vp8/common/header.h"
#include "vp8/common/reconintra4x4.h"
#include "vp8/common/reconinter.h"
#include "detokenize.h"
#include "vp8/common/common.h"
#include "vp8/common/invtrans.h"
#include "vp8/common/alloccommon.h"
#include "vp8/common/entropymode.h"
#include "vp8/common/quant_common.h"
#include "vpx_scale/vpx_scale.h"
#include "vp8/common/setupintrarecon.h"

#include "decodemv.h"
#include "vp8/common/extend.h"
#if CONFIG_ERROR_CONCEALMENT
#include "error_concealment.h"
#endif
#include "vpx_mem/vpx_mem.h"
#include "vp8/common/threading.h"
#include "decoderthreading.h"
#include "dboolhuff.h"

#include <assert.h>
#include <stdio.h>

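/* Build the frame-level dequantization tables: for every base Q index,
 * derive the DC/AC dequant values for the Y, Y2 and UV planes from the
 * delta-q values carried in VP8_COMMON.
 */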
void vp8cx_init_de_quantizer(VP8D_COMP *pbi)
{
    int Q;
    VP8_COMMON *const pc = &pbi->common;

    for (Q = 0; Q < QINDEX_RANGE; Q++)
    {
        pc->Y1dequant[Q][0] = (short)vp8_dc_quant(Q, pc->y1dc_delta_q);
        pc->Y2dequant[Q][0] = (short)vp8_dc2quant(Q, pc->y2dc_delta_q);
        pc->UVdequant[Q][0] = (short)vp8_dc_uv_quant(Q, pc->uvdc_delta_q);

        pc->Y1dequant[Q][1] = (short)vp8_ac_yquant(Q);
        pc->Y2dequant[Q][1] = (short)vp8_ac2quant(Q, pc->y2ac_delta_q);
        pc->UVdequant[Q][1] = (short)vp8_ac_uv_quant(Q, pc->uvac_delta_q);
    }
}

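/* Select the dequantizer for one macroblock. When segmentation is enabled,
 * the segment's Q index (absolute or delta) replaces the frame base Q index.
 */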
void vp8_mb_init_dequantizer(VP8D_COMP *pbi, MACROBLOCKD *xd)
{
    int i;
    int QIndex;
    MB_MODE_INFO *mbmi = &xd->mode_info_context->mbmi;
    VP8_COMMON *const pc = &pbi->common;

    /* Decide whether to use the default or alternate baseline Q value. */
    if (xd->segmentation_enabled)
    {
        /* Abs Value */
        if (xd->mb_segement_abs_delta == SEGMENT_ABSDATA)
            QIndex = xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];

        /* Delta Value */
        else
        {
            QIndex = pc->base_qindex + xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];
            QIndex = (QIndex >= 0) ? ((QIndex <= MAXQ) ? QIndex : MAXQ) : 0;    /* Clamp to valid range */
        }
    }
    else
        QIndex = pc->base_qindex;

    /* Set up the macroblock dequant constants */
    xd->dequant_y1_dc[0] = 1;
    xd->dequant_y1[0] = pc->Y1dequant[QIndex][0];
    xd->dequant_y2[0] = pc->Y2dequant[QIndex][0];
    xd->dequant_uv[0] = pc->UVdequant[QIndex][0];

    for (i = 1; i < 16; i++)
    {
        xd->dequant_y1_dc[i] =
        xd->dequant_y1[i] = pc->Y1dequant[QIndex][1];
        xd->dequant_y2[i] = pc->Y2dequant[QIndex][1];
        xd->dequant_uv[i] = pc->UVdequant[QIndex][1];
    }
}

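/* Decode a single macroblock: read its residual tokens, form the intra or
 * inter prediction, then add the inverse-transformed, dequantized residual.
 * With error concealment enabled, corrupt macroblocks fall back to the
 * predictor only.
 */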
static void decode_macroblock(VP8D_COMP *pbi, MACROBLOCKD *xd,
                              unsigned int mb_idx)
{
    MB_PREDICTION_MODE mode;
    int i;
#if CONFIG_ERROR_CONCEALMENT
    int corruption_detected = 0;
#else
    (void)mb_idx;
#endif

    if (xd->mode_info_context->mbmi.mb_skip_coeff)
    {
        vp8_reset_mb_tokens_context(xd);
    }
    else if (!vp8dx_bool_error(xd->current_bc))
    {
        int eobtotal;
        eobtotal = vp8_decode_mb_tokens(pbi, xd);

        /* Special case:  Force the loopfilter to skip when eobtotal is zero */
        xd->mode_info_context->mbmi.mb_skip_coeff = (eobtotal==0);
    }

    mode = xd->mode_info_context->mbmi.mode;

    if (xd->segmentation_enabled)
        vp8_mb_init_dequantizer(pbi, xd);


#if CONFIG_ERROR_CONCEALMENT

    if(pbi->ec_active)
    {
        int throw_residual;
        /* When we have independent partitions we can apply residual even
         * though other partitions within the frame are corrupt.
         */
        throw_residual = (!pbi->independent_partitions &&
                          pbi->frame_corrupt_residual);
        throw_residual = (throw_residual || vp8dx_bool_error(xd->current_bc));

        if ((mb_idx >= pbi->mvs_corrupt_from_mb || throw_residual))
        {
            /* MB with corrupt residuals or corrupt mode/motion vectors.
             * Better to use the predictor as reconstruction.
             */
            pbi->frame_corrupt_residual = 1;
            vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));
            vp8_conceal_corrupt_mb(xd);


            corruption_detected = 1;

            /* force idct to be skipped for B_PRED and use the
             * prediction only for reconstruction
             */
            vpx_memset(xd->eobs, 0, 25);
        }
    }
#endif

    /* do prediction */
    if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
    {
        vp8_build_intra_predictors_mbuv_s(xd,
                                          xd->recon_above[1],
                                          xd->recon_above[2],
                                          xd->recon_left[1],
                                          xd->recon_left[2],
                                          xd->recon_left_stride[1],
                                          xd->dst.u_buffer, xd->dst.v_buffer,
                                          xd->dst.uv_stride);

        if (mode != B_PRED)
        {
            vp8_build_intra_predictors_mby_s(xd,
                                             xd->recon_above[0],
                                             xd->recon_left[0],
                                             xd->recon_left_stride[0],
                                             xd->dst.y_buffer,
                                             xd->dst.y_stride);
        }
        else
        {
            short *DQC = xd->dequant_y1;
            int dst_stride = xd->dst.y_stride;

            /* clear out residual eob info */
            if(xd->mode_info_context->mbmi.mb_skip_coeff)
                vpx_memset(xd->eobs, 0, 25);

            intra_prediction_down_copy(xd, xd->recon_above[0] + 16);

            for (i = 0; i < 16; i++)
            {
                BLOCKD *b = &xd->block[i];
                unsigned char *dst = xd->dst.y_buffer + b->offset;
                B_PREDICTION_MODE b_mode =
                    xd->mode_info_context->bmi[i].as_mode;
                unsigned char *Above = dst - dst_stride;
                unsigned char *yleft = dst - 1;
                int left_stride = dst_stride;
                unsigned char top_left = Above[-1];

                vp8_intra4x4_predict(Above, yleft, left_stride, b_mode,
                                     dst, dst_stride, top_left);

                if (xd->eobs[i])
                {
                    if (xd->eobs[i] > 1)
                    {
                        vp8_dequant_idct_add(b->qcoeff, DQC, dst, dst_stride);
                    }
                    else
                    {
                        vp8_dc_only_idct_add
                            (b->qcoeff[0] * DQC[0],
                                dst, dst_stride,
                                dst, dst_stride);
                        vpx_memset(b->qcoeff, 0, 2 * sizeof(b->qcoeff[0]));
                    }
                }
            }
        }
    }
    else
    {
        vp8_build_inter_predictors_mb(xd);
    }


#if CONFIG_ERROR_CONCEALMENT
    if (corruption_detected)
    {
        return;
    }
#endif

    if(!xd->mode_info_context->mbmi.mb_skip_coeff)
    {
        /* dequantization and idct */
        if (mode != B_PRED)
        {
            short *DQC = xd->dequant_y1;

            if (mode != SPLITMV)
            {
                BLOCKD *b = &xd->block[24];

                /* do 2nd order transform on the dc block */
                if (xd->eobs[24] > 1)
                {
                    vp8_dequantize_b(b, xd->dequant_y2);

                    vp8_short_inv_walsh4x4(&b->dqcoeff[0],
                        xd->qcoeff);
                    vpx_memset(b->qcoeff, 0, 16 * sizeof(b->qcoeff[0]));
                }
                else
                {
                    b->dqcoeff[0] = b->qcoeff[0] * xd->dequant_y2[0];
                    vp8_short_inv_walsh4x4_1(&b->dqcoeff[0],
                        xd->qcoeff);
                    vpx_memset(b->qcoeff, 0, 2 * sizeof(b->qcoeff[0]));
                }

                /* override the dc dequant constant in order to preserve the
                 * dc components
                 */
                DQC = xd->dequant_y1_dc;
            }

            vp8_dequant_idct_add_y_block
                            (xd->qcoeff, DQC,
                             xd->dst.y_buffer,
                             xd->dst.y_stride, xd->eobs);
        }

        vp8_dequant_idct_add_uv_block
                        (xd->qcoeff+16*16, xd->dequant_uv,
                         xd->dst.u_buffer, xd->dst.v_buffer,
                         xd->dst.uv_stride, xd->eobs+16);
    }
}

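/* Read an optional signed 4-bit delta-q value from the header and flag a
 * quantizer update when it differs from the previous value.
 */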
static int get_delta_q(vp8_reader *bc, int prev, int *q_update)
{
    int ret_val = 0;

    if (vp8_read_bit(bc))
    {
        ret_val = vp8_read_literal(bc, 4);

        if (vp8_read_bit(bc))
            ret_val = -ret_val;
    }

    /* Trigger a quantizer update if the delta-q value has changed */
    if (ret_val != prev)
        *q_update = 1;

    return ret_val;
}

#ifdef PACKET_TESTING
#include <stdio.h>
FILE *vpxlog = 0;
#endif

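/* Replicate the top row of each plane into the frame border above it. */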
static void yv12_extend_frame_top_c(YV12_BUFFER_CONFIG *ybf)
{
    int i;
    unsigned char *src_ptr1;
    unsigned char *dest_ptr1;

    unsigned int Border;
    int plane_stride;

    /***********/
    /* Y Plane */
    /***********/
    Border = ybf->border;
    plane_stride = ybf->y_stride;
    src_ptr1 = ybf->y_buffer - Border;
    dest_ptr1 = src_ptr1 - (Border * plane_stride);

    for (i = 0; i < (int)Border; i++)
    {
        vpx_memcpy(dest_ptr1, src_ptr1, plane_stride);
        dest_ptr1 += plane_stride;
    }


    /***********/
    /* U Plane */
    /***********/
    plane_stride = ybf->uv_stride;
    Border /= 2;
    src_ptr1 = ybf->u_buffer - Border;
    dest_ptr1 = src_ptr1 - (Border * plane_stride);

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr1, src_ptr1, plane_stride);
        dest_ptr1 += plane_stride;
    }

    /***********/
    /* V Plane */
    /***********/

    src_ptr1 = ybf->v_buffer - Border;
    dest_ptr1 = src_ptr1 - (Border * plane_stride);

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr1, src_ptr1, plane_stride);
        dest_ptr1 += plane_stride;
    }
}

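/* Replicate the bottom row of each plane into the frame border below it. */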
static void yv12_extend_frame_bottom_c(YV12_BUFFER_CONFIG *ybf)
{
    int i;
    unsigned char *src_ptr1, *src_ptr2;
    unsigned char *dest_ptr2;

    unsigned int Border;
    int plane_stride;
    int plane_height;

    /***********/
    /* Y Plane */
    /***********/
    Border = ybf->border;
    plane_stride = ybf->y_stride;
    plane_height = ybf->y_height;

    src_ptr1 = ybf->y_buffer - Border;
    src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
    dest_ptr2 = src_ptr2 + plane_stride;

    for (i = 0; i < (int)Border; i++)
    {
        vpx_memcpy(dest_ptr2, src_ptr2, plane_stride);
        dest_ptr2 += plane_stride;
    }


    /***********/
    /* U Plane */
    /***********/
    plane_stride = ybf->uv_stride;
    plane_height = ybf->uv_height;
    Border /= 2;

    src_ptr1 = ybf->u_buffer - Border;
    src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
    dest_ptr2 = src_ptr2 + plane_stride;

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr2, src_ptr2, plane_stride);
        dest_ptr2 += plane_stride;
    }

    /***********/
    /* V Plane */
    /***********/

    src_ptr1 = ybf->v_buffer - Border;
    src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
    dest_ptr2 = src_ptr2 + plane_stride;

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr2, src_ptr2, plane_stride);
        dest_ptr2 += plane_stride;
    }
}

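/* Replicate the left-most and right-most columns of one macroblock row of
 * each plane into the left and right frame borders.
 */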
static void yv12_extend_frame_left_right_c(YV12_BUFFER_CONFIG *ybf,
                                           unsigned char *y_src,
                                           unsigned char *u_src,
                                           unsigned char *v_src)
{
    int i;
    unsigned char *src_ptr1, *src_ptr2;
    unsigned char *dest_ptr1, *dest_ptr2;

    unsigned int Border;
    int plane_stride;
    int plane_height;
    int plane_width;

    /***********/
    /* Y Plane */
    /***********/
    Border = ybf->border;
    plane_stride = ybf->y_stride;
    plane_height = 16;
    plane_width = ybf->y_width;

    /* copy the left and right most columns out */
    src_ptr1 = y_src;
    src_ptr2 = src_ptr1 + plane_width - 1;
    dest_ptr1 = src_ptr1 - Border;
    dest_ptr2 = src_ptr2 + 1;

    for (i = 0; i < plane_height; i++)
    {
        vpx_memset(dest_ptr1, src_ptr1[0], Border);
        vpx_memset(dest_ptr2, src_ptr2[0], Border);
        src_ptr1  += plane_stride;
        src_ptr2  += plane_stride;
        dest_ptr1 += plane_stride;
        dest_ptr2 += plane_stride;
    }

    /***********/
    /* U Plane */
    /***********/
    plane_stride = ybf->uv_stride;
    plane_height = 8;
    plane_width = ybf->uv_width;
    Border /= 2;

    /* copy the left and right most columns out */
    src_ptr1 = u_src;
    src_ptr2 = src_ptr1 + plane_width - 1;
    dest_ptr1 = src_ptr1 - Border;
    dest_ptr2 = src_ptr2 + 1;

    for (i = 0; i < plane_height; i++)
    {
        vpx_memset(dest_ptr1, src_ptr1[0], Border);
        vpx_memset(dest_ptr2, src_ptr2[0], Border);
        src_ptr1  += plane_stride;
        src_ptr2  += plane_stride;
        dest_ptr1 += plane_stride;
        dest_ptr2 += plane_stride;
    }

    /***********/
    /* V Plane */
    /***********/

    /* copy the left and right most columns out */
    src_ptr1 = v_src;
    src_ptr2 = src_ptr1 + plane_width - 1;
    dest_ptr1 = src_ptr1 - Border;
    dest_ptr2 = src_ptr2 + 1;

    for (i = 0; i < plane_height; i++)
    {
        vpx_memset(dest_ptr1, src_ptr1[0], Border);
        vpx_memset(dest_ptr2, src_ptr2[0], Border);
        src_ptr1  += plane_stride;
        src_ptr2  += plane_stride;
        dest_ptr1 += plane_stride;
        dest_ptr2 += plane_stride;
    }
}

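/* Single-threaded reconstruction loop: decode each macroblock row, applying
 * the loop filter and extending the frame borders one row behind the row
 * currently being decoded.
 */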
static void decode_mb_rows(VP8D_COMP *pbi)
{
    VP8_COMMON *const pc = &pbi->common;
    MACROBLOCKD *const xd = &pbi->mb;

    MODE_INFO *lf_mic = xd->mode_info_context;

    int ibc = 0;
    int num_part = 1 << pc->multi_token_partition;

    int recon_yoffset, recon_uvoffset;
    int mb_row, mb_col;
    int mb_idx = 0;

    YV12_BUFFER_CONFIG *yv12_fb_new = pbi->dec_fb_ref[INTRA_FRAME];

    int recon_y_stride = yv12_fb_new->y_stride;
    int recon_uv_stride = yv12_fb_new->uv_stride;

    unsigned char *ref_buffer[MAX_REF_FRAMES][3];
    unsigned char *dst_buffer[3];
    unsigned char *lf_dst[3];
    unsigned char *eb_dst[3];
    int i;
    int ref_fb_corrupted[MAX_REF_FRAMES];

    ref_fb_corrupted[INTRA_FRAME] = 0;

    for(i = 1; i < MAX_REF_FRAMES; i++)
    {
        YV12_BUFFER_CONFIG *this_fb = pbi->dec_fb_ref[i];

        ref_buffer[i][0] = this_fb->y_buffer;
        ref_buffer[i][1] = this_fb->u_buffer;
        ref_buffer[i][2] = this_fb->v_buffer;

        ref_fb_corrupted[i] = this_fb->corrupted;
    }

    /* Set up the buffer pointers */
    eb_dst[0] = lf_dst[0] = dst_buffer[0] = yv12_fb_new->y_buffer;
    eb_dst[1] = lf_dst[1] = dst_buffer[1] = yv12_fb_new->u_buffer;
    eb_dst[2] = lf_dst[2] = dst_buffer[2] = yv12_fb_new->v_buffer;

    xd->up_available = 0;

    /* Initialize the loop filter for this frame. */
    if(pc->filter_level)
        vp8_loop_filter_frame_init(pc, xd, pc->filter_level);

    vp8_setup_intra_recon_top_line(yv12_fb_new);

    /* Decode the individual macro block */
    for (mb_row = 0; mb_row < pc->mb_rows; mb_row++)
    {
        if (num_part > 1)
        {
            xd->current_bc = &pbi->mbc[ibc];
            ibc++;

            if (ibc == num_part)
                ibc = 0;
        }

        recon_yoffset = mb_row * recon_y_stride * 16;
        recon_uvoffset = mb_row * recon_uv_stride * 8;

        /* reset contexts */
        xd->above_context = pc->above_context;
        vpx_memset(xd->left_context, 0, sizeof(ENTROPY_CONTEXT_PLANES));

        xd->left_available = 0;

        xd->mb_to_top_edge = -((mb_row * 16) << 3);
        xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;

        xd->recon_above[0] = dst_buffer[0] + recon_yoffset;
        xd->recon_above[1] = dst_buffer[1] + recon_uvoffset;
        xd->recon_above[2] = dst_buffer[2] + recon_uvoffset;

        xd->recon_left[0] = xd->recon_above[0] - 1;
        xd->recon_left[1] = xd->recon_above[1] - 1;
        xd->recon_left[2] = xd->recon_above[2] - 1;

        xd->recon_above[0] -= xd->dst.y_stride;
        xd->recon_above[1] -= xd->dst.uv_stride;
        xd->recon_above[2] -= xd->dst.uv_stride;

        /* TODO: move to outside row loop */
        xd->recon_left_stride[0] = xd->dst.y_stride;
        xd->recon_left_stride[1] = xd->dst.uv_stride;

        setup_intra_recon_left(xd->recon_left[0], xd->recon_left[1],
                               xd->recon_left[2], xd->dst.y_stride,
                               xd->dst.uv_stride);

        for (mb_col = 0; mb_col < pc->mb_cols; mb_col++)
        {
            /* Distance of MB to the various image edges.
             * These are specified to 8th pel as they are always compared to
             * values that are in 1/8th pel units
             */
            xd->mb_to_left_edge = -((mb_col * 16) << 3);
            xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;

#if CONFIG_ERROR_CONCEALMENT
            {
                int corrupt_residual = (!pbi->independent_partitions &&
                                       pbi->frame_corrupt_residual) ||
                                       vp8dx_bool_error(xd->current_bc);
                if (pbi->ec_active &&
                    xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME &&
                    corrupt_residual)
                {
                    /* We have an intra block with corrupt coefficients, better to
                     * conceal with an inter block. Interpolate MVs from neighboring
                     * MBs.
                     *
                     * Note that for the first mb with corrupt residual in a frame,
                     * we might not discover that before decoding the residual. That
                     * happens after this check, and therefore no inter concealment
                     * will be done.
                     */
                    vp8_interpolate_motion(xd,
                                           mb_row, mb_col,
                                           pc->mb_rows, pc->mb_cols,
                                           pc->mode_info_stride);
                }
            }
#endif

            xd->dst.y_buffer = dst_buffer[0] + recon_yoffset;
            xd->dst.u_buffer = dst_buffer[1] + recon_uvoffset;
            xd->dst.v_buffer = dst_buffer[2] + recon_uvoffset;

            if (xd->mode_info_context->mbmi.ref_frame >= LAST_FRAME) {
              MV_REFERENCE_FRAME ref = xd->mode_info_context->mbmi.ref_frame;
              xd->pre.y_buffer = ref_buffer[ref][0] + recon_yoffset;
              xd->pre.u_buffer = ref_buffer[ref][1] + recon_uvoffset;
              xd->pre.v_buffer = ref_buffer[ref][2] + recon_uvoffset;
            } else {
              /* ref_frame is INTRA_FRAME, pre buffer should not be used. */
              xd->pre.y_buffer = 0;
              xd->pre.u_buffer = 0;
              xd->pre.v_buffer = 0;
            }

            /* propagate errors from reference frames */
            xd->corrupted |= ref_fb_corrupted[xd->mode_info_context->mbmi.ref_frame];

            decode_macroblock(pbi, xd, mb_idx);

            mb_idx++;
            xd->left_available = 1;

            /* check if the boolean decoder has suffered an error */
            xd->corrupted |= vp8dx_bool_error(xd->current_bc);

            xd->recon_above[0] += 16;
            xd->recon_above[1] += 8;
            xd->recon_above[2] += 8;
            xd->recon_left[0] += 16;
            xd->recon_left[1] += 8;
            xd->recon_left[2] += 8;

            recon_yoffset += 16;
            recon_uvoffset += 8;

            ++xd->mode_info_context;  /* next mb */

            xd->above_context++;
        }

        /* adjust to the next row of mbs */
        vp8_extend_mb_row(yv12_fb_new, xd->dst.y_buffer + 16,
                          xd->dst.u_buffer + 8, xd->dst.v_buffer + 8);

        ++xd->mode_info_context;      /* skip prediction column */
        xd->up_available = 1;

        if(pc->filter_level)
        {
            if(mb_row > 0)
            {
                if (pc->filter_type == NORMAL_LOOPFILTER)
                    vp8_loop_filter_row_normal(pc, lf_mic, mb_row-1,
                                               recon_y_stride, recon_uv_stride,
                                               lf_dst[0], lf_dst[1], lf_dst[2]);
                else
                    vp8_loop_filter_row_simple(pc, lf_mic, mb_row-1,
                                               recon_y_stride, recon_uv_stride,
                                               lf_dst[0], lf_dst[1], lf_dst[2]);
                if(mb_row > 1)
                {
                    yv12_extend_frame_left_right_c(yv12_fb_new,
                                                   eb_dst[0],
                                                   eb_dst[1],
                                                   eb_dst[2]);

                    eb_dst[0] += recon_y_stride  * 16;
                    eb_dst[1] += recon_uv_stride *  8;
                    eb_dst[2] += recon_uv_stride *  8;
                }

                lf_dst[0] += recon_y_stride  * 16;
                lf_dst[1] += recon_uv_stride *  8;
                lf_dst[2] += recon_uv_stride *  8;
                lf_mic += pc->mb_cols;
                lf_mic++;         /* Skip border mb */
            }
        }
        else
        {
            if(mb_row > 0)
            {
                yv12_extend_frame_left_right_c(yv12_fb_new,
                                               eb_dst[0],
                                               eb_dst[1],
                                               eb_dst[2]);
                eb_dst[0] += recon_y_stride  * 16;
                eb_dst[1] += recon_uv_stride *  8;
                eb_dst[2] += recon_uv_stride *  8;
            }
        }
    }

    if(pc->filter_level)
    {
        if (pc->filter_type == NORMAL_LOOPFILTER)
            vp8_loop_filter_row_normal(pc, lf_mic, mb_row-1, recon_y_stride,
                                       recon_uv_stride, lf_dst[0], lf_dst[1],
                                       lf_dst[2]);
        else
            vp8_loop_filter_row_simple(pc, lf_mic, mb_row-1, recon_y_stride,
                                       recon_uv_stride, lf_dst[0], lf_dst[1],
                                       lf_dst[2]);

        yv12_extend_frame_left_right_c(yv12_fb_new,
                                       eb_dst[0],
                                       eb_dst[1],
                                       eb_dst[2]);
        eb_dst[0] += recon_y_stride  * 16;
        eb_dst[1] += recon_uv_stride *  8;
        eb_dst[2] += recon_uv_stride *  8;
    }
    yv12_extend_frame_left_right_c(yv12_fb_new,
                                   eb_dst[0],
                                   eb_dst[1],
                                   eb_dst[2]);
    yv12_extend_frame_top_c(yv12_fb_new);
    yv12_extend_frame_bottom_c(yv12_fb_new);

}

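/* Read a 3-byte little-endian partition size, decrypting it first if a
 * decrypt callback has been set.
 */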
static unsigned int read_partition_size(VP8D_COMP *pbi,
                                        const unsigned char *cx_size)
{
    unsigned char temp[3];
    if (pbi->decrypt_cb)
    {
        pbi->decrypt_cb(pbi->decrypt_state, cx_size, temp, 3);
        cx_size = temp;
    }
    return cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
}

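/* Check that [start, start + len) lies within the buffer and does not wrap. */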
static int read_is_valid(const unsigned char *start,
                         size_t               len,
                         const unsigned char *end)
{
    return (start + len > start && start + len <= end);
}

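/* Determine the length of token partition i. The last partition's size is
 * implicit; truncated sizes either fall back to the remaining bytes (when
 * error concealment is active) or raise a corrupt-frame error.
 */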
static unsigned int read_available_partition_size(
                                       VP8D_COMP *pbi,
                                       const unsigned char *token_part_sizes,
                                       const unsigned char *fragment_start,
                                       const unsigned char *first_fragment_end,
                                       const unsigned char *fragment_end,
                                       int i,
                                       int num_part)
{
    VP8_COMMON* pc = &pbi->common;
    const unsigned char *partition_size_ptr = token_part_sizes + i * 3;
    unsigned int partition_size = 0;
    ptrdiff_t bytes_left = fragment_end - fragment_start;
    /* Calculate the length of this partition. The last partition
     * size is implicit. If the partition size can't be read, then
     * either use the remaining data in the buffer (for EC mode)
     * or throw an error.
     */
    if (i < num_part - 1)
    {
        if (read_is_valid(partition_size_ptr, 3, first_fragment_end))
            partition_size = read_partition_size(pbi, partition_size_ptr);
        else if (pbi->ec_active)
            partition_size = (unsigned int)bytes_left;
        else
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated partition size data");
    }
    else
        partition_size = (unsigned int)bytes_left;

    /* Validate the calculated partition length. If the buffer
     * described by the partition can't be fully read, then restrict
     * it to the portion that can be (for EC mode) or throw an error.
     */
    if (!read_is_valid(fragment_start, partition_size, fragment_end))
    {
        if (pbi->ec_active)
            partition_size = (unsigned int)bytes_left;
        else
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet or corrupt partition "
                               "%d length", i + 1);
    }
    return partition_size;
}


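/* Read the token partition count, map each partition to a fragment pointer
 * and start one bool decoder per partition.
 */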
static void setup_token_decoder(VP8D_COMP *pbi,
                                const unsigned char* token_part_sizes)
{
    vp8_reader *bool_decoder = &pbi->mbc[0];
    unsigned int partition_idx;
    unsigned int fragment_idx;
    unsigned int num_token_partitions;
    const unsigned char *first_fragment_end = pbi->fragments.ptrs[0] +
                                          pbi->fragments.sizes[0];

    TOKEN_PARTITION multi_token_partition =
            (TOKEN_PARTITION)vp8_read_literal(&pbi->mbc[8], 2);
    if (!vp8dx_bool_error(&pbi->mbc[8]))
        pbi->common.multi_token_partition = multi_token_partition;
    num_token_partitions = 1 << pbi->common.multi_token_partition;

    /* Check for partitions within the fragments and unpack the fragments
     * so that each fragment pointer points to its corresponding partition. */
    for (fragment_idx = 0; fragment_idx < pbi->fragments.count; ++fragment_idx)
    {
        unsigned int fragment_size = pbi->fragments.sizes[fragment_idx];
        const unsigned char *fragment_end = pbi->fragments.ptrs[fragment_idx] +
                                            fragment_size;
        /* Special case for handling the first partition since we have already
         * read its size. */
        if (fragment_idx == 0)
        {
            /* Size of first partition + token partition sizes element */
            ptrdiff_t ext_first_part_size = token_part_sizes -
                pbi->fragments.ptrs[0] + 3 * (num_token_partitions - 1);
            fragment_size -= (unsigned int)ext_first_part_size;
            if (fragment_size > 0)
            {
                pbi->fragments.sizes[0] = (unsigned int)ext_first_part_size;
                /* The fragment contains an additional partition. Move to
                 * next. */
                fragment_idx++;
                pbi->fragments.ptrs[fragment_idx] = pbi->fragments.ptrs[0] +
                  pbi->fragments.sizes[0];
            }
        }
        /* Split the chunk into partitions read from the bitstream */
        while (fragment_size > 0)
        {
            ptrdiff_t partition_size = read_available_partition_size(
                                                 pbi,
                                                 token_part_sizes,
                                                 pbi->fragments.ptrs[fragment_idx],
                                                 first_fragment_end,
                                                 fragment_end,
                                                 fragment_idx - 1,
                                                 num_token_partitions);
            pbi->fragments.sizes[fragment_idx] = (unsigned int)partition_size;
            fragment_size -= (unsigned int)partition_size;
            assert(fragment_idx <= num_token_partitions);
            if (fragment_size > 0)
            {
                /* The fragment contains an additional partition.
                 * Move to next. */
                fragment_idx++;
                pbi->fragments.ptrs[fragment_idx] =
                    pbi->fragments.ptrs[fragment_idx - 1] + partition_size;
            }
        }
    }

    pbi->fragments.count = num_token_partitions + 1;

    for (partition_idx = 1; partition_idx < pbi->fragments.count; ++partition_idx)
    {
        if (vp8dx_start_decode(bool_decoder,
                               pbi->fragments.ptrs[partition_idx],
                               pbi->fragments.sizes[partition_idx],
                               pbi->decrypt_cb, pbi->decrypt_state))
            vpx_internal_error(&pbi->common.error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate bool decoder %d",
                               partition_idx);

        bool_decoder++;
    }

#if CONFIG_MULTITHREAD
    /* Clamp number of decoder threads */
    if (pbi->decoding_thread_count > num_token_partitions - 1)
        pbi->decoding_thread_count = num_token_partitions - 1;
#endif
}


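/* Per-frame initialization of the decoder context: key frames reset the
 * entropy, segmentation and loop-filter state; inter frames select the
 * sub-pixel interpolation filters.
 */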
static void init_frame(VP8D_COMP *pbi)
{
    VP8_COMMON *const pc = &pbi->common;
    MACROBLOCKD *const xd = &pbi->mb;

    if (pc->frame_type == KEY_FRAME)
    {
        /* Various keyframe initializations */
        vpx_memcpy(pc->fc.mvc, vp8_default_mv_context, sizeof(vp8_default_mv_context));

        vp8_init_mbmode_probs(pc);

        vp8_default_coef_probs(pc);

        /* reset the segment feature data to 0 with delta coding (Default state). */
        vpx_memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));
        xd->mb_segement_abs_delta = SEGMENT_DELTADATA;

        /* reset the mode ref deltas for loop filter */
        vpx_memset(xd->ref_lf_deltas, 0, sizeof(xd->ref_lf_deltas));
        vpx_memset(xd->mode_lf_deltas, 0, sizeof(xd->mode_lf_deltas));

        /* All buffers are implicitly updated on key frames. */
        pc->refresh_golden_frame = 1;
        pc->refresh_alt_ref_frame = 1;
        pc->copy_buffer_to_gf = 0;
        pc->copy_buffer_to_arf = 0;

        /* Note that Golden and Altref modes cannot be used on a key frame so
         * ref_frame_sign_bias[] is undefined and meaningless
         */
        pc->ref_frame_sign_bias[GOLDEN_FRAME] = 0;
        pc->ref_frame_sign_bias[ALTREF_FRAME] = 0;
    }
    else
    {
        /* To enable choice of different interpolation filters */
        if (!pc->use_bilinear_mc_filter)
        {
            xd->subpixel_predict        = vp8_sixtap_predict4x4;
            xd->subpixel_predict8x4     = vp8_sixtap_predict8x4;
            xd->subpixel_predict8x8     = vp8_sixtap_predict8x8;
            xd->subpixel_predict16x16   = vp8_sixtap_predict16x16;
        }
        else
        {
            xd->subpixel_predict        = vp8_bilinear_predict4x4;
            xd->subpixel_predict8x4     = vp8_bilinear_predict8x4;
            xd->subpixel_predict8x8     = vp8_bilinear_predict8x8;
            xd->subpixel_predict16x16   = vp8_bilinear_predict16x16;
        }

        if (pbi->decoded_key_frame && pbi->ec_enabled && !pbi->ec_active)
            pbi->ec_active = 1;
    }

    xd->left_context = &pc->left_context;
    xd->mode_info_context = pc->mi;
    xd->frame_type = pc->frame_type;
    xd->mode_info_context->mbmi.mode = DC_PRED;
    xd->mode_info_stride = pc->mode_info_stride;
    xd->corrupted = 0; /* init without corruption */

    xd->fullpixel_mask = 0xffffffff;
    if(pc->full_pixel)
        xd->fullpixel_mask = 0xfffffff8;

}

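/* Top-level frame decode: parse the uncompressed header and the first
 * (mode/mv) partition, set up the token partitions, then reconstruct the
 * frame either single-threaded or with the multithreaded row decoder.
 */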
int vp8_decode_frame(VP8D_COMP *pbi)
{
    vp8_reader *const bc = &pbi->mbc[8];
    VP8_COMMON *const pc = &pbi->common;
    MACROBLOCKD *const xd = &pbi->mb;
    const unsigned char *data = pbi->fragments.ptrs[0];
    const unsigned char *data_end = data + pbi->fragments.sizes[0];
    ptrdiff_t first_partition_length_in_bytes;

    int i, j, k, l;
    const int *const mb_feature_data_bits = vp8_mb_feature_data_bits;
    int corrupt_tokens = 0;
    int prev_independent_partitions = pbi->independent_partitions;

    YV12_BUFFER_CONFIG *yv12_fb_new = pbi->dec_fb_ref[INTRA_FRAME];

    /* start with no corruption of current frame */
    xd->corrupted = 0;
    yv12_fb_new->corrupted = 0;

    if (data_end - data < 3)
    {
        if (!pbi->ec_active)
        {
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet");
        }

        /* Declare the missing frame as an inter frame since it will
           be handled as an inter frame when we have estimated its
           motion vectors. */
        pc->frame_type = INTER_FRAME;
        pc->version = 0;
        pc->show_frame = 1;
        first_partition_length_in_bytes = 0;
    }
    else
    {
        unsigned char clear_buffer[10];
        const unsigned char *clear = data;
        if (pbi->decrypt_cb)
        {
            int n = (int)MIN(sizeof(clear_buffer), data_end - data);
            pbi->decrypt_cb(pbi->decrypt_state, data, clear_buffer, n);
            clear = clear_buffer;
        }

        pc->frame_type = (FRAME_TYPE)(clear[0] & 1);
        pc->version = (clear[0] >> 1) & 7;
        pc->show_frame = (clear[0] >> 4) & 1;
        first_partition_length_in_bytes =
            (clear[0] | (clear[1] << 8) | (clear[2] << 16)) >> 5;

        if (!pbi->ec_active &&
            (data + first_partition_length_in_bytes > data_end
            || data + first_partition_length_in_bytes < data))
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet or corrupt partition 0 length");

        data += 3;
        clear += 3;

        vp8_setup_version(pc);


        if (pc->frame_type == KEY_FRAME)
        {
            /* vet via sync code */
            /* When error concealment is enabled we should only check the sync
             * code if we have enough bits available
             */
            if (!pbi->ec_active || data + 3 < data_end)
            {
                if (clear[0] != 0x9d || clear[1] != 0x01 || clear[2] != 0x2a)
                    vpx_internal_error(&pc->error, VPX_CODEC_UNSUP_BITSTREAM,
                                       "Invalid frame sync code");
            }

            /* If error concealment is enabled we should only parse the new size
             * if we have enough data. Otherwise we will end up with the wrong
             * size.
             */
            if (!pbi->ec_active || data + 6 < data_end)
            {
                pc->Width = (clear[3] | (clear[4] << 8)) & 0x3fff;
                pc->horiz_scale = clear[4] >> 6;
                pc->Height = (clear[5] | (clear[6] << 8)) & 0x3fff;
                pc->vert_scale = clear[6] >> 6;
            }
            data += 7;
            clear += 7;
        }
        else
        {
            vpx_memcpy(&xd->pre, yv12_fb_new, sizeof(YV12_BUFFER_CONFIG));
            vpx_memcpy(&xd->dst, yv12_fb_new, sizeof(YV12_BUFFER_CONFIG));
        }
    }
    if ((!pbi->decoded_key_frame && pc->frame_type != KEY_FRAME))
    {
        return -1;
    }

    init_frame(pbi);

    if (vp8dx_start_decode(bc, data, (unsigned int)(data_end - data),
                           pbi->decrypt_cb, pbi->decrypt_state))
        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate bool decoder 0");
    if (pc->frame_type == KEY_FRAME) {
        (void)vp8_read_bit(bc);  /* colorspace */
        pc->clamp_type = (CLAMP_TYPE)vp8_read_bit(bc);
    }

    /* Is segmentation enabled */
    xd->segmentation_enabled = (unsigned char)vp8_read_bit(bc);

    if (xd->segmentation_enabled)
    {
        /* Signal whether or not the segmentation map is being explicitly updated this frame. */
        xd->update_mb_segmentation_map = (unsigned char)vp8_read_bit(bc);
        xd->update_mb_segmentation_data = (unsigned char)vp8_read_bit(bc);

        if (xd->update_mb_segmentation_data)
        {
            xd->mb_segement_abs_delta = (unsigned char)vp8_read_bit(bc);

            vpx_memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));

            /* For each segmentation feature (Quant and loop filter level) */
            for (i = 0; i < MB_LVL_MAX; i++)
            {
                for (j = 0; j < MAX_MB_SEGMENTS; j++)
                {
                    /* Frame level data */
                    if (vp8_read_bit(bc))
                    {
                        xd->segment_feature_data[i][j] = (signed char)vp8_read_literal(bc, mb_feature_data_bits[i]);

                        if (vp8_read_bit(bc))
                            xd->segment_feature_data[i][j] = -xd->segment_feature_data[i][j];
                    }
                    else
                        xd->segment_feature_data[i][j] = 0;
                }
            }
        }

        if (xd->update_mb_segmentation_map)
        {
            /* Which macro block level features are enabled */
            vpx_memset(xd->mb_segment_tree_probs, 255, sizeof(xd->mb_segment_tree_probs));

            /* Read the probs used to decode the segment id for each macro block. */
            for (i = 0; i < MB_FEATURE_TREE_PROBS; i++)
            {
                /* If not explicitly set value is defaulted to 255 by memset above */
                if (vp8_read_bit(bc))
                    xd->mb_segment_tree_probs[i] = (vp8_prob)vp8_read_literal(bc, 8);
            }
        }
    }
    else
    {
        /* No segmentation updates on this frame */
        xd->update_mb_segmentation_map = 0;
        xd->update_mb_segmentation_data = 0;
    }

    /* Read the loop filter level and type */
    pc->filter_type = (LOOPFILTERTYPE) vp8_read_bit(bc);
    pc->filter_level = vp8_read_literal(bc, 6);
    pc->sharpness_level = vp8_read_literal(bc, 3);

    /* Read in loop filter deltas applied at the MB level based on mode or ref frame. */
    xd->mode_ref_lf_delta_update = 0;
    xd->mode_ref_lf_delta_enabled = (unsigned char)vp8_read_bit(bc);

    if (xd->mode_ref_lf_delta_enabled)
    {
        /* Do the deltas need to be updated */
        xd->mode_ref_lf_delta_update = (unsigned char)vp8_read_bit(bc);

        if (xd->mode_ref_lf_delta_update)
        {
            /* Send update */
            for (i = 0; i < MAX_REF_LF_DELTAS; i++)
            {
                if (vp8_read_bit(bc))
                {
                    /*sign = vp8_read_bit( bc );*/
                    xd->ref_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

                    if (vp8_read_bit(bc))        /* Apply sign */
                        xd->ref_lf_deltas[i] = xd->ref_lf_deltas[i] * -1;
                }
            }

            /* Send update */
            for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
            {
                if (vp8_read_bit(bc))
                {
                    /*sign = vp8_read_bit( bc );*/
                    xd->mode_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

                    if (vp8_read_bit(bc))        /* Apply sign */
                        xd->mode_lf_deltas[i] = xd->mode_lf_deltas[i] * -1;
                }
            }
        }
    }

    setup_token_decoder(pbi, data + first_partition_length_in_bytes);

    xd->current_bc = &pbi->mbc[0];

    /* Read the default quantizers. */
    {
        int Q, q_update;

        Q = vp8_read_literal(bc, 7);  /* AC 1st order Q = default */
        pc->base_qindex = Q;
        q_update = 0;
        pc->y1dc_delta_q = get_delta_q(bc, pc->y1dc_delta_q, &q_update);
        pc->y2dc_delta_q = get_delta_q(bc, pc->y2dc_delta_q, &q_update);
        pc->y2ac_delta_q = get_delta_q(bc, pc->y2ac_delta_q, &q_update);
        pc->uvdc_delta_q = get_delta_q(bc, pc->uvdc_delta_q, &q_update);
        pc->uvac_delta_q = get_delta_q(bc, pc->uvac_delta_q, &q_update);

        if (q_update)
            vp8cx_init_de_quantizer(pbi);

        /* MB level dequantizer setup */
        vp8_mb_init_dequantizer(pbi, &pbi->mb);
    }

    /* Determine if the golden frame or ARF buffer should be updated and how.
     * For all non key frames the GF and ARF refresh flags and sign bias
     * flags must be set explicitly.
     */
    if (pc->frame_type != KEY_FRAME)
    {
        /* Should the GF or ARF be updated from the current frame */
        pc->refresh_golden_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't refresh golden if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->refresh_golden_frame = 0;
#endif

        pc->refresh_alt_ref_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't refresh altref if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->refresh_alt_ref_frame = 0;
#endif

        /* Buffer to buffer copy flags. */
        pc->copy_buffer_to_gf = 0;

        if (!pc->refresh_golden_frame)
            pc->copy_buffer_to_gf = vp8_read_literal(bc, 2);

#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't copy to the golden if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->copy_buffer_to_gf = 0;
#endif

        pc->copy_buffer_to_arf = 0;

        if (!pc->refresh_alt_ref_frame)
            pc->copy_buffer_to_arf = vp8_read_literal(bc, 2);

#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't copy to the alt-ref if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->copy_buffer_to_arf = 0;
#endif


        pc->ref_frame_sign_bias[GOLDEN_FRAME] = vp8_read_bit(bc);
        pc->ref_frame_sign_bias[ALTREF_FRAME] = vp8_read_bit(bc);
    }

    pc->refresh_entropy_probs = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
    /* Assume we shouldn't refresh the probabilities if the bit is
     * missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted)
        pc->refresh_entropy_probs = 0;
#endif
    if (pc->refresh_entropy_probs == 0)
    {
        vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc));
    }

    pc->refresh_last_frame = pc->frame_type == KEY_FRAME  ||  vp8_read_bit(bc);

#if CONFIG_ERROR_CONCEALMENT
    /* Assume we should refresh the last frame if the bit is missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted)
        pc->refresh_last_frame = 1;
#endif

    if (0)
    {
        FILE *z = fopen("decodestats.stt", "a");
        fprintf(z, "%6d F:%d,G:%d,A:%d,L:%d,Q:%d\n",
                pc->current_video_frame,
                pc->frame_type,
                pc->refresh_golden_frame,
                pc->refresh_alt_ref_frame,
                pc->refresh_last_frame,
                pc->base_qindex);
        fclose(z);
    }

    {
        pbi->independent_partitions = 1;

        /* read coef probability tree */
        for (i = 0; i < BLOCK_TYPES; i++)
            for (j = 0; j < COEF_BANDS; j++)
                for (k = 0; k < PREV_COEF_CONTEXTS; k++)
                    for (l = 0; l < ENTROPY_NODES; l++)
                    {
                        vp8_prob *const p = pc->fc.coef_probs [i][j][k] + l;

                        if (vp8_read(bc, vp8_coef_update_probs [i][j][k][l]))
                        {
                            *p = (vp8_prob)vp8_read_literal(bc, 8);
                        }
                        if (k > 0 && *p != pc->fc.coef_probs[i][j][k-1][l])
                            pbi->independent_partitions = 0;
                    }
    }

    /* clear out the coeff buffer */
    vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));

    vp8_decode_mode_mvs(pbi);

#if CONFIG_ERROR_CONCEALMENT
    if (pbi->ec_active &&
            pbi->mvs_corrupt_from_mb < (unsigned int)pc->mb_cols * pc->mb_rows)
    {
        /* Motion vectors are missing in this frame. We will try to estimate
         * them and then continue decoding the frame as usual */
        vp8_estimate_missing_mvs(pbi);
    }
#endif

    vpx_memset(pc->above_context, 0, sizeof(ENTROPY_CONTEXT_PLANES) * pc->mb_cols);
    pbi->frame_corrupt_residual = 0;

#if CONFIG_MULTITHREAD
    if (pbi->b_multithreaded_rd && pc->multi_token_partition != ONE_PARTITION)
    {
        unsigned int thread;
        vp8mt_decode_mb_rows(pbi, xd);
        vp8_yv12_extend_frame_borders(yv12_fb_new);
        for (thread = 0; thread < pbi->decoding_thread_count; ++thread)
            corrupt_tokens |= pbi->mb_row_di[thread].mbd.corrupted;
    }
    else
#endif
    {
        decode_mb_rows(pbi);
        corrupt_tokens |= xd->corrupted;
    }

    /* Collect information about decoder corruption. */
    /* 1. Check first boolean decoder for errors. */
    yv12_fb_new->corrupted = vp8dx_bool_error(bc);
    /* 2. Check the macroblock information */
    yv12_fb_new->corrupted |= corrupt_tokens;

    if (!pbi->decoded_key_frame)
    {
        if (pc->frame_type == KEY_FRAME &&
            !yv12_fb_new->corrupted)
            pbi->decoded_key_frame = 1;
        else
            vpx_internal_error(&pbi->common.error, VPX_CODEC_CORRUPT_FRAME,
                               "A stream must start with a complete key frame");
    }

    /* vpx_log("Decoder: Frame Decoded, Size Roughly:%d bytes  \n",bc->pos+pbi->bc2.pos); */

    if (pc->refresh_entropy_probs == 0)
    {
        vpx_memcpy(&pc->fc, &pc->lfc, sizeof(pc->fc));
        pbi->independent_partitions = prev_independent_partitions;
    }

#ifdef PACKET_TESTING
    {
        FILE *f = fopen("decompressor.VP8", "ab");
        unsigned int size = pbi->bc2.pos + pbi->bc.pos + 8;
        fwrite((void *) &size, 4, 1, f);
        fwrite((void *) pbi->Source, size, 1, f);
        fclose(f);
    }
#endif

    return 0;
}