/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */


#include "onyxd_int.h"
#include "vp8/common/header.h"
#include "vp8/common/reconintra.h"
#include "vp8/common/reconintra4x4.h"
#include "vp8/common/recon.h"
#include "vp8/common/reconinter.h"
#include "dequantize.h"
#include "detokenize.h"
#include "vp8/common/invtrans.h"
#include "vp8/common/alloccommon.h"
#include "vp8/common/entropymode.h"
#include "vp8/common/quant_common.h"
#include "vpx_scale/vpxscale.h"
#include "vpx_scale/yv12extend.h"
#include "vp8/common/setupintrarecon.h"

#include "decodemv.h"
#include "vp8/common/extend.h"
#if CONFIG_ERROR_CONCEALMENT
#include "error_concealment.h"
#endif
#include "vpx_mem/vpx_mem.h"
#include "vp8/common/idct.h"
#include "vp8/common/threading.h"
#include "decoderthreading.h"
#include "dboolhuff.h"

#include <assert.h>
#include <stdio.h>

void vp8cx_init_de_quantizer(VP8D_COMP *pbi)
{
    int i;
    int Q;
    VP8_COMMON *const pc = &pbi->common;

    for (Q = 0; Q < QINDEX_RANGE; Q++)
    {
        pc->Y1dequant[Q][0] = (short)vp8_dc_quant(Q, pc->y1dc_delta_q);
        pc->Y2dequant[Q][0] = (short)vp8_dc2quant(Q, pc->y2dc_delta_q);
        pc->UVdequant[Q][0] = (short)vp8_dc_uv_quant(Q, pc->uvdc_delta_q);

        /* All the remaining (AC) positions share the same dequant factor
         * within each plane type.
         */
        for (i = 1; i < 16; i++)
        {
            int rc = vp8_default_zig_zag1d[i];

            pc->Y1dequant[Q][rc] = (short)vp8_ac_yquant(Q);
            pc->Y2dequant[Q][rc] = (short)vp8_ac2quant(Q, pc->y2ac_delta_q);
            pc->UVdequant[Q][rc] = (short)vp8_ac_uv_quant(Q, pc->uvac_delta_q);
        }
    }
}

void mb_init_dequantizer(VP8D_COMP *pbi, MACROBLOCKD *xd)
{
    int i;
    int QIndex;
    MB_MODE_INFO *mbmi = &xd->mode_info_context->mbmi;
    VP8_COMMON *const pc = &pbi->common;

    /* Decide whether to use the default or alternate baseline Q value. */
    if (xd->segmentation_enabled)
    {
        /* Abs Value */
        if (xd->mb_segement_abs_delta == SEGMENT_ABSDATA)
            QIndex = xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];

        /* Delta Value */
        else
        {
            QIndex = pc->base_qindex + xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];
            QIndex = (QIndex >= 0) ? ((QIndex <= MAXQ) ? QIndex : MAXQ) : 0;    /* Clamp to valid range */
        }
    }
    else
        QIndex = pc->base_qindex;

    /* Set up the block level dequant pointers */
    for (i = 0; i < 16; i++)
    {
        xd->block[i].dequant = pc->Y1dequant[QIndex];
    }

    for (i = 16; i < 24; i++)
    {
        xd->block[i].dequant = pc->UVdequant[QIndex];
    }

    xd->block[24].dequant = pc->Y2dequant[QIndex];

}

#if CONFIG_RUNTIME_CPU_DETECT
#define RTCD_VTABLE(x) (&(pbi)->common.rtcd.x)
#else
#define RTCD_VTABLE(x) NULL
#endif
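
/* Note: when CONFIG_RUNTIME_CPU_DETECT is disabled, the *_INVOKE macros are
 * expected to resolve to direct function calls, so the NULL vtable pointer
 * above should never actually be dereferenced.
 */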

/* skip_recon_mb() is modified: instead of writing the result to the predictor
 * buffer and then copying it to the dst buffer, we write the result directly
 * to the dst buffer. This eliminates an unnecessary copy.
 */
static void skip_recon_mb(VP8D_COMP *pbi, MACROBLOCKD *xd)
{
    if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
    {
        RECON_INVOKE(&pbi->common.rtcd.recon, build_intra_predictors_mbuv_s)(xd);
        RECON_INVOKE(&pbi->common.rtcd.recon,
                     build_intra_predictors_mby_s)(xd);
    }
    else
    {
        vp8_build_inter16x16_predictors_mb(xd, xd->dst.y_buffer,
                                           xd->dst.u_buffer, xd->dst.v_buffer,
                                           xd->dst.y_stride, xd->dst.uv_stride);
    }
}

static void clamp_mv_to_umv_border(MV *mv, const MACROBLOCKD *xd)
{
    /* If the MV points so far into the UMV border that no visible pixels
     * are used for reconstruction, the subpel part of the MV can be
     * discarded and the MV limited to 16 pixels with equivalent results.
     *
     * This limit kicks in at 19 pixels for the top and left edges, for
     * the 16 pixels plus 3 taps right of the central pixel when subpel
     * filtering. The bottom and right edges use 16 pixels plus 2 pixels
     * left of the central pixel when filtering.
     */
    if (mv->col < (xd->mb_to_left_edge - (19 << 3)))
        mv->col = xd->mb_to_left_edge - (16 << 3);
    else if (mv->col > xd->mb_to_right_edge + (18 << 3))
        mv->col = xd->mb_to_right_edge + (16 << 3);

    if (mv->row < (xd->mb_to_top_edge - (19 << 3)))
        mv->row = xd->mb_to_top_edge - (16 << 3);
    else if (mv->row > xd->mb_to_bottom_edge + (18 << 3))
        mv->row = xd->mb_to_bottom_edge + (16 << 3);
}
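
/* Worked example (MV components and edge distances are in 1/8-pel units):
 * for the macroblock in column 0, mb_to_left_edge is 0, so a column MV below
 * -(19 << 3) = -152 (more than 19 pixels into the left border) is clamped to
 * -(16 << 3) = -128, i.e. exactly 16 pixels, which reconstructs from the same
 * replicated border pixels.
 */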

/* A version of the above function for chroma block MVs.*/
static void clamp_uvmv_to_umv_border(MV *mv, const MACROBLOCKD *xd)
{
    mv->col = (2*mv->col < (xd->mb_to_left_edge - (19 << 3))) ? (xd->mb_to_left_edge - (16 << 3)) >> 1 : mv->col;
    mv->col = (2*mv->col > xd->mb_to_right_edge + (18 << 3)) ? (xd->mb_to_right_edge + (16 << 3)) >> 1 : mv->col;

    mv->row = (2*mv->row < (xd->mb_to_top_edge - (19 << 3))) ? (xd->mb_to_top_edge - (16 << 3)) >> 1 : mv->row;
    mv->row = (2*mv->row > xd->mb_to_bottom_edge + (18 << 3)) ? (xd->mb_to_bottom_edge + (16 << 3)) >> 1 : mv->row;
}
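
/* Chroma MVs are at half the luma resolution, so each component is doubled
 * above to compare against the luma-unit edge limits, and the clamped
 * replacement value is halved (>> 1) before being stored back.
 */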

void clamp_mvs(MACROBLOCKD *xd)
{
    if (xd->mode_info_context->mbmi.mode == SPLITMV)
    {
        int i;

        for (i = 0; i < 16; i++)
            clamp_mv_to_umv_border(&xd->block[i].bmi.mv.as_mv, xd);
        for (i = 16; i < 24; i++)
            clamp_uvmv_to_umv_border(&xd->block[i].bmi.mv.as_mv, xd);
    }
    else
    {
        clamp_mv_to_umv_border(&xd->mode_info_context->mbmi.mv.as_mv, xd);
        clamp_uvmv_to_umv_border(&xd->block[16].bmi.mv.as_mv, xd);
    }

}

static void decode_macroblock(VP8D_COMP *pbi, MACROBLOCKD *xd,
                              unsigned int mb_idx)
{
    int eobtotal = 0;
    int throw_residual = 0;
    MB_PREDICTION_MODE mode;
    int i;

    if (xd->mode_info_context->mbmi.mb_skip_coeff)
    {
        vp8_reset_mb_tokens_context(xd);
    }
    else
    {
        eobtotal = vp8_decode_mb_tokens(pbi, xd);
    }

    /* Perform temporary clamping of the MV to be used for prediction */
    if (xd->mode_info_context->mbmi.need_to_clamp_mvs)
    {
        clamp_mvs(xd);
    }

    mode = xd->mode_info_context->mbmi.mode;

    if (eobtotal == 0 && mode != B_PRED && mode != SPLITMV &&
        !vp8dx_bool_error(xd->current_bc))
    {
        /* Special case: Force the loopfilter to skip when eobtotal and
         * mb_skip_coeff are zero.
         */
        xd->mode_info_context->mbmi.mb_skip_coeff = 1;

        skip_recon_mb(pbi, xd);
        return;
    }

    if (xd->segmentation_enabled)
        mb_init_dequantizer(pbi, xd);

    /* do prediction */
    if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
    {
        RECON_INVOKE(&pbi->common.rtcd.recon, build_intra_predictors_mbuv)(xd);

        if (mode != B_PRED)
        {
            RECON_INVOKE(&pbi->common.rtcd.recon,
                         build_intra_predictors_mby)(xd);
        }
        else
        {
            vp8_intra_prediction_down_copy(xd);
        }
    }
    else
    {
        vp8_build_inter_predictors_mb(xd);
    }

    /* When we have independent partitions we can apply residual even
     * though other partitions within the frame are corrupt.
     */
    throw_residual = (!pbi->independent_partitions &&
                      pbi->frame_corrupt_residual);
    throw_residual = (throw_residual || vp8dx_bool_error(xd->current_bc));

#if CONFIG_ERROR_CONCEALMENT
    if (pbi->ec_active &&
        (mb_idx >= pbi->mvs_corrupt_from_mb || throw_residual))
    {
        /* MB with corrupt residuals or corrupt mode/motion vectors.
         * Better to use the predictor as reconstruction.
         */
        pbi->frame_corrupt_residual = 1;
        vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));
        vp8_conceal_corrupt_mb(xd);
        return;
    }
#endif

    /* dequantization and idct */
    if (mode == B_PRED)
    {
        for (i = 0; i < 16; i++)
        {
            BLOCKD *b = &xd->block[i];
            RECON_INVOKE(RTCD_VTABLE(recon), intra4x4_predict)
                (b, b->bmi.as_mode, b->predictor);

            if (xd->eobs[i] > 1)
            {
                DEQUANT_INVOKE(&pbi->dequant, idct_add)
                    (b->qcoeff, b->dequant, b->predictor,
                     *(b->base_dst) + b->dst, 16, b->dst_stride);
            }
            else
            {
                IDCT_INVOKE(RTCD_VTABLE(idct), idct1_scalar_add)
                    (b->qcoeff[0] * b->dequant[0], b->predictor,
                     *(b->base_dst) + b->dst, 16, b->dst_stride);
                ((int *)b->qcoeff)[0] = 0;
            }
        }

    }
    else if (mode == SPLITMV)
    {
        DEQUANT_INVOKE(&pbi->dequant, idct_add_y_block)
            (xd->qcoeff, xd->block[0].dequant,
             xd->predictor, xd->dst.y_buffer,
             xd->dst.y_stride, xd->eobs);
    }
    else
    {
        BLOCKD *b = &xd->block[24];

        DEQUANT_INVOKE(&pbi->dequant, block)(b);

        /* do 2nd order transform on the dc block */
        if (xd->eobs[24] > 1)
        {
            IDCT_INVOKE(RTCD_VTABLE(idct), iwalsh16)(&b->dqcoeff[0], b->diff);
            ((int *)b->qcoeff)[0] = 0;
            ((int *)b->qcoeff)[1] = 0;
            ((int *)b->qcoeff)[2] = 0;
            ((int *)b->qcoeff)[3] = 0;
            ((int *)b->qcoeff)[4] = 0;
            ((int *)b->qcoeff)[5] = 0;
            ((int *)b->qcoeff)[6] = 0;
            ((int *)b->qcoeff)[7] = 0;
        }
        else
        {
            IDCT_INVOKE(RTCD_VTABLE(idct), iwalsh1)(&b->dqcoeff[0], b->diff);
            ((int *)b->qcoeff)[0] = 0;
        }

        DEQUANT_INVOKE(&pbi->dequant, dc_idct_add_y_block)
            (xd->qcoeff, xd->block[0].dequant,
             xd->predictor, xd->dst.y_buffer,
             xd->dst.y_stride, xd->eobs, xd->block[24].diff);
    }

    DEQUANT_INVOKE(&pbi->dequant, idct_add_uv_block)
        (xd->qcoeff + 16 * 16, xd->block[16].dequant,
         xd->predictor + 16 * 16, xd->dst.u_buffer, xd->dst.v_buffer,
         xd->dst.uv_stride, xd->eobs + 16);
}


static int get_delta_q(vp8_reader *bc, int prev, int *q_update)
{
    int ret_val = 0;

    if (vp8_read_bit(bc))
    {
        ret_val = vp8_read_literal(bc, 4);

        if (vp8_read_bit(bc))
            ret_val = -ret_val;
    }

    /* Trigger a quantizer update if the delta-q value has changed */
    if (ret_val != prev)
        *q_update = 1;

    return ret_val;
}
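
/* Each delta-q in the bitstream is a presence flag, followed (if set) by a
 * 4-bit magnitude and a sign bit, mirroring the reads above; e.g. flag = 1,
 * magnitude = 5, sign = 1 decodes to -5. A sketch of the layout only; the
 * bitstream spec is normative.
 */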

#ifdef PACKET_TESTING
#include <stdio.h>
FILE *vpxlog = 0;
#endif



static void
decode_mb_row(VP8D_COMP *pbi, VP8_COMMON *pc, int mb_row, MACROBLOCKD *xd)
{
    int recon_yoffset, recon_uvoffset;
    int mb_col;
    int ref_fb_idx = pc->lst_fb_idx;
    int dst_fb_idx = pc->new_fb_idx;
    int recon_y_stride = pc->yv12_fb[ref_fb_idx].y_stride;
    int recon_uv_stride = pc->yv12_fb[ref_fb_idx].uv_stride;

    vpx_memset(&pc->left_context, 0, sizeof(pc->left_context));
    recon_yoffset = mb_row * recon_y_stride * 16;
    recon_uvoffset = mb_row * recon_uv_stride * 8;
    /* reset above block coeffs */

    xd->above_context = pc->above_context;
    xd->up_available = (mb_row != 0);

    xd->mb_to_top_edge = -((mb_row * 16)) << 3;
    xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;

    for (mb_col = 0; mb_col < pc->mb_cols; mb_col++)
    {
        /* Distance of Mb to the various image edges.
         * These are specified to 8th pel as they are always compared to values
         * that are in 1/8th pel units
         */
        xd->mb_to_left_edge = -((mb_col * 16) << 3);
        xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;

#if CONFIG_ERROR_CONCEALMENT
        {
            int corrupt_residual = (!pbi->independent_partitions &&
                                    pbi->frame_corrupt_residual) ||
                                   vp8dx_bool_error(xd->current_bc);
            if (pbi->ec_active &&
                xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME &&
                corrupt_residual)
            {
                /* We have an intra block with corrupt coefficients, better to
                 * conceal with an inter block. Interpolate MVs from neighboring
                 * MBs.
                 *
                 * Note that for the first mb with corrupt residual in a frame,
                 * we might not discover that before decoding the residual. That
                 * happens after this check, and therefore no inter concealment
                 * will be done.
                 */
                vp8_interpolate_motion(xd,
                                       mb_row, mb_col,
                                       pc->mb_rows, pc->mb_cols,
                                       pc->mode_info_stride);
            }
        }
#endif

        update_blockd_bmi(xd);

        xd->dst.y_buffer = pc->yv12_fb[dst_fb_idx].y_buffer + recon_yoffset;
        xd->dst.u_buffer = pc->yv12_fb[dst_fb_idx].u_buffer + recon_uvoffset;
        xd->dst.v_buffer = pc->yv12_fb[dst_fb_idx].v_buffer + recon_uvoffset;

        xd->left_available = (mb_col != 0);

        /* Select the appropriate reference frame for this MB */
        if (xd->mode_info_context->mbmi.ref_frame == LAST_FRAME)
            ref_fb_idx = pc->lst_fb_idx;
        else if (xd->mode_info_context->mbmi.ref_frame == GOLDEN_FRAME)
            ref_fb_idx = pc->gld_fb_idx;
        else
            ref_fb_idx = pc->alt_fb_idx;

        xd->pre.y_buffer = pc->yv12_fb[ref_fb_idx].y_buffer + recon_yoffset;
        xd->pre.u_buffer = pc->yv12_fb[ref_fb_idx].u_buffer + recon_uvoffset;
        xd->pre.v_buffer = pc->yv12_fb[ref_fb_idx].v_buffer + recon_uvoffset;

        if (xd->mode_info_context->mbmi.ref_frame != INTRA_FRAME)
        {
            /* propagate errors from reference frames */
            xd->corrupted |= pc->yv12_fb[ref_fb_idx].corrupted;
        }

        vp8_build_uvmvs(xd, pc->full_pixel);

        /*
        if(pc->current_video_frame==0 &&mb_col==1 && mb_row==0)
            pbi->debugoutput =1;
        else
            pbi->debugoutput =0;
        */
        decode_macroblock(pbi, xd, mb_row * pc->mb_cols + mb_col);

        /* check if the boolean decoder has suffered an error */
        xd->corrupted |= vp8dx_bool_error(xd->current_bc);

        recon_yoffset += 16;
        recon_uvoffset += 8;

        ++xd->mode_info_context;  /* next mb */

        xd->above_context++;

    }

    /* adjust to the next row of mbs */
    vp8_extend_mb_row(
        &pc->yv12_fb[dst_fb_idx],
        xd->dst.y_buffer + 16, xd->dst.u_buffer + 8, xd->dst.v_buffer + 8
    );

    ++xd->mode_info_context;      /* skip prediction column */
}


static unsigned int read_partition_size(const unsigned char *cx_size)
{
    const unsigned int size =
        cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
    return size;
}
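
/* Partition sizes are stored as 3-byte little-endian values; for example the
 * bytes {0x34, 0x12, 0x00} decode to 0x001234 = 4660 bytes.
 */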

static void setup_token_decoder_partition_input(VP8D_COMP *pbi)
{
    vp8_reader *bool_decoder = &pbi->bc2;
    int part_idx = 1;

    TOKEN_PARTITION multi_token_partition =
            (TOKEN_PARTITION)vp8_read_literal(&pbi->bc, 2);
    assert(vp8dx_bool_error(&pbi->bc) ||
           multi_token_partition == pbi->common.multi_token_partition);
    if (pbi->num_partitions > 2)
    {
        CHECK_MEM_ERROR(pbi->mbc, vpx_malloc((pbi->num_partitions - 1) *
                                             sizeof(vp8_reader)));
        bool_decoder = pbi->mbc;
    }

    for (; part_idx < pbi->num_partitions; ++part_idx)
    {
        if (vp8dx_start_decode(bool_decoder,
                               pbi->partitions[part_idx],
                               pbi->partition_sizes[part_idx]))
            vpx_internal_error(&pbi->common.error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate bool decoder %d",
                               part_idx);

        bool_decoder++;
    }

#if CONFIG_MULTITHREAD
    /* Clamp number of decoder threads */
    if (pbi->decoding_thread_count > pbi->num_partitions - 1)
        pbi->decoding_thread_count = pbi->num_partitions - 1;
#endif
}


static int read_is_valid(const unsigned char *start,
                         size_t len,
                         const unsigned char *end)
{
    return (start + len > start && start + len <= end);
}


static void setup_token_decoder(VP8D_COMP *pbi,
                                const unsigned char *cx_data)
{
    int num_part;
    int i;
    VP8_COMMON *pc = &pbi->common;
    const unsigned char *user_data_end = pbi->Source + pbi->source_sz;
    vp8_reader *bool_decoder;
    const unsigned char *partition;

    /* Parse number of token partitions to use */
    const TOKEN_PARTITION multi_token_partition =
            (TOKEN_PARTITION)vp8_read_literal(&pbi->bc, 2);
    /* Only update the multi_token_partition field if we are sure the value
     * is correct. */
    if (!pbi->ec_active || !vp8dx_bool_error(&pbi->bc))
        pc->multi_token_partition = multi_token_partition;

    num_part = 1 << pc->multi_token_partition;

    /* Set up pointers to the first partition */
    partition = cx_data;
    bool_decoder = &pbi->bc2;

    if (num_part > 1)
    {
        CHECK_MEM_ERROR(pbi->mbc, vpx_malloc(num_part * sizeof(vp8_reader)));
        bool_decoder = pbi->mbc;
        partition += 3 * (num_part - 1);
    }
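
    /* When more than one token partition is present, the data at cx_data is
     * expected to begin with (num_part - 1) 3-byte size fields, one per
     * partition except the last, whose size is implicit; the partition data
     * itself follows, which is why the first partition starts
     * 3 * (num_part - 1) bytes in.
     */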

    for (i = 0; i < num_part; i++)
    {
        const unsigned char *partition_size_ptr = cx_data + i * 3;
        ptrdiff_t partition_size, bytes_left;

        bytes_left = user_data_end - partition;

        /* Calculate the length of this partition. The last partition
         * size is implicit. If the partition size can't be read, then
         * either use the remaining data in the buffer (for EC mode)
         * or throw an error.
         */
        if (i < num_part - 1)
        {
            if (read_is_valid(partition_size_ptr, 3, user_data_end))
                partition_size = read_partition_size(partition_size_ptr);
            else if (pbi->ec_active)
                partition_size = bytes_left;
            else
                vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                   "Truncated partition size data");
        }
        else
            partition_size = bytes_left;

        /* Validate the calculated partition length. If the buffer
         * described by the partition can't be fully read, then restrict
         * it to the portion that can be (for EC mode) or throw an error.
         */
        if (!read_is_valid(partition, partition_size, user_data_end))
        {
            if (pbi->ec_active)
                partition_size = bytes_left;
            else
                vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                   "Truncated packet or corrupt partition "
                                   "%d length", i + 1);
        }

        if (vp8dx_start_decode(bool_decoder, partition, partition_size))
            vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate bool decoder %d", i + 1);

        /* Advance to the next partition */
        partition += partition_size;
        bool_decoder++;
    }

#if CONFIG_MULTITHREAD
    /* Clamp number of decoder threads */
    if (pbi->decoding_thread_count > num_part - 1)
        pbi->decoding_thread_count = num_part - 1;
#endif
}


static void stop_token_decoder(VP8D_COMP *pbi)
{
    VP8_COMMON *pc = &pbi->common;

    if (pc->multi_token_partition != ONE_PARTITION)
    {
        vpx_free(pbi->mbc);
        pbi->mbc = NULL;
    }
}

static void init_frame(VP8D_COMP *pbi)
{
    VP8_COMMON *const pc = &pbi->common;
    MACROBLOCKD *const xd = &pbi->mb;

    if (pc->frame_type == KEY_FRAME)
    {
        /* Various keyframe initializations */
        vpx_memcpy(pc->fc.mvc, vp8_default_mv_context, sizeof(vp8_default_mv_context));

        vp8_init_mbmode_probs(pc);

        vp8_default_coef_probs(pc);
        vp8_kf_default_bmode_probs(pc->kf_bmode_prob);

        /* reset the segment feature data to 0 with delta coding (Default state). */
        vpx_memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));
        xd->mb_segement_abs_delta = SEGMENT_DELTADATA;

        /* reset the mode and ref deltas for the loop filter */
        vpx_memset(xd->ref_lf_deltas, 0, sizeof(xd->ref_lf_deltas));
        vpx_memset(xd->mode_lf_deltas, 0, sizeof(xd->mode_lf_deltas));

        /* All buffers are implicitly updated on key frames. */
        pc->refresh_golden_frame = 1;
        pc->refresh_alt_ref_frame = 1;
        pc->copy_buffer_to_gf = 0;
        pc->copy_buffer_to_arf = 0;

        /* Note that Golden and Altref modes cannot be used on a key frame so
         * ref_frame_sign_bias[] is undefined and meaningless
         */
        pc->ref_frame_sign_bias[GOLDEN_FRAME] = 0;
        pc->ref_frame_sign_bias[ALTREF_FRAME] = 0;
    }
    else
    {
        if (!pc->use_bilinear_mc_filter)
            pc->mcomp_filter_type = SIXTAP;
        else
            pc->mcomp_filter_type = BILINEAR;

        /* To enable choice of different interpolation filters */
        if (pc->mcomp_filter_type == SIXTAP)
        {
            xd->subpixel_predict      = SUBPIX_INVOKE(RTCD_VTABLE(subpix), sixtap4x4);
            xd->subpixel_predict8x4   = SUBPIX_INVOKE(RTCD_VTABLE(subpix), sixtap8x4);
            xd->subpixel_predict8x8   = SUBPIX_INVOKE(RTCD_VTABLE(subpix), sixtap8x8);
            xd->subpixel_predict16x16 = SUBPIX_INVOKE(RTCD_VTABLE(subpix), sixtap16x16);
        }
        else
        {
            xd->subpixel_predict      = SUBPIX_INVOKE(RTCD_VTABLE(subpix), bilinear4x4);
            xd->subpixel_predict8x4   = SUBPIX_INVOKE(RTCD_VTABLE(subpix), bilinear8x4);
            xd->subpixel_predict8x8   = SUBPIX_INVOKE(RTCD_VTABLE(subpix), bilinear8x8);
            xd->subpixel_predict16x16 = SUBPIX_INVOKE(RTCD_VTABLE(subpix), bilinear16x16);
        }

        if (pbi->decoded_key_frame && pbi->ec_enabled && !pbi->ec_active)
            pbi->ec_active = 1;
    }

    xd->left_context = &pc->left_context;
    xd->mode_info_context = pc->mi;
    xd->frame_type = pc->frame_type;
    xd->mode_info_context->mbmi.mode = DC_PRED;
    xd->mode_info_stride = pc->mode_info_stride;
    xd->corrupted = 0; /* init without corruption */
}

int vp8_decode_frame(VP8D_COMP *pbi)
{
    vp8_reader *const bc = &pbi->bc;
    VP8_COMMON *const pc = &pbi->common;
    MACROBLOCKD *const xd = &pbi->mb;
    const unsigned char *data = (const unsigned char *)pbi->Source;
    const unsigned char *data_end = data + pbi->source_sz;
    ptrdiff_t first_partition_length_in_bytes;

    int mb_row;
    int i, j, k, l;
    const int *const mb_feature_data_bits = vp8_mb_feature_data_bits;
    int corrupt_tokens = 0;
    int prev_independent_partitions = pbi->independent_partitions;

    if (pbi->input_partition)
    {
        data = pbi->partitions[0];
        data_end = data + pbi->partition_sizes[0];
    }

    /* start with no corruption of current frame */
    xd->corrupted = 0;
    pc->yv12_fb[pc->new_fb_idx].corrupted = 0;

    if (data_end - data < 3)
    {
        if (pbi->ec_active)
        {
            /* Declare the missing frame as an inter frame since it will
             * be handled as an inter frame when we have estimated its
             * motion vectors.
             */
            pc->frame_type = INTER_FRAME;
            pc->version = 0;
            pc->show_frame = 1;
            first_partition_length_in_bytes = 0;
        }
        else
        {
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet");
        }
    }
    else
    {
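        /* The 3-byte uncompressed frame tag packs, from the low bit upward:
         * the frame type (0 = key frame), a 3-bit version number, the
         * show_frame flag, and a 19-bit size of the first (header) partition.
         */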
        pc->frame_type = (FRAME_TYPE)(data[0] & 1);
        pc->version = (data[0] >> 1) & 7;
        pc->show_frame = (data[0] >> 4) & 1;
        first_partition_length_in_bytes =
            (data[0] | (data[1] << 8) | (data[2] << 16)) >> 5;
        data += 3;

        if (!pbi->ec_active && (data + first_partition_length_in_bytes > data_end
            || data + first_partition_length_in_bytes < data))
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet or corrupt partition 0 length");
        vp8_setup_version(pc);

        if (pc->frame_type == KEY_FRAME)
        {
            const int Width = pc->Width;
            const int Height = pc->Height;

            /* vet via sync code */
            /* When error concealment is enabled we should only check the sync
             * code if we have enough bits available
             */
            if (!pbi->ec_active || data + 3 < data_end)
            {
                if (data[0] != 0x9d || data[1] != 0x01 || data[2] != 0x2a)
                    vpx_internal_error(&pc->error, VPX_CODEC_UNSUP_BITSTREAM,
                                       "Invalid frame sync code");
            }

            /* If error concealment is enabled we should only parse the new size
             * if we have enough data. Otherwise we will end up with the wrong
             * size.
             */
            if (!pbi->ec_active || data + 6 < data_end)
            {
                pc->Width = (data[3] | (data[4] << 8)) & 0x3fff;
                pc->horiz_scale = data[4] >> 6;
                pc->Height = (data[5] | (data[6] << 8)) & 0x3fff;
                pc->vert_scale = data[6] >> 6;
            }
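
            /* The width and height just parsed are 14-bit values; the top two
             * bits of the second byte of each pair carry the horizontal and
             * vertical upscaling factors.
             */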
            data += 7;

            if (Width != pc->Width || Height != pc->Height)
            {
                int prev_mb_rows = pc->mb_rows;

                if (pc->Width <= 0)
                {
                    pc->Width = Width;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame width");
                }

                if (pc->Height <= 0)
                {
                    pc->Height = Height;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame height");
                }

                if (vp8_alloc_frame_buffers(pc, pc->Width, pc->Height))
                    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                       "Failed to allocate frame buffers");

#if CONFIG_ERROR_CONCEALMENT
                pbi->overlaps = NULL;
                if (pbi->ec_enabled)
                {
                    if (vp8_alloc_overlap_lists(pbi))
                        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                           "Failed to allocate overlap lists "
                                           "for error concealment");
                }
#endif

#if CONFIG_MULTITHREAD
                if (pbi->b_multithreaded_rd)
                    vp8mt_alloc_temp_buffers(pbi, pc->Width, prev_mb_rows);
#endif
            }
        }
    }

    if (pc->Width == 0 || pc->Height == 0)
    {
        return -1;
    }

    init_frame(pbi);

    if (vp8dx_start_decode(bc, data, data_end - data))
        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate bool decoder 0");
    if (pc->frame_type == KEY_FRAME)
    {
        pc->clr_type = (YUV_TYPE)vp8_read_bit(bc);
        pc->clamp_type = (CLAMP_TYPE)vp8_read_bit(bc);
    }

    /* Is segmentation enabled */
    xd->segmentation_enabled = (unsigned char)vp8_read_bit(bc);

    if (xd->segmentation_enabled)
    {
        /* Signal whether or not the segmentation map is being explicitly updated this frame. */
        xd->update_mb_segmentation_map = (unsigned char)vp8_read_bit(bc);
        xd->update_mb_segmentation_data = (unsigned char)vp8_read_bit(bc);

        if (xd->update_mb_segmentation_data)
        {
            xd->mb_segement_abs_delta = (unsigned char)vp8_read_bit(bc);

            vpx_memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));

            /* For each segmentation feature (Quant and loop filter level) */
            for (i = 0; i < MB_LVL_MAX; i++)
            {
                for (j = 0; j < MAX_MB_SEGMENTS; j++)
                {
                    /* Frame level data */
                    if (vp8_read_bit(bc))
                    {
                        xd->segment_feature_data[i][j] = (signed char)vp8_read_literal(bc, mb_feature_data_bits[i]);

                        if (vp8_read_bit(bc))
                            xd->segment_feature_data[i][j] = -xd->segment_feature_data[i][j];
                    }
                    else
                        xd->segment_feature_data[i][j] = 0;
                }
            }
        }

        if (xd->update_mb_segmentation_map)
        {
            /* Which macro block level features are enabled */
            vpx_memset(xd->mb_segment_tree_probs, 255, sizeof(xd->mb_segment_tree_probs));

            /* Read the probs used to decode the segment id for each macro block. */
            for (i = 0; i < MB_FEATURE_TREE_PROBS; i++)
            {
                /* If not explicitly set value is defaulted to 255 by memset above */
                if (vp8_read_bit(bc))
                    xd->mb_segment_tree_probs[i] = (vp8_prob)vp8_read_literal(bc, 8);
            }
        }
    }

    /* Read the loop filter level and type */
    pc->filter_type = (LOOPFILTERTYPE)vp8_read_bit(bc);
    pc->filter_level = vp8_read_literal(bc, 6);
    pc->sharpness_level = vp8_read_literal(bc, 3);

    /* Read in loop filter deltas applied at the MB level based on mode or ref frame. */
    xd->mode_ref_lf_delta_update = 0;
    xd->mode_ref_lf_delta_enabled = (unsigned char)vp8_read_bit(bc);

    if (xd->mode_ref_lf_delta_enabled)
    {
        /* Do the deltas need to be updated */
        xd->mode_ref_lf_delta_update = (unsigned char)vp8_read_bit(bc);

        if (xd->mode_ref_lf_delta_update)
        {
            /* Send update */
            for (i = 0; i < MAX_REF_LF_DELTAS; i++)
            {
                if (vp8_read_bit(bc))
                {
                    /*sign = vp8_read_bit( bc );*/
                    xd->ref_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

                    if (vp8_read_bit(bc))        /* Apply sign */
                        xd->ref_lf_deltas[i] = xd->ref_lf_deltas[i] * -1;
                }
            }

            /* Send update */
            for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
            {
                if (vp8_read_bit(bc))
                {
                    /*sign = vp8_read_bit( bc );*/
                    xd->mode_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

                    if (vp8_read_bit(bc))        /* Apply sign */
                        xd->mode_lf_deltas[i] = xd->mode_lf_deltas[i] * -1;
                }
            }
        }
    }

    if (pbi->input_partition)
    {
        setup_token_decoder_partition_input(pbi);
    }
    else
    {
        setup_token_decoder(pbi, data + first_partition_length_in_bytes);
    }
    xd->current_bc = &pbi->bc2;

    /* Read the default quantizers. */
    {
        int Q, q_update;

        Q = vp8_read_literal(bc, 7);  /* AC 1st order Q = default */
        pc->base_qindex = Q;
        q_update = 0;
        pc->y1dc_delta_q = get_delta_q(bc, pc->y1dc_delta_q, &q_update);
        pc->y2dc_delta_q = get_delta_q(bc, pc->y2dc_delta_q, &q_update);
        pc->y2ac_delta_q = get_delta_q(bc, pc->y2ac_delta_q, &q_update);
        pc->uvdc_delta_q = get_delta_q(bc, pc->uvdc_delta_q, &q_update);
        pc->uvac_delta_q = get_delta_q(bc, pc->uvac_delta_q, &q_update);

        if (q_update)
            vp8cx_init_de_quantizer(pbi);

        /* MB level dequantizer setup */
        mb_init_dequantizer(pbi, &pbi->mb);
    }

    /* Determine if the golden frame or ARF buffer should be updated and how.
     * For all non key frames the GF and ARF refresh flags and sign bias
     * flags must be set explicitly.
     */
    if (pc->frame_type != KEY_FRAME)
    {
        /* Should the GF or ARF be updated from the current frame */
        pc->refresh_golden_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't refresh golden if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->refresh_golden_frame = 0;
#endif

        pc->refresh_alt_ref_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't refresh altref if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->refresh_alt_ref_frame = 0;
#endif

        /* Buffer to buffer copy flags. */
        pc->copy_buffer_to_gf = 0;

        if (!pc->refresh_golden_frame)
            pc->copy_buffer_to_gf = vp8_read_literal(bc, 2);

        pc->copy_buffer_to_arf = 0;

        if (!pc->refresh_alt_ref_frame)
            pc->copy_buffer_to_arf = vp8_read_literal(bc, 2);

        pc->ref_frame_sign_bias[GOLDEN_FRAME] = vp8_read_bit(bc);
        pc->ref_frame_sign_bias[ALTREF_FRAME] = vp8_read_bit(bc);
    }

    pc->refresh_entropy_probs = vp8_read_bit(bc);
    if (pc->refresh_entropy_probs == 0)
    {
        vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc));
    }
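
    /* When refresh_entropy_probs is 0, the current frame contexts are saved
     * to lfc here and copied back after decoding (see the end of this
     * function), so the probability updates read below apply to this frame
     * only.
     */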

    pc->refresh_last_frame = pc->frame_type == KEY_FRAME || vp8_read_bit(bc);

#if CONFIG_ERROR_CONCEALMENT
    /* Assume we should refresh the last frame if the bit is missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted)
        pc->refresh_last_frame = 1;
#endif

    if (0)
    {
        FILE *z = fopen("decodestats.stt", "a");
        fprintf(z, "%6d F:%d,G:%d,A:%d,L:%d,Q:%d\n",
                pc->current_video_frame,
                pc->frame_type,
                pc->refresh_golden_frame,
                pc->refresh_alt_ref_frame,
                pc->refresh_last_frame,
                pc->base_qindex);
        fclose(z);
    }

    {
        pbi->independent_partitions = 1;

        /* read coef probability tree */
        for (i = 0; i < BLOCK_TYPES; i++)
            for (j = 0; j < COEF_BANDS; j++)
                for (k = 0; k < PREV_COEF_CONTEXTS; k++)
                    for (l = 0; l < ENTROPY_NODES; l++)
                    {
                        vp8_prob *const p = pc->fc.coef_probs[i][j][k] + l;

                        if (vp8_read(bc, vp8_coef_update_probs[i][j][k][l]))
                        {
                            *p = (vp8_prob)vp8_read_literal(bc, 8);
                        }
                        if (k > 0 && *p != pc->fc.coef_probs[i][j][k-1][l])
                            pbi->independent_partitions = 0;
                    }
    }

    vpx_memcpy(&xd->pre, &pc->yv12_fb[pc->lst_fb_idx], sizeof(YV12_BUFFER_CONFIG));
    vpx_memcpy(&xd->dst, &pc->yv12_fb[pc->new_fb_idx], sizeof(YV12_BUFFER_CONFIG));

    /* set up the new frame for intra coded blocks */
#if CONFIG_MULTITHREAD
    if (!(pbi->b_multithreaded_rd) || pc->multi_token_partition == ONE_PARTITION || !(pc->filter_level))
#endif
        vp8_setup_intra_recon(&pc->yv12_fb[pc->new_fb_idx]);

    vp8_setup_block_dptrs(xd);

    vp8_build_block_doffsets(xd);

    /* clear out the coeff buffer */
    vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));

    /* Read the mb_no_coeff_skip flag */
    pc->mb_no_coeff_skip = (int)vp8_read_bit(bc);


    vp8_decode_mode_mvs(pbi);

#if CONFIG_ERROR_CONCEALMENT
    if (pbi->ec_active &&
        pbi->mvs_corrupt_from_mb < (unsigned int)pc->mb_cols * pc->mb_rows)
    {
        /* Motion vectors are missing in this frame. We will try to estimate
         * them and then continue decoding the frame as usual */
        vp8_estimate_missing_mvs(pbi);
    }
#endif

    vpx_memset(pc->above_context, 0, sizeof(ENTROPY_CONTEXT_PLANES) * pc->mb_cols);

#if CONFIG_MULTITHREAD
    if (pbi->b_multithreaded_rd && pc->multi_token_partition != ONE_PARTITION)
    {
        int i;
        pbi->frame_corrupt_residual = 0;
        vp8mt_decode_mb_rows(pbi, xd);
        vp8_yv12_extend_frame_borders_ptr(&pc->yv12_fb[pc->new_fb_idx]);    /*cm->frame_to_show);*/
        for (i = 0; i < pbi->decoding_thread_count; ++i)
            corrupt_tokens |= pbi->mb_row_di[i].mbd.corrupted;
    }
    else
#endif
    {
        int ibc = 0;
        int num_part = 1 << pc->multi_token_partition;
        pbi->frame_corrupt_residual = 0;

        /* Decode the individual macro block */
        for (mb_row = 0; mb_row < pc->mb_rows; mb_row++)
        {
            if (num_part > 1)
            {
                xd->current_bc = &pbi->mbc[ibc];
                ibc++;

                if (ibc == num_part)
                    ibc = 0;
            }

            decode_mb_row(pbi, pc, mb_row, xd);
        }
        corrupt_tokens |= xd->corrupted;
    }

    stop_token_decoder(pbi);

    /* Collect information about decoder corruption. */
    /* 1. Check first boolean decoder for errors. */
    pc->yv12_fb[pc->new_fb_idx].corrupted = vp8dx_bool_error(bc);
    /* 2. Check the macroblock information */
    pc->yv12_fb[pc->new_fb_idx].corrupted |= corrupt_tokens;

    if (!pbi->decoded_key_frame)
    {
        if (pc->frame_type == KEY_FRAME &&
            !pc->yv12_fb[pc->new_fb_idx].corrupted)
            pbi->decoded_key_frame = 1;
        else
            vpx_internal_error(&pbi->common.error, VPX_CODEC_CORRUPT_FRAME,
                               "A stream must start with a complete key frame");
    }

    /* vpx_log("Decoder: Frame Decoded, Size Roughly:%d bytes \n", bc->pos + pbi->bc2.pos); */

    /* If this was a KF or GF, note the Q used */
    if ((pc->frame_type == KEY_FRAME) ||
        pc->refresh_golden_frame || pc->refresh_alt_ref_frame)
    {
        pc->last_kf_gf_q = pc->base_qindex;
    }

    if (pc->refresh_entropy_probs == 0)
    {
        vpx_memcpy(&pc->fc, &pc->lfc, sizeof(pc->fc));
        pbi->independent_partitions = prev_independent_partitions;
    }

#ifdef PACKET_TESTING
    {
        FILE *f = fopen("decompressor.VP8", "ab");
        unsigned int size = pbi->bc2.pos + pbi->bc.pos + 8;
        fwrite((void *) &size, 4, 1, f);
        fwrite((void *) pbi->Source, size, 1, f);
        fclose(f);
    }
#endif

    return 0;
}