/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include "onyxd_int.h"
#include "vp9/common/header.h"
#include "vp9/common/reconintra.h"
#include "vp9/common/reconintra4x4.h"
#include "vp9/common/reconinter.h"
#include "vp9/decoder/decodframe.h"
#include "detokenize.h"
#include "vp9/common/invtrans.h"
#include "vp9/common/alloccommon.h"
#include "vp9/common/entropymode.h"
#include "vp9/common/quant_common.h"
#include "vpx_scale/vpxscale.h"
#include "vp9/common/setupintrarecon.h"

#include "vp9/common/extend.h"
#include "vp9/common/modecont.h"
#include "vpx_mem/vpx_mem.h"
#include "vp9/common/idct.h"
#include "dboolhuff.h"

#include "vp9/common/seg_common.h"
#include "vp9/common/entropy.h"

/* Standard headers needed by assert(), FILE I/O and CHAR_BIT used below. */
#include <assert.h>
#include <limits.h>
#include <stdio.h>

#define COEFCOUNT_TESTING
static int merge_index(int v, int n, int modulus) {
  int max1 = (n - 1 - modulus / 2) / modulus + 1;
  if (v < max1) {
    v = v * modulus + modulus / 2;
  } else {
    int w;
    v -= max1;
    w = v;
    v += (v + modulus - modulus / 2) / modulus;
    while (v % modulus == modulus / 2 ||
           w != v - (v + modulus - modulus / 2) / modulus) v++;
  }
  return v;
}
static int inv_remap_prob(int v, int m) {
  const int n = 256;
  const int modulus = MODULUS_PARAM;
  int i;

  v = merge_index(v, n - 1, modulus);
  if ((m << 1) <= n) {
    i = vp9_inv_recenter_nonneg(v + 1, m);
  } else {
    i = n - 1 - vp9_inv_recenter_nonneg(v + 1, n - 1 - m);
  }
  return i;
}
static vp9_prob read_prob_diff_update(vp9_reader *const bc, int oldp) {
  int delp = vp9_decode_term_subexp(bc, SUBEXP_PARAM, 255);
  return (vp9_prob)inv_remap_prob(delp, oldp);
}
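/*
 * Usage sketch (based only on the functions above): a probability update is
 * decoded as a terminated subexponential value and mapped back through
 * inv_remap_prob(), so small deltas around the old probability cost few bits.
 * A typical call site looks like:
 *
 *   if (vp9_read(bc, COEF_UPDATE_PROB))
 *     *p = read_prob_diff_update(bc, *p);
 *
 * (see read_coef_probs_common() further down for the real call sites).
 */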
void vp9_init_de_quantizer(VP9D_COMP *pbi) {
  int i;
  int Q;
  VP9_COMMON *const pc = &pbi->common;

  for (Q = 0; Q < QINDEX_RANGE; Q++) {
    pc->Y1dequant[Q][0] = (short)vp9_dc_quant(Q, pc->y1dc_delta_q);
    pc->Y2dequant[Q][0] = (short)vp9_dc2quant(Q, pc->y2dc_delta_q);
    pc->UVdequant[Q][0] = (short)vp9_dc_uv_quant(Q, pc->uvdc_delta_q);

    /* all the AC values use the same per-plane quantizer */
    for (i = 1; i < 16; i++) {
      int rc = vp9_default_zig_zag1d[i];

      pc->Y1dequant[Q][rc] = (short)vp9_ac_yquant(Q);
      pc->Y2dequant[Q][rc] = (short)vp9_ac2quant(Q, pc->y2ac_delta_q);
      pc->UVdequant[Q][rc] = (short)vp9_ac_uv_quant(Q, pc->uvac_delta_q);
    }
  }
}
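/*
 * Table layout note: for each Q index, entry [0] of Y1dequant/Y2dequant/
 * UVdequant holds the DC dequantizer, and the remaining entries (filled via
 * the zig-zag order above) hold that plane's AC dequantizer, so a whole
 * 16-entry row can later be attached to a block through xd->block[i].dequant.
 */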
static void mb_init_dequantizer(VP9D_COMP *pbi, MACROBLOCKD *xd) {
  int i;
  int QIndex;
  VP9_COMMON *const pc = &pbi->common;
  int segment_id = xd->mode_info_context->mbmi.segment_id;

  // Set the Q baseline allowing for any segment level adjustment
  if (vp9_segfeature_active(xd, segment_id, SEG_LVL_ALT_Q)) {
    if (xd->mb_segment_abs_delta == SEGMENT_ABSDATA)
      QIndex = vp9_get_segdata(xd, segment_id, SEG_LVL_ALT_Q);
    else {
      QIndex = pc->base_qindex +
               vp9_get_segdata(xd, segment_id, SEG_LVL_ALT_Q);
      QIndex = (QIndex >= 0) ? ((QIndex <= MAXQ) ? QIndex : MAXQ) : 0;  /* Clamp to valid range */
    }
  } else
    QIndex = pc->base_qindex;
  xd->q_index = QIndex;

  /* Set up the block level dequant pointers */
  for (i = 0; i < 16; i++) {
    xd->block[i].dequant = pc->Y1dequant[QIndex];
  }

#if CONFIG_LOSSLESS
  if (!QIndex) {
    pbi->common.rtcd.idct.idct1            = vp9_short_inv_walsh4x4_1_x8_c;
    pbi->common.rtcd.idct.idct16           = vp9_short_inv_walsh4x4_x8_c;
    pbi->common.rtcd.idct.idct1_scalar_add = vp9_dc_only_inv_walsh_add_c;
    pbi->common.rtcd.idct.iwalsh1          = vp9_short_inv_walsh4x4_1_lossless_c;
    pbi->common.rtcd.idct.iwalsh16         = vp9_short_inv_walsh4x4_lossless_c;
    pbi->idct_add            = vp9_dequant_idct_add_lossless_c;
    pbi->dc_idct_add         = vp9_dequant_dc_idct_add_lossless_c;
    pbi->dc_idct_add_y_block = vp9_dequant_dc_idct_add_y_block_lossless_c;
    pbi->idct_add_y_block    = vp9_dequant_idct_add_y_block_lossless_c;
    pbi->idct_add_uv_block   = vp9_dequant_idct_add_uv_block_lossless_c;
  } else {
    pbi->common.rtcd.idct.idct1            = vp9_short_idct4x4llm_1_c;
    pbi->common.rtcd.idct.idct16           = vp9_short_idct4x4llm_c;
    pbi->common.rtcd.idct.idct1_scalar_add = vp9_dc_only_idct_add_c;
    pbi->common.rtcd.idct.iwalsh1          = vp9_short_inv_walsh4x4_1_c;
    pbi->common.rtcd.idct.iwalsh16         = vp9_short_inv_walsh4x4_c;
    pbi->idct_add            = vp9_dequant_idct_add;
    pbi->dc_idct_add         = vp9_dequant_dc_idct_add;
    pbi->dc_idct_add_y_block = vp9_dequant_dc_idct_add_y_block;
    pbi->idct_add_y_block    = vp9_dequant_idct_add_y_block;
    pbi->idct_add_uv_block   = vp9_dequant_idct_add_uv_block;
  }
#else
  pbi->idct_add            = vp9_dequant_idct_add;
  pbi->dc_idct_add         = vp9_dequant_dc_idct_add;
  pbi->dc_idct_add_y_block = vp9_dequant_dc_idct_add_y_block;
  pbi->idct_add_y_block    = vp9_dequant_idct_add_y_block;
  pbi->idct_add_uv_block   = vp9_dequant_idct_add_uv_block;
#endif

  for (i = 16; i < 24; i++) {
    xd->block[i].dequant = pc->UVdequant[QIndex];
  }

  xd->block[24].dequant = pc->Y2dequant[QIndex];
}
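/*
 * Note on the function-pointer setup above: the *_lossless_c / inverse-Walsh
 * variants select a Walsh-Hadamard based reconstruction path, while the
 * vp9_short_idct4x4llm_* and plain vp9_dequant_*_add set is the regular
 * integer-DCT path; which set is installed depends on the build
 * configuration and the effective Q index of the macroblock.
 */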
#if CONFIG_RUNTIME_CPU_DETECT
#define RTCD_VTABLE(x) (&(pbi)->common.rtcd.x)
#else
#define RTCD_VTABLE(x) NULL
#endif
/* skip_recon_mb() is modified: instead of writing the result to the predictor
 * buffer and then copying it to the dst buffer, we write the result directly
 * to the dst buffer. This eliminates an unnecessary copy.
 */
static void skip_recon_mb(VP9D_COMP *pbi, MACROBLOCKD *xd) {
  if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) {
#if CONFIG_SUPERBLOCKS
    if (xd->mode_info_context->mbmi.encoded_as_sb) {
      vp9_build_intra_predictors_sbuv_s(xd);
      vp9_build_intra_predictors_sby_s(xd);
    } else {
#endif
      vp9_build_intra_predictors_mbuv_s(xd);
      vp9_build_intra_predictors_mby_s(xd);
#if CONFIG_SUPERBLOCKS
    }
#endif
  } else {
#if CONFIG_SUPERBLOCKS
    if (xd->mode_info_context->mbmi.encoded_as_sb) {
      vp9_build_inter32x32_predictors_sb(xd,
                                         xd->dst.y_buffer,
                                         xd->dst.u_buffer,
                                         xd->dst.v_buffer,
                                         xd->dst.y_stride,
                                         xd->dst.uv_stride);
    } else {
#endif
      vp9_build_1st_inter16x16_predictors_mb(xd,
                                             xd->dst.y_buffer,
                                             xd->dst.u_buffer,
                                             xd->dst.v_buffer,
                                             xd->dst.y_stride,
                                             xd->dst.uv_stride);

      if (xd->mode_info_context->mbmi.second_ref_frame > 0) {
        vp9_build_2nd_inter16x16_predictors_mb(xd,
                                               xd->dst.y_buffer,
                                               xd->dst.u_buffer,
                                               xd->dst.v_buffer,
                                               xd->dst.y_stride,
                                               xd->dst.uv_stride);
      }
#if CONFIG_COMP_INTERINTRA_PRED
      else if (xd->mode_info_context->mbmi.second_ref_frame == INTRA_FRAME) {
        vp9_build_interintra_16x16_predictors_mb(xd,
                                                 xd->dst.y_buffer,
                                                 xd->dst.u_buffer,
                                                 xd->dst.v_buffer,
                                                 xd->dst.y_stride,
                                                 xd->dst.uv_stride);
      }
#endif
#if CONFIG_SUPERBLOCKS
    }
#endif
  }
}
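/*
 * skip_recon_mb() is the no-residual path: per the comment above it, the _s
 * predictor builders write the intra or inter prediction straight into
 * xd->dst, so reconstruction is complete without any dequant/IDCT work.
 */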
#if CONFIG_SUPERBLOCKS
static void decode_superblock(VP9D_COMP *pbi, MACROBLOCKD *xd,
                              int mb_row, unsigned int mb_col,
                              BOOL_DECODER* const bc) {
  int i, n, eobtotal;
  TX_SIZE tx_size = xd->mode_info_context->mbmi.txfm_size;
  VP9_COMMON *const pc = &pbi->common;
  MODE_INFO *orig_mi = xd->mode_info_context;

  assert(xd->mode_info_context->mbmi.encoded_as_sb);

  // re-initialize macroblock dequantizer before detokenization
  if (xd->segmentation_enabled)
    mb_init_dequantizer(pbi, xd);

  if (pbi->common.frame_type != KEY_FRAME)
    vp9_setup_interp_filters(xd, xd->mode_info_context->mbmi.interp_filter, pc);

  if (xd->mode_info_context->mbmi.mb_skip_coeff) {
    vp9_reset_mb_tokens_context(xd);
    if (mb_col < pc->mb_cols - 1)
      xd->above_context++;
    if (mb_row < pc->mb_rows - 1)
      xd->left_context++;
    vp9_reset_mb_tokens_context(xd);
    if (mb_col < pc->mb_cols - 1)
      xd->above_context--;
    if (mb_row < pc->mb_rows - 1)
      xd->left_context--;

    /* Special case: Force the loopfilter to skip when eobtotal and
     * mb_skip_coeff are zero.
     */
    skip_recon_mb(pbi, xd);
    return;
  }

  if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) {
    vp9_build_intra_predictors_sby_s(xd);
    vp9_build_intra_predictors_sbuv_s(xd);
  } else {
    vp9_build_inter32x32_predictors_sb(xd, xd->dst.y_buffer,
                                       xd->dst.u_buffer, xd->dst.v_buffer,
                                       xd->dst.y_stride, xd->dst.uv_stride);
  }

  /* dequantization and idct */
  for (n = 0; n < 4; n++) {
    BLOCKD *b = &xd->block[24];
    int x_idx = n & 1, y_idx = n >> 1;

    if (mb_col + x_idx >= pc->mb_cols || mb_row + y_idx >= pc->mb_rows)
      continue;

    xd->above_context = pc->above_context + mb_col + x_idx;
    xd->left_context = pc->left_context + y_idx;
    xd->mode_info_context = orig_mi + x_idx + y_idx * pc->mode_info_stride;
    for (i = 0; i < 25; i++) {
      xd->block[i].eob = 0;
      xd->eobs[i] = 0;
    }

    if (tx_size == TX_16X16) {
      eobtotal = vp9_decode_mb_tokens_16x16(pbi, xd, bc);
    } else if (tx_size == TX_8X8) {
      eobtotal = vp9_decode_mb_tokens_8x8(pbi, xd, bc);
    } else {
      eobtotal = vp9_decode_mb_tokens_4x4(pbi, xd, bc);
    }
    if (eobtotal == 0) {  // skip loopfilter
      xd->mode_info_context->mbmi.mb_skip_coeff = 1;
      continue;
    }

    if (tx_size == TX_16X16) {
      vp9_dequant_idct_add_16x16(xd->qcoeff, xd->block[0].dequant,
          xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16,
          xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16,
          xd->dst.y_stride, xd->dst.y_stride, xd->eobs[0]);
      vp9_dequant_idct_add_uv_block_8x8_inplace_c(xd->qcoeff + 16 * 16,
          xd->block[16].dequant,
          xd->dst.u_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8,
          xd->dst.v_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8,
          xd->dst.uv_stride, xd->eobs + 16, xd);
    } else if (tx_size == TX_8X8) {
      vp9_dequantize_b_2x2(b);
      IDCT_INVOKE(RTCD_VTABLE(idct), ihaar2)(&b->dqcoeff[0], b->diff, 8);
      ((int *)b->qcoeff)[0] = 0;  // 2nd order block are set to 0 after idct
      ((int *)b->qcoeff)[1] = 0;
      ((int *)b->qcoeff)[2] = 0;
      ((int *)b->qcoeff)[3] = 0;
      ((int *)b->qcoeff)[4] = 0;
      ((int *)b->qcoeff)[5] = 0;
      ((int *)b->qcoeff)[6] = 0;
      ((int *)b->qcoeff)[7] = 0;
      vp9_dequant_dc_idct_add_y_block_8x8_inplace_c(xd->qcoeff,
          xd->block[0].dequant,
          xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16,
          xd->dst.y_stride, xd->eobs, xd->block[24].diff, xd);
      vp9_dequant_idct_add_uv_block_8x8_inplace_c(xd->qcoeff + 16 * 16,
          xd->block[16].dequant,
          xd->dst.u_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8,
          xd->dst.v_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8,
          xd->dst.uv_stride, xd->eobs + 16, xd);
    } else {
      if (xd->eobs[24] > 1) {
        IDCT_INVOKE(RTCD_VTABLE(idct), iwalsh16)(&b->dqcoeff[0], b->diff);
        ((int *)b->qcoeff)[0] = 0;
        ((int *)b->qcoeff)[1] = 0;
        ((int *)b->qcoeff)[2] = 0;
        ((int *)b->qcoeff)[3] = 0;
        ((int *)b->qcoeff)[4] = 0;
        ((int *)b->qcoeff)[5] = 0;
        ((int *)b->qcoeff)[6] = 0;
        ((int *)b->qcoeff)[7] = 0;
      } else {
        IDCT_INVOKE(RTCD_VTABLE(idct), iwalsh1)(&b->dqcoeff[0], b->diff);
        ((int *)b->qcoeff)[0] = 0;
      }

      vp9_dequant_dc_idct_add_y_block_4x4_inplace_c(xd->qcoeff,
          xd->block[0].dequant,
          xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16,
          xd->dst.y_stride, xd->eobs, xd->block[24].diff, xd);
      vp9_dequant_idct_add_uv_block_4x4_inplace_c(xd->qcoeff + 16 * 16,
          xd->block[16].dequant,
          xd->dst.u_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8,
          xd->dst.v_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8,
          xd->dst.uv_stride, xd->eobs + 16, xd);
    }
  }

  xd->above_context = pc->above_context + mb_col;
  xd->left_context = pc->left_context;
  xd->mode_info_context = orig_mi;
}
#endif
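/*
 * Structure of decode_superblock(): prediction is built once for the whole
 * 32x32 superblock, then the residual is detokenized and added per 16x16
 * quadrant (n = 0..3 with x_idx/y_idx selecting the quadrant). The above/left
 * entropy contexts and the mode_info pointer are re-aimed at each quadrant
 * and restored to the superblock origin at the end.
 */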
static void decode_macroblock(VP9D_COMP *pbi, MACROBLOCKD *xd,
                              int mb_row, unsigned int mb_col,
                              BOOL_DECODER* const bc) {
  int eobtotal = 0;
  MB_PREDICTION_MODE mode;
  int i;
  int tx_size;
  TX_TYPE tx_type;

#if CONFIG_SUPERBLOCKS
  assert(!xd->mode_info_context->mbmi.encoded_as_sb);
#endif

  // re-initialize macroblock dequantizer before detokenization
  if (xd->segmentation_enabled)
    mb_init_dequantizer(pbi, xd);

  tx_size = xd->mode_info_context->mbmi.txfm_size;
  mode = xd->mode_info_context->mbmi.mode;

  if (xd->mode_info_context->mbmi.mb_skip_coeff) {
    vp9_reset_mb_tokens_context(xd);
  } else if (!bool_error(bc)) {
    for (i = 0; i < 25; i++) {
      xd->block[i].eob = 0;
      xd->eobs[i] = 0;
    }
    if (tx_size == TX_16X16) {
      eobtotal = vp9_decode_mb_tokens_16x16(pbi, xd, bc);
    } else if (tx_size == TX_8X8) {
      eobtotal = vp9_decode_mb_tokens_8x8(pbi, xd, bc);
    } else {
      eobtotal = vp9_decode_mb_tokens_4x4(pbi, xd, bc);
    }
  }

  //mode = xd->mode_info_context->mbmi.mode;
  if (pbi->common.frame_type != KEY_FRAME)
    vp9_setup_interp_filters(xd, xd->mode_info_context->mbmi.interp_filter,
                             &pbi->common);

  if (eobtotal == 0 && mode != B_PRED && mode != SPLITMV
      && !bool_error(bc)) {
    /* Special case: Force the loopfilter to skip when eobtotal and
     * mb_skip_coeff are zero.
     */
    xd->mode_info_context->mbmi.mb_skip_coeff = 1;
    skip_recon_mb(pbi, xd);
    return;
  }

  // moved to be performed before detokenization
  //  if (xd->segmentation_enabled)
  //    mb_init_dequantizer(pbi, xd);

  if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) {
    if (mode != I8X8_PRED) {
      vp9_build_intra_predictors_mbuv(xd);
      if (mode != B_PRED) {
        vp9_build_intra_predictors_mby(xd);
      }
    }
  } else {
    vp9_build_inter_predictors_mb(xd);
  }

  /* dequantization and idct */
  if (mode == I8X8_PRED) {
    for (i = 0; i < 4; i++) {
      int ib = vp9_i8x8_block[i];
      const int iblock[4] = {0, 1, 4, 5};
      int j;
      int i8x8mode;
      BLOCKD *b;
      int idx = (ib & 0x02) ? (ib + 2) : ib;

      short *q  = xd->block[idx].qcoeff;
      short *dq = xd->block[0].dequant;
      unsigned char *pre = xd->block[ib].predictor;
      unsigned char *dst = *(xd->block[ib].base_dst) + xd->block[ib].dst;
      int stride = xd->dst.y_stride;

      b = &xd->block[ib];
      i8x8mode = b->bmi.as_mode.first;
      vp9_intra8x8_predict(b, i8x8mode, b->predictor);

      if (xd->mode_info_context->mbmi.txfm_size == TX_8X8) {
        tx_type = get_tx_type(xd, &xd->block[idx]);
        if (tx_type != DCT_DCT) {
          vp9_ht_dequant_idct_add_8x8_c(tx_type,
                                        q, dq, pre, dst, 16, stride);
        } else {
          vp9_dequant_idct_add_8x8_c(q, dq, pre, dst, 16, stride, 0,
                                     xd->eobs[idx]);
        }
      } else {
        for (j = 0; j < 4; j++) {
          b = &xd->block[ib + iblock[j]];
          vp9_dequant_idct_add(b->qcoeff, b->dequant, b->predictor,
                               *(b->base_dst) + b->dst, 16, b->dst_stride);
        }
      }

      b = &xd->block[16 + i];
      vp9_intra_uv4x4_predict(b, i8x8mode, b->predictor);
      pbi->idct_add(b->qcoeff, b->dequant, b->predictor,
                    *(b->base_dst) + b->dst, 8, b->dst_stride);
      b = &xd->block[20 + i];
      vp9_intra_uv4x4_predict(b, i8x8mode, b->predictor);
      pbi->idct_add(b->qcoeff, b->dequant, b->predictor,
                    *(b->base_dst) + b->dst, 8, b->dst_stride);
    }
  } else if (mode == B_PRED) {
    for (i = 0; i < 16; i++) {
      int b_mode;
#if CONFIG_COMP_INTRA_PRED
      int b_mode2;
#endif
      BLOCKD *b = &xd->block[i];
      b_mode = xd->mode_info_context->bmi[i].as_mode.first;
#if CONFIG_NEWBINTRAMODES
      xd->mode_info_context->bmi[i].as_mode.context = b->bmi.as_mode.context =
          vp9_find_bpred_context(b);
#endif
#if CONFIG_COMP_INTRA_PRED
      b_mode2 = xd->mode_info_context->bmi[i].as_mode.second;

      if (b_mode2 == (B_PREDICTION_MODE)(B_DC_PRED - 1)) {
#endif
        vp9_intra4x4_predict(b, b_mode, b->predictor);
#if CONFIG_COMP_INTRA_PRED
      } else {
        vp9_comp_intra4x4_predict(b, b_mode, b_mode2, b->predictor);
      }
#endif

      tx_type = get_tx_type(xd, b);
      if (tx_type != DCT_DCT) {
        vp9_ht_dequant_idct_add_c(tx_type, b->qcoeff,
                                  b->dequant, b->predictor,
                                  *(b->base_dst) + b->dst, 16, b->dst_stride);
      } else {
        vp9_dequant_idct_add(b->qcoeff, b->dequant, b->predictor,
                             *(b->base_dst) + b->dst, 16, b->dst_stride);
      }
    }
  } else if (mode == SPLITMV) {
    if (tx_size == TX_8X8) {
      vp9_dequant_idct_add_y_block_8x8(xd->qcoeff, xd->block[0].dequant,
                                       xd->predictor, xd->dst.y_buffer,
                                       xd->dst.y_stride, xd->eobs, xd);
    } else {
      pbi->idct_add_y_block(xd->qcoeff, xd->block[0].dequant,
                            xd->predictor, xd->dst.y_buffer,
                            xd->dst.y_stride, xd->eobs);
    }
  } else {
    BLOCKD *b = &xd->block[24];

    if (tx_size == TX_16X16) {
      BLOCKD *bd = &xd->block[0];
      tx_type = get_tx_type(xd, bd);
      if (tx_type != DCT_DCT) {
        vp9_ht_dequant_idct_add_16x16_c(tx_type, xd->qcoeff,
                                        xd->block[0].dequant, xd->predictor,
                                        xd->dst.y_buffer, 16, xd->dst.y_stride);
      } else {
        vp9_dequant_idct_add_16x16(xd->qcoeff, xd->block[0].dequant,
                                   xd->predictor, xd->dst.y_buffer,
                                   16, xd->dst.y_stride, xd->eobs[0]);
      }
    } else if (tx_size == TX_8X8) {
      vp9_dequantize_b_2x2(b);
      IDCT_INVOKE(RTCD_VTABLE(idct), ihaar2)(&b->dqcoeff[0], b->diff, 8);
      ((int *)b->qcoeff)[0] = 0;  // 2nd order block are set to 0 after idct
      ((int *)b->qcoeff)[1] = 0;
      ((int *)b->qcoeff)[2] = 0;
      ((int *)b->qcoeff)[3] = 0;
      ((int *)b->qcoeff)[4] = 0;
      ((int *)b->qcoeff)[5] = 0;
      ((int *)b->qcoeff)[6] = 0;
      ((int *)b->qcoeff)[7] = 0;
      vp9_dequant_dc_idct_add_y_block_8x8(xd->qcoeff,
          xd->block[0].dequant, xd->predictor, xd->dst.y_buffer,
          xd->dst.y_stride, xd->eobs, xd->block[24].diff, xd);
    } else {
      if (xd->eobs[24] > 1) {
        IDCT_INVOKE(RTCD_VTABLE(idct), iwalsh16)(&b->dqcoeff[0], b->diff);
        ((int *)b->qcoeff)[0] = 0;
        ((int *)b->qcoeff)[1] = 0;
        ((int *)b->qcoeff)[2] = 0;
        ((int *)b->qcoeff)[3] = 0;
        ((int *)b->qcoeff)[4] = 0;
        ((int *)b->qcoeff)[5] = 0;
        ((int *)b->qcoeff)[6] = 0;
        ((int *)b->qcoeff)[7] = 0;
      } else {
        IDCT_INVOKE(RTCD_VTABLE(idct), iwalsh1)(&b->dqcoeff[0], b->diff);
        ((int *)b->qcoeff)[0] = 0;
      }

      pbi->dc_idct_add_y_block(xd->qcoeff, xd->block[0].dequant, xd->predictor,
                               xd->dst.y_buffer, xd->dst.y_stride, xd->eobs,
                               xd->block[24].diff);
    }
  }

  if ((tx_size == TX_8X8 &&
       xd->mode_info_context->mbmi.mode != I8X8_PRED &&
       xd->mode_info_context->mbmi.mode != SPLITMV)
      || tx_size == TX_16X16)
    vp9_dequant_idct_add_uv_block_8x8
        (xd->qcoeff + 16 * 16, xd->block[16].dequant,
         xd->predictor + 16 * 16, xd->dst.u_buffer, xd->dst.v_buffer,
         xd->dst.uv_stride, xd->eobs + 16, xd);
  else if (xd->mode_info_context->mbmi.mode != I8X8_PRED)
    pbi->idct_add_uv_block(xd->qcoeff + 16 * 16, xd->block[16].dequant,
                           xd->predictor + 16 * 16, xd->dst.u_buffer,
                           xd->dst.v_buffer, xd->dst.uv_stride, xd->eobs + 16);
}
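/*
 * Reconstruction dispatch in decode_macroblock() above:
 *   - I8X8_PRED: per-8x8 intra prediction with an 8x8 or 4x4 residual add;
 *   - B_PRED:    per-4x4 intra prediction, with a hybrid (non-DCT_DCT)
 *                inverse transform whenever get_tx_type() says so;
 *   - SPLITMV:   luma added block by block, no second-order transform;
 *   - otherwise: the 16x16, 8x8 (+ 2x2 Haar) or 4x4 (+ WHT) paths, where
 *                block 24 carries the second-order coefficients.
 * Chroma is then added through either the 8x8 or the 4x4 UV path at the end.
 */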
static int get_delta_q(vp9_reader *bc, int prev, int *q_update) {
  int ret_val = 0;

  if (vp9_read_bit(bc)) {
    ret_val = vp9_read_literal(bc, 4);

    if (vp9_read_bit(bc))
      ret_val = -ret_val;
  }

  /* Trigger a quantizer update if the delta-q value has changed */
  if (ret_val != prev)
    *q_update = 1;

  return ret_val;
}
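/*
 * Delta-q syntax as read by get_delta_q() above: one flag bit, and when set a
 * 4-bit magnitude followed by a sign bit; *q_update is raised only when the
 * decoded value differs from the previous frame's value, so the dequant
 * tables are rebuilt only when needed.
 */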
#ifdef PACKET_TESTING
#include <stdio.h>
FILE *vpxlog = 0;
#endif
/* Decode a row of Superblocks (2x2 region of MBs) */
static void
decode_sb_row(VP9D_COMP *pbi, VP9_COMMON *pc, int mbrow, MACROBLOCKD *xd,
              BOOL_DECODER* const bc) {
  int i;
  int sb_col;
  int mb_row, mb_col;
  int recon_yoffset, recon_uvoffset;
  int ref_fb_idx = pc->lst_fb_idx;
  int dst_fb_idx = pc->new_fb_idx;
  int recon_y_stride = pc->yv12_fb[ref_fb_idx].y_stride;
  int recon_uv_stride = pc->yv12_fb[ref_fb_idx].uv_stride;
  int row_delta[4] = {  0, +1,  0, -1};
  int col_delta[4] = { +1, -1, +1, +1};
  int sb_cols = (pc->mb_cols + 1) >> 1;

  // For a SB there are 2 left contexts, each pertaining to a MB row within the SB
  vpx_memset(pc->left_context, 0, sizeof(pc->left_context));

  mb_row = mbrow;
  mb_col = 0;

  for (sb_col = 0; sb_col < sb_cols; sb_col++) {
    MODE_INFO *mi = xd->mode_info_context;

#if CONFIG_SUPERBLOCKS
    mi->mbmi.encoded_as_sb = vp9_read(bc, pc->sb_coded);
#endif

    // Process the 4 MBs within the SB in the order:
    // top-left, top-right, bottom-left, bottom-right
    for (i = 0; i < 4; i++) {
      int dy = row_delta[i];
      int dx = col_delta[i];
      int offset_extended = dy * xd->mode_info_stride + dx;

      mi = xd->mode_info_context;
      if ((mb_row >= pc->mb_rows) || (mb_col >= pc->mb_cols)) {
        // MB lies outside frame, skip on to next
        mb_row += dy;
        mb_col += dx;
        xd->mode_info_context += offset_extended;
        xd->prev_mode_info_context += offset_extended;
        continue;
      }

#if CONFIG_SUPERBLOCKS
      if (i)
        mi->mbmi.encoded_as_sb = 0;
#endif

      // Set above context pointer
      xd->above_context = pc->above_context + mb_col;
      xd->left_context = pc->left_context + (i >> 1);

      /* Distance of Mb to the various image edges.
       * These are specified to 8th pel as they are always compared to
       * values that are in 1/8th pel units
       */
      xd->mb_to_top_edge = -((mb_row * 16)) << 3;
      xd->mb_to_left_edge = -((mb_col * 16) << 3);
#if CONFIG_SUPERBLOCKS
      if (mi->mbmi.encoded_as_sb) {
        xd->mb_to_bottom_edge = ((pc->mb_rows - 2 - mb_row) * 16) << 3;
        xd->mb_to_right_edge = ((pc->mb_cols - 2 - mb_col) * 16) << 3;
      } else {
#endif
        xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;
        xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;
#if CONFIG_SUPERBLOCKS
      }
#endif

      xd->up_available = (mb_row != 0);
      xd->left_available = (mb_col != 0);

      recon_yoffset = (mb_row * recon_y_stride * 16) + (mb_col * 16);
      recon_uvoffset = (mb_row * recon_uv_stride * 8) + (mb_col * 8);

      xd->dst.y_buffer = pc->yv12_fb[dst_fb_idx].y_buffer + recon_yoffset;
      xd->dst.u_buffer = pc->yv12_fb[dst_fb_idx].u_buffer + recon_uvoffset;
      xd->dst.v_buffer = pc->yv12_fb[dst_fb_idx].v_buffer + recon_uvoffset;

      vp9_decode_mb_mode_mv(pbi, xd, mb_row, mb_col, bc);

      update_blockd_bmi(xd);

      /* Select the appropriate reference frame for this MB */
      if (xd->mode_info_context->mbmi.ref_frame == LAST_FRAME)
        ref_fb_idx = pc->lst_fb_idx;
      else if (xd->mode_info_context->mbmi.ref_frame == GOLDEN_FRAME)
        ref_fb_idx = pc->gld_fb_idx;
      else
        ref_fb_idx = pc->alt_fb_idx;

      xd->pre.y_buffer = pc->yv12_fb[ref_fb_idx].y_buffer + recon_yoffset;
      xd->pre.u_buffer = pc->yv12_fb[ref_fb_idx].u_buffer + recon_uvoffset;
      xd->pre.v_buffer = pc->yv12_fb[ref_fb_idx].v_buffer + recon_uvoffset;

      if (xd->mode_info_context->mbmi.second_ref_frame > 0) {
        int second_ref_fb_idx;

        /* Select the appropriate reference frame for this MB */
        if (xd->mode_info_context->mbmi.second_ref_frame == LAST_FRAME)
          second_ref_fb_idx = pc->lst_fb_idx;
        else if (xd->mode_info_context->mbmi.second_ref_frame ==
                 GOLDEN_FRAME)
          second_ref_fb_idx = pc->gld_fb_idx;
        else
          second_ref_fb_idx = pc->alt_fb_idx;

        xd->second_pre.y_buffer =
            pc->yv12_fb[second_ref_fb_idx].y_buffer + recon_yoffset;
        xd->second_pre.u_buffer =
            pc->yv12_fb[second_ref_fb_idx].u_buffer + recon_uvoffset;
        xd->second_pre.v_buffer =
            pc->yv12_fb[second_ref_fb_idx].v_buffer + recon_uvoffset;
      }

      if (xd->mode_info_context->mbmi.ref_frame != INTRA_FRAME) {
        /* propagate errors from reference frames */
        xd->corrupted |= pc->yv12_fb[ref_fb_idx].corrupted;
      }

#if CONFIG_SUPERBLOCKS
      if (xd->mode_info_context->mbmi.encoded_as_sb) {
        if (mb_col < pc->mb_cols - 1)
          mi[1] = mi[0];
        if (mb_row < pc->mb_rows - 1) {
          mi[pc->mode_info_stride] = mi[0];
          if (mb_col < pc->mb_cols - 1)
            mi[pc->mode_info_stride + 1] = mi[0];
        }
      }
      if (xd->mode_info_context->mbmi.encoded_as_sb) {
        decode_superblock(pbi, xd, mb_row, mb_col, bc);
      } else {
#endif
        vp9_intra_prediction_down_copy(xd);
        decode_macroblock(pbi, xd, mb_row, mb_col, bc);
#if CONFIG_SUPERBLOCKS
      }
#endif

      /* check if the boolean decoder has suffered an error */
      xd->corrupted |= bool_error(bc);

#if CONFIG_SUPERBLOCKS
      if (mi->mbmi.encoded_as_sb) {
        mb_col += 2;
        xd->mode_info_context += 2;
        xd->prev_mode_info_context += 2;
        break;
      }
#endif

      xd->mode_info_context += offset_extended;
      xd->prev_mode_info_context += offset_extended;
      mb_row += dy;
      mb_col += dx;
    }
  }

  /* skip prediction column */
  xd->mode_info_context += 1 - (pc->mb_cols & 0x1) + xd->mode_info_stride;
  xd->prev_mode_info_context += 1 - (pc->mb_cols & 0x1) + xd->mode_info_stride;
}
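/*
 * Traversal note for decode_sb_row(): row_delta/col_delta walk the four MBs
 * of a superblock in the order top-left, top-right, bottom-left,
 * bottom-right, and the final "skip prediction column" adjustment realigns
 * the mode_info pointers with the padded mode-info grid before the next
 * superblock row (rows are processed two macroblock rows at a time).
 */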
static unsigned int read_partition_size(const unsigned char *cx_size) {
  const unsigned int size =
      cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
  return size;
}
static int read_is_valid(const unsigned char *start,
                         size_t len,
                         const unsigned char *end) {
  return (start + len > start && start + len <= end);
}
static void setup_token_decoder(VP9D_COMP *pbi,
                                const unsigned char *cx_data,
                                BOOL_DECODER* const bool_decoder) {
  VP9_COMMON *pc = &pbi->common;
  const unsigned char *user_data_end = pbi->Source + pbi->source_sz;
  const unsigned char *partition;

  ptrdiff_t partition_size;
  ptrdiff_t bytes_left;

  // Set up pointers to token partition
  partition = cx_data;
  bytes_left = user_data_end - partition;
  partition_size = bytes_left;

  /* Validate the calculated partition length. If the buffer
   * described by the partition can't be fully read, then restrict
   * it to the portion that can be (for EC mode) or throw an error.
   */
  if (!read_is_valid(partition, partition_size, user_data_end)) {
    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt partition "
                       "%d length", 1);
  }

  if (vp9_start_decode(bool_decoder,
                       partition, (unsigned int)partition_size))
    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}
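/*
 * Note: there is a single token partition here. Everything from cx_data to
 * the end of the source buffer is handed to one boolean decoder, which is
 * why partition_size is simply the number of bytes left.
 */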
static void init_frame(VP9D_COMP *pbi) {
  VP9_COMMON *const pc = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;

  if (pc->frame_type == KEY_FRAME) {
    /* Various keyframe initializations */
    vp9_init_mv_probs(pc);

    vp9_init_mbmode_probs(pc);
    vp9_default_bmode_probs(pc->fc.bmode_prob);

    vp9_default_coef_probs(pc);
    vp9_kf_default_bmode_probs(pc->kf_bmode_prob);

    // Reset the segment feature data to the default stats:
    // Features disabled, 0, with delta coding (Default state).
    vp9_clearall_segfeatures(xd);

    xd->mb_segment_abs_delta = SEGMENT_DELTADATA;

    /* reset the mode ref deltas for loop filter */
    vpx_memset(xd->ref_lf_deltas, 0, sizeof(xd->ref_lf_deltas));
    vpx_memset(xd->mode_lf_deltas, 0, sizeof(xd->mode_lf_deltas));

    /* All buffers are implicitly updated on key frames. */
    pc->refresh_golden_frame = 1;
    pc->refresh_alt_ref_frame = 1;
    pc->copy_buffer_to_gf = 0;
    pc->copy_buffer_to_arf = 0;

    /* Note that Golden and Altref modes cannot be used on a key frame so
     * ref_frame_sign_bias[] is undefined and meaningless
     */
    pc->ref_frame_sign_bias[GOLDEN_FRAME] = 0;
    pc->ref_frame_sign_bias[ALTREF_FRAME] = 0;

    vp9_init_mode_contexts(&pbi->common);
    vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc));
    vpx_memcpy(&pc->lfc_a, &pc->fc, sizeof(pc->fc));

    vpx_memcpy(pbi->common.fc.vp9_mode_contexts,
               pbi->common.fc.mode_context,
               sizeof(pbi->common.fc.mode_context));
    vpx_memset(pc->prev_mip, 0,
               (pc->mb_cols + 1) * (pc->mb_rows + 1) * sizeof(MODE_INFO));
    vpx_memset(pc->mip, 0,
               (pc->mb_cols + 1) * (pc->mb_rows + 1) * sizeof(MODE_INFO));

    vp9_update_mode_info_border(pc, pc->mip);
    vp9_update_mode_info_in_image(pc, pc->mi);
  }

  if (!pc->use_bilinear_mc_filter)
    pc->mcomp_filter_type = EIGHTTAP;
  else
    pc->mcomp_filter_type = BILINEAR;

  /* To enable choice of different interpolation filters */
  vp9_setup_interp_filters(xd, pc->mcomp_filter_type, pc);

  xd->mode_info_context = pc->mi;
  xd->prev_mode_info_context = pc->prev_mi;
  xd->frame_type = pc->frame_type;
  xd->mode_info_context->mbmi.mode = DC_PRED;
  xd->mode_info_stride = pc->mode_info_stride;
  xd->corrupted = 0; /* init without corruption */

  xd->fullpixel_mask = 0xffffffff;
  if (pc->full_pixel)
    xd->fullpixel_mask = 0xfffffff8;
}
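/*
 * xd->fullpixel_mask appears to be consumed elsewhere in the decoder when
 * motion vectors are formed: 0xffffffff keeps sub-pel precision, while
 * 0xfffffff8 clears the three fractional bits for full-pixel-only streams.
 */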
static void read_coef_probs2(VP9D_COMP *pbi) {
  const vp9_prob grpupd = 192;
  int i, j, k, l;
  vp9_reader *const bc = &pbi->bc;
  VP9_COMMON *const pc = &pbi->common;

  for (l = 0; l < ENTROPY_NODES; l++) {
    if (vp9_read(bc, grpupd)) {
      // printf("Decoding %d\n", l);
      for (i = 0; i < BLOCK_TYPES; i++)
        for (j = !i; j < COEF_BANDS; j++)
          for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
            if (k >= 3 && ((i == 0 && j == 1) ||
                           (i > 0 && j == 0)))
              continue;
            {
              vp9_prob *const p = pc->fc.coef_probs[i][j][k] + l;
              int u = vp9_read(bc, COEF_UPDATE_PROB);
              if (u) *p = read_prob_diff_update(bc, *p);
            }
          }
    }
  }
  if (pbi->common.txfm_mode == ALLOW_8X8) {
    for (l = 0; l < ENTROPY_NODES; l++) {
      if (vp9_read(bc, grpupd)) {
        for (i = 0; i < BLOCK_TYPES_8X8; i++)
          for (j = !i; j < COEF_BANDS; j++)
            for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
              if (k >= 3 && ((i == 0 && j == 1) ||
                             (i > 0 && j == 0)))
                continue;
              {
                vp9_prob *const p = pc->fc.coef_probs_8x8[i][j][k] + l;

                int u = vp9_read(bc, COEF_UPDATE_PROB_8X8);
                if (u) *p = read_prob_diff_update(bc, *p);
              }
            }
      }
    }
  }
}
static void read_coef_probs_common(
    BOOL_DECODER* const bc,
    vp9_prob coef_probs[BLOCK_TYPES][COEF_BANDS]
                       [PREV_COEF_CONTEXTS][ENTROPY_NODES]) {
  int i, j, k, l;

  if (vp9_read_bit(bc)) {
    for (i = 0; i < BLOCK_TYPES; i++) {
      for (j = !i; j < COEF_BANDS; j++) {
        /* NB: This j loop starts from 1 on block type i == 0 */
        for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
          if (k >= 3 && ((i == 0 && j == 1) ||
                         (i > 0 && j == 0)))
            continue;
          for (l = 0; l < ENTROPY_NODES; l++) {
            vp9_prob *const p = coef_probs[i][j][k] + l;

            if (vp9_read(bc, COEF_UPDATE_PROB)) {
              *p = read_prob_diff_update(bc, *p);
            }
          }
        }
      }
    }
  }
}
static void read_coef_probs(VP9D_COMP *pbi, BOOL_DECODER* const bc) {
  VP9_COMMON *const pc = &pbi->common;

  read_coef_probs_common(bc, pc->fc.coef_probs);
  read_coef_probs_common(bc, pc->fc.hybrid_coef_probs);

  if (pbi->common.txfm_mode != ONLY_4X4) {
    read_coef_probs_common(bc, pc->fc.coef_probs_8x8);
    read_coef_probs_common(bc, pc->fc.hybrid_coef_probs_8x8);
  }
  if (pbi->common.txfm_mode > ALLOW_8X8) {
    read_coef_probs_common(bc, pc->fc.coef_probs_16x16);
    read_coef_probs_common(bc, pc->fc.hybrid_coef_probs_16x16);
  }
}
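/*
 * Layering of the coefficient-probability updates read above: the 4x4 and
 * hybrid-4x4 tables are always coded; the 8x8 tables only when txfm_mode is
 * not ONLY_4X4; and the 16x16 tables only when txfm_mode goes beyond
 * ALLOW_8X8. Each table is gated by a single update bit inside
 * read_coef_probs_common().
 */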
int vp9_decode_frame(VP9D_COMP *pbi, const unsigned char **p_data_end) {
  BOOL_DECODER header_bc, residual_bc;
  VP9_COMMON *const pc = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  const unsigned char *data = (const unsigned char *)pbi->Source;
  const unsigned char *data_end = data + pbi->source_sz;
  ptrdiff_t first_partition_length_in_bytes = 0;

  int mb_row;
  int i, j;
  int corrupt_tokens = 0;

  /* start with no corruption of current frame */
  xd->corrupted = 0;
  pc->yv12_fb[pc->new_fb_idx].corrupted = 0;

  if (data_end - data < 3) {
    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet");
  }

  pc->last_frame_type = pc->frame_type;
  pc->frame_type = (FRAME_TYPE)(data[0] & 1);
  pc->version = (data[0] >> 1) & 7;
  pc->show_frame = (data[0] >> 4) & 1;
  first_partition_length_in_bytes =
      (data[0] | (data[1] << 8) | (data[2] << 16)) >> 5;

  if ((data + first_partition_length_in_bytes > data_end
       || data + first_partition_length_in_bytes < data))
    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt partition 0 length");

  data += 3;

  vp9_setup_version(pc);
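  /*
   * Layout of the 3-byte uncompressed frame tag parsed above:
   *   bit 0      - frame type (KEY_FRAME when the bit is 0),
   *   bits 1..3  - bitstream version,
   *   bit 4      - show_frame flag,
   *   bits 5..23 - length in bytes of the first (compressed header) partition.
   */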
  if (pc->frame_type == KEY_FRAME) {
    const int Width = pc->Width;
    const int Height = pc->Height;

    /* vet via sync code */
    /* When error concealment is enabled we should only check the sync
     * code if we have enough bits available
     */
    if (data + 3 < data_end) {
      if (data[0] != 0x9d || data[1] != 0x01 || data[2] != 0x2a)
        vpx_internal_error(&pc->error, VPX_CODEC_UNSUP_BITSTREAM,
                           "Invalid frame sync code");
    }

    /* If error concealment is enabled we should only parse the new size
     * if we have enough data. Otherwise we will end up with the wrong
     * size.
     */
    if (data + 6 < data_end) {
      pc->Width = (data[3] | (data[4] << 8)) & 0x3fff;
      pc->horiz_scale = data[4] >> 6;
      pc->Height = (data[5] | (data[6] << 8)) & 0x3fff;
      pc->vert_scale = data[6] >> 6;
    }
    data += 7;

    if (Width != pc->Width || Height != pc->Height) {
      if (pc->Width <= 0) {
        pc->Width = Width;
        vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                           "Invalid frame width");
      }

      if (pc->Height <= 0) {
        pc->Height = Height;
        vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                           "Invalid frame height");
      }

      if (vp9_alloc_frame_buffers(pc, pc->Width, pc->Height))
        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate frame buffers");
    }
  }

  if ((!pbi->decoded_key_frame && pc->frame_type != KEY_FRAME) ||
      pc->Width == 0 || pc->Height == 0) {
    return -1;
  }

  init_frame(pbi);

  if (vp9_start_decode(&header_bc, data,
                       (unsigned int)first_partition_length_in_bytes))
    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder 0");
  if (pc->frame_type == KEY_FRAME) {
    pc->clr_type = (YUV_TYPE)vp9_read_bit(&header_bc);
    pc->clamp_type = (CLAMP_TYPE)vp9_read_bit(&header_bc);
  }
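  /*
   * For key frames the extra bytes checked above are: a 3-byte sync code
   * (0x9d 0x01 0x2a) followed by two 16-bit fields holding a 14-bit width
   * plus 2-bit horizontal scale, then a 14-bit height plus 2-bit vertical
   * scale, which is what the & 0x3fff masks and >> 6 shifts extract.
   */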
  /* Is segmentation enabled */
  xd->segmentation_enabled = (unsigned char)vp9_read_bit(&header_bc);

  if (xd->segmentation_enabled) {
    // Read whether or not the segmentation map is being explicitly
    // updated this frame.
    xd->update_mb_segmentation_map = (unsigned char)vp9_read_bit(&header_bc);

    // If so what method will be used.
    if (xd->update_mb_segmentation_map) {
      // Which macro block level features are enabled

      // Read the probs used to decode the segment id for each macro
      // block.
      for (i = 0; i < MB_FEATURE_TREE_PROBS; i++) {
        xd->mb_segment_tree_probs[i] = vp9_read_bit(&header_bc) ?
            (vp9_prob)vp9_read_literal(&header_bc, 8) : 255;
      }

      // Read the prediction probs needed to decode the segment id
      pc->temporal_update = (unsigned char)vp9_read_bit(&header_bc);
      for (i = 0; i < PREDICTION_PROBS; i++) {
        if (pc->temporal_update) {
          pc->segment_pred_probs[i] = vp9_read_bit(&header_bc) ?
              (vp9_prob)vp9_read_literal(&header_bc, 8) : 255;
        } else {
          pc->segment_pred_probs[i] = 255;
        }
      }
    }

    // Is the segment data being updated
    xd->update_mb_segmentation_data = (unsigned char)vp9_read_bit(&header_bc);

    if (xd->update_mb_segmentation_data) {
      int data;

      xd->mb_segment_abs_delta = (unsigned char)vp9_read_bit(&header_bc);

      vp9_clearall_segfeatures(xd);

      // For each segmentation...
      for (i = 0; i < MAX_MB_SEGMENTS; i++) {
        // For each of the segments features...
        for (j = 0; j < SEG_LVL_MAX; j++) {
          // Is the feature enabled
          if (vp9_read_bit(&header_bc)) {
            // Update the feature data and mask
            vp9_enable_segfeature(xd, i, j);

            data = (signed char)vp9_read_literal(
                       &header_bc, vp9_seg_feature_data_bits(j));

            // Is the segment data signed..
            if (vp9_is_segfeature_signed(j)) {
              if (vp9_read_bit(&header_bc))
                data = -data;
            }
          } else {
            data = 0;
          }

          vp9_set_segdata(xd, i, j, data);
        }
      }
    }
  }
  // Read common prediction model status flag probability updates for the
  // reference frame
  if (pc->frame_type == KEY_FRAME) {
    // Set the prediction probabilities to defaults
    pc->ref_pred_probs[0] = 120;
    pc->ref_pred_probs[1] = 80;
    pc->ref_pred_probs[2] = 40;
  } else {
    for (i = 0; i < PREDICTION_PROBS; i++) {
      if (vp9_read_bit(&header_bc))
        pc->ref_pred_probs[i] = (vp9_prob)vp9_read_literal(&header_bc, 8);
    }
  }

#if CONFIG_SUPERBLOCKS
  pc->sb_coded = vp9_read_literal(&header_bc, 8);
#endif

  /* Read the loop filter level and type */
  pc->txfm_mode = vp9_read_literal(&header_bc, 2);
  if (pc->txfm_mode == TX_MODE_SELECT) {
    pc->prob_tx[0] = vp9_read_literal(&header_bc, 8);
    pc->prob_tx[1] = vp9_read_literal(&header_bc, 8);
  }

  pc->filter_type = (LOOPFILTERTYPE) vp9_read_bit(&header_bc);
  pc->filter_level = vp9_read_literal(&header_bc, 6);
  pc->sharpness_level = vp9_read_literal(&header_bc, 3);

  /* Read in loop filter deltas applied at the MB level based on mode or ref frame. */
  xd->mode_ref_lf_delta_update = 0;
  xd->mode_ref_lf_delta_enabled = (unsigned char)vp9_read_bit(&header_bc);

  if (xd->mode_ref_lf_delta_enabled) {
    /* Do the deltas need to be updated */
    xd->mode_ref_lf_delta_update = (unsigned char)vp9_read_bit(&header_bc);

    if (xd->mode_ref_lf_delta_update) {
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
        if (vp9_read_bit(&header_bc)) {
          /*sign = vp9_read_bit( &header_bc );*/
          xd->ref_lf_deltas[i] = (signed char)vp9_read_literal(&header_bc, 6);

          if (vp9_read_bit(&header_bc))        /* Apply sign */
            xd->ref_lf_deltas[i] = xd->ref_lf_deltas[i] * -1;
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
        if (vp9_read_bit(&header_bc)) {
          /*sign = vp9_read_bit( &header_bc );*/
          xd->mode_lf_deltas[i] = (signed char)vp9_read_literal(&header_bc, 6);

          if (vp9_read_bit(&header_bc))        /* Apply sign */
            xd->mode_lf_deltas[i] = xd->mode_lf_deltas[i] * -1;
        }
      }
    }
  }

  // Dummy read for now
  vp9_read_literal(&header_bc, 2);

  setup_token_decoder(pbi, data + first_partition_length_in_bytes,
                      &residual_bc);
  /* Read the default quantizers. */
  {
    int Q, q_update;

    Q = vp9_read_literal(&header_bc, QINDEX_BITS);
    pc->base_qindex = Q;
    q_update = 0;

    /* AC 1st order Q = default */
    pc->y1dc_delta_q = get_delta_q(&header_bc, pc->y1dc_delta_q, &q_update);
    pc->y2dc_delta_q = get_delta_q(&header_bc, pc->y2dc_delta_q, &q_update);
    pc->y2ac_delta_q = get_delta_q(&header_bc, pc->y2ac_delta_q, &q_update);
    pc->uvdc_delta_q = get_delta_q(&header_bc, pc->uvdc_delta_q, &q_update);
    pc->uvac_delta_q = get_delta_q(&header_bc, pc->uvac_delta_q, &q_update);

    if (q_update)
      vp9_init_de_quantizer(pbi);

    /* MB level dequantizer setup */
    mb_init_dequantizer(pbi, &pbi->mb);
  }
  /* Determine if the golden frame or ARF buffer should be updated and how.
   * For all non key frames the GF and ARF refresh flags and sign bias
   * flags must be set explicitly.
   */
  if (pc->frame_type != KEY_FRAME) {
    /* Should the GF or ARF be updated from the current frame */
    pc->refresh_golden_frame = vp9_read_bit(&header_bc);
    pc->refresh_alt_ref_frame = vp9_read_bit(&header_bc);

    if (pc->refresh_alt_ref_frame) {
      vpx_memcpy(&pc->fc, &pc->lfc_a, sizeof(pc->fc));
      vpx_memcpy(pc->fc.vp9_mode_contexts,
                 pc->fc.mode_context_a,
                 sizeof(pc->fc.vp9_mode_contexts));
    } else {
      vpx_memcpy(&pc->fc, &pc->lfc, sizeof(pc->fc));
      vpx_memcpy(pc->fc.vp9_mode_contexts,
                 pc->fc.mode_context,
                 sizeof(pc->fc.vp9_mode_contexts));
    }

    /* Buffer to buffer copy flags. */
    pc->copy_buffer_to_gf = 0;

    if (!pc->refresh_golden_frame)
      pc->copy_buffer_to_gf = vp9_read_literal(&header_bc, 2);

    pc->copy_buffer_to_arf = 0;

    if (!pc->refresh_alt_ref_frame)
      pc->copy_buffer_to_arf = vp9_read_literal(&header_bc, 2);

    pc->ref_frame_sign_bias[GOLDEN_FRAME] = vp9_read_bit(&header_bc);
    pc->ref_frame_sign_bias[ALTREF_FRAME] = vp9_read_bit(&header_bc);

    /* Is high precision mv allowed */
    xd->allow_high_precision_mv = (unsigned char)vp9_read_bit(&header_bc);
    // Read the type of subpel filter to use
    if (vp9_read_bit(&header_bc)) {
      pc->mcomp_filter_type = SWITCHABLE;
    } else {
      pc->mcomp_filter_type = vp9_read_literal(&header_bc, 2);
    }
#if CONFIG_COMP_INTERINTRA_PRED
    pc->use_interintra = vp9_read_bit(&header_bc);
#endif
    /* To enable choice of different interpolation filters */
    vp9_setup_interp_filters(xd, pc->mcomp_filter_type, pc);
  }

  pc->refresh_entropy_probs = vp9_read_bit(&header_bc);
  if (pc->refresh_entropy_probs == 0) {
    vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc));
  }

  pc->refresh_last_frame = (pc->frame_type == KEY_FRAME)
                           || vp9_read_bit(&header_bc);
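  /*
   * Entropy-context bookkeeping: pc->fc holds the probabilities used for this
   * frame, while pc->lfc and pc->lfc_a are the contexts saved for the last
   * frame and the ARF. Non-key frames start from one of the saved copies
   * (chosen by refresh_alt_ref_frame above), and the copy is written back at
   * the end of vp9_decode_frame() when refresh_entropy_probs is set.
   */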
  // Read inter mode probability context updates
  if (pc->frame_type != KEY_FRAME) {
    for (i = 0; i < INTER_MODE_CONTEXTS; i++) {
      for (j = 0; j < 4; j++) {
        if (vp9_read(&header_bc, 252)) {
          pc->fc.vp9_mode_contexts[i][j] =
              (vp9_prob)vp9_read_literal(&header_bc, 8);
        }
      }
    }
  }

#if 0
  FILE *z = fopen("decodestats.stt", "a");
  fprintf(z, "%6d F:%d,G:%d,A:%d,L:%d,Q:%d\n",
          pc->current_video_frame,
          pc->frame_type,
          pc->refresh_golden_frame,
          pc->refresh_alt_ref_frame,
          pc->refresh_last_frame,
          pc->base_qindex);
  fclose(z);
#endif
  vp9_copy(pbi->common.fc.pre_coef_probs,
           pbi->common.fc.coef_probs);
  vp9_copy(pbi->common.fc.pre_hybrid_coef_probs,
           pbi->common.fc.hybrid_coef_probs);
  vp9_copy(pbi->common.fc.pre_coef_probs_8x8,
           pbi->common.fc.coef_probs_8x8);
  vp9_copy(pbi->common.fc.pre_hybrid_coef_probs_8x8,
           pbi->common.fc.hybrid_coef_probs_8x8);
  vp9_copy(pbi->common.fc.pre_coef_probs_16x16,
           pbi->common.fc.coef_probs_16x16);
  vp9_copy(pbi->common.fc.pre_hybrid_coef_probs_16x16,
           pbi->common.fc.hybrid_coef_probs_16x16);
  vp9_copy(pbi->common.fc.pre_ymode_prob, pbi->common.fc.ymode_prob);
#if CONFIG_SUPERBLOCKS
  vp9_copy(pbi->common.fc.pre_sb_ymode_prob, pbi->common.fc.sb_ymode_prob);
#endif
  vp9_copy(pbi->common.fc.pre_uv_mode_prob, pbi->common.fc.uv_mode_prob);
  vp9_copy(pbi->common.fc.pre_bmode_prob, pbi->common.fc.bmode_prob);
  vp9_copy(pbi->common.fc.pre_i8x8_mode_prob, pbi->common.fc.i8x8_mode_prob);
  vp9_copy(pbi->common.fc.pre_sub_mv_ref_prob, pbi->common.fc.sub_mv_ref_prob);
  vp9_copy(pbi->common.fc.pre_mbsplit_prob, pbi->common.fc.mbsplit_prob);
#if CONFIG_COMP_INTERINTRA_PRED
  pbi->common.fc.pre_interintra_prob = pbi->common.fc.interintra_prob;
#endif
  pbi->common.fc.pre_nmvc = pbi->common.fc.nmvc;
  vp9_zero(pbi->common.fc.coef_counts);
  vp9_zero(pbi->common.fc.hybrid_coef_counts);
  vp9_zero(pbi->common.fc.coef_counts_8x8);
  vp9_zero(pbi->common.fc.hybrid_coef_counts_8x8);
  vp9_zero(pbi->common.fc.coef_counts_16x16);
  vp9_zero(pbi->common.fc.hybrid_coef_counts_16x16);
  vp9_zero(pbi->common.fc.ymode_counts);
#if CONFIG_SUPERBLOCKS
  vp9_zero(pbi->common.fc.sb_ymode_counts);
#endif
  vp9_zero(pbi->common.fc.uv_mode_counts);
  vp9_zero(pbi->common.fc.bmode_counts);
  vp9_zero(pbi->common.fc.i8x8_mode_counts);
  vp9_zero(pbi->common.fc.sub_mv_ref_counts);
  vp9_zero(pbi->common.fc.mbsplit_counts);
  vp9_zero(pbi->common.fc.NMVcount);
  vp9_zero(pbi->common.fc.mv_ref_ct);
#if CONFIG_COMP_INTERINTRA_PRED
  vp9_zero(pbi->common.fc.interintra_counts);
#endif

  read_coef_probs(pbi, &header_bc);
  vpx_memcpy(&xd->pre, &pc->yv12_fb[pc->lst_fb_idx], sizeof(YV12_BUFFER_CONFIG));
  vpx_memcpy(&xd->dst, &pc->yv12_fb[pc->new_fb_idx], sizeof(YV12_BUFFER_CONFIG));

  // Create the segmentation map structure and set to 0
  if (!pc->last_frame_seg_map)
    CHECK_MEM_ERROR(pc->last_frame_seg_map,
                    vpx_calloc((pc->mb_rows * pc->mb_cols), 1));

  /* set up the new frame for intra-coded blocks */
  vp9_setup_intra_recon(&pc->yv12_fb[pc->new_fb_idx]);

  vp9_setup_block_dptrs(xd);

  vp9_build_block_doffsets(xd);

  /* clear out the coeff buffer */
  vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));

  /* Read the mb_no_coeff_skip flag */
  pc->mb_no_coeff_skip = (int)vp9_read_bit(&header_bc);

  vp9_decode_mode_mvs_init(pbi, &header_bc);

  vpx_memset(pc->above_context, 0, sizeof(ENTROPY_CONTEXT_PLANES) * pc->mb_cols);

  // Reset the macroblock mode info context to the start of the list
  xd->mode_info_context = pc->mi;
  xd->prev_mode_info_context = pc->prev_mi;
  /* Decode a row of superblocks */
  for (mb_row = 0; mb_row < pc->mb_rows; mb_row += 2) {
    decode_sb_row(pbi, pc, mb_row, xd, &residual_bc);
  }
  corrupt_tokens |= xd->corrupted;

  /* Collect information about decoder corruption. */
  /* 1. Check first boolean decoder for errors. */
  pc->yv12_fb[pc->new_fb_idx].corrupted = bool_error(&header_bc);
  /* 2. Check the macroblock information */
  pc->yv12_fb[pc->new_fb_idx].corrupted |= corrupt_tokens;

  if (!pbi->decoded_key_frame) {
    if (pc->frame_type == KEY_FRAME &&
        !pc->yv12_fb[pc->new_fb_idx].corrupted)
      pbi->decoded_key_frame = 1;
    else
      vpx_internal_error(&pbi->common.error, VPX_CODEC_CORRUPT_FRAME,
                         "A stream must start with a complete key frame");
  }

  vp9_adapt_coef_probs(pc);
  if (pc->frame_type != KEY_FRAME) {
    vp9_adapt_mode_probs(pc);
    vp9_adapt_nmv_probs(pc, xd->allow_high_precision_mv);
    vp9_update_mode_context(&pbi->common);
  }

  /* If this was a kf or Gf note the Q used */
  if ((pc->frame_type == KEY_FRAME) ||
      pc->refresh_golden_frame || pc->refresh_alt_ref_frame) {
    pc->last_kf_gf_q = pc->base_qindex;
  }

  if (pc->refresh_entropy_probs) {
    if (pc->refresh_alt_ref_frame)
      vpx_memcpy(&pc->lfc_a, &pc->fc, sizeof(pc->fc));
    else
      vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc));
  }

#ifdef PACKET_TESTING
  {
    FILE *f = fopen("decompressor.VP8", "ab");
    unsigned int size = residual_bc.pos + header_bc.pos + 8;
    fwrite((void *) &size, 4, 1, f);
    fwrite((void *) pbi->Source, size, 1, f);
    fclose(f);
  }
#endif

  // printf("Frame %d Done\n", frame_count++);

  /* Find the end of the coded buffer */
  while (residual_bc.count > CHAR_BIT
         && residual_bc.count < VP9_BD_VALUE_SIZE) {
    residual_bc.count -= CHAR_BIT;
    residual_bc.user_buffer--;
  }
  *p_data_end = residual_bc.user_buffer;
  return 0;
}
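/*
 * The trailing while loop rewinds residual_bc.user_buffer by one byte for
 * each whole byte still cached in the bool decoder's value register, so
 * *p_data_end reports (approximately) the first byte past the data that was
 * actually consumed by the token partition.
 */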