/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
12 #include "treereader.h"
13 #include "vp8/common/entropymv.h"
14 #include "vp8/common/entropymode.h"
15 #include "onyxd_int.h"
16 #include "vp8/common/findnearmv.h"
21 static int vp8_read_bmode(vp8_reader
*bc
, const vp8_prob
*p
)
23 const int i
= vp8_treed_read(bc
, vp8_bmode_tree
, p
);
29 static int vp8_read_ymode(vp8_reader
*bc
, const vp8_prob
*p
)
31 const int i
= vp8_treed_read(bc
, vp8_ymode_tree
, p
);
36 static int vp8_kfread_ymode(vp8_reader
*bc
, const vp8_prob
*p
)
38 const int i
= vp8_treed_read(bc
, vp8_kf_ymode_tree
, p
);
45 static int vp8_read_uv_mode(vp8_reader
*bc
, const vp8_prob
*p
)
47 const int i
= vp8_treed_read(bc
, vp8_uv_mode_tree
, p
);
52 static void vp8_read_mb_features(vp8_reader
*r
, MB_MODE_INFO
*mi
, MACROBLOCKD
*x
)
54 /* Is segmentation enabled */
55 if (x
->segmentation_enabled
&& x
->update_mb_segmentation_map
)
57 /* If so then read the segment id. */
58 if (vp8_read(r
, x
->mb_segment_tree_probs
[0]))
59 mi
->segment_id
= (unsigned char)(2 + vp8_read(r
, x
->mb_segment_tree_probs
[2]));
61 mi
->segment_id
= (unsigned char)(vp8_read(r
, x
->mb_segment_tree_probs
[1]));
/* Decode all mode info for one key-frame macroblock: segment id (if the
 * segmentation map is being updated), the coeff-skip flag, the Y mode
 * (per-4x4-subblock B modes when the Y mode is B_PRED) and the UV mode.
 * NOTE(review): this block is a garbled extraction -- braces, the subblock
 * loop header and several statements are missing, and the leading integers
 * are the original file's line numbers. Restore from upstream before
 * building; comments below only annotate what is visible.
 */
65 static void vp8_kfread_modes(VP8D_COMP
*pbi
, MODE_INFO
*m
, int mb_row
, int mb_col
)
67 vp8_reader
*const bc
= & pbi
->bc
;
68 const int mis
= pbi
->common
.mode_info_stride
;
71 MB_PREDICTION_MODE y_mode
;
73 /* Read the Macroblock segmentation map if it is being updated explicitly this frame (reset to 0 above by default)
74 * By default on a key frame reset all MBs to segment 0
76 m
->mbmi
.segment_id
= 0;
78 if (pbi
->mb
.update_mb_segmentation_map
)
79 vp8_read_mb_features(bc
, &m
->mbmi
, &pbi
->mb
);
81 /* Read the macroblock coeff skip flag if this feature is in use, else default to 0 */
82 if (pbi
->common
.mb_no_coeff_skip
)
83 m
->mbmi
.mb_skip_coeff
= vp8_read(bc
, pbi
->prob_skip_false
);
85 m
->mbmi
.mb_skip_coeff
= 0;
/* Key frames use the dedicated key-frame Y mode tree; the reference is
 * always intra. */
87 y_mode
= (MB_PREDICTION_MODE
) vp8_kfread_ymode(bc
, pbi
->common
.kf_ymode_prob
)
;
89 m
->mbmi
.ref_frame
= INTRA_FRAME
;
/* B_PRED: one sub-mode per 4x4 block, with probabilities conditioned on
 * the above (A) and left (L) neighbors' sub-modes. */
91 if ((m
->mbmi
.mode
= y_mode
) == B_PRED
)
97 const B_PREDICTION_MODE A
= above_block_mode(m
, i
, mis
);
98 const B_PREDICTION_MODE L
= left_block_mode(m
, i
);
100 m
->bmi
[i
].as_mode
= (B_PREDICTION_MODE
) vp8_read_bmode(bc
, pbi
->common
.kf_bmode_prob
[A
] [L
]);
/* NOTE(review): the assignment below (orig. line 130) belongs to an elided
 * alternate branch -- 'BMode' is not declared anywhere in the visible
 * text; confirm against upstream. */
130 m
->bmi
[i
].as_mode
= (B_PREDICTION_MODE
)BMode
;
135 m
->mbmi
.uv_mode
= (MB_PREDICTION_MODE
)vp8_read_uv_mode(bc
, pbi
->common
.kf_uv_mode_prob
);
139 static int read_mvcomponent(vp8_reader
*r
, const MV_CONTEXT
*mvc
)
141 const vp8_prob
*const p
= (const vp8_prob
*) mvc
;
144 if (vp8_read(r
, p
[mvpis_short
])) /* Large */
150 x
+= vp8_read(r
, p
[MVPbits
+ i
]) << i
;
154 i
= mvlong_width
- 1; /* Skip bit 3, which is sometimes implicit */
158 x
+= vp8_read(r
, p
[MVPbits
+ i
]) << i
;
162 if (!(x
& 0xFFF0) || vp8_read(r
, p
[MVPbits
+ 3]))
166 x
= vp8_treed_read(r
, vp8_small_mvtree
, p
+ MVPshort
);
168 if (x
&& vp8_read(r
, p
[MVPsign
]))
174 static void read_mv(vp8_reader
*r
, MV
*mv
, const MV_CONTEXT
*mvc
)
176 mv
->row
= (short)(read_mvcomponent(r
, mvc
) << 1);
177 mv
->col
= (short)(read_mvcomponent(r
, ++mvc
) << 1);
181 static void read_mvcontexts(vp8_reader
*bc
, MV_CONTEXT
*mvc
)
187 const vp8_prob
*up
= vp8_mv_update_probs
[i
].prob
;
188 vp8_prob
*p
= (vp8_prob
*)(mvc
+ i
);
189 vp8_prob
*const pstop
= p
+ MVPcount
;
193 if (vp8_read(bc
, *up
++))
195 const vp8_prob x
= (vp8_prob
)vp8_read_literal(bc
, 7);
206 static MB_PREDICTION_MODE
read_mv_ref(vp8_reader
*bc
, const vp8_prob
*p
)
208 const int i
= vp8_treed_read(bc
, vp8_mv_ref_tree
, p
);
210 return (MB_PREDICTION_MODE
)i
;
213 static MB_PREDICTION_MODE
sub_mv_ref(vp8_reader
*bc
, const vp8_prob
*p
)
215 const int i
= vp8_treed_read(bc
, vp8_sub_mv_ref_tree
, p
);
217 return (MB_PREDICTION_MODE
)i
;
#ifdef VPX_MODE_COUNT
/* Diagnostic counters: sub-MV reference outcomes (indexed [context][outcome],
 * see the vp8_mv_cont_count[mv_contz][0..3] increments in read_mb_modes_mv);
 * compiled in only for instrumentation builds. */
unsigned int vp8_mv_cont_count[5][4] =
{
    { 0, 0, 0, 0 },
    { 0, 0, 0, 0 },
    { 0, 0, 0, 0 },
    { 0, 0, 0, 0 },
    { 0, 0, 0, 0 }
};
#endif

/* Number of 4x4 blocks belonging to each partition, for each of the four
 * SPLITMV partitionings (2x8, 2x8, 4x4, 16x1 blocks respectively). */
static const unsigned char mbsplit_fill_count[4] = {8, 8, 4, 1};
/* For each partitioning, the 4x4-block visit order used when replicating a
 * partition's MV into all of its member blocks. */
static const unsigned char mbsplit_fill_offset[4][16] = {
    { 0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15},
    { 0,  1,  4,  5,  8,  9, 12, 13,  2,  3,  6,  7, 10, 11, 14, 15},
    { 0,  1,  4,  5,  2,  3,  6,  7,  8,  9, 12, 13, 10, 11, 14, 15},
    { 0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15}
};
242 static void mb_mode_mv_init(VP8D_COMP
*pbi
)
244 vp8_reader
*const bc
= & pbi
->bc
;
245 MV_CONTEXT
*const mvc
= pbi
->common
.fc
.mvc
;
247 #if CONFIG_ERROR_CONCEALMENT
248 /* Default is that no macroblock is corrupt, therefore we initialize
249 * mvs_corrupt_from_mb to something very big, which we can be sure is
250 * outside the frame. */
251 pbi
->mvs_corrupt_from_mb
= UINT_MAX
;
253 pbi
->prob_skip_false
= 0;
254 if (pbi
->common
.mb_no_coeff_skip
)
255 pbi
->prob_skip_false
= (vp8_prob
)vp8_read_literal(bc
, 8);
257 if(pbi
->common
.frame_type
!= KEY_FRAME
)
259 pbi
->prob_intra
= (vp8_prob
)vp8_read_literal(bc
, 8);
260 pbi
->prob_last
= (vp8_prob
)vp8_read_literal(bc
, 8);
261 pbi
->prob_gf
= (vp8_prob
)vp8_read_literal(bc
, 8);
263 if (vp8_read_bit(bc
))
269 pbi
->common
.fc
.ymode_prob
[i
] = (vp8_prob
) vp8_read_literal(bc
, 8);
274 if (vp8_read_bit(bc
))
280 pbi
->common
.fc
.uv_mode_prob
[i
] = (vp8_prob
) vp8_read_literal(bc
, 8);
285 read_mvcontexts(bc
, mvc
);
/* Decode mode info and motion vector(s) for one non-key-frame macroblock:
 * segment id, coeff-skip flag, reference frame, prediction mode, and the
 * MV(s); the chosen MV is replicated into the per-4x4-block array.
 * NOTE(review): this block is a garbled extraction -- braces, several local
 * declarations (e.g. 'rct', 'bmi', 'j', 'mv_contz', 'mb_to_left_edge',
 * 'mb_to_top_edge'), switch case labels, '#endif' lines and some argument
 * lists are missing, and the leading integers are the original file's line
 * numbers. Restore from upstream before building; comments below only
 * annotate what is visible.
 */
290 static void read_mb_modes_mv(VP8D_COMP
*pbi
, MODE_INFO
*mi
, MB_MODE_INFO
*mbmi
,
291 int mb_row
, int mb_col
)
293 vp8_reader
*const bc
= & pbi
->bc
;
294 MV_CONTEXT
*const mvc
= pbi
->common
.fc
.mvc
;
295 const int mis
= pbi
->common
.mode_info_stride
;
297 int_mv
*const mv
= & mbmi
->mv
;
299 int mb_to_right_edge
;
301 int mb_to_bottom_edge
;
/* Vertical clamping range was precomputed per MB row by the caller. */
303 mb_to_top_edge
= pbi
->mb
.mb_to_top_edge
;
304 mb_to_bottom_edge
= pbi
->mb
.mb_to_bottom_edge
;
305 mb_to_top_edge
-= LEFT_TOP_MARGIN
;
306 mb_to_bottom_edge
+= RIGHT_BOTTOM_MARGIN
;
308 mbmi
->need_to_clamp_mvs
= 0;
309 /* Distance of Mb to the various image edges.
310 * These specified to 8th pel as they are always compared to MV values that are in 1/8th pel units
312 pbi
->mb
.mb_to_left_edge
=
313 mb_to_left_edge
= -((mb_col
* 16) << 3);
314 mb_to_left_edge
-= LEFT_TOP_MARGIN
;
316 pbi
->mb
.mb_to_right_edge
=
317 mb_to_right_edge
= ((pbi
->common
.mb_cols
- 1 - mb_col
) * 16) << 3;
318 mb_to_right_edge
+= RIGHT_BOTTOM_MARGIN
;
320 /* If required read in new segmentation data for this MB */
321 if (pbi
->mb
.update_mb_segmentation_map
)
322 vp8_read_mb_features(bc
, mbmi
, &pbi
->mb
);
324 /* Read the macroblock coeff skip flag if this feature is in use, else default to 0 */
325 if (pbi
->common
.mb_no_coeff_skip
)
326 mbmi
->mb_skip_coeff
= vp8_read(bc
, pbi
->prob_skip_false
);
328 mbmi
->mb_skip_coeff
= 0;
/* One bool decides intra vs. inter; nonzero means an inter-coded MB. */
330 if ((mbmi
->ref_frame
= (MV_REFERENCE_FRAME
) vp8_read(bc
, pbi
->prob_intra
))) /* inter MB */
333 vp8_prob mv_ref_p
[VP8_MVREFS
-1];
334 int_mv nearest
, nearby
, best_mv
;
/* A second bool switches away from the first inter reference; a third
 * then selects between the remaining two. */
336 if (vp8_read(bc
, pbi
->prob_last
))
338 mbmi
->ref_frame
= (MV_REFERENCE_FRAME
)((int)mbmi
->ref_frame
+ (int)(1 + vp8_read(bc
, pbi
->prob_gf
)));
/* Survey neighboring MBs for candidate MVs; 'rct' (declaration elided
 * here) collects counts used to condition the mode probabilities. */
341 vp8_find_near_mvs(&pbi
->mb
, mi
, &nearest
, &nearby
, &best_mv
, rct
, mbmi
->ref_frame
, pbi
->common
.ref_frame_sign_bias
);
343 vp8_mv_ref_probs(mv_ref_p
, rct
);
345 mbmi
->uv_mode
= DC_PRED
;
346 switch (mbmi
->mode
= read_mv_ref(bc
, mv_ref_p
))
/* SPLITMV: decode the partitioning, then one sub-mode/MV per partition. */
350 const int s
= mbmi
->partitioning
=
351 vp8_treed_read(bc
, vp8_mbsplit_tree
, vp8_mbsplit_probs
);
352 const int num_p
= vp8_mbsplit_count
[s
];
355 do /* for each subset j */
357 int_mv leftmv
, abovemv
;
359 int k
; /* first block in subset j */
361 k
= vp8_mbsplit_offset
[s
][j
];
/* The sub-MV mode context derives from the left/above 4x4 block MVs. */
363 leftmv
.as_int
= left_block_mv(mi
, k
);
364 abovemv
.as_int
= above_block_mv(mi
, k
, mis
);
365 mv_contz
= vp8_mv_cont(&(leftmv
.as_mv
), &(abovemv
.as_mv
));
367 switch (bmi
.mode
= (B_PREDICTION_MODE
) sub_mv_ref(bc
, vp8_sub_mv_ref_prob2
[mv_contz
])) /*pc->fc.sub_mv_ref_prob))*/
/* NEW4X4: explicitly coded MV, applied as an offset from best_mv. */
370 read_mv(bc
, &bmi
.mv
.as_mv
, (const MV_CONTEXT
*) mvc
);
371 bmi
.mv
.as_mv
.row
+= best_mv
.as_mv
.row
;
372 bmi
.mv
.as_mv
.col
+= best_mv
.as_mv
.col
;
373 #ifdef VPX_MODE_COUNT
374 vp8_mv_cont_count
[mv_contz
][3]++;
/* LEFT4X4: copy the left neighbor's MV. */
378 bmi
.mv
.as_int
= leftmv
.as_int
;
379 #ifdef VPX_MODE_COUNT
380 vp8_mv_cont_count
[mv_contz
][0]++;
/* ABOVE4X4: copy the above neighbor's MV. */
384 bmi
.mv
.as_int
= abovemv
.as_int
;
385 #ifdef VPX_MODE_COUNT
386 vp8_mv_cont_count
[mv_contz
][1]++;
/* ZERO4X4 branch (its MV assignment is elided in this extraction). */
391 #ifdef VPX_MODE_COUNT
392 vp8_mv_cont_count
[mv_contz
][2]++;
/* Record whether this sub-MV may need clamping at prediction time
 * (remaining edge arguments elided in this extraction). */
399 mbmi
->need_to_clamp_mvs
= vp8_check_mv_bounds(&bmi
.mv
,
406 /* Fill (uniform) modes, mvs of jth subset.
407 Must do it here because ensuing subsets can
408 refer back to us via "left" or "above". */
409 const unsigned char *fill_offset
;
410 unsigned int fill_count
= mbsplit_fill_count
[s
];
412 fill_offset
= &mbsplit_fill_offset
[s
][(unsigned char)j
* mbsplit_fill_count
[s
]];
415 mi
->bmi
[ *fill_offset
].mv
.as_int
= bmi
.mv
.as_int
;
417 }while (--fill_count
);
/* The MB-level MV of a split MB is that of the last 4x4 block. */
424 mv
->as_int
= mi
->bmi
[15].mv
.as_int
;
426 break; /* done with SPLITMV */
/* NEARMV: adopt the 'nearby' candidate, clamped into the frame. */
429 mv
->as_int
= nearby
.as_int
;
430 /* Clip "next_nearest" so that it does not extend to far out of image */
431 vp8_clamp_mv(mv
, mb_to_left_edge
, mb_to_right_edge
,
432 mb_to_top_edge
, mb_to_bottom_edge
);
/* NEARESTMV: adopt the 'nearest' candidate, clamped into the frame. */
436 mv
->as_int
= nearest
.as_int
;
437 /* Clip "next_nearest" so that it does not extend to far out of image */
438 vp8_clamp_mv(mv
, mb_to_left_edge
, mb_to_right_edge
,
439 mb_to_top_edge
, mb_to_bottom_edge
);
/* NEWMV: explicitly coded MV, applied as an offset from best_mv. */
447 read_mv(bc
, &mv
->as_mv
, (const MV_CONTEXT
*) mvc
);
448 mv
->as_mv
.row
+= best_mv
.as_mv
.row
;
449 mv
->as_mv
.col
+= best_mv
.as_mv
.col
;
451 /* Don't need to check this on NEARMV and NEARESTMV modes
452 * since those modes clamp the MV. The NEWMV mode does not,
453 * so signal to the prediction stage whether special
454 * handling may be required.
456 mbmi
->need_to_clamp_mvs
= vp8_check_mv_bounds(mv
,
/* All whole-MB inter modes replicate the MB MV into every 4x4 entry. */
462 propagate_mv
: /* same MV throughout */
464 mi
->bmi
[ 0].mv
.as_int
=
465 mi
->bmi
[ 1].mv
.as_int
=
466 mi
->bmi
[ 2].mv
.as_int
=
467 mi
->bmi
[ 3].mv
.as_int
=
468 mi
->bmi
[ 4].mv
.as_int
=
469 mi
->bmi
[ 5].mv
.as_int
=
470 mi
->bmi
[ 6].mv
.as_int
=
471 mi
->bmi
[ 7].mv
.as_int
=
472 mi
->bmi
[ 8].mv
.as_int
=
473 mi
->bmi
[ 9].mv
.as_int
=
474 mi
->bmi
[10].mv
.as_int
=
475 mi
->bmi
[11].mv
.as_int
=
476 mi
->bmi
[12].mv
.as_int
=
477 mi
->bmi
[13].mv
.as_int
=
478 mi
->bmi
[14].mv
.as_int
=
479 mi
->bmi
[15].mv
.as_int
= mv
->as_int
;
490 /* required for left and above block mv */
493 /* MB is intra coded */
494 if ((mbmi
->mode
= (MB_PREDICTION_MODE
) vp8_read_ymode(bc
, pbi
->common
.fc
.ymode_prob
)) == B_PRED
)
499 mi
->bmi
[j
].as_mode
= (B_PREDICTION_MODE
)vp8_read_bmode(bc
, pbi
->common
.fc
.bmode_prob
);
504 mbmi
->uv_mode
= (MB_PREDICTION_MODE
)vp8_read_uv_mode(bc
, pbi
->common
.fc
.uv_mode_prob
);
509 void vp8_decode_mode_mvs(VP8D_COMP
*pbi
)
511 MODE_INFO
*mi
= pbi
->common
.mi
;
514 mb_mode_mv_init(pbi
);
516 while (++mb_row
< pbi
->common
.mb_rows
)
520 int mb_to_bottom_edge
;
522 pbi
->mb
.mb_to_top_edge
=
523 mb_to_top_edge
= -((mb_row
* 16)) << 3;
524 mb_to_top_edge
-= LEFT_TOP_MARGIN
;
526 pbi
->mb
.mb_to_bottom_edge
=
527 mb_to_bottom_edge
= ((pbi
->common
.mb_rows
- 1 - mb_row
) * 16) << 3;
528 mb_to_bottom_edge
+= RIGHT_BOTTOM_MARGIN
;
530 while (++mb_col
< pbi
->common
.mb_cols
)
532 #if CONFIG_ERROR_CONCEALMENT
533 int mb_num
= mb_row
* pbi
->common
.mb_cols
+ mb_col
;
535 /*read_mb_modes_mv(pbi, xd->mode_info_context, &xd->mode_info_context->mbmi, mb_row, mb_col);*/
536 if(pbi
->common
.frame_type
== KEY_FRAME
)
537 vp8_kfread_modes(pbi
, mi
, mb_row
, mb_col
);
539 read_mb_modes_mv(pbi
, mi
, &mi
->mbmi
, mb_row
, mb_col
);
541 #if CONFIG_ERROR_CONCEALMENT
542 /* look for corruption. set mvs_corrupt_from_mb to the current
543 * mb_num if the frame is corrupt from this macroblock. */
544 if (vp8dx_bool_error(&pbi
->bc
) && mb_num
< pbi
->mvs_corrupt_from_mb
)
546 pbi
->mvs_corrupt_from_mb
= mb_num
;
547 /* no need to continue since the partition is corrupt from
554 mi
++; /* next macroblock */
557 mi
++; /* skip left predictor each row */