/*For th_setup_info, packet state, idct, huffdec, dequant.*/
/*For oc_huff_codes_pack, oc_state_flushheader.*/
10 typedef struct th_rec_ctx oc_rec_ctx
;
11 typedef oc_tok_hist oc_tok_hist_table
[5];
/*Reading packet statistics.*/
#define OC_PACKET_ANALYZE    (1)
/*Waiting for Huffman tables to be set.*/
#define OC_PACKET_HUFFTABLES (2)
/*Rewriting data packets.*/
/*NOTE(review): this value was recovered from a damaged extraction; elsewhere
   in this file OC_PACKET_REWRITE is used as the upper bound of a packet-state
   range check, so confirm the value against the upstream source.*/
#define OC_PACKET_REWRITE    (0)
25 /*Shared encoder/decoder state.*/
26 oc_theora_state state
;
27 /*The next four fields must be in the given positions in order to be
28 compatible with some encoder functions we call.*/
29 /*Whether or not packets are ready to be emitted.
30 This takes on negative values while there are remaining header packets to
31 be emitted, reaches 0 when the codec is ready for input, and goes to 1
32 when a frame has been processed and a data packet is ready.*/
34 /*Buffer in which to assemble packets.*/
35 oggpack_buffer enc_opb
;
36 /*Huffman encode tables.*/
37 th_huff_code enc_huff_codes
[TH_NHUFFMAN_TABLES
][TH_NDCT_TOKENS
];
38 /*Quantization parameters.*/
40 /*The previous four fields must be in the given positions in order to be
41 compatible with some encoder functions we call.*/
42 /*Buffer from which to decode packets.*/
43 oggpack_buffer dec_opb
;
44 /*Huffman decode trees.*/
45 oc_huff_node
*dec_huff_tables
[TH_NHUFFMAN_TABLES
];
46 /*The index of one past the last token in each plane for each coefficient.
47 The final entries are the total number of tokens for each coefficient.*/
49 /*The index of one past the last extra bits entry in each plane for each
51 The final entries are the total number of extra bits entries for each
54 /*The number of outstanding EOB runs at the start of each coefficient in each
57 /*The DCT token lists.*/
58 unsigned char **dct_tokens
;
59 /*The extra bits associated with DCT tokens.*/
60 ogg_uint16_t
**extra_bits
;
61 /*The DCT token counts for the last decoded frame.*/
62 oc_tok_hist tok_hist
[2][5];
63 /*The DCT token counts for all decoded frames.*/
64 oc_frame_tok_hist
*tok_hists
;
67 /*The index of the set of token counts used for the current frame while
74 /*The mode alphabets for the various mode coding schemes.
75 Scheme 0 uses a custom alphabet, which is not stored in this table.*/
76 static const int OC_MODE_ALPHABETS
[7][OC_NMODES
]={
77 /*Last MV dominates */
79 OC_MODE_INTER_MV_LAST
,OC_MODE_INTER_MV_LAST2
,OC_MODE_INTER_MV
,
80 OC_MODE_INTER_NOMV
,OC_MODE_INTRA
,OC_MODE_GOLDEN_NOMV
,OC_MODE_GOLDEN_MV
,
84 OC_MODE_INTER_MV_LAST
,OC_MODE_INTER_MV_LAST2
,OC_MODE_INTER_NOMV
,
85 OC_MODE_INTER_MV
,OC_MODE_INTRA
,OC_MODE_GOLDEN_NOMV
,OC_MODE_GOLDEN_MV
,
89 OC_MODE_INTER_MV_LAST
,OC_MODE_INTER_MV
,OC_MODE_INTER_MV_LAST2
,
90 OC_MODE_INTER_NOMV
,OC_MODE_INTRA
,OC_MODE_GOLDEN_NOMV
,OC_MODE_GOLDEN_MV
,
94 OC_MODE_INTER_MV_LAST
,OC_MODE_INTER_MV
,OC_MODE_INTER_NOMV
,
95 OC_MODE_INTER_MV_LAST2
,OC_MODE_INTRA
,OC_MODE_GOLDEN_NOMV
,
96 OC_MODE_GOLDEN_MV
,OC_MODE_INTER_MV_FOUR
100 OC_MODE_INTER_NOMV
,OC_MODE_INTER_MV_LAST
,OC_MODE_INTER_MV_LAST2
,
101 OC_MODE_INTER_MV
,OC_MODE_INTRA
,OC_MODE_GOLDEN_NOMV
,OC_MODE_GOLDEN_MV
,
102 OC_MODE_INTER_MV_FOUR
105 OC_MODE_INTER_NOMV
,OC_MODE_GOLDEN_NOMV
,OC_MODE_INTER_MV_LAST
,
106 OC_MODE_INTER_MV_LAST2
,OC_MODE_INTER_MV
,OC_MODE_INTRA
,OC_MODE_GOLDEN_MV
,
107 OC_MODE_INTER_MV_FOUR
109 /*Default ordering.*/
111 OC_MODE_INTER_NOMV
,OC_MODE_INTRA
,OC_MODE_INTER_MV
,OC_MODE_INTER_MV_LAST
,
112 OC_MODE_INTER_MV_LAST2
,OC_MODE_GOLDEN_NOMV
,OC_MODE_GOLDEN_MV
,
113 OC_MODE_INTER_MV_FOUR
119 static int oc_sb_run_unpack(oggpack_buffer
*_opb
){
130 111111xxxxxxxxxxxx 34-4129*/
131 theora_read1(_opb
,&bits
);
133 theora_read(_opb
,2,&bits
);
134 if((bits
&2)==0)return 2+(int)bits
;
135 else if((bits
&1)==0){
136 theora_read1(_opb
,&bits
);
139 theora_read(_opb
,3,&bits
);
140 if((bits
&4)==0)return 6+(int)bits
;
141 else if((bits
&2)==0){
142 ret
=10+((bits
&1)<<2);
143 theora_read(_opb
,2,&bits
);
144 return ret
+(int)bits
;
146 else if((bits
&1)==0){
147 theora_read(_opb
,4,&bits
);
150 theora_read(_opb
,12,&bits
);
154 static int oc_block_run_unpack(oggpack_buffer
*_opb
){
165 theora_read(_opb
,2,&bits
);
166 if((bits
&2)==0)return 1+(int)bits
;
167 else if((bits
&1)==0){
168 theora_read1(_opb
,&bits
);
171 theora_read(_opb
,2,&bits
);
172 if((bits
&2)==0)return 5+(int)bits
;
173 else if((bits
&1)==0){
174 theora_read(_opb
,2,&bits
);
177 theora_read(_opb
,3,&bits
);
178 if((bits
&4)==0)return 11+bits
;
179 theora_read(_opb
,2,&bits2
);
180 return 15+((bits
&3)<<2)+bits2
;
183 static void oc_quant_params_copy(th_quant_info
*_qdst
,
184 const th_quant_info
*_qsrc
){
186 memcpy(_qdst
,_qsrc
,sizeof(*_qdst
));
196 if(i
>0&&_qsrc
->qi_ranges
[qti
][pli
].sizes
==
197 _qsrc
->qi_ranges
[qtj
][plj
].sizes
){
198 _qdst
->qi_ranges
[qti
][pli
].sizes
=_qdst
->qi_ranges
[qtj
][plj
].sizes
;
200 else if(qti
>0&&_qsrc
->qi_ranges
[1][pli
].sizes
==
201 _qsrc
->qi_ranges
[0][pli
].sizes
){
202 _qdst
->qi_ranges
[1][pli
].sizes
=_qdst
->qi_ranges
[0][pli
].sizes
;
206 sizes
=(int *)_ogg_malloc(
207 _qsrc
->qi_ranges
[qti
][pli
].nranges
*sizeof(*sizes
));
208 memcpy(sizes
,_qsrc
->qi_ranges
[qti
][pli
].sizes
,
209 _qsrc
->qi_ranges
[qti
][pli
].nranges
*sizeof(*sizes
));
210 _qdst
->qi_ranges
[qti
][pli
].sizes
=sizes
;
212 if(i
>0&&_qsrc
->qi_ranges
[qti
][pli
].base_matrices
==
213 _qsrc
->qi_ranges
[qtj
][plj
].base_matrices
){
214 _qdst
->qi_ranges
[qti
][pli
].base_matrices
=
215 _qdst
->qi_ranges
[qtj
][plj
].base_matrices
;
217 else if(qti
>0&&_qsrc
->qi_ranges
[1][pli
].base_matrices
==
218 _qsrc
->qi_ranges
[0][pli
].base_matrices
){
219 _qdst
->qi_ranges
[1][pli
].base_matrices
=
220 _qdst
->qi_ranges
[0][pli
].base_matrices
;
223 th_quant_base
*base_matrices
;
224 base_matrices
=(th_quant_base
*)_ogg_malloc(
225 (_qsrc
->qi_ranges
[qti
][pli
].nranges
+1)*sizeof(*base_matrices
));
226 memcpy(base_matrices
,_qsrc
->qi_ranges
[qti
][pli
].base_matrices
,
227 (_qsrc
->qi_ranges
[qti
][pli
].nranges
+1)*sizeof(*base_matrices
));
228 _qdst
->qi_ranges
[qti
][pli
].base_matrices
=
229 (const th_quant_base
*)base_matrices
;
235 static int oc_rec_init(oc_rec_ctx
*_rec
,const th_info
*_info
,
236 const th_setup_info
*_setup
){
238 ret
=oc_state_init(&_rec
->state
,_info
);
240 oc_huff_trees_copy(_rec
->dec_huff_tables
,
241 (const oc_huff_node
*const *)_setup
->huff_tables
);
242 /*Do a deep copy of the quant params, since we will need to refer to this
243 data again (unlike in the normal decoder).*/
244 oc_quant_params_copy(&_rec
->qinfo
,&_setup
->qinfo
);
245 _rec
->dct_tokens
=(unsigned char **)oc_calloc_2d(64,
246 _rec
->state
.nfrags
,sizeof(_rec
->dct_tokens
[0][0]));
247 _rec
->extra_bits
=(ogg_uint16_t
**)oc_calloc_2d(64,
248 _rec
->state
.nfrags
,sizeof(_rec
->extra_bits
[0][0]));
249 _rec
->tok_hists
=NULL
;
250 _rec
->ntok_hists
=_rec
->ctok_hists
=0;
251 _rec
->cur_tok_histi
=0;
252 _rec
->packet_state
=OC_PACKET_ANALYZE
;
253 oggpackB_writeinit(&_rec
->enc_opb
);
257 static void oc_rec_clear(oc_rec_ctx
*_rec
){
258 _ogg_free(_rec
->tok_hists
);
259 oc_free_2d(_rec
->extra_bits
);
260 oc_free_2d(_rec
->dct_tokens
);
261 oc_quant_params_clear(&_rec
->qinfo
);
262 oc_huff_trees_clear(_rec
->dec_huff_tables
);
263 oggpackB_writeclear(&_rec
->enc_opb
);
264 oc_state_clear(&_rec
->state
);
268 static int oc_rec_frame_header_unpack(oc_rec_ctx
*_rec
){
270 /*Check to make sure this is a data packet.*/
271 theora_read1(&_rec
->dec_opb
,&val
);
272 if(val
!=0)return TH_EBADPACKET
;
273 /*Read in the frame type (I or P).*/
274 theora_read1(&_rec
->dec_opb
,&val
);
275 _rec
->state
.frame_type
=(int)val
;
276 /*Read in the current qi.*/
277 theora_read(&_rec
->dec_opb
,6,&val
);
278 _rec
->state
.qis
[0]=(int)val
;
279 theora_read1(&_rec
->dec_opb
,&val
);
280 if(!val
)_rec
->state
.nqis
=1;
282 theora_read(&_rec
->dec_opb
,6,&val
);
283 _rec
->state
.qis
[1]=(int)val
;
284 theora_read1(&_rec
->dec_opb
,&val
);
285 if(!val
)_rec
->state
.nqis
=2;
287 theora_read(&_rec
->dec_opb
,6,&val
);
288 _rec
->state
.qis
[2]=(int)val
;
292 if(_rec
->state
.frame_type
==OC_INTRA_FRAME
){
293 /*Keyframes have 3 unused configuration bits, holdovers from VP3 days.
294 Most of the other unused bits in the VP3 headers were eliminated.
295 I don't know why these remain.*/
296 theora_read(&_rec
->dec_opb
,3,&val
);
297 if(val
!=0)return TH_EIMPL
;
302 /*Mark all fragments as coded and in OC_MODE_INTRA.
303 This also builds up the coded fragment list (in coded order), and clears the
304 uncoded fragment list.
305 It does not update the coded macro block list, as that is not used when
306 decoding INTRA frames.*/
307 static void oc_rec_mark_all_intra(oc_rec_ctx
*_rec
){
312 int prev_ncoded_fragis
;
313 prev_ncoded_fragis
=ncoded_fragis
=0;
314 sb
=sb_end
=_rec
->state
.sbs
;
315 for(pli
=0;pli
<3;pli
++){
316 const oc_fragment_plane
*fplane
;
317 fplane
=_rec
->state
.fplanes
+pli
;
318 sb_end
+=fplane
->nsbs
;
319 for(;sb
<sb_end
;sb
++){
321 for(quadi
=0;quadi
<4;quadi
++)if(sb
->quad_valid
&1<<quadi
){
325 fragi
=sb
->map
[quadi
][bi
];
328 frag
=_rec
->state
.frags
+fragi
;
330 frag
->mbmode
=OC_MODE_INTRA
;
331 _rec
->state
.coded_fragis
[ncoded_fragis
++]=fragi
;
336 _rec
->state
.ncoded_fragis
[pli
]=ncoded_fragis
-prev_ncoded_fragis
;
337 prev_ncoded_fragis
=ncoded_fragis
;
338 _rec
->state
.nuncoded_fragis
[pli
]=0;
342 /*Decodes the bit flags for whether or not each super block is partially coded
344 Return: The number of partially coded super blocks.*/
345 static int oc_rec_partial_sb_flags_unpack(oc_rec_ctx
*_rec
){
352 theora_read1(&_rec
->dec_opb
,&val
);
355 sb_end
=sb
+_rec
->state
.nsbs
;
356 run_count
=npartial
=0;
359 run_count
=oc_sb_run_unpack(&_rec
->dec_opb
);
360 full_run
=run_count
>=4129;
362 sb
->coded_partially
=flag
;
367 while(--run_count
>0&&sb
<sb_end
);
368 if(full_run
&&sb
<sb_end
){
369 theora_read1(&_rec
->dec_opb
,&val
);
374 /*TODO: run_count should be 0 here.
375 If it's not, we should issue a warning of some kind.*/
379 /*Decodes the bit flags for whether or not each non-partially-coded super
380 block is fully coded or not.
381 This function should only be called if there is at least one
382 non-partially-coded super block.
383 Return: The number of partially coded super blocks.*/
384 static void oc_rec_coded_sb_flags_unpack(oc_rec_ctx
*_rec
){
391 sb_end
=sb
+_rec
->state
.nsbs
;
392 /*Skip partially coded super blocks.*/
393 for(;sb
->coded_partially
;sb
++);
394 theora_read1(&_rec
->dec_opb
,&val
);
398 run_count
=oc_sb_run_unpack(&_rec
->dec_opb
);
399 full_run
=run_count
>=4129;
400 for(;sb
<sb_end
;sb
++){
401 if(sb
->coded_partially
)continue;
402 if(run_count
--<=0)break;
403 sb
->coded_fully
=flag
;
405 if(full_run
&&sb
<sb_end
){
406 theora_read1(&_rec
->dec_opb
,&val
);
411 /*TODO: run_count should be 0 here.
412 If it's not, we should issue a warning of some kind.*/
415 static void oc_rec_coded_flags_unpack(oc_rec_ctx
*_rec
){
424 int prev_ncoded_fragis
;
426 int prev_nuncoded_fragis
;
427 npartial
=oc_rec_partial_sb_flags_unpack(_rec
);
428 if(npartial
<_rec
->state
.nsbs
)oc_rec_coded_sb_flags_unpack(_rec
);
430 theora_read1(&_rec
->dec_opb
,&val
);
435 prev_ncoded_fragis
=ncoded_fragis
=prev_nuncoded_fragis
=nuncoded_fragis
=0;
436 sb
=sb_end
=_rec
->state
.sbs
;
437 for(pli
=0;pli
<3;pli
++){
438 const oc_fragment_plane
*fplane
;
439 fplane
=_rec
->state
.fplanes
+pli
;
440 sb_end
+=fplane
->nsbs
;
441 for(;sb
<sb_end
;sb
++){
443 for(quadi
=0;quadi
<4;quadi
++)if(sb
->quad_valid
&1<<quadi
){
447 fragi
=sb
->map
[quadi
][bi
];
450 frag
=_rec
->state
.frags
+fragi
;
451 if(sb
->coded_fully
)frag
->coded
=1;
452 else if(!sb
->coded_partially
)frag
->coded
=0;
455 run_count
=oc_block_run_unpack(&_rec
->dec_opb
);
461 if(frag
->coded
)_rec
->state
.coded_fragis
[ncoded_fragis
++]=fragi
;
462 else *(_rec
->state
.uncoded_fragis
-++nuncoded_fragis
)=fragi
;
467 _rec
->state
.ncoded_fragis
[pli
]=ncoded_fragis
-prev_ncoded_fragis
;
468 prev_ncoded_fragis
=ncoded_fragis
;
469 _rec
->state
.nuncoded_fragis
[pli
]=nuncoded_fragis
-prev_nuncoded_fragis
;
470 prev_nuncoded_fragis
=nuncoded_fragis
;
472 /*TODO: run_count should be 0 here.
473 If it's not, we should issue a warning of some kind.*/
478 typedef int (*oc_mode_unpack_func
)(oggpack_buffer
*_opb
);
480 static int oc_vlc_mode_unpack(oggpack_buffer
*_opb
){
484 theora_read1(_opb
,&val
);
490 static int oc_clc_mode_unpack(oggpack_buffer
*_opb
){
492 theora_read(_opb
,3,&val
);
496 /*Unpacks the list of macro block modes for INTER frames.*/
497 void oc_rec_mb_modes_unpack(oc_rec_ctx
*_rec
){
498 oc_mode_unpack_func mode_unpack
;
503 int scheme0_alphabet
[8];
505 theora_read(&_rec
->dec_opb
,3,&val
);
506 mode_scheme
=(int)val
;
509 /*Just in case, initialize the modes to something.
510 If the bitstream doesn't contain each index exactly once, it's likely
511 corrupt and the rest of the packet is garbage anyway, but this way we
512 won't crash, and we'll decode SOMETHING.*/
514 for(mi
=0;mi
<OC_NMODES
;mi
++)scheme0_alphabet
[mi
]=OC_MODE_INTER_NOMV
;
515 for(mi
=0;mi
<OC_NMODES
;mi
++){
516 theora_read(&_rec
->dec_opb
,3,&val
);
517 scheme0_alphabet
[val
]=OC_MODE_ALPHABETS
[6][mi
];
519 alphabet
=scheme0_alphabet
;
521 else alphabet
=OC_MODE_ALPHABETS
[mode_scheme
-1];
522 if(mode_scheme
==7)mode_unpack
=oc_clc_mode_unpack
;
523 else mode_unpack
=oc_vlc_mode_unpack
;
525 mb_end
=mb
+_rec
->state
.nmbs
;
526 for(;mb
<mb_end
;mb
++)if(mb
->mode
!=OC_MODE_INVALID
){
530 fragi
=mb
->map
[0][bi
];
531 if(fragi
>=0&&_rec
->state
.frags
[fragi
].coded
)break;
533 if(bi
<4)mb
->mode
=alphabet
[(*mode_unpack
)(&_rec
->dec_opb
)];
534 else mb
->mode
=OC_MODE_INTER_NOMV
;
540 typedef int (*oc_mv_comp_unpack_func
)(oggpack_buffer
*_opb
);
542 static int oc_vlc_mv_comp_unpack(oggpack_buffer
*_opb
){
545 theora_read(_opb
,3,&bits
);
552 theora_read1(_opb
,&bits
);
556 theora_read1(_opb
,&bits
);
559 theora_read(_opb
,3,&bits
);
560 mvsigned
[0]=4+(bits
>>1);
564 theora_read(_opb
,4,&bits
);
565 mvsigned
[0]=8+(bits
>>1);
569 theora_read(_opb
,5,&bits
);
570 mvsigned
[0]=16+(bits
>>1);
574 mvsigned
[1]=-mvsigned
[0];
575 return mvsigned
[bits
];
578 static int oc_clc_mv_comp_unpack(oggpack_buffer
*_opb
){
581 theora_read(_opb
,6,&bits
);
583 mvsigned
[1]=-mvsigned
[0];
584 return mvsigned
[bits
&1];
587 /*Unpacks the list of motion vectors for INTER frames.
588 Does not propagte the macro block modes and motion vectors to the individual
590 The purpose of this function is solely to skip these bits in the packet.*/
591 static void oc_rec_mv_unpack(oc_rec_ctx
*_rec
){
592 oc_mv_comp_unpack_func mv_comp_unpack
;
598 theora_read1(&_rec
->dec_opb
,&val
);
599 mv_comp_unpack
=val
?oc_clc_mv_comp_unpack
:oc_vlc_mv_comp_unpack
;
600 map_idxs
=OC_MB_MAP_IDXS
[_rec
->state
.info
.pixel_fmt
];
601 map_nidxs
=OC_MB_MAP_NIDXS
[_rec
->state
.info
.pixel_fmt
];
603 mb_end
=mb
+_rec
->state
.nmbs
;
604 for(;mb
<mb_end
;mb
++)if(mb
->mode
!=OC_MODE_INVALID
){
611 /*Search for at least one coded fragment.*/
614 mapi
=map_idxs
[mapii
];
615 fragi
=mb
->map
[mapi
>>2][mapi
&3];
616 if(fragi
>=0&&_rec
->state
.frags
[fragi
].coded
)coded
[ncoded
++]=mapi
;
618 while(++mapii
<map_nidxs
);
619 if(ncoded
<=0)continue;
621 case OC_MODE_INTER_MV_FOUR
:{
623 /*Mark the tail of the list, so we don't accidentally go past it.*/
625 for(bi
=codedi
=0;bi
<4;bi
++)if(coded
[codedi
]==bi
){
627 (*mv_comp_unpack
)(&_rec
->dec_opb
);
628 (*mv_comp_unpack
)(&_rec
->dec_opb
);
631 case OC_MODE_INTER_MV
:{
632 (*mv_comp_unpack
)(&_rec
->dec_opb
);
633 (*mv_comp_unpack
)(&_rec
->dec_opb
);
635 case OC_MODE_GOLDEN_MV
:{
636 (*mv_comp_unpack
)(&_rec
->dec_opb
);
637 (*mv_comp_unpack
)(&_rec
->dec_opb
);
643 static void oc_rec_block_qis_unpack(oc_rec_ctx
*_rec
){
645 int *coded_fragi_end
;
647 ncoded_fragis
=_rec
->state
.ncoded_fragis
[0]+
648 _rec
->state
.ncoded_fragis
[1]+_rec
->state
.ncoded_fragis
[2];
649 if(ncoded_fragis
<=0)return;
650 coded_fragi
=_rec
->state
.coded_fragis
;
651 coded_fragi_end
=coded_fragi
+ncoded_fragis
;
652 if(_rec
->state
.nqis
>1){
657 /*If this frame has more than one qi value, we decode a qi index for each
658 fragment, using two passes of the same binary RLE scheme used for
659 super-block coded bits.
660 The first pass marks each fragment as having a qii of 0 or greater than
661 0, and the second pass (if necessary), distinguishes between a qii of
663 We just store the qii in the fragment.*/
664 theora_read1(&_rec
->dec_opb
,&val
);
667 while(coded_fragi
<coded_fragi_end
){
669 run_count
=oc_sb_run_unpack(&_rec
->dec_opb
);
670 full_run
=run_count
>=4129;
672 _rec
->state
.frags
[*coded_fragi
++].qi
=flag
;
675 while(--run_count
>0&&coded_fragi
<coded_fragi_end
);
676 if(full_run
&&coded_fragi
<coded_fragi_end
){
677 theora_read1(&_rec
->dec_opb
,&val
);
682 /*TODO: run_count should be 0 here.
683 If it's not, we should issue a warning of some kind.*/
684 /*If we have 3 different qi's for this frame, and there was at least one
685 fragment with a non-zero qi, make the second pass.*/
686 if(_rec
->state
.nqis
==3&&nqi0
<ncoded_fragis
){
687 /*Skip qii==0 fragments.*/
688 for(coded_fragi
=_rec
->state
.coded_fragis
;
689 _rec
->state
.frags
[*coded_fragi
].qi
==0;coded_fragi
++);
690 theora_read1(&_rec
->dec_opb
,&val
);
692 while(coded_fragi
<coded_fragi_end
){
694 run_count
=oc_sb_run_unpack(&_rec
->dec_opb
);
695 full_run
=run_count
>=4129;
696 for(;coded_fragi
<coded_fragi_end
;coded_fragi
++){
698 frag
=_rec
->state
.frags
+*coded_fragi
;
699 if(frag
->qi
==0)continue;
700 if(run_count
--<=0)break;
703 if(full_run
&&coded_fragi
<coded_fragi_end
){
704 theora_read1(&_rec
->dec_opb
,&val
);
709 /*TODO: run_count should be 0 here.
710 If it's not, we should issue a warning of some kind.*/
715 /*Unpacks the DC coefficient tokens.
716 Unlike when unpacking the AC coefficient tokens, we actually need to decode
717 the DC coefficient values now so that we can do DC prediction.
718 _huff_idx: The index of the Huffman table to use for each color plane.
719 _ntoks_left: The number of tokens left to be decoded in each color plane for
721 This is updated as EOB tokens and zero run tokens are decoded.
722 Return: The length of any outstanding EOB run.*/
723 static int oc_rec_dc_coeff_unpack(oc_rec_ctx
*_rec
,int _huff_idxs
[3],
724 int *_tok_hists
[3],int _ntoks_left
[3][64]){
727 int *coded_fragi_end
;
738 coded_fragi_end
=coded_fragi
=_rec
->state
.coded_fragis
;
739 for(pli
=0;pli
<3;pli
++){
740 coded_fragi_end
+=_rec
->state
.ncoded_fragis
[pli
];
741 memset(run_counts
,0,sizeof(run_counts
));
742 _rec
->eob_runs
[pli
][0]=eobs
;
743 /*Continue any previous EOB run, if there was one.*/
744 for(eobi
=eobs
;eobi
-->0&&coded_fragi
<coded_fragi_end
;coded_fragi
++);
746 while(eobs
<_ntoks_left
[pli
][0]-cfi
){
752 run_counts
[63]+=eobs
;
753 token
=oc_huff_token_decode(&_rec
->dec_opb
,
754 _rec
->dec_huff_tables
[_huff_idxs
[pli
]]);
755 _rec
->dct_tokens
[0][ti
++]=(char)token
;
756 _tok_hists
[pli
][token
]++;
757 neb
=OC_DCT_TOKEN_EXTRA_BITS
[token
];
759 theora_read(&_rec
->dec_opb
,neb
,&val
);
761 _rec
->extra_bits
[0][ebi
++]=(ogg_int16_t
)eb
;
764 skip
=oc_dct_token_skip(token
,eb
);
767 while(eobi
-->0&&coded_fragi
<coded_fragi_end
)coded_fragi
++;
770 run_counts
[skip
-1]++;
776 _rec
->ti0
[pli
][0]=ti
;
777 _rec
->ebi0
[pli
][0]=ebi
;
778 /*Set the EOB count to the portion of the last EOB run which extends past
780 eobs
=eobs
+cfi
-_ntoks_left
[pli
][0];
781 /*Add the portion of the last EOB which was included in this coefficient to
782 to the longest run length.*/
783 run_counts
[63]+=_ntoks_left
[pli
][0]-cfi
;
784 /*And convert the run_counts array to a moment table.*/
785 for(rli
=63;rli
-->0;)run_counts
[rli
]+=run_counts
[rli
+1];
786 /*Finally, subtract off the number of coefficients that have been
787 accounted for by runs started in this coefficient.*/
788 for(rli
=64;rli
-->0;)_ntoks_left
[pli
][rli
]-=run_counts
[rli
];
793 /*Unpacks the AC coefficient tokens.
794 This can completely discard coefficient values while unpacking, and so is
795 somewhat simpler than unpacking the DC coefficient tokens.
796 _huff_idx: The index of the Huffman table to use for each color plane.
797 _ntoks_left: The number of tokens left to be decoded in each color plane for
799 This is updated as EOB tokens and zero run tokens are decoded.
800 _eobs: The length of any outstanding EOB run from previous
802 Return: The length of any outstanding EOB run.*/
803 static int oc_rec_ac_coeff_unpack(oc_rec_ctx
*_rec
,int _zzi
,int _huff_idxs
[3],
804 int *_tok_hists
[3],int _ntoks_left
[3][64],int _eobs
){
813 for(pli
=0;pli
<3;pli
++){
814 memset(run_counts
,0,sizeof(run_counts
));
815 _rec
->eob_runs
[pli
][_zzi
]=_eobs
;
817 while(_eobs
<_ntoks_left
[pli
][_zzi
]-cfi
){
823 run_counts
[63]+=_eobs
;
824 token
=oc_huff_token_decode(&_rec
->dec_opb
,
825 _rec
->dec_huff_tables
[_huff_idxs
[pli
]]);
826 _rec
->dct_tokens
[_zzi
][ti
++]=(char)token
;
827 _tok_hists
[pli
][token
]++;
828 neb
=OC_DCT_TOKEN_EXTRA_BITS
[token
];
830 theora_read(&_rec
->dec_opb
,neb
,&val
);
832 _rec
->extra_bits
[_zzi
][ebi
++]=(ogg_int16_t
)eb
;
835 skip
=oc_dct_token_skip(token
,eb
);
836 if(skip
<0)_eobs
=-skip
;
838 run_counts
[skip
-1]++;
843 _rec
->ti0
[pli
][_zzi
]=ti
;
844 _rec
->ebi0
[pli
][_zzi
]=ebi
;
845 /*Set the EOB count to the portion of the last EOB run which extends past
847 _eobs
=_eobs
+cfi
-_ntoks_left
[pli
][_zzi
];
848 /*Add the portion of the last EOB which was included in this coefficient to
849 to the longest run length.*/
850 run_counts
[63]+=_ntoks_left
[pli
][_zzi
]-cfi
;
851 /*And convert the run_counts array to a moment table.*/
852 for(rli
=63;rli
-->0;)run_counts
[rli
]+=run_counts
[rli
+1];
853 /*Finally, subtract off the number of coefficients that have been
854 accounted for by runs started in this coefficient.*/
855 for(rli
=64-_zzi
;rli
-->0;)_ntoks_left
[pli
][_zzi
+rli
]-=run_counts
[rli
];
860 /*Tokens describing the DCT coefficients that belong to each fragment are
861 stored in the bitstream grouped by coefficient, not by fragment.
862 This means that we either decode all the tokens in order, building up a
863 separate coefficient list for each fragment as we go, and then go back and
864 do the iDCT on each fragment, or we have to create separate lists of tokens
865 for each coefficient, so that we can pull the next token required off the
866 head of the appropriate list when decoding a specific fragment.
867 The former was VP3's choice, and it meant 2*w*h extra storage for all the
868 decoded coefficient values.
869 We take the second option, which lets us store just one or three bytes per
870 token (generally far fewer than the number of coefficients, due to EOB
871 tokens and zero runs), and which requires us to only maintain a counter for
872 each of the 64 coefficients, instead of a counter for every fragment to
873 determine where the next token goes.
874 Actually, we use 3 counters per coefficient, one for each color plane, so we
875 can decode all color planes simultaneously.
876 This lets us color conversion, etc., be done as soon as a full MCU (one or
877 two super block rows) is decoded, while the image data is still in cache.*/
878 static void oc_rec_residual_tokens_unpack(oc_rec_ctx
*_rec
){
879 static const int OC_HUFF_LIST_MAX
[5]={1,6,15,28,64};
881 int ntoks_left
[3][64];
890 memset(_rec
->tok_hist
,0,sizeof(_rec
->tok_hist
));
891 for(pli
=0;pli
<3;pli
++)for(zzi
=0;zzi
<64;zzi
++){
892 ntoks_left
[pli
][zzi
]=_rec
->state
.ncoded_fragis
[pli
];
894 theora_read(&_rec
->dec_opb
,4,&val
);
896 theora_read(&_rec
->dec_opb
,4,&val
);
898 huff_idxs
[0]=huffi_y
;
899 huff_idxs
[1]=huff_idxs
[2]=huffi_c
;
900 tok_hists
[0]=_rec
->tok_hist
[0][0];
901 tok_hists
[1]=tok_hists
[2]=_rec
->tok_hist
[1][0];
902 _rec
->eob_runs
[0][0]=0;
903 eobs
=oc_rec_dc_coeff_unpack(_rec
,huff_idxs
,tok_hists
,ntoks_left
);
904 theora_read(&_rec
->dec_opb
,4,&val
);
906 theora_read(&_rec
->dec_opb
,4,&val
);
909 for(hgi
=1;hgi
<5;hgi
++){
910 huff_idxs
[0]=huffi_y
+(hgi
<<4);
911 huff_idxs
[1]=huff_idxs
[2]=huffi_c
+(hgi
<<4);
912 tok_hists
[0]=_rec
->tok_hist
[0][hgi
];
913 tok_hists
[1]=tok_hists
[2]=_rec
->tok_hist
[1][hgi
];
914 for(;zzi
<OC_HUFF_LIST_MAX
[hgi
];zzi
++){
915 eobs
=oc_rec_ac_coeff_unpack(_rec
,zzi
,huff_idxs
,tok_hists
,ntoks_left
,eobs
);
918 /*TODO: eobs should be exactly zero, or 4096 or greater.
919 The second case occurs when an EOB run of size zero is encountered, which
920 gets treated as an infinite EOB run (where infinity is INT_MAX).
921 If neither of these conditions holds, then a warning should be issued.*/
924 static int oc_rec_set_huffman_codes(oc_rec_ctx
*_rec
,
925 const th_huff_code _codes
[TH_NHUFFMAN_TABLES
][TH_NDCT_TOKENS
]){
927 if(_rec
==NULL
)return TH_EFAULT
;
928 /*If we've already emitted the setup header, then don't let the user set the
930 if(_rec
->packet_state
>=OC_PACKET_SETUP_HDR
&&
931 _rec
->packet_state
<=OC_PACKET_REWRITE
){
934 if(_codes
==NULL
)_codes
=TH_VP31_HUFF_CODES
;
935 /*Validate the codes.*/
936 oggpackB_reset(&_rec
->enc_opb
);
937 ret
=oc_huff_codes_pack(&_rec
->enc_opb
,_codes
);
939 memcpy(_rec
->enc_huff_codes
,_codes
,sizeof(_rec
->enc_huff_codes
));
940 _rec
->packet_state
=OC_PACKET_INFO_HDR
;
944 /*Computes the number of bits used for each of the potential Huffman codes for
945 the given list of token counts.
946 The bits are added to whatever the current bit counts are.*/
947 static void oc_rec_count_bits(oc_rec_ctx
*_rec
,int _hgi
,
948 const int _token_counts
[TH_NDCT_TOKENS
],int _bit_counts
[16]){
953 for(huffi
=huff_base
;huffi
<huff_base
+16;huffi
++){
954 for(token
=0;token
<TH_NDCT_TOKENS
;token
++){
955 _bit_counts
[huffi
-huff_base
]+=
956 _token_counts
[token
]*_rec
->enc_huff_codes
[huffi
][token
].nbits
;
/*Returns the Huffman index using the fewest number of bits.
  Ties are broken in favor of the lowest index.
  _bit_counts: The bit total each of the 16 candidate tables would use.
  Return: The index (0...15) of the cheapest table.
  NOTE(review): declarations and the loop body were lost in extraction and
  have been reconstructed from the visible argmin comparison.*/
static int oc_rec_select_huffi(int _bit_counts[16]){
  int best_huffi;
  int huffi;
  best_huffi=0;
  for(huffi=1;huffi<16;huffi++)if(_bit_counts[huffi]<_bit_counts[best_huffi]){
    best_huffi=huffi;
  }
  return best_huffi;
}
972 /*Packs the DCT tokens for the given range of coefficient indices in zig-zag
973 order using the given Huffman tables.*/
974 static void oc_rec_huff_group_pack(oc_rec_ctx
*_rec
,int _zzi_start
,
975 int _zzi_end
,int _huff_idxs
[3]){
977 for(zzi
=_zzi_start
;zzi
<_zzi_end
;zzi
++){
983 for(pli
=0;pli
<3;pli
++){
984 const th_huff_code
*huff_codes
;
987 /*Step 2: Write the tokens using these tables.*/
988 huff_codes
=_rec
->enc_huff_codes
[_huff_idxs
[pli
]];
989 /*Note: dct_token_offs[3] is really the ndct_tokens table.
990 Yes, this seems like a horrible hack, yet it's strangely elegant.*/
991 ti_end
=_rec
->ti0
[pli
][zzi
];
992 for(;ti
<ti_end
;ti
++){
993 token
=_rec
->dct_tokens
[zzi
][ti
];
994 oggpackB_write(&_rec
->enc_opb
,huff_codes
[token
].pattern
,
995 huff_codes
[token
].nbits
);
996 if(OC_DCT_TOKEN_EXTRA_BITS
[token
]){
997 oggpackB_write(&_rec
->enc_opb
,_rec
->extra_bits
[zzi
][ebi
++],
998 OC_DCT_TOKEN_EXTRA_BITS
[token
]);
1005 static void oc_rec_residual_tokens_pack(oc_rec_ctx
*_rec
,
1006 const oc_tok_hist _tok_hist
[2][5]){
1007 static const int OC_HUFF_LIST_MIN
[6]={0,1,6,15,28,64};
1008 static const int *OC_HUFF_LIST_MAX
=OC_HUFF_LIST_MIN
+1;
1011 int huff_idxs
[5][3];
1015 /*Step 1a: Select Huffman tables for the DC token list.*/
1016 memset(bits_y
,0,sizeof(bits_y
));
1017 memset(bits_c
,0,sizeof(bits_c
));
1018 oc_rec_count_bits(_rec
,0,_tok_hist
[0][0],bits_y
);
1019 oc_rec_count_bits(_rec
,0,_tok_hist
[1][0],bits_c
);
1020 huffi_y
=oc_rec_select_huffi(bits_y
);
1021 huffi_c
=oc_rec_select_huffi(bits_c
);
1022 huff_idxs
[0][0]=huffi_y
;
1023 huff_idxs
[0][1]=huff_idxs
[0][2]=huffi_c
;
1024 /*Step 1b: Write the DC token list with the chosen tables.*/
1025 oggpackB_write(&_rec
->enc_opb
,huffi_y
,4);
1026 oggpackB_write(&_rec
->enc_opb
,huffi_c
,4);
1027 oc_rec_huff_group_pack(_rec
,0,1,huff_idxs
[0]);
1028 /*Step 2a: Select Huffman tables for the AC token lists.*/
1029 memset(bits_y
,0,sizeof(bits_y
));
1030 memset(bits_y
,0,sizeof(bits_c
));
1031 for(hgi
=1;hgi
<5;hgi
++){
1032 oc_rec_count_bits(_rec
,hgi
,_tok_hist
[0][hgi
],bits_y
);
1033 oc_rec_count_bits(_rec
,hgi
,_tok_hist
[1][hgi
],bits_c
);
1035 huffi_y
=oc_rec_select_huffi(bits_y
);
1036 huffi_c
=oc_rec_select_huffi(bits_c
);
1037 /*Step 2b: Write the AC token lists using the chosen tables.*/
1038 oggpackB_write(&_rec
->enc_opb
,huffi_y
,4);
1039 oggpackB_write(&_rec
->enc_opb
,huffi_c
,4);
1040 for(hgi
=1;hgi
<5;hgi
++){
1041 huff_idxs
[hgi
][0]=huffi_y
+(hgi
<<4);
1042 huff_idxs
[hgi
][1]=huff_idxs
[hgi
][2]=huffi_c
+(hgi
<<4);
1043 oc_rec_huff_group_pack(_rec
,OC_HUFF_LIST_MIN
[hgi
],OC_HUFF_LIST_MAX
[hgi
],
1050 th_rec_ctx
*th_recode_alloc(const th_info
*_info
,const th_setup_info
*_setup
){
1052 if(_info
==NULL
||_setup
==NULL
)return NULL
;
1053 dec
=_ogg_malloc(sizeof(*dec
));
1054 if(oc_rec_init(dec
,_info
,_setup
)<0){
1058 dec
->state
.curframe_num
=0;
1062 void th_recode_free(th_rec_ctx
*_rec
){
1069 int th_recode_packetin(th_rec_ctx
*_rec
,const ogg_packet
*_op
,
1070 ogg_int64_t
*_granpos
){
1072 if(_rec
==NULL
||_op
==NULL
)return TH_EFAULT
;
1073 /*If the user has already retrieved the statistics, we can't update them any
1075 if(_rec
->packet_state
!=OC_PACKET_ANALYZE
)return TH_EINVAL
;
1076 /*A completely empty packet indicates a dropped frame and is treated exactly
1077 like an inter frame with no coded blocks.
1078 Only proceed if we have a non-empty packet.*/
1080 oc_frame_tok_hist
*tok_hist
;
1081 oggpackB_readinit(&_rec
->dec_opb
,_op
->packet
,_op
->bytes
);
1082 ret
=oc_rec_frame_header_unpack(_rec
);
1083 if(ret
<0)return ret
;
1084 if(_rec
->state
.frame_type
==OC_INTRA_FRAME
){
1085 oc_rec_mark_all_intra(_rec
);
1086 _rec
->state
.keyframe_num
=_rec
->state
.curframe_num
;
1089 oc_rec_coded_flags_unpack(_rec
);
1090 oc_rec_mb_modes_unpack(_rec
);
1091 oc_rec_mv_unpack(_rec
);
1093 oc_rec_block_qis_unpack(_rec
);
1094 if(_rec
->ntok_hists
>=_rec
->ctok_hists
){
1095 _rec
->ctok_hists
=_rec
->ctok_hists
<<1|1;
1096 _rec
->tok_hists
=(oc_frame_tok_hist
*)_ogg_realloc(_rec
->tok_hists
,
1097 _rec
->ctok_hists
*sizeof(*_rec
->tok_hists
));
1099 tok_hist
=_rec
->tok_hists
+_rec
->ntok_hists
++;
1100 tok_hist
->pkt_sz
=_op
->bytes
;
1101 tok_hist
->dct_offs
=oggpackB_bits(&_rec
->dec_opb
);
1102 memcpy(tok_hist
->ncoded_fragis
,_rec
->state
.ncoded_fragis
,
1103 sizeof(tok_hist
->ncoded_fragis
));
1104 oc_rec_residual_tokens_unpack(_rec
);
1105 /*Update granule position.*/
1106 _rec
->state
.granpos
=
1107 (_rec
->state
.keyframe_num
<<_rec
->state
.info
.keyframe_granule_shift
)+
1108 (_rec
->state
.curframe_num
-_rec
->state
.keyframe_num
);
1109 tok_hist
->granpos
=_rec
->state
.granpos
;
1110 /*Save the statistics for this frame.*/
1111 memcpy(tok_hist
->tok_hist
,_rec
->tok_hist
,sizeof(tok_hist
->tok_hist
));
1112 _rec
->state
.curframe_num
++;
1113 if(_granpos
!=NULL
)*_granpos
=_rec
->state
.granpos
;
1117 /*Just update the granule position and return.*/
1118 _rec
->state
.granpos
=
1119 (_rec
->state
.keyframe_num
<<_rec
->state
.info
.keyframe_granule_shift
)+
1120 (_rec
->state
.curframe_num
-_rec
->state
.keyframe_num
);
1121 _rec
->state
.curframe_num
++;
1122 if(_granpos
!=NULL
)*_granpos
=_rec
->state
.granpos
;
1127 int th_recode_ctl(th_rec_ctx
*_rec
,int _req
,void *_buf
,size_t _buf_sz
){
1129 case TH_ENCCTL_SET_HUFFMAN_CODES
:{
1130 if(_buf
==NULL
&&_buf_sz
!=0||_buf
!=NULL
&&
1131 _buf_sz
!=sizeof(th_huff_code
)*TH_NHUFFMAN_TABLES
*TH_NDCT_TOKENS
){
1134 return oc_rec_set_huffman_codes(_rec
,(const th_huff_table
*)_buf
);
1136 case TH_DECCTL_SET_GRANPOS
:{
1137 ogg_int64_t granpos
;
1138 if(_rec
==NULL
||_buf
==NULL
)return TH_EFAULT
;
1139 if(_buf_sz
!=sizeof(ogg_int64_t
))return TH_EINVAL
;
1140 granpos
=*(ogg_int64_t
*)_buf
;
1141 if(granpos
<0)return TH_EINVAL
;
1142 _rec
->state
.granpos
=granpos
;
1143 _rec
->state
.keyframe_num
=
1144 granpos
>>_rec
->state
.info
.keyframe_granule_shift
;
1145 _rec
->state
.curframe_num
=_rec
->state
.keyframe_num
+
1146 (granpos
&(1<<_rec
->state
.info
.keyframe_granule_shift
)-1);
1149 case TH_RECCTL_GET_TOK_NSTATS
:{
1150 if(_rec
==NULL
||_buf
==NULL
)return TH_EFAULT
;
1151 if(_buf_sz
!=sizeof(long))return TH_EINVAL
;
1152 *((long *)_buf
)=_rec
->ntok_hists
;
1155 case TH_RECCTL_GET_TOK_STATS
:{
1156 if(_rec
==NULL
||_buf
==NULL
)return TH_EFAULT
;
1157 if(_buf_sz
!=sizeof(const oc_frame_tok_hist
**))return TH_EINVAL
;
1158 if(_rec
->packet_state
<OC_PACKET_ANALYZE
)return TH_EINVAL
;
1159 /*Update the state to prevent us from invalidating this pointer.*/
1160 _rec
->packet_state
=OC_PACKET_HUFFTABLES
;
1161 *((const oc_frame_tok_hist
**)_buf
)=_rec
->tok_hists
;
1164 default:return TH_EIMPL
;
1168 int th_recode_flushheader(th_rec_ctx
*_rec
,th_comment
*_tc
,ogg_packet
*_op
){
1169 return oc_state_flushheader(&_rec
->state
,&_rec
->packet_state
,&_rec
->enc_opb
,
1170 &_rec
->qinfo
,(const th_huff_table
*)_rec
->enc_huff_codes
,_tc
->vendor
,
1176 int th_recode_packet_rewrite(th_rec_ctx
*_rec
,const ogg_packet
*_op_in
,
1177 ogg_packet
*_op_out
){
1179 if(_rec
==NULL
||_op_in
==NULL
||_op_out
==NULL
)return TH_EFAULT
;
1180 /*If we've used all our decoded token histograms, please stop calling us.*/
1181 if(_rec
->cur_tok_histi
>=_rec
->ntok_hists
)return TH_EINVAL
;
1182 /*A completely empty packet indicates a dropped frame and is treated exactly
1183 like an inter frame with no coded blocks.
1184 Only proceed if we have a non-empty packet.*/
1185 if(_op_in
->bytes
!=0){
1186 oc_frame_tok_hist
*tok_hist
;
1187 /*Read enough of the packet to figure out what kind of frame we have.
1188 This also validates the packet to be sure we can decode it, which is why
1189 we don't just use th_packet_iskeyframe().*/
1190 oggpackB_readinit(&_rec
->dec_opb
,_op_in
->packet
,_op_in
->bytes
);
1191 ret
=oc_rec_frame_header_unpack(_rec
);
1192 if(ret
<0)return ret
;
1193 /*Update granule position.*/
1194 if(_rec
->state
.frame_type
==OC_INTRA_FRAME
){
1195 _rec
->state
.keyframe_num
=_rec
->state
.curframe_num
;
1197 _rec
->state
.granpos
=
1198 (_rec
->state
.keyframe_num
<<_rec
->state
.info
.keyframe_granule_shift
)+
1199 (_rec
->state
.curframe_num
-_rec
->state
.keyframe_num
);
1200 _rec
->state
.curframe_num
++;
1201 /*Sanity checks to see if the next piece of frame data corresponds to this
1203 This isn't a guarantee if someone rewrote the file out from under us, but
1204 it at least ensures that we have enough bytes in the packet.
1205 TODO: We could re-decode this packet to get the info we need, instead of
1206 failing, but that would be more code.*/
1207 tok_hist
=_rec
->tok_hists
+_rec
->cur_tok_histi
;
1208 if(tok_hist
->granpos
!=_rec
->state
.granpos
||
1209 tok_hist
->pkt_sz
!=_op_in
->bytes
){
1210 return TH_EBADPACKET
;
1212 _rec
->cur_tok_histi
++;
1213 /*Copy the contents of the input packet up to the DCT tokens.*/
1214 oggpackB_reset(&_rec
->enc_opb
);
1215 oggpackB_writecopy(&_rec
->enc_opb
,_op_in
->packet
,tok_hist
->dct_offs
);
1216 /*Read the DCT tokens using the old codes.*/
1217 oggpackB_readinit(&_rec
->dec_opb
,_op_in
->packet
,_op_in
->bytes
);
1218 oggpackB_adv(&_rec
->dec_opb
,tok_hist
->dct_offs
);
1219 memcpy(_rec
->state
.ncoded_fragis
,tok_hist
->ncoded_fragis
,
1220 sizeof(_rec
->state
.ncoded_fragis
));
1221 oc_rec_residual_tokens_unpack(_rec
);
1222 /*Write the DCT tokens using the new codes.*/
1223 memcpy(_rec
->state
.ncoded_fragis
,tok_hist
->ncoded_fragis
,
1224 sizeof(_rec
->state
.ncoded_fragis
));
1225 oc_rec_residual_tokens_pack(_rec
,
1226 (const oc_tok_hist_table
*)tok_hist
->tok_hist
);
1230 oggpackB_reset(&_rec
->enc_opb
);
1231 /*Just update the granule position and return.*/
1232 _rec
->state
.granpos
=
1233 (_rec
->state
.keyframe_num
<<_rec
->state
.info
.keyframe_granule_shift
)+
1234 (_rec
->state
.curframe_num
-_rec
->state
.keyframe_num
);
1235 _rec
->state
.curframe_num
++;
1238 _op_out
->packet
=oggpackB_get_buffer(&_rec
->enc_opb
);
1239 _op_out
->bytes
=oggpackB_bytes(&_rec
->enc_opb
);
1241 _op_out
->e_o_s
=_op_in
->e_o_s
;
1242 _op_out
->packetno
=_rec
->state
.curframe_num
;
1243 _op_out
->granulepos
=_rec
->state
.granpos
;
1244 if(_op_out
->e_o_s
)_rec
->packet_state
=OC_PACKET_DONE
;