Mark which loops auto-vectorize.
[xiph/unicode.git] / theora-exp / lib / recode.c
blob207d940fc43c62c65961b74ad61a8e01554f5a15
1 #include <stdlib.h>
2 #include <string.h>
3 #include <limits.h>
4 #include "recode.h"
5 /*For th_setup_info, packet state, idct, huffdec, dequant.*/
6 #include "decint.h"
7 /*For oc_huff_codes_pack, oc_state_flushheader.*/
8 #include "encint.h"
10 typedef struct th_rec_ctx oc_rec_ctx;
11 typedef oc_tok_hist oc_tok_hist_table[5];
15 /*Reading packet statistics.*/
16 #define OC_PACKET_ANALYZE (1)
17 /*Waiting for Huffman tables to be set.*/
18 #define OC_PACKET_HUFFTABLES (2)
19 /*Rewriting data packets.*/
20 #define OC_PACKET_REWRITE (0)
24 struct th_rec_ctx{
25 /*Shared encoder/decoder state.*/
26 oc_theora_state state;
27 /*The next four fields must be in the given positions in order to be
28 compatible with some encoder functions we call.*/
29 /*Whether or not packets are ready to be emitted.
30 This takes on negative values while there are remaining header packets to
31 be emitted, reaches 0 when the codec is ready for input, and goes to 1
32 when a frame has been processed and a data packet is ready.*/
33 int packet_state;
34 /*Buffer in which to assemble packets.*/
35 oggpack_buffer enc_opb;
36 /*Huffman encode tables.*/
37 th_huff_code enc_huff_codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS];
38 /*Quantization parameters.*/
39 th_quant_info qinfo;
40 /*The previous four fields must be in the given positions in order to be
41 compatible with some encoder functions we call.*/
42 /*Buffer from which to decode packets.*/
43 oggpack_buffer dec_opb;
44 /*Huffman decode trees.*/
45 oc_huff_node *dec_huff_tables[TH_NHUFFMAN_TABLES];
46 /*The index of one past the last token in each plane for each coefficient.
47 The final entries are the total number of tokens for each coefficient.*/
48 int ti0[3][64];
49 /*The index of one past the last extra bits entry in each plane for each
50 coefficient.
51 The final entries are the total number of extra bits entries for each
52 coefficient.*/
53 int ebi0[3][64];
54 /*The number of outstanding EOB runs at the start of each coefficient in each
55 plane.*/
56 int eob_runs[3][64];
57 /*The DCT token lists.*/
58 unsigned char **dct_tokens;
59 /*The extra bits associated with DCT tokens.*/
60 ogg_uint16_t **extra_bits;
61 /*The DCT token counts for the last decoded frame.*/
62 oc_tok_hist tok_hist[2][5];
63 /*The DCT token counts for all decoded frames.*/
64 oc_frame_tok_hist *tok_hists;
65 long ntok_hists;
66 long ctok_hists;
67 /*The index of the set of token counts used for the current frame while
68 rewriting.*/
69 long cur_tok_histi;
74 /*The mode alphabets for the various mode coding schemes.
75 Scheme 0 uses a custom alphabet, which is not stored in this table.*/
76 static const int OC_MODE_ALPHABETS[7][OC_NMODES]={
77 /*Last MV dominates */
79 OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV_LAST2,OC_MODE_INTER_MV,
80 OC_MODE_INTER_NOMV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
81 OC_MODE_INTER_MV_FOUR
84 OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV_LAST2,OC_MODE_INTER_NOMV,
85 OC_MODE_INTER_MV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
86 OC_MODE_INTER_MV_FOUR
89 OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV,OC_MODE_INTER_MV_LAST2,
90 OC_MODE_INTER_NOMV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
91 OC_MODE_INTER_MV_FOUR
94 OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV,OC_MODE_INTER_NOMV,
95 OC_MODE_INTER_MV_LAST2,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,
96 OC_MODE_GOLDEN_MV,OC_MODE_INTER_MV_FOUR
98 /*No MV dominates.*/
100 OC_MODE_INTER_NOMV,OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV_LAST2,
101 OC_MODE_INTER_MV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
102 OC_MODE_INTER_MV_FOUR
105 OC_MODE_INTER_NOMV,OC_MODE_GOLDEN_NOMV,OC_MODE_INTER_MV_LAST,
106 OC_MODE_INTER_MV_LAST2,OC_MODE_INTER_MV,OC_MODE_INTRA,OC_MODE_GOLDEN_MV,
107 OC_MODE_INTER_MV_FOUR
109 /*Default ordering.*/
111 OC_MODE_INTER_NOMV,OC_MODE_INTRA,OC_MODE_INTER_MV,OC_MODE_INTER_MV_LAST,
112 OC_MODE_INTER_MV_LAST2,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
113 OC_MODE_INTER_MV_FOUR
119 static int oc_sb_run_unpack(oggpack_buffer *_opb){
120 long bits;
121 int ret;
122 /*Coding scheme:
123 Codeword Run Length
125 10x 2-3
126 110x 4-5
127 1110xx 6-9
128 11110xxx 10-17
129 111110xxxx 18-33
130 111111xxxxxxxxxxxx 34-4129*/
131 theora_read1(_opb,&bits);
132 if(bits==0)return 1;
133 theora_read(_opb,2,&bits);
134 if((bits&2)==0)return 2+(int)bits;
135 else if((bits&1)==0){
136 theora_read1(_opb,&bits);
137 return 4+(int)bits;
139 theora_read(_opb,3,&bits);
140 if((bits&4)==0)return 6+(int)bits;
141 else if((bits&2)==0){
142 ret=10+((bits&1)<<2);
143 theora_read(_opb,2,&bits);
144 return ret+(int)bits;
146 else if((bits&1)==0){
147 theora_read(_opb,4,&bits);
148 return 18+(int)bits;
150 theora_read(_opb,12,&bits);
151 return 34+(int)bits;
154 static int oc_block_run_unpack(oggpack_buffer *_opb){
155 long bits;
156 long bits2;
157 /*Coding scheme:
158 Codeword Run Length
159 0x 1-2
160 10x 3-4
161 110x 5-6
162 1110xx 7-10
163 11110xx 11-14
164 11111xxxx 15-30*/
165 theora_read(_opb,2,&bits);
166 if((bits&2)==0)return 1+(int)bits;
167 else if((bits&1)==0){
168 theora_read1(_opb,&bits);
169 return 3+(int)bits;
171 theora_read(_opb,2,&bits);
172 if((bits&2)==0)return 5+(int)bits;
173 else if((bits&1)==0){
174 theora_read(_opb,2,&bits);
175 return 7+(int)bits;
177 theora_read(_opb,3,&bits);
178 if((bits&4)==0)return 11+bits;
179 theora_read(_opb,2,&bits2);
180 return 15+((bits&3)<<2)+bits2;
183 static void oc_quant_params_copy(th_quant_info *_qdst,
184 const th_quant_info *_qsrc){
185 int i;
186 memcpy(_qdst,_qsrc,sizeof(*_qdst));
187 for(i=0;i<6;i++){
188 int qti;
189 int pli;
190 int qtj;
191 int plj;
192 qti=i/3;
193 pli=i%3;
194 qtj=(i-1)/3;
195 plj=(i-1)%3;
196 if(i>0&&_qsrc->qi_ranges[qti][pli].sizes==
197 _qsrc->qi_ranges[qtj][plj].sizes){
198 _qdst->qi_ranges[qti][pli].sizes=_qdst->qi_ranges[qtj][plj].sizes;
200 else if(qti>0&&_qsrc->qi_ranges[1][pli].sizes==
201 _qsrc->qi_ranges[0][pli].sizes){
202 _qdst->qi_ranges[1][pli].sizes=_qdst->qi_ranges[0][pli].sizes;
204 else{
205 int *sizes;
206 sizes=(int *)_ogg_malloc(
207 _qsrc->qi_ranges[qti][pli].nranges*sizeof(*sizes));
208 memcpy(sizes,_qsrc->qi_ranges[qti][pli].sizes,
209 _qsrc->qi_ranges[qti][pli].nranges*sizeof(*sizes));
210 _qdst->qi_ranges[qti][pli].sizes=sizes;
212 if(i>0&&_qsrc->qi_ranges[qti][pli].base_matrices==
213 _qsrc->qi_ranges[qtj][plj].base_matrices){
214 _qdst->qi_ranges[qti][pli].base_matrices=
215 _qdst->qi_ranges[qtj][plj].base_matrices;
217 else if(qti>0&&_qsrc->qi_ranges[1][pli].base_matrices==
218 _qsrc->qi_ranges[0][pli].base_matrices){
219 _qdst->qi_ranges[1][pli].base_matrices=
220 _qdst->qi_ranges[0][pli].base_matrices;
222 else{
223 th_quant_base *base_matrices;
224 base_matrices=(th_quant_base *)_ogg_malloc(
225 (_qsrc->qi_ranges[qti][pli].nranges+1)*sizeof(*base_matrices));
226 memcpy(base_matrices,_qsrc->qi_ranges[qti][pli].base_matrices,
227 (_qsrc->qi_ranges[qti][pli].nranges+1)*sizeof(*base_matrices));
228 _qdst->qi_ranges[qti][pli].base_matrices=
229 (const th_quant_base *)base_matrices;
235 static int oc_rec_init(oc_rec_ctx *_rec,const th_info *_info,
236 const th_setup_info *_setup){
237 int ret;
238 ret=oc_state_init(&_rec->state,_info);
239 if(ret<0)return ret;
240 oc_huff_trees_copy(_rec->dec_huff_tables,
241 (const oc_huff_node *const *)_setup->huff_tables);
242 /*Do a deep copy of the quant params, since we will need to refer to this
243 data again (unlike in the normal decoder).*/
244 oc_quant_params_copy(&_rec->qinfo,&_setup->qinfo);
245 _rec->dct_tokens=(unsigned char **)oc_calloc_2d(64,
246 _rec->state.nfrags,sizeof(_rec->dct_tokens[0][0]));
247 _rec->extra_bits=(ogg_uint16_t **)oc_calloc_2d(64,
248 _rec->state.nfrags,sizeof(_rec->extra_bits[0][0]));
249 _rec->tok_hists=NULL;
250 _rec->ntok_hists=_rec->ctok_hists=0;
251 _rec->cur_tok_histi=0;
252 _rec->packet_state=OC_PACKET_ANALYZE;
253 oggpackB_writeinit(&_rec->enc_opb);
254 return 0;
257 static void oc_rec_clear(oc_rec_ctx *_rec){
258 _ogg_free(_rec->tok_hists);
259 oc_free_2d(_rec->extra_bits);
260 oc_free_2d(_rec->dct_tokens);
261 oc_quant_params_clear(&_rec->qinfo);
262 oc_huff_trees_clear(_rec->dec_huff_tables);
263 oggpackB_writeclear(&_rec->enc_opb);
264 oc_state_clear(&_rec->state);
268 static int oc_rec_frame_header_unpack(oc_rec_ctx *_rec){
269 long val;
270 /*Check to make sure this is a data packet.*/
271 theora_read1(&_rec->dec_opb,&val);
272 if(val!=0)return TH_EBADPACKET;
273 /*Read in the frame type (I or P).*/
274 theora_read1(&_rec->dec_opb,&val);
275 _rec->state.frame_type=(int)val;
276 /*Read in the current qi.*/
277 theora_read(&_rec->dec_opb,6,&val);
278 _rec->state.qis[0]=(int)val;
279 theora_read1(&_rec->dec_opb,&val);
280 if(!val)_rec->state.nqis=1;
281 else{
282 theora_read(&_rec->dec_opb,6,&val);
283 _rec->state.qis[1]=(int)val;
284 theora_read1(&_rec->dec_opb,&val);
285 if(!val)_rec->state.nqis=2;
286 else{
287 theora_read(&_rec->dec_opb,6,&val);
288 _rec->state.qis[2]=(int)val;
289 _rec->state.nqis=3;
292 if(_rec->state.frame_type==OC_INTRA_FRAME){
293 /*Keyframes have 3 unused configuration bits, holdovers from VP3 days.
294 Most of the other unused bits in the VP3 headers were eliminated.
295 I don't know why these remain.*/
296 theora_read(&_rec->dec_opb,3,&val);
297 if(val!=0)return TH_EIMPL;
299 return 0;
302 /*Mark all fragments as coded and in OC_MODE_INTRA.
303 This also builds up the coded fragment list (in coded order), and clears the
304 uncoded fragment list.
305 It does not update the coded macro block list, as that is not used when
306 decoding INTRA frames.*/
307 static void oc_rec_mark_all_intra(oc_rec_ctx *_rec){
308 oc_sb *sb;
309 oc_sb *sb_end;
310 int pli;
311 int ncoded_fragis;
312 int prev_ncoded_fragis;
313 prev_ncoded_fragis=ncoded_fragis=0;
314 sb=sb_end=_rec->state.sbs;
315 for(pli=0;pli<3;pli++){
316 const oc_fragment_plane *fplane;
317 fplane=_rec->state.fplanes+pli;
318 sb_end+=fplane->nsbs;
319 for(;sb<sb_end;sb++){
320 int quadi;
321 for(quadi=0;quadi<4;quadi++)if(sb->quad_valid&1<<quadi){
322 int bi;
323 for(bi=0;bi<4;bi++){
324 int fragi;
325 fragi=sb->map[quadi][bi];
326 if(fragi>=0){
327 oc_fragment *frag;
328 frag=_rec->state.frags+fragi;
329 frag->coded=1;
330 frag->mbmode=OC_MODE_INTRA;
331 _rec->state.coded_fragis[ncoded_fragis++]=fragi;
336 _rec->state.ncoded_fragis[pli]=ncoded_fragis-prev_ncoded_fragis;
337 prev_ncoded_fragis=ncoded_fragis;
338 _rec->state.nuncoded_fragis[pli]=0;
342 /*Decodes the bit flags for whether or not each super block is partially coded
343 or not.
344 Return: The number of partially coded super blocks.*/
345 static int oc_rec_partial_sb_flags_unpack(oc_rec_ctx *_rec){
346 oc_sb *sb;
347 oc_sb *sb_end;
348 long val;
349 int flag;
350 int npartial;
351 int run_count;
352 theora_read1(&_rec->dec_opb,&val);
353 flag=(int)val;
354 sb=_rec->state.sbs;
355 sb_end=sb+_rec->state.nsbs;
356 run_count=npartial=0;
357 while(sb<sb_end){
358 int full_run;
359 run_count=oc_sb_run_unpack(&_rec->dec_opb);
360 full_run=run_count>=4129;
362 sb->coded_partially=flag;
363 sb->coded_fully=0;
364 npartial+=flag;
365 sb++;
367 while(--run_count>0&&sb<sb_end);
368 if(full_run&&sb<sb_end){
369 theora_read1(&_rec->dec_opb,&val);
370 flag=(int)val;
372 else flag=!flag;
374 /*TODO: run_count should be 0 here.
375 If it's not, we should issue a warning of some kind.*/
376 return npartial;
379 /*Decodes the bit flags for whether or not each non-partially-coded super
380 block is fully coded or not.
381 This function should only be called if there is at least one
382 non-partially-coded super block.
383 Return: The number of partially coded super blocks.*/
384 static void oc_rec_coded_sb_flags_unpack(oc_rec_ctx *_rec){
385 oc_sb *sb;
386 oc_sb *sb_end;
387 long val;
388 int flag;
389 int run_count;
390 sb=_rec->state.sbs;
391 sb_end=sb+_rec->state.nsbs;
392 /*Skip partially coded super blocks.*/
393 for(;sb->coded_partially;sb++);
394 theora_read1(&_rec->dec_opb,&val);
395 flag=(int)val;
396 while(sb<sb_end){
397 int full_run;
398 run_count=oc_sb_run_unpack(&_rec->dec_opb);
399 full_run=run_count>=4129;
400 for(;sb<sb_end;sb++){
401 if(sb->coded_partially)continue;
402 if(run_count--<=0)break;
403 sb->coded_fully=flag;
405 if(full_run&&sb<sb_end){
406 theora_read1(&_rec->dec_opb,&val);
407 flag=(int)val;
409 else flag=!flag;
411 /*TODO: run_count should be 0 here.
412 If it's not, we should issue a warning of some kind.*/
415 static void oc_rec_coded_flags_unpack(oc_rec_ctx *_rec){
416 oc_sb *sb;
417 oc_sb *sb_end;
418 long val;
419 int npartial;
420 int pli;
421 int flag;
422 int run_count;
423 int ncoded_fragis;
424 int prev_ncoded_fragis;
425 int nuncoded_fragis;
426 int prev_nuncoded_fragis;
427 npartial=oc_rec_partial_sb_flags_unpack(_rec);
428 if(npartial<_rec->state.nsbs)oc_rec_coded_sb_flags_unpack(_rec);
429 if(npartial>0){
430 theora_read1(&_rec->dec_opb,&val);
431 flag=!(int)val;
433 else flag=0;
434 run_count=0;
435 prev_ncoded_fragis=ncoded_fragis=prev_nuncoded_fragis=nuncoded_fragis=0;
436 sb=sb_end=_rec->state.sbs;
437 for(pli=0;pli<3;pli++){
438 const oc_fragment_plane *fplane;
439 fplane=_rec->state.fplanes+pli;
440 sb_end+=fplane->nsbs;
441 for(;sb<sb_end;sb++){
442 int quadi;
443 for(quadi=0;quadi<4;quadi++)if(sb->quad_valid&1<<quadi){
444 int bi;
445 for(bi=0;bi<4;bi++){
446 int fragi;
447 fragi=sb->map[quadi][bi];
448 if(fragi>=0){
449 oc_fragment *frag;
450 frag=_rec->state.frags+fragi;
451 if(sb->coded_fully)frag->coded=1;
452 else if(!sb->coded_partially)frag->coded=0;
453 else{
454 if(run_count<=0){
455 run_count=oc_block_run_unpack(&_rec->dec_opb);
456 flag=!flag;
458 run_count--;
459 frag->coded=flag;
461 if(frag->coded)_rec->state.coded_fragis[ncoded_fragis++]=fragi;
462 else *(_rec->state.uncoded_fragis-++nuncoded_fragis)=fragi;
467 _rec->state.ncoded_fragis[pli]=ncoded_fragis-prev_ncoded_fragis;
468 prev_ncoded_fragis=ncoded_fragis;
469 _rec->state.nuncoded_fragis[pli]=nuncoded_fragis-prev_nuncoded_fragis;
470 prev_nuncoded_fragis=nuncoded_fragis;
472 /*TODO: run_count should be 0 here.
473 If it's not, we should issue a warning of some kind.*/
478 typedef int (*oc_mode_unpack_func)(oggpack_buffer *_opb);
480 static int oc_vlc_mode_unpack(oggpack_buffer *_opb){
481 long val;
482 int i;
483 for(i=0;i<7;i++){
484 theora_read1(_opb,&val);
485 if(!val)break;
487 return i;
490 static int oc_clc_mode_unpack(oggpack_buffer *_opb){
491 long val;
492 theora_read(_opb,3,&val);
493 return (int)val;
496 /*Unpacks the list of macro block modes for INTER frames.*/
497 void oc_rec_mb_modes_unpack(oc_rec_ctx *_rec){
498 oc_mode_unpack_func mode_unpack;
499 oc_mb *mb;
500 oc_mb *mb_end;
501 const int *alphabet;
502 long val;
503 int scheme0_alphabet[8];
504 int mode_scheme;
505 theora_read(&_rec->dec_opb,3,&val);
506 mode_scheme=(int)val;
507 if(mode_scheme==0){
508 int mi;
509 /*Just in case, initialize the modes to something.
510 If the bitstream doesn't contain each index exactly once, it's likely
511 corrupt and the rest of the packet is garbage anyway, but this way we
512 won't crash, and we'll decode SOMETHING.*/
513 /*LOOP VECTORIZES.*/
514 for(mi=0;mi<OC_NMODES;mi++)scheme0_alphabet[mi]=OC_MODE_INTER_NOMV;
515 for(mi=0;mi<OC_NMODES;mi++){
516 theora_read(&_rec->dec_opb,3,&val);
517 scheme0_alphabet[val]=OC_MODE_ALPHABETS[6][mi];
519 alphabet=scheme0_alphabet;
521 else alphabet=OC_MODE_ALPHABETS[mode_scheme-1];
522 if(mode_scheme==7)mode_unpack=oc_clc_mode_unpack;
523 else mode_unpack=oc_vlc_mode_unpack;
524 mb=_rec->state.mbs;
525 mb_end=mb+_rec->state.nmbs;
526 for(;mb<mb_end;mb++)if(mb->mode!=OC_MODE_INVALID){
527 int bi;
528 for(bi=0;bi<4;bi++){
529 int fragi;
530 fragi=mb->map[0][bi];
531 if(fragi>=0&&_rec->state.frags[fragi].coded)break;
533 if(bi<4)mb->mode=alphabet[(*mode_unpack)(&_rec->dec_opb)];
534 else mb->mode=OC_MODE_INTER_NOMV;
540 typedef int (*oc_mv_comp_unpack_func)(oggpack_buffer *_opb);
542 static int oc_vlc_mv_comp_unpack(oggpack_buffer *_opb){
543 long bits;
544 int mvsigned[2];
545 theora_read(_opb,3,&bits);
546 switch(bits){
547 case 0:return 0;
548 case 1:return 1;
549 case 2:return -1;
550 case 3:{
551 mvsigned[0]=2;
552 theora_read1(_opb,&bits);
553 }break;
554 case 4:{
555 mvsigned[0]=3;
556 theora_read1(_opb,&bits);
557 }break;
558 case 5:{
559 theora_read(_opb,3,&bits);
560 mvsigned[0]=4+(bits>>1);
561 bits&=1;
562 }break;
563 case 6:{
564 theora_read(_opb,4,&bits);
565 mvsigned[0]=8+(bits>>1);
566 bits&=1;
567 }break;
568 case 7:{
569 theora_read(_opb,5,&bits);
570 mvsigned[0]=16+(bits>>1);
571 bits&=1;
572 }break;
574 mvsigned[1]=-mvsigned[0];
575 return mvsigned[bits];
578 static int oc_clc_mv_comp_unpack(oggpack_buffer *_opb){
579 long bits;
580 int mvsigned[2];
581 theora_read(_opb,6,&bits);
582 mvsigned[0]=bits>>1;
583 mvsigned[1]=-mvsigned[0];
584 return mvsigned[bits&1];
587 /*Unpacks the list of motion vectors for INTER frames.
588 Does not propagte the macro block modes and motion vectors to the individual
589 fragments.
590 The purpose of this function is solely to skip these bits in the packet.*/
591 static void oc_rec_mv_unpack(oc_rec_ctx *_rec){
592 oc_mv_comp_unpack_func mv_comp_unpack;
593 oc_mb *mb;
594 oc_mb *mb_end;
595 const int *map_idxs;
596 long val;
597 int map_nidxs;
598 theora_read1(&_rec->dec_opb,&val);
599 mv_comp_unpack=val?oc_clc_mv_comp_unpack:oc_vlc_mv_comp_unpack;
600 map_idxs=OC_MB_MAP_IDXS[_rec->state.info.pixel_fmt];
601 map_nidxs=OC_MB_MAP_NIDXS[_rec->state.info.pixel_fmt];
602 mb=_rec->state.mbs;
603 mb_end=mb+_rec->state.nmbs;
604 for(;mb<mb_end;mb++)if(mb->mode!=OC_MODE_INVALID){
605 int coded[13];
606 int codedi;
607 int ncoded;
608 int mapi;
609 int mapii;
610 int fragi;
611 /*Search for at least one coded fragment.*/
612 ncoded=mapii=0;
614 mapi=map_idxs[mapii];
615 fragi=mb->map[mapi>>2][mapi&3];
616 if(fragi>=0&&_rec->state.frags[fragi].coded)coded[ncoded++]=mapi;
618 while(++mapii<map_nidxs);
619 if(ncoded<=0)continue;
620 switch(mb->mode){
621 case OC_MODE_INTER_MV_FOUR:{
622 int bi;
623 /*Mark the tail of the list, so we don't accidentally go past it.*/
624 coded[ncoded]=-1;
625 for(bi=codedi=0;bi<4;bi++)if(coded[codedi]==bi){
626 codedi++;
627 (*mv_comp_unpack)(&_rec->dec_opb);
628 (*mv_comp_unpack)(&_rec->dec_opb);
630 }break;
631 case OC_MODE_INTER_MV:{
632 (*mv_comp_unpack)(&_rec->dec_opb);
633 (*mv_comp_unpack)(&_rec->dec_opb);
634 }break;
635 case OC_MODE_GOLDEN_MV:{
636 (*mv_comp_unpack)(&_rec->dec_opb);
637 (*mv_comp_unpack)(&_rec->dec_opb);
638 }break;
643 static void oc_rec_block_qis_unpack(oc_rec_ctx *_rec){
644 int *coded_fragi;
645 int *coded_fragi_end;
646 int ncoded_fragis;
647 ncoded_fragis=_rec->state.ncoded_fragis[0]+
648 _rec->state.ncoded_fragis[1]+_rec->state.ncoded_fragis[2];
649 if(ncoded_fragis<=0)return;
650 coded_fragi=_rec->state.coded_fragis;
651 coded_fragi_end=coded_fragi+ncoded_fragis;
652 if(_rec->state.nqis>1){
653 long val;
654 int flag;
655 int nqi0;
656 int run_count;
657 /*If this frame has more than one qi value, we decode a qi index for each
658 fragment, using two passes of the same binary RLE scheme used for
659 super-block coded bits.
660 The first pass marks each fragment as having a qii of 0 or greater than
661 0, and the second pass (if necessary), distinguishes between a qii of
662 1 and 2.
663 We just store the qii in the fragment.*/
664 theora_read1(&_rec->dec_opb,&val);
665 flag=(int)val;
666 run_count=nqi0=0;
667 while(coded_fragi<coded_fragi_end){
668 int full_run;
669 run_count=oc_sb_run_unpack(&_rec->dec_opb);
670 full_run=run_count>=4129;
672 _rec->state.frags[*coded_fragi++].qi=flag;
673 nqi0+=!flag;
675 while(--run_count>0&&coded_fragi<coded_fragi_end);
676 if(full_run&&coded_fragi<coded_fragi_end){
677 theora_read1(&_rec->dec_opb,&val);
678 flag=(int)val;
680 else flag=!flag;
682 /*TODO: run_count should be 0 here.
683 If it's not, we should issue a warning of some kind.*/
684 /*If we have 3 different qi's for this frame, and there was at least one
685 fragment with a non-zero qi, make the second pass.*/
686 if(_rec->state.nqis==3&&nqi0<ncoded_fragis){
687 /*Skip qii==0 fragments.*/
688 for(coded_fragi=_rec->state.coded_fragis;
689 _rec->state.frags[*coded_fragi].qi==0;coded_fragi++);
690 theora_read1(&_rec->dec_opb,&val);
691 flag=(int)val;
692 while(coded_fragi<coded_fragi_end){
693 int full_run;
694 run_count=oc_sb_run_unpack(&_rec->dec_opb);
695 full_run=run_count>=4129;
696 for(;coded_fragi<coded_fragi_end;coded_fragi++){
697 oc_fragment *frag;
698 frag=_rec->state.frags+*coded_fragi;
699 if(frag->qi==0)continue;
700 if(run_count--<=0)break;
701 frag->qi+=flag;
703 if(full_run&&coded_fragi<coded_fragi_end){
704 theora_read1(&_rec->dec_opb,&val);
705 flag=(int)val;
707 else flag=!flag;
709 /*TODO: run_count should be 0 here.
710 If it's not, we should issue a warning of some kind.*/
715 /*Unpacks the DC coefficient tokens.
716 Unlike when unpacking the AC coefficient tokens, we actually need to decode
717 the DC coefficient values now so that we can do DC prediction.
718 _huff_idx: The index of the Huffman table to use for each color plane.
719 _ntoks_left: The number of tokens left to be decoded in each color plane for
720 each coefficient.
721 This is updated as EOB tokens and zero run tokens are decoded.
722 Return: The length of any outstanding EOB run.*/
723 static int oc_rec_dc_coeff_unpack(oc_rec_ctx *_rec,int _huff_idxs[3],
724 int *_tok_hists[3],int _ntoks_left[3][64]){
725 long val;
726 int *coded_fragi;
727 int *coded_fragi_end;
728 int run_counts[64];
729 int cfi;
730 int eobi;
731 int eobs;
732 int ti;
733 int ebi;
734 int pli;
735 int rli;
736 eobs=0;
737 ti=ebi=0;
738 coded_fragi_end=coded_fragi=_rec->state.coded_fragis;
739 for(pli=0;pli<3;pli++){
740 coded_fragi_end+=_rec->state.ncoded_fragis[pli];
741 memset(run_counts,0,sizeof(run_counts));
742 _rec->eob_runs[pli][0]=eobs;
743 /*Continue any previous EOB run, if there was one.*/
744 for(eobi=eobs;eobi-->0&&coded_fragi<coded_fragi_end;coded_fragi++);
745 cfi=0;
746 while(eobs<_ntoks_left[pli][0]-cfi){
747 int token;
748 int neb;
749 int eb;
750 int skip;
751 cfi+=eobs;
752 run_counts[63]+=eobs;
753 token=oc_huff_token_decode(&_rec->dec_opb,
754 _rec->dec_huff_tables[_huff_idxs[pli]]);
755 _rec->dct_tokens[0][ti++]=(char)token;
756 _tok_hists[pli][token]++;
757 neb=OC_DCT_TOKEN_EXTRA_BITS[token];
758 if(neb){
759 theora_read(&_rec->dec_opb,neb,&val);
760 eb=(int)val;
761 _rec->extra_bits[0][ebi++]=(ogg_int16_t)eb;
763 else eb=0;
764 skip=oc_dct_token_skip(token,eb);
765 if(skip<0){
766 eobs=eobi=-skip;
767 while(eobi-->0&&coded_fragi<coded_fragi_end)coded_fragi++;
769 else{
770 run_counts[skip-1]++;
771 cfi++;
772 eobs=0;
773 coded_fragi++;
776 _rec->ti0[pli][0]=ti;
777 _rec->ebi0[pli][0]=ebi;
778 /*Set the EOB count to the portion of the last EOB run which extends past
779 this coefficient.*/
780 eobs=eobs+cfi-_ntoks_left[pli][0];
781 /*Add the portion of the last EOB which was included in this coefficient to
782 to the longest run length.*/
783 run_counts[63]+=_ntoks_left[pli][0]-cfi;
784 /*And convert the run_counts array to a moment table.*/
785 for(rli=63;rli-->0;)run_counts[rli]+=run_counts[rli+1];
786 /*Finally, subtract off the number of coefficients that have been
787 accounted for by runs started in this coefficient.*/
788 for(rli=64;rli-->0;)_ntoks_left[pli][rli]-=run_counts[rli];
790 return eobs;
793 /*Unpacks the AC coefficient tokens.
794 This can completely discard coefficient values while unpacking, and so is
795 somewhat simpler than unpacking the DC coefficient tokens.
796 _huff_idx: The index of the Huffman table to use for each color plane.
797 _ntoks_left: The number of tokens left to be decoded in each color plane for
798 each coefficient.
799 This is updated as EOB tokens and zero run tokens are decoded.
800 _eobs: The length of any outstanding EOB run from previous
801 coefficients.
802 Return: The length of any outstanding EOB run.*/
803 static int oc_rec_ac_coeff_unpack(oc_rec_ctx *_rec,int _zzi,int _huff_idxs[3],
804 int *_tok_hists[3],int _ntoks_left[3][64],int _eobs){
805 long val;
806 int run_counts[64];
807 int cfi;
808 int ti;
809 int ebi;
810 int pli;
811 int rli;
812 ti=ebi=0;
813 for(pli=0;pli<3;pli++){
814 memset(run_counts,0,sizeof(run_counts));
815 _rec->eob_runs[pli][_zzi]=_eobs;
816 cfi=0;
817 while(_eobs<_ntoks_left[pli][_zzi]-cfi){
818 int token;
819 int neb;
820 int eb;
821 int skip;
822 cfi+=_eobs;
823 run_counts[63]+=_eobs;
824 token=oc_huff_token_decode(&_rec->dec_opb,
825 _rec->dec_huff_tables[_huff_idxs[pli]]);
826 _rec->dct_tokens[_zzi][ti++]=(char)token;
827 _tok_hists[pli][token]++;
828 neb=OC_DCT_TOKEN_EXTRA_BITS[token];
829 if(neb){
830 theora_read(&_rec->dec_opb,neb,&val);
831 eb=(int)val;
832 _rec->extra_bits[_zzi][ebi++]=(ogg_int16_t)eb;
834 else eb=0;
835 skip=oc_dct_token_skip(token,eb);
836 if(skip<0)_eobs=-skip;
837 else{
838 run_counts[skip-1]++;
839 cfi++;
840 _eobs=0;
843 _rec->ti0[pli][_zzi]=ti;
844 _rec->ebi0[pli][_zzi]=ebi;
845 /*Set the EOB count to the portion of the last EOB run which extends past
846 this coefficient.*/
847 _eobs=_eobs+cfi-_ntoks_left[pli][_zzi];
848 /*Add the portion of the last EOB which was included in this coefficient to
849 to the longest run length.*/
850 run_counts[63]+=_ntoks_left[pli][_zzi]-cfi;
851 /*And convert the run_counts array to a moment table.*/
852 for(rli=63;rli-->0;)run_counts[rli]+=run_counts[rli+1];
853 /*Finally, subtract off the number of coefficients that have been
854 accounted for by runs started in this coefficient.*/
855 for(rli=64-_zzi;rli-->0;)_ntoks_left[pli][_zzi+rli]-=run_counts[rli];
857 return _eobs;
860 /*Tokens describing the DCT coefficients that belong to each fragment are
861 stored in the bitstream grouped by coefficient, not by fragment.
862 This means that we either decode all the tokens in order, building up a
863 separate coefficient list for each fragment as we go, and then go back and
864 do the iDCT on each fragment, or we have to create separate lists of tokens
865 for each coefficient, so that we can pull the next token required off the
866 head of the appropriate list when decoding a specific fragment.
867 The former was VP3's choice, and it meant 2*w*h extra storage for all the
868 decoded coefficient values.
869 We take the second option, which lets us store just one or three bytes per
870 token (generally far fewer than the number of coefficients, due to EOB
871 tokens and zero runs), and which requires us to only maintain a counter for
872 each of the 64 coefficients, instead of a counter for every fragment to
873 determine where the next token goes.
874 Actually, we use 3 counters per coefficient, one for each color plane, so we
875 can decode all color planes simultaneously.
876 This lets us color conversion, etc., be done as soon as a full MCU (one or
877 two super block rows) is decoded, while the image data is still in cache.*/
878 static void oc_rec_residual_tokens_unpack(oc_rec_ctx *_rec){
879 static const int OC_HUFF_LIST_MAX[5]={1,6,15,28,64};
880 long val;
881 int ntoks_left[3][64];
882 int huff_idxs[3];
883 int *tok_hists[3];
884 int pli;
885 int zzi;
886 int hgi;
887 int huffi_y;
888 int huffi_c;
889 int eobs;
890 memset(_rec->tok_hist,0,sizeof(_rec->tok_hist));
891 for(pli=0;pli<3;pli++)for(zzi=0;zzi<64;zzi++){
892 ntoks_left[pli][zzi]=_rec->state.ncoded_fragis[pli];
894 theora_read(&_rec->dec_opb,4,&val);
895 huffi_y=(int)val;
896 theora_read(&_rec->dec_opb,4,&val);
897 huffi_c=(int)val;
898 huff_idxs[0]=huffi_y;
899 huff_idxs[1]=huff_idxs[2]=huffi_c;
900 tok_hists[0]=_rec->tok_hist[0][0];
901 tok_hists[1]=tok_hists[2]=_rec->tok_hist[1][0];
902 _rec->eob_runs[0][0]=0;
903 eobs=oc_rec_dc_coeff_unpack(_rec,huff_idxs,tok_hists,ntoks_left);
904 theora_read(&_rec->dec_opb,4,&val);
905 huffi_y=(int)val;
906 theora_read(&_rec->dec_opb,4,&val);
907 huffi_c=(int)val;
908 zzi=1;
909 for(hgi=1;hgi<5;hgi++){
910 huff_idxs[0]=huffi_y+(hgi<<4);
911 huff_idxs[1]=huff_idxs[2]=huffi_c+(hgi<<4);
912 tok_hists[0]=_rec->tok_hist[0][hgi];
913 tok_hists[1]=tok_hists[2]=_rec->tok_hist[1][hgi];
914 for(;zzi<OC_HUFF_LIST_MAX[hgi];zzi++){
915 eobs=oc_rec_ac_coeff_unpack(_rec,zzi,huff_idxs,tok_hists,ntoks_left,eobs);
918 /*TODO: eobs should be exactly zero, or 4096 or greater.
919 The second case occurs when an EOB run of size zero is encountered, which
920 gets treated as an infinite EOB run (where infinity is INT_MAX).
921 If neither of these conditions holds, then a warning should be issued.*/
924 static int oc_rec_set_huffman_codes(oc_rec_ctx *_rec,
925 const th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]){
926 int ret;
927 if(_rec==NULL)return TH_EFAULT;
928 /*If we've already emitted the setup header, then don't let the user set the
929 tables again.*/
930 if(_rec->packet_state>=OC_PACKET_SETUP_HDR&&
931 _rec->packet_state<=OC_PACKET_REWRITE){
932 return TH_EINVAL;
934 if(_codes==NULL)_codes=TH_VP31_HUFF_CODES;
935 /*Validate the codes.*/
936 oggpackB_reset(&_rec->enc_opb);
937 ret=oc_huff_codes_pack(&_rec->enc_opb,_codes);
938 if(ret<0)return ret;
939 memcpy(_rec->enc_huff_codes,_codes,sizeof(_rec->enc_huff_codes));
940 _rec->packet_state=OC_PACKET_INFO_HDR;
941 return 0;
944 /*Computes the number of bits used for each of the potential Huffman codes for
945 the given list of token counts.
946 The bits are added to whatever the current bit counts are.*/
947 static void oc_rec_count_bits(oc_rec_ctx *_rec,int _hgi,
948 const int _token_counts[TH_NDCT_TOKENS],int _bit_counts[16]){
949 int huffi;
950 int huff_base;
951 int token;
952 huff_base=_hgi<<4;
953 for(huffi=huff_base;huffi<huff_base+16;huffi++){
954 for(token=0;token<TH_NDCT_TOKENS;token++){
955 _bit_counts[huffi-huff_base]+=
956 _token_counts[token]*_rec->enc_huff_codes[huffi][token].nbits;
961 /*Returns the Huffman index using the fewest number of bits.*/
962 static int oc_rec_select_huffi(int _bit_counts[16]){
963 int best_huffi;
964 int huffi;
965 best_huffi=0;
966 for(huffi=1;huffi<16;huffi++)if(_bit_counts[huffi]<_bit_counts[best_huffi]){
967 best_huffi=huffi;
969 return best_huffi;
972 /*Packs the DCT tokens for the given range of coefficient indices in zig-zag
973 order using the given Huffman tables.*/
974 static void oc_rec_huff_group_pack(oc_rec_ctx *_rec,int _zzi_start,
975 int _zzi_end,int _huff_idxs[3]){
976 int zzi;
977 for(zzi=_zzi_start;zzi<_zzi_end;zzi++){
978 int pli;
979 int ti;
980 int ebi;
981 ti=0;
982 ebi=0;
983 for(pli=0;pli<3;pli++){
984 const th_huff_code *huff_codes;
985 int token;
986 int ti_end;
987 /*Step 2: Write the tokens using these tables.*/
988 huff_codes=_rec->enc_huff_codes[_huff_idxs[pli]];
989 /*Note: dct_token_offs[3] is really the ndct_tokens table.
990 Yes, this seems like a horrible hack, yet it's strangely elegant.*/
991 ti_end=_rec->ti0[pli][zzi];
992 for(;ti<ti_end;ti++){
993 token=_rec->dct_tokens[zzi][ti];
994 oggpackB_write(&_rec->enc_opb,huff_codes[token].pattern,
995 huff_codes[token].nbits);
996 if(OC_DCT_TOKEN_EXTRA_BITS[token]){
997 oggpackB_write(&_rec->enc_opb,_rec->extra_bits[zzi][ebi++],
998 OC_DCT_TOKEN_EXTRA_BITS[token]);
1005 static void oc_rec_residual_tokens_pack(oc_rec_ctx *_rec,
1006 const oc_tok_hist _tok_hist[2][5]){
1007 static const int OC_HUFF_LIST_MIN[6]={0,1,6,15,28,64};
1008 static const int *OC_HUFF_LIST_MAX=OC_HUFF_LIST_MIN+1;
1009 int bits_y[16];
1010 int bits_c[16];
1011 int huff_idxs[5][3];
1012 int huffi_y;
1013 int huffi_c;
1014 int hgi;
1015 /*Step 1a: Select Huffman tables for the DC token list.*/
1016 memset(bits_y,0,sizeof(bits_y));
1017 memset(bits_c,0,sizeof(bits_c));
1018 oc_rec_count_bits(_rec,0,_tok_hist[0][0],bits_y);
1019 oc_rec_count_bits(_rec,0,_tok_hist[1][0],bits_c);
1020 huffi_y=oc_rec_select_huffi(bits_y);
1021 huffi_c=oc_rec_select_huffi(bits_c);
1022 huff_idxs[0][0]=huffi_y;
1023 huff_idxs[0][1]=huff_idxs[0][2]=huffi_c;
1024 /*Step 1b: Write the DC token list with the chosen tables.*/
1025 oggpackB_write(&_rec->enc_opb,huffi_y,4);
1026 oggpackB_write(&_rec->enc_opb,huffi_c,4);
1027 oc_rec_huff_group_pack(_rec,0,1,huff_idxs[0]);
1028 /*Step 2a: Select Huffman tables for the AC token lists.*/
1029 memset(bits_y,0,sizeof(bits_y));
1030 memset(bits_y,0,sizeof(bits_c));
1031 for(hgi=1;hgi<5;hgi++){
1032 oc_rec_count_bits(_rec,hgi,_tok_hist[0][hgi],bits_y);
1033 oc_rec_count_bits(_rec,hgi,_tok_hist[1][hgi],bits_c);
1035 huffi_y=oc_rec_select_huffi(bits_y);
1036 huffi_c=oc_rec_select_huffi(bits_c);
1037 /*Step 2b: Write the AC token lists using the chosen tables.*/
1038 oggpackB_write(&_rec->enc_opb,huffi_y,4);
1039 oggpackB_write(&_rec->enc_opb,huffi_c,4);
1040 for(hgi=1;hgi<5;hgi++){
1041 huff_idxs[hgi][0]=huffi_y+(hgi<<4);
1042 huff_idxs[hgi][1]=huff_idxs[hgi][2]=huffi_c+(hgi<<4);
1043 oc_rec_huff_group_pack(_rec,OC_HUFF_LIST_MIN[hgi],OC_HUFF_LIST_MAX[hgi],
1044 huff_idxs[hgi]);
1050 th_rec_ctx *th_recode_alloc(const th_info *_info,const th_setup_info *_setup){
1051 oc_rec_ctx *dec;
1052 if(_info==NULL||_setup==NULL)return NULL;
1053 dec=_ogg_malloc(sizeof(*dec));
1054 if(oc_rec_init(dec,_info,_setup)<0){
1055 _ogg_free(dec);
1056 return NULL;
1058 dec->state.curframe_num=0;
1059 return dec;
1062 void th_recode_free(th_rec_ctx *_rec){
1063 if(_rec!=NULL){
1064 oc_rec_clear(_rec);
1065 _ogg_free(_rec);
1069 int th_recode_packetin(th_rec_ctx *_rec,const ogg_packet *_op,
1070 ogg_int64_t *_granpos){
1071 int ret;
1072 if(_rec==NULL||_op==NULL)return TH_EFAULT;
1073 /*If the user has already retrieved the statistics, we can't update them any
1074 longer.*/
1075 if(_rec->packet_state!=OC_PACKET_ANALYZE)return TH_EINVAL;
1076 /*A completely empty packet indicates a dropped frame and is treated exactly
1077 like an inter frame with no coded blocks.
1078 Only proceed if we have a non-empty packet.*/
1079 if(_op->bytes!=0){
1080 oc_frame_tok_hist *tok_hist;
1081 oggpackB_readinit(&_rec->dec_opb,_op->packet,_op->bytes);
1082 ret=oc_rec_frame_header_unpack(_rec);
1083 if(ret<0)return ret;
1084 if(_rec->state.frame_type==OC_INTRA_FRAME){
1085 oc_rec_mark_all_intra(_rec);
1086 _rec->state.keyframe_num=_rec->state.curframe_num;
1088 else{
1089 oc_rec_coded_flags_unpack(_rec);
1090 oc_rec_mb_modes_unpack(_rec);
1091 oc_rec_mv_unpack(_rec);
1093 oc_rec_block_qis_unpack(_rec);
1094 if(_rec->ntok_hists>=_rec->ctok_hists){
1095 _rec->ctok_hists=_rec->ctok_hists<<1|1;
1096 _rec->tok_hists=(oc_frame_tok_hist *)_ogg_realloc(_rec->tok_hists,
1097 _rec->ctok_hists*sizeof(*_rec->tok_hists));
1099 tok_hist=_rec->tok_hists+_rec->ntok_hists++;
1100 tok_hist->pkt_sz=_op->bytes;
1101 tok_hist->dct_offs=oggpackB_bits(&_rec->dec_opb);
1102 memcpy(tok_hist->ncoded_fragis,_rec->state.ncoded_fragis,
1103 sizeof(tok_hist->ncoded_fragis));
1104 oc_rec_residual_tokens_unpack(_rec);
1105 /*Update granule position.*/
1106 _rec->state.granpos=
1107 (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
1108 (_rec->state.curframe_num-_rec->state.keyframe_num);
1109 tok_hist->granpos=_rec->state.granpos;
1110 /*Save the statistics for this frame.*/
1111 memcpy(tok_hist->tok_hist,_rec->tok_hist,sizeof(tok_hist->tok_hist));
1112 _rec->state.curframe_num++;
1113 if(_granpos!=NULL)*_granpos=_rec->state.granpos;
1114 return 0;
1116 else{
1117 /*Just update the granule position and return.*/
1118 _rec->state.granpos=
1119 (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
1120 (_rec->state.curframe_num-_rec->state.keyframe_num);
1121 _rec->state.curframe_num++;
1122 if(_granpos!=NULL)*_granpos=_rec->state.granpos;
1123 return TH_DUPFRAME;
1127 int th_recode_ctl(th_rec_ctx *_rec,int _req,void *_buf,size_t _buf_sz){
1128 switch(_req){
1129 case TH_ENCCTL_SET_HUFFMAN_CODES:{
1130 if(_buf==NULL&&_buf_sz!=0||_buf!=NULL&&
1131 _buf_sz!=sizeof(th_huff_code)*TH_NHUFFMAN_TABLES*TH_NDCT_TOKENS){
1132 return TH_EINVAL;
1134 return oc_rec_set_huffman_codes(_rec,(const th_huff_table *)_buf);
1135 }break;
1136 case TH_DECCTL_SET_GRANPOS:{
1137 ogg_int64_t granpos;
1138 if(_rec==NULL||_buf==NULL)return TH_EFAULT;
1139 if(_buf_sz!=sizeof(ogg_int64_t))return TH_EINVAL;
1140 granpos=*(ogg_int64_t *)_buf;
1141 if(granpos<0)return TH_EINVAL;
1142 _rec->state.granpos=granpos;
1143 _rec->state.keyframe_num=
1144 granpos>>_rec->state.info.keyframe_granule_shift;
1145 _rec->state.curframe_num=_rec->state.keyframe_num+
1146 (granpos&(1<<_rec->state.info.keyframe_granule_shift)-1);
1147 return 0;
1148 }break;
1149 case TH_RECCTL_GET_TOK_NSTATS:{
1150 if(_rec==NULL||_buf==NULL)return TH_EFAULT;
1151 if(_buf_sz!=sizeof(long))return TH_EINVAL;
1152 *((long *)_buf)=_rec->ntok_hists;
1153 return 0;
1154 }break;
1155 case TH_RECCTL_GET_TOK_STATS:{
1156 if(_rec==NULL||_buf==NULL)return TH_EFAULT;
1157 if(_buf_sz!=sizeof(const oc_frame_tok_hist **))return TH_EINVAL;
1158 if(_rec->packet_state<OC_PACKET_ANALYZE)return TH_EINVAL;
1159 /*Update the state to prevent us from invalidating this pointer.*/
1160 _rec->packet_state=OC_PACKET_HUFFTABLES;
1161 *((const oc_frame_tok_hist **)_buf)=_rec->tok_hists;
1162 return 0;
1163 }break;
1164 default:return TH_EIMPL;
1168 int th_recode_flushheader(th_rec_ctx *_rec,th_comment *_tc,ogg_packet *_op){
1169 return oc_state_flushheader(&_rec->state,&_rec->packet_state,&_rec->enc_opb,
1170 &_rec->qinfo,(const th_huff_table *)_rec->enc_huff_codes,_tc->vendor,
1171 _tc,_op);
1174 #include <stdio.h>
/*Rewrites one data packet's DCT tokens using the Huffman codes currently
   installed in the context.
  Packets must be submitted in the same order they were passed to
   th_recode_packetin(): the saved per-frame statistics are consumed
   sequentially and each one is sanity-checked against the incoming packet.
  _op_in:  The original packet; zero bytes means a dropped (duplicate) frame.
  _op_out: Receives the rewritten packet; its storage belongs to the context
            and is valid until the next call that touches enc_opb.
  Return: 0 on success, TH_DUPFRAME for an empty packet, TH_EFAULT on NULL
           arguments, TH_EINVAL if no saved statistics remain, or
           TH_EBADPACKET if the packet doesn't match the saved statistics.*/
int th_recode_packet_rewrite(th_rec_ctx *_rec,const ogg_packet *_op_in,
 ogg_packet *_op_out){
  int ret;
  if(_rec==NULL||_op_in==NULL||_op_out==NULL)return TH_EFAULT;
  /*If we've used all our decoded token histograms, please stop calling us.*/
  if(_rec->cur_tok_histi>=_rec->ntok_hists)return TH_EINVAL;
  /*A completely empty packet indicates a dropped frame and is treated exactly
     like an inter frame with no coded blocks.
    Only proceed if we have a non-empty packet.*/
  if(_op_in->bytes!=0){
    oc_frame_tok_hist *tok_hist;
    /*Read enough of the packet to figure out what kind of frame we have.
      This also validates the packet to be sure we can decode it, which is why
       we don't just use th_packet_iskeyframe().*/
    oggpackB_readinit(&_rec->dec_opb,_op_in->packet,_op_in->bytes);
    ret=oc_rec_frame_header_unpack(_rec);
    if(ret<0)return ret;
    /*Update granule position.*/
    if(_rec->state.frame_type==OC_INTRA_FRAME){
      _rec->state.keyframe_num=_rec->state.curframe_num;
    _rec->state.granpos=
     (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
     (_rec->state.curframe_num-_rec->state.keyframe_num);
    _rec->state.curframe_num++;
    /*Sanity checks to see if the next piece of frame data corresponds to this
       packet.
      This isn't a guarantee if someone rewrote the file out from under us, but
       it at least ensures that we have enough bytes in the packet.
      TODO: We could re-decode this packet to get the info we need, instead of
       failing, but that would be more code.*/
    tok_hist=_rec->tok_hists+_rec->cur_tok_histi;
    if(tok_hist->granpos!=_rec->state.granpos||
     tok_hist->pkt_sz!=_op_in->bytes){
      return TH_EBADPACKET;
    _rec->cur_tok_histi++;
    /*Copy the contents of the input packet up to the DCT tokens.
      dct_offs is a bit offset recorded during the analysis pass.*/
    oggpackB_reset(&_rec->enc_opb);
    oggpackB_writecopy(&_rec->enc_opb,_op_in->packet,tok_hist->dct_offs);
    /*Read the DCT tokens using the old codes.*/
    oggpackB_readinit(&_rec->dec_opb,_op_in->packet,_op_in->bytes);
    oggpackB_adv(&_rec->dec_opb,tok_hist->dct_offs);
    /*Restore the coded-fragment counts saved with this frame's statistics;
       the unpack below consumes them.*/
    memcpy(_rec->state.ncoded_fragis,tok_hist->ncoded_fragis,
     sizeof(_rec->state.ncoded_fragis));
    oc_rec_residual_tokens_unpack(_rec);
    /*Write the DCT tokens using the new codes.
      The counts must be restored again: the unpack above consumed them.*/
    memcpy(_rec->state.ncoded_fragis,tok_hist->ncoded_fragis,
     sizeof(_rec->state.ncoded_fragis));
    oc_rec_residual_tokens_pack(_rec,
     (const oc_tok_hist_table *)tok_hist->tok_hist);
    ret=0;
  else{
    oggpackB_reset(&_rec->enc_opb);
    /*Just update the granule position and return.*/
    _rec->state.granpos=
     (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
     (_rec->state.curframe_num-_rec->state.keyframe_num);
    _rec->state.curframe_num++;
    ret=TH_DUPFRAME;
  /*Fill in the output packet from the assembled buffer; b_o_s is never set
     on a data packet, and e_o_s is propagated from the input.*/
  _op_out->packet=oggpackB_get_buffer(&_rec->enc_opb);
  _op_out->bytes=oggpackB_bytes(&_rec->enc_opb);
  _op_out->b_o_s=0;
  _op_out->e_o_s=_op_in->e_o_s;
  _op_out->packetno=_rec->state.curframe_num;
  _op_out->granulepos=_rec->state.granpos;
  if(_op_out->e_o_s)_rec->packet_state=OC_PACKET_DONE;
  return ret;