/* Peephole optimizations for bytecode compiler. */

#include "Python.h"

#include "Python-ast.h"
#include "node.h"
#include "pyarena.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"
#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
/* Wrapped in do/while so the two assignments stay together even when the
   macro is used as the body of an unbraced if/else. */
#define SETARG(arr, i, val) do { arr[i+2] = (val)>>8; arr[i+1] = (val) & 255; } while (0)
#define CODESIZE(op) (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
    (blocks[start]==blocks[start+bytes-1])
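
/* With the 3-byte encoding assumed throughout this file, an instruction
   with an argument occupies opcode, arg-low, arg-high.  For example,
   LOAD_CONST with argument 0x0102 is laid out as

       codestr[i]   = LOAD_CONST
       codestr[i+1] = 0x02    (low byte)
       codestr[i+2] = 0x01    (high byte)

   GETARG reassembles such a little-endian argument, SETARG stores one
   back, and GETJUMPTGT converts either flavour of jump (relative or
   absolute) into an absolute target offset. */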

/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
   with    LOAD_CONST (c1, c2, ... cn).
   The consts table must still be in list form so that the
   new constant (c1, c2, ... cn) can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Bails out with no change if one or more of the LOAD_CONSTs is missing.
   Also works for BUILD_LIST when followed by an "in" or "not in" test.
*/
static int
tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
{
    PyObject *newconst, *constant;
    Py_ssize_t i, arg, len_consts;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
    assert(GETARG(codestr, (n*3)) == n);
    for (i=0 ; i<n ; i++)
        assert(codestr[i*3] == LOAD_CONST);

    /* Build up the new tuple of constants */
    newconst = PyTuple_New(n);
    if (newconst == NULL)
        return 0;
    len_consts = PyList_GET_SIZE(consts);
    for (i=0 ; i<n ; i++) {
        arg = GETARG(codestr, (i*3));
        assert(arg < len_consts);
        constant = PyList_GET_ITEM(consts, arg);
        Py_INCREF(constant);
        PyTuple_SET_ITEM(newconst, i, constant);
    }

    /* Append folded constant onto consts */
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOPs over old LOAD_CONSTs and
       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
    memset(codestr, NOP, n*3);
    codestr[n*3] = LOAD_CONST;
    SETARG(codestr, (n*3), len_consts);
    return 1;
}
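
/* Illustrative sketch: for n == 3, e.g. the constant expression
   (1, 2, 3), the twelve bytes

       LOAD_CONST 1   LOAD_CONST 2   LOAD_CONST 3   BUILD_TUPLE 3

   become

       NOP NOP NOP    NOP NOP NOP    NOP NOP NOP    LOAD_CONST (1,2,3)

   where the folded tuple has been appended at index len_consts of the
   consts list and the BUILD_TUPLE slot is reused for its LOAD_CONST. */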

/* Replace LOAD_CONST c1, LOAD_CONST c2, BINOP
   with    LOAD_CONST binop(c1, c2)
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Abandons the transformation if the folding fails (e.g. 1+'a').
   If the new constant is a sequence, only folds when the size
   is below a threshold value.  That keeps pyc files from
   becoming large in the presence of code like:  (None,)*1000.
*/
static int
fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst, *v, *w;
    Py_ssize_t len_consts, size;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);
    assert(codestr[3] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
    opcode = codestr[6];
    switch (opcode) {
        case BINARY_POWER:
            newconst = PyNumber_Power(v, w, Py_None);
            break;
        case BINARY_MULTIPLY:
            newconst = PyNumber_Multiply(v, w);
            break;
        case BINARY_TRUE_DIVIDE:
            newconst = PyNumber_TrueDivide(v, w);
            break;
        case BINARY_FLOOR_DIVIDE:
            newconst = PyNumber_FloorDivide(v, w);
            break;
        case BINARY_MODULO:
            newconst = PyNumber_Remainder(v, w);
            break;
        case BINARY_ADD:
            newconst = PyNumber_Add(v, w);
            break;
        case BINARY_SUBTRACT:
            newconst = PyNumber_Subtract(v, w);
            break;
        case BINARY_SUBSCR:
            newconst = PyObject_GetItem(v, w);
            break;
        case BINARY_LSHIFT:
            newconst = PyNumber_Lshift(v, w);
            break;
        case BINARY_RSHIFT:
            newconst = PyNumber_Rshift(v, w);
            break;
        case BINARY_AND:
            newconst = PyNumber_And(v, w);
            break;
        case BINARY_XOR:
            newconst = PyNumber_Xor(v, w);
            break;
        case BINARY_OR:
            newconst = PyNumber_Or(v, w);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected binary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }
    size = PyObject_Size(newconst);
    if (size == -1)
        PyErr_Clear();
    else if (size > 20) {
        Py_DECREF(newconst);
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
    memset(codestr, NOP, 4);
    codestr[4] = LOAD_CONST;
    SETARG(codestr, 4, len_consts);
    return 1;
}
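
/* Illustrative sketch: for the constant expression 2 * 3 the seven bytes

       LOAD_CONST 2   LOAD_CONST 3   BINARY_MULTIPLY

   are rewritten in place as

       NOP NOP NOP NOP   LOAD_CONST 6

   with the folded result appended at index len_consts; the new
   LOAD_CONST reuses the last three bytes of the original pattern. */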

static int
fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst=NULL, *v;
    Py_ssize_t len_consts;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    opcode = codestr[3];
    switch (opcode) {
        case UNARY_NEGATIVE:
            /* Preserve the sign of -0.0 */
            if (PyObject_IsTrue(v) == 1)
                newconst = PyNumber_Negative(v);
            break;
        case UNARY_INVERT:
            newconst = PyNumber_Invert(v);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected unary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP LOAD_CONST newconst */
    codestr[0] = NOP;
    codestr[1] = LOAD_CONST;
    SETARG(codestr, 1, len_consts);
    return 1;
}
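
/* Illustrative sketch: for the constant expression -5 the four bytes

       LOAD_CONST 5   UNARY_NEGATIVE

   become

       NOP   LOAD_CONST -5

   The PyObject_IsTrue() guard above deliberately refuses to fold when v
   is falsey (0, 0.0, ...), presumably because the compiler's constant
   table treats equal constants as interchangeable, so a folded -0.0
   could be conflated with 0.0 and lose the sign of the zero. */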

static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
    unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
    int i, j, opcode, blockcnt = 0;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, 0, len*sizeof(int));

    /* Mark labels in the first pass */
    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
        opcode = code[i];
        switch (opcode) {
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
                j = GETJUMPTGT(code, i);
                blocks[j] = 1;
                break;
        }
    }

    /* Build block numbers in the second pass */
    for (i=0 ; i<len ; i++) {
        blockcnt += blocks[i];          /* increment blockcnt over labels */
        blocks[i] = blockcnt;
    }
    return blocks;
}
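
/* Illustrative note: after markblocks(), blocks[i] holds the basic-block
   number of byte offset i, e.g. if offset 9 is the only jump target:

       offset:  0  1  2  ...  8  9  10 ...
       blocks:  0  0  0  ...  0  1  1  ...

   ISBASICBLOCK(blocks, start, bytes) then simply checks that the first
   and last byte of a candidate pattern carry the same block number,
   i.e. that no jump target falls inside the span being rewritten. */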

/* Helper to replace LOAD_GLOBAL/LOAD_NAME None/True/False with LOAD_CONST.
   Returns: 0 if no change, 1 if change, -1 if error */
static int
load_global(unsigned char *codestr, Py_ssize_t i, char *name, PyObject *consts)
{
    Py_ssize_t j;
    PyObject *obj;
    if (name == NULL)
        return 0;
    if (strcmp(name, "None") == 0)
        obj = Py_None;
    else if (strcmp(name, "True") == 0)
        obj = Py_True;
    else if (strcmp(name, "False") == 0)
        obj = Py_False;
    else
        return 0;
    for (j = 0; j < PyList_GET_SIZE(consts); j++) {
        if (PyList_GET_ITEM(consts, j) == obj)
            break;
    }
    if (j == PyList_GET_SIZE(consts)) {
        if (PyList_Append(consts, obj) < 0)
            return -1;
    }
    assert(PyList_GET_ITEM(consts, j) == obj);
    codestr[i] = LOAD_CONST;
    SETARG(codestr, i, j);
    return 1;
}
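
/* Illustrative sketch: a LOAD_NAME or LOAD_GLOBAL whose name is the
   string "None", "True" or "False" is rewritten in place to a
   LOAD_CONST of the corresponding singleton.  The consts list is
   scanned by identity first, so each singleton is appended at most
   once no matter how often it appears. */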

/* Perform basic peephole optimizations on the components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code
   whose length exceeds 32,700 bytes, and it does not calculate extended
   arguments.  That allows us to avoid overflow and sign issues.
   Likewise, it bails when the lineno table has a complex encoding for
   gaps >= 255.  EXTENDED_ARG can appear before MAKE_FUNCTION; in this
   case both opcodes are skipped.  EXTENDED_ARG preceding any other
   opcode causes the optimizer to bail.

   Optimizations are restricted to simple transformations occurring within
   a single basic block.  All transformations keep the code size the same
   or smaller.  For those that reduce size, the gaps are initially filled
   with NOPs.  Later those NOPs are removed and the jump addresses
   retargeted in a single pass.  Line numbering is adjusted accordingly. */

PyObject *
PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
                PyObject *lineno_obj)
{
    Py_ssize_t i, j, codelen;
    int nops, h, adj;
    int tgt, tgttgt, opcode;
    unsigned char *codestr = NULL;
    unsigned char *lineno;
    int *addrmap = NULL;
    int new_line, cum_orig_line, last_line, tabsiz;
    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
    unsigned int *blocks = NULL;
    char *name;

    /* Bail out if an exception is set */
    if (PyErr_Occurred())
        goto exitError;

    /* Bypass optimization when the lineno table is too complex */
    assert(PyBytes_Check(lineno_obj));
    lineno = (unsigned char*)PyBytes_AS_STRING(lineno_obj);
    tabsiz = PyBytes_GET_SIZE(lineno_obj);
    if (memchr(lineno, 255, tabsiz) != NULL)
        goto exitUnchanged;

    /* Avoid situations where jump retargeting could overflow */
    assert(PyBytes_Check(code));
    codelen = PyBytes_GET_SIZE(code);
    if (codelen > 32700)
        goto exitUnchanged;

    /* Make a modifiable copy of the code string */
    codestr = (unsigned char *)PyMem_Malloc(codelen);
    if (codestr == NULL)
        goto exitError;
    codestr = (unsigned char *)memcpy(codestr,
                                      PyBytes_AS_STRING(code), codelen);

    /* Verify that RETURN_VALUE terminates the codestring.  This allows
       the various transformation patterns to look ahead several
       instructions without additional checks to make sure they are not
       looking beyond the end of the code string.
    */
    if (codestr[codelen-1] != RETURN_VALUE)
        goto exitUnchanged;

    /* Mapping to new jump targets after NOPs are removed */
    addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
    if (addrmap == NULL)
        goto exitError;

    blocks = markblocks(codestr, codelen);
    if (blocks == NULL)
        goto exitError;
    assert(PyList_Check(consts));

    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
      reoptimize_current:
        opcode = codestr[i];

        lastlc = cumlc;
        cumlc = 0;

        switch (opcode) {
            /* Replace UNARY_NOT POP_JUMP_IF_FALSE
               with    POP_JUMP_IF_TRUE */
            case UNARY_NOT:
                if (codestr[i+1] != POP_JUMP_IF_FALSE
                    || !ISBASICBLOCK(blocks, i, 4))
                    continue;
                j = GETARG(codestr, i+1);
                codestr[i] = POP_JUMP_IF_TRUE;
                SETARG(codestr, i, j);
                codestr[i+3] = NOP;
                goto reoptimize_current;

            /* not a is b     -->  a is not b
               not a in b     -->  a not in b
               not a is not b -->  a is b
               not a not in b -->  a in b
            */
            case COMPARE_OP:
                j = GETARG(codestr, i);
                if (j < 6 || j > 9 ||
                    codestr[i+3] != UNARY_NOT ||
                    !ISBASICBLOCK(blocks, i, 4))
                    continue;
                SETARG(codestr, i, (j^1));
                codestr[i+3] = NOP;
                break;
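
            /* Illustrative note: COMPARE_OP arguments 6..9 select the
               "in", "not in", "is" and "is not" entries of the cmp_op
               table, which come in complementary pairs, so flipping the
               low bit with j^1 turns a test into its negation and lets
               the trailing UNARY_NOT be NOPed away. */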

            /* Replace LOAD_GLOBAL/LOAD_NAME None/True/False
               with    LOAD_CONST None/True/False */
            case LOAD_NAME:
            case LOAD_GLOBAL:
                j = GETARG(codestr, i);
                name = _PyUnicode_AsString(PyTuple_GET_ITEM(names, j));
                h = load_global(codestr, i, name, consts);
                if (h < 0)
                    goto exitError;
                else if (h == 0)
                    continue;
                cumlc = lastlc + 1;
                break;

            /* Skip over LOAD_CONST trueconst
               POP_JUMP_IF_FALSE xx.  This improves
               "while 1" performance. */
            case LOAD_CONST:
                cumlc = lastlc + 1;
                j = GETARG(codestr, i);
                if (codestr[i+3] != POP_JUMP_IF_FALSE ||
                    !ISBASICBLOCK(blocks, i, 6) ||
                    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
                    continue;
                memset(codestr+i, NOP, 6);
                cumlc = 0;
                break;

            /* Try to fold tuples of constants (includes a case for lists
               which are only used for "in" and "not in" tests).
               Skip over  BUILD_SEQN 1 UNPACK_SEQN 1.
               Replace    BUILD_SEQN 2 UNPACK_SEQN 2  with  ROT2.
               Replace    BUILD_SEQN 3 UNPACK_SEQN 3  with  ROT3 ROT2. */
            case BUILD_TUPLE:
            case BUILD_LIST:
                j = GETARG(codestr, i);
                h = i - 3 * j;
                if (h >= 0 &&
                    j <= lastlc &&
                    ((opcode == BUILD_TUPLE &&
                      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
                     (opcode == BUILD_LIST &&
                      codestr[i+3]==COMPARE_OP &&
                      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
                      (GETARG(codestr, i+3)==6 ||
                       GETARG(codestr, i+3)==7))) &&
                    tuple_of_constants(&codestr[h], j, consts)) {
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                    break;
                }
                if (codestr[i+3] != UNPACK_SEQUENCE ||
                    !ISBASICBLOCK(blocks, i, 6) ||
                    j != GETARG(codestr, i+3))
                    continue;
                if (j == 1) {
                    memset(codestr+i, NOP, 6);
                } else if (j == 2) {
                    codestr[i] = ROT_TWO;
                    memset(codestr+i+1, NOP, 5);
                } else if (j == 3) {
                    codestr[i] = ROT_THREE;
                    codestr[i+1] = ROT_TWO;
                    memset(codestr+i+2, NOP, 4);
                }
                break;
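
            /* Illustrative sketch: a two-element swap such as
               "a, b = b, a" compiles to

                   LOAD b  LOAD a  BUILD_TUPLE 2  UNPACK_SEQUENCE 2
                   STORE a  STORE b

               and the BUILD/UNPACK pair is replaced here by a single
               ROT_TWO (plus NOPs), avoiding the temporary tuple. */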

            /* Fold binary ops on constants.
               LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */
            case BINARY_POWER:
            case BINARY_MULTIPLY:
            case BINARY_TRUE_DIVIDE:
            case BINARY_FLOOR_DIVIDE:
            case BINARY_MODULO:
            case BINARY_ADD:
            case BINARY_SUBTRACT:
            case BINARY_SUBSCR:
            case BINARY_LSHIFT:
            case BINARY_RSHIFT:
            case BINARY_AND:
            case BINARY_XOR:
            case BINARY_OR:
                if (lastlc >= 2 &&
                    ISBASICBLOCK(blocks, i-6, 7) &&
                    fold_binops_on_constants(&codestr[i-6], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

            /* Fold unary ops on constants.
               LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c1) */
            case UNARY_NEGATIVE:
            case UNARY_INVERT:
                if (lastlc >= 1 &&
                    ISBASICBLOCK(blocks, i-3, 4) &&
                    fold_unaryops_on_constants(&codestr[i-3], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

            /* Simplify conditional jump to conditional jump where the
               result of the first test implies the success of a similar
               test or the failure of the opposite test.
               Arises in code like:
                   "if a and b:"
                   "if a or b:"
                   "a and b or c"
                   "(a and b) and c"
               x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
                  -->  x:JUMP_IF_FALSE_OR_POP z
               x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
                  -->  x:POP_JUMP_IF_FALSE y+3
               where y+3 is the instruction following the second test.
            */
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                tgt = GETJUMPTGT(codestr, i);
                j = codestr[tgt];
                if (CONDITIONAL_JUMP(j)) {
                    /* NOTE: all possible jumps here are absolute! */
                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
                        /* The second jump will be taken iff
                           the first is. */
                        tgttgt = GETJUMPTGT(codestr, tgt);
                        /* The current opcode inherits its target's
                           stack behaviour. */
                        codestr[i] = j;
                        SETARG(codestr, i, tgttgt);
                        goto reoptimize_current;
                    } else {
                        /* The second jump is not taken if the first is
                           (so jump past it), and all conditional jumps
                           pop their argument when they're not taken (so
                           change the first jump to pop its argument when
                           it's taken). */
                        if (JUMPS_ON_TRUE(opcode))
                            codestr[i] = POP_JUMP_IF_TRUE;
                        else
                            codestr[i] = POP_JUMP_IF_FALSE;
                        SETARG(codestr, i, (tgt + 3));
                        goto reoptimize_current;
                    }
                }
                /* Intentional fallthrough */

            /* Replace jumps to unconditional jumps */
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
                tgt = GETJUMPTGT(codestr, i);
                /* Replace an unconditional jump to a RETURN with just
                   the RETURN */
                if (UNCONDITIONAL_JUMP(opcode) &&
                    codestr[tgt] == RETURN_VALUE) {
                    codestr[i] = RETURN_VALUE;
                    memset(codestr+i+1, NOP, 2);
                    continue;
                }
                if (!UNCONDITIONAL_JUMP(codestr[tgt]))
                    continue;
                tgttgt = GETJUMPTGT(codestr, tgt);
                if (opcode == JUMP_FORWARD)     /* JMP_ABS can go backwards */
                    opcode = JUMP_ABSOLUTE;
                if (!ABSOLUTE_JUMP(opcode))
                    tgttgt -= i + 3;            /* Calc relative jump addr */
                if (tgttgt < 0)                 /* No backward relative jumps */
                    continue;
                codestr[i] = opcode;
                SETARG(codestr, i, tgttgt);
                break;
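
            /* Illustrative note: this threads jumps through jumps.  If
               offset x holds JUMP_FORWARD whose target y is itself an
               unconditional JUMP_ABSOLUTE to z, the jump at x is
               retargeted straight to z (and promoted to JUMP_ABSOLUTE,
               since the combined hop may point backwards).  A jump that
               lands directly on RETURN_VALUE is replaced by the
               RETURN_VALUE itself, as handled above. */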

            case EXTENDED_ARG:
                if (codestr[i+3] != MAKE_FUNCTION)
                    goto exitUnchanged;
                /* don't visit MAKE_FUNCTION as GETARG will be wrong */
                i += 3;
                break;

            /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
            /* Remove unreachable JUMPs after RETURN */
            case RETURN_VALUE:
                if (i+4 >= codelen)
                    continue;
                if (codestr[i+4] == RETURN_VALUE &&
                    ISBASICBLOCK(blocks, i, 5))
                    memset(codestr+i+1, NOP, 4);
                else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
                         ISBASICBLOCK(blocks, i, 4))
                    memset(codestr+i+1, NOP, 3);
                break;
        }
    }

    /* Fixup linenotab */
    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
        addrmap[i] = i - nops;
        if (codestr[i] == NOP)
            nops++;
    }
    cum_orig_line = 0;
    last_line = 0;
    for (i=0 ; i < tabsiz ; i+=2) {
        cum_orig_line += lineno[i];
        new_line = addrmap[cum_orig_line];
        assert(new_line - last_line < 255);
        lineno[i] = (unsigned char)(new_line - last_line);
        last_line = new_line;
    }
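
    /* Illustrative note: the lineno table is a sequence of two-byte
       (address increment, line increment) pairs.  Only the even-indexed
       bytes, the address increments, need rewriting here: cum_orig_line
       accumulates the original byte offset (despite its name), and
       addrmap maps it to the offset after NOP removal. */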

    /* Remove NOPs and fixup jump targets */
    for (i=0, h=0 ; i<codelen ; ) {
        opcode = codestr[i];
        switch (opcode) {
            case NOP:
                i++;
                continue;

            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                j = addrmap[GETARG(codestr, i)];
                SETARG(codestr, i, j);
                break;

            case FOR_ITER:
            case JUMP_FORWARD:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
                j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
                SETARG(codestr, i, j);
                break;
        }
        adj = CODESIZE(opcode);
        while (adj--)
            codestr[h++] = codestr[i++];
    }
    assert(h + nops == codelen);

    code = PyBytes_FromStringAndSize((char *)codestr, h);
    PyMem_Free(addrmap);
    PyMem_Free(codestr);
    PyMem_Free(blocks);
    return code;

 exitError:
    code = NULL;

 exitUnchanged:
    if (blocks != NULL)
        PyMem_Free(blocks);
    if (addrmap != NULL)
        PyMem_Free(addrmap);
    if (codestr != NULL)
        PyMem_Free(codestr);
    Py_XINCREF(code);
    return code;
}