/* Peephole optimizations for bytecode compiler. */

#include "Python.h"

#include "Python-ast.h"
#include "node.h"
#include "pyarena.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"
#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
#define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
#define CODESIZE(op) (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
    (blocks[start]==blocks[start+bytes-1])
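
/* Instruction layout assumed by these macros: an opcode that takes an
   argument occupies three bytes, the opcode itself followed by a 16-bit
   little-endian argument.  For example (illustrative bytes only),
       arr[i] = LOAD_CONST, arr[i+1] = 0x02, arr[i+2] = 0x01
   makes GETARG(arr, i) yield (0x01<<8) + 0x02 == 258, and
   SETARG(arr, i, 258) writes the same two bytes back. */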
/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
   with    LOAD_CONST (c1, c2, ... cn).
   The consts table must still be in list form so that the
   new constant (c1, c2, ... cn) can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Bails out with no change if one or more of the LOAD_CONSTs is missing.
   Also works for BUILD_LIST when followed by an "in" or "not in" test.
*/
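
/* Illustrative before/after: the expression (1, 2, 3) compiles to
       LOAD_CONST 1; LOAD_CONST 2; LOAD_CONST 3; BUILD_TUPLE 3
   (constants shown by value, not by index) and is rewritten in place as
       NOP; NOP; NOP; NOP; NOP; NOP; NOP; NOP; NOP; LOAD_CONST (1, 2, 3)
   with the folded tuple appended to the consts list. */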
static int
tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
{
    PyObject *newconst, *constant;
    Py_ssize_t i, arg, len_consts;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
    assert(GETARG(codestr, (n*3)) == n);
    for (i=0 ; i<n ; i++)
        assert(codestr[i*3] == LOAD_CONST);

    /* Build up new tuple of constants */
    newconst = PyTuple_New(n);
    if (newconst == NULL)
        return 0;
    len_consts = PyList_GET_SIZE(consts);
    for (i=0 ; i<n ; i++) {
        arg = GETARG(codestr, (i*3));
        assert(arg < len_consts);
        constant = PyList_GET_ITEM(consts, arg);
        Py_INCREF(constant);
        PyTuple_SET_ITEM(newconst, i, constant);
    }

    /* Append folded constant onto consts */
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOPs over the old LOAD_CONSTs and
       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
    memset(codestr, NOP, n*3);
    codestr[n*3] = LOAD_CONST;
    SETARG(codestr, (n*3), len_consts);
    return 1;
}
/* Replace LOAD_CONST c1, LOAD_CONST c2, BINOP
   with    LOAD_CONST binop(c1, c2)
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Abandons the transformation if the folding fails (e.g. 1+'a').
   If the new constant is a sequence, only folds when the size
   is below a threshold value.  That keeps pyc files from
   becoming large in the presence of code like:  (None,)*1000.
*/
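
/* Illustrative: "2 ** 10" compiles to
       LOAD_CONST 2; LOAD_CONST 10; BINARY_POWER
   and folds to
       NOP; NOP; NOP; NOP; LOAD_CONST 1024
   while something like "(None,) * 1000" is computed but then discarded
   because its length exceeds the threshold checked below. */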
static int
fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst, *v, *w;
    Py_ssize_t len_consts, size;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);
    assert(codestr[3] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
    opcode = codestr[6];
    switch (opcode) {
    case BINARY_POWER:
        newconst = PyNumber_Power(v, w, Py_None);
        break;
    case BINARY_MULTIPLY:
        newconst = PyNumber_Multiply(v, w);
        break;
    case BINARY_DIVIDE:
        /* Cannot fold this operation statically since
           the result can depend on the run-time presence
           of the -Qnew flag */
        return 0;
    case BINARY_TRUE_DIVIDE:
        newconst = PyNumber_TrueDivide(v, w);
        break;
    case BINARY_FLOOR_DIVIDE:
        newconst = PyNumber_FloorDivide(v, w);
        break;
    case BINARY_MODULO:
        newconst = PyNumber_Remainder(v, w);
        break;
    case BINARY_ADD:
        newconst = PyNumber_Add(v, w);
        break;
    case BINARY_SUBTRACT:
        newconst = PyNumber_Subtract(v, w);
        break;
    case BINARY_SUBSCR:
        newconst = PyObject_GetItem(v, w);
        break;
    case BINARY_LSHIFT:
        newconst = PyNumber_Lshift(v, w);
        break;
    case BINARY_RSHIFT:
        newconst = PyNumber_Rshift(v, w);
        break;
    case BINARY_AND:
        newconst = PyNumber_And(v, w);
        break;
    case BINARY_XOR:
        newconst = PyNumber_Xor(v, w);
        break;
    case BINARY_OR:
        newconst = PyNumber_Or(v, w);
        break;
    default:
        /* Called with an unknown opcode */
        PyErr_Format(PyExc_SystemError,
                     "unexpected binary operation %d on a constant",
                     opcode);
        return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }
    size = PyObject_Size(newconst);
    if (size == -1)
        PyErr_Clear();
    else if (size > 20) {
        Py_DECREF(newconst);
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
    memset(codestr, NOP, 4);
    codestr[4] = LOAD_CONST;
    SETARG(codestr, 4, len_consts);
    return 1;
}
static int
fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst=NULL, *v;
    Py_ssize_t len_consts;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    opcode = codestr[3];
    switch (opcode) {
    case UNARY_NEGATIVE:
        /* Preserve the sign of -0.0 */
        if (PyObject_IsTrue(v) == 1)
            newconst = PyNumber_Negative(v);
        break;
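    /* Presumed rationale for the truth test above: folding the negation
       of a zero would put a -0.0 constant into a consts table that can
       conflate it with 0.0 (the two compare equal), so negating falsey
       values is deferred to run time. */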
    case UNARY_CONVERT:
        newconst = PyObject_Repr(v);
        break;
    case UNARY_INVERT:
        newconst = PyNumber_Invert(v);
        break;
    default:
        /* Called with an unknown opcode */
        PyErr_Format(PyExc_SystemError,
                     "unexpected unary operation %d on a constant",
                     opcode);
        return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP LOAD_CONST newconst */
    codestr[0] = NOP;
    codestr[1] = LOAD_CONST;
    SETARG(codestr, 1, len_consts);
    return 1;
}
static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
    unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
    int i, j, opcode, blockcnt = 0;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, 0, len*sizeof(int));

    /* Mark labels in the first pass */
    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
        opcode = code[i];
        switch (opcode) {
        case FOR_ITER:
        case JUMP_FORWARD:
        case JUMP_IF_FALSE_OR_POP:
        case JUMP_IF_TRUE_OR_POP:
        case POP_JUMP_IF_FALSE:
        case POP_JUMP_IF_TRUE:
        case JUMP_ABSOLUTE:
        case CONTINUE_LOOP:
        case SETUP_LOOP:
        case SETUP_EXCEPT:
        case SETUP_FINALLY:
            j = GETJUMPTGT(code, i);
            blocks[j] = 1;
            break;
        }
    }

    /* Build block numbers in the second pass */
    for (i=0 ; i<len ; i++) {
        blockcnt += blocks[i];      /* increment blockcnt over labels */
        blocks[i] = blockcnt;
    }
    return blocks;
}
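
/* Worked example (hypothetical 10-byte code whose only jump targets
   offset 6): after the first pass, blocks[] is 0 0 0 0 0 0 1 0 0 0;
   after the second, 0 0 0 0 0 0 1 1 1 1.  ISBASICBLOCK then reports
   that offsets 0-5 lie in one basic block, while any span crossing
   offset 6 does not. */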
/* Perform basic peephole optimizations on the components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code
   containing extended arguments or that has a length over 32,700.  That
   allows us to avoid overflow and sign issues.  Likewise, it bails when
   the lineno table has complex encoding for gaps >= 255.

   Optimizations are restricted to simple transformations occurring within
   a single basic block.  All transformations keep the code size the same
   or smaller.  For those that reduce size, the gaps are initially filled
   with NOPs.  Later those NOPs are removed and the jump addresses
   retargeted in a single pass.  Line numbering is adjusted accordingly. */
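
/* End-to-end illustration (hypothetical source): compiling
   "def f(x, y): return not x in y" yields a COMPARE_OP/UNARY_NOT pair;
   the COMPARE_OP rule below rewrites it to "x not in y" plus a NOP,
   and the cleanup passes then squeeze out the NOP, remapping any jump
   targets and the line number table to the shortened code. */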
PyObject *
PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
                PyObject *lineno_obj)
{
    Py_ssize_t i, j, codelen;
    int nops, h, adj;
    int tgt, tgttgt, opcode;
    unsigned char *codestr = NULL;
    unsigned char *lineno;
    int *addrmap = NULL;
    int new_line, cum_orig_line, last_line, tabsiz;
    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
    unsigned int *blocks = NULL;
    char *name;

    /* Bail out if an exception is set */
    if (PyErr_Occurred())
        goto exitUnchanged;

    /* Bypass optimization when the lineno table is too complex */
    assert(PyString_Check(lineno_obj));
    lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
    tabsiz = PyString_GET_SIZE(lineno_obj);
    if (memchr(lineno, 255, tabsiz) != NULL)
        goto exitUnchanged;

    /* Avoid situations where jump retargeting could overflow */
    assert(PyString_Check(code));
    codelen = PyString_GET_SIZE(code);
    if (codelen > 32700)
        goto exitUnchanged;

    /* Make a modifiable copy of the code string */
    codestr = (unsigned char *)PyMem_Malloc(codelen);
    if (codestr == NULL)
        goto exitUnchanged;
    codestr = (unsigned char *)memcpy(codestr,
                                      PyString_AS_STRING(code), codelen);

    /* Verify that RETURN_VALUE terminates the codestring.  This allows
       the various transformation patterns to look ahead several
       instructions without additional checks to make sure they are not
       looking beyond the end of the code string.
    */
    if (codestr[codelen-1] != RETURN_VALUE)
        goto exitUnchanged;

    /* Mapping to new jump targets after NOPs are removed */
    addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
    if (addrmap == NULL)
        goto exitUnchanged;

    blocks = markblocks(codestr, codelen);
    if (blocks == NULL)
        goto exitUnchanged;
    assert(PyList_Check(consts));

    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
      reoptimize_current:
        opcode = codestr[i];

        lastlc = cumlc;
        cumlc = 0;

        switch (opcode) {
        /* Replace UNARY_NOT POP_JUMP_IF_FALSE
           with    POP_JUMP_IF_TRUE */
        case UNARY_NOT:
            if (codestr[i+1] != POP_JUMP_IF_FALSE
                || !ISBASICBLOCK(blocks,i,4))
                continue;
            j = GETARG(codestr, i+1);
            codestr[i] = POP_JUMP_IF_TRUE;
            SETARG(codestr, i, j);
            codestr[i+3] = NOP;
            goto reoptimize_current;
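
        /* Illustrative (hypothetical operand): "if not x:" compiles to
               LOAD_NAME x; UNARY_NOT; POP_JUMP_IF_FALSE target
           and the case above rewrites those four bytes to
               LOAD_NAME x; POP_JUMP_IF_TRUE target; NOP */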
        /* not a is b -->  a is not b
           not a in b -->  a not in b
           not a is not b -->  a is b
           not a not in b -->  a in b
        */
        case COMPARE_OP:
            j = GETARG(codestr, i);
            if (j < 6 || j > 9 ||
                codestr[i+3] != UNARY_NOT ||
                !ISBASICBLOCK(blocks,i,4))
                continue;
            SETARG(codestr, i, (j^1));
            codestr[i+3] = NOP;
            break;
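
        /* The guard j in [6, 9] restricts this to the cmp_op indices
           6 "in", 7 "not in", 8 "is", 9 "is not"; j^1 swaps each
           operator with its negation.  E.g. COMPARE_OP 8 UNARY_NOT
           ("not a is b") becomes COMPARE_OP 9 NOP ("a is not b"). */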
        /* Replace LOAD_GLOBAL/LOAD_NAME None
           with    LOAD_CONST None */
        case LOAD_NAME:
        case LOAD_GLOBAL:
            j = GETARG(codestr, i);
            name = PyString_AsString(PyTuple_GET_ITEM(names, j));
            if (name == NULL || strcmp(name, "None") != 0)
                continue;
            for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
                if (PyList_GET_ITEM(consts, j) == Py_None)
                    break;
            }
            if (j == PyList_GET_SIZE(consts)) {
                if (PyList_Append(consts, Py_None) == -1)
                    goto exitUnchanged;
            }
            assert(PyList_GET_ITEM(consts, j) == Py_None);
            codestr[i] = LOAD_CONST;
            SETARG(codestr, i, j);
            cumlc = lastlc + 1;
            break;
        /* Skip over LOAD_CONST trueconst
           POP_JUMP_IF_FALSE xx.  This improves
           "while 1" performance. */
        case LOAD_CONST:
            cumlc = lastlc + 1;
            j = GETARG(codestr, i);
            if (codestr[i+3] != POP_JUMP_IF_FALSE ||
                !ISBASICBLOCK(blocks,i,6) ||
                !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
                continue;
            memset(codestr+i, NOP, 6);
            cumlc = 0;
            break;
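
        /* Illustrative: the loop test in "while 1:" compiles to
               LOAD_CONST 1; POP_JUMP_IF_FALSE exit
           and since the constant is true the jump can never be taken,
           so all six bytes are NOPed out above. */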
        /* Try to fold tuples of constants (includes a case for lists
           which are only used for "in" and "not in" tests).
           Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
           Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
           Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
        case BUILD_TUPLE:
        case BUILD_LIST:
            j = GETARG(codestr, i);
            h = i - 3 * j;
            if (h >= 0 &&
                j <= lastlc &&
                ((opcode == BUILD_TUPLE &&
                  ISBASICBLOCK(blocks, h, 3*(j+1))) ||
                 (opcode == BUILD_LIST &&
                  codestr[i+3]==COMPARE_OP &&
                  ISBASICBLOCK(blocks, h, 3*(j+2)) &&
                  (GETARG(codestr,i+3)==6 ||
                   GETARG(codestr,i+3)==7))) &&
                tuple_of_constants(&codestr[h], j, consts)) {
                assert(codestr[i] == LOAD_CONST);
                cumlc = 1;
                break;
            }
            if (codestr[i+3] != UNPACK_SEQUENCE ||
                !ISBASICBLOCK(blocks,i,6) ||
                j != GETARG(codestr, i+3))
                continue;
            if (j == 1) {
                memset(codestr+i, NOP, 6);
            } else if (j == 2) {
                codestr[i] = ROT_TWO;
                memset(codestr+i+1, NOP, 5);
            } else if (j == 3) {
                codestr[i] = ROT_THREE;
                codestr[i+1] = ROT_TWO;
                memset(codestr+i+2, NOP, 4);
            }
            break;
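
        /* Illustrative (hypothetical names): "a, b = b, a" compiles to
               LOAD b; LOAD a; BUILD_TUPLE 2; UNPACK_SEQUENCE 2;
               STORE a; STORE b
           and the j == 2 branch above replaces the middle six bytes with
               ROT_TWO; NOP; NOP; NOP; NOP; NOP
           so no tuple is ever allocated. */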
        /* Fold binary ops on constants.
           LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */
        case BINARY_POWER:
        case BINARY_MULTIPLY:
        case BINARY_TRUE_DIVIDE:
        case BINARY_FLOOR_DIVIDE:
        case BINARY_MODULO:
        case BINARY_ADD:
        case BINARY_SUBTRACT:
        case BINARY_SUBSCR:
        case BINARY_LSHIFT:
        case BINARY_RSHIFT:
        case BINARY_AND:
        case BINARY_XOR:
        case BINARY_OR:
            if (lastlc >= 2 &&
                ISBASICBLOCK(blocks, i-6, 7) &&
                fold_binops_on_constants(&codestr[i-6], consts)) {
                i -= 2;
                assert(codestr[i] == LOAD_CONST);
                cumlc = 1;
            }
            break;
        /* Fold unary ops on constants.
           LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c1) */
        case UNARY_NEGATIVE:
        case UNARY_CONVERT:
        case UNARY_INVERT:
            if (lastlc >= 1 &&
                ISBASICBLOCK(blocks, i-3, 4) &&
                fold_unaryops_on_constants(&codestr[i-3], consts)) {
                i -= 2;
                assert(codestr[i] == LOAD_CONST);
                cumlc = 1;
            }
            break;
        /* Simplify conditional jump to conditional jump where the
           result of the first test implies the success of a similar
           test or the failure of the opposite test.
           Arises in code like:
               "if a and b:"
               "if a or b:"
               "a and b or c"
               "(a and b) and c"
           x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
              -->  x:JUMP_IF_FALSE_OR_POP z
           x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
              -->  x:POP_JUMP_IF_FALSE y+3
           where y+3 is the instruction following the second test.
        */
        case JUMP_IF_FALSE_OR_POP:
        case JUMP_IF_TRUE_OR_POP:
            tgt = GETJUMPTGT(codestr, i);
            j = codestr[tgt];
            if (CONDITIONAL_JUMP(j)) {
                /* NOTE: all possible jumps here are absolute! */
                if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
                    /* The second jump will be taken iff the first is. */
                    tgttgt = GETJUMPTGT(codestr, tgt);
                    /* The current opcode inherits its target's
                       stack behaviour */
                    codestr[i] = j;
                    SETARG(codestr, i, tgttgt);
                    goto reoptimize_current;
                } else {
                    /* The second jump is not taken if the first is (so
                       jump past it), and all conditional jumps pop their
                       argument when they're not taken (so change the
                       first jump to pop its argument when it's taken). */
                    if (JUMPS_ON_TRUE(opcode))
                        codestr[i] = POP_JUMP_IF_TRUE;
                    else
                        codestr[i] = POP_JUMP_IF_FALSE;
                    SETARG(codestr, i, (tgt + 3));
                    goto reoptimize_current;
                }
            }
            /* Intentional fallthrough */
        /* Replace jumps to unconditional jumps */
        case POP_JUMP_IF_FALSE:
        case POP_JUMP_IF_TRUE:
        case FOR_ITER:
        case JUMP_FORWARD:
        case JUMP_ABSOLUTE:
        case CONTINUE_LOOP:
        case SETUP_LOOP:
        case SETUP_EXCEPT:
        case SETUP_FINALLY:
            tgt = GETJUMPTGT(codestr, i);
            /* Replace a JUMP_* to a RETURN with just a RETURN */
            if (UNCONDITIONAL_JUMP(opcode) &&
                codestr[tgt] == RETURN_VALUE) {
                codestr[i] = RETURN_VALUE;
                memset(codestr+i+1, NOP, 2);
                continue;
            }
            if (!UNCONDITIONAL_JUMP(codestr[tgt]))
                continue;
            tgttgt = GETJUMPTGT(codestr, tgt);
            if (opcode == JUMP_FORWARD)     /* JMP_ABS can go backwards */
                opcode = JUMP_ABSOLUTE;
            if (!ABSOLUTE_JUMP(opcode))
                tgttgt -= i + 3;            /* Calc relative jump addr */
            if (tgttgt < 0)                 /* No backward relative jumps */
                continue;
            codestr[i] = opcode;
            SETARG(codestr, i, tgttgt);
            break;
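
        /* Illustrative jump threading: if the instruction at x is
           JUMP_FORWARD to y, and y holds JUMP_ABSOLUTE z, then x is
           retargeted straight to z.  JUMP_FORWARD is first promoted to
           JUMP_ABSOLUTE because the combined hop may point backwards. */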
        case EXTENDED_ARG:
            goto exitUnchanged;

        /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
        /* Remove unreachable JUMPs after RETURN */
        case RETURN_VALUE:
            if (i+4 >= codelen)
                continue;
            if (codestr[i+4] == RETURN_VALUE &&
                ISBASICBLOCK(blocks,i,5))
                memset(codestr+i+1, NOP, 4);
            else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
                     ISBASICBLOCK(blocks,i,4))
                memset(codestr+i+1, NOP, 3);
            break;
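
        /* Illustrative: a function ending in an explicit "return x" still
           gets the compiler's implicit epilogue, producing
               RETURN_VALUE; LOAD_CONST None; RETURN_VALUE
           within one basic block; the first branch above NOPs the four
           unreachable trailing bytes. */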
        }
    }

    /* Fixup the line number table */
    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
        addrmap[i] = i - nops;
        if (codestr[i] == NOP)
            nops++;
    }
    cum_orig_line = 0;
    last_line = 0;
    for (i=0 ; i < tabsiz ; i+=2) {
        cum_orig_line += lineno[i];
        new_line = addrmap[cum_orig_line];
        assert(new_line - last_line < 255);
        lineno[i] = (unsigned char)(new_line - last_line);
        last_line = new_line;
    }
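
    /* lnotab layout assumed above: a sequence of unsigned byte pairs,
       each holding a bytecode-offset increment followed by a line-number
       increment.  Only the offset bytes need rewriting here; e.g. an
       entry (6, 1) whose six bytes of code shrank to two becomes (2, 1). */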

    /* Remove NOPs and fixup jump targets */
    for (i=0, h=0 ; i<codelen ; ) {
        opcode = codestr[i];
        switch (opcode) {
        case NOP:
            i++;
            continue;

        case JUMP_ABSOLUTE:
        case CONTINUE_LOOP:
        case POP_JUMP_IF_FALSE:
        case POP_JUMP_IF_TRUE:
        case JUMP_IF_FALSE_OR_POP:
        case JUMP_IF_TRUE_OR_POP:
            j = addrmap[GETARG(codestr, i)];
            SETARG(codestr, i, j);
            break;

        case FOR_ITER:
        case JUMP_FORWARD:
        case SETUP_LOOP:
        case SETUP_EXCEPT:
        case SETUP_FINALLY:
            j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
            SETARG(codestr, i, j);
            break;
        }
        adj = CODESIZE(opcode);
        while (adj--)
            codestr[h++] = codestr[i++];
    }
    assert(h + nops == codelen);

    code = PyString_FromStringAndSize((char *)codestr, h);
    PyMem_Free(addrmap);
    PyMem_Free(codestr);
    PyMem_Free(blocks);
    return code;

 exitUnchanged:
    if (blocks != NULL)
        PyMem_Free(blocks);
    if (addrmap != NULL)
        PyMem_Free(addrmap);
    if (codestr != NULL)
        PyMem_Free(codestr);
    Py_INCREF(code);
    return code;
}