/* Peephole optimizations for bytecode compiler. */

#include "Python.h"
#include "Python-ast.h"
#include "node.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"
#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op)  (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
/* Wrapped in do/while so the two stores expand to a single statement
   even in an unbraced if/else body. */
#define SETARG(arr, i, val) do { arr[i+2] = val>>8; arr[i+1] = val & 255; } while (0)
#define CODESIZE(op)  (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
    (blocks[start]==blocks[start+bytes-1])
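/* Example of the layout these macros assume (CPython 2.x bytecode): an
   instruction with an argument occupies three bytes -- the opcode followed
   by a little-endian 16-bit argument.  Given arr[i]==LOAD_CONST,
   arr[i+1]==0x02 and arr[i+2]==0x01, GETARG(arr, i) yields
   (0x01<<8) + 0x02 == 258, and SETARG(arr, i, 258) writes the same two
   argument bytes back. */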
/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
   with    LOAD_CONST (c1, c2, ... cn).
   The consts table must still be in list form so that the
   new constant (c1, c2, ... cn) can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Bails out with no change if one or more of the LOAD_CONSTs is missing.
   Also works for BUILD_LIST when followed by an "in" or "not in" test.
*/
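/* Example of the transformation, assuming three constants at consts
   indices 0..2 with the folded tuple appended at index 3:

       LOAD_CONST 0   LOAD_CONST 1   LOAD_CONST 2   BUILD_TUPLE 3
   --> NOP NOP NOP    NOP NOP NOP    NOP NOP NOP    LOAD_CONST 3

   The nine NOP bytes are squeezed out later by the NOP-removal pass in
   PyCode_Optimize. */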
static int
tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
{
    PyObject *newconst, *constant;
    Py_ssize_t i, arg, len_consts;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
    assert(GETARG(codestr, (n*3)) == n);
    for (i=0 ; i<n ; i++)
        assert(codestr[i*3] == LOAD_CONST);
    /* Buildup new tuple of constants */
    newconst = PyTuple_New(n);
    if (newconst == NULL)
        return 0;
    len_consts = PyList_GET_SIZE(consts);
    for (i=0 ; i<n ; i++) {
        arg = GETARG(codestr, (i*3));
        assert(arg < len_consts);
        constant = PyList_GET_ITEM(consts, arg);
        Py_INCREF(constant);
        PyTuple_SET_ITEM(newconst, i, constant);
    }

    /* Append folded constant onto consts */
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOPs over old LOAD_CONSTS and
       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
    memset(codestr, NOP, n*3);
    codestr[n*3] = LOAD_CONST;
    SETARG(codestr, (n*3), len_consts);
    return 1;
}
/* Replace LOAD_CONST c1, LOAD_CONST c2, BINOP
   with    LOAD_CONST binop(c1,c2)
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Abandons the transformation if the folding fails (i.e. 1+'a').
   If the new constant is a sequence, only folds when the size
   is below a threshold value.  That keeps pyc files from
   becoming large in the presence of code like:  (None,)*1000.
*/
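/* Example, assuming the two operand constants sit at consts indices 0 and 1
   and the folded result is appended at index 2:

       LOAD_CONST 0   LOAD_CONST 1   BINARY_ADD     (7 bytes)
   --> NOP NOP NOP NOP               LOAD_CONST 2   (same 7 bytes)

   The first four bytes become NOPs and the LOAD_CONST of the folded value
   is written over bytes 4..6. */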
static int
fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst, *v, *w;
    Py_ssize_t len_consts, size;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);
    assert(codestr[3] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
    opcode = codestr[6];
    switch (opcode) {
        case BINARY_POWER:
            newconst = PyNumber_Power(v, w, Py_None);
            break;
        case BINARY_MULTIPLY:
            newconst = PyNumber_Multiply(v, w);
            break;
        case BINARY_DIVIDE:
            /* Cannot fold this operation statically since
               the result can depend on the run-time presence
               of the -Qnew flag */
            return 0;
        case BINARY_TRUE_DIVIDE:
            newconst = PyNumber_TrueDivide(v, w);
            break;
        case BINARY_FLOOR_DIVIDE:
            newconst = PyNumber_FloorDivide(v, w);
            break;
        case BINARY_MODULO:
            newconst = PyNumber_Remainder(v, w);
            break;
        case BINARY_ADD:
            newconst = PyNumber_Add(v, w);
            break;
        case BINARY_SUBTRACT:
            newconst = PyNumber_Subtract(v, w);
            break;
        case BINARY_SUBSCR:
            newconst = PyObject_GetItem(v, w);
            break;
        case BINARY_LSHIFT:
            newconst = PyNumber_Lshift(v, w);
            break;
        case BINARY_RSHIFT:
            newconst = PyNumber_Rshift(v, w);
            break;
        case BINARY_AND:
            newconst = PyNumber_And(v, w);
            break;
        case BINARY_XOR:
            newconst = PyNumber_Xor(v, w);
            break;
        case BINARY_OR:
            newconst = PyNumber_Or(v, w);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                 "unexpected binary operation %d on a constant",
                     opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }
    size = PyObject_Size(newconst);
    if (size == -1)
        PyErr_Clear();
    else if (size > 20) {
        Py_DECREF(newconst);
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
    memset(codestr, NOP, 4);
    codestr[4] = LOAD_CONST;
    SETARG(codestr, 4, len_consts);
    return 1;
}
static int
fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst=NULL, *v;
    Py_ssize_t len_consts;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    opcode = codestr[3];
    switch (opcode) {
        case UNARY_NEGATIVE:
            /* Preserve the sign of -0.0 */
            if (PyObject_IsTrue(v) == 1)
                newconst = PyNumber_Negative(v);
            break;
        case UNARY_CONVERT:
            newconst = PyObject_Repr(v);
            break;
        case UNARY_INVERT:
            newconst = PyNumber_Invert(v);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                 "unexpected unary operation %d on a constant",
                     opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP LOAD_CONST newconst */
    codestr[0] = NOP;
    codestr[1] = LOAD_CONST;
    SETARG(codestr, 1, len_consts);
    return 1;
}
static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
    unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
    int i, j, opcode, blockcnt = 0;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, 0, len*sizeof(int));

    /* Mark labels in the first pass */
    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
        opcode = code[i];
        switch (opcode) {
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = GETJUMPTGT(code, i);
                blocks[j] = 1;
                break;
        }
    }
    /* Build block numbers in the second pass */
    for (i=0 ; i<len ; i++) {
        blockcnt += blocks[i];          /* increment blockcnt over labels */
        blocks[i] = blockcnt;
    }
    return blocks;
}
/* Perform basic peephole optimizations to components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code
   containing extended arguments or that has a length over 32,700.  That
   allows us to avoid overflow and sign issues.  Likewise, it bails when
   the lineno table has complex encoding for gaps >= 255.

   Optimizations are restricted to simple transformations occurring within a
   single basic block.  All transformations keep the code size the same or
   smaller.  For those that reduce size, the gaps are initially filled with
   NOPs.  Later those NOPs are removed and the jump addresses retargeted in
   a single pass.  Line numbering is adjusted accordingly. */
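/* Illustration of the address-map bookkeeping used by that single pass
   (values assumed): addrmap[i] is i minus the number of NOP bytes before
   offset i.  If bytes 3..5 became NOPs, then addrmap[6] == 3, so an
   absolute jump argument t is rewritten as addrmap[t], and a relative
   jump at offset i with argument arg becomes
   addrmap[i + 3 + arg] - addrmap[i] - 3. */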
PyObject *
PyCode_Optimize(PyObject *code, PyObject *consts, PyObject *names,
                PyObject *lineno_obj)
{
    Py_ssize_t i, j, codelen;
    int nops, h, adj;
    int tgt, tgttgt, opcode;
    unsigned char *codestr = NULL;
    unsigned char *lineno;
    int *addrmap = NULL;
    int new_line, cum_orig_line, last_line, tabsiz;
    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
    unsigned int *blocks = NULL;
    char *name;
    /* Bail out if an exception is set */
    if (PyErr_Occurred())
        goto exitError;

    /* Bypass optimization when the lineno table is too complex */
    assert(PyString_Check(lineno_obj));
    lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
    tabsiz = PyString_GET_SIZE(lineno_obj);
    if (memchr(lineno, 255, tabsiz) != NULL)
        goto exitUnchanged;

    /* Avoid situations where jump retargeting could overflow */
    assert(PyString_Check(code));
    codelen = PyString_GET_SIZE(code);
    if (codelen > 32700)
        goto exitUnchanged;

    /* Make a modifiable copy of the code string */
    codestr = (unsigned char *)PyMem_Malloc(codelen);
    if (codestr == NULL)
        goto exitError;
    codestr = (unsigned char *)memcpy(codestr,
                                      PyString_AS_STRING(code), codelen);

    /* Verify that RETURN_VALUE terminates the codestring.  This allows
       the various transformation patterns to look ahead several
       instructions without additional checks to make sure they are not
       looking beyond the end of the code string.
    */
    if (codestr[codelen-1] != RETURN_VALUE)
        goto exitUnchanged;

    /* Mapping to new jump targets after NOPs are removed */
    addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
    if (addrmap == NULL)
        goto exitError;

    blocks = markblocks(codestr, codelen);
    if (blocks == NULL)
        goto exitError;
    assert(PyList_Check(consts));

    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
      reoptimize_current:
        opcode = codestr[i];

        lastlc = cumlc;
        cumlc = 0;

        switch (opcode) {
            /* Replace UNARY_NOT POP_JUMP_IF_FALSE
               with    POP_JUMP_IF_TRUE */
            case UNARY_NOT:
                if (codestr[i+1] != POP_JUMP_IF_FALSE
                    || !ISBASICBLOCK(blocks,i,4))
                    continue;
                j = GETARG(codestr, i+1);
                codestr[i] = POP_JUMP_IF_TRUE;
                SETARG(codestr, i, j);
                codestr[i+3] = NOP;
                goto reoptimize_current;
            /* not a is b -->  a is not b
               not a in b -->  a not in b
               not a is not b -->  a is b
               not a not in b -->  a in b
            */
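            /* The four affected comparisons occupy consecutive cmp_op
               slots -- assuming the CPython 2.x numbering: 6 'in',
               7 'not in', 8 'is', 9 'is not' -- and each pair differs
               only in its low bit, so the argument is toggled with j^1
               below. */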
            case COMPARE_OP:
                j = GETARG(codestr, i);
                if (j < 6  ||  j > 9  ||
                    codestr[i+3] != UNARY_NOT  ||
                    !ISBASICBLOCK(blocks,i,4))
                    continue;
                SETARG(codestr, i, (j^1));
                codestr[i+3] = NOP;
                break;
            /* Replace LOAD_GLOBAL/LOAD_NAME None
               with    LOAD_CONST None */
            case LOAD_NAME:
            case LOAD_GLOBAL:
                j = GETARG(codestr, i);
                name = PyString_AsString(PyTuple_GET_ITEM(names, j));
                if (name == NULL  ||  strcmp(name, "None") != 0)
                    continue;
                for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
                    if (PyList_GET_ITEM(consts, j) == Py_None)
                        break;
                }
                if (j == PyList_GET_SIZE(consts)) {
                    if (PyList_Append(consts, Py_None) == -1)
                        goto exitError;
                }
                assert(PyList_GET_ITEM(consts, j) == Py_None);
                codestr[i] = LOAD_CONST;
                SETARG(codestr, i, j);
                cumlc = lastlc + 1;
                break;
            /* Skip over LOAD_CONST trueconst
               POP_JUMP_IF_FALSE xx.  This improves
               "while 1" performance. */
            case LOAD_CONST:
                cumlc = lastlc + 1;
                j = GETARG(codestr, i);
                if (codestr[i+3] != POP_JUMP_IF_FALSE  ||
                    !ISBASICBLOCK(blocks,i,6)  ||
                    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
                    continue;
                memset(codestr+i, NOP, 6);
                cumlc = 0;
                break;
            /* Try to fold tuples of constants (includes a case for lists
               which are only used for "in" and "not in" tests).
               Skip over  BUILD_SEQN 1 UNPACK_SEQN 1.
               Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
               Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
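            /* Example of the j == 2 rewrite below: the sequence
                   BUILD_TUPLE 2   UNPACK_SEQUENCE 2     (6 bytes)
               builds a pair and immediately unpacks it, which is just a
               swap of the two stack items, so it becomes
                   ROT_TWO   NOP NOP NOP NOP NOP         (6 bytes). */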
            case BUILD_TUPLE:
            case BUILD_LIST:
                j = GETARG(codestr, i);
                h = i - 3 * j;
                if (h >= 0 &&
                    j <= lastlc &&
                    ((opcode == BUILD_TUPLE &&
                      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
                     (opcode == BUILD_LIST &&
                      codestr[i+3]==COMPARE_OP &&
                      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
                      (GETARG(codestr,i+3)==6 ||
                       GETARG(codestr,i+3)==7))) &&
                    tuple_of_constants(&codestr[h], j, consts)) {
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                    break;
                }
                if (codestr[i+3] != UNPACK_SEQUENCE  ||
                    !ISBASICBLOCK(blocks,i,6) ||
                    j != GETARG(codestr, i+3))
                    continue;
                if (j == 1) {
                    memset(codestr+i, NOP, 6);
                } else if (j == 2) {
                    codestr[i] = ROT_TWO;
                    memset(codestr+i+1, NOP, 5);
                } else if (j == 3) {
                    codestr[i] = ROT_THREE;
                    codestr[i+1] = ROT_TWO;
                    memset(codestr+i+2, NOP, 4);
                } else
                    continue;
                break;
            /* Fold binary ops on constants.
               LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */
            case BINARY_POWER:
            case BINARY_MULTIPLY:
            case BINARY_TRUE_DIVIDE:
            case BINARY_FLOOR_DIVIDE:
            case BINARY_MODULO:
            case BINARY_ADD:
            case BINARY_SUBTRACT:
            case BINARY_SUBSCR:
            case BINARY_LSHIFT:
            case BINARY_RSHIFT:
            case BINARY_AND:
            case BINARY_XOR:
            case BINARY_OR:
                if (lastlc >= 2 &&
                    ISBASICBLOCK(blocks, i-6, 7) &&
                    fold_binops_on_constants(&codestr[i-6], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;
            /* Fold unary ops on constants.
               LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c1) */
            case UNARY_NEGATIVE:
            case UNARY_CONVERT:
            case UNARY_INVERT:
                if (lastlc >= 1 &&
                    ISBASICBLOCK(blocks, i-3, 4) &&
                    fold_unaryops_on_constants(&codestr[i-3], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;
            /* Simplify conditional jump to conditional jump where the
               result of the first test implies the success of a similar
               test or the failure of the opposite test.
               Arises in code like:
               "if a and b:"
               "if a or b:"
               "a and b or c"
               "(a and b) and c"
               x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
                  -->  x:JUMP_IF_FALSE_OR_POP z
               x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
                  -->  x:POP_JUMP_IF_FALSE y+3
               where y+3 is the instruction following the second test.
            */
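            /* Worked case, assuming x:JUMP_IF_FALSE_OR_POP y and
               y:JUMP_IF_FALSE_OR_POP z (e.g. from a chained "and"):
               if the jump at x is taken, the false value is left on the
               stack and the jump at y would be taken for the same reason,
               so x can be retargeted straight to z. */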
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                tgt = GETJUMPTGT(codestr, i);
                j = codestr[tgt];
                if (CONDITIONAL_JUMP(j)) {
                    /* NOTE: all possible jumps here are absolute! */
                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
                        /* The second jump will be
                           taken iff the first is. */
                        tgttgt = GETJUMPTGT(codestr, tgt);
                        /* The current opcode inherits
                           its target's stack behaviour */
                        codestr[i] = j;
                        SETARG(codestr, i, tgttgt);
                        goto reoptimize_current;
                    } else {
                        /* The second jump is not taken
                           if the first is (so jump past
                           it), and all conditional
                           jumps pop their argument when
                           they're not taken (so change
                           the first jump to pop its
                           argument when it's taken). */
                        if (JUMPS_ON_TRUE(opcode))
                            codestr[i] = POP_JUMP_IF_TRUE;
                        else
                            codestr[i] = POP_JUMP_IF_FALSE;
                        SETARG(codestr, i, (tgt + 3));
                        goto reoptimize_current;
                    }
                }
                /* Intentional fallthrough */
            /* Replace jumps to unconditional jumps */
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                tgt = GETJUMPTGT(codestr, i);
                /* Replace JUMP_* to a RETURN into just a RETURN */
                if (UNCONDITIONAL_JUMP(opcode) &&
                    codestr[tgt] == RETURN_VALUE) {
                    codestr[i] = RETURN_VALUE;
                    memset(codestr+i+1, NOP, 2);
                    continue;
                }
                if (!UNCONDITIONAL_JUMP(codestr[tgt]))
                    continue;
                tgttgt = GETJUMPTGT(codestr, tgt);
                if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
                    opcode = JUMP_ABSOLUTE;
                if (!ABSOLUTE_JUMP(opcode))
                    tgttgt -= i + 3;        /* Calc relative jump addr */
                if (tgttgt < 0)             /* No backward relative jumps */
                    continue;
                codestr[i] = opcode;
                SETARG(codestr, i, tgttgt);
                break;
            case EXTENDED_ARG:
                goto exitUnchanged;

            /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
            /* Remove unreachable JUMPs after RETURN */
            case RETURN_VALUE:
                if (i+4 >= codelen)
                    continue;
                if (codestr[i+4] == RETURN_VALUE &&
                    ISBASICBLOCK(blocks,i,5))
                    memset(codestr+i+1, NOP, 4);
                else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
                         ISBASICBLOCK(blocks,i,4))
                    memset(codestr+i+1, NOP, 3);
                break;
        }
    }
    /* Fixup linenotab */
    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
        addrmap[i] = i - nops;
        if (codestr[i] == NOP)
            nops++;
    }
    cum_orig_line = 0;
    last_line = 0;
    for (i=0 ; i < tabsiz ; i+=2) {
        cum_orig_line += lineno[i];
        new_line = addrmap[cum_orig_line];
        assert (new_line - last_line < 255);
        lineno[i] =((unsigned char)(new_line - last_line));
        last_line = new_line;
    }
    /* Remove NOPs and fixup jump targets */
    for (i=0, h=0 ; i<codelen ; ) {
        opcode = codestr[i];
        switch (opcode) {
            case NOP:
                i++;
                continue;

            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                j = addrmap[GETARG(codestr, i)];
                SETARG(codestr, i, j);
                break;

            case FOR_ITER:
            case JUMP_FORWARD:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
                SETARG(codestr, i, j);
                break;
        }
        adj = CODESIZE(opcode);
        while (adj--)
            codestr[h++] = codestr[i++];
    }
    assert(h + nops == codelen);

    code = PyString_FromStringAndSize((char *)codestr, h);
    PyMem_Free(addrmap);
    PyMem_Free(codestr);
    PyMem_Free(blocks);
    return code;

 exitError:
    code = NULL;

 exitUnchanged:
    if (blocks != NULL)
        PyMem_Free(blocks);
    if (addrmap != NULL)
        PyMem_Free(addrmap);
    if (codestr != NULL)
        PyMem_Free(codestr);
    Py_XINCREF(code);
    return code;
}