Commit: Catch situations where currentframe() returns None. See SF patch #1447410.
[python.git] / Python / compile.c
blob baf3989a9d2a434473eb531ead23f9ebb4092ae4
1 /*
2 * This file compiles an abstract syntax tree (AST) into Python bytecode.
4 * The primary entry point is PyAST_Compile(), which returns a
5 * PyCodeObject. The compiler makes several passes to build the code
6 * object:
7 * 1. Checks for future statements. See future.c
8 * 2. Builds a symbol table. See symtable.c.
9 * 3. Generate code for basic blocks. See compiler_mod() in this file.
10 * 4. Assemble the basic blocks into final code. See assemble() in
11 * this file.
13 * Note that compiler_mod() suggests module, but the module ast type
14 * (mod_ty) has cases for expressions and interactive statements.
16 * CAUTION: The VISIT_* macros abort the current function when they
17 * encounter a problem. So don't invoke them when there is memory
18 * which needs to be released. Code blocks are OK, as the compiler
19 * structure takes care of releasing those.
22 #include "Python.h"
24 #include "Python-ast.h"
25 #include "node.h"
26 #include "pyarena.h"
27 #include "ast.h"
28 #include "code.h"
29 #include "compile.h"
30 #include "symtable.h"
31 #include "opcode.h"
/* Global optimization level (-O command-line flag); 0 means no
   optimization.  Read by the compiler when deciding e.g. whether
   to emit asserts. */
33 int Py_OptimizeFlag = 0;
36 ISSUES:
38 opcode_stack_effect() function should be reviewed since stack depth bugs
39 could be really hard to find later.
41 Dead code is being generated (i.e. after unconditional jumps).
42 XXX(nnorwitz): not sure this is still true
/* Initial allocation sizes; the corresponding arrays grow on demand. */
45 #define DEFAULT_BLOCK_SIZE 16
46 #define DEFAULT_BLOCKS 8
47 #define DEFAULT_CODE_SIZE 128
48 #define DEFAULT_LNOTAB_SIZE 16
/* One IR instruction, as emitted by the compiler_addop* helpers and
   later assembled into the final bytecode string. */
50 struct instr {
51 unsigned i_jabs : 1; /* argument is an absolute bytecode offset */
52 unsigned i_jrel : 1; /* argument is a relative bytecode offset */
53 unsigned i_hasarg : 1; /* opcode takes a 16-bit argument */
54 unsigned char i_opcode;
55 int i_oparg;
56 struct basicblock_ *i_target; /* target block (if jump instruction) */
57 int i_lineno; /* source line this instruction was generated for */
/* A basic block of the compiler's IR: a straight-line run of
   instructions ending (at most) in one jump.  Blocks are linked two
   ways: b_list chains every block of a unit (allocation order, used
   for freeing), while b_next follows normal control flow. */
60 typedef struct basicblock_ {
61 /* next block in the list of blocks for a unit (don't confuse with
62 * b_next) */
63 struct basicblock_ *b_list;
64 /* number of instructions used */
65 int b_iused;
66 /* length of instruction array (b_instr) */
67 int b_ialloc;
68 /* pointer to an array of instructions, initially NULL */
69 struct instr *b_instr;
70 /* If b_next is non-NULL, it is a pointer to the next
71 block reached by normal control flow. */
72 struct basicblock_ *b_next;
73 /* b_seen is used to perform a DFS of basicblocks. */
74 unsigned b_seen : 1;
75 /* b_return is true if a RETURN_VALUE opcode is inserted. */
76 unsigned b_return : 1;
77 /* depth of stack upon entry of block, computed by stackdepth() */
78 int b_startdepth;
79 /* instruction offset for block, computed by assemble_jump_offsets() */
80 int b_offset;
81 } basicblock;
83 /* fblockinfo tracks the current frame block.
85 A frame block is used to handle loops, try/except, and try/finally.
86 It's called a frame block to distinguish it from a basic block in the
87 compiler IR.
/* LOOP = for/while body, EXCEPT = try of try/except,
   FINALLY_TRY = try of try/finally, FINALLY_END = the finally suite. */
90 enum fblocktype { LOOP, EXCEPT, FINALLY_TRY, FINALLY_END };
92 struct fblockinfo {
93 enum fblocktype fb_type;
94 basicblock *fb_block; /* block to jump to on break/continue/cleanup */
97 /* The following items change on entry and exit of code blocks.
98 They must be saved and restored when returning to a block.
100 struct compiler_unit {
101 PySTEntryObject *u_ste; /* symbol table entry for this block */
103 PyObject *u_name; /* name of the function/class/module */
104 /* The following fields are dicts that map objects to
105 the index of them in co_XXX. The index is used as
106 the argument for opcodes that refer to those collections.
108 PyObject *u_consts; /* all constants */
109 PyObject *u_names; /* all names */
110 PyObject *u_varnames; /* local variables */
111 PyObject *u_cellvars; /* cell variables */
112 PyObject *u_freevars; /* free variables */
114 PyObject *u_private; /* for private name mangling */
116 int u_argcount; /* number of arguments for block */
117 basicblock *u_blocks; /* pointer to list of blocks */
118 basicblock *u_curblock; /* pointer to current block */
119 int u_tmpname; /* temporary variables for list comps */
121 int u_nfblocks; /* number of frame blocks currently pushed */
122 struct fblockinfo u_fblock[CO_MAXBLOCKS];
124 int u_firstlineno; /* the first lineno of the block */
125 int u_lineno; /* the lineno for the current stmt */
126 bool u_lineno_set; /* boolean to indicate whether instr
127 has been generated with current lineno */
130 /* This struct captures the global state of a compilation.
132 The u pointer points to the current compilation unit, while units
133 for enclosing blocks are stored in c_stack. The u and c_stack are
134 managed by compiler_enter_scope() and compiler_exit_scope().
137 struct compiler {
138 const char *c_filename; /* file being compiled (for error messages) */
139 struct symtable *c_st; /* symbol table built by PySymtable_Build() */
140 PyFutureFeatures *c_future; /* pointer to module's __future__ */
141 PyCompilerFlags *c_flags;
143 int c_interactive; /* true if compiling interactive input */
144 int c_nestlevel; /* current scope nesting depth */
146 struct compiler_unit *u; /* compiler state for current block */
147 PyObject *c_stack; /* Python list holding compiler_unit ptrs */
148 char *c_encoding; /* source encoding (a borrowed reference) */
149 PyArena *c_arena; /* pointer to memory allocation arena */
/* State for the final assembly pass: flattens the basic blocks into a
   bytecode string plus the co_lnotab line-number table. */
152 struct assembler {
153 PyObject *a_bytecode; /* string containing bytecode */
154 int a_offset; /* offset into bytecode */
155 int a_nblocks; /* number of reachable blocks */
156 basicblock **a_postorder; /* list of blocks in dfs postorder */
157 PyObject *a_lnotab; /* string containing lnotab */
158 int a_lnotab_off; /* offset into lnotab */
159 int a_lineno; /* last lineno of emitted instruction */
160 int a_lineno_off; /* bytecode offset of last lineno */
163 static int compiler_enter_scope(struct compiler *, identifier, void *, int);
164 static void compiler_free(struct compiler *);
165 static basicblock *compiler_new_block(struct compiler *);
166 static int compiler_next_instr(struct compiler *, basicblock *);
167 static int compiler_addop(struct compiler *, int);
168 static int compiler_addop_o(struct compiler *, int, PyObject *, PyObject *);
169 static int compiler_addop_i(struct compiler *, int, int);
170 static int compiler_addop_j(struct compiler *, int, basicblock *, int);
171 static basicblock *compiler_use_new_block(struct compiler *);
172 static int compiler_error(struct compiler *, const char *);
173 static int compiler_nameop(struct compiler *, identifier, expr_context_ty);
175 static PyCodeObject *compiler_mod(struct compiler *, mod_ty);
176 static int compiler_visit_stmt(struct compiler *, stmt_ty);
177 static int compiler_visit_keyword(struct compiler *, keyword_ty);
178 static int compiler_visit_expr(struct compiler *, expr_ty);
179 static int compiler_augassign(struct compiler *, stmt_ty);
180 static int compiler_visit_slice(struct compiler *, slice_ty,
181 expr_context_ty);
183 static int compiler_push_fblock(struct compiler *, enum fblocktype,
184 basicblock *);
185 static void compiler_pop_fblock(struct compiler *, enum fblocktype,
186 basicblock *);
188 static int inplace_binop(struct compiler *, operator_ty);
189 static int expr_constant(expr_ty e);
191 static int compiler_with(struct compiler *, stmt_ty);
193 static PyCodeObject *assemble(struct compiler *, int addNone);
194 static PyObject *__doc__;
196 PyObject *
197 _Py_Mangle(PyObject *private, PyObject *ident)
199 /* Name mangling: __private becomes _classname__private.
200 This is independent from how the name is used. */
201 const char *p, *name = PyString_AsString(ident);
202 char *buffer;
203 size_t nlen, plen;
204 if (private == NULL || name == NULL || name[0] != '_' ||
205 name[1] != '_') {
206 Py_INCREF(ident);
207 return ident;
209 p = PyString_AsString(private);
210 nlen = strlen(name);
211 if (name[nlen-1] == '_' && name[nlen-2] == '_') {
212 Py_INCREF(ident);
213 return ident; /* Don't mangle __whatever__ */
215 /* Strip leading underscores from class name */
216 while (*p == '_')
217 p++;
218 if (*p == '\0') {
219 Py_INCREF(ident);
220 return ident; /* Don't mangle if class is just underscores */
222 plen = strlen(p);
223 ident = PyString_FromStringAndSize(NULL, 1 + nlen + plen);
224 if (!ident)
225 return 0;
226 /* ident = "_" + p[:plen] + name # i.e. 1+plen+nlen bytes */
227 buffer = PyString_AS_STRING(ident);
228 buffer[0] = '_';
229 strncpy(buffer+1, p, plen);
230 strcpy(buffer+1+plen, name);
231 return ident;
234 static int
235 compiler_init(struct compiler *c)
237 memset(c, 0, sizeof(struct compiler));
239 c->c_stack = PyList_New(0);
240 if (!c->c_stack)
241 return 0;
243 return 1;
246 PyCodeObject *
247 PyAST_Compile(mod_ty mod, const char *filename, PyCompilerFlags *flags,
248 PyArena *arena)
250 struct compiler c;
251 PyCodeObject *co = NULL;
252 PyCompilerFlags local_flags;
253 int merged;
255 if (!__doc__) {
256 __doc__ = PyString_InternFromString("__doc__");
257 if (!__doc__)
258 return NULL;
261 if (!compiler_init(&c))
262 return NULL;
263 c.c_filename = filename;
264 c.c_arena = arena;
265 c.c_future = PyFuture_FromAST(mod, filename);
266 if (c.c_future == NULL)
267 goto finally;
268 if (!flags) {
269 local_flags.cf_flags = 0;
270 flags = &local_flags;
272 merged = c.c_future->ff_features | flags->cf_flags;
273 c.c_future->ff_features = merged;
274 flags->cf_flags = merged;
275 c.c_flags = flags;
276 c.c_nestlevel = 0;
278 c.c_st = PySymtable_Build(mod, filename, c.c_future);
279 if (c.c_st == NULL) {
280 if (!PyErr_Occurred())
281 PyErr_SetString(PyExc_SystemError, "no symtable");
282 goto finally;
285 /* XXX initialize to NULL for now, need to handle */
286 c.c_encoding = NULL;
288 co = compiler_mod(&c, mod);
290 finally:
291 compiler_free(&c);
292 assert(co || PyErr_Occurred());
293 return co;
296 PyCodeObject *
297 PyNode_Compile(struct _node *n, const char *filename)
299 PyCodeObject *co = NULL;
300 PyArena *arena = PyArena_New();
301 mod_ty mod = PyAST_FromNode(n, NULL, filename, arena);
302 if (mod)
303 co = PyAST_Compile(mod, filename, NULL, arena);
304 PyArena_Free(arena);
305 return co;
308 static void
309 compiler_free(struct compiler *c)
311 if (c->c_st)
312 PySymtable_Free(c->c_st);
313 if (c->c_future)
314 PyMem_Free(c->c_future);
315 Py_DECREF(c->c_stack);
318 static PyObject *
319 list2dict(PyObject *list)
321 Py_ssize_t i, n;
322 PyObject *v, *k, *dict = PyDict_New();
324 n = PyList_Size(list);
325 for (i = 0; i < n; i++) {
326 v = PyInt_FromLong(i);
327 if (!v) {
328 Py_DECREF(dict);
329 return NULL;
331 k = PyList_GET_ITEM(list, i);
332 k = Py_BuildValue("(OO)", k, k->ob_type);
333 if (k == NULL || PyDict_SetItem(dict, k, v) < 0) {
334 Py_XDECREF(k);
335 Py_DECREF(v);
336 Py_DECREF(dict);
337 return NULL;
339 Py_DECREF(k);
340 Py_DECREF(v);
342 return dict;
345 /* Return new dict containing names from src that match scope(s).
347 src is a symbol table dictionary. If the scope of a name matches
348 either scope_type or flag is set, insert it into the new dict. The
349 values are integers, starting at offset and increasing by one for
350 each key.
353 static PyObject *
354 dictbytype(PyObject *src, int scope_type, int flag, int offset)
356 Py_ssize_t pos = 0, i = offset, scope;
357 PyObject *k, *v, *dest = PyDict_New();
359 assert(offset >= 0);
360 if (dest == NULL)
361 return NULL;
363 while (PyDict_Next(src, &pos, &k, &v)) {
364 /* XXX this should probably be a macro in symtable.h */
365 assert(PyInt_Check(v));
366 scope = (PyInt_AS_LONG(v) >> SCOPE_OFF) & SCOPE_MASK;
368 if (scope == scope_type || PyInt_AS_LONG(v) & flag) {
369 PyObject *tuple, *item = PyInt_FromLong(i);
370 if (item == NULL) {
371 Py_DECREF(dest);
372 return NULL;
374 i++;
375 tuple = Py_BuildValue("(OO)", k, k->ob_type);
376 if (!tuple || PyDict_SetItem(dest, tuple, item) < 0) {
377 Py_DECREF(item);
378 Py_DECREF(dest);
379 Py_XDECREF(tuple);
380 return NULL;
382 Py_DECREF(item);
383 Py_DECREF(tuple);
386 return dest;
389 /* Begin: Peephole optimizations ----------------------------------------- */
/* Decode the 16-bit little-endian argument of the opcode at index i. */
391 #define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
392 #define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
393 #define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP)
/* Resolve a jump at index i to its absolute bytecode target. */
394 #define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
/* NOTE(review): SETARG expands to two statements and is not wrapped in
   do { } while (0), so it must never be the body of an unbraced if. */
395 #define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
/* Instruction width in bytes: 3 with an argument, 1 without. */
396 #define CODESIZE(op) (HAS_ARG(op) ? 3 : 1)
/* True if [start, start+bytes) lies inside one basic block. */
397 #define ISBASICBLOCK(blocks, start, bytes) \
398 (blocks[start]==blocks[start+bytes-1])
400 /* Replace LOAD_CONST c1. LOAD_CONST c2 ... LOAD_CONST cn BUILD_TUPLE n
401 with LOAD_CONST (c1, c2, ... cn).
402 The consts table must still be in list form so that the
403 new constant (c1, c2, ... cn) can be appended.
404 Called with codestr pointing to the first LOAD_CONST.
405 Bails out with no change if one or more of the LOAD_CONSTs is missing.
406 Also works for BUILD_LIST when followed by an "in" or "not in" test.
   Returns 1 if the transformation was applied, 0 otherwise (including
   on memory errors, in which case an exception is set). */
408 static int
409 tuple_of_constants(unsigned char *codestr, int n, PyObject *consts)
411 PyObject *newconst, *constant;
412 Py_ssize_t i, arg, len_consts;
414 /* Pre-conditions */
415 assert(PyList_CheckExact(consts));
416 assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
417 assert(GETARG(codestr, (n*3)) == n);
418 for (i=0 ; i<n ; i++)
419 assert(codestr[i*3] == LOAD_CONST);
421 /* Buildup new tuple of constants */
422 newconst = PyTuple_New(n);
423 if (newconst == NULL)
424 return 0;
425 len_consts = PyList_GET_SIZE(consts);
426 for (i=0 ; i<n ; i++) {
427 arg = GETARG(codestr, (i*3));
428 assert(arg < len_consts);
429 constant = PyList_GET_ITEM(consts, arg);
430 Py_INCREF(constant);
431 PyTuple_SET_ITEM(newconst, i, constant);
434 /* Append folded constant onto consts */
435 if (PyList_Append(consts, newconst)) {
436 Py_DECREF(newconst);
437 return 0;
439 Py_DECREF(newconst);
441 /* Write NOPs over old LOAD_CONSTS and
442 add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
443 memset(codestr, NOP, n*3);
444 codestr[n*3] = LOAD_CONST;
/* len_consts is the index the new constant received from the append */
445 SETARG(codestr, (n*3), len_consts);
446 return 1;
449 /* Replace LOAD_CONST c1. LOAD_CONST c2 BINOP
450 with LOAD_CONST binop(c1,c2)
451 The consts table must still be in list form so that the
452 new constant can be appended.
453 Called with codestr pointing to the first LOAD_CONST.
454 Abandons the transformation if the folding fails (i.e. 1+'a').
455 If the new constant is a sequence, only folds when the size
456 is below a threshold value. That keeps pyc files from
457 becoming large in the presence of code like: (None,)*1000.
   Returns 1 if the 7-byte window was rewritten, 0 otherwise. */
459 static int
460 fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
462 PyObject *newconst, *v, *w;
463 Py_ssize_t len_consts, size;
464 int opcode;
466 /* Pre-conditions */
467 assert(PyList_CheckExact(consts));
468 assert(codestr[0] == LOAD_CONST);
469 assert(codestr[3] == LOAD_CONST);
471 /* Create new constant */
472 v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
473 w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
474 opcode = codestr[6];
475 switch (opcode) {
476 case BINARY_POWER:
477 newconst = PyNumber_Power(v, w, Py_None);
478 break;
479 case BINARY_MULTIPLY:
480 newconst = PyNumber_Multiply(v, w);
481 break;
482 case BINARY_DIVIDE:
483 /* Cannot fold this operation statically since
484 the result can depend on the run-time presence
485 of the -Qnew flag */
486 return 0;
487 case BINARY_TRUE_DIVIDE:
488 newconst = PyNumber_TrueDivide(v, w);
489 break;
490 case BINARY_FLOOR_DIVIDE:
491 newconst = PyNumber_FloorDivide(v, w);
492 break;
493 case BINARY_MODULO:
494 newconst = PyNumber_Remainder(v, w);
495 break;
496 case BINARY_ADD:
497 newconst = PyNumber_Add(v, w);
498 break;
499 case BINARY_SUBTRACT:
500 newconst = PyNumber_Subtract(v, w);
501 break;
502 case BINARY_SUBSCR:
503 newconst = PyObject_GetItem(v, w);
504 break;
505 case BINARY_LSHIFT:
506 newconst = PyNumber_Lshift(v, w);
507 break;
508 case BINARY_RSHIFT:
509 newconst = PyNumber_Rshift(v, w);
510 break;
511 case BINARY_AND:
512 newconst = PyNumber_And(v, w);
513 break;
514 case BINARY_XOR:
515 newconst = PyNumber_Xor(v, w);
516 break;
517 case BINARY_OR:
518 newconst = PyNumber_Or(v, w);
519 break;
520 default:
521 /* Called with an unknown opcode */
522 PyErr_Format(PyExc_SystemError,
523 "unexpected binary operation %d on a constant",
524 opcode);
525 return 0;
/* Folding failed (e.g. type error): swallow the exception and leave
   the bytecode unchanged -- the error will surface at run time. */
527 if (newconst == NULL) {
528 PyErr_Clear();
529 return 0;
/* size == -1 means "no length" (e.g. ints); that is fine to fold. */
531 size = PyObject_Size(newconst);
532 if (size == -1)
533 PyErr_Clear();
534 else if (size > 20) {
535 Py_DECREF(newconst);
536 return 0;
539 /* Append folded constant into consts table */
540 len_consts = PyList_GET_SIZE(consts);
541 if (PyList_Append(consts, newconst)) {
542 Py_DECREF(newconst);
543 return 0;
545 Py_DECREF(newconst);
547 /* Write NOP NOP NOP NOP LOAD_CONST newconst */
548 memset(codestr, NOP, 4);
549 codestr[4] = LOAD_CONST;
550 SETARG(codestr, 4, len_consts);
551 return 1;
/* Replace LOAD_CONST c UNARY_OP with LOAD_CONST unary_op(c).
   Same contract as fold_binops_on_constants: consts must still be a
   list; returns 1 if the 4-byte window was rewritten, 0 otherwise. */
554 static int
555 fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
557 PyObject *newconst=NULL, *v;
558 Py_ssize_t len_consts;
559 int opcode;
561 /* Pre-conditions */
562 assert(PyList_CheckExact(consts));
563 assert(codestr[0] == LOAD_CONST);
565 /* Create new constant */
566 v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
567 opcode = codestr[3];
568 switch (opcode) {
569 case UNARY_NEGATIVE:
570 /* Preserve the sign of -0.0 */
/* (only fold truthy values: -0.0 would otherwise fold to +0.0) */
571 if (PyObject_IsTrue(v) == 1)
572 newconst = PyNumber_Negative(v);
573 break;
574 case UNARY_CONVERT:
575 newconst = PyObject_Repr(v);
576 break;
577 case UNARY_INVERT:
578 newconst = PyNumber_Invert(v);
579 break;
580 default:
581 /* Called with an unknown opcode */
582 PyErr_Format(PyExc_SystemError,
583 "unexpected unary operation %d on a constant",
584 opcode);
585 return 0;
/* NULL here means the fold failed or was skipped; bail quietly. */
587 if (newconst == NULL) {
588 PyErr_Clear();
589 return 0;
592 /* Append folded constant into consts table */
593 len_consts = PyList_GET_SIZE(consts);
594 if (PyList_Append(consts, newconst)) {
595 Py_DECREF(newconst);
596 return 0;
598 Py_DECREF(newconst);
600 /* Write NOP LOAD_CONST newconst */
601 codestr[0] = NOP;
602 codestr[1] = LOAD_CONST;
603 SETARG(codestr, 1, len_consts);
604 return 1;
/* Compute a basic-block number for every byte of `code`.
   Two offsets are in the same basic block iff their entries in the
   returned array are equal (this is what ISBASICBLOCK tests).
   Returns a PyMem_Malloc'ed array of len entries, or NULL on OOM
   (without setting an exception -- caller treats it as "bail out"). */
607 static unsigned int *
608 markblocks(unsigned char *code, int len)
610 unsigned int *blocks = PyMem_Malloc(len*sizeof(int));
611 int i,j, opcode, blockcnt = 0;
613 if (blocks == NULL)
614 return NULL;
615 memset(blocks, 0, len*sizeof(int));
617 /* Mark labels in the first pass */
618 for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
619 opcode = code[i];
620 switch (opcode) {
621 case FOR_ITER:
622 case JUMP_FORWARD:
623 case JUMP_IF_FALSE:
624 case JUMP_IF_TRUE:
625 case JUMP_ABSOLUTE:
626 case CONTINUE_LOOP:
627 case SETUP_LOOP:
628 case SETUP_EXCEPT:
629 case SETUP_FINALLY:
/* every jump target starts a new basic block */
630 j = GETJUMPTGT(code, i);
631 blocks[j] = 1;
632 break;
635 /* Build block numbers in the second pass */
636 for (i=0 ; i<len ; i++) {
637 blockcnt += blocks[i]; /* increment blockcnt over labels */
638 blocks[i] = blockcnt;
640 return blocks;
643 /* Perform basic peephole optimizations to components of a code object.
644 The consts object should still be in list form to allow new constants
645 to be appended.
647 To keep the optimizer simple, it bails out (does nothing) for code
648 containing extended arguments or that has a length over 32,700. That
649 allows us to avoid overflow and sign issues. Likewise, it bails when
650 the lineno table has complex encoding for gaps >= 255.
652 Optimizations are restricted to simple transformations occurring within a
653 single basic block. All transformations keep the code size the same or
654 smaller. For those that reduce size, the gaps are initially filled with
655 NOPs. Later those NOPs are removed and the jump addresses retargeted in
656 a single pass. Line numbering is adjusted accordingly. */
658 static PyObject *
659 optimize_code(PyObject *code, PyObject* consts, PyObject *names,
660 PyObject *lineno_obj)
662 Py_ssize_t i, j, codelen;
663 int nops, h, adj;
664 int tgt, tgttgt, opcode;
665 unsigned char *codestr = NULL;
666 unsigned char *lineno;
667 int *addrmap = NULL;
668 int new_line, cum_orig_line, last_line, tabsiz;
669 int cumlc=0, lastlc=0; /* Count runs of consecutive LOAD_CONSTs */
670 unsigned int *blocks = NULL;
671 char *name;
673 /* Bail out if an exception is set */
674 if (PyErr_Occurred())
675 goto exitUnchanged;
677 /* Bypass optimization when the lineno table is too complex */
678 assert(PyString_Check(lineno_obj));
679 lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
680 tabsiz = PyString_GET_SIZE(lineno_obj);
/* a 255 byte in lnotab encodes a large gap; skip rather than handle it */
681 if (memchr(lineno, 255, tabsiz) != NULL)
682 goto exitUnchanged;
684 /* Avoid situations where jump retargeting could overflow */
685 assert(PyString_Check(code));
686 codelen = PyString_Size(code);
687 if (codelen > 32700)
688 goto exitUnchanged;
690 /* Make a modifiable copy of the code string */
691 codestr = PyMem_Malloc(codelen);
692 if (codestr == NULL)
693 goto exitUnchanged;
694 codestr = memcpy(codestr, PyString_AS_STRING(code), codelen);
696 /* Verify that RETURN_VALUE terminates the codestring. This allows
697 the various transformation patterns to look ahead several
698 instructions without additional checks to make sure they are not
699 looking beyond the end of the code string.
701 if (codestr[codelen-1] != RETURN_VALUE)
702 goto exitUnchanged;
704 /* Mapping to new jump targets after NOPs are removed */
705 addrmap = PyMem_Malloc(codelen * sizeof(int));
706 if (addrmap == NULL)
707 goto exitUnchanged;
709 blocks = markblocks(codestr, codelen);
710 if (blocks == NULL)
711 goto exitUnchanged;
712 assert(PyList_Check(consts));
714 for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
715 opcode = codestr[i];
717 lastlc = cumlc;
718 cumlc = 0;
720 switch (opcode) {
722 /* Replace UNARY_NOT JUMP_IF_FALSE POP_TOP with
723 with JUMP_IF_TRUE POP_TOP */
724 case UNARY_NOT:
725 if (codestr[i+1] != JUMP_IF_FALSE ||
726 codestr[i+4] != POP_TOP ||
727 !ISBASICBLOCK(blocks,i,5))
728 continue;
729 tgt = GETJUMPTGT(codestr, (i+1));
730 if (codestr[tgt] != POP_TOP)
731 continue;
/* +1 shifts the jump past the POP_TOP now placed before the NOP */
732 j = GETARG(codestr, i+1) + 1;
733 codestr[i] = JUMP_IF_TRUE;
734 SETARG(codestr, i, j);
735 codestr[i+3] = POP_TOP;
736 codestr[i+4] = NOP;
737 break;
739 /* not a is b --> a is not b
740 not a in b --> a not in b
741 not a is not b --> a is b
742 not a not in b --> a in b
744 case COMPARE_OP:
/* args 6..9 are the is/is not/in/not in comparisons; XOR 1 flips
   each to its negation, absorbing the following UNARY_NOT */
745 j = GETARG(codestr, i);
746 if (j < 6 || j > 9 ||
747 codestr[i+3] != UNARY_NOT ||
748 !ISBASICBLOCK(blocks,i,4))
749 continue;
750 SETARG(codestr, i, (j^1));
751 codestr[i+3] = NOP;
752 break;
754 /* Replace LOAD_GLOBAL/LOAD_NAME None
755 with LOAD_CONST None */
756 case LOAD_NAME:
757 case LOAD_GLOBAL:
758 j = GETARG(codestr, i);
759 name = PyString_AsString(PyTuple_GET_ITEM(names, j));
760 if (name == NULL || strcmp(name, "None") != 0)
761 continue;
762 for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
763 if (PyList_GET_ITEM(consts, j) == Py_None) {
764 codestr[i] = LOAD_CONST;
765 SETARG(codestr, i, j);
766 cumlc = lastlc + 1;
767 break;
770 break;
772 /* Skip over LOAD_CONST trueconst
773 JUMP_IF_FALSE xx POP_TOP */
774 case LOAD_CONST:
775 cumlc = lastlc + 1;
776 j = GETARG(codestr, i);
777 if (codestr[i+3] != JUMP_IF_FALSE ||
778 codestr[i+6] != POP_TOP ||
779 !ISBASICBLOCK(blocks,i,7) ||
780 !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
781 continue;
782 memset(codestr+i, NOP, 7);
783 cumlc = 0;
784 break;
786 /* Try to fold tuples of constants (includes a case for lists
787 which are only used for "in" and "not in" tests).
788 Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
789 Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
790 Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
791 case BUILD_TUPLE:
792 case BUILD_LIST:
793 j = GETARG(codestr, i);
794 h = i - 3 * j;
795 if (h >= 0 &&
796 j <= lastlc &&
797 ((opcode == BUILD_TUPLE &&
798 ISBASICBLOCK(blocks, h, 3*(j+1))) ||
799 (opcode == BUILD_LIST &&
800 codestr[i+3]==COMPARE_OP &&
801 ISBASICBLOCK(blocks, h, 3*(j+2)) &&
802 (GETARG(codestr,i+3)==6 ||
803 GETARG(codestr,i+3)==7))) &&
804 tuple_of_constants(&codestr[h], j, consts)) {
805 assert(codestr[i] == LOAD_CONST);
806 cumlc = 1;
807 break;
809 if (codestr[i+3] != UNPACK_SEQUENCE ||
810 !ISBASICBLOCK(blocks,i,6) ||
811 j != GETARG(codestr, i+3))
812 continue;
813 if (j == 1) {
814 memset(codestr+i, NOP, 6);
815 } else if (j == 2) {
816 codestr[i] = ROT_TWO;
817 memset(codestr+i+1, NOP, 5);
818 } else if (j == 3) {
819 codestr[i] = ROT_THREE;
820 codestr[i+1] = ROT_TWO;
821 memset(codestr+i+2, NOP, 4);
823 break;
825 /* Fold binary ops on constants.
826 LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */
827 case BINARY_POWER:
828 case BINARY_MULTIPLY:
829 case BINARY_TRUE_DIVIDE:
830 case BINARY_FLOOR_DIVIDE:
831 case BINARY_MODULO:
832 case BINARY_ADD:
833 case BINARY_SUBTRACT:
834 case BINARY_SUBSCR:
835 case BINARY_LSHIFT:
836 case BINARY_RSHIFT:
837 case BINARY_AND:
838 case BINARY_XOR:
839 case BINARY_OR:
840 if (lastlc >= 2 &&
841 ISBASICBLOCK(blocks, i-6, 7) &&
842 fold_binops_on_constants(&codestr[i-6], consts)) {
/* rewind so the new LOAD_CONST itself gets rescanned */
843 i -= 2;
844 assert(codestr[i] == LOAD_CONST);
845 cumlc = 1;
847 break;
849 /* Fold unary ops on constants.
850 LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c) */
851 case UNARY_NEGATIVE:
852 case UNARY_CONVERT:
853 case UNARY_INVERT:
854 if (lastlc >= 1 &&
855 ISBASICBLOCK(blocks, i-3, 4) &&
856 fold_unaryops_on_constants(&codestr[i-3], consts)) {
857 i -= 2;
858 assert(codestr[i] == LOAD_CONST);
859 cumlc = 1;
861 break;
863 /* Simplify conditional jump to conditional jump where the
864 result of the first test implies the success of a similar
865 test or the failure of the opposite test.
866 Arises in code like:
867 "if a and b:"
868 "if a or b:"
869 "a and b or c"
870 "(a and b) and c"
871 x:JUMP_IF_FALSE y y:JUMP_IF_FALSE z --> x:JUMP_IF_FALSE z
872 x:JUMP_IF_FALSE y y:JUMP_IF_TRUE z --> x:JUMP_IF_FALSE y+3
873 where y+3 is the instruction following the second test.
875 case JUMP_IF_FALSE:
876 case JUMP_IF_TRUE:
877 tgt = GETJUMPTGT(codestr, i);
878 j = codestr[tgt];
879 if (j == JUMP_IF_FALSE || j == JUMP_IF_TRUE) {
880 if (j == opcode) {
881 tgttgt = GETJUMPTGT(codestr, tgt) - i - 3;
882 SETARG(codestr, i, tgttgt);
883 } else {
884 tgt -= i;
885 SETARG(codestr, i, tgt);
887 break;
889 /* Intentional fallthrough */
891 /* Replace jumps to unconditional jumps */
892 case FOR_ITER:
893 case JUMP_FORWARD:
894 case JUMP_ABSOLUTE:
895 case CONTINUE_LOOP:
896 case SETUP_LOOP:
897 case SETUP_EXCEPT:
898 case SETUP_FINALLY:
899 tgt = GETJUMPTGT(codestr, i);
900 if (!UNCONDITIONAL_JUMP(codestr[tgt]))
901 continue;
902 tgttgt = GETJUMPTGT(codestr, tgt);
903 if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
904 opcode = JUMP_ABSOLUTE;
905 if (!ABSOLUTE_JUMP(opcode))
906 tgttgt -= i + 3; /* Calc relative jump addr */
907 if (tgttgt < 0) /* No backward relative jumps */
908 continue;
909 codestr[i] = opcode;
910 SETARG(codestr, i, tgttgt);
911 break;
913 case EXTENDED_ARG:
914 goto exitUnchanged;
916 /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
917 case RETURN_VALUE:
918 if (i+4 >= codelen ||
919 codestr[i+4] != RETURN_VALUE ||
920 !ISBASICBLOCK(blocks,i,5))
921 continue;
922 memset(codestr+i+1, NOP, 4);
923 break;
927 /* Fixup linenotab */
928 for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
929 addrmap[i] = i - nops;
930 if (codestr[i] == NOP)
931 nops++;
933 cum_orig_line = 0;
934 last_line = 0;
935 for (i=0 ; i < tabsiz ; i+=2) {
936 cum_orig_line += lineno[i];
937 new_line = addrmap[cum_orig_line];
938 assert (new_line - last_line < 255);
939 lineno[i] =((unsigned char)(new_line - last_line));
940 last_line = new_line;
943 /* Remove NOPs and fixup jump targets */
944 for (i=0, h=0 ; i<codelen ; ) {
945 opcode = codestr[i];
946 switch (opcode) {
947 case NOP:
948 i++;
949 continue;
951 case JUMP_ABSOLUTE:
952 case CONTINUE_LOOP:
953 j = addrmap[GETARG(codestr, i)];
954 SETARG(codestr, i, j);
955 break;
957 case FOR_ITER:
958 case JUMP_FORWARD:
959 case JUMP_IF_FALSE:
960 case JUMP_IF_TRUE:
961 case SETUP_LOOP:
962 case SETUP_EXCEPT:
963 case SETUP_FINALLY:
964 j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
965 SETARG(codestr, i, j);
966 break;
/* compact the instruction down to offset h (h trails i by the
   number of NOP bytes removed so far) */
968 adj = CODESIZE(opcode);
969 while (adj--)
970 codestr[h++] = codestr[i++];
972 assert(h + nops == codelen);
974 code = PyString_FromStringAndSize((char *)codestr, h);
975 PyMem_Free(addrmap);
976 PyMem_Free(codestr);
977 PyMem_Free(blocks);
978 return code;
980 exitUnchanged:
/* return the original code string with a fresh reference */
981 if (blocks != NULL)
982 PyMem_Free(blocks);
983 if (addrmap != NULL)
984 PyMem_Free(addrmap);
985 if (codestr != NULL)
986 PyMem_Free(codestr);
987 Py_INCREF(code);
988 return code;
991 /* End: Peephole optimizations ----------------------------------------- */
995 Leave this debugging code for just a little longer.
/* Debugging helper: dump one symbol-table dict to stderr, one line per
   name with its decoded DEF_* flag bits. */
997 static void
998 compiler_display_symbols(PyObject *name, PyObject *symbols)
1000 PyObject *key, *value;
1001 int flags;
1002 Py_ssize_t pos = 0;
1004 fprintf(stderr, "block %s\n", PyString_AS_STRING(name));
1005 while (PyDict_Next(symbols, &pos, &key, &value)) {
1006 flags = PyInt_AsLong(value);
1007 fprintf(stderr, "var %s:", PyString_AS_STRING(key));
1008 if (flags & DEF_GLOBAL)
1009 fprintf(stderr, " declared_global");
1010 if (flags & DEF_LOCAL)
1011 fprintf(stderr, " local");
1012 if (flags & DEF_PARAM)
1013 fprintf(stderr, " param");
1014 if (flags & DEF_STAR)
1015 fprintf(stderr, " stararg");
1016 if (flags & DEF_DOUBLESTAR)
1017 fprintf(stderr, " starstar");
1018 if (flags & DEF_INTUPLE)
1019 fprintf(stderr, " tuple");
1020 if (flags & DEF_FREE)
1021 fprintf(stderr, " free");
1022 if (flags & DEF_FREE_GLOBAL)
1023 fprintf(stderr, " global");
1024 if (flags & DEF_FREE_CLASS)
1025 fprintf(stderr, " free/class");
1026 if (flags & DEF_IMPORT)
1027 fprintf(stderr, " import");
1028 fprintf(stderr, "\n");
1030 fprintf(stderr, "\n");
/* Sanity-check the invariants of every block in a compilation unit.
   Debug-build only in effect (all checks are asserts). */
1034 static void
1035 compiler_unit_check(struct compiler_unit *u)
1037 basicblock *block;
1038 for (block = u->u_blocks; block != NULL; block = block->b_list) {
/* presumably these are allocator debug-fill patterns used to catch
   freed/uninitialized blocks -- TODO confirm against the platform */
1039 assert(block != (void *)0xcbcbcbcb);
1040 assert(block != (void *)0xfbfbfbfb);
1041 assert(block != (void *)0xdbdbdbdb);
1042 if (block->b_instr != NULL) {
1043 assert(block->b_ialloc > 0);
1044 assert(block->b_iused > 0);
1045 assert(block->b_ialloc >= block->b_iused);
1047 else {
1048 assert (block->b_iused == 0);
1049 assert (block->b_ialloc == 0);
1054 static void
1055 compiler_unit_free(struct compiler_unit *u)
1057 basicblock *b, *next;
1059 compiler_unit_check(u);
1060 b = u->u_blocks;
1061 while (b != NULL) {
1062 if (b->b_instr)
1063 PyObject_Free((void *)b->b_instr);
1064 next = b->b_list;
1065 PyObject_Free((void *)b);
1066 b = next;
1068 Py_XDECREF(u->u_ste);
1069 Py_XDECREF(u->u_name);
1070 Py_XDECREF(u->u_consts);
1071 Py_XDECREF(u->u_names);
1072 Py_XDECREF(u->u_varnames);
1073 Py_XDECREF(u->u_freevars);
1074 Py_XDECREF(u->u_cellvars);
1075 Py_XDECREF(u->u_private);
1076 PyObject_Free(u);
1079 static int
1080 compiler_enter_scope(struct compiler *c, identifier name, void *key,
1081 int lineno)
1083 struct compiler_unit *u;
1085 u = PyObject_Malloc(sizeof(struct compiler_unit));
1086 if (!u) {
1087 PyErr_NoMemory();
1088 return 0;
1090 memset(u, 0, sizeof(struct compiler_unit));
1091 u->u_argcount = 0;
1092 u->u_ste = PySymtable_Lookup(c->c_st, key);
1093 if (!u->u_ste) {
1094 compiler_unit_free(u);
1095 return 0;
1097 Py_INCREF(name);
1098 u->u_name = name;
1099 u->u_varnames = list2dict(u->u_ste->ste_varnames);
1100 u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
1101 u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS,
1102 PyDict_Size(u->u_cellvars));
1104 u->u_blocks = NULL;
1105 u->u_tmpname = 0;
1106 u->u_nfblocks = 0;
1107 u->u_firstlineno = lineno;
1108 u->u_lineno = 0;
1109 u->u_lineno_set = false;
1110 u->u_consts = PyDict_New();
1111 if (!u->u_consts) {
1112 compiler_unit_free(u);
1113 return 0;
1115 u->u_names = PyDict_New();
1116 if (!u->u_names) {
1117 compiler_unit_free(u);
1118 return 0;
1121 u->u_private = NULL;
1123 /* Push the old compiler_unit on the stack. */
1124 if (c->u) {
1125 PyObject *wrapper = PyCObject_FromVoidPtr(c->u, NULL);
1126 if (PyList_Append(c->c_stack, wrapper) < 0) {
1127 compiler_unit_free(u);
1128 return 0;
1130 Py_DECREF(wrapper);
1131 u->u_private = c->u->u_private;
1132 Py_XINCREF(u->u_private);
1134 c->u = u;
1136 c->c_nestlevel++;
1137 if (compiler_use_new_block(c) == NULL)
1138 return 0;
1140 return 1;
/* Free the current compiler_unit and pop the enclosing unit (if any)
   off c->c_stack, making it current again.  Aborts the process if the
   list deletion fails, since that indicates internal corruption. */
static void
compiler_exit_scope(struct compiler *c)
{
        int n;
        PyObject *wrapper;

        c->c_nestlevel--;
        compiler_unit_free(c->u);
        /* Restore c->u to the parent unit. */
        n = PyList_GET_SIZE(c->c_stack) - 1;
        if (n >= 0) {
                wrapper = PyList_GET_ITEM(c->c_stack, n);
                c->u = (struct compiler_unit *)PyCObject_AsVoidPtr(wrapper);
                /* we are deleting from a list so this really shouldn't fail */
                if (PySequence_DelItem(c->c_stack, n) < 0)
                        Py_FatalError("compiler_exit_scope()");
                compiler_unit_check(c->u);
        }
        else
                c->u = NULL;  /* outermost scope popped */
}
1166 /* Allocate a new "anonymous" local variable.
1167 Used by list comprehensions and with statements.
1170 static PyObject *
1171 compiler_new_tmpname(struct compiler *c)
1173 char tmpname[256];
1174 PyOS_snprintf(tmpname, sizeof(tmpname), "_[%d]", ++c->u->u_tmpname);
1175 return PyString_FromString(tmpname);
/* Allocate a new basic block, link it onto the current unit's b_list
   chain (for later bulk freeing), and return a pointer to it.
   Does NOT make it the current block.  Returns NULL on error. */
static basicblock *
compiler_new_block(struct compiler *c)
{
        basicblock *b;
        struct compiler_unit *u;

        u = c->u;
        b = (basicblock *)PyObject_Malloc(sizeof(basicblock));
        if (b == NULL) {
                PyErr_NoMemory();
                return NULL;
        }
        memset((void *)b, 0, sizeof(basicblock));
        assert (b->b_next == NULL);
        /* prepend to the unit's list of all blocks */
        b->b_list = u->u_blocks;
        u->u_blocks = b;
        return b;
}
1201 static basicblock *
1202 compiler_use_new_block(struct compiler *c)
1204 basicblock *block = compiler_new_block(c);
1205 if (block == NULL)
1206 return NULL;
1207 c->u->u_curblock = block;
1208 return block;
1211 static basicblock *
1212 compiler_next_block(struct compiler *c)
1214 basicblock *block = compiler_new_block(c);
1215 if (block == NULL)
1216 return NULL;
1217 c->u->u_curblock->b_next = block;
1218 c->u->u_curblock = block;
1219 return block;
1222 static basicblock *
1223 compiler_use_next_block(struct compiler *c, basicblock *block)
1225 assert(block != NULL);
1226 c->u->u_curblock->b_next = block;
1227 c->u->u_curblock = block;
1228 return block;
1231 /* Returns the offset of the next instruction in the current block's
1232 b_instr array. Resizes the b_instr as necessary.
1233 Returns -1 on failure.
1236 static int
1237 compiler_next_instr(struct compiler *c, basicblock *b)
1239 assert(b != NULL);
1240 if (b->b_instr == NULL) {
1241 b->b_instr = PyObject_Malloc(sizeof(struct instr) *
1242 DEFAULT_BLOCK_SIZE);
1243 if (b->b_instr == NULL) {
1244 PyErr_NoMemory();
1245 return -1;
1247 b->b_ialloc = DEFAULT_BLOCK_SIZE;
1248 memset((char *)b->b_instr, 0,
1249 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
1251 else if (b->b_iused == b->b_ialloc) {
1252 size_t oldsize, newsize;
1253 oldsize = b->b_ialloc * sizeof(struct instr);
1254 newsize = oldsize << 1;
1255 if (newsize == 0) {
1256 PyErr_NoMemory();
1257 return -1;
1259 b->b_ialloc <<= 1;
1260 b->b_instr = PyObject_Realloc((void *)b->b_instr, newsize);
1261 if (b->b_instr == NULL)
1262 return -1;
1263 memset((char *)b->b_instr + oldsize, 0, newsize - oldsize);
1265 return b->b_iused++;
/* Stamp the current source line number (u_lineno) on the instruction
   at offset `off` in the current block.  Only the first instruction
   emitted after u_lineno_set is cleared gets stamped, so each source
   line produces at most one lnotab entry. */
static void
compiler_set_lineno(struct compiler *c, int off)
{
        basicblock *b;
        if (c->u->u_lineno_set)
                return;
        c->u->u_lineno_set = true;
        b = c->u->u_curblock;
        b->b_instr[off].i_lineno = c->u->u_lineno;
}
/* Return the net effect `opcode` (with argument `oparg`) has on the
   depth of the value stack: positive pushes, negative pops.  Used by
   the assembler to compute co_stacksize.  Aborts the process on an
   unknown opcode, since a wrong stack depth would be a very hard bug
   to track down later. */
static int
opcode_stack_effect(int opcode, int oparg)
{
        switch (opcode) {
                case POP_TOP:
                        return -1;
                case ROT_TWO:
                case ROT_THREE:
                        return 0;
                case DUP_TOP:
                        return 1;
                case ROT_FOUR:
                        return 0;

                case UNARY_POSITIVE:
                case UNARY_NEGATIVE:
                case UNARY_NOT:
                case UNARY_CONVERT:
                case UNARY_INVERT:
                        return 0;

                case LIST_APPEND:
                        return -2;

                case BINARY_POWER:
                case BINARY_MULTIPLY:
                case BINARY_DIVIDE:
                case BINARY_MODULO:
                case BINARY_ADD:
                case BINARY_SUBTRACT:
                case BINARY_SUBSCR:
                case BINARY_FLOOR_DIVIDE:
                case BINARY_TRUE_DIVIDE:
                        return -1;
                case INPLACE_FLOOR_DIVIDE:
                case INPLACE_TRUE_DIVIDE:
                        return -1;

                /* SLICE+n / STORE_SLICE+n / DELETE_SLICE+n: n encodes
                   how many of the start/stop operands are present. */
                case SLICE+0:
                        return 1;
                case SLICE+1:
                        return 0;
                case SLICE+2:
                        return 0;
                case SLICE+3:
                        return -1;

                case STORE_SLICE+0:
                        return -2;
                case STORE_SLICE+1:
                        return -3;
                case STORE_SLICE+2:
                        return -3;
                case STORE_SLICE+3:
                        return -4;

                case DELETE_SLICE+0:
                        return -1;
                case DELETE_SLICE+1:
                        return -2;
                case DELETE_SLICE+2:
                        return -2;
                case DELETE_SLICE+3:
                        return -3;

                case INPLACE_ADD:
                case INPLACE_SUBTRACT:
                case INPLACE_MULTIPLY:
                case INPLACE_DIVIDE:
                case INPLACE_MODULO:
                        return -1;
                case STORE_SUBSCR:
                        return -3;
                case DELETE_SUBSCR:
                        return -2;

                case BINARY_LSHIFT:
                case BINARY_RSHIFT:
                case BINARY_AND:
                case BINARY_XOR:
                case BINARY_OR:
                        return -1;
                case INPLACE_POWER:
                        return -1;
                case GET_ITER:
                        return 0;

                case PRINT_EXPR:
                        return -1;
                case PRINT_ITEM:
                        return -1;
                case PRINT_NEWLINE:
                        return 0;
                case PRINT_ITEM_TO:
                        return -2;
                case PRINT_NEWLINE_TO:
                        return -1;
                case INPLACE_LSHIFT:
                case INPLACE_RSHIFT:
                case INPLACE_AND:
                case INPLACE_XOR:
                case INPLACE_OR:
                        return -1;
                case BREAK_LOOP:
                        return 0;
                case WITH_CLEANUP:
                        return -1; /* XXX Sometimes more */
                case LOAD_LOCALS:
                        return 1;
                case RETURN_VALUE:
                        return -1;
                case IMPORT_STAR:
                        return -1;
                case EXEC_STMT:
                        return -3;
                case YIELD_VALUE:
                        return 0;

                case POP_BLOCK:
                        return 0;
                case END_FINALLY:
                        return -1; /* or -2 or -3 if exception occurred */
                case BUILD_CLASS:
                        return -2;

                case STORE_NAME:
                        return -1;
                case DELETE_NAME:
                        return 0;
                case UNPACK_SEQUENCE:
                        return oparg-1;  /* pops seq, pushes oparg items */
                case FOR_ITER:
                        return 1;

                case STORE_ATTR:
                        return -2;
                case DELETE_ATTR:
                        return -1;
                case STORE_GLOBAL:
                        return -1;
                case DELETE_GLOBAL:
                        return 0;
                case DUP_TOPX:
                        return oparg;
                case LOAD_CONST:
                        return 1;
                case LOAD_NAME:
                        return 1;
                case BUILD_TUPLE:
                case BUILD_LIST:
                        return 1-oparg;  /* pops oparg items, pushes one */
                case BUILD_MAP:
                        return 1;
                case LOAD_ATTR:
                        return 0;
                case COMPARE_OP:
                        return -1;
                case IMPORT_NAME:
                        return 0;
                case IMPORT_FROM:
                        return 1;

                case JUMP_FORWARD:
                case JUMP_IF_FALSE:
                case JUMP_IF_TRUE:
                case JUMP_ABSOLUTE:
                        return 0;

                case LOAD_GLOBAL:
                        return 1;

                case CONTINUE_LOOP:
                        return 0;
                case SETUP_LOOP:
                        return 0;
                case SETUP_EXCEPT:
                case SETUP_FINALLY:
                        return 3; /* actually pushed by an exception */

                case LOAD_FAST:
                        return 1;
                case STORE_FAST:
                        return -1;
                case DELETE_FAST:
                        return 0;

                case RAISE_VARARGS:
                        return -oparg;
/* oparg packs positional count in the low byte, keyword count above;
   each keyword argument occupies two stack slots (name + value). */
#define NARGS(o) (((o) % 256) + 2*((o) / 256))
                case CALL_FUNCTION:
                        return -NARGS(oparg);
                case CALL_FUNCTION_VAR:
                case CALL_FUNCTION_KW:
                        return -NARGS(oparg)-1;
                case CALL_FUNCTION_VAR_KW:
                        return -NARGS(oparg)-2;
#undef NARGS
                case MAKE_FUNCTION:
                        return -oparg;
                case BUILD_SLICE:
                        if (oparg == 3)
                                return -2;
                        else
                                return -1;

                case MAKE_CLOSURE:
                        return -oparg;
                case LOAD_CLOSURE:
                        return 1;
                case LOAD_DEREF:
                        return 1;
                case STORE_DEREF:
                        return -1;
                default:
                        fprintf(stderr, "opcode = %d\n", opcode);
                        Py_FatalError("opcode_stack_effect()");

        }
        return 0; /* not reachable */
}
/* Add an opcode with no argument to the current block.
   Returns 0 on failure, 1 on success.
*/

static int
compiler_addop(struct compiler *c, int opcode)
{
        basicblock *b;
        struct instr *i;
        int off;
        off = compiler_next_instr(c, c->u->u_curblock);
        if (off < 0)
                return 0;
        b = c->u->u_curblock;
        i = &b->b_instr[off];
        i->i_opcode = opcode;
        i->i_hasarg = 0;
        /* mark blocks that return so the assembler need not add
           an implicit "return None" after them */
        if (opcode == RETURN_VALUE)
                b->b_return = 1;
        compiler_set_lineno(c, off);
        return 1;
}
/* Look up `o` in `dict` (one of the unit's consts/names/... dicts) and
   return its small-integer index, adding it with the next free index if
   not yet present.  The key is the pair (o, type(o)) so that values of
   different types that compare equal (e.g. int 1 and long 1L) get
   distinct slots.  Returns -1 on failure. */
static int
compiler_add_o(struct compiler *c, PyObject *dict, PyObject *o)
{
        PyObject *t, *v;
        Py_ssize_t arg;

        /* necessary to make sure types aren't coerced (e.g., int and long) */
        t = PyTuple_Pack(2, o, o->ob_type);
        if (t == NULL)
                return -1;

        v = PyDict_GetItem(dict, t);
        if (!v) {
                /* new entry: index is the current dict size */
                arg = PyDict_Size(dict);
                v = PyInt_FromLong(arg);
                if (!v) {
                        Py_DECREF(t);
                        return -1;
                }
                if (PyDict_SetItem(dict, t, v) < 0) {
                        Py_DECREF(t);
                        Py_DECREF(v);
                        return -1;
                }
                Py_DECREF(v);
        }
        else
                arg = PyInt_AsLong(v);
        Py_DECREF(t);
        return arg;
}
1555 static int
1556 compiler_addop_o(struct compiler *c, int opcode, PyObject *dict,
1557 PyObject *o)
1559 int arg = compiler_add_o(c, dict, o);
1560 if (arg < 0)
1561 return 0;
1562 return compiler_addop_i(c, opcode, arg);
/* Like compiler_addop_o(), but first applies private-name mangling
   (e.g. __spam -> _ClassName__spam inside a class body) to `o`.
   Returns 0 on failure, 1 on success. */
static int
compiler_addop_name(struct compiler *c, int opcode, PyObject *dict,
                    PyObject *o)
{
        int arg;
        PyObject *mangled = _Py_Mangle(c->u->u_private, o);
        if (!mangled)
                return 0;
        arg = compiler_add_o(c, dict, mangled);
        Py_DECREF(mangled);
        if (arg < 0)
                return 0;
        return compiler_addop_i(c, opcode, arg);
}
/* Add an opcode with an integer argument to the current block.
   Returns 0 on failure, 1 on success.
*/

static int
compiler_addop_i(struct compiler *c, int opcode, int oparg)
{
        struct instr *i;
        int off;
        off = compiler_next_instr(c, c->u->u_curblock);
        if (off < 0)
                return 0;
        i = &c->u->u_curblock->b_instr[off];
        i->i_opcode = opcode;
        i->i_oparg = oparg;
        i->i_hasarg = 1;
        compiler_set_lineno(c, off);
        return 1;
}
/* Add a jump opcode whose target is basicblock `b`; `absolute` selects
   an absolute vs. relative jump.  The numeric argument is filled in
   later by the assembler once block offsets are known.
   Returns 0 on failure, 1 on success. */
static int
compiler_addop_j(struct compiler *c, int opcode, basicblock *b, int absolute)
{
        struct instr *i;
        int off;

        assert(b != NULL);
        off = compiler_next_instr(c, c->u->u_curblock);
        if (off < 0)
                return 0;
        compiler_set_lineno(c, off);
        i = &c->u->u_curblock->b_instr[off];
        i->i_opcode = opcode;
        i->i_target = b;   /* resolved to an offset during assembly */
        i->i_hasarg = 1;
        if (absolute)
                i->i_jabs = 1;
        else
                i->i_jrel = 1;
        return 1;
}
/* The distinction between NEW_BLOCK and NEXT_BLOCK is subtle. (I'd
   like to find better names.)  NEW_BLOCK() creates a new block and sets
   it as the current block.  NEXT_BLOCK() also creates an implicit jump
   from the current block to the new block.
*/

/* XXX The returns inside these macros make it impossible to decref
   objects created in the local function.  They may only be used where
   there is nothing to clean up; the *_IN_SCOPE variants additionally
   pop the current scope before returning failure.
*/

#define NEW_BLOCK(C) { \
        if (compiler_use_new_block((C)) == NULL) \
                return 0; \
}

#define NEXT_BLOCK(C) { \
        if (compiler_next_block((C)) == NULL) \
                return 0; \
}

/* Emit an argument-less opcode; return 0 from the enclosing function
   on failure. */
#define ADDOP(C, OP) { \
        if (!compiler_addop((C), (OP))) \
                return 0; \
}

#define ADDOP_IN_SCOPE(C, OP) { \
        if (!compiler_addop((C), (OP))) { \
                compiler_exit_scope(c); \
                return 0; \
        } \
}

/* Emit OP with object O interned in the unit dict selected by TYPE
   (consts, names, varnames, ...). */
#define ADDOP_O(C, OP, O, TYPE) { \
        if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) \
                return 0; \
}

/* Same as ADDOP_O but applies private-name mangling first. */
#define ADDOP_NAME(C, OP, O, TYPE) { \
        if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \
                return 0; \
}

#define ADDOP_I(C, OP, O) { \
        if (!compiler_addop_i((C), (OP), (O))) \
                return 0; \
}

#define ADDOP_JABS(C, OP, O) { \
        if (!compiler_addop_j((C), (OP), (O), 1)) \
                return 0; \
}

#define ADDOP_JREL(C, OP, O) { \
        if (!compiler_addop_j((C), (OP), (O), 0)) \
                return 0; \
}

/* VISIT and VISIT_SEQ takes an ASDL type as their second argument.  They use
   the ASDL name to synthesize the name of the C type and the visit function.
*/

#define VISIT(C, TYPE, V) {\
        if (!compiler_visit_ ## TYPE((C), (V))) \
                return 0; \
}

#define VISIT_IN_SCOPE(C, TYPE, V) {\
        if (!compiler_visit_ ## TYPE((C), (V))) { \
                compiler_exit_scope(c); \
                return 0; \
        } \
}

#define VISIT_SLICE(C, V, CTX) {\
        if (!compiler_visit_slice((C), (V), (CTX))) \
                return 0; \
}

#define VISIT_SEQ(C, TYPE, SEQ) { \
        int _i; \
        asdl_seq *seq = (SEQ); /* avoid variable capture */ \
        for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
                TYPE ## _ty elt = asdl_seq_GET(seq, _i); \
                if (!compiler_visit_ ## TYPE((C), elt)) \
                        return 0; \
        } \
}

#define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
        int _i; \
        asdl_seq *seq = (SEQ); /* avoid variable capture */ \
        for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
                TYPE ## _ty elt = asdl_seq_GET(seq, _i); \
                if (!compiler_visit_ ## TYPE((C), elt)) { \
                        compiler_exit_scope(c); \
                        return 0; \
                } \
        } \
}
1723 static int
1724 compiler_isdocstring(stmt_ty s)
1726 if (s->kind != Expr_kind)
1727 return 0;
1728 return s->v.Expr.value->kind == Str_kind;
/* Compile a sequence of statements, checking for a docstring.  If the
   first statement is a docstring, its value is stored into __doc__
   instead of being compiled as an ordinary expression statement. */

static int
compiler_body(struct compiler *c, asdl_seq *stmts)
{
        int i = 0;
        stmt_ty st;

        if (!asdl_seq_LEN(stmts))
                return 1;   /* empty body: nothing to do */
        st = asdl_seq_GET(stmts, 0);
        if (compiler_isdocstring(st)) {
                i = 1;      /* skip the docstring in the loop below */
                VISIT(c, expr, st->v.Expr.value);
                if (!compiler_nameop(c, __doc__, Store))
                        return 0;
        }
        for (; i < asdl_seq_LEN(stmts); i++)
                VISIT(c, stmt, asdl_seq_GET(stmts, i));
        return 1;
}
1753 static PyCodeObject *
1754 compiler_mod(struct compiler *c, mod_ty mod)
1756 PyCodeObject *co;
1757 int addNone = 1;
1758 static PyObject *module;
1759 if (!module) {
1760 module = PyString_FromString("<module>");
1761 if (!module)
1762 return NULL;
1764 if (!compiler_enter_scope(c, module, mod, 1))
1765 return NULL;
1766 switch (mod->kind) {
1767 case Module_kind:
1768 if (!compiler_body(c, mod->v.Module.body)) {
1769 compiler_exit_scope(c);
1770 return 0;
1772 break;
1773 case Interactive_kind:
1774 c->c_interactive = 1;
1775 VISIT_SEQ_IN_SCOPE(c, stmt, mod->v.Interactive.body);
1776 break;
1777 case Expression_kind:
1778 VISIT_IN_SCOPE(c, expr, mod->v.Expression.body);
1779 addNone = 0;
1780 break;
1781 case Suite_kind:
1782 PyErr_SetString(PyExc_SystemError,
1783 "suite should not be possible");
1784 return 0;
1785 default:
1786 PyErr_Format(PyExc_SystemError,
1787 "module kind %d should not be possible",
1788 mod->kind);
1789 return 0;
1791 co = assemble(c, addNone);
1792 compiler_exit_scope(c);
1793 return co;
/* The test for LOCAL must come before the test for FREE in order to
   handle classes where name is both local and free.  The local var is
   a method and the free var is a free var referenced within a method.
*/

/* Return the symbol-table scope (LOCAL, GLOBAL_*, FREE, CELL, ...) of
   `name` in the current unit.  A scope of 0 means the symbol table is
   inconsistent with the compiler's state, which is unrecoverable, so
   the process is aborted with a diagnostic dump. */
static int
get_ref_type(struct compiler *c, PyObject *name)
{
        int scope = PyST_GetScope(c->u->u_ste, name);
        if (scope == 0) {
                char buf[350];
                PyOS_snprintf(buf, sizeof(buf),
                              "unknown scope for %.100s in %.100s(%s) in %s\n"
                              "symbols: %s\nlocals: %s\nglobals: %s\n",
                              PyString_AS_STRING(name),
                              PyString_AS_STRING(c->u->u_name),
                              PyObject_REPR(c->u->u_ste->ste_id),
                              c->c_filename,
                              PyObject_REPR(c->u->u_ste->ste_symbols),
                              PyObject_REPR(c->u->u_varnames),
                              PyObject_REPR(c->u->u_names)
                );
                Py_FatalError(buf);
        }

        return scope;
}
/* Look up `name` in an index dict (cellvars/freevars, keyed by
   (name, type(name)) pairs as built by compiler_add_o) and return its
   integer slot.  Returns -1 both on lookup failure and on error. */
static int
compiler_lookup_arg(PyObject *dict, PyObject *name)
{
        PyObject *k, *v;
        k = Py_BuildValue("(OO)", name, name->ob_type);
        if (k == NULL)
                return -1;
        v = PyDict_GetItem(dict, k);  /* borrowed reference */
        Py_DECREF(k);
        if (v == NULL)
                return -1;
        return PyInt_AS_LONG(v);
}
/* Emit code that builds a function object for code object `co` with
   `args` default values already on the stack.  If `co` has free
   variables, the enclosing cells are loaded and MAKE_CLOSURE is used;
   otherwise plain MAKE_FUNCTION.  Returns 0 on failure, 1 on success. */
static int
compiler_make_closure(struct compiler *c, PyCodeObject *co, int args)
{
        int i, free = PyCode_GetNumFree(co);
        if (free == 0) {
                /* simple case: no free variables */
                ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
                ADDOP_I(c, MAKE_FUNCTION, args);
                return 1;
        }
        for (i = 0; i < free; ++i) {
                /* Bypass com_addop_varname because it will generate
                   LOAD_DEREF but LOAD_CLOSURE is needed.
                */
                PyObject *name = PyTuple_GET_ITEM(co->co_freevars, i);
                int arg, reftype;

                /* Special case: If a class contains a method with a
                   free variable that has the same name as a method,
                   the name will be considered free *and* local in the
                   class.  It should be handled by the closure, as
                   well as by the normal name lookup logic.
                */
                reftype = get_ref_type(c, name);
                if (reftype == CELL)
                        arg = compiler_lookup_arg(c->u->u_cellvars, name);
                else /* (reftype == FREE) */
                        arg = compiler_lookup_arg(c->u->u_freevars, name);
                if (arg == -1) {
                        /* can't happen for consistent symtable state */
                        printf("lookup %s in %s %d %d\n"
                               "freevars of %s: %s\n",
                               PyObject_REPR(name),
                               PyString_AS_STRING(c->u->u_name),
                               reftype, arg,
                               PyString_AS_STRING(co->co_name),
                               PyObject_REPR(co->co_freevars));
                        Py_FatalError("compiler_make_closure()");
                }
                ADDOP_I(c, LOAD_CLOSURE, arg);
        }
        ADDOP_I(c, BUILD_TUPLE, free);
        ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
        ADDOP_I(c, MAKE_CLOSURE, args);
        return 1;
}
1883 static int
1884 compiler_decorators(struct compiler *c, asdl_seq* decos)
1886 int i;
1888 if (!decos)
1889 return 1;
1891 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1892 VISIT(c, expr, asdl_seq_GET(decos, i));
1894 return 1;
/* Emit unpacking code for tuple-style argument lists, e.g.
   "def f(a, (b, c))": the hidden positional argument ".1" is loaded
   and unpacked by visiting the tuple target.
   Returns 0 on failure, 1 on success. */
static int
compiler_arguments(struct compiler *c, arguments_ty args)
{
        int i;
        int n = asdl_seq_LEN(args->args);
        /* Correctly handle nested argument lists */
        for (i = 0; i < n; i++) {
                expr_ty arg = asdl_seq_GET(args->args, i);
                if (arg->kind == Tuple_kind) {
                        /* ".%d" is the compiler-generated name of the
                           hidden positional parameter at index i */
                        PyObject *id = PyString_FromFormat(".%d", i);
                        if (id == NULL) {
                                return 0;
                        }
                        if (!compiler_nameop(c, id, Load)) {
                                Py_DECREF(id);
                                return 0;
                        }
                        Py_DECREF(id);
                        VISIT(c, expr, arg);  /* unpack into the tuple */
                }
        }
        return 1;
}
/* Compile a function definition: evaluate decorators and default
   values in the enclosing scope, compile the body in a new scope,
   emit MAKE_FUNCTION/MAKE_CLOSURE, apply decorators, and bind the
   result to the function's name.  Returns 0 on failure, 1 on success. */
static int
compiler_function(struct compiler *c, stmt_ty s)
{
        PyCodeObject *co;
        PyObject *first_const = Py_None;
        arguments_ty args = s->v.FunctionDef.args;
        asdl_seq* decos = s->v.FunctionDef.decorators;
        stmt_ty st;
        int i, n, docstring;

        assert(s->kind == FunctionDef_kind);

        if (!compiler_decorators(c, decos))
                return 0;
        if (args->defaults)
                VISIT_SEQ(c, expr, args->defaults);
        if (!compiler_enter_scope(c, s->v.FunctionDef.name, (void *)s,
                                  s->lineno))
                return 0;

        /* the docstring (or None) is always the first constant */
        st = asdl_seq_GET(s->v.FunctionDef.body, 0);
        docstring = compiler_isdocstring(st);
        if (docstring)
                first_const = st->v.Expr.value->v.Str.s;
        if (compiler_add_o(c, c->u->u_consts, first_const) < 0) {
                compiler_exit_scope(c);
                return 0;
        }

        /* unpack nested arguments */
        compiler_arguments(c, args);

        c->u->u_argcount = asdl_seq_LEN(args->args);
        n = asdl_seq_LEN(s->v.FunctionDef.body);
        /* if there was a docstring, we need to skip the first statement */
        for (i = docstring; i < n; i++) {
                stmt_ty s2 = asdl_seq_GET(s->v.FunctionDef.body, i);
                if (i == 0 && s2->kind == Expr_kind &&
                    s2->v.Expr.value->kind == Str_kind)
                        continue;
                VISIT_IN_SCOPE(c, stmt, s2);
        }
        co = assemble(c, 1);
        compiler_exit_scope(c);
        if (co == NULL)
                return 0;

        compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
        Py_DECREF(co);

        /* apply decorators innermost-first: each wraps the TOS */
        for (i = 0; i < asdl_seq_LEN(decos); i++) {
                ADDOP_I(c, CALL_FUNCTION, 1);
        }

        return compiler_nameop(c, s->v.FunctionDef.name, Store);
}
/* Compile a class definition: push the class name and base tuple,
   compile the body in a new scope (setting __module__ and the private
   name-mangling prefix), call the resulting function, run BUILD_CLASS,
   and bind the class to its name.  Returns 0 on failure, 1 on success. */
static int
compiler_class(struct compiler *c, stmt_ty s)
{
        int n;
        PyCodeObject *co;
        PyObject *str;
        /* push class name on stack, needed by BUILD_CLASS */
        ADDOP_O(c, LOAD_CONST, s->v.ClassDef.name, consts);
        /* push the tuple of base classes on the stack */
        n = asdl_seq_LEN(s->v.ClassDef.bases);
        if (n > 0)
                VISIT_SEQ(c, expr, s->v.ClassDef.bases);
        ADDOP_I(c, BUILD_TUPLE, n);
        if (!compiler_enter_scope(c, s->v.ClassDef.name, (void *)s,
                                  s->lineno))
                return 0;
        /* class name becomes the private-mangling prefix for the body */
        c->u->u_private = s->v.ClassDef.name;
        Py_INCREF(c->u->u_private);
        str = PyString_InternFromString("__name__");
        if (!str || !compiler_nameop(c, str, Load)) {
                Py_XDECREF(str);
                compiler_exit_scope(c);
                return 0;
        }

        Py_DECREF(str);
        str = PyString_InternFromString("__module__");
        if (!str || !compiler_nameop(c, str, Store)) {
                Py_XDECREF(str);
                compiler_exit_scope(c);
                return 0;
        }
        Py_DECREF(str);

        if (!compiler_body(c, s->v.ClassDef.body)) {
                compiler_exit_scope(c);
                return 0;
        }

        /* the class body function returns its locals dict */
        ADDOP_IN_SCOPE(c, LOAD_LOCALS);
        ADDOP_IN_SCOPE(c, RETURN_VALUE);
        co = assemble(c, 1);
        compiler_exit_scope(c);
        if (co == NULL)
                return 0;

        compiler_make_closure(c, co, 0);
        Py_DECREF(co);

        ADDOP_I(c, CALL_FUNCTION, 0);
        ADDOP(c, BUILD_CLASS);
        if (!compiler_nameop(c, s->v.ClassDef.name, Store))
                return 0;
        return 1;
}
/* Compile a conditional expression "body if test else orelse":
   evaluate test, jump over body when false, otherwise skip orelse.
   Returns 0 on failure, 1 on success. */
static int
compiler_ifexp(struct compiler *c, expr_ty e)
{
        basicblock *end, *next;

        assert(e->kind == IfExp_kind);
        end = compiler_new_block(c);
        if (end == NULL)
                return 0;
        next = compiler_new_block(c);
        if (next == NULL)
                return 0;
        VISIT(c, expr, e->v.IfExp.test);
        ADDOP_JREL(c, JUMP_IF_FALSE, next);
        ADDOP(c, POP_TOP);  /* discard the test result */
        VISIT(c, expr, e->v.IfExp.body);
        ADDOP_JREL(c, JUMP_FORWARD, end);
        compiler_use_next_block(c, next);
        ADDOP(c, POP_TOP);  /* JUMP_IF_FALSE leaves the test on TOS */
        VISIT(c, expr, e->v.IfExp.orelse);
        compiler_use_next_block(c, end);
        return 1;
}
/* Compile a lambda expression: defaults are evaluated in the enclosing
   scope, the body in a new "<lambda>" scope whose value is returned
   implicitly.  Returns 0 on failure, 1 on success. */
static int
compiler_lambda(struct compiler *c, expr_ty e)
{
        PyCodeObject *co;
        static identifier name;  /* interned "<lambda>", cached forever */
        arguments_ty args = e->v.Lambda.args;
        assert(e->kind == Lambda_kind);

        if (!name) {
                name = PyString_InternFromString("<lambda>");
                if (!name)
                        return 0;
        }

        if (args->defaults)
                VISIT_SEQ(c, expr, args->defaults);
        if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
                return 0;

        /* unpack nested arguments */
        compiler_arguments(c, args);

        c->u->u_argcount = asdl_seq_LEN(args->args);
        VISIT_IN_SCOPE(c, expr, e->v.Lambda.body);
        ADDOP_IN_SCOPE(c, RETURN_VALUE);
        co = assemble(c, 1);
        compiler_exit_scope(c);
        if (co == NULL)
                return 0;

        compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
        Py_DECREF(co);

        return 1;
}
/* Compile a print statement.  With a destination ("print >>f, ..."),
   the file object stays on the stack and is DUP'ed for each item so
   PRINT_ITEM_TO/PRINT_NEWLINE_TO can consume a copy.
   Returns 0 on failure, 1 on success. */
static int
compiler_print(struct compiler *c, stmt_ty s)
{
        int i, n;
        bool dest;

        assert(s->kind == Print_kind);
        n = asdl_seq_LEN(s->v.Print.values);
        dest = false;
        if (s->v.Print.dest) {
                VISIT(c, expr, s->v.Print.dest);
                dest = true;
        }
        for (i = 0; i < n; i++) {
                expr_ty e = (expr_ty)asdl_seq_GET(s->v.Print.values, i);
                if (dest) {
                        ADDOP(c, DUP_TOP);  /* copy of the file object */
                        VISIT(c, expr, e);
                        ADDOP(c, ROT_TWO);  /* value under file copy */
                        ADDOP(c, PRINT_ITEM_TO);
                }
                else {
                        VISIT(c, expr, e);
                        ADDOP(c, PRINT_ITEM);
                }
        }
        if (s->v.Print.nl) {
                if (dest)
                        ADDOP(c, PRINT_NEWLINE_TO)
                else
                        ADDOP(c, PRINT_NEWLINE)
        }
        else if (dest)
                ADDOP(c, POP_TOP);  /* drop the leftover file object */
        return 1;
}
/* Compile an if statement: test, body, optional orelse.  JUMP_IF_FALSE
   leaves the tested value on the stack, hence the POP_TOP on both
   branches.  Returns 0 on failure, 1 on success. */
static int
compiler_if(struct compiler *c, stmt_ty s)
{
        basicblock *end, *next;

        assert(s->kind == If_kind);
        end = compiler_new_block(c);
        if (end == NULL)
                return 0;
        next = compiler_new_block(c);
        if (next == NULL)
                return 0;
        VISIT(c, expr, s->v.If.test);
        ADDOP_JREL(c, JUMP_IF_FALSE, next);
        ADDOP(c, POP_TOP);
        VISIT_SEQ(c, stmt, s->v.If.body);
        ADDOP_JREL(c, JUMP_FORWARD, end);
        compiler_use_next_block(c, next);
        ADDOP(c, POP_TOP);
        if (s->v.If.orelse)
                VISIT_SEQ(c, stmt, s->v.If.orelse);
        compiler_use_next_block(c, end);
        return 1;
}
/* Compile a for loop: SETUP_LOOP / GET_ITER / FOR_ITER with a cleanup
   block for loop exit and the optional else clause.  The loop is
   registered as an fblock so break/continue can find it.
   Returns 0 on failure, 1 on success. */
static int
compiler_for(struct compiler *c, stmt_ty s)
{
        basicblock *start, *cleanup, *end;

        start = compiler_new_block(c);
        cleanup = compiler_new_block(c);
        end = compiler_new_block(c);
        if (start == NULL || end == NULL || cleanup == NULL)
                return 0;
        ADDOP_JREL(c, SETUP_LOOP, end);
        if (!compiler_push_fblock(c, LOOP, start))
                return 0;
        VISIT(c, expr, s->v.For.iter);
        ADDOP(c, GET_ITER);
        compiler_use_next_block(c, start);
        /* FOR_ITER jumps to cleanup when the iterator is exhausted */
        ADDOP_JREL(c, FOR_ITER, cleanup);
        VISIT(c, expr, s->v.For.target);
        VISIT_SEQ(c, stmt, s->v.For.body);
        ADDOP_JABS(c, JUMP_ABSOLUTE, start);
        compiler_use_next_block(c, cleanup);
        ADDOP(c, POP_BLOCK);
        compiler_pop_fblock(c, LOOP, start);
        VISIT_SEQ(c, stmt, s->v.For.orelse);  /* else clause (maybe empty) */
        compiler_use_next_block(c, end);
        return 1;
}
/* Compile a while loop.  expr_constant() folds a constant test:
   0 means the loop body is dead code and is skipped entirely,
   1 means the test need not be emitted (infinite loop), and -1 means
   the test must be evaluated each iteration (anchor block used).
   Returns 0 on failure, 1 on success. */
static int
compiler_while(struct compiler *c, stmt_ty s)
{
        basicblock *loop, *orelse, *end, *anchor = NULL;
        int constant = expr_constant(s->v.While.test);

        if (constant == 0)
                return 1;  /* while 0: never runs; emit nothing */
        loop = compiler_new_block(c);
        end = compiler_new_block(c);
        if (constant == -1) {
                anchor = compiler_new_block(c);
                if (anchor == NULL)
                        return 0;
        }
        if (loop == NULL || end == NULL)
                return 0;
        if (s->v.While.orelse) {
                orelse = compiler_new_block(c);
                if (orelse == NULL)
                        return 0;
        }
        else
                orelse = NULL;

        ADDOP_JREL(c, SETUP_LOOP, end);
        compiler_use_next_block(c, loop);
        if (!compiler_push_fblock(c, LOOP, loop))
                return 0;
        if (constant == -1) {
                VISIT(c, expr, s->v.While.test);
                ADDOP_JREL(c, JUMP_IF_FALSE, anchor);
                ADDOP(c, POP_TOP);
        }
        VISIT_SEQ(c, stmt, s->v.While.body);
        ADDOP_JABS(c, JUMP_ABSOLUTE, loop);

        /* XXX should the two POP instructions be in a separate block
           if there is no else clause ?
        */

        if (constant == -1) {
                compiler_use_next_block(c, anchor);
                ADDOP(c, POP_TOP);   /* discard the false test value */
                ADDOP(c, POP_BLOCK);
        }
        compiler_pop_fblock(c, LOOP, loop);
        if (orelse != NULL)
                VISIT_SEQ(c, stmt, s->v.While.orelse);
        compiler_use_next_block(c, end);

        return 1;
}
/* Compile a continue statement.  Inside a bare loop this is a direct
   jump to the loop head; inside try/except blocks the CONTINUE_LOOP
   opcode unwinds to the innermost enclosing loop; inside a finally
   clause it is a compile-time error.  Returns 0 on failure. */
static int
compiler_continue(struct compiler *c)
{
        static const char LOOP_ERROR_MSG[] = "'continue' not properly in loop";
        int i;

        if (!c->u->u_nfblocks)
                return compiler_error(c, LOOP_ERROR_MSG);
        i = c->u->u_nfblocks - 1;
        switch (c->u->u_fblock[i].fb_type) {
        case LOOP:
                ADDOP_JABS(c, JUMP_ABSOLUTE, c->u->u_fblock[i].fb_block);
                break;
        case EXCEPT:
        case FINALLY_TRY:
                /* search outward for the enclosing loop */
                while (--i >= 0 && c->u->u_fblock[i].fb_type != LOOP)
                        ;
                if (i == -1)
                        return compiler_error(c, LOOP_ERROR_MSG);
                ADDOP_JABS(c, CONTINUE_LOOP, c->u->u_fblock[i].fb_block);
                break;
        case FINALLY_END:
                return compiler_error(c,
                        "'continue' not supported inside 'finally' clause");
        }

        return 1;
}
2267 /* Code generated for "try: <body> finally: <finalbody>" is as follows:
2269 SETUP_FINALLY L
2270 <code for body>
2271 POP_BLOCK
2272 LOAD_CONST <None>
2273 L: <code for finalbody>
2274 END_FINALLY
2276 The special instructions use the block stack. Each block
2277 stack entry contains the instruction that created it (here
2278 SETUP_FINALLY), the level of the value stack at the time the
2279 block stack entry was created, and a label (here L).
2281 SETUP_FINALLY:
2282 Pushes the current value stack level and the label
2283 onto the block stack.
2284 POP_BLOCK:
   Pops an entry from the block stack, and pops the value
2286 stack until its level is the same as indicated on the
2287 block stack. (The label is ignored.)
2288 END_FINALLY:
2289 Pops a variable number of entries from the *value* stack
2290 and re-raises the exception they specify. The number of
2291 entries popped depends on the (pseudo) exception type.
2293 The block stack is unwound when an exception is raised:
2294 when a SETUP_FINALLY entry is found, the exception is pushed
2295 onto the value stack (and the exception condition is cleared),
2296 and the interpreter jumps to the label gotten from the block
2297 stack.
/* Compile "try: <body> finally: <finalbody>" per the scheme described
   in the comment above: the normal path pushes None before the finally
   code so END_FINALLY knows no exception is pending.
   Returns 0 on failure, 1 on success. */
static int
compiler_try_finally(struct compiler *c, stmt_ty s)
{
        basicblock *body, *end;
        body = compiler_new_block(c);
        end = compiler_new_block(c);
        if (body == NULL || end == NULL)
                return 0;

        ADDOP_JREL(c, SETUP_FINALLY, end);
        compiler_use_next_block(c, body);
        if (!compiler_push_fblock(c, FINALLY_TRY, body))
                return 0;
        VISIT_SEQ(c, stmt, s->v.TryFinally.body);
        ADDOP(c, POP_BLOCK);
        compiler_pop_fblock(c, FINALLY_TRY, body);

        /* "no exception" marker consumed by END_FINALLY */
        ADDOP_O(c, LOAD_CONST, Py_None, consts);
        compiler_use_next_block(c, end);
        if (!compiler_push_fblock(c, FINALLY_END, end))
                return 0;
        VISIT_SEQ(c, stmt, s->v.TryFinally.finalbody);
        ADDOP(c, END_FINALLY);
        compiler_pop_fblock(c, FINALLY_END, end);

        return 1;
}
2329 Code generated for "try: S except E1, V1: S1 except E2, V2: S2 ...":
2330 (The contents of the value stack is shown in [], with the top
2331 at the right; 'tb' is trace-back info, 'val' the exception's
2332 associated value, and 'exc' the exception.)
2334 Value stack Label Instruction Argument
2335 [] SETUP_EXCEPT L1
2336 [] <code for S>
2337 [] POP_BLOCK
2338 [] JUMP_FORWARD L0
2340 [tb, val, exc] L1: DUP )
2341 [tb, val, exc, exc] <evaluate E1> )
2342 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1
2343 [tb, val, exc, 1-or-0] JUMP_IF_FALSE L2 )
2344 [tb, val, exc, 1] POP )
2345 [tb, val, exc] POP
2346 [tb, val] <assign to V1> (or POP if no V1)
2347 [tb] POP
2348 [] <code for S1>
2349 JUMP_FORWARD L0
2351 [tb, val, exc, 0] L2: POP
2352 [tb, val, exc] DUP
2353 .............................etc.......................
2355 [tb, val, exc, 0] Ln+1: POP
2356 [tb, val, exc] END_FINALLY # re-raise exception
2358 [] L0: <next statement>
2360 Of course, parts are not generated if Vi or Ei is not present.
/* Compile "try: S except E1, V1: S1 ..." per the stack diagram in the
   comment above: each handler compares the raised exception against
   its type expression and falls through to the next handler block on
   mismatch; an unmatched exception is re-raised by END_FINALLY.
   Returns 0 on failure, 1 on success. */
static int
compiler_try_except(struct compiler *c, stmt_ty s)
{
        basicblock *body, *orelse, *except, *end;
        int i, n;

        body = compiler_new_block(c);
        except = compiler_new_block(c);
        orelse = compiler_new_block(c);
        end = compiler_new_block(c);
        if (body == NULL || except == NULL || orelse == NULL || end == NULL)
                return 0;
        ADDOP_JREL(c, SETUP_EXCEPT, except);
        compiler_use_next_block(c, body);
        if (!compiler_push_fblock(c, EXCEPT, body))
                return 0;
        VISIT_SEQ(c, stmt, s->v.TryExcept.body);
        ADDOP(c, POP_BLOCK);
        compiler_pop_fblock(c, EXCEPT, body);
        ADDOP_JREL(c, JUMP_FORWARD, orelse);
        n = asdl_seq_LEN(s->v.TryExcept.handlers);
        compiler_use_next_block(c, except);
        for (i = 0; i < n; i++) {
                excepthandler_ty handler = asdl_seq_GET(
                                                s->v.TryExcept.handlers, i);
                if (!handler->type && i < n-1)
                    return compiler_error(c, "default 'except:' must be last");
                /* block for the NEXT handler (or re-raise after the last) */
                except = compiler_new_block(c);
                if (except == NULL)
                        return 0;
                if (handler->type) {
                        ADDOP(c, DUP_TOP);
                        VISIT(c, expr, handler->type);
                        ADDOP_I(c, COMPARE_OP, PyCmp_EXC_MATCH);
                        ADDOP_JREL(c, JUMP_IF_FALSE, except);
                        ADDOP(c, POP_TOP);  /* discard comparison result */
                }
                ADDOP(c, POP_TOP);          /* discard the exception */
                if (handler->name) {
                        VISIT(c, expr, handler->name);  /* bind the value */
                }
                else {
                        ADDOP(c, POP_TOP);  /* discard the value */
                }
                ADDOP(c, POP_TOP);          /* discard the traceback */
                VISIT_SEQ(c, stmt, handler->body);
                ADDOP_JREL(c, JUMP_FORWARD, end);
                compiler_use_next_block(c, except);
                if (handler->type)
                        ADDOP(c, POP_TOP);  /* discard false match result */
        }
        ADDOP(c, END_FINALLY);  /* no handler matched: re-raise */
        compiler_use_next_block(c, orelse);
        VISIT_SEQ(c, stmt, s->v.TryExcept.orelse);
        compiler_use_next_block(c, end);
        return 1;
}
2420 static int
2421 compiler_import_as(struct compiler *c, identifier name, identifier asname)
2423 /* The IMPORT_NAME opcode was already generated. This function
2424 merely needs to bind the result to a name.
2426 If there is a dot in name, we need to split it and emit a
2427 LOAD_ATTR for each name.
2429 const char *src = PyString_AS_STRING(name);
2430 const char *dot = strchr(src, '.');
2431 if (dot) {
2432 /* Consume the base module name to get the first attribute */
2433 src = dot + 1;
2434 while (dot) {
2435 /* NB src is only defined when dot != NULL */
2436 PyObject *attr;
2437 dot = strchr(src, '.');
2438 attr = PyString_FromStringAndSize(src,
2439 dot ? dot - src : strlen(src));
2440 if (!attr)
2441 return -1;
2442 ADDOP_O(c, LOAD_ATTR, attr, names);
2443 Py_DECREF(attr);
2444 src = dot + 1;
2447 return compiler_nameop(c, asname, Store);
2450 static int
2451 compiler_import(struct compiler *c, stmt_ty s)
2453 /* The Import node stores a module name like a.b.c as a single
2454 string. This is convenient for all cases except
2455 import a.b.c as d
2456 where we need to parse that string to extract the individual
2457 module names.
2458 XXX Perhaps change the representation to make this case simpler?
2460 int i, n = asdl_seq_LEN(s->v.Import.names);
2462 for (i = 0; i < n; i++) {
2463 alias_ty alias = asdl_seq_GET(s->v.Import.names, i);
2464 int r;
2465 PyObject *level;
2467 if (c->c_flags && (c->c_flags->cf_flags & CO_FUTURE_ABSIMPORT))
2468 level = PyInt_FromLong(0);
2469 else
2470 level = PyInt_FromLong(-1);
2472 if (level == NULL)
2473 return 0;
2475 ADDOP_O(c, LOAD_CONST, level, consts);
2476 Py_DECREF(level);
2477 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2478 ADDOP_NAME(c, IMPORT_NAME, alias->name, names);
2480 if (alias->asname) {
2481 r = compiler_import_as(c, alias->name, alias->asname);
2482 if (!r)
2483 return r;
2485 else {
2486 identifier tmp = alias->name;
2487 const char *base = PyString_AS_STRING(alias->name);
2488 char *dot = strchr(base, '.');
2489 if (dot)
2490 tmp = PyString_FromStringAndSize(base,
2491 dot - base);
2492 r = compiler_nameop(c, tmp, Store);
2493 if (dot) {
2494 Py_DECREF(tmp);
2496 if (!r)
2497 return r;
2500 return 1;
2503 static int
2504 compiler_from_import(struct compiler *c, stmt_ty s)
2506 int i, n = asdl_seq_LEN(s->v.ImportFrom.names);
2508 PyObject *names = PyTuple_New(n);
2509 PyObject *level;
2511 if (!names)
2512 return 0;
2514 if (s->v.ImportFrom.level == 0 && c->c_flags &&
2515 !(c->c_flags->cf_flags & CO_FUTURE_ABSIMPORT))
2516 level = PyInt_FromLong(-1);
2517 else
2518 level = PyInt_FromLong(s->v.ImportFrom.level);
2520 if (!level) {
2521 Py_DECREF(names);
2522 return 0;
2525 /* build up the names */
2526 for (i = 0; i < n; i++) {
2527 alias_ty alias = asdl_seq_GET(s->v.ImportFrom.names, i);
2528 Py_INCREF(alias->name);
2529 PyTuple_SET_ITEM(names, i, alias->name);
2532 if (s->lineno > c->c_future->ff_lineno) {
2533 if (!strcmp(PyString_AS_STRING(s->v.ImportFrom.module),
2534 "__future__")) {
2535 Py_DECREF(level);
2536 Py_DECREF(names);
2537 return compiler_error(c,
2538 "from __future__ imports must occur "
2539 "at the beginning of the file");
2544 ADDOP_O(c, LOAD_CONST, level, consts);
2545 Py_DECREF(level);
2546 ADDOP_O(c, LOAD_CONST, names, consts);
2547 Py_DECREF(names);
2548 ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names);
2549 for (i = 0; i < n; i++) {
2550 alias_ty alias = asdl_seq_GET(s->v.ImportFrom.names, i);
2551 identifier store_name;
2553 if (i == 0 && *PyString_AS_STRING(alias->name) == '*') {
2554 assert(n == 1);
2555 ADDOP(c, IMPORT_STAR);
2556 return 1;
2559 ADDOP_NAME(c, IMPORT_FROM, alias->name, names);
2560 store_name = alias->name;
2561 if (alias->asname)
2562 store_name = alias->asname;
2564 if (!compiler_nameop(c, store_name, Store)) {
2565 Py_DECREF(names);
2566 return 0;
2569 /* remove imported module */
2570 ADDOP(c, POP_TOP);
2571 return 1;
/* Compile an assert statement:
       test is compiled, JUMP_IF_TRUE skips the raise;
       otherwise AssertionError is raised (with msg if present).
   Under -O (Py_OptimizeFlag) asserts compile to nothing.
   Returns 1 on success, 0 on error (exception set). */
static int
compiler_assert(struct compiler *c, stmt_ty s)
{
    /* Interned once and deliberately never released. */
    static PyObject *assertion_error = NULL;
    basicblock *end;

    if (Py_OptimizeFlag)
        return 1;
    if (assertion_error == NULL) {
        assertion_error = PyString_FromString("AssertionError");
        if (assertion_error == NULL)
            return 0;
    }
    VISIT(c, expr, s->v.Assert.test);
    end = compiler_new_block(c);
    if (end == NULL)
        return 0;
    ADDOP_JREL(c, JUMP_IF_TRUE, end);
    /* Truth value of the test is still on the stack in both arms;
       pop it before raising, and again at `end` after the jump. */
    ADDOP(c, POP_TOP);
    ADDOP_O(c, LOAD_GLOBAL, assertion_error, names);
    if (s->v.Assert.msg) {
        VISIT(c, expr, s->v.Assert.msg);
        ADDOP_I(c, RAISE_VARARGS, 2);
    }
    else {
        ADDOP_I(c, RAISE_VARARGS, 1);
    }
    compiler_use_next_block(c, end);
    ADDOP(c, POP_TOP);
    return 1;
}
/* Central statement dispatcher: emits code for one statement node.
   Simple statements are handled inline; compound ones delegate to the
   compiler_* helper for that node kind.
   Returns 1 on success, 0 on error (exception set). */
static int
compiler_visit_stmt(struct compiler *c, stmt_ty s)
{
    int i, n;

    /* Always assign a lineno to the next instruction for a stmt. */
    c->u->u_lineno = s->lineno;
    c->u->u_lineno_set = false;
    switch (s->kind) {
    case FunctionDef_kind:
        return compiler_function(c, s);
    case ClassDef_kind:
        return compiler_class(c, s);
    case Return_kind:
        if (c->u->u_ste->ste_type != FunctionBlock)
            return compiler_error(c, "'return' outside function");
        if (s->v.Return.value) {
            if (c->u->u_ste->ste_generator) {
                return compiler_error(c,
                    "'return' with argument inside generator");
            }
            VISIT(c, expr, s->v.Return.value);
        }
        else
            ADDOP_O(c, LOAD_CONST, Py_None, consts);
        ADDOP(c, RETURN_VALUE);
        break;
    case Delete_kind:
        VISIT_SEQ(c, expr, s->v.Delete.targets)
        break;
    case Assign_kind:
        /* a = b = expr: duplicate the value for all but the last
           target, then store into each target in turn. */
        n = asdl_seq_LEN(s->v.Assign.targets);
        VISIT(c, expr, s->v.Assign.value);
        for (i = 0; i < n; i++) {
            if (i < n - 1)
                ADDOP(c, DUP_TOP);
            VISIT(c, expr,
                  (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
        }
        break;
    case AugAssign_kind:
        return compiler_augassign(c, s);
    case Print_kind:
        return compiler_print(c, s);
    case For_kind:
        return compiler_for(c, s);
    case While_kind:
        return compiler_while(c, s);
    case If_kind:
        return compiler_if(c, s);
    case Raise_kind:
        /* raise [type [, inst [, tback]]]: the grammar guarantees the
           nesting, so n counts how many operands were pushed. */
        n = 0;
        if (s->v.Raise.type) {
            VISIT(c, expr, s->v.Raise.type);
            n++;
            if (s->v.Raise.inst) {
                VISIT(c, expr, s->v.Raise.inst);
                n++;
                if (s->v.Raise.tback) {
                    VISIT(c, expr, s->v.Raise.tback);
                    n++;
                }
            }
        }
        ADDOP_I(c, RAISE_VARARGS, n);
        break;
    case TryExcept_kind:
        return compiler_try_except(c, s);
    case TryFinally_kind:
        return compiler_try_finally(c, s);
    case Assert_kind:
        return compiler_assert(c, s);
    case Import_kind:
        return compiler_import(c, s);
    case ImportFrom_kind:
        return compiler_from_import(c, s);
    case Exec_kind:
        /* EXEC_STMT expects (body, globals, locals) on the stack;
           missing operands default via None / DUP_TOP. */
        VISIT(c, expr, s->v.Exec.body);
        if (s->v.Exec.globals) {
            VISIT(c, expr, s->v.Exec.globals);
            if (s->v.Exec.locals) {
                VISIT(c, expr, s->v.Exec.locals);
            } else {
                ADDOP(c, DUP_TOP);
            }
        } else {
            ADDOP_O(c, LOAD_CONST, Py_None, consts);
            ADDOP(c, DUP_TOP);
        }
        ADDOP(c, EXEC_STMT);
        break;
    case Global_kind:
        /* handled entirely by the symbol table pass */
        break;
    case Expr_kind:
        VISIT(c, expr, s->v.Expr.value);
        /* At the interactive prompt, top-level expressions print. */
        if (c->c_interactive && c->c_nestlevel <= 1) {
            ADDOP(c, PRINT_EXPR);
        }
        else {
            ADDOP(c, POP_TOP);
        }
        break;
    case Pass_kind:
        break;
    case Break_kind:
        if (!c->u->u_nfblocks)
            return compiler_error(c, "'break' outside loop");
        ADDOP(c, BREAK_LOOP);
        break;
    case Continue_kind:
        return compiler_continue(c);
    case With_kind:
        return compiler_with(c, s);
    }
    return 1;
}
2722 static int
2723 unaryop(unaryop_ty op)
2725 switch (op) {
2726 case Invert:
2727 return UNARY_INVERT;
2728 case Not:
2729 return UNARY_NOT;
2730 case UAdd:
2731 return UNARY_POSITIVE;
2732 case USub:
2733 return UNARY_NEGATIVE;
2735 return 0;
2738 static int
2739 binop(struct compiler *c, operator_ty op)
2741 switch (op) {
2742 case Add:
2743 return BINARY_ADD;
2744 case Sub:
2745 return BINARY_SUBTRACT;
2746 case Mult:
2747 return BINARY_MULTIPLY;
2748 case Div:
2749 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2750 return BINARY_TRUE_DIVIDE;
2751 else
2752 return BINARY_DIVIDE;
2753 case Mod:
2754 return BINARY_MODULO;
2755 case Pow:
2756 return BINARY_POWER;
2757 case LShift:
2758 return BINARY_LSHIFT;
2759 case RShift:
2760 return BINARY_RSHIFT;
2761 case BitOr:
2762 return BINARY_OR;
2763 case BitXor:
2764 return BINARY_XOR;
2765 case BitAnd:
2766 return BINARY_AND;
2767 case FloorDiv:
2768 return BINARY_FLOOR_DIVIDE;
2770 return 0;
2773 static int
2774 cmpop(cmpop_ty op)
2776 switch (op) {
2777 case Eq:
2778 return PyCmp_EQ;
2779 case NotEq:
2780 return PyCmp_NE;
2781 case Lt:
2782 return PyCmp_LT;
2783 case LtE:
2784 return PyCmp_LE;
2785 case Gt:
2786 return PyCmp_GT;
2787 case GtE:
2788 return PyCmp_GE;
2789 case Is:
2790 return PyCmp_IS;
2791 case IsNot:
2792 return PyCmp_IS_NOT;
2793 case In:
2794 return PyCmp_IN;
2795 case NotIn:
2796 return PyCmp_NOT_IN;
2798 return PyCmp_BAD;
2801 static int
2802 inplace_binop(struct compiler *c, operator_ty op)
2804 switch (op) {
2805 case Add:
2806 return INPLACE_ADD;
2807 case Sub:
2808 return INPLACE_SUBTRACT;
2809 case Mult:
2810 return INPLACE_MULTIPLY;
2811 case Div:
2812 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2813 return INPLACE_TRUE_DIVIDE;
2814 else
2815 return INPLACE_DIVIDE;
2816 case Mod:
2817 return INPLACE_MODULO;
2818 case Pow:
2819 return INPLACE_POWER;
2820 case LShift:
2821 return INPLACE_LSHIFT;
2822 case RShift:
2823 return INPLACE_RSHIFT;
2824 case BitOr:
2825 return INPLACE_OR;
2826 case BitXor:
2827 return INPLACE_XOR;
2828 case BitAnd:
2829 return INPLACE_AND;
2830 case FloorDiv:
2831 return INPLACE_FLOOR_DIVIDE;
2833 PyErr_Format(PyExc_SystemError,
2834 "inplace binary op %d should not be possible", op);
2835 return 0;
2838 static int
2839 compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx)
2841 int op, scope, arg;
2842 enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype;
2844 PyObject *dict = c->u->u_names;
2845 PyObject *mangled;
2846 /* XXX AugStore isn't used anywhere! */
2848 /* First check for assignment to __debug__. Param? */
2849 if ((ctx == Store || ctx == AugStore || ctx == Del)
2850 && !strcmp(PyString_AS_STRING(name), "__debug__")) {
2851 return compiler_error(c, "can not assign to __debug__");
2854 mangled = _Py_Mangle(c->u->u_private, name);
2855 if (!mangled)
2856 return 0;
2858 op = 0;
2859 optype = OP_NAME;
2860 scope = PyST_GetScope(c->u->u_ste, mangled);
2861 switch (scope) {
2862 case FREE:
2863 dict = c->u->u_freevars;
2864 optype = OP_DEREF;
2865 break;
2866 case CELL:
2867 dict = c->u->u_cellvars;
2868 optype = OP_DEREF;
2869 break;
2870 case LOCAL:
2871 if (c->u->u_ste->ste_type == FunctionBlock)
2872 optype = OP_FAST;
2873 break;
2874 case GLOBAL_IMPLICIT:
2875 if (c->u->u_ste->ste_type == FunctionBlock &&
2876 !c->u->u_ste->ste_unoptimized)
2877 optype = OP_GLOBAL;
2878 break;
2879 case GLOBAL_EXPLICIT:
2880 optype = OP_GLOBAL;
2881 break;
2882 default:
2883 /* scope can be 0 */
2884 break;
2887 /* XXX Leave assert here, but handle __doc__ and the like better */
2888 assert(scope || PyString_AS_STRING(name)[0] == '_');
2890 switch (optype) {
2891 case OP_DEREF:
2892 switch (ctx) {
2893 case Load: op = LOAD_DEREF; break;
2894 case Store: op = STORE_DEREF; break;
2895 case AugLoad:
2896 case AugStore:
2897 break;
2898 case Del:
2899 PyErr_Format(PyExc_SyntaxError,
2900 "can not delete variable '%s' referenced "
2901 "in nested scope",
2902 PyString_AS_STRING(name));
2903 Py_DECREF(mangled);
2904 return 0;
2905 case Param:
2906 default:
2907 PyErr_SetString(PyExc_SystemError,
2908 "param invalid for deref variable");
2909 return 0;
2911 break;
2912 case OP_FAST:
2913 switch (ctx) {
2914 case Load: op = LOAD_FAST; break;
2915 case Store: op = STORE_FAST; break;
2916 case Del: op = DELETE_FAST; break;
2917 case AugLoad:
2918 case AugStore:
2919 break;
2920 case Param:
2921 default:
2922 PyErr_SetString(PyExc_SystemError,
2923 "param invalid for local variable");
2924 return 0;
2926 ADDOP_O(c, op, mangled, varnames);
2927 Py_DECREF(mangled);
2928 return 1;
2929 case OP_GLOBAL:
2930 switch (ctx) {
2931 case Load: op = LOAD_GLOBAL; break;
2932 case Store: op = STORE_GLOBAL; break;
2933 case Del: op = DELETE_GLOBAL; break;
2934 case AugLoad:
2935 case AugStore:
2936 break;
2937 case Param:
2938 default:
2939 PyErr_SetString(PyExc_SystemError,
2940 "param invalid for global variable");
2941 return 0;
2943 break;
2944 case OP_NAME:
2945 switch (ctx) {
2946 case Load: op = LOAD_NAME; break;
2947 case Store: op = STORE_NAME; break;
2948 case Del: op = DELETE_NAME; break;
2949 case AugLoad:
2950 case AugStore:
2951 break;
2952 case Param:
2953 default:
2954 PyErr_SetString(PyExc_SystemError,
2955 "param invalid for name variable");
2956 return 0;
2958 break;
2961 assert(op);
2962 arg = compiler_add_o(c, dict, mangled);
2963 Py_DECREF(mangled);
2964 if (arg < 0)
2965 return 0;
2966 return compiler_addop_i(c, op, arg);
/* Compile `a and b and ...` / `a or b or ...` with short-circuiting:
   each operand but the last conditionally jumps to `end`, leaving the
   deciding value on the stack; otherwise it is popped and the next
   operand is evaluated.  Returns 1 on success, 0 on error. */
static int
compiler_boolop(struct compiler *c, expr_ty e)
{
    basicblock *end;
    int jumpi, i, n;
    asdl_seq *s;

    assert(e->kind == BoolOp_kind);
    /* `and` bails out on a false value, `or` on a true one. */
    if (e->v.BoolOp.op == And)
        jumpi = JUMP_IF_FALSE;
    else
        jumpi = JUMP_IF_TRUE;
    end = compiler_new_block(c);
    if (end == NULL)
        return 0;
    s = e->v.BoolOp.values;
    n = asdl_seq_LEN(s) - 1;
    for (i = 0; i < n; ++i) {
        VISIT(c, expr, asdl_seq_GET(s, i));
        ADDOP_JREL(c, jumpi, end);
        ADDOP(c, POP_TOP)
    }
    /* Last operand falls through unconditionally. */
    VISIT(c, expr, asdl_seq_GET(s, n));
    compiler_use_next_block(c, end);
    return 1;
}
2996 static int
2997 compiler_list(struct compiler *c, expr_ty e)
2999 int n = asdl_seq_LEN(e->v.List.elts);
3000 if (e->v.List.ctx == Store) {
3001 ADDOP_I(c, UNPACK_SEQUENCE, n);
3003 VISIT_SEQ(c, expr, e->v.List.elts);
3004 if (e->v.List.ctx == Load) {
3005 ADDOP_I(c, BUILD_LIST, n);
3007 return 1;
3010 static int
3011 compiler_tuple(struct compiler *c, expr_ty e)
3013 int n = asdl_seq_LEN(e->v.Tuple.elts);
3014 if (e->v.Tuple.ctx == Store) {
3015 ADDOP_I(c, UNPACK_SEQUENCE, n);
3017 VISIT_SEQ(c, expr, e->v.Tuple.elts);
3018 if (e->v.Tuple.ctx == Load) {
3019 ADDOP_I(c, BUILD_TUPLE, n);
3021 return 1;
/* Compile a comparison, including chained forms like `a < b < c`.
   A chain evaluates each operand once: intermediate results use
   DUP_TOP/ROT_THREE so the right operand stays available for the
   next comparison, and a short-circuit jump to `cleanup` pops the
   leftover operand when an intermediate comparison is false.
   Returns 1 on success, 0 on error. */
static int
compiler_compare(struct compiler *c, expr_ty e)
{
    int i, n;
    basicblock *cleanup = NULL;

    /* XXX the logic can be cleaned up for 1 or multiple comparisons */
    VISIT(c, expr, e->v.Compare.left);
    n = asdl_seq_LEN(e->v.Compare.ops);
    assert(n > 0);
    if (n > 1) {
        cleanup = compiler_new_block(c);
        if (cleanup == NULL)
            return 0;
        VISIT(c, expr, asdl_seq_GET(e->v.Compare.comparators, 0));
    }
    for (i = 1; i < n; i++) {
        ADDOP(c, DUP_TOP);
        ADDOP(c, ROT_THREE);
        /* XXX We're casting a void* to cmpop_ty in the next stmt. */
        ADDOP_I(c, COMPARE_OP,
            cmpop((cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i - 1)));
        ADDOP_JREL(c, JUMP_IF_FALSE, cleanup);
        NEXT_BLOCK(c);
        ADDOP(c, POP_TOP);
        if (i < (n - 1))
            VISIT(c, expr, asdl_seq_GET(e->v.Compare.comparators, i));
    }
    /* Final comparison leaves the chain's result on the stack. */
    VISIT(c, expr, asdl_seq_GET(e->v.Compare.comparators, n - 1));
    ADDOP_I(c, COMPARE_OP,
        /* XXX We're casting a void* to cmpop_ty in the next stmt. */
       cmpop((cmpop_ty)asdl_seq_GET(e->v.Compare.ops, n - 1)));
    if (n > 1) {
        /* `cleanup` discards the dangling operand from an early-false
           chain, keeping the false result as the expression value. */
        basicblock *end = compiler_new_block(c);
        if (end == NULL)
            return 0;
        ADDOP_JREL(c, JUMP_FORWARD, end);
        compiler_use_next_block(c, cleanup);
        ADDOP(c, ROT_TWO);
        ADDOP(c, POP_TOP);
        compiler_use_next_block(c, end);
    }
    return 1;
}
/* Compile a call expression.  The CALL_FUNCTION* oparg packs the
   positional count in the low byte and the keyword count in the next
   byte; *args / **kwargs select one of the four opcode variants.
   Returns 1 on success, 0 on error. */
static int
compiler_call(struct compiler *c, expr_ty e)
{
    int n, code = 0;

    VISIT(c, expr, e->v.Call.func);
    n = asdl_seq_LEN(e->v.Call.args);
    VISIT_SEQ(c, expr, e->v.Call.args);
    if (e->v.Call.keywords) {
        VISIT_SEQ(c, keyword, e->v.Call.keywords);
        /* keyword count goes in the second byte of the oparg */
        n |= asdl_seq_LEN(e->v.Call.keywords) << 8;
    }
    if (e->v.Call.starargs) {
        VISIT(c, expr, e->v.Call.starargs);
        code |= 1;    /* bit 0: *args present */
    }
    if (e->v.Call.kwargs) {
        VISIT(c, expr, e->v.Call.kwargs);
        code |= 2;    /* bit 1: **kwargs present */
    }
    switch (code) {
    case 0:
        ADDOP_I(c, CALL_FUNCTION, n);
        break;
    case 1:
        ADDOP_I(c, CALL_FUNCTION_VAR, n);
        break;
    case 2:
        ADDOP_I(c, CALL_FUNCTION_KW, n);
        break;
    case 3:
        ADDOP_I(c, CALL_FUNCTION_VAR_KW, n);
        break;
    }
    return 1;
}
/* Emit one `for` clause of a list comprehension (recursing for nested
   clauses): iterate, test each `if`, and at the innermost level load
   the temporary list (tmpname) and LIST_APPEND the element.
   Returns 1 on success, 0 on error. */
static int
compiler_listcomp_generator(struct compiler *c, PyObject *tmpname,
                            asdl_seq *generators, int gen_index,
                            expr_ty elt)
{
    /* generate code for the iterator, then each of the ifs,
       and then write to the element */

    comprehension_ty l;
    basicblock *start, *anchor, *skip, *if_cleanup;
    int i, n;

    start = compiler_new_block(c);
    skip = compiler_new_block(c);
    if_cleanup = compiler_new_block(c);
    anchor = compiler_new_block(c);

    if (start == NULL || skip == NULL || if_cleanup == NULL ||
        anchor == NULL)
        return 0;

    l = asdl_seq_GET(generators, gen_index);
    VISIT(c, expr, l->iter);
    ADDOP(c, GET_ITER);
    compiler_use_next_block(c, start);
    ADDOP_JREL(c, FOR_ITER, anchor);
    NEXT_BLOCK(c);
    VISIT(c, expr, l->target);

    /* XXX this needs to be cleaned up...a lot! */
    n = asdl_seq_LEN(l->ifs);
    for (i = 0; i < n; i++) {
        expr_ty e = asdl_seq_GET(l->ifs, i);
        VISIT(c, expr, e);
        ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup);
        NEXT_BLOCK(c);
        ADDOP(c, POP_TOP);
    }

    /* Recurse for any further nested `for` clauses. */
    if (++gen_index < asdl_seq_LEN(generators))
        if (!compiler_listcomp_generator(c, tmpname,
                                         generators, gen_index, elt))
            return 0;

    /* only append after the last for generator */
    if (gen_index >= asdl_seq_LEN(generators)) {
        if (!compiler_nameop(c, tmpname, Load))
            return 0;
        VISIT(c, expr, elt);
        ADDOP(c, LIST_APPEND);

        compiler_use_next_block(c, skip);
    }
    /* Cleanup ladder: each failed `if` pops its test value, then the
       loop restarts; the JUMP_FORWARD 1 hops over the next POP_TOP. */
    for (i = 0; i < n; i++) {
        ADDOP_I(c, JUMP_FORWARD, 1);
        if (i == 0)
            compiler_use_next_block(c, if_cleanup);
        ADDOP(c, POP_TOP);
    }
    ADDOP_JABS(c, JUMP_ABSOLUTE, start);
    compiler_use_next_block(c, anchor);
    /* delete the append method added to locals */
    if (gen_index == 1)
        if (!compiler_nameop(c, tmpname, Del))
            return 0;

    return 1;
}
/* Compile a list comprehension: build an empty list, stash a second
   reference in a hidden temporary name, and let
   compiler_listcomp_generator() emit the loop(s).
   Returns 1 on success, 0 on error. */
static int
compiler_listcomp(struct compiler *c, expr_ty e)
{
    identifier tmp;
    int rc = 0;
    /* interned once; NOTE(review): looks unused after interning here --
       presumably kept for the append lookup elsewhere; confirm. */
    static identifier append;
    asdl_seq *generators = e->v.ListComp.generators;

    assert(e->kind == ListComp_kind);
    if (!append) {
        append = PyString_InternFromString("append");
        if (!append)
            return 0;
    }
    tmp = compiler_new_tmpname(c);
    if (!tmp)
        return 0;
    ADDOP_I(c, BUILD_LIST, 0);
    ADDOP(c, DUP_TOP);
    if (compiler_nameop(c, tmp, Store))
        rc = compiler_listcomp_generator(c, tmp, generators, 0,
                                         e->v.ListComp.elt);
    Py_DECREF(tmp);
    return rc;
}
/* Emit one `for` clause of a generator expression body (recursing for
   nested clauses).  The outermost iterable arrives as the implicit
   argument in local slot 0; inner iterables are evaluated on the fly.
   The innermost level yields the element.  Returns 1 on success. */
static int
compiler_genexp_generator(struct compiler *c,
                          asdl_seq *generators, int gen_index,
                          expr_ty elt)
{
    /* generate code for the iterator, then each of the ifs,
       and then write to the element */

    comprehension_ty ge;
    basicblock *start, *anchor, *skip, *if_cleanup, *end;
    int i, n;

    start = compiler_new_block(c);
    skip = compiler_new_block(c);
    if_cleanup = compiler_new_block(c);
    anchor = compiler_new_block(c);
    end = compiler_new_block(c);

    if (start == NULL || skip == NULL || if_cleanup == NULL ||
        anchor == NULL || end == NULL)
        return 0;

    ge = asdl_seq_GET(generators, gen_index);
    ADDOP_JREL(c, SETUP_LOOP, end);
    if (!compiler_push_fblock(c, LOOP, start))
        return 0;

    if (gen_index == 0) {
        /* Receive outermost iter as an implicit argument */
        c->u->u_argcount = 1;
        ADDOP_I(c, LOAD_FAST, 0);
    }
    else {
        /* Sub-iter - calculate on the fly */
        VISIT(c, expr, ge->iter);
        ADDOP(c, GET_ITER);
    }
    compiler_use_next_block(c, start);
    ADDOP_JREL(c, FOR_ITER, anchor);
    NEXT_BLOCK(c);
    VISIT(c, expr, ge->target);

    /* XXX this needs to be cleaned up...a lot! */
    n = asdl_seq_LEN(ge->ifs);
    for (i = 0; i < n; i++) {
        expr_ty e = asdl_seq_GET(ge->ifs, i);
        VISIT(c, expr, e);
        ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup);
        NEXT_BLOCK(c);
        ADDOP(c, POP_TOP);
    }

    /* Recurse for any further nested `for` clauses. */
    if (++gen_index < asdl_seq_LEN(generators))
        if (!compiler_genexp_generator(c, generators, gen_index, elt))
            return 0;

    /* only append after the last 'for' generator */
    if (gen_index >= asdl_seq_LEN(generators)) {
        VISIT(c, expr, elt);
        ADDOP(c, YIELD_VALUE);
        ADDOP(c, POP_TOP);

        compiler_use_next_block(c, skip);
    }
    /* Cleanup ladder for failed `if` tests (see listcomp variant). */
    for (i = 0; i < n; i++) {
        ADDOP_I(c, JUMP_FORWARD, 1);
        if (i == 0)
            compiler_use_next_block(c, if_cleanup);

        ADDOP(c, POP_TOP);
    }
    ADDOP_JABS(c, JUMP_ABSOLUTE, start);
    compiler_use_next_block(c, anchor);
    ADDOP(c, POP_BLOCK);
    compiler_pop_fblock(c, LOOP, start);
    compiler_use_next_block(c, end);

    return 1;
}
3281 static int
3282 compiler_genexp(struct compiler *c, expr_ty e)
3284 static identifier name;
3285 PyCodeObject *co;
3286 expr_ty outermost_iter = ((comprehension_ty)
3287 (asdl_seq_GET(e->v.GeneratorExp.generators,
3288 0)))->iter;
3290 if (!name) {
3291 name = PyString_FromString("<genexpr>");
3292 if (!name)
3293 return 0;
3296 if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
3297 return 0;
3298 compiler_genexp_generator(c, e->v.GeneratorExp.generators, 0,
3299 e->v.GeneratorExp.elt);
3300 co = assemble(c, 1);
3301 compiler_exit_scope(c);
3302 if (co == NULL)
3303 return 0;
3305 compiler_make_closure(c, co, 0);
3306 Py_DECREF(co);
3308 VISIT(c, expr, outermost_iter);
3309 ADDOP(c, GET_ITER);
3310 ADDOP_I(c, CALL_FUNCTION, 1);
3312 return 1;
/* Compile one keyword argument of a call: push the keyword's name as
   a constant, then its value.  Returns 1 on success, 0 on error. */
static int
compiler_visit_keyword(struct compiler *c, keyword_ty k)
{
    ADDOP_O(c, LOAD_CONST, k->arg, consts);
    VISIT(c, expr, k->value);
    return 1;
}
3323 /* Test whether expression is constant. For constants, report
3324 whether they are true or false.
3326 Return values: 1 for true, 0 for false, -1 for non-constant.
3329 static int
3330 expr_constant(expr_ty e)
3332 switch (e->kind) {
3333 case Num_kind:
3334 return PyObject_IsTrue(e->v.Num.n);
3335 case Str_kind:
3336 return PyObject_IsTrue(e->v.Str.s);
3337 default:
3338 return -1;
3343 Implements the with statement from PEP 343.
3345 The semantics outlined in that PEP are as follows:
3347 with EXPR as VAR:
3348 BLOCK
3350 It is implemented roughly as:
3352 context = (EXPR).__context__()
3353 exit = context.__exit__ # not calling it
3354 value = context.__enter__()
3355 try:
3356 VAR = value # if VAR present in the syntax
3357 BLOCK
3358 finally:
3359 if an exception was raised:
3360 exc = copy of (exception, instance, traceback)
3361 else:
3362 exc = (None, None, None)
3363 exit(*exc)
/* Compile a `with` statement per PEP 343 (see the comment block above
   this function for the expansion).  __exit__ and the __enter__()
   result are parked in hidden temporary names because SETUP_FINALLY
   records the stack level.  Returns 1 on success, 0 on error. */
static int
compiler_with(struct compiler *c, stmt_ty s)
{
    static identifier context_attr, enter_attr, exit_attr;
    basicblock *block, *finally;
    identifier tmpexit, tmpvalue = NULL;

    assert(s->kind == With_kind);

    /* Intern the attribute names once; deliberately never released. */
    if (!context_attr) {
        context_attr = PyString_InternFromString("__context__");
        if (!context_attr)
            return 0;
    }
    if (!enter_attr) {
        enter_attr = PyString_InternFromString("__enter__");
        if (!enter_attr)
            return 0;
    }
    if (!exit_attr) {
        exit_attr = PyString_InternFromString("__exit__");
        if (!exit_attr)
            return 0;
    }

    block = compiler_new_block(c);
    finally = compiler_new_block(c);
    if (!block || !finally)
        return 0;

    /* Create a temporary variable to hold context.__exit__ */
    tmpexit = compiler_new_tmpname(c);
    if (tmpexit == NULL)
        return 0;
    PyArena_AddPyObject(c->c_arena, tmpexit);

    if (s->v.With.optional_vars) {
        /* Create a temporary variable to hold context.__enter__().
           We need to do this rather than preserving it on the stack
           because SETUP_FINALLY remembers the stack level.
           We need to do the assignment *inside* the try/finally
           so that context.__exit__() is called when the assignment
           fails.  But we need to call context.__enter__() *before*
           the try/finally so that if it fails we won't call
           context.__exit__().
        */
        tmpvalue = compiler_new_tmpname(c);
        if (tmpvalue == NULL)
            return 0;
        PyArena_AddPyObject(c->c_arena, tmpvalue);
    }

    /* Evaluate (EXPR).__context__() */
    VISIT(c, expr, s->v.With.context_expr);
    ADDOP_O(c, LOAD_ATTR, context_attr, names);
    ADDOP_I(c, CALL_FUNCTION, 0);

    /* Squirrel away context.__exit__ */
    ADDOP(c, DUP_TOP);
    ADDOP_O(c, LOAD_ATTR, exit_attr, names);
    if (!compiler_nameop(c, tmpexit, Store))
        return 0;

    /* Call context.__enter__() */
    ADDOP_O(c, LOAD_ATTR, enter_attr, names);
    ADDOP_I(c, CALL_FUNCTION, 0);

    if (s->v.With.optional_vars) {
        /* Store it in tmpvalue */
        if (!compiler_nameop(c, tmpvalue, Store))
            return 0;
    }
    else {
        /* Discard result from context.__enter__() */
        ADDOP(c, POP_TOP);
    }

    /* Start the try block */
    ADDOP_JREL(c, SETUP_FINALLY, finally);

    compiler_use_next_block(c, block);
    if (!compiler_push_fblock(c, FINALLY_TRY, block)) {
        return 0;
    }

    if (s->v.With.optional_vars) {
        /* Bind saved result of context.__enter__() to VAR */
        if (!compiler_nameop(c, tmpvalue, Load) ||
            !compiler_nameop(c, tmpvalue, Del))
            return 0;
        VISIT(c, expr, s->v.With.optional_vars);
    }

    /* BLOCK code */
    VISIT_SEQ(c, stmt, s->v.With.body);

    /* End of try block; start the finally block */
    ADDOP(c, POP_BLOCK);
    compiler_pop_fblock(c, FINALLY_TRY, block);

    ADDOP_O(c, LOAD_CONST, Py_None, consts);
    compiler_use_next_block(c, finally);
    if (!compiler_push_fblock(c, FINALLY_END, finally))
        return 0;

    /* Finally block starts; push tmpexit and issue our magic opcode. */
    if (!compiler_nameop(c, tmpexit, Load) ||
        !compiler_nameop(c, tmpexit, Del))
        return 0;
    ADDOP(c, WITH_CLEANUP);

    /* Finally block ends. */
    ADDOP(c, END_FINALLY);
    compiler_pop_fblock(c, FINALLY_END, finally);
    return 1;
}
/* Central expression dispatcher: emits code for one expression node.
   For assignable nodes (Attribute, Subscript, Name, List, Tuple) the
   node's expr_context selects load/store/delete/augmented variants.
   Returns 1 on success, 0 on error (exception set). */
static int
compiler_visit_expr(struct compiler *c, expr_ty e)
{
    int i, n;

    /* Only advance the line number; expressions on earlier lines keep
       the enclosing statement's lineno. */
    if (e->lineno > c->u->u_lineno) {
        c->u->u_lineno = e->lineno;
        c->u->u_lineno_set = false;
    }
    switch (e->kind) {
    case BoolOp_kind:
        return compiler_boolop(c, e);
    case BinOp_kind:
        VISIT(c, expr, e->v.BinOp.left);
        VISIT(c, expr, e->v.BinOp.right);
        ADDOP(c, binop(c, e->v.BinOp.op));
        break;
    case UnaryOp_kind:
        VISIT(c, expr, e->v.UnaryOp.operand);
        ADDOP(c, unaryop(e->v.UnaryOp.op));
        break;
    case Lambda_kind:
        return compiler_lambda(c, e);
    case IfExp_kind:
        return compiler_ifexp(c, e);
    case Dict_kind:
        /* XXX get rid of arg? */
        ADDOP_I(c, BUILD_MAP, 0);
        n = asdl_seq_LEN(e->v.Dict.values);
        /* We must arrange things just right for STORE_SUBSCR.
           It wants the stack to look like (value) (dict) (key) */
        for (i = 0; i < n; i++) {
            ADDOP(c, DUP_TOP);
            VISIT(c, expr, asdl_seq_GET(e->v.Dict.values, i));
            ADDOP(c, ROT_TWO);
            VISIT(c, expr, asdl_seq_GET(e->v.Dict.keys, i));
            ADDOP(c, STORE_SUBSCR);
        }
        break;
    case ListComp_kind:
        return compiler_listcomp(c, e);
    case GeneratorExp_kind:
        return compiler_genexp(c, e);
    case Yield_kind:
        if (c->u->u_ste->ste_type != FunctionBlock)
            return compiler_error(c, "'yield' outside function");
        /* yield inside try/finally is forbidden at this language
           version; scan the enclosing frame-block stack. */
        for (i = 0; i < c->u->u_nfblocks; i++) {
            if (c->u->u_fblock[i].fb_type == FINALLY_TRY)
                return compiler_error(
                    c, "'yield' not allowed in a 'try' "
                    "block with a 'finally' clause");
        }
        if (e->v.Yield.value) {
            VISIT(c, expr, e->v.Yield.value);
        }
        else {
            ADDOP_O(c, LOAD_CONST, Py_None, consts);
        }
        ADDOP(c, YIELD_VALUE);
        break;
    case Compare_kind:
        return compiler_compare(c, e);
    case Call_kind:
        return compiler_call(c, e);
    case Repr_kind:
        VISIT(c, expr, e->v.Repr.value);
        ADDOP(c, UNARY_CONVERT);
        break;
    case Num_kind:
        ADDOP_O(c, LOAD_CONST, e->v.Num.n, consts);
        break;
    case Str_kind:
        ADDOP_O(c, LOAD_CONST, e->v.Str.s, consts);
        break;
    /* The following exprs can be assignment targets. */
    case Attribute_kind:
        if (e->v.Attribute.ctx != AugStore)
            VISIT(c, expr, e->v.Attribute.value);
        switch (e->v.Attribute.ctx) {
        case AugLoad:
            ADDOP(c, DUP_TOP);
            /* Fall through to load */
        case Load:
            ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names);
            break;
        case AugStore:
            ADDOP(c, ROT_TWO);
            /* Fall through to save */
        case Store:
            ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names);
            break;
        case Del:
            ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names);
            break;
        case Param:
        default:
            PyErr_SetString(PyExc_SystemError,
                            "param invalid in attribute expression");
            return 0;
        }
        break;
    case Subscript_kind:
        /* In AugStore context the object was already left on the
           stack by the AugLoad pass, so it is not re-visited. */
        switch (e->v.Subscript.ctx) {
        case AugLoad:
            VISIT(c, expr, e->v.Subscript.value);
            VISIT_SLICE(c, e->v.Subscript.slice, AugLoad);
            break;
        case Load:
            VISIT(c, expr, e->v.Subscript.value);
            VISIT_SLICE(c, e->v.Subscript.slice, Load);
            break;
        case AugStore:
            VISIT_SLICE(c, e->v.Subscript.slice, AugStore);
            break;
        case Store:
            VISIT(c, expr, e->v.Subscript.value);
            VISIT_SLICE(c, e->v.Subscript.slice, Store);
            break;
        case Del:
            VISIT(c, expr, e->v.Subscript.value);
            VISIT_SLICE(c, e->v.Subscript.slice, Del);
            break;
        case Param:
        default:
            PyErr_SetString(PyExc_SystemError,
                            "param invalid in subscript expression");
            return 0;
        }
        break;
    case Name_kind:
        return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx);
    /* child nodes of List and Tuple will have expr_context set */
    case List_kind:
        return compiler_list(c, e);
    case Tuple_kind:
        return compiler_tuple(c, e);
    }
    return 1;
}
/* Compile augmented assignment (`x += v` etc.).  For attribute and
   subscript targets an auxiliary node is built in AugLoad context,
   visited to load the current value, then re-visited in AugStore
   context to write the result back without re-evaluating the object
   or index.  Returns 1 on success, 0 on error (exception set). */
static int
compiler_augassign(struct compiler *c, stmt_ty s)
{
    expr_ty e = s->v.AugAssign.target;
    expr_ty auge;

    assert(s->kind == AugAssign_kind);

    switch (e->kind) {
    case Attribute_kind:
        /* Arena-allocated helper node; no explicit free needed. */
        auge = Attribute(e->v.Attribute.value, e->v.Attribute.attr,
                         AugLoad, e->lineno, e->col_offset, c->c_arena);
        if (auge == NULL)
            return 0;
        VISIT(c, expr, auge);
        VISIT(c, expr, s->v.AugAssign.value);
        ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
        auge->v.Attribute.ctx = AugStore;
        VISIT(c, expr, auge);
        break;
    case Subscript_kind:
        auge = Subscript(e->v.Subscript.value, e->v.Subscript.slice,
                         AugLoad, e->lineno, e->col_offset, c->c_arena);
        if (auge == NULL)
            return 0;
        VISIT(c, expr, auge);
        VISIT(c, expr, s->v.AugAssign.value);
        ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
        auge->v.Subscript.ctx = AugStore;
        VISIT(c, expr, auge);
        break;
    case Name_kind:
        /* Plain name: load, operate, store. */
        VISIT(c, expr, s->v.AugAssign.target);
        VISIT(c, expr, s->v.AugAssign.value);
        ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
        return compiler_nameop(c, e->v.Name.id, Store);
    default:
        PyErr_Format(PyExc_SystemError,
                     "invalid node type (%d) for augmented assignment",
                     e->kind);
        return 0;
    }
    return 1;
}
3669 static int
3670 compiler_push_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3672 struct fblockinfo *f;
3673 if (c->u->u_nfblocks >= CO_MAXBLOCKS)
3674 return 0;
3675 f = &c->u->u_fblock[c->u->u_nfblocks++];
3676 f->fb_type = t;
3677 f->fb_block = b;
3678 return 1;
3681 static void
3682 compiler_pop_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3684 struct compiler_unit *u = c->u;
3685 assert(u->u_nfblocks > 0);
3686 u->u_nfblocks--;
3687 assert(u->u_fblock[u->u_nfblocks].fb_type == t);
3688 assert(u->u_fblock[u->u_nfblocks].fb_block == b);
/* Raises a SyntaxError and returns 0.
   If something goes wrong, a different exception may be raised.

   The SyntaxError argument tuple is (msg, (filename, lineno,
   offset, text)); offset is always None here, and text falls back to
   None when the source line cannot be fetched. */
static int
compiler_error(struct compiler *c, const char *errstr)
{
    PyObject *loc;
    PyObject *u = NULL, *v = NULL;

    loc = PyErr_ProgramText(c->c_filename, c->u->u_lineno);
    if (!loc) {
        Py_INCREF(Py_None);
        loc = Py_None;
    }
    u = Py_BuildValue("(ziOO)", c->c_filename, c->u->u_lineno,
                      Py_None, loc);
    if (!u)
        goto exit;
    v = Py_BuildValue("(zO)", errstr, u);
    if (!v)
        goto exit;
    PyErr_SetObject(PyExc_SyntaxError, v);
 exit:
    /* loc is always owned here (either a new ref or an INCREF'd None) */
    Py_DECREF(loc);
    Py_XDECREF(u);
    Py_XDECREF(v);
    return 0;
}
3721 static int
3722 compiler_handle_subscr(struct compiler *c, const char *kind,
3723 expr_context_ty ctx)
3725 int op = 0;
3727 /* XXX this code is duplicated */
3728 switch (ctx) {
3729 case AugLoad: /* fall through to Load */
3730 case Load: op = BINARY_SUBSCR; break;
3731 case AugStore:/* fall through to Store */
3732 case Store: op = STORE_SUBSCR; break;
3733 case Del: op = DELETE_SUBSCR; break;
3734 case Param:
3735 PyErr_Format(PyExc_SystemError,
3736 "invalid %s kind %d in subscript\n",
3737 kind, ctx);
3738 return 0;
3740 if (ctx == AugLoad) {
3741 ADDOP_I(c, DUP_TOPX, 2);
3743 else if (ctx == AugStore) {
3744 ADDOP(c, ROT_THREE);
3746 ADDOP(c, op);
3747 return 1;
3750 static int
3751 compiler_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3753 int n = 2;
3754 assert(s->kind == Slice_kind);
3756 /* only handles the cases where BUILD_SLICE is emitted */
3757 if (s->v.Slice.lower) {
3758 VISIT(c, expr, s->v.Slice.lower);
3760 else {
3761 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3764 if (s->v.Slice.upper) {
3765 VISIT(c, expr, s->v.Slice.upper);
3767 else {
3768 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3771 if (s->v.Slice.step) {
3772 n++;
3773 VISIT(c, expr, s->v.Slice.step);
3775 ADDOP_I(c, BUILD_SLICE, n);
3776 return 1;
3779 static int
3780 compiler_simple_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3782 int op = 0, slice_offset = 0, stack_count = 0;
3784 assert(s->v.Slice.step == NULL);
3785 if (s->v.Slice.lower) {
3786 slice_offset++;
3787 stack_count++;
3788 if (ctx != AugStore)
3789 VISIT(c, expr, s->v.Slice.lower);
3791 if (s->v.Slice.upper) {
3792 slice_offset += 2;
3793 stack_count++;
3794 if (ctx != AugStore)
3795 VISIT(c, expr, s->v.Slice.upper);
3798 if (ctx == AugLoad) {
3799 switch (stack_count) {
3800 case 0: ADDOP(c, DUP_TOP); break;
3801 case 1: ADDOP_I(c, DUP_TOPX, 2); break;
3802 case 2: ADDOP_I(c, DUP_TOPX, 3); break;
3805 else if (ctx == AugStore) {
3806 switch (stack_count) {
3807 case 0: ADDOP(c, ROT_TWO); break;
3808 case 1: ADDOP(c, ROT_THREE); break;
3809 case 2: ADDOP(c, ROT_FOUR); break;
3813 switch (ctx) {
3814 case AugLoad: /* fall through to Load */
3815 case Load: op = SLICE; break;
3816 case AugStore:/* fall through to Store */
3817 case Store: op = STORE_SLICE; break;
3818 case Del: op = DELETE_SLICE; break;
3819 case Param:
3820 default:
3821 PyErr_SetString(PyExc_SystemError,
3822 "param invalid in simple slice");
3823 return 0;
3826 ADDOP(c, op + slice_offset);
3827 return 1;
3830 static int
3831 compiler_visit_nested_slice(struct compiler *c, slice_ty s,
3832 expr_context_ty ctx)
3834 switch (s->kind) {
3835 case Ellipsis_kind:
3836 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
3837 break;
3838 case Slice_kind:
3839 return compiler_slice(c, s, ctx);
3840 case Index_kind:
3841 VISIT(c, expr, s->v.Index.value);
3842 break;
3843 case ExtSlice_kind:
3844 default:
3845 PyErr_SetString(PyExc_SystemError,
3846 "extended slice invalid in nested slice");
3847 return 0;
3849 return 1;
3853 static int
3854 compiler_visit_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3856 char * kindname = NULL;
3857 switch (s->kind) {
3858 case Index_kind:
3859 kindname = "index";
3860 if (ctx != AugStore) {
3861 VISIT(c, expr, s->v.Index.value);
3863 break;
3864 case Ellipsis_kind:
3865 kindname = "ellipsis";
3866 if (ctx != AugStore) {
3867 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
3869 break;
3870 case Slice_kind:
3871 kindname = "slice";
3872 if (!s->v.Slice.step)
3873 return compiler_simple_slice(c, s, ctx);
3874 if (ctx != AugStore) {
3875 if (!compiler_slice(c, s, ctx))
3876 return 0;
3878 break;
3879 case ExtSlice_kind:
3880 kindname = "extended slice";
3881 if (ctx != AugStore) {
3882 int i, n = asdl_seq_LEN(s->v.ExtSlice.dims);
3883 for (i = 0; i < n; i++) {
3884 slice_ty sub = asdl_seq_GET(s->v.ExtSlice.dims, i);
3885 if (!compiler_visit_nested_slice(c, sub, ctx))
3886 return 0;
3888 ADDOP_I(c, BUILD_TUPLE, n);
3890 break;
3891 default:
3892 PyErr_Format(PyExc_SystemError,
3893 "invalid subscript kind %d", s->kind);
3894 return 0;
3896 return compiler_handle_subscr(c, kindname, ctx);
3899 /* do depth-first search of basic block graph, starting with block.
3900 post records the block indices in post-order.
3902 XXX must handle implicit jumps from one block to next
3905 static void
3906 dfs(struct compiler *c, basicblock *b, struct assembler *a)
3908 int i;
3909 struct instr *instr = NULL;
3911 if (b->b_seen)
3912 return;
3913 b->b_seen = 1;
3914 if (b->b_next != NULL)
3915 dfs(c, b->b_next, a);
3916 for (i = 0; i < b->b_iused; i++) {
3917 instr = &b->b_instr[i];
3918 if (instr->i_jrel || instr->i_jabs)
3919 dfs(c, instr->i_target, a);
3921 a->a_postorder[a->a_nblocks++] = b;
3924 static int
3925 stackdepth_walk(struct compiler *c, basicblock *b, int depth, int maxdepth)
3927 int i;
3928 struct instr *instr;
3929 if (b->b_seen || b->b_startdepth >= depth)
3930 return maxdepth;
3931 b->b_seen = 1;
3932 b->b_startdepth = depth;
3933 for (i = 0; i < b->b_iused; i++) {
3934 instr = &b->b_instr[i];
3935 depth += opcode_stack_effect(instr->i_opcode, instr->i_oparg);
3936 if (depth > maxdepth)
3937 maxdepth = depth;
3938 assert(depth >= 0); /* invalid code or bug in stackdepth() */
3939 if (instr->i_jrel || instr->i_jabs) {
3940 maxdepth = stackdepth_walk(c, instr->i_target,
3941 depth, maxdepth);
3942 if (instr->i_opcode == JUMP_ABSOLUTE ||
3943 instr->i_opcode == JUMP_FORWARD) {
3944 goto out; /* remaining code is dead */
3948 if (b->b_next)
3949 maxdepth = stackdepth_walk(c, b->b_next, depth, maxdepth);
3950 out:
3951 b->b_seen = 0;
3952 return maxdepth;
3955 /* Find the flow path that needs the largest stack. We assume that
3956 * cycles in the flow graph have no net effect on the stack depth.
3958 static int
3959 stackdepth(struct compiler *c)
3961 basicblock *b, *entryblock;
3962 entryblock = NULL;
3963 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3964 b->b_seen = 0;
3965 b->b_startdepth = INT_MIN;
3966 entryblock = b;
3968 return stackdepth_walk(c, entryblock, 0, 0);
3971 static int
3972 assemble_init(struct assembler *a, int nblocks, int firstlineno)
3974 memset(a, 0, sizeof(struct assembler));
3975 a->a_lineno = firstlineno;
3976 a->a_bytecode = PyString_FromStringAndSize(NULL, DEFAULT_CODE_SIZE);
3977 if (!a->a_bytecode)
3978 return 0;
3979 a->a_lnotab = PyString_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE);
3980 if (!a->a_lnotab)
3981 return 0;
3982 a->a_postorder = (basicblock **)PyObject_Malloc(
3983 sizeof(basicblock *) * nblocks);
3984 if (!a->a_postorder) {
3985 PyErr_NoMemory();
3986 return 0;
3988 return 1;
3991 static void
3992 assemble_free(struct assembler *a)
3994 Py_XDECREF(a->a_bytecode);
3995 Py_XDECREF(a->a_lnotab);
3996 if (a->a_postorder)
3997 PyObject_Free(a->a_postorder);
4000 /* Return the size of a basic block in bytes. */
4002 static int
4003 instrsize(struct instr *instr)
4005 if (!instr->i_hasarg)
4006 return 1;
4007 if (instr->i_oparg > 0xffff)
4008 return 6;
4009 return 3;
4012 static int
4013 blocksize(basicblock *b)
4015 int i;
4016 int size = 0;
4018 for (i = 0; i < b->b_iused; i++)
4019 size += instrsize(&b->b_instr[i]);
4020 return size;
4023 /* All about a_lnotab.
4025 c_lnotab is an array of unsigned bytes disguised as a Python string.
4026 It is used to map bytecode offsets to source code line #s (when needed
4027 for tracebacks).
4029 The array is conceptually a list of
4030 (bytecode offset increment, line number increment)
4031 pairs. The details are important and delicate, best illustrated by example:
4033 	byte code offset	source code line number
4034 	    0			1
4035 	    6			2
4036 	   50			7
4037 	  350			307
4038 	  361			308
4040 The first trick is that these numbers aren't stored, only the increments
4041 from one row to the next (this doesn't really work, but it's a start):
4043 0, 1, 6, 1, 44, 5, 300, 300, 11, 1
4045 The second trick is that an unsigned byte can't hold negative values, or
4046 values larger than 255, so (a) there's a deep assumption that byte code
4047 offsets and their corresponding line #s both increase monotonically, and (b)
4048 if at least one column jumps by more than 255 from one row to the next, more
4049 than one pair is written to the table. In case #b, there's no way to know
4050 from looking at the table later how many were written. That's the delicate
4051 part. A user of c_lnotab desiring to find the source line number
4052 corresponding to a bytecode address A should do something like this
4054 lineno = addr = 0
4055 for addr_incr, line_incr in c_lnotab:
4056 addr += addr_incr
4057 if addr > A:
4058 return lineno
4059 lineno += line_incr
4061 In order for this to work, when the addr field increments by more than 255,
4062 the line # increment in each pair generated must be 0 until the remaining addr
4063 increment is < 256. So, in the example above, com_set_lineno should not (as
4064 was actually done until 2.2) expand 300, 300 to 255, 255, 45, 45, but to
4065 255, 0, 45, 255, 0, 45.
4068 static int
4069 assemble_lnotab(struct assembler *a, struct instr *i)
4071 int d_bytecode, d_lineno;
4072 int len;
4073 char *lnotab;
4075 d_bytecode = a->a_offset - a->a_lineno_off;
4076 d_lineno = i->i_lineno - a->a_lineno;
4078 assert(d_bytecode >= 0);
4079 assert(d_lineno >= 0);
4081 if (d_lineno == 0)
4082 return 1;
4084 if (d_bytecode > 255) {
4085 int j, nbytes, ncodes = d_bytecode / 255;
4086 nbytes = a->a_lnotab_off + 2 * ncodes;
4087 len = PyString_GET_SIZE(a->a_lnotab);
4088 if (nbytes >= len) {
4089 if (len * 2 < nbytes)
4090 len = nbytes;
4091 else
4092 len *= 2;
4093 if (_PyString_Resize(&a->a_lnotab, len) < 0)
4094 return 0;
4096 lnotab = PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
4097 for (j = 0; j < ncodes; j++) {
4098 *lnotab++ = 255;
4099 *lnotab++ = 0;
4101 d_bytecode -= ncodes * 255;
4102 a->a_lnotab_off += ncodes * 2;
4104 assert(d_bytecode <= 255);
4105 if (d_lineno > 255) {
4106 int j, nbytes, ncodes = d_lineno / 255;
4107 nbytes = a->a_lnotab_off + 2 * ncodes;
4108 len = PyString_GET_SIZE(a->a_lnotab);
4109 if (nbytes >= len) {
4110 if (len * 2 < nbytes)
4111 len = nbytes;
4112 else
4113 len *= 2;
4114 if (_PyString_Resize(&a->a_lnotab, len) < 0)
4115 return 0;
4117 lnotab = PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
4118 *lnotab++ = 255;
4119 *lnotab++ = d_bytecode;
4120 d_bytecode = 0;
4121 for (j = 1; j < ncodes; j++) {
4122 *lnotab++ = 255;
4123 *lnotab++ = 0;
4125 d_lineno -= ncodes * 255;
4126 a->a_lnotab_off += ncodes * 2;
4129 len = PyString_GET_SIZE(a->a_lnotab);
4130 if (a->a_lnotab_off + 2 >= len) {
4131 if (_PyString_Resize(&a->a_lnotab, len * 2) < 0)
4132 return 0;
4134 lnotab = PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
4136 a->a_lnotab_off += 2;
4137 if (d_bytecode) {
4138 *lnotab++ = d_bytecode;
4139 *lnotab++ = d_lineno;
4141 else { /* First line of a block; def stmt, etc. */
4142 *lnotab++ = 0;
4143 *lnotab++ = d_lineno;
4145 a->a_lineno = i->i_lineno;
4146 a->a_lineno_off = a->a_offset;
4147 return 1;
4150 /* assemble_emit()
4151 Extend the bytecode with a new instruction.
4152 Update lnotab if necessary.
4155 static int
4156 assemble_emit(struct assembler *a, struct instr *i)
4158 int size, arg = 0, ext = 0;
4159 int len = PyString_GET_SIZE(a->a_bytecode);
4160 char *code;
4162 size = instrsize(i);
4163 if (i->i_hasarg) {
4164 arg = i->i_oparg;
4165 ext = arg >> 16;
4167 if (i->i_lineno && !assemble_lnotab(a, i))
4168 return 0;
4169 if (a->a_offset + size >= len) {
4170 if (_PyString_Resize(&a->a_bytecode, len * 2) < 0)
4171 return 0;
4173 code = PyString_AS_STRING(a->a_bytecode) + a->a_offset;
4174 a->a_offset += size;
4175 if (size == 6) {
4176 assert(i->i_hasarg);
4177 *code++ = (char)EXTENDED_ARG;
4178 *code++ = ext & 0xff;
4179 *code++ = ext >> 8;
4180 arg &= 0xffff;
4182 *code++ = i->i_opcode;
4183 if (i->i_hasarg) {
4184 assert(size == 3 || size == 6);
4185 *code++ = arg & 0xff;
4186 *code++ = arg >> 8;
4188 return 1;
4191 static void
4192 assemble_jump_offsets(struct assembler *a, struct compiler *c)
4194 basicblock *b;
4195 int bsize, totsize, extended_arg_count, last_extended_arg_count = 0;
4196 int i;
4198 /* Compute the size of each block and fixup jump args.
4199 Replace block pointer with position in bytecode. */
4200 start:
4201 totsize = 0;
4202 for (i = a->a_nblocks - 1; i >= 0; i--) {
4203 b = a->a_postorder[i];
4204 bsize = blocksize(b);
4205 b->b_offset = totsize;
4206 totsize += bsize;
4208 extended_arg_count = 0;
4209 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
4210 bsize = b->b_offset;
4211 for (i = 0; i < b->b_iused; i++) {
4212 struct instr *instr = &b->b_instr[i];
4213 /* Relative jumps are computed relative to
4214 the instruction pointer after fetching
4215 the jump instruction.
4217 bsize += instrsize(instr);
4218 if (instr->i_jabs)
4219 instr->i_oparg = instr->i_target->b_offset;
4220 else if (instr->i_jrel) {
4221 int delta = instr->i_target->b_offset - bsize;
4222 instr->i_oparg = delta;
4224 else
4225 continue;
4226 if (instr->i_oparg > 0xffff)
4227 extended_arg_count++;
4231 /* XXX: This is an awful hack that could hurt performance, but
4232 on the bright side it should work until we come up
4233 with a better solution.
4235 In the meantime, should the goto be dropped in favor
4236 of a loop?
4238 The issue is that in the first loop blocksize() is called
4239 which calls instrsize() which requires i_oparg be set
4240 appropriately. There is a bootstrap problem because
4241 i_oparg is calculated in the second loop above.
4243 So we loop until we stop seeing new EXTENDED_ARGs.
4244 The only EXTENDED_ARGs that could be popping up are
4245 ones in jump instructions. So this should converge
4246 fairly quickly.
4248 if (last_extended_arg_count != extended_arg_count) {
4249 last_extended_arg_count = extended_arg_count;
4250 goto start;
4254 static PyObject *
4255 dict_keys_inorder(PyObject *dict, int offset)
4257 PyObject *tuple, *k, *v;
4258 Py_ssize_t i, pos = 0, size = PyDict_Size(dict);
4260 tuple = PyTuple_New(size);
4261 if (tuple == NULL)
4262 return NULL;
4263 while (PyDict_Next(dict, &pos, &k, &v)) {
4264 i = PyInt_AS_LONG(v);
4265 k = PyTuple_GET_ITEM(k, 0);
4266 Py_INCREF(k);
4267 assert((i - offset) < size);
4268 assert((i - offset) >= 0);
4269 PyTuple_SET_ITEM(tuple, i - offset, k);
4271 return tuple;
4274 static int
4275 compute_code_flags(struct compiler *c)
4277 PySTEntryObject *ste = c->u->u_ste;
4278 int flags = 0, n;
4279 if (ste->ste_type != ModuleBlock)
4280 flags |= CO_NEWLOCALS;
4281 if (ste->ste_type == FunctionBlock) {
4282 if (!ste->ste_unoptimized)
4283 flags |= CO_OPTIMIZED;
4284 if (ste->ste_nested)
4285 flags |= CO_NESTED;
4286 if (ste->ste_generator)
4287 flags |= CO_GENERATOR;
4289 if (ste->ste_varargs)
4290 flags |= CO_VARARGS;
4291 if (ste->ste_varkeywords)
4292 flags |= CO_VARKEYWORDS;
4293 if (ste->ste_generator)
4294 flags |= CO_GENERATOR;
4296 /* (Only) inherit compilerflags in PyCF_MASK */
4297 flags |= (c->c_flags->cf_flags & PyCF_MASK);
4299 n = PyDict_Size(c->u->u_freevars);
4300 if (n < 0)
4301 return -1;
4302 if (n == 0) {
4303 n = PyDict_Size(c->u->u_cellvars);
4304 if (n < 0)
4305 return -1;
4306 if (n == 0) {
4307 flags |= CO_NOFREE;
4311 return flags;
4314 static PyCodeObject *
4315 makecode(struct compiler *c, struct assembler *a)
4317 PyObject *tmp;
4318 PyCodeObject *co = NULL;
4319 PyObject *consts = NULL;
4320 PyObject *names = NULL;
4321 PyObject *varnames = NULL;
4322 PyObject *filename = NULL;
4323 PyObject *name = NULL;
4324 PyObject *freevars = NULL;
4325 PyObject *cellvars = NULL;
4326 PyObject *bytecode = NULL;
4327 int nlocals, flags;
4329 tmp = dict_keys_inorder(c->u->u_consts, 0);
4330 if (!tmp)
4331 goto error;
4332 consts = PySequence_List(tmp); /* optimize_code requires a list */
4333 Py_DECREF(tmp);
4335 names = dict_keys_inorder(c->u->u_names, 0);
4336 varnames = dict_keys_inorder(c->u->u_varnames, 0);
4337 if (!consts || !names || !varnames)
4338 goto error;
4340 cellvars = dict_keys_inorder(c->u->u_cellvars, 0);
4341 if (!cellvars)
4342 goto error;
4343 freevars = dict_keys_inorder(c->u->u_freevars, PyTuple_Size(cellvars));
4344 if (!freevars)
4345 goto error;
4346 filename = PyString_FromString(c->c_filename);
4347 if (!filename)
4348 goto error;
4350 nlocals = PyDict_Size(c->u->u_varnames);
4351 flags = compute_code_flags(c);
4352 if (flags < 0)
4353 goto error;
4355 bytecode = optimize_code(a->a_bytecode, consts, names, a->a_lnotab);
4356 if (!bytecode)
4357 goto error;
4359 tmp = PyList_AsTuple(consts); /* PyCode_New requires a tuple */
4360 if (!tmp)
4361 goto error;
4362 Py_DECREF(consts);
4363 consts = tmp;
4365 co = PyCode_New(c->u->u_argcount, nlocals, stackdepth(c), flags,
4366 bytecode, consts, names, varnames,
4367 freevars, cellvars,
4368 filename, c->u->u_name,
4369 c->u->u_firstlineno,
4370 a->a_lnotab);
4371 error:
4372 Py_XDECREF(consts);
4373 Py_XDECREF(names);
4374 Py_XDECREF(varnames);
4375 Py_XDECREF(filename);
4376 Py_XDECREF(name);
4377 Py_XDECREF(freevars);
4378 Py_XDECREF(cellvars);
4379 Py_XDECREF(bytecode);
4380 return co;
4383 static PyCodeObject *
4384 assemble(struct compiler *c, int addNone)
4386 basicblock *b, *entryblock;
4387 struct assembler a;
4388 int i, j, nblocks;
4389 PyCodeObject *co = NULL;
4391 /* Make sure every block that falls off the end returns None.
4392 XXX NEXT_BLOCK() isn't quite right, because if the last
4393 block ends with a jump or return b_next shouldn't set.
4395 if (!c->u->u_curblock->b_return) {
4396 NEXT_BLOCK(c);
4397 if (addNone)
4398 ADDOP_O(c, LOAD_CONST, Py_None, consts);
4399 ADDOP(c, RETURN_VALUE);
4402 nblocks = 0;
4403 entryblock = NULL;
4404 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
4405 nblocks++;
4406 entryblock = b;
4409 if (!assemble_init(&a, nblocks, c->u->u_firstlineno))
4410 goto error;
4411 dfs(c, entryblock, &a);
4413 /* Can't modify the bytecode after computing jump offsets. */
4414 assemble_jump_offsets(&a, c);
4416 /* Emit code in reverse postorder from dfs. */
4417 for (i = a.a_nblocks - 1; i >= 0; i--) {
4418 b = a.a_postorder[i];
4419 for (j = 0; j < b->b_iused; j++)
4420 if (!assemble_emit(&a, &b->b_instr[j]))
4421 goto error;
4424 if (_PyString_Resize(&a.a_lnotab, a.a_lnotab_off) < 0)
4425 goto error;
4426 if (_PyString_Resize(&a.a_bytecode, a.a_offset) < 0)
4427 goto error;
4429 co = makecode(c, &a);
4430 error:
4431 assemble_free(&a);
4432 return co;