Python/compile.c
1 /*
2 * This file compiles an abstract syntax tree (AST) into Python bytecode.
4 * The primary entry point is PyAST_Compile(), which returns a
5 * PyCodeObject. The compiler makes several passes to build the code
6 * object:
7 * 1. Checks for future statements. See future.c.
8 * 2. Builds a symbol table. See symtable.c.
9 * 3. Generates code for basic blocks. See compiler_mod() in this file.
10 * 4. Assembles the basic blocks into final code. See assemble() in
11 * this file.
12 * 5. Optimizes the byte code (peephole optimizations). See peephole.c.
14 * Note that compiler_mod() suggests module, but the module ast type
15 * (mod_ty) has cases for expressions and interactive statements.
17 * CAUTION: The VISIT_* macros abort the current function when they
18 * encounter a problem. So don't invoke them when there is memory
19 * which needs to be released. Code blocks are OK, as the compiler
20 * structure takes care of releasing those. Use the arena to manage
21 * objects. */
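/* Example (illustrative sketch): driving the compiler from C goes through
   the arena-based pipeline described above; this mirrors PyNode_Compile()
   further down in this file, assuming "n" is a parse tree obtained from
   the parser and "<example>" is a placeholder filename:

       PyArena *arena = PyArena_New();
       if (arena != NULL) {
           mod_ty mod = PyAST_FromNode(n, NULL, "<example>", arena);
           PyCodeObject *co = NULL;
           if (mod != NULL)
               co = PyAST_Compile(mod, "<example>", NULL, arena);
           Py_XDECREF(co);
           PyArena_Free(arena);
       }
*/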
24 #include "Python.h"
26 #include "Python-ast.h"
27 #include "node.h"
28 #include "pyarena.h"
29 #include "ast.h"
30 #include "code.h"
31 #include "compile.h"
32 #include "symtable.h"
33 #include "opcode.h"
35 int Py_OptimizeFlag = 0;
37 #define DEFAULT_BLOCK_SIZE 16
38 #define DEFAULT_BLOCKS 8
39 #define DEFAULT_CODE_SIZE 128
40 #define DEFAULT_LNOTAB_SIZE 16
42 struct instr {
43 unsigned i_jabs : 1;
44 unsigned i_jrel : 1;
45 unsigned i_hasarg : 1;
46 unsigned char i_opcode;
47 int i_oparg;
48 struct basicblock_ *i_target; /* target block (if jump instruction) */
49 int i_lineno;
52 typedef struct basicblock_ {
53 /* Each basicblock in a compilation unit is linked via b_list in the
54 reverse order that the blocks are allocated. b_list points to the next
55 block, not to be confused with b_next, which is next by control flow. */
56 struct basicblock_ *b_list;
57 /* number of instructions used */
58 int b_iused;
59 /* length of instruction array (b_instr) */
60 int b_ialloc;
61 /* pointer to an array of instructions, initially NULL */
62 struct instr *b_instr;
63 /* If b_next is non-NULL, it is a pointer to the next
64 block reached by normal control flow. */
65 struct basicblock_ *b_next;
66 /* b_seen is used to perform a DFS of basicblocks. */
67 unsigned b_seen : 1;
68 /* b_return is true if a RETURN_VALUE opcode is inserted. */
69 unsigned b_return : 1;
70 /* depth of stack upon entry of block, computed by stackdepth() */
71 int b_startdepth;
72 /* instruction offset for block, computed by assemble_jump_offsets() */
73 int b_offset;
74 } basicblock;
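/* Example (sketch): b_list links every block of a unit in reverse
   allocation order, while b_next is only the fall-through edge of the
   control-flow graph.  Walking all blocks therefore follows b_list, as
   compiler_unit_check() below does.  A hypothetical instruction count for
   a unit "u" would look like:

       int total = 0;
       basicblock *b;
       for (b = u->u_blocks; b != NULL; b = b->b_list)
           total += b->b_iused;
*/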
76 /* fblockinfo tracks the current frame block.
78 A frame block is used to handle loops, try/except, and try/finally.
79 It's called a frame block to distinguish it from a basic block in the
80 compiler IR.
83 enum fblocktype { LOOP, EXCEPT, FINALLY_TRY, FINALLY_END };
85 struct fblockinfo {
86 enum fblocktype fb_type;
87 basicblock *fb_block;
90 /* The following items change on entry and exit of code blocks.
91 They must be saved and restored when returning to a block.
93 struct compiler_unit {
94 PySTEntryObject *u_ste;
96 PyObject *u_name;
97 /* The following fields are dicts that map objects to
98 their index in co_XXX. The index is used as
99 the argument for opcodes that refer to those collections.
101 PyObject *u_consts; /* all constants */
102 PyObject *u_names; /* all names */
103 PyObject *u_varnames; /* local variables */
104 PyObject *u_cellvars; /* cell variables */
105 PyObject *u_freevars; /* free variables */
107 PyObject *u_private; /* for private name mangling */
109 int u_argcount; /* number of arguments for block */
110 /* Pointer to the most recently allocated block. By following b_list
111 members, you can reach all earlier allocated blocks. */
112 basicblock *u_blocks;
113 basicblock *u_curblock; /* pointer to current block */
114 int u_tmpname; /* temporary variables for list comps */
116 int u_nfblocks;
117 struct fblockinfo u_fblock[CO_MAXBLOCKS];
119 int u_firstlineno; /* the first lineno of the block */
120 int u_lineno; /* the lineno for the current stmt */
121 bool u_lineno_set; /* boolean to indicate whether instr
122 has been generated with current lineno */
125 /* This struct captures the global state of a compilation.
127 The u pointer points to the current compilation unit, while units
128 for enclosing blocks are stored in c_stack. The u and c_stack are
129 managed by compiler_enter_scope() and compiler_exit_scope().
132 struct compiler {
133 const char *c_filename;
134 struct symtable *c_st;
135 PyFutureFeatures *c_future; /* pointer to module's __future__ */
136 PyCompilerFlags *c_flags;
138 int c_interactive; /* true if in interactive mode */
139 int c_nestlevel;
141 struct compiler_unit *u; /* compiler state for current block */
142 PyObject *c_stack; /* Python list holding compiler_unit ptrs */
143 char *c_encoding; /* source encoding (a borrowed reference) */
144 PyArena *c_arena; /* pointer to memory allocation arena */
147 static int compiler_enter_scope(struct compiler *, identifier, void *, int);
148 static void compiler_free(struct compiler *);
149 static basicblock *compiler_new_block(struct compiler *);
150 static int compiler_next_instr(struct compiler *, basicblock *);
151 static int compiler_addop(struct compiler *, int);
152 static int compiler_addop_o(struct compiler *, int, PyObject *, PyObject *);
153 static int compiler_addop_i(struct compiler *, int, int);
154 static int compiler_addop_j(struct compiler *, int, basicblock *, int);
155 static basicblock *compiler_use_new_block(struct compiler *);
156 static int compiler_error(struct compiler *, const char *);
157 static int compiler_nameop(struct compiler *, identifier, expr_context_ty);
159 static PyCodeObject *compiler_mod(struct compiler *, mod_ty);
160 static int compiler_visit_stmt(struct compiler *, stmt_ty);
161 static int compiler_visit_keyword(struct compiler *, keyword_ty);
162 static int compiler_visit_expr(struct compiler *, expr_ty);
163 static int compiler_augassign(struct compiler *, stmt_ty);
164 static int compiler_visit_slice(struct compiler *, slice_ty,
165 expr_context_ty);
167 static int compiler_push_fblock(struct compiler *, enum fblocktype,
168 basicblock *);
169 static void compiler_pop_fblock(struct compiler *, enum fblocktype,
170 basicblock *);
171 /* Returns true if there is a loop on the fblock stack. */
172 static int compiler_in_loop(struct compiler *);
174 static int inplace_binop(struct compiler *, operator_ty);
175 static int expr_constant(expr_ty e);
177 static int compiler_with(struct compiler *, stmt_ty);
179 static PyCodeObject *assemble(struct compiler *, int addNone);
180 static PyObject *__doc__;
182 PyObject *
183 _Py_Mangle(PyObject *privateobj, PyObject *ident)
185 /* Name mangling: __private becomes _classname__private.
186 This is independent from how the name is used. */
187 const char *p, *name = PyString_AsString(ident);
188 char *buffer;
189 size_t nlen, plen;
190 if (privateobj == NULL || !PyString_Check(privateobj) ||
191 name == NULL || name[0] != '_' || name[1] != '_') {
192 Py_INCREF(ident);
193 return ident;
195 p = PyString_AsString(privateobj);
196 nlen = strlen(name);
197 /* Don't mangle __id__ or names with dots.
199 The only time a name with a dot can occur is when
200 we are compiling an import statement that has a
201 package name.
203 TODO(jhylton): Decide whether we want to support
204 mangling of the module name, e.g. __M.X.
206 if ((name[nlen-1] == '_' && name[nlen-2] == '_')
207 || strchr(name, '.')) {
208 Py_INCREF(ident);
209 return ident; /* Don't mangle __whatever__ */
211 /* Strip leading underscores from class name */
212 while (*p == '_')
213 p++;
214 if (*p == '\0') {
215 Py_INCREF(ident);
216 return ident; /* Don't mangle if class is just underscores */
218 plen = strlen(p);
220 assert(1 <= PY_SSIZE_T_MAX - nlen);
221 assert(1 + nlen <= PY_SSIZE_T_MAX - plen);
223 ident = PyString_FromStringAndSize(NULL, 1 + nlen + plen);
224 if (!ident)
225 return 0;
226 /* ident = "_" + p[:plen] + name # i.e. 1+plen+nlen bytes */
227 buffer = PyString_AS_STRING(ident);
228 buffer[0] = '_';
229 strncpy(buffer+1, p, plen);
230 strcpy(buffer+1+plen, name);
231 return ident;
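/* Examples of the mangling rule implemented above, for a class named
   "Ham" (leading underscores of the class name are stripped first):

       __spam       ->  _Ham__spam
       __spam__     ->  __spam__        (trailing "__": left alone)
       __spam.eggs  ->  __spam.eggs     (dotted name: left alone)
       spam         ->  spam            (no leading "__": left alone)

   The result is always a new reference; callers must Py_DECREF it. */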
234 static int
235 compiler_init(struct compiler *c)
237 memset(c, 0, sizeof(struct compiler));
239 c->c_stack = PyList_New(0);
240 if (!c->c_stack)
241 return 0;
243 return 1;
246 PyCodeObject *
247 PyAST_Compile(mod_ty mod, const char *filename, PyCompilerFlags *flags,
248 PyArena *arena)
250 struct compiler c;
251 PyCodeObject *co = NULL;
252 PyCompilerFlags local_flags;
253 int merged;
255 if (!__doc__) {
256 __doc__ = PyString_InternFromString("__doc__");
257 if (!__doc__)
258 return NULL;
261 if (!compiler_init(&c))
262 return NULL;
263 c.c_filename = filename;
264 c.c_arena = arena;
265 c.c_future = PyFuture_FromAST(mod, filename);
266 if (c.c_future == NULL)
267 goto finally;
268 if (!flags) {
269 local_flags.cf_flags = 0;
270 flags = &local_flags;
272 merged = c.c_future->ff_features | flags->cf_flags;
273 c.c_future->ff_features = merged;
274 flags->cf_flags = merged;
275 c.c_flags = flags;
276 c.c_nestlevel = 0;
278 c.c_st = PySymtable_Build(mod, filename, c.c_future);
279 if (c.c_st == NULL) {
280 if (!PyErr_Occurred())
281 PyErr_SetString(PyExc_SystemError, "no symtable");
282 goto finally;
285 /* XXX initialize to NULL for now, need to handle */
286 c.c_encoding = NULL;
288 co = compiler_mod(&c, mod);
290 finally:
291 compiler_free(&c);
292 assert(co || PyErr_Occurred());
293 return co;
296 PyCodeObject *
297 PyNode_Compile(struct _node *n, const char *filename)
299 PyCodeObject *co = NULL;
300 mod_ty mod;
301 PyArena *arena = PyArena_New();
302 if (!arena)
303 return NULL;
304 mod = PyAST_FromNode(n, NULL, filename, arena);
305 if (mod)
306 co = PyAST_Compile(mod, filename, NULL, arena);
307 PyArena_Free(arena);
308 return co;
311 static void
312 compiler_free(struct compiler *c)
314 if (c->c_st)
315 PySymtable_Free(c->c_st);
316 if (c->c_future)
317 PyObject_Free(c->c_future);
318 Py_DECREF(c->c_stack);
321 static PyObject *
322 list2dict(PyObject *list)
324 Py_ssize_t i, n;
325 PyObject *v, *k;
326 PyObject *dict = PyDict_New();
327 if (!dict) return NULL;
329 n = PyList_Size(list);
330 for (i = 0; i < n; i++) {
331 v = PyInt_FromLong(i);
332 if (!v) {
333 Py_DECREF(dict);
334 return NULL;
336 k = PyList_GET_ITEM(list, i);
337 k = PyTuple_Pack(2, k, k->ob_type);
338 if (k == NULL || PyDict_SetItem(dict, k, v) < 0) {
339 Py_XDECREF(k);
340 Py_DECREF(v);
341 Py_DECREF(dict);
342 return NULL;
344 Py_DECREF(k);
345 Py_DECREF(v);
347 return dict;
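/* Example: for a ste_varnames list ["x", "y"], the dict built above is
   (conceptually)

       { ("x", <type 'str'>) : 0,
         ("y", <type 'str'>) : 1 }

   i.e. each name is keyed together with its type, and the value is the
   index later used as the oparg of opcodes that refer to co_varnames. */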
350 /* Return new dict containing names from src that match scope(s).
352 src is a symbol table dictionary. If the scope of a name matches
353 scope_type, or the flag bit is set for it, insert it into the new dict. The
354 values are integers, starting at offset and increasing by one for
355 each key.
358 static PyObject *
359 dictbytype(PyObject *src, int scope_type, int flag, int offset)
361 Py_ssize_t pos = 0, i = offset, scope;
362 PyObject *k, *v, *dest = PyDict_New();
364 assert(offset >= 0);
365 if (dest == NULL)
366 return NULL;
368 while (PyDict_Next(src, &pos, &k, &v)) {
369 /* XXX this should probably be a macro in symtable.h */
370 assert(PyInt_Check(v));
371 scope = (PyInt_AS_LONG(v) >> SCOPE_OFF) & SCOPE_MASK;
373 if (scope == scope_type || PyInt_AS_LONG(v) & flag) {
374 PyObject *tuple, *item = PyInt_FromLong(i);
375 if (item == NULL) {
376 Py_DECREF(dest);
377 return NULL;
379 i++;
380 tuple = PyTuple_Pack(2, k, k->ob_type);
381 if (!tuple || PyDict_SetItem(dest, tuple, item) < 0) {
382 Py_DECREF(item);
383 Py_DECREF(dest);
384 Py_XDECREF(tuple);
385 return NULL;
387 Py_DECREF(item);
388 Py_DECREF(tuple);
391 return dest;
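/* Example: compiler_enter_scope() below builds the cell and free
   variable maps this way, numbering the free variables right after the
   cell variables:

       u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
       u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE,
                                  DEF_FREE_CLASS,
                                  PyDict_Size(u->u_cellvars));
*/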
394 static void
395 compiler_unit_check(struct compiler_unit *u)
397 basicblock *block;
398 for (block = u->u_blocks; block != NULL; block = block->b_list) {
399 assert((void *)block != (void *)0xcbcbcbcb);
400 assert((void *)block != (void *)0xfbfbfbfb);
401 assert((void *)block != (void *)0xdbdbdbdb);
402 if (block->b_instr != NULL) {
403 assert(block->b_ialloc > 0);
404 assert(block->b_iused > 0);
405 assert(block->b_ialloc >= block->b_iused);
407 else {
408 assert (block->b_iused == 0);
409 assert (block->b_ialloc == 0);
414 static void
415 compiler_unit_free(struct compiler_unit *u)
417 basicblock *b, *next;
419 compiler_unit_check(u);
420 b = u->u_blocks;
421 while (b != NULL) {
422 if (b->b_instr)
423 PyObject_Free((void *)b->b_instr);
424 next = b->b_list;
425 PyObject_Free((void *)b);
426 b = next;
428 Py_CLEAR(u->u_ste);
429 Py_CLEAR(u->u_name);
430 Py_CLEAR(u->u_consts);
431 Py_CLEAR(u->u_names);
432 Py_CLEAR(u->u_varnames);
433 Py_CLEAR(u->u_freevars);
434 Py_CLEAR(u->u_cellvars);
435 Py_CLEAR(u->u_private);
436 PyObject_Free(u);
439 static int
440 compiler_enter_scope(struct compiler *c, identifier name, void *key,
441 int lineno)
443 struct compiler_unit *u;
445 u = (struct compiler_unit *)PyObject_Malloc(sizeof(
446 struct compiler_unit));
447 if (!u) {
448 PyErr_NoMemory();
449 return 0;
451 memset(u, 0, sizeof(struct compiler_unit));
452 u->u_argcount = 0;
453 u->u_ste = PySymtable_Lookup(c->c_st, key);
454 if (!u->u_ste) {
455 compiler_unit_free(u);
456 return 0;
458 Py_INCREF(name);
459 u->u_name = name;
460 u->u_varnames = list2dict(u->u_ste->ste_varnames);
461 u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
462 if (!u->u_varnames || !u->u_cellvars) {
463 compiler_unit_free(u);
464 return 0;
467 u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS,
468 PyDict_Size(u->u_cellvars));
469 if (!u->u_freevars) {
470 compiler_unit_free(u);
471 return 0;
474 u->u_blocks = NULL;
475 u->u_tmpname = 0;
476 u->u_nfblocks = 0;
477 u->u_firstlineno = lineno;
478 u->u_lineno = 0;
479 u->u_lineno_set = false;
480 u->u_consts = PyDict_New();
481 if (!u->u_consts) {
482 compiler_unit_free(u);
483 return 0;
485 u->u_names = PyDict_New();
486 if (!u->u_names) {
487 compiler_unit_free(u);
488 return 0;
491 u->u_private = NULL;
493 /* Push the old compiler_unit on the stack. */
494 if (c->u) {
495 PyObject *wrapper = PyCObject_FromVoidPtr(c->u, NULL);
496 if (!wrapper || PyList_Append(c->c_stack, wrapper) < 0) {
497 Py_XDECREF(wrapper);
498 compiler_unit_free(u);
499 return 0;
501 Py_DECREF(wrapper);
502 u->u_private = c->u->u_private;
503 Py_XINCREF(u->u_private);
505 c->u = u;
507 c->c_nestlevel++;
508 if (compiler_use_new_block(c) == NULL)
509 return 0;
511 return 1;
514 static void
515 compiler_exit_scope(struct compiler *c)
517 int n;
518 PyObject *wrapper;
520 c->c_nestlevel--;
521 compiler_unit_free(c->u);
522 /* Restore c->u to the parent unit. */
523 n = PyList_GET_SIZE(c->c_stack) - 1;
524 if (n >= 0) {
525 wrapper = PyList_GET_ITEM(c->c_stack, n);
526 c->u = (struct compiler_unit *)PyCObject_AsVoidPtr(wrapper);
527 assert(c->u);
528 /* we are deleting from a list so this really shouldn't fail */
529 if (PySequence_DelItem(c->c_stack, n) < 0)
530 Py_FatalError("compiler_exit_scope()");
531 compiler_unit_check(c->u);
533 else
534 c->u = NULL;
538 /* Allocate a new block and return a pointer to it.
539 Returns NULL on error.
542 static basicblock *
543 compiler_new_block(struct compiler *c)
545 basicblock *b;
546 struct compiler_unit *u;
548 u = c->u;
549 b = (basicblock *)PyObject_Malloc(sizeof(basicblock));
550 if (b == NULL) {
551 PyErr_NoMemory();
552 return NULL;
554 memset((void *)b, 0, sizeof(basicblock));
555 /* Extend the singly linked list of blocks with new block. */
556 b->b_list = u->u_blocks;
557 u->u_blocks = b;
558 return b;
561 static basicblock *
562 compiler_use_new_block(struct compiler *c)
564 basicblock *block = compiler_new_block(c);
565 if (block == NULL)
566 return NULL;
567 c->u->u_curblock = block;
568 return block;
571 static basicblock *
572 compiler_next_block(struct compiler *c)
574 basicblock *block = compiler_new_block(c);
575 if (block == NULL)
576 return NULL;
577 c->u->u_curblock->b_next = block;
578 c->u->u_curblock = block;
579 return block;
582 static basicblock *
583 compiler_use_next_block(struct compiler *c, basicblock *block)
585 assert(block != NULL);
586 c->u->u_curblock->b_next = block;
587 c->u->u_curblock = block;
588 return block;
591 /* Returns the offset of the next instruction in the current block's
592 b_instr array. Resizes the b_instr as necessary.
593 Returns -1 on failure.
596 static int
597 compiler_next_instr(struct compiler *c, basicblock *b)
599 assert(b != NULL);
600 if (b->b_instr == NULL) {
601 b->b_instr = (struct instr *)PyObject_Malloc(
602 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
603 if (b->b_instr == NULL) {
604 PyErr_NoMemory();
605 return -1;
607 b->b_ialloc = DEFAULT_BLOCK_SIZE;
608 memset((char *)b->b_instr, 0,
609 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
611 else if (b->b_iused == b->b_ialloc) {
612 struct instr *tmp;
613 size_t oldsize, newsize;
614 oldsize = b->b_ialloc * sizeof(struct instr);
615 newsize = oldsize << 1;
617 if (oldsize > (PY_SIZE_MAX >> 1)) {
618 PyErr_NoMemory();
619 return -1;
622 if (newsize == 0) {
623 PyErr_NoMemory();
624 return -1;
626 b->b_ialloc <<= 1;
627 tmp = (struct instr *)PyObject_Realloc(
628 (void *)b->b_instr, newsize);
629 if (tmp == NULL) {
630 PyErr_NoMemory();
631 return -1;
633 b->b_instr = tmp;
634 memset((char *)b->b_instr + oldsize, 0, newsize - oldsize);
636 return b->b_iused++;
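/* Example: a block starts with room for DEFAULT_BLOCK_SIZE (16)
   instructions and doubles on demand (16, 32, 64, ...), so appending an
   instruction is amortized constant time.  A typical caller looks like
   compiler_addop() below:

       off = compiler_next_instr(c, c->u->u_curblock);
       if (off < 0)
           return 0;
       i = &c->u->u_curblock->b_instr[off];
*/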
639 /* Set the i_lineno member of the instruction at offset off if the
640 line number for the current expression/statement has not
641 already been set. If it has been set, the call has no effect.
643 The line number is reset in the following cases:
644 - when entering a new scope
645 - on each statement
646 - on each expression that starts a new line
647 - before the "except" clause
648 - before the "for" and "while" expressions
651 static void
652 compiler_set_lineno(struct compiler *c, int off)
654 basicblock *b;
655 if (c->u->u_lineno_set)
656 return;
657 c->u->u_lineno_set = true;
658 b = c->u->u_curblock;
659 b->b_instr[off].i_lineno = c->u->u_lineno;
662 static int
663 opcode_stack_effect(int opcode, int oparg)
665 switch (opcode) {
666 case POP_TOP:
667 return -1;
668 case ROT_TWO:
669 case ROT_THREE:
670 return 0;
671 case DUP_TOP:
672 return 1;
673 case ROT_FOUR:
674 return 0;
676 case UNARY_POSITIVE:
677 case UNARY_NEGATIVE:
678 case UNARY_NOT:
679 case UNARY_CONVERT:
680 case UNARY_INVERT:
681 return 0;
683 case LIST_APPEND:
684 return -1;
686 case BINARY_POWER:
687 case BINARY_MULTIPLY:
688 case BINARY_DIVIDE:
689 case BINARY_MODULO:
690 case BINARY_ADD:
691 case BINARY_SUBTRACT:
692 case BINARY_SUBSCR:
693 case BINARY_FLOOR_DIVIDE:
694 case BINARY_TRUE_DIVIDE:
695 return -1;
696 case INPLACE_FLOOR_DIVIDE:
697 case INPLACE_TRUE_DIVIDE:
698 return -1;
700 case SLICE+0:
701 return 1;
702 case SLICE+1:
703 return 0;
704 case SLICE+2:
705 return 0;
706 case SLICE+3:
707 return -1;
709 case STORE_SLICE+0:
710 return -2;
711 case STORE_SLICE+1:
712 return -3;
713 case STORE_SLICE+2:
714 return -3;
715 case STORE_SLICE+3:
716 return -4;
718 case DELETE_SLICE+0:
719 return -1;
720 case DELETE_SLICE+1:
721 return -2;
722 case DELETE_SLICE+2:
723 return -2;
724 case DELETE_SLICE+3:
725 return -3;
727 case INPLACE_ADD:
728 case INPLACE_SUBTRACT:
729 case INPLACE_MULTIPLY:
730 case INPLACE_DIVIDE:
731 case INPLACE_MODULO:
732 return -1;
733 case STORE_SUBSCR:
734 return -3;
735 case STORE_MAP:
736 return -2;
737 case DELETE_SUBSCR:
738 return -2;
740 case BINARY_LSHIFT:
741 case BINARY_RSHIFT:
742 case BINARY_AND:
743 case BINARY_XOR:
744 case BINARY_OR:
745 return -1;
746 case INPLACE_POWER:
747 return -1;
748 case GET_ITER:
749 return 0;
751 case PRINT_EXPR:
752 return -1;
753 case PRINT_ITEM:
754 return -1;
755 case PRINT_NEWLINE:
756 return 0;
757 case PRINT_ITEM_TO:
758 return -2;
759 case PRINT_NEWLINE_TO:
760 return -1;
761 case INPLACE_LSHIFT:
762 case INPLACE_RSHIFT:
763 case INPLACE_AND:
764 case INPLACE_XOR:
765 case INPLACE_OR:
766 return -1;
767 case BREAK_LOOP:
768 return 0;
769 case SETUP_WITH:
770 return 4;
771 case WITH_CLEANUP:
772 return -1; /* XXX Sometimes more */
773 case LOAD_LOCALS:
774 return 1;
775 case RETURN_VALUE:
776 return -1;
777 case IMPORT_STAR:
778 return -1;
779 case EXEC_STMT:
780 return -3;
781 case YIELD_VALUE:
782 return 0;
784 case POP_BLOCK:
785 return 0;
786 case END_FINALLY:
787 return -1; /* or -2 or -3 if exception occurred */
788 case BUILD_CLASS:
789 return -2;
791 case STORE_NAME:
792 return -1;
793 case DELETE_NAME:
794 return 0;
795 case UNPACK_SEQUENCE:
796 return oparg-1;
797 case FOR_ITER:
798 return 1;
800 case STORE_ATTR:
801 return -2;
802 case DELETE_ATTR:
803 return -1;
804 case STORE_GLOBAL:
805 return -1;
806 case DELETE_GLOBAL:
807 return 0;
808 case DUP_TOPX:
809 return oparg;
810 case LOAD_CONST:
811 return 1;
812 case LOAD_NAME:
813 return 1;
814 case BUILD_TUPLE:
815 case BUILD_LIST:
816 return 1-oparg;
817 case BUILD_MAP:
818 return 1;
819 case LOAD_ATTR:
820 return 0;
821 case COMPARE_OP:
822 return -1;
823 case IMPORT_NAME:
824 return 0;
825 case IMPORT_FROM:
826 return 1;
828 case JUMP_FORWARD:
829 case JUMP_IF_TRUE_OR_POP: /* -1 if jump not taken */
830 case JUMP_IF_FALSE_OR_POP: /* "" */
831 case JUMP_ABSOLUTE:
832 return 0;
834 case POP_JUMP_IF_FALSE:
835 case POP_JUMP_IF_TRUE:
836 return -1;
838 case LOAD_GLOBAL:
839 return 1;
841 case CONTINUE_LOOP:
842 return 0;
843 case SETUP_LOOP:
844 return 0;
845 case SETUP_EXCEPT:
846 case SETUP_FINALLY:
847 return 3; /* actually pushed by an exception */
849 case LOAD_FAST:
850 return 1;
851 case STORE_FAST:
852 return -1;
853 case DELETE_FAST:
854 return 0;
856 case RAISE_VARARGS:
857 return -oparg;
858 #define NARGS(o) (((o) % 256) + 2*((o) / 256))
859 case CALL_FUNCTION:
860 return -NARGS(oparg);
861 case CALL_FUNCTION_VAR:
862 case CALL_FUNCTION_KW:
863 return -NARGS(oparg)-1;
864 case CALL_FUNCTION_VAR_KW:
865 return -NARGS(oparg)-2;
866 #undef NARGS
867 case MAKE_FUNCTION:
868 return -oparg;
869 case BUILD_SLICE:
870 if (oparg == 3)
871 return -2;
872 else
873 return -1;
875 case MAKE_CLOSURE:
876 return -oparg;
877 case LOAD_CLOSURE:
878 return 1;
879 case LOAD_DEREF:
880 return 1;
881 case STORE_DEREF:
882 return -1;
883 default:
884 fprintf(stderr, "opcode = %d\n", opcode);
885 Py_FatalError("opcode_stack_effect()");
888 return 0; /* not reachable */
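/* Example: for the expression statement "a + b" at module level the
   emitted code and its cumulative stack effect are

       LOAD_NAME  a    +1   (depth 1)
       LOAD_NAME  b    +1   (depth 2)
       BINARY_ADD      -1   (depth 1)
       POP_TOP         -1   (depth 0)

   so the maximum stack depth contributed by this fragment is 2. */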
891 /* Add an opcode with no argument.
892 Returns 0 on failure, 1 on success.
895 static int
896 compiler_addop(struct compiler *c, int opcode)
898 basicblock *b;
899 struct instr *i;
900 int off;
901 off = compiler_next_instr(c, c->u->u_curblock);
902 if (off < 0)
903 return 0;
904 b = c->u->u_curblock;
905 i = &b->b_instr[off];
906 i->i_opcode = opcode;
907 i->i_hasarg = 0;
908 if (opcode == RETURN_VALUE)
909 b->b_return = 1;
910 compiler_set_lineno(c, off);
911 return 1;
914 static int
915 compiler_add_o(struct compiler *c, PyObject *dict, PyObject *o)
917 PyObject *t, *v;
918 Py_ssize_t arg;
919 unsigned char *p, *q;
920 Py_complex z;
921 double d;
922 int real_part_zero, imag_part_zero;
924 /* necessary to make sure types aren't coerced (e.g., int and long) */
925 /* _and_ to distinguish 0.0 from -0.0 e.g. on IEEE platforms */
926 if (PyFloat_Check(o)) {
927 d = PyFloat_AS_DOUBLE(o);
928 p = (unsigned char*) &d;
929 /* all we need is to make the tuple for 0.0 or -0.0 different from
930 * the tuple for any other value, just to avoid the "coercion". */
932 if (*p==0 && p[sizeof(double)-1]==0)
933 t = PyTuple_Pack(3, o, o->ob_type, Py_None);
934 else
935 t = PyTuple_Pack(2, o, o->ob_type);
937 else if (PyComplex_Check(o)) {
938 /* complex case is even messier: we need to make complex(x,
939 0.) different from complex(x, -0.) and complex(0., y)
940 different from complex(-0., y), for any x and y. In
941 particular, all four complex zeros should be
942 distinguished.*/
943 z = PyComplex_AsCComplex(o);
944 p = (unsigned char*) &(z.real);
945 q = (unsigned char*) &(z.imag);
946 /* all that matters here is that on IEEE platforms
947 real_part_zero will be true if z.real == 0., and false if
948 z.real == -0. In fact, real_part_zero will also be true
949 for some other rarely occurring nonzero floats, but this
950 doesn't matter. Similar comments apply to
951 imag_part_zero. */
952 real_part_zero = *p==0 && p[sizeof(double)-1]==0;
953 imag_part_zero = *q==0 && q[sizeof(double)-1]==0;
954 if (real_part_zero && imag_part_zero) {
955 t = PyTuple_Pack(4, o, o->ob_type, Py_True, Py_True);
957 else if (real_part_zero && !imag_part_zero) {
958 t = PyTuple_Pack(4, o, o->ob_type, Py_True, Py_False);
960 else if (!real_part_zero && imag_part_zero) {
961 t = PyTuple_Pack(4, o, o->ob_type, Py_False, Py_True);
963 else {
964 t = PyTuple_Pack(2, o, o->ob_type);
967 else {
968 t = PyTuple_Pack(2, o, o->ob_type);
970 if (t == NULL)
971 return -1;
973 v = PyDict_GetItem(dict, t);
974 if (!v) {
975 arg = PyDict_Size(dict);
976 v = PyInt_FromLong(arg);
977 if (!v) {
978 Py_DECREF(t);
979 return -1;
981 if (PyDict_SetItem(dict, t, v) < 0) {
982 Py_DECREF(t);
983 Py_DECREF(v);
984 return -1;
986 Py_DECREF(v);
988 else
989 arg = PyInt_AsLong(v);
990 Py_DECREF(t);
991 return arg;
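/* Example: the keys built above keep constants that compare equal from
   collapsing into a single co_consts entry:

       0     ->  (0, <type 'int'>)
       0.0   ->  (0.0, <type 'float'>, None)    (zero-byte check above)
      -0.0   ->  (-0.0, <type 'float'>)

   so a code object that uses both 0 and 0.0 gets two distinct constants. */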
994 static int
995 compiler_addop_o(struct compiler *c, int opcode, PyObject *dict,
996 PyObject *o)
998 int arg = compiler_add_o(c, dict, o);
999 if (arg < 0)
1000 return 0;
1001 return compiler_addop_i(c, opcode, arg);
1004 static int
1005 compiler_addop_name(struct compiler *c, int opcode, PyObject *dict,
1006 PyObject *o)
1008 int arg;
1009 PyObject *mangled = _Py_Mangle(c->u->u_private, o);
1010 if (!mangled)
1011 return 0;
1012 arg = compiler_add_o(c, dict, mangled);
1013 Py_DECREF(mangled);
1014 if (arg < 0)
1015 return 0;
1016 return compiler_addop_i(c, opcode, arg);
1019 /* Add an opcode with an integer argument.
1020 Returns 0 on failure, 1 on success.
1023 static int
1024 compiler_addop_i(struct compiler *c, int opcode, int oparg)
1026 struct instr *i;
1027 int off;
1028 off = compiler_next_instr(c, c->u->u_curblock);
1029 if (off < 0)
1030 return 0;
1031 i = &c->u->u_curblock->b_instr[off];
1032 i->i_opcode = opcode;
1033 i->i_oparg = oparg;
1034 i->i_hasarg = 1;
1035 compiler_set_lineno(c, off);
1036 return 1;
1039 static int
1040 compiler_addop_j(struct compiler *c, int opcode, basicblock *b, int absolute)
1042 struct instr *i;
1043 int off;
1045 assert(b != NULL);
1046 off = compiler_next_instr(c, c->u->u_curblock);
1047 if (off < 0)
1048 return 0;
1049 i = &c->u->u_curblock->b_instr[off];
1050 i->i_opcode = opcode;
1051 i->i_target = b;
1052 i->i_hasarg = 1;
1053 if (absolute)
1054 i->i_jabs = 1;
1055 else
1056 i->i_jrel = 1;
1057 compiler_set_lineno(c, off);
1058 return 1;
1061 /* The distinction between NEW_BLOCK and NEXT_BLOCK is subtle. (I'd
1062 like to find better names.) NEW_BLOCK() creates a new block and sets
1063 it as the current block. NEXT_BLOCK() also creates an implicit jump
1064 from the current block to the new block.
1067 /* The returns inside these macros make it impossible to decref objects
1068 created in the local function. Local objects should use the arena.
1072 #define NEW_BLOCK(C) { \
1073 if (compiler_use_new_block((C)) == NULL) \
1074 return 0; \
1077 #define NEXT_BLOCK(C) { \
1078 if (compiler_next_block((C)) == NULL) \
1079 return 0; \
1082 #define ADDOP(C, OP) { \
1083 if (!compiler_addop((C), (OP))) \
1084 return 0; \
1087 #define ADDOP_IN_SCOPE(C, OP) { \
1088 if (!compiler_addop((C), (OP))) { \
1089 compiler_exit_scope(c); \
1090 return 0; \
1094 #define ADDOP_O(C, OP, O, TYPE) { \
1095 if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1096 return 0; \
1099 #define ADDOP_NAME(C, OP, O, TYPE) { \
1100 if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1101 return 0; \
1104 #define ADDOP_I(C, OP, O) { \
1105 if (!compiler_addop_i((C), (OP), (O))) \
1106 return 0; \
1109 #define ADDOP_JABS(C, OP, O) { \
1110 if (!compiler_addop_j((C), (OP), (O), 1)) \
1111 return 0; \
1114 #define ADDOP_JREL(C, OP, O) { \
1115 if (!compiler_addop_j((C), (OP), (O), 0)) \
1116 return 0; \
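/* Example: the macros above keep the code generators compact;
   compiler_for() later in this file reads roughly as

       ADDOP_JREL(c, SETUP_LOOP, end);
       VISIT(c, expr, s->v.For.iter);
       ADDOP(c, GET_ITER);
       compiler_use_next_block(c, start);
       ADDOP_JREL(c, FOR_ITER, cleanup);

   with each macro returning 0 from the enclosing function on failure. */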
1119 /* VISIT and VISIT_SEQ take an ASDL type as their second argument. They use
1120 the ASDL name to synthesize the name of the C type and the visit function.
1123 #define VISIT(C, TYPE, V) {\
1124 if (!compiler_visit_ ## TYPE((C), (V))) \
1125 return 0; \
1128 #define VISIT_IN_SCOPE(C, TYPE, V) {\
1129 if (!compiler_visit_ ## TYPE((C), (V))) { \
1130 compiler_exit_scope(c); \
1131 return 0; \
1135 #define VISIT_SLICE(C, V, CTX) {\
1136 if (!compiler_visit_slice((C), (V), (CTX))) \
1137 return 0; \
1140 #define VISIT_SEQ(C, TYPE, SEQ) { \
1141 int _i; \
1142 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1143 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1144 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1145 if (!compiler_visit_ ## TYPE((C), elt)) \
1146 return 0; \
1150 #define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
1151 int _i; \
1152 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1153 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1154 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1155 if (!compiler_visit_ ## TYPE((C), elt)) { \
1156 compiler_exit_scope(c); \
1157 return 0; \
1162 static int
1163 compiler_isdocstring(stmt_ty s)
1165 if (s->kind != Expr_kind)
1166 return 0;
1167 return s->v.Expr.value->kind == Str_kind;
1170 /* Compile a sequence of statements, checking for a docstring. */
1172 static int
1173 compiler_body(struct compiler *c, asdl_seq *stmts)
1175 int i = 0;
1176 stmt_ty st;
1178 if (!asdl_seq_LEN(stmts))
1179 return 1;
1180 st = (stmt_ty)asdl_seq_GET(stmts, 0);
1181 if (compiler_isdocstring(st) && Py_OptimizeFlag < 2) {
1182 /* don't generate docstrings if -OO */
1183 i = 1;
1184 VISIT(c, expr, st->v.Expr.value);
1185 if (!compiler_nameop(c, __doc__, Store))
1186 return 0;
1188 for (; i < asdl_seq_LEN(stmts); i++)
1189 VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
1190 return 1;
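/* Example: for a module whose first statement is a string literal

       "module docstring"
       x = 1

   the string is compiled as a store to __doc__ (unless -OO is in effect)
   and the remaining statements are visited normally. */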
1193 static PyCodeObject *
1194 compiler_mod(struct compiler *c, mod_ty mod)
1196 PyCodeObject *co;
1197 int addNone = 1;
1198 static PyObject *module;
1199 if (!module) {
1200 module = PyString_InternFromString("<module>");
1201 if (!module)
1202 return NULL;
1204 /* Use 0 for firstlineno initially, will fixup in assemble(). */
1205 if (!compiler_enter_scope(c, module, mod, 0))
1206 return NULL;
1207 switch (mod->kind) {
1208 case Module_kind:
1209 if (!compiler_body(c, mod->v.Module.body)) {
1210 compiler_exit_scope(c);
1211 return 0;
1213 break;
1214 case Interactive_kind:
1215 c->c_interactive = 1;
1216 VISIT_SEQ_IN_SCOPE(c, stmt,
1217 mod->v.Interactive.body);
1218 break;
1219 case Expression_kind:
1220 VISIT_IN_SCOPE(c, expr, mod->v.Expression.body);
1221 addNone = 0;
1222 break;
1223 case Suite_kind:
1224 PyErr_SetString(PyExc_SystemError,
1225 "suite should not be possible");
1226 return 0;
1227 default:
1228 PyErr_Format(PyExc_SystemError,
1229 "module kind %d should not be possible",
1230 mod->kind);
1231 return 0;
1233 co = assemble(c, addNone);
1234 compiler_exit_scope(c);
1235 return co;
1238 /* The test for LOCAL must come before the test for FREE in order to
1239 handle classes where a name is both local and free. The local var is
1240 a method and the free var is a variable referenced within a method.
1243 static int
1244 get_ref_type(struct compiler *c, PyObject *name)
1246 int scope = PyST_GetScope(c->u->u_ste, name);
1247 if (scope == 0) {
1248 char buf[350];
1249 PyOS_snprintf(buf, sizeof(buf),
1250 "unknown scope for %.100s in %.100s(%s) in %s\n"
1251 "symbols: %s\nlocals: %s\nglobals: %s",
1252 PyString_AS_STRING(name),
1253 PyString_AS_STRING(c->u->u_name),
1254 PyObject_REPR(c->u->u_ste->ste_id),
1255 c->c_filename,
1256 PyObject_REPR(c->u->u_ste->ste_symbols),
1257 PyObject_REPR(c->u->u_varnames),
1258 PyObject_REPR(c->u->u_names)
1260 Py_FatalError(buf);
1263 return scope;
1266 static int
1267 compiler_lookup_arg(PyObject *dict, PyObject *name)
1269 PyObject *k, *v;
1270 k = PyTuple_Pack(2, name, name->ob_type);
1271 if (k == NULL)
1272 return -1;
1273 v = PyDict_GetItem(dict, k);
1274 Py_DECREF(k);
1275 if (v == NULL)
1276 return -1;
1277 return PyInt_AS_LONG(v);
1280 static int
1281 compiler_make_closure(struct compiler *c, PyCodeObject *co, int args)
1283 int i, free = PyCode_GetNumFree(co);
1284 if (free == 0) {
1285 ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
1286 ADDOP_I(c, MAKE_FUNCTION, args);
1287 return 1;
1289 for (i = 0; i < free; ++i) {
1290 /* Bypass com_addop_varname because it will generate
1291 LOAD_DEREF but LOAD_CLOSURE is needed.
1293 PyObject *name = PyTuple_GET_ITEM(co->co_freevars, i);
1294 int arg, reftype;
1296 /* Special case: If a class contains a method with a
1297 free variable that has the same name as a method,
1298 the name will be considered free *and* local in the
1299 class. It should be handled by the closure, as
1300 well as by the normal name lookup logic.
1302 reftype = get_ref_type(c, name);
1303 if (reftype == CELL)
1304 arg = compiler_lookup_arg(c->u->u_cellvars, name);
1305 else /* (reftype == FREE) */
1306 arg = compiler_lookup_arg(c->u->u_freevars, name);
1307 if (arg == -1) {
1308 printf("lookup %s in %s %d %d\n"
1309 "freevars of %s: %s\n",
1310 PyObject_REPR(name),
1311 PyString_AS_STRING(c->u->u_name),
1312 reftype, arg,
1313 PyString_AS_STRING(co->co_name),
1314 PyObject_REPR(co->co_freevars));
1315 Py_FatalError("compiler_make_closure()");
1317 ADDOP_I(c, LOAD_CLOSURE, arg);
1319 ADDOP_I(c, BUILD_TUPLE, free);
1320 ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
1321 ADDOP_I(c, MAKE_CLOSURE, args);
1322 return 1;
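/* Example: for

       def outer():
           x = 1
           def inner(): return x

   "inner" has the free variable x, so the code emitted for "outer" is
   roughly

       LOAD_CLOSURE  x
       BUILD_TUPLE   1
       LOAD_CONST    <code object inner>
       MAKE_CLOSURE  0

   while a function without free variables gets LOAD_CONST/MAKE_FUNCTION. */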
1325 static int
1326 compiler_decorators(struct compiler *c, asdl_seq* decos)
1328 int i;
1330 if (!decos)
1331 return 1;
1333 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1334 VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
1336 return 1;
1339 static int
1340 compiler_arguments(struct compiler *c, arguments_ty args)
1342 int i;
1343 int n = asdl_seq_LEN(args->args);
1344 /* Correctly handle nested argument lists */
1345 for (i = 0; i < n; i++) {
1346 expr_ty arg = (expr_ty)asdl_seq_GET(args->args, i);
1347 if (arg->kind == Tuple_kind) {
1348 PyObject *id = PyString_FromFormat(".%d", i);
1349 if (id == NULL) {
1350 return 0;
1352 if (!compiler_nameop(c, id, Load)) {
1353 Py_DECREF(id);
1354 return 0;
1356 Py_DECREF(id);
1357 VISIT(c, expr, arg);
1360 return 1;
1363 static int
1364 compiler_function(struct compiler *c, stmt_ty s)
1366 PyCodeObject *co;
1367 PyObject *first_const = Py_None;
1368 arguments_ty args = s->v.FunctionDef.args;
1369 asdl_seq* decos = s->v.FunctionDef.decorator_list;
1370 stmt_ty st;
1371 int i, n, docstring;
1373 assert(s->kind == FunctionDef_kind);
1375 if (!compiler_decorators(c, decos))
1376 return 0;
1377 if (args->defaults)
1378 VISIT_SEQ(c, expr, args->defaults);
1379 if (!compiler_enter_scope(c, s->v.FunctionDef.name, (void *)s,
1380 s->lineno))
1381 return 0;
1383 st = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, 0);
1384 docstring = compiler_isdocstring(st);
1385 if (docstring && Py_OptimizeFlag < 2)
1386 first_const = st->v.Expr.value->v.Str.s;
1387 if (compiler_add_o(c, c->u->u_consts, first_const) < 0) {
1388 compiler_exit_scope(c);
1389 return 0;
1392 /* unpack nested arguments */
1393 compiler_arguments(c, args);
1395 c->u->u_argcount = asdl_seq_LEN(args->args);
1396 n = asdl_seq_LEN(s->v.FunctionDef.body);
1397 /* if there was a docstring, we need to skip the first statement */
1398 for (i = docstring; i < n; i++) {
1399 st = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, i);
1400 VISIT_IN_SCOPE(c, stmt, st);
1402 co = assemble(c, 1);
1403 compiler_exit_scope(c);
1404 if (co == NULL)
1405 return 0;
1407 compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
1408 Py_DECREF(co);
1410 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1411 ADDOP_I(c, CALL_FUNCTION, 1);
1414 return compiler_nameop(c, s->v.FunctionDef.name, Store);
1417 static int
1418 compiler_class(struct compiler *c, stmt_ty s)
1420 int n, i;
1421 PyCodeObject *co;
1422 PyObject *str;
1423 asdl_seq* decos = s->v.ClassDef.decorator_list;
1425 if (!compiler_decorators(c, decos))
1426 return 0;
1428 /* push class name on stack, needed by BUILD_CLASS */
1429 ADDOP_O(c, LOAD_CONST, s->v.ClassDef.name, consts);
1430 /* push the tuple of base classes on the stack */
1431 n = asdl_seq_LEN(s->v.ClassDef.bases);
1432 if (n > 0)
1433 VISIT_SEQ(c, expr, s->v.ClassDef.bases);
1434 ADDOP_I(c, BUILD_TUPLE, n);
1435 if (!compiler_enter_scope(c, s->v.ClassDef.name, (void *)s,
1436 s->lineno))
1437 return 0;
1438 Py_XDECREF(c->u->u_private);
1439 c->u->u_private = s->v.ClassDef.name;
1440 Py_INCREF(c->u->u_private);
1441 str = PyString_InternFromString("__name__");
1442 if (!str || !compiler_nameop(c, str, Load)) {
1443 Py_XDECREF(str);
1444 compiler_exit_scope(c);
1445 return 0;
1448 Py_DECREF(str);
1449 str = PyString_InternFromString("__module__");
1450 if (!str || !compiler_nameop(c, str, Store)) {
1451 Py_XDECREF(str);
1452 compiler_exit_scope(c);
1453 return 0;
1455 Py_DECREF(str);
1457 if (!compiler_body(c, s->v.ClassDef.body)) {
1458 compiler_exit_scope(c);
1459 return 0;
1462 ADDOP_IN_SCOPE(c, LOAD_LOCALS);
1463 ADDOP_IN_SCOPE(c, RETURN_VALUE);
1464 co = assemble(c, 1);
1465 compiler_exit_scope(c);
1466 if (co == NULL)
1467 return 0;
1469 compiler_make_closure(c, co, 0);
1470 Py_DECREF(co);
1472 ADDOP_I(c, CALL_FUNCTION, 0);
1473 ADDOP(c, BUILD_CLASS);
1474 /* apply decorators */
1475 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1476 ADDOP_I(c, CALL_FUNCTION, 1);
1478 if (!compiler_nameop(c, s->v.ClassDef.name, Store))
1479 return 0;
1480 return 1;
1483 static int
1484 compiler_ifexp(struct compiler *c, expr_ty e)
1486 basicblock *end, *next;
1488 assert(e->kind == IfExp_kind);
1489 end = compiler_new_block(c);
1490 if (end == NULL)
1491 return 0;
1492 next = compiler_new_block(c);
1493 if (next == NULL)
1494 return 0;
1495 VISIT(c, expr, e->v.IfExp.test);
1496 ADDOP_JABS(c, POP_JUMP_IF_FALSE, next);
1497 VISIT(c, expr, e->v.IfExp.body);
1498 ADDOP_JREL(c, JUMP_FORWARD, end);
1499 compiler_use_next_block(c, next);
1500 VISIT(c, expr, e->v.IfExp.orelse);
1501 compiler_use_next_block(c, end);
1502 return 1;
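/* Example: "a if t else b" compiles to

          <code for t>
          POP_JUMP_IF_FALSE next
          <code for a>
          JUMP_FORWARD      end
    next: <code for b>
    end:
*/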
1505 static int
1506 compiler_lambda(struct compiler *c, expr_ty e)
1508 PyCodeObject *co;
1509 static identifier name;
1510 arguments_ty args = e->v.Lambda.args;
1511 assert(e->kind == Lambda_kind);
1513 if (!name) {
1514 name = PyString_InternFromString("<lambda>");
1515 if (!name)
1516 return 0;
1519 if (args->defaults)
1520 VISIT_SEQ(c, expr, args->defaults);
1521 if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
1522 return 0;
1524 /* unpack nested arguments */
1525 compiler_arguments(c, args);
1527 c->u->u_argcount = asdl_seq_LEN(args->args);
1528 VISIT_IN_SCOPE(c, expr, e->v.Lambda.body);
1529 if (c->u->u_ste->ste_generator) {
1530 ADDOP_IN_SCOPE(c, POP_TOP);
1532 else {
1533 ADDOP_IN_SCOPE(c, RETURN_VALUE);
1535 co = assemble(c, 1);
1536 compiler_exit_scope(c);
1537 if (co == NULL)
1538 return 0;
1540 compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
1541 Py_DECREF(co);
1543 return 1;
1546 static int
1547 compiler_print(struct compiler *c, stmt_ty s)
1549 int i, n;
1550 bool dest;
1552 assert(s->kind == Print_kind);
1553 n = asdl_seq_LEN(s->v.Print.values);
1554 dest = false;
1555 if (s->v.Print.dest) {
1556 VISIT(c, expr, s->v.Print.dest);
1557 dest = true;
1559 for (i = 0; i < n; i++) {
1560 expr_ty e = (expr_ty)asdl_seq_GET(s->v.Print.values, i);
1561 if (dest) {
1562 ADDOP(c, DUP_TOP);
1563 VISIT(c, expr, e);
1564 ADDOP(c, ROT_TWO);
1565 ADDOP(c, PRINT_ITEM_TO);
1567 else {
1568 VISIT(c, expr, e);
1569 ADDOP(c, PRINT_ITEM);
1572 if (s->v.Print.nl) {
1573 if (dest)
1574 ADDOP(c, PRINT_NEWLINE_TO)
1575 else
1576 ADDOP(c, PRINT_NEWLINE)
1578 else if (dest)
1579 ADDOP(c, POP_TOP);
1580 return 1;
1583 static int
1584 compiler_if(struct compiler *c, stmt_ty s)
1586 basicblock *end, *next;
1587 int constant;
1588 assert(s->kind == If_kind);
1589 end = compiler_new_block(c);
1590 if (end == NULL)
1591 return 0;
1593 constant = expr_constant(s->v.If.test);
1594 /* constant = 0: "if 0"
1595 * constant = 1: "if 1", "if 2", ...
1596 * constant = -1: rest */
1597 if (constant == 0) {
1598 if (s->v.If.orelse)
1599 VISIT_SEQ(c, stmt, s->v.If.orelse);
1600 } else if (constant == 1) {
1601 VISIT_SEQ(c, stmt, s->v.If.body);
1602 } else {
1603 if (s->v.If.orelse) {
1604 next = compiler_new_block(c);
1605 if (next == NULL)
1606 return 0;
1608 else
1609 next = end;
1610 VISIT(c, expr, s->v.If.test);
1611 ADDOP_JABS(c, POP_JUMP_IF_FALSE, next);
1612 VISIT_SEQ(c, stmt, s->v.If.body);
1613 ADDOP_JREL(c, JUMP_FORWARD, end);
1614 if (s->v.If.orelse) {
1615 compiler_use_next_block(c, next);
1616 VISIT_SEQ(c, stmt, s->v.If.orelse);
1619 compiler_use_next_block(c, end);
1620 return 1;
1623 static int
1624 compiler_for(struct compiler *c, stmt_ty s)
1626 basicblock *start, *cleanup, *end;
1628 start = compiler_new_block(c);
1629 cleanup = compiler_new_block(c);
1630 end = compiler_new_block(c);
1631 if (start == NULL || end == NULL || cleanup == NULL)
1632 return 0;
1633 ADDOP_JREL(c, SETUP_LOOP, end);
1634 if (!compiler_push_fblock(c, LOOP, start))
1635 return 0;
1636 VISIT(c, expr, s->v.For.iter);
1637 ADDOP(c, GET_ITER);
1638 compiler_use_next_block(c, start);
1639 ADDOP_JREL(c, FOR_ITER, cleanup);
1640 VISIT(c, expr, s->v.For.target);
1641 VISIT_SEQ(c, stmt, s->v.For.body);
1642 ADDOP_JABS(c, JUMP_ABSOLUTE, start);
1643 compiler_use_next_block(c, cleanup);
1644 ADDOP(c, POP_BLOCK);
1645 compiler_pop_fblock(c, LOOP, start);
1646 VISIT_SEQ(c, stmt, s->v.For.orelse);
1647 compiler_use_next_block(c, end);
1648 return 1;
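/* Example: "for x in seq: <body>" with no else clause becomes

          SETUP_LOOP     end
          <code for seq>
          GET_ITER
   start: FOR_ITER       cleanup
          <store to x>
          <code for body>
          JUMP_ABSOLUTE  start
 cleanup: POP_BLOCK
     end:
*/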
1651 static int
1652 compiler_while(struct compiler *c, stmt_ty s)
1654 basicblock *loop, *orelse, *end, *anchor = NULL;
1655 int constant = expr_constant(s->v.While.test);
1657 if (constant == 0) {
1658 if (s->v.While.orelse)
1659 VISIT_SEQ(c, stmt, s->v.While.orelse);
1660 return 1;
1662 loop = compiler_new_block(c);
1663 end = compiler_new_block(c);
1664 if (constant == -1) {
1665 anchor = compiler_new_block(c);
1666 if (anchor == NULL)
1667 return 0;
1669 if (loop == NULL || end == NULL)
1670 return 0;
1671 if (s->v.While.orelse) {
1672 orelse = compiler_new_block(c);
1673 if (orelse == NULL)
1674 return 0;
1676 else
1677 orelse = NULL;
1679 ADDOP_JREL(c, SETUP_LOOP, end);
1680 compiler_use_next_block(c, loop);
1681 if (!compiler_push_fblock(c, LOOP, loop))
1682 return 0;
1683 if (constant == -1) {
1684 VISIT(c, expr, s->v.While.test);
1685 ADDOP_JABS(c, POP_JUMP_IF_FALSE, anchor);
1687 VISIT_SEQ(c, stmt, s->v.While.body);
1688 ADDOP_JABS(c, JUMP_ABSOLUTE, loop);
1690 /* XXX should the two POP instructions be in a separate block
1691 if there is no else clause ?
1694 if (constant == -1) {
1695 compiler_use_next_block(c, anchor);
1696 ADDOP(c, POP_BLOCK);
1698 compiler_pop_fblock(c, LOOP, loop);
1699 if (orelse != NULL) /* what if orelse is just pass? */
1700 VISIT_SEQ(c, stmt, s->v.While.orelse);
1701 compiler_use_next_block(c, end);
1703 return 1;
1706 static int
1707 compiler_continue(struct compiler *c)
1709 static const char LOOP_ERROR_MSG[] = "'continue' not properly in loop";
1710 static const char IN_FINALLY_ERROR_MSG[] =
1711 "'continue' not supported inside 'finally' clause";
1712 int i;
1714 if (!c->u->u_nfblocks)
1715 return compiler_error(c, LOOP_ERROR_MSG);
1716 i = c->u->u_nfblocks - 1;
1717 switch (c->u->u_fblock[i].fb_type) {
1718 case LOOP:
1719 ADDOP_JABS(c, JUMP_ABSOLUTE, c->u->u_fblock[i].fb_block);
1720 break;
1721 case EXCEPT:
1722 case FINALLY_TRY:
1723 while (--i >= 0 && c->u->u_fblock[i].fb_type != LOOP) {
1724 /* Prevent continue anywhere under a finally
1725 even if hidden in a sub-try or except. */
1726 if (c->u->u_fblock[i].fb_type == FINALLY_END)
1727 return compiler_error(c, IN_FINALLY_ERROR_MSG);
1729 if (i == -1)
1730 return compiler_error(c, LOOP_ERROR_MSG);
1731 ADDOP_JABS(c, CONTINUE_LOOP, c->u->u_fblock[i].fb_block);
1732 break;
1733 case FINALLY_END:
1734 return compiler_error(c, IN_FINALLY_ERROR_MSG);
1737 return 1;
1740 /* Code generated for "try: <body> finally: <finalbody>" is as follows:
1742 SETUP_FINALLY L
1743 <code for body>
1744 POP_BLOCK
1745 LOAD_CONST <None>
1746 L: <code for finalbody>
1747 END_FINALLY
1749 The special instructions use the block stack. Each block
1750 stack entry contains the instruction that created it (here
1751 SETUP_FINALLY), the level of the value stack at the time the
1752 block stack entry was created, and a label (here L).
1754 SETUP_FINALLY:
1755 Pushes the current value stack level and the label
1756 onto the block stack.
1757 POP_BLOCK:
1758 Pops an entry from the block stack, and pops the value
1759 stack until its level is the same as indicated on the
1760 block stack. (The label is ignored.)
1761 END_FINALLY:
1762 Pops a variable number of entries from the *value* stack
1763 and re-raises the exception they specify. The number of
1764 entries popped depends on the (pseudo) exception type.
1766 The block stack is unwound when an exception is raised:
1767 when a SETUP_FINALLY entry is found, the exception is pushed
1768 onto the value stack (and the exception condition is cleared),
1769 and the interpreter jumps to the label gotten from the block
1770 stack.
1773 static int
1774 compiler_try_finally(struct compiler *c, stmt_ty s)
1776 basicblock *body, *end;
1777 body = compiler_new_block(c);
1778 end = compiler_new_block(c);
1779 if (body == NULL || end == NULL)
1780 return 0;
1782 ADDOP_JREL(c, SETUP_FINALLY, end);
1783 compiler_use_next_block(c, body);
1784 if (!compiler_push_fblock(c, FINALLY_TRY, body))
1785 return 0;
1786 VISIT_SEQ(c, stmt, s->v.TryFinally.body);
1787 ADDOP(c, POP_BLOCK);
1788 compiler_pop_fblock(c, FINALLY_TRY, body);
1790 ADDOP_O(c, LOAD_CONST, Py_None, consts);
1791 compiler_use_next_block(c, end);
1792 if (!compiler_push_fblock(c, FINALLY_END, end))
1793 return 0;
1794 VISIT_SEQ(c, stmt, s->v.TryFinally.finalbody);
1795 ADDOP(c, END_FINALLY);
1796 compiler_pop_fblock(c, FINALLY_END, end);
1798 return 1;
1802 Code generated for "try: S except E1, V1: S1 except E2, V2: S2 ...":
1803 (The contents of the value stack is shown in [], with the top
1804 at the right; 'tb' is trace-back info, 'val' the exception's
1805 associated value, and 'exc' the exception.)
1807 Value stack Label Instruction Argument
1808 [] SETUP_EXCEPT L1
1809 [] <code for S>
1810 [] POP_BLOCK
1811 [] JUMP_FORWARD L0
1813 [tb, val, exc] L1: DUP )
1814 [tb, val, exc, exc] <evaluate E1> )
1815 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1
1816 [tb, val, exc, 1-or-0] POP_JUMP_IF_FALSE L2 )
1817 [tb, val, exc] POP
1818 [tb, val] <assign to V1> (or POP if no V1)
1819 [tb] POP
1820 [] <code for S1>
1821 JUMP_FORWARD L0
1823 [tb, val, exc] L2: DUP
1824 .............................etc.......................
1826 [tb, val, exc] Ln+1: END_FINALLY # re-raise exception
1828 [] L0: <next statement>
1830 Of course, parts are not generated if Vi or Ei is not present.
1832 static int
1833 compiler_try_except(struct compiler *c, stmt_ty s)
1835 basicblock *body, *orelse, *except, *end;
1836 int i, n;
1838 body = compiler_new_block(c);
1839 except = compiler_new_block(c);
1840 orelse = compiler_new_block(c);
1841 end = compiler_new_block(c);
1842 if (body == NULL || except == NULL || orelse == NULL || end == NULL)
1843 return 0;
1844 ADDOP_JREL(c, SETUP_EXCEPT, except);
1845 compiler_use_next_block(c, body);
1846 if (!compiler_push_fblock(c, EXCEPT, body))
1847 return 0;
1848 VISIT_SEQ(c, stmt, s->v.TryExcept.body);
1849 ADDOP(c, POP_BLOCK);
1850 compiler_pop_fblock(c, EXCEPT, body);
1851 ADDOP_JREL(c, JUMP_FORWARD, orelse);
1852 n = asdl_seq_LEN(s->v.TryExcept.handlers);
1853 compiler_use_next_block(c, except);
1854 for (i = 0; i < n; i++) {
1855 excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
1856 s->v.TryExcept.handlers, i);
1857 if (!handler->v.ExceptHandler.type && i < n-1)
1858 return compiler_error(c, "default 'except:' must be last");
1859 c->u->u_lineno_set = false;
1860 c->u->u_lineno = handler->lineno;
1861 except = compiler_new_block(c);
1862 if (except == NULL)
1863 return 0;
1864 if (handler->v.ExceptHandler.type) {
1865 ADDOP(c, DUP_TOP);
1866 VISIT(c, expr, handler->v.ExceptHandler.type);
1867 ADDOP_I(c, COMPARE_OP, PyCmp_EXC_MATCH);
1868 ADDOP_JABS(c, POP_JUMP_IF_FALSE, except);
1870 ADDOP(c, POP_TOP);
1871 if (handler->v.ExceptHandler.name) {
1872 VISIT(c, expr, handler->v.ExceptHandler.name);
1874 else {
1875 ADDOP(c, POP_TOP);
1877 ADDOP(c, POP_TOP);
1878 VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
1879 ADDOP_JREL(c, JUMP_FORWARD, end);
1880 compiler_use_next_block(c, except);
1882 ADDOP(c, END_FINALLY);
1883 compiler_use_next_block(c, orelse);
1884 VISIT_SEQ(c, stmt, s->v.TryExcept.orelse);
1885 compiler_use_next_block(c, end);
1886 return 1;
1889 static int
1890 compiler_import_as(struct compiler *c, identifier name, identifier asname)
1892 /* The IMPORT_NAME opcode was already generated. This function
1893 merely needs to bind the result to a name.
1895 If there is a dot in name, we need to split it and emit a
1896 LOAD_ATTR for each name.
1898 const char *src = PyString_AS_STRING(name);
1899 const char *dot = strchr(src, '.');
1900 if (dot) {
1901 /* Consume the base module name to get the first attribute */
1902 src = dot + 1;
1903 while (dot) {
1904 /* NB src is only defined when dot != NULL */
1905 PyObject *attr;
1906 dot = strchr(src, '.');
1907 attr = PyString_FromStringAndSize(src,
1908 dot ? dot - src : strlen(src));
1909 if (!attr)
1910 return -1;
1911 ADDOP_O(c, LOAD_ATTR, attr, names);
1912 Py_DECREF(attr);
1913 src = dot + 1;
1916 return compiler_nameop(c, asname, Store);
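/* Example: "import a.b.c as d" leaves module "a" on the stack after
   IMPORT_NAME, so binding the result to d walks down the attributes:

       IMPORT_NAME  a.b.c
       LOAD_ATTR    b
       LOAD_ATTR    c
       STORE_NAME   d
*/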
1919 static int
1920 compiler_import(struct compiler *c, stmt_ty s)
1922 /* The Import node stores a module name like a.b.c as a single
1923 string. This is convenient for all cases except
1924 import a.b.c as d
1925 where we need to parse that string to extract the individual
1926 module names.
1927 XXX Perhaps change the representation to make this case simpler?
1929 int i, n = asdl_seq_LEN(s->v.Import.names);
1931 for (i = 0; i < n; i++) {
1932 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i);
1933 int r;
1934 PyObject *level;
1936 if (c->c_flags && (c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT))
1937 level = PyInt_FromLong(0);
1938 else
1939 level = PyInt_FromLong(-1);
1941 if (level == NULL)
1942 return 0;
1944 ADDOP_O(c, LOAD_CONST, level, consts);
1945 Py_DECREF(level);
1946 ADDOP_O(c, LOAD_CONST, Py_None, consts);
1947 ADDOP_NAME(c, IMPORT_NAME, alias->name, names);
1949 if (alias->asname) {
1950 r = compiler_import_as(c, alias->name, alias->asname);
1951 if (!r)
1952 return r;
1954 else {
1955 identifier tmp = alias->name;
1956 const char *base = PyString_AS_STRING(alias->name);
1957 char *dot = strchr(base, '.');
1958 if (dot)
1959 tmp = PyString_FromStringAndSize(base,
1960 dot - base);
1961 r = compiler_nameop(c, tmp, Store);
1962 if (dot) {
1963 Py_DECREF(tmp);
1965 if (!r)
1966 return r;
1969 return 1;
1972 static int
1973 compiler_from_import(struct compiler *c, stmt_ty s)
1975 int i, n = asdl_seq_LEN(s->v.ImportFrom.names);
1977 PyObject *names = PyTuple_New(n);
1978 PyObject *level;
1980 if (!names)
1981 return 0;
1983 if (s->v.ImportFrom.level == 0 && c->c_flags &&
1984 !(c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT))
1985 level = PyInt_FromLong(-1);
1986 else
1987 level = PyInt_FromLong(s->v.ImportFrom.level);
1989 if (!level) {
1990 Py_DECREF(names);
1991 return 0;
1994 /* build up the names */
1995 for (i = 0; i < n; i++) {
1996 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
1997 Py_INCREF(alias->name);
1998 PyTuple_SET_ITEM(names, i, alias->name);
2001 if (s->lineno > c->c_future->ff_lineno) {
2002 if (!strcmp(PyString_AS_STRING(s->v.ImportFrom.module),
2003 "__future__")) {
2004 Py_DECREF(level);
2005 Py_DECREF(names);
2006 return compiler_error(c,
2007 "from __future__ imports must occur "
2008 "at the beginning of the file");
2013 ADDOP_O(c, LOAD_CONST, level, consts);
2014 Py_DECREF(level);
2015 ADDOP_O(c, LOAD_CONST, names, consts);
2016 Py_DECREF(names);
2017 ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names);
2018 for (i = 0; i < n; i++) {
2019 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
2020 identifier store_name;
2022 if (i == 0 && *PyString_AS_STRING(alias->name) == '*') {
2023 assert(n == 1);
2024 ADDOP(c, IMPORT_STAR);
2025 return 1;
2028 ADDOP_NAME(c, IMPORT_FROM, alias->name, names);
2029 store_name = alias->name;
2030 if (alias->asname)
2031 store_name = alias->asname;
2033 if (!compiler_nameop(c, store_name, Store)) {
2034 Py_DECREF(names);
2035 return 0;
2038 /* remove imported module */
2039 ADDOP(c, POP_TOP);
2040 return 1;
2043 static int
2044 compiler_assert(struct compiler *c, stmt_ty s)
2046 static PyObject *assertion_error = NULL;
2047 basicblock *end;
2049 if (Py_OptimizeFlag)
2050 return 1;
2051 if (assertion_error == NULL) {
2052 assertion_error = PyString_InternFromString("AssertionError");
2053 if (assertion_error == NULL)
2054 return 0;
2056 if (s->v.Assert.test->kind == Tuple_kind &&
2057 asdl_seq_LEN(s->v.Assert.test->v.Tuple.elts) > 0) {
2058 const char* msg =
2059 "assertion is always true, perhaps remove parentheses?";
2060 if (PyErr_WarnExplicit(PyExc_SyntaxWarning, msg, c->c_filename,
2061 c->u->u_lineno, NULL, NULL) == -1)
2062 return 0;
2064 VISIT(c, expr, s->v.Assert.test);
2065 end = compiler_new_block(c);
2066 if (end == NULL)
2067 return 0;
2068 ADDOP_JABS(c, POP_JUMP_IF_TRUE, end);
2069 ADDOP_O(c, LOAD_GLOBAL, assertion_error, names);
2070 if (s->v.Assert.msg) {
2071 VISIT(c, expr, s->v.Assert.msg);
2072 ADDOP_I(c, RAISE_VARARGS, 2);
2074 else {
2075 ADDOP_I(c, RAISE_VARARGS, 1);
2077 compiler_use_next_block(c, end);
2078 return 1;
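/* Example: "assert x, msg" (when not running with -O) compiles to

          <code for x>
          POP_JUMP_IF_TRUE  end
          LOAD_GLOBAL       AssertionError
          <code for msg>
          RAISE_VARARGS     2
     end:
*/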
2081 static int
2082 compiler_visit_stmt(struct compiler *c, stmt_ty s)
2084 int i, n;
2086 /* Always assign a lineno to the next instruction for a stmt. */
2087 c->u->u_lineno = s->lineno;
2088 c->u->u_lineno_set = false;
2090 switch (s->kind) {
2091 case FunctionDef_kind:
2092 return compiler_function(c, s);
2093 case ClassDef_kind:
2094 return compiler_class(c, s);
2095 case Return_kind:
2096 if (c->u->u_ste->ste_type != FunctionBlock)
2097 return compiler_error(c, "'return' outside function");
2098 if (s->v.Return.value) {
2099 VISIT(c, expr, s->v.Return.value);
2101 else
2102 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2103 ADDOP(c, RETURN_VALUE);
2104 break;
2105 case Delete_kind:
2106 VISIT_SEQ(c, expr, s->v.Delete.targets)
2107 break;
2108 case Assign_kind:
2109 n = asdl_seq_LEN(s->v.Assign.targets);
2110 VISIT(c, expr, s->v.Assign.value);
2111 for (i = 0; i < n; i++) {
2112 if (i < n - 1)
2113 ADDOP(c, DUP_TOP);
2114 VISIT(c, expr,
2115 (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
2117 break;
2118 case AugAssign_kind:
2119 return compiler_augassign(c, s);
2120 case Print_kind:
2121 return compiler_print(c, s);
2122 case For_kind:
2123 return compiler_for(c, s);
2124 case While_kind:
2125 return compiler_while(c, s);
2126 case If_kind:
2127 return compiler_if(c, s);
2128 case Raise_kind:
2129 n = 0;
2130 if (s->v.Raise.type) {
2131 VISIT(c, expr, s->v.Raise.type);
2132 n++;
2133 if (s->v.Raise.inst) {
2134 VISIT(c, expr, s->v.Raise.inst);
2135 n++;
2136 if (s->v.Raise.tback) {
2137 VISIT(c, expr, s->v.Raise.tback);
2138 n++;
2142 ADDOP_I(c, RAISE_VARARGS, n);
2143 break;
2144 case TryExcept_kind:
2145 return compiler_try_except(c, s);
2146 case TryFinally_kind:
2147 return compiler_try_finally(c, s);
2148 case Assert_kind:
2149 return compiler_assert(c, s);
2150 case Import_kind:
2151 return compiler_import(c, s);
2152 case ImportFrom_kind:
2153 return compiler_from_import(c, s);
2154 case Exec_kind:
2155 VISIT(c, expr, s->v.Exec.body);
2156 if (s->v.Exec.globals) {
2157 VISIT(c, expr, s->v.Exec.globals);
2158 if (s->v.Exec.locals) {
2159 VISIT(c, expr, s->v.Exec.locals);
2160 } else {
2161 ADDOP(c, DUP_TOP);
2163 } else {
2164 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2165 ADDOP(c, DUP_TOP);
2167 ADDOP(c, EXEC_STMT);
2168 break;
2169 case Global_kind:
2170 break;
2171 case Expr_kind:
2172 if (c->c_interactive && c->c_nestlevel <= 1) {
2173 VISIT(c, expr, s->v.Expr.value);
2174 ADDOP(c, PRINT_EXPR);
2176 else if (s->v.Expr.value->kind != Str_kind &&
2177 s->v.Expr.value->kind != Num_kind) {
2178 VISIT(c, expr, s->v.Expr.value);
2179 ADDOP(c, POP_TOP);
2181 break;
2182 case Pass_kind:
2183 break;
2184 case Break_kind:
2185 if (!compiler_in_loop(c))
2186 return compiler_error(c, "'break' outside loop");
2187 ADDOP(c, BREAK_LOOP);
2188 break;
2189 case Continue_kind:
2190 return compiler_continue(c);
2191 case With_kind:
2192 return compiler_with(c, s);
2194 return 1;
2197 static int
2198 unaryop(unaryop_ty op)
2200 switch (op) {
2201 case Invert:
2202 return UNARY_INVERT;
2203 case Not:
2204 return UNARY_NOT;
2205 case UAdd:
2206 return UNARY_POSITIVE;
2207 case USub:
2208 return UNARY_NEGATIVE;
2209 default:
2210 PyErr_Format(PyExc_SystemError,
2211 "unary op %d should not be possible", op);
2212 return 0;
2216 static int
2217 binop(struct compiler *c, operator_ty op)
2219 switch (op) {
2220 case Add:
2221 return BINARY_ADD;
2222 case Sub:
2223 return BINARY_SUBTRACT;
2224 case Mult:
2225 return BINARY_MULTIPLY;
2226 case Div:
2227 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2228 return BINARY_TRUE_DIVIDE;
2229 else
2230 return BINARY_DIVIDE;
2231 case Mod:
2232 return BINARY_MODULO;
2233 case Pow:
2234 return BINARY_POWER;
2235 case LShift:
2236 return BINARY_LSHIFT;
2237 case RShift:
2238 return BINARY_RSHIFT;
2239 case BitOr:
2240 return BINARY_OR;
2241 case BitXor:
2242 return BINARY_XOR;
2243 case BitAnd:
2244 return BINARY_AND;
2245 case FloorDiv:
2246 return BINARY_FLOOR_DIVIDE;
2247 default:
2248 PyErr_Format(PyExc_SystemError,
2249 "binary op %d should not be possible", op);
2250 return 0;
2254 static int
2255 cmpop(cmpop_ty op)
2257 switch (op) {
2258 case Eq:
2259 return PyCmp_EQ;
2260 case NotEq:
2261 return PyCmp_NE;
2262 case Lt:
2263 return PyCmp_LT;
2264 case LtE:
2265 return PyCmp_LE;
2266 case Gt:
2267 return PyCmp_GT;
2268 case GtE:
2269 return PyCmp_GE;
2270 case Is:
2271 return PyCmp_IS;
2272 case IsNot:
2273 return PyCmp_IS_NOT;
2274 case In:
2275 return PyCmp_IN;
2276 case NotIn:
2277 return PyCmp_NOT_IN;
2278 default:
2279 return PyCmp_BAD;
2283 static int
2284 inplace_binop(struct compiler *c, operator_ty op)
2286 switch (op) {
2287 case Add:
2288 return INPLACE_ADD;
2289 case Sub:
2290 return INPLACE_SUBTRACT;
2291 case Mult:
2292 return INPLACE_MULTIPLY;
2293 case Div:
2294 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2295 return INPLACE_TRUE_DIVIDE;
2296 else
2297 return INPLACE_DIVIDE;
2298 case Mod:
2299 return INPLACE_MODULO;
2300 case Pow:
2301 return INPLACE_POWER;
2302 case LShift:
2303 return INPLACE_LSHIFT;
2304 case RShift:
2305 return INPLACE_RSHIFT;
2306 case BitOr:
2307 return INPLACE_OR;
2308 case BitXor:
2309 return INPLACE_XOR;
2310 case BitAnd:
2311 return INPLACE_AND;
2312 case FloorDiv:
2313 return INPLACE_FLOOR_DIVIDE;
2314 default:
2315 PyErr_Format(PyExc_SystemError,
2316 "inplace binary op %d should not be possible", op);
2317 return 0;
2321 static int
2322 compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx)
2324 int op, scope, arg;
2325 enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype;
2327 PyObject *dict = c->u->u_names;
2328 PyObject *mangled;
2329 /* XXX AugStore isn't used anywhere! */
2331 mangled = _Py_Mangle(c->u->u_private, name);
2332 if (!mangled)
2333 return 0;
2335 op = 0;
2336 optype = OP_NAME;
2337 scope = PyST_GetScope(c->u->u_ste, mangled);
2338 switch (scope) {
2339 case FREE:
2340 dict = c->u->u_freevars;
2341 optype = OP_DEREF;
2342 break;
2343 case CELL:
2344 dict = c->u->u_cellvars;
2345 optype = OP_DEREF;
2346 break;
2347 case LOCAL:
2348 if (c->u->u_ste->ste_type == FunctionBlock)
2349 optype = OP_FAST;
2350 break;
2351 case GLOBAL_IMPLICIT:
2352 if (c->u->u_ste->ste_type == FunctionBlock &&
2353 !c->u->u_ste->ste_unoptimized)
2354 optype = OP_GLOBAL;
2355 break;
2356 case GLOBAL_EXPLICIT:
2357 optype = OP_GLOBAL;
2358 break;
2359 default:
2360 /* scope can be 0 */
2361 break;
2364 /* XXX Leave assert here, but handle __doc__ and the like better */
2365 assert(scope || PyString_AS_STRING(name)[0] == '_');
2367 switch (optype) {
2368 case OP_DEREF:
2369 switch (ctx) {
2370 case Load: op = LOAD_DEREF; break;
2371 case Store: op = STORE_DEREF; break;
2372 case AugLoad:
2373 case AugStore:
2374 break;
2375 case Del:
2376 PyErr_Format(PyExc_SyntaxError,
2377 "can not delete variable '%s' referenced "
2378 "in nested scope",
2379 PyString_AS_STRING(name));
2380 Py_DECREF(mangled);
2381 return 0;
2382 case Param:
2383 default:
2384 PyErr_SetString(PyExc_SystemError,
2385 "param invalid for deref variable");
2386 return 0;
2388 break;
2389 case OP_FAST:
2390 switch (ctx) {
2391 case Load: op = LOAD_FAST; break;
2392 case Store: op = STORE_FAST; break;
2393 case Del: op = DELETE_FAST; break;
2394 case AugLoad:
2395 case AugStore:
2396 break;
2397 case Param:
2398 default:
2399 PyErr_SetString(PyExc_SystemError,
2400 "param invalid for local variable");
2401 return 0;
2403 ADDOP_O(c, op, mangled, varnames);
2404 Py_DECREF(mangled);
2405 return 1;
2406 case OP_GLOBAL:
2407 switch (ctx) {
2408 case Load: op = LOAD_GLOBAL; break;
2409 case Store: op = STORE_GLOBAL; break;
2410 case Del: op = DELETE_GLOBAL; break;
2411 case AugLoad:
2412 case AugStore:
2413 break;
2414 case Param:
2415 default:
2416 PyErr_SetString(PyExc_SystemError,
2417 "param invalid for global variable");
2418 return 0;
2420 break;
2421 case OP_NAME:
2422 switch (ctx) {
2423 case Load: op = LOAD_NAME; break;
2424 case Store: op = STORE_NAME; break;
2425 case Del: op = DELETE_NAME; break;
2426 case AugLoad:
2427 case AugStore:
2428 break;
2429 case Param:
2430 default:
2431 PyErr_SetString(PyExc_SystemError,
2432 "param invalid for name variable");
2433 return 0;
2435 break;
2438 assert(op);
2439 arg = compiler_add_o(c, dict, mangled);
2440 Py_DECREF(mangled);
2441 if (arg < 0)
2442 return 0;
2443 return compiler_addop_i(c, op, arg);
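/* For reference, the scope -> opcode family mapping above works out to
   (illustrative summary):
       FREE, CELL                          -> *_DEREF  (u_freevars / u_cellvars)
       LOCAL inside a FunctionBlock        -> *_FAST   (u_varnames)
       GLOBAL_EXPLICIT, or GLOBAL_IMPLICIT
       in an optimized FunctionBlock       -> *_GLOBAL
       anything else                       -> *_NAME   (u_names)
   where * is LOAD, STORE or DELETE depending on the expression context. */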
2446 static int
2447 compiler_boolop(struct compiler *c, expr_ty e)
2449 basicblock *end;
2450 int jumpi, i, n;
2451 asdl_seq *s;
2453 assert(e->kind == BoolOp_kind);
2454 if (e->v.BoolOp.op == And)
2455 jumpi = JUMP_IF_FALSE_OR_POP;
2456 else
2457 jumpi = JUMP_IF_TRUE_OR_POP;
2458 end = compiler_new_block(c);
2459 if (end == NULL)
2460 return 0;
2461 s = e->v.BoolOp.values;
2462 n = asdl_seq_LEN(s) - 1;
2463 assert(n >= 0);
2464 for (i = 0; i < n; ++i) {
2465 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i));
2466 ADDOP_JABS(c, jumpi, end);
2468 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
2469 compiler_use_next_block(c, end);
2470 return 1;
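/* For illustration, "a and b and c" (placeholder names) is emitted above,
   before peephole optimization, roughly as:
       LOAD a; JUMP_IF_FALSE_OR_POP end
       LOAD b; JUMP_IF_FALSE_OR_POP end
       LOAD c
   end:
   so each value short-circuits the rest while leaving the deciding value
   on the stack. */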
2473 static int
2474 compiler_list(struct compiler *c, expr_ty e)
2476 int n = asdl_seq_LEN(e->v.List.elts);
2477 if (e->v.List.ctx == Store) {
2478 ADDOP_I(c, UNPACK_SEQUENCE, n);
2480 VISIT_SEQ(c, expr, e->v.List.elts);
2481 if (e->v.List.ctx == Load) {
2482 ADDOP_I(c, BUILD_LIST, n);
2484 return 1;
2487 static int
2488 compiler_tuple(struct compiler *c, expr_ty e)
2490 int n = asdl_seq_LEN(e->v.Tuple.elts);
2491 if (e->v.Tuple.ctx == Store) {
2492 ADDOP_I(c, UNPACK_SEQUENCE, n);
2494 VISIT_SEQ(c, expr, e->v.Tuple.elts);
2495 if (e->v.Tuple.ctx == Load) {
2496 ADDOP_I(c, BUILD_TUPLE, n);
2498 return 1;
2501 static int
2502 compiler_compare(struct compiler *c, expr_ty e)
2504 int i, n;
2505 basicblock *cleanup = NULL;
2507 /* XXX the logic can be cleaned up for 1 or multiple comparisons */
2508 VISIT(c, expr, e->v.Compare.left);
2509 n = asdl_seq_LEN(e->v.Compare.ops);
2510 assert(n > 0);
2511 if (n > 1) {
2512 cleanup = compiler_new_block(c);
2513 if (cleanup == NULL)
2514 return 0;
2515 VISIT(c, expr,
2516 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0));
2518 for (i = 1; i < n; i++) {
2519 ADDOP(c, DUP_TOP);
2520 ADDOP(c, ROT_THREE);
2521 ADDOP_I(c, COMPARE_OP,
2522 cmpop((cmpop_ty)(asdl_seq_GET(
2523 e->v.Compare.ops, i - 1))));
2524 ADDOP_JABS(c, JUMP_IF_FALSE_OR_POP, cleanup);
2525 NEXT_BLOCK(c);
2526 if (i < (n - 1))
2527 VISIT(c, expr,
2528 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
2530 VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n - 1));
2531 ADDOP_I(c, COMPARE_OP,
2532 cmpop((cmpop_ty)(asdl_seq_GET(e->v.Compare.ops, n - 1))));
2533 if (n > 1) {
2534 basicblock *end = compiler_new_block(c);
2535 if (end == NULL)
2536 return 0;
2537 ADDOP_JREL(c, JUMP_FORWARD, end);
2538 compiler_use_next_block(c, cleanup);
2539 ADDOP(c, ROT_TWO);
2540 ADDOP(c, POP_TOP);
2541 compiler_use_next_block(c, end);
2543 return 1;
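/* For illustration, a chained comparison such as "x < y < z" (placeholder
   names) is emitted above, before peephole optimization, roughly as:
       LOAD x; LOAD y
       DUP_TOP; ROT_THREE; COMPARE_OP <; JUMP_IF_FALSE_OR_POP cleanup
       LOAD z; COMPARE_OP <; JUMP_FORWARD end
   cleanup: ROT_TWO; POP_TOP
   end:
   so y is evaluated only once and a false intermediate result
   short-circuits the rest of the chain. */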
2546 static int
2547 compiler_call(struct compiler *c, expr_ty e)
2549 int n, code = 0;
2551 VISIT(c, expr, e->v.Call.func);
2552 n = asdl_seq_LEN(e->v.Call.args);
2553 VISIT_SEQ(c, expr, e->v.Call.args);
2554 if (e->v.Call.keywords) {
2555 VISIT_SEQ(c, keyword, e->v.Call.keywords);
2556 n |= asdl_seq_LEN(e->v.Call.keywords) << 8;
2558 if (e->v.Call.starargs) {
2559 VISIT(c, expr, e->v.Call.starargs);
2560 code |= 1;
2562 if (e->v.Call.kwargs) {
2563 VISIT(c, expr, e->v.Call.kwargs);
2564 code |= 2;
2566 switch (code) {
2567 case 0:
2568 ADDOP_I(c, CALL_FUNCTION, n);
2569 break;
2570 case 1:
2571 ADDOP_I(c, CALL_FUNCTION_VAR, n);
2572 break;
2573 case 2:
2574 ADDOP_I(c, CALL_FUNCTION_KW, n);
2575 break;
2576 case 3:
2577 ADDOP_I(c, CALL_FUNCTION_VAR_KW, n);
2578 break;
2580 return 1;
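/* The oparg for the CALL_FUNCTION* opcodes above packs the positional
   argument count in the low byte and the keyword argument count in the
   second byte.  E.g. for a call like f(a, b, key=c) (placeholder names),
   n == 2 | (1 << 8) == 0x0102 and, with no * or ** arguments, code == 0,
   so CALL_FUNCTION 0x0102 is emitted; f(*args, **kw) would select
   CALL_FUNCTION_VAR_KW instead. */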
2583 static int
2584 compiler_listcomp_generator(struct compiler *c, asdl_seq *generators,
2585 int gen_index, expr_ty elt)
2587 /* generate code for the iterator, then each of the ifs,
2588 and then write to the element */
2590 comprehension_ty l;
2591 basicblock *start, *anchor, *skip, *if_cleanup;
2592 int i, n;
2594 start = compiler_new_block(c);
2595 skip = compiler_new_block(c);
2596 if_cleanup = compiler_new_block(c);
2597 anchor = compiler_new_block(c);
2599 if (start == NULL || skip == NULL || if_cleanup == NULL ||
2600 anchor == NULL)
2601 return 0;
2603 l = (comprehension_ty)asdl_seq_GET(generators, gen_index);
2604 VISIT(c, expr, l->iter);
2605 ADDOP(c, GET_ITER);
2606 compiler_use_next_block(c, start);
2607 ADDOP_JREL(c, FOR_ITER, anchor);
2608 NEXT_BLOCK(c);
2609 VISIT(c, expr, l->target);
2611 /* XXX this needs to be cleaned up...a lot! */
2612 n = asdl_seq_LEN(l->ifs);
2613 for (i = 0; i < n; i++) {
2614 expr_ty e = (expr_ty)asdl_seq_GET(l->ifs, i);
2615 VISIT(c, expr, e);
2616 ADDOP_JABS(c, POP_JUMP_IF_FALSE, if_cleanup);
2617 NEXT_BLOCK(c);
2620 if (++gen_index < asdl_seq_LEN(generators))
2621 if (!compiler_listcomp_generator(c, generators, gen_index, elt))
2622 return 0;
2624 /* only append after the last for generator */
2625 if (gen_index >= asdl_seq_LEN(generators)) {
2626 VISIT(c, expr, elt);
2627 ADDOP_I(c, LIST_APPEND, gen_index+1);
2629 compiler_use_next_block(c, skip);
2631 compiler_use_next_block(c, if_cleanup);
2632 ADDOP_JABS(c, JUMP_ABSOLUTE, start);
2633 compiler_use_next_block(c, anchor);
2635 return 1;
2638 static int
2639 compiler_listcomp(struct compiler *c, expr_ty e)
2641 assert(e->kind == ListComp_kind);
2642 ADDOP_I(c, BUILD_LIST, 0);
2643 return compiler_listcomp_generator(c, e->v.ListComp.generators, 0,
2644 e->v.ListComp.elt);
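/* Sketch of the result: "[x for x in y]" (placeholder names) compiles
   roughly to
       BUILD_LIST 0
       LOAD y; GET_ITER
   start: FOR_ITER anchor
       STORE x
       LOAD x; LIST_APPEND 2
       JUMP_ABSOLUTE start
   anchor:
   where the LIST_APPEND oparg (gen_index+1) says how far down the stack
   the list being built lives. */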
2647 static int
2648 compiler_genexp_generator(struct compiler *c,
2649 asdl_seq *generators, int gen_index,
2650 expr_ty elt)
2652 /* generate code for the iterator, then each of the ifs,
2653 and then write to the element */
2655 comprehension_ty ge;
2656 basicblock *start, *anchor, *skip, *if_cleanup, *end;
2657 int i, n;
2659 start = compiler_new_block(c);
2660 skip = compiler_new_block(c);
2661 if_cleanup = compiler_new_block(c);
2662 anchor = compiler_new_block(c);
2663 end = compiler_new_block(c);
2665 if (start == NULL || skip == NULL || if_cleanup == NULL ||
2666 anchor == NULL || end == NULL)
2667 return 0;
2669 ge = (comprehension_ty)asdl_seq_GET(generators, gen_index);
2670 ADDOP_JREL(c, SETUP_LOOP, end);
2671 if (!compiler_push_fblock(c, LOOP, start))
2672 return 0;
2674 if (gen_index == 0) {
2675 /* Receive outermost iter as an implicit argument */
2676 c->u->u_argcount = 1;
2677 ADDOP_I(c, LOAD_FAST, 0);
2679 else {
2680 /* Sub-iter - calculate on the fly */
2681 VISIT(c, expr, ge->iter);
2682 ADDOP(c, GET_ITER);
2684 compiler_use_next_block(c, start);
2685 ADDOP_JREL(c, FOR_ITER, anchor);
2686 NEXT_BLOCK(c);
2687 VISIT(c, expr, ge->target);
2689 /* XXX this needs to be cleaned up...a lot! */
2690 n = asdl_seq_LEN(ge->ifs);
2691 for (i = 0; i < n; i++) {
2692 expr_ty e = (expr_ty)asdl_seq_GET(ge->ifs, i);
2693 VISIT(c, expr, e);
2694 ADDOP_JABS(c, POP_JUMP_IF_FALSE, if_cleanup);
2695 NEXT_BLOCK(c);
2698 if (++gen_index < asdl_seq_LEN(generators))
2699 if (!compiler_genexp_generator(c, generators, gen_index, elt))
2700 return 0;
2702 /* only append after the last 'for' generator */
2703 if (gen_index >= asdl_seq_LEN(generators)) {
2704 VISIT(c, expr, elt);
2705 ADDOP(c, YIELD_VALUE);
2706 ADDOP(c, POP_TOP);
2708 compiler_use_next_block(c, skip);
2710 compiler_use_next_block(c, if_cleanup);
2711 ADDOP_JABS(c, JUMP_ABSOLUTE, start);
2712 compiler_use_next_block(c, anchor);
2713 ADDOP(c, POP_BLOCK);
2714 compiler_pop_fblock(c, LOOP, start);
2715 compiler_use_next_block(c, end);
2717 return 1;
2720 static int
2721 compiler_genexp(struct compiler *c, expr_ty e)
2723 static identifier name;
2724 PyCodeObject *co;
2725 expr_ty outermost_iter = ((comprehension_ty)
2726 (asdl_seq_GET(e->v.GeneratorExp.generators,
2727 0)))->iter;
2729 if (!name) {
2730 name = PyString_FromString("<genexpr>");
2731 if (!name)
2732 return 0;
2735 if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
2736 return 0;
2737 compiler_genexp_generator(c, e->v.GeneratorExp.generators, 0,
2738 e->v.GeneratorExp.elt);
2739 co = assemble(c, 1);
2740 compiler_exit_scope(c);
2741 if (co == NULL)
2742 return 0;
2744 compiler_make_closure(c, co, 0);
2745 Py_DECREF(co);
2747 VISIT(c, expr, outermost_iter);
2748 ADDOP(c, GET_ITER);
2749 ADDOP_I(c, CALL_FUNCTION, 1);
2751 return 1;
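/* Note: the generator expression body is compiled into its own code
   object whose single implicit argument (loaded with LOAD_FAST 0 above)
   is the already-created iterator of the outermost iterable; the
   enclosing code builds the function, applies GET_ITER to the outermost
   iterable, and calls it with CALL_FUNCTION 1.  This is why only the
   outermost iterable is evaluated eagerly. */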
2754 static int
2755 compiler_visit_keyword(struct compiler *c, keyword_ty k)
2757 ADDOP_O(c, LOAD_CONST, k->arg, consts);
2758 VISIT(c, expr, k->value);
2759 return 1;
2762 /* Test whether expression is constant. For constants, report
2763 whether they are true or false.
2765 Return values: 1 for true, 0 for false, -1 for non-constant.
2768 static int
2769 expr_constant(expr_ty e)
2771 switch (e->kind) {
2772 case Num_kind:
2773 return PyObject_IsTrue(e->v.Num.n);
2774 case Str_kind:
2775 return PyObject_IsTrue(e->v.Str.s);
2776 case Name_kind:
2777 /* __debug__ is not assignable, so we can optimize
2778 * it away in if and while statements */
2779 if (strcmp(PyString_AS_STRING(e->v.Name.id),
2780 "__debug__") == 0)
2781 return ! Py_OptimizeFlag;
2782 /* fall through */
2783 default:
2784 return -1;
2789 Implements the with statement from PEP 343.
2791 The semantics outlined in that PEP are as follows:
2793 with EXPR as VAR:
2794 BLOCK
2796 It is implemented roughly as:
2798 context = EXPR
2799 exit = context.__exit__ # not calling it
2800 value = context.__enter__()
2801 try:
2802 VAR = value # if VAR present in the syntax
2803 BLOCK
2804 finally:
2805 if an exception was raised:
2806 exc = copy of (exception, instance, traceback)
2807 else:
2808 exc = (None, None, None)
2809 exit(*exc)
2811 static int
2812 compiler_with(struct compiler *c, stmt_ty s)
2814 basicblock *block, *finally;
2816 assert(s->kind == With_kind);
2818 block = compiler_new_block(c);
2819 finally = compiler_new_block(c);
2820 if (!block || !finally)
2821 return 0;
2823 /* Evaluate EXPR */
2824 VISIT(c, expr, s->v.With.context_expr);
2825 ADDOP_JREL(c, SETUP_WITH, finally);
2827 /* SETUP_WITH pushes a finally block. */
2828 compiler_use_next_block(c, block);
2829 if (!compiler_push_fblock(c, FINALLY_TRY, block)) {
2830 return 0;
2833 if (s->v.With.optional_vars) {
2834 VISIT(c, expr, s->v.With.optional_vars);
2836 else {
2837 /* Discard result from context.__enter__() */
2838 ADDOP(c, POP_TOP);
2841 /* BLOCK code */
2842 VISIT_SEQ(c, stmt, s->v.With.body);
2844 /* End of try block; start the finally block */
2845 ADDOP(c, POP_BLOCK);
2846 compiler_pop_fblock(c, FINALLY_TRY, block);
2848 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2849 compiler_use_next_block(c, finally);
2850 if (!compiler_push_fblock(c, FINALLY_END, finally))
2851 return 0;
2853 /* Finally block starts; context.__exit__ is on the stack under
2854 the exception or return information. Just issue our magic
2855 opcode. */
2856 ADDOP(c, WITH_CLEANUP);
2858 /* Finally block ends. */
2859 ADDOP(c, END_FINALLY);
2860 compiler_pop_fblock(c, FINALLY_END, finally);
2861 return 1;
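/* Overall shape of the code emitted above for "with EXPR as VAR: BLOCK":
       <EXPR>
       SETUP_WITH finally
       STORE VAR              (or POP_TOP when there is no "as VAR")
       <BLOCK>
       POP_BLOCK; LOAD_CONST None
   finally:
       WITH_CLEANUP; END_FINALLY
   SETUP_WITH leaves __exit__ below the __enter__() result, which is what
   WITH_CLEANUP expects to find when the finally block runs. */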
2864 static int
2865 compiler_visit_expr(struct compiler *c, expr_ty e)
2867 int i, n;
2869 /* If expr e has a different line number than the last expr/stmt,
2870 set a new line number for the next instruction.
2872 if (e->lineno > c->u->u_lineno) {
2873 c->u->u_lineno = e->lineno;
2874 c->u->u_lineno_set = false;
2876 switch (e->kind) {
2877 case BoolOp_kind:
2878 return compiler_boolop(c, e);
2879 case BinOp_kind:
2880 VISIT(c, expr, e->v.BinOp.left);
2881 VISIT(c, expr, e->v.BinOp.right);
2882 ADDOP(c, binop(c, e->v.BinOp.op));
2883 break;
2884 case UnaryOp_kind:
2885 VISIT(c, expr, e->v.UnaryOp.operand);
2886 ADDOP(c, unaryop(e->v.UnaryOp.op));
2887 break;
2888 case Lambda_kind:
2889 return compiler_lambda(c, e);
2890 case IfExp_kind:
2891 return compiler_ifexp(c, e);
2892 case Dict_kind:
2893 n = asdl_seq_LEN(e->v.Dict.values);
2894 ADDOP_I(c, BUILD_MAP, (n>0xFFFF ? 0xFFFF : n));
2895 for (i = 0; i < n; i++) {
2896 VISIT(c, expr,
2897 (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
2898 VISIT(c, expr,
2899 (expr_ty)asdl_seq_GET(e->v.Dict.keys, i));
2900 ADDOP(c, STORE_MAP);
2902 break;
2903 case ListComp_kind:
2904 return compiler_listcomp(c, e);
2905 case GeneratorExp_kind:
2906 return compiler_genexp(c, e);
2907 case Yield_kind:
2908 if (c->u->u_ste->ste_type != FunctionBlock)
2909 return compiler_error(c, "'yield' outside function");
2910 if (e->v.Yield.value) {
2911 VISIT(c, expr, e->v.Yield.value);
2913 else {
2914 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2916 ADDOP(c, YIELD_VALUE);
2917 break;
2918 case Compare_kind:
2919 return compiler_compare(c, e);
2920 case Call_kind:
2921 return compiler_call(c, e);
2922 case Repr_kind:
2923 VISIT(c, expr, e->v.Repr.value);
2924 ADDOP(c, UNARY_CONVERT);
2925 break;
2926 case Num_kind:
2927 ADDOP_O(c, LOAD_CONST, e->v.Num.n, consts);
2928 break;
2929 case Str_kind:
2930 ADDOP_O(c, LOAD_CONST, e->v.Str.s, consts);
2931 break;
2932 /* The following exprs can be assignment targets. */
2933 case Attribute_kind:
2934 if (e->v.Attribute.ctx != AugStore)
2935 VISIT(c, expr, e->v.Attribute.value);
2936 switch (e->v.Attribute.ctx) {
2937 case AugLoad:
2938 ADDOP(c, DUP_TOP);
2939 /* Fall through to load */
2940 case Load:
2941 ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names);
2942 break;
2943 case AugStore:
2944 ADDOP(c, ROT_TWO);
2945 /* Fall through to store */
2946 case Store:
2947 ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names);
2948 break;
2949 case Del:
2950 ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names);
2951 break;
2952 case Param:
2953 default:
2954 PyErr_SetString(PyExc_SystemError,
2955 "param invalid in attribute expression");
2956 return 0;
2958 break;
2959 case Subscript_kind:
2960 switch (e->v.Subscript.ctx) {
2961 case AugLoad:
2962 VISIT(c, expr, e->v.Subscript.value);
2963 VISIT_SLICE(c, e->v.Subscript.slice, AugLoad);
2964 break;
2965 case Load:
2966 VISIT(c, expr, e->v.Subscript.value);
2967 VISIT_SLICE(c, e->v.Subscript.slice, Load);
2968 break;
2969 case AugStore:
2970 VISIT_SLICE(c, e->v.Subscript.slice, AugStore);
2971 break;
2972 case Store:
2973 VISIT(c, expr, e->v.Subscript.value);
2974 VISIT_SLICE(c, e->v.Subscript.slice, Store);
2975 break;
2976 case Del:
2977 VISIT(c, expr, e->v.Subscript.value);
2978 VISIT_SLICE(c, e->v.Subscript.slice, Del);
2979 break;
2980 case Param:
2981 default:
2982 PyErr_SetString(PyExc_SystemError,
2983 "param invalid in subscript expression");
2984 return 0;
2986 break;
2987 case Name_kind:
2988 return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx);
2989 /* child nodes of List and Tuple will have expr_context set */
2990 case List_kind:
2991 return compiler_list(c, e);
2992 case Tuple_kind:
2993 return compiler_tuple(c, e);
2995 return 1;
2998 static int
2999 compiler_augassign(struct compiler *c, stmt_ty s)
3001 expr_ty e = s->v.AugAssign.target;
3002 expr_ty auge;
3004 assert(s->kind == AugAssign_kind);
3006 switch (e->kind) {
3007 case Attribute_kind:
3008 auge = Attribute(e->v.Attribute.value, e->v.Attribute.attr,
3009 AugLoad, e->lineno, e->col_offset, c->c_arena);
3010 if (auge == NULL)
3011 return 0;
3012 VISIT(c, expr, auge);
3013 VISIT(c, expr, s->v.AugAssign.value);
3014 ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
3015 auge->v.Attribute.ctx = AugStore;
3016 VISIT(c, expr, auge);
3017 break;
3018 case Subscript_kind:
3019 auge = Subscript(e->v.Subscript.value, e->v.Subscript.slice,
3020 AugLoad, e->lineno, e->col_offset, c->c_arena);
3021 if (auge == NULL)
3022 return 0;
3023 VISIT(c, expr, auge);
3024 VISIT(c, expr, s->v.AugAssign.value);
3025 ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
3026 auge->v.Subscript.ctx = AugStore;
3027 VISIT(c, expr, auge);
3028 break;
3029 case Name_kind:
3030 if (!compiler_nameop(c, e->v.Name.id, Load))
3031 return 0;
3032 VISIT(c, expr, s->v.AugAssign.value);
3033 ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
3034 return compiler_nameop(c, e->v.Name.id, Store);
3035 default:
3036 PyErr_Format(PyExc_SystemError,
3037 "invalid node type (%d) for augmented assignment",
3038 e->kind);
3039 return 0;
3041 return 1;
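/* For illustration, "x.a += 1" (placeholder names) goes through the
   Attribute branch above and emits roughly:
       LOAD x; DUP_TOP; LOAD_ATTR a       (AugLoad visit)
       LOAD_CONST 1; INPLACE_ADD
       ROT_TWO; STORE_ATTR a              (AugStore visit)
   i.e. the target expression is evaluated once and reused for the store. */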
3044 static int
3045 compiler_push_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3047 struct fblockinfo *f;
3048 if (c->u->u_nfblocks >= CO_MAXBLOCKS) {
3049 PyErr_SetString(PyExc_SystemError,
3050 "too many statically nested blocks");
3051 return 0;
3053 f = &c->u->u_fblock[c->u->u_nfblocks++];
3054 f->fb_type = t;
3055 f->fb_block = b;
3056 return 1;
3059 static void
3060 compiler_pop_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3062 struct compiler_unit *u = c->u;
3063 assert(u->u_nfblocks > 0);
3064 u->u_nfblocks--;
3065 assert(u->u_fblock[u->u_nfblocks].fb_type == t);
3066 assert(u->u_fblock[u->u_nfblocks].fb_block == b);
3069 static int
3070 compiler_in_loop(struct compiler *c) {
3071 int i;
3072 struct compiler_unit *u = c->u;
3073 for (i = 0; i < u->u_nfblocks; ++i) {
3074 if (u->u_fblock[i].fb_type == LOOP)
3075 return 1;
3077 return 0;
3079 /* Raises a SyntaxError and returns 0.
3080 If something goes wrong, a different exception may be raised.
3083 static int
3084 compiler_error(struct compiler *c, const char *errstr)
3086 PyObject *loc;
3087 PyObject *u = NULL, *v = NULL;
3089 loc = PyErr_ProgramText(c->c_filename, c->u->u_lineno);
3090 if (!loc) {
3091 Py_INCREF(Py_None);
3092 loc = Py_None;
3094 u = Py_BuildValue("(ziOO)", c->c_filename, c->u->u_lineno,
3095 Py_None, loc);
3096 if (!u)
3097 goto exit;
3098 v = Py_BuildValue("(zO)", errstr, u);
3099 if (!v)
3100 goto exit;
3101 PyErr_SetObject(PyExc_SyntaxError, v);
3102 exit:
3103 Py_DECREF(loc);
3104 Py_XDECREF(u);
3105 Py_XDECREF(v);
3106 return 0;
3109 static int
3110 compiler_handle_subscr(struct compiler *c, const char *kind,
3111 expr_context_ty ctx)
3113 int op = 0;
3115 /* XXX this code is duplicated */
3116 switch (ctx) {
3117 case AugLoad: /* fall through to Load */
3118 case Load: op = BINARY_SUBSCR; break;
3119 case AugStore:/* fall through to Store */
3120 case Store: op = STORE_SUBSCR; break;
3121 case Del: op = DELETE_SUBSCR; break;
3122 case Param:
3123 PyErr_Format(PyExc_SystemError,
3124 "invalid %s kind %d in subscript\n",
3125 kind, ctx);
3126 return 0;
3128 if (ctx == AugLoad) {
3129 ADDOP_I(c, DUP_TOPX, 2);
3131 else if (ctx == AugStore) {
3132 ADDOP(c, ROT_THREE);
3134 ADDOP(c, op);
3135 return 1;
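/* Example of the AugLoad/AugStore handling above: "d[k] += 1"
   (placeholder names) is emitted roughly as
       LOAD d; LOAD k
       DUP_TOPX 2; BINARY_SUBSCR          (AugLoad)
       LOAD_CONST 1; INPLACE_ADD
       ROT_THREE; STORE_SUBSCR            (AugStore)
   so d and k are evaluated only once. */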
3138 static int
3139 compiler_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3141 int n = 2;
3142 assert(s->kind == Slice_kind);
3144 /* only handles the cases where BUILD_SLICE is emitted */
3145 if (s->v.Slice.lower) {
3146 VISIT(c, expr, s->v.Slice.lower);
3148 else {
3149 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3152 if (s->v.Slice.upper) {
3153 VISIT(c, expr, s->v.Slice.upper);
3155 else {
3156 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3159 if (s->v.Slice.step) {
3160 n++;
3161 VISIT(c, expr, s->v.Slice.step);
3163 ADDOP_I(c, BUILD_SLICE, n);
3164 return 1;
3167 static int
3168 compiler_simple_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3170 int op = 0, slice_offset = 0, stack_count = 0;
3172 assert(s->v.Slice.step == NULL);
3173 if (s->v.Slice.lower) {
3174 slice_offset++;
3175 stack_count++;
3176 if (ctx != AugStore)
3177 VISIT(c, expr, s->v.Slice.lower);
3179 if (s->v.Slice.upper) {
3180 slice_offset += 2;
3181 stack_count++;
3182 if (ctx != AugStore)
3183 VISIT(c, expr, s->v.Slice.upper);
3186 if (ctx == AugLoad) {
3187 switch (stack_count) {
3188 case 0: ADDOP(c, DUP_TOP); break;
3189 case 1: ADDOP_I(c, DUP_TOPX, 2); break;
3190 case 2: ADDOP_I(c, DUP_TOPX, 3); break;
3193 else if (ctx == AugStore) {
3194 switch (stack_count) {
3195 case 0: ADDOP(c, ROT_TWO); break;
3196 case 1: ADDOP(c, ROT_THREE); break;
3197 case 2: ADDOP(c, ROT_FOUR); break;
3201 switch (ctx) {
3202 case AugLoad: /* fall through to Load */
3203 case Load: op = SLICE; break;
3204 case AugStore:/* fall through to Store */
3205 case Store: op = STORE_SLICE; break;
3206 case Del: op = DELETE_SLICE; break;
3207 case Param:
3208 default:
3209 PyErr_SetString(PyExc_SystemError,
3210 "param invalid in simple slice");
3211 return 0;
3214 ADDOP(c, op + slice_offset);
3215 return 1;
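/* The "op + slice_offset" trick above relies on the opcode numbering:
   SLICE+0 through SLICE+3 (and likewise STORE_SLICE+n and DELETE_SLICE+n)
   are consecutive, where +1 means a lower bound is present, +2 an upper
   bound, and +3 both.  E.g. "s[i:j]" loads with SLICE+3 and "del s[i:]"
   uses DELETE_SLICE+1 (placeholder names). */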
3218 static int
3219 compiler_visit_nested_slice(struct compiler *c, slice_ty s,
3220 expr_context_ty ctx)
3222 switch (s->kind) {
3223 case Ellipsis_kind:
3224 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
3225 break;
3226 case Slice_kind:
3227 return compiler_slice(c, s, ctx);
3228 case Index_kind:
3229 VISIT(c, expr, s->v.Index.value);
3230 break;
3231 case ExtSlice_kind:
3232 default:
3233 PyErr_SetString(PyExc_SystemError,
3234 "extended slice invalid in nested slice");
3235 return 0;
3237 return 1;
3240 static int
3241 compiler_visit_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3243 char * kindname = NULL;
3244 switch (s->kind) {
3245 case Index_kind:
3246 kindname = "index";
3247 if (ctx != AugStore) {
3248 VISIT(c, expr, s->v.Index.value);
3250 break;
3251 case Ellipsis_kind:
3252 kindname = "ellipsis";
3253 if (ctx != AugStore) {
3254 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
3256 break;
3257 case Slice_kind:
3258 kindname = "slice";
3259 if (!s->v.Slice.step)
3260 return compiler_simple_slice(c, s, ctx);
3261 if (ctx != AugStore) {
3262 if (!compiler_slice(c, s, ctx))
3263 return 0;
3265 break;
3266 case ExtSlice_kind:
3267 kindname = "extended slice";
3268 if (ctx != AugStore) {
3269 int i, n = asdl_seq_LEN(s->v.ExtSlice.dims);
3270 for (i = 0; i < n; i++) {
3271 slice_ty sub = (slice_ty)asdl_seq_GET(
3272 s->v.ExtSlice.dims, i);
3273 if (!compiler_visit_nested_slice(c, sub, ctx))
3274 return 0;
3276 ADDOP_I(c, BUILD_TUPLE, n);
3278 break;
3279 default:
3280 PyErr_Format(PyExc_SystemError,
3281 "invalid subscript kind %d", s->kind);
3282 return 0;
3284 return compiler_handle_subscr(c, kindname, ctx);
3288 /* End of the compiler section, beginning of the assembler section */
3290 /* Do a depth-first search of the basic block graph, starting with the given block.
3291 a_postorder records the blocks in post-order.
3293 XXX must handle implicit jumps from one block to the next
3296 struct assembler {
3297 PyObject *a_bytecode; /* string containing bytecode */
3298 int a_offset; /* offset into bytecode */
3299 int a_nblocks; /* number of reachable blocks */
3300 basicblock **a_postorder; /* list of blocks in dfs postorder */
3301 PyObject *a_lnotab; /* string containing lnotab */
3302 int a_lnotab_off; /* offset into lnotab */
3303 int a_lineno; /* last lineno of emitted instruction */
3304 int a_lineno_off; /* bytecode offset of last lineno */
3307 static void
3308 dfs(struct compiler *c, basicblock *b, struct assembler *a)
3310 int i;
3311 struct instr *instr = NULL;
3313 if (b->b_seen)
3314 return;
3315 b->b_seen = 1;
3316 if (b->b_next != NULL)
3317 dfs(c, b->b_next, a);
3318 for (i = 0; i < b->b_iused; i++) {
3319 instr = &b->b_instr[i];
3320 if (instr->i_jrel || instr->i_jabs)
3321 dfs(c, instr->i_target, a);
3323 a->a_postorder[a->a_nblocks++] = b;
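/* dfs() recurses on b_next before appending the block itself, so the
   fall-through successor ends up earlier in postorder; assemble() then
   walks a_postorder backwards, which yields blocks roughly in original
   control-flow order.  E.g. for B1 -> B2 -> B3 linked by b_next the
   postorder is [B3, B2, B1] and code is emitted as B1, B2, B3. */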
3326 static int
3327 stackdepth_walk(struct compiler *c, basicblock *b, int depth, int maxdepth)
3329 int i;
3330 struct instr *instr;
3331 if (b->b_seen || b->b_startdepth >= depth)
3332 return maxdepth;
3333 b->b_seen = 1;
3334 b->b_startdepth = depth;
3335 for (i = 0; i < b->b_iused; i++) {
3336 instr = &b->b_instr[i];
3337 depth += opcode_stack_effect(instr->i_opcode, instr->i_oparg);
3338 if (depth > maxdepth)
3339 maxdepth = depth;
3340 assert(depth >= 0); /* invalid code or bug in stackdepth() */
3341 if (instr->i_jrel || instr->i_jabs) {
3342 maxdepth = stackdepth_walk(c, instr->i_target,
3343 depth, maxdepth);
3344 if (instr->i_opcode == JUMP_ABSOLUTE ||
3345 instr->i_opcode == JUMP_FORWARD) {
3346 goto out; /* remaining code is dead */
3350 if (b->b_next)
3351 maxdepth = stackdepth_walk(c, b->b_next, depth, maxdepth);
3352 out:
3353 b->b_seen = 0;
3354 return maxdepth;
3357 /* Find the flow path that needs the largest stack. We assume that
3358 * cycles in the flow graph have no net effect on the stack depth.
3360 static int
3361 stackdepth(struct compiler *c)
3363 basicblock *b, *entryblock;
3364 entryblock = NULL;
3365 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3366 b->b_seen = 0;
3367 b->b_startdepth = INT_MIN;
3368 entryblock = b;
3370 if (!entryblock)
3371 return 0;
3372 return stackdepth_walk(c, entryblock, 0, 0);
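/* Illustrative walk: for a block containing LOAD_FAST, LOAD_FAST,
   BINARY_ADD, RETURN_VALUE the running depth is 1, 2, 1, 0 (each
   opcode_stack_effect() being +1, +1, -1, -1), so stackdepth() reports a
   maximum depth of 2 along that path. */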
3375 static int
3376 assemble_init(struct assembler *a, int nblocks, int firstlineno)
3378 memset(a, 0, sizeof(struct assembler));
3379 a->a_lineno = firstlineno;
3380 a->a_bytecode = PyString_FromStringAndSize(NULL, DEFAULT_CODE_SIZE);
3381 if (!a->a_bytecode)
3382 return 0;
3383 a->a_lnotab = PyString_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE);
3384 if (!a->a_lnotab)
3385 return 0;
3386 if (nblocks > PY_SIZE_MAX / sizeof(basicblock *)) {
3387 PyErr_NoMemory();
3388 return 0;
3390 a->a_postorder = (basicblock **)PyObject_Malloc(
3391 sizeof(basicblock *) * nblocks);
3392 if (!a->a_postorder) {
3393 PyErr_NoMemory();
3394 return 0;
3396 return 1;
3399 static void
3400 assemble_free(struct assembler *a)
3402 Py_XDECREF(a->a_bytecode);
3403 Py_XDECREF(a->a_lnotab);
3404 if (a->a_postorder)
3405 PyObject_Free(a->a_postorder);
3408 /* Return the size of an instruction in bytes; blocksize() below sums these for a basic block. */
3410 static int
3411 instrsize(struct instr *instr)
3413 if (!instr->i_hasarg)
3414 return 1; /* 1 byte for the opcode */
3415 if (instr->i_oparg > 0xffff)
3416 return 6; /* 1 (opcode) + 1 (EXTENDED_ARG opcode) + 2 (oparg) + 2 (oparg extended) */
3417 return 3; /* 1 (opcode) + 2 (oparg) */
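/* Example: an instruction with oparg 0x12345 does not fit in 16 bits, so
   it is assembled (see assemble_emit() below) as the 6-byte sequence
       EXTENDED_ARG 0x0001
       <opcode>     0x2345
   while a plain LOAD_CONST 3 takes the usual 3 bytes. */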
3420 static int
3421 blocksize(basicblock *b)
3423 int i;
3424 int size = 0;
3426 for (i = 0; i < b->b_iused; i++)
3427 size += instrsize(&b->b_instr[i]);
3428 return size;
3431 /* Appends a pair to the end of the line number table, a_lnotab, representing
3432 the instruction's bytecode offset and line number. See
3433 Objects/lnotab_notes.txt for the description of the line number table. */
3435 static int
3436 assemble_lnotab(struct assembler *a, struct instr *i)
3438 int d_bytecode, d_lineno;
3439 int len;
3440 unsigned char *lnotab;
3442 d_bytecode = a->a_offset - a->a_lineno_off;
3443 d_lineno = i->i_lineno - a->a_lineno;
3445 assert(d_bytecode >= 0);
3446 assert(d_lineno >= 0);
3448 if(d_bytecode == 0 && d_lineno == 0)
3449 return 1;
3451 if (d_bytecode > 255) {
3452 int j, nbytes, ncodes = d_bytecode / 255;
3453 nbytes = a->a_lnotab_off + 2 * ncodes;
3454 len = PyString_GET_SIZE(a->a_lnotab);
3455 if (nbytes >= len) {
3456 if ((len <= INT_MAX / 2) && (len * 2 < nbytes))
3457 len = nbytes;
3458 else if (len <= INT_MAX / 2)
3459 len *= 2;
3460 else {
3461 PyErr_NoMemory();
3462 return 0;
3464 if (_PyString_Resize(&a->a_lnotab, len) < 0)
3465 return 0;
3467 lnotab = (unsigned char *)
3468 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3469 for (j = 0; j < ncodes; j++) {
3470 *lnotab++ = 255;
3471 *lnotab++ = 0;
3473 d_bytecode -= ncodes * 255;
3474 a->a_lnotab_off += ncodes * 2;
3476 assert(d_bytecode <= 255);
3477 if (d_lineno > 255) {
3478 int j, nbytes, ncodes = d_lineno / 255;
3479 nbytes = a->a_lnotab_off + 2 * ncodes;
3480 len = PyString_GET_SIZE(a->a_lnotab);
3481 if (nbytes >= len) {
3482 if ((len <= INT_MAX / 2) && len * 2 < nbytes)
3483 len = nbytes;
3484 else if (len <= INT_MAX / 2)
3485 len *= 2;
3486 else {
3487 PyErr_NoMemory();
3488 return 0;
3490 if (_PyString_Resize(&a->a_lnotab, len) < 0)
3491 return 0;
3493 lnotab = (unsigned char *)
3494 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3495 *lnotab++ = d_bytecode;
3496 *lnotab++ = 255;
3497 d_bytecode = 0;
3498 for (j = 1; j < ncodes; j++) {
3499 *lnotab++ = 0;
3500 *lnotab++ = 255;
3502 d_lineno -= ncodes * 255;
3503 a->a_lnotab_off += ncodes * 2;
3506 len = PyString_GET_SIZE(a->a_lnotab);
3507 if (a->a_lnotab_off + 2 >= len) {
3508 if (_PyString_Resize(&a->a_lnotab, len * 2) < 0)
3509 return 0;
3511 lnotab = (unsigned char *)
3512 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3514 a->a_lnotab_off += 2;
3515 if (d_bytecode) {
3516 *lnotab++ = d_bytecode;
3517 *lnotab++ = d_lineno;
3519 else { /* First line of a block; def stmt, etc. */
3520 *lnotab++ = 0;
3521 *lnotab++ = d_lineno;
3523 a->a_lineno = i->i_lineno;
3524 a->a_lineno_off = a->a_offset;
3525 return 1;
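/* Worked example of the encoding: instructions at bytecode offsets 0, 6
   and 50 on lines 1, 2 and 7 produce the (offset delta, line delta) byte
   pairs (6, 1) and (44, 5).  Deltas larger than 255 are split by
   inserting (255, 0) or (0, 255) filler pairs, which is what the two
   overflow branches above do. */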
3528 /* assemble_emit()
3529 Extend the bytecode with a new instruction.
3530 Update lnotab if necessary.
3533 static int
3534 assemble_emit(struct assembler *a, struct instr *i)
3536 int size, arg = 0, ext = 0;
3537 Py_ssize_t len = PyString_GET_SIZE(a->a_bytecode);
3538 char *code;
3540 size = instrsize(i);
3541 if (i->i_hasarg) {
3542 arg = i->i_oparg;
3543 ext = arg >> 16;
3545 if (i->i_lineno && !assemble_lnotab(a, i))
3546 return 0;
3547 if (a->a_offset + size >= len) {
3548 if (len > PY_SSIZE_T_MAX / 2)
3549 return 0;
3550 if (_PyString_Resize(&a->a_bytecode, len * 2) < 0)
3551 return 0;
3553 code = PyString_AS_STRING(a->a_bytecode) + a->a_offset;
3554 a->a_offset += size;
3555 if (size == 6) {
3556 assert(i->i_hasarg);
3557 *code++ = (char)EXTENDED_ARG;
3558 *code++ = ext & 0xff;
3559 *code++ = ext >> 8;
3560 arg &= 0xffff;
3562 *code++ = i->i_opcode;
3563 if (i->i_hasarg) {
3564 assert(size == 3 || size == 6);
3565 *code++ = arg & 0xff;
3566 *code++ = arg >> 8;
3568 return 1;
3571 static void
3572 assemble_jump_offsets(struct assembler *a, struct compiler *c)
3574 basicblock *b;
3575 int bsize, totsize, extended_arg_count, last_extended_arg_count = 0;
3576 int i;
3578 /* Compute the size of each block and fixup jump args.
3579 Replace block pointer with position in bytecode. */
3580 start:
3581 totsize = 0;
3582 for (i = a->a_nblocks - 1; i >= 0; i--) {
3583 b = a->a_postorder[i];
3584 bsize = blocksize(b);
3585 b->b_offset = totsize;
3586 totsize += bsize;
3588 extended_arg_count = 0;
3589 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3590 bsize = b->b_offset;
3591 for (i = 0; i < b->b_iused; i++) {
3592 struct instr *instr = &b->b_instr[i];
3593 /* Relative jumps are computed relative to
3594 the instruction pointer after fetching
3595 the jump instruction.
3597 bsize += instrsize(instr);
3598 if (instr->i_jabs)
3599 instr->i_oparg = instr->i_target->b_offset;
3600 else if (instr->i_jrel) {
3601 int delta = instr->i_target->b_offset - bsize;
3602 instr->i_oparg = delta;
3604 else
3605 continue;
3606 if (instr->i_oparg > 0xffff)
3607 extended_arg_count++;
3611 /* XXX: This is an awful hack that could hurt performance, but
3612 on the bright side it should work until we come up
3613 with a better solution.
3615 In the meantime, should the goto be dropped in favor
3616 of a loop?
3618 The issue is that in the first loop blocksize() is called
3619 which calls instrsize() which requires i_oparg be set
3620 appropriately. There is a bootstrap problem because
3621 i_oparg is calculated in the second loop above.
3623 So we loop until we stop seeing new EXTENDED_ARGs.
3624 The only EXTENDED_ARGs that could be popping up are
3625 ones in jump instructions. So this should converge
3626 fairly quickly.
3628 if (last_extended_arg_count != extended_arg_count) {
3629 last_extended_arg_count = extended_arg_count;
3630 goto start;
3634 static PyObject *
3635 dict_keys_inorder(PyObject *dict, int offset)
3637 PyObject *tuple, *k, *v;
3638 Py_ssize_t i, pos = 0, size = PyDict_Size(dict);
3640 tuple = PyTuple_New(size);
3641 if (tuple == NULL)
3642 return NULL;
3643 while (PyDict_Next(dict, &pos, &k, &v)) {
3644 i = PyInt_AS_LONG(v);
3645 /* The keys of the dictionary are tuples. (see compiler_add_o)
3646 The object we want is always first, though. */
3647 k = PyTuple_GET_ITEM(k, 0);
3648 Py_INCREF(k);
3649 assert((i - offset) < size);
3650 assert((i - offset) >= 0);
3651 PyTuple_SET_ITEM(tuple, i - offset, k);
3653 return tuple;
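/* E.g. if u_consts maps its tuple keys (object first) for None and 1 to
   the indices 0 and 1, the result is the tuple (None, 1).  The offset
   argument is non-zero only for u_freevars, whose indices continue after
   the cell variables (see the call in makecode() below). */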
3656 static int
3657 compute_code_flags(struct compiler *c)
3659 PySTEntryObject *ste = c->u->u_ste;
3660 int flags = 0, n;
3661 if (ste->ste_type != ModuleBlock)
3662 flags |= CO_NEWLOCALS;
3663 if (ste->ste_type == FunctionBlock) {
3664 if (!ste->ste_unoptimized)
3665 flags |= CO_OPTIMIZED;
3666 if (ste->ste_nested)
3667 flags |= CO_NESTED;
3668 if (ste->ste_generator)
3669 flags |= CO_GENERATOR;
3670 if (ste->ste_varargs)
3671 flags |= CO_VARARGS;
3672 if (ste->ste_varkeywords)
3673 flags |= CO_VARKEYWORDS;
3676 /* (Only) inherit compilerflags in PyCF_MASK */
3677 flags |= (c->c_flags->cf_flags & PyCF_MASK);
3679 n = PyDict_Size(c->u->u_freevars);
3680 if (n < 0)
3681 return -1;
3682 if (n == 0) {
3683 n = PyDict_Size(c->u->u_cellvars);
3684 if (n < 0)
3685 return -1;
3686 if (n == 0) {
3687 flags |= CO_NOFREE;
3691 return flags;
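/* As a sanity check, a plain top-level "def f(x): return x" (placeholder
   name) ends up with CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE, i.e. a
   co_flags value of 0x43, assuming no future-import flags are inherited
   through c_flags. */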
3694 static PyCodeObject *
3695 makecode(struct compiler *c, struct assembler *a)
3697 PyObject *tmp;
3698 PyCodeObject *co = NULL;
3699 PyObject *consts = NULL;
3700 PyObject *names = NULL;
3701 PyObject *varnames = NULL;
3702 PyObject *filename = NULL;
3703 PyObject *name = NULL;
3704 PyObject *freevars = NULL;
3705 PyObject *cellvars = NULL;
3706 PyObject *bytecode = NULL;
3707 int nlocals, flags;
3709 tmp = dict_keys_inorder(c->u->u_consts, 0);
3710 if (!tmp)
3711 goto error;
3712 consts = PySequence_List(tmp); /* optimize_code requires a list */
3713 Py_DECREF(tmp);
3715 names = dict_keys_inorder(c->u->u_names, 0);
3716 varnames = dict_keys_inorder(c->u->u_varnames, 0);
3717 if (!consts || !names || !varnames)
3718 goto error;
3720 cellvars = dict_keys_inorder(c->u->u_cellvars, 0);
3721 if (!cellvars)
3722 goto error;
3723 freevars = dict_keys_inorder(c->u->u_freevars, PyTuple_Size(cellvars));
3724 if (!freevars)
3725 goto error;
3726 filename = PyString_FromString(c->c_filename);
3727 if (!filename)
3728 goto error;
3730 nlocals = PyDict_Size(c->u->u_varnames);
3731 flags = compute_code_flags(c);
3732 if (flags < 0)
3733 goto error;
3735 bytecode = PyCode_Optimize(a->a_bytecode, consts, names, a->a_lnotab);
3736 if (!bytecode)
3737 goto error;
3739 tmp = PyList_AsTuple(consts); /* PyCode_New requires a tuple */
3740 if (!tmp)
3741 goto error;
3742 Py_DECREF(consts);
3743 consts = tmp;
3745 co = PyCode_New(c->u->u_argcount, nlocals, stackdepth(c), flags,
3746 bytecode, consts, names, varnames,
3747 freevars, cellvars,
3748 filename, c->u->u_name,
3749 c->u->u_firstlineno,
3750 a->a_lnotab);
3751 error:
3752 Py_XDECREF(consts);
3753 Py_XDECREF(names);
3754 Py_XDECREF(varnames);
3755 Py_XDECREF(filename);
3756 Py_XDECREF(name);
3757 Py_XDECREF(freevars);
3758 Py_XDECREF(cellvars);
3759 Py_XDECREF(bytecode);
3760 return co;
3764 /* For debugging purposes only */
3765 #if 0
3766 static void
3767 dump_instr(const struct instr *i)
3769 const char *jrel = i->i_jrel ? "jrel " : "";
3770 const char *jabs = i->i_jabs ? "jabs " : "";
3771 char arg[128];
3773 *arg = '\0';
3774 if (i->i_hasarg)
3775 sprintf(arg, "arg: %d ", i->i_oparg);
3777 fprintf(stderr, "line: %d, opcode: %d %s%s%s\n",
3778 i->i_lineno, i->i_opcode, arg, jabs, jrel);
3781 static void
3782 dump_basicblock(const basicblock *b)
3784 const char *seen = b->b_seen ? "seen " : "";
3785 const char *b_return = b->b_return ? "return " : "";
3786 fprintf(stderr, "used: %d, depth: %d, offset: %d %s%s\n",
3787 b->b_iused, b->b_startdepth, b->b_offset, seen, b_return);
3788 if (b->b_instr) {
3789 int i;
3790 for (i = 0; i < b->b_iused; i++) {
3791 fprintf(stderr, " [%02d] ", i);
3792 dump_instr(b->b_instr + i);
3796 #endif
3798 static PyCodeObject *
3799 assemble(struct compiler *c, int addNone)
3801 basicblock *b, *entryblock;
3802 struct assembler a;
3803 int i, j, nblocks;
3804 PyCodeObject *co = NULL;
3806 /* Make sure every block that falls off the end returns None.
3807 XXX NEXT_BLOCK() isn't quite right, because if the last
3808 block ends with a jump or return, b_next shouldn't be set.
3810 if (!c->u->u_curblock->b_return) {
3811 NEXT_BLOCK(c);
3812 if (addNone)
3813 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3814 ADDOP(c, RETURN_VALUE);
3817 nblocks = 0;
3818 entryblock = NULL;
3819 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3820 nblocks++;
3821 entryblock = b;
3824 /* Set firstlineno if it wasn't explicitly set. */
3825 if (!c->u->u_firstlineno) {
3826 if (entryblock && entryblock->b_instr)
3827 c->u->u_firstlineno = entryblock->b_instr->i_lineno;
3828 else
3829 c->u->u_firstlineno = 1;
3831 if (!assemble_init(&a, nblocks, c->u->u_firstlineno))
3832 goto error;
3833 dfs(c, entryblock, &a);
3835 /* Can't modify the bytecode after computing jump offsets. */
3836 assemble_jump_offsets(&a, c);
3838 /* Emit code in reverse postorder from dfs. */
3839 for (i = a.a_nblocks - 1; i >= 0; i--) {
3840 b = a.a_postorder[i];
3841 for (j = 0; j < b->b_iused; j++)
3842 if (!assemble_emit(&a, &b->b_instr[j]))
3843 goto error;
3846 if (_PyString_Resize(&a.a_lnotab, a.a_lnotab_off) < 0)
3847 goto error;
3848 if (_PyString_Resize(&a.a_bytecode, a.a_offset) < 0)
3849 goto error;
3851 co = makecode(c, &a);
3852 error:
3853 assemble_free(&a);
3854 return co;