isinf and isnan are macros, not functions; fix configure script
[python.git] / Python / compile.c
blob8f0fd8d0433fe0e0a0994f69cf574cc618a90412
1 /*
2 * This file compiles an abstract syntax tree (AST) into Python bytecode.
4 * The primary entry point is PyAST_Compile(), which returns a
5 * PyCodeObject. The compiler makes several passes to build the code
6 * object:
7 * 1. Checks for future statements. See future.c
8 * 2. Builds a symbol table. See symtable.c.
9 * 3. Generate code for basic blocks. See compiler_mod() in this file.
10 * 4. Assemble the basic blocks into final code. See assemble() in
11 * this file.
12 * 5. Optimize the byte code (peephole optimizations). See peephole.c
14 * Note that compiler_mod() suggests module, but the module ast type
15 * (mod_ty) has cases for expressions and interactive statements.
17 * CAUTION: The VISIT_* macros abort the current function when they
18 * encounter a problem. So don't invoke them when there is memory
19 * which needs to be released. Code blocks are OK, as the compiler
20 * structure takes care of releasing those. Use the arena to manage
21 * objects.
24 #include "Python.h"
26 #include "Python-ast.h"
27 #include "node.h"
28 #include "pyarena.h"
29 #include "ast.h"
30 #include "code.h"
31 #include "compile.h"
32 #include "symtable.h"
33 #include "opcode.h"
35 int Py_OptimizeFlag = 0;
37 #define DEFAULT_BLOCK_SIZE 16
38 #define DEFAULT_BLOCKS 8
39 #define DEFAULT_CODE_SIZE 128
40 #define DEFAULT_LNOTAB_SIZE 16
/* One pending bytecode instruction, recorded in a basicblock before
   final assembly into a code object. */
42 struct instr {
43 unsigned i_jabs : 1; /* set when i_target is an absolute jump target (see compiler_addop_j) */
44 unsigned i_jrel : 1; /* set when i_target is a relative jump target (see compiler_addop_j) */
45 unsigned i_hasarg : 1; /* set when the opcode carries an argument (see compiler_addop_i) */
46 unsigned char i_opcode;
47 int i_oparg;
48 struct basicblock_ *i_target; /* target block (if jump instruction) */
49 int i_lineno; /* source line, filled in by compiler_set_lineno() */
/* A basic block: a straight-line run of instructions, the node type of
   the compiler's control-flow graph. */
52 typedef struct basicblock_ {
53 /* Each basicblock in a compilation unit is linked via b_list in the
54 reverse order that the block are allocated. b_list points to the next
55 block, not to be confused with b_next, which is next by control flow. */
56 struct basicblock_ *b_list;
57 /* number of instructions used */
58 int b_iused;
59 /* length of instruction array (b_instr) */
60 int b_ialloc;
61 /* pointer to an array of instructions, initially NULL */
62 struct instr *b_instr;
63 /* If b_next is non-NULL, it is a pointer to the next
64 block reached by normal control flow. */
65 struct basicblock_ *b_next;
66 /* b_seen is used to perform a DFS of basicblocks. */
67 unsigned b_seen : 1;
68 /* b_return is true if a RETURN_VALUE opcode is inserted. */
69 unsigned b_return : 1;
70 /* depth of stack upon entry of block, computed by stackdepth() */
71 int b_startdepth;
72 /* instruction offset for block, computed by assemble_jump_offsets() */
73 int b_offset;
74 } basicblock;
76 /* fblockinfo tracks the current frame block.
78 A frame block is used to handle loops, try/except, and try/finally.
79 It's called a frame block to distinguish it from a basic block in the
80 compiler IR.
83 enum fblocktype { LOOP, EXCEPT, FINALLY_TRY, FINALLY_END };
85 struct fblockinfo {
86 enum fblocktype fb_type; /* which construct opened this frame block */
87 basicblock *fb_block; /* associated basic block (see compiler_push_fblock) */
/* One compilation unit (scope); created by compiler_enter_scope() and
   released by compiler_unit_free(). */
90 /* The following items change on entry and exit of code blocks.
91 They must be saved and restored when returning to a block.
93 struct compiler_unit {
94 PySTEntryObject *u_ste; /* symbol table entry, from PySymtable_Lookup() */
96 PyObject *u_name; /* name of the block being compiled */
97 /* The following fields are dicts that map objects to
98 the index of them in co_XXX. The index is used as
99 the argument for opcodes that refer to those collections.
101 PyObject *u_consts; /* all constants */
102 PyObject *u_names; /* all names */
103 PyObject *u_varnames; /* local variables */
104 PyObject *u_cellvars; /* cell variables */
105 PyObject *u_freevars; /* free variables */
107 PyObject *u_private; /* for private name mangling */
109 int u_argcount; /* number of arguments for block */
110 /* Pointer to the most recently allocated block. By following b_list
111 members, you can reach all early allocated blocks. */
112 basicblock *u_blocks;
113 basicblock *u_curblock; /* pointer to current block */
114 int u_tmpname; /* temporary variables for list comps */
116 int u_nfblocks; /* number of entries in use on u_fblock */
117 struct fblockinfo u_fblock[CO_MAXBLOCKS]; /* stack of active frame blocks */
119 int u_firstlineno; /* the first lineno of the block */
120 int u_lineno; /* the lineno for the current stmt */
121 bool u_lineno_set; /* boolean to indicate whether instr
122 has been generated with current lineno */
125 /* This struct captures the global state of a compilation.
127 The u pointer points to the current compilation unit, while units
128 for enclosing blocks are stored in c_stack. The u and c_stack are
129 managed by compiler_enter_scope() and compiler_exit_scope().
132 struct compiler {
133 const char *c_filename;
134 struct symtable *c_st; /* symbol table, built by PySymtable_Build() */
135 PyFutureFeatures *c_future; /* pointer to module's __future__ */
136 PyCompilerFlags *c_flags;
138 int c_interactive; /* true if in interactive mode */
139 int c_nestlevel; /* current depth of scope nesting */
141 struct compiler_unit *u; /* compiler state for current block */
142 PyObject *c_stack; /* Python list holding compiler_unit ptrs */
143 char *c_encoding; /* source encoding (a borrowed reference) */
144 PyArena *c_arena; /* pointer to memory allocation arena */
147 static int compiler_enter_scope(struct compiler *, identifier, void *, int);
148 static void compiler_free(struct compiler *);
149 static basicblock *compiler_new_block(struct compiler *);
150 static int compiler_next_instr(struct compiler *, basicblock *);
151 static int compiler_addop(struct compiler *, int);
152 static int compiler_addop_o(struct compiler *, int, PyObject *, PyObject *);
153 static int compiler_addop_i(struct compiler *, int, int);
154 static int compiler_addop_j(struct compiler *, int, basicblock *, int);
155 static basicblock *compiler_use_new_block(struct compiler *);
156 static int compiler_error(struct compiler *, const char *);
157 static int compiler_nameop(struct compiler *, identifier, expr_context_ty);
159 static PyCodeObject *compiler_mod(struct compiler *, mod_ty);
160 static int compiler_visit_stmt(struct compiler *, stmt_ty);
161 static int compiler_visit_keyword(struct compiler *, keyword_ty);
162 static int compiler_visit_expr(struct compiler *, expr_ty);
163 static int compiler_augassign(struct compiler *, stmt_ty);
164 static int compiler_visit_slice(struct compiler *, slice_ty,
165 expr_context_ty);
167 static int compiler_push_fblock(struct compiler *, enum fblocktype,
168 basicblock *);
169 static void compiler_pop_fblock(struct compiler *, enum fblocktype,
170 basicblock *);
171 /* Returns true if there is a loop on the fblock stack. */
172 static int compiler_in_loop(struct compiler *);
174 static int inplace_binop(struct compiler *, operator_ty);
175 static int expr_constant(expr_ty e);
177 static int compiler_with(struct compiler *, stmt_ty);
179 static PyCodeObject *assemble(struct compiler *, int addNone);
180 static PyObject *__doc__;
182 PyObject *
183 _Py_Mangle(PyObject *privateobj, PyObject *ident)
185 /* Name mangling: __private becomes _classname__private.
186 This is independent from how the name is used. */
187 const char *p, *name = PyString_AsString(ident);
188 char *buffer;
189 size_t nlen, plen;
190 if (privateobj == NULL || !PyString_Check(privateobj) ||
191 name == NULL || name[0] != '_' || name[1] != '_') {
192 Py_INCREF(ident);
193 return ident;
195 p = PyString_AsString(privateobj);
196 nlen = strlen(name);
197 /* Don't mangle __id__ or names with dots.
199 The only time a name with a dot can occur is when
200 we are compiling an import statement that has a
201 package name.
203 TODO(jhylton): Decide whether we want to support
204 mangling of the module name, e.g. __M.X.
206 if ((name[nlen-1] == '_' && name[nlen-2] == '_')
207 || strchr(name, '.')) {
208 Py_INCREF(ident);
209 return ident; /* Don't mangle __whatever__ */
211 /* Strip leading underscores from class name */
212 while (*p == '_')
213 p++;
214 if (*p == '\0') {
215 Py_INCREF(ident);
216 return ident; /* Don't mangle if class is just underscores */
218 plen = strlen(p);
220 assert(1 <= PY_SSIZE_T_MAX - nlen);
221 assert(1 + nlen <= PY_SSIZE_T_MAX - plen);
223 ident = PyString_FromStringAndSize(NULL, 1 + nlen + plen);
224 if (!ident)
225 return 0;
226 /* ident = "_" + p[:plen] + name # i.e. 1+plen+nlen bytes */
227 buffer = PyString_AS_STRING(ident);
228 buffer[0] = '_';
229 strncpy(buffer+1, p, plen);
230 strcpy(buffer+1+plen, name);
231 return ident;
234 static int
235 compiler_init(struct compiler *c)
237 memset(c, 0, sizeof(struct compiler));
239 c->c_stack = PyList_New(0);
240 if (!c->c_stack)
241 return 0;
243 return 1;
246 PyCodeObject *
247 PyAST_Compile(mod_ty mod, const char *filename, PyCompilerFlags *flags,
248 PyArena *arena)
250 struct compiler c;
251 PyCodeObject *co = NULL;
252 PyCompilerFlags local_flags;
253 int merged;
255 if (!__doc__) {
256 __doc__ = PyString_InternFromString("__doc__");
257 if (!__doc__)
258 return NULL;
261 if (!compiler_init(&c))
262 return NULL;
263 c.c_filename = filename;
264 c.c_arena = arena;
265 c.c_future = PyFuture_FromAST(mod, filename);
266 if (c.c_future == NULL)
267 goto finally;
268 if (!flags) {
269 local_flags.cf_flags = 0;
270 flags = &local_flags;
272 merged = c.c_future->ff_features | flags->cf_flags;
273 c.c_future->ff_features = merged;
274 flags->cf_flags = merged;
275 c.c_flags = flags;
276 c.c_nestlevel = 0;
278 c.c_st = PySymtable_Build(mod, filename, c.c_future);
279 if (c.c_st == NULL) {
280 if (!PyErr_Occurred())
281 PyErr_SetString(PyExc_SystemError, "no symtable");
282 goto finally;
285 /* XXX initialize to NULL for now, need to handle */
286 c.c_encoding = NULL;
288 co = compiler_mod(&c, mod);
290 finally:
291 compiler_free(&c);
292 assert(co || PyErr_Occurred());
293 return co;
296 PyCodeObject *
297 PyNode_Compile(struct _node *n, const char *filename)
299 PyCodeObject *co = NULL;
300 mod_ty mod;
301 PyArena *arena = PyArena_New();
302 if (!arena)
303 return NULL;
304 mod = PyAST_FromNode(n, NULL, filename, arena);
305 if (mod)
306 co = PyAST_Compile(mod, filename, NULL, arena);
307 PyArena_Free(arena);
308 return co;
311 static void
312 compiler_free(struct compiler *c)
314 if (c->c_st)
315 PySymtable_Free(c->c_st);
316 if (c->c_future)
317 PyObject_Free(c->c_future);
318 Py_DECREF(c->c_stack);
321 static PyObject *
322 list2dict(PyObject *list)
324 Py_ssize_t i, n;
325 PyObject *v, *k;
326 PyObject *dict = PyDict_New();
327 if (!dict) return NULL;
329 n = PyList_Size(list);
330 for (i = 0; i < n; i++) {
331 v = PyInt_FromLong(i);
332 if (!v) {
333 Py_DECREF(dict);
334 return NULL;
336 k = PyList_GET_ITEM(list, i);
337 k = PyTuple_Pack(2, k, k->ob_type);
338 if (k == NULL || PyDict_SetItem(dict, k, v) < 0) {
339 Py_XDECREF(k);
340 Py_DECREF(v);
341 Py_DECREF(dict);
342 return NULL;
344 Py_DECREF(k);
345 Py_DECREF(v);
347 return dict;
350 /* Return new dict containing names from src that match scope(s).
352 src is a symbol table dictionary. If the scope of a name matches
353 either scope_type or flag is set, insert it into the new dict. The
354 values are integers, starting at offset and increasing by one for
355 each key.
358 static PyObject *
359 dictbytype(PyObject *src, int scope_type, int flag, int offset)
361 Py_ssize_t pos = 0, i = offset, scope;
362 PyObject *k, *v, *dest = PyDict_New();
364 assert(offset >= 0);
365 if (dest == NULL)
366 return NULL;
368 while (PyDict_Next(src, &pos, &k, &v)) {
369 /* XXX this should probably be a macro in symtable.h */
370 assert(PyInt_Check(v));
371 scope = (PyInt_AS_LONG(v) >> SCOPE_OFF) & SCOPE_MASK;
373 if (scope == scope_type || PyInt_AS_LONG(v) & flag) {
374 PyObject *tuple, *item = PyInt_FromLong(i);
375 if (item == NULL) {
376 Py_DECREF(dest);
377 return NULL;
379 i++;
380 tuple = PyTuple_Pack(2, k, k->ob_type);
381 if (!tuple || PyDict_SetItem(dest, tuple, item) < 0) {
382 Py_DECREF(item);
383 Py_DECREF(dest);
384 Py_XDECREF(tuple);
385 return NULL;
387 Py_DECREF(item);
388 Py_DECREF(tuple);
391 return dest;
394 static void
395 compiler_unit_check(struct compiler_unit *u)
397 basicblock *block;
398 for (block = u->u_blocks; block != NULL; block = block->b_list) {
399 assert((void *)block != (void *)0xcbcbcbcb);
400 assert((void *)block != (void *)0xfbfbfbfb);
401 assert((void *)block != (void *)0xdbdbdbdb);
402 if (block->b_instr != NULL) {
403 assert(block->b_ialloc > 0);
404 assert(block->b_iused > 0);
405 assert(block->b_ialloc >= block->b_iused);
407 else {
408 assert (block->b_iused == 0);
409 assert (block->b_ialloc == 0);
414 static void
415 compiler_unit_free(struct compiler_unit *u)
417 basicblock *b, *next;
419 compiler_unit_check(u);
420 b = u->u_blocks;
421 while (b != NULL) {
422 if (b->b_instr)
423 PyObject_Free((void *)b->b_instr);
424 next = b->b_list;
425 PyObject_Free((void *)b);
426 b = next;
428 Py_CLEAR(u->u_ste);
429 Py_CLEAR(u->u_name);
430 Py_CLEAR(u->u_consts);
431 Py_CLEAR(u->u_names);
432 Py_CLEAR(u->u_varnames);
433 Py_CLEAR(u->u_freevars);
434 Py_CLEAR(u->u_cellvars);
435 Py_CLEAR(u->u_private);
436 PyObject_Free(u);
439 static int
440 compiler_enter_scope(struct compiler *c, identifier name, void *key,
441 int lineno)
443 struct compiler_unit *u;
445 u = (struct compiler_unit *)PyObject_Malloc(sizeof(
446 struct compiler_unit));
447 if (!u) {
448 PyErr_NoMemory();
449 return 0;
451 memset(u, 0, sizeof(struct compiler_unit));
452 u->u_argcount = 0;
453 u->u_ste = PySymtable_Lookup(c->c_st, key);
454 if (!u->u_ste) {
455 compiler_unit_free(u);
456 return 0;
458 Py_INCREF(name);
459 u->u_name = name;
460 u->u_varnames = list2dict(u->u_ste->ste_varnames);
461 u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
462 if (!u->u_varnames || !u->u_cellvars) {
463 compiler_unit_free(u);
464 return 0;
467 u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS,
468 PyDict_Size(u->u_cellvars));
469 if (!u->u_freevars) {
470 compiler_unit_free(u);
471 return 0;
474 u->u_blocks = NULL;
475 u->u_tmpname = 0;
476 u->u_nfblocks = 0;
477 u->u_firstlineno = lineno;
478 u->u_lineno = 0;
479 u->u_lineno_set = false;
480 u->u_consts = PyDict_New();
481 if (!u->u_consts) {
482 compiler_unit_free(u);
483 return 0;
485 u->u_names = PyDict_New();
486 if (!u->u_names) {
487 compiler_unit_free(u);
488 return 0;
491 u->u_private = NULL;
493 /* Push the old compiler_unit on the stack. */
494 if (c->u) {
495 PyObject *wrapper = PyCObject_FromVoidPtr(c->u, NULL);
496 if (!wrapper || PyList_Append(c->c_stack, wrapper) < 0) {
497 Py_XDECREF(wrapper);
498 compiler_unit_free(u);
499 return 0;
501 Py_DECREF(wrapper);
502 u->u_private = c->u->u_private;
503 Py_XINCREF(u->u_private);
505 c->u = u;
507 c->c_nestlevel++;
508 if (compiler_use_new_block(c) == NULL)
509 return 0;
511 return 1;
514 static void
515 compiler_exit_scope(struct compiler *c)
517 int n;
518 PyObject *wrapper;
520 c->c_nestlevel--;
521 compiler_unit_free(c->u);
522 /* Restore c->u to the parent unit. */
523 n = PyList_GET_SIZE(c->c_stack) - 1;
524 if (n >= 0) {
525 wrapper = PyList_GET_ITEM(c->c_stack, n);
526 c->u = (struct compiler_unit *)PyCObject_AsVoidPtr(wrapper);
527 assert(c->u);
528 /* we are deleting from a list so this really shouldn't fail */
529 if (PySequence_DelItem(c->c_stack, n) < 0)
530 Py_FatalError("compiler_exit_scope()");
531 compiler_unit_check(c->u);
533 else
534 c->u = NULL;
538 /* Allocate a new "anonymous" local variable.
539 Used by list comprehensions and with statements.
542 static PyObject *
543 compiler_new_tmpname(struct compiler *c)
545 char tmpname[256];
546 PyOS_snprintf(tmpname, sizeof(tmpname), "_[%d]", ++c->u->u_tmpname);
547 return PyString_FromString(tmpname);
550 /* Allocate a new block and return a pointer to it.
551 Returns NULL on error.
554 static basicblock *
555 compiler_new_block(struct compiler *c)
557 basicblock *b;
558 struct compiler_unit *u;
560 u = c->u;
561 b = (basicblock *)PyObject_Malloc(sizeof(basicblock));
562 if (b == NULL) {
563 PyErr_NoMemory();
564 return NULL;
566 memset((void *)b, 0, sizeof(basicblock));
567 /* Extend the singly linked list of blocks with new block. */
568 b->b_list = u->u_blocks;
569 u->u_blocks = b;
570 return b;
573 static basicblock *
574 compiler_use_new_block(struct compiler *c)
576 basicblock *block = compiler_new_block(c);
577 if (block == NULL)
578 return NULL;
579 c->u->u_curblock = block;
580 return block;
583 static basicblock *
584 compiler_next_block(struct compiler *c)
586 basicblock *block = compiler_new_block(c);
587 if (block == NULL)
588 return NULL;
589 c->u->u_curblock->b_next = block;
590 c->u->u_curblock = block;
591 return block;
594 static basicblock *
595 compiler_use_next_block(struct compiler *c, basicblock *block)
597 assert(block != NULL);
598 c->u->u_curblock->b_next = block;
599 c->u->u_curblock = block;
600 return block;
603 /* Returns the offset of the next instruction in the current block's
604 b_instr array. Resizes the b_instr as necessary.
605 Returns -1 on failure.
608 static int
609 compiler_next_instr(struct compiler *c, basicblock *b)
611 assert(b != NULL);
612 if (b->b_instr == NULL) {
613 b->b_instr = (struct instr *)PyObject_Malloc(
614 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
615 if (b->b_instr == NULL) {
616 PyErr_NoMemory();
617 return -1;
619 b->b_ialloc = DEFAULT_BLOCK_SIZE;
620 memset((char *)b->b_instr, 0,
621 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
623 else if (b->b_iused == b->b_ialloc) {
624 struct instr *tmp;
625 size_t oldsize, newsize;
626 oldsize = b->b_ialloc * sizeof(struct instr);
627 newsize = oldsize << 1;
629 if (oldsize > (PY_SIZE_MAX >> 1)) {
630 PyErr_NoMemory();
631 return -1;
634 if (newsize == 0) {
635 PyErr_NoMemory();
636 return -1;
638 b->b_ialloc <<= 1;
639 tmp = (struct instr *)PyObject_Realloc(
640 (void *)b->b_instr, newsize);
641 if (tmp == NULL) {
642 PyErr_NoMemory();
643 return -1;
645 b->b_instr = tmp;
646 memset((char *)b->b_instr + oldsize, 0, newsize - oldsize);
648 return b->b_iused++;
651 /* Set the i_lineno member of the instruction at offset off if the
652 line number for the current expression/statement has not
653 already been set. If it has been set, the call has no effect.
655 The line number is reset in the following cases:
656 - when entering a new scope
657 - on each statement
658 - on each expression that start a new line
659 - before the "except" clause
660 - before the "for" and "while" expressions
663 static void
664 compiler_set_lineno(struct compiler *c, int off)
666 basicblock *b;
667 if (c->u->u_lineno_set)
668 return;
669 c->u->u_lineno_set = true;
670 b = c->u->u_curblock;
671 b->b_instr[off].i_lineno = c->u->u_lineno;
674 static int
675 opcode_stack_effect(int opcode, int oparg)
677 switch (opcode) {
678 case POP_TOP:
679 return -1;
680 case ROT_TWO:
681 case ROT_THREE:
682 return 0;
683 case DUP_TOP:
684 return 1;
685 case ROT_FOUR:
686 return 0;
688 case UNARY_POSITIVE:
689 case UNARY_NEGATIVE:
690 case UNARY_NOT:
691 case UNARY_CONVERT:
692 case UNARY_INVERT:
693 return 0;
695 case LIST_APPEND:
696 return -1;
698 case BINARY_POWER:
699 case BINARY_MULTIPLY:
700 case BINARY_DIVIDE:
701 case BINARY_MODULO:
702 case BINARY_ADD:
703 case BINARY_SUBTRACT:
704 case BINARY_SUBSCR:
705 case BINARY_FLOOR_DIVIDE:
706 case BINARY_TRUE_DIVIDE:
707 return -1;
708 case INPLACE_FLOOR_DIVIDE:
709 case INPLACE_TRUE_DIVIDE:
710 return -1;
712 case SLICE+0:
713 return 1;
714 case SLICE+1:
715 return 0;
716 case SLICE+2:
717 return 0;
718 case SLICE+3:
719 return -1;
721 case STORE_SLICE+0:
722 return -2;
723 case STORE_SLICE+1:
724 return -3;
725 case STORE_SLICE+2:
726 return -3;
727 case STORE_SLICE+3:
728 return -4;
730 case DELETE_SLICE+0:
731 return -1;
732 case DELETE_SLICE+1:
733 return -2;
734 case DELETE_SLICE+2:
735 return -2;
736 case DELETE_SLICE+3:
737 return -3;
739 case INPLACE_ADD:
740 case INPLACE_SUBTRACT:
741 case INPLACE_MULTIPLY:
742 case INPLACE_DIVIDE:
743 case INPLACE_MODULO:
744 return -1;
745 case STORE_SUBSCR:
746 return -3;
747 case STORE_MAP:
748 return -2;
749 case DELETE_SUBSCR:
750 return -2;
752 case BINARY_LSHIFT:
753 case BINARY_RSHIFT:
754 case BINARY_AND:
755 case BINARY_XOR:
756 case BINARY_OR:
757 return -1;
758 case INPLACE_POWER:
759 return -1;
760 case GET_ITER:
761 return 0;
763 case PRINT_EXPR:
764 return -1;
765 case PRINT_ITEM:
766 return -1;
767 case PRINT_NEWLINE:
768 return 0;
769 case PRINT_ITEM_TO:
770 return -2;
771 case PRINT_NEWLINE_TO:
772 return -1;
773 case INPLACE_LSHIFT:
774 case INPLACE_RSHIFT:
775 case INPLACE_AND:
776 case INPLACE_XOR:
777 case INPLACE_OR:
778 return -1;
779 case BREAK_LOOP:
780 return 0;
781 case WITH_CLEANUP:
782 return -1; /* XXX Sometimes more */
783 case LOAD_LOCALS:
784 return 1;
785 case RETURN_VALUE:
786 return -1;
787 case IMPORT_STAR:
788 return -1;
789 case EXEC_STMT:
790 return -3;
791 case YIELD_VALUE:
792 return 0;
794 case POP_BLOCK:
795 return 0;
796 case END_FINALLY:
797 return -1; /* or -2 or -3 if exception occurred */
798 case BUILD_CLASS:
799 return -2;
801 case STORE_NAME:
802 return -1;
803 case DELETE_NAME:
804 return 0;
805 case UNPACK_SEQUENCE:
806 return oparg-1;
807 case FOR_ITER:
808 return 1;
810 case STORE_ATTR:
811 return -2;
812 case DELETE_ATTR:
813 return -1;
814 case STORE_GLOBAL:
815 return -1;
816 case DELETE_GLOBAL:
817 return 0;
818 case DUP_TOPX:
819 return oparg;
820 case LOAD_CONST:
821 return 1;
822 case LOAD_NAME:
823 return 1;
824 case BUILD_TUPLE:
825 case BUILD_LIST:
826 return 1-oparg;
827 case BUILD_MAP:
828 return 1;
829 case LOAD_ATTR:
830 return 0;
831 case COMPARE_OP:
832 return -1;
833 case IMPORT_NAME:
834 return 0;
835 case IMPORT_FROM:
836 return 1;
838 case JUMP_FORWARD:
839 case JUMP_IF_FALSE:
840 case JUMP_IF_TRUE:
841 case JUMP_ABSOLUTE:
842 return 0;
844 case LOAD_GLOBAL:
845 return 1;
847 case CONTINUE_LOOP:
848 return 0;
849 case SETUP_LOOP:
850 return 0;
851 case SETUP_EXCEPT:
852 case SETUP_FINALLY:
853 return 3; /* actually pushed by an exception */
855 case LOAD_FAST:
856 return 1;
857 case STORE_FAST:
858 return -1;
859 case DELETE_FAST:
860 return 0;
862 case RAISE_VARARGS:
863 return -oparg;
864 #define NARGS(o) (((o) % 256) + 2*((o) / 256))
865 case CALL_FUNCTION:
866 return -NARGS(oparg);
867 case CALL_FUNCTION_VAR:
868 case CALL_FUNCTION_KW:
869 return -NARGS(oparg)-1;
870 case CALL_FUNCTION_VAR_KW:
871 return -NARGS(oparg)-2;
872 #undef NARGS
873 case MAKE_FUNCTION:
874 return -oparg;
875 case BUILD_SLICE:
876 if (oparg == 3)
877 return -2;
878 else
879 return -1;
881 case MAKE_CLOSURE:
882 return -oparg;
883 case LOAD_CLOSURE:
884 return 1;
885 case LOAD_DEREF:
886 return 1;
887 case STORE_DEREF:
888 return -1;
889 default:
890 fprintf(stderr, "opcode = %d\n", opcode);
891 Py_FatalError("opcode_stack_effect()");
894 return 0; /* not reachable */
897 /* Add an opcode with no argument.
898 Returns 0 on failure, 1 on success.
901 static int
902 compiler_addop(struct compiler *c, int opcode)
904 basicblock *b;
905 struct instr *i;
906 int off;
907 off = compiler_next_instr(c, c->u->u_curblock);
908 if (off < 0)
909 return 0;
910 b = c->u->u_curblock;
911 i = &b->b_instr[off];
912 i->i_opcode = opcode;
913 i->i_hasarg = 0;
914 if (opcode == RETURN_VALUE)
915 b->b_return = 1;
916 compiler_set_lineno(c, off);
917 return 1;
920 static int
921 compiler_add_o(struct compiler *c, PyObject *dict, PyObject *o)
923 PyObject *t, *v;
924 Py_ssize_t arg;
925 unsigned char *p, *q;
926 Py_complex z;
927 double d;
928 int real_part_zero, imag_part_zero;
930 /* necessary to make sure types aren't coerced (e.g., int and long) */
931 /* _and_ to distinguish 0.0 from -0.0 e.g. on IEEE platforms */
932 if (PyFloat_Check(o)) {
933 d = PyFloat_AS_DOUBLE(o);
934 p = (unsigned char*) &d;
935 /* all we need is to make the tuple different in either the 0.0
936 * or -0.0 case from all others, just to avoid the "coercion".
938 if (*p==0 && p[sizeof(double)-1]==0)
939 t = PyTuple_Pack(3, o, o->ob_type, Py_None);
940 else
941 t = PyTuple_Pack(2, o, o->ob_type);
943 else if (PyComplex_Check(o)) {
944 /* complex case is even messier: we need to make complex(x,
945 0.) different from complex(x, -0.) and complex(0., y)
946 different from complex(-0., y), for any x and y. In
947 particular, all four complex zeros should be
948 distinguished.*/
949 z = PyComplex_AsCComplex(o);
950 p = (unsigned char*) &(z.real);
951 q = (unsigned char*) &(z.imag);
952 /* all that matters here is that on IEEE platforms
953 real_part_zero will be true if z.real == 0., and false if
954 z.real == -0. In fact, real_part_zero will also be true
955 for some other rarely occurring nonzero floats, but this
956 doesn't matter. Similar comments apply to
957 imag_part_zero. */
958 real_part_zero = *p==0 && p[sizeof(double)-1]==0;
959 imag_part_zero = *q==0 && q[sizeof(double)-1]==0;
960 if (real_part_zero && imag_part_zero) {
961 t = PyTuple_Pack(4, o, o->ob_type, Py_True, Py_True);
963 else if (real_part_zero && !imag_part_zero) {
964 t = PyTuple_Pack(4, o, o->ob_type, Py_True, Py_False);
966 else if (!real_part_zero && imag_part_zero) {
967 t = PyTuple_Pack(4, o, o->ob_type, Py_False, Py_True);
969 else {
970 t = PyTuple_Pack(2, o, o->ob_type);
973 else {
974 t = PyTuple_Pack(2, o, o->ob_type);
976 if (t == NULL)
977 return -1;
979 v = PyDict_GetItem(dict, t);
980 if (!v) {
981 arg = PyDict_Size(dict);
982 v = PyInt_FromLong(arg);
983 if (!v) {
984 Py_DECREF(t);
985 return -1;
987 if (PyDict_SetItem(dict, t, v) < 0) {
988 Py_DECREF(t);
989 Py_DECREF(v);
990 return -1;
992 Py_DECREF(v);
994 else
995 arg = PyInt_AsLong(v);
996 Py_DECREF(t);
997 return arg;
1000 static int
1001 compiler_addop_o(struct compiler *c, int opcode, PyObject *dict,
1002 PyObject *o)
1004 int arg = compiler_add_o(c, dict, o);
1005 if (arg < 0)
1006 return 0;
1007 return compiler_addop_i(c, opcode, arg);
1010 static int
1011 compiler_addop_name(struct compiler *c, int opcode, PyObject *dict,
1012 PyObject *o)
1014 int arg;
1015 PyObject *mangled = _Py_Mangle(c->u->u_private, o);
1016 if (!mangled)
1017 return 0;
1018 arg = compiler_add_o(c, dict, mangled);
1019 Py_DECREF(mangled);
1020 if (arg < 0)
1021 return 0;
1022 return compiler_addop_i(c, opcode, arg);
1025 /* Add an opcode with an integer argument.
1026 Returns 0 on failure, 1 on success.
1029 static int
1030 compiler_addop_i(struct compiler *c, int opcode, int oparg)
1032 struct instr *i;
1033 int off;
1034 off = compiler_next_instr(c, c->u->u_curblock);
1035 if (off < 0)
1036 return 0;
1037 i = &c->u->u_curblock->b_instr[off];
1038 i->i_opcode = opcode;
1039 i->i_oparg = oparg;
1040 i->i_hasarg = 1;
1041 compiler_set_lineno(c, off);
1042 return 1;
1045 static int
1046 compiler_addop_j(struct compiler *c, int opcode, basicblock *b, int absolute)
1048 struct instr *i;
1049 int off;
1051 assert(b != NULL);
1052 off = compiler_next_instr(c, c->u->u_curblock);
1053 if (off < 0)
1054 return 0;
1055 i = &c->u->u_curblock->b_instr[off];
1056 i->i_opcode = opcode;
1057 i->i_target = b;
1058 i->i_hasarg = 1;
1059 if (absolute)
1060 i->i_jabs = 1;
1061 else
1062 i->i_jrel = 1;
1063 compiler_set_lineno(c, off);
1064 return 1;
/* Convenience macros for the code generator.  Each one returns 0 from
   the *enclosing* function on failure; the *_IN_SCOPE variants also pop
   the scope (via compiler_exit_scope) before returning. */
1067 /* The distinction between NEW_BLOCK and NEXT_BLOCK is subtle. (I'd
1068 like to find better names.) NEW_BLOCK() creates a new block and sets
1069 it as the current block. NEXT_BLOCK() also creates an implicit jump
1070 from the current block to the new block.
1073 /* The returns inside these macros make it impossible to decref objects
1074 created in the local function. Local objects should use the arena.
1078 #define NEW_BLOCK(C) { \
1079 if (compiler_use_new_block((C)) == NULL) \
1080 return 0; \
1083 #define NEXT_BLOCK(C) { \
1084 if (compiler_next_block((C)) == NULL) \
1085 return 0; \
1088 #define ADDOP(C, OP) { \
1089 if (!compiler_addop((C), (OP))) \
1090 return 0; \
1093 #define ADDOP_IN_SCOPE(C, OP) { \
1094 if (!compiler_addop((C), (OP))) { \
1095 compiler_exit_scope(c); \
1096 return 0; \
1100 #define ADDOP_O(C, OP, O, TYPE) { \
1101 if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1102 return 0; \
1105 #define ADDOP_NAME(C, OP, O, TYPE) { \
1106 if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1107 return 0; \
1110 #define ADDOP_I(C, OP, O) { \
1111 if (!compiler_addop_i((C), (OP), (O))) \
1112 return 0; \
1115 #define ADDOP_JABS(C, OP, O) { \
1116 if (!compiler_addop_j((C), (OP), (O), 1)) \
1117 return 0; \
1120 #define ADDOP_JREL(C, OP, O) { \
1121 if (!compiler_addop_j((C), (OP), (O), 0)) \
1122 return 0; \
1125 /* VISIT and VISIT_SEQ takes an ASDL type as their second argument. They use
1126 the ASDL name to synthesize the name of the C type and the visit function.
1129 #define VISIT(C, TYPE, V) {\
1130 if (!compiler_visit_ ## TYPE((C), (V))) \
1131 return 0; \
1134 #define VISIT_IN_SCOPE(C, TYPE, V) {\
1135 if (!compiler_visit_ ## TYPE((C), (V))) { \
1136 compiler_exit_scope(c); \
1137 return 0; \
1141 #define VISIT_SLICE(C, V, CTX) {\
1142 if (!compiler_visit_slice((C), (V), (CTX))) \
1143 return 0; \
1146 #define VISIT_SEQ(C, TYPE, SEQ) { \
1147 int _i; \
1148 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1149 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1150 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1151 if (!compiler_visit_ ## TYPE((C), elt)) \
1152 return 0; \
1156 #define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
1157 int _i; \
1158 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1159 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1160 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1161 if (!compiler_visit_ ## TYPE((C), elt)) { \
1162 compiler_exit_scope(c); \
1163 return 0; \
1168 static int
1169 compiler_isdocstring(stmt_ty s)
1171 if (s->kind != Expr_kind)
1172 return 0;
1173 return s->v.Expr.value->kind == Str_kind;
1176 /* Compile a sequence of statements, checking for a docstring. */
1178 static int
1179 compiler_body(struct compiler *c, asdl_seq *stmts)
1181 int i = 0;
1182 stmt_ty st;
1184 if (!asdl_seq_LEN(stmts))
1185 return 1;
1186 st = (stmt_ty)asdl_seq_GET(stmts, 0);
1187 if (compiler_isdocstring(st) && Py_OptimizeFlag < 2) {
1188 /* don't generate docstrings if -OO */
1189 i = 1;
1190 VISIT(c, expr, st->v.Expr.value);
1191 if (!compiler_nameop(c, __doc__, Store))
1192 return 0;
1194 for (; i < asdl_seq_LEN(stmts); i++)
1195 VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
1196 return 1;
1199 static PyCodeObject *
1200 compiler_mod(struct compiler *c, mod_ty mod)
1202 PyCodeObject *co;
1203 int addNone = 1;
1204 static PyObject *module;
1205 if (!module) {
1206 module = PyString_InternFromString("<module>");
1207 if (!module)
1208 return NULL;
1210 /* Use 0 for firstlineno initially, will fixup in assemble(). */
1211 if (!compiler_enter_scope(c, module, mod, 0))
1212 return NULL;
1213 switch (mod->kind) {
1214 case Module_kind:
1215 if (!compiler_body(c, mod->v.Module.body)) {
1216 compiler_exit_scope(c);
1217 return 0;
1219 break;
1220 case Interactive_kind:
1221 c->c_interactive = 1;
1222 VISIT_SEQ_IN_SCOPE(c, stmt,
1223 mod->v.Interactive.body);
1224 break;
1225 case Expression_kind:
1226 VISIT_IN_SCOPE(c, expr, mod->v.Expression.body);
1227 addNone = 0;
1228 break;
1229 case Suite_kind:
1230 PyErr_SetString(PyExc_SystemError,
1231 "suite should not be possible");
1232 return 0;
1233 default:
1234 PyErr_Format(PyExc_SystemError,
1235 "module kind %d should not be possible",
1236 mod->kind);
1237 return 0;
1239 co = assemble(c, addNone);
1240 compiler_exit_scope(c);
1241 return co;
1244 /* The test for LOCAL must come before the test for FREE in order to
1245 handle classes where name is both local and free. The local var is
1246 a method and the free var is a free var referenced within a method.
1249 static int
1250 get_ref_type(struct compiler *c, PyObject *name)
1252 int scope = PyST_GetScope(c->u->u_ste, name);
1253 if (scope == 0) {
1254 char buf[350];
1255 PyOS_snprintf(buf, sizeof(buf),
1256 "unknown scope for %.100s in %.100s(%s) in %s\n"
1257 "symbols: %s\nlocals: %s\nglobals: %s\n",
1258 PyString_AS_STRING(name),
1259 PyString_AS_STRING(c->u->u_name),
1260 PyObject_REPR(c->u->u_ste->ste_id),
1261 c->c_filename,
1262 PyObject_REPR(c->u->u_ste->ste_symbols),
1263 PyObject_REPR(c->u->u_varnames),
1264 PyObject_REPR(c->u->u_names)
1266 Py_FatalError(buf);
1269 return scope;
1272 static int
1273 compiler_lookup_arg(PyObject *dict, PyObject *name)
1275 PyObject *k, *v;
1276 k = PyTuple_Pack(2, name, name->ob_type);
1277 if (k == NULL)
1278 return -1;
1279 v = PyDict_GetItem(dict, k);
1280 Py_DECREF(k);
1281 if (v == NULL)
1282 return -1;
1283 return PyInt_AS_LONG(v);
1286 static int
1287 compiler_make_closure(struct compiler *c, PyCodeObject *co, int args)
1289 int i, free = PyCode_GetNumFree(co);
1290 if (free == 0) {
1291 ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
1292 ADDOP_I(c, MAKE_FUNCTION, args);
1293 return 1;
1295 for (i = 0; i < free; ++i) {
1296 /* Bypass com_addop_varname because it will generate
1297 LOAD_DEREF but LOAD_CLOSURE is needed.
1299 PyObject *name = PyTuple_GET_ITEM(co->co_freevars, i);
1300 int arg, reftype;
1302 /* Special case: If a class contains a method with a
1303 free variable that has the same name as a method,
1304 the name will be considered free *and* local in the
1305 class. It should be handled by the closure, as
1306 well as by the normal name loookup logic.
1308 reftype = get_ref_type(c, name);
1309 if (reftype == CELL)
1310 arg = compiler_lookup_arg(c->u->u_cellvars, name);
1311 else /* (reftype == FREE) */
1312 arg = compiler_lookup_arg(c->u->u_freevars, name);
1313 if (arg == -1) {
1314 printf("lookup %s in %s %d %d\n"
1315 "freevars of %s: %s\n",
1316 PyObject_REPR(name),
1317 PyString_AS_STRING(c->u->u_name),
1318 reftype, arg,
1319 PyString_AS_STRING(co->co_name),
1320 PyObject_REPR(co->co_freevars));
1321 Py_FatalError("compiler_make_closure()");
1323 ADDOP_I(c, LOAD_CLOSURE, arg);
1325 ADDOP_I(c, BUILD_TUPLE, free);
1326 ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
1327 ADDOP_I(c, MAKE_CLOSURE, args);
1328 return 1;
1331 static int
1332 compiler_decorators(struct compiler *c, asdl_seq* decos)
1334 int i;
1336 if (!decos)
1337 return 1;
1339 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1340 VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
1342 return 1;
1345 static int
1346 compiler_arguments(struct compiler *c, arguments_ty args)
1348 int i;
1349 int n = asdl_seq_LEN(args->args);
1350 /* Correctly handle nested argument lists */
1351 for (i = 0; i < n; i++) {
1352 expr_ty arg = (expr_ty)asdl_seq_GET(args->args, i);
1353 if (arg->kind == Tuple_kind) {
1354 PyObject *id = PyString_FromFormat(".%d", i);
1355 if (id == NULL) {
1356 return 0;
1358 if (!compiler_nameop(c, id, Load)) {
1359 Py_DECREF(id);
1360 return 0;
1362 Py_DECREF(id);
1363 VISIT(c, expr, arg);
1366 return 1;
1369 static int
1370 compiler_function(struct compiler *c, stmt_ty s)
1372 PyCodeObject *co;
1373 PyObject *first_const = Py_None;
1374 arguments_ty args = s->v.FunctionDef.args;
1375 asdl_seq* decos = s->v.FunctionDef.decorator_list;
1376 stmt_ty st;
1377 int i, n, docstring;
1379 assert(s->kind == FunctionDef_kind);
1381 if (!compiler_decorators(c, decos))
1382 return 0;
1383 if (args->defaults)
1384 VISIT_SEQ(c, expr, args->defaults);
1385 if (!compiler_enter_scope(c, s->v.FunctionDef.name, (void *)s,
1386 s->lineno))
1387 return 0;
1389 st = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, 0);
1390 docstring = compiler_isdocstring(st);
1391 if (docstring && Py_OptimizeFlag < 2)
1392 first_const = st->v.Expr.value->v.Str.s;
1393 if (compiler_add_o(c, c->u->u_consts, first_const) < 0) {
1394 compiler_exit_scope(c);
1395 return 0;
1398 /* unpack nested arguments */
1399 compiler_arguments(c, args);
1401 c->u->u_argcount = asdl_seq_LEN(args->args);
1402 n = asdl_seq_LEN(s->v.FunctionDef.body);
1403 /* if there was a docstring, we need to skip the first statement */
1404 for (i = docstring; i < n; i++) {
1405 st = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, i);
1406 VISIT_IN_SCOPE(c, stmt, st);
1408 co = assemble(c, 1);
1409 compiler_exit_scope(c);
1410 if (co == NULL)
1411 return 0;
1413 compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
1414 Py_DECREF(co);
1416 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1417 ADDOP_I(c, CALL_FUNCTION, 1);
1420 return compiler_nameop(c, s->v.FunctionDef.name, Store);
1423 static int
1424 compiler_class(struct compiler *c, stmt_ty s)
1426 int n, i;
1427 PyCodeObject *co;
1428 PyObject *str;
1429 asdl_seq* decos = s->v.ClassDef.decorator_list;
1431 if (!compiler_decorators(c, decos))
1432 return 0;
1434 /* push class name on stack, needed by BUILD_CLASS */
1435 ADDOP_O(c, LOAD_CONST, s->v.ClassDef.name, consts);
1436 /* push the tuple of base classes on the stack */
1437 n = asdl_seq_LEN(s->v.ClassDef.bases);
1438 if (n > 0)
1439 VISIT_SEQ(c, expr, s->v.ClassDef.bases);
1440 ADDOP_I(c, BUILD_TUPLE, n);
1441 if (!compiler_enter_scope(c, s->v.ClassDef.name, (void *)s,
1442 s->lineno))
1443 return 0;
1444 Py_XDECREF(c->u->u_private);
1445 c->u->u_private = s->v.ClassDef.name;
1446 Py_INCREF(c->u->u_private);
1447 str = PyString_InternFromString("__name__");
1448 if (!str || !compiler_nameop(c, str, Load)) {
1449 Py_XDECREF(str);
1450 compiler_exit_scope(c);
1451 return 0;
1454 Py_DECREF(str);
1455 str = PyString_InternFromString("__module__");
1456 if (!str || !compiler_nameop(c, str, Store)) {
1457 Py_XDECREF(str);
1458 compiler_exit_scope(c);
1459 return 0;
1461 Py_DECREF(str);
1463 if (!compiler_body(c, s->v.ClassDef.body)) {
1464 compiler_exit_scope(c);
1465 return 0;
1468 ADDOP_IN_SCOPE(c, LOAD_LOCALS);
1469 ADDOP_IN_SCOPE(c, RETURN_VALUE);
1470 co = assemble(c, 1);
1471 compiler_exit_scope(c);
1472 if (co == NULL)
1473 return 0;
1475 compiler_make_closure(c, co, 0);
1476 Py_DECREF(co);
1478 ADDOP_I(c, CALL_FUNCTION, 0);
1479 ADDOP(c, BUILD_CLASS);
1480 /* apply decorators */
1481 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1482 ADDOP_I(c, CALL_FUNCTION, 1);
1484 if (!compiler_nameop(c, s->v.ClassDef.name, Store))
1485 return 0;
1486 return 1;
1489 static int
1490 compiler_ifexp(struct compiler *c, expr_ty e)
1492 basicblock *end, *next;
1494 assert(e->kind == IfExp_kind);
1495 end = compiler_new_block(c);
1496 if (end == NULL)
1497 return 0;
1498 next = compiler_new_block(c);
1499 if (next == NULL)
1500 return 0;
1501 VISIT(c, expr, e->v.IfExp.test);
1502 ADDOP_JREL(c, JUMP_IF_FALSE, next);
1503 ADDOP(c, POP_TOP);
1504 VISIT(c, expr, e->v.IfExp.body);
1505 ADDOP_JREL(c, JUMP_FORWARD, end);
1506 compiler_use_next_block(c, next);
1507 ADDOP(c, POP_TOP);
1508 VISIT(c, expr, e->v.IfExp.orelse);
1509 compiler_use_next_block(c, end);
1510 return 1;
1513 static int
1514 compiler_lambda(struct compiler *c, expr_ty e)
1516 PyCodeObject *co;
1517 static identifier name;
1518 arguments_ty args = e->v.Lambda.args;
1519 assert(e->kind == Lambda_kind);
1521 if (!name) {
1522 name = PyString_InternFromString("<lambda>");
1523 if (!name)
1524 return 0;
1527 if (args->defaults)
1528 VISIT_SEQ(c, expr, args->defaults);
1529 if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
1530 return 0;
1532 /* unpack nested arguments */
1533 compiler_arguments(c, args);
1535 c->u->u_argcount = asdl_seq_LEN(args->args);
1536 VISIT_IN_SCOPE(c, expr, e->v.Lambda.body);
1537 if (c->u->u_ste->ste_generator) {
1538 ADDOP_IN_SCOPE(c, POP_TOP);
1540 else {
1541 ADDOP_IN_SCOPE(c, RETURN_VALUE);
1543 co = assemble(c, 1);
1544 compiler_exit_scope(c);
1545 if (co == NULL)
1546 return 0;
1548 compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
1549 Py_DECREF(co);
1551 return 1;
1554 static int
1555 compiler_print(struct compiler *c, stmt_ty s)
1557 int i, n;
1558 bool dest;
1560 assert(s->kind == Print_kind);
1561 n = asdl_seq_LEN(s->v.Print.values);
1562 dest = false;
1563 if (s->v.Print.dest) {
1564 VISIT(c, expr, s->v.Print.dest);
1565 dest = true;
1567 for (i = 0; i < n; i++) {
1568 expr_ty e = (expr_ty)asdl_seq_GET(s->v.Print.values, i);
1569 if (dest) {
1570 ADDOP(c, DUP_TOP);
1571 VISIT(c, expr, e);
1572 ADDOP(c, ROT_TWO);
1573 ADDOP(c, PRINT_ITEM_TO);
1575 else {
1576 VISIT(c, expr, e);
1577 ADDOP(c, PRINT_ITEM);
1580 if (s->v.Print.nl) {
1581 if (dest)
1582 ADDOP(c, PRINT_NEWLINE_TO)
1583 else
1584 ADDOP(c, PRINT_NEWLINE)
1586 else if (dest)
1587 ADDOP(c, POP_TOP);
1588 return 1;
1591 static int
1592 compiler_if(struct compiler *c, stmt_ty s)
1594 basicblock *end, *next;
1595 int constant;
1596 assert(s->kind == If_kind);
1597 end = compiler_new_block(c);
1598 if (end == NULL)
1599 return 0;
1600 next = compiler_new_block(c);
1601 if (next == NULL)
1602 return 0;
1604 constant = expr_constant(s->v.If.test);
1605 /* constant = 0: "if 0"
1606 * constant = 1: "if 1", "if 2", ...
1607 * constant = -1: rest */
1608 if (constant == 0) {
1609 if (s->v.If.orelse)
1610 VISIT_SEQ(c, stmt, s->v.If.orelse);
1611 } else if (constant == 1) {
1612 VISIT_SEQ(c, stmt, s->v.If.body);
1613 } else {
1614 VISIT(c, expr, s->v.If.test);
1615 ADDOP_JREL(c, JUMP_IF_FALSE, next);
1616 ADDOP(c, POP_TOP);
1617 VISIT_SEQ(c, stmt, s->v.If.body);
1618 ADDOP_JREL(c, JUMP_FORWARD, end);
1619 compiler_use_next_block(c, next);
1620 ADDOP(c, POP_TOP);
1621 if (s->v.If.orelse)
1622 VISIT_SEQ(c, stmt, s->v.If.orelse);
1624 compiler_use_next_block(c, end);
1625 return 1;
1628 static int
1629 compiler_for(struct compiler *c, stmt_ty s)
1631 basicblock *start, *cleanup, *end;
1633 start = compiler_new_block(c);
1634 cleanup = compiler_new_block(c);
1635 end = compiler_new_block(c);
1636 if (start == NULL || end == NULL || cleanup == NULL)
1637 return 0;
1638 ADDOP_JREL(c, SETUP_LOOP, end);
1639 if (!compiler_push_fblock(c, LOOP, start))
1640 return 0;
1641 VISIT(c, expr, s->v.For.iter);
1642 ADDOP(c, GET_ITER);
1643 compiler_use_next_block(c, start);
1644 /* for expressions must be traced on each iteration,
1645 so we need to set an extra line number. */
1646 c->u->u_lineno_set = false;
1647 ADDOP_JREL(c, FOR_ITER, cleanup);
1648 VISIT(c, expr, s->v.For.target);
1649 VISIT_SEQ(c, stmt, s->v.For.body);
1650 ADDOP_JABS(c, JUMP_ABSOLUTE, start);
1651 compiler_use_next_block(c, cleanup);
1652 ADDOP(c, POP_BLOCK);
1653 compiler_pop_fblock(c, LOOP, start);
1654 VISIT_SEQ(c, stmt, s->v.For.orelse);
1655 compiler_use_next_block(c, end);
1656 return 1;
1659 static int
1660 compiler_while(struct compiler *c, stmt_ty s)
1662 basicblock *loop, *orelse, *end, *anchor = NULL;
1663 int constant = expr_constant(s->v.While.test);
1665 if (constant == 0) {
1666 if (s->v.While.orelse)
1667 VISIT_SEQ(c, stmt, s->v.While.orelse);
1668 return 1;
1670 loop = compiler_new_block(c);
1671 end = compiler_new_block(c);
1672 if (constant == -1) {
1673 anchor = compiler_new_block(c);
1674 if (anchor == NULL)
1675 return 0;
1677 if (loop == NULL || end == NULL)
1678 return 0;
1679 if (s->v.While.orelse) {
1680 orelse = compiler_new_block(c);
1681 if (orelse == NULL)
1682 return 0;
1684 else
1685 orelse = NULL;
1687 ADDOP_JREL(c, SETUP_LOOP, end);
1688 compiler_use_next_block(c, loop);
1689 if (!compiler_push_fblock(c, LOOP, loop))
1690 return 0;
1691 if (constant == -1) {
1692 /* while expressions must be traced on each iteration,
1693 so we need to set an extra line number. */
1694 c->u->u_lineno_set = false;
1695 VISIT(c, expr, s->v.While.test);
1696 ADDOP_JREL(c, JUMP_IF_FALSE, anchor);
1697 ADDOP(c, POP_TOP);
1699 VISIT_SEQ(c, stmt, s->v.While.body);
1700 ADDOP_JABS(c, JUMP_ABSOLUTE, loop);
1702 /* XXX should the two POP instructions be in a separate block
1703 if there is no else clause ?
1706 if (constant == -1) {
1707 compiler_use_next_block(c, anchor);
1708 ADDOP(c, POP_TOP);
1709 ADDOP(c, POP_BLOCK);
1711 compiler_pop_fblock(c, LOOP, loop);
1712 if (orelse != NULL) /* what if orelse is just pass? */
1713 VISIT_SEQ(c, stmt, s->v.While.orelse);
1714 compiler_use_next_block(c, end);
1716 return 1;
1719 static int
1720 compiler_continue(struct compiler *c)
1722 static const char LOOP_ERROR_MSG[] = "'continue' not properly in loop";
1723 static const char IN_FINALLY_ERROR_MSG[] =
1724 "'continue' not supported inside 'finally' clause";
1725 int i;
1727 if (!c->u->u_nfblocks)
1728 return compiler_error(c, LOOP_ERROR_MSG);
1729 i = c->u->u_nfblocks - 1;
1730 switch (c->u->u_fblock[i].fb_type) {
1731 case LOOP:
1732 ADDOP_JABS(c, JUMP_ABSOLUTE, c->u->u_fblock[i].fb_block);
1733 break;
1734 case EXCEPT:
1735 case FINALLY_TRY:
1736 while (--i >= 0 && c->u->u_fblock[i].fb_type != LOOP) {
1737 /* Prevent continue anywhere under a finally
1738 even if hidden in a sub-try or except. */
1739 if (c->u->u_fblock[i].fb_type == FINALLY_END)
1740 return compiler_error(c, IN_FINALLY_ERROR_MSG);
1742 if (i == -1)
1743 return compiler_error(c, LOOP_ERROR_MSG);
1744 ADDOP_JABS(c, CONTINUE_LOOP, c->u->u_fblock[i].fb_block);
1745 break;
1746 case FINALLY_END:
1747 return compiler_error(c, IN_FINALLY_ERROR_MSG);
1750 return 1;
1753 /* Code generated for "try: <body> finally: <finalbody>" is as follows:
1755 SETUP_FINALLY L
1756 <code for body>
1757 POP_BLOCK
1758 LOAD_CONST <None>
1759 L: <code for finalbody>
1760 END_FINALLY
1762 The special instructions use the block stack. Each block
1763 stack entry contains the instruction that created it (here
1764 SETUP_FINALLY), the level of the value stack at the time the
1765 block stack entry was created, and a label (here L).
1767 SETUP_FINALLY:
1768 Pushes the current value stack level and the label
1769 onto the block stack.
1770 POP_BLOCK:
1771 Pops an entry from the block stack, and pops the value
1772 stack until its level is the same as indicated on the
1773 block stack. (The label is ignored.)
1774 END_FINALLY:
1775 Pops a variable number of entries from the *value* stack
1776 and re-raises the exception they specify. The number of
1777 entries popped depends on the (pseudo) exception type.
1779 The block stack is unwound when an exception is raised:
1780 when a SETUP_FINALLY entry is found, the exception is pushed
1781 onto the value stack (and the exception condition is cleared),
1782 and the interpreter jumps to the label gotten from the block
1783 stack.
1786 static int
1787 compiler_try_finally(struct compiler *c, stmt_ty s)
1789 basicblock *body, *end;
1790 body = compiler_new_block(c);
1791 end = compiler_new_block(c);
1792 if (body == NULL || end == NULL)
1793 return 0;
1795 ADDOP_JREL(c, SETUP_FINALLY, end);
1796 compiler_use_next_block(c, body);
1797 if (!compiler_push_fblock(c, FINALLY_TRY, body))
1798 return 0;
1799 VISIT_SEQ(c, stmt, s->v.TryFinally.body);
1800 ADDOP(c, POP_BLOCK);
1801 compiler_pop_fblock(c, FINALLY_TRY, body);
1803 ADDOP_O(c, LOAD_CONST, Py_None, consts);
1804 compiler_use_next_block(c, end);
1805 if (!compiler_push_fblock(c, FINALLY_END, end))
1806 return 0;
1807 VISIT_SEQ(c, stmt, s->v.TryFinally.finalbody);
1808 ADDOP(c, END_FINALLY);
1809 compiler_pop_fblock(c, FINALLY_END, end);
1811 return 1;
1815 Code generated for "try: S except E1, V1: S1 except E2, V2: S2 ...":
1816 (The contents of the value stack is shown in [], with the top
1817 at the right; 'tb' is trace-back info, 'val' the exception's
1818 associated value, and 'exc' the exception.)
1820 Value stack Label Instruction Argument
1821 [] SETUP_EXCEPT L1
1822 [] <code for S>
1823 [] POP_BLOCK
1824 [] JUMP_FORWARD L0
1826 [tb, val, exc] L1: DUP )
1827 [tb, val, exc, exc] <evaluate E1> )
1828 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1
1829 [tb, val, exc, 1-or-0] JUMP_IF_FALSE L2 )
1830 [tb, val, exc, 1] POP )
1831 [tb, val, exc] POP
1832 [tb, val] <assign to V1> (or POP if no V1)
1833 [tb] POP
1834 [] <code for S1>
1835 JUMP_FORWARD L0
1837 [tb, val, exc, 0] L2: POP
1838 [tb, val, exc] DUP
1839 .............................etc.......................
1841 [tb, val, exc, 0] Ln+1: POP
1842 [tb, val, exc] END_FINALLY # re-raise exception
1844 [] L0: <next statement>
1846 Of course, parts are not generated if Vi or Ei is not present.
1848 static int
1849 compiler_try_except(struct compiler *c, stmt_ty s)
1851 basicblock *body, *orelse, *except, *end;
1852 int i, n;
1854 body = compiler_new_block(c);
1855 except = compiler_new_block(c);
1856 orelse = compiler_new_block(c);
1857 end = compiler_new_block(c);
1858 if (body == NULL || except == NULL || orelse == NULL || end == NULL)
1859 return 0;
1860 ADDOP_JREL(c, SETUP_EXCEPT, except);
1861 compiler_use_next_block(c, body);
1862 if (!compiler_push_fblock(c, EXCEPT, body))
1863 return 0;
1864 VISIT_SEQ(c, stmt, s->v.TryExcept.body);
1865 ADDOP(c, POP_BLOCK);
1866 compiler_pop_fblock(c, EXCEPT, body);
1867 ADDOP_JREL(c, JUMP_FORWARD, orelse);
1868 n = asdl_seq_LEN(s->v.TryExcept.handlers);
1869 compiler_use_next_block(c, except);
1870 for (i = 0; i < n; i++) {
1871 excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
1872 s->v.TryExcept.handlers, i);
1873 if (!handler->v.ExceptHandler.type && i < n-1)
1874 return compiler_error(c, "default 'except:' must be last");
1875 c->u->u_lineno_set = false;
1876 c->u->u_lineno = handler->lineno;
1877 except = compiler_new_block(c);
1878 if (except == NULL)
1879 return 0;
1880 if (handler->v.ExceptHandler.type) {
1881 ADDOP(c, DUP_TOP);
1882 VISIT(c, expr, handler->v.ExceptHandler.type);
1883 ADDOP_I(c, COMPARE_OP, PyCmp_EXC_MATCH);
1884 ADDOP_JREL(c, JUMP_IF_FALSE, except);
1885 ADDOP(c, POP_TOP);
1887 ADDOP(c, POP_TOP);
1888 if (handler->v.ExceptHandler.name) {
1889 VISIT(c, expr, handler->v.ExceptHandler.name);
1891 else {
1892 ADDOP(c, POP_TOP);
1894 ADDOP(c, POP_TOP);
1895 VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
1896 ADDOP_JREL(c, JUMP_FORWARD, end);
1897 compiler_use_next_block(c, except);
1898 if (handler->v.ExceptHandler.type)
1899 ADDOP(c, POP_TOP);
1901 ADDOP(c, END_FINALLY);
1902 compiler_use_next_block(c, orelse);
1903 VISIT_SEQ(c, stmt, s->v.TryExcept.orelse);
1904 compiler_use_next_block(c, end);
1905 return 1;
1908 static int
1909 compiler_import_as(struct compiler *c, identifier name, identifier asname)
1911 /* The IMPORT_NAME opcode was already generated. This function
1912 merely needs to bind the result to a name.
1914 If there is a dot in name, we need to split it and emit a
1915 LOAD_ATTR for each name.
1917 const char *src = PyString_AS_STRING(name);
1918 const char *dot = strchr(src, '.');
1919 if (dot) {
1920 /* Consume the base module name to get the first attribute */
1921 src = dot + 1;
1922 while (dot) {
1923 /* NB src is only defined when dot != NULL */
1924 PyObject *attr;
1925 dot = strchr(src, '.');
1926 attr = PyString_FromStringAndSize(src,
1927 dot ? dot - src : strlen(src));
1928 if (!attr)
1929 return -1;
1930 ADDOP_O(c, LOAD_ATTR, attr, names);
1931 Py_DECREF(attr);
1932 src = dot + 1;
1935 return compiler_nameop(c, asname, Store);
1938 static int
1939 compiler_import(struct compiler *c, stmt_ty s)
1941 /* The Import node stores a module name like a.b.c as a single
1942 string. This is convenient for all cases except
1943 import a.b.c as d
1944 where we need to parse that string to extract the individual
1945 module names.
1946 XXX Perhaps change the representation to make this case simpler?
1948 int i, n = asdl_seq_LEN(s->v.Import.names);
1950 for (i = 0; i < n; i++) {
1951 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i);
1952 int r;
1953 PyObject *level;
1955 if (c->c_flags && (c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT))
1956 level = PyInt_FromLong(0);
1957 else
1958 level = PyInt_FromLong(-1);
1960 if (level == NULL)
1961 return 0;
1963 ADDOP_O(c, LOAD_CONST, level, consts);
1964 Py_DECREF(level);
1965 ADDOP_O(c, LOAD_CONST, Py_None, consts);
1966 ADDOP_NAME(c, IMPORT_NAME, alias->name, names);
1968 if (alias->asname) {
1969 r = compiler_import_as(c, alias->name, alias->asname);
1970 if (!r)
1971 return r;
1973 else {
1974 identifier tmp = alias->name;
1975 const char *base = PyString_AS_STRING(alias->name);
1976 char *dot = strchr(base, '.');
1977 if (dot)
1978 tmp = PyString_FromStringAndSize(base,
1979 dot - base);
1980 r = compiler_nameop(c, tmp, Store);
1981 if (dot) {
1982 Py_DECREF(tmp);
1984 if (!r)
1985 return r;
1988 return 1;
1991 static int
1992 compiler_from_import(struct compiler *c, stmt_ty s)
1994 int i, n = asdl_seq_LEN(s->v.ImportFrom.names);
1996 PyObject *names = PyTuple_New(n);
1997 PyObject *level;
1999 if (!names)
2000 return 0;
2002 if (s->v.ImportFrom.level == 0 && c->c_flags &&
2003 !(c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT))
2004 level = PyInt_FromLong(-1);
2005 else
2006 level = PyInt_FromLong(s->v.ImportFrom.level);
2008 if (!level) {
2009 Py_DECREF(names);
2010 return 0;
2013 /* build up the names */
2014 for (i = 0; i < n; i++) {
2015 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
2016 Py_INCREF(alias->name);
2017 PyTuple_SET_ITEM(names, i, alias->name);
2020 if (s->lineno > c->c_future->ff_lineno) {
2021 if (!strcmp(PyString_AS_STRING(s->v.ImportFrom.module),
2022 "__future__")) {
2023 Py_DECREF(level);
2024 Py_DECREF(names);
2025 return compiler_error(c,
2026 "from __future__ imports must occur "
2027 "at the beginning of the file");
2032 ADDOP_O(c, LOAD_CONST, level, consts);
2033 Py_DECREF(level);
2034 ADDOP_O(c, LOAD_CONST, names, consts);
2035 Py_DECREF(names);
2036 ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names);
2037 for (i = 0; i < n; i++) {
2038 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
2039 identifier store_name;
2041 if (i == 0 && *PyString_AS_STRING(alias->name) == '*') {
2042 assert(n == 1);
2043 ADDOP(c, IMPORT_STAR);
2044 return 1;
2047 ADDOP_NAME(c, IMPORT_FROM, alias->name, names);
2048 store_name = alias->name;
2049 if (alias->asname)
2050 store_name = alias->asname;
2052 if (!compiler_nameop(c, store_name, Store)) {
2053 Py_DECREF(names);
2054 return 0;
2057 /* remove imported module */
2058 ADDOP(c, POP_TOP);
2059 return 1;
2062 static int
2063 compiler_assert(struct compiler *c, stmt_ty s)
2065 static PyObject *assertion_error = NULL;
2066 basicblock *end;
2068 if (Py_OptimizeFlag)
2069 return 1;
2070 if (assertion_error == NULL) {
2071 assertion_error = PyString_InternFromString("AssertionError");
2072 if (assertion_error == NULL)
2073 return 0;
2075 if (s->v.Assert.test->kind == Tuple_kind &&
2076 asdl_seq_LEN(s->v.Assert.test->v.Tuple.elts) > 0) {
2077 const char* msg =
2078 "assertion is always true, perhaps remove parentheses?";
2079 if (PyErr_WarnExplicit(PyExc_SyntaxWarning, msg, c->c_filename,
2080 c->u->u_lineno, NULL, NULL) == -1)
2081 return 0;
2083 VISIT(c, expr, s->v.Assert.test);
2084 end = compiler_new_block(c);
2085 if (end == NULL)
2086 return 0;
2087 ADDOP_JREL(c, JUMP_IF_TRUE, end);
2088 ADDOP(c, POP_TOP);
2089 ADDOP_O(c, LOAD_GLOBAL, assertion_error, names);
2090 if (s->v.Assert.msg) {
2091 VISIT(c, expr, s->v.Assert.msg);
2092 ADDOP_I(c, RAISE_VARARGS, 2);
2094 else {
2095 ADDOP_I(c, RAISE_VARARGS, 1);
2097 compiler_use_next_block(c, end);
2098 ADDOP(c, POP_TOP);
2099 return 1;
2102 static int
2103 compiler_visit_stmt(struct compiler *c, stmt_ty s)
2105 int i, n;
2107 /* Always assign a lineno to the next instruction for a stmt. */
2108 c->u->u_lineno = s->lineno;
2109 c->u->u_lineno_set = false;
2111 switch (s->kind) {
2112 case FunctionDef_kind:
2113 return compiler_function(c, s);
2114 case ClassDef_kind:
2115 return compiler_class(c, s);
2116 case Return_kind:
2117 if (c->u->u_ste->ste_type != FunctionBlock)
2118 return compiler_error(c, "'return' outside function");
2119 if (s->v.Return.value) {
2120 VISIT(c, expr, s->v.Return.value);
2122 else
2123 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2124 ADDOP(c, RETURN_VALUE);
2125 break;
2126 case Delete_kind:
2127 VISIT_SEQ(c, expr, s->v.Delete.targets)
2128 break;
2129 case Assign_kind:
2130 n = asdl_seq_LEN(s->v.Assign.targets);
2131 VISIT(c, expr, s->v.Assign.value);
2132 for (i = 0; i < n; i++) {
2133 if (i < n - 1)
2134 ADDOP(c, DUP_TOP);
2135 VISIT(c, expr,
2136 (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
2138 break;
2139 case AugAssign_kind:
2140 return compiler_augassign(c, s);
2141 case Print_kind:
2142 return compiler_print(c, s);
2143 case For_kind:
2144 return compiler_for(c, s);
2145 case While_kind:
2146 return compiler_while(c, s);
2147 case If_kind:
2148 return compiler_if(c, s);
2149 case Raise_kind:
2150 n = 0;
2151 if (s->v.Raise.type) {
2152 VISIT(c, expr, s->v.Raise.type);
2153 n++;
2154 if (s->v.Raise.inst) {
2155 VISIT(c, expr, s->v.Raise.inst);
2156 n++;
2157 if (s->v.Raise.tback) {
2158 VISIT(c, expr, s->v.Raise.tback);
2159 n++;
2163 ADDOP_I(c, RAISE_VARARGS, n);
2164 break;
2165 case TryExcept_kind:
2166 return compiler_try_except(c, s);
2167 case TryFinally_kind:
2168 return compiler_try_finally(c, s);
2169 case Assert_kind:
2170 return compiler_assert(c, s);
2171 case Import_kind:
2172 return compiler_import(c, s);
2173 case ImportFrom_kind:
2174 return compiler_from_import(c, s);
2175 case Exec_kind:
2176 VISIT(c, expr, s->v.Exec.body);
2177 if (s->v.Exec.globals) {
2178 VISIT(c, expr, s->v.Exec.globals);
2179 if (s->v.Exec.locals) {
2180 VISIT(c, expr, s->v.Exec.locals);
2181 } else {
2182 ADDOP(c, DUP_TOP);
2184 } else {
2185 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2186 ADDOP(c, DUP_TOP);
2188 ADDOP(c, EXEC_STMT);
2189 break;
2190 case Global_kind:
2191 break;
2192 case Expr_kind:
2193 if (c->c_interactive && c->c_nestlevel <= 1) {
2194 VISIT(c, expr, s->v.Expr.value);
2195 ADDOP(c, PRINT_EXPR);
2197 else if (s->v.Expr.value->kind != Str_kind &&
2198 s->v.Expr.value->kind != Num_kind) {
2199 VISIT(c, expr, s->v.Expr.value);
2200 ADDOP(c, POP_TOP);
2202 break;
2203 case Pass_kind:
2204 break;
2205 case Break_kind:
2206 if (!compiler_in_loop(c))
2207 return compiler_error(c, "'break' outside loop");
2208 ADDOP(c, BREAK_LOOP);
2209 break;
2210 case Continue_kind:
2211 return compiler_continue(c);
2212 case With_kind:
2213 return compiler_with(c, s);
2215 return 1;
2218 static int
2219 unaryop(unaryop_ty op)
2221 switch (op) {
2222 case Invert:
2223 return UNARY_INVERT;
2224 case Not:
2225 return UNARY_NOT;
2226 case UAdd:
2227 return UNARY_POSITIVE;
2228 case USub:
2229 return UNARY_NEGATIVE;
2230 default:
2231 PyErr_Format(PyExc_SystemError,
2232 "unary op %d should not be possible", op);
2233 return 0;
2237 static int
2238 binop(struct compiler *c, operator_ty op)
2240 switch (op) {
2241 case Add:
2242 return BINARY_ADD;
2243 case Sub:
2244 return BINARY_SUBTRACT;
2245 case Mult:
2246 return BINARY_MULTIPLY;
2247 case Div:
2248 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2249 return BINARY_TRUE_DIVIDE;
2250 else
2251 return BINARY_DIVIDE;
2252 case Mod:
2253 return BINARY_MODULO;
2254 case Pow:
2255 return BINARY_POWER;
2256 case LShift:
2257 return BINARY_LSHIFT;
2258 case RShift:
2259 return BINARY_RSHIFT;
2260 case BitOr:
2261 return BINARY_OR;
2262 case BitXor:
2263 return BINARY_XOR;
2264 case BitAnd:
2265 return BINARY_AND;
2266 case FloorDiv:
2267 return BINARY_FLOOR_DIVIDE;
2268 default:
2269 PyErr_Format(PyExc_SystemError,
2270 "binary op %d should not be possible", op);
2271 return 0;
2275 static int
2276 cmpop(cmpop_ty op)
2278 switch (op) {
2279 case Eq:
2280 return PyCmp_EQ;
2281 case NotEq:
2282 return PyCmp_NE;
2283 case Lt:
2284 return PyCmp_LT;
2285 case LtE:
2286 return PyCmp_LE;
2287 case Gt:
2288 return PyCmp_GT;
2289 case GtE:
2290 return PyCmp_GE;
2291 case Is:
2292 return PyCmp_IS;
2293 case IsNot:
2294 return PyCmp_IS_NOT;
2295 case In:
2296 return PyCmp_IN;
2297 case NotIn:
2298 return PyCmp_NOT_IN;
2299 default:
2300 return PyCmp_BAD;
2304 static int
2305 inplace_binop(struct compiler *c, operator_ty op)
2307 switch (op) {
2308 case Add:
2309 return INPLACE_ADD;
2310 case Sub:
2311 return INPLACE_SUBTRACT;
2312 case Mult:
2313 return INPLACE_MULTIPLY;
2314 case Div:
2315 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2316 return INPLACE_TRUE_DIVIDE;
2317 else
2318 return INPLACE_DIVIDE;
2319 case Mod:
2320 return INPLACE_MODULO;
2321 case Pow:
2322 return INPLACE_POWER;
2323 case LShift:
2324 return INPLACE_LSHIFT;
2325 case RShift:
2326 return INPLACE_RSHIFT;
2327 case BitOr:
2328 return INPLACE_OR;
2329 case BitXor:
2330 return INPLACE_XOR;
2331 case BitAnd:
2332 return INPLACE_AND;
2333 case FloorDiv:
2334 return INPLACE_FLOOR_DIVIDE;
2335 default:
2336 PyErr_Format(PyExc_SystemError,
2337 "inplace binary op %d should not be possible", op);
2338 return 0;
2342 static int
2343 compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx)
2345 int op, scope, arg;
2346 enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype;
2348 PyObject *dict = c->u->u_names;
2349 PyObject *mangled;
2350 /* XXX AugStore isn't used anywhere! */
2352 mangled = _Py_Mangle(c->u->u_private, name);
2353 if (!mangled)
2354 return 0;
2356 op = 0;
2357 optype = OP_NAME;
2358 scope = PyST_GetScope(c->u->u_ste, mangled);
2359 switch (scope) {
2360 case FREE:
2361 dict = c->u->u_freevars;
2362 optype = OP_DEREF;
2363 break;
2364 case CELL:
2365 dict = c->u->u_cellvars;
2366 optype = OP_DEREF;
2367 break;
2368 case LOCAL:
2369 if (c->u->u_ste->ste_type == FunctionBlock)
2370 optype = OP_FAST;
2371 break;
2372 case GLOBAL_IMPLICIT:
2373 if (c->u->u_ste->ste_type == FunctionBlock &&
2374 !c->u->u_ste->ste_unoptimized)
2375 optype = OP_GLOBAL;
2376 break;
2377 case GLOBAL_EXPLICIT:
2378 optype = OP_GLOBAL;
2379 break;
2380 default:
2381 /* scope can be 0 */
2382 break;
2385 /* XXX Leave assert here, but handle __doc__ and the like better */
2386 assert(scope || PyString_AS_STRING(name)[0] == '_');
2388 switch (optype) {
2389 case OP_DEREF:
2390 switch (ctx) {
2391 case Load: op = LOAD_DEREF; break;
2392 case Store: op = STORE_DEREF; break;
2393 case AugLoad:
2394 case AugStore:
2395 break;
2396 case Del:
2397 PyErr_Format(PyExc_SyntaxError,
2398 "can not delete variable '%s' referenced "
2399 "in nested scope",
2400 PyString_AS_STRING(name));
2401 Py_DECREF(mangled);
2402 return 0;
2403 case Param:
2404 default:
2405 PyErr_SetString(PyExc_SystemError,
2406 "param invalid for deref variable");
2407 return 0;
2409 break;
2410 case OP_FAST:
2411 switch (ctx) {
2412 case Load: op = LOAD_FAST; break;
2413 case Store: op = STORE_FAST; break;
2414 case Del: op = DELETE_FAST; break;
2415 case AugLoad:
2416 case AugStore:
2417 break;
2418 case Param:
2419 default:
2420 PyErr_SetString(PyExc_SystemError,
2421 "param invalid for local variable");
2422 return 0;
2424 ADDOP_O(c, op, mangled, varnames);
2425 Py_DECREF(mangled);
2426 return 1;
2427 case OP_GLOBAL:
2428 switch (ctx) {
2429 case Load: op = LOAD_GLOBAL; break;
2430 case Store: op = STORE_GLOBAL; break;
2431 case Del: op = DELETE_GLOBAL; break;
2432 case AugLoad:
2433 case AugStore:
2434 break;
2435 case Param:
2436 default:
2437 PyErr_SetString(PyExc_SystemError,
2438 "param invalid for global variable");
2439 return 0;
2441 break;
2442 case OP_NAME:
2443 switch (ctx) {
2444 case Load: op = LOAD_NAME; break;
2445 case Store: op = STORE_NAME; break;
2446 case Del: op = DELETE_NAME; break;
2447 case AugLoad:
2448 case AugStore:
2449 break;
2450 case Param:
2451 default:
2452 PyErr_SetString(PyExc_SystemError,
2453 "param invalid for name variable");
2454 return 0;
2456 break;
2459 assert(op);
2460 arg = compiler_add_o(c, dict, mangled);
2461 Py_DECREF(mangled);
2462 if (arg < 0)
2463 return 0;
2464 return compiler_addop_i(c, op, arg);
2467 static int
2468 compiler_boolop(struct compiler *c, expr_ty e)
2470 basicblock *end;
2471 int jumpi, i, n;
2472 asdl_seq *s;
2474 assert(e->kind == BoolOp_kind);
2475 if (e->v.BoolOp.op == And)
2476 jumpi = JUMP_IF_FALSE;
2477 else
2478 jumpi = JUMP_IF_TRUE;
2479 end = compiler_new_block(c);
2480 if (end == NULL)
2481 return 0;
2482 s = e->v.BoolOp.values;
2483 n = asdl_seq_LEN(s) - 1;
2484 assert(n >= 0);
2485 for (i = 0; i < n; ++i) {
2486 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i));
2487 ADDOP_JREL(c, jumpi, end);
2488 ADDOP(c, POP_TOP)
2490 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
2491 compiler_use_next_block(c, end);
2492 return 1;
2495 static int
2496 compiler_list(struct compiler *c, expr_ty e)
2498 int n = asdl_seq_LEN(e->v.List.elts);
2499 if (e->v.List.ctx == Store) {
2500 ADDOP_I(c, UNPACK_SEQUENCE, n);
2502 VISIT_SEQ(c, expr, e->v.List.elts);
2503 if (e->v.List.ctx == Load) {
2504 ADDOP_I(c, BUILD_LIST, n);
2506 return 1;
2509 static int
2510 compiler_tuple(struct compiler *c, expr_ty e)
2512 int n = asdl_seq_LEN(e->v.Tuple.elts);
2513 if (e->v.Tuple.ctx == Store) {
2514 ADDOP_I(c, UNPACK_SEQUENCE, n);
2516 VISIT_SEQ(c, expr, e->v.Tuple.elts);
2517 if (e->v.Tuple.ctx == Load) {
2518 ADDOP_I(c, BUILD_TUPLE, n);
2520 return 1;
/* Compile a comparison, possibly chained (a < b <= c).
   For a chain, each intermediate operand is duplicated so it can serve
   as the left operand of the next comparison; 'cleanup' pops the extra
   copy when an early comparison is false. */
static int
compiler_compare(struct compiler *c, expr_ty e)
{
	int i, n;
	basicblock *cleanup = NULL;

	/* XXX the logic can be cleaned up for 1 or multiple comparisons */
	VISIT(c, expr, e->v.Compare.left);
	n = asdl_seq_LEN(e->v.Compare.ops);
	assert(n > 0);
	if (n > 1) {
		cleanup = compiler_new_block(c);
		if (cleanup == NULL)
			return 0;
		VISIT(c, expr,
			(expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0));
	}
	for (i = 1; i < n; i++) {
		/* Keep a copy of the right operand: it is the left operand
		   of the next comparison in the chain. */
		ADDOP(c, DUP_TOP);
		ADDOP(c, ROT_THREE);
		ADDOP_I(c, COMPARE_OP,
			cmpop((cmpop_ty)(asdl_seq_GET(
						  e->v.Compare.ops, i - 1))));
		ADDOP_JREL(c, JUMP_IF_FALSE, cleanup);
		NEXT_BLOCK(c);
		ADDOP(c, POP_TOP);
		if (i < (n - 1))
			VISIT(c, expr,
				(expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
	}
	VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n - 1));
	ADDOP_I(c, COMPARE_OP,
	       cmpop((cmpop_ty)(asdl_seq_GET(e->v.Compare.ops, n - 1))));
	if (n > 1) {
		basicblock *end = compiler_new_block(c);
		if (end == NULL)
			return 0;
		ADDOP_JREL(c, JUMP_FORWARD, end);
		/* cleanup: drop the duplicated operand, keep the False. */
		compiler_use_next_block(c, cleanup);
		ADDOP(c, ROT_TWO);
		ADDOP(c, POP_TOP);
		compiler_use_next_block(c, end);
	}
	return 1;
}
2569 static int
2570 compiler_call(struct compiler *c, expr_ty e)
2572 int n, code = 0;
2574 VISIT(c, expr, e->v.Call.func);
2575 n = asdl_seq_LEN(e->v.Call.args);
2576 VISIT_SEQ(c, expr, e->v.Call.args);
2577 if (e->v.Call.keywords) {
2578 VISIT_SEQ(c, keyword, e->v.Call.keywords);
2579 n |= asdl_seq_LEN(e->v.Call.keywords) << 8;
2581 if (e->v.Call.starargs) {
2582 VISIT(c, expr, e->v.Call.starargs);
2583 code |= 1;
2585 if (e->v.Call.kwargs) {
2586 VISIT(c, expr, e->v.Call.kwargs);
2587 code |= 2;
2589 switch (code) {
2590 case 0:
2591 ADDOP_I(c, CALL_FUNCTION, n);
2592 break;
2593 case 1:
2594 ADDOP_I(c, CALL_FUNCTION_VAR, n);
2595 break;
2596 case 2:
2597 ADDOP_I(c, CALL_FUNCTION_KW, n);
2598 break;
2599 case 3:
2600 ADDOP_I(c, CALL_FUNCTION_VAR_KW, n);
2601 break;
2603 return 1;
/* Emit the loop for generator number 'gen_index' of a list
   comprehension, recursing for nested 'for' clauses; the innermost
   level appends 'elt' to the list being built. */
static int
compiler_listcomp_generator(struct compiler *c, asdl_seq *generators,
                            int gen_index, expr_ty elt)
{
	/* generate code for the iterator, then each of the ifs,
	   and then write to the element */

	comprehension_ty l;
	basicblock *start, *anchor, *skip, *if_cleanup;
	int i, n;

	start = compiler_new_block(c);
	skip = compiler_new_block(c);
	if_cleanup = compiler_new_block(c);
	anchor = compiler_new_block(c);

	if (start == NULL || skip == NULL || if_cleanup == NULL ||
		anchor == NULL)
		return 0;

	l = (comprehension_ty)asdl_seq_GET(generators, gen_index);
	VISIT(c, expr, l->iter);
	ADDOP(c, GET_ITER);
	compiler_use_next_block(c, start);
	ADDOP_JREL(c, FOR_ITER, anchor);
	NEXT_BLOCK(c);
	VISIT(c, expr, l->target);

	/* XXX this needs to be cleaned up...a lot! */
	n = asdl_seq_LEN(l->ifs);
	for (i = 0; i < n; i++) {
		expr_ty e = (expr_ty)asdl_seq_GET(l->ifs, i);
		VISIT(c, expr, e);
		ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup);
		NEXT_BLOCK(c);
		ADDOP(c, POP_TOP);
	}

	if (++gen_index < asdl_seq_LEN(generators))
		if (!compiler_listcomp_generator(c, generators, gen_index, elt))
			return 0;

	/* only append after the last for generator */
	if (gen_index >= asdl_seq_LEN(generators)) {
		VISIT(c, expr, elt);
		/* The list under construction lives gen_index+1 entries
		   down the stack, below each loop's iterator. */
		ADDOP_I(c, LIST_APPEND, gen_index+1);

		compiler_use_next_block(c, skip);
	}
	for (i = 0; i < n; i++) {
		/* Hop over if_cleanup's POP_TOP on the fall-through path. */
		ADDOP_I(c, JUMP_FORWARD, 1);
		if (i == 0)
			compiler_use_next_block(c, if_cleanup);
		ADDOP(c, POP_TOP);
	}
	ADDOP_JABS(c, JUMP_ABSOLUTE, start);
	compiler_use_next_block(c, anchor);

	return 1;
}
/* Compile a list comprehension: push an empty list, then emit the
   nested loops and conditions via compiler_listcomp_generator(). */
static int
compiler_listcomp(struct compiler *c, expr_ty e)
{
	assert(e->kind == ListComp_kind);
	ADDOP_I(c, BUILD_LIST, 0);
	return compiler_listcomp_generator(c, e->v.ListComp.generators, 0,
					   e->v.ListComp.elt);
}
/* Emit the loop for generator number 'gen_index' of a generator
   expression, recursing for nested 'for' clauses; the innermost level
   yields 'elt'.  Runs inside the implicit generator function's scope. */
static int
compiler_genexp_generator(struct compiler *c,
                          asdl_seq *generators, int gen_index,
                          expr_ty elt)
{
	/* generate code for the iterator, then each of the ifs,
	   and then write to the element */

	comprehension_ty ge;
	basicblock *start, *anchor, *skip, *if_cleanup, *end;
	int i, n;

	start = compiler_new_block(c);
	skip = compiler_new_block(c);
	if_cleanup = compiler_new_block(c);
	anchor = compiler_new_block(c);
	end = compiler_new_block(c);

	if (start == NULL || skip == NULL || if_cleanup == NULL ||
	    anchor == NULL || end == NULL)
		return 0;

	ge = (comprehension_ty)asdl_seq_GET(generators, gen_index);
	ADDOP_JREL(c, SETUP_LOOP, end);
	if (!compiler_push_fblock(c, LOOP, start))
		return 0;

	if (gen_index == 0) {
		/* Receive outermost iter as an implicit argument */
		c->u->u_argcount = 1;
		ADDOP_I(c, LOAD_FAST, 0);
	}
	else {
		/* Sub-iter - calculate on the fly */
		VISIT(c, expr, ge->iter);
		ADDOP(c, GET_ITER);
	}
	compiler_use_next_block(c, start);
	ADDOP_JREL(c, FOR_ITER, anchor);
	NEXT_BLOCK(c);
	VISIT(c, expr, ge->target);

	/* XXX this needs to be cleaned up...a lot! */
	n = asdl_seq_LEN(ge->ifs);
	for (i = 0; i < n; i++) {
		expr_ty e = (expr_ty)asdl_seq_GET(ge->ifs, i);
		VISIT(c, expr, e);
		ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup);
		NEXT_BLOCK(c);
		ADDOP(c, POP_TOP);
	}

	if (++gen_index < asdl_seq_LEN(generators))
		if (!compiler_genexp_generator(c, generators, gen_index, elt))
			return 0;

	/* only append after the last 'for' generator */
	if (gen_index >= asdl_seq_LEN(generators)) {
		VISIT(c, expr, elt);
		ADDOP(c, YIELD_VALUE);
		ADDOP(c, POP_TOP);

		compiler_use_next_block(c, skip);
	}
	for (i = 0; i < n; i++) {
		/* Hop over if_cleanup's POP_TOP on the fall-through path. */
		ADDOP_I(c, JUMP_FORWARD, 1);
		if (i == 0)
			compiler_use_next_block(c, if_cleanup);

		ADDOP(c, POP_TOP);
	}
	ADDOP_JABS(c, JUMP_ABSOLUTE, start);
	compiler_use_next_block(c, anchor);
	ADDOP(c, POP_BLOCK);
	compiler_pop_fblock(c, LOOP, start);
	compiler_use_next_block(c, end);

	return 1;
}
2756 static int
2757 compiler_genexp(struct compiler *c, expr_ty e)
2759 static identifier name;
2760 PyCodeObject *co;
2761 expr_ty outermost_iter = ((comprehension_ty)
2762 (asdl_seq_GET(e->v.GeneratorExp.generators,
2763 0)))->iter;
2765 if (!name) {
2766 name = PyString_FromString("<genexpr>");
2767 if (!name)
2768 return 0;
2771 if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
2772 return 0;
2773 compiler_genexp_generator(c, e->v.GeneratorExp.generators, 0,
2774 e->v.GeneratorExp.elt);
2775 co = assemble(c, 1);
2776 compiler_exit_scope(c);
2777 if (co == NULL)
2778 return 0;
2780 compiler_make_closure(c, co, 0);
2781 Py_DECREF(co);
2783 VISIT(c, expr, outermost_iter);
2784 ADDOP(c, GET_ITER);
2785 ADDOP_I(c, CALL_FUNCTION, 1);
2787 return 1;
/* Compile one keyword argument of a call: push the keyword name as a
   constant, then the value expression. */
static int
compiler_visit_keyword(struct compiler *c, keyword_ty k)
{
	ADDOP_O(c, LOAD_CONST, k->arg, consts);
	VISIT(c, expr, k->value);
	return 1;
}
2798 /* Test whether expression is constant. For constants, report
2799 whether they are true or false.
2801 Return values: 1 for true, 0 for false, -1 for non-constant.
2804 static int
2805 expr_constant(expr_ty e)
2807 switch (e->kind) {
2808 case Num_kind:
2809 return PyObject_IsTrue(e->v.Num.n);
2810 case Str_kind:
2811 return PyObject_IsTrue(e->v.Str.s);
2812 case Name_kind:
2813 /* __debug__ is not assignable, so we can optimize
2814 * it away in if and while statements */
2815 if (strcmp(PyString_AS_STRING(e->v.Name.id),
2816 "__debug__") == 0)
2817 return ! Py_OptimizeFlag;
2818 /* fall through */
2819 default:
2820 return -1;
/*
   Implements the with statement from PEP 343.

   The semantics outlined in that PEP are as follows:

   with EXPR as VAR:
       BLOCK

   It is implemented roughly as:

   context = EXPR
   exit = context.__exit__  # not calling it
   value = context.__enter__()
   try:
       VAR = value  # if VAR present in the syntax
       BLOCK
   finally:
       if an exception was raised:
	   exc = copy of (exception, instance, traceback)
       else:
	   exc = (None, None, None)
       exit(*exc)
 */
static int
compiler_with(struct compiler *c, stmt_ty s)
{
	static identifier enter_attr, exit_attr;  /* cached interned names */
	basicblock *block, *finally;
	identifier tmpvalue = NULL;

	assert(s->kind == With_kind);

	if (!enter_attr) {
		enter_attr = PyString_InternFromString("__enter__");
		if (!enter_attr)
			return 0;
	}
	if (!exit_attr) {
		exit_attr = PyString_InternFromString("__exit__");
		if (!exit_attr)
			return 0;
	}

	block = compiler_new_block(c);
	finally = compiler_new_block(c);
	if (!block || !finally)
		return 0;

	if (s->v.With.optional_vars) {
		/* Create a temporary variable to hold context.__enter__().
		   We need to do this rather than preserving it on the stack
		   because SETUP_FINALLY remembers the stack level.
		   We need to do the assignment *inside* the try/finally
		   so that context.__exit__() is called when the assignment
		   fails.  But we need to call context.__enter__() *before*
		   the try/finally so that if it fails we won't call
		   context.__exit__().
		*/
		tmpvalue = compiler_new_tmpname(c);
		if (tmpvalue == NULL)
			return 0;
		/* tmpvalue's ownership passes to the arena. */
		PyArena_AddPyObject(c->c_arena, tmpvalue);
	}

	/* Evaluate EXPR */
	VISIT(c, expr, s->v.With.context_expr);

	/* Squirrel away context.__exit__ by stuffing it under context */
	ADDOP(c, DUP_TOP);
	ADDOP_O(c, LOAD_ATTR, exit_attr, names);
	ADDOP(c, ROT_TWO);

	/* Call context.__enter__() */
	ADDOP_O(c, LOAD_ATTR, enter_attr, names);
	ADDOP_I(c, CALL_FUNCTION, 0);

	if (s->v.With.optional_vars) {
		/* Store it in tmpvalue */
		if (!compiler_nameop(c, tmpvalue, Store))
			return 0;
	}
	else {
		/* Discard result from context.__enter__() */
		ADDOP(c, POP_TOP);
	}

	/* Start the try block */
	ADDOP_JREL(c, SETUP_FINALLY, finally);

	compiler_use_next_block(c, block);
	if (!compiler_push_fblock(c, FINALLY_TRY, block)) {
		return 0;
	}

	if (s->v.With.optional_vars) {
		/* Bind saved result of context.__enter__() to VAR */
		if (!compiler_nameop(c, tmpvalue, Load) ||
		    !compiler_nameop(c, tmpvalue, Del))
			return 0;
		VISIT(c, expr, s->v.With.optional_vars);
	}

	/* BLOCK code */
	VISIT_SEQ(c, stmt, s->v.With.body);

	/* End of try block; start the finally block */
	ADDOP(c, POP_BLOCK);
	compiler_pop_fblock(c, FINALLY_TRY, block);

	ADDOP_O(c, LOAD_CONST, Py_None, consts);
	compiler_use_next_block(c, finally);
	if (!compiler_push_fblock(c, FINALLY_END, finally))
		return 0;

	/* Finally block starts; context.__exit__ is on the stack under
	   the exception or return information. Just issue our magic
	   opcode. */
	ADDOP(c, WITH_CLEANUP);

	/* Finally block ends. */
	ADDOP(c, END_FINALLY);
	compiler_pop_fblock(c, FINALLY_END, finally);
	return 1;
}
/* Central dispatch for expression code generation.  Simple node kinds
   are handled inline; composite ones delegate to compiler_* helpers.
   Returns 1 on success, 0 on error (with an exception set). */
static int
compiler_visit_expr(struct compiler *c, expr_ty e)
{
	int i, n;

	/* If expr e has a different line number than the last expr/stmt,
	   set a new line number for the next instruction.
	*/
	if (e->lineno > c->u->u_lineno) {
		c->u->u_lineno = e->lineno;
		c->u->u_lineno_set = false;
	}
	switch (e->kind) {
	case BoolOp_kind:
		return compiler_boolop(c, e);
	case BinOp_kind:
		VISIT(c, expr, e->v.BinOp.left);
		VISIT(c, expr, e->v.BinOp.right);
		ADDOP(c, binop(c, e->v.BinOp.op));
		break;
	case UnaryOp_kind:
		VISIT(c, expr, e->v.UnaryOp.operand);
		ADDOP(c, unaryop(e->v.UnaryOp.op));
		break;
	case Lambda_kind:
		return compiler_lambda(c, e);
	case IfExp_kind:
		return compiler_ifexp(c, e);
	case Dict_kind:
		n = asdl_seq_LEN(e->v.Dict.values);
		/* BUILD_MAP's oparg is only a presizing hint; cap it. */
		ADDOP_I(c, BUILD_MAP, (n>0xFFFF ? 0xFFFF : n));
		for (i = 0; i < n; i++) {
			/* STORE_MAP expects value below key on the stack. */
			VISIT(c, expr,
				(expr_ty)asdl_seq_GET(e->v.Dict.values, i));
			VISIT(c, expr,
				(expr_ty)asdl_seq_GET(e->v.Dict.keys, i));
			ADDOP(c, STORE_MAP);
		}
		break;
	case ListComp_kind:
		return compiler_listcomp(c, e);
	case GeneratorExp_kind:
		return compiler_genexp(c, e);
	case Yield_kind:
		if (c->u->u_ste->ste_type != FunctionBlock)
			return compiler_error(c, "'yield' outside function");
		if (e->v.Yield.value) {
			VISIT(c, expr, e->v.Yield.value);
		}
		else {
			ADDOP_O(c, LOAD_CONST, Py_None, consts);
		}
		ADDOP(c, YIELD_VALUE);
		break;
	case Compare_kind:
		return compiler_compare(c, e);
	case Call_kind:
		return compiler_call(c, e);
	case Repr_kind:
		VISIT(c, expr, e->v.Repr.value);
		ADDOP(c, UNARY_CONVERT);
		break;
	case Num_kind:
		ADDOP_O(c, LOAD_CONST, e->v.Num.n, consts);
		break;
	case Str_kind:
		ADDOP_O(c, LOAD_CONST, e->v.Str.s, consts);
		break;
	/* The following exprs can be assignment targets. */
	case Attribute_kind:
		/* In AugStore the object is already on the stack from the
		   matching AugLoad visit, so don't evaluate it again. */
		if (e->v.Attribute.ctx != AugStore)
			VISIT(c, expr, e->v.Attribute.value);
		switch (e->v.Attribute.ctx) {
		case AugLoad:
			ADDOP(c, DUP_TOP);
			/* Fall through to load */
		case Load:
			ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names);
			break;
		case AugStore:
			ADDOP(c, ROT_TWO);
			/* Fall through to save */
		case Store:
			ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names);
			break;
		case Del:
			ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names);
			break;
		case Param:
		default:
			PyErr_SetString(PyExc_SystemError,
					"param invalid in attribute expression");
			return 0;
		}
		break;
	case Subscript_kind:
		switch (e->v.Subscript.ctx) {
		case AugLoad:
			VISIT(c, expr, e->v.Subscript.value);
			VISIT_SLICE(c, e->v.Subscript.slice, AugLoad);
			break;
		case Load:
			VISIT(c, expr, e->v.Subscript.value);
			VISIT_SLICE(c, e->v.Subscript.slice, Load);
			break;
		case AugStore:
			VISIT_SLICE(c, e->v.Subscript.slice, AugStore);
			break;
		case Store:
			VISIT(c, expr, e->v.Subscript.value);
			VISIT_SLICE(c, e->v.Subscript.slice, Store);
			break;
		case Del:
			VISIT(c, expr, e->v.Subscript.value);
			VISIT_SLICE(c, e->v.Subscript.slice, Del);
			break;
		case Param:
		default:
			PyErr_SetString(PyExc_SystemError,
				"param invalid in subscript expression");
			return 0;
		}
		break;
	case Name_kind:
		return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx);
	/* child nodes of List and Tuple will have expr_context set */
	case List_kind:
		return compiler_list(c, e);
	case Tuple_kind:
		return compiler_tuple(c, e);
	}
	return 1;
}
/* Compile an augmented assignment (x += y etc.).  The target is
   evaluated once: a synthetic node with AugLoad context loads it, then
   after the in-place operation its context is flipped to AugStore and
   it is visited again to store the result. */
static int
compiler_augassign(struct compiler *c, stmt_ty s)
{
	expr_ty e = s->v.AugAssign.target;
	expr_ty auge;

	assert(s->kind == AugAssign_kind);

	switch (e->kind) {
	case Attribute_kind:
		/* auge is arena-allocated; no explicit free needed. */
		auge = Attribute(e->v.Attribute.value, e->v.Attribute.attr,
				 AugLoad, e->lineno, e->col_offset, c->c_arena);
		if (auge == NULL)
			return 0;
		VISIT(c, expr, auge);
		VISIT(c, expr, s->v.AugAssign.value);
		ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
		auge->v.Attribute.ctx = AugStore;
		VISIT(c, expr, auge);
		break;
	case Subscript_kind:
		auge = Subscript(e->v.Subscript.value, e->v.Subscript.slice,
				 AugLoad, e->lineno, e->col_offset, c->c_arena);
		if (auge == NULL)
			return 0;
		VISIT(c, expr, auge);
		VISIT(c, expr, s->v.AugAssign.value);
		ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
		auge->v.Subscript.ctx = AugStore;
		VISIT(c, expr, auge);
		break;
	case Name_kind:
		if (!compiler_nameop(c, e->v.Name.id, Load))
			return 0;
		VISIT(c, expr, s->v.AugAssign.value);
		ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
		return compiler_nameop(c, e->v.Name.id, Store);
	default:
		PyErr_Format(PyExc_SystemError,
			"invalid node type (%d) for augmented assignment",
			e->kind);
		return 0;
	}
	return 1;
}
3129 static int
3130 compiler_push_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3132 struct fblockinfo *f;
3133 if (c->u->u_nfblocks >= CO_MAXBLOCKS) {
3134 PyErr_SetString(PyExc_SystemError,
3135 "too many statically nested blocks");
3136 return 0;
3138 f = &c->u->u_fblock[c->u->u_nfblocks++];
3139 f->fb_type = t;
3140 f->fb_block = b;
3141 return 1;
3144 static void
3145 compiler_pop_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3147 struct compiler_unit *u = c->u;
3148 assert(u->u_nfblocks > 0);
3149 u->u_nfblocks--;
3150 assert(u->u_fblock[u->u_nfblocks].fb_type == t);
3151 assert(u->u_fblock[u->u_nfblocks].fb_block == b);
3154 static int
3155 compiler_in_loop(struct compiler *c) {
3156 int i;
3157 struct compiler_unit *u = c->u;
3158 for (i = 0; i < u->u_nfblocks; ++i) {
3159 if (u->u_fblock[i].fb_type == LOOP)
3160 return 1;
3162 return 0;
3164 /* Raises a SyntaxError and returns 0.
3165 If something goes wrong, a different exception may be raised.
3168 static int
3169 compiler_error(struct compiler *c, const char *errstr)
3171 PyObject *loc;
3172 PyObject *u = NULL, *v = NULL;
3174 loc = PyErr_ProgramText(c->c_filename, c->u->u_lineno);
3175 if (!loc) {
3176 Py_INCREF(Py_None);
3177 loc = Py_None;
3179 u = Py_BuildValue("(ziOO)", c->c_filename, c->u->u_lineno,
3180 Py_None, loc);
3181 if (!u)
3182 goto exit;
3183 v = Py_BuildValue("(zO)", errstr, u);
3184 if (!v)
3185 goto exit;
3186 PyErr_SetObject(PyExc_SyntaxError, v);
3187 exit:
3188 Py_DECREF(loc);
3189 Py_XDECREF(u);
3190 Py_XDECREF(v);
3191 return 0;
3194 static int
3195 compiler_handle_subscr(struct compiler *c, const char *kind,
3196 expr_context_ty ctx)
3198 int op = 0;
3200 /* XXX this code is duplicated */
3201 switch (ctx) {
3202 case AugLoad: /* fall through to Load */
3203 case Load: op = BINARY_SUBSCR; break;
3204 case AugStore:/* fall through to Store */
3205 case Store: op = STORE_SUBSCR; break;
3206 case Del: op = DELETE_SUBSCR; break;
3207 case Param:
3208 PyErr_Format(PyExc_SystemError,
3209 "invalid %s kind %d in subscript\n",
3210 kind, ctx);
3211 return 0;
3213 if (ctx == AugLoad) {
3214 ADDOP_I(c, DUP_TOPX, 2);
3216 else if (ctx == AugStore) {
3217 ADDOP(c, ROT_THREE);
3219 ADDOP(c, op);
3220 return 1;
3223 static int
3224 compiler_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3226 int n = 2;
3227 assert(s->kind == Slice_kind);
3229 /* only handles the cases where BUILD_SLICE is emitted */
3230 if (s->v.Slice.lower) {
3231 VISIT(c, expr, s->v.Slice.lower);
3233 else {
3234 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3237 if (s->v.Slice.upper) {
3238 VISIT(c, expr, s->v.Slice.upper);
3240 else {
3241 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3244 if (s->v.Slice.step) {
3245 n++;
3246 VISIT(c, expr, s->v.Slice.step);
3248 ADDOP_I(c, BUILD_SLICE, n);
3249 return 1;
/* Compile a step-less slice using the dedicated SLICE opcodes.
   slice_offset selects the opcode variant (+1 if lower present, +2 if
   upper present); stack_count drives the DUP/ROT shuffling needed for
   augmented assignment. */
static int
compiler_simple_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
{
	int op = 0, slice_offset = 0, stack_count = 0;

	assert(s->v.Slice.step == NULL);
	if (s->v.Slice.lower) {
		slice_offset++;
		stack_count++;
		/* In AugStore the bounds are already on the stack. */
		if (ctx != AugStore)
			VISIT(c, expr, s->v.Slice.lower);
	}
	if (s->v.Slice.upper) {
		slice_offset += 2;
		stack_count++;
		if (ctx != AugStore)
			VISIT(c, expr, s->v.Slice.upper);
	}

	if (ctx == AugLoad) {
		/* Duplicate object (and bounds) for the later AugStore. */
		switch (stack_count) {
		case 0: ADDOP(c, DUP_TOP); break;
		case 1: ADDOP_I(c, DUP_TOPX, 2); break;
		case 2: ADDOP_I(c, DUP_TOPX, 3); break;
		}
	}
	else if (ctx == AugStore) {
		/* Bring the computed value below the saved operands. */
		switch (stack_count) {
		case 0: ADDOP(c, ROT_TWO); break;
		case 1: ADDOP(c, ROT_THREE); break;
		case 2: ADDOP(c, ROT_FOUR); break;
		}
	}

	switch (ctx) {
	case AugLoad: /* fall through to Load */
	case Load: op = SLICE; break;
	case AugStore:/* fall through to Store */
	case Store: op = STORE_SLICE; break;
	case Del: op = DELETE_SLICE; break;
	case Param:
	default:
		PyErr_SetString(PyExc_SystemError,
				"param invalid in simple slice");
		return 0;
	}

	ADDOP(c, op + slice_offset);
	return 1;
}
3303 static int
3304 compiler_visit_nested_slice(struct compiler *c, slice_ty s,
3305 expr_context_ty ctx)
3307 switch (s->kind) {
3308 case Ellipsis_kind:
3309 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
3310 break;
3311 case Slice_kind:
3312 return compiler_slice(c, s, ctx);
3313 case Index_kind:
3314 VISIT(c, expr, s->v.Index.value);
3315 break;
3316 case ExtSlice_kind:
3317 default:
3318 PyErr_SetString(PyExc_SystemError,
3319 "extended slice invalid in nested slice");
3320 return 0;
3322 return 1;
/* Dispatch slice code generation by slice kind, then emit the actual
   subscript opcode via compiler_handle_subscr().  In AugStore context
   the operands are already on the stack, so they are not re-evaluated. */
static int
compiler_visit_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
{
	char * kindname = NULL;		/* for error messages only */

	switch (s->kind) {
	case Index_kind:
		kindname = "index";
		if (ctx != AugStore) {
			VISIT(c, expr, s->v.Index.value);
		}
		break;
	case Ellipsis_kind:
		kindname = "ellipsis";
		if (ctx != AugStore) {
			ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
		}
		break;
	case Slice_kind:
		kindname = "slice";
		/* Step-less slices use the cheap SLICE opcode family. */
		if (!s->v.Slice.step)
			return compiler_simple_slice(c, s, ctx);
		if (ctx != AugStore) {
			if (!compiler_slice(c, s, ctx))
				return 0;
		}
		break;
	case ExtSlice_kind:
		kindname = "extended slice";
		if (ctx != AugStore) {
			int i, n = asdl_seq_LEN(s->v.ExtSlice.dims);
			for (i = 0; i < n; i++) {
				slice_ty sub = (slice_ty)asdl_seq_GET(
					s->v.ExtSlice.dims, i);
				if (!compiler_visit_nested_slice(c, sub, ctx))
					return 0;
			}
			/* The dimensions form a tuple subscript. */
			ADDOP_I(c, BUILD_TUPLE, n);
		}
		break;
	default:
		PyErr_Format(PyExc_SystemError,
			     "invalid subscript kind %d", s->kind);
		return 0;
	}
	return compiler_handle_subscr(c, kindname, ctx);
}
/* End of the compiler section, beginning of the assembler section */

/* do depth-first search of basic block graph, starting with block.
   post records the block indices in post-order.

   XXX must handle implicit jumps from one block to next
*/

/* State for turning the basic-block graph into a flat code string. */
struct assembler {
	PyObject *a_bytecode;  /* string containing bytecode */
	int a_offset;	       /* offset into bytecode */
	int a_nblocks;	       /* number of reachable blocks */
	basicblock **a_postorder; /* list of blocks in dfs postorder */
	PyObject *a_lnotab;    /* string containing lnotab */
	int a_lnotab_off;      /* offset into lnotab */
	int a_lineno;	       /* last lineno of emitted instruction */
	int a_lineno_off;      /* bytecode offset of last lineno */
};
3392 static void
3393 dfs(struct compiler *c, basicblock *b, struct assembler *a)
3395 int i;
3396 struct instr *instr = NULL;
3398 if (b->b_seen)
3399 return;
3400 b->b_seen = 1;
3401 if (b->b_next != NULL)
3402 dfs(c, b->b_next, a);
3403 for (i = 0; i < b->b_iused; i++) {
3404 instr = &b->b_instr[i];
3405 if (instr->i_jrel || instr->i_jabs)
3406 dfs(c, instr->i_target, a);
3408 a->a_postorder[a->a_nblocks++] = b;
/* Recursive helper for stackdepth(): walk from block b with the given
   entry depth, returning the largest depth seen.  b_seen guards against
   cycles; b_startdepth prunes re-visits at no greater depth. */
static int
stackdepth_walk(struct compiler *c, basicblock *b, int depth, int maxdepth)
{
	int i;
	struct instr *instr;
	if (b->b_seen || b->b_startdepth >= depth)
		return maxdepth;
	b->b_seen = 1;
	b->b_startdepth = depth;
	for (i = 0; i < b->b_iused; i++) {
		instr = &b->b_instr[i];
		depth += opcode_stack_effect(instr->i_opcode, instr->i_oparg);
		if (depth > maxdepth)
			maxdepth = depth;
		assert(depth >= 0); /* invalid code or bug in stackdepth() */
		if (instr->i_jrel || instr->i_jabs) {
			maxdepth = stackdepth_walk(c, instr->i_target,
						   depth, maxdepth);
			if (instr->i_opcode == JUMP_ABSOLUTE ||
			    instr->i_opcode == JUMP_FORWARD) {
				goto out; /* remaining code is dead */
			}
		}
	}
	if (b->b_next)
		maxdepth = stackdepth_walk(c, b->b_next, depth, maxdepth);
out:
	b->b_seen = 0;
	return maxdepth;
}
3442 /* Find the flow path that needs the largest stack. We assume that
3443 * cycles in the flow graph have no net effect on the stack depth.
3445 static int
3446 stackdepth(struct compiler *c)
3448 basicblock *b, *entryblock;
3449 entryblock = NULL;
3450 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3451 b->b_seen = 0;
3452 b->b_startdepth = INT_MIN;
3453 entryblock = b;
3455 if (!entryblock)
3456 return 0;
3457 return stackdepth_walk(c, entryblock, 0, 0);
3460 static int
3461 assemble_init(struct assembler *a, int nblocks, int firstlineno)
3463 memset(a, 0, sizeof(struct assembler));
3464 a->a_lineno = firstlineno;
3465 a->a_bytecode = PyString_FromStringAndSize(NULL, DEFAULT_CODE_SIZE);
3466 if (!a->a_bytecode)
3467 return 0;
3468 a->a_lnotab = PyString_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE);
3469 if (!a->a_lnotab)
3470 return 0;
3471 if (nblocks > PY_SIZE_MAX / sizeof(basicblock *)) {
3472 PyErr_NoMemory();
3473 return 0;
3475 a->a_postorder = (basicblock **)PyObject_Malloc(
3476 sizeof(basicblock *) * nblocks);
3477 if (!a->a_postorder) {
3478 PyErr_NoMemory();
3479 return 0;
3481 return 1;
3484 static void
3485 assemble_free(struct assembler *a)
3487 Py_XDECREF(a->a_bytecode);
3488 Py_XDECREF(a->a_lnotab);
3489 if (a->a_postorder)
3490 PyObject_Free(a->a_postorder);
3493 /* Return the size of a basic block in bytes. */
3495 static int
3496 instrsize(struct instr *instr)
3498 if (!instr->i_hasarg)
3499 return 1; /* 1 byte for the opcode*/
3500 if (instr->i_oparg > 0xffff)
3501 return 6; /* 1 (opcode) + 1 (EXTENDED_ARG opcode) + 2 (oparg) + 2(oparg extended) */
3502 return 3; /* 1 (opcode) + 2 (oparg) */
3505 static int
3506 blocksize(basicblock *b)
3508 int i;
3509 int size = 0;
3511 for (i = 0; i < b->b_iused; i++)
3512 size += instrsize(&b->b_instr[i]);
3513 return size;
3516 /* All about a_lnotab.
3518 c_lnotab is an array of unsigned bytes disguised as a Python string.
3519 It is used to map bytecode offsets to source code line #s (when needed
3520 for tracebacks).
3522 The array is conceptually a list of
3523 (bytecode offset increment, line number increment)
3524 pairs. The details are important and delicate, best illustrated by example:
3526 byte code offset source code line number
3529 50 7
3530 350 307
3531 361 308
3533 The first trick is that these numbers aren't stored, only the increments
3534 from one row to the next (this doesn't really work, but it's a start):
3536 0, 1, 6, 1, 44, 5, 300, 300, 11, 1
3538 The second trick is that an unsigned byte can't hold negative values, or
3539 values larger than 255, so (a) there's a deep assumption that byte code
3540 offsets and their corresponding line #s both increase monotonically, and (b)
3541 if at least one column jumps by more than 255 from one row to the next, more
3542 than one pair is written to the table. In case #b, there's no way to know
3543 from looking at the table later how many were written. That's the delicate
3544 part. A user of c_lnotab desiring to find the source line number
3545 corresponding to a bytecode address A should do something like this
3547 lineno = addr = 0
3548 for addr_incr, line_incr in c_lnotab:
3549 addr += addr_incr
3550 if addr > A:
3551 return lineno
3552 lineno += line_incr
3554 In order for this to work, when the addr field increments by more than 255,
3555 the line # increment in each pair generated must be 0 until the remaining addr
3556 increment is < 256. So, in the example above, assemble_lnotab (it used
3557 to be called com_set_lineno) should not (as was actually done until 2.2)
3558 expand 300, 300 to 255, 255, 45, 45,
3559 but to 255, 0, 45, 255, 0, 45.
3562 static int
3563 assemble_lnotab(struct assembler *a, struct instr *i)
3565 int d_bytecode, d_lineno;
3566 int len;
3567 unsigned char *lnotab;
3569 d_bytecode = a->a_offset - a->a_lineno_off;
3570 d_lineno = i->i_lineno - a->a_lineno;
3572 assert(d_bytecode >= 0);
3573 assert(d_lineno >= 0);
3575 if(d_bytecode == 0 && d_lineno == 0)
3576 return 1;
3578 if (d_bytecode > 255) {
3579 int j, nbytes, ncodes = d_bytecode / 255;
3580 nbytes = a->a_lnotab_off + 2 * ncodes;
3581 len = PyString_GET_SIZE(a->a_lnotab);
3582 if (nbytes >= len) {
3583 if ((len <= INT_MAX / 2) && (len * 2 < nbytes))
3584 len = nbytes;
3585 else if (len <= INT_MAX / 2)
3586 len *= 2;
3587 else {
3588 PyErr_NoMemory();
3589 return 0;
3591 if (_PyString_Resize(&a->a_lnotab, len) < 0)
3592 return 0;
3594 lnotab = (unsigned char *)
3595 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3596 for (j = 0; j < ncodes; j++) {
3597 *lnotab++ = 255;
3598 *lnotab++ = 0;
3600 d_bytecode -= ncodes * 255;
3601 a->a_lnotab_off += ncodes * 2;
3603 assert(d_bytecode <= 255);
3604 if (d_lineno > 255) {
3605 int j, nbytes, ncodes = d_lineno / 255;
3606 nbytes = a->a_lnotab_off + 2 * ncodes;
3607 len = PyString_GET_SIZE(a->a_lnotab);
3608 if (nbytes >= len) {
3609 if ((len <= INT_MAX / 2) && len * 2 < nbytes)
3610 len = nbytes;
3611 else if (len <= INT_MAX / 2)
3612 len *= 2;
3613 else {
3614 PyErr_NoMemory();
3615 return 0;
3617 if (_PyString_Resize(&a->a_lnotab, len) < 0)
3618 return 0;
3620 lnotab = (unsigned char *)
3621 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3622 *lnotab++ = d_bytecode;
3623 *lnotab++ = 255;
3624 d_bytecode = 0;
3625 for (j = 1; j < ncodes; j++) {
3626 *lnotab++ = 0;
3627 *lnotab++ = 255;
3629 d_lineno -= ncodes * 255;
3630 a->a_lnotab_off += ncodes * 2;
3633 len = PyString_GET_SIZE(a->a_lnotab);
3634 if (a->a_lnotab_off + 2 >= len) {
3635 if (_PyString_Resize(&a->a_lnotab, len * 2) < 0)
3636 return 0;
3638 lnotab = (unsigned char *)
3639 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3641 a->a_lnotab_off += 2;
3642 if (d_bytecode) {
3643 *lnotab++ = d_bytecode;
3644 *lnotab++ = d_lineno;
3646 else { /* First line of a block; def stmt, etc. */
3647 *lnotab++ = 0;
3648 *lnotab++ = d_lineno;
3650 a->a_lineno = i->i_lineno;
3651 a->a_lineno_off = a->a_offset;
3652 return 1;
3655 /* assemble_emit()
3656 Extend the bytecode with a new instruction.
3657 Update lnotab if necessary.
3660 static int
3661 assemble_emit(struct assembler *a, struct instr *i)
3663 int size, arg = 0, ext = 0;
3664 Py_ssize_t len = PyString_GET_SIZE(a->a_bytecode);
3665 char *code;
3667 size = instrsize(i);
3668 if (i->i_hasarg) {
3669 arg = i->i_oparg;
3670 ext = arg >> 16;
3672 if (i->i_lineno && !assemble_lnotab(a, i))
3673 return 0;
3674 if (a->a_offset + size >= len) {
3675 if (len > PY_SSIZE_T_MAX / 2)
3676 return 0;
3677 if (_PyString_Resize(&a->a_bytecode, len * 2) < 0)
3678 return 0;
3680 code = PyString_AS_STRING(a->a_bytecode) + a->a_offset;
3681 a->a_offset += size;
3682 if (size == 6) {
3683 assert(i->i_hasarg);
3684 *code++ = (char)EXTENDED_ARG;
3685 *code++ = ext & 0xff;
3686 *code++ = ext >> 8;
3687 arg &= 0xffff;
3689 *code++ = i->i_opcode;
3690 if (i->i_hasarg) {
3691 assert(size == 3 || size == 6);
3692 *code++ = arg & 0xff;
3693 *code++ = arg >> 8;
3695 return 1;
/* Assign bytecode offsets to blocks and convert jump targets from
   block pointers to numeric opargs, iterating to a fixed point because
   growing a jump to EXTENDED_ARG form changes block sizes. */
static void
assemble_jump_offsets(struct assembler *a, struct compiler *c)
{
	basicblock *b;
	int bsize, totsize, extended_arg_count, last_extended_arg_count = 0;
	int i;

	/* Compute the size of each block and fixup jump args.
	   Replace block pointer with position in bytecode. */
start:
	totsize = 0;
	/* a_postorder is reversed to get layout order. */
	for (i = a->a_nblocks - 1; i >= 0; i--) {
		b = a->a_postorder[i];
		bsize = blocksize(b);
		b->b_offset = totsize;
		totsize += bsize;
	}
	extended_arg_count = 0;
	for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
		bsize = b->b_offset;
		for (i = 0; i < b->b_iused; i++) {
			struct instr *instr = &b->b_instr[i];
			/* Relative jumps are computed relative to
			   the instruction pointer after fetching
			   the jump instruction.
			*/
			bsize += instrsize(instr);
			if (instr->i_jabs)
				instr->i_oparg = instr->i_target->b_offset;
			else if (instr->i_jrel) {
				int delta = instr->i_target->b_offset - bsize;
				instr->i_oparg = delta;
			}
			else
				continue;
			if (instr->i_oparg > 0xffff)
				extended_arg_count++;
		}
	}

	/* XXX: This is an awful hack that could hurt performance, but
		on the bright side it should work until we come up
		with a better solution.

		In the meantime, should the goto be dropped in favor
		of a loop?

		The issue is that in the first loop blocksize() is called
		which calls instrsize() which requires i_oparg be set
		appropriately.	There is a bootstrap problem because
		i_oparg is calculated in the second loop above.

		So we loop until we stop seeing new EXTENDED_ARGs.
		The only EXTENDED_ARGs that could be popping up are
		ones in jump instructions.  So this should converge
		fairly quickly.
	*/
	if (last_extended_arg_count != extended_arg_count) {
		last_extended_arg_count = extended_arg_count;
		goto start;
	}
}
3761 static PyObject *
3762 dict_keys_inorder(PyObject *dict, int offset)
3764 PyObject *tuple, *k, *v;
3765 Py_ssize_t i, pos = 0, size = PyDict_Size(dict);
3767 tuple = PyTuple_New(size);
3768 if (tuple == NULL)
3769 return NULL;
3770 while (PyDict_Next(dict, &pos, &k, &v)) {
3771 i = PyInt_AS_LONG(v);
3772 k = PyTuple_GET_ITEM(k, 0);
3773 Py_INCREF(k);
3774 assert((i - offset) < size);
3775 assert((i - offset) >= 0);
3776 PyTuple_SET_ITEM(tuple, i - offset, k);
3778 return tuple;
3781 static int
3782 compute_code_flags(struct compiler *c)
3784 PySTEntryObject *ste = c->u->u_ste;
3785 int flags = 0, n;
3786 if (ste->ste_type != ModuleBlock)
3787 flags |= CO_NEWLOCALS;
3788 if (ste->ste_type == FunctionBlock) {
3789 if (!ste->ste_unoptimized)
3790 flags |= CO_OPTIMIZED;
3791 if (ste->ste_nested)
3792 flags |= CO_NESTED;
3793 if (ste->ste_generator)
3794 flags |= CO_GENERATOR;
3796 if (ste->ste_varargs)
3797 flags |= CO_VARARGS;
3798 if (ste->ste_varkeywords)
3799 flags |= CO_VARKEYWORDS;
3800 if (ste->ste_generator)
3801 flags |= CO_GENERATOR;
3803 /* (Only) inherit compilerflags in PyCF_MASK */
3804 flags |= (c->c_flags->cf_flags & PyCF_MASK);
3806 n = PyDict_Size(c->u->u_freevars);
3807 if (n < 0)
3808 return -1;
3809 if (n == 0) {
3810 n = PyDict_Size(c->u->u_cellvars);
3811 if (n < 0)
3812 return -1;
3813 if (n == 0) {
3814 flags |= CO_NOFREE;
3818 return flags;
3821 static PyCodeObject *
3822 makecode(struct compiler *c, struct assembler *a)
3824 PyObject *tmp;
3825 PyCodeObject *co = NULL;
3826 PyObject *consts = NULL;
3827 PyObject *names = NULL;
3828 PyObject *varnames = NULL;
3829 PyObject *filename = NULL;
3830 PyObject *name = NULL;
3831 PyObject *freevars = NULL;
3832 PyObject *cellvars = NULL;
3833 PyObject *bytecode = NULL;
3834 int nlocals, flags;
3836 tmp = dict_keys_inorder(c->u->u_consts, 0);
3837 if (!tmp)
3838 goto error;
3839 consts = PySequence_List(tmp); /* optimize_code requires a list */
3840 Py_DECREF(tmp);
3842 names = dict_keys_inorder(c->u->u_names, 0);
3843 varnames = dict_keys_inorder(c->u->u_varnames, 0);
3844 if (!consts || !names || !varnames)
3845 goto error;
3847 cellvars = dict_keys_inorder(c->u->u_cellvars, 0);
3848 if (!cellvars)
3849 goto error;
3850 freevars = dict_keys_inorder(c->u->u_freevars, PyTuple_Size(cellvars));
3851 if (!freevars)
3852 goto error;
3853 filename = PyString_FromString(c->c_filename);
3854 if (!filename)
3855 goto error;
3857 nlocals = PyDict_Size(c->u->u_varnames);
3858 flags = compute_code_flags(c);
3859 if (flags < 0)
3860 goto error;
3862 bytecode = PyCode_Optimize(a->a_bytecode, consts, names, a->a_lnotab);
3863 if (!bytecode)
3864 goto error;
3866 tmp = PyList_AsTuple(consts); /* PyCode_New requires a tuple */
3867 if (!tmp)
3868 goto error;
3869 Py_DECREF(consts);
3870 consts = tmp;
3872 co = PyCode_New(c->u->u_argcount, nlocals, stackdepth(c), flags,
3873 bytecode, consts, names, varnames,
3874 freevars, cellvars,
3875 filename, c->u->u_name,
3876 c->u->u_firstlineno,
3877 a->a_lnotab);
3878 error:
3879 Py_XDECREF(consts);
3880 Py_XDECREF(names);
3881 Py_XDECREF(varnames);
3882 Py_XDECREF(filename);
3883 Py_XDECREF(name);
3884 Py_XDECREF(freevars);
3885 Py_XDECREF(cellvars);
3886 Py_XDECREF(bytecode);
3887 return co;
/* For debugging purposes only */
#if 0
/* Print one instruction (line, opcode, optional arg, jump kind) to stderr. */
static void
dump_instr(const struct instr *i)
{
    const char *jrel = i->i_jrel ? "jrel " : "";
    const char *jabs = i->i_jabs ? "jabs " : "";
    char arg[128];

    *arg = '\0';
    if (i->i_hasarg)
        sprintf(arg, "arg: %d ", i->i_oparg);

    fprintf(stderr, "line: %d, opcode: %d %s%s%s\n",
            i->i_lineno, i->i_opcode, arg, jabs, jrel);
}

/* Print a basic block's header and each of its instructions to stderr. */
static void
dump_basicblock(const basicblock *b)
{
    const char *seen = b->b_seen ? "seen " : "";
    const char *b_return = b->b_return ? "return " : "";
    fprintf(stderr, "used: %d, depth: %d, offset: %d %s%s\n",
            b->b_iused, b->b_startdepth, b->b_offset, seen, b_return);
    if (b->b_instr) {
        int i;
        for (i = 0; i < b->b_iused; i++) {
            fprintf(stderr, " [%02d] ", i);
            dump_instr(b->b_instr + i);
        }
    }
}
#endif
3925 static PyCodeObject *
3926 assemble(struct compiler *c, int addNone)
3928 basicblock *b, *entryblock;
3929 struct assembler a;
3930 int i, j, nblocks;
3931 PyCodeObject *co = NULL;
3933 /* Make sure every block that falls off the end returns None.
3934 XXX NEXT_BLOCK() isn't quite right, because if the last
3935 block ends with a jump or return b_next shouldn't set.
3937 if (!c->u->u_curblock->b_return) {
3938 NEXT_BLOCK(c);
3939 if (addNone)
3940 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3941 ADDOP(c, RETURN_VALUE);
3944 nblocks = 0;
3945 entryblock = NULL;
3946 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3947 nblocks++;
3948 entryblock = b;
3951 /* Set firstlineno if it wasn't explicitly set. */
3952 if (!c->u->u_firstlineno) {
3953 if (entryblock && entryblock->b_instr)
3954 c->u->u_firstlineno = entryblock->b_instr->i_lineno;
3955 else
3956 c->u->u_firstlineno = 1;
3958 if (!assemble_init(&a, nblocks, c->u->u_firstlineno))
3959 goto error;
3960 dfs(c, entryblock, &a);
3962 /* Can't modify the bytecode after computing jump offsets. */
3963 assemble_jump_offsets(&a, c);
3965 /* Emit code in reverse postorder from dfs. */
3966 for (i = a.a_nblocks - 1; i >= 0; i--) {
3967 b = a.a_postorder[i];
3968 for (j = 0; j < b->b_iused; j++)
3969 if (!assemble_emit(&a, &b->b_instr[j]))
3970 goto error;
3973 if (_PyString_Resize(&a.a_lnotab, a.a_lnotab_off) < 0)
3974 goto error;
3975 if (_PyString_Resize(&a.a_bytecode, a.a_offset) < 0)
3976 goto error;
3978 co = makecode(c, &a);
3979 error:
3980 assemble_free(&a);
3981 return co;