2 * This file compiles an abstract syntax tree (AST) into Python bytecode.
4 * The primary entry point is PyAST_Compile(), which returns a
5 * PyCodeObject. The compiler makes several passes to build the code
7 * 1. Checks for future statements. See future.c
8 * 2. Builds a symbol table. See symtable.c.
9 * 3. Generate code for basic blocks. See compiler_mod() in this file.
10 * 4. Assemble the basic blocks into final code. See assemble() in
12 * 5. Optimize the byte code (peephole optimizations). See peephole.c
14 * Note that compiler_mod() suggests module, but the module ast type
15 * (mod_ty) has cases for expressions and interactive statements.
17 * CAUTION: The VISIT_* macros abort the current function when they
18 * encounter a problem. So don't invoke them when there is memory
19 * which needs to be released. Code blocks are OK, as the compiler
20 * structure takes care of releasing those. Use the arena to manage
26 #include "Python-ast.h"
/* Global optimization level, set from the -O command-line option.
   A value >= 2 (-OO) additionally strips docstrings from generated code. */
int Py_OptimizeFlag = 0;

/* Initial sizes for the growable arrays used during code generation;
   each is doubled on demand as code is emitted. */
#define DEFAULT_BLOCK_SIZE 16   /* instructions per basic block, initially */
#define DEFAULT_BLOCKS 8        /* basic blocks per unit, initially */
#define DEFAULT_CODE_SIZE 128   /* bytes of bytecode, initially */
#define DEFAULT_LNOTAB_SIZE 16  /* bytes of line-number table, initially */
45 unsigned i_hasarg
: 1;
46 unsigned char i_opcode
;
48 struct basicblock_
*i_target
; /* target block (if jump instruction) */
52 typedef struct basicblock_
{
53 /* Each basicblock in a compilation unit is linked via b_list in the
54 reverse order that the block are allocated. b_list points to the next
55 block, not to be confused with b_next, which is next by control flow. */
56 struct basicblock_
*b_list
;
57 /* number of instructions used */
59 /* length of instruction array (b_instr) */
61 /* pointer to an array of instructions, initially NULL */
62 struct instr
*b_instr
;
63 /* If b_next is non-NULL, it is a pointer to the next
64 block reached by normal control flow. */
65 struct basicblock_
*b_next
;
66 /* b_seen is used to perform a DFS of basicblocks. */
68 /* b_return is true if a RETURN_VALUE opcode is inserted. */
69 unsigned b_return
: 1;
70 /* depth of stack upon entry of block, computed by stackdepth() */
72 /* instruction offset for block, computed by assemble_jump_offsets() */
76 /* fblockinfo tracks the current frame block.
78 A frame block is used to handle loops, try/except, and try/finally.
79 It's called a frame block to distinguish it from a basic block in the
83 enum fblocktype
{ LOOP
, EXCEPT
, FINALLY_TRY
, FINALLY_END
};
86 enum fblocktype fb_type
;
90 /* The following items change on entry and exit of code blocks.
91 They must be saved and restored when returning to a block.
93 struct compiler_unit
{
94 PySTEntryObject
*u_ste
;
97 /* The following fields are dicts that map objects to
98 the index of them in co_XXX. The index is used as
99 the argument for opcodes that refer to those collections.
101 PyObject
*u_consts
; /* all constants */
102 PyObject
*u_names
; /* all names */
103 PyObject
*u_varnames
; /* local variables */
104 PyObject
*u_cellvars
; /* cell variables */
105 PyObject
*u_freevars
; /* free variables */
107 PyObject
*u_private
; /* for private name mangling */
109 int u_argcount
; /* number of arguments for block */
110 /* Pointer to the most recently allocated block. By following b_list
111 members, you can reach all early allocated blocks. */
112 basicblock
*u_blocks
;
113 basicblock
*u_curblock
; /* pointer to current block */
114 int u_tmpname
; /* temporary variables for list comps */
117 struct fblockinfo u_fblock
[CO_MAXBLOCKS
];
119 int u_firstlineno
; /* the first lineno of the block */
120 int u_lineno
; /* the lineno for the current stmt */
121 bool u_lineno_set
; /* boolean to indicate whether instr
122 has been generated with current lineno */
125 /* This struct captures the global state of a compilation.
127 The u pointer points to the current compilation unit, while units
128 for enclosing blocks are stored in c_stack. The u and c_stack are
129 managed by compiler_enter_scope() and compiler_exit_scope().
133 const char *c_filename
;
134 struct symtable
*c_st
;
135 PyFutureFeatures
*c_future
; /* pointer to module's __future__ */
136 PyCompilerFlags
*c_flags
;
138 int c_interactive
; /* true if in interactive mode */
141 struct compiler_unit
*u
; /* compiler state for current block */
142 PyObject
*c_stack
; /* Python list holding compiler_unit ptrs */
143 char *c_encoding
; /* source encoding (a borrowed reference) */
144 PyArena
*c_arena
; /* pointer to memory allocation arena */
147 static int compiler_enter_scope(struct compiler
*, identifier
, void *, int);
148 static void compiler_free(struct compiler
*);
149 static basicblock
*compiler_new_block(struct compiler
*);
150 static int compiler_next_instr(struct compiler
*, basicblock
*);
151 static int compiler_addop(struct compiler
*, int);
152 static int compiler_addop_o(struct compiler
*, int, PyObject
*, PyObject
*);
153 static int compiler_addop_i(struct compiler
*, int, int);
154 static int compiler_addop_j(struct compiler
*, int, basicblock
*, int);
155 static basicblock
*compiler_use_new_block(struct compiler
*);
156 static int compiler_error(struct compiler
*, const char *);
157 static int compiler_nameop(struct compiler
*, identifier
, expr_context_ty
);
159 static PyCodeObject
*compiler_mod(struct compiler
*, mod_ty
);
160 static int compiler_visit_stmt(struct compiler
*, stmt_ty
);
161 static int compiler_visit_keyword(struct compiler
*, keyword_ty
);
162 static int compiler_visit_expr(struct compiler
*, expr_ty
);
163 static int compiler_augassign(struct compiler
*, stmt_ty
);
164 static int compiler_visit_slice(struct compiler
*, slice_ty
,
167 static int compiler_push_fblock(struct compiler
*, enum fblocktype
,
169 static void compiler_pop_fblock(struct compiler
*, enum fblocktype
,
171 /* Returns true if there is a loop on the fblock stack. */
172 static int compiler_in_loop(struct compiler
*);
174 static int inplace_binop(struct compiler
*, operator_ty
);
175 static int expr_constant(expr_ty e
);
177 static int compiler_with(struct compiler
*, stmt_ty
);
179 static PyCodeObject
*assemble(struct compiler
*, int addNone
);
180 static PyObject
*__doc__
;
183 _Py_Mangle(PyObject
*privateobj
, PyObject
*ident
)
185 /* Name mangling: __private becomes _classname__private.
186 This is independent from how the name is used. */
187 const char *p
, *name
= PyString_AsString(ident
);
190 if (privateobj
== NULL
|| !PyString_Check(privateobj
) ||
191 name
== NULL
|| name
[0] != '_' || name
[1] != '_') {
195 p
= PyString_AsString(privateobj
);
197 /* Don't mangle __id__ or names with dots.
199 The only time a name with a dot can occur is when
200 we are compiling an import statement that has a
203 TODO(jhylton): Decide whether we want to support
204 mangling of the module name, e.g. __M.X.
206 if ((name
[nlen
-1] == '_' && name
[nlen
-2] == '_')
207 || strchr(name
, '.')) {
209 return ident
; /* Don't mangle __whatever__ */
211 /* Strip leading underscores from class name */
216 return ident
; /* Don't mangle if class is just underscores */
220 assert(1 <= PY_SSIZE_T_MAX
- nlen
);
221 assert(1 + nlen
<= PY_SSIZE_T_MAX
- plen
);
223 ident
= PyString_FromStringAndSize(NULL
, 1 + nlen
+ plen
);
226 /* ident = "_" + p[:plen] + name # i.e. 1+plen+nlen bytes */
227 buffer
= PyString_AS_STRING(ident
);
229 strncpy(buffer
+1, p
, plen
);
230 strcpy(buffer
+1+plen
, name
);
235 compiler_init(struct compiler
*c
)
237 memset(c
, 0, sizeof(struct compiler
));
239 c
->c_stack
= PyList_New(0);
247 PyAST_Compile(mod_ty mod
, const char *filename
, PyCompilerFlags
*flags
,
251 PyCodeObject
*co
= NULL
;
252 PyCompilerFlags local_flags
;
256 __doc__
= PyString_InternFromString("__doc__");
261 if (!compiler_init(&c
))
263 c
.c_filename
= filename
;
265 c
.c_future
= PyFuture_FromAST(mod
, filename
);
266 if (c
.c_future
== NULL
)
269 local_flags
.cf_flags
= 0;
270 flags
= &local_flags
;
272 merged
= c
.c_future
->ff_features
| flags
->cf_flags
;
273 c
.c_future
->ff_features
= merged
;
274 flags
->cf_flags
= merged
;
278 c
.c_st
= PySymtable_Build(mod
, filename
, c
.c_future
);
279 if (c
.c_st
== NULL
) {
280 if (!PyErr_Occurred())
281 PyErr_SetString(PyExc_SystemError
, "no symtable");
285 /* XXX initialize to NULL for now, need to handle */
288 co
= compiler_mod(&c
, mod
);
292 assert(co
|| PyErr_Occurred());
297 PyNode_Compile(struct _node
*n
, const char *filename
)
299 PyCodeObject
*co
= NULL
;
301 PyArena
*arena
= PyArena_New();
304 mod
= PyAST_FromNode(n
, NULL
, filename
, arena
);
306 co
= PyAST_Compile(mod
, filename
, NULL
, arena
);
312 compiler_free(struct compiler
*c
)
315 PySymtable_Free(c
->c_st
);
317 PyObject_Free(c
->c_future
);
318 Py_DECREF(c
->c_stack
);
322 list2dict(PyObject
*list
)
326 PyObject
*dict
= PyDict_New();
327 if (!dict
) return NULL
;
329 n
= PyList_Size(list
);
330 for (i
= 0; i
< n
; i
++) {
331 v
= PyInt_FromLong(i
);
336 k
= PyList_GET_ITEM(list
, i
);
337 k
= PyTuple_Pack(2, k
, k
->ob_type
);
338 if (k
== NULL
|| PyDict_SetItem(dict
, k
, v
) < 0) {
350 /* Return new dict containing names from src that match scope(s).
352 src is a symbol table dictionary. If the scope of a name matches
353 either scope_type or flag is set, insert it into the new dict. The
354 values are integers, starting at offset and increasing by one for
359 dictbytype(PyObject
*src
, int scope_type
, int flag
, int offset
)
361 Py_ssize_t pos
= 0, i
= offset
, scope
;
362 PyObject
*k
, *v
, *dest
= PyDict_New();
368 while (PyDict_Next(src
, &pos
, &k
, &v
)) {
369 /* XXX this should probably be a macro in symtable.h */
370 assert(PyInt_Check(v
));
371 scope
= (PyInt_AS_LONG(v
) >> SCOPE_OFF
) & SCOPE_MASK
;
373 if (scope
== scope_type
|| PyInt_AS_LONG(v
) & flag
) {
374 PyObject
*tuple
, *item
= PyInt_FromLong(i
);
380 tuple
= PyTuple_Pack(2, k
, k
->ob_type
);
381 if (!tuple
|| PyDict_SetItem(dest
, tuple
, item
) < 0) {
395 compiler_unit_check(struct compiler_unit
*u
)
398 for (block
= u
->u_blocks
; block
!= NULL
; block
= block
->b_list
) {
399 assert((void *)block
!= (void *)0xcbcbcbcb);
400 assert((void *)block
!= (void *)0xfbfbfbfb);
401 assert((void *)block
!= (void *)0xdbdbdbdb);
402 if (block
->b_instr
!= NULL
) {
403 assert(block
->b_ialloc
> 0);
404 assert(block
->b_iused
> 0);
405 assert(block
->b_ialloc
>= block
->b_iused
);
408 assert (block
->b_iused
== 0);
409 assert (block
->b_ialloc
== 0);
415 compiler_unit_free(struct compiler_unit
*u
)
417 basicblock
*b
, *next
;
419 compiler_unit_check(u
);
423 PyObject_Free((void *)b
->b_instr
);
425 PyObject_Free((void *)b
);
430 Py_CLEAR(u
->u_consts
);
431 Py_CLEAR(u
->u_names
);
432 Py_CLEAR(u
->u_varnames
);
433 Py_CLEAR(u
->u_freevars
);
434 Py_CLEAR(u
->u_cellvars
);
435 Py_CLEAR(u
->u_private
);
440 compiler_enter_scope(struct compiler
*c
, identifier name
, void *key
,
443 struct compiler_unit
*u
;
445 u
= (struct compiler_unit
*)PyObject_Malloc(sizeof(
446 struct compiler_unit
));
451 memset(u
, 0, sizeof(struct compiler_unit
));
453 u
->u_ste
= PySymtable_Lookup(c
->c_st
, key
);
455 compiler_unit_free(u
);
460 u
->u_varnames
= list2dict(u
->u_ste
->ste_varnames
);
461 u
->u_cellvars
= dictbytype(u
->u_ste
->ste_symbols
, CELL
, 0, 0);
462 if (!u
->u_varnames
|| !u
->u_cellvars
) {
463 compiler_unit_free(u
);
467 u
->u_freevars
= dictbytype(u
->u_ste
->ste_symbols
, FREE
, DEF_FREE_CLASS
,
468 PyDict_Size(u
->u_cellvars
));
469 if (!u
->u_freevars
) {
470 compiler_unit_free(u
);
477 u
->u_firstlineno
= lineno
;
479 u
->u_lineno_set
= false;
480 u
->u_consts
= PyDict_New();
482 compiler_unit_free(u
);
485 u
->u_names
= PyDict_New();
487 compiler_unit_free(u
);
493 /* Push the old compiler_unit on the stack. */
495 PyObject
*wrapper
= PyCObject_FromVoidPtr(c
->u
, NULL
);
496 if (!wrapper
|| PyList_Append(c
->c_stack
, wrapper
) < 0) {
498 compiler_unit_free(u
);
502 u
->u_private
= c
->u
->u_private
;
503 Py_XINCREF(u
->u_private
);
508 if (compiler_use_new_block(c
) == NULL
)
515 compiler_exit_scope(struct compiler
*c
)
521 compiler_unit_free(c
->u
);
522 /* Restore c->u to the parent unit. */
523 n
= PyList_GET_SIZE(c
->c_stack
) - 1;
525 wrapper
= PyList_GET_ITEM(c
->c_stack
, n
);
526 c
->u
= (struct compiler_unit
*)PyCObject_AsVoidPtr(wrapper
);
528 /* we are deleting from a list so this really shouldn't fail */
529 if (PySequence_DelItem(c
->c_stack
, n
) < 0)
530 Py_FatalError("compiler_exit_scope()");
531 compiler_unit_check(c
->u
);
538 /* Allocate a new block and return a pointer to it.
539 Returns NULL on error.
543 compiler_new_block(struct compiler
*c
)
546 struct compiler_unit
*u
;
549 b
= (basicblock
*)PyObject_Malloc(sizeof(basicblock
));
554 memset((void *)b
, 0, sizeof(basicblock
));
555 /* Extend the singly linked list of blocks with new block. */
556 b
->b_list
= u
->u_blocks
;
562 compiler_use_new_block(struct compiler
*c
)
564 basicblock
*block
= compiler_new_block(c
);
567 c
->u
->u_curblock
= block
;
572 compiler_next_block(struct compiler
*c
)
574 basicblock
*block
= compiler_new_block(c
);
577 c
->u
->u_curblock
->b_next
= block
;
578 c
->u
->u_curblock
= block
;
583 compiler_use_next_block(struct compiler
*c
, basicblock
*block
)
585 assert(block
!= NULL
);
586 c
->u
->u_curblock
->b_next
= block
;
587 c
->u
->u_curblock
= block
;
591 /* Returns the offset of the next instruction in the current block's
592 b_instr array. Resizes the b_instr as necessary.
593 Returns -1 on failure.
597 compiler_next_instr(struct compiler
*c
, basicblock
*b
)
600 if (b
->b_instr
== NULL
) {
601 b
->b_instr
= (struct instr
*)PyObject_Malloc(
602 sizeof(struct instr
) * DEFAULT_BLOCK_SIZE
);
603 if (b
->b_instr
== NULL
) {
607 b
->b_ialloc
= DEFAULT_BLOCK_SIZE
;
608 memset((char *)b
->b_instr
, 0,
609 sizeof(struct instr
) * DEFAULT_BLOCK_SIZE
);
611 else if (b
->b_iused
== b
->b_ialloc
) {
613 size_t oldsize
, newsize
;
614 oldsize
= b
->b_ialloc
* sizeof(struct instr
);
615 newsize
= oldsize
<< 1;
617 if (oldsize
> (PY_SIZE_MAX
>> 1)) {
627 tmp
= (struct instr
*)PyObject_Realloc(
628 (void *)b
->b_instr
, newsize
);
634 memset((char *)b
->b_instr
+ oldsize
, 0, newsize
- oldsize
);
639 /* Set the i_lineno member of the instruction at offset off if the
640 line number for the current expression/statement has not
641 already been set. If it has been set, the call has no effect.
643 The line number is reset in the following cases:
644 - when entering a new scope
646 - on each expression that start a new line
647 - before the "except" clause
648 - before the "for" and "while" expressions
652 compiler_set_lineno(struct compiler
*c
, int off
)
655 if (c
->u
->u_lineno_set
)
657 c
->u
->u_lineno_set
= true;
658 b
= c
->u
->u_curblock
;
659 b
->b_instr
[off
].i_lineno
= c
->u
->u_lineno
;
663 opcode_stack_effect(int opcode
, int oparg
)
687 case BINARY_MULTIPLY
:
691 case BINARY_SUBTRACT
:
693 case BINARY_FLOOR_DIVIDE
:
694 case BINARY_TRUE_DIVIDE
:
696 case INPLACE_FLOOR_DIVIDE
:
697 case INPLACE_TRUE_DIVIDE
:
728 case INPLACE_SUBTRACT
:
729 case INPLACE_MULTIPLY
:
759 case PRINT_NEWLINE_TO
:
772 return -1; /* XXX Sometimes more */
787 return -1; /* or -2 or -3 if exception occurred */
795 case UNPACK_SEQUENCE
:
829 case JUMP_IF_TRUE_OR_POP
: /* -1 if jump not taken */
830 case JUMP_IF_FALSE_OR_POP
: /* "" */
834 case POP_JUMP_IF_FALSE
:
835 case POP_JUMP_IF_TRUE
:
847 return 3; /* actually pushed by an exception */
858 #define NARGS(o) (((o) % 256) + 2*((o) / 256))
860 return -NARGS(oparg
);
861 case CALL_FUNCTION_VAR
:
862 case CALL_FUNCTION_KW
:
863 return -NARGS(oparg
)-1;
864 case CALL_FUNCTION_VAR_KW
:
865 return -NARGS(oparg
)-2;
884 fprintf(stderr
, "opcode = %d\n", opcode
);
885 Py_FatalError("opcode_stack_effect()");
888 return 0; /* not reachable */
891 /* Add an opcode with no argument.
892 Returns 0 on failure, 1 on success.
896 compiler_addop(struct compiler
*c
, int opcode
)
901 off
= compiler_next_instr(c
, c
->u
->u_curblock
);
904 b
= c
->u
->u_curblock
;
905 i
= &b
->b_instr
[off
];
906 i
->i_opcode
= opcode
;
908 if (opcode
== RETURN_VALUE
)
910 compiler_set_lineno(c
, off
);
915 compiler_add_o(struct compiler
*c
, PyObject
*dict
, PyObject
*o
)
919 unsigned char *p
, *q
;
922 int real_part_zero
, imag_part_zero
;
924 /* necessary to make sure types aren't coerced (e.g., int and long) */
925 /* _and_ to distinguish 0.0 from -0.0 e.g. on IEEE platforms */
926 if (PyFloat_Check(o
)) {
927 d
= PyFloat_AS_DOUBLE(o
);
928 p
= (unsigned char*) &d
;
929 /* all we need is to make the tuple different in either the 0.0
930 * or -0.0 case from all others, just to avoid the "coercion".
932 if (*p
==0 && p
[sizeof(double)-1]==0)
933 t
= PyTuple_Pack(3, o
, o
->ob_type
, Py_None
);
935 t
= PyTuple_Pack(2, o
, o
->ob_type
);
937 else if (PyComplex_Check(o
)) {
938 /* complex case is even messier: we need to make complex(x,
939 0.) different from complex(x, -0.) and complex(0., y)
940 different from complex(-0., y), for any x and y. In
941 particular, all four complex zeros should be
943 z
= PyComplex_AsCComplex(o
);
944 p
= (unsigned char*) &(z
.real
);
945 q
= (unsigned char*) &(z
.imag
);
946 /* all that matters here is that on IEEE platforms
947 real_part_zero will be true if z.real == 0., and false if
948 z.real == -0. In fact, real_part_zero will also be true
949 for some other rarely occurring nonzero floats, but this
950 doesn't matter. Similar comments apply to
952 real_part_zero
= *p
==0 && p
[sizeof(double)-1]==0;
953 imag_part_zero
= *q
==0 && q
[sizeof(double)-1]==0;
954 if (real_part_zero
&& imag_part_zero
) {
955 t
= PyTuple_Pack(4, o
, o
->ob_type
, Py_True
, Py_True
);
957 else if (real_part_zero
&& !imag_part_zero
) {
958 t
= PyTuple_Pack(4, o
, o
->ob_type
, Py_True
, Py_False
);
960 else if (!real_part_zero
&& imag_part_zero
) {
961 t
= PyTuple_Pack(4, o
, o
->ob_type
, Py_False
, Py_True
);
964 t
= PyTuple_Pack(2, o
, o
->ob_type
);
968 t
= PyTuple_Pack(2, o
, o
->ob_type
);
973 v
= PyDict_GetItem(dict
, t
);
975 arg
= PyDict_Size(dict
);
976 v
= PyInt_FromLong(arg
);
981 if (PyDict_SetItem(dict
, t
, v
) < 0) {
989 arg
= PyInt_AsLong(v
);
995 compiler_addop_o(struct compiler
*c
, int opcode
, PyObject
*dict
,
998 int arg
= compiler_add_o(c
, dict
, o
);
1001 return compiler_addop_i(c
, opcode
, arg
);
1005 compiler_addop_name(struct compiler
*c
, int opcode
, PyObject
*dict
,
1009 PyObject
*mangled
= _Py_Mangle(c
->u
->u_private
, o
);
1012 arg
= compiler_add_o(c
, dict
, mangled
);
1016 return compiler_addop_i(c
, opcode
, arg
);
1019 /* Add an opcode with an integer argument.
1020 Returns 0 on failure, 1 on success.
1024 compiler_addop_i(struct compiler
*c
, int opcode
, int oparg
)
1028 off
= compiler_next_instr(c
, c
->u
->u_curblock
);
1031 i
= &c
->u
->u_curblock
->b_instr
[off
];
1032 i
->i_opcode
= opcode
;
1035 compiler_set_lineno(c
, off
);
1040 compiler_addop_j(struct compiler
*c
, int opcode
, basicblock
*b
, int absolute
)
1046 off
= compiler_next_instr(c
, c
->u
->u_curblock
);
1049 i
= &c
->u
->u_curblock
->b_instr
[off
];
1050 i
->i_opcode
= opcode
;
1057 compiler_set_lineno(c
, off
);
1061 /* The distinction between NEW_BLOCK and NEXT_BLOCK is subtle. (I'd
1062 like to find better names.) NEW_BLOCK() creates a new block and sets
1063 it as the current block. NEXT_BLOCK() also creates an implicit jump
1064 from the current block to the new block.
1067 /* The returns inside these macros make it impossible to decref objects
1068 created in the local function. Local objects should use the arena.
1072 #define NEW_BLOCK(C) { \
1073 if (compiler_use_new_block((C)) == NULL) \
1077 #define NEXT_BLOCK(C) { \
1078 if (compiler_next_block((C)) == NULL) \
1082 #define ADDOP(C, OP) { \
1083 if (!compiler_addop((C), (OP))) \
1087 #define ADDOP_IN_SCOPE(C, OP) { \
1088 if (!compiler_addop((C), (OP))) { \
1089 compiler_exit_scope(c); \
1094 #define ADDOP_O(C, OP, O, TYPE) { \
1095 if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1099 #define ADDOP_NAME(C, OP, O, TYPE) { \
1100 if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1104 #define ADDOP_I(C, OP, O) { \
1105 if (!compiler_addop_i((C), (OP), (O))) \
1109 #define ADDOP_JABS(C, OP, O) { \
1110 if (!compiler_addop_j((C), (OP), (O), 1)) \
1114 #define ADDOP_JREL(C, OP, O) { \
1115 if (!compiler_addop_j((C), (OP), (O), 0)) \
1119 /* VISIT and VISIT_SEQ takes an ASDL type as their second argument. They use
1120 the ASDL name to synthesize the name of the C type and the visit function.
1123 #define VISIT(C, TYPE, V) {\
1124 if (!compiler_visit_ ## TYPE((C), (V))) \
1128 #define VISIT_IN_SCOPE(C, TYPE, V) {\
1129 if (!compiler_visit_ ## TYPE((C), (V))) { \
1130 compiler_exit_scope(c); \
1135 #define VISIT_SLICE(C, V, CTX) {\
1136 if (!compiler_visit_slice((C), (V), (CTX))) \
1140 #define VISIT_SEQ(C, TYPE, SEQ) { \
1142 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1143 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1144 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1145 if (!compiler_visit_ ## TYPE((C), elt)) \
1150 #define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
1152 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1153 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1154 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1155 if (!compiler_visit_ ## TYPE((C), elt)) { \
1156 compiler_exit_scope(c); \
1163 compiler_isdocstring(stmt_ty s
)
1165 if (s
->kind
!= Expr_kind
)
1167 return s
->v
.Expr
.value
->kind
== Str_kind
;
1170 /* Compile a sequence of statements, checking for a docstring. */
1173 compiler_body(struct compiler
*c
, asdl_seq
*stmts
)
1178 if (!asdl_seq_LEN(stmts
))
1180 st
= (stmt_ty
)asdl_seq_GET(stmts
, 0);
1181 if (compiler_isdocstring(st
) && Py_OptimizeFlag
< 2) {
1182 /* don't generate docstrings if -OO */
1184 VISIT(c
, expr
, st
->v
.Expr
.value
);
1185 if (!compiler_nameop(c
, __doc__
, Store
))
1188 for (; i
< asdl_seq_LEN(stmts
); i
++)
1189 VISIT(c
, stmt
, (stmt_ty
)asdl_seq_GET(stmts
, i
));
1193 static PyCodeObject
*
1194 compiler_mod(struct compiler
*c
, mod_ty mod
)
1198 static PyObject
*module
;
1200 module
= PyString_InternFromString("<module>");
1204 /* Use 0 for firstlineno initially, will fixup in assemble(). */
1205 if (!compiler_enter_scope(c
, module
, mod
, 0))
1207 switch (mod
->kind
) {
1209 if (!compiler_body(c
, mod
->v
.Module
.body
)) {
1210 compiler_exit_scope(c
);
1214 case Interactive_kind
:
1215 c
->c_interactive
= 1;
1216 VISIT_SEQ_IN_SCOPE(c
, stmt
,
1217 mod
->v
.Interactive
.body
);
1219 case Expression_kind
:
1220 VISIT_IN_SCOPE(c
, expr
, mod
->v
.Expression
.body
);
1224 PyErr_SetString(PyExc_SystemError
,
1225 "suite should not be possible");
1228 PyErr_Format(PyExc_SystemError
,
1229 "module kind %d should not be possible",
1233 co
= assemble(c
, addNone
);
1234 compiler_exit_scope(c
);
1238 /* The test for LOCAL must come before the test for FREE in order to
1239 handle classes where name is both local and free. The local var is
1240 a method and the free var is a free var referenced within a method.
1244 get_ref_type(struct compiler
*c
, PyObject
*name
)
1246 int scope
= PyST_GetScope(c
->u
->u_ste
, name
);
1249 PyOS_snprintf(buf
, sizeof(buf
),
1250 "unknown scope for %.100s in %.100s(%s) in %s\n"
1251 "symbols: %s\nlocals: %s\nglobals: %s",
1252 PyString_AS_STRING(name
),
1253 PyString_AS_STRING(c
->u
->u_name
),
1254 PyObject_REPR(c
->u
->u_ste
->ste_id
),
1256 PyObject_REPR(c
->u
->u_ste
->ste_symbols
),
1257 PyObject_REPR(c
->u
->u_varnames
),
1258 PyObject_REPR(c
->u
->u_names
)
1267 compiler_lookup_arg(PyObject
*dict
, PyObject
*name
)
1270 k
= PyTuple_Pack(2, name
, name
->ob_type
);
1273 v
= PyDict_GetItem(dict
, k
);
1277 return PyInt_AS_LONG(v
);
1281 compiler_make_closure(struct compiler
*c
, PyCodeObject
*co
, int args
)
1283 int i
, free
= PyCode_GetNumFree(co
);
1285 ADDOP_O(c
, LOAD_CONST
, (PyObject
*)co
, consts
);
1286 ADDOP_I(c
, MAKE_FUNCTION
, args
);
1289 for (i
= 0; i
< free
; ++i
) {
1290 /* Bypass com_addop_varname because it will generate
1291 LOAD_DEREF but LOAD_CLOSURE is needed.
1293 PyObject
*name
= PyTuple_GET_ITEM(co
->co_freevars
, i
);
1296 /* Special case: If a class contains a method with a
1297 free variable that has the same name as a method,
1298 the name will be considered free *and* local in the
1299 class. It should be handled by the closure, as
1300 well as by the normal name loookup logic.
1302 reftype
= get_ref_type(c
, name
);
1303 if (reftype
== CELL
)
1304 arg
= compiler_lookup_arg(c
->u
->u_cellvars
, name
);
1305 else /* (reftype == FREE) */
1306 arg
= compiler_lookup_arg(c
->u
->u_freevars
, name
);
1308 printf("lookup %s in %s %d %d\n"
1309 "freevars of %s: %s\n",
1310 PyObject_REPR(name
),
1311 PyString_AS_STRING(c
->u
->u_name
),
1313 PyString_AS_STRING(co
->co_name
),
1314 PyObject_REPR(co
->co_freevars
));
1315 Py_FatalError("compiler_make_closure()");
1317 ADDOP_I(c
, LOAD_CLOSURE
, arg
);
1319 ADDOP_I(c
, BUILD_TUPLE
, free
);
1320 ADDOP_O(c
, LOAD_CONST
, (PyObject
*)co
, consts
);
1321 ADDOP_I(c
, MAKE_CLOSURE
, args
);
1326 compiler_decorators(struct compiler
*c
, asdl_seq
* decos
)
1333 for (i
= 0; i
< asdl_seq_LEN(decos
); i
++) {
1334 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(decos
, i
));
1340 compiler_arguments(struct compiler
*c
, arguments_ty args
)
1343 int n
= asdl_seq_LEN(args
->args
);
1344 /* Correctly handle nested argument lists */
1345 for (i
= 0; i
< n
; i
++) {
1346 expr_ty arg
= (expr_ty
)asdl_seq_GET(args
->args
, i
);
1347 if (arg
->kind
== Tuple_kind
) {
1348 PyObject
*id
= PyString_FromFormat(".%d", i
);
1352 if (!compiler_nameop(c
, id
, Load
)) {
1357 VISIT(c
, expr
, arg
);
1364 compiler_function(struct compiler
*c
, stmt_ty s
)
1367 PyObject
*first_const
= Py_None
;
1368 arguments_ty args
= s
->v
.FunctionDef
.args
;
1369 asdl_seq
* decos
= s
->v
.FunctionDef
.decorator_list
;
1371 int i
, n
, docstring
;
1373 assert(s
->kind
== FunctionDef_kind
);
1375 if (!compiler_decorators(c
, decos
))
1378 VISIT_SEQ(c
, expr
, args
->defaults
);
1379 if (!compiler_enter_scope(c
, s
->v
.FunctionDef
.name
, (void *)s
,
1383 st
= (stmt_ty
)asdl_seq_GET(s
->v
.FunctionDef
.body
, 0);
1384 docstring
= compiler_isdocstring(st
);
1385 if (docstring
&& Py_OptimizeFlag
< 2)
1386 first_const
= st
->v
.Expr
.value
->v
.Str
.s
;
1387 if (compiler_add_o(c
, c
->u
->u_consts
, first_const
) < 0) {
1388 compiler_exit_scope(c
);
1392 /* unpack nested arguments */
1393 compiler_arguments(c
, args
);
1395 c
->u
->u_argcount
= asdl_seq_LEN(args
->args
);
1396 n
= asdl_seq_LEN(s
->v
.FunctionDef
.body
);
1397 /* if there was a docstring, we need to skip the first statement */
1398 for (i
= docstring
; i
< n
; i
++) {
1399 st
= (stmt_ty
)asdl_seq_GET(s
->v
.FunctionDef
.body
, i
);
1400 VISIT_IN_SCOPE(c
, stmt
, st
);
1402 co
= assemble(c
, 1);
1403 compiler_exit_scope(c
);
1407 compiler_make_closure(c
, co
, asdl_seq_LEN(args
->defaults
));
1410 for (i
= 0; i
< asdl_seq_LEN(decos
); i
++) {
1411 ADDOP_I(c
, CALL_FUNCTION
, 1);
1414 return compiler_nameop(c
, s
->v
.FunctionDef
.name
, Store
);
1418 compiler_class(struct compiler
*c
, stmt_ty s
)
1423 asdl_seq
* decos
= s
->v
.ClassDef
.decorator_list
;
1425 if (!compiler_decorators(c
, decos
))
1428 /* push class name on stack, needed by BUILD_CLASS */
1429 ADDOP_O(c
, LOAD_CONST
, s
->v
.ClassDef
.name
, consts
);
1430 /* push the tuple of base classes on the stack */
1431 n
= asdl_seq_LEN(s
->v
.ClassDef
.bases
);
1433 VISIT_SEQ(c
, expr
, s
->v
.ClassDef
.bases
);
1434 ADDOP_I(c
, BUILD_TUPLE
, n
);
1435 if (!compiler_enter_scope(c
, s
->v
.ClassDef
.name
, (void *)s
,
1438 Py_XDECREF(c
->u
->u_private
);
1439 c
->u
->u_private
= s
->v
.ClassDef
.name
;
1440 Py_INCREF(c
->u
->u_private
);
1441 str
= PyString_InternFromString("__name__");
1442 if (!str
|| !compiler_nameop(c
, str
, Load
)) {
1444 compiler_exit_scope(c
);
1449 str
= PyString_InternFromString("__module__");
1450 if (!str
|| !compiler_nameop(c
, str
, Store
)) {
1452 compiler_exit_scope(c
);
1457 if (!compiler_body(c
, s
->v
.ClassDef
.body
)) {
1458 compiler_exit_scope(c
);
1462 ADDOP_IN_SCOPE(c
, LOAD_LOCALS
);
1463 ADDOP_IN_SCOPE(c
, RETURN_VALUE
);
1464 co
= assemble(c
, 1);
1465 compiler_exit_scope(c
);
1469 compiler_make_closure(c
, co
, 0);
1472 ADDOP_I(c
, CALL_FUNCTION
, 0);
1473 ADDOP(c
, BUILD_CLASS
);
1474 /* apply decorators */
1475 for (i
= 0; i
< asdl_seq_LEN(decos
); i
++) {
1476 ADDOP_I(c
, CALL_FUNCTION
, 1);
1478 if (!compiler_nameop(c
, s
->v
.ClassDef
.name
, Store
))
1484 compiler_ifexp(struct compiler
*c
, expr_ty e
)
1486 basicblock
*end
, *next
;
1488 assert(e
->kind
== IfExp_kind
);
1489 end
= compiler_new_block(c
);
1492 next
= compiler_new_block(c
);
1495 VISIT(c
, expr
, e
->v
.IfExp
.test
);
1496 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, next
);
1497 VISIT(c
, expr
, e
->v
.IfExp
.body
);
1498 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
1499 compiler_use_next_block(c
, next
);
1500 VISIT(c
, expr
, e
->v
.IfExp
.orelse
);
1501 compiler_use_next_block(c
, end
);
1506 compiler_lambda(struct compiler
*c
, expr_ty e
)
1509 static identifier name
;
1510 arguments_ty args
= e
->v
.Lambda
.args
;
1511 assert(e
->kind
== Lambda_kind
);
1514 name
= PyString_InternFromString("<lambda>");
1520 VISIT_SEQ(c
, expr
, args
->defaults
);
1521 if (!compiler_enter_scope(c
, name
, (void *)e
, e
->lineno
))
1524 /* unpack nested arguments */
1525 compiler_arguments(c
, args
);
1527 c
->u
->u_argcount
= asdl_seq_LEN(args
->args
);
1528 VISIT_IN_SCOPE(c
, expr
, e
->v
.Lambda
.body
);
1529 if (c
->u
->u_ste
->ste_generator
) {
1530 ADDOP_IN_SCOPE(c
, POP_TOP
);
1533 ADDOP_IN_SCOPE(c
, RETURN_VALUE
);
1535 co
= assemble(c
, 1);
1536 compiler_exit_scope(c
);
1540 compiler_make_closure(c
, co
, asdl_seq_LEN(args
->defaults
));
1547 compiler_print(struct compiler
*c
, stmt_ty s
)
1552 assert(s
->kind
== Print_kind
);
1553 n
= asdl_seq_LEN(s
->v
.Print
.values
);
1555 if (s
->v
.Print
.dest
) {
1556 VISIT(c
, expr
, s
->v
.Print
.dest
);
1559 for (i
= 0; i
< n
; i
++) {
1560 expr_ty e
= (expr_ty
)asdl_seq_GET(s
->v
.Print
.values
, i
);
1565 ADDOP(c
, PRINT_ITEM_TO
);
1569 ADDOP(c
, PRINT_ITEM
);
1572 if (s
->v
.Print
.nl
) {
1574 ADDOP(c
, PRINT_NEWLINE_TO
)
1576 ADDOP(c
, PRINT_NEWLINE
)
1584 compiler_if(struct compiler
*c
, stmt_ty s
)
1586 basicblock
*end
, *next
;
1588 assert(s
->kind
== If_kind
);
1589 end
= compiler_new_block(c
);
1593 constant
= expr_constant(s
->v
.If
.test
);
1594 /* constant = 0: "if 0"
1595 * constant = 1: "if 1", "if 2", ...
1596 * constant = -1: rest */
1597 if (constant
== 0) {
1599 VISIT_SEQ(c
, stmt
, s
->v
.If
.orelse
);
1600 } else if (constant
== 1) {
1601 VISIT_SEQ(c
, stmt
, s
->v
.If
.body
);
1603 if (s
->v
.If
.orelse
) {
1604 next
= compiler_new_block(c
);
1610 VISIT(c
, expr
, s
->v
.If
.test
);
1611 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, next
);
1612 VISIT_SEQ(c
, stmt
, s
->v
.If
.body
);
1613 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
1614 if (s
->v
.If
.orelse
) {
1615 compiler_use_next_block(c
, next
);
1616 VISIT_SEQ(c
, stmt
, s
->v
.If
.orelse
);
1619 compiler_use_next_block(c
, end
);
1624 compiler_for(struct compiler
*c
, stmt_ty s
)
1626 basicblock
*start
, *cleanup
, *end
;
1628 start
= compiler_new_block(c
);
1629 cleanup
= compiler_new_block(c
);
1630 end
= compiler_new_block(c
);
1631 if (start
== NULL
|| end
== NULL
|| cleanup
== NULL
)
1633 ADDOP_JREL(c
, SETUP_LOOP
, end
);
1634 if (!compiler_push_fblock(c
, LOOP
, start
))
1636 VISIT(c
, expr
, s
->v
.For
.iter
);
1638 compiler_use_next_block(c
, start
);
1639 ADDOP_JREL(c
, FOR_ITER
, cleanup
);
1640 VISIT(c
, expr
, s
->v
.For
.target
);
1641 VISIT_SEQ(c
, stmt
, s
->v
.For
.body
);
1642 ADDOP_JABS(c
, JUMP_ABSOLUTE
, start
);
1643 compiler_use_next_block(c
, cleanup
);
1644 ADDOP(c
, POP_BLOCK
);
1645 compiler_pop_fblock(c
, LOOP
, start
);
1646 VISIT_SEQ(c
, stmt
, s
->v
.For
.orelse
);
1647 compiler_use_next_block(c
, end
);
1652 compiler_while(struct compiler
*c
, stmt_ty s
)
1654 basicblock
*loop
, *orelse
, *end
, *anchor
= NULL
;
1655 int constant
= expr_constant(s
->v
.While
.test
);
1657 if (constant
== 0) {
1658 if (s
->v
.While
.orelse
)
1659 VISIT_SEQ(c
, stmt
, s
->v
.While
.orelse
);
1662 loop
= compiler_new_block(c
);
1663 end
= compiler_new_block(c
);
1664 if (constant
== -1) {
1665 anchor
= compiler_new_block(c
);
1669 if (loop
== NULL
|| end
== NULL
)
1671 if (s
->v
.While
.orelse
) {
1672 orelse
= compiler_new_block(c
);
1679 ADDOP_JREL(c
, SETUP_LOOP
, end
);
1680 compiler_use_next_block(c
, loop
);
1681 if (!compiler_push_fblock(c
, LOOP
, loop
))
1683 if (constant
== -1) {
1684 VISIT(c
, expr
, s
->v
.While
.test
);
1685 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, anchor
);
1687 VISIT_SEQ(c
, stmt
, s
->v
.While
.body
);
1688 ADDOP_JABS(c
, JUMP_ABSOLUTE
, loop
);
1690 /* XXX should the two POP instructions be in a separate block
1691 if there is no else clause ?
1694 if (constant
== -1) {
1695 compiler_use_next_block(c
, anchor
);
1696 ADDOP(c
, POP_BLOCK
);
1698 compiler_pop_fblock(c
, LOOP
, loop
);
1699 if (orelse
!= NULL
) /* what if orelse is just pass? */
1700 VISIT_SEQ(c
, stmt
, s
->v
.While
.orelse
);
1701 compiler_use_next_block(c
, end
);
1707 compiler_continue(struct compiler
*c
)
1709 static const char LOOP_ERROR_MSG
[] = "'continue' not properly in loop";
1710 static const char IN_FINALLY_ERROR_MSG
[] =
1711 "'continue' not supported inside 'finally' clause";
1714 if (!c
->u
->u_nfblocks
)
1715 return compiler_error(c
, LOOP_ERROR_MSG
);
1716 i
= c
->u
->u_nfblocks
- 1;
1717 switch (c
->u
->u_fblock
[i
].fb_type
) {
1719 ADDOP_JABS(c
, JUMP_ABSOLUTE
, c
->u
->u_fblock
[i
].fb_block
);
1723 while (--i
>= 0 && c
->u
->u_fblock
[i
].fb_type
!= LOOP
) {
1724 /* Prevent continue anywhere under a finally
1725 even if hidden in a sub-try or except. */
1726 if (c
->u
->u_fblock
[i
].fb_type
== FINALLY_END
)
1727 return compiler_error(c
, IN_FINALLY_ERROR_MSG
);
1730 return compiler_error(c
, LOOP_ERROR_MSG
);
1731 ADDOP_JABS(c
, CONTINUE_LOOP
, c
->u
->u_fblock
[i
].fb_block
);
1734 return compiler_error(c
, IN_FINALLY_ERROR_MSG
);
1740 /* Code generated for "try: <body> finally: <finalbody>" is as follows:
1746 L: <code for finalbody>
1749 The special instructions use the block stack. Each block
1750 stack entry contains the instruction that created it (here
1751 SETUP_FINALLY), the level of the value stack at the time the
1752 block stack entry was created, and a label (here L).
1755 Pushes the current value stack level and the label
1756 onto the block stack.
1758 Pops en entry from the block stack, and pops the value
1759 stack until its level is the same as indicated on the
1760 block stack. (The label is ignored.)
1762 Pops a variable number of entries from the *value* stack
1763 and re-raises the exception they specify. The number of
1764 entries popped depends on the (pseudo) exception type.
1766 The block stack is unwound when an exception is raised:
1767 when a SETUP_FINALLY entry is found, the exception is pushed
1768 onto the value stack (and the exception condition is cleared),
1769 and the interpreter jumps to the label gotten from the block
1774 compiler_try_finally(struct compiler
*c
, stmt_ty s
)
1776 basicblock
*body
, *end
;
1777 body
= compiler_new_block(c
);
1778 end
= compiler_new_block(c
);
1779 if (body
== NULL
|| end
== NULL
)
1782 ADDOP_JREL(c
, SETUP_FINALLY
, end
);
1783 compiler_use_next_block(c
, body
);
1784 if (!compiler_push_fblock(c
, FINALLY_TRY
, body
))
1786 VISIT_SEQ(c
, stmt
, s
->v
.TryFinally
.body
);
1787 ADDOP(c
, POP_BLOCK
);
1788 compiler_pop_fblock(c
, FINALLY_TRY
, body
);
1790 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
1791 compiler_use_next_block(c
, end
);
1792 if (!compiler_push_fblock(c
, FINALLY_END
, end
))
1794 VISIT_SEQ(c
, stmt
, s
->v
.TryFinally
.finalbody
);
1795 ADDOP(c
, END_FINALLY
);
1796 compiler_pop_fblock(c
, FINALLY_END
, end
);
1802 Code generated for "try: S except E1, V1: S1 except E2, V2: S2 ...":
1803 (The contents of the value stack is shown in [], with the top
1804 at the right; 'tb' is trace-back info, 'val' the exception's
1805 associated value, and 'exc' the exception.)
1807 Value stack Label Instruction Argument
1813 [tb, val, exc] L1: DUP )
1814 [tb, val, exc, exc] <evaluate E1> )
1815 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1
1816 [tb, val, exc, 1-or-0] POP_JUMP_IF_FALSE L2 )
1818 [tb, val] <assign to V1> (or POP if no V1)
1823 [tb, val, exc] L2: DUP
1824 .............................etc.......................
1826 [tb, val, exc] Ln+1: END_FINALLY # re-raise exception
1828 [] L0: <next statement>
1830 Of course, parts are not generated if Vi or Ei is not present.
1833 compiler_try_except(struct compiler
*c
, stmt_ty s
)
1835 basicblock
*body
, *orelse
, *except
, *end
;
1838 body
= compiler_new_block(c
);
1839 except
= compiler_new_block(c
);
1840 orelse
= compiler_new_block(c
);
1841 end
= compiler_new_block(c
);
1842 if (body
== NULL
|| except
== NULL
|| orelse
== NULL
|| end
== NULL
)
1844 ADDOP_JREL(c
, SETUP_EXCEPT
, except
);
1845 compiler_use_next_block(c
, body
);
1846 if (!compiler_push_fblock(c
, EXCEPT
, body
))
1848 VISIT_SEQ(c
, stmt
, s
->v
.TryExcept
.body
);
1849 ADDOP(c
, POP_BLOCK
);
1850 compiler_pop_fblock(c
, EXCEPT
, body
);
1851 ADDOP_JREL(c
, JUMP_FORWARD
, orelse
);
1852 n
= asdl_seq_LEN(s
->v
.TryExcept
.handlers
);
1853 compiler_use_next_block(c
, except
);
1854 for (i
= 0; i
< n
; i
++) {
1855 excepthandler_ty handler
= (excepthandler_ty
)asdl_seq_GET(
1856 s
->v
.TryExcept
.handlers
, i
);
1857 if (!handler
->v
.ExceptHandler
.type
&& i
< n
-1)
1858 return compiler_error(c
, "default 'except:' must be last");
1859 c
->u
->u_lineno_set
= false;
1860 c
->u
->u_lineno
= handler
->lineno
;
1861 except
= compiler_new_block(c
);
1864 if (handler
->v
.ExceptHandler
.type
) {
1866 VISIT(c
, expr
, handler
->v
.ExceptHandler
.type
);
1867 ADDOP_I(c
, COMPARE_OP
, PyCmp_EXC_MATCH
);
1868 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, except
);
1871 if (handler
->v
.ExceptHandler
.name
) {
1872 VISIT(c
, expr
, handler
->v
.ExceptHandler
.name
);
1878 VISIT_SEQ(c
, stmt
, handler
->v
.ExceptHandler
.body
);
1879 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
1880 compiler_use_next_block(c
, except
);
1882 ADDOP(c
, END_FINALLY
);
1883 compiler_use_next_block(c
, orelse
);
1884 VISIT_SEQ(c
, stmt
, s
->v
.TryExcept
.orelse
);
1885 compiler_use_next_block(c
, end
);
1890 compiler_import_as(struct compiler
*c
, identifier name
, identifier asname
)
1892 /* The IMPORT_NAME opcode was already generated. This function
1893 merely needs to bind the result to a name.
1895 If there is a dot in name, we need to split it and emit a
1896 LOAD_ATTR for each name.
1898 const char *src
= PyString_AS_STRING(name
);
1899 const char *dot
= strchr(src
, '.');
1901 /* Consume the base module name to get the first attribute */
1904 /* NB src is only defined when dot != NULL */
1906 dot
= strchr(src
, '.');
1907 attr
= PyString_FromStringAndSize(src
,
1908 dot
? dot
- src
: strlen(src
));
1911 ADDOP_O(c
, LOAD_ATTR
, attr
, names
);
1916 return compiler_nameop(c
, asname
, Store
);
1920 compiler_import(struct compiler
*c
, stmt_ty s
)
1922 /* The Import node stores a module name like a.b.c as a single
1923 string. This is convenient for all cases except
1925 where we need to parse that string to extract the individual
1927 XXX Perhaps change the representation to make this case simpler?
1929 int i
, n
= asdl_seq_LEN(s
->v
.Import
.names
);
1931 for (i
= 0; i
< n
; i
++) {
1932 alias_ty alias
= (alias_ty
)asdl_seq_GET(s
->v
.Import
.names
, i
);
1936 if (c
->c_flags
&& (c
->c_flags
->cf_flags
& CO_FUTURE_ABSOLUTE_IMPORT
))
1937 level
= PyInt_FromLong(0);
1939 level
= PyInt_FromLong(-1);
1944 ADDOP_O(c
, LOAD_CONST
, level
, consts
);
1946 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
1947 ADDOP_NAME(c
, IMPORT_NAME
, alias
->name
, names
);
1949 if (alias
->asname
) {
1950 r
= compiler_import_as(c
, alias
->name
, alias
->asname
);
1955 identifier tmp
= alias
->name
;
1956 const char *base
= PyString_AS_STRING(alias
->name
);
1957 char *dot
= strchr(base
, '.');
1959 tmp
= PyString_FromStringAndSize(base
,
1961 r
= compiler_nameop(c
, tmp
, Store
);
1973 compiler_from_import(struct compiler
*c
, stmt_ty s
)
1975 int i
, n
= asdl_seq_LEN(s
->v
.ImportFrom
.names
);
1977 PyObject
*names
= PyTuple_New(n
);
1983 if (s
->v
.ImportFrom
.level
== 0 && c
->c_flags
&&
1984 !(c
->c_flags
->cf_flags
& CO_FUTURE_ABSOLUTE_IMPORT
))
1985 level
= PyInt_FromLong(-1);
1987 level
= PyInt_FromLong(s
->v
.ImportFrom
.level
);
1994 /* build up the names */
1995 for (i
= 0; i
< n
; i
++) {
1996 alias_ty alias
= (alias_ty
)asdl_seq_GET(s
->v
.ImportFrom
.names
, i
);
1997 Py_INCREF(alias
->name
);
1998 PyTuple_SET_ITEM(names
, i
, alias
->name
);
2001 if (s
->lineno
> c
->c_future
->ff_lineno
) {
2002 if (!strcmp(PyString_AS_STRING(s
->v
.ImportFrom
.module
),
2006 return compiler_error(c
,
2007 "from __future__ imports must occur "
2008 "at the beginning of the file");
2013 ADDOP_O(c
, LOAD_CONST
, level
, consts
);
2015 ADDOP_O(c
, LOAD_CONST
, names
, consts
);
2017 ADDOP_NAME(c
, IMPORT_NAME
, s
->v
.ImportFrom
.module
, names
);
2018 for (i
= 0; i
< n
; i
++) {
2019 alias_ty alias
= (alias_ty
)asdl_seq_GET(s
->v
.ImportFrom
.names
, i
);
2020 identifier store_name
;
2022 if (i
== 0 && *PyString_AS_STRING(alias
->name
) == '*') {
2024 ADDOP(c
, IMPORT_STAR
);
2028 ADDOP_NAME(c
, IMPORT_FROM
, alias
->name
, names
);
2029 store_name
= alias
->name
;
2031 store_name
= alias
->asname
;
2033 if (!compiler_nameop(c
, store_name
, Store
)) {
2038 /* remove imported module */
2044 compiler_assert(struct compiler
*c
, stmt_ty s
)
2046 static PyObject
*assertion_error
= NULL
;
2049 if (Py_OptimizeFlag
)
2051 if (assertion_error
== NULL
) {
2052 assertion_error
= PyString_InternFromString("AssertionError");
2053 if (assertion_error
== NULL
)
2056 if (s
->v
.Assert
.test
->kind
== Tuple_kind
&&
2057 asdl_seq_LEN(s
->v
.Assert
.test
->v
.Tuple
.elts
) > 0) {
2059 "assertion is always true, perhaps remove parentheses?";
2060 if (PyErr_WarnExplicit(PyExc_SyntaxWarning
, msg
, c
->c_filename
,
2061 c
->u
->u_lineno
, NULL
, NULL
) == -1)
2064 VISIT(c
, expr
, s
->v
.Assert
.test
);
2065 end
= compiler_new_block(c
);
2068 ADDOP_JABS(c
, POP_JUMP_IF_TRUE
, end
);
2069 ADDOP_O(c
, LOAD_GLOBAL
, assertion_error
, names
);
2070 if (s
->v
.Assert
.msg
) {
2071 VISIT(c
, expr
, s
->v
.Assert
.msg
);
2072 ADDOP_I(c
, RAISE_VARARGS
, 2);
2075 ADDOP_I(c
, RAISE_VARARGS
, 1);
2077 compiler_use_next_block(c
, end
);
2082 compiler_visit_stmt(struct compiler
*c
, stmt_ty s
)
2086 /* Always assign a lineno to the next instruction for a stmt. */
2087 c
->u
->u_lineno
= s
->lineno
;
2088 c
->u
->u_lineno_set
= false;
2091 case FunctionDef_kind
:
2092 return compiler_function(c
, s
);
2094 return compiler_class(c
, s
);
2096 if (c
->u
->u_ste
->ste_type
!= FunctionBlock
)
2097 return compiler_error(c
, "'return' outside function");
2098 if (s
->v
.Return
.value
) {
2099 VISIT(c
, expr
, s
->v
.Return
.value
);
2102 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2103 ADDOP(c
, RETURN_VALUE
);
2106 VISIT_SEQ(c
, expr
, s
->v
.Delete
.targets
)
2109 n
= asdl_seq_LEN(s
->v
.Assign
.targets
);
2110 VISIT(c
, expr
, s
->v
.Assign
.value
);
2111 for (i
= 0; i
< n
; i
++) {
2115 (expr_ty
)asdl_seq_GET(s
->v
.Assign
.targets
, i
));
2118 case AugAssign_kind
:
2119 return compiler_augassign(c
, s
);
2121 return compiler_print(c
, s
);
2123 return compiler_for(c
, s
);
2125 return compiler_while(c
, s
);
2127 return compiler_if(c
, s
);
2130 if (s
->v
.Raise
.type
) {
2131 VISIT(c
, expr
, s
->v
.Raise
.type
);
2133 if (s
->v
.Raise
.inst
) {
2134 VISIT(c
, expr
, s
->v
.Raise
.inst
);
2136 if (s
->v
.Raise
.tback
) {
2137 VISIT(c
, expr
, s
->v
.Raise
.tback
);
2142 ADDOP_I(c
, RAISE_VARARGS
, n
);
2144 case TryExcept_kind
:
2145 return compiler_try_except(c
, s
);
2146 case TryFinally_kind
:
2147 return compiler_try_finally(c
, s
);
2149 return compiler_assert(c
, s
);
2151 return compiler_import(c
, s
);
2152 case ImportFrom_kind
:
2153 return compiler_from_import(c
, s
);
2155 VISIT(c
, expr
, s
->v
.Exec
.body
);
2156 if (s
->v
.Exec
.globals
) {
2157 VISIT(c
, expr
, s
->v
.Exec
.globals
);
2158 if (s
->v
.Exec
.locals
) {
2159 VISIT(c
, expr
, s
->v
.Exec
.locals
);
2164 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2167 ADDOP(c
, EXEC_STMT
);
2172 if (c
->c_interactive
&& c
->c_nestlevel
<= 1) {
2173 VISIT(c
, expr
, s
->v
.Expr
.value
);
2174 ADDOP(c
, PRINT_EXPR
);
2176 else if (s
->v
.Expr
.value
->kind
!= Str_kind
&&
2177 s
->v
.Expr
.value
->kind
!= Num_kind
) {
2178 VISIT(c
, expr
, s
->v
.Expr
.value
);
2185 if (!compiler_in_loop(c
))
2186 return compiler_error(c
, "'break' outside loop");
2187 ADDOP(c
, BREAK_LOOP
);
2190 return compiler_continue(c
);
2192 return compiler_with(c
, s
);
2198 unaryop(unaryop_ty op
)
2202 return UNARY_INVERT
;
2206 return UNARY_POSITIVE
;
2208 return UNARY_NEGATIVE
;
2210 PyErr_Format(PyExc_SystemError
,
2211 "unary op %d should not be possible", op
);
2217 binop(struct compiler
*c
, operator_ty op
)
2223 return BINARY_SUBTRACT
;
2225 return BINARY_MULTIPLY
;
2227 if (c
->c_flags
&& c
->c_flags
->cf_flags
& CO_FUTURE_DIVISION
)
2228 return BINARY_TRUE_DIVIDE
;
2230 return BINARY_DIVIDE
;
2232 return BINARY_MODULO
;
2234 return BINARY_POWER
;
2236 return BINARY_LSHIFT
;
2238 return BINARY_RSHIFT
;
2246 return BINARY_FLOOR_DIVIDE
;
2248 PyErr_Format(PyExc_SystemError
,
2249 "binary op %d should not be possible", op
);
2273 return PyCmp_IS_NOT
;
2277 return PyCmp_NOT_IN
;
2284 inplace_binop(struct compiler
*c
, operator_ty op
)
2290 return INPLACE_SUBTRACT
;
2292 return INPLACE_MULTIPLY
;
2294 if (c
->c_flags
&& c
->c_flags
->cf_flags
& CO_FUTURE_DIVISION
)
2295 return INPLACE_TRUE_DIVIDE
;
2297 return INPLACE_DIVIDE
;
2299 return INPLACE_MODULO
;
2301 return INPLACE_POWER
;
2303 return INPLACE_LSHIFT
;
2305 return INPLACE_RSHIFT
;
2313 return INPLACE_FLOOR_DIVIDE
;
2315 PyErr_Format(PyExc_SystemError
,
2316 "inplace binary op %d should not be possible", op
);
2322 compiler_nameop(struct compiler
*c
, identifier name
, expr_context_ty ctx
)
2325 enum { OP_FAST
, OP_GLOBAL
, OP_DEREF
, OP_NAME
} optype
;
2327 PyObject
*dict
= c
->u
->u_names
;
2329 /* XXX AugStore isn't used anywhere! */
2331 mangled
= _Py_Mangle(c
->u
->u_private
, name
);
2337 scope
= PyST_GetScope(c
->u
->u_ste
, mangled
);
2340 dict
= c
->u
->u_freevars
;
2344 dict
= c
->u
->u_cellvars
;
2348 if (c
->u
->u_ste
->ste_type
== FunctionBlock
)
2351 case GLOBAL_IMPLICIT
:
2352 if (c
->u
->u_ste
->ste_type
== FunctionBlock
&&
2353 !c
->u
->u_ste
->ste_unoptimized
)
2356 case GLOBAL_EXPLICIT
:
2360 /* scope can be 0 */
2364 /* XXX Leave assert here, but handle __doc__ and the like better */
2365 assert(scope
|| PyString_AS_STRING(name
)[0] == '_');
2370 case Load
: op
= LOAD_DEREF
; break;
2371 case Store
: op
= STORE_DEREF
; break;
2376 PyErr_Format(PyExc_SyntaxError
,
2377 "can not delete variable '%s' referenced "
2379 PyString_AS_STRING(name
));
2384 PyErr_SetString(PyExc_SystemError
,
2385 "param invalid for deref variable");
2391 case Load
: op
= LOAD_FAST
; break;
2392 case Store
: op
= STORE_FAST
; break;
2393 case Del
: op
= DELETE_FAST
; break;
2399 PyErr_SetString(PyExc_SystemError
,
2400 "param invalid for local variable");
2403 ADDOP_O(c
, op
, mangled
, varnames
);
2408 case Load
: op
= LOAD_GLOBAL
; break;
2409 case Store
: op
= STORE_GLOBAL
; break;
2410 case Del
: op
= DELETE_GLOBAL
; break;
2416 PyErr_SetString(PyExc_SystemError
,
2417 "param invalid for global variable");
2423 case Load
: op
= LOAD_NAME
; break;
2424 case Store
: op
= STORE_NAME
; break;
2425 case Del
: op
= DELETE_NAME
; break;
2431 PyErr_SetString(PyExc_SystemError
,
2432 "param invalid for name variable");
2439 arg
= compiler_add_o(c
, dict
, mangled
);
2443 return compiler_addop_i(c
, op
, arg
);
2447 compiler_boolop(struct compiler
*c
, expr_ty e
)
2453 assert(e
->kind
== BoolOp_kind
);
2454 if (e
->v
.BoolOp
.op
== And
)
2455 jumpi
= JUMP_IF_FALSE_OR_POP
;
2457 jumpi
= JUMP_IF_TRUE_OR_POP
;
2458 end
= compiler_new_block(c
);
2461 s
= e
->v
.BoolOp
.values
;
2462 n
= asdl_seq_LEN(s
) - 1;
2464 for (i
= 0; i
< n
; ++i
) {
2465 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(s
, i
));
2466 ADDOP_JABS(c
, jumpi
, end
);
2468 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(s
, n
));
2469 compiler_use_next_block(c
, end
);
2474 compiler_list(struct compiler
*c
, expr_ty e
)
2476 int n
= asdl_seq_LEN(e
->v
.List
.elts
);
2477 if (e
->v
.List
.ctx
== Store
) {
2478 ADDOP_I(c
, UNPACK_SEQUENCE
, n
);
2480 VISIT_SEQ(c
, expr
, e
->v
.List
.elts
);
2481 if (e
->v
.List
.ctx
== Load
) {
2482 ADDOP_I(c
, BUILD_LIST
, n
);
2488 compiler_tuple(struct compiler
*c
, expr_ty e
)
2490 int n
= asdl_seq_LEN(e
->v
.Tuple
.elts
);
2491 if (e
->v
.Tuple
.ctx
== Store
) {
2492 ADDOP_I(c
, UNPACK_SEQUENCE
, n
);
2494 VISIT_SEQ(c
, expr
, e
->v
.Tuple
.elts
);
2495 if (e
->v
.Tuple
.ctx
== Load
) {
2496 ADDOP_I(c
, BUILD_TUPLE
, n
);
2502 compiler_compare(struct compiler
*c
, expr_ty e
)
2505 basicblock
*cleanup
= NULL
;
2507 /* XXX the logic can be cleaned up for 1 or multiple comparisons */
2508 VISIT(c
, expr
, e
->v
.Compare
.left
);
2509 n
= asdl_seq_LEN(e
->v
.Compare
.ops
);
2512 cleanup
= compiler_new_block(c
);
2513 if (cleanup
== NULL
)
2516 (expr_ty
)asdl_seq_GET(e
->v
.Compare
.comparators
, 0));
2518 for (i
= 1; i
< n
; i
++) {
2520 ADDOP(c
, ROT_THREE
);
2521 ADDOP_I(c
, COMPARE_OP
,
2522 cmpop((cmpop_ty
)(asdl_seq_GET(
2523 e
->v
.Compare
.ops
, i
- 1))));
2524 ADDOP_JABS(c
, JUMP_IF_FALSE_OR_POP
, cleanup
);
2528 (expr_ty
)asdl_seq_GET(e
->v
.Compare
.comparators
, i
));
2530 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(e
->v
.Compare
.comparators
, n
- 1));
2531 ADDOP_I(c
, COMPARE_OP
,
2532 cmpop((cmpop_ty
)(asdl_seq_GET(e
->v
.Compare
.ops
, n
- 1))));
2534 basicblock
*end
= compiler_new_block(c
);
2537 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
2538 compiler_use_next_block(c
, cleanup
);
2541 compiler_use_next_block(c
, end
);
2547 compiler_call(struct compiler
*c
, expr_ty e
)
2551 VISIT(c
, expr
, e
->v
.Call
.func
);
2552 n
= asdl_seq_LEN(e
->v
.Call
.args
);
2553 VISIT_SEQ(c
, expr
, e
->v
.Call
.args
);
2554 if (e
->v
.Call
.keywords
) {
2555 VISIT_SEQ(c
, keyword
, e
->v
.Call
.keywords
);
2556 n
|= asdl_seq_LEN(e
->v
.Call
.keywords
) << 8;
2558 if (e
->v
.Call
.starargs
) {
2559 VISIT(c
, expr
, e
->v
.Call
.starargs
);
2562 if (e
->v
.Call
.kwargs
) {
2563 VISIT(c
, expr
, e
->v
.Call
.kwargs
);
2568 ADDOP_I(c
, CALL_FUNCTION
, n
);
2571 ADDOP_I(c
, CALL_FUNCTION_VAR
, n
);
2574 ADDOP_I(c
, CALL_FUNCTION_KW
, n
);
2577 ADDOP_I(c
, CALL_FUNCTION_VAR_KW
, n
);
2584 compiler_listcomp_generator(struct compiler
*c
, asdl_seq
*generators
,
2585 int gen_index
, expr_ty elt
)
2587 /* generate code for the iterator, then each of the ifs,
2588 and then write to the element */
2591 basicblock
*start
, *anchor
, *skip
, *if_cleanup
;
2594 start
= compiler_new_block(c
);
2595 skip
= compiler_new_block(c
);
2596 if_cleanup
= compiler_new_block(c
);
2597 anchor
= compiler_new_block(c
);
2599 if (start
== NULL
|| skip
== NULL
|| if_cleanup
== NULL
||
2603 l
= (comprehension_ty
)asdl_seq_GET(generators
, gen_index
);
2604 VISIT(c
, expr
, l
->iter
);
2606 compiler_use_next_block(c
, start
);
2607 ADDOP_JREL(c
, FOR_ITER
, anchor
);
2609 VISIT(c
, expr
, l
->target
);
2611 /* XXX this needs to be cleaned up...a lot! */
2612 n
= asdl_seq_LEN(l
->ifs
);
2613 for (i
= 0; i
< n
; i
++) {
2614 expr_ty e
= (expr_ty
)asdl_seq_GET(l
->ifs
, i
);
2616 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, if_cleanup
);
2620 if (++gen_index
< asdl_seq_LEN(generators
))
2621 if (!compiler_listcomp_generator(c
, generators
, gen_index
, elt
))
2624 /* only append after the last for generator */
2625 if (gen_index
>= asdl_seq_LEN(generators
)) {
2626 VISIT(c
, expr
, elt
);
2627 ADDOP_I(c
, LIST_APPEND
, gen_index
+1);
2629 compiler_use_next_block(c
, skip
);
2631 compiler_use_next_block(c
, if_cleanup
);
2632 ADDOP_JABS(c
, JUMP_ABSOLUTE
, start
);
2633 compiler_use_next_block(c
, anchor
);
2639 compiler_listcomp(struct compiler
*c
, expr_ty e
)
2641 assert(e
->kind
== ListComp_kind
);
2642 ADDOP_I(c
, BUILD_LIST
, 0);
2643 return compiler_listcomp_generator(c
, e
->v
.ListComp
.generators
, 0,
2648 compiler_genexp_generator(struct compiler
*c
,
2649 asdl_seq
*generators
, int gen_index
,
2652 /* generate code for the iterator, then each of the ifs,
2653 and then write to the element */
2655 comprehension_ty ge
;
2656 basicblock
*start
, *anchor
, *skip
, *if_cleanup
, *end
;
2659 start
= compiler_new_block(c
);
2660 skip
= compiler_new_block(c
);
2661 if_cleanup
= compiler_new_block(c
);
2662 anchor
= compiler_new_block(c
);
2663 end
= compiler_new_block(c
);
2665 if (start
== NULL
|| skip
== NULL
|| if_cleanup
== NULL
||
2666 anchor
== NULL
|| end
== NULL
)
2669 ge
= (comprehension_ty
)asdl_seq_GET(generators
, gen_index
);
2670 ADDOP_JREL(c
, SETUP_LOOP
, end
);
2671 if (!compiler_push_fblock(c
, LOOP
, start
))
2674 if (gen_index
== 0) {
2675 /* Receive outermost iter as an implicit argument */
2676 c
->u
->u_argcount
= 1;
2677 ADDOP_I(c
, LOAD_FAST
, 0);
2680 /* Sub-iter - calculate on the fly */
2681 VISIT(c
, expr
, ge
->iter
);
2684 compiler_use_next_block(c
, start
);
2685 ADDOP_JREL(c
, FOR_ITER
, anchor
);
2687 VISIT(c
, expr
, ge
->target
);
2689 /* XXX this needs to be cleaned up...a lot! */
2690 n
= asdl_seq_LEN(ge
->ifs
);
2691 for (i
= 0; i
< n
; i
++) {
2692 expr_ty e
= (expr_ty
)asdl_seq_GET(ge
->ifs
, i
);
2694 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, if_cleanup
);
2698 if (++gen_index
< asdl_seq_LEN(generators
))
2699 if (!compiler_genexp_generator(c
, generators
, gen_index
, elt
))
2702 /* only append after the last 'for' generator */
2703 if (gen_index
>= asdl_seq_LEN(generators
)) {
2704 VISIT(c
, expr
, elt
);
2705 ADDOP(c
, YIELD_VALUE
);
2708 compiler_use_next_block(c
, skip
);
2710 compiler_use_next_block(c
, if_cleanup
);
2711 ADDOP_JABS(c
, JUMP_ABSOLUTE
, start
);
2712 compiler_use_next_block(c
, anchor
);
2713 ADDOP(c
, POP_BLOCK
);
2714 compiler_pop_fblock(c
, LOOP
, start
);
2715 compiler_use_next_block(c
, end
);
2721 compiler_genexp(struct compiler
*c
, expr_ty e
)
2723 static identifier name
;
2725 expr_ty outermost_iter
= ((comprehension_ty
)
2726 (asdl_seq_GET(e
->v
.GeneratorExp
.generators
,
2730 name
= PyString_FromString("<genexpr>");
2735 if (!compiler_enter_scope(c
, name
, (void *)e
, e
->lineno
))
2737 compiler_genexp_generator(c
, e
->v
.GeneratorExp
.generators
, 0,
2738 e
->v
.GeneratorExp
.elt
);
2739 co
= assemble(c
, 1);
2740 compiler_exit_scope(c
);
2744 compiler_make_closure(c
, co
, 0);
2747 VISIT(c
, expr
, outermost_iter
);
2749 ADDOP_I(c
, CALL_FUNCTION
, 1);
2755 compiler_visit_keyword(struct compiler
*c
, keyword_ty k
)
2757 ADDOP_O(c
, LOAD_CONST
, k
->arg
, consts
);
2758 VISIT(c
, expr
, k
->value
);
2762 /* Test whether expression is constant. For constants, report
2763 whether they are true or false.
2765 Return values: 1 for true, 0 for false, -1 for non-constant.
2769 expr_constant(expr_ty e
)
2773 return PyObject_IsTrue(e
->v
.Num
.n
);
2775 return PyObject_IsTrue(e
->v
.Str
.s
);
2777 /* __debug__ is not assignable, so we can optimize
2778 * it away in if and while statements */
2779 if (strcmp(PyString_AS_STRING(e
->v
.Name
.id
),
2781 return ! Py_OptimizeFlag
;
2789 Implements the with statement from PEP 343.
2791 The semantics outlined in that PEP are as follows:
2796 It is implemented roughly as:
2799 exit = context.__exit__ # not calling it
2800 value = context.__enter__()
2802 VAR = value # if VAR present in the syntax
2805 if an exception was raised:
2806 exc = copy of (exception, instance, traceback)
2808 exc = (None, None, None)
2812 compiler_with(struct compiler
*c
, stmt_ty s
)
2814 basicblock
*block
, *finally
;
2816 assert(s
->kind
== With_kind
);
2818 block
= compiler_new_block(c
);
2819 finally
= compiler_new_block(c
);
2820 if (!block
|| !finally
)
2824 VISIT(c
, expr
, s
->v
.With
.context_expr
);
2825 ADDOP_JREL(c
, SETUP_WITH
, finally
);
2827 /* SETUP_WITH pushes a finally block. */
2828 compiler_use_next_block(c
, block
);
2829 if (!compiler_push_fblock(c
, FINALLY_TRY
, block
)) {
2833 if (s
->v
.With
.optional_vars
) {
2834 VISIT(c
, expr
, s
->v
.With
.optional_vars
);
2837 /* Discard result from context.__enter__() */
2842 VISIT_SEQ(c
, stmt
, s
->v
.With
.body
);
2844 /* End of try block; start the finally block */
2845 ADDOP(c
, POP_BLOCK
);
2846 compiler_pop_fblock(c
, FINALLY_TRY
, block
);
2848 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2849 compiler_use_next_block(c
, finally
);
2850 if (!compiler_push_fblock(c
, FINALLY_END
, finally
))
2853 /* Finally block starts; context.__exit__ is on the stack under
2854 the exception or return information. Just issue our magic
2856 ADDOP(c
, WITH_CLEANUP
);
2858 /* Finally block ends. */
2859 ADDOP(c
, END_FINALLY
);
2860 compiler_pop_fblock(c
, FINALLY_END
, finally
);
2865 compiler_visit_expr(struct compiler
*c
, expr_ty e
)
2869 /* If expr e has a different line number than the last expr/stmt,
2870 set a new line number for the next instruction.
2872 if (e
->lineno
> c
->u
->u_lineno
) {
2873 c
->u
->u_lineno
= e
->lineno
;
2874 c
->u
->u_lineno_set
= false;
2878 return compiler_boolop(c
, e
);
2880 VISIT(c
, expr
, e
->v
.BinOp
.left
);
2881 VISIT(c
, expr
, e
->v
.BinOp
.right
);
2882 ADDOP(c
, binop(c
, e
->v
.BinOp
.op
));
2885 VISIT(c
, expr
, e
->v
.UnaryOp
.operand
);
2886 ADDOP(c
, unaryop(e
->v
.UnaryOp
.op
));
2889 return compiler_lambda(c
, e
);
2891 return compiler_ifexp(c
, e
);
2893 n
= asdl_seq_LEN(e
->v
.Dict
.values
);
2894 ADDOP_I(c
, BUILD_MAP
, (n
>0xFFFF ? 0xFFFF : n
));
2895 for (i
= 0; i
< n
; i
++) {
2897 (expr_ty
)asdl_seq_GET(e
->v
.Dict
.values
, i
));
2899 (expr_ty
)asdl_seq_GET(e
->v
.Dict
.keys
, i
));
2900 ADDOP(c
, STORE_MAP
);
2904 return compiler_listcomp(c
, e
);
2905 case GeneratorExp_kind
:
2906 return compiler_genexp(c
, e
);
2908 if (c
->u
->u_ste
->ste_type
!= FunctionBlock
)
2909 return compiler_error(c
, "'yield' outside function");
2910 if (e
->v
.Yield
.value
) {
2911 VISIT(c
, expr
, e
->v
.Yield
.value
);
2914 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2916 ADDOP(c
, YIELD_VALUE
);
2919 return compiler_compare(c
, e
);
2921 return compiler_call(c
, e
);
2923 VISIT(c
, expr
, e
->v
.Repr
.value
);
2924 ADDOP(c
, UNARY_CONVERT
);
2927 ADDOP_O(c
, LOAD_CONST
, e
->v
.Num
.n
, consts
);
2930 ADDOP_O(c
, LOAD_CONST
, e
->v
.Str
.s
, consts
);
2932 /* The following exprs can be assignment targets. */
2933 case Attribute_kind
:
2934 if (e
->v
.Attribute
.ctx
!= AugStore
)
2935 VISIT(c
, expr
, e
->v
.Attribute
.value
);
2936 switch (e
->v
.Attribute
.ctx
) {
2939 /* Fall through to load */
2941 ADDOP_NAME(c
, LOAD_ATTR
, e
->v
.Attribute
.attr
, names
);
2945 /* Fall through to save */
2947 ADDOP_NAME(c
, STORE_ATTR
, e
->v
.Attribute
.attr
, names
);
2950 ADDOP_NAME(c
, DELETE_ATTR
, e
->v
.Attribute
.attr
, names
);
2954 PyErr_SetString(PyExc_SystemError
,
2955 "param invalid in attribute expression");
2959 case Subscript_kind
:
2960 switch (e
->v
.Subscript
.ctx
) {
2962 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2963 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, AugLoad
);
2966 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2967 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, Load
);
2970 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, AugStore
);
2973 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2974 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, Store
);
2977 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2978 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, Del
);
2982 PyErr_SetString(PyExc_SystemError
,
2983 "param invalid in subscript expression");
2988 return compiler_nameop(c
, e
->v
.Name
.id
, e
->v
.Name
.ctx
);
2989 /* child nodes of List and Tuple will have expr_context set */
2991 return compiler_list(c
, e
);
2993 return compiler_tuple(c
, e
);
2999 compiler_augassign(struct compiler
*c
, stmt_ty s
)
3001 expr_ty e
= s
->v
.AugAssign
.target
;
3004 assert(s
->kind
== AugAssign_kind
);
3007 case Attribute_kind
:
3008 auge
= Attribute(e
->v
.Attribute
.value
, e
->v
.Attribute
.attr
,
3009 AugLoad
, e
->lineno
, e
->col_offset
, c
->c_arena
);
3012 VISIT(c
, expr
, auge
);
3013 VISIT(c
, expr
, s
->v
.AugAssign
.value
);
3014 ADDOP(c
, inplace_binop(c
, s
->v
.AugAssign
.op
));
3015 auge
->v
.Attribute
.ctx
= AugStore
;
3016 VISIT(c
, expr
, auge
);
3018 case Subscript_kind
:
3019 auge
= Subscript(e
->v
.Subscript
.value
, e
->v
.Subscript
.slice
,
3020 AugLoad
, e
->lineno
, e
->col_offset
, c
->c_arena
);
3023 VISIT(c
, expr
, auge
);
3024 VISIT(c
, expr
, s
->v
.AugAssign
.value
);
3025 ADDOP(c
, inplace_binop(c
, s
->v
.AugAssign
.op
));
3026 auge
->v
.Subscript
.ctx
= AugStore
;
3027 VISIT(c
, expr
, auge
);
3030 if (!compiler_nameop(c
, e
->v
.Name
.id
, Load
))
3032 VISIT(c
, expr
, s
->v
.AugAssign
.value
);
3033 ADDOP(c
, inplace_binop(c
, s
->v
.AugAssign
.op
));
3034 return compiler_nameop(c
, e
->v
.Name
.id
, Store
);
3036 PyErr_Format(PyExc_SystemError
,
3037 "invalid node type (%d) for augmented assignment",
3045 compiler_push_fblock(struct compiler
*c
, enum fblocktype t
, basicblock
*b
)
3047 struct fblockinfo
*f
;
3048 if (c
->u
->u_nfblocks
>= CO_MAXBLOCKS
) {
3049 PyErr_SetString(PyExc_SystemError
,
3050 "too many statically nested blocks");
3053 f
= &c
->u
->u_fblock
[c
->u
->u_nfblocks
++];
3060 compiler_pop_fblock(struct compiler
*c
, enum fblocktype t
, basicblock
*b
)
3062 struct compiler_unit
*u
= c
->u
;
3063 assert(u
->u_nfblocks
> 0);
3065 assert(u
->u_fblock
[u
->u_nfblocks
].fb_type
== t
);
3066 assert(u
->u_fblock
[u
->u_nfblocks
].fb_block
== b
);
3070 compiler_in_loop(struct compiler
*c
) {
3072 struct compiler_unit
*u
= c
->u
;
3073 for (i
= 0; i
< u
->u_nfblocks
; ++i
) {
3074 if (u
->u_fblock
[i
].fb_type
== LOOP
)
3079 /* Raises a SyntaxError and returns 0.
3080 If something goes wrong, a different exception may be raised.
3084 compiler_error(struct compiler
*c
, const char *errstr
)
3087 PyObject
*u
= NULL
, *v
= NULL
;
3089 loc
= PyErr_ProgramText(c
->c_filename
, c
->u
->u_lineno
);
3094 u
= Py_BuildValue("(ziOO)", c
->c_filename
, c
->u
->u_lineno
,
3098 v
= Py_BuildValue("(zO)", errstr
, u
);
3101 PyErr_SetObject(PyExc_SyntaxError
, v
);
3110 compiler_handle_subscr(struct compiler
*c
, const char *kind
,
3111 expr_context_ty ctx
)
3115 /* XXX this code is duplicated */
3117 case AugLoad
: /* fall through to Load */
3118 case Load
: op
= BINARY_SUBSCR
; break;
3119 case AugStore
:/* fall through to Store */
3120 case Store
: op
= STORE_SUBSCR
; break;
3121 case Del
: op
= DELETE_SUBSCR
; break;
3123 PyErr_Format(PyExc_SystemError
,
3124 "invalid %s kind %d in subscript\n",
3128 if (ctx
== AugLoad
) {
3129 ADDOP_I(c
, DUP_TOPX
, 2);
3131 else if (ctx
== AugStore
) {
3132 ADDOP(c
, ROT_THREE
);
3139 compiler_slice(struct compiler
*c
, slice_ty s
, expr_context_ty ctx
)
3142 assert(s
->kind
== Slice_kind
);
3144 /* only handles the cases where BUILD_SLICE is emitted */
3145 if (s
->v
.Slice
.lower
) {
3146 VISIT(c
, expr
, s
->v
.Slice
.lower
);
3149 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
3152 if (s
->v
.Slice
.upper
) {
3153 VISIT(c
, expr
, s
->v
.Slice
.upper
);
3156 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
3159 if (s
->v
.Slice
.step
) {
3161 VISIT(c
, expr
, s
->v
.Slice
.step
);
3163 ADDOP_I(c
, BUILD_SLICE
, n
);
3168 compiler_simple_slice(struct compiler
*c
, slice_ty s
, expr_context_ty ctx
)
3170 int op
= 0, slice_offset
= 0, stack_count
= 0;
3172 assert(s
->v
.Slice
.step
== NULL
);
3173 if (s
->v
.Slice
.lower
) {
3176 if (ctx
!= AugStore
)
3177 VISIT(c
, expr
, s
->v
.Slice
.lower
);
3179 if (s
->v
.Slice
.upper
) {
3182 if (ctx
!= AugStore
)
3183 VISIT(c
, expr
, s
->v
.Slice
.upper
);
3186 if (ctx
== AugLoad
) {
3187 switch (stack_count
) {
3188 case 0: ADDOP(c
, DUP_TOP
); break;
3189 case 1: ADDOP_I(c
, DUP_TOPX
, 2); break;
3190 case 2: ADDOP_I(c
, DUP_TOPX
, 3); break;
3193 else if (ctx
== AugStore
) {
3194 switch (stack_count
) {
3195 case 0: ADDOP(c
, ROT_TWO
); break;
3196 case 1: ADDOP(c
, ROT_THREE
); break;
3197 case 2: ADDOP(c
, ROT_FOUR
); break;
3202 case AugLoad
: /* fall through to Load */
3203 case Load
: op
= SLICE
; break;
3204 case AugStore
:/* fall through to Store */
3205 case Store
: op
= STORE_SLICE
; break;
3206 case Del
: op
= DELETE_SLICE
; break;
3209 PyErr_SetString(PyExc_SystemError
,
3210 "param invalid in simple slice");
3214 ADDOP(c
, op
+ slice_offset
);
3219 compiler_visit_nested_slice(struct compiler
*c
, slice_ty s
,
3220 expr_context_ty ctx
)
3224 ADDOP_O(c
, LOAD_CONST
, Py_Ellipsis
, consts
);
3227 return compiler_slice(c
, s
, ctx
);
3229 VISIT(c
, expr
, s
->v
.Index
.value
);
3233 PyErr_SetString(PyExc_SystemError
,
3234 "extended slice invalid in nested slice");
3241 compiler_visit_slice(struct compiler
*c
, slice_ty s
, expr_context_ty ctx
)
3243 char * kindname
= NULL
;
3247 if (ctx
!= AugStore
) {
3248 VISIT(c
, expr
, s
->v
.Index
.value
);
3252 kindname
= "ellipsis";
3253 if (ctx
!= AugStore
) {
3254 ADDOP_O(c
, LOAD_CONST
, Py_Ellipsis
, consts
);
3259 if (!s
->v
.Slice
.step
)
3260 return compiler_simple_slice(c
, s
, ctx
);
3261 if (ctx
!= AugStore
) {
3262 if (!compiler_slice(c
, s
, ctx
))
3267 kindname
= "extended slice";
3268 if (ctx
!= AugStore
) {
3269 int i
, n
= asdl_seq_LEN(s
->v
.ExtSlice
.dims
);
3270 for (i
= 0; i
< n
; i
++) {
3271 slice_ty sub
= (slice_ty
)asdl_seq_GET(
3272 s
->v
.ExtSlice
.dims
, i
);
3273 if (!compiler_visit_nested_slice(c
, sub
, ctx
))
3276 ADDOP_I(c
, BUILD_TUPLE
, n
);
3280 PyErr_Format(PyExc_SystemError
,
3281 "invalid subscript kind %d", s
->kind
);
3284 return compiler_handle_subscr(c
, kindname
, ctx
);
3288 /* End of the compiler section, beginning of the assembler section */
3290 /* do depth-first search of basic block graph, starting with block.
3291 post records the block indices in post-order.
3293 XXX must handle implicit jumps from one block to next
3297 PyObject
*a_bytecode
; /* string containing bytecode */
3298 int a_offset
; /* offset into bytecode */
3299 int a_nblocks
; /* number of reachable blocks */
3300 basicblock
**a_postorder
; /* list of blocks in dfs postorder */
3301 PyObject
*a_lnotab
; /* string containing lnotab */
3302 int a_lnotab_off
; /* offset into lnotab */
3303 int a_lineno
; /* last lineno of emitted instruction */
3304 int a_lineno_off
; /* bytecode offset of last lineno */
/* NOTE(review): damaged extraction -- the leading integers (3308, 3311, ...)
   are stale source line numbers, not code, and the gap 3311 -> 3316 shows
   that lines 3312-3315 are elided here (presumably the b_seen visited-node
   guard, per the b_seen field comment on basicblock_ -- TODO confirm against
   a clean copy of the file). */
/* dfs: depth-first walk of the basic-block graph.  Recurses first into the
   fall-through successor (b_next), then into every jump target found among
   the block's instructions (i_jrel/i_jabs), and finally appends this block
   to a->a_postorder, so the array ends up in DFS post-order; a_nblocks
   counts the entries appended so far. */
3308 dfs(struct compiler
*c
, basicblock
*b
, struct assembler
*a
)
3311 struct instr
*instr
= NULL
;
3316 if (b
->b_next
!= NULL
)
3317 dfs(c
, b
->b_next
, a
);
3318 for (i
= 0; i
< b
->b_iused
; i
++) {
3319 instr
= &b
->b_instr
[i
];
3320 if (instr
->i_jrel
|| instr
->i_jabs
)
3321 dfs(c
, instr
->i_target
, a
);
3323 a
->a_postorder
[a
->a_nblocks
++] = b
;
3327 stackdepth_walk(struct compiler
*c
, basicblock
*b
, int depth
, int maxdepth
)
3330 struct instr
*instr
;
3331 if (b
->b_seen
|| b
->b_startdepth
>= depth
)
3334 b
->b_startdepth
= depth
;
3335 for (i
= 0; i
< b
->b_iused
; i
++) {
3336 instr
= &b
->b_instr
[i
];
3337 depth
+= opcode_stack_effect(instr
->i_opcode
, instr
->i_oparg
);
3338 if (depth
> maxdepth
)
3340 assert(depth
>= 0); /* invalid code or bug in stackdepth() */
3341 if (instr
->i_jrel
|| instr
->i_jabs
) {
3342 maxdepth
= stackdepth_walk(c
, instr
->i_target
,
3344 if (instr
->i_opcode
== JUMP_ABSOLUTE
||
3345 instr
->i_opcode
== JUMP_FORWARD
) {
3346 goto out
; /* remaining code is dead */
3351 maxdepth
= stackdepth_walk(c
, b
->b_next
, depth
, maxdepth
);
3357 /* Find the flow path that needs the largest stack. We assume that
3358 * cycles in the flow graph have no net effect on the stack depth.
3361 stackdepth(struct compiler
*c
)
3363 basicblock
*b
, *entryblock
;
3365 for (b
= c
->u
->u_blocks
; b
!= NULL
; b
= b
->b_list
) {
3367 b
->b_startdepth
= INT_MIN
;
3372 return stackdepth_walk(c
, entryblock
, 0, 0);
3376 assemble_init(struct assembler
*a
, int nblocks
, int firstlineno
)
3378 memset(a
, 0, sizeof(struct assembler
));
3379 a
->a_lineno
= firstlineno
;
3380 a
->a_bytecode
= PyString_FromStringAndSize(NULL
, DEFAULT_CODE_SIZE
);
3383 a
->a_lnotab
= PyString_FromStringAndSize(NULL
, DEFAULT_LNOTAB_SIZE
);
3386 if (nblocks
> PY_SIZE_MAX
/ sizeof(basicblock
*)) {
3390 a
->a_postorder
= (basicblock
**)PyObject_Malloc(
3391 sizeof(basicblock
*) * nblocks
);
3392 if (!a
->a_postorder
) {
/* NOTE(review): damaged extraction -- leading integers are stale source line
   numbers; the function's braces and elided line 3404 (presumably a guard or
   blank line before the PyObject_Free) are missing from this view -- TODO
   confirm against a clean copy. */
/* assemble_free: releases the resources owned by a struct assembler -- the
   bytecode and lnotab string objects via Py_XDECREF (safe on NULL) and the
   postorder block array via PyObject_Free. */
3400 assemble_free(struct assembler
*a
)
3402 Py_XDECREF(a
->a_bytecode
);
3403 Py_XDECREF(a
->a_lnotab
);
3405 PyObject_Free(a
->a_postorder
);
/* NOTE(review): damaged extraction -- leading integers are stale source line
   numbers; the function's return type and braces (elided lines 3409-3412)
   are missing from this view. */
/* instrsize: returns the encoded size in bytes of a single instruction:
   1 for an argument-less opcode, 3 for opcode + 2-byte oparg, or 6 when the
   oparg exceeds 0xffff and needs the EXTENDED_ARG prefix form. */
3408 /* Return the size of a basic block in bytes. */
3411 instrsize(struct instr
*instr
)
3413 if (!instr
->i_hasarg
)
3414 return 1; /* 1 byte for the opcode*/
3415 if (instr
->i_oparg
> 0xffff)
3416 return 6; /* 1 (opcode) + 1 (EXTENDED_ARG opcode) + 2 (oparg) + 2(oparg extended) */
3417 return 3; /* 1 (opcode) + 2 (oparg) */
/* NOTE(review): damaged extraction -- leading integers are stale source line
   numbers; the declarations of i and size (elided lines 3422-3425) and the
   final return are missing from this view -- TODO confirm against a clean
   copy. */
/* blocksize: sums instrsize() over every instruction in block b, yielding
   the block's total encoded size in bytes. */
3421 blocksize(basicblock
*b
)
3426 for (i
= 0; i
< b
->b_iused
; i
++)
3427 size
+= instrsize(&b
->b_instr
[i
]);
3431 /* Appends a pair to the end of the line number table, a_lnotab, representing
3432 the instruction's bytecode offset and line number. See
3433 Objects/lnotab_notes.txt for the description of the line number table. */
3436 assemble_lnotab(struct assembler
*a
, struct instr
*i
)
3438 int d_bytecode
, d_lineno
;
3440 unsigned char *lnotab
;
3442 d_bytecode
= a
->a_offset
- a
->a_lineno_off
;
3443 d_lineno
= i
->i_lineno
- a
->a_lineno
;
3445 assert(d_bytecode
>= 0);
3446 assert(d_lineno
>= 0);
3448 if(d_bytecode
== 0 && d_lineno
== 0)
3451 if (d_bytecode
> 255) {
3452 int j
, nbytes
, ncodes
= d_bytecode
/ 255;
3453 nbytes
= a
->a_lnotab_off
+ 2 * ncodes
;
3454 len
= PyString_GET_SIZE(a
->a_lnotab
);
3455 if (nbytes
>= len
) {
3456 if ((len
<= INT_MAX
/ 2) && (len
* 2 < nbytes
))
3458 else if (len
<= INT_MAX
/ 2)
3464 if (_PyString_Resize(&a
->a_lnotab
, len
) < 0)
3467 lnotab
= (unsigned char *)
3468 PyString_AS_STRING(a
->a_lnotab
) + a
->a_lnotab_off
;
3469 for (j
= 0; j
< ncodes
; j
++) {
3473 d_bytecode
-= ncodes
* 255;
3474 a
->a_lnotab_off
+= ncodes
* 2;
3476 assert(d_bytecode
<= 255);
3477 if (d_lineno
> 255) {
3478 int j
, nbytes
, ncodes
= d_lineno
/ 255;
3479 nbytes
= a
->a_lnotab_off
+ 2 * ncodes
;
3480 len
= PyString_GET_SIZE(a
->a_lnotab
);
3481 if (nbytes
>= len
) {
3482 if ((len
<= INT_MAX
/ 2) && len
* 2 < nbytes
)
3484 else if (len
<= INT_MAX
/ 2)
3490 if (_PyString_Resize(&a
->a_lnotab
, len
) < 0)
3493 lnotab
= (unsigned char *)
3494 PyString_AS_STRING(a
->a_lnotab
) + a
->a_lnotab_off
;
3495 *lnotab
++ = d_bytecode
;
3498 for (j
= 1; j
< ncodes
; j
++) {
3502 d_lineno
-= ncodes
* 255;
3503 a
->a_lnotab_off
+= ncodes
* 2;
3506 len
= PyString_GET_SIZE(a
->a_lnotab
);
3507 if (a
->a_lnotab_off
+ 2 >= len
) {
3508 if (_PyString_Resize(&a
->a_lnotab
, len
* 2) < 0)
3511 lnotab
= (unsigned char *)
3512 PyString_AS_STRING(a
->a_lnotab
) + a
->a_lnotab_off
;
3514 a
->a_lnotab_off
+= 2;
3516 *lnotab
++ = d_bytecode
;
3517 *lnotab
++ = d_lineno
;
3519 else { /* First line of a block; def stmt, etc. */
3521 *lnotab
++ = d_lineno
;
3523 a
->a_lineno
= i
->i_lineno
;
3524 a
->a_lineno_off
= a
->a_offset
;
3529 Extend the bytecode with a new instruction.
3530 Update lnotab if necessary.
3534 assemble_emit(struct assembler
*a
, struct instr
*i
)
3536 int size
, arg
= 0, ext
= 0;
3537 Py_ssize_t len
= PyString_GET_SIZE(a
->a_bytecode
);
3540 size
= instrsize(i
);
3545 if (i
->i_lineno
&& !assemble_lnotab(a
, i
))
3547 if (a
->a_offset
+ size
>= len
) {
3548 if (len
> PY_SSIZE_T_MAX
/ 2)
3550 if (_PyString_Resize(&a
->a_bytecode
, len
* 2) < 0)
3553 code
= PyString_AS_STRING(a
->a_bytecode
) + a
->a_offset
;
3554 a
->a_offset
+= size
;
3556 assert(i
->i_hasarg
);
3557 *code
++ = (char)EXTENDED_ARG
;
3558 *code
++ = ext
& 0xff;
3562 *code
++ = i
->i_opcode
;
3564 assert(size
== 3 || size
== 6);
3565 *code
++ = arg
& 0xff;
3572 assemble_jump_offsets(struct assembler
*a
, struct compiler
*c
)
3575 int bsize
, totsize
, extended_arg_count
, last_extended_arg_count
= 0;
3578 /* Compute the size of each block and fixup jump args.
3579 Replace block pointer with position in bytecode. */
3582 for (i
= a
->a_nblocks
- 1; i
>= 0; i
--) {
3583 b
= a
->a_postorder
[i
];
3584 bsize
= blocksize(b
);
3585 b
->b_offset
= totsize
;
3588 extended_arg_count
= 0;
3589 for (b
= c
->u
->u_blocks
; b
!= NULL
; b
= b
->b_list
) {
3590 bsize
= b
->b_offset
;
3591 for (i
= 0; i
< b
->b_iused
; i
++) {
3592 struct instr
*instr
= &b
->b_instr
[i
];
3593 /* Relative jumps are computed relative to
3594 the instruction pointer after fetching
3595 the jump instruction.
3597 bsize
+= instrsize(instr
);
3599 instr
->i_oparg
= instr
->i_target
->b_offset
;
3600 else if (instr
->i_jrel
) {
3601 int delta
= instr
->i_target
->b_offset
- bsize
;
3602 instr
->i_oparg
= delta
;
3606 if (instr
->i_oparg
> 0xffff)
3607 extended_arg_count
++;
3611 /* XXX: This is an awful hack that could hurt performance, but
3612 on the bright side it should work until we come up
3613 with a better solution.
3615 In the meantime, should the goto be dropped in favor
3618 The issue is that in the first loop blocksize() is called
3619 which calls instrsize() which requires i_oparg be set
3620 appropriately. There is a bootstrap problem because
3621 i_oparg is calculated in the second loop above.
3623 So we loop until we stop seeing new EXTENDED_ARGs.
3624 The only EXTENDED_ARGs that could be popping up are
3625 ones in jump instructions. So this should converge
3628 if (last_extended_arg_count
!= extended_arg_count
) {
3629 last_extended_arg_count
= extended_arg_count
;
3635 dict_keys_inorder(PyObject
*dict
, int offset
)
3637 PyObject
*tuple
, *k
, *v
;
3638 Py_ssize_t i
, pos
= 0, size
= PyDict_Size(dict
);
3640 tuple
= PyTuple_New(size
);
3643 while (PyDict_Next(dict
, &pos
, &k
, &v
)) {
3644 i
= PyInt_AS_LONG(v
);
3645 /* The keys of the dictionary are tuples. (see compiler_add_o)
3646 The object we want is always first, though. */
3647 k
= PyTuple_GET_ITEM(k
, 0);
3649 assert((i
- offset
) < size
);
3650 assert((i
- offset
) >= 0);
3651 PyTuple_SET_ITEM(tuple
, i
- offset
, k
);
3657 compute_code_flags(struct compiler
*c
)
3659 PySTEntryObject
*ste
= c
->u
->u_ste
;
3661 if (ste
->ste_type
!= ModuleBlock
)
3662 flags
|= CO_NEWLOCALS
;
3663 if (ste
->ste_type
== FunctionBlock
) {
3664 if (!ste
->ste_unoptimized
)
3665 flags
|= CO_OPTIMIZED
;
3666 if (ste
->ste_nested
)
3668 if (ste
->ste_generator
)
3669 flags
|= CO_GENERATOR
;
3670 if (ste
->ste_varargs
)
3671 flags
|= CO_VARARGS
;
3672 if (ste
->ste_varkeywords
)
3673 flags
|= CO_VARKEYWORDS
;
3676 /* (Only) inherit compilerflags in PyCF_MASK */
3677 flags
|= (c
->c_flags
->cf_flags
& PyCF_MASK
);
3679 n
= PyDict_Size(c
->u
->u_freevars
);
3683 n
= PyDict_Size(c
->u
->u_cellvars
);
3694 static PyCodeObject
*
3695 makecode(struct compiler
*c
, struct assembler
*a
)
3698 PyCodeObject
*co
= NULL
;
3699 PyObject
*consts
= NULL
;
3700 PyObject
*names
= NULL
;
3701 PyObject
*varnames
= NULL
;
3702 PyObject
*filename
= NULL
;
3703 PyObject
*name
= NULL
;
3704 PyObject
*freevars
= NULL
;
3705 PyObject
*cellvars
= NULL
;
3706 PyObject
*bytecode
= NULL
;
3709 tmp
= dict_keys_inorder(c
->u
->u_consts
, 0);
3712 consts
= PySequence_List(tmp
); /* optimize_code requires a list */
3715 names
= dict_keys_inorder(c
->u
->u_names
, 0);
3716 varnames
= dict_keys_inorder(c
->u
->u_varnames
, 0);
3717 if (!consts
|| !names
|| !varnames
)
3720 cellvars
= dict_keys_inorder(c
->u
->u_cellvars
, 0);
3723 freevars
= dict_keys_inorder(c
->u
->u_freevars
, PyTuple_Size(cellvars
));
3726 filename
= PyString_FromString(c
->c_filename
);
3730 nlocals
= PyDict_Size(c
->u
->u_varnames
);
3731 flags
= compute_code_flags(c
);
3735 bytecode
= PyCode_Optimize(a
->a_bytecode
, consts
, names
, a
->a_lnotab
);
3739 tmp
= PyList_AsTuple(consts
); /* PyCode_New requires a tuple */
3745 co
= PyCode_New(c
->u
->u_argcount
, nlocals
, stackdepth(c
), flags
,
3746 bytecode
, consts
, names
, varnames
,
3748 filename
, c
->u
->u_name
,
3749 c
->u
->u_firstlineno
,
3754 Py_XDECREF(varnames
);
3755 Py_XDECREF(filename
);
3757 Py_XDECREF(freevars
);
3758 Py_XDECREF(cellvars
);
3759 Py_XDECREF(bytecode
);
3764 /* For debugging purposes only */
3767 dump_instr(const struct instr
*i
)
3769 const char *jrel
= i
->i_jrel
? "jrel " : "";
3770 const char *jabs
= i
->i_jabs
? "jabs " : "";
3775 sprintf(arg
, "arg: %d ", i
->i_oparg
);
3777 fprintf(stderr
, "line: %d, opcode: %d %s%s%s\n",
3778 i
->i_lineno
, i
->i_opcode
, arg
, jabs
, jrel
);
3782 dump_basicblock(const basicblock
*b
)
3784 const char *seen
= b
->b_seen
? "seen " : "";
3785 const char *b_return
= b
->b_return
? "return " : "";
3786 fprintf(stderr
, "used: %d, depth: %d, offset: %d %s%s\n",
3787 b
->b_iused
, b
->b_startdepth
, b
->b_offset
, seen
, b_return
);
3790 for (i
= 0; i
< b
->b_iused
; i
++) {
3791 fprintf(stderr
, " [%02d] ", i
);
3792 dump_instr(b
->b_instr
+ i
);
3798 static PyCodeObject
*
3799 assemble(struct compiler
*c
, int addNone
)
3801 basicblock
*b
, *entryblock
;
3804 PyCodeObject
*co
= NULL
;
3806 /* Make sure every block that falls off the end returns None.
3807 XXX NEXT_BLOCK() isn't quite right, because if the last
3808 block ends with a jump or return b_next shouldn't set.
3810 if (!c
->u
->u_curblock
->b_return
) {
3813 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
3814 ADDOP(c
, RETURN_VALUE
);
3819 for (b
= c
->u
->u_blocks
; b
!= NULL
; b
= b
->b_list
) {
3824 /* Set firstlineno if it wasn't explicitly set. */
3825 if (!c
->u
->u_firstlineno
) {
3826 if (entryblock
&& entryblock
->b_instr
)
3827 c
->u
->u_firstlineno
= entryblock
->b_instr
->i_lineno
;
3829 c
->u
->u_firstlineno
= 1;
3831 if (!assemble_init(&a
, nblocks
, c
->u
->u_firstlineno
))
3833 dfs(c
, entryblock
, &a
);
3835 /* Can't modify the bytecode after computing jump offsets. */
3836 assemble_jump_offsets(&a
, c
);
3838 /* Emit code in reverse postorder from dfs. */
3839 for (i
= a
.a_nblocks
- 1; i
>= 0; i
--) {
3840 b
= a
.a_postorder
[i
];
3841 for (j
= 0; j
< b
->b_iused
; j
++)
3842 if (!assemble_emit(&a
, &b
->b_instr
[j
]))
3846 if (_PyString_Resize(&a
.a_lnotab
, a
.a_lnotab_off
) < 0)
3848 if (_PyString_Resize(&a
.a_bytecode
, a
.a_offset
) < 0)
3851 co
= makecode(c
, &a
);