2 * This file compiles an abstract syntax tree (AST) into Python bytecode.
4 * The primary entry point is PyAST_Compile(), which returns a
5 * PyCodeObject. The compiler makes several passes to build the code
7 * 1. Checks for future statements. See future.c
8 * 2. Builds a symbol table. See symtable.c.
9 * 3. Generate code for basic blocks. See compiler_mod() in this file.
10 * 4. Assemble the basic blocks into final code. See assemble() in
12 * 5. Optimize the byte code (peephole optimizations). See peephole.c
14 * Note that compiler_mod() suggests module, but the module ast type
15 * (mod_ty) has cases for expressions and interactive statements.
17 * CAUTION: The VISIT_* macros abort the current function when they
18 * encounter a problem. So don't invoke them when there is memory
19 * which needs to be released. Code blocks are OK, as the compiler
20 * structure takes care of releasing those. Use the arena to manage
26 #include "Python-ast.h"
/* Global optimization level set by the -O command-line flag
   (0 = off, 1 = -O, 2 = -OO: also strips docstrings). */
int Py_OptimizeFlag = 0;

/* Initial sizes for the growable arrays used during code generation. */
#define DEFAULT_BLOCK_SIZE 16
#define DEFAULT_BLOCKS 8
#define DEFAULT_CODE_SIZE 128
#define DEFAULT_LNOTAB_SIZE 16
/* One bytecode instruction inside a basic block.
   NOTE(review): some bit-field members were not visible in this chunk;
   i_jabs/i_jrel/i_oparg/i_lineno are referenced by code later in the
   file (compiler_addop_j, compiler_set_lineno) — confirm against the
   full source. */
struct instr {
    unsigned i_jabs : 1;        /* argument is an absolute jump target */
    unsigned i_jrel : 1;        /* argument is a relative jump target */
    unsigned i_hasarg : 1;      /* opcode takes an argument */
    unsigned char i_opcode;
    int i_oparg;
    struct basicblock_ *i_target; /* target block (if jump instruction) */
    int i_lineno;
};
typedef struct basicblock_ {
    /* Each basicblock in a compilation unit is linked via b_list in the
       reverse order that the blocks are allocated.  b_list points to the
       next block, not to be confused with b_next, which is next by
       control flow. */
    struct basicblock_ *b_list;
    /* number of instructions used */
    int b_iused;
    /* length of instruction array (b_instr) */
    int b_ialloc;
    /* pointer to an array of instructions, initially NULL */
    struct instr *b_instr;
    /* If b_next is non-NULL, it is a pointer to the next
       block reached by normal control flow. */
    struct basicblock_ *b_next;
    /* b_seen is used to perform a DFS of basicblocks. */
    unsigned b_seen : 1;
    /* b_return is true if a RETURN_VALUE opcode is inserted. */
    unsigned b_return : 1;
    /* depth of stack upon entry of block, computed by stackdepth() */
    int b_startdepth;
    /* instruction offset for block, computed by assemble_jump_offsets() */
    int b_offset;
} basicblock;
76 /* fblockinfo tracks the current frame block.
78 A frame block is used to handle loops, try/except, and try/finally.
79 It's called a frame block to distinguish it from a basic block in the
83 enum fblocktype
{ LOOP
, EXCEPT
, FINALLY_TRY
, FINALLY_END
};
86 enum fblocktype fb_type
;
90 /* The following items change on entry and exit of code blocks.
91 They must be saved and restored when returning to a block.
93 struct compiler_unit
{
94 PySTEntryObject
*u_ste
;
97 /* The following fields are dicts that map objects to
98 the index of them in co_XXX. The index is used as
99 the argument for opcodes that refer to those collections.
101 PyObject
*u_consts
; /* all constants */
102 PyObject
*u_names
; /* all names */
103 PyObject
*u_varnames
; /* local variables */
104 PyObject
*u_cellvars
; /* cell variables */
105 PyObject
*u_freevars
; /* free variables */
107 PyObject
*u_private
; /* for private name mangling */
109 int u_argcount
; /* number of arguments for block */
110 /* Pointer to the most recently allocated block. By following b_list
111 members, you can reach all early allocated blocks. */
112 basicblock
*u_blocks
;
113 basicblock
*u_curblock
; /* pointer to current block */
116 struct fblockinfo u_fblock
[CO_MAXBLOCKS
];
118 int u_firstlineno
; /* the first lineno of the block */
119 int u_lineno
; /* the lineno for the current stmt */
120 bool u_lineno_set
; /* boolean to indicate whether instr
121 has been generated with current lineno */
124 /* This struct captures the global state of a compilation.
126 The u pointer points to the current compilation unit, while units
127 for enclosing blocks are stored in c_stack. The u and c_stack are
128 managed by compiler_enter_scope() and compiler_exit_scope().
132 const char *c_filename
;
133 struct symtable
*c_st
;
134 PyFutureFeatures
*c_future
; /* pointer to module's __future__ */
135 PyCompilerFlags
*c_flags
;
137 int c_interactive
; /* true if in interactive mode */
140 struct compiler_unit
*u
; /* compiler state for current block */
141 PyObject
*c_stack
; /* Python list holding compiler_unit ptrs */
142 PyArena
*c_arena
; /* pointer to memory allocation arena */
145 static int compiler_enter_scope(struct compiler
*, identifier
, void *, int);
146 static void compiler_free(struct compiler
*);
147 static basicblock
*compiler_new_block(struct compiler
*);
148 static int compiler_next_instr(struct compiler
*, basicblock
*);
149 static int compiler_addop(struct compiler
*, int);
150 static int compiler_addop_o(struct compiler
*, int, PyObject
*, PyObject
*);
151 static int compiler_addop_i(struct compiler
*, int, int);
152 static int compiler_addop_j(struct compiler
*, int, basicblock
*, int);
153 static basicblock
*compiler_use_new_block(struct compiler
*);
154 static int compiler_error(struct compiler
*, const char *);
155 static int compiler_nameop(struct compiler
*, identifier
, expr_context_ty
);
157 static PyCodeObject
*compiler_mod(struct compiler
*, mod_ty
);
158 static int compiler_visit_stmt(struct compiler
*, stmt_ty
);
159 static int compiler_visit_keyword(struct compiler
*, keyword_ty
);
160 static int compiler_visit_expr(struct compiler
*, expr_ty
);
161 static int compiler_augassign(struct compiler
*, stmt_ty
);
162 static int compiler_visit_slice(struct compiler
*, slice_ty
,
165 static int compiler_push_fblock(struct compiler
*, enum fblocktype
,
167 static void compiler_pop_fblock(struct compiler
*, enum fblocktype
,
169 /* Returns true if there is a loop on the fblock stack. */
170 static int compiler_in_loop(struct compiler
*);
172 static int inplace_binop(struct compiler
*, operator_ty
);
173 static int expr_constant(expr_ty e
);
175 static int compiler_with(struct compiler
*, stmt_ty
);
177 static PyCodeObject
*assemble(struct compiler
*, int addNone
);
178 static PyObject
*__doc__
;
181 _Py_Mangle(PyObject
*privateobj
, PyObject
*ident
)
183 /* Name mangling: __private becomes _classname__private.
184 This is independent from how the name is used. */
185 const char *p
, *name
= PyString_AsString(ident
);
188 if (privateobj
== NULL
|| !PyString_Check(privateobj
) ||
189 name
== NULL
|| name
[0] != '_' || name
[1] != '_') {
193 p
= PyString_AsString(privateobj
);
195 /* Don't mangle __id__ or names with dots.
197 The only time a name with a dot can occur is when
198 we are compiling an import statement that has a
201 TODO(jhylton): Decide whether we want to support
202 mangling of the module name, e.g. __M.X.
204 if ((name
[nlen
-1] == '_' && name
[nlen
-2] == '_')
205 || strchr(name
, '.')) {
207 return ident
; /* Don't mangle __whatever__ */
209 /* Strip leading underscores from class name */
214 return ident
; /* Don't mangle if class is just underscores */
218 assert(1 <= PY_SSIZE_T_MAX
- nlen
);
219 assert(1 + nlen
<= PY_SSIZE_T_MAX
- plen
);
221 ident
= PyString_FromStringAndSize(NULL
, 1 + nlen
+ plen
);
224 /* ident = "_" + p[:plen] + name # i.e. 1+plen+nlen bytes */
225 buffer
= PyString_AS_STRING(ident
);
227 strncpy(buffer
+1, p
, plen
);
228 strcpy(buffer
+1+plen
, name
);
233 compiler_init(struct compiler
*c
)
235 memset(c
, 0, sizeof(struct compiler
));
237 c
->c_stack
= PyList_New(0);
245 PyAST_Compile(mod_ty mod
, const char *filename
, PyCompilerFlags
*flags
,
249 PyCodeObject
*co
= NULL
;
250 PyCompilerFlags local_flags
;
254 __doc__
= PyString_InternFromString("__doc__");
259 if (!compiler_init(&c
))
261 c
.c_filename
= filename
;
263 c
.c_future
= PyFuture_FromAST(mod
, filename
);
264 if (c
.c_future
== NULL
)
267 local_flags
.cf_flags
= 0;
268 flags
= &local_flags
;
270 merged
= c
.c_future
->ff_features
| flags
->cf_flags
;
271 c
.c_future
->ff_features
= merged
;
272 flags
->cf_flags
= merged
;
276 c
.c_st
= PySymtable_Build(mod
, filename
, c
.c_future
);
277 if (c
.c_st
== NULL
) {
278 if (!PyErr_Occurred())
279 PyErr_SetString(PyExc_SystemError
, "no symtable");
283 co
= compiler_mod(&c
, mod
);
287 assert(co
|| PyErr_Occurred());
292 PyNode_Compile(struct _node
*n
, const char *filename
)
294 PyCodeObject
*co
= NULL
;
296 PyArena
*arena
= PyArena_New();
299 mod
= PyAST_FromNode(n
, NULL
, filename
, arena
);
301 co
= PyAST_Compile(mod
, filename
, NULL
, arena
);
307 compiler_free(struct compiler
*c
)
310 PySymtable_Free(c
->c_st
);
312 PyObject_Free(c
->c_future
);
313 Py_DECREF(c
->c_stack
);
317 list2dict(PyObject
*list
)
321 PyObject
*dict
= PyDict_New();
322 if (!dict
) return NULL
;
324 n
= PyList_Size(list
);
325 for (i
= 0; i
< n
; i
++) {
326 v
= PyInt_FromLong(i
);
331 k
= PyList_GET_ITEM(list
, i
);
332 k
= PyTuple_Pack(2, k
, k
->ob_type
);
333 if (k
== NULL
|| PyDict_SetItem(dict
, k
, v
) < 0) {
345 /* Return new dict containing names from src that match scope(s).
347 src is a symbol table dictionary. If the scope of a name matches
348 either scope_type or flag is set, insert it into the new dict. The
349 values are integers, starting at offset and increasing by one for
354 dictbytype(PyObject
*src
, int scope_type
, int flag
, int offset
)
356 Py_ssize_t pos
= 0, i
= offset
, scope
;
357 PyObject
*k
, *v
, *dest
= PyDict_New();
363 while (PyDict_Next(src
, &pos
, &k
, &v
)) {
364 /* XXX this should probably be a macro in symtable.h */
365 assert(PyInt_Check(v
));
366 scope
= (PyInt_AS_LONG(v
) >> SCOPE_OFF
) & SCOPE_MASK
;
368 if (scope
== scope_type
|| PyInt_AS_LONG(v
) & flag
) {
369 PyObject
*tuple
, *item
= PyInt_FromLong(i
);
375 tuple
= PyTuple_Pack(2, k
, k
->ob_type
);
376 if (!tuple
|| PyDict_SetItem(dest
, tuple
, item
) < 0) {
390 compiler_unit_check(struct compiler_unit
*u
)
393 for (block
= u
->u_blocks
; block
!= NULL
; block
= block
->b_list
) {
394 assert((void *)block
!= (void *)0xcbcbcbcb);
395 assert((void *)block
!= (void *)0xfbfbfbfb);
396 assert((void *)block
!= (void *)0xdbdbdbdb);
397 if (block
->b_instr
!= NULL
) {
398 assert(block
->b_ialloc
> 0);
399 assert(block
->b_iused
> 0);
400 assert(block
->b_ialloc
>= block
->b_iused
);
403 assert (block
->b_iused
== 0);
404 assert (block
->b_ialloc
== 0);
410 compiler_unit_free(struct compiler_unit
*u
)
412 basicblock
*b
, *next
;
414 compiler_unit_check(u
);
418 PyObject_Free((void *)b
->b_instr
);
420 PyObject_Free((void *)b
);
425 Py_CLEAR(u
->u_consts
);
426 Py_CLEAR(u
->u_names
);
427 Py_CLEAR(u
->u_varnames
);
428 Py_CLEAR(u
->u_freevars
);
429 Py_CLEAR(u
->u_cellvars
);
430 Py_CLEAR(u
->u_private
);
435 compiler_enter_scope(struct compiler
*c
, identifier name
, void *key
,
438 struct compiler_unit
*u
;
440 u
= (struct compiler_unit
*)PyObject_Malloc(sizeof(
441 struct compiler_unit
));
446 memset(u
, 0, sizeof(struct compiler_unit
));
448 u
->u_ste
= PySymtable_Lookup(c
->c_st
, key
);
450 compiler_unit_free(u
);
455 u
->u_varnames
= list2dict(u
->u_ste
->ste_varnames
);
456 u
->u_cellvars
= dictbytype(u
->u_ste
->ste_symbols
, CELL
, 0, 0);
457 if (!u
->u_varnames
|| !u
->u_cellvars
) {
458 compiler_unit_free(u
);
462 u
->u_freevars
= dictbytype(u
->u_ste
->ste_symbols
, FREE
, DEF_FREE_CLASS
,
463 PyDict_Size(u
->u_cellvars
));
464 if (!u
->u_freevars
) {
465 compiler_unit_free(u
);
471 u
->u_firstlineno
= lineno
;
473 u
->u_lineno_set
= false;
474 u
->u_consts
= PyDict_New();
476 compiler_unit_free(u
);
479 u
->u_names
= PyDict_New();
481 compiler_unit_free(u
);
487 /* Push the old compiler_unit on the stack. */
489 PyObject
*wrapper
= PyCObject_FromVoidPtr(c
->u
, NULL
);
490 if (!wrapper
|| PyList_Append(c
->c_stack
, wrapper
) < 0) {
492 compiler_unit_free(u
);
496 u
->u_private
= c
->u
->u_private
;
497 Py_XINCREF(u
->u_private
);
502 if (compiler_use_new_block(c
) == NULL
)
509 compiler_exit_scope(struct compiler
*c
)
515 compiler_unit_free(c
->u
);
516 /* Restore c->u to the parent unit. */
517 n
= PyList_GET_SIZE(c
->c_stack
) - 1;
519 wrapper
= PyList_GET_ITEM(c
->c_stack
, n
);
520 c
->u
= (struct compiler_unit
*)PyCObject_AsVoidPtr(wrapper
);
522 /* we are deleting from a list so this really shouldn't fail */
523 if (PySequence_DelItem(c
->c_stack
, n
) < 0)
524 Py_FatalError("compiler_exit_scope()");
525 compiler_unit_check(c
->u
);
532 /* Allocate a new block and return a pointer to it.
533 Returns NULL on error.
537 compiler_new_block(struct compiler
*c
)
540 struct compiler_unit
*u
;
543 b
= (basicblock
*)PyObject_Malloc(sizeof(basicblock
));
548 memset((void *)b
, 0, sizeof(basicblock
));
549 /* Extend the singly linked list of blocks with new block. */
550 b
->b_list
= u
->u_blocks
;
556 compiler_use_new_block(struct compiler
*c
)
558 basicblock
*block
= compiler_new_block(c
);
561 c
->u
->u_curblock
= block
;
566 compiler_next_block(struct compiler
*c
)
568 basicblock
*block
= compiler_new_block(c
);
571 c
->u
->u_curblock
->b_next
= block
;
572 c
->u
->u_curblock
= block
;
577 compiler_use_next_block(struct compiler
*c
, basicblock
*block
)
579 assert(block
!= NULL
);
580 c
->u
->u_curblock
->b_next
= block
;
581 c
->u
->u_curblock
= block
;
585 /* Returns the offset of the next instruction in the current block's
586 b_instr array. Resizes the b_instr as necessary.
587 Returns -1 on failure.
591 compiler_next_instr(struct compiler
*c
, basicblock
*b
)
594 if (b
->b_instr
== NULL
) {
595 b
->b_instr
= (struct instr
*)PyObject_Malloc(
596 sizeof(struct instr
) * DEFAULT_BLOCK_SIZE
);
597 if (b
->b_instr
== NULL
) {
601 b
->b_ialloc
= DEFAULT_BLOCK_SIZE
;
602 memset((char *)b
->b_instr
, 0,
603 sizeof(struct instr
) * DEFAULT_BLOCK_SIZE
);
605 else if (b
->b_iused
== b
->b_ialloc
) {
607 size_t oldsize
, newsize
;
608 oldsize
= b
->b_ialloc
* sizeof(struct instr
);
609 newsize
= oldsize
<< 1;
611 if (oldsize
> (PY_SIZE_MAX
>> 1)) {
621 tmp
= (struct instr
*)PyObject_Realloc(
622 (void *)b
->b_instr
, newsize
);
628 memset((char *)b
->b_instr
+ oldsize
, 0, newsize
- oldsize
);
633 /* Set the i_lineno member of the instruction at offset off if the
634 line number for the current expression/statement has not
635 already been set. If it has been set, the call has no effect.
637 The line number is reset in the following cases:
638 - when entering a new scope
640 - on each expression that start a new line
641 - before the "except" clause
642 - before the "for" and "while" expressions
646 compiler_set_lineno(struct compiler
*c
, int off
)
649 if (c
->u
->u_lineno_set
)
651 c
->u
->u_lineno_set
= true;
652 b
= c
->u
->u_curblock
;
653 b
->b_instr
[off
].i_lineno
= c
->u
->u_lineno
;
657 opcode_stack_effect(int opcode
, int oparg
)
681 case BINARY_MULTIPLY
:
685 case BINARY_SUBTRACT
:
687 case BINARY_FLOOR_DIVIDE
:
688 case BINARY_TRUE_DIVIDE
:
690 case INPLACE_FLOOR_DIVIDE
:
691 case INPLACE_TRUE_DIVIDE
:
722 case INPLACE_SUBTRACT
:
723 case INPLACE_MULTIPLY
:
753 case PRINT_NEWLINE_TO
:
766 return -1; /* XXX Sometimes more */
781 return -1; /* or -2 or -3 if exception occurred */
789 case UNPACK_SEQUENCE
:
823 case JUMP_IF_TRUE_OR_POP
: /* -1 if jump not taken */
824 case JUMP_IF_FALSE_OR_POP
: /* "" */
828 case POP_JUMP_IF_FALSE
:
829 case POP_JUMP_IF_TRUE
:
841 return 3; /* actually pushed by an exception */
852 #define NARGS(o) (((o) % 256) + 2*((o) / 256))
854 return -NARGS(oparg
);
855 case CALL_FUNCTION_VAR
:
856 case CALL_FUNCTION_KW
:
857 return -NARGS(oparg
)-1;
858 case CALL_FUNCTION_VAR_KW
:
859 return -NARGS(oparg
)-2;
878 fprintf(stderr
, "opcode = %d\n", opcode
);
879 Py_FatalError("opcode_stack_effect()");
882 return 0; /* not reachable */
885 /* Add an opcode with no argument.
886 Returns 0 on failure, 1 on success.
890 compiler_addop(struct compiler
*c
, int opcode
)
895 off
= compiler_next_instr(c
, c
->u
->u_curblock
);
898 b
= c
->u
->u_curblock
;
899 i
= &b
->b_instr
[off
];
900 i
->i_opcode
= opcode
;
902 if (opcode
== RETURN_VALUE
)
904 compiler_set_lineno(c
, off
);
909 compiler_add_o(struct compiler
*c
, PyObject
*dict
, PyObject
*o
)
913 unsigned char *p
, *q
;
916 int real_part_zero
, imag_part_zero
;
918 /* necessary to make sure types aren't coerced (e.g., int and long) */
919 /* _and_ to distinguish 0.0 from -0.0 e.g. on IEEE platforms */
920 if (PyFloat_Check(o
)) {
921 d
= PyFloat_AS_DOUBLE(o
);
922 p
= (unsigned char*) &d
;
923 /* all we need is to make the tuple different in either the 0.0
924 * or -0.0 case from all others, just to avoid the "coercion".
926 if (*p
==0 && p
[sizeof(double)-1]==0)
927 t
= PyTuple_Pack(3, o
, o
->ob_type
, Py_None
);
929 t
= PyTuple_Pack(2, o
, o
->ob_type
);
931 else if (PyComplex_Check(o
)) {
932 /* complex case is even messier: we need to make complex(x,
933 0.) different from complex(x, -0.) and complex(0., y)
934 different from complex(-0., y), for any x and y. In
935 particular, all four complex zeros should be
937 z
= PyComplex_AsCComplex(o
);
938 p
= (unsigned char*) &(z
.real
);
939 q
= (unsigned char*) &(z
.imag
);
940 /* all that matters here is that on IEEE platforms
941 real_part_zero will be true if z.real == 0., and false if
942 z.real == -0. In fact, real_part_zero will also be true
943 for some other rarely occurring nonzero floats, but this
944 doesn't matter. Similar comments apply to
946 real_part_zero
= *p
==0 && p
[sizeof(double)-1]==0;
947 imag_part_zero
= *q
==0 && q
[sizeof(double)-1]==0;
948 if (real_part_zero
&& imag_part_zero
) {
949 t
= PyTuple_Pack(4, o
, o
->ob_type
, Py_True
, Py_True
);
951 else if (real_part_zero
&& !imag_part_zero
) {
952 t
= PyTuple_Pack(4, o
, o
->ob_type
, Py_True
, Py_False
);
954 else if (!real_part_zero
&& imag_part_zero
) {
955 t
= PyTuple_Pack(4, o
, o
->ob_type
, Py_False
, Py_True
);
958 t
= PyTuple_Pack(2, o
, o
->ob_type
);
962 t
= PyTuple_Pack(2, o
, o
->ob_type
);
967 v
= PyDict_GetItem(dict
, t
);
969 arg
= PyDict_Size(dict
);
970 v
= PyInt_FromLong(arg
);
975 if (PyDict_SetItem(dict
, t
, v
) < 0) {
983 arg
= PyInt_AsLong(v
);
989 compiler_addop_o(struct compiler
*c
, int opcode
, PyObject
*dict
,
992 int arg
= compiler_add_o(c
, dict
, o
);
995 return compiler_addop_i(c
, opcode
, arg
);
999 compiler_addop_name(struct compiler
*c
, int opcode
, PyObject
*dict
,
1003 PyObject
*mangled
= _Py_Mangle(c
->u
->u_private
, o
);
1006 arg
= compiler_add_o(c
, dict
, mangled
);
1010 return compiler_addop_i(c
, opcode
, arg
);
1013 /* Add an opcode with an integer argument.
1014 Returns 0 on failure, 1 on success.
1018 compiler_addop_i(struct compiler
*c
, int opcode
, int oparg
)
1022 off
= compiler_next_instr(c
, c
->u
->u_curblock
);
1025 i
= &c
->u
->u_curblock
->b_instr
[off
];
1026 i
->i_opcode
= opcode
;
1029 compiler_set_lineno(c
, off
);
1034 compiler_addop_j(struct compiler
*c
, int opcode
, basicblock
*b
, int absolute
)
1040 off
= compiler_next_instr(c
, c
->u
->u_curblock
);
1043 i
= &c
->u
->u_curblock
->b_instr
[off
];
1044 i
->i_opcode
= opcode
;
1051 compiler_set_lineno(c
, off
);
/* The distinction between NEW_BLOCK and NEXT_BLOCK is subtle.  (I'd
   like to find better names.)  NEW_BLOCK() creates a new block and sets
   it as the current block.  NEXT_BLOCK() also creates an implicit jump
   from the current block to the new block.
*/

/* The returns inside these macros make it impossible to decref objects
   created in the local function.  Local objects should use the arena.
*/

#define NEW_BLOCK(C) { \
    if (compiler_use_new_block((C)) == NULL) \
        return 0; \
}

#define NEXT_BLOCK(C) { \
    if (compiler_next_block((C)) == NULL) \
        return 0; \
}

#define ADDOP(C, OP) { \
    if (!compiler_addop((C), (OP))) \
        return 0; \
}

/* NOTE: references the local variable `c` of the enclosing function,
   not the macro parameter C. */
#define ADDOP_IN_SCOPE(C, OP) { \
    if (!compiler_addop((C), (OP))) { \
        compiler_exit_scope(c); \
        return 0; \
    } \
}

#define ADDOP_O(C, OP, O, TYPE) { \
    if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) \
        return 0; \
}

#define ADDOP_NAME(C, OP, O, TYPE) { \
    if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \
        return 0; \
}

#define ADDOP_I(C, OP, O) { \
    if (!compiler_addop_i((C), (OP), (O))) \
        return 0; \
}

#define ADDOP_JABS(C, OP, O) { \
    if (!compiler_addop_j((C), (OP), (O), 1)) \
        return 0; \
}

#define ADDOP_JREL(C, OP, O) { \
    if (!compiler_addop_j((C), (OP), (O), 0)) \
        return 0; \
}

/* VISIT and VISIT_SEQ takes an ASDL type as their second argument.  They use
   the ASDL name to synthesize the name of the C type and the visit function.
*/

#define VISIT(C, TYPE, V) {\
    if (!compiler_visit_ ## TYPE((C), (V))) \
        return 0; \
}

#define VISIT_IN_SCOPE(C, TYPE, V) {\
    if (!compiler_visit_ ## TYPE((C), (V))) { \
        compiler_exit_scope(c); \
        return 0; \
    } \
}

#define VISIT_SLICE(C, V, CTX) {\
    if (!compiler_visit_slice((C), (V), (CTX))) \
        return 0; \
}

#define VISIT_SEQ(C, TYPE, SEQ) { \
    int _i; \
    asdl_seq *seq = (SEQ); /* avoid variable capture */ \
    for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
        TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
        if (!compiler_visit_ ## TYPE((C), elt)) \
            return 0; \
    } \
}

#define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
    int _i; \
    asdl_seq *seq = (SEQ); /* avoid variable capture */ \
    for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
        TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
        if (!compiler_visit_ ## TYPE((C), elt)) { \
            compiler_exit_scope(c); \
            return 0; \
        } \
    } \
}
1157 compiler_isdocstring(stmt_ty s
)
1159 if (s
->kind
!= Expr_kind
)
1161 return s
->v
.Expr
.value
->kind
== Str_kind
;
1164 /* Compile a sequence of statements, checking for a docstring. */
1167 compiler_body(struct compiler
*c
, asdl_seq
*stmts
)
1172 if (!asdl_seq_LEN(stmts
))
1174 st
= (stmt_ty
)asdl_seq_GET(stmts
, 0);
1175 if (compiler_isdocstring(st
) && Py_OptimizeFlag
< 2) {
1176 /* don't generate docstrings if -OO */
1178 VISIT(c
, expr
, st
->v
.Expr
.value
);
1179 if (!compiler_nameop(c
, __doc__
, Store
))
1182 for (; i
< asdl_seq_LEN(stmts
); i
++)
1183 VISIT(c
, stmt
, (stmt_ty
)asdl_seq_GET(stmts
, i
));
1187 static PyCodeObject
*
1188 compiler_mod(struct compiler
*c
, mod_ty mod
)
1192 static PyObject
*module
;
1194 module
= PyString_InternFromString("<module>");
1198 /* Use 0 for firstlineno initially, will fixup in assemble(). */
1199 if (!compiler_enter_scope(c
, module
, mod
, 0))
1201 switch (mod
->kind
) {
1203 if (!compiler_body(c
, mod
->v
.Module
.body
)) {
1204 compiler_exit_scope(c
);
1208 case Interactive_kind
:
1209 c
->c_interactive
= 1;
1210 VISIT_SEQ_IN_SCOPE(c
, stmt
,
1211 mod
->v
.Interactive
.body
);
1213 case Expression_kind
:
1214 VISIT_IN_SCOPE(c
, expr
, mod
->v
.Expression
.body
);
1218 PyErr_SetString(PyExc_SystemError
,
1219 "suite should not be possible");
1222 PyErr_Format(PyExc_SystemError
,
1223 "module kind %d should not be possible",
1227 co
= assemble(c
, addNone
);
1228 compiler_exit_scope(c
);
1232 /* The test for LOCAL must come before the test for FREE in order to
1233 handle classes where name is both local and free. The local var is
1234 a method and the free var is a free var referenced within a method.
1238 get_ref_type(struct compiler
*c
, PyObject
*name
)
1240 int scope
= PyST_GetScope(c
->u
->u_ste
, name
);
1243 PyOS_snprintf(buf
, sizeof(buf
),
1244 "unknown scope for %.100s in %.100s(%s) in %s\n"
1245 "symbols: %s\nlocals: %s\nglobals: %s",
1246 PyString_AS_STRING(name
),
1247 PyString_AS_STRING(c
->u
->u_name
),
1248 PyObject_REPR(c
->u
->u_ste
->ste_id
),
1250 PyObject_REPR(c
->u
->u_ste
->ste_symbols
),
1251 PyObject_REPR(c
->u
->u_varnames
),
1252 PyObject_REPR(c
->u
->u_names
)
1261 compiler_lookup_arg(PyObject
*dict
, PyObject
*name
)
1264 k
= PyTuple_Pack(2, name
, name
->ob_type
);
1267 v
= PyDict_GetItem(dict
, k
);
1271 return PyInt_AS_LONG(v
);
1275 compiler_make_closure(struct compiler
*c
, PyCodeObject
*co
, int args
)
1277 int i
, free
= PyCode_GetNumFree(co
);
1279 ADDOP_O(c
, LOAD_CONST
, (PyObject
*)co
, consts
);
1280 ADDOP_I(c
, MAKE_FUNCTION
, args
);
1283 for (i
= 0; i
< free
; ++i
) {
1284 /* Bypass com_addop_varname because it will generate
1285 LOAD_DEREF but LOAD_CLOSURE is needed.
1287 PyObject
*name
= PyTuple_GET_ITEM(co
->co_freevars
, i
);
1290 /* Special case: If a class contains a method with a
1291 free variable that has the same name as a method,
1292 the name will be considered free *and* local in the
1293 class. It should be handled by the closure, as
1294 well as by the normal name loookup logic.
1296 reftype
= get_ref_type(c
, name
);
1297 if (reftype
== CELL
)
1298 arg
= compiler_lookup_arg(c
->u
->u_cellvars
, name
);
1299 else /* (reftype == FREE) */
1300 arg
= compiler_lookup_arg(c
->u
->u_freevars
, name
);
1302 printf("lookup %s in %s %d %d\n"
1303 "freevars of %s: %s\n",
1304 PyObject_REPR(name
),
1305 PyString_AS_STRING(c
->u
->u_name
),
1307 PyString_AS_STRING(co
->co_name
),
1308 PyObject_REPR(co
->co_freevars
));
1309 Py_FatalError("compiler_make_closure()");
1311 ADDOP_I(c
, LOAD_CLOSURE
, arg
);
1313 ADDOP_I(c
, BUILD_TUPLE
, free
);
1314 ADDOP_O(c
, LOAD_CONST
, (PyObject
*)co
, consts
);
1315 ADDOP_I(c
, MAKE_CLOSURE
, args
);
1320 compiler_decorators(struct compiler
*c
, asdl_seq
* decos
)
1327 for (i
= 0; i
< asdl_seq_LEN(decos
); i
++) {
1328 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(decos
, i
));
1334 compiler_arguments(struct compiler
*c
, arguments_ty args
)
1337 int n
= asdl_seq_LEN(args
->args
);
1338 /* Correctly handle nested argument lists */
1339 for (i
= 0; i
< n
; i
++) {
1340 expr_ty arg
= (expr_ty
)asdl_seq_GET(args
->args
, i
);
1341 if (arg
->kind
== Tuple_kind
) {
1342 PyObject
*id
= PyString_FromFormat(".%d", i
);
1346 if (!compiler_nameop(c
, id
, Load
)) {
1351 VISIT(c
, expr
, arg
);
1358 compiler_function(struct compiler
*c
, stmt_ty s
)
1361 PyObject
*first_const
= Py_None
;
1362 arguments_ty args
= s
->v
.FunctionDef
.args
;
1363 asdl_seq
* decos
= s
->v
.FunctionDef
.decorator_list
;
1365 int i
, n
, docstring
;
1367 assert(s
->kind
== FunctionDef_kind
);
1369 if (!compiler_decorators(c
, decos
))
1372 VISIT_SEQ(c
, expr
, args
->defaults
);
1373 if (!compiler_enter_scope(c
, s
->v
.FunctionDef
.name
, (void *)s
,
1377 st
= (stmt_ty
)asdl_seq_GET(s
->v
.FunctionDef
.body
, 0);
1378 docstring
= compiler_isdocstring(st
);
1379 if (docstring
&& Py_OptimizeFlag
< 2)
1380 first_const
= st
->v
.Expr
.value
->v
.Str
.s
;
1381 if (compiler_add_o(c
, c
->u
->u_consts
, first_const
) < 0) {
1382 compiler_exit_scope(c
);
1386 /* unpack nested arguments */
1387 compiler_arguments(c
, args
);
1389 c
->u
->u_argcount
= asdl_seq_LEN(args
->args
);
1390 n
= asdl_seq_LEN(s
->v
.FunctionDef
.body
);
1391 /* if there was a docstring, we need to skip the first statement */
1392 for (i
= docstring
; i
< n
; i
++) {
1393 st
= (stmt_ty
)asdl_seq_GET(s
->v
.FunctionDef
.body
, i
);
1394 VISIT_IN_SCOPE(c
, stmt
, st
);
1396 co
= assemble(c
, 1);
1397 compiler_exit_scope(c
);
1401 compiler_make_closure(c
, co
, asdl_seq_LEN(args
->defaults
));
1404 for (i
= 0; i
< asdl_seq_LEN(decos
); i
++) {
1405 ADDOP_I(c
, CALL_FUNCTION
, 1);
1408 return compiler_nameop(c
, s
->v
.FunctionDef
.name
, Store
);
1412 compiler_class(struct compiler
*c
, stmt_ty s
)
1417 asdl_seq
* decos
= s
->v
.ClassDef
.decorator_list
;
1419 if (!compiler_decorators(c
, decos
))
1422 /* push class name on stack, needed by BUILD_CLASS */
1423 ADDOP_O(c
, LOAD_CONST
, s
->v
.ClassDef
.name
, consts
);
1424 /* push the tuple of base classes on the stack */
1425 n
= asdl_seq_LEN(s
->v
.ClassDef
.bases
);
1427 VISIT_SEQ(c
, expr
, s
->v
.ClassDef
.bases
);
1428 ADDOP_I(c
, BUILD_TUPLE
, n
);
1429 if (!compiler_enter_scope(c
, s
->v
.ClassDef
.name
, (void *)s
,
1432 Py_XDECREF(c
->u
->u_private
);
1433 c
->u
->u_private
= s
->v
.ClassDef
.name
;
1434 Py_INCREF(c
->u
->u_private
);
1435 str
= PyString_InternFromString("__name__");
1436 if (!str
|| !compiler_nameop(c
, str
, Load
)) {
1438 compiler_exit_scope(c
);
1443 str
= PyString_InternFromString("__module__");
1444 if (!str
|| !compiler_nameop(c
, str
, Store
)) {
1446 compiler_exit_scope(c
);
1451 if (!compiler_body(c
, s
->v
.ClassDef
.body
)) {
1452 compiler_exit_scope(c
);
1456 ADDOP_IN_SCOPE(c
, LOAD_LOCALS
);
1457 ADDOP_IN_SCOPE(c
, RETURN_VALUE
);
1458 co
= assemble(c
, 1);
1459 compiler_exit_scope(c
);
1463 compiler_make_closure(c
, co
, 0);
1466 ADDOP_I(c
, CALL_FUNCTION
, 0);
1467 ADDOP(c
, BUILD_CLASS
);
1468 /* apply decorators */
1469 for (i
= 0; i
< asdl_seq_LEN(decos
); i
++) {
1470 ADDOP_I(c
, CALL_FUNCTION
, 1);
1472 if (!compiler_nameop(c
, s
->v
.ClassDef
.name
, Store
))
1478 compiler_ifexp(struct compiler
*c
, expr_ty e
)
1480 basicblock
*end
, *next
;
1482 assert(e
->kind
== IfExp_kind
);
1483 end
= compiler_new_block(c
);
1486 next
= compiler_new_block(c
);
1489 VISIT(c
, expr
, e
->v
.IfExp
.test
);
1490 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, next
);
1491 VISIT(c
, expr
, e
->v
.IfExp
.body
);
1492 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
1493 compiler_use_next_block(c
, next
);
1494 VISIT(c
, expr
, e
->v
.IfExp
.orelse
);
1495 compiler_use_next_block(c
, end
);
1500 compiler_lambda(struct compiler
*c
, expr_ty e
)
1503 static identifier name
;
1504 arguments_ty args
= e
->v
.Lambda
.args
;
1505 assert(e
->kind
== Lambda_kind
);
1508 name
= PyString_InternFromString("<lambda>");
1514 VISIT_SEQ(c
, expr
, args
->defaults
);
1515 if (!compiler_enter_scope(c
, name
, (void *)e
, e
->lineno
))
1518 /* unpack nested arguments */
1519 compiler_arguments(c
, args
);
1521 c
->u
->u_argcount
= asdl_seq_LEN(args
->args
);
1522 VISIT_IN_SCOPE(c
, expr
, e
->v
.Lambda
.body
);
1523 if (c
->u
->u_ste
->ste_generator
) {
1524 ADDOP_IN_SCOPE(c
, POP_TOP
);
1527 ADDOP_IN_SCOPE(c
, RETURN_VALUE
);
1529 co
= assemble(c
, 1);
1530 compiler_exit_scope(c
);
1534 compiler_make_closure(c
, co
, asdl_seq_LEN(args
->defaults
));
1541 compiler_print(struct compiler
*c
, stmt_ty s
)
1546 assert(s
->kind
== Print_kind
);
1547 n
= asdl_seq_LEN(s
->v
.Print
.values
);
1549 if (s
->v
.Print
.dest
) {
1550 VISIT(c
, expr
, s
->v
.Print
.dest
);
1553 for (i
= 0; i
< n
; i
++) {
1554 expr_ty e
= (expr_ty
)asdl_seq_GET(s
->v
.Print
.values
, i
);
1559 ADDOP(c
, PRINT_ITEM_TO
);
1563 ADDOP(c
, PRINT_ITEM
);
1566 if (s
->v
.Print
.nl
) {
1568 ADDOP(c
, PRINT_NEWLINE_TO
)
1570 ADDOP(c
, PRINT_NEWLINE
)
1578 compiler_if(struct compiler
*c
, stmt_ty s
)
1580 basicblock
*end
, *next
;
1582 assert(s
->kind
== If_kind
);
1583 end
= compiler_new_block(c
);
1587 constant
= expr_constant(s
->v
.If
.test
);
1588 /* constant = 0: "if 0"
1589 * constant = 1: "if 1", "if 2", ...
1590 * constant = -1: rest */
1591 if (constant
== 0) {
1593 VISIT_SEQ(c
, stmt
, s
->v
.If
.orelse
);
1594 } else if (constant
== 1) {
1595 VISIT_SEQ(c
, stmt
, s
->v
.If
.body
);
1597 if (s
->v
.If
.orelse
) {
1598 next
= compiler_new_block(c
);
1604 VISIT(c
, expr
, s
->v
.If
.test
);
1605 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, next
);
1606 VISIT_SEQ(c
, stmt
, s
->v
.If
.body
);
1607 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
1608 if (s
->v
.If
.orelse
) {
1609 compiler_use_next_block(c
, next
);
1610 VISIT_SEQ(c
, stmt
, s
->v
.If
.orelse
);
1613 compiler_use_next_block(c
, end
);
1618 compiler_for(struct compiler
*c
, stmt_ty s
)
1620 basicblock
*start
, *cleanup
, *end
;
1622 start
= compiler_new_block(c
);
1623 cleanup
= compiler_new_block(c
);
1624 end
= compiler_new_block(c
);
1625 if (start
== NULL
|| end
== NULL
|| cleanup
== NULL
)
1627 ADDOP_JREL(c
, SETUP_LOOP
, end
);
1628 if (!compiler_push_fblock(c
, LOOP
, start
))
1630 VISIT(c
, expr
, s
->v
.For
.iter
);
1632 compiler_use_next_block(c
, start
);
1633 ADDOP_JREL(c
, FOR_ITER
, cleanup
);
1634 VISIT(c
, expr
, s
->v
.For
.target
);
1635 VISIT_SEQ(c
, stmt
, s
->v
.For
.body
);
1636 ADDOP_JABS(c
, JUMP_ABSOLUTE
, start
);
1637 compiler_use_next_block(c
, cleanup
);
1638 ADDOP(c
, POP_BLOCK
);
1639 compiler_pop_fblock(c
, LOOP
, start
);
1640 VISIT_SEQ(c
, stmt
, s
->v
.For
.orelse
);
1641 compiler_use_next_block(c
, end
);
1646 compiler_while(struct compiler
*c
, stmt_ty s
)
1648 basicblock
*loop
, *orelse
, *end
, *anchor
= NULL
;
1649 int constant
= expr_constant(s
->v
.While
.test
);
1651 if (constant
== 0) {
1652 if (s
->v
.While
.orelse
)
1653 VISIT_SEQ(c
, stmt
, s
->v
.While
.orelse
);
1656 loop
= compiler_new_block(c
);
1657 end
= compiler_new_block(c
);
1658 if (constant
== -1) {
1659 anchor
= compiler_new_block(c
);
1663 if (loop
== NULL
|| end
== NULL
)
1665 if (s
->v
.While
.orelse
) {
1666 orelse
= compiler_new_block(c
);
1673 ADDOP_JREL(c
, SETUP_LOOP
, end
);
1674 compiler_use_next_block(c
, loop
);
1675 if (!compiler_push_fblock(c
, LOOP
, loop
))
1677 if (constant
== -1) {
1678 VISIT(c
, expr
, s
->v
.While
.test
);
1679 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, anchor
);
1681 VISIT_SEQ(c
, stmt
, s
->v
.While
.body
);
1682 ADDOP_JABS(c
, JUMP_ABSOLUTE
, loop
);
1684 /* XXX should the two POP instructions be in a separate block
1685 if there is no else clause ?
1688 if (constant
== -1) {
1689 compiler_use_next_block(c
, anchor
);
1690 ADDOP(c
, POP_BLOCK
);
1692 compiler_pop_fblock(c
, LOOP
, loop
);
1693 if (orelse
!= NULL
) /* what if orelse is just pass? */
1694 VISIT_SEQ(c
, stmt
, s
->v
.While
.orelse
);
1695 compiler_use_next_block(c
, end
);
1701 compiler_continue(struct compiler
*c
)
1703 static const char LOOP_ERROR_MSG
[] = "'continue' not properly in loop";
1704 static const char IN_FINALLY_ERROR_MSG
[] =
1705 "'continue' not supported inside 'finally' clause";
1708 if (!c
->u
->u_nfblocks
)
1709 return compiler_error(c
, LOOP_ERROR_MSG
);
1710 i
= c
->u
->u_nfblocks
- 1;
1711 switch (c
->u
->u_fblock
[i
].fb_type
) {
1713 ADDOP_JABS(c
, JUMP_ABSOLUTE
, c
->u
->u_fblock
[i
].fb_block
);
1717 while (--i
>= 0 && c
->u
->u_fblock
[i
].fb_type
!= LOOP
) {
1718 /* Prevent continue anywhere under a finally
1719 even if hidden in a sub-try or except. */
1720 if (c
->u
->u_fblock
[i
].fb_type
== FINALLY_END
)
1721 return compiler_error(c
, IN_FINALLY_ERROR_MSG
);
1724 return compiler_error(c
, LOOP_ERROR_MSG
);
1725 ADDOP_JABS(c
, CONTINUE_LOOP
, c
->u
->u_fblock
[i
].fb_block
);
1728 return compiler_error(c
, IN_FINALLY_ERROR_MSG
);
1734 /* Code generated for "try: <body> finally: <finalbody>" is as follows:
1740 L: <code for finalbody>
1743 The special instructions use the block stack. Each block
1744 stack entry contains the instruction that created it (here
1745 SETUP_FINALLY), the level of the value stack at the time the
1746 block stack entry was created, and a label (here L).
1749 Pushes the current value stack level and the label
1750 onto the block stack.
1752 Pops an entry from the block stack, and pops the value
1753 stack until its level is the same as indicated on the
1754 block stack. (The label is ignored.)
1756 Pops a variable number of entries from the *value* stack
1757 and re-raises the exception they specify. The number of
1758 entries popped depends on the (pseudo) exception type.
1760 The block stack is unwound when an exception is raised:
1761 when a SETUP_FINALLY entry is found, the exception is pushed
1762 onto the value stack (and the exception condition is cleared),
1763 and the interpreter jumps to the label gotten from the block
1768 compiler_try_finally(struct compiler
*c
, stmt_ty s
)
1770 basicblock
*body
, *end
;
1771 body
= compiler_new_block(c
);
1772 end
= compiler_new_block(c
);
1773 if (body
== NULL
|| end
== NULL
)
1776 ADDOP_JREL(c
, SETUP_FINALLY
, end
);
1777 compiler_use_next_block(c
, body
);
1778 if (!compiler_push_fblock(c
, FINALLY_TRY
, body
))
1780 VISIT_SEQ(c
, stmt
, s
->v
.TryFinally
.body
);
1781 ADDOP(c
, POP_BLOCK
);
1782 compiler_pop_fblock(c
, FINALLY_TRY
, body
);
1784 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
1785 compiler_use_next_block(c
, end
);
1786 if (!compiler_push_fblock(c
, FINALLY_END
, end
))
1788 VISIT_SEQ(c
, stmt
, s
->v
.TryFinally
.finalbody
);
1789 ADDOP(c
, END_FINALLY
);
1790 compiler_pop_fblock(c
, FINALLY_END
, end
);
1796 Code generated for "try: S except E1, V1: S1 except E2, V2: S2 ...":
1797 (The contents of the value stack is shown in [], with the top
1798 at the right; 'tb' is trace-back info, 'val' the exception's
1799 associated value, and 'exc' the exception.)
1801 Value stack Label Instruction Argument
1807 [tb, val, exc] L1: DUP )
1808 [tb, val, exc, exc] <evaluate E1> )
1809 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1
1810 [tb, val, exc, 1-or-0] POP_JUMP_IF_FALSE L2 )
1812 [tb, val] <assign to V1> (or POP if no V1)
1817 [tb, val, exc] L2: DUP
1818 .............................etc.......................
1820 [tb, val, exc] Ln+1: END_FINALLY # re-raise exception
1822 [] L0: <next statement>
1824 Of course, parts are not generated if Vi or Ei is not present.
1827 compiler_try_except(struct compiler
*c
, stmt_ty s
)
1829 basicblock
*body
, *orelse
, *except
, *end
;
1832 body
= compiler_new_block(c
);
1833 except
= compiler_new_block(c
);
1834 orelse
= compiler_new_block(c
);
1835 end
= compiler_new_block(c
);
1836 if (body
== NULL
|| except
== NULL
|| orelse
== NULL
|| end
== NULL
)
1838 ADDOP_JREL(c
, SETUP_EXCEPT
, except
);
1839 compiler_use_next_block(c
, body
);
1840 if (!compiler_push_fblock(c
, EXCEPT
, body
))
1842 VISIT_SEQ(c
, stmt
, s
->v
.TryExcept
.body
);
1843 ADDOP(c
, POP_BLOCK
);
1844 compiler_pop_fblock(c
, EXCEPT
, body
);
1845 ADDOP_JREL(c
, JUMP_FORWARD
, orelse
);
1846 n
= asdl_seq_LEN(s
->v
.TryExcept
.handlers
);
1847 compiler_use_next_block(c
, except
);
1848 for (i
= 0; i
< n
; i
++) {
1849 excepthandler_ty handler
= (excepthandler_ty
)asdl_seq_GET(
1850 s
->v
.TryExcept
.handlers
, i
);
1851 if (!handler
->v
.ExceptHandler
.type
&& i
< n
-1)
1852 return compiler_error(c
, "default 'except:' must be last");
1853 c
->u
->u_lineno_set
= false;
1854 c
->u
->u_lineno
= handler
->lineno
;
1855 except
= compiler_new_block(c
);
1858 if (handler
->v
.ExceptHandler
.type
) {
1860 VISIT(c
, expr
, handler
->v
.ExceptHandler
.type
);
1861 ADDOP_I(c
, COMPARE_OP
, PyCmp_EXC_MATCH
);
1862 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, except
);
1865 if (handler
->v
.ExceptHandler
.name
) {
1866 VISIT(c
, expr
, handler
->v
.ExceptHandler
.name
);
1872 VISIT_SEQ(c
, stmt
, handler
->v
.ExceptHandler
.body
);
1873 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
1874 compiler_use_next_block(c
, except
);
1876 ADDOP(c
, END_FINALLY
);
1877 compiler_use_next_block(c
, orelse
);
1878 VISIT_SEQ(c
, stmt
, s
->v
.TryExcept
.orelse
);
1879 compiler_use_next_block(c
, end
);
1884 compiler_import_as(struct compiler
*c
, identifier name
, identifier asname
)
1886 /* The IMPORT_NAME opcode was already generated. This function
1887 merely needs to bind the result to a name.
1889 If there is a dot in name, we need to split it and emit a
1890 LOAD_ATTR for each name.
1892 const char *src
= PyString_AS_STRING(name
);
1893 const char *dot
= strchr(src
, '.');
1895 /* Consume the base module name to get the first attribute */
1898 /* NB src is only defined when dot != NULL */
1900 dot
= strchr(src
, '.');
1901 attr
= PyString_FromStringAndSize(src
,
1902 dot
? dot
- src
: strlen(src
));
1905 ADDOP_O(c
, LOAD_ATTR
, attr
, names
);
1910 return compiler_nameop(c
, asname
, Store
);
1914 compiler_import(struct compiler
*c
, stmt_ty s
)
1916 /* The Import node stores a module name like a.b.c as a single
1917 string. This is convenient for all cases except
1919 where we need to parse that string to extract the individual
1921 XXX Perhaps change the representation to make this case simpler?
1923 int i
, n
= asdl_seq_LEN(s
->v
.Import
.names
);
1925 for (i
= 0; i
< n
; i
++) {
1926 alias_ty alias
= (alias_ty
)asdl_seq_GET(s
->v
.Import
.names
, i
);
1930 if (c
->c_flags
&& (c
->c_flags
->cf_flags
& CO_FUTURE_ABSOLUTE_IMPORT
))
1931 level
= PyInt_FromLong(0);
1933 level
= PyInt_FromLong(-1);
1938 ADDOP_O(c
, LOAD_CONST
, level
, consts
);
1940 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
1941 ADDOP_NAME(c
, IMPORT_NAME
, alias
->name
, names
);
1943 if (alias
->asname
) {
1944 r
= compiler_import_as(c
, alias
->name
, alias
->asname
);
1949 identifier tmp
= alias
->name
;
1950 const char *base
= PyString_AS_STRING(alias
->name
);
1951 char *dot
= strchr(base
, '.');
1953 tmp
= PyString_FromStringAndSize(base
,
1955 r
= compiler_nameop(c
, tmp
, Store
);
1967 compiler_from_import(struct compiler
*c
, stmt_ty s
)
1969 int i
, n
= asdl_seq_LEN(s
->v
.ImportFrom
.names
);
1971 PyObject
*names
= PyTuple_New(n
);
1973 static PyObject
*empty_string
;
1975 if (!empty_string
) {
1976 empty_string
= PyString_FromString("");
1984 if (s
->v
.ImportFrom
.level
== 0 && c
->c_flags
&&
1985 !(c
->c_flags
->cf_flags
& CO_FUTURE_ABSOLUTE_IMPORT
))
1986 level
= PyInt_FromLong(-1);
1988 level
= PyInt_FromLong(s
->v
.ImportFrom
.level
);
1995 /* build up the names */
1996 for (i
= 0; i
< n
; i
++) {
1997 alias_ty alias
= (alias_ty
)asdl_seq_GET(s
->v
.ImportFrom
.names
, i
);
1998 Py_INCREF(alias
->name
);
1999 PyTuple_SET_ITEM(names
, i
, alias
->name
);
2002 if (s
->lineno
> c
->c_future
->ff_lineno
&& s
->v
.ImportFrom
.module
&&
2003 !strcmp(PyString_AS_STRING(s
->v
.ImportFrom
.module
), "__future__")) {
2006 return compiler_error(c
, "from __future__ imports must occur "
2007 "at the beginning of the file");
2010 ADDOP_O(c
, LOAD_CONST
, level
, consts
);
2012 ADDOP_O(c
, LOAD_CONST
, names
, consts
);
2014 if (s
->v
.ImportFrom
.module
) {
2015 ADDOP_NAME(c
, IMPORT_NAME
, s
->v
.ImportFrom
.module
, names
);
2018 ADDOP_NAME(c
, IMPORT_NAME
, empty_string
, names
);
2020 for (i
= 0; i
< n
; i
++) {
2021 alias_ty alias
= (alias_ty
)asdl_seq_GET(s
->v
.ImportFrom
.names
, i
);
2022 identifier store_name
;
2024 if (i
== 0 && *PyString_AS_STRING(alias
->name
) == '*') {
2026 ADDOP(c
, IMPORT_STAR
);
2030 ADDOP_NAME(c
, IMPORT_FROM
, alias
->name
, names
);
2031 store_name
= alias
->name
;
2033 store_name
= alias
->asname
;
2035 if (!compiler_nameop(c
, store_name
, Store
)) {
2040 /* remove imported module */
2046 compiler_assert(struct compiler
*c
, stmt_ty s
)
2048 static PyObject
*assertion_error
= NULL
;
2051 if (Py_OptimizeFlag
)
2053 if (assertion_error
== NULL
) {
2054 assertion_error
= PyString_InternFromString("AssertionError");
2055 if (assertion_error
== NULL
)
2058 if (s
->v
.Assert
.test
->kind
== Tuple_kind
&&
2059 asdl_seq_LEN(s
->v
.Assert
.test
->v
.Tuple
.elts
) > 0) {
2061 "assertion is always true, perhaps remove parentheses?";
2062 if (PyErr_WarnExplicit(PyExc_SyntaxWarning
, msg
, c
->c_filename
,
2063 c
->u
->u_lineno
, NULL
, NULL
) == -1)
2066 VISIT(c
, expr
, s
->v
.Assert
.test
);
2067 end
= compiler_new_block(c
);
2070 ADDOP_JABS(c
, POP_JUMP_IF_TRUE
, end
);
2071 ADDOP_O(c
, LOAD_GLOBAL
, assertion_error
, names
);
2072 if (s
->v
.Assert
.msg
) {
2073 VISIT(c
, expr
, s
->v
.Assert
.msg
);
2074 ADDOP_I(c
, RAISE_VARARGS
, 2);
2077 ADDOP_I(c
, RAISE_VARARGS
, 1);
2079 compiler_use_next_block(c
, end
);
2084 compiler_visit_stmt(struct compiler
*c
, stmt_ty s
)
2088 /* Always assign a lineno to the next instruction for a stmt. */
2089 c
->u
->u_lineno
= s
->lineno
;
2090 c
->u
->u_lineno_set
= false;
2093 case FunctionDef_kind
:
2094 return compiler_function(c
, s
);
2096 return compiler_class(c
, s
);
2098 if (c
->u
->u_ste
->ste_type
!= FunctionBlock
)
2099 return compiler_error(c
, "'return' outside function");
2100 if (s
->v
.Return
.value
) {
2101 VISIT(c
, expr
, s
->v
.Return
.value
);
2104 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2105 ADDOP(c
, RETURN_VALUE
);
2108 VISIT_SEQ(c
, expr
, s
->v
.Delete
.targets
)
2111 n
= asdl_seq_LEN(s
->v
.Assign
.targets
);
2112 VISIT(c
, expr
, s
->v
.Assign
.value
);
2113 for (i
= 0; i
< n
; i
++) {
2117 (expr_ty
)asdl_seq_GET(s
->v
.Assign
.targets
, i
));
2120 case AugAssign_kind
:
2121 return compiler_augassign(c
, s
);
2123 return compiler_print(c
, s
);
2125 return compiler_for(c
, s
);
2127 return compiler_while(c
, s
);
2129 return compiler_if(c
, s
);
2132 if (s
->v
.Raise
.type
) {
2133 VISIT(c
, expr
, s
->v
.Raise
.type
);
2135 if (s
->v
.Raise
.inst
) {
2136 VISIT(c
, expr
, s
->v
.Raise
.inst
);
2138 if (s
->v
.Raise
.tback
) {
2139 VISIT(c
, expr
, s
->v
.Raise
.tback
);
2144 ADDOP_I(c
, RAISE_VARARGS
, n
);
2146 case TryExcept_kind
:
2147 return compiler_try_except(c
, s
);
2148 case TryFinally_kind
:
2149 return compiler_try_finally(c
, s
);
2151 return compiler_assert(c
, s
);
2153 return compiler_import(c
, s
);
2154 case ImportFrom_kind
:
2155 return compiler_from_import(c
, s
);
2157 VISIT(c
, expr
, s
->v
.Exec
.body
);
2158 if (s
->v
.Exec
.globals
) {
2159 VISIT(c
, expr
, s
->v
.Exec
.globals
);
2160 if (s
->v
.Exec
.locals
) {
2161 VISIT(c
, expr
, s
->v
.Exec
.locals
);
2166 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2169 ADDOP(c
, EXEC_STMT
);
2174 if (c
->c_interactive
&& c
->c_nestlevel
<= 1) {
2175 VISIT(c
, expr
, s
->v
.Expr
.value
);
2176 ADDOP(c
, PRINT_EXPR
);
2178 else if (s
->v
.Expr
.value
->kind
!= Str_kind
&&
2179 s
->v
.Expr
.value
->kind
!= Num_kind
) {
2180 VISIT(c
, expr
, s
->v
.Expr
.value
);
2187 if (!compiler_in_loop(c
))
2188 return compiler_error(c
, "'break' outside loop");
2189 ADDOP(c
, BREAK_LOOP
);
2192 return compiler_continue(c
);
2194 return compiler_with(c
, s
);
2200 unaryop(unaryop_ty op
)
2204 return UNARY_INVERT
;
2208 return UNARY_POSITIVE
;
2210 return UNARY_NEGATIVE
;
2212 PyErr_Format(PyExc_SystemError
,
2213 "unary op %d should not be possible", op
);
2219 binop(struct compiler
*c
, operator_ty op
)
2225 return BINARY_SUBTRACT
;
2227 return BINARY_MULTIPLY
;
2229 if (c
->c_flags
&& c
->c_flags
->cf_flags
& CO_FUTURE_DIVISION
)
2230 return BINARY_TRUE_DIVIDE
;
2232 return BINARY_DIVIDE
;
2234 return BINARY_MODULO
;
2236 return BINARY_POWER
;
2238 return BINARY_LSHIFT
;
2240 return BINARY_RSHIFT
;
2248 return BINARY_FLOOR_DIVIDE
;
2250 PyErr_Format(PyExc_SystemError
,
2251 "binary op %d should not be possible", op
);
2275 return PyCmp_IS_NOT
;
2279 return PyCmp_NOT_IN
;
2286 inplace_binop(struct compiler
*c
, operator_ty op
)
2292 return INPLACE_SUBTRACT
;
2294 return INPLACE_MULTIPLY
;
2296 if (c
->c_flags
&& c
->c_flags
->cf_flags
& CO_FUTURE_DIVISION
)
2297 return INPLACE_TRUE_DIVIDE
;
2299 return INPLACE_DIVIDE
;
2301 return INPLACE_MODULO
;
2303 return INPLACE_POWER
;
2305 return INPLACE_LSHIFT
;
2307 return INPLACE_RSHIFT
;
2315 return INPLACE_FLOOR_DIVIDE
;
2317 PyErr_Format(PyExc_SystemError
,
2318 "inplace binary op %d should not be possible", op
);
2324 compiler_nameop(struct compiler
*c
, identifier name
, expr_context_ty ctx
)
2327 enum { OP_FAST
, OP_GLOBAL
, OP_DEREF
, OP_NAME
} optype
;
2329 PyObject
*dict
= c
->u
->u_names
;
2331 /* XXX AugStore isn't used anywhere! */
2333 mangled
= _Py_Mangle(c
->u
->u_private
, name
);
2339 scope
= PyST_GetScope(c
->u
->u_ste
, mangled
);
2342 dict
= c
->u
->u_freevars
;
2346 dict
= c
->u
->u_cellvars
;
2350 if (c
->u
->u_ste
->ste_type
== FunctionBlock
)
2353 case GLOBAL_IMPLICIT
:
2354 if (c
->u
->u_ste
->ste_type
== FunctionBlock
&&
2355 !c
->u
->u_ste
->ste_unoptimized
)
2358 case GLOBAL_EXPLICIT
:
2362 /* scope can be 0 */
2366 /* XXX Leave assert here, but handle __doc__ and the like better */
2367 assert(scope
|| PyString_AS_STRING(name
)[0] == '_');
2372 case Load
: op
= LOAD_DEREF
; break;
2373 case Store
: op
= STORE_DEREF
; break;
2378 PyErr_Format(PyExc_SyntaxError
,
2379 "can not delete variable '%s' referenced "
2381 PyString_AS_STRING(name
));
2386 PyErr_SetString(PyExc_SystemError
,
2387 "param invalid for deref variable");
2393 case Load
: op
= LOAD_FAST
; break;
2394 case Store
: op
= STORE_FAST
; break;
2395 case Del
: op
= DELETE_FAST
; break;
2401 PyErr_SetString(PyExc_SystemError
,
2402 "param invalid for local variable");
2405 ADDOP_O(c
, op
, mangled
, varnames
);
2410 case Load
: op
= LOAD_GLOBAL
; break;
2411 case Store
: op
= STORE_GLOBAL
; break;
2412 case Del
: op
= DELETE_GLOBAL
; break;
2418 PyErr_SetString(PyExc_SystemError
,
2419 "param invalid for global variable");
2425 case Load
: op
= LOAD_NAME
; break;
2426 case Store
: op
= STORE_NAME
; break;
2427 case Del
: op
= DELETE_NAME
; break;
2433 PyErr_SetString(PyExc_SystemError
,
2434 "param invalid for name variable");
2441 arg
= compiler_add_o(c
, dict
, mangled
);
2445 return compiler_addop_i(c
, op
, arg
);
2449 compiler_boolop(struct compiler
*c
, expr_ty e
)
2455 assert(e
->kind
== BoolOp_kind
);
2456 if (e
->v
.BoolOp
.op
== And
)
2457 jumpi
= JUMP_IF_FALSE_OR_POP
;
2459 jumpi
= JUMP_IF_TRUE_OR_POP
;
2460 end
= compiler_new_block(c
);
2463 s
= e
->v
.BoolOp
.values
;
2464 n
= asdl_seq_LEN(s
) - 1;
2466 for (i
= 0; i
< n
; ++i
) {
2467 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(s
, i
));
2468 ADDOP_JABS(c
, jumpi
, end
);
2470 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(s
, n
));
2471 compiler_use_next_block(c
, end
);
2476 compiler_list(struct compiler
*c
, expr_ty e
)
2478 int n
= asdl_seq_LEN(e
->v
.List
.elts
);
2479 if (e
->v
.List
.ctx
== Store
) {
2480 ADDOP_I(c
, UNPACK_SEQUENCE
, n
);
2482 VISIT_SEQ(c
, expr
, e
->v
.List
.elts
);
2483 if (e
->v
.List
.ctx
== Load
) {
2484 ADDOP_I(c
, BUILD_LIST
, n
);
2490 compiler_tuple(struct compiler
*c
, expr_ty e
)
2492 int n
= asdl_seq_LEN(e
->v
.Tuple
.elts
);
2493 if (e
->v
.Tuple
.ctx
== Store
) {
2494 ADDOP_I(c
, UNPACK_SEQUENCE
, n
);
2496 VISIT_SEQ(c
, expr
, e
->v
.Tuple
.elts
);
2497 if (e
->v
.Tuple
.ctx
== Load
) {
2498 ADDOP_I(c
, BUILD_TUPLE
, n
);
2504 compiler_compare(struct compiler
*c
, expr_ty e
)
2507 basicblock
*cleanup
= NULL
;
2509 /* XXX the logic can be cleaned up for 1 or multiple comparisons */
2510 VISIT(c
, expr
, e
->v
.Compare
.left
);
2511 n
= asdl_seq_LEN(e
->v
.Compare
.ops
);
2514 cleanup
= compiler_new_block(c
);
2515 if (cleanup
== NULL
)
2518 (expr_ty
)asdl_seq_GET(e
->v
.Compare
.comparators
, 0));
2520 for (i
= 1; i
< n
; i
++) {
2522 ADDOP(c
, ROT_THREE
);
2523 ADDOP_I(c
, COMPARE_OP
,
2524 cmpop((cmpop_ty
)(asdl_seq_GET(
2525 e
->v
.Compare
.ops
, i
- 1))));
2526 ADDOP_JABS(c
, JUMP_IF_FALSE_OR_POP
, cleanup
);
2530 (expr_ty
)asdl_seq_GET(e
->v
.Compare
.comparators
, i
));
2532 VISIT(c
, expr
, (expr_ty
)asdl_seq_GET(e
->v
.Compare
.comparators
, n
- 1));
2533 ADDOP_I(c
, COMPARE_OP
,
2534 cmpop((cmpop_ty
)(asdl_seq_GET(e
->v
.Compare
.ops
, n
- 1))));
2536 basicblock
*end
= compiler_new_block(c
);
2539 ADDOP_JREL(c
, JUMP_FORWARD
, end
);
2540 compiler_use_next_block(c
, cleanup
);
2543 compiler_use_next_block(c
, end
);
2549 compiler_call(struct compiler
*c
, expr_ty e
)
2553 VISIT(c
, expr
, e
->v
.Call
.func
);
2554 n
= asdl_seq_LEN(e
->v
.Call
.args
);
2555 VISIT_SEQ(c
, expr
, e
->v
.Call
.args
);
2556 if (e
->v
.Call
.keywords
) {
2557 VISIT_SEQ(c
, keyword
, e
->v
.Call
.keywords
);
2558 n
|= asdl_seq_LEN(e
->v
.Call
.keywords
) << 8;
2560 if (e
->v
.Call
.starargs
) {
2561 VISIT(c
, expr
, e
->v
.Call
.starargs
);
2564 if (e
->v
.Call
.kwargs
) {
2565 VISIT(c
, expr
, e
->v
.Call
.kwargs
);
2570 ADDOP_I(c
, CALL_FUNCTION
, n
);
2573 ADDOP_I(c
, CALL_FUNCTION_VAR
, n
);
2576 ADDOP_I(c
, CALL_FUNCTION_KW
, n
);
2579 ADDOP_I(c
, CALL_FUNCTION_VAR_KW
, n
);
2586 compiler_listcomp_generator(struct compiler
*c
, asdl_seq
*generators
,
2587 int gen_index
, expr_ty elt
)
2589 /* generate code for the iterator, then each of the ifs,
2590 and then write to the element */
2593 basicblock
*start
, *anchor
, *skip
, *if_cleanup
;
2596 start
= compiler_new_block(c
);
2597 skip
= compiler_new_block(c
);
2598 if_cleanup
= compiler_new_block(c
);
2599 anchor
= compiler_new_block(c
);
2601 if (start
== NULL
|| skip
== NULL
|| if_cleanup
== NULL
||
2605 l
= (comprehension_ty
)asdl_seq_GET(generators
, gen_index
);
2606 VISIT(c
, expr
, l
->iter
);
2608 compiler_use_next_block(c
, start
);
2609 ADDOP_JREL(c
, FOR_ITER
, anchor
);
2611 VISIT(c
, expr
, l
->target
);
2613 /* XXX this needs to be cleaned up...a lot! */
2614 n
= asdl_seq_LEN(l
->ifs
);
2615 for (i
= 0; i
< n
; i
++) {
2616 expr_ty e
= (expr_ty
)asdl_seq_GET(l
->ifs
, i
);
2618 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, if_cleanup
);
2622 if (++gen_index
< asdl_seq_LEN(generators
))
2623 if (!compiler_listcomp_generator(c
, generators
, gen_index
, elt
))
2626 /* only append after the last for generator */
2627 if (gen_index
>= asdl_seq_LEN(generators
)) {
2628 VISIT(c
, expr
, elt
);
2629 ADDOP_I(c
, LIST_APPEND
, gen_index
+1);
2631 compiler_use_next_block(c
, skip
);
2633 compiler_use_next_block(c
, if_cleanup
);
2634 ADDOP_JABS(c
, JUMP_ABSOLUTE
, start
);
2635 compiler_use_next_block(c
, anchor
);
2641 compiler_listcomp(struct compiler
*c
, expr_ty e
)
2643 assert(e
->kind
== ListComp_kind
);
2644 ADDOP_I(c
, BUILD_LIST
, 0);
2645 return compiler_listcomp_generator(c
, e
->v
.ListComp
.generators
, 0,
2650 compiler_genexp_generator(struct compiler
*c
,
2651 asdl_seq
*generators
, int gen_index
,
2654 /* generate code for the iterator, then each of the ifs,
2655 and then write to the element */
2657 comprehension_ty ge
;
2658 basicblock
*start
, *anchor
, *skip
, *if_cleanup
, *end
;
2661 start
= compiler_new_block(c
);
2662 skip
= compiler_new_block(c
);
2663 if_cleanup
= compiler_new_block(c
);
2664 anchor
= compiler_new_block(c
);
2665 end
= compiler_new_block(c
);
2667 if (start
== NULL
|| skip
== NULL
|| if_cleanup
== NULL
||
2668 anchor
== NULL
|| end
== NULL
)
2671 ge
= (comprehension_ty
)asdl_seq_GET(generators
, gen_index
);
2672 ADDOP_JREL(c
, SETUP_LOOP
, end
);
2673 if (!compiler_push_fblock(c
, LOOP
, start
))
2676 if (gen_index
== 0) {
2677 /* Receive outermost iter as an implicit argument */
2678 c
->u
->u_argcount
= 1;
2679 ADDOP_I(c
, LOAD_FAST
, 0);
2682 /* Sub-iter - calculate on the fly */
2683 VISIT(c
, expr
, ge
->iter
);
2686 compiler_use_next_block(c
, start
);
2687 ADDOP_JREL(c
, FOR_ITER
, anchor
);
2689 VISIT(c
, expr
, ge
->target
);
2691 /* XXX this needs to be cleaned up...a lot! */
2692 n
= asdl_seq_LEN(ge
->ifs
);
2693 for (i
= 0; i
< n
; i
++) {
2694 expr_ty e
= (expr_ty
)asdl_seq_GET(ge
->ifs
, i
);
2696 ADDOP_JABS(c
, POP_JUMP_IF_FALSE
, if_cleanup
);
2700 if (++gen_index
< asdl_seq_LEN(generators
))
2701 if (!compiler_genexp_generator(c
, generators
, gen_index
, elt
))
2704 /* only append after the last 'for' generator */
2705 if (gen_index
>= asdl_seq_LEN(generators
)) {
2706 VISIT(c
, expr
, elt
);
2707 ADDOP(c
, YIELD_VALUE
);
2710 compiler_use_next_block(c
, skip
);
2712 compiler_use_next_block(c
, if_cleanup
);
2713 ADDOP_JABS(c
, JUMP_ABSOLUTE
, start
);
2714 compiler_use_next_block(c
, anchor
);
2715 ADDOP(c
, POP_BLOCK
);
2716 compiler_pop_fblock(c
, LOOP
, start
);
2717 compiler_use_next_block(c
, end
);
2723 compiler_genexp(struct compiler
*c
, expr_ty e
)
2725 static identifier name
;
2727 expr_ty outermost_iter
= ((comprehension_ty
)
2728 (asdl_seq_GET(e
->v
.GeneratorExp
.generators
,
2732 name
= PyString_FromString("<genexpr>");
2737 if (!compiler_enter_scope(c
, name
, (void *)e
, e
->lineno
))
2739 compiler_genexp_generator(c
, e
->v
.GeneratorExp
.generators
, 0,
2740 e
->v
.GeneratorExp
.elt
);
2741 co
= assemble(c
, 1);
2742 compiler_exit_scope(c
);
2746 compiler_make_closure(c
, co
, 0);
2749 VISIT(c
, expr
, outermost_iter
);
2751 ADDOP_I(c
, CALL_FUNCTION
, 1);
2757 compiler_visit_keyword(struct compiler
*c
, keyword_ty k
)
2759 ADDOP_O(c
, LOAD_CONST
, k
->arg
, consts
);
2760 VISIT(c
, expr
, k
->value
);
2764 /* Test whether expression is constant. For constants, report
2765 whether they are true or false.
2767 Return values: 1 for true, 0 for false, -1 for non-constant.
2771 expr_constant(expr_ty e
)
2775 return PyObject_IsTrue(e
->v
.Num
.n
);
2777 return PyObject_IsTrue(e
->v
.Str
.s
);
2779 /* __debug__ is not assignable, so we can optimize
2780 * it away in if and while statements */
2781 if (strcmp(PyString_AS_STRING(e
->v
.Name
.id
),
2783 return ! Py_OptimizeFlag
;
2791 Implements the with statement from PEP 343.
2793 The semantics outlined in that PEP are as follows:
2798 It is implemented roughly as:
2801 exit = context.__exit__ # not calling it
2802 value = context.__enter__()
2804 VAR = value # if VAR present in the syntax
2807 if an exception was raised:
2808 exc = copy of (exception, instance, traceback)
2810 exc = (None, None, None)
2814 compiler_with(struct compiler
*c
, stmt_ty s
)
2816 basicblock
*block
, *finally
;
2818 assert(s
->kind
== With_kind
);
2820 block
= compiler_new_block(c
);
2821 finally
= compiler_new_block(c
);
2822 if (!block
|| !finally
)
2826 VISIT(c
, expr
, s
->v
.With
.context_expr
);
2827 ADDOP_JREL(c
, SETUP_WITH
, finally
);
2829 /* SETUP_WITH pushes a finally block. */
2830 compiler_use_next_block(c
, block
);
2831 if (!compiler_push_fblock(c
, FINALLY_TRY
, block
)) {
2835 if (s
->v
.With
.optional_vars
) {
2836 VISIT(c
, expr
, s
->v
.With
.optional_vars
);
2839 /* Discard result from context.__enter__() */
2844 VISIT_SEQ(c
, stmt
, s
->v
.With
.body
);
2846 /* End of try block; start the finally block */
2847 ADDOP(c
, POP_BLOCK
);
2848 compiler_pop_fblock(c
, FINALLY_TRY
, block
);
2850 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2851 compiler_use_next_block(c
, finally
);
2852 if (!compiler_push_fblock(c
, FINALLY_END
, finally
))
2855 /* Finally block starts; context.__exit__ is on the stack under
2856 the exception or return information. Just issue our magic
2858 ADDOP(c
, WITH_CLEANUP
);
2860 /* Finally block ends. */
2861 ADDOP(c
, END_FINALLY
);
2862 compiler_pop_fblock(c
, FINALLY_END
, finally
);
2867 compiler_visit_expr(struct compiler
*c
, expr_ty e
)
2871 /* If expr e has a different line number than the last expr/stmt,
2872 set a new line number for the next instruction.
2874 if (e
->lineno
> c
->u
->u_lineno
) {
2875 c
->u
->u_lineno
= e
->lineno
;
2876 c
->u
->u_lineno_set
= false;
2880 return compiler_boolop(c
, e
);
2882 VISIT(c
, expr
, e
->v
.BinOp
.left
);
2883 VISIT(c
, expr
, e
->v
.BinOp
.right
);
2884 ADDOP(c
, binop(c
, e
->v
.BinOp
.op
));
2887 VISIT(c
, expr
, e
->v
.UnaryOp
.operand
);
2888 ADDOP(c
, unaryop(e
->v
.UnaryOp
.op
));
2891 return compiler_lambda(c
, e
);
2893 return compiler_ifexp(c
, e
);
2895 n
= asdl_seq_LEN(e
->v
.Dict
.values
);
2896 ADDOP_I(c
, BUILD_MAP
, (n
>0xFFFF ? 0xFFFF : n
));
2897 for (i
= 0; i
< n
; i
++) {
2899 (expr_ty
)asdl_seq_GET(e
->v
.Dict
.values
, i
));
2901 (expr_ty
)asdl_seq_GET(e
->v
.Dict
.keys
, i
));
2902 ADDOP(c
, STORE_MAP
);
2906 return compiler_listcomp(c
, e
);
2907 case GeneratorExp_kind
:
2908 return compiler_genexp(c
, e
);
2910 if (c
->u
->u_ste
->ste_type
!= FunctionBlock
)
2911 return compiler_error(c
, "'yield' outside function");
2912 if (e
->v
.Yield
.value
) {
2913 VISIT(c
, expr
, e
->v
.Yield
.value
);
2916 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
2918 ADDOP(c
, YIELD_VALUE
);
2921 return compiler_compare(c
, e
);
2923 return compiler_call(c
, e
);
2925 VISIT(c
, expr
, e
->v
.Repr
.value
);
2926 ADDOP(c
, UNARY_CONVERT
);
2929 ADDOP_O(c
, LOAD_CONST
, e
->v
.Num
.n
, consts
);
2932 ADDOP_O(c
, LOAD_CONST
, e
->v
.Str
.s
, consts
);
2934 /* The following exprs can be assignment targets. */
2935 case Attribute_kind
:
2936 if (e
->v
.Attribute
.ctx
!= AugStore
)
2937 VISIT(c
, expr
, e
->v
.Attribute
.value
);
2938 switch (e
->v
.Attribute
.ctx
) {
2941 /* Fall through to load */
2943 ADDOP_NAME(c
, LOAD_ATTR
, e
->v
.Attribute
.attr
, names
);
2947 /* Fall through to save */
2949 ADDOP_NAME(c
, STORE_ATTR
, e
->v
.Attribute
.attr
, names
);
2952 ADDOP_NAME(c
, DELETE_ATTR
, e
->v
.Attribute
.attr
, names
);
2956 PyErr_SetString(PyExc_SystemError
,
2957 "param invalid in attribute expression");
2961 case Subscript_kind
:
2962 switch (e
->v
.Subscript
.ctx
) {
2964 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2965 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, AugLoad
);
2968 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2969 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, Load
);
2972 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, AugStore
);
2975 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2976 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, Store
);
2979 VISIT(c
, expr
, e
->v
.Subscript
.value
);
2980 VISIT_SLICE(c
, e
->v
.Subscript
.slice
, Del
);
2984 PyErr_SetString(PyExc_SystemError
,
2985 "param invalid in subscript expression");
2990 return compiler_nameop(c
, e
->v
.Name
.id
, e
->v
.Name
.ctx
);
2991 /* child nodes of List and Tuple will have expr_context set */
2993 return compiler_list(c
, e
);
2995 return compiler_tuple(c
, e
);
3001 compiler_augassign(struct compiler
*c
, stmt_ty s
)
3003 expr_ty e
= s
->v
.AugAssign
.target
;
3006 assert(s
->kind
== AugAssign_kind
);
3009 case Attribute_kind
:
3010 auge
= Attribute(e
->v
.Attribute
.value
, e
->v
.Attribute
.attr
,
3011 AugLoad
, e
->lineno
, e
->col_offset
, c
->c_arena
);
3014 VISIT(c
, expr
, auge
);
3015 VISIT(c
, expr
, s
->v
.AugAssign
.value
);
3016 ADDOP(c
, inplace_binop(c
, s
->v
.AugAssign
.op
));
3017 auge
->v
.Attribute
.ctx
= AugStore
;
3018 VISIT(c
, expr
, auge
);
3020 case Subscript_kind
:
3021 auge
= Subscript(e
->v
.Subscript
.value
, e
->v
.Subscript
.slice
,
3022 AugLoad
, e
->lineno
, e
->col_offset
, c
->c_arena
);
3025 VISIT(c
, expr
, auge
);
3026 VISIT(c
, expr
, s
->v
.AugAssign
.value
);
3027 ADDOP(c
, inplace_binop(c
, s
->v
.AugAssign
.op
));
3028 auge
->v
.Subscript
.ctx
= AugStore
;
3029 VISIT(c
, expr
, auge
);
3032 if (!compiler_nameop(c
, e
->v
.Name
.id
, Load
))
3034 VISIT(c
, expr
, s
->v
.AugAssign
.value
);
3035 ADDOP(c
, inplace_binop(c
, s
->v
.AugAssign
.op
));
3036 return compiler_nameop(c
, e
->v
.Name
.id
, Store
);
3038 PyErr_Format(PyExc_SystemError
,
3039 "invalid node type (%d) for augmented assignment",
3047 compiler_push_fblock(struct compiler
*c
, enum fblocktype t
, basicblock
*b
)
3049 struct fblockinfo
*f
;
3050 if (c
->u
->u_nfblocks
>= CO_MAXBLOCKS
) {
3051 PyErr_SetString(PyExc_SystemError
,
3052 "too many statically nested blocks");
3055 f
= &c
->u
->u_fblock
[c
->u
->u_nfblocks
++];
3062 compiler_pop_fblock(struct compiler
*c
, enum fblocktype t
, basicblock
*b
)
3064 struct compiler_unit
*u
= c
->u
;
3065 assert(u
->u_nfblocks
> 0);
3067 assert(u
->u_fblock
[u
->u_nfblocks
].fb_type
== t
);
3068 assert(u
->u_fblock
[u
->u_nfblocks
].fb_block
== b
);
3072 compiler_in_loop(struct compiler
*c
) {
3074 struct compiler_unit
*u
= c
->u
;
3075 for (i
= 0; i
< u
->u_nfblocks
; ++i
) {
3076 if (u
->u_fblock
[i
].fb_type
== LOOP
)
3081 /* Raises a SyntaxError and returns 0.
3082 If something goes wrong, a different exception may be raised.
3086 compiler_error(struct compiler
*c
, const char *errstr
)
3089 PyObject
*u
= NULL
, *v
= NULL
;
3091 loc
= PyErr_ProgramText(c
->c_filename
, c
->u
->u_lineno
);
3096 u
= Py_BuildValue("(ziOO)", c
->c_filename
, c
->u
->u_lineno
,
3100 v
= Py_BuildValue("(zO)", errstr
, u
);
3103 PyErr_SetObject(PyExc_SyntaxError
, v
);
3112 compiler_handle_subscr(struct compiler
*c
, const char *kind
,
3113 expr_context_ty ctx
)
3117 /* XXX this code is duplicated */
3119 case AugLoad
: /* fall through to Load */
3120 case Load
: op
= BINARY_SUBSCR
; break;
3121 case AugStore
:/* fall through to Store */
3122 case Store
: op
= STORE_SUBSCR
; break;
3123 case Del
: op
= DELETE_SUBSCR
; break;
3125 PyErr_Format(PyExc_SystemError
,
3126 "invalid %s kind %d in subscript\n",
3130 if (ctx
== AugLoad
) {
3131 ADDOP_I(c
, DUP_TOPX
, 2);
3133 else if (ctx
== AugStore
) {
3134 ADDOP(c
, ROT_THREE
);
3141 compiler_slice(struct compiler
*c
, slice_ty s
, expr_context_ty ctx
)
3144 assert(s
->kind
== Slice_kind
);
3146 /* only handles the cases where BUILD_SLICE is emitted */
3147 if (s
->v
.Slice
.lower
) {
3148 VISIT(c
, expr
, s
->v
.Slice
.lower
);
3151 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
3154 if (s
->v
.Slice
.upper
) {
3155 VISIT(c
, expr
, s
->v
.Slice
.upper
);
3158 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
3161 if (s
->v
.Slice
.step
) {
3163 VISIT(c
, expr
, s
->v
.Slice
.step
);
3165 ADDOP_I(c
, BUILD_SLICE
, n
);
3170 compiler_simple_slice(struct compiler
*c
, slice_ty s
, expr_context_ty ctx
)
3172 int op
= 0, slice_offset
= 0, stack_count
= 0;
3174 assert(s
->v
.Slice
.step
== NULL
);
3175 if (s
->v
.Slice
.lower
) {
3178 if (ctx
!= AugStore
)
3179 VISIT(c
, expr
, s
->v
.Slice
.lower
);
3181 if (s
->v
.Slice
.upper
) {
3184 if (ctx
!= AugStore
)
3185 VISIT(c
, expr
, s
->v
.Slice
.upper
);
3188 if (ctx
== AugLoad
) {
3189 switch (stack_count
) {
3190 case 0: ADDOP(c
, DUP_TOP
); break;
3191 case 1: ADDOP_I(c
, DUP_TOPX
, 2); break;
3192 case 2: ADDOP_I(c
, DUP_TOPX
, 3); break;
3195 else if (ctx
== AugStore
) {
3196 switch (stack_count
) {
3197 case 0: ADDOP(c
, ROT_TWO
); break;
3198 case 1: ADDOP(c
, ROT_THREE
); break;
3199 case 2: ADDOP(c
, ROT_FOUR
); break;
3204 case AugLoad
: /* fall through to Load */
3205 case Load
: op
= SLICE
; break;
3206 case AugStore
:/* fall through to Store */
3207 case Store
: op
= STORE_SLICE
; break;
3208 case Del
: op
= DELETE_SLICE
; break;
3211 PyErr_SetString(PyExc_SystemError
,
3212 "param invalid in simple slice");
3216 ADDOP(c
, op
+ slice_offset
);
3221 compiler_visit_nested_slice(struct compiler
*c
, slice_ty s
,
3222 expr_context_ty ctx
)
3226 ADDOP_O(c
, LOAD_CONST
, Py_Ellipsis
, consts
);
3229 return compiler_slice(c
, s
, ctx
);
3231 VISIT(c
, expr
, s
->v
.Index
.value
);
3235 PyErr_SetString(PyExc_SystemError
,
3236 "extended slice invalid in nested slice");
3243 compiler_visit_slice(struct compiler
*c
, slice_ty s
, expr_context_ty ctx
)
3245 char * kindname
= NULL
;
3249 if (ctx
!= AugStore
) {
3250 VISIT(c
, expr
, s
->v
.Index
.value
);
3254 kindname
= "ellipsis";
3255 if (ctx
!= AugStore
) {
3256 ADDOP_O(c
, LOAD_CONST
, Py_Ellipsis
, consts
);
3261 if (!s
->v
.Slice
.step
)
3262 return compiler_simple_slice(c
, s
, ctx
);
3263 if (ctx
!= AugStore
) {
3264 if (!compiler_slice(c
, s
, ctx
))
3269 kindname
= "extended slice";
3270 if (ctx
!= AugStore
) {
3271 int i
, n
= asdl_seq_LEN(s
->v
.ExtSlice
.dims
);
3272 for (i
= 0; i
< n
; i
++) {
3273 slice_ty sub
= (slice_ty
)asdl_seq_GET(
3274 s
->v
.ExtSlice
.dims
, i
);
3275 if (!compiler_visit_nested_slice(c
, sub
, ctx
))
3278 ADDOP_I(c
, BUILD_TUPLE
, n
);
3282 PyErr_Format(PyExc_SystemError
,
3283 "invalid subscript kind %d", s
->kind
);
3286 return compiler_handle_subscr(c
, kindname
, ctx
);
/* End of the compiler section, beginning of the assembler section */

/* do depth-first search of basic block graph, starting with block.
   post records the block indices in post-order.

   XXX must handle implicit jumps from one block to next
*/
3299 PyObject
*a_bytecode
; /* string containing bytecode */
3300 int a_offset
; /* offset into bytecode */
3301 int a_nblocks
; /* number of reachable blocks */
3302 basicblock
**a_postorder
; /* list of blocks in dfs postorder */
3303 PyObject
*a_lnotab
; /* string containing lnotab */
3304 int a_lnotab_off
; /* offset into lnotab */
3305 int a_lineno
; /* last lineno of emitted instruction */
3306 int a_lineno_off
; /* bytecode offset of last lineno */
3310 dfs(struct compiler
*c
, basicblock
*b
, struct assembler
*a
)
3313 struct instr
*instr
= NULL
;
3318 if (b
->b_next
!= NULL
)
3319 dfs(c
, b
->b_next
, a
);
3320 for (i
= 0; i
< b
->b_iused
; i
++) {
3321 instr
= &b
->b_instr
[i
];
3322 if (instr
->i_jrel
|| instr
->i_jabs
)
3323 dfs(c
, instr
->i_target
, a
);
3325 a
->a_postorder
[a
->a_nblocks
++] = b
;
3329 stackdepth_walk(struct compiler
*c
, basicblock
*b
, int depth
, int maxdepth
)
3332 struct instr
*instr
;
3333 if (b
->b_seen
|| b
->b_startdepth
>= depth
)
3336 b
->b_startdepth
= depth
;
3337 for (i
= 0; i
< b
->b_iused
; i
++) {
3338 instr
= &b
->b_instr
[i
];
3339 depth
+= opcode_stack_effect(instr
->i_opcode
, instr
->i_oparg
);
3340 if (depth
> maxdepth
)
3342 assert(depth
>= 0); /* invalid code or bug in stackdepth() */
3343 if (instr
->i_jrel
|| instr
->i_jabs
) {
3344 maxdepth
= stackdepth_walk(c
, instr
->i_target
,
3346 if (instr
->i_opcode
== JUMP_ABSOLUTE
||
3347 instr
->i_opcode
== JUMP_FORWARD
) {
3348 goto out
; /* remaining code is dead */
3353 maxdepth
= stackdepth_walk(c
, b
->b_next
, depth
, maxdepth
);
/* Find the flow path that needs the largest stack.  We assume that
 * cycles in the flow graph have no net effect on the stack depth.
 */
3363 stackdepth(struct compiler
*c
)
3365 basicblock
*b
, *entryblock
;
3367 for (b
= c
->u
->u_blocks
; b
!= NULL
; b
= b
->b_list
) {
3369 b
->b_startdepth
= INT_MIN
;
3374 return stackdepth_walk(c
, entryblock
, 0, 0);
3378 assemble_init(struct assembler
*a
, int nblocks
, int firstlineno
)
3380 memset(a
, 0, sizeof(struct assembler
));
3381 a
->a_lineno
= firstlineno
;
3382 a
->a_bytecode
= PyString_FromStringAndSize(NULL
, DEFAULT_CODE_SIZE
);
3385 a
->a_lnotab
= PyString_FromStringAndSize(NULL
, DEFAULT_LNOTAB_SIZE
);
3388 if (nblocks
> PY_SIZE_MAX
/ sizeof(basicblock
*)) {
3392 a
->a_postorder
= (basicblock
**)PyObject_Malloc(
3393 sizeof(basicblock
*) * nblocks
);
3394 if (!a
->a_postorder
) {
3402 assemble_free(struct assembler
*a
)
3404 Py_XDECREF(a
->a_bytecode
);
3405 Py_XDECREF(a
->a_lnotab
);
3407 PyObject_Free(a
->a_postorder
);
3410 /* Return the size of a basic block in bytes. */
3413 instrsize(struct instr
*instr
)
3415 if (!instr
->i_hasarg
)
3416 return 1; /* 1 byte for the opcode*/
3417 if (instr
->i_oparg
> 0xffff)
3418 return 6; /* 1 (opcode) + 1 (EXTENDED_ARG opcode) + 2 (oparg) + 2(oparg extended) */
3419 return 3; /* 1 (opcode) + 2 (oparg) */
3423 blocksize(basicblock
*b
)
3428 for (i
= 0; i
< b
->b_iused
; i
++)
3429 size
+= instrsize(&b
->b_instr
[i
]);
/* Appends a pair to the end of the line number table, a_lnotab, representing
   the instruction's bytecode offset and line number.  See
   Objects/lnotab_notes.txt for the description of the line number table. */
3438 assemble_lnotab(struct assembler
*a
, struct instr
*i
)
3440 int d_bytecode
, d_lineno
;
3442 unsigned char *lnotab
;
3444 d_bytecode
= a
->a_offset
- a
->a_lineno_off
;
3445 d_lineno
= i
->i_lineno
- a
->a_lineno
;
3447 assert(d_bytecode
>= 0);
3448 assert(d_lineno
>= 0);
3450 if(d_bytecode
== 0 && d_lineno
== 0)
3453 if (d_bytecode
> 255) {
3454 int j
, nbytes
, ncodes
= d_bytecode
/ 255;
3455 nbytes
= a
->a_lnotab_off
+ 2 * ncodes
;
3456 len
= PyString_GET_SIZE(a
->a_lnotab
);
3457 if (nbytes
>= len
) {
3458 if ((len
<= INT_MAX
/ 2) && (len
* 2 < nbytes
))
3460 else if (len
<= INT_MAX
/ 2)
3466 if (_PyString_Resize(&a
->a_lnotab
, len
) < 0)
3469 lnotab
= (unsigned char *)
3470 PyString_AS_STRING(a
->a_lnotab
) + a
->a_lnotab_off
;
3471 for (j
= 0; j
< ncodes
; j
++) {
3475 d_bytecode
-= ncodes
* 255;
3476 a
->a_lnotab_off
+= ncodes
* 2;
3478 assert(d_bytecode
<= 255);
3479 if (d_lineno
> 255) {
3480 int j
, nbytes
, ncodes
= d_lineno
/ 255;
3481 nbytes
= a
->a_lnotab_off
+ 2 * ncodes
;
3482 len
= PyString_GET_SIZE(a
->a_lnotab
);
3483 if (nbytes
>= len
) {
3484 if ((len
<= INT_MAX
/ 2) && len
* 2 < nbytes
)
3486 else if (len
<= INT_MAX
/ 2)
3492 if (_PyString_Resize(&a
->a_lnotab
, len
) < 0)
3495 lnotab
= (unsigned char *)
3496 PyString_AS_STRING(a
->a_lnotab
) + a
->a_lnotab_off
;
3497 *lnotab
++ = d_bytecode
;
3500 for (j
= 1; j
< ncodes
; j
++) {
3504 d_lineno
-= ncodes
* 255;
3505 a
->a_lnotab_off
+= ncodes
* 2;
3508 len
= PyString_GET_SIZE(a
->a_lnotab
);
3509 if (a
->a_lnotab_off
+ 2 >= len
) {
3510 if (_PyString_Resize(&a
->a_lnotab
, len
* 2) < 0)
3513 lnotab
= (unsigned char *)
3514 PyString_AS_STRING(a
->a_lnotab
) + a
->a_lnotab_off
;
3516 a
->a_lnotab_off
+= 2;
3518 *lnotab
++ = d_bytecode
;
3519 *lnotab
++ = d_lineno
;
3521 else { /* First line of a block; def stmt, etc. */
3523 *lnotab
++ = d_lineno
;
3525 a
->a_lineno
= i
->i_lineno
;
3526 a
->a_lineno_off
= a
->a_offset
;
/* assemble_emit()
   Extend the bytecode with a new instruction.
   Update lnotab if necessary.
*/
3536 assemble_emit(struct assembler
*a
, struct instr
*i
)
3538 int size
, arg
= 0, ext
= 0;
3539 Py_ssize_t len
= PyString_GET_SIZE(a
->a_bytecode
);
3542 size
= instrsize(i
);
3547 if (i
->i_lineno
&& !assemble_lnotab(a
, i
))
3549 if (a
->a_offset
+ size
>= len
) {
3550 if (len
> PY_SSIZE_T_MAX
/ 2)
3552 if (_PyString_Resize(&a
->a_bytecode
, len
* 2) < 0)
3555 code
= PyString_AS_STRING(a
->a_bytecode
) + a
->a_offset
;
3556 a
->a_offset
+= size
;
3558 assert(i
->i_hasarg
);
3559 *code
++ = (char)EXTENDED_ARG
;
3560 *code
++ = ext
& 0xff;
3564 *code
++ = i
->i_opcode
;
3566 assert(size
== 3 || size
== 6);
3567 *code
++ = arg
& 0xff;
3574 assemble_jump_offsets(struct assembler
*a
, struct compiler
*c
)
3577 int bsize
, totsize
, extended_arg_count
, last_extended_arg_count
= 0;
3580 /* Compute the size of each block and fixup jump args.
3581 Replace block pointer with position in bytecode. */
3584 for (i
= a
->a_nblocks
- 1; i
>= 0; i
--) {
3585 b
= a
->a_postorder
[i
];
3586 bsize
= blocksize(b
);
3587 b
->b_offset
= totsize
;
3590 extended_arg_count
= 0;
3591 for (b
= c
->u
->u_blocks
; b
!= NULL
; b
= b
->b_list
) {
3592 bsize
= b
->b_offset
;
3593 for (i
= 0; i
< b
->b_iused
; i
++) {
3594 struct instr
*instr
= &b
->b_instr
[i
];
3595 /* Relative jumps are computed relative to
3596 the instruction pointer after fetching
3597 the jump instruction.
3599 bsize
+= instrsize(instr
);
3601 instr
->i_oparg
= instr
->i_target
->b_offset
;
3602 else if (instr
->i_jrel
) {
3603 int delta
= instr
->i_target
->b_offset
- bsize
;
3604 instr
->i_oparg
= delta
;
3608 if (instr
->i_oparg
> 0xffff)
3609 extended_arg_count
++;
3613 /* XXX: This is an awful hack that could hurt performance, but
3614 on the bright side it should work until we come up
3615 with a better solution.
3617 In the meantime, should the goto be dropped in favor
3620 The issue is that in the first loop blocksize() is called
3621 which calls instrsize() which requires i_oparg be set
3622 appropriately. There is a bootstrap problem because
3623 i_oparg is calculated in the second loop above.
3625 So we loop until we stop seeing new EXTENDED_ARGs.
3626 The only EXTENDED_ARGs that could be popping up are
3627 ones in jump instructions. So this should converge
3630 if (last_extended_arg_count
!= extended_arg_count
) {
3631 last_extended_arg_count
= extended_arg_count
;
3637 dict_keys_inorder(PyObject
*dict
, int offset
)
3639 PyObject
*tuple
, *k
, *v
;
3640 Py_ssize_t i
, pos
= 0, size
= PyDict_Size(dict
);
3642 tuple
= PyTuple_New(size
);
3645 while (PyDict_Next(dict
, &pos
, &k
, &v
)) {
3646 i
= PyInt_AS_LONG(v
);
3647 /* The keys of the dictionary are tuples. (see compiler_add_o)
3648 The object we want is always first, though. */
3649 k
= PyTuple_GET_ITEM(k
, 0);
3651 assert((i
- offset
) < size
);
3652 assert((i
- offset
) >= 0);
3653 PyTuple_SET_ITEM(tuple
, i
- offset
, k
);
3659 compute_code_flags(struct compiler
*c
)
3661 PySTEntryObject
*ste
= c
->u
->u_ste
;
3663 if (ste
->ste_type
!= ModuleBlock
)
3664 flags
|= CO_NEWLOCALS
;
3665 if (ste
->ste_type
== FunctionBlock
) {
3666 if (!ste
->ste_unoptimized
)
3667 flags
|= CO_OPTIMIZED
;
3668 if (ste
->ste_nested
)
3670 if (ste
->ste_generator
)
3671 flags
|= CO_GENERATOR
;
3672 if (ste
->ste_varargs
)
3673 flags
|= CO_VARARGS
;
3674 if (ste
->ste_varkeywords
)
3675 flags
|= CO_VARKEYWORDS
;
3678 /* (Only) inherit compilerflags in PyCF_MASK */
3679 flags
|= (c
->c_flags
->cf_flags
& PyCF_MASK
);
3681 n
= PyDict_Size(c
->u
->u_freevars
);
3685 n
= PyDict_Size(c
->u
->u_cellvars
);
3696 static PyCodeObject
*
3697 makecode(struct compiler
*c
, struct assembler
*a
)
3700 PyCodeObject
*co
= NULL
;
3701 PyObject
*consts
= NULL
;
3702 PyObject
*names
= NULL
;
3703 PyObject
*varnames
= NULL
;
3704 PyObject
*filename
= NULL
;
3705 PyObject
*name
= NULL
;
3706 PyObject
*freevars
= NULL
;
3707 PyObject
*cellvars
= NULL
;
3708 PyObject
*bytecode
= NULL
;
3711 tmp
= dict_keys_inorder(c
->u
->u_consts
, 0);
3714 consts
= PySequence_List(tmp
); /* optimize_code requires a list */
3717 names
= dict_keys_inorder(c
->u
->u_names
, 0);
3718 varnames
= dict_keys_inorder(c
->u
->u_varnames
, 0);
3719 if (!consts
|| !names
|| !varnames
)
3722 cellvars
= dict_keys_inorder(c
->u
->u_cellvars
, 0);
3725 freevars
= dict_keys_inorder(c
->u
->u_freevars
, PyTuple_Size(cellvars
));
3728 filename
= PyString_FromString(c
->c_filename
);
3732 nlocals
= PyDict_Size(c
->u
->u_varnames
);
3733 flags
= compute_code_flags(c
);
3737 bytecode
= PyCode_Optimize(a
->a_bytecode
, consts
, names
, a
->a_lnotab
);
3741 tmp
= PyList_AsTuple(consts
); /* PyCode_New requires a tuple */
3747 co
= PyCode_New(c
->u
->u_argcount
, nlocals
, stackdepth(c
), flags
,
3748 bytecode
, consts
, names
, varnames
,
3750 filename
, c
->u
->u_name
,
3751 c
->u
->u_firstlineno
,
3756 Py_XDECREF(varnames
);
3757 Py_XDECREF(filename
);
3759 Py_XDECREF(freevars
);
3760 Py_XDECREF(cellvars
);
3761 Py_XDECREF(bytecode
);
3766 /* For debugging purposes only */
3769 dump_instr(const struct instr
*i
)
3771 const char *jrel
= i
->i_jrel
? "jrel " : "";
3772 const char *jabs
= i
->i_jabs
? "jabs " : "";
3777 sprintf(arg
, "arg: %d ", i
->i_oparg
);
3779 fprintf(stderr
, "line: %d, opcode: %d %s%s%s\n",
3780 i
->i_lineno
, i
->i_opcode
, arg
, jabs
, jrel
);
3784 dump_basicblock(const basicblock
*b
)
3786 const char *seen
= b
->b_seen
? "seen " : "";
3787 const char *b_return
= b
->b_return
? "return " : "";
3788 fprintf(stderr
, "used: %d, depth: %d, offset: %d %s%s\n",
3789 b
->b_iused
, b
->b_startdepth
, b
->b_offset
, seen
, b_return
);
3792 for (i
= 0; i
< b
->b_iused
; i
++) {
3793 fprintf(stderr
, " [%02d] ", i
);
3794 dump_instr(b
->b_instr
+ i
);
3800 static PyCodeObject
*
3801 assemble(struct compiler
*c
, int addNone
)
3803 basicblock
*b
, *entryblock
;
3806 PyCodeObject
*co
= NULL
;
3808 /* Make sure every block that falls off the end returns None.
3809 XXX NEXT_BLOCK() isn't quite right, because if the last
3810 block ends with a jump or return b_next shouldn't set.
3812 if (!c
->u
->u_curblock
->b_return
) {
3815 ADDOP_O(c
, LOAD_CONST
, Py_None
, consts
);
3816 ADDOP(c
, RETURN_VALUE
);
3821 for (b
= c
->u
->u_blocks
; b
!= NULL
; b
= b
->b_list
) {
3826 /* Set firstlineno if it wasn't explicitly set. */
3827 if (!c
->u
->u_firstlineno
) {
3828 if (entryblock
&& entryblock
->b_instr
)
3829 c
->u
->u_firstlineno
= entryblock
->b_instr
->i_lineno
;
3831 c
->u
->u_firstlineno
= 1;
3833 if (!assemble_init(&a
, nblocks
, c
->u
->u_firstlineno
))
3835 dfs(c
, entryblock
, &a
);
3837 /* Can't modify the bytecode after computing jump offsets. */
3838 assemble_jump_offsets(&a
, c
);
3840 /* Emit code in reverse postorder from dfs. */
3841 for (i
= a
.a_nblocks
- 1; i
>= 0; i
--) {
3842 b
= a
.a_postorder
[i
];
3843 for (j
= 0; j
< b
->b_iused
; j
++)
3844 if (!assemble_emit(&a
, &b
->b_instr
[j
]))
3848 if (_PyString_Resize(&a
.a_lnotab
, a
.a_lnotab_off
) < 0)
3850 if (_PyString_Resize(&a
.a_bytecode
, a
.a_offset
) < 0)
3853 co
= makecode(c
, &a
);