/* Execute compiled code */

/* XXX speed up searching for keywords by using a dictionary */

/* enable more aggressive intra-module optimizations, where available */
#define PY_LOCAL_AGGRESSIVE

#include "Python.h"
#include "frameobject.h"
#include "structmember.h"

#ifndef WITH_TSC

#define READ_TIMESTAMP(var)

#else

typedef unsigned long long uint64;

#if defined(__ppc__) /* <- Don't know if this is the correct symbol; this
                        section should work for GCC on any PowerPC
                        platform, irrespective of OS.
                        POWER?  Who knows :-) */
#define READ_TIMESTAMP(var) ppc_getcounter(&var)

static void
ppc_getcounter(uint64 *v)
{
    register unsigned long tbu, tb, tbu2;

  loop:
    asm volatile ("mftbu %0" : "=r" (tbu) );
    asm volatile ("mftb  %0" : "=r" (tb)  );
    asm volatile ("mftbu %0" : "=r" (tbu2));
    if (__builtin_expect(tbu != tbu2, 0)) goto loop;

    /* The slightly peculiar way of writing the next lines is
       compiled better by GCC than any other way I tried. */
    ((long*)(v))[0] = tbu;
    ((long*)(v))[1] = tb;
}

#else /* this is for linux/x86 (and probably any other GCC/x86 combo) */

#define READ_TIMESTAMP(val) \
    __asm__ __volatile__("rdtsc" : "=A" (val))

#endif
void dump_tsc(int opcode, int ticked, uint64 inst0, uint64 inst1,
              uint64 loop0, uint64 loop1, uint64 intr0, uint64 intr1)
{
    uint64 intr, inst, loop;
    PyThreadState *tstate = PyThreadState_Get();
    if (!tstate->interp->tscdump)
        return;
    intr = intr1 - intr0;
    inst = inst1 - inst0 - intr;
    loop = loop1 - loop0 - intr;
    fprintf(stderr, "opcode=%03d t=%d inst=%06lld loop=%06lld\n",
            opcode, ticked, inst, loop);
}

#endif /* WITH_TSC */
/* Turn this on if your compiler chokes on the big switch: */
/* #define CASE_TOO_BIG 1 */

/* For debugging the interpreter: */
#define LLTRACE  1      /* Low-level trace feature */
#define CHECKEXC 1      /* Double-check exception checking */

typedef PyObject *(*callproc)(PyObject *, PyObject *, PyObject *);
/* Forward declarations */
#ifdef WITH_TSC
static PyObject * call_function(PyObject ***, int, uint64 *, uint64 *);
#else
static PyObject * call_function(PyObject ***, int);
#endif
static PyObject * fast_function(PyObject *, PyObject ***, int, int, int);
static PyObject * do_call(PyObject *, PyObject ***, int, int);
static PyObject * ext_do_call(PyObject *, PyObject ***, int, int, int);
static PyObject * update_keyword_args(PyObject *, int, PyObject ***,
                                      PyObject *);
static PyObject * update_star_args(int, int, PyObject *, PyObject ***);
static PyObject * load_args(PyObject ***, int);
#define CALL_FLAG_VAR 1
#define CALL_FLAG_KW 2

static int prtrace(PyObject *, char *);
static int call_trace(Py_tracefunc, PyObject *, PyFrameObject *,
                      int, PyObject *);
static int call_trace_protected(Py_tracefunc, PyObject *,
                                PyFrameObject *, int, PyObject *);
static void call_exc_trace(Py_tracefunc, PyObject *, PyFrameObject *);
static int maybe_call_line_trace(Py_tracefunc, PyObject *,
                                 PyFrameObject *, int *, int *, int *);

static PyObject * apply_slice(PyObject *, PyObject *, PyObject *);
static int assign_slice(PyObject *, PyObject *,
                        PyObject *, PyObject *);
static PyObject * cmp_outcome(int, PyObject *, PyObject *);
static PyObject * import_from(PyObject *, PyObject *);
static int import_all_from(PyObject *, PyObject *);
static PyObject * build_class(PyObject *, PyObject *, PyObject *);
static int exec_statement(PyFrameObject *,
                          PyObject *, PyObject *, PyObject *);
static void set_exc_info(PyThreadState *, PyObject *, PyObject *, PyObject *);
static void reset_exc_info(PyThreadState *);
static void format_exc_check_arg(PyObject *, char *, PyObject *);
static PyObject * string_concatenate(PyObject *, PyObject *,
                                     PyFrameObject *, unsigned char *);
static PyObject * kwd_as_string(PyObject *);
#define NAME_ERROR_MSG \
    "name '%.200s' is not defined"
#define GLOBAL_NAME_ERROR_MSG \
    "global name '%.200s' is not defined"
#define UNBOUNDLOCAL_ERROR_MSG \
    "local variable '%.200s' referenced before assignment"
#define UNBOUNDFREE_ERROR_MSG \
    "free variable '%.200s' referenced before assignment" \
    " in enclosing scope"
/* Dynamic execution profile */
#ifdef DYNAMIC_EXECUTION_PROFILE
#ifdef DXPAIRS
static long dxpairs[257][256];
#define dxp dxpairs[256]
#else
static long dxp[256];
#endif
#endif

/* Function call profile */
#ifdef CALL_PROFILE
#define PCALL_NUM 11
static int pcall[PCALL_NUM];

#define PCALL_FUNCTION 1
#define PCALL_FAST_FUNCTION 2
#define PCALL_FASTER_FUNCTION 3
#define PCALL_METHOD 4
#define PCALL_BOUND_METHOD 5
#define PCALL_CFUNCTION 6
#define PCALL_GENERATOR 8
#define PCALL_OTHER 9
/* Notes about the statistics

   FAST_FUNCTION means no argument tuple needs to be created.
   FASTER_FUNCTION means that the fast-path frame setup code is used.

   If there is a method call where the call can be optimized by changing
   the argument tuple and calling the function directly, it gets recorded
   twice.

   As a result, the relationship among the statistics appears to be
   PCALL_ALL == PCALL_FUNCTION + PCALL_METHOD - PCALL_BOUND_METHOD +
                PCALL_CFUNCTION + PCALL_TYPE + PCALL_GENERATOR + PCALL_OTHER
   PCALL_FUNCTION > PCALL_FAST_FUNCTION > PCALL_FASTER_FUNCTION
   PCALL_METHOD > PCALL_BOUND_METHOD
*/

#define PCALL(POS) pcall[POS]++
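
/* Illustrative sketch (not part of the original file): when the interpreter
   is built with call profiling, call sites bump the counters, e.g.

       PCALL(PCALL_FUNCTION);
       PCALL(PCALL_FAST_FUNCTION);

   and PyEval_GetCallStats() below packs the whole pcall[] array into a
   tuple so Python code can inspect it; in CPython this is exposed as
   sys.callstats(), but treat that Python-level name as an assumption here. */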
PyObject *
PyEval_GetCallStats(PyObject *self)
{
    return Py_BuildValue("iiiiiiiiiii",
                         pcall[0], pcall[1], pcall[2], pcall[3],
                         pcall[4], pcall[5], pcall[6], pcall[7],
                         pcall[8], pcall[9], pcall[10]);
}
#else
#define PCALL(O)

PyObject *
PyEval_GetCallStats(PyObject *self)
{
    Py_INCREF(Py_None);
    return Py_None;
}
#endif
214 #include "pythread.h"
216 static PyThread_type_lock interpreter_lock
= 0; /* This is the GIL */
217 static PyThread_type_lock pending_lock
= 0; /* for pending calls */
218 static long main_thread
= 0;
221 PyEval_ThreadsInitialized(void)
223 return interpreter_lock
!= 0;
227 PyEval_InitThreads(void)
229 if (interpreter_lock
)
231 interpreter_lock
= PyThread_allocate_lock();
232 PyThread_acquire_lock(interpreter_lock
, 1);
233 main_thread
= PyThread_get_thread_ident();
237 PyEval_AcquireLock(void)
239 PyThread_acquire_lock(interpreter_lock
, 1);
243 PyEval_ReleaseLock(void)
245 PyThread_release_lock(interpreter_lock
);
249 PyEval_AcquireThread(PyThreadState
*tstate
)
252 Py_FatalError("PyEval_AcquireThread: NULL new thread state");
253 /* Check someone has called PyEval_InitThreads() to create the lock */
254 assert(interpreter_lock
);
255 PyThread_acquire_lock(interpreter_lock
, 1);
256 if (PyThreadState_Swap(tstate
) != NULL
)
258 "PyEval_AcquireThread: non-NULL old thread state");
262 PyEval_ReleaseThread(PyThreadState
*tstate
)
265 Py_FatalError("PyEval_ReleaseThread: NULL thread state");
266 if (PyThreadState_Swap(NULL
) != tstate
)
267 Py_FatalError("PyEval_ReleaseThread: wrong thread state");
268 PyThread_release_lock(interpreter_lock
);
/* This function is called from PyOS_AfterFork to ensure that newly
   created child processes don't hold locks referring to threads which
   are not running in the child process.  (This could also be done using
   the pthread_atfork mechanism, at least for the pthreads implementation.) */
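
/* Illustrative sketch (not part of the original file): code that forks with
   the raw C fork() -- rather than os.fork(), which does this automatically --
   is expected to call PyOS_AfterFork() in the child, which in turn calls
   PyEval_ReInitThreads() below:

       pid_t pid = fork();
       if (pid == 0) {
           PyOS_AfterFork();
       }

   so the child gets a freshly allocated GIL and pending-call lock instead of
   locks that may still appear held by threads that no longer exist. */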
void
PyEval_ReInitThreads(void)
{
    PyObject *threading, *result;
    PyThreadState *tstate;

    if (!interpreter_lock)
        return;
    /*XXX Can't use PyThread_free_lock here because it does too
      much error-checking.  Doing this cleanly would require
      adding a new function to each thread_*.h.  Instead, just
      create a new lock and waste a little bit of memory */
    interpreter_lock = PyThread_allocate_lock();
    pending_lock = PyThread_allocate_lock();
    PyThread_acquire_lock(interpreter_lock, 1);
    main_thread = PyThread_get_thread_ident();

    /* Update the threading module with the new state.
     */
    tstate = PyThreadState_GET();
    threading = PyMapping_GetItemString(tstate->interp->modules,
                                        "threading");
    if (threading == NULL) {
        /* threading not imported */
        PyErr_Clear();
        return;
    }
    result = PyObject_CallMethod(threading, "_after_fork", NULL);
    if (result == NULL)
        PyErr_WriteUnraisable(threading);
    else
        Py_DECREF(result);
    Py_DECREF(threading);
}
/* Functions save_thread and restore_thread are always defined so
   dynamically loaded modules needn't be compiled separately for use
   with and without threads: */
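
/* Illustrative sketch (not part of the original file): extension modules
   normally reach these two functions through the standard macros, releasing
   the GIL around a blocking call like so --

       Py_BEGIN_ALLOW_THREADS
       n = read(fd, buf, len);
       Py_END_ALLOW_THREADS

   Py_BEGIN_ALLOW_THREADS expands to a block that calls PyEval_SaveThread(),
   and Py_END_ALLOW_THREADS hands the saved thread state back to
   PyEval_RestoreThread(); read()/fd/buf/len are hypothetical placeholders. */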
PyThreadState *
PyEval_SaveThread(void)
{
    PyThreadState *tstate = PyThreadState_Swap(NULL);
    if (tstate == NULL)
        Py_FatalError("PyEval_SaveThread: NULL tstate");
    if (interpreter_lock)
        PyThread_release_lock(interpreter_lock);
    return tstate;
}

void
PyEval_RestoreThread(PyThreadState *tstate)
{
    if (tstate == NULL)
        Py_FatalError("PyEval_RestoreThread: NULL tstate");
    if (interpreter_lock) {
        int err = errno;
        PyThread_acquire_lock(interpreter_lock, 1);
        errno = err;
    }
    PyThreadState_Swap(tstate);
}
/* Mechanism whereby asynchronously executing callbacks (e.g. UNIX
   signal handlers or Mac I/O completion routines) can schedule calls
   to a function to be called synchronously.
   The synchronous function is called with one void* argument.
   It should return 0 for success or -1 for failure -- failure should
   be accompanied by an exception.

   If registration succeeds, the registering function returns 0; if it
   fails (e.g. due to too many pending calls) it returns -1 (without
   setting an exception condition).

   Note that because registration may occur from within signal handlers,
   or other asynchronous events, calling malloc() is unsafe!

   Any thread can schedule pending calls, but only the main thread
   will execute them.

   There is no facility to schedule calls to a particular thread, but
   that should be easy to change, should that ever be required.  In
   that case, the static variables here should go into the python
   threadstate.
*/
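
/* Illustrative sketch (not part of the original file): a typical client
   registers a small C callback from an asynchronous context and lets the
   interpreter run it later on the main thread.  report_event and its
   argument are hypothetical --

       static int
       report_event(void *arg)
       {
           printf("event: %s\n", (const char *)arg);
           return 0;
       }

       ...
       Py_AddPendingCall(report_event, (void *)"timer");

   The callback returns 0 on success or -1 with an exception set; it is
   invoked by Py_MakePendingCalls() during the periodic check in the
   evaluation loop below, not inside the signal handler itself. */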
/* The WITH_THREAD implementation is thread-safe.  It allows
   scheduling to be made from any thread, and even from an executing
   callback.
 */

#define NPENDINGCALLS 32
static struct {
    int (*func)(void *);
    void *arg;
} pendingcalls[NPENDINGCALLS];
static int pendingfirst = 0;
static int pendinglast = 0;
static volatile int pendingcalls_to_do = 1; /* trigger initialization of lock */
static char pendingbusy = 0;
int
Py_AddPendingCall(int (*func)(void *), void *arg)
{
    int i, j, result = 0;
    PyThread_type_lock lock = pending_lock;

    /* try a few times for the lock.  Since this mechanism is used
     * for signal handling (on the main thread), there is a (slim)
     * chance that a signal is delivered on the same thread while we
     * hold the lock during the Py_MakePendingCalls() function.
     * This avoids a deadlock in that case.
     * Note that signals can be delivered on any thread.  In particular,
     * on Windows, a SIGINT is delivered on a system-created worker
     * thread.
     * We also check for lock being NULL, in the unlikely case that
     * this function is called before any bytecode evaluation takes place.
     */
    if (lock != NULL) {
        for (i = 0; i < 100; i++) {
            if (PyThread_acquire_lock(lock, NOWAIT_LOCK))
                break;
        }
        if (i == 100)
            return -1;
    }

    i = pendinglast;
    j = (i + 1) % NPENDINGCALLS;
    if (j == pendingfirst) {
        result = -1; /* Queue full */
    }
    else {
        pendingcalls[i].func = func;
        pendingcalls[i].arg = arg;
        pendinglast = j;
    }
    /* signal main loop */
    _Py_Ticker = 0;
    pendingcalls_to_do = 1;
    if (lock != NULL)
        PyThread_release_lock(lock);
    return result;
}
int
Py_MakePendingCalls(void)
{
    int i;
    int r = 0;

    if (!pending_lock) {
        /* initial allocation of the lock */
        pending_lock = PyThread_allocate_lock();
        if (pending_lock == NULL)
            return -1;
    }

    /* only service pending calls on main thread */
    if (main_thread && PyThread_get_thread_ident() != main_thread)
        return 0;
    /* don't perform recursive pending calls */
    if (pendingbusy)
        return 0;
    pendingbusy = 1;
    /* perform a bounded number of calls, in case of recursion */
    for (i = 0; i < NPENDINGCALLS; i++) {
        int j;
        int (*func)(void *);
        void *arg = NULL;

        /* pop one item off the queue while holding the lock */
        PyThread_acquire_lock(pending_lock, WAIT_LOCK);
        j = pendingfirst;
        if (j == pendinglast) {
            func = NULL; /* Queue empty */
        }
        else {
            func = pendingcalls[j].func;
            arg = pendingcalls[j].arg;
            pendingfirst = (j + 1) % NPENDINGCALLS;
        }
        pendingcalls_to_do = pendingfirst != pendinglast;
        PyThread_release_lock(pending_lock);
        /* having released the lock, perform the callback */
        if (func == NULL)
            break;
        r = func(arg);
        if (r)
            break;
    }
    pendingbusy = 0;
    return r;
}
#else /* if ! defined WITH_THREAD */

/*
   WARNING!  ASYNCHRONOUSLY EXECUTING CODE!
   This code is used for signal handling in python that isn't built
   with threads.
   Don't use this implementation when Py_AddPendingCalls() can happen
   on a different thread!

   There are two possible race conditions:
   (1) nested asynchronous calls to Py_AddPendingCall()
   (2) AddPendingCall() calls made while pending calls are being processed.

   (1) is very unlikely because typically signal delivery
   is blocked during signal handling.  So it should be impossible.
   (2) is a real possibility.
   The current code is safe against (2), but not against (1).
   The safety against (2) is derived from the fact that only one
   thread is present, interrupted by signals, and that the critical
   section is protected with the "busy" variable.  On Windows, which
   delivers SIGINT on a system thread, this does not hold and therefore
   Windows really shouldn't use this version.
   The two threads could theoretically wiggle around the "busy" variable.
*/
#define NPENDINGCALLS 32
static struct {
    int (*func)(void *);
    void *arg;
} pendingcalls[NPENDINGCALLS];
static volatile int pendingfirst = 0;
static volatile int pendinglast = 0;
static volatile int pendingcalls_to_do = 0;

int
Py_AddPendingCall(int (*func)(void *), void *arg)
{
    static volatile int busy = 0;
    int i, j;
    /* XXX Begin critical section */
    if (busy)
        return -1;
    busy = 1;
    i = pendinglast;
    j = (i + 1) % NPENDINGCALLS;
    if (j == pendingfirst) {
        busy = 0;
        return -1; /* Queue full */
    }
    pendingcalls[i].func = func;
    pendingcalls[i].arg = arg;
    pendinglast = j;

    _Py_Ticker = 0;
    pendingcalls_to_do = 1; /* Signal main loop */
    busy = 0;
    /* XXX End critical section */
    return 0;
}
int
Py_MakePendingCalls(void)
{
    static int busy = 0;
    if (busy)
        return 0;
    busy = 1;
    pendingcalls_to_do = 0;
    for (;;) {
        int i;
        int (*func)(void *);
        void *arg;
        i = pendingfirst;
        if (i == pendinglast)
            break; /* Queue empty */
        func = pendingcalls[i].func;
        arg = pendingcalls[i].arg;
        pendingfirst = (i + 1) % NPENDINGCALLS;
        if (func(arg)) {
            busy = 0;
            pendingcalls_to_do = 1; /* We're not done yet */
            return -1;
        }
    }
    busy = 0;
    return 0;
}

#endif /* WITH_THREAD */
/* The interpreter's recursion limit */

#ifndef Py_DEFAULT_RECURSION_LIMIT
#define Py_DEFAULT_RECURSION_LIMIT 1000
#endif
static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT;
int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT;
int
Py_GetRecursionLimit(void)
{
    return recursion_limit;
}

void
Py_SetRecursionLimit(int new_limit)
{
    recursion_limit = new_limit;
    _Py_CheckRecursionLimit = recursion_limit;
}
/* the macro Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall()
   if the recursion_depth reaches _Py_CheckRecursionLimit.
   If USE_STACKCHECK, the macro decrements _Py_CheckRecursionLimit
   to guarantee that _Py_CheckRecursiveCall() is regularly called.
   Without USE_STACKCHECK, there is no need for this. */
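
/* Illustrative sketch (not part of the original file): C code that can
   recurse arbitrarily deeply on behalf of Python objects brackets the
   recursion with Py_EnterRecursiveCall()/Py_LeaveRecursiveCall(); the names
   walk_object and do_walk are hypothetical --

       static int
       walk_object(PyObject *obj)
       {
           int rc;
           if (Py_EnterRecursiveCall(" while walking an object"))
               return -1;
           rc = do_walk(obj);
           Py_LeaveRecursiveCall();
           return rc;
       }

   When the limit is exceeded, Py_EnterRecursiveCall() returns nonzero with a
   RuntimeError already set, exactly as _Py_CheckRecursiveCall() below
   arranges. */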
int
_Py_CheckRecursiveCall(char *where)
{
    PyThreadState *tstate = PyThreadState_GET();

#ifdef USE_STACKCHECK
    if (PyOS_CheckStack()) {
        --tstate->recursion_depth;
        PyErr_SetString(PyExc_MemoryError, "Stack overflow");
        return -1;
    }
#endif
    if (tstate->recursion_depth > recursion_limit) {
        --tstate->recursion_depth;
        PyErr_Format(PyExc_RuntimeError,
                     "maximum recursion depth exceeded%s",
                     where);
        return -1;
    }
    _Py_CheckRecursionLimit = recursion_limit;
    return 0;
}
/* Status code for main loop (reason for stack unwind) */
enum why_code {
    WHY_NOT =       0x0001, /* No error */
    WHY_EXCEPTION = 0x0002, /* Exception occurred */
    WHY_RERAISE =   0x0004, /* Exception re-raised by 'finally' */
    WHY_RETURN =    0x0008, /* 'return' statement */
    WHY_BREAK =     0x0010, /* 'break' statement */
    WHY_CONTINUE =  0x0020, /* 'continue' statement */
    WHY_YIELD =     0x0040  /* 'yield' operator */
};

static enum why_code do_raise(PyObject *, PyObject *, PyObject *);
static int unpack_iterable(PyObject *, int, PyObject **);
/* Records whether tracing is on for any thread.  Counts the number of
   threads for which tstate->c_tracefunc is non-NULL, so if the value
   is 0, we know we don't have to check this thread's c_tracefunc.
   This speeds up the if statement in PyEval_EvalFrameEx() after
   fast_next_opcode. */
static int _Py_TracingPossible = 0;

/* for manipulating the thread switch and periodic "stuff" - used to be
   per thread, now just a pair o' globals */
int _Py_CheckInterval = 100;
volatile int _Py_Ticker = 0; /* so that we hit a "tick" first thing */
PyObject *
PyEval_EvalCode(PyCodeObject *co, PyObject *globals, PyObject *locals)
{
    return PyEval_EvalCodeEx(co,
                             globals, locals,
                             (PyObject **)NULL, 0,
                             (PyObject **)NULL, 0,
                             (PyObject **)NULL, 0,
                             NULL);
}


/* Interpreter main loop */
PyObject *
PyEval_EvalFrame(PyFrameObject *f) {
    /* This is for backward compatibility with extension modules that
       used this API; core interpreter code should call
       PyEval_EvalFrameEx() */

    return PyEval_EvalFrameEx(f, 0);
}

PyObject *
PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
{
    register PyObject **stack_pointer;  /* Next free slot in value stack */
    register unsigned char *next_instr;
    register int opcode;        /* Current opcode */
    register int oparg;         /* Current opcode argument, if any */
    register enum why_code why; /* Reason for block stack unwind */
    register int err;           /* Error status -- nonzero if error */
    register PyObject *x;       /* Result object -- NULL if error */
    register PyObject *v;       /* Temporary objects popped off stack */
    register PyObject *w;
    register PyObject *u;
    register PyObject *t;
    register PyObject *stream = NULL;   /* for PRINT opcodes */
    register PyObject **fastlocals, **freevars;
    PyObject *retval = NULL;            /* Return value */
    PyThreadState *tstate = PyThreadState_GET();

    /* when tracing we set things up so that

           not (instr_lb <= current_bytecode_offset < instr_ub)

       is true when the line being executed has changed.  The
       initial values are such as to make this false the first
       time it is tested. */
    int instr_ub = -1, instr_lb = 0, instr_prev = -1;

    unsigned char *first_instr;
#if defined(Py_DEBUG) || defined(LLTRACE)
    /* Make it easier to find out where we are with a debugger */
    char *filename;
#endif

/* Tuple access macros */

#ifndef Py_DEBUG
#define GETITEM(v, i) PyTuple_GET_ITEM((PyTupleObject *)(v), (i))
#else
#define GETITEM(v, i) PyTuple_GetItem((v), (i))
#endif
/* Use Pentium timestamp counter to mark certain events:
   inst0 -- beginning of switch statement for opcode dispatch
   inst1 -- end of switch statement (may be skipped)
   loop0 -- the top of the mainloop
   loop1 -- place where control returns again to top of mainloop
   intr0 -- beginning of long interruption
   intr1 -- end of long interruption

   Many opcodes call out to helper C functions.  In some cases, the
   time in those functions should be counted towards the time for the
   opcode, but not in all cases.  For example, a CALL_FUNCTION opcode
   calls another Python function; there's no point in charging all the
   bytecode executed by the called function to the caller.

   It's hard to make a useful judgement statically.  In the presence
   of operator overloading, it's impossible to tell if a call will
   execute new Python code or not.

   It's a case-by-case judgement.  I'll use intr1 for the following
   cases:
       CALL_FUNCTION (and friends)
 */
    uint64 inst0, inst1, loop0, loop1, intr0 = 0, intr1 = 0;
    int ticked = 0;

    READ_TIMESTAMP(inst0);
    READ_TIMESTAMP(inst1);
    READ_TIMESTAMP(loop0);
    READ_TIMESTAMP(loop1);

    /* shut up the compiler */
    opcode = 0;
/* Code access macros */

#define INSTR_OFFSET()  ((int)(next_instr - first_instr))
#define NEXTOP()        (*next_instr++)
#define NEXTARG()       (next_instr += 2, (next_instr[-1]<<8) + next_instr[-2])
#define PEEKARG()       ((next_instr[2]<<8) + next_instr[1])
#define JUMPTO(x)       (next_instr = first_instr + (x))
#define JUMPBY(x)       (next_instr += (x))
/* OpCode prediction macros
   Some opcodes tend to come in pairs, making it possible to
   predict the second opcode when the first is run.  For example,
   GET_ITER is often followed by FOR_ITER.  And FOR_ITER is often
   followed by STORE_FAST or UNPACK_SEQUENCE.

   Verifying the prediction costs a single high-speed test of a register
   variable against a constant.  If the pairing was good, then the
   processor's own internal branch prediction has a high likelihood of
   success, resulting in a nearly zero-overhead transition to the
   next opcode.  A successful prediction saves a trip through the eval-loop
   including its two unpredictable branches, the HAS_ARG test and the
   switch-case.  Combined with the processor's internal branch prediction,
   a successful PREDICT has the effect of making the two opcodes run as if
   they were a single new opcode with the bodies combined.

   If collecting opcode statistics, your choices are to either keep the
   predictions turned on and interpret the results as if some opcodes
   had been combined, or turn off predictions so that the opcode frequency
   counter updates for both opcodes.
*/

#ifdef DYNAMIC_EXECUTION_PROFILE
#define PREDICT(op)             if (0) goto PRED_##op
#else
#define PREDICT(op)             if (*next_instr == op) goto PRED_##op
#endif

#define PREDICTED(op)           PRED_##op: next_instr++
#define PREDICTED_WITH_ARG(op)  PRED_##op: oparg = PEEKARG(); next_instr += 3
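
/* Illustrative sketch (not part of the original file): the macros are used
   in matched pairs inside the big opcode switch further down.  The producing
   opcode announces its likely successor, and the successor carries the
   matching label, roughly --

       case GET_ITER:
           ...
           PREDICT(FOR_ITER);
           continue;

       PREDICTED_WITH_ARG(FOR_ITER);
       case FOR_ITER:
           ...

   so a correct guess jumps straight to PRED_FOR_ITER, skipping the periodic
   checks, the HAS_ARG test and the switch dispatch. */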
/* Stack manipulation macros */

/* The stack can grow at most MAXINT deep, as co_nlocals and
   co_stacksize are ints. */
#define STACK_LEVEL()   ((int)(stack_pointer - f->f_valuestack))
#define EMPTY()         (STACK_LEVEL() == 0)
#define TOP()           (stack_pointer[-1])
#define SECOND()        (stack_pointer[-2])
#define THIRD()         (stack_pointer[-3])
#define FOURTH()        (stack_pointer[-4])
#define SET_TOP(v)      (stack_pointer[-1] = (v))
#define SET_SECOND(v)   (stack_pointer[-2] = (v))
#define SET_THIRD(v)    (stack_pointer[-3] = (v))
#define SET_FOURTH(v)   (stack_pointer[-4] = (v))
#define BASIC_STACKADJ(n)       (stack_pointer += n)
#define BASIC_PUSH(v)   (*stack_pointer++ = (v))
#define BASIC_POP()     (*--stack_pointer)

#ifdef LLTRACE
#define PUSH(v)         { (void)(BASIC_PUSH(v), \
                          lltrace && prtrace(TOP(), "push")); \
                          assert(STACK_LEVEL() <= co->co_stacksize); }
#define POP()           ((void)(lltrace && prtrace(TOP(), "pop")), \
                         BASIC_POP())
#define STACKADJ(n)     { (void)(BASIC_STACKADJ(n), \
                          lltrace && prtrace(TOP(), "stackadj")); \
                          assert(STACK_LEVEL() <= co->co_stacksize); }
#define EXT_POP(STACK_POINTER) ((void)(lltrace && \
                                prtrace((STACK_POINTER)[-1], "ext_pop")), \
                                *--(STACK_POINTER))
#else
#define PUSH(v)         BASIC_PUSH(v)
#define POP()           BASIC_POP()
#define STACKADJ(n)     BASIC_STACKADJ(n)
#define EXT_POP(STACK_POINTER) (*--(STACK_POINTER))
#endif
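
/* Illustrative sketch (not part of the original file): a typical binary
   opcode body touches the value stack only through these macros; this is
   roughly what BINARY_ADD below does in its generic path --

       w = POP();
       v = TOP();
       x = PyNumber_Add(v, w);
       Py_DECREF(v);
       Py_DECREF(w);
       SET_TOP(x);
       if (x != NULL) continue;
       break;

   which keeps stack_pointer consistent whether or not LLTRACE is enabled. */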
/* Local variable macros */

#define GETLOCAL(i)     (fastlocals[i])

/* The SETLOCAL() macro must not DECREF the local variable in-place and
   then store the new value; it must copy the old value to a temporary
   value, then store the new value, and then DECREF the temporary value.
   This is because it is possible that during the DECREF the frame is
   accessed by other code (e.g. a __del__ method or gc.collect()) and the
   variable would be pointing to already-freed memory. */
#define SETLOCAL(i, value)      do { PyObject *tmp = GETLOCAL(i); \
                                     GETLOCAL(i) = value; \
                                     Py_XDECREF(tmp); } while (0)
852 if (Py_EnterRecursiveCall(""))
857 if (tstate
->use_tracing
) {
858 if (tstate
->c_tracefunc
!= NULL
) {
859 /* tstate->c_tracefunc, if defined, is a
860 function that will be called on *every* entry
861 to a code block. Its return value, if not
862 None, is a function that will be called at
863 the start of each executed line of code.
864 (Actually, the function must return itself
865 in order to continue tracing.) The trace
866 functions are called with three arguments:
867 a pointer to the current frame, a string
868 indicating why the function is called, and
869 an argument which depends on the situation.
870 The global trace function is also called
871 whenever an exception is detected. */
872 if (call_trace_protected(tstate
->c_tracefunc
,
874 f
, PyTrace_CALL
, Py_None
)) {
875 /* Trace function raised an error */
876 goto exit_eval_frame
;
879 if (tstate
->c_profilefunc
!= NULL
) {
880 /* Similar for c_profilefunc, except it needn't
881 return itself and isn't called for "line" events */
882 if (call_trace_protected(tstate
->c_profilefunc
,
883 tstate
->c_profileobj
,
884 f
, PyTrace_CALL
, Py_None
)) {
885 /* Profile function raised an error */
886 goto exit_eval_frame
;
892 names
= co
->co_names
;
893 consts
= co
->co_consts
;
894 fastlocals
= f
->f_localsplus
;
895 freevars
= f
->f_localsplus
+ co
->co_nlocals
;
896 first_instr
= (unsigned char*) PyString_AS_STRING(co
->co_code
);
897 /* An explanation is in order for the next line.
899 f->f_lasti now refers to the index of the last instruction
900 executed. You might think this was obvious from the name, but
901 this wasn't always true before 2.3! PyFrame_New now sets
902 f->f_lasti to -1 (i.e. the index *before* the first instruction)
903 and YIELD_VALUE doesn't fiddle with f_lasti any more. So this
906 When the PREDICT() macros are enabled, some opcode pairs follow in
907 direct succession without updating f->f_lasti. A successful
908 prediction effectively links the two codes together as if they
909 were a single new opcode; accordingly, f->f_lasti will point to
910 the first code in the pair (for instance, GET_ITER followed by
911 FOR_ITER is effectively a single opcode and f->f_lasti will point
912 to the beginning of the combined pair.)
914 next_instr
= first_instr
+ f
->f_lasti
+ 1;
915 stack_pointer
= f
->f_stacktop
;
916 assert(stack_pointer
!= NULL
);
917 f
->f_stacktop
= NULL
; /* remains NULL unless yield suspends frame */
920 lltrace
= PyDict_GetItemString(f
->f_globals
, "__lltrace__") != NULL
;
922 #if defined(Py_DEBUG) || defined(LLTRACE)
923 filename
= PyString_AsString(co
->co_filename
);
928 x
= Py_None
; /* Not a reference, just anything non-NULL */
931 if (throwflag
) { /* support for generator.throw() */
939 /* Almost surely, the opcode executed a break
940 or a continue, preventing inst1 from being set
941 on the way out of the loop.
943 READ_TIMESTAMP(inst1
);
946 dump_tsc(opcode
, ticked
, inst0
, inst1
, loop0
, loop1
,
952 READ_TIMESTAMP(loop0
);
954 assert(stack_pointer
>= f
->f_valuestack
); /* else underflow */
955 assert(STACK_LEVEL() <= co
->co_stacksize
); /* else overflow */
957 /* Do periodic things. Doing this every time through
958 the loop would add too much overhead, so we do it
959 only every Nth instruction. We also do it if
960 ``pendingcalls_to_do'' is set, i.e. when an asynchronous
961 event needs attention (e.g. a signal handler or
962 async I/O handler); see Py_AddPendingCall() and
963 Py_MakePendingCalls() above. */
965 if (--_Py_Ticker
< 0) {
966 if (*next_instr
== SETUP_FINALLY
) {
967 /* Make the last opcode before
968 a try: finally: block uninterruptible. */
969 goto fast_next_opcode
;
971 _Py_Ticker
= _Py_CheckInterval
;
972 tstate
->tick_counter
++;
976 if (pendingcalls_to_do
) {
977 if (Py_MakePendingCalls() < 0) {
981 if (pendingcalls_to_do
)
982 /* MakePendingCalls() didn't succeed.
983 Force early re-execution of this
984 "periodic" code, possibly after
989 if (interpreter_lock
) {
990 /* Give another thread a chance */
992 if (PyThreadState_Swap(NULL
) != tstate
)
993 Py_FatalError("ceval: tstate mix-up");
994 PyThread_release_lock(interpreter_lock
);
996 /* Other threads may run now */
998 PyThread_acquire_lock(interpreter_lock
, 1);
999 if (PyThreadState_Swap(tstate
) != NULL
)
1000 Py_FatalError("ceval: orphan tstate");
1002 /* Check for thread interrupts */
1004 if (tstate
->async_exc
!= NULL
) {
1005 x
= tstate
->async_exc
;
1006 tstate
->async_exc
= NULL
;
1009 why
= WHY_EXCEPTION
;
1017 f
->f_lasti
= INSTR_OFFSET();
1019 /* line-by-line tracing support */
1021 if (_Py_TracingPossible
&&
1022 tstate
->c_tracefunc
!= NULL
&& !tstate
->tracing
) {
1023 /* see maybe_call_line_trace
1024 for expository comments */
1025 f
->f_stacktop
= stack_pointer
;
1027 err
= maybe_call_line_trace(tstate
->c_tracefunc
,
1029 f
, &instr_lb
, &instr_ub
,
1031 /* Reload possibly changed frame fields */
1033 if (f
->f_stacktop
!= NULL
) {
1034 stack_pointer
= f
->f_stacktop
;
1035 f
->f_stacktop
= NULL
;
1038 /* trace function raised an exception */
1043 /* Extract opcode and argument */
1046 oparg
= 0; /* allows oparg to be stored in a register because
1047 it doesn't have to be remembered across a full loop */
1048 if (HAS_ARG(opcode
))
1051 #ifdef DYNAMIC_EXECUTION_PROFILE
1053 dxpairs
[lastopcode
][opcode
]++;
1054 lastopcode
= opcode
;
1060 /* Instruction tracing */
1063 if (HAS_ARG(opcode
)) {
1064 printf("%d: %d, %d\n",
1065 f
->f_lasti
, opcode
, oparg
);
1069 f
->f_lasti
, opcode
);
1074 /* Main switch on opcode */
1075 READ_TIMESTAMP(inst0
);
1080 It is essential that any operation that fails sets either
1081 x to NULL, err to nonzero, or why to anything but WHY_NOT,
1082 and that no operation that succeeds does this! */
1084 /* case STOP_CODE: this is an error! */
1087 goto fast_next_opcode
;
1090 x
= GETLOCAL(oparg
);
1094 goto fast_next_opcode
;
1096 format_exc_check_arg(PyExc_UnboundLocalError
,
1097 UNBOUNDLOCAL_ERROR_MSG
,
1098 PyTuple_GetItem(co
->co_varnames
, oparg
));
1102 x
= GETITEM(consts
, oparg
);
1105 goto fast_next_opcode
;
1107 PREDICTED_WITH_ARG(STORE_FAST
);
1111 goto fast_next_opcode
;
1116 goto fast_next_opcode
;
1123 goto fast_next_opcode
;
1132 goto fast_next_opcode
;
1143 goto fast_next_opcode
;
1149 goto fast_next_opcode
;
1160 goto fast_next_opcode
;
1161 } else if (oparg
== 3) {
1172 goto fast_next_opcode
;
1174 Py_FatalError("invalid argument to DUP_TOPX"
1175 " (bytecode corruption?)");
1176 /* Never returns, so don't bother to set why. */
1179 case UNARY_POSITIVE
:
1181 x
= PyNumber_Positive(v
);
1184 if (x
!= NULL
) continue;
1187 case UNARY_NEGATIVE
:
1189 x
= PyNumber_Negative(v
);
1192 if (x
!= NULL
) continue;
1197 err
= PyObject_IsTrue(v
);
1205 Py_INCREF(Py_False
);
1215 x
= PyObject_Repr(v
);
1218 if (x
!= NULL
) continue;
1223 x
= PyNumber_Invert(v
);
1226 if (x
!= NULL
) continue;
1232 x
= PyNumber_Power(v
, w
, Py_None
);
1236 if (x
!= NULL
) continue;
1239 case BINARY_MULTIPLY
:
1242 x
= PyNumber_Multiply(v
, w
);
1246 if (x
!= NULL
) continue;
1250 if (!_Py_QnewFlag
) {
1253 x
= PyNumber_Divide(v
, w
);
1257 if (x
!= NULL
) continue;
1260 /* -Qnew is in effect: fall through to
1261 BINARY_TRUE_DIVIDE */
1262 case BINARY_TRUE_DIVIDE
:
1265 x
= PyNumber_TrueDivide(v
, w
);
1269 if (x
!= NULL
) continue;
1272 case BINARY_FLOOR_DIVIDE
:
1275 x
= PyNumber_FloorDivide(v
, w
);
1279 if (x
!= NULL
) continue;
1285 if (PyString_CheckExact(v
))
1286 x
= PyString_Format(v
, w
);
1288 x
= PyNumber_Remainder(v
, w
);
1292 if (x
!= NULL
) continue;
1298 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1299 /* INLINE: int + int */
1300 register long a
, b
, i
;
1301 a
= PyInt_AS_LONG(v
);
1302 b
= PyInt_AS_LONG(w
);
1304 if ((i
^a
) < 0 && (i
^b
) < 0)
1306 x
= PyInt_FromLong(i
);
1308 else if (PyString_CheckExact(v
) &&
1309 PyString_CheckExact(w
)) {
1310 x
= string_concatenate(v
, w
, f
, next_instr
);
1311 /* string_concatenate consumed the ref to v */
1312 goto skip_decref_vx
;
1316 x
= PyNumber_Add(v
, w
);
1322 if (x
!= NULL
) continue;
1325 case BINARY_SUBTRACT
:
1328 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1329 /* INLINE: int - int */
1330 register long a
, b
, i
;
1331 a
= PyInt_AS_LONG(v
);
1332 b
= PyInt_AS_LONG(w
);
1334 if ((i
^a
) < 0 && (i
^~b
) < 0)
1336 x
= PyInt_FromLong(i
);
1340 x
= PyNumber_Subtract(v
, w
);
1345 if (x
!= NULL
) continue;
1351 if (PyList_CheckExact(v
) && PyInt_CheckExact(w
)) {
1352 /* INLINE: list[int] */
1353 Py_ssize_t i
= PyInt_AsSsize_t(w
);
1355 i
+= PyList_GET_SIZE(v
);
1356 if (i
>= 0 && i
< PyList_GET_SIZE(v
)) {
1357 x
= PyList_GET_ITEM(v
, i
);
1365 x
= PyObject_GetItem(v
, w
);
1369 if (x
!= NULL
) continue;
1375 x
= PyNumber_Lshift(v
, w
);
1379 if (x
!= NULL
) continue;
1385 x
= PyNumber_Rshift(v
, w
);
1389 if (x
!= NULL
) continue;
1395 x
= PyNumber_And(v
, w
);
1399 if (x
!= NULL
) continue;
1405 x
= PyNumber_Xor(v
, w
);
1409 if (x
!= NULL
) continue;
1415 x
= PyNumber_Or(v
, w
);
1419 if (x
!= NULL
) continue;
1424 v
= stack_pointer
[-oparg
];
1425 err
= PyList_Append(v
, w
);
1428 PREDICT(JUMP_ABSOLUTE
);
1436 x
= PyNumber_InPlacePower(v
, w
, Py_None
);
1440 if (x
!= NULL
) continue;
1443 case INPLACE_MULTIPLY
:
1446 x
= PyNumber_InPlaceMultiply(v
, w
);
1450 if (x
!= NULL
) continue;
1453 case INPLACE_DIVIDE
:
1454 if (!_Py_QnewFlag
) {
1457 x
= PyNumber_InPlaceDivide(v
, w
);
1461 if (x
!= NULL
) continue;
1464 /* -Qnew is in effect: fall through to
1465 INPLACE_TRUE_DIVIDE */
1466 case INPLACE_TRUE_DIVIDE
:
1469 x
= PyNumber_InPlaceTrueDivide(v
, w
);
1473 if (x
!= NULL
) continue;
1476 case INPLACE_FLOOR_DIVIDE
:
1479 x
= PyNumber_InPlaceFloorDivide(v
, w
);
1483 if (x
!= NULL
) continue;
1486 case INPLACE_MODULO
:
1489 x
= PyNumber_InPlaceRemainder(v
, w
);
1493 if (x
!= NULL
) continue;
1499 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1500 /* INLINE: int + int */
1501 register long a
, b
, i
;
1502 a
= PyInt_AS_LONG(v
);
1503 b
= PyInt_AS_LONG(w
);
1505 if ((i
^a
) < 0 && (i
^b
) < 0)
1507 x
= PyInt_FromLong(i
);
1509 else if (PyString_CheckExact(v
) &&
1510 PyString_CheckExact(w
)) {
1511 x
= string_concatenate(v
, w
, f
, next_instr
);
1512 /* string_concatenate consumed the ref to v */
1517 x
= PyNumber_InPlaceAdd(v
, w
);
1523 if (x
!= NULL
) continue;
1526 case INPLACE_SUBTRACT
:
1529 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1530 /* INLINE: int - int */
1531 register long a
, b
, i
;
1532 a
= PyInt_AS_LONG(v
);
1533 b
= PyInt_AS_LONG(w
);
1535 if ((i
^a
) < 0 && (i
^~b
) < 0)
1537 x
= PyInt_FromLong(i
);
1541 x
= PyNumber_InPlaceSubtract(v
, w
);
1546 if (x
!= NULL
) continue;
1549 case INPLACE_LSHIFT
:
1552 x
= PyNumber_InPlaceLshift(v
, w
);
1556 if (x
!= NULL
) continue;
1559 case INPLACE_RSHIFT
:
1562 x
= PyNumber_InPlaceRshift(v
, w
);
1566 if (x
!= NULL
) continue;
1572 x
= PyNumber_InPlaceAnd(v
, w
);
1576 if (x
!= NULL
) continue;
1582 x
= PyNumber_InPlaceXor(v
, w
);
1586 if (x
!= NULL
) continue;
1592 x
= PyNumber_InPlaceOr(v
, w
);
1596 if (x
!= NULL
) continue;
1603 if ((opcode
-SLICE
) & 2)
1607 if ((opcode
-SLICE
) & 1)
1612 x
= apply_slice(u
, v
, w
);
1617 if (x
!= NULL
) continue;
1624 if ((opcode
-STORE_SLICE
) & 2)
1628 if ((opcode
-STORE_SLICE
) & 1)
1634 err
= assign_slice(u
, v
, w
, t
); /* u[v:w] = t */
1639 if (err
== 0) continue;
1642 case DELETE_SLICE
+0:
1643 case DELETE_SLICE
+1:
1644 case DELETE_SLICE
+2:
1645 case DELETE_SLICE
+3:
1646 if ((opcode
-DELETE_SLICE
) & 2)
1650 if ((opcode
-DELETE_SLICE
) & 1)
1655 err
= assign_slice(u
, v
, w
, (PyObject
*)NULL
);
1660 if (err
== 0) continue;
1669 err
= PyObject_SetItem(v
, w
, u
);
1673 if (err
== 0) continue;
1681 err
= PyObject_DelItem(v
, w
);
1684 if (err
== 0) continue;
1689 w
= PySys_GetObject("displayhook");
1691 PyErr_SetString(PyExc_RuntimeError
,
1692 "lost sys.displayhook");
1697 x
= PyTuple_Pack(1, v
);
1702 w
= PyEval_CallObject(w
, x
);
1713 /* fall through to PRINT_ITEM */
1717 if (stream
== NULL
|| stream
== Py_None
) {
1718 w
= PySys_GetObject("stdout");
1720 PyErr_SetString(PyExc_RuntimeError
,
1725 /* PyFile_SoftSpace() can execute arbitrary code
1726 if sys.stdout is an instance with a __getattr__.
1727 If __getattr__ raises an exception, w will
1728 be freed, so we need to prevent that temporarily. */
1730 if (w
!= NULL
&& PyFile_SoftSpace(w
, 0))
1731 err
= PyFile_WriteString(" ", w
);
1733 err
= PyFile_WriteObject(v
, w
, Py_PRINT_RAW
);
1735 /* XXX move into writeobject() ? */
1736 if (PyString_Check(v
)) {
1737 char *s
= PyString_AS_STRING(v
);
1738 Py_ssize_t len
= PyString_GET_SIZE(v
);
1740 !isspace(Py_CHARMASK(s
[len
-1])) ||
1742 PyFile_SoftSpace(w
, 1);
1744 #ifdef Py_USING_UNICODE
1745 else if (PyUnicode_Check(v
)) {
1746 Py_UNICODE
*s
= PyUnicode_AS_UNICODE(v
);
1747 Py_ssize_t len
= PyUnicode_GET_SIZE(v
);
1749 !Py_UNICODE_ISSPACE(s
[len
-1]) ||
1751 PyFile_SoftSpace(w
, 1);
1755 PyFile_SoftSpace(w
, 1);
1765 case PRINT_NEWLINE_TO
:
1767 /* fall through to PRINT_NEWLINE */
1770 if (stream
== NULL
|| stream
== Py_None
) {
1771 w
= PySys_GetObject("stdout");
1773 PyErr_SetString(PyExc_RuntimeError
,
1775 why
= WHY_EXCEPTION
;
1779 /* w.write() may replace sys.stdout, so we
1780 * have to keep our reference to it */
1782 err
= PyFile_WriteString("\n", w
);
1784 PyFile_SoftSpace(w
, 0);
1793 default: switch (opcode
) {
1799 u
= POP(); /* traceback */
1802 v
= POP(); /* value */
1805 w
= POP(); /* exc */
1806 case 0: /* Fallthrough */
1807 why
= do_raise(w
, v
, u
);
1810 PyErr_SetString(PyExc_SystemError
,
1811 "bad RAISE_VARARGS oparg");
1812 why
= WHY_EXCEPTION
;
1818 if ((x
= f
->f_locals
) != NULL
) {
1823 PyErr_SetString(PyExc_SystemError
, "no locals");
1829 goto fast_block_end
;
1833 f
->f_stacktop
= stack_pointer
;
1842 READ_TIMESTAMP(intr0
);
1843 err
= exec_statement(f
, u
, v
, w
);
1844 READ_TIMESTAMP(intr1
);
1852 PyTryBlock
*b
= PyFrame_BlockPop(f
);
1853 while (STACK_LEVEL() > b
->b_level
) {
1860 PREDICTED(END_FINALLY
);
1863 if (PyInt_Check(v
)) {
1864 why
= (enum why_code
) PyInt_AS_LONG(v
);
1865 assert(why
!= WHY_YIELD
);
1866 if (why
== WHY_RETURN
||
1867 why
== WHY_CONTINUE
)
1870 else if (PyExceptionClass_Check(v
) ||
1871 PyString_Check(v
)) {
1874 PyErr_Restore(v
, w
, u
);
1878 else if (v
!= Py_None
) {
1879 PyErr_SetString(PyExc_SystemError
,
1880 "'finally' pops bad exception");
1881 why
= WHY_EXCEPTION
;
1891 x
= build_class(u
, v
, w
);
1899 w
= GETITEM(names
, oparg
);
1901 if ((x
= f
->f_locals
) != NULL
) {
1902 if (PyDict_CheckExact(x
))
1903 err
= PyDict_SetItem(x
, w
, v
);
1905 err
= PyObject_SetItem(x
, w
, v
);
1907 if (err
== 0) continue;
1910 PyErr_Format(PyExc_SystemError
,
1911 "no locals found when storing %s",
1916 w
= GETITEM(names
, oparg
);
1917 if ((x
= f
->f_locals
) != NULL
) {
1918 if ((err
= PyObject_DelItem(x
, w
)) != 0)
1919 format_exc_check_arg(PyExc_NameError
,
1924 PyErr_Format(PyExc_SystemError
,
1925 "no locals when deleting %s",
1929 PREDICTED_WITH_ARG(UNPACK_SEQUENCE
);
1930 case UNPACK_SEQUENCE
:
1932 if (PyTuple_CheckExact(v
) &&
1933 PyTuple_GET_SIZE(v
) == oparg
) {
1934 PyObject
**items
= \
1935 ((PyTupleObject
*)v
)->ob_item
;
1943 } else if (PyList_CheckExact(v
) &&
1944 PyList_GET_SIZE(v
) == oparg
) {
1945 PyObject
**items
= \
1946 ((PyListObject
*)v
)->ob_item
;
1952 } else if (unpack_iterable(v
, oparg
,
1953 stack_pointer
+ oparg
)) {
1954 stack_pointer
+= oparg
;
1956 /* unpack_iterable() raised an exception */
1957 why
= WHY_EXCEPTION
;
1963 w
= GETITEM(names
, oparg
);
1967 err
= PyObject_SetAttr(v
, w
, u
); /* v.w = u */
1970 if (err
== 0) continue;
1974 w
= GETITEM(names
, oparg
);
1976 err
= PyObject_SetAttr(v
, w
, (PyObject
*)NULL
);
1982 w
= GETITEM(names
, oparg
);
1984 err
= PyDict_SetItem(f
->f_globals
, w
, v
);
1986 if (err
== 0) continue;
1990 w
= GETITEM(names
, oparg
);
1991 if ((err
= PyDict_DelItem(f
->f_globals
, w
)) != 0)
1992 format_exc_check_arg(
1993 PyExc_NameError
, GLOBAL_NAME_ERROR_MSG
, w
);
1997 w
= GETITEM(names
, oparg
);
1998 if ((v
= f
->f_locals
) == NULL
) {
1999 PyErr_Format(PyExc_SystemError
,
2000 "no locals when loading %s",
2002 why
= WHY_EXCEPTION
;
2005 if (PyDict_CheckExact(v
)) {
2006 x
= PyDict_GetItem(v
, w
);
2010 x
= PyObject_GetItem(v
, w
);
2011 if (x
== NULL
&& PyErr_Occurred()) {
2012 if (!PyErr_ExceptionMatches(
2019 x
= PyDict_GetItem(f
->f_globals
, w
);
2021 x
= PyDict_GetItem(f
->f_builtins
, w
);
2023 format_exc_check_arg(
2035 w
= GETITEM(names
, oparg
);
2036 if (PyString_CheckExact(w
)) {
2037 /* Inline the PyDict_GetItem() calls.
2038 WARNING: this is an extreme speed hack.
2039 Do not try this at home. */
2040 long hash
= ((PyStringObject
*)w
)->ob_shash
;
2044 d
= (PyDictObject
*)(f
->f_globals
);
2045 e
= d
->ma_lookup(d
, w
, hash
);
2056 d
= (PyDictObject
*)(f
->f_builtins
);
2057 e
= d
->ma_lookup(d
, w
, hash
);
2068 goto load_global_error
;
2071 /* This is the un-inlined version of the code above */
2072 x
= PyDict_GetItem(f
->f_globals
, w
);
2074 x
= PyDict_GetItem(f
->f_builtins
, w
);
2077 format_exc_check_arg(
2079 GLOBAL_NAME_ERROR_MSG
, w
);
2088 x
= GETLOCAL(oparg
);
2090 SETLOCAL(oparg
, NULL
);
2093 format_exc_check_arg(
2094 PyExc_UnboundLocalError
,
2095 UNBOUNDLOCAL_ERROR_MSG
,
2096 PyTuple_GetItem(co
->co_varnames
, oparg
)
2101 x
= freevars
[oparg
];
2104 if (x
!= NULL
) continue;
2108 x
= freevars
[oparg
];
2115 /* Don't stomp existing exception */
2116 if (PyErr_Occurred())
2118 if (oparg
< PyTuple_GET_SIZE(co
->co_cellvars
)) {
2119 v
= PyTuple_GET_ITEM(co
->co_cellvars
,
2121 format_exc_check_arg(
2122 PyExc_UnboundLocalError
,
2123 UNBOUNDLOCAL_ERROR_MSG
,
2126 v
= PyTuple_GET_ITEM(co
->co_freevars
, oparg
-
2127 PyTuple_GET_SIZE(co
->co_cellvars
));
2128 format_exc_check_arg(PyExc_NameError
,
2129 UNBOUNDFREE_ERROR_MSG
, v
);
2135 x
= freevars
[oparg
];
2141 x
= PyTuple_New(oparg
);
2143 for (; --oparg
>= 0;) {
2145 PyTuple_SET_ITEM(x
, oparg
, w
);
2153 x
= PyList_New(oparg
);
2155 for (; --oparg
>= 0;) {
2157 PyList_SET_ITEM(x
, oparg
, w
);
2165 x
= _PyDict_NewPresized((Py_ssize_t
)oparg
);
2167 if (x
!= NULL
) continue;
2171 w
= TOP(); /* key */
2172 u
= SECOND(); /* value */
2173 v
= THIRD(); /* dict */
2175 assert (PyDict_CheckExact(v
));
2176 err
= PyDict_SetItem(v
, w
, u
); /* v[w] = u */
2179 if (err
== 0) continue;
2183 w
= GETITEM(names
, oparg
);
2185 x
= PyObject_GetAttr(v
, w
);
2188 if (x
!= NULL
) continue;
2194 if (PyInt_CheckExact(w
) && PyInt_CheckExact(v
)) {
2195 /* INLINE: cmp(int, int) */
2198 a
= PyInt_AS_LONG(v
);
2199 b
= PyInt_AS_LONG(w
);
2201 case PyCmp_LT
: res
= a
< b
; break;
2202 case PyCmp_LE
: res
= a
<= b
; break;
2203 case PyCmp_EQ
: res
= a
== b
; break;
2204 case PyCmp_NE
: res
= a
!= b
; break;
2205 case PyCmp_GT
: res
= a
> b
; break;
2206 case PyCmp_GE
: res
= a
>= b
; break;
2207 case PyCmp_IS
: res
= v
== w
; break;
2208 case PyCmp_IS_NOT
: res
= v
!= w
; break;
2209 default: goto slow_compare
;
2211 x
= res
? Py_True
: Py_False
;
2216 x
= cmp_outcome(oparg
, v
, w
);
2221 if (x
== NULL
) break;
2222 PREDICT(POP_JUMP_IF_FALSE
);
2223 PREDICT(POP_JUMP_IF_TRUE
);
2227 w
= GETITEM(names
, oparg
);
2228 x
= PyDict_GetItemString(f
->f_builtins
, "__import__");
2230 PyErr_SetString(PyExc_ImportError
,
2231 "__import__ not found");
2237 if (PyInt_AsLong(u
) != -1 || PyErr_Occurred())
2241 f
->f_locals
== NULL
?
2242 Py_None
: f
->f_locals
,
2249 f
->f_locals
== NULL
?
2250 Py_None
: f
->f_locals
,
2260 READ_TIMESTAMP(intr0
);
2262 x
= PyEval_CallObject(v
, w
);
2264 READ_TIMESTAMP(intr1
);
2267 if (x
!= NULL
) continue;
2272 PyFrame_FastToLocals(f
);
2273 if ((x
= f
->f_locals
) == NULL
) {
2274 PyErr_SetString(PyExc_SystemError
,
2275 "no locals found during 'import *'");
2278 READ_TIMESTAMP(intr0
);
2279 err
= import_all_from(x
, v
);
2280 READ_TIMESTAMP(intr1
);
2281 PyFrame_LocalsToFast(f
, 0);
2283 if (err
== 0) continue;
2287 w
= GETITEM(names
, oparg
);
2289 READ_TIMESTAMP(intr0
);
2290 x
= import_from(v
, w
);
2291 READ_TIMESTAMP(intr1
);
2293 if (x
!= NULL
) continue;
2298 goto fast_next_opcode
;
2300 PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE
);
2301 case POP_JUMP_IF_FALSE
:
2305 goto fast_next_opcode
;
2307 if (w
== Py_False
) {
2310 goto fast_next_opcode
;
2312 err
= PyObject_IsTrue(w
);
2322 PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE
);
2323 case POP_JUMP_IF_TRUE
:
2325 if (w
== Py_False
) {
2327 goto fast_next_opcode
;
2332 goto fast_next_opcode
;
2334 err
= PyObject_IsTrue(w
);
2346 case JUMP_IF_FALSE_OR_POP
:
2351 goto fast_next_opcode
;
2353 if (w
== Py_False
) {
2355 goto fast_next_opcode
;
2357 err
= PyObject_IsTrue(w
);
2369 case JUMP_IF_TRUE_OR_POP
:
2371 if (w
== Py_False
) {
2374 goto fast_next_opcode
;
2378 goto fast_next_opcode
;
2380 err
= PyObject_IsTrue(w
);
2385 else if (err
== 0) {
2393 PREDICTED_WITH_ARG(JUMP_ABSOLUTE
);
2397 /* Enabling this path speeds up all while and for loops by bypassing
2398 the per-loop checks for signals. By default, this should be turned off
2399 because it prevents detection of a control-break in tight loops like
2400 "while 1: pass". Compile with this option turned on when you need
2401 the speed-up and do not need break checking inside tight loops (ones
2402 that contain only instructions ending with goto fast_next_opcode).
2404 goto fast_next_opcode
;
2410 /* before: [obj]; after [getiter(obj)] */
2412 x
= PyObject_GetIter(v
);
2422 PREDICTED_WITH_ARG(FOR_ITER
);
2424 /* before: [iter]; after: [iter, iter()] *or* [] */
2426 x
= (*v
->ob_type
->tp_iternext
)(v
);
2429 PREDICT(STORE_FAST
);
2430 PREDICT(UNPACK_SEQUENCE
);
2433 if (PyErr_Occurred()) {
2434 if (!PyErr_ExceptionMatches(
2435 PyExc_StopIteration
))
2439 /* iterator ended normally */
2447 goto fast_block_end
;
2450 retval
= PyInt_FromLong(oparg
);
2456 goto fast_block_end
;
2461 /* NOTE: If you add any new block-setup opcodes that
2462 are not try/except/finally handlers, you may need
2463 to update the PyGen_NeedsFinalizing() function.
2466 PyFrame_BlockSetup(f
, opcode
, INSTR_OFFSET() + oparg
,
2472 /* At the top of the stack are 1-3 values indicating
2473 how/why we entered the finally clause:
2475 - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval
2476 - TOP = WHY_*; no retval below it
2477 - (TOP, SECOND, THIRD) = exc_info()
2478 Below them is EXIT, the context.__exit__ bound method.
2479 In the last case, we must call
2480 EXIT(TOP, SECOND, THIRD)
2481 otherwise we must call
2482 EXIT(None, None, None)
2484 In all cases, we remove EXIT from the stack, leaving
2485 the rest in the same order.
2487 In addition, if the stack represents an exception,
2488 *and* the function call returns a 'true' value, we
2489 "zap" this information, to prevent END_FINALLY from
2490 re-raising the exception. (But non-local gotos
2491 should still be resumed.)
2494 PyObject
*exit_func
;
2502 else if (PyInt_Check(u
)) {
2503 switch(PyInt_AS_LONG(u
)) {
2506 /* Retval in TOP. */
2507 exit_func
= SECOND();
2516 u
= v
= w
= Py_None
;
2521 exit_func
= THIRD();
2526 /* XXX Not the fastest way to call it... */
2527 x
= PyObject_CallFunctionObjArgs(exit_func
, u
, v
, w
,
2529 Py_DECREF(exit_func
);
2531 break; /* Go to error exit */
2534 err
= PyObject_IsTrue(x
);
2540 break; /* Go to error exit */
2543 /* There was an exception and a true return */
2551 /* The stack was rearranged to remove EXIT
2552 above. Let END_FINALLY do its thing */
2554 PREDICT(END_FINALLY
);
2564 x
= call_function(&sp
, oparg
, &intr0
, &intr1
);
2566 x
= call_function(&sp
, oparg
);
2575 case CALL_FUNCTION_VAR
:
2576 case CALL_FUNCTION_KW
:
2577 case CALL_FUNCTION_VAR_KW
:
2579 int na
= oparg
& 0xff;
2580 int nk
= (oparg
>>8) & 0xff;
2581 int flags
= (opcode
- CALL_FUNCTION
) & 3;
2582 int n
= na
+ 2 * nk
;
2583 PyObject
**pfunc
, *func
, **sp
;
2585 if (flags
& CALL_FLAG_VAR
)
2587 if (flags
& CALL_FLAG_KW
)
2589 pfunc
= stack_pointer
- n
- 1;
2592 if (PyMethod_Check(func
)
2593 && PyMethod_GET_SELF(func
) != NULL
) {
2594 PyObject
*self
= PyMethod_GET_SELF(func
);
2596 func
= PyMethod_GET_FUNCTION(func
);
2605 READ_TIMESTAMP(intr0
);
2606 x
= ext_do_call(func
, &sp
, flags
, na
, nk
);
2607 READ_TIMESTAMP(intr1
);
2611 while (stack_pointer
> pfunc
) {
2622 v
= POP(); /* code object */
2623 x
= PyFunction_New(v
, f
->f_globals
);
2625 /* XXX Maybe this should be a separate opcode? */
2626 if (x
!= NULL
&& oparg
> 0) {
2627 v
= PyTuple_New(oparg
);
2633 while (--oparg
>= 0) {
2635 PyTuple_SET_ITEM(v
, oparg
, w
);
2637 err
= PyFunction_SetDefaults(x
, v
);
2645 v
= POP(); /* code object */
2646 x
= PyFunction_New(v
, f
->f_globals
);
2650 if (PyFunction_SetClosure(x
, v
) != 0) {
2651 /* Can't happen unless bytecode is corrupt. */
2652 why
= WHY_EXCEPTION
;
2656 if (x
!= NULL
&& oparg
> 0) {
2657 v
= PyTuple_New(oparg
);
2663 while (--oparg
>= 0) {
2665 PyTuple_SET_ITEM(v
, oparg
, w
);
2667 if (PyFunction_SetDefaults(x
, v
) != 0) {
2668 /* Can't happen unless
2669 PyFunction_SetDefaults changes. */
2670 why
= WHY_EXCEPTION
;
2685 x
= PySlice_New(u
, v
, w
);
2690 if (x
!= NULL
) continue;
2695 oparg
= oparg
<<16 | NEXTARG();
2696 goto dispatch_opcode
;
2700 "XXX lineno: %d, opcode: %d\n",
2701 PyCode_Addr2Line(f
->f_code
, f
->f_lasti
),
2703 PyErr_SetString(PyExc_SystemError
, "unknown opcode");
2704 why
= WHY_EXCEPTION
;
2715 READ_TIMESTAMP(inst1
);
2717 /* Quickly continue if no error occurred */
2719 if (why
== WHY_NOT
) {
2720 if (err
== 0 && x
!= NULL
) {
2722 /* This check is expensive! */
2723 if (PyErr_Occurred())
2725 "XXX undetected error\n");
2728 READ_TIMESTAMP(loop1
);
2729 continue; /* Normal, fast path */
2734 why
= WHY_EXCEPTION
;
2739 /* Double-check exception status */
2741 if (why
== WHY_EXCEPTION
|| why
== WHY_RERAISE
) {
2742 if (!PyErr_Occurred()) {
2743 PyErr_SetString(PyExc_SystemError
,
2744 "error return without exception set");
2745 why
= WHY_EXCEPTION
;
2750 /* This check is expensive! */
2751 if (PyErr_Occurred()) {
2753 sprintf(buf
, "Stack unwind with exception "
2754 "set and why=%d", why
);
2760 /* Log traceback info if this is a real exception */
2762 if (why
== WHY_EXCEPTION
) {
2763 PyTraceBack_Here(f
);
2765 if (tstate
->c_tracefunc
!= NULL
)
2766 call_exc_trace(tstate
->c_tracefunc
,
2767 tstate
->c_traceobj
, f
);
2770 /* For the rest, treat WHY_RERAISE as WHY_EXCEPTION */
2772 if (why
== WHY_RERAISE
)
2773 why
= WHY_EXCEPTION
;
2775 /* Unwind stacks if a (pseudo) exception occurred */
2778 while (why
!= WHY_NOT
&& f
->f_iblock
> 0) {
2779 PyTryBlock
*b
= PyFrame_BlockPop(f
);
2781 assert(why
!= WHY_YIELD
);
2782 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_CONTINUE
) {
2783 /* For a continue inside a try block,
2784 don't pop the block for the loop. */
2785 PyFrame_BlockSetup(f
, b
->b_type
, b
->b_handler
,
2788 JUMPTO(PyInt_AS_LONG(retval
));
2793 while (STACK_LEVEL() > b
->b_level
) {
2797 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_BREAK
) {
2799 JUMPTO(b
->b_handler
);
2802 if (b
->b_type
== SETUP_FINALLY
||
2803 (b
->b_type
== SETUP_EXCEPT
&&
2804 why
== WHY_EXCEPTION
)) {
2805 if (why
== WHY_EXCEPTION
) {
2806 PyObject
*exc
, *val
, *tb
;
2807 PyErr_Fetch(&exc
, &val
, &tb
);
2812 /* Make the raw exception data
2813 available to the handler,
2814 so a program can emulate the
2815 Python main loop. Don't do
2816 this for 'finally'. */
2817 if (b
->b_type
== SETUP_EXCEPT
) {
2818 PyErr_NormalizeException(
2820 set_exc_info(tstate
,
2832 if (why
& (WHY_RETURN
| WHY_CONTINUE
))
2834 v
= PyInt_FromLong((long)why
);
2838 JUMPTO(b
->b_handler
);
2841 } /* unwind stack */
2843 /* End the loop if we still have an error (or return) */
2847 READ_TIMESTAMP(loop1
);
2851 assert(why
!= WHY_YIELD
);
2852 /* Pop remaining stack entries. */
2858 if (why
!= WHY_RETURN
)
2862 if (tstate
->use_tracing
) {
2863 if (tstate
->c_tracefunc
) {
2864 if (why
== WHY_RETURN
|| why
== WHY_YIELD
) {
2865 if (call_trace(tstate
->c_tracefunc
,
2866 tstate
->c_traceobj
, f
,
2867 PyTrace_RETURN
, retval
)) {
2870 why
= WHY_EXCEPTION
;
2873 else if (why
== WHY_EXCEPTION
) {
2874 call_trace_protected(tstate
->c_tracefunc
,
2875 tstate
->c_traceobj
, f
,
2876 PyTrace_RETURN
, NULL
);
2879 if (tstate
->c_profilefunc
) {
2880 if (why
== WHY_EXCEPTION
)
2881 call_trace_protected(tstate
->c_profilefunc
,
2882 tstate
->c_profileobj
, f
,
2883 PyTrace_RETURN
, NULL
);
2884 else if (call_trace(tstate
->c_profilefunc
,
2885 tstate
->c_profileobj
, f
,
2886 PyTrace_RETURN
, retval
)) {
2889 why
= WHY_EXCEPTION
;
2894 if (tstate
->frame
->f_exc_type
!= NULL
)
2895 reset_exc_info(tstate
);
2897 assert(tstate
->frame
->f_exc_value
== NULL
);
2898 assert(tstate
->frame
->f_exc_traceback
== NULL
);
2903 Py_LeaveRecursiveCall();
2904 tstate
->frame
= f
->f_back
;
2909 /* This is gonna seem *real weird*, but if you put some other code between
2910 PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
2911 the test in the if statements in Misc/gdbinit (pystack and pystackv). */
2914 PyEval_EvalCodeEx(PyCodeObject
*co
, PyObject
*globals
, PyObject
*locals
,
2915 PyObject
**args
, int argcount
, PyObject
**kws
, int kwcount
,
2916 PyObject
**defs
, int defcount
, PyObject
*closure
)
2918 register PyFrameObject
*f
;
2919 register PyObject
*retval
= NULL
;
2920 register PyObject
**fastlocals
, **freevars
;
2921 PyThreadState
*tstate
= PyThreadState_GET();
2924 if (globals
== NULL
) {
2925 PyErr_SetString(PyExc_SystemError
,
2926 "PyEval_EvalCodeEx: NULL globals");
2930 assert(tstate
!= NULL
);
2931 assert(globals
!= NULL
);
2932 f
= PyFrame_New(tstate
, co
, globals
, locals
);
2936 fastlocals
= f
->f_localsplus
;
2937 freevars
= f
->f_localsplus
+ co
->co_nlocals
;
2939 if (co
->co_argcount
> 0 ||
2940 co
->co_flags
& (CO_VARARGS
| CO_VARKEYWORDS
)) {
2943 PyObject
*kwdict
= NULL
;
2944 if (co
->co_flags
& CO_VARKEYWORDS
) {
2945 kwdict
= PyDict_New();
2948 i
= co
->co_argcount
;
2949 if (co
->co_flags
& CO_VARARGS
)
2951 SETLOCAL(i
, kwdict
);
2953 if (argcount
> co
->co_argcount
) {
2954 if (!(co
->co_flags
& CO_VARARGS
)) {
2955 PyErr_Format(PyExc_TypeError
,
2956 "%.200s() takes %s %d "
2957 "%sargument%s (%d given)",
2958 PyString_AsString(co
->co_name
),
2959 defcount
? "at most" : "exactly",
2961 kwcount
? "non-keyword " : "",
2962 co
->co_argcount
== 1 ? "" : "s",
2966 n
= co
->co_argcount
;
2968 for (i
= 0; i
< n
; i
++) {
2973 if (co
->co_flags
& CO_VARARGS
) {
2974 u
= PyTuple_New(argcount
- n
);
2977 SETLOCAL(co
->co_argcount
, u
);
2978 for (i
= n
; i
< argcount
; i
++) {
2981 PyTuple_SET_ITEM(u
, i
-n
, x
);
2984 for (i
= 0; i
< kwcount
; i
++) {
2985 PyObject
**co_varnames
;
2986 PyObject
*keyword
= kws
[2*i
];
2987 PyObject
*value
= kws
[2*i
+ 1];
2989 if (keyword
== NULL
|| !(PyString_Check(keyword
)
2990 #ifdef Py_USING_UNICODE
2991 || PyUnicode_Check(keyword
)
2994 PyErr_Format(PyExc_TypeError
,
2995 "%.200s() keywords must be strings",
2996 PyString_AsString(co
->co_name
));
2999 /* Speed hack: do raw pointer compares. As names are
3000 normally interned this should almost always hit. */
3001 co_varnames
= PySequence_Fast_ITEMS(co
->co_varnames
);
3002 for (j
= 0; j
< co
->co_argcount
; j
++) {
3003 PyObject
*nm
= co_varnames
[j
];
3007 /* Slow fallback, just in case */
3008 for (j
= 0; j
< co
->co_argcount
; j
++) {
3009 PyObject
*nm
= co_varnames
[j
];
3010 int cmp
= PyObject_RichCompareBool(
3011 keyword
, nm
, Py_EQ
);
3017 /* Check errors from Compare */
3018 if (PyErr_Occurred())
3020 if (j
>= co
->co_argcount
) {
3021 if (kwdict
== NULL
) {
3022 PyObject
*kwd_str
= kwd_as_string(keyword
);
3024 PyErr_Format(PyExc_TypeError
,
3025 "%.200s() got an unexpected "
3026 "keyword argument '%.400s'",
3027 PyString_AsString(co
->co_name
),
3028 PyString_AsString(kwd_str
));
3033 PyDict_SetItem(kwdict
, keyword
, value
);
3037 if (GETLOCAL(j
) != NULL
) {
3038 PyObject
*kwd_str
= kwd_as_string(keyword
);
3040 PyErr_Format(PyExc_TypeError
,
3041 "%.200s() got multiple "
3042 "values for keyword "
3043 "argument '%.400s'",
3044 PyString_AsString(co
->co_name
),
3045 PyString_AsString(kwd_str
));
3053 if (argcount
< co
->co_argcount
) {
3054 int m
= co
->co_argcount
- defcount
;
3055 for (i
= argcount
; i
< m
; i
++) {
3056 if (GETLOCAL(i
) == NULL
) {
3057 PyErr_Format(PyExc_TypeError
,
3058 "%.200s() takes %s %d "
3059 "%sargument%s (%d given)",
3060 PyString_AsString(co
->co_name
),
3061 ((co
->co_flags
& CO_VARARGS
) ||
3062 defcount
) ? "at least"
3064 m
, kwcount
? "non-keyword " : "",
3065 m
== 1 ? "" : "s", i
);
3073 for (; i
< defcount
; i
++) {
3074 if (GETLOCAL(m
+i
) == NULL
) {
3075 PyObject
*def
= defs
[i
];
3083 if (argcount
> 0 || kwcount
> 0) {
3084 PyErr_Format(PyExc_TypeError
,
3085 "%.200s() takes no arguments (%d given)",
3086 PyString_AsString(co
->co_name
),
3087 argcount
+ kwcount
);
    /* Allocate and initialize storage for cell vars, and copy free
       vars into frame.  This isn't too efficient right now. */
    if (PyTuple_GET_SIZE(co->co_cellvars)) {
        int i, j, nargs, found;
        char *cellname, *argname;
        PyObject *c;

        nargs = co->co_argcount;
        if (co->co_flags & CO_VARARGS)
            nargs++;
        if (co->co_flags & CO_VARKEYWORDS)
            nargs++;

        /* Initialize each cell var, taking into account
           cell vars that are initialized from arguments.

           Should arrange for the compiler to put cellvars
           that are arguments at the beginning of the cellvars
           list so that we can march over it more efficiently?
        */
        for (i = 0; i < PyTuple_GET_SIZE(co->co_cellvars); ++i) {
            cellname = PyString_AS_STRING(
                PyTuple_GET_ITEM(co->co_cellvars, i));
            found = 0;
            for (j = 0; j < nargs; j++) {
                argname = PyString_AS_STRING(
                    PyTuple_GET_ITEM(co->co_varnames, j));
                if (strcmp(cellname, argname) == 0) {
                    c = PyCell_New(GETLOCAL(j));
                    if (c == NULL)
                        goto fail;
                    GETLOCAL(co->co_nlocals + i) = c;
                    found = 1;
                    break;
                }
            }
            if (found == 0) {
                c = PyCell_New(NULL);
                if (c == NULL)
                    goto fail;
                SETLOCAL(co->co_nlocals + i, c);
            }
        }
    }
    if (PyTuple_GET_SIZE(co->co_freevars)) {
        int i;
        for (i = 0; i < PyTuple_GET_SIZE(co->co_freevars); ++i) {
            PyObject *o = PyTuple_GET_ITEM(closure, i);
            Py_INCREF(o);
            freevars[PyTuple_GET_SIZE(co->co_cellvars) + i] = o;
        }
    }

    if (co->co_flags & CO_GENERATOR) {
        /* Don't need to keep the reference to f_back, it will be set
         * when the generator is resumed. */
        Py_XDECREF(f->f_back);
        f->f_back = NULL;

        PCALL(PCALL_GENERATOR);

        /* Create a new generator that owns the ready to run frame
         * and return that as the value. */
        return PyGen_New(f);
    }
    retval = PyEval_EvalFrameEx(f,0);

fail: /* Jump here from prelude on failure */

    /* decref'ing the frame can cause __del__ methods to get invoked,
       which can call back into Python.  While we're done with the
       current Python frame (f), the associated C stack is still in use,
       so recursion_depth must be boosted for the duration.
    */
    assert(tstate != NULL);
    ++tstate->recursion_depth;
    Py_DECREF(f);
    --tstate->recursion_depth;
    return retval;
}
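/* Hedged usage sketch (hypothetical caller, not part of ceval.c): how a
 * caller might drive PyEval_EvalCodeEx() directly for a code object that
 * takes two positional arguments and no keywords, defaults or closure.
 * The function and variable names are illustrative only. */
#if 0
static PyObject *
run_two_arg_code(PyCodeObject *co, PyObject *globals,
                 PyObject *a, PyObject *b)
{
    PyObject *args[2];
    args[0] = a;
    args[1] = b;
    /* locals == NULL means "use fast locals"; no kws, defs or closure. */
    return PyEval_EvalCodeEx(co, globals, NULL,
                             args, 2, NULL, 0, NULL, 0, NULL);
}
#endif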
static PyObject *
kwd_as_string(PyObject *kwd) {
#ifdef Py_USING_UNICODE
    if (PyString_Check(kwd)) {
#else
        assert(PyString_Check(kwd));
#endif
        Py_INCREF(kwd);
        return kwd;
#ifdef Py_USING_UNICODE
    }
    return _PyUnicode_AsDefaultEncodedString(kwd, "replace");
#endif
}
/* Implementation notes for set_exc_info() and reset_exc_info():

   - Below, 'exc_ZZZ' stands for 'exc_type', 'exc_value' and
     'exc_traceback'.  These always travel together.

   - tstate->curexc_ZZZ is the "hot" exception that is set by
     PyErr_SetString(), cleared by PyErr_Clear(), and so on.

   - Once an exception is caught by an except clause, it is transferred
     from tstate->curexc_ZZZ to tstate->exc_ZZZ, from which sys.exc_info()
     can pick it up.  This is the primary task of set_exc_info().
     XXX That can't be right:  set_exc_info() doesn't look at tstate->curexc_ZZZ.

   - Now let me explain the complicated dance with frame->f_exc_ZZZ.

     Long ago, when none of this existed, there were just a few globals:
     one set corresponding to the "hot" exception, and one set
     corresponding to sys.exc_ZZZ.  (Actually, the latter weren't C
     globals; they were simply stored as sys.exc_ZZZ.  For backwards
     compatibility, they still are!)  The problem was that in code like

     try:
        "something that may fail"
     except "some exception":
        "do something else first"
        "print the exception from sys.exc_ZZZ."

     if "do something else first" invoked something that raised and caught
     an exception, sys.exc_ZZZ were overwritten.  That was a frequent
     cause of subtle bugs.  I fixed this by changing the semantics as
     follows:

     - Within one frame, sys.exc_ZZZ will hold the last exception caught
       in that frame.

     - But initially, and as long as no exception is caught in a given
       frame, sys.exc_ZZZ will hold the last exception caught in the
       previous frame (or the frame before that, etc.).

     The first bullet fixed the bug in the above example.  The second
     bullet was for backwards compatibility: it was (and is) common to
     have a function that is called when an exception is caught, and to
     have that function access the caught exception via sys.exc_ZZZ.
     (Example: traceback.print_exc()).

     At the same time I fixed the problem that sys.exc_ZZZ weren't
     thread-safe, by introducing sys.exc_info() which gets it from tstate;
     but that's really a separate improvement.

     The reset_exc_info() function in ceval.c restores the tstate->exc_ZZZ
     variables to what they were before the current frame was called.  The
     set_exc_info() function saves them on the frame so that
     reset_exc_info() can restore them.  The invariant is that
     frame->f_exc_ZZZ is NULL iff the current frame never caught an
     exception (where "catching" an exception applies only to successful
     except clauses); and if the current frame ever caught an exception,
     frame->f_exc_ZZZ is the exception that was stored in tstate->exc_ZZZ
     at the start of the current frame.
*/
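/* Illustrative sketch (an assumption drawn from the notes above, not code
 * from this file): the caller pattern implied is roughly
 *
 *     set_exc_info(tstate, type, value, tb);   on a successful except clause
 *     ...
 *     if (frame->f_exc_type != NULL)           on frame exit
 *         reset_exc_info(tstate);
 *
 * i.e. frame->f_exc_ZZZ acts as the per-frame save slot that
 * reset_exc_info() later restores tstate->exc_ZZZ from. */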
static void
set_exc_info(PyThreadState *tstate,
             PyObject *type, PyObject *value, PyObject *tb)
{
    PyFrameObject *frame = tstate->frame;
    PyObject *tmp_type, *tmp_value, *tmp_tb;

    assert(type != NULL);
    assert(frame != NULL);
    if (frame->f_exc_type == NULL) {
        assert(frame->f_exc_value == NULL);
        assert(frame->f_exc_traceback == NULL);
        /* This frame didn't catch an exception before. */
        /* Save previous exception of this thread in this frame. */
        if (tstate->exc_type == NULL) {
            /* XXX Why is this set to Py_None? */
            Py_INCREF(Py_None);
            tstate->exc_type = Py_None;
        }
        Py_INCREF(tstate->exc_type);
        Py_XINCREF(tstate->exc_value);
        Py_XINCREF(tstate->exc_traceback);
        frame->f_exc_type = tstate->exc_type;
        frame->f_exc_value = tstate->exc_value;
        frame->f_exc_traceback = tstate->exc_traceback;
    }
    /* Set new exception for this thread. */
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    Py_INCREF(type);
    Py_XINCREF(value);
    Py_XINCREF(tb);
    tstate->exc_type = type;
    tstate->exc_value = value;
    tstate->exc_traceback = tb;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
    /* For b/w compatibility */
    PySys_SetObject("exc_type", type);
    PySys_SetObject("exc_value", value);
    PySys_SetObject("exc_traceback", tb);
}
static void
reset_exc_info(PyThreadState *tstate)
{
    PyFrameObject *frame;
    PyObject *tmp_type, *tmp_value, *tmp_tb;

    /* It's a precondition that the thread state's frame caught an
     * exception -- verify in a debug build.
     */
    assert(tstate != NULL);
    frame = tstate->frame;
    assert(frame != NULL);
    assert(frame->f_exc_type != NULL);

    /* Copy the frame's exception info back to the thread state. */
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    Py_INCREF(frame->f_exc_type);
    Py_XINCREF(frame->f_exc_value);
    Py_XINCREF(frame->f_exc_traceback);
    tstate->exc_type = frame->f_exc_type;
    tstate->exc_value = frame->f_exc_value;
    tstate->exc_traceback = frame->f_exc_traceback;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);

    /* For b/w compatibility */
    PySys_SetObject("exc_type", frame->f_exc_type);
    PySys_SetObject("exc_value", frame->f_exc_value);
    PySys_SetObject("exc_traceback", frame->f_exc_traceback);

    /* Clear the frame's exception info. */
    tmp_type = frame->f_exc_type;
    tmp_value = frame->f_exc_value;
    tmp_tb = frame->f_exc_traceback;
    frame->f_exc_type = NULL;
    frame->f_exc_value = NULL;
    frame->f_exc_traceback = NULL;
    Py_DECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
}
/* Logic for the raise statement (too complicated for inlining).
   This *consumes* a reference count to each of its arguments. */
static enum why_code
do_raise(PyObject *type, PyObject *value, PyObject *tb)
{
    if (type == NULL) {
        /* Reraise */
        PyThreadState *tstate = PyThreadState_GET();
        type = tstate->exc_type == NULL ? Py_None : tstate->exc_type;
        value = tstate->exc_value;
        tb = tstate->exc_traceback;
        Py_XINCREF(type);
        Py_XINCREF(value);
        Py_XINCREF(tb);
    }

    /* We support the following forms of raise:
       raise <class>, <classinstance>
       raise <class>, <argument tuple>
       raise <class>, None
       raise <class>, <argument>
       raise <classinstance>, None
       raise <string>, <object>
       raise <string>, None

       An omitted second argument is the same as None.

       In addition, raise <tuple>, <anything> is the same as
       raising the tuple's first item (and it better have one!);
       this rule is applied recursively.

       Finally, an optional third argument can be supplied, which
       gives the traceback to be substituted (useful when
       re-raising an exception after examining it).  */

    /* First, check the traceback argument, replacing None with
       NULL. */
    if (tb == Py_None) {
        Py_DECREF(tb);
        tb = NULL;
    }
    else if (tb != NULL && !PyTraceBack_Check(tb)) {
        PyErr_SetString(PyExc_TypeError,
                        "raise: arg 3 must be a traceback or None");
        goto raise_error;
    }

    /* Next, replace a missing value with None */
    if (value == NULL) {
        value = Py_None;
        Py_INCREF(value);
    }

    /* Next, repeatedly, replace a tuple exception with its first item */
    while (PyTuple_Check(type) && PyTuple_Size(type) > 0) {
        PyObject *tmp = type;
        type = PyTuple_GET_ITEM(type, 0);
        Py_INCREF(type);
        Py_DECREF(tmp);
    }

    if (PyExceptionClass_Check(type))
        PyErr_NormalizeException(&type, &value, &tb);

    else if (PyExceptionInstance_Check(type)) {
        /* Raising an instance.  The value should be a dummy. */
        if (value != Py_None) {
            PyErr_SetString(PyExc_TypeError,
                            "instance exception may not have a separate value");
            goto raise_error;
        }
        else {
            /* Normalize to raise <class>, <instance> */
            Py_DECREF(value);
            value = type;
            type = PyExceptionInstance_Class(type);
            Py_INCREF(type);
        }
    }
    else {
        /* Not something you can raise.  You get an exception
           anyway, just not what you specified :-) */
        PyErr_Format(PyExc_TypeError,
                     "exceptions must be classes or instances, not %s",
                     type->ob_type->tp_name);
        goto raise_error;
    }

    assert(PyExceptionClass_Check(type));
    if (Py_Py3kWarningFlag && PyClass_Check(type)) {
        if (PyErr_WarnEx(PyExc_DeprecationWarning,
                         "exceptions must derive from BaseException "
                         "in 3.x", 1) < 0)
            goto raise_error;
    }

    PyErr_Restore(type, value, tb);
    if (tb == NULL)
        return WHY_EXCEPTION;
    else
        return WHY_RERAISE;

raise_error:
    Py_XDECREF(value);
    Py_XDECREF(type);
    Py_XDECREF(tb);
    return WHY_EXCEPTION;
}
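/* Hedged usage sketch (hypothetical caller, not from this file): because
 * do_raise() *consumes* a reference to each argument, a caller that wants
 * to re-raise the pending exception can hand over the references it got
 * from PyErr_Fetch() without touching them afterwards. */
#if 0
static enum why_code
reraise_pending(void)
{
    PyObject *type, *value, *tb;
    PyErr_Fetch(&type, &value, &tb);   /* we own these references now */
    return do_raise(type, value, tb);  /* ownership passes to do_raise() */
}
#endif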
/* Iterate v argcnt times and store the results on the stack (via decreasing
   sp).  Return 1 for success, 0 if error. */
static int
unpack_iterable(PyObject *v, int argcnt, PyObject **sp)
{
    int i = 0;
    PyObject *it;  /* iter(v) */
    PyObject *w;

    it = PyObject_GetIter(v);
    if (it == NULL)
        goto Error;

    for (; i < argcnt; i++) {
        w = PyIter_Next(it);
        if (w == NULL) {
            /* Iterator done, via error or exhaustion. */
            if (!PyErr_Occurred()) {
                PyErr_Format(PyExc_ValueError,
                             "need more than %d value%s to unpack",
                             i, i == 1 ? "" : "s");
            }
            goto Error;
        }
        *--sp = w;
    }

    /* We better have exhausted the iterator now. */
    w = PyIter_Next(it);
    if (w == NULL) {
        if (PyErr_Occurred())
            goto Error;
        Py_DECREF(it);
        return 1;
    }
    Py_DECREF(w);
    PyErr_SetString(PyExc_ValueError, "too many values to unpack");
    /* fall through */
Error:
    for (; i > 0; i--, sp++)
        Py_DECREF(*sp);
    Py_XDECREF(it);
    return 0;
}
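/* Hedged usage sketch (hypothetical, mirroring how an unpack opcode would
 * use this helper): the stack pointer passed in is the slot just above the
 * target area, and items are stored downwards, so the first item produced
 * by the iterator ends up in the highest slot. */
#if 0
static int
unpack_three(PyObject *v, PyObject **slots /* room for 3 objects */)
{
    /* Fills slots[2], slots[1], slots[0] in that order. */
    return unpack_iterable(v, 3, slots + 3);
}
#endif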
#ifdef LLTRACE
static int
prtrace(PyObject *v, char *str)
{
    printf("%s ", str);
    if (PyObject_Print(v, stdout, 0) != 0)
        PyErr_Clear(); /* Don't know what else to do */
    printf("\n");
    return 1;
}
#endif
static void
call_exc_trace(Py_tracefunc func, PyObject *self, PyFrameObject *f)
{
    PyObject *type, *value, *traceback, *arg;
    int err;
    PyErr_Fetch(&type, &value, &traceback);
    if (value == NULL) {
        value = Py_None;
        Py_INCREF(value);
    }
    arg = PyTuple_Pack(3, type, value, traceback);
    if (arg == NULL) {
        PyErr_Restore(type, value, traceback);
        return;
    }
    err = call_trace(func, self, f, PyTrace_EXCEPTION, arg);
    Py_DECREF(arg);
    if (err == 0)
        PyErr_Restore(type, value, traceback);
    else {
        Py_XDECREF(type);
        Py_XDECREF(value);
        Py_XDECREF(traceback);
    }
}
static int
call_trace_protected(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
                     int what, PyObject *arg)
{
    PyObject *type, *value, *traceback;
    int err;
    PyErr_Fetch(&type, &value, &traceback);
    err = call_trace(func, obj, frame, what, arg);
    if (err == 0) {
        PyErr_Restore(type, value, traceback);
        return 0;
    }
    else {
        Py_XDECREF(type);
        Py_XDECREF(value);
        Py_XDECREF(traceback);
        return -1;
    }
}
static int
call_trace(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
           int what, PyObject *arg)
{
    register PyThreadState *tstate = frame->f_tstate;
    int result;
    if (tstate->tracing)
        return 0;
    tstate->tracing++;
    tstate->use_tracing = 0;
    result = func(obj, frame, what, arg);
    tstate->use_tracing = ((tstate->c_tracefunc != NULL)
                           || (tstate->c_profilefunc != NULL));
    tstate->tracing--;
    return result;
}
PyObject *
_PyEval_CallTracing(PyObject *func, PyObject *args)
{
    PyFrameObject *frame = PyEval_GetFrame();
    PyThreadState *tstate = frame->f_tstate;
    int save_tracing = tstate->tracing;
    int save_use_tracing = tstate->use_tracing;
    PyObject *result;

    tstate->tracing = 0;
    tstate->use_tracing = ((tstate->c_tracefunc != NULL)
                           || (tstate->c_profilefunc != NULL));
    result = PyObject_Call(func, args, NULL);
    tstate->tracing = save_tracing;
    tstate->use_tracing = save_use_tracing;
    return result;
}
static int
maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
                      PyFrameObject *frame, int *instr_lb, int *instr_ub,
                      int *instr_prev)
{
    int result = 0;

    /* If the last instruction executed isn't in the current
       instruction window, reset the window.  If the last
       instruction happens to fall at the start of a line or if it
       represents a jump backwards, call the trace function.
    */
    if ((frame->f_lasti < *instr_lb || frame->f_lasti >= *instr_ub)) {
        int line;
        PyAddrPair bounds;

        line = PyCode_CheckLineNumber(frame->f_code, frame->f_lasti,
                                      &bounds);
        if (line >= 0) {
            frame->f_lineno = line;
            result = call_trace(func, obj, frame,
                                PyTrace_LINE, Py_None);
        }
        *instr_lb = bounds.ap_lower;
        *instr_ub = bounds.ap_upper;
    }
    else if (frame->f_lasti <= *instr_prev) {
        result = call_trace(func, obj, frame, PyTrace_LINE, Py_None);
    }
    *instr_prev = frame->f_lasti;
    return result;
}
void
PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
{
    PyThreadState *tstate = PyThreadState_GET();
    PyObject *temp = tstate->c_profileobj;
    Py_XINCREF(arg);
    tstate->c_profilefunc = NULL;
    tstate->c_profileobj = NULL;
    /* Must make sure that tracing is not ignored if 'temp' is freed */
    tstate->use_tracing = tstate->c_tracefunc != NULL;
    Py_XDECREF(temp);
    tstate->c_profilefunc = func;
    tstate->c_profileobj = arg;
    /* Flag that tracing or profiling is turned on */
    tstate->use_tracing = (func != NULL) || (tstate->c_tracefunc != NULL);
}
void
PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
{
    PyThreadState *tstate = PyThreadState_GET();
    PyObject *temp = tstate->c_traceobj;
    _Py_TracingPossible += (func != NULL) - (tstate->c_tracefunc != NULL);
    Py_XINCREF(arg);
    tstate->c_tracefunc = NULL;
    tstate->c_traceobj = NULL;
    /* Must make sure that profiling is not ignored if 'temp' is freed */
    tstate->use_tracing = tstate->c_profilefunc != NULL;
    Py_XDECREF(temp);
    tstate->c_tracefunc = func;
    tstate->c_traceobj = arg;
    /* Flag that tracing or profiling is turned on */
    tstate->use_tracing = ((func != NULL)
                           || (tstate->c_profilefunc != NULL));
}
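/* Hedged usage sketch (hypothetical extension code, not part of this
 * file): installing a C-level trace function.  The callback matches the
 * Py_tracefunc typedef used throughout this file; returning non-zero
 * reports an error back to the interpreter. */
#if 0
static int
my_trace(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
{
    if (what == PyTrace_LINE)
        printf("line event at f_lasti=%d\n", frame->f_lasti);
    return 0;
}

static void
install_my_trace(void)
{
    /* The second argument is passed back as 'obj' on every event. */
    PyEval_SetTrace(my_trace, NULL);
}
#endif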
PyObject *
PyEval_GetBuiltins(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return PyThreadState_GET()->interp->builtins;
    else
        return current_frame->f_builtins;
}
PyObject *
PyEval_GetLocals(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return NULL;
    PyFrame_FastToLocals(current_frame);
    return current_frame->f_locals;
}
PyObject *
PyEval_GetGlobals(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return NULL;
    else
        return current_frame->f_globals;
}
PyFrameObject *
PyEval_GetFrame(void)
{
    PyThreadState *tstate = PyThreadState_GET();
    return _PyThreadState_GetFrame(tstate);
}
int
PyEval_GetRestricted(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    return current_frame == NULL ? 0 : PyFrame_IsRestricted(current_frame);
}
int
PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    int result = cf->cf_flags != 0;

    if (current_frame != NULL) {
        const int codeflags = current_frame->f_code->co_flags;
        const int compilerflags = codeflags & PyCF_MASK;
        if (compilerflags) {
            result = 1;
            cf->cf_flags |= compilerflags;
        }
#if 0 /* future keyword */
        if (codeflags & CO_GENERATOR_ALLOWED) {
            result = 1;
            cf->cf_flags |= CO_GENERATOR_ALLOWED;
        }
#endif
    }
    return result;
}
int
Py_FlushLine(void)
{
    PyObject *f = PySys_GetObject("stdout");
    if (f == NULL)
        return 0;
    if (!PyFile_SoftSpace(f, 0))
        return 0;
    return PyFile_WriteString("\n", f);
}
/* External interface to call any callable object.
   The arg must be a tuple or NULL. */

#undef PyEval_CallObject
/* for backward compatibility: export this interface */

PyObject *
PyEval_CallObject(PyObject *func, PyObject *arg)
{
    return PyEval_CallObjectWithKeywords(func, arg, (PyObject *)NULL);
}
#define PyEval_CallObject(func,arg) \
    PyEval_CallObjectWithKeywords(func, arg, (PyObject *)NULL)

PyObject *
PyEval_CallObjectWithKeywords(PyObject *func, PyObject *arg, PyObject *kw)
{
    PyObject *result;

    if (arg == NULL) {
        arg = PyTuple_New(0);
        if (arg == NULL)
            return NULL;
    }
    else if (!PyTuple_Check(arg)) {
        PyErr_SetString(PyExc_TypeError,
                        "argument list must be a tuple");
        return NULL;
    }
    else
        Py_INCREF(arg);

    if (kw != NULL && !PyDict_Check(kw)) {
        PyErr_SetString(PyExc_TypeError,
                        "keyword list must be a dictionary");
        Py_DECREF(arg);
        return NULL;
    }

    result = PyObject_Call(func, arg, kw);
    Py_DECREF(arg);
    return result;
}
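/* Hedged usage sketch (hypothetical, not part of this file): calling a
 * callable with one positional argument and one keyword argument through
 * the exported interface above.  The keyword name "flag" is illustrative
 * only. */
#if 0
static PyObject *
call_with_keyword(PyObject *func, PyObject *pos, PyObject *kwval)
{
    PyObject *result = NULL;
    PyObject *args = Py_BuildValue("(O)", pos);
    PyObject *kw = Py_BuildValue("{s:O}", "flag", kwval);
    if (args != NULL && kw != NULL)
        result = PyEval_CallObjectWithKeywords(func, args, kw);
    Py_XDECREF(args);
    Py_XDECREF(kw);
    return result;
}
#endif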
const char *
PyEval_GetFuncName(PyObject *func)
{
    if (PyMethod_Check(func))
        return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func));
    else if (PyFunction_Check(func))
        return PyString_AsString(((PyFunctionObject*)func)->func_name);
    else if (PyCFunction_Check(func))
        return ((PyCFunctionObject*)func)->m_ml->ml_name;
    else if (PyClass_Check(func))
        return PyString_AsString(((PyClassObject*)func)->cl_name);
    else if (PyInstance_Check(func)) {
        return PyString_AsString(
            ((PyInstanceObject*)func)->in_class->cl_name);
    } else {
        return func->ob_type->tp_name;
    }
}
const char *
PyEval_GetFuncDesc(PyObject *func)
{
    if (PyMethod_Check(func))
        return "()";
    else if (PyFunction_Check(func))
        return "()";
    else if (PyCFunction_Check(func))
        return "()";
    else if (PyClass_Check(func))
        return " constructor";
    else if (PyInstance_Check(func)) {
        return " instance";
    } else {
        return " object";
    }
}
static void
err_args(PyObject *func, int flags, int nargs)
{
    if (flags & METH_NOARGS)
        PyErr_Format(PyExc_TypeError,
                     "%.200s() takes no arguments (%d given)",
                     ((PyCFunctionObject *)func)->m_ml->ml_name,
                     nargs);
    else
        PyErr_Format(PyExc_TypeError,
                     "%.200s() takes exactly one argument (%d given)",
                     ((PyCFunctionObject *)func)->m_ml->ml_name,
                     nargs);
}
#define C_TRACE(x, call) \
if (tstate->use_tracing && tstate->c_profilefunc) { \
    if (call_trace(tstate->c_profilefunc, tstate->c_profileobj, \
                   tstate->frame, PyTrace_C_CALL, func)) { \
        x = NULL; \
    } \
    else { \
        x = call; \
        if (tstate->c_profilefunc != NULL) { \
            if (x == NULL) { \
                call_trace_protected(tstate->c_profilefunc, \
                                     tstate->c_profileobj, \
                                     tstate->frame, PyTrace_C_EXCEPTION, \
                                     func); \
                /* XXX should pass (type, value, tb) */ \
            } \
            else { \
                if (call_trace(tstate->c_profilefunc, \
                               tstate->c_profileobj, \
                               tstate->frame, PyTrace_C_RETURN, \
                               func)) { \
                    Py_DECREF(x); \
                    x = NULL; \
                } \
            } \
        } \
    } \
} else { \
    x = call; \
}
static PyObject *
call_function(PyObject ***pp_stack, int oparg
#ifdef WITH_TSC
                , uint64* pintr0, uint64* pintr1
#endif
                )
{
    int na = oparg & 0xff;
    int nk = (oparg>>8) & 0xff;
    int n = na + 2 * nk;
    PyObject **pfunc = (*pp_stack) - n - 1;
    PyObject *func = *pfunc;
    PyObject *x, *w;

    /* Always dispatch PyCFunction first, because these are
       presumed to be the most frequent callable object.
    */
    if (PyCFunction_Check(func) && nk == 0) {
        int flags = PyCFunction_GET_FLAGS(func);
        PyThreadState *tstate = PyThreadState_GET();

        PCALL(PCALL_CFUNCTION);
        if (flags & (METH_NOARGS | METH_O)) {
            PyCFunction meth = PyCFunction_GET_FUNCTION(func);
            PyObject *self = PyCFunction_GET_SELF(func);
            if (flags & METH_NOARGS && na == 0) {
                C_TRACE(x, (*meth)(self,NULL));
            }
            else if (flags & METH_O && na == 1) {
                PyObject *arg = EXT_POP(*pp_stack);
                C_TRACE(x, (*meth)(self,arg));
                Py_DECREF(arg);
            }
            else {
                err_args(func, flags, na);
                x = NULL;
            }
        }
        else {
            PyObject *callargs;
            callargs = load_args(pp_stack, na);
            READ_TIMESTAMP(*pintr0);
            C_TRACE(x, PyCFunction_Call(func,callargs,NULL));
            READ_TIMESTAMP(*pintr1);
            Py_XDECREF(callargs);
        }
    } else {
        if (PyMethod_Check(func) && PyMethod_GET_SELF(func) != NULL) {
            /* optimize access to bound methods */
            PyObject *self = PyMethod_GET_SELF(func);
            PCALL(PCALL_METHOD);
            PCALL(PCALL_BOUND_METHOD);
            Py_INCREF(self);
            func = PyMethod_GET_FUNCTION(func);
            Py_INCREF(func);
            Py_DECREF(*pfunc);
            *pfunc = self;
            na++;
            n++;
        } else
            Py_INCREF(func);
        READ_TIMESTAMP(*pintr0);
        if (PyFunction_Check(func))
            x = fast_function(func, pp_stack, n, na, nk);
        else
            x = do_call(func, pp_stack, na, nk);
        READ_TIMESTAMP(*pintr1);
        Py_DECREF(func);
    }

    /* Clear the stack of the function object.  Also removes
       the arguments in case they weren't consumed already
       (fast_function() and err_args() leave them on the stack).
     */
    while ((*pp_stack) > pfunc) {
        w = EXT_POP(*pp_stack);
        Py_DECREF(w);
        PCALL(PCALL_POP);
    }
    return x;
}
/* The fast_function() function optimize calls for which no argument
   tuple is necessary; the objects are passed directly from the stack.
   For the simplest case -- a function that takes only positional
   arguments and is called with only positional arguments -- it
   inlines the most primitive frame setup code from
   PyEval_EvalCodeEx(), which vastly reduces the checks that must be
   done before evaluating the frame.
*/

static PyObject *
fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
{
    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
    PyObject *globals = PyFunction_GET_GLOBALS(func);
    PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
    PyObject **d = NULL;
    int nd = 0;

    PCALL(PCALL_FUNCTION);
    PCALL(PCALL_FAST_FUNCTION);
    if (argdefs == NULL && co->co_argcount == n && nk==0 &&
        co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
        PyFrameObject *f;
        PyObject *retval = NULL;
        PyThreadState *tstate = PyThreadState_GET();
        PyObject **fastlocals, **stack;
        int i;

        PCALL(PCALL_FASTER_FUNCTION);
        assert(globals != NULL);
        /* XXX Perhaps we should create a specialized
           PyFrame_New() that doesn't take locals, but does
           take builtins without sanity checking them.
        */
        assert(tstate != NULL);
        f = PyFrame_New(tstate, co, globals, NULL);
        if (f == NULL)
            return NULL;

        fastlocals = f->f_localsplus;
        stack = (*pp_stack) - n;

        for (i = 0; i < n; i++) {
            Py_INCREF(*stack);
            fastlocals[i] = *stack++;
        }
        retval = PyEval_EvalFrameEx(f,0);
        ++tstate->recursion_depth;
        Py_DECREF(f);
        --tstate->recursion_depth;
        return retval;
    }
    if (argdefs != NULL) {
        d = &PyTuple_GET_ITEM(argdefs, 0);
        nd = Py_SIZE(argdefs);
    }
    return PyEval_EvalCodeEx(co, globals,
                             (PyObject *)NULL, (*pp_stack)-n, na,
                             (*pp_stack)-2*nk, nk, d, nd,
                             PyFunction_GET_CLOSURE(func));
}
static PyObject *
update_keyword_args(PyObject *orig_kwdict, int nk, PyObject ***pp_stack,
                    PyObject *func)
{
    PyObject *kwdict = NULL;
    if (orig_kwdict == NULL)
        kwdict = PyDict_New();
    else {
        kwdict = PyDict_Copy(orig_kwdict);
        Py_DECREF(orig_kwdict);
    }
    if (kwdict == NULL)
        return NULL;
    while (--nk >= 0) {
        int err;
        PyObject *value = EXT_POP(*pp_stack);
        PyObject *key = EXT_POP(*pp_stack);
        if (PyDict_GetItem(kwdict, key) != NULL) {
            PyErr_Format(PyExc_TypeError,
                         "%.200s%s got multiple values "
                         "for keyword argument '%.200s'",
                         PyEval_GetFuncName(func),
                         PyEval_GetFuncDesc(func),
                         PyString_AsString(key));
            Py_DECREF(key);
            Py_DECREF(value);
            Py_DECREF(kwdict);
            return NULL;
        }
        err = PyDict_SetItem(kwdict, key, value);
        Py_DECREF(key);
        Py_DECREF(value);
        if (err) {
            Py_DECREF(kwdict);
            return NULL;
        }
    }
    return kwdict;
}
static PyObject *
update_star_args(int nstack, int nstar, PyObject *stararg,
                 PyObject ***pp_stack)
{
    PyObject *callargs, *w;

    callargs = PyTuple_New(nstack + nstar);
    if (callargs == NULL) {
        return NULL;
    }
    if (nstar) {
        int i;
        for (i = 0; i < nstar; i++) {
            PyObject *a = PyTuple_GET_ITEM(stararg, i);
            Py_INCREF(a);
            PyTuple_SET_ITEM(callargs, nstack + i, a);
        }
    }
    while (--nstack >= 0) {
        w = EXT_POP(*pp_stack);
        PyTuple_SET_ITEM(callargs, nstack, w);
    }
    return callargs;
}
static PyObject *
load_args(PyObject ***pp_stack, int na)
{
    PyObject *args = PyTuple_New(na);
    PyObject *w;

    if (args == NULL)
        return NULL;
    while (--na >= 0) {
        w = EXT_POP(*pp_stack);
        PyTuple_SET_ITEM(args, na, w);
    }
    return args;
}
static PyObject *
do_call(PyObject *func, PyObject ***pp_stack, int na, int nk)
{
    PyObject *callargs = NULL;
    PyObject *kwdict = NULL;
    PyObject *result = NULL;

    if (nk > 0) {
        kwdict = update_keyword_args(NULL, nk, pp_stack, func);
        if (kwdict == NULL)
            goto call_fail;
    }
    callargs = load_args(pp_stack, na);
    if (callargs == NULL)
        goto call_fail;
#ifdef CALL_PROFILE
    /* At this point, we have to look at the type of func to
       update the call stats properly.  Do it here so as to avoid
       exposing the call stats machinery outside ceval.c
    */
    if (PyFunction_Check(func))
        PCALL(PCALL_FUNCTION);
    else if (PyMethod_Check(func))
        PCALL(PCALL_METHOD);
    else if (PyType_Check(func))
        PCALL(PCALL_TYPE);
    else
        PCALL(PCALL_OTHER);
#endif
    result = PyObject_Call(func, callargs, kwdict);
call_fail:
    Py_XDECREF(callargs);
    Py_XDECREF(kwdict);
    return result;
}
static PyObject *
ext_do_call(PyObject *func, PyObject ***pp_stack, int flags, int na, int nk)
{
    int nstar = 0;
    PyObject *callargs = NULL;
    PyObject *stararg = NULL;
    PyObject *kwdict = NULL;
    PyObject *result = NULL;

    if (flags & CALL_FLAG_KW) {
        kwdict = EXT_POP(*pp_stack);
        if (!PyDict_Check(kwdict)) {
            PyObject *d;
            d = PyDict_New();
            if (d == NULL)
                goto ext_call_fail;
            if (PyDict_Update(d, kwdict) != 0) {
                Py_DECREF(d);
                /* PyDict_Update raises attribute
                 * error (percolated from an attempt
                 * to get 'keys' attribute) instead of
                 * a type error if its second argument
                 * is not a mapping.
                 */
                if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s%.200s argument after ** "
                                 "must be a mapping, not %.200s",
                                 PyEval_GetFuncName(func),
                                 PyEval_GetFuncDesc(func),
                                 kwdict->ob_type->tp_name);
                }
                goto ext_call_fail;
            }
            Py_DECREF(kwdict);
            kwdict = d;
        }
    }
    if (flags & CALL_FLAG_VAR) {
        stararg = EXT_POP(*pp_stack);
        if (!PyTuple_Check(stararg)) {
            PyObject *t = NULL;
            t = PySequence_Tuple(stararg);
            if (t == NULL) {
                if (PyErr_ExceptionMatches(PyExc_TypeError)) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s%.200s argument after * "
                                 "must be a sequence, not %.200s",
                                 PyEval_GetFuncName(func),
                                 PyEval_GetFuncDesc(func),
                                 stararg->ob_type->tp_name);
                }
                goto ext_call_fail;
            }
            Py_DECREF(stararg);
            stararg = t;
        }
        nstar = PyTuple_GET_SIZE(stararg);
    }
    if (nk > 0) {
        kwdict = update_keyword_args(kwdict, nk, pp_stack, func);
        if (kwdict == NULL)
            goto ext_call_fail;
    }
    callargs = update_star_args(na, nstar, stararg, pp_stack);
    if (callargs == NULL)
        goto ext_call_fail;
#ifdef CALL_PROFILE
    /* At this point, we have to look at the type of func to
       update the call stats properly.  Do it here so as to avoid
       exposing the call stats machinery outside ceval.c
    */
    if (PyFunction_Check(func))
        PCALL(PCALL_FUNCTION);
    else if (PyMethod_Check(func))
        PCALL(PCALL_METHOD);
    else if (PyType_Check(func))
        PCALL(PCALL_TYPE);
    else
        PCALL(PCALL_OTHER);
#endif
    result = PyObject_Call(func, callargs, kwdict);
ext_call_fail:
    Py_XDECREF(callargs);
    Py_XDECREF(kwdict);
    Py_XDECREF(stararg);
    return result;
}
/* Extract a slice index from a PyInt or PyLong or an object with the
   nb_index slot defined, and store in *pi.
   Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
   and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
   Return 0 on error, 1 on success.
*/
/* Note: If v is NULL, return success without storing into *pi.  This
   is because _PyEval_SliceIndex() is called by apply_slice(), which can be
   called by the SLICE opcode with v and/or w equal to NULL.
*/
int
_PyEval_SliceIndex(PyObject *v, Py_ssize_t *pi)
{
    if (v != NULL) {
        Py_ssize_t x;
        if (PyInt_Check(v)) {
            /* XXX(nnorwitz): I think PyInt_AS_LONG is correct,
               however, it looks like it should be AsSsize_t.
               There should be a comment here explaining why.
            */
            x = PyInt_AS_LONG(v);
        }
        else if (PyIndex_Check(v)) {
            x = PyNumber_AsSsize_t(v, NULL);
            if (x == -1 && PyErr_Occurred())
                return 0;
        }
        else {
            PyErr_SetString(PyExc_TypeError,
                            "slice indices must be integers or "
                            "None or have an __index__ method");
            return 0;
        }
        *pi = x;
    }
    return 1;
}
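/* Hedged usage sketch (hypothetical, not from this file): extracting both
 * bounds of a slice with the helper above.  Oversized values are clamped
 * rather than raising, because PyNumber_AsSsize_t() is called with a NULL
 * exception argument. */
#if 0
static int
get_bounds(PyObject *lo, PyObject *hi, Py_ssize_t *ilow, Py_ssize_t *ihigh)
{
    *ilow = 0;
    *ihigh = PY_SSIZE_T_MAX;
    if (!_PyEval_SliceIndex(lo, ilow))
        return 0;
    if (!_PyEval_SliceIndex(hi, ihigh))
        return 0;
    return 1;
}
#endif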
#define ISINDEX(x) ((x) == NULL || \
                    PyInt_Check(x) || PyLong_Check(x) || PyIndex_Check(x))

static PyObject *
apply_slice(PyObject *u, PyObject *v, PyObject *w) /* return u[v:w] */
{
    PyTypeObject *tp = u->ob_type;
    PySequenceMethods *sq = tp->tp_as_sequence;

    if (sq && sq->sq_slice && ISINDEX(v) && ISINDEX(w)) {
        Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
        if (!_PyEval_SliceIndex(v, &ilow))
            return NULL;
        if (!_PyEval_SliceIndex(w, &ihigh))
            return NULL;
        return PySequence_GetSlice(u, ilow, ihigh);
    }
    else {
        PyObject *slice = PySlice_New(v, w, NULL);
        if (slice != NULL) {
            PyObject *res = PyObject_GetItem(u, slice);
            Py_DECREF(slice);
            return res;
        }
        else
            return NULL;
    }
}
static int
assign_slice(PyObject *u, PyObject *v, PyObject *w, PyObject *x)
    /* u[v:w] = x */
{
    PyTypeObject *tp = u->ob_type;
    PySequenceMethods *sq = tp->tp_as_sequence;

    if (sq && sq->sq_ass_slice && ISINDEX(v) && ISINDEX(w)) {
        Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
        if (!_PyEval_SliceIndex(v, &ilow))
            return -1;
        if (!_PyEval_SliceIndex(w, &ihigh))
            return -1;
        if (x == NULL)
            return PySequence_DelSlice(u, ilow, ihigh);
        else
            return PySequence_SetSlice(u, ilow, ihigh, x);
    }
    else {
        PyObject *slice = PySlice_New(v, w, NULL);
        if (slice != NULL) {
            int res;
            if (x != NULL)
                res = PyObject_SetItem(u, slice, x);
            else
                res = PyObject_DelItem(u, slice);
            Py_DECREF(slice);
            return res;
        }
        else
            return -1;
    }
}
#define Py3kExceptionClass_Check(x)     \
    (PyType_Check((x)) &&               \
     PyType_FastSubclass((PyTypeObject*)(x), Py_TPFLAGS_BASE_EXC_SUBCLASS))

#define CANNOT_CATCH_MSG "catching classes that don't inherit from " \
                         "BaseException is not allowed in 3.x"

static PyObject *
cmp_outcome(int op, register PyObject *v, register PyObject *w)
{
    int res = 0;
    switch (op) {
    case PyCmp_IS:
        res = (v == w);
        break;
    case PyCmp_IS_NOT:
        res = (v != w);
        break;
    case PyCmp_IN:
        res = PySequence_Contains(w, v);
        if (res < 0)
            return NULL;
        break;
    case PyCmp_NOT_IN:
        res = PySequence_Contains(w, v);
        if (res < 0)
            return NULL;
        res = !res;
        break;
    case PyCmp_EXC_MATCH:
        if (PyTuple_Check(w)) {
            Py_ssize_t i, length;
            length = PyTuple_Size(w);
            for (i = 0; i < length; i += 1) {
                PyObject *exc = PyTuple_GET_ITEM(w, i);
                if (PyString_Check(exc)) {
                    int ret_val;
                    ret_val = PyErr_WarnEx(
                        PyExc_DeprecationWarning,
                        "catching of string "
                        "exceptions is deprecated", 1);
                    if (ret_val < 0)
                        return NULL;
                }
                else if (Py_Py3kWarningFlag  &&
                         !PyTuple_Check(exc) &&
                         !Py3kExceptionClass_Check(exc))
                {
                    int ret_val;
                    ret_val = PyErr_WarnEx(
                        PyExc_DeprecationWarning,
                        CANNOT_CATCH_MSG, 1);
                    if (ret_val < 0)
                        return NULL;
                }
            }
        }
        else {
            if (PyString_Check(w)) {
                int ret_val;
                ret_val = PyErr_WarnEx(
                    PyExc_DeprecationWarning,
                    "catching of string "
                    "exceptions is deprecated", 1);
                if (ret_val < 0)
                    return NULL;
            }
            else if (Py_Py3kWarningFlag  &&
                     !PyTuple_Check(w) &&
                     !Py3kExceptionClass_Check(w))
            {
                int ret_val;
                ret_val = PyErr_WarnEx(
                    PyExc_DeprecationWarning,
                    CANNOT_CATCH_MSG, 1);
                if (ret_val < 0)
                    return NULL;
            }
        }
        res = PyErr_GivenExceptionMatches(v, w);
        break;
    default:
        return PyObject_RichCompare(v, w, op);
    }
    v = res ? Py_True : Py_False;
    Py_INCREF(v);
    return v;
}
static PyObject *
import_from(PyObject *v, PyObject *name)
{
    PyObject *x;

    x = PyObject_GetAttr(v, name);
    if (x == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Format(PyExc_ImportError,
                     "cannot import name %.230s",
                     PyString_AsString(name));
    }
    return x;
}
static int
import_all_from(PyObject *locals, PyObject *v)
{
    PyObject *all = PyObject_GetAttrString(v, "__all__");
    PyObject *dict, *name, *value;
    int skip_leading_underscores = 0;
    int pos, err;

    if (all == NULL) {
        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
            return -1; /* Unexpected error */
        PyErr_Clear();
        dict = PyObject_GetAttrString(v, "__dict__");
        if (dict == NULL) {
            if (!PyErr_ExceptionMatches(PyExc_AttributeError))
                return -1;
            PyErr_SetString(PyExc_ImportError,
                            "from-import-* object has no __dict__ and no __all__");
            return -1;
        }
        all = PyMapping_Keys(dict);
        Py_DECREF(dict);
        if (all == NULL)
            return -1;
        skip_leading_underscores = 1;
    }

    for (pos = 0, err = 0; ; pos++) {
        name = PySequence_GetItem(all, pos);
        if (name == NULL) {
            if (!PyErr_ExceptionMatches(PyExc_IndexError))
                err = -1;
            else
                PyErr_Clear();
            break;
        }
        if (skip_leading_underscores &&
            PyString_Check(name) &&
            PyString_AS_STRING(name)[0] == '_') {
            Py_DECREF(name);
            continue;
        }
        value = PyObject_GetAttr(v, name);
        if (value == NULL)
            err = -1;
        else if (PyDict_CheckExact(locals))
            err = PyDict_SetItem(locals, name, value);
        else
            err = PyObject_SetItem(locals, name, value);
        Py_DECREF(name);
        Py_XDECREF(value);
        if (err != 0)
            break;
    }
    Py_DECREF(all);
    return err;
}
static PyObject *
build_class(PyObject *methods, PyObject *bases, PyObject *name)
{
    PyObject *metaclass = NULL, *result, *base;

    if (PyDict_Check(methods))
        metaclass = PyDict_GetItemString(methods, "__metaclass__");
    if (metaclass != NULL)
        Py_INCREF(metaclass);
    else if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
        base = PyTuple_GET_ITEM(bases, 0);
        metaclass = PyObject_GetAttrString(base, "__class__");
        if (metaclass == NULL) {
            PyErr_Clear();
            metaclass = (PyObject *)base->ob_type;
            Py_INCREF(metaclass);
        }
    }
    else {
        PyObject *g = PyEval_GetGlobals();
        if (g != NULL && PyDict_Check(g))
            metaclass = PyDict_GetItemString(g, "__metaclass__");
        if (metaclass == NULL)
            metaclass = (PyObject *) &PyClass_Type;
        Py_INCREF(metaclass);
    }
    result = PyObject_CallFunctionObjArgs(metaclass, name, bases, methods,
                                          NULL);
    Py_DECREF(metaclass);
    if (result == NULL && PyErr_ExceptionMatches(PyExc_TypeError)) {
        /* A type error here likely means that the user passed
           in a base that was not a class (such as the random module
           instead of the random.random type).  Help them out by
           augmenting the error message with more information. */

        PyObject *ptype, *pvalue, *ptraceback;

        PyErr_Fetch(&ptype, &pvalue, &ptraceback);
        if (PyString_Check(pvalue)) {
            PyObject *newmsg;
            newmsg = PyString_FromFormat(
                "Error when calling the metaclass bases\n"
                "    %s",
                PyString_AS_STRING(pvalue));
            if (newmsg != NULL) {
                Py_DECREF(pvalue);
                pvalue = newmsg;
            }
        }
        PyErr_Restore(ptype, pvalue, ptraceback);
    }
    return result;
}
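/* Illustrative note (an assumption, not code from this file): at the
 * Python level the call made above is roughly equivalent to
 *
 *     cls = metaclass(name, bases, methods)
 *
 * where 'methods' is the dict built by executing the class body, so a
 * __metaclass__ entry in that dict (or the class of the first base)
 * decides which callable constructs the class. */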
static int
exec_statement(PyFrameObject *f, PyObject *prog, PyObject *globals,
               PyObject *locals)
{
    int n;
    PyObject *v;
    int plain = 0;

    if (PyTuple_Check(prog) && globals == Py_None && locals == Py_None &&
        ((n = PyTuple_Size(prog)) == 2 || n == 3)) {
        /* Backward compatibility hack */
        globals = PyTuple_GetItem(prog, 1);
        if (n == 3)
            locals = PyTuple_GetItem(prog, 2);
        prog = PyTuple_GetItem(prog, 0);
    }
    if (globals == Py_None) {
        globals = PyEval_GetGlobals();
        if (locals == Py_None) {
            locals = PyEval_GetLocals();
            plain = 1;
        }
        if (!globals || !locals) {
            PyErr_SetString(PyExc_SystemError,
                            "globals and locals cannot be NULL");
            return -1;
        }
    }
    else if (locals == Py_None)
        locals = globals;
    if (!PyString_Check(prog) &&
#ifdef Py_USING_UNICODE
        !PyUnicode_Check(prog) &&
#endif
        !PyCode_Check(prog) &&
        !PyFile_Check(prog)) {
        PyErr_SetString(PyExc_TypeError,
                        "exec: arg 1 must be a string, file, or code object");
        return -1;
    }
    if (!PyDict_Check(globals)) {
        PyErr_SetString(PyExc_TypeError,
                        "exec: arg 2 must be a dictionary or None");
        return -1;
    }
    if (!PyMapping_Check(locals)) {
        PyErr_SetString(PyExc_TypeError,
                        "exec: arg 3 must be a mapping or None");
        return -1;
    }
    if (PyDict_GetItemString(globals, "__builtins__") == NULL)
        PyDict_SetItemString(globals, "__builtins__", f->f_builtins);
    if (PyCode_Check(prog)) {
        if (PyCode_GetNumFree((PyCodeObject *)prog) > 0) {
            PyErr_SetString(PyExc_TypeError,
                            "code object passed to exec may not contain free variables");
            return -1;
        }
        v = PyEval_EvalCode((PyCodeObject *) prog, globals, locals);
    }
    else if (PyFile_Check(prog)) {
        FILE *fp = PyFile_AsFile(prog);
        char *name = PyString_AsString(PyFile_Name(prog));
        PyCompilerFlags cf;
        cf.cf_flags = 0;
        if (PyEval_MergeCompilerFlags(&cf))
            v = PyRun_FileFlags(fp, name, Py_file_input, globals,
                                locals, &cf);
        else
            v = PyRun_File(fp, name, Py_file_input, globals,
                           locals);
    }
    else {
        PyObject *tmp = NULL;
        char *str;
        PyCompilerFlags cf;
        cf.cf_flags = 0;
#ifdef Py_USING_UNICODE
        if (PyUnicode_Check(prog)) {
            tmp = PyUnicode_AsUTF8String(prog);
            if (tmp == NULL)
                return -1;
            prog = tmp;
            cf.cf_flags |= PyCF_SOURCE_IS_UTF8;
        }
#endif
        if (PyString_AsStringAndSize(prog, &str, NULL))
            return -1;
        if (PyEval_MergeCompilerFlags(&cf))
            v = PyRun_StringFlags(str, Py_file_input, globals,
                                  locals, &cf);
        else
            v = PyRun_String(str, Py_file_input, globals, locals);
        Py_XDECREF(tmp);
    }
    if (plain)
        PyFrame_LocalsToFast(f, 0);
    if (v == NULL)
        return -1;
    Py_DECREF(v);
    return 0;
}
static void
format_exc_check_arg(PyObject *exc, char *format_str, PyObject *obj)
{
    char *obj_str;

    if (!obj)
        return;

    obj_str = PyString_AsString(obj);
    if (!obj_str)
        return;

    PyErr_Format(exc, format_str, obj_str);
}
static PyObject *
string_concatenate(PyObject *v, PyObject *w,
                   PyFrameObject *f, unsigned char *next_instr)
{
    /* This function implements 'variable += expr' when both arguments
       are strings. */
    Py_ssize_t v_len = PyString_GET_SIZE(v);
    Py_ssize_t w_len = PyString_GET_SIZE(w);
    Py_ssize_t new_len = v_len + w_len;
    if (new_len < 0) {
        PyErr_SetString(PyExc_OverflowError,
                        "strings are too large to concat");
        return NULL;
    }

    if (v->ob_refcnt == 2) {
        /* In the common case, there are 2 references to the value
         * stored in 'variable' when the += is performed: one on the
         * value stack (in 'v') and one still stored in the
         * 'variable'.  We try to delete the variable now to reduce
         * the refcnt to 1.
         */
        switch (*next_instr) {
        case STORE_FAST:
        {
            int oparg = PEEKARG();
            PyObject **fastlocals = f->f_localsplus;
            if (GETLOCAL(oparg) == v)
                SETLOCAL(oparg, NULL);
            break;
        }
        case STORE_DEREF:
        {
            PyObject **freevars = (f->f_localsplus +
                                   f->f_code->co_nlocals);
            PyObject *c = freevars[PEEKARG()];
            if (PyCell_GET(c) == v)
                PyCell_Set(c, NULL);
            break;
        }
        case STORE_NAME:
        {
            PyObject *names = f->f_code->co_names;
            PyObject *name = GETITEM(names, PEEKARG());
            PyObject *locals = f->f_locals;
            if (PyDict_CheckExact(locals) &&
                PyDict_GetItem(locals, name) == v) {
                if (PyDict_DelItem(locals, name) != 0) {
                    PyErr_Clear();
                }
            }
            break;
        }
        }
    }

    if (v->ob_refcnt == 1 && !PyString_CHECK_INTERNED(v)) {
        /* Now we own the last reference to 'v', so we can resize it
         * in-place.
         */
        if (_PyString_Resize(&v, new_len) != 0) {
            /* XXX if _PyString_Resize() fails, 'v' has been
             * deallocated so it cannot be put back into
             * 'variable'.  The MemoryError is raised when there
             * is no value in 'variable', which might (very
             * remotely) be a cause of incompatibilities.
             */
            return NULL;
        }
        /* copy 'w' into the newly allocated area of 'v' */
        memcpy(PyString_AS_STRING(v) + v_len,
               PyString_AS_STRING(w), w_len);
        return v;
    }
    else {
        /* When in-place resizing is not an option. */
        PyString_Concat(&v, w);
        return v;
    }
}
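/* Hedged usage sketch (hypothetical caller, mirroring the str fast path a
 * BINARY_ADD / INPLACE_ADD handler would take): the eval loop passes
 * next_instr so the helper can peek at the upcoming STORE_* opcode, and
 * the helper consumes the reference to v (resizing it in place when it
 * holds the last reference). */
#if 0
    /* inside the BINARY_ADD / INPLACE_ADD handling, roughly: */
    if (PyString_CheckExact(v) && PyString_CheckExact(w)) {
        x = string_concatenate(v, w, f, next_instr);
        /* string_concatenate() consumed the reference to v */
    }
#endif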
#ifdef DYNAMIC_EXECUTION_PROFILE

static PyObject *
getarray(long a[256])
{
    int i;
    PyObject *l = PyList_New(256);
    if (l == NULL) return NULL;
    for (i = 0; i < 256; i++) {
        PyObject *x = PyInt_FromLong(a[i]);
        if (x == NULL) {
            Py_DECREF(l);
            return NULL;
        }
        PyList_SetItem(l, i, x);
    }
    for (i = 0; i < 256; i++)
        a[i] = 0;
    return l;
}

PyObject *
_Py_GetDXProfile(PyObject *self, PyObject *args)
{
#ifndef DXPAIRS
    return getarray(dxp);
#else
    int i;
    PyObject *l = PyList_New(257);
    if (l == NULL) return NULL;
    for (i = 0; i < 257; i++) {
        PyObject *x = getarray(dxpairs[i]);
        if (x == NULL) {
            Py_DECREF(l);
            return NULL;
        }
        PyList_SetItem(l, i, x);