2 /* Execute compiled code */
5 XXX speed up searching for keywords by using a dictionary
9 /* enable more aggressive intra-module optimizations, where available */
10 #define PY_LOCAL_AGGRESSIVE
15 #include "frameobject.h"
18 #include "structmember.h"
24 #define READ_TIMESTAMP(var)
28 typedef unsigned long long uint64
;
30 #if defined(__ppc__) /* <- Don't know if this is the correct symbol; this
31 section should work for GCC on any PowerPC
32 platform, irrespective of OS.
33 POWER? Who knows :-) */
35 #define READ_TIMESTAMP(var) ppc_getcounter(&var)
38 ppc_getcounter(uint64
*v
)
40 register unsigned long tbu
, tb
, tbu2
;
43 asm volatile ("mftbu %0" : "=r" (tbu
) );
44 asm volatile ("mftb %0" : "=r" (tb
) );
45 asm volatile ("mftbu %0" : "=r" (tbu2
));
46 if (__builtin_expect(tbu
!= tbu2
, 0)) goto loop
;
48 /* The slightly peculiar way of writing the next lines is
49 compiled better by GCC than any other way I tried. */
50 ((long*)(v
))[0] = tbu
;
54 #elif defined(__i386__)
56 /* this is for linux/x86 (and probably any other GCC/x86 combo) */
58 #define READ_TIMESTAMP(val) \
59 __asm__ __volatile__("rdtsc" : "=A" (val))
61 #elif defined(__x86_64__)
63 /* for gcc/x86_64, the "A" constraint in DI mode means *either* rax *or* rdx;
64 not edx:eax as it does for i386. Since rdtsc puts its result in edx:eax
65 even in 64-bit mode, we need to use "a" and "d" for the lower and upper
66 32-bit pieces of the result. */
68 #define READ_TIMESTAMP(val) \
69 __asm__ __volatile__("rdtsc" : \
70 "=a" (((int*)&(val))[0]), "=d" (((int*)&(val))[1]));
75 #error "Don't know how to implement timestamp counter for this architecture"
79 void dump_tsc(int opcode
, int ticked
, uint64 inst0
, uint64 inst1
,
80 uint64 loop0
, uint64 loop1
, uint64 intr0
, uint64 intr1
)
82 uint64 intr
, inst
, loop
;
83 PyThreadState
*tstate
= PyThreadState_Get();
84 if (!tstate
->interp
->tscdump
)
87 inst
= inst1
- inst0
- intr
;
88 loop
= loop1
- loop0
- intr
;
89 fprintf(stderr
, "opcode=%03d t=%d inst=%06lld loop=%06lld\n",
90 opcode
, ticked
, inst
, loop
);
95 /* Turn this on if your compiler chokes on the big switch: */
96 /* #define CASE_TOO_BIG 1 */
99 /* For debugging the interpreter: */
100 #define LLTRACE 1 /* Low-level trace feature */
101 #define CHECKEXC 1 /* Double-check exception checking */
104 typedef PyObject
*(*callproc
)(PyObject
*, PyObject
*, PyObject
*);
106 /* Forward declarations */
108 static PyObject
* call_function(PyObject
***, int, uint64
*, uint64
*);
110 static PyObject
* call_function(PyObject
***, int);
112 static PyObject
* fast_function(PyObject
*, PyObject
***, int, int, int);
113 static PyObject
* do_call(PyObject
*, PyObject
***, int, int);
114 static PyObject
* ext_do_call(PyObject
*, PyObject
***, int, int, int);
115 static PyObject
* update_keyword_args(PyObject
*, int, PyObject
***,
117 static PyObject
* update_star_args(int, int, PyObject
*, PyObject
***);
118 static PyObject
* load_args(PyObject
***, int);
119 #define CALL_FLAG_VAR 1
120 #define CALL_FLAG_KW 2
124 static int prtrace(PyObject
*, char *);
126 static int call_trace(Py_tracefunc
, PyObject
*, PyFrameObject
*,
128 static int call_trace_protected(Py_tracefunc
, PyObject
*,
129 PyFrameObject
*, int, PyObject
*);
130 static void call_exc_trace(Py_tracefunc
, PyObject
*, PyFrameObject
*);
131 static int maybe_call_line_trace(Py_tracefunc
, PyObject
*,
132 PyFrameObject
*, int *, int *, int *);
134 static PyObject
* apply_slice(PyObject
*, PyObject
*, PyObject
*);
135 static int assign_slice(PyObject
*, PyObject
*,
136 PyObject
*, PyObject
*);
137 static PyObject
* cmp_outcome(int, PyObject
*, PyObject
*);
138 static PyObject
* import_from(PyObject
*, PyObject
*);
139 static int import_all_from(PyObject
*, PyObject
*);
140 static PyObject
* build_class(PyObject
*, PyObject
*, PyObject
*);
141 static int exec_statement(PyFrameObject
*,
142 PyObject
*, PyObject
*, PyObject
*);
143 static void set_exc_info(PyThreadState
*, PyObject
*, PyObject
*, PyObject
*);
144 static void reset_exc_info(PyThreadState
*);
145 static void format_exc_check_arg(PyObject
*, char *, PyObject
*);
146 static PyObject
* string_concatenate(PyObject
*, PyObject
*,
147 PyFrameObject
*, unsigned char *);
148 static PyObject
* kwd_as_string(PyObject
*);
149 static PyObject
* special_lookup(PyObject
*, char *, PyObject
**);
151 #define NAME_ERROR_MSG \
152 "name '%.200s' is not defined"
153 #define GLOBAL_NAME_ERROR_MSG \
154 "global name '%.200s' is not defined"
155 #define UNBOUNDLOCAL_ERROR_MSG \
156 "local variable '%.200s' referenced before assignment"
157 #define UNBOUNDFREE_ERROR_MSG \
158 "free variable '%.200s' referenced before assignment" \
159 " in enclosing scope"
161 /* Dynamic execution profile */
162 #ifdef DYNAMIC_EXECUTION_PROFILE
164 static long dxpairs
[257][256];
165 #define dxp dxpairs[256]
167 static long dxp
[256];
171 /* Function call profile */
174 static int pcall
[PCALL_NUM
];
177 #define PCALL_FUNCTION 1
178 #define PCALL_FAST_FUNCTION 2
179 #define PCALL_FASTER_FUNCTION 3
180 #define PCALL_METHOD 4
181 #define PCALL_BOUND_METHOD 5
182 #define PCALL_CFUNCTION 6
184 #define PCALL_GENERATOR 8
185 #define PCALL_OTHER 9
188 /* Notes about the statistics
192 FAST_FUNCTION means no argument tuple needs to be created.
193 FASTER_FUNCTION means that the fast-path frame setup code is used.
195 If there is a method call where the call can be optimized by changing
196 the argument tuple and calling the function directly, it gets recorded
199 As a result, the relationship among the statistics appears to be
200 PCALL_ALL == PCALL_FUNCTION + PCALL_METHOD - PCALL_BOUND_METHOD +
201 PCALL_CFUNCTION + PCALL_TYPE + PCALL_GENERATOR + PCALL_OTHER
202 PCALL_FUNCTION > PCALL_FAST_FUNCTION > PCALL_FASTER_FUNCTION
203 PCALL_METHOD > PCALL_BOUND_METHOD
206 #define PCALL(POS) pcall[POS]++
209 PyEval_GetCallStats(PyObject
*self
)
211 return Py_BuildValue("iiiiiiiiiii",
212 pcall
[0], pcall
[1], pcall
[2], pcall
[3],
213 pcall
[4], pcall
[5], pcall
[6], pcall
[7],
214 pcall
[8], pcall
[9], pcall
[10]);
220 PyEval_GetCallStats(PyObject
*self
)
233 #include "pythread.h"
235 static PyThread_type_lock interpreter_lock
= 0; /* This is the GIL */
236 static PyThread_type_lock pending_lock
= 0; /* for pending calls */
237 static long main_thread
= 0;
240 PyEval_ThreadsInitialized(void)
242 return interpreter_lock
!= 0;
246 PyEval_InitThreads(void)
248 if (interpreter_lock
)
250 interpreter_lock
= PyThread_allocate_lock();
251 PyThread_acquire_lock(interpreter_lock
, 1);
252 main_thread
= PyThread_get_thread_ident();
256 PyEval_AcquireLock(void)
258 PyThread_acquire_lock(interpreter_lock
, 1);
262 PyEval_ReleaseLock(void)
264 PyThread_release_lock(interpreter_lock
);
268 PyEval_AcquireThread(PyThreadState
*tstate
)
271 Py_FatalError("PyEval_AcquireThread: NULL new thread state");
272 /* Check someone has called PyEval_InitThreads() to create the lock */
273 assert(interpreter_lock
);
274 PyThread_acquire_lock(interpreter_lock
, 1);
275 if (PyThreadState_Swap(tstate
) != NULL
)
277 "PyEval_AcquireThread: non-NULL old thread state");
281 PyEval_ReleaseThread(PyThreadState
*tstate
)
284 Py_FatalError("PyEval_ReleaseThread: NULL thread state");
285 if (PyThreadState_Swap(NULL
) != tstate
)
286 Py_FatalError("PyEval_ReleaseThread: wrong thread state");
287 PyThread_release_lock(interpreter_lock
);
290 /* This function is called from PyOS_AfterFork to ensure that newly
291 created child processes don't hold locks referring to threads which
292 are not running in the child process. (This could also be done using
293 pthread_atfork mechanism, at least for the pthreads implementation.) */
296 PyEval_ReInitThreads(void)
298 PyObject
*threading
, *result
;
299 PyThreadState
*tstate
;
301 if (!interpreter_lock
)
303 /*XXX Can't use PyThread_free_lock here because it does too
304 much error-checking. Doing this cleanly would require
305 adding a new function to each thread_*.h. Instead, just
306 create a new lock and waste a little bit of memory */
307 interpreter_lock
= PyThread_allocate_lock();
308 pending_lock
= PyThread_allocate_lock();
309 PyThread_acquire_lock(interpreter_lock
, 1);
310 main_thread
= PyThread_get_thread_ident();
312 /* Update the threading module with the new state.
314 tstate
= PyThreadState_GET();
315 threading
= PyMapping_GetItemString(tstate
->interp
->modules
,
317 if (threading
== NULL
) {
318 /* threading not imported */
322 result
= PyObject_CallMethod(threading
, "_after_fork", NULL
);
324 PyErr_WriteUnraisable(threading
);
327 Py_DECREF(threading
);
331 /* Functions save_thread and restore_thread are always defined so
332 dynamically loaded modules needn't be compiled separately for use
333 with and without threads: */
336 PyEval_SaveThread(void)
338 PyThreadState
*tstate
= PyThreadState_Swap(NULL
);
340 Py_FatalError("PyEval_SaveThread: NULL tstate");
342 if (interpreter_lock
)
343 PyThread_release_lock(interpreter_lock
);
349 PyEval_RestoreThread(PyThreadState
*tstate
)
352 Py_FatalError("PyEval_RestoreThread: NULL tstate");
354 if (interpreter_lock
) {
356 PyThread_acquire_lock(interpreter_lock
, 1);
360 PyThreadState_Swap(tstate
);
364 /* Mechanism whereby asynchronously executing callbacks (e.g. UNIX
365 signal handlers or Mac I/O completion routines) can schedule calls
366 to a function to be called synchronously.
367 The synchronous function is called with one void* argument.
368 It should return 0 for success or -1 for failure -- failure should
369 be accompanied by an exception.
371 If registry succeeds, the registry function returns 0; if it fails
372 (e.g. due to too many pending calls) it returns -1 (without setting
373 an exception condition).
375 Note that because registry may occur from within signal handlers,
376 or other asynchronous events, calling malloc() is unsafe!
379 Any thread can schedule pending calls, but only the main thread
381 There is no facility to schedule calls to a particular thread, but
382 that should be easy to change, should that ever be required. In
383 that case, the static variables here should go into the python
390 /* The WITH_THREAD implementation is thread-safe. It allows
391 scheduling to be made from any thread, and even from an executing
395 #define NPENDINGCALLS 32
399 } pendingcalls
[NPENDINGCALLS
];
400 static int pendingfirst
= 0;
401 static int pendinglast
= 0;
402 static volatile int pendingcalls_to_do
= 1; /* trigger initialization of lock */
403 static char pendingbusy
= 0;
406 Py_AddPendingCall(int (*func
)(void *), void *arg
)
409 PyThread_type_lock lock
= pending_lock
;
411 /* try a few times for the lock. Since this mechanism is used
412 * for signal handling (on the main thread), there is a (slim)
413 * chance that a signal is delivered on the same thread while we
414 * hold the lock during the Py_MakePendingCalls() function.
415 * This avoids a deadlock in that case.
416 * Note that signals can be delivered on any thread. In particular,
417 * on Windows, a SIGINT is delivered on a system-created worker
419 * We also check for lock being NULL, in the unlikely case that
420 * this function is called before any bytecode evaluation takes place.
423 for (i
= 0; i
<100; i
++) {
424 if (PyThread_acquire_lock(lock
, NOWAIT_LOCK
))
432 j
= (i
+ 1) % NPENDINGCALLS
;
433 if (j
== pendingfirst
) {
434 result
= -1; /* Queue full */
436 pendingcalls
[i
].func
= func
;
437 pendingcalls
[i
].arg
= arg
;
440 /* signal main loop */
442 pendingcalls_to_do
= 1;
444 PyThread_release_lock(lock
);
449 Py_MakePendingCalls(void)
455 /* initial allocation of the lock */
456 pending_lock
= PyThread_allocate_lock();
457 if (pending_lock
== NULL
)
461 /* only service pending calls on main thread */
462 if (main_thread
&& PyThread_get_thread_ident() != main_thread
)
464 /* don't perform recursive pending calls */
468 /* perform a bounded number of calls, in case of recursion */
469 for (i
=0; i
<NPENDINGCALLS
; i
++) {
474 /* pop one item off the queue while holding the lock */
475 PyThread_acquire_lock(pending_lock
, WAIT_LOCK
);
477 if (j
== pendinglast
) {
478 func
= NULL
; /* Queue empty */
480 func
= pendingcalls
[j
].func
;
481 arg
= pendingcalls
[j
].arg
;
482 pendingfirst
= (j
+ 1) % NPENDINGCALLS
;
484 pendingcalls_to_do
= pendingfirst
!= pendinglast
;
485 PyThread_release_lock(pending_lock
);
486 /* having released the lock, perform the callback */
497 #else /* if ! defined WITH_THREAD */
500 WARNING! ASYNCHRONOUSLY EXECUTING CODE!
501 This code is used for signal handling in python that isn't built
503 Don't use this implementation when Py_AddPendingCalls() can happen
504 on a different thread!
506 There are two possible race conditions:
507 (1) nested asynchronous calls to Py_AddPendingCall()
508 (2) AddPendingCall() calls made while pending calls are being processed.
510 (1) is very unlikely because typically signal delivery
511 is blocked during signal handling. So it should be impossible.
512 (2) is a real possibility.
513 The current code is safe against (2), but not against (1).
514 The safety against (2) is derived from the fact that only one
515 thread is present, interrupted by signals, and that the critical
516 section is protected with the "busy" variable. On Windows, which
517 delivers SIGINT on a system thread, this does not hold and therefore
518 Windows really shouldn't use this version.
519 The two threads could theoretically wiggle around the "busy" variable.
522 #define NPENDINGCALLS 32
526 } pendingcalls
[NPENDINGCALLS
];
527 static volatile int pendingfirst
= 0;
528 static volatile int pendinglast
= 0;
529 static volatile int pendingcalls_to_do
= 0;
532 Py_AddPendingCall(int (*func
)(void *), void *arg
)
534 static volatile int busy
= 0;
536 /* XXX Begin critical section */
541 j
= (i
+ 1) % NPENDINGCALLS
;
542 if (j
== pendingfirst
) {
544 return -1; /* Queue full */
546 pendingcalls
[i
].func
= func
;
547 pendingcalls
[i
].arg
= arg
;
551 pendingcalls_to_do
= 1; /* Signal main loop */
553 /* XXX End critical section */
558 Py_MakePendingCalls(void)
564 pendingcalls_to_do
= 0;
570 if (i
== pendinglast
)
571 break; /* Queue empty */
572 func
= pendingcalls
[i
].func
;
573 arg
= pendingcalls
[i
].arg
;
574 pendingfirst
= (i
+ 1) % NPENDINGCALLS
;
577 pendingcalls_to_do
= 1; /* We're not done yet */
585 #endif /* WITH_THREAD */
588 /* The interpreter's recursion limit */
590 #ifndef Py_DEFAULT_RECURSION_LIMIT
591 #define Py_DEFAULT_RECURSION_LIMIT 1000
593 static int recursion_limit
= Py_DEFAULT_RECURSION_LIMIT
;
594 int _Py_CheckRecursionLimit
= Py_DEFAULT_RECURSION_LIMIT
;
597 Py_GetRecursionLimit(void)
599 return recursion_limit
;
603 Py_SetRecursionLimit(int new_limit
)
605 recursion_limit
= new_limit
;
606 _Py_CheckRecursionLimit
= recursion_limit
;
609 /* the macro Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall()
610 if the recursion_depth reaches _Py_CheckRecursionLimit.
611 If USE_STACKCHECK, the macro decrements _Py_CheckRecursionLimit
612 to guarantee that _Py_CheckRecursiveCall() is regularly called.
613 Without USE_STACKCHECK, there is no need for this. */
615 _Py_CheckRecursiveCall(char *where
)
617 PyThreadState
*tstate
= PyThreadState_GET();
619 #ifdef USE_STACKCHECK
620 if (PyOS_CheckStack()) {
621 --tstate
->recursion_depth
;
622 PyErr_SetString(PyExc_MemoryError
, "Stack overflow");
626 if (tstate
->recursion_depth
> recursion_limit
) {
627 --tstate
->recursion_depth
;
628 PyErr_Format(PyExc_RuntimeError
,
629 "maximum recursion depth exceeded%s",
633 _Py_CheckRecursionLimit
= recursion_limit
;
637 /* Status code for main loop (reason for stack unwind) */
639 WHY_NOT
= 0x0001, /* No error */
640 WHY_EXCEPTION
= 0x0002, /* Exception occurred */
641 WHY_RERAISE
= 0x0004, /* Exception re-raised by 'finally' */
642 WHY_RETURN
= 0x0008, /* 'return' statement */
643 WHY_BREAK
= 0x0010, /* 'break' statement */
644 WHY_CONTINUE
= 0x0020, /* 'continue' statement */
645 WHY_YIELD
= 0x0040 /* 'yield' operator */
648 static enum why_code
do_raise(PyObject
*, PyObject
*, PyObject
*);
649 static int unpack_iterable(PyObject
*, int, PyObject
**);
651 /* Records whether tracing is on for any thread. Counts the number of
652 threads for which tstate->c_tracefunc is non-NULL, so if the value
653 is 0, we know we don't have to check this thread's c_tracefunc.
654 This speeds up the if statement in PyEval_EvalFrameEx() after
656 static int _Py_TracingPossible
= 0;
658 /* for manipulating the thread switch and periodic "stuff" - used to be
659 per thread, now just a pair o' globals */
660 int _Py_CheckInterval
= 100;
661 volatile int _Py_Ticker
= 0; /* so that we hit a "tick" first thing */
664 PyEval_EvalCode(PyCodeObject
*co
, PyObject
*globals
, PyObject
*locals
)
666 return PyEval_EvalCodeEx(co
,
668 (PyObject
**)NULL
, 0,
669 (PyObject
**)NULL
, 0,
670 (PyObject
**)NULL
, 0,
675 /* Interpreter main loop */
678 PyEval_EvalFrame(PyFrameObject
*f
) {
679 /* This is for backward compatibility with extension modules that
680 used this API; core interpreter code should call
681 PyEval_EvalFrameEx() */
682 return PyEval_EvalFrameEx(f
, 0);
686 PyEval_EvalFrameEx(PyFrameObject
*f
, int throwflag
)
691 register PyObject
**stack_pointer
; /* Next free slot in value stack */
692 register unsigned char *next_instr
;
693 register int opcode
; /* Current opcode */
694 register int oparg
; /* Current opcode argument, if any */
695 register enum why_code why
; /* Reason for block stack unwind */
696 register int err
; /* Error status -- nonzero if error */
697 register PyObject
*x
; /* Result object -- NULL if error */
698 register PyObject
*v
; /* Temporary objects popped off stack */
699 register PyObject
*w
;
700 register PyObject
*u
;
701 register PyObject
*t
;
702 register PyObject
*stream
= NULL
; /* for PRINT opcodes */
703 register PyObject
**fastlocals
, **freevars
;
704 PyObject
*retval
= NULL
; /* Return value */
705 PyThreadState
*tstate
= PyThreadState_GET();
708 /* when tracing we set things up so that
710 not (instr_lb <= current_bytecode_offset < instr_ub)
712 is true when the line being executed has changed. The
713 initial values are such as to make this false the first
714 time it is tested. */
715 int instr_ub
= -1, instr_lb
= 0, instr_prev
= -1;
717 unsigned char *first_instr
;
720 #if defined(Py_DEBUG) || defined(LLTRACE)
721 /* Make it easier to find out where we are with a debugger */
725 /* Tuple access macros */
728 #define GETITEM(v, i) PyTuple_GET_ITEM((PyTupleObject *)(v), (i))
730 #define GETITEM(v, i) PyTuple_GetItem((v), (i))
734 /* Use Pentium timestamp counter to mark certain events:
735 inst0 -- beginning of switch statement for opcode dispatch
736 inst1 -- end of switch statement (may be skipped)
737 loop0 -- the top of the mainloop
738 loop1 -- place where control returns again to top of mainloop
740 intr1 -- beginning of long interruption
741 intr2 -- end of long interruption
743 Many opcodes call out to helper C functions. In some cases, the
744 time in those functions should be counted towards the time for the
745 opcode, but not in all cases. For example, a CALL_FUNCTION opcode
746 calls another Python function; there's no point in charge all the
747 bytecode executed by the called function to the caller.
749 It's hard to make a useful judgement statically. In the presence
750 of operator overloading, it's impossible to tell if a call will
751 execute new Python code or not.
753 It's a case-by-case judgement. I'll use intr1 for the following
759 CALL_FUNCTION (and friends)
762 uint64 inst0
, inst1
, loop0
, loop1
, intr0
= 0, intr1
= 0;
765 READ_TIMESTAMP(inst0
);
766 READ_TIMESTAMP(inst1
);
767 READ_TIMESTAMP(loop0
);
768 READ_TIMESTAMP(loop1
);
770 /* shut up the compiler */
774 /* Code access macros */
776 #define INSTR_OFFSET() ((int)(next_instr - first_instr))
777 #define NEXTOP() (*next_instr++)
778 #define NEXTARG() (next_instr += 2, (next_instr[-1]<<8) + next_instr[-2])
779 #define PEEKARG() ((next_instr[2]<<8) + next_instr[1])
780 #define JUMPTO(x) (next_instr = first_instr + (x))
781 #define JUMPBY(x) (next_instr += (x))
783 /* OpCode prediction macros
784 Some opcodes tend to come in pairs thus making it possible to
785 predict the second code when the first is run. For example,
786 GET_ITER is often followed by FOR_ITER. And FOR_ITER is often
787 followed by STORE_FAST or UNPACK_SEQUENCE.
789 Verifying the prediction costs a single high-speed test of a register
790 variable against a constant. If the pairing was good, then the
791 processor's own internal branch predication has a high likelihood of
792 success, resulting in a nearly zero-overhead transition to the
793 next opcode. A successful prediction saves a trip through the eval-loop
794 including its two unpredictable branches, the HAS_ARG test and the
795 switch-case. Combined with the processor's internal branch prediction,
796 a successful PREDICT has the effect of making the two opcodes run as if
797 they were a single new opcode with the bodies combined.
799 If collecting opcode statistics, your choices are to either keep the
800 predictions turned-on and interpret the results as if some opcodes
801 had been combined or turn-off predictions so that the opcode frequency
802 counter updates for both opcodes.
805 #ifdef DYNAMIC_EXECUTION_PROFILE
806 #define PREDICT(op) if (0) goto PRED_##op
808 #define PREDICT(op) if (*next_instr == op) goto PRED_##op
811 #define PREDICTED(op) PRED_##op: next_instr++
812 #define PREDICTED_WITH_ARG(op) PRED_##op: oparg = PEEKARG(); next_instr += 3
814 /* Stack manipulation macros */
816 /* The stack can grow at most MAXINT deep, as co_nlocals and
817 co_stacksize are ints. */
818 #define STACK_LEVEL() ((int)(stack_pointer - f->f_valuestack))
819 #define EMPTY() (STACK_LEVEL() == 0)
820 #define TOP() (stack_pointer[-1])
821 #define SECOND() (stack_pointer[-2])
822 #define THIRD() (stack_pointer[-3])
823 #define FOURTH() (stack_pointer[-4])
824 #define PEEK(n) (stack_pointer[-(n)])
825 #define SET_TOP(v) (stack_pointer[-1] = (v))
826 #define SET_SECOND(v) (stack_pointer[-2] = (v))
827 #define SET_THIRD(v) (stack_pointer[-3] = (v))
828 #define SET_FOURTH(v) (stack_pointer[-4] = (v))
829 #define SET_VALUE(n, v) (stack_pointer[-(n)] = (v))
830 #define BASIC_STACKADJ(n) (stack_pointer += n)
831 #define BASIC_PUSH(v) (*stack_pointer++ = (v))
832 #define BASIC_POP() (*--stack_pointer)
835 #define PUSH(v) { (void)(BASIC_PUSH(v), \
836 lltrace && prtrace(TOP(), "push")); \
837 assert(STACK_LEVEL() <= co->co_stacksize); }
838 #define POP() ((void)(lltrace && prtrace(TOP(), "pop")), \
840 #define STACKADJ(n) { (void)(BASIC_STACKADJ(n), \
841 lltrace && prtrace(TOP(), "stackadj")); \
842 assert(STACK_LEVEL() <= co->co_stacksize); }
843 #define EXT_POP(STACK_POINTER) ((void)(lltrace && \
844 prtrace((STACK_POINTER)[-1], "ext_pop")), \
847 #define PUSH(v) BASIC_PUSH(v)
848 #define POP() BASIC_POP()
849 #define STACKADJ(n) BASIC_STACKADJ(n)
850 #define EXT_POP(STACK_POINTER) (*--(STACK_POINTER))
853 /* Local variable macros */
855 #define GETLOCAL(i) (fastlocals[i])
857 /* The SETLOCAL() macro must not DECREF the local variable in-place and
858 then store the new value; it must copy the old value to a temporary
859 value, then store the new value, and then DECREF the temporary value.
860 This is because it is possible that during the DECREF the frame is
861 accessed by other code (e.g. a __del__ method or gc.collect()) and the
862 variable would be pointing to already-freed memory. */
863 #define SETLOCAL(i, value) do { PyObject *tmp = GETLOCAL(i); \
864 GETLOCAL(i) = value; \
865 Py_XDECREF(tmp); } while (0)
873 if (Py_EnterRecursiveCall(""))
878 if (tstate
->use_tracing
) {
879 if (tstate
->c_tracefunc
!= NULL
) {
880 /* tstate->c_tracefunc, if defined, is a
881 function that will be called on *every* entry
882 to a code block. Its return value, if not
883 None, is a function that will be called at
884 the start of each executed line of code.
885 (Actually, the function must return itself
886 in order to continue tracing.) The trace
887 functions are called with three arguments:
888 a pointer to the current frame, a string
889 indicating why the function is called, and
890 an argument which depends on the situation.
891 The global trace function is also called
892 whenever an exception is detected. */
893 if (call_trace_protected(tstate
->c_tracefunc
,
895 f
, PyTrace_CALL
, Py_None
)) {
896 /* Trace function raised an error */
897 goto exit_eval_frame
;
900 if (tstate
->c_profilefunc
!= NULL
) {
901 /* Similar for c_profilefunc, except it needn't
902 return itself and isn't called for "line" events */
903 if (call_trace_protected(tstate
->c_profilefunc
,
904 tstate
->c_profileobj
,
905 f
, PyTrace_CALL
, Py_None
)) {
906 /* Profile function raised an error */
907 goto exit_eval_frame
;
913 names
= co
->co_names
;
914 consts
= co
->co_consts
;
915 fastlocals
= f
->f_localsplus
;
916 freevars
= f
->f_localsplus
+ co
->co_nlocals
;
917 first_instr
= (unsigned char*) PyString_AS_STRING(co
->co_code
);
918 /* An explanation is in order for the next line.
920 f->f_lasti now refers to the index of the last instruction
921 executed. You might think this was obvious from the name, but
922 this wasn't always true before 2.3! PyFrame_New now sets
923 f->f_lasti to -1 (i.e. the index *before* the first instruction)
924 and YIELD_VALUE doesn't fiddle with f_lasti any more. So this
927 When the PREDICT() macros are enabled, some opcode pairs follow in
928 direct succession without updating f->f_lasti. A successful
929 prediction effectively links the two codes together as if they
930 were a single new opcode; accordingly,f->f_lasti will point to
931 the first code in the pair (for instance, GET_ITER followed by
932 FOR_ITER is effectively a single opcode and f->f_lasti will point
933 at to the beginning of the combined pair.)
935 next_instr
= first_instr
+ f
->f_lasti
+ 1;
936 stack_pointer
= f
->f_stacktop
;
937 assert(stack_pointer
!= NULL
);
938 f
->f_stacktop
= NULL
; /* remains NULL unless yield suspends frame */
941 lltrace
= PyDict_GetItemString(f
->f_globals
, "__lltrace__") != NULL
;
943 #if defined(Py_DEBUG) || defined(LLTRACE)
944 filename
= PyString_AsString(co
->co_filename
);
949 x
= Py_None
; /* Not a reference, just anything non-NULL */
952 if (throwflag
) { /* support for generator.throw() */
960 /* Almost surely, the opcode executed a break
961 or a continue, preventing inst1 from being set
962 on the way out of the loop.
964 READ_TIMESTAMP(inst1
);
967 dump_tsc(opcode
, ticked
, inst0
, inst1
, loop0
, loop1
,
973 READ_TIMESTAMP(loop0
);
975 assert(stack_pointer
>= f
->f_valuestack
); /* else underflow */
976 assert(STACK_LEVEL() <= co
->co_stacksize
); /* else overflow */
978 /* Do periodic things. Doing this every time through
979 the loop would add too much overhead, so we do it
980 only every Nth instruction. We also do it if
981 ``pendingcalls_to_do'' is set, i.e. when an asynchronous
982 event needs attention (e.g. a signal handler or
983 async I/O handler); see Py_AddPendingCall() and
984 Py_MakePendingCalls() above. */
986 if (--_Py_Ticker
< 0) {
987 if (*next_instr
== SETUP_FINALLY
) {
988 /* Make the last opcode before
989 a try: finally: block uninterruptable. */
990 goto fast_next_opcode
;
992 _Py_Ticker
= _Py_CheckInterval
;
993 tstate
->tick_counter
++;
997 if (pendingcalls_to_do
) {
998 if (Py_MakePendingCalls() < 0) {
1002 if (pendingcalls_to_do
)
1003 /* MakePendingCalls() didn't succeed.
1004 Force early re-execution of this
1005 "periodic" code, possibly after
1010 if (interpreter_lock
) {
1011 /* Give another thread a chance */
1013 if (PyThreadState_Swap(NULL
) != tstate
)
1014 Py_FatalError("ceval: tstate mix-up");
1015 PyThread_release_lock(interpreter_lock
);
1017 /* Other threads may run now */
1019 PyThread_acquire_lock(interpreter_lock
, 1);
1020 if (PyThreadState_Swap(tstate
) != NULL
)
1021 Py_FatalError("ceval: orphan tstate");
1023 /* Check for thread interrupts */
1025 if (tstate
->async_exc
!= NULL
) {
1026 x
= tstate
->async_exc
;
1027 tstate
->async_exc
= NULL
;
1030 why
= WHY_EXCEPTION
;
1038 f
->f_lasti
= INSTR_OFFSET();
1040 /* line-by-line tracing support */
1042 if (_Py_TracingPossible
&&
1043 tstate
->c_tracefunc
!= NULL
&& !tstate
->tracing
) {
1044 /* see maybe_call_line_trace
1045 for expository comments */
1046 f
->f_stacktop
= stack_pointer
;
1048 err
= maybe_call_line_trace(tstate
->c_tracefunc
,
1050 f
, &instr_lb
, &instr_ub
,
1052 /* Reload possibly changed frame fields */
1054 if (f
->f_stacktop
!= NULL
) {
1055 stack_pointer
= f
->f_stacktop
;
1056 f
->f_stacktop
= NULL
;
1059 /* trace function raised an exception */
1064 /* Extract opcode and argument */
1067 oparg
= 0; /* allows oparg to be stored in a register because
1068 it doesn't have to be remembered across a full loop */
1069 if (HAS_ARG(opcode
))
1072 #ifdef DYNAMIC_EXECUTION_PROFILE
1074 dxpairs
[lastopcode
][opcode
]++;
1075 lastopcode
= opcode
;
1081 /* Instruction tracing */
1084 if (HAS_ARG(opcode
)) {
1085 printf("%d: %d, %d\n",
1086 f
->f_lasti
, opcode
, oparg
);
1090 f
->f_lasti
, opcode
);
1095 /* Main switch on opcode */
1096 READ_TIMESTAMP(inst0
);
1101 It is essential that any operation that fails sets either
1102 x to NULL, err to nonzero, or why to anything but WHY_NOT,
1103 and that no operation that succeeds does this! */
1105 /* case STOP_CODE: this is an error! */
1108 goto fast_next_opcode
;
1111 x
= GETLOCAL(oparg
);
1115 goto fast_next_opcode
;
1117 format_exc_check_arg(PyExc_UnboundLocalError
,
1118 UNBOUNDLOCAL_ERROR_MSG
,
1119 PyTuple_GetItem(co
->co_varnames
, oparg
));
1123 x
= GETITEM(consts
, oparg
);
1126 goto fast_next_opcode
;
1128 PREDICTED_WITH_ARG(STORE_FAST
);
1132 goto fast_next_opcode
;
1137 goto fast_next_opcode
;
1144 goto fast_next_opcode
;
1153 goto fast_next_opcode
;
1164 goto fast_next_opcode
;
1170 goto fast_next_opcode
;
1181 goto fast_next_opcode
;
1182 } else if (oparg
== 3) {
1193 goto fast_next_opcode
;
1195 Py_FatalError("invalid argument to DUP_TOPX"
1196 " (bytecode corruption?)");
1197 /* Never returns, so don't bother to set why. */
1200 case UNARY_POSITIVE
:
1202 x
= PyNumber_Positive(v
);
1205 if (x
!= NULL
) continue;
1208 case UNARY_NEGATIVE
:
1210 x
= PyNumber_Negative(v
);
1213 if (x
!= NULL
) continue;
1218 err
= PyObject_IsTrue(v
);
1226 Py_INCREF(Py_False
);
1236 x
= PyObject_Repr(v
);
1239 if (x
!= NULL
) continue;
1244 x
= PyNumber_Invert(v
);
1247 if (x
!= NULL
) continue;
1253 x
= PyNumber_Power(v
, w
, Py_None
);
1257 if (x
!= NULL
) continue;
1260 case BINARY_MULTIPLY
:
1263 x
= PyNumber_Multiply(v
, w
);
1267 if (x
!= NULL
) continue;
1271 if (!_Py_QnewFlag
) {
1274 x
= PyNumber_Divide(v
, w
);
1278 if (x
!= NULL
) continue;
1281 /* -Qnew is in effect: fall through to
1282 BINARY_TRUE_DIVIDE */
1283 case BINARY_TRUE_DIVIDE
:
1286 x
= PyNumber_TrueDivide(v
, w
);
1290 if (x
!= NULL
) continue;
1293 case BINARY_FLOOR_DIVIDE
:
1296 x
= PyNumber_FloorDivide(v
, w
);
1300 if (x
!= NULL
) continue;
1306 if (PyString_CheckExact(v
))
1307 x
= PyString_Format(v
, w
);
1309 x
= PyNumber_Remainder(v
, w
);
1313 if (x
!= NULL
) continue;
1319 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1320 /* INLINE: int + int */
1321 register long a
, b
, i
;
1322 a
= PyInt_AS_LONG(v
);
1323 b
= PyInt_AS_LONG(w
);
1325 if ((i
^a
) < 0 && (i
^b
) < 0)
1327 x
= PyInt_FromLong(i
);
1329 else if (PyString_CheckExact(v
) &&
1330 PyString_CheckExact(w
)) {
1331 x
= string_concatenate(v
, w
, f
, next_instr
);
1332 /* string_concatenate consumed the ref to v */
1333 goto skip_decref_vx
;
1337 x
= PyNumber_Add(v
, w
);
1343 if (x
!= NULL
) continue;
1346 case BINARY_SUBTRACT
:
1349 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1350 /* INLINE: int - int */
1351 register long a
, b
, i
;
1352 a
= PyInt_AS_LONG(v
);
1353 b
= PyInt_AS_LONG(w
);
1355 if ((i
^a
) < 0 && (i
^~b
) < 0)
1357 x
= PyInt_FromLong(i
);
1361 x
= PyNumber_Subtract(v
, w
);
1366 if (x
!= NULL
) continue;
1372 if (PyList_CheckExact(v
) && PyInt_CheckExact(w
)) {
1373 /* INLINE: list[int] */
1374 Py_ssize_t i
= PyInt_AsSsize_t(w
);
1376 i
+= PyList_GET_SIZE(v
);
1377 if (i
>= 0 && i
< PyList_GET_SIZE(v
)) {
1378 x
= PyList_GET_ITEM(v
, i
);
1386 x
= PyObject_GetItem(v
, w
);
1390 if (x
!= NULL
) continue;
1396 x
= PyNumber_Lshift(v
, w
);
1400 if (x
!= NULL
) continue;
1406 x
= PyNumber_Rshift(v
, w
);
1410 if (x
!= NULL
) continue;
1416 x
= PyNumber_And(v
, w
);
1420 if (x
!= NULL
) continue;
1426 x
= PyNumber_Xor(v
, w
);
1430 if (x
!= NULL
) continue;
1436 x
= PyNumber_Or(v
, w
);
1440 if (x
!= NULL
) continue;
1446 err
= PyList_Append(v
, w
);
1449 PREDICT(JUMP_ABSOLUTE
);
1457 x
= PyNumber_InPlacePower(v
, w
, Py_None
);
1461 if (x
!= NULL
) continue;
1464 case INPLACE_MULTIPLY
:
1467 x
= PyNumber_InPlaceMultiply(v
, w
);
1471 if (x
!= NULL
) continue;
1474 case INPLACE_DIVIDE
:
1475 if (!_Py_QnewFlag
) {
1478 x
= PyNumber_InPlaceDivide(v
, w
);
1482 if (x
!= NULL
) continue;
1485 /* -Qnew is in effect: fall through to
1486 INPLACE_TRUE_DIVIDE */
1487 case INPLACE_TRUE_DIVIDE
:
1490 x
= PyNumber_InPlaceTrueDivide(v
, w
);
1494 if (x
!= NULL
) continue;
1497 case INPLACE_FLOOR_DIVIDE
:
1500 x
= PyNumber_InPlaceFloorDivide(v
, w
);
1504 if (x
!= NULL
) continue;
1507 case INPLACE_MODULO
:
1510 x
= PyNumber_InPlaceRemainder(v
, w
);
1514 if (x
!= NULL
) continue;
1520 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1521 /* INLINE: int + int */
1522 register long a
, b
, i
;
1523 a
= PyInt_AS_LONG(v
);
1524 b
= PyInt_AS_LONG(w
);
1526 if ((i
^a
) < 0 && (i
^b
) < 0)
1528 x
= PyInt_FromLong(i
);
1530 else if (PyString_CheckExact(v
) &&
1531 PyString_CheckExact(w
)) {
1532 x
= string_concatenate(v
, w
, f
, next_instr
);
1533 /* string_concatenate consumed the ref to v */
1538 x
= PyNumber_InPlaceAdd(v
, w
);
1544 if (x
!= NULL
) continue;
1547 case INPLACE_SUBTRACT
:
1550 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1551 /* INLINE: int - int */
1552 register long a
, b
, i
;
1553 a
= PyInt_AS_LONG(v
);
1554 b
= PyInt_AS_LONG(w
);
1556 if ((i
^a
) < 0 && (i
^~b
) < 0)
1558 x
= PyInt_FromLong(i
);
1562 x
= PyNumber_InPlaceSubtract(v
, w
);
1567 if (x
!= NULL
) continue;
1570 case INPLACE_LSHIFT
:
1573 x
= PyNumber_InPlaceLshift(v
, w
);
1577 if (x
!= NULL
) continue;
1580 case INPLACE_RSHIFT
:
1583 x
= PyNumber_InPlaceRshift(v
, w
);
1587 if (x
!= NULL
) continue;
1593 x
= PyNumber_InPlaceAnd(v
, w
);
1597 if (x
!= NULL
) continue;
1603 x
= PyNumber_InPlaceXor(v
, w
);
1607 if (x
!= NULL
) continue;
1613 x
= PyNumber_InPlaceOr(v
, w
);
1617 if (x
!= NULL
) continue;
1624 if ((opcode
-SLICE
) & 2)
1628 if ((opcode
-SLICE
) & 1)
1633 x
= apply_slice(u
, v
, w
);
1638 if (x
!= NULL
) continue;
1645 if ((opcode
-STORE_SLICE
) & 2)
1649 if ((opcode
-STORE_SLICE
) & 1)
1655 err
= assign_slice(u
, v
, w
, t
); /* u[v:w] = t */
1660 if (err
== 0) continue;
1663 case DELETE_SLICE
+0:
1664 case DELETE_SLICE
+1:
1665 case DELETE_SLICE
+2:
1666 case DELETE_SLICE
+3:
1667 if ((opcode
-DELETE_SLICE
) & 2)
1671 if ((opcode
-DELETE_SLICE
) & 1)
1676 err
= assign_slice(u
, v
, w
, (PyObject
*)NULL
);
1681 if (err
== 0) continue;
1690 err
= PyObject_SetItem(v
, w
, u
);
1694 if (err
== 0) continue;
1702 err
= PyObject_DelItem(v
, w
);
1705 if (err
== 0) continue;
1710 w
= PySys_GetObject("displayhook");
1712 PyErr_SetString(PyExc_RuntimeError
,
1713 "lost sys.displayhook");
1718 x
= PyTuple_Pack(1, v
);
1723 w
= PyEval_CallObject(w
, x
);
1734 /* fall through to PRINT_ITEM */
1738 if (stream
== NULL
|| stream
== Py_None
) {
1739 w
= PySys_GetObject("stdout");
1741 PyErr_SetString(PyExc_RuntimeError
,
1746 /* PyFile_SoftSpace() can exececute arbitrary code
1747 if sys.stdout is an instance with a __getattr__.
1748 If __getattr__ raises an exception, w will
1749 be freed, so we need to prevent that temporarily. */
1751 if (w
!= NULL
&& PyFile_SoftSpace(w
, 0))
1752 err
= PyFile_WriteString(" ", w
);
1754 err
= PyFile_WriteObject(v
, w
, Py_PRINT_RAW
);
1756 /* XXX move into writeobject() ? */
1757 if (PyString_Check(v
)) {
1758 char *s
= PyString_AS_STRING(v
);
1759 Py_ssize_t len
= PyString_GET_SIZE(v
);
1761 !isspace(Py_CHARMASK(s
[len
-1])) ||
1763 PyFile_SoftSpace(w
, 1);
1765 #ifdef Py_USING_UNICODE
1766 else if (PyUnicode_Check(v
)) {
1767 Py_UNICODE
*s
= PyUnicode_AS_UNICODE(v
);
1768 Py_ssize_t len
= PyUnicode_GET_SIZE(v
);
1770 !Py_UNICODE_ISSPACE(s
[len
-1]) ||
1772 PyFile_SoftSpace(w
, 1);
1776 PyFile_SoftSpace(w
, 1);
1786 case PRINT_NEWLINE_TO
:
1788 /* fall through to PRINT_NEWLINE */
1791 if (stream
== NULL
|| stream
== Py_None
) {
1792 w
= PySys_GetObject("stdout");
1794 PyErr_SetString(PyExc_RuntimeError
,
1796 why
= WHY_EXCEPTION
;
1800 /* w.write() may replace sys.stdout, so we
1801 * have to keep our reference to it */
1803 err
= PyFile_WriteString("\n", w
);
1805 PyFile_SoftSpace(w
, 0);
1814 default: switch (opcode
) {
1820 u
= POP(); /* traceback */
1823 v
= POP(); /* value */
1826 w
= POP(); /* exc */
1827 case 0: /* Fallthrough */
1828 why
= do_raise(w
, v
, u
);
1831 PyErr_SetString(PyExc_SystemError
,
1832 "bad RAISE_VARARGS oparg");
1833 why
= WHY_EXCEPTION
;
1839 if ((x
= f
->f_locals
) != NULL
) {
1844 PyErr_SetString(PyExc_SystemError
, "no locals");
1850 goto fast_block_end
;
1854 f
->f_stacktop
= stack_pointer
;
1863 READ_TIMESTAMP(intr0
);
1864 err
= exec_statement(f
, u
, v
, w
);
1865 READ_TIMESTAMP(intr1
);
1873 PyTryBlock
*b
= PyFrame_BlockPop(f
);
1874 while (STACK_LEVEL() > b
->b_level
) {
1881 PREDICTED(END_FINALLY
);
1884 if (PyInt_Check(v
)) {
1885 why
= (enum why_code
) PyInt_AS_LONG(v
);
1886 assert(why
!= WHY_YIELD
);
1887 if (why
== WHY_RETURN
||
1888 why
== WHY_CONTINUE
)
1891 else if (PyExceptionClass_Check(v
) ||
1892 PyString_Check(v
)) {
1895 PyErr_Restore(v
, w
, u
);
1899 else if (v
!= Py_None
) {
1900 PyErr_SetString(PyExc_SystemError
,
1901 "'finally' pops bad exception");
1902 why
= WHY_EXCEPTION
;
1912 x
= build_class(u
, v
, w
);
1920 w
= GETITEM(names
, oparg
);
1922 if ((x
= f
->f_locals
) != NULL
) {
1923 if (PyDict_CheckExact(x
))
1924 err
= PyDict_SetItem(x
, w
, v
);
1926 err
= PyObject_SetItem(x
, w
, v
);
1928 if (err
== 0) continue;
1931 PyErr_Format(PyExc_SystemError
,
1932 "no locals found when storing %s",
1937 w
= GETITEM(names
, oparg
);
1938 if ((x
= f
->f_locals
) != NULL
) {
1939 if ((err
= PyObject_DelItem(x
, w
)) != 0)
1940 format_exc_check_arg(PyExc_NameError
,
1945 PyErr_Format(PyExc_SystemError
,
1946 "no locals when deleting %s",
1950 PREDICTED_WITH_ARG(UNPACK_SEQUENCE
);
1951 case UNPACK_SEQUENCE
:
1953 if (PyTuple_CheckExact(v
) &&
1954 PyTuple_GET_SIZE(v
) == oparg
) {
1955 PyObject
**items
= \
1956 ((PyTupleObject
*)v
)->ob_item
;
1964 } else if (PyList_CheckExact(v
) &&
1965 PyList_GET_SIZE(v
) == oparg
) {
1966 PyObject
**items
= \
1967 ((PyListObject
*)v
)->ob_item
;
1973 } else if (unpack_iterable(v
, oparg
,
1974 stack_pointer
+ oparg
)) {
1977 /* unpack_iterable() raised an exception */
1978 why
= WHY_EXCEPTION
;
1984 w
= GETITEM(names
, oparg
);
1988 err
= PyObject_SetAttr(v
, w
, u
); /* v.w = u */
1991 if (err
== 0) continue;
1995 w
= GETITEM(names
, oparg
);
1997 err
= PyObject_SetAttr(v
, w
, (PyObject
*)NULL
);
2003 w
= GETITEM(names
, oparg
);
2005 err
= PyDict_SetItem(f
->f_globals
, w
, v
);
2007 if (err
== 0) continue;
2011 w
= GETITEM(names
, oparg
);
2012 if ((err
= PyDict_DelItem(f
->f_globals
, w
)) != 0)
2013 format_exc_check_arg(
2014 PyExc_NameError
, GLOBAL_NAME_ERROR_MSG
, w
);
2018 w
= GETITEM(names
, oparg
);
2019 if ((v
= f
->f_locals
) == NULL
) {
2020 PyErr_Format(PyExc_SystemError
,
2021 "no locals when loading %s",
2023 why
= WHY_EXCEPTION
;
2026 if (PyDict_CheckExact(v
)) {
2027 x
= PyDict_GetItem(v
, w
);
2031 x
= PyObject_GetItem(v
, w
);
2032 if (x
== NULL
&& PyErr_Occurred()) {
2033 if (!PyErr_ExceptionMatches(
2040 x
= PyDict_GetItem(f
->f_globals
, w
);
2042 x
= PyDict_GetItem(f
->f_builtins
, w
);
2044 format_exc_check_arg(
2056 w
= GETITEM(names
, oparg
);
2057 if (PyString_CheckExact(w
)) {
2058 /* Inline the PyDict_GetItem() calls.
2059 WARNING: this is an extreme speed hack.
2060 Do not try this at home. */
2061 long hash
= ((PyStringObject
*)w
)->ob_shash
;
2065 d
= (PyDictObject
*)(f
->f_globals
);
2066 e
= d
->ma_lookup(d
, w
, hash
);
2077 d
= (PyDictObject
*)(f
->f_builtins
);
2078 e
= d
->ma_lookup(d
, w
, hash
);
2089 goto load_global_error
;
2092 /* This is the un-inlined version of the code above */
2093 x
= PyDict_GetItem(f
->f_globals
, w
);
2095 x
= PyDict_GetItem(f
->f_builtins
, w
);
2098 format_exc_check_arg(
2100 GLOBAL_NAME_ERROR_MSG
, w
);
2109 x
= GETLOCAL(oparg
);
2111 SETLOCAL(oparg
, NULL
);
2114 format_exc_check_arg(
2115 PyExc_UnboundLocalError
,
2116 UNBOUNDLOCAL_ERROR_MSG
,
2117 PyTuple_GetItem(co
->co_varnames
, oparg
)
2122 x
= freevars
[oparg
];
2125 if (x
!= NULL
) continue;
2129 x
= freevars
[oparg
];
2136 /* Don't stomp existing exception */
2137 if (PyErr_Occurred())
2139 if (oparg
< PyTuple_GET_SIZE(co
->co_cellvars
)) {
2140 v
= PyTuple_GET_ITEM(co
->co_cellvars
,
2142 format_exc_check_arg(
2143 PyExc_UnboundLocalError
,
2144 UNBOUNDLOCAL_ERROR_MSG
,
2147 v
= PyTuple_GET_ITEM(co
->co_freevars
, oparg
-
2148 PyTuple_GET_SIZE(co
->co_cellvars
));
2149 format_exc_check_arg(PyExc_NameError
,
2150 UNBOUNDFREE_ERROR_MSG
, v
);
2156 x
= freevars
[oparg
];
2162 x
= PyTuple_New(oparg
);
2164 for (; --oparg
>= 0;) {
2166 PyTuple_SET_ITEM(x
, oparg
, w
);
2174 x
= PyList_New(oparg
);
2176 for (; --oparg
>= 0;) {
2178 PyList_SET_ITEM(x
, oparg
, w
);
2186 x
= _PyDict_NewPresized((Py_ssize_t
)oparg
);
2188 if (x
!= NULL
) continue;
2192 w
= TOP(); /* key */
2193 u
= SECOND(); /* value */
2194 v
= THIRD(); /* dict */
2196 assert (PyDict_CheckExact(v
));
2197 err
= PyDict_SetItem(v
, w
, u
); /* v[w] = u */
2200 if (err
== 0) continue;
2204 w
= GETITEM(names
, oparg
);
2206 x
= PyObject_GetAttr(v
, w
);
2209 if (x
!= NULL
) continue;
2215 if (PyInt_CheckExact(w
) && PyInt_CheckExact(v
)) {
2216 /* INLINE: cmp(int, int) */
2219 a
= PyInt_AS_LONG(v
);
2220 b
= PyInt_AS_LONG(w
);
2222 case PyCmp_LT
: res
= a
< b
; break;
2223 case PyCmp_LE
: res
= a
<= b
; break;
2224 case PyCmp_EQ
: res
= a
== b
; break;
2225 case PyCmp_NE
: res
= a
!= b
; break;
2226 case PyCmp_GT
: res
= a
> b
; break;
2227 case PyCmp_GE
: res
= a
>= b
; break;
2228 case PyCmp_IS
: res
= v
== w
; break;
2229 case PyCmp_IS_NOT
: res
= v
!= w
; break;
2230 default: goto slow_compare
;
2232 x
= res
? Py_True
: Py_False
;
2237 x
= cmp_outcome(oparg
, v
, w
);
2242 if (x
== NULL
) break;
2243 PREDICT(POP_JUMP_IF_FALSE
);
2244 PREDICT(POP_JUMP_IF_TRUE
);
2248 w
= GETITEM(names
, oparg
);
2249 x
= PyDict_GetItemString(f
->f_builtins
, "__import__");
2251 PyErr_SetString(PyExc_ImportError
,
2252 "__import__ not found");
2258 if (PyInt_AsLong(u
) != -1 || PyErr_Occurred())
2262 f
->f_locals
== NULL
?
2263 Py_None
: f
->f_locals
,
2270 f
->f_locals
== NULL
?
2271 Py_None
: f
->f_locals
,
2281 READ_TIMESTAMP(intr0
);
2283 x
= PyEval_CallObject(v
, w
);
2285 READ_TIMESTAMP(intr1
);
2288 if (x
!= NULL
) continue;
2293 PyFrame_FastToLocals(f
);
2294 if ((x
= f
->f_locals
) == NULL
) {
2295 PyErr_SetString(PyExc_SystemError
,
2296 "no locals found during 'import *'");
2299 READ_TIMESTAMP(intr0
);
2300 err
= import_all_from(x
, v
);
2301 READ_TIMESTAMP(intr1
);
2302 PyFrame_LocalsToFast(f
, 0);
2304 if (err
== 0) continue;
2308 w
= GETITEM(names
, oparg
);
2310 READ_TIMESTAMP(intr0
);
2311 x
= import_from(v
, w
);
2312 READ_TIMESTAMP(intr1
);
2314 if (x
!= NULL
) continue;
2319 goto fast_next_opcode
;
2321 PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE
);
2322 case POP_JUMP_IF_FALSE
:
2326 goto fast_next_opcode
;
2328 if (w
== Py_False
) {
2331 goto fast_next_opcode
;
2333 err
= PyObject_IsTrue(w
);
2343 PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE
);
2344 case POP_JUMP_IF_TRUE
:
2346 if (w
== Py_False
) {
2348 goto fast_next_opcode
;
2353 goto fast_next_opcode
;
2355 err
= PyObject_IsTrue(w
);
2367 case JUMP_IF_FALSE_OR_POP
:
2372 goto fast_next_opcode
;
2374 if (w
== Py_False
) {
2376 goto fast_next_opcode
;
2378 err
= PyObject_IsTrue(w
);
2390 case JUMP_IF_TRUE_OR_POP
:
2392 if (w
== Py_False
) {
2395 goto fast_next_opcode
;
2399 goto fast_next_opcode
;
2401 err
= PyObject_IsTrue(w
);
2406 else if (err
== 0) {
2414 PREDICTED_WITH_ARG(JUMP_ABSOLUTE
);
2418 /* Enabling this path speeds-up all while and for-loops by bypassing
2419 the per-loop checks for signals. By default, this should be turned-off
2420 because it prevents detection of a control-break in tight loops like
2421 "while 1: pass". Compile with this option turned-on when you need
2422 the speed-up and do not need break checking inside tight loops (ones
2423 that contain only instructions ending with goto fast_next_opcode).
2425 goto fast_next_opcode
;
2431 /* before: [obj]; after [getiter(obj)] */
2433 x
= PyObject_GetIter(v
);
2443 PREDICTED_WITH_ARG(FOR_ITER
);
2445 /* before: [iter]; after: [iter, iter()] *or* [] */
2447 x
= (*v
->ob_type
->tp_iternext
)(v
);
2450 PREDICT(STORE_FAST
);
2451 PREDICT(UNPACK_SEQUENCE
);
2454 if (PyErr_Occurred()) {
2455 if (!PyErr_ExceptionMatches(
2456 PyExc_StopIteration
))
2460 /* iterator ended normally */
2468 goto fast_block_end
;
2471 retval
= PyInt_FromLong(oparg
);
2477 goto fast_block_end
;
2482 /* NOTE: If you add any new block-setup opcodes that
2483 are not try/except/finally handlers, you may need
2484 to update the PyGen_NeedsFinalizing() function.
2487 PyFrame_BlockSetup(f
, opcode
, INSTR_OFFSET() + oparg
,
2493 static PyObject
*exit
, *enter
;
2495 x
= special_lookup(w
, "__exit__", &exit
);
2499 u
= special_lookup(w
, "__enter__", &enter
);
2505 x
= PyObject_CallFunctionObjArgs(u
, NULL
);
2509 /* Setup the finally block before pushing the result
2510 of __enter__ on the stack. */
2511 PyFrame_BlockSetup(f
, SETUP_FINALLY
, INSTR_OFFSET() + oparg
,
2520 /* At the top of the stack are 1-3 values indicating
2521 how/why we entered the finally clause:
2523 - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval
2524 - TOP = WHY_*; no retval below it
2525 - (TOP, SECOND, THIRD) = exc_info()
2526 Below them is EXIT, the context.__exit__ bound method.
2527 In the last case, we must call
2528 EXIT(TOP, SECOND, THIRD)
2529 otherwise we must call
2530 EXIT(None, None, None)
2532 In all cases, we remove EXIT from the stack, leaving
2533 the rest in the same order.
2535 In addition, if the stack represents an exception,
2536 *and* the function call returns a 'true' value, we
2537 "zap" this information, to prevent END_FINALLY from
2538 re-raising the exception. (But non-local gotos
2539 should still be resumed.)
2542 PyObject
*exit_func
;
2550 else if (PyInt_Check(u
)) {
2551 switch(PyInt_AS_LONG(u
)) {
2554 /* Retval in TOP. */
2555 exit_func
= SECOND();
2564 u
= v
= w
= Py_None
;
2569 exit_func
= THIRD();
2574 /* XXX Not the fastest way to call it... */
2575 x
= PyObject_CallFunctionObjArgs(exit_func
, u
, v
, w
,
2577 Py_DECREF(exit_func
);
2579 break; /* Go to error exit */
2582 err
= PyObject_IsTrue(x
);
2588 break; /* Go to error exit */
2591 /* There was an exception and a true return */
2599 /* The stack was rearranged to remove EXIT
2600 above. Let END_FINALLY do its thing */
2602 PREDICT(END_FINALLY
);
2612 x
= call_function(&sp
, oparg
, &intr0
, &intr1
);
2614 x
= call_function(&sp
, oparg
);
2623 case CALL_FUNCTION_VAR
:
2624 case CALL_FUNCTION_KW
:
2625 case CALL_FUNCTION_VAR_KW
:
2627 int na
= oparg
& 0xff;
2628 int nk
= (oparg
>>8) & 0xff;
2629 int flags
= (opcode
- CALL_FUNCTION
) & 3;
2630 int n
= na
+ 2 * nk
;
2631 PyObject
**pfunc
, *func
, **sp
;
2633 if (flags
& CALL_FLAG_VAR
)
2635 if (flags
& CALL_FLAG_KW
)
2637 pfunc
= stack_pointer
- n
- 1;
2640 if (PyMethod_Check(func
)
2641 && PyMethod_GET_SELF(func
) != NULL
) {
2642 PyObject
*self
= PyMethod_GET_SELF(func
);
2644 func
= PyMethod_GET_FUNCTION(func
);
2653 READ_TIMESTAMP(intr0
);
2654 x
= ext_do_call(func
, &sp
, flags
, na
, nk
);
2655 READ_TIMESTAMP(intr1
);
2659 while (stack_pointer
> pfunc
) {
2670 v
= POP(); /* code object */
2671 x
= PyFunction_New(v
, f
->f_globals
);
2673 /* XXX Maybe this should be a separate opcode? */
2674 if (x
!= NULL
&& oparg
> 0) {
2675 v
= PyTuple_New(oparg
);
2681 while (--oparg
>= 0) {
2683 PyTuple_SET_ITEM(v
, oparg
, w
);
2685 err
= PyFunction_SetDefaults(x
, v
);
2693 v
= POP(); /* code object */
2694 x
= PyFunction_New(v
, f
->f_globals
);
2698 if (PyFunction_SetClosure(x
, v
) != 0) {
2699 /* Can't happen unless bytecode is corrupt. */
2700 why
= WHY_EXCEPTION
;
2704 if (x
!= NULL
&& oparg
> 0) {
2705 v
= PyTuple_New(oparg
);
2711 while (--oparg
>= 0) {
2713 PyTuple_SET_ITEM(v
, oparg
, w
);
2715 if (PyFunction_SetDefaults(x
, v
) != 0) {
2716 /* Can't happen unless
2717 PyFunction_SetDefaults changes. */
2718 why
= WHY_EXCEPTION
;
2733 x
= PySlice_New(u
, v
, w
);
2738 if (x
!= NULL
) continue;
2743 oparg
= oparg
<<16 | NEXTARG();
2744 goto dispatch_opcode
;
2748 "XXX lineno: %d, opcode: %d\n",
2749 PyFrame_GetLineNumber(f
),
2751 PyErr_SetString(PyExc_SystemError
, "unknown opcode");
2752 why
= WHY_EXCEPTION
;
2763 READ_TIMESTAMP(inst1
);
2765 /* Quickly continue if no error occurred */
2767 if (why
== WHY_NOT
) {
2768 if (err
== 0 && x
!= NULL
) {
2770 /* This check is expensive! */
2771 if (PyErr_Occurred())
2773 "XXX undetected error\n");
2776 READ_TIMESTAMP(loop1
);
2777 continue; /* Normal, fast path */
2782 why
= WHY_EXCEPTION
;
2787 /* Double-check exception status */
2789 if (why
== WHY_EXCEPTION
|| why
== WHY_RERAISE
) {
2790 if (!PyErr_Occurred()) {
2791 PyErr_SetString(PyExc_SystemError
,
2792 "error return without exception set");
2793 why
= WHY_EXCEPTION
;
2798 /* This check is expensive! */
2799 if (PyErr_Occurred()) {
2801 sprintf(buf
, "Stack unwind with exception "
2802 "set and why=%d", why
);
2808 /* Log traceback info if this is a real exception */
2810 if (why
== WHY_EXCEPTION
) {
2811 PyTraceBack_Here(f
);
2813 if (tstate
->c_tracefunc
!= NULL
)
2814 call_exc_trace(tstate
->c_tracefunc
,
2815 tstate
->c_traceobj
, f
);
2818 /* For the rest, treat WHY_RERAISE as WHY_EXCEPTION */
2820 if (why
== WHY_RERAISE
)
2821 why
= WHY_EXCEPTION
;
2823 /* Unwind stacks if a (pseudo) exception occurred */
2826 while (why
!= WHY_NOT
&& f
->f_iblock
> 0) {
2827 /* Peek at the current block. */
2828 PyTryBlock
*b
= &f
->f_blockstack
[f
->f_iblock
- 1];
2830 assert(why
!= WHY_YIELD
);
2831 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_CONTINUE
) {
2833 JUMPTO(PyInt_AS_LONG(retval
));
2838 /* Now we have to pop the block. */
2841 while (STACK_LEVEL() > b
->b_level
) {
2845 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_BREAK
) {
2847 JUMPTO(b
->b_handler
);
2850 if (b
->b_type
== SETUP_FINALLY
||
2851 (b
->b_type
== SETUP_EXCEPT
&&
2852 why
== WHY_EXCEPTION
)) {
2853 if (why
== WHY_EXCEPTION
) {
2854 PyObject
*exc
, *val
, *tb
;
2855 PyErr_Fetch(&exc
, &val
, &tb
);
2860 /* Make the raw exception data
2861 available to the handler,
2862 so a program can emulate the
2863 Python main loop. Don't do
2864 this for 'finally'. */
2865 if (b
->b_type
== SETUP_EXCEPT
) {
2866 PyErr_NormalizeException(
2868 set_exc_info(tstate
,
2880 if (why
& (WHY_RETURN
| WHY_CONTINUE
))
2882 v
= PyInt_FromLong((long)why
);
2886 JUMPTO(b
->b_handler
);
2889 } /* unwind stack */
2891 /* End the loop if we still have an error (or return) */
2895 READ_TIMESTAMP(loop1
);
2899 assert(why
!= WHY_YIELD
);
2900 /* Pop remaining stack entries. */
2906 if (why
!= WHY_RETURN
)
2910 if (tstate
->use_tracing
) {
2911 if (tstate
->c_tracefunc
) {
2912 if (why
== WHY_RETURN
|| why
== WHY_YIELD
) {
2913 if (call_trace(tstate
->c_tracefunc
,
2914 tstate
->c_traceobj
, f
,
2915 PyTrace_RETURN
, retval
)) {
2918 why
= WHY_EXCEPTION
;
2921 else if (why
== WHY_EXCEPTION
) {
2922 call_trace_protected(tstate
->c_tracefunc
,
2923 tstate
->c_traceobj
, f
,
2924 PyTrace_RETURN
, NULL
);
2927 if (tstate
->c_profilefunc
) {
2928 if (why
== WHY_EXCEPTION
)
2929 call_trace_protected(tstate
->c_profilefunc
,
2930 tstate
->c_profileobj
, f
,
2931 PyTrace_RETURN
, NULL
);
2932 else if (call_trace(tstate
->c_profilefunc
,
2933 tstate
->c_profileobj
, f
,
2934 PyTrace_RETURN
, retval
)) {
2937 why
= WHY_EXCEPTION
;
2942 if (tstate
->frame
->f_exc_type
!= NULL
)
2943 reset_exc_info(tstate
);
2945 assert(tstate
->frame
->f_exc_value
== NULL
);
2946 assert(tstate
->frame
->f_exc_traceback
== NULL
);
2951 Py_LeaveRecursiveCall();
2952 tstate
->frame
= f
->f_back
;
2957 /* This is gonna seem *real weird*, but if you put some other code between
2958 PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
2959 the test in the if statements in Misc/gdbinit (pystack and pystackv). */
2962 PyEval_EvalCodeEx(PyCodeObject
*co
, PyObject
*globals
, PyObject
*locals
,
2963 PyObject
**args
, int argcount
, PyObject
**kws
, int kwcount
,
2964 PyObject
**defs
, int defcount
, PyObject
*closure
)
2966 register PyFrameObject
*f
;
2967 register PyObject
*retval
= NULL
;
2968 register PyObject
**fastlocals
, **freevars
;
2969 PyThreadState
*tstate
= PyThreadState_GET();
2972 if (globals
== NULL
) {
2973 PyErr_SetString(PyExc_SystemError
,
2974 "PyEval_EvalCodeEx: NULL globals");
2978 assert(tstate
!= NULL
);
2979 assert(globals
!= NULL
);
2980 f
= PyFrame_New(tstate
, co
, globals
, locals
);
2984 fastlocals
= f
->f_localsplus
;
2985 freevars
= f
->f_localsplus
+ co
->co_nlocals
;
2987 if (co
->co_argcount
> 0 ||
2988 co
->co_flags
& (CO_VARARGS
| CO_VARKEYWORDS
)) {
2991 PyObject
*kwdict
= NULL
;
2992 if (co
->co_flags
& CO_VARKEYWORDS
) {
2993 kwdict
= PyDict_New();
2996 i
= co
->co_argcount
;
2997 if (co
->co_flags
& CO_VARARGS
)
2999 SETLOCAL(i
, kwdict
);
3001 if (argcount
> co
->co_argcount
) {
3002 if (!(co
->co_flags
& CO_VARARGS
)) {
3003 PyErr_Format(PyExc_TypeError
,
3004 "%.200s() takes %s %d "
3005 "%sargument%s (%d given)",
3006 PyString_AsString(co
->co_name
),
3007 defcount
? "at most" : "exactly",
3009 kwcount
? "non-keyword " : "",
3010 co
->co_argcount
== 1 ? "" : "s",
3014 n
= co
->co_argcount
;
3016 for (i
= 0; i
< n
; i
++) {
3021 if (co
->co_flags
& CO_VARARGS
) {
3022 u
= PyTuple_New(argcount
- n
);
3025 SETLOCAL(co
->co_argcount
, u
);
3026 for (i
= n
; i
< argcount
; i
++) {
3029 PyTuple_SET_ITEM(u
, i
-n
, x
);
3032 for (i
= 0; i
< kwcount
; i
++) {
3033 PyObject
**co_varnames
;
3034 PyObject
*keyword
= kws
[2*i
];
3035 PyObject
*value
= kws
[2*i
+ 1];
3037 if (keyword
== NULL
|| !(PyString_Check(keyword
)
3038 #ifdef Py_USING_UNICODE
3039 || PyUnicode_Check(keyword
)
3042 PyErr_Format(PyExc_TypeError
,
3043 "%.200s() keywords must be strings",
3044 PyString_AsString(co
->co_name
));
3047 /* Speed hack: do raw pointer compares. As names are
3048 normally interned this should almost always hit. */
3049 co_varnames
= PySequence_Fast_ITEMS(co
->co_varnames
);
3050 for (j
= 0; j
< co
->co_argcount
; j
++) {
3051 PyObject
*nm
= co_varnames
[j
];
3055 /* Slow fallback, just in case */
3056 for (j
= 0; j
< co
->co_argcount
; j
++) {
3057 PyObject
*nm
= co_varnames
[j
];
3058 int cmp
= PyObject_RichCompareBool(
3059 keyword
, nm
, Py_EQ
);
3065 /* Check errors from Compare */
3066 if (PyErr_Occurred())
3068 if (j
>= co
->co_argcount
) {
3069 if (kwdict
== NULL
) {
3070 PyObject
*kwd_str
= kwd_as_string(keyword
);
3072 PyErr_Format(PyExc_TypeError
,
3073 "%.200s() got an unexpected "
3074 "keyword argument '%.400s'",
3075 PyString_AsString(co
->co_name
),
3076 PyString_AsString(kwd_str
));
3081 PyDict_SetItem(kwdict
, keyword
, value
);
3085 if (GETLOCAL(j
) != NULL
) {
3086 PyObject
*kwd_str
= kwd_as_string(keyword
);
3088 PyErr_Format(PyExc_TypeError
,
3089 "%.200s() got multiple "
3090 "values for keyword "
3091 "argument '%.400s'",
3092 PyString_AsString(co
->co_name
),
3093 PyString_AsString(kwd_str
));
3101 if (argcount
< co
->co_argcount
) {
3102 int m
= co
->co_argcount
- defcount
;
3103 for (i
= argcount
; i
< m
; i
++) {
3104 if (GETLOCAL(i
) == NULL
) {
3105 PyErr_Format(PyExc_TypeError
,
3106 "%.200s() takes %s %d "
3107 "%sargument%s (%d given)",
3108 PyString_AsString(co
->co_name
),
3109 ((co
->co_flags
& CO_VARARGS
) ||
3110 defcount
) ? "at least"
3112 m
, kwcount
? "non-keyword " : "",
3113 m
== 1 ? "" : "s", i
);
3121 for (; i
< defcount
; i
++) {
3122 if (GETLOCAL(m
+i
) == NULL
) {
3123 PyObject
*def
= defs
[i
];
3131 if (argcount
> 0 || kwcount
> 0) {
3132 PyErr_Format(PyExc_TypeError
,
3133 "%.200s() takes no arguments (%d given)",
3134 PyString_AsString(co
->co_name
),
3135 argcount
+ kwcount
);
3139 /* Allocate and initialize storage for cell vars, and copy free
3140 vars into frame. This isn't too efficient right now. */
3141 if (PyTuple_GET_SIZE(co
->co_cellvars
)) {
3142 int i
, j
, nargs
, found
;
3143 char *cellname
, *argname
;
3146 nargs
= co
->co_argcount
;
3147 if (co
->co_flags
& CO_VARARGS
)
3149 if (co
->co_flags
& CO_VARKEYWORDS
)
3152 /* Initialize each cell var, taking into account
3153 cell vars that are initialized from arguments.
3155 Should arrange for the compiler to put cellvars
3156 that are arguments at the beginning of the cellvars
3157 list so that we can march over it more efficiently?
3159 for (i
= 0; i
< PyTuple_GET_SIZE(co
->co_cellvars
); ++i
) {
3160 cellname
= PyString_AS_STRING(
3161 PyTuple_GET_ITEM(co
->co_cellvars
, i
));
3163 for (j
= 0; j
< nargs
; j
++) {
3164 argname
= PyString_AS_STRING(
3165 PyTuple_GET_ITEM(co
->co_varnames
, j
));
3166 if (strcmp(cellname
, argname
) == 0) {
3167 c
= PyCell_New(GETLOCAL(j
));
3170 GETLOCAL(co
->co_nlocals
+ i
) = c
;
3176 c
= PyCell_New(NULL
);
3179 SETLOCAL(co
->co_nlocals
+ i
, c
);
3183 if (PyTuple_GET_SIZE(co
->co_freevars
)) {
3185 for (i
= 0; i
< PyTuple_GET_SIZE(co
->co_freevars
); ++i
) {
3186 PyObject
*o
= PyTuple_GET_ITEM(closure
, i
);
3188 freevars
[PyTuple_GET_SIZE(co
->co_cellvars
) + i
] = o
;
3192 if (co
->co_flags
& CO_GENERATOR
) {
3193 /* Don't need to keep the reference to f_back, it will be set
3194 * when the generator is resumed. */
3195 Py_XDECREF(f
->f_back
);
3198 PCALL(PCALL_GENERATOR
);
3200 /* Create a new generator that owns the ready to run frame
3201 * and return that as the value. */
3202 return PyGen_New(f
);
3205 retval
= PyEval_EvalFrameEx(f
,0);
3207 fail
: /* Jump here from prelude on failure */
3209 /* decref'ing the frame can cause __del__ methods to get invoked,
3210 which can call back into Python. While we're done with the
3211 current Python frame (f), the associated C stack is still in use,
3212 so recursion_depth must be boosted for the duration.
3214 assert(tstate
!= NULL
);
3215 ++tstate
->recursion_depth
;
3217 --tstate
->recursion_depth
;
3223 special_lookup(PyObject
*o
, char *meth
, PyObject
**cache
)
3226 if (PyInstance_Check(o
)) {
3228 return PyObject_GetAttrString(o
, meth
);
3230 return PyObject_GetAttr(o
, *cache
);
3232 res
= _PyObject_LookupSpecial(o
, meth
, cache
);
3233 if (res
== NULL
&& !PyErr_Occurred()) {
3234 PyErr_SetObject(PyExc_AttributeError
, *cache
);
3242 kwd_as_string(PyObject
*kwd
) {
3243 #ifdef Py_USING_UNICODE
3244 if (PyString_Check(kwd
)) {
3246 assert(PyString_Check(kwd
));
3250 #ifdef Py_USING_UNICODE
3252 return _PyUnicode_AsDefaultEncodedString(kwd
, "replace");
3257 /* Implementation notes for set_exc_info() and reset_exc_info():
3259 - Below, 'exc_ZZZ' stands for 'exc_type', 'exc_value' and
3260 'exc_traceback'. These always travel together.
3262 - tstate->curexc_ZZZ is the "hot" exception that is set by
3263 PyErr_SetString(), cleared by PyErr_Clear(), and so on.
3265 - Once an exception is caught by an except clause, it is transferred
3266 from tstate->curexc_ZZZ to tstate->exc_ZZZ, from which sys.exc_info()
3267 can pick it up. This is the primary task of set_exc_info().
3268 XXX That can't be right: set_exc_info() doesn't look at tstate->curexc_ZZZ.
3270 - Now let me explain the complicated dance with frame->f_exc_ZZZ.
3272 Long ago, when none of this existed, there were just a few globals:
3273 one set corresponding to the "hot" exception, and one set
3274 corresponding to sys.exc_ZZZ. (Actually, the latter weren't C
3275 globals; they were simply stored as sys.exc_ZZZ. For backwards
3276 compatibility, they still are!)  The problem was that in code like

       try:
           "something that may fail"
       except "some exception":
           "do something else first"
           "print the exception from sys.exc_ZZZ."
3285 if "do something else first" invoked something that raised and caught
3286 an exception, sys.exc_ZZZ were overwritten. That was a frequent
3287 cause of subtle bugs. I fixed this by changing the semantics as
3290 - Within one frame, sys.exc_ZZZ will hold the last exception caught
3293 - But initially, and as long as no exception is caught in a given
3294 frame, sys.exc_ZZZ will hold the last exception caught in the
3295 previous frame (or the frame before that, etc.).
3297 The first bullet fixed the bug in the above example. The second
3298 bullet was for backwards compatibility: it was (and is) common to
3299 have a function that is called when an exception is caught, and to
3300 have that function access the caught exception via sys.exc_ZZZ.
3301 (Example: traceback.print_exc()).
3303 At the same time I fixed the problem that sys.exc_ZZZ weren't
3304 thread-safe, by introducing sys.exc_info() which gets it from tstate;
3305 but that's really a separate improvement.
3307 The reset_exc_info() function in ceval.c restores the tstate->exc_ZZZ
3308 variables to what they were before the current frame was called. The
3309 set_exc_info() function saves them on the frame so that
3310 reset_exc_info() can restore them. The invariant is that
3311 frame->f_exc_ZZZ is NULL iff the current frame never caught an
3312 exception (where "catching" an exception applies only to successful
3313 except clauses); and if the current frame ever caught an exception,
3314 frame->f_exc_ZZZ is the exception that was stored in tstate->exc_ZZZ
3315 at the start of the current frame.
3320 set_exc_info(PyThreadState
*tstate
,
3321 PyObject
*type
, PyObject
*value
, PyObject
*tb
)
3323 PyFrameObject
*frame
= tstate
->frame
;
3324 PyObject
*tmp_type
, *tmp_value
, *tmp_tb
;
3326 assert(type
!= NULL
);
3327 assert(frame
!= NULL
);
3328 if (frame
->f_exc_type
== NULL
) {
3329 assert(frame
->f_exc_value
== NULL
);
3330 assert(frame
->f_exc_traceback
== NULL
);
3331 /* This frame didn't catch an exception before. */
3332 /* Save previous exception of this thread in this frame. */
3333 if (tstate
->exc_type
== NULL
) {
3334 /* XXX Why is this set to Py_None? */
3336 tstate
->exc_type
= Py_None
;
3338 Py_INCREF(tstate
->exc_type
);
3339 Py_XINCREF(tstate
->exc_value
);
3340 Py_XINCREF(tstate
->exc_traceback
);
3341 frame
->f_exc_type
= tstate
->exc_type
;
3342 frame
->f_exc_value
= tstate
->exc_value
;
3343 frame
->f_exc_traceback
= tstate
->exc_traceback
;
3345 /* Set new exception for this thread. */
3346 tmp_type
= tstate
->exc_type
;
3347 tmp_value
= tstate
->exc_value
;
3348 tmp_tb
= tstate
->exc_traceback
;
3352 tstate
->exc_type
= type
;
3353 tstate
->exc_value
= value
;
3354 tstate
->exc_traceback
= tb
;
3355 Py_XDECREF(tmp_type
);
3356 Py_XDECREF(tmp_value
);
3358 /* For b/w compatibility */
3359 PySys_SetObject("exc_type", type
);
3360 PySys_SetObject("exc_value", value
);
3361 PySys_SetObject("exc_traceback", tb
);
3365 reset_exc_info(PyThreadState
*tstate
)
3367 PyFrameObject
*frame
;
3368 PyObject
*tmp_type
, *tmp_value
, *tmp_tb
;
3370 /* It's a precondition that the thread state's frame caught an
3371 * exception -- verify in a debug build.
3373 assert(tstate
!= NULL
);
3374 frame
= tstate
->frame
;
3375 assert(frame
!= NULL
);
3376 assert(frame
->f_exc_type
!= NULL
);
3378 /* Copy the frame's exception info back to the thread state. */
3379 tmp_type
= tstate
->exc_type
;
3380 tmp_value
= tstate
->exc_value
;
3381 tmp_tb
= tstate
->exc_traceback
;
3382 Py_INCREF(frame
->f_exc_type
);
3383 Py_XINCREF(frame
->f_exc_value
);
3384 Py_XINCREF(frame
->f_exc_traceback
);
3385 tstate
->exc_type
= frame
->f_exc_type
;
3386 tstate
->exc_value
= frame
->f_exc_value
;
3387 tstate
->exc_traceback
= frame
->f_exc_traceback
;
3388 Py_XDECREF(tmp_type
);
3389 Py_XDECREF(tmp_value
);
3392 /* For b/w compatibility */
3393 PySys_SetObject("exc_type", frame
->f_exc_type
);
3394 PySys_SetObject("exc_value", frame
->f_exc_value
);
3395 PySys_SetObject("exc_traceback", frame
->f_exc_traceback
);
3397 /* Clear the frame's exception info. */
3398 tmp_type
= frame
->f_exc_type
;
3399 tmp_value
= frame
->f_exc_value
;
3400 tmp_tb
= frame
->f_exc_traceback
;
3401 frame
->f_exc_type
= NULL
;
3402 frame
->f_exc_value
= NULL
;
3403 frame
->f_exc_traceback
= NULL
;
3404 Py_DECREF(tmp_type
);
3405 Py_XDECREF(tmp_value
);
3409 /* Logic for the raise statement (too complicated for inlining).
3410 This *consumes* a reference count to each of its arguments. */
3411 static enum why_code
3412 do_raise(PyObject
*type
, PyObject
*value
, PyObject
*tb
)
3416 PyThreadState
*tstate
= PyThreadState_GET();
3417 type
= tstate
->exc_type
== NULL
? Py_None
: tstate
->exc_type
;
3418 value
= tstate
->exc_value
;
3419 tb
= tstate
->exc_traceback
;
3425 /* We support the following forms of raise:
3426 raise <class>, <classinstance>
3427 raise <class>, <argument tuple>
3429 raise <class>, <argument>
3430 raise <classinstance>, None
3431 raise <string>, <object>
3432 raise <string>, None
3434 An omitted second argument is the same as None.
3436 In addition, raise <tuple>, <anything> is the same as
3437 raising the tuple's first item (and it better have one!);
3438 this rule is applied recursively.
3440 Finally, an optional third argument can be supplied, which
3441 gives the traceback to be substituted (useful when
3442 re-raising an exception after examining it). */
3444 /* First, check the traceback argument, replacing None with
3446 if (tb
== Py_None
) {
3450 else if (tb
!= NULL
&& !PyTraceBack_Check(tb
)) {
3451 PyErr_SetString(PyExc_TypeError
,
3452 "raise: arg 3 must be a traceback or None");
3456 /* Next, replace a missing value with None */
3457 if (value
== NULL
) {
3462 /* Next, repeatedly, replace a tuple exception with its first item */
3463 while (PyTuple_Check(type
) && PyTuple_Size(type
) > 0) {
3464 PyObject
*tmp
= type
;
3465 type
= PyTuple_GET_ITEM(type
, 0);
3470 if (PyExceptionClass_Check(type
))
3471 PyErr_NormalizeException(&type
, &value
, &tb
);
3473 else if (PyExceptionInstance_Check(type
)) {
3474 /* Raising an instance. The value should be a dummy. */
3475 if (value
!= Py_None
) {
3476 PyErr_SetString(PyExc_TypeError
,
3477 "instance exception may not have a separate value");
3481 /* Normalize to raise <class>, <instance> */
3484 type
= PyExceptionInstance_Class(type
);
3489 /* Not something you can raise. You get an exception
3490 anyway, just not what you specified :-) */
3491 PyErr_Format(PyExc_TypeError
,
3492 "exceptions must be classes or instances, not %s",
3493 type
->ob_type
->tp_name
);
3497 assert(PyExceptionClass_Check(type
));
3498 if (Py_Py3kWarningFlag
&& PyClass_Check(type
)) {
3499 if (PyErr_WarnEx(PyExc_DeprecationWarning
,
3500 "exceptions must derive from BaseException "
3505 PyErr_Restore(type
, value
, tb
);
3507 return WHY_EXCEPTION
;
3514 return WHY_EXCEPTION
;
3517 /* Iterate v argcnt times and store the results on the stack (via decreasing
3518 sp). Return 1 for success, 0 if error. */
3521 unpack_iterable(PyObject
*v
, int argcnt
, PyObject
**sp
)
3524 PyObject
*it
; /* iter(v) */
3529 it
= PyObject_GetIter(v
);
3533 for (; i
< argcnt
; i
++) {
3534 w
= PyIter_Next(it
);
3536 /* Iterator done, via error or exhaustion. */
3537 if (!PyErr_Occurred()) {
3538 PyErr_Format(PyExc_ValueError
,
3539 "need more than %d value%s to unpack",
3540 i
, i
== 1 ? "" : "s");
3547 /* We better have exhausted the iterator now. */
3548 w
= PyIter_Next(it
);
3550 if (PyErr_Occurred())
3556 PyErr_SetString(PyExc_ValueError
, "too many values to unpack");
3559 for (; i
> 0; i
--, sp
++)
3568 prtrace(PyObject
*v
, char *str
)
3571 if (PyObject_Print(v
, stdout
, 0) != 0)
3572 PyErr_Clear(); /* Don't know what else to do */
3579 call_exc_trace(Py_tracefunc func
, PyObject
*self
, PyFrameObject
*f
)
3581 PyObject
*type
, *value
, *traceback
, *arg
;
3583 PyErr_Fetch(&type
, &value
, &traceback
);
3584 if (value
== NULL
) {
3588 arg
= PyTuple_Pack(3, type
, value
, traceback
);
3590 PyErr_Restore(type
, value
, traceback
);
3593 err
= call_trace(func
, self
, f
, PyTrace_EXCEPTION
, arg
);
3596 PyErr_Restore(type
, value
, traceback
);
3600 Py_XDECREF(traceback
);
3605 call_trace_protected(Py_tracefunc func
, PyObject
*obj
, PyFrameObject
*frame
,
3606 int what
, PyObject
*arg
)
3608 PyObject
*type
, *value
, *traceback
;
3610 PyErr_Fetch(&type
, &value
, &traceback
);
3611 err
= call_trace(func
, obj
, frame
, what
, arg
);
3614 PyErr_Restore(type
, value
, traceback
);
3620 Py_XDECREF(traceback
);
3626 call_trace(Py_tracefunc func
, PyObject
*obj
, PyFrameObject
*frame
,
3627 int what
, PyObject
*arg
)
3629 register PyThreadState
*tstate
= frame
->f_tstate
;
3631 if (tstate
->tracing
)
3634 tstate
->use_tracing
= 0;
3635 result
= func(obj
, frame
, what
, arg
);
3636 tstate
->use_tracing
= ((tstate
->c_tracefunc
!= NULL
)
3637 || (tstate
->c_profilefunc
!= NULL
));
3643 _PyEval_CallTracing(PyObject
*func
, PyObject
*args
)
3645 PyFrameObject
*frame
= PyEval_GetFrame();
3646 PyThreadState
*tstate
= frame
->f_tstate
;
3647 int save_tracing
= tstate
->tracing
;
3648 int save_use_tracing
= tstate
->use_tracing
;
3651 tstate
->tracing
= 0;
3652 tstate
->use_tracing
= ((tstate
->c_tracefunc
!= NULL
)
3653 || (tstate
->c_profilefunc
!= NULL
));
3654 result
= PyObject_Call(func
, args
, NULL
);
3655 tstate
->tracing
= save_tracing
;
3656 tstate
->use_tracing
= save_use_tracing
;
3660 /* See Objects/lnotab_notes.txt for a description of how tracing works. */
3662 maybe_call_line_trace(Py_tracefunc func
, PyObject
*obj
,
3663 PyFrameObject
*frame
, int *instr_lb
, int *instr_ub
,
3667 int line
= frame
->f_lineno
;
3669 /* If the last instruction executed isn't in the current
3670 instruction window, reset the window.
3672 if (frame
->f_lasti
< *instr_lb
|| frame
->f_lasti
>= *instr_ub
) {
3674 line
= _PyCode_CheckLineNumber(frame
->f_code
, frame
->f_lasti
,
3676 *instr_lb
= bounds
.ap_lower
;
3677 *instr_ub
= bounds
.ap_upper
;
3679 /* If the last instruction falls at the start of a line or if
3680 it represents a jump backwards, update the frame's line
3681 number and call the trace function. */
3682 if (frame
->f_lasti
== *instr_lb
|| frame
->f_lasti
< *instr_prev
) {
3683 frame
->f_lineno
= line
;
3684 result
= call_trace(func
, obj
, frame
, PyTrace_LINE
, Py_None
);
3686 *instr_prev
= frame
->f_lasti
;
3691 PyEval_SetProfile(Py_tracefunc func
, PyObject
*arg
)
3693 PyThreadState
*tstate
= PyThreadState_GET();
3694 PyObject
*temp
= tstate
->c_profileobj
;
3696 tstate
->c_profilefunc
= NULL
;
3697 tstate
->c_profileobj
= NULL
;
3698 /* Must make sure that tracing is not ignored if 'temp' is freed */
3699 tstate
->use_tracing
= tstate
->c_tracefunc
!= NULL
;
3701 tstate
->c_profilefunc
= func
;
3702 tstate
->c_profileobj
= arg
;
3703 /* Flag that tracing or profiling is turned on */
3704 tstate
->use_tracing
= (func
!= NULL
) || (tstate
->c_tracefunc
!= NULL
);
3708 PyEval_SetTrace(Py_tracefunc func
, PyObject
*arg
)
3710 PyThreadState
*tstate
= PyThreadState_GET();
3711 PyObject
*temp
= tstate
->c_traceobj
;
3712 _Py_TracingPossible
+= (func
!= NULL
) - (tstate
->c_tracefunc
!= NULL
);
3714 tstate
->c_tracefunc
= NULL
;
3715 tstate
->c_traceobj
= NULL
;
3716 /* Must make sure that profiling is not ignored if 'temp' is freed */
3717 tstate
->use_tracing
= tstate
->c_profilefunc
!= NULL
;
3719 tstate
->c_tracefunc
= func
;
3720 tstate
->c_traceobj
= arg
;
3721 /* Flag that tracing or profiling is turned on */
3722 tstate
->use_tracing
= ((func
!= NULL
)
3723 || (tstate
->c_profilefunc
!= NULL
));
3727 PyEval_GetBuiltins(void)
3729 PyFrameObject
*current_frame
= PyEval_GetFrame();
3730 if (current_frame
== NULL
)
3731 return PyThreadState_GET()->interp
->builtins
;
3733 return current_frame
->f_builtins
;
3737 PyEval_GetLocals(void)
3739 PyFrameObject
*current_frame
= PyEval_GetFrame();
3740 if (current_frame
== NULL
)
3742 PyFrame_FastToLocals(current_frame
);
3743 return current_frame
->f_locals
;
3747 PyEval_GetGlobals(void)
3749 PyFrameObject
*current_frame
= PyEval_GetFrame();
3750 if (current_frame
== NULL
)
3753 return current_frame
->f_globals
;
3757 PyEval_GetFrame(void)
3759 PyThreadState
*tstate
= PyThreadState_GET();
3760 return _PyThreadState_GetFrame(tstate
);
3764 PyEval_GetRestricted(void)
3766 PyFrameObject
*current_frame
= PyEval_GetFrame();
3767 return current_frame
== NULL
? 0 : PyFrame_IsRestricted(current_frame
);
3771 PyEval_MergeCompilerFlags(PyCompilerFlags
*cf
)
3773 PyFrameObject
*current_frame
= PyEval_GetFrame();
3774 int result
= cf
->cf_flags
!= 0;
3776 if (current_frame
!= NULL
) {
3777 const int codeflags
= current_frame
->f_code
->co_flags
;
3778 const int compilerflags
= codeflags
& PyCF_MASK
;
3779 if (compilerflags
) {
3781 cf
->cf_flags
|= compilerflags
;
3783 #if 0 /* future keyword */
3784 if (codeflags
& CO_GENERATOR_ALLOWED
) {
3786 cf
->cf_flags
|= CO_GENERATOR_ALLOWED
;
3796 PyObject
*f
= PySys_GetObject("stdout");
3799 if (!PyFile_SoftSpace(f
, 0))
3801 return PyFile_WriteString("\n", f
);
3805 /* External interface to call any callable object.
3806 The arg must be a tuple or NULL. */
3808 #undef PyEval_CallObject
3809 /* for backward compatibility: export this interface */
3812 PyEval_CallObject(PyObject
*func
, PyObject
*arg
)
3814 return PyEval_CallObjectWithKeywords(func
, arg
, (PyObject
*)NULL
);
3816 #define PyEval_CallObject(func,arg) \
3817 PyEval_CallObjectWithKeywords(func, arg, (PyObject *)NULL)
3820 PyEval_CallObjectWithKeywords(PyObject
*func
, PyObject
*arg
, PyObject
*kw
)
3825 arg
= PyTuple_New(0);
3829 else if (!PyTuple_Check(arg
)) {
3830 PyErr_SetString(PyExc_TypeError
,
3831 "argument list must be a tuple");
3837 if (kw
!= NULL
&& !PyDict_Check(kw
)) {
3838 PyErr_SetString(PyExc_TypeError
,
3839 "keyword list must be a dictionary");
3844 result
= PyObject_Call(func
, arg
, kw
);
3850 PyEval_GetFuncName(PyObject
*func
)
3852 if (PyMethod_Check(func
))
3853 return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func
));
3854 else if (PyFunction_Check(func
))
3855 return PyString_AsString(((PyFunctionObject
*)func
)->func_name
);
3856 else if (PyCFunction_Check(func
))
3857 return ((PyCFunctionObject
*)func
)->m_ml
->ml_name
;
3858 else if (PyClass_Check(func
))
3859 return PyString_AsString(((PyClassObject
*)func
)->cl_name
);
3860 else if (PyInstance_Check(func
)) {
3861 return PyString_AsString(
3862 ((PyInstanceObject
*)func
)->in_class
->cl_name
);
3864 return func
->ob_type
->tp_name
;
3869 PyEval_GetFuncDesc(PyObject
*func
)
3871 if (PyMethod_Check(func
))
3873 else if (PyFunction_Check(func
))
3875 else if (PyCFunction_Check(func
))
3877 else if (PyClass_Check(func
))
3878 return " constructor";
3879 else if (PyInstance_Check(func
)) {
3887 err_args(PyObject
*func
, int flags
, int nargs
)
3889 if (flags
& METH_NOARGS
)
3890 PyErr_Format(PyExc_TypeError
,
3891 "%.200s() takes no arguments (%d given)",
3892 ((PyCFunctionObject
*)func
)->m_ml
->ml_name
,
3895 PyErr_Format(PyExc_TypeError
,
3896 "%.200s() takes exactly one argument (%d given)",
3897 ((PyCFunctionObject
*)func
)->m_ml
->ml_name
,
/* Wrap a C function call with PyTrace_C_CALL / C_RETURN / C_EXCEPTION
   profile events when a C profile function is installed.  Expects
   'tstate' and 'func' to be in scope at the expansion site; stores the
   call's result (or NULL on error) into 'x'. */
#define C_TRACE(x, call) \
if (tstate->use_tracing && tstate->c_profilefunc) { \
    if (call_trace(tstate->c_profilefunc, \
        tstate->c_profileobj, \
        tstate->frame, PyTrace_C_CALL, \
        func)) { \
        x = NULL; \
    } \
    else { \
        x = call; \
        if (tstate->c_profilefunc != NULL) { \
            if (x == NULL) { \
                call_trace_protected(tstate->c_profilefunc, \
                    tstate->c_profileobj, \
                    tstate->frame, PyTrace_C_EXCEPTION, \
                    func); \
                /* XXX should pass (type, value, tb) */ \
            } \
            else { \
                if (call_trace(tstate->c_profilefunc, \
                    tstate->c_profileobj, \
                    tstate->frame, PyTrace_C_RETURN, \
                    func)) { \
                    Py_DECREF(x); \
                    x = NULL; \
                } \
            } \
        } \
    } \
} else { \
    x = call; \
    }
3934 call_function(PyObject
***pp_stack
, int oparg
3936 , uint64
* pintr0
, uint64
* pintr1
3940 int na
= oparg
& 0xff;
3941 int nk
= (oparg
>>8) & 0xff;
3942 int n
= na
+ 2 * nk
;
3943 PyObject
**pfunc
= (*pp_stack
) - n
- 1;
3944 PyObject
*func
= *pfunc
;
3947 /* Always dispatch PyCFunction first, because these are
3948 presumed to be the most frequent callable object.
3950 if (PyCFunction_Check(func
) && nk
== 0) {
3951 int flags
= PyCFunction_GET_FLAGS(func
);
3952 PyThreadState
*tstate
= PyThreadState_GET();
3954 PCALL(PCALL_CFUNCTION
);
3955 if (flags
& (METH_NOARGS
| METH_O
)) {
3956 PyCFunction meth
= PyCFunction_GET_FUNCTION(func
);
3957 PyObject
*self
= PyCFunction_GET_SELF(func
);
3958 if (flags
& METH_NOARGS
&& na
== 0) {
3959 C_TRACE(x
, (*meth
)(self
,NULL
));
3961 else if (flags
& METH_O
&& na
== 1) {
3962 PyObject
*arg
= EXT_POP(*pp_stack
);
3963 C_TRACE(x
, (*meth
)(self
,arg
));
3967 err_args(func
, flags
, na
);
3973 callargs
= load_args(pp_stack
, na
);
3974 READ_TIMESTAMP(*pintr0
);
3975 C_TRACE(x
, PyCFunction_Call(func
,callargs
,NULL
));
3976 READ_TIMESTAMP(*pintr1
);
3977 Py_XDECREF(callargs
);
3980 if (PyMethod_Check(func
) && PyMethod_GET_SELF(func
) != NULL
) {
3981 /* optimize access to bound methods */
3982 PyObject
*self
= PyMethod_GET_SELF(func
);
3983 PCALL(PCALL_METHOD
);
3984 PCALL(PCALL_BOUND_METHOD
);
3986 func
= PyMethod_GET_FUNCTION(func
);
3994 READ_TIMESTAMP(*pintr0
);
3995 if (PyFunction_Check(func
))
3996 x
= fast_function(func
, pp_stack
, n
, na
, nk
);
3998 x
= do_call(func
, pp_stack
, na
, nk
);
3999 READ_TIMESTAMP(*pintr1
);
4003 /* Clear the stack of the function object. Also removes
4004 the arguments in case they weren't consumed already
4005 (fast_function() and err_args() leave them on the stack).
4007 while ((*pp_stack
) > pfunc
) {
4008 w
= EXT_POP(*pp_stack
);
4015 /* The fast_function() function optimize calls for which no argument
4016 tuple is necessary; the objects are passed directly from the stack.
4017 For the simplest case -- a function that takes only positional
4018 arguments and is called with only positional arguments -- it
4019 inlines the most primitive frame setup code from
4020 PyEval_EvalCodeEx(), which vastly reduces the checks that must be
4021 done before evaluating the frame.
4025 fast_function(PyObject
*func
, PyObject
***pp_stack
, int n
, int na
, int nk
)
4027 PyCodeObject
*co
= (PyCodeObject
*)PyFunction_GET_CODE(func
);
4028 PyObject
*globals
= PyFunction_GET_GLOBALS(func
);
4029 PyObject
*argdefs
= PyFunction_GET_DEFAULTS(func
);
4030 PyObject
**d
= NULL
;
4033 PCALL(PCALL_FUNCTION
);
4034 PCALL(PCALL_FAST_FUNCTION
);
4035 if (argdefs
== NULL
&& co
->co_argcount
== n
&& nk
==0 &&
4036 co
->co_flags
== (CO_OPTIMIZED
| CO_NEWLOCALS
| CO_NOFREE
)) {
4038 PyObject
*retval
= NULL
;
4039 PyThreadState
*tstate
= PyThreadState_GET();
4040 PyObject
**fastlocals
, **stack
;
4043 PCALL(PCALL_FASTER_FUNCTION
);
4044 assert(globals
!= NULL
);
4045 /* XXX Perhaps we should create a specialized
4046 PyFrame_New() that doesn't take locals, but does
4047 take builtins without sanity checking them.
4049 assert(tstate
!= NULL
);
4050 f
= PyFrame_New(tstate
, co
, globals
, NULL
);
4054 fastlocals
= f
->f_localsplus
;
4055 stack
= (*pp_stack
) - n
;
4057 for (i
= 0; i
< n
; i
++) {
4059 fastlocals
[i
] = *stack
++;
4061 retval
= PyEval_EvalFrameEx(f
,0);
4062 ++tstate
->recursion_depth
;
4064 --tstate
->recursion_depth
;
4067 if (argdefs
!= NULL
) {
4068 d
= &PyTuple_GET_ITEM(argdefs
, 0);
4069 nd
= Py_SIZE(argdefs
);
4071 return PyEval_EvalCodeEx(co
, globals
,
4072 (PyObject
*)NULL
, (*pp_stack
)-n
, na
,
4073 (*pp_stack
)-2*nk
, nk
, d
, nd
,
4074 PyFunction_GET_CLOSURE(func
));
4078 update_keyword_args(PyObject
*orig_kwdict
, int nk
, PyObject
***pp_stack
,
4081 PyObject
*kwdict
= NULL
;
4082 if (orig_kwdict
== NULL
)
4083 kwdict
= PyDict_New();
4085 kwdict
= PyDict_Copy(orig_kwdict
);
4086 Py_DECREF(orig_kwdict
);
4092 PyObject
*value
= EXT_POP(*pp_stack
);
4093 PyObject
*key
= EXT_POP(*pp_stack
);
4094 if (PyDict_GetItem(kwdict
, key
) != NULL
) {
4095 PyErr_Format(PyExc_TypeError
,
4096 "%.200s%s got multiple values "
4097 "for keyword argument '%.200s'",
4098 PyEval_GetFuncName(func
),
4099 PyEval_GetFuncDesc(func
),
4100 PyString_AsString(key
));
4106 err
= PyDict_SetItem(kwdict
, key
, value
);
4118 update_star_args(int nstack
, int nstar
, PyObject
*stararg
,
4119 PyObject
***pp_stack
)
4121 PyObject
*callargs
, *w
;
4123 callargs
= PyTuple_New(nstack
+ nstar
);
4124 if (callargs
== NULL
) {
4129 for (i
= 0; i
< nstar
; i
++) {
4130 PyObject
*a
= PyTuple_GET_ITEM(stararg
, i
);
4132 PyTuple_SET_ITEM(callargs
, nstack
+ i
, a
);
4135 while (--nstack
>= 0) {
4136 w
= EXT_POP(*pp_stack
);
4137 PyTuple_SET_ITEM(callargs
, nstack
, w
);
4143 load_args(PyObject
***pp_stack
, int na
)
4145 PyObject
*args
= PyTuple_New(na
);
4151 w
= EXT_POP(*pp_stack
);
4152 PyTuple_SET_ITEM(args
, na
, w
);
4158 do_call(PyObject
*func
, PyObject
***pp_stack
, int na
, int nk
)
4160 PyObject
*callargs
= NULL
;
4161 PyObject
*kwdict
= NULL
;
4162 PyObject
*result
= NULL
;
4165 kwdict
= update_keyword_args(NULL
, nk
, pp_stack
, func
);
4169 callargs
= load_args(pp_stack
, na
);
4170 if (callargs
== NULL
)
4173 /* At this point, we have to look at the type of func to
4174 update the call stats properly. Do it here so as to avoid
4175 exposing the call stats machinery outside ceval.c
4177 if (PyFunction_Check(func
))
4178 PCALL(PCALL_FUNCTION
);
4179 else if (PyMethod_Check(func
))
4180 PCALL(PCALL_METHOD
);
4181 else if (PyType_Check(func
))
4183 else if (PyCFunction_Check(func
))
4184 PCALL(PCALL_CFUNCTION
);
4188 if (PyCFunction_Check(func
)) {
4189 PyThreadState
*tstate
= PyThreadState_GET();
4190 C_TRACE(result
, PyCFunction_Call(func
, callargs
, kwdict
));
4193 result
= PyObject_Call(func
, callargs
, kwdict
);
4195 Py_XDECREF(callargs
);
4201 ext_do_call(PyObject
*func
, PyObject
***pp_stack
, int flags
, int na
, int nk
)
4204 PyObject
*callargs
= NULL
;
4205 PyObject
*stararg
= NULL
;
4206 PyObject
*kwdict
= NULL
;
4207 PyObject
*result
= NULL
;
4209 if (flags
& CALL_FLAG_KW
) {
4210 kwdict
= EXT_POP(*pp_stack
);
4211 if (!PyDict_Check(kwdict
)) {
4216 if (PyDict_Update(d
, kwdict
) != 0) {
4218 /* PyDict_Update raises attribute
4219 * error (percolated from an attempt
4220 * to get 'keys' attribute) instead of
4221 * a type error if its second argument
4224 if (PyErr_ExceptionMatches(PyExc_AttributeError
)) {
4225 PyErr_Format(PyExc_TypeError
,
4226 "%.200s%.200s argument after ** "
4227 "must be a mapping, not %.200s",
4228 PyEval_GetFuncName(func
),
4229 PyEval_GetFuncDesc(func
),
4230 kwdict
->ob_type
->tp_name
);
4238 if (flags
& CALL_FLAG_VAR
) {
4239 stararg
= EXT_POP(*pp_stack
);
4240 if (!PyTuple_Check(stararg
)) {
4242 t
= PySequence_Tuple(stararg
);
4244 if (PyErr_ExceptionMatches(PyExc_TypeError
)) {
4245 PyErr_Format(PyExc_TypeError
,
4246 "%.200s%.200s argument after * "
4247 "must be a sequence, not %200s",
4248 PyEval_GetFuncName(func
),
4249 PyEval_GetFuncDesc(func
),
4250 stararg
->ob_type
->tp_name
);
4257 nstar
= PyTuple_GET_SIZE(stararg
);
4260 kwdict
= update_keyword_args(kwdict
, nk
, pp_stack
, func
);
4264 callargs
= update_star_args(na
, nstar
, stararg
, pp_stack
);
4265 if (callargs
== NULL
)
4268 /* At this point, we have to look at the type of func to
4269 update the call stats properly. Do it here so as to avoid
4270 exposing the call stats machinery outside ceval.c
4272 if (PyFunction_Check(func
))
4273 PCALL(PCALL_FUNCTION
);
4274 else if (PyMethod_Check(func
))
4275 PCALL(PCALL_METHOD
);
4276 else if (PyType_Check(func
))
4278 else if (PyCFunction_Check(func
))
4279 PCALL(PCALL_CFUNCTION
);
4283 if (PyCFunction_Check(func
)) {
4284 PyThreadState
*tstate
= PyThreadState_GET();
4285 C_TRACE(result
, PyCFunction_Call(func
, callargs
, kwdict
));
4288 result
= PyObject_Call(func
, callargs
, kwdict
);
4290 Py_XDECREF(callargs
);
4292 Py_XDECREF(stararg
);
4296 /* Extract a slice index from a PyInt or PyLong or an object with the
4297 nb_index slot defined, and store in *pi.
4298 Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
4299 and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
4300 Return 0 on error, 1 on success.
4302 /* Note: If v is NULL, return success without storing into *pi. This
4303 is because_PyEval_SliceIndex() is called by apply_slice(), which can be
4304 called by the SLICE opcode with v and/or w equal to NULL.
4307 _PyEval_SliceIndex(PyObject
*v
, Py_ssize_t
*pi
)
4311 if (PyInt_Check(v
)) {
4312 /* XXX(nnorwitz): I think PyInt_AS_LONG is correct,
4313 however, it looks like it should be AsSsize_t.
4314 There should be a comment here explaining why.
4316 x
= PyInt_AS_LONG(v
);
4318 else if (PyIndex_Check(v
)) {
4319 x
= PyNumber_AsSsize_t(v
, NULL
);
4320 if (x
== -1 && PyErr_Occurred())
4324 PyErr_SetString(PyExc_TypeError
,
4325 "slice indices must be integers or "
4326 "None or have an __index__ method");
4335 #define ISINDEX(x) ((x) == NULL || \
4336 PyInt_Check(x) || PyLong_Check(x) || PyIndex_Check(x))
4339 apply_slice(PyObject
*u
, PyObject
*v
, PyObject
*w
) /* return u[v:w] */
4341 PyTypeObject
*tp
= u
->ob_type
;
4342 PySequenceMethods
*sq
= tp
->tp_as_sequence
;
4344 if (sq
&& sq
->sq_slice
&& ISINDEX(v
) && ISINDEX(w
)) {
4345 Py_ssize_t ilow
= 0, ihigh
= PY_SSIZE_T_MAX
;
4346 if (!_PyEval_SliceIndex(v
, &ilow
))
4348 if (!_PyEval_SliceIndex(w
, &ihigh
))
4350 return PySequence_GetSlice(u
, ilow
, ihigh
);
4353 PyObject
*slice
= PySlice_New(v
, w
, NULL
);
4354 if (slice
!= NULL
) {
4355 PyObject
*res
= PyObject_GetItem(u
, slice
);
4365 assign_slice(PyObject
*u
, PyObject
*v
, PyObject
*w
, PyObject
*x
)
4368 PyTypeObject
*tp
= u
->ob_type
;
4369 PySequenceMethods
*sq
= tp
->tp_as_sequence
;
4371 if (sq
&& sq
->sq_ass_slice
&& ISINDEX(v
) && ISINDEX(w
)) {
4372 Py_ssize_t ilow
= 0, ihigh
= PY_SSIZE_T_MAX
;
4373 if (!_PyEval_SliceIndex(v
, &ilow
))
4375 if (!_PyEval_SliceIndex(w
, &ihigh
))
4378 return PySequence_DelSlice(u
, ilow
, ihigh
);
4380 return PySequence_SetSlice(u
, ilow
, ihigh
, x
);
4383 PyObject
*slice
= PySlice_New(v
, w
, NULL
);
4384 if (slice
!= NULL
) {
4387 res
= PyObject_SetItem(u
, slice
, x
);
4389 res
= PyObject_DelItem(u
, slice
);
4398 #define Py3kExceptionClass_Check(x) \
4399 (PyType_Check((x)) && \
4400 PyType_FastSubclass((PyTypeObject*)(x), Py_TPFLAGS_BASE_EXC_SUBCLASS))
4402 #define CANNOT_CATCH_MSG "catching classes that don't inherit from " \
4403 "BaseException is not allowed in 3.x"
4406 cmp_outcome(int op
, register PyObject
*v
, register PyObject
*w
)
4417 res
= PySequence_Contains(w
, v
);
4422 res
= PySequence_Contains(w
, v
);
4427 case PyCmp_EXC_MATCH
:
4428 if (PyTuple_Check(w
)) {
4429 Py_ssize_t i
, length
;
4430 length
= PyTuple_Size(w
);
4431 for (i
= 0; i
< length
; i
+= 1) {
4432 PyObject
*exc
= PyTuple_GET_ITEM(w
, i
);
4433 if (PyString_Check(exc
)) {
4435 ret_val
= PyErr_WarnEx(
4436 PyExc_DeprecationWarning
,
4437 "catching of string "
4438 "exceptions is deprecated", 1);
4442 else if (Py_Py3kWarningFlag
&&
4443 !PyTuple_Check(exc
) &&
4444 !Py3kExceptionClass_Check(exc
))
4447 ret_val
= PyErr_WarnEx(
4448 PyExc_DeprecationWarning
,
4449 CANNOT_CATCH_MSG
, 1);
4456 if (PyString_Check(w
)) {
4458 ret_val
= PyErr_WarnEx(
4459 PyExc_DeprecationWarning
,
4460 "catching of string "
4461 "exceptions is deprecated", 1);
4465 else if (Py_Py3kWarningFlag
&&
4466 !PyTuple_Check(w
) &&
4467 !Py3kExceptionClass_Check(w
))
4470 ret_val
= PyErr_WarnEx(
4471 PyExc_DeprecationWarning
,
4472 CANNOT_CATCH_MSG
, 1);
4477 res
= PyErr_GivenExceptionMatches(v
, w
);
4480 return PyObject_RichCompare(v
, w
, op
);
4482 v
= res
? Py_True
: Py_False
;
4488 import_from(PyObject
*v
, PyObject
*name
)
4492 x
= PyObject_GetAttr(v
, name
);
4493 if (x
== NULL
&& PyErr_ExceptionMatches(PyExc_AttributeError
)) {
4494 PyErr_Format(PyExc_ImportError
,
4495 "cannot import name %.230s",
4496 PyString_AsString(name
));
4502 import_all_from(PyObject
*locals
, PyObject
*v
)
4504 PyObject
*all
= PyObject_GetAttrString(v
, "__all__");
4505 PyObject
*dict
, *name
, *value
;
4506 int skip_leading_underscores
= 0;
4510 if (!PyErr_ExceptionMatches(PyExc_AttributeError
))
4511 return -1; /* Unexpected error */
4513 dict
= PyObject_GetAttrString(v
, "__dict__");
4515 if (!PyErr_ExceptionMatches(PyExc_AttributeError
))
4517 PyErr_SetString(PyExc_ImportError
,
4518 "from-import-* object has no __dict__ and no __all__");
4521 all
= PyMapping_Keys(dict
);
4525 skip_leading_underscores
= 1;
4528 for (pos
= 0, err
= 0; ; pos
++) {
4529 name
= PySequence_GetItem(all
, pos
);
4531 if (!PyErr_ExceptionMatches(PyExc_IndexError
))
4537 if (skip_leading_underscores
&&
4538 PyString_Check(name
) &&
4539 PyString_AS_STRING(name
)[0] == '_')
4544 value
= PyObject_GetAttr(v
, name
);
4547 else if (PyDict_CheckExact(locals
))
4548 err
= PyDict_SetItem(locals
, name
, value
);
4550 err
= PyObject_SetItem(locals
, name
, value
);
4561 build_class(PyObject
*methods
, PyObject
*bases
, PyObject
*name
)
4563 PyObject
*metaclass
= NULL
, *result
, *base
;
4565 if (PyDict_Check(methods
))
4566 metaclass
= PyDict_GetItemString(methods
, "__metaclass__");
4567 if (metaclass
!= NULL
)
4568 Py_INCREF(metaclass
);
4569 else if (PyTuple_Check(bases
) && PyTuple_GET_SIZE(bases
) > 0) {
4570 base
= PyTuple_GET_ITEM(bases
, 0);
4571 metaclass
= PyObject_GetAttrString(base
, "__class__");
4572 if (metaclass
== NULL
) {
4574 metaclass
= (PyObject
*)base
->ob_type
;
4575 Py_INCREF(metaclass
);
4579 PyObject
*g
= PyEval_GetGlobals();
4580 if (g
!= NULL
&& PyDict_Check(g
))
4581 metaclass
= PyDict_GetItemString(g
, "__metaclass__");
4582 if (metaclass
== NULL
)
4583 metaclass
= (PyObject
*) &PyClass_Type
;
4584 Py_INCREF(metaclass
);
4586 result
= PyObject_CallFunctionObjArgs(metaclass
, name
, bases
, methods
,
4588 Py_DECREF(metaclass
);
4589 if (result
== NULL
&& PyErr_ExceptionMatches(PyExc_TypeError
)) {
4590 /* A type error here likely means that the user passed
4591 in a base that was not a class (such the random module
4592 instead of the random.random type). Help them out with
4593 by augmenting the error message with more information.*/
4595 PyObject
*ptype
, *pvalue
, *ptraceback
;
4597 PyErr_Fetch(&ptype
, &pvalue
, &ptraceback
);
4598 if (PyString_Check(pvalue
)) {
4600 newmsg
= PyString_FromFormat(
4601 "Error when calling the metaclass bases\n"
4603 PyString_AS_STRING(pvalue
));
4604 if (newmsg
!= NULL
) {
4609 PyErr_Restore(ptype
, pvalue
, ptraceback
);
4615 exec_statement(PyFrameObject
*f
, PyObject
*prog
, PyObject
*globals
,
4622 if (PyTuple_Check(prog
) && globals
== Py_None
&& locals
== Py_None
&&
4623 ((n
= PyTuple_Size(prog
)) == 2 || n
== 3)) {
4624 /* Backward compatibility hack */
4625 globals
= PyTuple_GetItem(prog
, 1);
4627 locals
= PyTuple_GetItem(prog
, 2);
4628 prog
= PyTuple_GetItem(prog
, 0);
4630 if (globals
== Py_None
) {
4631 globals
= PyEval_GetGlobals();
4632 if (locals
== Py_None
) {
4633 locals
= PyEval_GetLocals();
4636 if (!globals
|| !locals
) {
4637 PyErr_SetString(PyExc_SystemError
,
4638 "globals and locals cannot be NULL");
4642 else if (locals
== Py_None
)
4644 if (!PyString_Check(prog
) &&
4645 #ifdef Py_USING_UNICODE
4646 !PyUnicode_Check(prog
) &&
4648 !PyCode_Check(prog
) &&
4649 !PyFile_Check(prog
)) {
4650 PyErr_SetString(PyExc_TypeError
,
4651 "exec: arg 1 must be a string, file, or code object");
4654 if (!PyDict_Check(globals
)) {
4655 PyErr_SetString(PyExc_TypeError
,
4656 "exec: arg 2 must be a dictionary or None");
4659 if (!PyMapping_Check(locals
)) {
4660 PyErr_SetString(PyExc_TypeError
,
4661 "exec: arg 3 must be a mapping or None");
4664 if (PyDict_GetItemString(globals
, "__builtins__") == NULL
)
4665 PyDict_SetItemString(globals
, "__builtins__", f
->f_builtins
);
4666 if (PyCode_Check(prog
)) {
4667 if (PyCode_GetNumFree((PyCodeObject
*)prog
) > 0) {
4668 PyErr_SetString(PyExc_TypeError
,
4669 "code object passed to exec may not contain free variables");
4672 v
= PyEval_EvalCode((PyCodeObject
*) prog
, globals
, locals
);
4674 else if (PyFile_Check(prog
)) {
4675 FILE *fp
= PyFile_AsFile(prog
);
4676 char *name
= PyString_AsString(PyFile_Name(prog
));
4681 if (PyEval_MergeCompilerFlags(&cf
))
4682 v
= PyRun_FileFlags(fp
, name
, Py_file_input
, globals
,
4685 v
= PyRun_File(fp
, name
, Py_file_input
, globals
,
4689 PyObject
*tmp
= NULL
;
4693 #ifdef Py_USING_UNICODE
4694 if (PyUnicode_Check(prog
)) {
4695 tmp
= PyUnicode_AsUTF8String(prog
);
4699 cf
.cf_flags
|= PyCF_SOURCE_IS_UTF8
;
4702 if (PyString_AsStringAndSize(prog
, &str
, NULL
))
4704 if (PyEval_MergeCompilerFlags(&cf
))
4705 v
= PyRun_StringFlags(str
, Py_file_input
, globals
,
4708 v
= PyRun_String(str
, Py_file_input
, globals
, locals
);
4712 PyFrame_LocalsToFast(f
, 0);
4720 format_exc_check_arg(PyObject
*exc
, char *format_str
, PyObject
*obj
)
4727 obj_str
= PyString_AsString(obj
);
4731 PyErr_Format(exc
, format_str
, obj_str
);
4735 string_concatenate(PyObject
*v
, PyObject
*w
,
4736 PyFrameObject
*f
, unsigned char *next_instr
)
4738 /* This function implements 'variable += expr' when both arguments
4740 Py_ssize_t v_len
= PyString_GET_SIZE(v
);
4741 Py_ssize_t w_len
= PyString_GET_SIZE(w
);
4742 Py_ssize_t new_len
= v_len
+ w_len
;
4744 PyErr_SetString(PyExc_OverflowError
,
4745 "strings are too large to concat");
4749 if (v
->ob_refcnt
== 2) {
4750 /* In the common case, there are 2 references to the value
4751 * stored in 'variable' when the += is performed: one on the
4752 * value stack (in 'v') and one still stored in the
4753 * 'variable'. We try to delete the variable now to reduce
4756 switch (*next_instr
) {
4759 int oparg
= PEEKARG();
4760 PyObject
**fastlocals
= f
->f_localsplus
;
4761 if (GETLOCAL(oparg
) == v
)
4762 SETLOCAL(oparg
, NULL
);
4767 PyObject
**freevars
= (f
->f_localsplus
+
4768 f
->f_code
->co_nlocals
);
4769 PyObject
*c
= freevars
[PEEKARG()];
4770 if (PyCell_GET(c
) == v
)
4771 PyCell_Set(c
, NULL
);
4776 PyObject
*names
= f
->f_code
->co_names
;
4777 PyObject
*name
= GETITEM(names
, PEEKARG());
4778 PyObject
*locals
= f
->f_locals
;
4779 if (PyDict_CheckExact(locals
) &&
4780 PyDict_GetItem(locals
, name
) == v
) {
4781 if (PyDict_DelItem(locals
, name
) != 0) {
4790 if (v
->ob_refcnt
== 1 && !PyString_CHECK_INTERNED(v
)) {
4791 /* Now we own the last reference to 'v', so we can resize it
4794 if (_PyString_Resize(&v
, new_len
) != 0) {
4795 /* XXX if _PyString_Resize() fails, 'v' has been
4796 * deallocated so it cannot be put back into
4797 * 'variable'. The MemoryError is raised when there
4798 * is no value in 'variable', which might (very
4799 * remotely) be a cause of incompatibilities.
4803 /* copy 'w' into the newly allocated area of 'v' */
4804 memcpy(PyString_AS_STRING(v
) + v_len
,
4805 PyString_AS_STRING(w
), w_len
);
4809 /* When in-place resizing is not an option. */
4810 PyString_Concat(&v
, w
);
4815 #ifdef DYNAMIC_EXECUTION_PROFILE
4818 getarray(long a
[256])
4821 PyObject
*l
= PyList_New(256);
4822 if (l
== NULL
) return NULL
;
4823 for (i
= 0; i
< 256; i
++) {
4824 PyObject
*x
= PyInt_FromLong(a
[i
]);
4829 PyList_SetItem(l
, i
, x
);
4831 for (i
= 0; i
< 256; i
++)
4837 _Py_GetDXProfile(PyObject
*self
, PyObject
*args
)
4840 return getarray(dxp
);
4843 PyObject
*l
= PyList_New(257);
4844 if (l
== NULL
) return NULL
;
4845 for (i
= 0; i
< 257; i
++) {
4846 PyObject
*x
= getarray(dxpairs
[i
]);
4851 PyList_SetItem(l
, i
, x
);