/* Execute compiled code */

/* XXX speed up searching for keywords by using a dictionary */

/* enable more aggressive intra-module optimizations, where available */
#define PY_LOCAL_AGGRESSIVE

#include "frameobject.h"
#include "structmember.h"

#define READ_TIMESTAMP(var)

typedef unsigned long long uint64;
#if defined(__ppc__) /* <- Don't know if this is the correct symbol; this
                        section should work for GCC on any PowerPC
                        platform, irrespective of OS.
                        POWER?  Who knows :-) */

#define READ_TIMESTAMP(var) ppc_getcounter(&var)

static void
ppc_getcounter(uint64 *v)
{
    register unsigned long tbu, tb, tbu2;

  loop:
    asm volatile ("mftbu %0" : "=r" (tbu) );
    asm volatile ("mftb  %0" : "=r" (tb)  );
    asm volatile ("mftbu %0" : "=r" (tbu2));
    if (__builtin_expect(tbu != tbu2, 0)) goto loop;

    /* The slightly peculiar way of writing the next lines is
       compiled better by GCC than any other way I tried. */
    ((long*)(v))[0] = tbu;
    ((long*)(v))[1] = tb;
}
#elif defined(__i386__)

/* this is for linux/x86 (and probably any other GCC/x86 combo) */

#define READ_TIMESTAMP(val) \
     __asm__ __volatile__("rdtsc" : "=A" (val))

#elif defined(__x86_64__)

/* for gcc/x86_64, the "A" constraint in DI mode means *either* rax *or* rdx;
   not edx:eax as it does for i386.  Since rdtsc puts its result in edx:eax
   even in 64-bit mode, we need to use "a" and "d" for the lower and upper
   32-bit pieces of the result. */

#define READ_TIMESTAMP(val) \
    __asm__ __volatile__("rdtsc" : \
                         "=a" (((int*)&(val))[0]), "=d" (((int*)&(val))[1]));

#else

#error "Don't know how to implement timestamp counter for this architecture"
void dump_tsc(int opcode, int ticked, uint64 inst0, uint64 inst1,
              uint64 loop0, uint64 loop1, uint64 intr0, uint64 intr1)
{
    uint64 intr, inst, loop;
    PyThreadState *tstate = PyThreadState_Get();
    if (!tstate->interp->tscdump)
        return;
    intr = intr1 - intr0;
    inst = inst1 - inst0 - intr;
    loop = loop1 - loop0 - intr;
    fprintf(stderr, "opcode=%03d t=%d inst=%06lld loop=%06lld\n",
            opcode, ticked, inst, loop);
}
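/* Illustrative sketch (assumption, not part of the original file): the
   pattern the main loop uses with these helpers -- sample the counter on
   both sides of a region and pass the raw readings on to dump_tsc():

       uint64 t0, t1;
       READ_TIMESTAMP(t0);
       ... region being measured ...
       READ_TIMESTAMP(t1);
*/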
/* Turn this on if your compiler chokes on the big switch: */
/* #define CASE_TOO_BIG 1 */

/* For debugging the interpreter: */
#define LLTRACE  1      /* Low-level trace feature */
#define CHECKEXC 1      /* Double-check exception checking */

typedef PyObject *(*callproc)(PyObject *, PyObject *, PyObject *);
/* Forward declarations */
#ifdef WITH_TSC
static PyObject * call_function(PyObject ***, int, uint64 *, uint64 *);
#else
static PyObject * call_function(PyObject ***, int);
#endif
static PyObject * fast_function(PyObject *, PyObject ***, int, int, int);
static PyObject * do_call(PyObject *, PyObject ***, int, int);
static PyObject * ext_do_call(PyObject *, PyObject ***, int, int, int);
static PyObject * update_keyword_args(PyObject *, int, PyObject ***,
                                      PyObject *);
static PyObject * update_star_args(int, int, PyObject *, PyObject ***);
static PyObject * load_args(PyObject ***, int);
#define CALL_FLAG_VAR 1
#define CALL_FLAG_KW 2
static int prtrace(PyObject *, char *);
static int call_trace(Py_tracefunc, PyObject *, PyFrameObject *,
                      int, PyObject *);
static int call_trace_protected(Py_tracefunc, PyObject *,
                                PyFrameObject *, int, PyObject *);
static void call_exc_trace(Py_tracefunc, PyObject *, PyFrameObject *);
static int maybe_call_line_trace(Py_tracefunc, PyObject *,
                                 PyFrameObject *, int *, int *, int *);
static PyObject * apply_slice(PyObject *, PyObject *, PyObject *);
static int assign_slice(PyObject *, PyObject *,
                        PyObject *, PyObject *);
static PyObject * cmp_outcome(int, PyObject *, PyObject *);
static PyObject * import_from(PyObject *, PyObject *);
static int import_all_from(PyObject *, PyObject *);
static PyObject * build_class(PyObject *, PyObject *, PyObject *);
static int exec_statement(PyFrameObject *,
                          PyObject *, PyObject *, PyObject *);
static void set_exc_info(PyThreadState *, PyObject *, PyObject *, PyObject *);
static void reset_exc_info(PyThreadState *);
static void format_exc_check_arg(PyObject *, char *, PyObject *);
static PyObject * string_concatenate(PyObject *, PyObject *,
                                     PyFrameObject *, unsigned char *);
static PyObject * kwd_as_string(PyObject *);
static PyObject * special_lookup(PyObject *, char *, PyObject **);
#define NAME_ERROR_MSG \
    "name '%.200s' is not defined"
#define GLOBAL_NAME_ERROR_MSG \
    "global name '%.200s' is not defined"
#define UNBOUNDLOCAL_ERROR_MSG \
    "local variable '%.200s' referenced before assignment"
#define UNBOUNDFREE_ERROR_MSG \
    "free variable '%.200s' referenced before assignment" \
    " in enclosing scope"
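/* Illustrative note (not part of the original file): the message macros
   above are handed to format_exc_check_arg() together with the offending
   name object, for example, for a missing global:

       format_exc_check_arg(PyExc_NameError, GLOBAL_NAME_ERROR_MSG, w);

   where w is the name that failed to resolve.  This mirrors how the macros
   are used further down in the interpreter loop. */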
/* Dynamic execution profile */
#ifdef DYNAMIC_EXECUTION_PROFILE
#ifdef DXPAIRS
static long dxpairs[257][256];
#define dxp dxpairs[256]
#else
static long dxp[256];
#endif
#endif
/* Function call profile */
#ifdef CALL_PROFILE
#define PCALL_NUM 11
static int pcall[PCALL_NUM];

#define PCALL_ALL 0
#define PCALL_FUNCTION 1
#define PCALL_FAST_FUNCTION 2
#define PCALL_FASTER_FUNCTION 3
#define PCALL_METHOD 4
#define PCALL_BOUND_METHOD 5
#define PCALL_CFUNCTION 6
#define PCALL_TYPE 7
#define PCALL_GENERATOR 8
#define PCALL_OTHER 9
#define PCALL_POP 10

/* Notes about the statistics

   FAST_FUNCTION means no argument tuple needs to be created.
   FASTER_FUNCTION means that the fast-path frame setup code is used.

   If there is a method call where the call can be optimized by changing
   the argument tuple and calling the function directly, it gets recorded
   twice.

   As a result, the relationship among the statistics appears to be
   PCALL_ALL == PCALL_FUNCTION + PCALL_METHOD - PCALL_BOUND_METHOD +
                PCALL_CFUNCTION + PCALL_TYPE + PCALL_GENERATOR + PCALL_OTHER
   PCALL_FUNCTION > PCALL_FAST_FUNCTION > PCALL_FASTER_FUNCTION
   PCALL_METHOD > PCALL_BOUND_METHOD
*/

#define PCALL(POS) pcall[POS]++
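/* Illustrative sketch (assumption, not part of the original file): the
   documented relationship can be spot-checked against the live counters,
   keeping in mind the double counting of optimized method calls noted
   above. */
#if 0
long expected = pcall[PCALL_FUNCTION] + pcall[PCALL_METHOD]
                - pcall[PCALL_BOUND_METHOD] + pcall[PCALL_CFUNCTION]
                + pcall[PCALL_TYPE] + pcall[PCALL_GENERATOR]
                + pcall[PCALL_OTHER];
assert(pcall[PCALL_ALL] == expected);
#endif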
PyObject *
PyEval_GetCallStats(PyObject *self)
{
    return Py_BuildValue("iiiiiiiiiii",
                         pcall[0], pcall[1], pcall[2], pcall[3],
                         pcall[4], pcall[5], pcall[6], pcall[7],
                         pcall[8], pcall[9], pcall[10]);
}
#else
PyObject *
PyEval_GetCallStats(PyObject *self)
{
    Py_INCREF(Py_None);
    return Py_None;
}
#endif
#include "pythread.h"

static PyThread_type_lock interpreter_lock = 0; /* This is the GIL */
static PyThread_type_lock pending_lock = 0; /* for pending calls */
static long main_thread = 0;
int
PyEval_ThreadsInitialized(void)
{
    return interpreter_lock != 0;
}

void
PyEval_InitThreads(void)
{
    if (interpreter_lock)
        return;
    interpreter_lock = PyThread_allocate_lock();
    PyThread_acquire_lock(interpreter_lock, 1);
    main_thread = PyThread_get_thread_ident();
}

void
PyEval_AcquireLock(void)
{
    PyThread_acquire_lock(interpreter_lock, 1);
}

void
PyEval_ReleaseLock(void)
{
    PyThread_release_lock(interpreter_lock);
}

void
PyEval_AcquireThread(PyThreadState *tstate)
{
    if (tstate == NULL)
        Py_FatalError("PyEval_AcquireThread: NULL new thread state");
    /* Check someone has called PyEval_InitThreads() to create the lock */
    assert(interpreter_lock);
    PyThread_acquire_lock(interpreter_lock, 1);
    if (PyThreadState_Swap(tstate) != NULL)
        Py_FatalError(
            "PyEval_AcquireThread: non-NULL old thread state");
}

void
PyEval_ReleaseThread(PyThreadState *tstate)
{
    if (tstate == NULL)
        Py_FatalError("PyEval_ReleaseThread: NULL thread state");
    if (PyThreadState_Swap(NULL) != tstate)
        Py_FatalError("PyEval_ReleaseThread: wrong thread state");
    PyThread_release_lock(interpreter_lock);
}
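/* Illustrative sketch (assumption, not part of the original file): a thread
   created outside Python typically brackets its calls into the interpreter
   with the two functions above, using a thread state it created earlier.
   The helper below is hypothetical. */
#if 0
static void
run_in_python(PyThreadState *tstate)
{
    PyEval_AcquireThread(tstate);    /* take the GIL and swap in tstate */
    PyRun_SimpleString("print 'hello from a foreign thread'");
    PyEval_ReleaseThread(tstate);    /* swap out tstate and drop the GIL */
}
#endif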
/* This function is called from PyOS_AfterFork to ensure that newly
   created child processes don't hold locks referring to threads which
   are not running in the child process.  (This could also be done using
   pthread_atfork mechanism, at least for the pthreads implementation.) */

void
PyEval_ReInitThreads(void)
{
    PyObject *threading, *result;
    PyThreadState *tstate;

    if (!interpreter_lock)
        return;
    /*XXX Can't use PyThread_free_lock here because it does too
      much error-checking.  Doing this cleanly would require
      adding a new function to each thread_*.h.  Instead, just
      create a new lock and waste a little bit of memory */
    interpreter_lock = PyThread_allocate_lock();
    pending_lock = PyThread_allocate_lock();
    PyThread_acquire_lock(interpreter_lock, 1);
    main_thread = PyThread_get_thread_ident();

    /* Update the threading module with the new state. */
    tstate = PyThreadState_GET();
    threading = PyMapping_GetItemString(tstate->interp->modules,
                                        "threading");
    if (threading == NULL) {
        /* threading not imported */
        PyErr_Clear();
        return;
    }
    result = PyObject_CallMethod(threading, "_after_fork", NULL);
    if (result == NULL)
        PyErr_WriteUnraisable(threading);
    else
        Py_DECREF(result);
    Py_DECREF(threading);
}
/* Functions save_thread and restore_thread are always defined so
   dynamically loaded modules needn't be compiled separately for use
   with and without threads: */

PyThreadState *
PyEval_SaveThread(void)
{
    PyThreadState *tstate = PyThreadState_Swap(NULL);
    if (tstate == NULL)
        Py_FatalError("PyEval_SaveThread: NULL tstate");
    if (interpreter_lock)
        PyThread_release_lock(interpreter_lock);
    return tstate;
}

void
PyEval_RestoreThread(PyThreadState *tstate)
{
    if (tstate == NULL)
        Py_FatalError("PyEval_RestoreThread: NULL tstate");
    if (interpreter_lock) {
        PyThread_acquire_lock(interpreter_lock, 1);
    }
    PyThreadState_Swap(tstate);
}
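/* Illustrative note (not part of the original file): extension code normally
   reaches the two functions above through the standard macros, releasing the
   GIL around a blocking call:

       Py_BEGIN_ALLOW_THREADS        -- expands to PyEval_SaveThread()
       ... blocking I/O, no Python API calls here ...
       Py_END_ALLOW_THREADS          -- expands to PyEval_RestoreThread()
*/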
/* Mechanism whereby asynchronously executing callbacks (e.g. UNIX
   signal handlers or Mac I/O completion routines) can schedule calls
   to a function to be called synchronously.
   The synchronous function is called with one void* argument.
   It should return 0 for success or -1 for failure -- failure should
   be accompanied by an exception.

   If registry succeeds, the registry function returns 0; if it fails
   (e.g. due to too many pending calls) it returns -1 (without setting
   an exception condition).

   Note that because registry may occur from within signal handlers,
   or other asynchronous events, calling malloc() is unsafe!

   Any thread can schedule pending calls, but only the main thread
   will execute them.
   There is no facility to schedule calls to a particular thread, but
   that should be easy to change, should that ever be required.  In
   that case, the static variables here should go into the python
   threadstate.
*/

#ifdef WITH_THREAD

/* The WITH_THREAD implementation is thread-safe.  It allows
   scheduling to be made from any thread, and even from an executing
   callback. */

#define NPENDINGCALLS 32
static struct {
    int (*func)(void *);
    void *arg;
} pendingcalls[NPENDINGCALLS];
static int pendingfirst = 0;
static int pendinglast = 0;
static volatile int pendingcalls_to_do = 1; /* trigger initialization of lock */
static char pendingbusy = 0;
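/* Illustrative sketch (assumption, not part of the original file): a client
   of this mechanism registers a small callback from a signal handler or
   another thread and lets the main loop run it later.  The helper name and
   argument are hypothetical. */
#if 0
static int
report_progress(void *arg)
{
    /* Runs later, synchronously, in the main thread's eval loop. */
    printf("progress: %ld\n", (long)arg);
    return 0;               /* 0 on success, -1 with an exception set */
}

static void
schedule_progress_report(void)
{
    if (Py_AddPendingCall(report_progress, (void *)42) < 0) {
        /* Queue was full; the call was not scheduled. */
    }
}
#endif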
int
Py_AddPendingCall(int (*func)(void *), void *arg)
{
    int i, j, result = 0;
    PyThread_type_lock lock = pending_lock;

    /* try a few times for the lock.  Since this mechanism is used
     * for signal handling (on the main thread), there is a (slim)
     * chance that a signal is delivered on the same thread while we
     * hold the lock during the Py_MakePendingCalls() function.
     * This avoids a deadlock in that case.
     * Note that signals can be delivered on any thread.  In particular,
     * on Windows, a SIGINT is delivered on a system-created worker
     * thread.
     * We also check for lock being NULL, in the unlikely case that
     * this function is called before any bytecode evaluation takes place.
     */
    if (lock != NULL) {
        for (i = 0; i < 100; i++) {
            if (PyThread_acquire_lock(lock, NOWAIT_LOCK))
                break;
        }
        if (i == 100)
            return -1;
    }

    i = pendinglast;
    j = (i + 1) % NPENDINGCALLS;
    if (j == pendingfirst) {
        result = -1; /* Queue full */
    }
    else {
        pendingcalls[i].func = func;
        pendingcalls[i].arg = arg;
        pendinglast = j;
    }
    /* signal main loop */
    _Py_Ticker = 0;
    pendingcalls_to_do = 1;
    if (lock != NULL)
        PyThread_release_lock(lock);
    return result;
}
int
Py_MakePendingCalls(void)
{
    int i;
    int r = 0;

    if (!pending_lock) {
        /* initial allocation of the lock */
        pending_lock = PyThread_allocate_lock();
        if (pending_lock == NULL)
            return -1;
    }

    /* only service pending calls on main thread */
    if (main_thread && PyThread_get_thread_ident() != main_thread)
        return 0;
    /* don't perform recursive pending calls */
    if (pendingbusy)
        return 0;
    pendingbusy = 1;
    /* perform a bounded number of calls, in case of recursion */
    for (i = 0; i < NPENDINGCALLS; i++) {
        int j;
        int (*func)(void *);
        void *arg = NULL;

        /* pop one item off the queue while holding the lock */
        PyThread_acquire_lock(pending_lock, WAIT_LOCK);
        j = pendingfirst;
        if (j == pendinglast) {
            func = NULL; /* Queue empty */
        }
        else {
            func = pendingcalls[j].func;
            arg = pendingcalls[j].arg;
            pendingfirst = (j + 1) % NPENDINGCALLS;
        }
        pendingcalls_to_do = pendingfirst != pendinglast;
        PyThread_release_lock(pending_lock);
        /* having released the lock, perform the callback */
        if (func == NULL)
            break;
        r = func(arg);
        if (r)
            break;
    }
    pendingbusy = 0;
    return r;
}
#else /* if ! defined WITH_THREAD */

/*
   WARNING!  ASYNCHRONOUSLY EXECUTING CODE!
   This code is used for signal handling in python that isn't built
   with WITH_THREAD.
   Don't use this implementation when Py_AddPendingCalls() can happen
   on a different thread!

   There are two possible race conditions:
   (1) nested asynchronous calls to Py_AddPendingCall()
   (2) AddPendingCall() calls made while pending calls are being processed.

   (1) is very unlikely because typically signal delivery
   is blocked during signal handling.  So it should be impossible.
   (2) is a real possibility.
   The current code is safe against (2), but not against (1).
   The safety against (2) is derived from the fact that only one
   thread is present, interrupted by signals, and that the critical
   section is protected with the "busy" variable.  On Windows, which
   delivers SIGINT on a system thread, this does not hold and therefore
   Windows really shouldn't use this version.
   The two threads could theoretically wiggle around the "busy" variable.
*/

#define NPENDINGCALLS 32
static struct {
    int (*func)(void *);
    void *arg;
} pendingcalls[NPENDINGCALLS];
static volatile int pendingfirst = 0;
static volatile int pendinglast = 0;
static volatile int pendingcalls_to_do = 0;
int
Py_AddPendingCall(int (*func)(void *), void *arg)
{
    static volatile int busy = 0;
    int i, j;
    /* XXX Begin critical section */
    if (busy)
        return -1;
    busy = 1;
    i = pendinglast;
    j = (i + 1) % NPENDINGCALLS;
    if (j == pendingfirst) {
        busy = 0;
        return -1; /* Queue full */
    }
    pendingcalls[i].func = func;
    pendingcalls[i].arg = arg;
    pendinglast = j;

    _Py_Ticker = 0;
    pendingcalls_to_do = 1; /* Signal main loop */
    busy = 0;
    /* XXX End critical section */
    return 0;
}
int
Py_MakePendingCalls(void)
{
    static int busy = 0;
    if (busy)
        return 0;
    busy = 1;
    pendingcalls_to_do = 0;
    for (;;) {
        int i;
        int (*func)(void *);
        void *arg;
        i = pendingfirst;
        if (i == pendinglast)
            break; /* Queue empty */
        func = pendingcalls[i].func;
        arg = pendingcalls[i].arg;
        pendingfirst = (i + 1) % NPENDINGCALLS;
        if (func(arg)) {
            busy = 0;
            pendingcalls_to_do = 1; /* We're not done yet */
            return -1;
        }
    }
    busy = 0;
    return 0;
}

#endif /* WITH_THREAD */
/* The interpreter's recursion limit */

#ifndef Py_DEFAULT_RECURSION_LIMIT
#define Py_DEFAULT_RECURSION_LIMIT 1000
#endif
static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT;
int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT;

int
Py_GetRecursionLimit(void)
{
    return recursion_limit;
}

void
Py_SetRecursionLimit(int new_limit)
{
    recursion_limit = new_limit;
    _Py_CheckRecursionLimit = recursion_limit;
}

/* the macro Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall()
   if the recursion_depth reaches _Py_CheckRecursionLimit.
   If USE_STACKCHECK, the macro decrements _Py_CheckRecursionLimit
   to guarantee that _Py_CheckRecursiveCall() is regularly called.
   Without USE_STACKCHECK, there is no need for this. */
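/* Illustrative sketch (assumption, not part of the original file): C code
   that can recurse through Python objects guards itself the same way the
   interpreter does.  The helper below is hypothetical. */
#if 0
static PyObject *
deep_repr(PyObject *obj)
{
    PyObject *result;
    if (Py_EnterRecursiveCall(" while repr-ing an object"))
        return NULL;                 /* RuntimeError already set */
    result = PyObject_Repr(obj);     /* may recurse back into this code */
    Py_LeaveRecursiveCall();
    return result;
}
#endif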
int
_Py_CheckRecursiveCall(char *where)
{
    PyThreadState *tstate = PyThreadState_GET();

#ifdef USE_STACKCHECK
    if (PyOS_CheckStack()) {
        --tstate->recursion_depth;
        PyErr_SetString(PyExc_MemoryError, "Stack overflow");
        return -1;
    }
#endif
    if (tstate->recursion_depth > recursion_limit) {
        --tstate->recursion_depth;
        PyErr_Format(PyExc_RuntimeError,
                     "maximum recursion depth exceeded%s",
                     where);
        return -1;
    }
    _Py_CheckRecursionLimit = recursion_limit;
    return 0;
}
/* Status code for main loop (reason for stack unwind) */
enum why_code {
    WHY_NOT =       0x0001, /* No error */
    WHY_EXCEPTION = 0x0002, /* Exception occurred */
    WHY_RERAISE =   0x0004, /* Exception re-raised by 'finally' */
    WHY_RETURN =    0x0008, /* 'return' statement */
    WHY_BREAK =     0x0010, /* 'break' statement */
    WHY_CONTINUE =  0x0020, /* 'continue' statement */
    WHY_YIELD =     0x0040  /* 'yield' operator */
};

static enum why_code do_raise(PyObject *, PyObject *, PyObject *);
static int unpack_iterable(PyObject *, int, PyObject **);
/* Records whether tracing is on for any thread.  Counts the number of
   threads for which tstate->c_tracefunc is non-NULL, so if the value
   is 0, we know we don't have to check this thread's c_tracefunc.
   This speeds up the if statement in PyEval_EvalFrameEx() after
   fast_next_opcode. */
static int _Py_TracingPossible = 0;

/* for manipulating the thread switch and periodic "stuff" - used to be
   per thread, now just a pair o' globals */
int _Py_CheckInterval = 100;
volatile int _Py_Ticker = 0; /* so that we hit a "tick" first thing */
PyObject *
PyEval_EvalCode(PyCodeObject *co, PyObject *globals, PyObject *locals)
{
    return PyEval_EvalCodeEx(co,
                             globals, locals,
                             (PyObject **)NULL, 0,
                             (PyObject **)NULL, 0,
                             (PyObject **)NULL, 0,
                             NULL);
}
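/* Illustrative sketch (assumption, not part of the original file): the usual
   way to reach PyEval_EvalCode from C is to compile a source string first
   and evaluate it against a globals dict; the helper name and source are
   hypothetical. */
#if 0
static PyObject *
eval_source_sketch(void)
{
    PyObject *globals = PyDict_New();
    PyObject *code, *result = NULL;
    if (globals == NULL)
        return NULL;
    PyDict_SetItemString(globals, "__builtins__", PyEval_GetBuiltins());
    code = Py_CompileString("x = 6 * 7", "<sketch>", Py_file_input);
    if (code != NULL) {
        result = PyEval_EvalCode((PyCodeObject *)code, globals, globals);
        Py_DECREF(code);
    }
    Py_DECREF(globals);
    return result;
}
#endif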
/* Interpreter main loop */

PyObject *
PyEval_EvalFrame(PyFrameObject *f) {
    /* This is for backward compatibility with extension modules that
       used this API; core interpreter code should call
       PyEval_EvalFrameEx() */
    return PyEval_EvalFrameEx(f, 0);
}

PyObject *
PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
{
    register PyObject **stack_pointer;  /* Next free slot in value stack */
    register unsigned char *next_instr;
    register int opcode;        /* Current opcode */
    register int oparg;         /* Current opcode argument, if any */
    register enum why_code why; /* Reason for block stack unwind */
    register int err;           /* Error status -- nonzero if error */
    register PyObject *x;       /* Result object -- NULL if error */
    register PyObject *v;       /* Temporary objects popped off stack */
    register PyObject *w;
    register PyObject *u;
    register PyObject *t;
    register PyObject *stream = NULL;    /* for PRINT opcodes */
    register PyObject **fastlocals, **freevars;
    PyObject *retval = NULL;            /* Return value */
    PyThreadState *tstate = PyThreadState_GET();

    /* when tracing we set things up so that

           not (instr_lb <= current_bytecode_offset < instr_ub)

       is true when the line being executed has changed.  The
       initial values are such as to make this false the first
       time it is tested. */
    int instr_ub = -1, instr_lb = 0, instr_prev = -1;

    unsigned char *first_instr;
#if defined(Py_DEBUG) || defined(LLTRACE)
    /* Make it easier to find out where we are with a debugger */
    char *filename;
#endif

/* Tuple access macros */

#ifndef Py_DEBUG
#define GETITEM(v, i) PyTuple_GET_ITEM((PyTupleObject *)(v), (i))
#else
#define GETITEM(v, i) PyTuple_GetItem((v), (i))
#endif
#ifdef WITH_TSC
/* Use Pentium timestamp counter to mark certain events:
   inst0 -- beginning of switch statement for opcode dispatch
   inst1 -- end of switch statement (may be skipped)
   loop0 -- the top of the mainloop
   loop1 -- place where control returns again to top of mainloop
   intr1 -- beginning of long interruption
   intr2 -- end of long interruption

   Many opcodes call out to helper C functions.  In some cases, the
   time in those functions should be counted towards the time for the
   opcode, but not in all cases.  For example, a CALL_FUNCTION opcode
   calls another Python function; there's no point in charging all the
   bytecode executed by the called function to the caller.

   It's hard to make a useful judgement statically.  In the presence
   of operator overloading, it's impossible to tell if a call will
   execute new Python code or not.

   It's a case-by-case judgement.  I'll use intr1 for the following
   cases:

   CALL_FUNCTION (and friends)

 */
    uint64 inst0, inst1, loop0, loop1, intr0 = 0, intr1 = 0;

    READ_TIMESTAMP(inst0);
    READ_TIMESTAMP(inst1);
    READ_TIMESTAMP(loop0);
    READ_TIMESTAMP(loop1);

    /* shut up the compiler */
    opcode = 0;
#endif

/* Code access macros */

#define INSTR_OFFSET()  ((int)(next_instr - first_instr))
#define NEXTOP()        (*next_instr++)
#define NEXTARG()       (next_instr += 2, (next_instr[-1]<<8) + next_instr[-2])
#define PEEKARG()       ((next_instr[2]<<8) + next_instr[1])
#define JUMPTO(x)       (next_instr = first_instr + (x))
#define JUMPBY(x)       (next_instr += (x))
/* OpCode prediction macros
    Some opcodes tend to come in pairs thus making it possible to
    predict the second code when the first is run.  For example,
    GET_ITER is often followed by FOR_ITER.  And FOR_ITER is often
    followed by STORE_FAST or UNPACK_SEQUENCE.

    Verifying the prediction costs a single high-speed test of a register
    variable against a constant.  If the pairing was good, then the
    processor's own internal branch prediction has a high likelihood of
    success, resulting in a nearly zero-overhead transition to the
    next opcode.  A successful prediction saves a trip through the eval-loop
    including its two unpredictable branches, the HAS_ARG test and the
    switch-case.  Combined with the processor's internal branch prediction,
    a successful PREDICT has the effect of making the two opcodes run as if
    they were a single new opcode with the bodies combined.

    If collecting opcode statistics, your choices are to either keep the
    predictions turned-on and interpret the results as if some opcodes
    had been combined or turn-off predictions so that the opcode frequency
    counter updates for both opcodes.
*/

#ifdef DYNAMIC_EXECUTION_PROFILE
#define PREDICT(op)             if (0) goto PRED_##op
#else
#define PREDICT(op)             if (*next_instr == op) goto PRED_##op
#endif

#define PREDICTED(op)           PRED_##op: next_instr++
#define PREDICTED_WITH_ARG(op)  PRED_##op: oparg = PEEKARG(); next_instr += 3
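/* Illustrative sketch (assumption, not part of the original file): how a
   predicted opcode pair is wired together with the macros above.  The case
   bodies are elided; only the control flow is shown.

       case GET_ITER:
           ...
           PREDICT(FOR_ITER);      -- jump straight to PRED_FOR_ITER if it is next
           continue;

       PREDICTED_WITH_ARG(FOR_ITER);
       case FOR_ITER:
           ...
*/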
/* Stack manipulation macros */

/* The stack can grow at most MAXINT deep, as co_nlocals and
   co_stacksize are ints. */
#define STACK_LEVEL()     ((int)(stack_pointer - f->f_valuestack))
#define EMPTY()           (STACK_LEVEL() == 0)
#define TOP()             (stack_pointer[-1])
#define SECOND()          (stack_pointer[-2])
#define THIRD()           (stack_pointer[-3])
#define FOURTH()          (stack_pointer[-4])
#define PEEK(n)           (stack_pointer[-(n)])
#define SET_TOP(v)        (stack_pointer[-1] = (v))
#define SET_SECOND(v)     (stack_pointer[-2] = (v))
#define SET_THIRD(v)      (stack_pointer[-3] = (v))
#define SET_FOURTH(v)     (stack_pointer[-4] = (v))
#define SET_VALUE(n, v)   (stack_pointer[-(n)] = (v))
#define BASIC_STACKADJ(n) (stack_pointer += n)
#define BASIC_PUSH(v)     (*stack_pointer++ = (v))
#define BASIC_POP()       (*--stack_pointer)

#ifdef LLTRACE
#define PUSH(v)         { (void)(BASIC_PUSH(v), \
                          lltrace && prtrace(TOP(), "push")); \
                          assert(STACK_LEVEL() <= co->co_stacksize); }
#define POP()           ((void)(lltrace && prtrace(TOP(), "pop")), \
                         BASIC_POP())
#define STACKADJ(n)     { (void)(BASIC_STACKADJ(n), \
                          lltrace && prtrace(TOP(), "stackadj")); \
                          assert(STACK_LEVEL() <= co->co_stacksize); }
#define EXT_POP(STACK_POINTER) ((void)(lltrace && \
                                prtrace((STACK_POINTER)[-1], "ext_pop")), \
                                *--(STACK_POINTER))
#else
#define PUSH(v)                BASIC_PUSH(v)
#define POP()                  BASIC_POP()
#define STACKADJ(n)            BASIC_STACKADJ(n)
#define EXT_POP(STACK_POINTER) (*--(STACK_POINTER))
#endif

/* Local variable macros */

#define GETLOCAL(i)     (fastlocals[i])

/* The SETLOCAL() macro must not DECREF the local variable in-place and
   then store the new value; it must copy the old value to a temporary
   value, then store the new value, and then DECREF the temporary value.
   This is because it is possible that during the DECREF the frame is
   accessed by other code (e.g. a __del__ method or gc.collect()) and the
   variable would be pointing to already-freed memory. */
#define SETLOCAL(i, value)      do { PyObject *tmp = GETLOCAL(i); \
                                     GETLOCAL(i) = value; \
                                     Py_XDECREF(tmp); } while (0)
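/* Illustrative note (not part of the original file): what the comment above
   is guarding against.  A naive macro such as

       #define UNSAFE_SETLOCAL(i, value) \
           do { Py_XDECREF(GETLOCAL(i)); GETLOCAL(i) = value; } while (0)

   may run a __del__ method inside the Py_XDECREF while the slot still holds
   the old, now-freed pointer; any code that inspects the frame at that point
   sees dangling memory.  SETLOCAL() stores the new value first and only then
   drops the old reference, so the slot is never observed in a freed state. */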
    if (Py_EnterRecursiveCall(""))
        return NULL;

    if (tstate->use_tracing) {
        if (tstate->c_tracefunc != NULL) {
            /* tstate->c_tracefunc, if defined, is a
               function that will be called on *every* entry
               to a code block.  Its return value, if not
               None, is a function that will be called at
               the start of each executed line of code.
               (Actually, the function must return itself
               in order to continue tracing.)  The trace
               functions are called with three arguments:
               a pointer to the current frame, a string
               indicating why the function is called, and
               an argument which depends on the situation.
               The global trace function is also called
               whenever an exception is detected. */
            if (call_trace_protected(tstate->c_tracefunc,
                                     tstate->c_traceobj,
                                     f, PyTrace_CALL, Py_None)) {
                /* Trace function raised an error */
                goto exit_eval_frame;
            }
        }
        if (tstate->c_profilefunc != NULL) {
            /* Similar for c_profilefunc, except it needn't
               return itself and isn't called for "line" events */
            if (call_trace_protected(tstate->c_profilefunc,
                                     tstate->c_profileobj,
                                     f, PyTrace_CALL, Py_None)) {
                /* Profile function raised an error */
                goto exit_eval_frame;
            }
        }
    }
    names = co->co_names;
    consts = co->co_consts;
    fastlocals = f->f_localsplus;
    freevars = f->f_localsplus + co->co_nlocals;
    first_instr = (unsigned char*) PyString_AS_STRING(co->co_code);
    /* An explanation is in order for the next line.

       f->f_lasti now refers to the index of the last instruction
       executed.  You might think this was obvious from the name, but
       this wasn't always true before 2.3!  PyFrame_New now sets
       f->f_lasti to -1 (i.e. the index *before* the first instruction)
       and YIELD_VALUE doesn't fiddle with f_lasti any more.  So this
       does work.  Promise.

       When the PREDICT() macros are enabled, some opcode pairs follow in
       direct succession without updating f->f_lasti.  A successful
       prediction effectively links the two codes together as if they
       were a single new opcode; accordingly, f->f_lasti will point to
       the first code in the pair (for instance, GET_ITER followed by
       FOR_ITER is effectively a single opcode and f->f_lasti will point
       to the beginning of the combined pair.) */
    next_instr = first_instr + f->f_lasti + 1;
    stack_pointer = f->f_stacktop;
    assert(stack_pointer != NULL);
    f->f_stacktop = NULL;       /* remains NULL unless yield suspends frame */

#ifdef LLTRACE
    lltrace = PyDict_GetItemString(f->f_globals, "__lltrace__") != NULL;
#endif
#if defined(Py_DEBUG) || defined(LLTRACE)
    filename = PyString_AsString(co->co_filename);
#endif

    x = Py_None;        /* Not a reference, just anything non-NULL */
952 if (throwflag
) { /* support for generator.throw() */
960 /* Almost surely, the opcode executed a break
961 or a continue, preventing inst1 from being set
962 on the way out of the loop.
964 READ_TIMESTAMP(inst1
);
967 dump_tsc(opcode
, ticked
, inst0
, inst1
, loop0
, loop1
,
973 READ_TIMESTAMP(loop0
);
975 assert(stack_pointer
>= f
->f_valuestack
); /* else underflow */
976 assert(STACK_LEVEL() <= co
->co_stacksize
); /* else overflow */
        /* Do periodic things.  Doing this every time through
           the loop would add too much overhead, so we do it
           only every Nth instruction.  We also do it if
           ``pendingcalls_to_do'' is set, i.e. when an asynchronous
           event needs attention (e.g. a signal handler or
           async I/O handler); see Py_AddPendingCall() and
           Py_MakePendingCalls() above. */
986 if (--_Py_Ticker
< 0) {
987 if (*next_instr
== SETUP_FINALLY
) {
988 /* Make the last opcode before
989 a try: finally: block uninterruptable. */
990 goto fast_next_opcode
;
992 _Py_Ticker
= _Py_CheckInterval
;
993 tstate
->tick_counter
++;
997 if (pendingcalls_to_do
) {
998 if (Py_MakePendingCalls() < 0) {
1002 if (pendingcalls_to_do
)
1003 /* MakePendingCalls() didn't succeed.
1004 Force early re-execution of this
1005 "periodic" code, possibly after
1010 if (interpreter_lock
) {
1011 /* Give another thread a chance */
1013 if (PyThreadState_Swap(NULL
) != tstate
)
1014 Py_FatalError("ceval: tstate mix-up");
1015 PyThread_release_lock(interpreter_lock
);
1017 /* Other threads may run now */
1019 PyThread_acquire_lock(interpreter_lock
, 1);
1020 if (PyThreadState_Swap(tstate
) != NULL
)
1021 Py_FatalError("ceval: orphan tstate");
1023 /* Check for thread interrupts */
1025 if (tstate
->async_exc
!= NULL
) {
1026 x
= tstate
->async_exc
;
1027 tstate
->async_exc
= NULL
;
1030 why
= WHY_EXCEPTION
;
1038 f
->f_lasti
= INSTR_OFFSET();
1040 /* line-by-line tracing support */
1042 if (_Py_TracingPossible
&&
1043 tstate
->c_tracefunc
!= NULL
&& !tstate
->tracing
) {
1044 /* see maybe_call_line_trace
1045 for expository comments */
1046 f
->f_stacktop
= stack_pointer
;
1048 err
= maybe_call_line_trace(tstate
->c_tracefunc
,
1050 f
, &instr_lb
, &instr_ub
,
1052 /* Reload possibly changed frame fields */
1054 if (f
->f_stacktop
!= NULL
) {
1055 stack_pointer
= f
->f_stacktop
;
1056 f
->f_stacktop
= NULL
;
1059 /* trace function raised an exception */
1064 /* Extract opcode and argument */
1067 oparg
= 0; /* allows oparg to be stored in a register because
1068 it doesn't have to be remembered across a full loop */
1069 if (HAS_ARG(opcode
))
1072 #ifdef DYNAMIC_EXECUTION_PROFILE
1074 dxpairs
[lastopcode
][opcode
]++;
1075 lastopcode
= opcode
;
1081 /* Instruction tracing */
1084 if (HAS_ARG(opcode
)) {
1085 printf("%d: %d, %d\n",
1086 f
->f_lasti
, opcode
, oparg
);
1090 f
->f_lasti
, opcode
);
1095 /* Main switch on opcode */
1096 READ_TIMESTAMP(inst0
);
        /* It is essential that any operation that fails sets either
           x to NULL, err to nonzero, or why to anything but WHY_NOT,
           and that no operation that succeeds does this! */
1105 /* case STOP_CODE: this is an error! */
1108 goto fast_next_opcode
;
1111 x
= GETLOCAL(oparg
);
1115 goto fast_next_opcode
;
1117 format_exc_check_arg(PyExc_UnboundLocalError
,
1118 UNBOUNDLOCAL_ERROR_MSG
,
1119 PyTuple_GetItem(co
->co_varnames
, oparg
));
1123 x
= GETITEM(consts
, oparg
);
1126 goto fast_next_opcode
;
1128 PREDICTED_WITH_ARG(STORE_FAST
);
1132 goto fast_next_opcode
;
1137 goto fast_next_opcode
;
1144 goto fast_next_opcode
;
1153 goto fast_next_opcode
;
1164 goto fast_next_opcode
;
1170 goto fast_next_opcode
;
1181 goto fast_next_opcode
;
1182 } else if (oparg
== 3) {
1193 goto fast_next_opcode
;
1195 Py_FatalError("invalid argument to DUP_TOPX"
1196 " (bytecode corruption?)");
1197 /* Never returns, so don't bother to set why. */
1200 case UNARY_POSITIVE
:
1202 x
= PyNumber_Positive(v
);
1205 if (x
!= NULL
) continue;
1208 case UNARY_NEGATIVE
:
1210 x
= PyNumber_Negative(v
);
1213 if (x
!= NULL
) continue;
1218 err
= PyObject_IsTrue(v
);
1226 Py_INCREF(Py_False
);
1236 x
= PyObject_Repr(v
);
1239 if (x
!= NULL
) continue;
1244 x
= PyNumber_Invert(v
);
1247 if (x
!= NULL
) continue;
1253 x
= PyNumber_Power(v
, w
, Py_None
);
1257 if (x
!= NULL
) continue;
1260 case BINARY_MULTIPLY
:
1263 x
= PyNumber_Multiply(v
, w
);
1267 if (x
!= NULL
) continue;
1271 if (!_Py_QnewFlag
) {
1274 x
= PyNumber_Divide(v
, w
);
1278 if (x
!= NULL
) continue;
1281 /* -Qnew is in effect: fall through to
1282 BINARY_TRUE_DIVIDE */
1283 case BINARY_TRUE_DIVIDE
:
1286 x
= PyNumber_TrueDivide(v
, w
);
1290 if (x
!= NULL
) continue;
1293 case BINARY_FLOOR_DIVIDE
:
1296 x
= PyNumber_FloorDivide(v
, w
);
1300 if (x
!= NULL
) continue;
1306 if (PyString_CheckExact(v
))
1307 x
= PyString_Format(v
, w
);
1309 x
= PyNumber_Remainder(v
, w
);
1313 if (x
!= NULL
) continue;
1319 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1320 /* INLINE: int + int */
1321 register long a
, b
, i
;
1322 a
= PyInt_AS_LONG(v
);
1323 b
= PyInt_AS_LONG(w
);
1324 /* cast to avoid undefined behaviour
1326 i
= (long)((unsigned long)a
+ b
);
1327 if ((i
^a
) < 0 && (i
^b
) < 0)
1329 x
= PyInt_FromLong(i
);
1331 else if (PyString_CheckExact(v
) &&
1332 PyString_CheckExact(w
)) {
1333 x
= string_concatenate(v
, w
, f
, next_instr
);
1334 /* string_concatenate consumed the ref to v */
1335 goto skip_decref_vx
;
1339 x
= PyNumber_Add(v
, w
);
1345 if (x
!= NULL
) continue;
1348 case BINARY_SUBTRACT
:
1351 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1352 /* INLINE: int - int */
1353 register long a
, b
, i
;
1354 a
= PyInt_AS_LONG(v
);
1355 b
= PyInt_AS_LONG(w
);
1356 /* cast to avoid undefined behaviour
1358 i
= (long)((unsigned long)a
- b
);
1359 if ((i
^a
) < 0 && (i
^~b
) < 0)
1361 x
= PyInt_FromLong(i
);
1365 x
= PyNumber_Subtract(v
, w
);
1370 if (x
!= NULL
) continue;
1376 if (PyList_CheckExact(v
) && PyInt_CheckExact(w
)) {
1377 /* INLINE: list[int] */
1378 Py_ssize_t i
= PyInt_AsSsize_t(w
);
1380 i
+= PyList_GET_SIZE(v
);
1381 if (i
>= 0 && i
< PyList_GET_SIZE(v
)) {
1382 x
= PyList_GET_ITEM(v
, i
);
1390 x
= PyObject_GetItem(v
, w
);
1394 if (x
!= NULL
) continue;
1400 x
= PyNumber_Lshift(v
, w
);
1404 if (x
!= NULL
) continue;
1410 x
= PyNumber_Rshift(v
, w
);
1414 if (x
!= NULL
) continue;
1420 x
= PyNumber_And(v
, w
);
1424 if (x
!= NULL
) continue;
1430 x
= PyNumber_Xor(v
, w
);
1434 if (x
!= NULL
) continue;
1440 x
= PyNumber_Or(v
, w
);
1444 if (x
!= NULL
) continue;
1450 err
= PyList_Append(v
, w
);
1453 PREDICT(JUMP_ABSOLUTE
);
1460 v
= stack_pointer
[-oparg
];
1461 err
= PySet_Add(v
, w
);
1464 PREDICT(JUMP_ABSOLUTE
);
1472 x
= PyNumber_InPlacePower(v
, w
, Py_None
);
1476 if (x
!= NULL
) continue;
1479 case INPLACE_MULTIPLY
:
1482 x
= PyNumber_InPlaceMultiply(v
, w
);
1486 if (x
!= NULL
) continue;
1489 case INPLACE_DIVIDE
:
1490 if (!_Py_QnewFlag
) {
1493 x
= PyNumber_InPlaceDivide(v
, w
);
1497 if (x
!= NULL
) continue;
1500 /* -Qnew is in effect: fall through to
1501 INPLACE_TRUE_DIVIDE */
1502 case INPLACE_TRUE_DIVIDE
:
1505 x
= PyNumber_InPlaceTrueDivide(v
, w
);
1509 if (x
!= NULL
) continue;
1512 case INPLACE_FLOOR_DIVIDE
:
1515 x
= PyNumber_InPlaceFloorDivide(v
, w
);
1519 if (x
!= NULL
) continue;
1522 case INPLACE_MODULO
:
1525 x
= PyNumber_InPlaceRemainder(v
, w
);
1529 if (x
!= NULL
) continue;
1535 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1536 /* INLINE: int + int */
1537 register long a
, b
, i
;
1538 a
= PyInt_AS_LONG(v
);
1539 b
= PyInt_AS_LONG(w
);
1541 if ((i
^a
) < 0 && (i
^b
) < 0)
1543 x
= PyInt_FromLong(i
);
1545 else if (PyString_CheckExact(v
) &&
1546 PyString_CheckExact(w
)) {
1547 x
= string_concatenate(v
, w
, f
, next_instr
);
1548 /* string_concatenate consumed the ref to v */
1553 x
= PyNumber_InPlaceAdd(v
, w
);
1559 if (x
!= NULL
) continue;
1562 case INPLACE_SUBTRACT
:
1565 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1566 /* INLINE: int - int */
1567 register long a
, b
, i
;
1568 a
= PyInt_AS_LONG(v
);
1569 b
= PyInt_AS_LONG(w
);
1571 if ((i
^a
) < 0 && (i
^~b
) < 0)
1573 x
= PyInt_FromLong(i
);
1577 x
= PyNumber_InPlaceSubtract(v
, w
);
1582 if (x
!= NULL
) continue;
1585 case INPLACE_LSHIFT
:
1588 x
= PyNumber_InPlaceLshift(v
, w
);
1592 if (x
!= NULL
) continue;
1595 case INPLACE_RSHIFT
:
1598 x
= PyNumber_InPlaceRshift(v
, w
);
1602 if (x
!= NULL
) continue;
1608 x
= PyNumber_InPlaceAnd(v
, w
);
1612 if (x
!= NULL
) continue;
1618 x
= PyNumber_InPlaceXor(v
, w
);
1622 if (x
!= NULL
) continue;
1628 x
= PyNumber_InPlaceOr(v
, w
);
1632 if (x
!= NULL
) continue;
1639 if ((opcode
-SLICE
) & 2)
1643 if ((opcode
-SLICE
) & 1)
1648 x
= apply_slice(u
, v
, w
);
1653 if (x
!= NULL
) continue;
1660 if ((opcode
-STORE_SLICE
) & 2)
1664 if ((opcode
-STORE_SLICE
) & 1)
1670 err
= assign_slice(u
, v
, w
, t
); /* u[v:w] = t */
1675 if (err
== 0) continue;
1678 case DELETE_SLICE
+0:
1679 case DELETE_SLICE
+1:
1680 case DELETE_SLICE
+2:
1681 case DELETE_SLICE
+3:
1682 if ((opcode
-DELETE_SLICE
) & 2)
1686 if ((opcode
-DELETE_SLICE
) & 1)
1691 err
= assign_slice(u
, v
, w
, (PyObject
*)NULL
);
1696 if (err
== 0) continue;
1705 err
= PyObject_SetItem(v
, w
, u
);
1709 if (err
== 0) continue;
1717 err
= PyObject_DelItem(v
, w
);
1720 if (err
== 0) continue;
1725 w
= PySys_GetObject("displayhook");
1727 PyErr_SetString(PyExc_RuntimeError
,
1728 "lost sys.displayhook");
1733 x
= PyTuple_Pack(1, v
);
1738 w
= PyEval_CallObject(w
, x
);
1749 /* fall through to PRINT_ITEM */
1753 if (stream
== NULL
|| stream
== Py_None
) {
1754 w
= PySys_GetObject("stdout");
1756 PyErr_SetString(PyExc_RuntimeError
,
1761 /* PyFile_SoftSpace() can exececute arbitrary code
1762 if sys.stdout is an instance with a __getattr__.
1763 If __getattr__ raises an exception, w will
1764 be freed, so we need to prevent that temporarily. */
1766 if (w
!= NULL
&& PyFile_SoftSpace(w
, 0))
1767 err
= PyFile_WriteString(" ", w
);
1769 err
= PyFile_WriteObject(v
, w
, Py_PRINT_RAW
);
1771 /* XXX move into writeobject() ? */
1772 if (PyString_Check(v
)) {
1773 char *s
= PyString_AS_STRING(v
);
1774 Py_ssize_t len
= PyString_GET_SIZE(v
);
1776 !isspace(Py_CHARMASK(s
[len
-1])) ||
1778 PyFile_SoftSpace(w
, 1);
1780 #ifdef Py_USING_UNICODE
1781 else if (PyUnicode_Check(v
)) {
1782 Py_UNICODE
*s
= PyUnicode_AS_UNICODE(v
);
1783 Py_ssize_t len
= PyUnicode_GET_SIZE(v
);
1785 !Py_UNICODE_ISSPACE(s
[len
-1]) ||
1787 PyFile_SoftSpace(w
, 1);
1791 PyFile_SoftSpace(w
, 1);
1801 case PRINT_NEWLINE_TO
:
1803 /* fall through to PRINT_NEWLINE */
1806 if (stream
== NULL
|| stream
== Py_None
) {
1807 w
= PySys_GetObject("stdout");
1809 PyErr_SetString(PyExc_RuntimeError
,
1811 why
= WHY_EXCEPTION
;
1815 /* w.write() may replace sys.stdout, so we
1816 * have to keep our reference to it */
1818 err
= PyFile_WriteString("\n", w
);
1820 PyFile_SoftSpace(w
, 0);
1829 default: switch (opcode
) {
1835 u
= POP(); /* traceback */
1838 v
= POP(); /* value */
1841 w
= POP(); /* exc */
1842 case 0: /* Fallthrough */
1843 why
= do_raise(w
, v
, u
);
1846 PyErr_SetString(PyExc_SystemError
,
1847 "bad RAISE_VARARGS oparg");
1848 why
= WHY_EXCEPTION
;
1854 if ((x
= f
->f_locals
) != NULL
) {
1859 PyErr_SetString(PyExc_SystemError
, "no locals");
1865 goto fast_block_end
;
1869 f
->f_stacktop
= stack_pointer
;
1878 READ_TIMESTAMP(intr0
);
1879 err
= exec_statement(f
, u
, v
, w
);
1880 READ_TIMESTAMP(intr1
);
1888 PyTryBlock
*b
= PyFrame_BlockPop(f
);
1889 while (STACK_LEVEL() > b
->b_level
) {
1896 PREDICTED(END_FINALLY
);
1899 if (PyInt_Check(v
)) {
1900 why
= (enum why_code
) PyInt_AS_LONG(v
);
1901 assert(why
!= WHY_YIELD
);
1902 if (why
== WHY_RETURN
||
1903 why
== WHY_CONTINUE
)
1906 else if (PyExceptionClass_Check(v
) ||
1907 PyString_Check(v
)) {
1910 PyErr_Restore(v
, w
, u
);
1914 else if (v
!= Py_None
) {
1915 PyErr_SetString(PyExc_SystemError
,
1916 "'finally' pops bad exception");
1917 why
= WHY_EXCEPTION
;
1927 x
= build_class(u
, v
, w
);
1935 w
= GETITEM(names
, oparg
);
1937 if ((x
= f
->f_locals
) != NULL
) {
1938 if (PyDict_CheckExact(x
))
1939 err
= PyDict_SetItem(x
, w
, v
);
1941 err
= PyObject_SetItem(x
, w
, v
);
1943 if (err
== 0) continue;
1946 PyErr_Format(PyExc_SystemError
,
1947 "no locals found when storing %s",
1952 w
= GETITEM(names
, oparg
);
1953 if ((x
= f
->f_locals
) != NULL
) {
1954 if ((err
= PyObject_DelItem(x
, w
)) != 0)
1955 format_exc_check_arg(PyExc_NameError
,
1960 PyErr_Format(PyExc_SystemError
,
1961 "no locals when deleting %s",
1965 PREDICTED_WITH_ARG(UNPACK_SEQUENCE
);
1966 case UNPACK_SEQUENCE
:
1968 if (PyTuple_CheckExact(v
) &&
1969 PyTuple_GET_SIZE(v
) == oparg
) {
1970 PyObject
**items
= \
1971 ((PyTupleObject
*)v
)->ob_item
;
1979 } else if (PyList_CheckExact(v
) &&
1980 PyList_GET_SIZE(v
) == oparg
) {
1981 PyObject
**items
= \
1982 ((PyListObject
*)v
)->ob_item
;
1988 } else if (unpack_iterable(v
, oparg
,
1989 stack_pointer
+ oparg
)) {
1992 /* unpack_iterable() raised an exception */
1993 why
= WHY_EXCEPTION
;
1999 w
= GETITEM(names
, oparg
);
2003 err
= PyObject_SetAttr(v
, w
, u
); /* v.w = u */
2006 if (err
== 0) continue;
2010 w
= GETITEM(names
, oparg
);
2012 err
= PyObject_SetAttr(v
, w
, (PyObject
*)NULL
);
2018 w
= GETITEM(names
, oparg
);
2020 err
= PyDict_SetItem(f
->f_globals
, w
, v
);
2022 if (err
== 0) continue;
2026 w
= GETITEM(names
, oparg
);
2027 if ((err
= PyDict_DelItem(f
->f_globals
, w
)) != 0)
2028 format_exc_check_arg(
2029 PyExc_NameError
, GLOBAL_NAME_ERROR_MSG
, w
);
2033 w
= GETITEM(names
, oparg
);
2034 if ((v
= f
->f_locals
) == NULL
) {
2035 PyErr_Format(PyExc_SystemError
,
2036 "no locals when loading %s",
2038 why
= WHY_EXCEPTION
;
2041 if (PyDict_CheckExact(v
)) {
2042 x
= PyDict_GetItem(v
, w
);
2046 x
= PyObject_GetItem(v
, w
);
2047 if (x
== NULL
&& PyErr_Occurred()) {
2048 if (!PyErr_ExceptionMatches(
2055 x
= PyDict_GetItem(f
->f_globals
, w
);
2057 x
= PyDict_GetItem(f
->f_builtins
, w
);
2059 format_exc_check_arg(
2071 w
= GETITEM(names
, oparg
);
2072 if (PyString_CheckExact(w
)) {
2073 /* Inline the PyDict_GetItem() calls.
2074 WARNING: this is an extreme speed hack.
2075 Do not try this at home. */
2076 long hash
= ((PyStringObject
*)w
)->ob_shash
;
2080 d
= (PyDictObject
*)(f
->f_globals
);
2081 e
= d
->ma_lookup(d
, w
, hash
);
2092 d
= (PyDictObject
*)(f
->f_builtins
);
2093 e
= d
->ma_lookup(d
, w
, hash
);
2104 goto load_global_error
;
2107 /* This is the un-inlined version of the code above */
2108 x
= PyDict_GetItem(f
->f_globals
, w
);
2110 x
= PyDict_GetItem(f
->f_builtins
, w
);
2113 format_exc_check_arg(
2115 GLOBAL_NAME_ERROR_MSG
, w
);
2124 x
= GETLOCAL(oparg
);
2126 SETLOCAL(oparg
, NULL
);
2129 format_exc_check_arg(
2130 PyExc_UnboundLocalError
,
2131 UNBOUNDLOCAL_ERROR_MSG
,
2132 PyTuple_GetItem(co
->co_varnames
, oparg
)
2137 x
= freevars
[oparg
];
2140 if (x
!= NULL
) continue;
2144 x
= freevars
[oparg
];
2151 /* Don't stomp existing exception */
2152 if (PyErr_Occurred())
2154 if (oparg
< PyTuple_GET_SIZE(co
->co_cellvars
)) {
2155 v
= PyTuple_GET_ITEM(co
->co_cellvars
,
2157 format_exc_check_arg(
2158 PyExc_UnboundLocalError
,
2159 UNBOUNDLOCAL_ERROR_MSG
,
2162 v
= PyTuple_GET_ITEM(co
->co_freevars
, oparg
-
2163 PyTuple_GET_SIZE(co
->co_cellvars
));
2164 format_exc_check_arg(PyExc_NameError
,
2165 UNBOUNDFREE_ERROR_MSG
, v
);
2171 x
= freevars
[oparg
];
2177 x
= PyTuple_New(oparg
);
2179 for (; --oparg
>= 0;) {
2181 PyTuple_SET_ITEM(x
, oparg
, w
);
2189 x
= PyList_New(oparg
);
2191 for (; --oparg
>= 0;) {
2193 PyList_SET_ITEM(x
, oparg
, w
);
2201 x
= PySet_New(NULL
);
2203 for (; --oparg
>= 0;) {
2206 err
= PySet_Add(x
, w
);
2220 x
= _PyDict_NewPresized((Py_ssize_t
)oparg
);
2222 if (x
!= NULL
) continue;
2226 w
= TOP(); /* key */
2227 u
= SECOND(); /* value */
2228 v
= THIRD(); /* dict */
2230 assert (PyDict_CheckExact(v
));
2231 err
= PyDict_SetItem(v
, w
, u
); /* v[w] = u */
2234 if (err
== 0) continue;
2238 w
= TOP(); /* key */
2239 u
= SECOND(); /* value */
2241 v
= stack_pointer
[-oparg
]; /* dict */
2242 assert (PyDict_CheckExact(v
));
2243 err
= PyDict_SetItem(v
, w
, u
); /* v[w] = u */
2247 PREDICT(JUMP_ABSOLUTE
);
2253 w
= GETITEM(names
, oparg
);
2255 x
= PyObject_GetAttr(v
, w
);
2258 if (x
!= NULL
) continue;
2264 if (PyInt_CheckExact(w
) && PyInt_CheckExact(v
)) {
2265 /* INLINE: cmp(int, int) */
2268 a
= PyInt_AS_LONG(v
);
2269 b
= PyInt_AS_LONG(w
);
2271 case PyCmp_LT
: res
= a
< b
; break;
2272 case PyCmp_LE
: res
= a
<= b
; break;
2273 case PyCmp_EQ
: res
= a
== b
; break;
2274 case PyCmp_NE
: res
= a
!= b
; break;
2275 case PyCmp_GT
: res
= a
> b
; break;
2276 case PyCmp_GE
: res
= a
>= b
; break;
2277 case PyCmp_IS
: res
= v
== w
; break;
2278 case PyCmp_IS_NOT
: res
= v
!= w
; break;
2279 default: goto slow_compare
;
2281 x
= res
? Py_True
: Py_False
;
2286 x
= cmp_outcome(oparg
, v
, w
);
2291 if (x
== NULL
) break;
2292 PREDICT(POP_JUMP_IF_FALSE
);
2293 PREDICT(POP_JUMP_IF_TRUE
);
2297 w
= GETITEM(names
, oparg
);
2298 x
= PyDict_GetItemString(f
->f_builtins
, "__import__");
2300 PyErr_SetString(PyExc_ImportError
,
2301 "__import__ not found");
2307 if (PyInt_AsLong(u
) != -1 || PyErr_Occurred())
2311 f
->f_locals
== NULL
?
2312 Py_None
: f
->f_locals
,
2319 f
->f_locals
== NULL
?
2320 Py_None
: f
->f_locals
,
2330 READ_TIMESTAMP(intr0
);
2332 x
= PyEval_CallObject(v
, w
);
2334 READ_TIMESTAMP(intr1
);
2337 if (x
!= NULL
) continue;
2342 PyFrame_FastToLocals(f
);
2343 if ((x
= f
->f_locals
) == NULL
) {
2344 PyErr_SetString(PyExc_SystemError
,
2345 "no locals found during 'import *'");
2348 READ_TIMESTAMP(intr0
);
2349 err
= import_all_from(x
, v
);
2350 READ_TIMESTAMP(intr1
);
2351 PyFrame_LocalsToFast(f
, 0);
2353 if (err
== 0) continue;
2357 w
= GETITEM(names
, oparg
);
2359 READ_TIMESTAMP(intr0
);
2360 x
= import_from(v
, w
);
2361 READ_TIMESTAMP(intr1
);
2363 if (x
!= NULL
) continue;
2368 goto fast_next_opcode
;
2370 PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE
);
2371 case POP_JUMP_IF_FALSE
:
2375 goto fast_next_opcode
;
2377 if (w
== Py_False
) {
2380 goto fast_next_opcode
;
2382 err
= PyObject_IsTrue(w
);
2392 PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE
);
2393 case POP_JUMP_IF_TRUE
:
2395 if (w
== Py_False
) {
2397 goto fast_next_opcode
;
2402 goto fast_next_opcode
;
2404 err
= PyObject_IsTrue(w
);
2416 case JUMP_IF_FALSE_OR_POP
:
2421 goto fast_next_opcode
;
2423 if (w
== Py_False
) {
2425 goto fast_next_opcode
;
2427 err
= PyObject_IsTrue(w
);
2439 case JUMP_IF_TRUE_OR_POP
:
2441 if (w
== Py_False
) {
2444 goto fast_next_opcode
;
2448 goto fast_next_opcode
;
2450 err
= PyObject_IsTrue(w
);
2455 else if (err
== 0) {
2463 PREDICTED_WITH_ARG(JUMP_ABSOLUTE
);
            /* Enabling this path speeds-up all while and for-loops by bypassing
               the per-loop checks for signals.  By default, this should be turned-off
               because it prevents detection of a control-break in tight loops like
               "while 1: pass".  Compile with this option turned-on when you need
               the speed-up and do not need break checking inside tight loops (ones
               that contain only instructions ending with goto fast_next_opcode).
            */
            goto fast_next_opcode;
2480 /* before: [obj]; after [getiter(obj)] */
2482 x
= PyObject_GetIter(v
);
2492 PREDICTED_WITH_ARG(FOR_ITER
);
2494 /* before: [iter]; after: [iter, iter()] *or* [] */
2496 x
= (*v
->ob_type
->tp_iternext
)(v
);
2499 PREDICT(STORE_FAST
);
2500 PREDICT(UNPACK_SEQUENCE
);
2503 if (PyErr_Occurred()) {
2504 if (!PyErr_ExceptionMatches(
2505 PyExc_StopIteration
))
2509 /* iterator ended normally */
2517 goto fast_block_end
;
2520 retval
= PyInt_FromLong(oparg
);
2526 goto fast_block_end
;
2531 /* NOTE: If you add any new block-setup opcodes that
2532 are not try/except/finally handlers, you may need
2533 to update the PyGen_NeedsFinalizing() function.
2536 PyFrame_BlockSetup(f
, opcode
, INSTR_OFFSET() + oparg
,
2542 static PyObject
*exit
, *enter
;
2544 x
= special_lookup(w
, "__exit__", &exit
);
2548 u
= special_lookup(w
, "__enter__", &enter
);
2554 x
= PyObject_CallFunctionObjArgs(u
, NULL
);
2558 /* Setup the finally block before pushing the result
2559 of __enter__ on the stack. */
2560 PyFrame_BlockSetup(f
, SETUP_FINALLY
, INSTR_OFFSET() + oparg
,
            /* At the top of the stack are 1-3 values indicating
               how/why we entered the finally clause:
               - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval
               - TOP = WHY_*; no retval below it
               - (TOP, SECOND, THIRD) = exc_info()
               Below them is EXIT, the context.__exit__ bound method.
               In the last case, we must call
                 EXIT(TOP, SECOND, THIRD)
               otherwise we must call
                 EXIT(None, None, None)

               In all cases, we remove EXIT from the stack, leaving
               the rest in the same order.

               In addition, if the stack represents an exception,
               *and* the function call returns a 'true' value, we
               "zap" this information, to prevent END_FINALLY from
               re-raising the exception.  (But non-local gotos
               should still be resumed.)
            */
2591 PyObject
*exit_func
;
2599 else if (PyInt_Check(u
)) {
2600 switch(PyInt_AS_LONG(u
)) {
2603 /* Retval in TOP. */
2604 exit_func
= SECOND();
2613 u
= v
= w
= Py_None
;
2618 exit_func
= THIRD();
2623 /* XXX Not the fastest way to call it... */
2624 x
= PyObject_CallFunctionObjArgs(exit_func
, u
, v
, w
,
2626 Py_DECREF(exit_func
);
2628 break; /* Go to error exit */
2631 err
= PyObject_IsTrue(x
);
2637 break; /* Go to error exit */
2640 /* There was an exception and a true return */
2648 /* The stack was rearranged to remove EXIT
2649 above. Let END_FINALLY do its thing */
2651 PREDICT(END_FINALLY
);
2661 x
= call_function(&sp
, oparg
, &intr0
, &intr1
);
2663 x
= call_function(&sp
, oparg
);
2672 case CALL_FUNCTION_VAR
:
2673 case CALL_FUNCTION_KW
:
2674 case CALL_FUNCTION_VAR_KW
:
2676 int na
= oparg
& 0xff;
2677 int nk
= (oparg
>>8) & 0xff;
2678 int flags
= (opcode
- CALL_FUNCTION
) & 3;
2679 int n
= na
+ 2 * nk
;
2680 PyObject
**pfunc
, *func
, **sp
;
2682 if (flags
& CALL_FLAG_VAR
)
2684 if (flags
& CALL_FLAG_KW
)
2686 pfunc
= stack_pointer
- n
- 1;
2689 if (PyMethod_Check(func
)
2690 && PyMethod_GET_SELF(func
) != NULL
) {
2691 PyObject
*self
= PyMethod_GET_SELF(func
);
2693 func
= PyMethod_GET_FUNCTION(func
);
2702 READ_TIMESTAMP(intr0
);
2703 x
= ext_do_call(func
, &sp
, flags
, na
, nk
);
2704 READ_TIMESTAMP(intr1
);
2708 while (stack_pointer
> pfunc
) {
2719 v
= POP(); /* code object */
2720 x
= PyFunction_New(v
, f
->f_globals
);
2722 /* XXX Maybe this should be a separate opcode? */
2723 if (x
!= NULL
&& oparg
> 0) {
2724 v
= PyTuple_New(oparg
);
2730 while (--oparg
>= 0) {
2732 PyTuple_SET_ITEM(v
, oparg
, w
);
2734 err
= PyFunction_SetDefaults(x
, v
);
2742 v
= POP(); /* code object */
2743 x
= PyFunction_New(v
, f
->f_globals
);
2747 if (PyFunction_SetClosure(x
, v
) != 0) {
2748 /* Can't happen unless bytecode is corrupt. */
2749 why
= WHY_EXCEPTION
;
2753 if (x
!= NULL
&& oparg
> 0) {
2754 v
= PyTuple_New(oparg
);
2760 while (--oparg
>= 0) {
2762 PyTuple_SET_ITEM(v
, oparg
, w
);
2764 if (PyFunction_SetDefaults(x
, v
) != 0) {
2765 /* Can't happen unless
2766 PyFunction_SetDefaults changes. */
2767 why
= WHY_EXCEPTION
;
2782 x
= PySlice_New(u
, v
, w
);
2787 if (x
!= NULL
) continue;
2792 oparg
= oparg
<<16 | NEXTARG();
2793 goto dispatch_opcode
;
2797 "XXX lineno: %d, opcode: %d\n",
2798 PyFrame_GetLineNumber(f
),
2800 PyErr_SetString(PyExc_SystemError
, "unknown opcode");
2801 why
= WHY_EXCEPTION
;
2812 READ_TIMESTAMP(inst1
);
2814 /* Quickly continue if no error occurred */
2816 if (why
== WHY_NOT
) {
2817 if (err
== 0 && x
!= NULL
) {
2819 /* This check is expensive! */
2820 if (PyErr_Occurred())
2822 "XXX undetected error\n");
2825 READ_TIMESTAMP(loop1
);
2826 continue; /* Normal, fast path */
2831 why
= WHY_EXCEPTION
;
2836 /* Double-check exception status */
2838 if (why
== WHY_EXCEPTION
|| why
== WHY_RERAISE
) {
2839 if (!PyErr_Occurred()) {
2840 PyErr_SetString(PyExc_SystemError
,
2841 "error return without exception set");
2842 why
= WHY_EXCEPTION
;
2847 /* This check is expensive! */
2848 if (PyErr_Occurred()) {
2850 sprintf(buf
, "Stack unwind with exception "
2851 "set and why=%d", why
);
2857 /* Log traceback info if this is a real exception */
2859 if (why
== WHY_EXCEPTION
) {
2860 PyTraceBack_Here(f
);
2862 if (tstate
->c_tracefunc
!= NULL
)
2863 call_exc_trace(tstate
->c_tracefunc
,
2864 tstate
->c_traceobj
, f
);
2867 /* For the rest, treat WHY_RERAISE as WHY_EXCEPTION */
2869 if (why
== WHY_RERAISE
)
2870 why
= WHY_EXCEPTION
;
2872 /* Unwind stacks if a (pseudo) exception occurred */
2875 while (why
!= WHY_NOT
&& f
->f_iblock
> 0) {
2876 /* Peek at the current block. */
2877 PyTryBlock
*b
= &f
->f_blockstack
[f
->f_iblock
- 1];
2879 assert(why
!= WHY_YIELD
);
2880 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_CONTINUE
) {
2882 JUMPTO(PyInt_AS_LONG(retval
));
2887 /* Now we have to pop the block. */
2890 while (STACK_LEVEL() > b
->b_level
) {
2894 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_BREAK
) {
2896 JUMPTO(b
->b_handler
);
2899 if (b
->b_type
== SETUP_FINALLY
||
2900 (b
->b_type
== SETUP_EXCEPT
&&
2901 why
== WHY_EXCEPTION
)) {
2902 if (why
== WHY_EXCEPTION
) {
2903 PyObject
*exc
, *val
, *tb
;
2904 PyErr_Fetch(&exc
, &val
, &tb
);
2909 /* Make the raw exception data
2910 available to the handler,
2911 so a program can emulate the
2912 Python main loop. Don't do
2913 this for 'finally'. */
2914 if (b
->b_type
== SETUP_EXCEPT
) {
2915 PyErr_NormalizeException(
2917 set_exc_info(tstate
,
2929 if (why
& (WHY_RETURN
| WHY_CONTINUE
))
2931 v
= PyInt_FromLong((long)why
);
2935 JUMPTO(b
->b_handler
);
2938 } /* unwind stack */
2940 /* End the loop if we still have an error (or return) */
2944 READ_TIMESTAMP(loop1
);
2948 assert(why
!= WHY_YIELD
);
2949 /* Pop remaining stack entries. */
2955 if (why
!= WHY_RETURN
)
2959 if (tstate
->use_tracing
) {
2960 if (tstate
->c_tracefunc
) {
2961 if (why
== WHY_RETURN
|| why
== WHY_YIELD
) {
2962 if (call_trace(tstate
->c_tracefunc
,
2963 tstate
->c_traceobj
, f
,
2964 PyTrace_RETURN
, retval
)) {
2967 why
= WHY_EXCEPTION
;
2970 else if (why
== WHY_EXCEPTION
) {
2971 call_trace_protected(tstate
->c_tracefunc
,
2972 tstate
->c_traceobj
, f
,
2973 PyTrace_RETURN
, NULL
);
2976 if (tstate
->c_profilefunc
) {
2977 if (why
== WHY_EXCEPTION
)
2978 call_trace_protected(tstate
->c_profilefunc
,
2979 tstate
->c_profileobj
, f
,
2980 PyTrace_RETURN
, NULL
);
2981 else if (call_trace(tstate
->c_profilefunc
,
2982 tstate
->c_profileobj
, f
,
2983 PyTrace_RETURN
, retval
)) {
2986 why
= WHY_EXCEPTION
;
2991 if (tstate
->frame
->f_exc_type
!= NULL
)
2992 reset_exc_info(tstate
);
2994 assert(tstate
->frame
->f_exc_value
== NULL
);
2995 assert(tstate
->frame
->f_exc_traceback
== NULL
);
3000 Py_LeaveRecursiveCall();
3001 tstate
->frame
= f
->f_back
;
/* This is gonna seem *real weird*, but if you put some other code between
   PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
   the test in the if statements in Misc/gdbinit (pystack and pystackv). */
3011 PyEval_EvalCodeEx(PyCodeObject
*co
, PyObject
*globals
, PyObject *locals,
           PyObject **args, int argcount, PyObject **kws, int kwcount,
           PyObject **defs, int defcount, PyObject *closure)
{
    register PyFrameObject *f;
    register PyObject *retval = NULL;
    register PyObject **fastlocals, **freevars;
    PyThreadState *tstate = PyThreadState_GET();
    PyObject *x, *u;

    if (globals == NULL) {
        PyErr_SetString(PyExc_SystemError,
                        "PyEval_EvalCodeEx: NULL globals");
        return NULL;
    }

    assert(tstate != NULL);
    assert(globals != NULL);
    f = PyFrame_New(tstate, co, globals, locals);
    if (f == NULL)
        return NULL;

    fastlocals = f->f_localsplus;
    freevars = f->f_localsplus + co->co_nlocals;

    if (co->co_argcount > 0 ||
        co->co_flags & (CO_VARARGS | CO_VARKEYWORDS)) {
        int i;
        int n = argcount;
        PyObject *kwdict = NULL;
        if (co->co_flags & CO_VARKEYWORDS) {
            kwdict = PyDict_New();
            if (kwdict == NULL)
                goto fail;
            i = co->co_argcount;
            if (co->co_flags & CO_VARARGS)
                i++;
            SETLOCAL(i, kwdict);
        }
        if (argcount > co->co_argcount) {
            if (!(co->co_flags & CO_VARARGS)) {
                PyErr_Format(PyExc_TypeError,
                             "%.200s() takes %s %d "
                             "%sargument%s (%d given)",
                             PyString_AsString(co->co_name),
                             defcount ? "at most" : "exactly",
                             co->co_argcount,
                             kwcount ? "non-keyword " : "",
                             co->co_argcount == 1 ? "" : "s",
                             argcount);
                goto fail;
            }
            n = co->co_argcount;
        }
        for (i = 0; i < n; i++) {
            x = args[i];
            Py_INCREF(x);
            SETLOCAL(i, x);
        }
        if (co->co_flags & CO_VARARGS) {
            u = PyTuple_New(argcount - n);
            if (u == NULL)
                goto fail;
            SETLOCAL(co->co_argcount, u);
            for (i = n; i < argcount; i++) {
                x = args[i];
                Py_INCREF(x);
                PyTuple_SET_ITEM(u, i-n, x);
            }
        }
        for (i = 0; i < kwcount; i++) {
            PyObject **co_varnames;
            PyObject *keyword = kws[2*i];
            PyObject *value = kws[2*i + 1];
            int j;
            if (keyword == NULL || !(PyString_Check(keyword)
#ifdef Py_USING_UNICODE
                                     || PyUnicode_Check(keyword)
#endif
                        )) {
                PyErr_Format(PyExc_TypeError,
                             "%.200s() keywords must be strings",
                             PyString_AsString(co->co_name));
                goto fail;
            }
            /* Speed hack: do raw pointer compares.  As names are
               normally interned this should almost always hit. */
            co_varnames = PySequence_Fast_ITEMS(co->co_varnames);
            for (j = 0; j < co->co_argcount; j++) {
                PyObject *nm = co_varnames[j];
                if (nm == keyword)
                    break;
            }
            if (j >= co->co_argcount) {
                /* Slow fallback, just in case */
                for (j = 0; j < co->co_argcount; j++) {
                    PyObject *nm = co_varnames[j];
                    int cmp = PyObject_RichCompareBool(
                        keyword, nm, Py_EQ);
                    if (cmp > 0)
                        break;
                    else if (cmp < 0)
                        goto fail;
                }
                /* Check errors from Compare */
                if (PyErr_Occurred())
                    goto fail;
            }
            if (j >= co->co_argcount) {
                if (kwdict == NULL) {
                    PyObject *kwd_str = kwd_as_string(keyword);
                    if (kwd_str) {
                        PyErr_Format(PyExc_TypeError,
                                     "%.200s() got an unexpected "
                                     "keyword argument '%.400s'",
                                     PyString_AsString(co->co_name),
                                     PyString_AsString(kwd_str));
                        Py_DECREF(kwd_str);
                    }
                    goto fail;
                }
                PyDict_SetItem(kwdict, keyword, value);
            }
            else {
                if (GETLOCAL(j) != NULL) {
                    PyObject *kwd_str = kwd_as_string(keyword);
                    if (kwd_str) {
                        PyErr_Format(PyExc_TypeError,
                                     "%.200s() got multiple "
                                     "values for keyword "
                                     "argument '%.400s'",
                                     PyString_AsString(co->co_name),
                                     PyString_AsString(kwd_str));
                        Py_DECREF(kwd_str);
                    }
                    goto fail;
                }
                Py_INCREF(value);
                SETLOCAL(j, value);
            }
        }
        if (argcount < co->co_argcount) {
            int m = co->co_argcount - defcount;
            for (i = argcount; i < m; i++) {
                if (GETLOCAL(i) == NULL) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s() takes %s %d "
                                 "%sargument%s (%d given)",
                                 PyString_AsString(co->co_name),
                                 ((co->co_flags & CO_VARARGS) ||
                                  defcount) ? "at least"
                                            : "exactly",
                                 m, kwcount ? "non-keyword " : "",
                                 m == 1 ? "" : "s", i);
                    goto fail;
                }
            }
            if (n > m)
                i = n - m;
            else
                i = 0;
            for (; i < defcount; i++) {
                if (GETLOCAL(m+i) == NULL) {
                    PyObject *def = defs[i];
                    Py_INCREF(def);
                    SETLOCAL(m+i, def);
                }
            }
        }
    }
    else {
        if (argcount > 0 || kwcount > 0) {
            PyErr_Format(PyExc_TypeError,
                         "%.200s() takes no arguments (%d given)",
                         PyString_AsString(co->co_name),
                         argcount + kwcount);
            goto fail;
        }
    }
    /* Allocate and initialize storage for cell vars, and copy free
       vars into frame.  This isn't too efficient right now. */
    if (PyTuple_GET_SIZE(co->co_cellvars)) {
        int i, j, nargs, found;
        char *cellname, *argname;
        PyObject *c;

        nargs = co->co_argcount;
        if (co->co_flags & CO_VARARGS)
            nargs++;
        if (co->co_flags & CO_VARKEYWORDS)
            nargs++;

        /* Initialize each cell var, taking into account
           cell vars that are initialized from arguments.

           Should arrange for the compiler to put cellvars
           that are arguments at the beginning of the cellvars
           list so that we can march over it more efficiently? */
        for (i = 0; i < PyTuple_GET_SIZE(co->co_cellvars); ++i) {
            cellname = PyString_AS_STRING(
                PyTuple_GET_ITEM(co->co_cellvars, i));
            found = 0;
            for (j = 0; j < nargs; j++) {
                argname = PyString_AS_STRING(
                    PyTuple_GET_ITEM(co->co_varnames, j));
                if (strcmp(cellname, argname) == 0) {
                    c = PyCell_New(GETLOCAL(j));
                    if (c == NULL)
                        goto fail;
                    GETLOCAL(co->co_nlocals + i) = c;
                    found = 1;
                    break;
                }
            }
            if (found == 0) {
                c = PyCell_New(NULL);
                if (c == NULL)
                    goto fail;
                SETLOCAL(co->co_nlocals + i, c);
            }
        }
    }
    if (PyTuple_GET_SIZE(co->co_freevars)) {
        int i;
        for (i = 0; i < PyTuple_GET_SIZE(co->co_freevars); ++i) {
            PyObject *o = PyTuple_GET_ITEM(closure, i);
            Py_INCREF(o);
            freevars[PyTuple_GET_SIZE(co->co_cellvars) + i] = o;
        }
    }

    if (co->co_flags & CO_GENERATOR) {
        /* Don't need to keep the reference to f_back, it will be set
         * when the generator is resumed. */
        Py_XDECREF(f->f_back);
        f->f_back = NULL;

        PCALL(PCALL_GENERATOR);

        /* Create a new generator that owns the ready to run frame
         * and return that as the value. */
        return PyGen_New(f);
    }

    retval = PyEval_EvalFrameEx(f,0);

fail: /* Jump here from prelude on failure */

    /* decref'ing the frame can cause __del__ methods to get invoked,
       which can call back into Python.  While we're done with the
       current Python frame (f), the associated C stack is still in use,
       so recursion_depth must be boosted for the duration. */
    assert(tstate != NULL);
    ++tstate->recursion_depth;
    Py_DECREF(f);
    --tstate->recursion_depth;
    return retval;
}
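/* Illustrative sketch (not part of ceval.c): one way a C extension might
   drive PyEval_EvalCodeEx() directly for a code object that takes two
   positional arguments.  The code object, globals dict and argument objects
   are assumed to be supplied by the caller; the helper name is hypothetical
   and error handling is abbreviated. */
#if 0
static PyObject *
run_code_with_two_args(PyCodeObject *co, PyObject *globals,
                       PyObject *a, PyObject *b)
{
    PyObject *args[2];
    args[0] = a;
    args[1] = b;
    /* No locals mapping, no keyword arguments, no defaults, no closure. */
    return PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
                             args, 2,      /* positional arguments */
                             NULL, 0,      /* keyword arguments */
                             NULL, 0,      /* defaults */
                             NULL);        /* closure */
}
#endif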
static PyObject *
special_lookup(PyObject *o, char *meth, PyObject **cache)
{
    PyObject *res;
    if (PyInstance_Check(o)) {
        if (!*cache)
            return PyObject_GetAttrString(o, meth);
        else
            return PyObject_GetAttr(o, *cache);
    }
    res = _PyObject_LookupSpecial(o, meth, cache);
    if (res == NULL && !PyErr_Occurred()) {
        PyErr_SetObject(PyExc_AttributeError, *cache);
        return NULL;
    }
    return res;
}

static PyObject *
kwd_as_string(PyObject *kwd) {
#ifdef Py_USING_UNICODE
    if (PyString_Check(kwd)) {
#else
        assert(PyString_Check(kwd));
#endif
        Py_INCREF(kwd);
        return kwd;
#ifdef Py_USING_UNICODE
    }
    return _PyUnicode_AsDefaultEncodedString(kwd, "replace");
#endif
}
/* Implementation notes for set_exc_info() and reset_exc_info():

- Below, 'exc_ZZZ' stands for 'exc_type', 'exc_value' and
  'exc_traceback'.  These always travel together.

- tstate->curexc_ZZZ is the "hot" exception that is set by
  PyErr_SetString(), cleared by PyErr_Clear(), and so on.

- Once an exception is caught by an except clause, it is transferred
  from tstate->curexc_ZZZ to tstate->exc_ZZZ, from which sys.exc_info()
  can pick it up.  This is the primary task of set_exc_info().
  XXX That can't be right:  set_exc_info() doesn't look at tstate->curexc_ZZZ.

- Now let me explain the complicated dance with frame->f_exc_ZZZ.

  Long ago, when none of this existed, there were just a few globals:
  one set corresponding to the "hot" exception, and one set
  corresponding to sys.exc_ZZZ.  (Actually, the latter weren't C
  globals; they were simply stored as sys.exc_ZZZ.  For backwards
  compatibility, they still are!)  The problem was that in code like

     try:
        "something that may fail"
     except "some exception":
        "do something else first"
        "print the exception from sys.exc_ZZZ."

  if "do something else first" invoked something that raised and caught
  an exception, sys.exc_ZZZ were overwritten.  That was a frequent
  cause of subtle bugs.  I fixed this by changing the semantics as
  follows:

    - Within one frame, sys.exc_ZZZ will hold the last exception caught
      in that frame.

    - But initially, and as long as no exception is caught in a given
      frame, sys.exc_ZZZ will hold the last exception caught in the
      previous frame (or the frame before that, etc.).

  The first bullet fixed the bug in the above example.  The second
  bullet was for backwards compatibility: it was (and is) common to
  have a function that is called when an exception is caught, and to
  have that function access the caught exception via sys.exc_ZZZ.
  (Example: traceback.print_exc()).

  At the same time I fixed the problem that sys.exc_ZZZ weren't
  thread-safe, by introducing sys.exc_info() which gets it from tstate;
  but that's really a separate improvement.

  The reset_exc_info() function in ceval.c restores the tstate->exc_ZZZ
  variables to what they were before the current frame was called.  The
  set_exc_info() function saves them on the frame so that
  reset_exc_info() can restore them.  The invariant is that
  frame->f_exc_ZZZ is NULL iff the current frame never caught an
  exception (where "catching" an exception applies only to successful
  except clauses); and if the current frame ever caught an exception,
  frame->f_exc_ZZZ is the exception that was stored in tstate->exc_ZZZ
  at the start of the current frame.
*/
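/* Illustrative sketch (not part of ceval.c): the save/assign/release pattern
   that set_exc_info() and reset_exc_info() below both follow.  The old
   references are stashed in temporaries before the new values are installed
   and are only decref'd afterwards, so the owner never points at a freed
   object even if a destructor runs during the Py_XDECREF calls.  The helper
   name and its parameters are hypothetical. */
#if 0
static void
swap_exc_triple(PyObject **dst_type, PyObject **dst_value, PyObject **dst_tb,
                PyObject *type, PyObject *value, PyObject *tb)
{
    PyObject *tmp_type = *dst_type;
    PyObject *tmp_value = *dst_value;
    PyObject *tmp_tb = *dst_tb;
    Py_XINCREF(type);
    Py_XINCREF(value);
    Py_XINCREF(tb);
    *dst_type = type;
    *dst_value = value;
    *dst_tb = tb;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
}
#endif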
static void
set_exc_info(PyThreadState *tstate,
             PyObject *type, PyObject *value, PyObject *tb)
{
    PyFrameObject *frame = tstate->frame;
    PyObject *tmp_type, *tmp_value, *tmp_tb;

    assert(type != NULL);
    assert(frame != NULL);
    if (frame->f_exc_type == NULL) {
        assert(frame->f_exc_value == NULL);
        assert(frame->f_exc_traceback == NULL);
        /* This frame didn't catch an exception before. */
        /* Save previous exception of this thread in this frame. */
        if (tstate->exc_type == NULL) {
            /* XXX Why is this set to Py_None? */
            Py_INCREF(Py_None);
            tstate->exc_type = Py_None;
        }
        Py_INCREF(tstate->exc_type);
        Py_XINCREF(tstate->exc_value);
        Py_XINCREF(tstate->exc_traceback);
        frame->f_exc_type = tstate->exc_type;
        frame->f_exc_value = tstate->exc_value;
        frame->f_exc_traceback = tstate->exc_traceback;
    }
    /* Set new exception for this thread. */
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    Py_INCREF(type);
    Py_XINCREF(value);
    Py_XINCREF(tb);
    tstate->exc_type = type;
    tstate->exc_value = value;
    tstate->exc_traceback = tb;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
    /* For b/w compatibility */
    PySys_SetObject("exc_type", type);
    PySys_SetObject("exc_value", value);
    PySys_SetObject("exc_traceback", tb);
}
static void
reset_exc_info(PyThreadState *tstate)
{
    PyFrameObject *frame;
    PyObject *tmp_type, *tmp_value, *tmp_tb;

    /* It's a precondition that the thread state's frame caught an
     * exception -- verify in a debug build. */
    assert(tstate != NULL);
    frame = tstate->frame;
    assert(frame != NULL);
    assert(frame->f_exc_type != NULL);

    /* Copy the frame's exception info back to the thread state. */
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    Py_INCREF(frame->f_exc_type);
    Py_XINCREF(frame->f_exc_value);
    Py_XINCREF(frame->f_exc_traceback);
    tstate->exc_type = frame->f_exc_type;
    tstate->exc_value = frame->f_exc_value;
    tstate->exc_traceback = frame->f_exc_traceback;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);

    /* For b/w compatibility */
    PySys_SetObject("exc_type", frame->f_exc_type);
    PySys_SetObject("exc_value", frame->f_exc_value);
    PySys_SetObject("exc_traceback", frame->f_exc_traceback);

    /* Clear the frame's exception info. */
    tmp_type = frame->f_exc_type;
    tmp_value = frame->f_exc_value;
    tmp_tb = frame->f_exc_traceback;
    frame->f_exc_type = NULL;
    frame->f_exc_value = NULL;
    frame->f_exc_traceback = NULL;
    Py_DECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
}
/* Logic for the raise statement (too complicated for inlining).
   This *consumes* a reference count to each of its arguments. */
static enum why_code
do_raise(PyObject *type, PyObject *value, PyObject *tb)
{
    if (type == NULL) {
        /* Reraise */
        PyThreadState *tstate = PyThreadState_GET();
        type = tstate->exc_type == NULL ? Py_None : tstate->exc_type;
        value = tstate->exc_value;
        tb = tstate->exc_traceback;
        Py_XINCREF(type);
        Py_XINCREF(value);
        Py_XINCREF(tb);
    }

    /* We support the following forms of raise:
       raise <class>, <classinstance>
       raise <class>, <argument tuple>
       raise <class>, None
       raise <class>, <argument>
       raise <classinstance>, None
       raise <string>, <object>
       raise <string>, None

       An omitted second argument is the same as None.

       In addition, raise <tuple>, <anything> is the same as
       raising the tuple's first item (and it better have one!);
       this rule is applied recursively.

       Finally, an optional third argument can be supplied, which
       gives the traceback to be substituted (useful when
       re-raising an exception after examining it).  */

    /* First, check the traceback argument, replacing None with
       NULL. */
    if (tb == Py_None) {
        Py_DECREF(tb);
        tb = NULL;
    }
    else if (tb != NULL && !PyTraceBack_Check(tb)) {
        PyErr_SetString(PyExc_TypeError,
                        "raise: arg 3 must be a traceback or None");
        goto raise_error;
    }

    /* Next, replace a missing value with None */
    if (value == NULL) {
        value = Py_None;
        Py_INCREF(value);
    }

    /* Next, repeatedly, replace a tuple exception with its first item */
    while (PyTuple_Check(type) && PyTuple_Size(type) > 0) {
        PyObject *tmp = type;
        type = PyTuple_GET_ITEM(type, 0);
        Py_INCREF(type);
        Py_DECREF(tmp);
    }

    if (PyExceptionClass_Check(type))
        PyErr_NormalizeException(&type, &value, &tb);

    else if (PyExceptionInstance_Check(type)) {
        /* Raising an instance.  The value should be a dummy. */
        if (value != Py_None) {
            PyErr_SetString(PyExc_TypeError,
                "instance exception may not have a separate value");
            goto raise_error;
        }
        else {
            /* Normalize to raise <class>, <instance> */
            Py_DECREF(value);
            value = type;
            type = PyExceptionInstance_Class(type);
            Py_INCREF(type);
        }
    }
    else {
        /* Not something you can raise.  You get an exception
           anyway, just not what you specified :-) */
        PyErr_Format(PyExc_TypeError,
                     "exceptions must be classes or instances, not %s",
                     type->ob_type->tp_name);
        goto raise_error;
    }

    assert(PyExceptionClass_Check(type));
    if (Py_Py3kWarningFlag && PyClass_Check(type)) {
        if (PyErr_WarnEx(PyExc_DeprecationWarning,
                         "exceptions must derive from BaseException "
                         "in 3.x", 1) < 0)
            goto raise_error;
    }

    PyErr_Restore(type, value, tb);
    if (tb == NULL)
        return WHY_EXCEPTION;
    else
        return WHY_RERAISE;

 raise_error:
    Py_XDECREF(value);
    Py_XDECREF(type);
    Py_XDECREF(tb);
    return WHY_EXCEPTION;
}
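/* Illustrative sketch (not part of ceval.c): roughly what do_raise() boils
   down to for the common 'raise SomeClass, value' form -- normalize the
   class/value pair, then hand the owned references to PyErr_Restore().
   The helper name is hypothetical and error handling is abbreviated. */
#if 0
static void
raise_class_value(PyObject *type, PyObject *value)
{
    PyObject *tb = NULL;
    Py_INCREF(type);
    Py_XINCREF(value);
    PyErr_NormalizeException(&type, &value, &tb);
    PyErr_Restore(type, value, tb);   /* steals all three references */
}
#endif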
/* Iterate v argcnt times and store the results on the stack (via decreasing
   sp).  Return 1 for success, 0 if error. */

static int
unpack_iterable(PyObject *v, int argcnt, PyObject **sp)
{
    int i = 0;
    PyObject *it;  /* iter(v) */
    PyObject *w;

    assert(v != NULL);

    it = PyObject_GetIter(v);
    if (it == NULL)
        goto Error;

    for (; i < argcnt; i++) {
        w = PyIter_Next(it);
        if (w == NULL) {
            /* Iterator done, via error or exhaustion. */
            if (!PyErr_Occurred()) {
                PyErr_Format(PyExc_ValueError,
                             "need more than %d value%s to unpack",
                             i, i == 1 ? "" : "s");
            }
            goto Error;
        }
        *--sp = w;
    }

    /* We better have exhausted the iterator now. */
    w = PyIter_Next(it);
    if (w == NULL) {
        if (PyErr_Occurred())
            goto Error;
        Py_DECREF(it);
        return 1;
    }
    Py_DECREF(w);
    PyErr_SetString(PyExc_ValueError, "too many values to unpack");
    /* fall through */
Error:
    for (; i > 0; i--, sp++)
        Py_DECREF(*sp);
    Py_XDECREF(it);
    return 0;
}
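/* Illustrative sketch (not part of ceval.c): the same iterate-and-collect
   idiom used by unpack_iterable() above, written against a plain C array
   instead of the value stack.  The helper name and fixed count are
   hypothetical. */
#if 0
static int
collect_three(PyObject *v, PyObject *out[3])
{
    int i;
    PyObject *it = PyObject_GetIter(v);
    if (it == NULL)
        return 0;
    for (i = 0; i < 3; i++) {
        PyObject *w = PyIter_Next(it);
        if (w == NULL) {            /* exhausted early, or error */
            while (--i >= 0)
                Py_DECREF(out[i]);
            Py_DECREF(it);
            return 0;
        }
        out[i] = w;
    }
    Py_DECREF(it);
    return 1;
}
#endif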
static int
prtrace(PyObject *v, char *str)
{
    printf("%s ", str);
    if (PyObject_Print(v, stdout, 0) != 0)
        PyErr_Clear(); /* Don't know what else to do */
    printf("\n");
    return 1;
}

static void
call_exc_trace(Py_tracefunc func, PyObject *self, PyFrameObject *f)
{
    PyObject *type, *value, *traceback, *arg;
    int err;
    PyErr_Fetch(&type, &value, &traceback);
    if (value == NULL) {
        value = Py_None;
        Py_INCREF(value);
    }
    arg = PyTuple_Pack(3, type, value, traceback);
    if (arg == NULL) {
        PyErr_Restore(type, value, traceback);
        return;
    }
    err = call_trace(func, self, f, PyTrace_EXCEPTION, arg);
    Py_DECREF(arg);
    if (err == 0)
        PyErr_Restore(type, value, traceback);
    else {
        Py_XDECREF(type);
        Py_XDECREF(value);
        Py_XDECREF(traceback);
    }
}
static int
call_trace_protected(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
                     int what, PyObject *arg)
{
    PyObject *type, *value, *traceback;
    int err;
    PyErr_Fetch(&type, &value, &traceback);
    err = call_trace(func, obj, frame, what, arg);
    if (err == 0) {
        PyErr_Restore(type, value, traceback);
        return 0;
    }
    else {
        Py_XDECREF(type);
        Py_XDECREF(value);
        Py_XDECREF(traceback);
        return -1;
    }
}

static int
call_trace(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
           int what, PyObject *arg)
{
    register PyThreadState *tstate = frame->f_tstate;
    int result;
    if (tstate->tracing)
        return 0;
    tstate->tracing++;
    tstate->use_tracing = 0;
    result = func(obj, frame, what, arg);
    tstate->use_tracing = ((tstate->c_tracefunc != NULL)
                           || (tstate->c_profilefunc != NULL));
    tstate->tracing--;
    return result;
}
PyObject *
_PyEval_CallTracing(PyObject *func, PyObject *args)
{
    PyFrameObject *frame = PyEval_GetFrame();
    PyThreadState *tstate = frame->f_tstate;
    int save_tracing = tstate->tracing;
    int save_use_tracing = tstate->use_tracing;
    PyObject *result;

    tstate->tracing = 0;
    tstate->use_tracing = ((tstate->c_tracefunc != NULL)
                           || (tstate->c_profilefunc != NULL));
    result = PyObject_Call(func, args, NULL);
    tstate->tracing = save_tracing;
    tstate->use_tracing = save_use_tracing;
    return result;
}

/* See Objects/lnotab_notes.txt for a description of how tracing works. */
static int
maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
                      PyFrameObject *frame, int *instr_lb, int *instr_ub,
                      int *instr_prev)
{
    int result = 0;
    int line = frame->f_lineno;

    /* If the last instruction executed isn't in the current
       instruction window, reset the window. */
    if (frame->f_lasti < *instr_lb || frame->f_lasti >= *instr_ub) {
        PyAddrPair bounds;
        line = _PyCode_CheckLineNumber(frame->f_code, frame->f_lasti,
                                       &bounds);
        *instr_lb = bounds.ap_lower;
        *instr_ub = bounds.ap_upper;
    }
    /* If the last instruction falls at the start of a line or if
       it represents a jump backwards, update the frame's line
       number and call the trace function. */
    if (frame->f_lasti == *instr_lb || frame->f_lasti < *instr_prev) {
        frame->f_lineno = line;
        result = call_trace(func, obj, frame, PyTrace_LINE, Py_None);
    }
    *instr_prev = frame->f_lasti;
    return result;
}
void
PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
{
    PyThreadState *tstate = PyThreadState_GET();
    PyObject *temp = tstate->c_profileobj;
    Py_XINCREF(arg);
    tstate->c_profilefunc = NULL;
    tstate->c_profileobj = NULL;
    /* Must make sure that tracing is not ignored if 'temp' is freed */
    tstate->use_tracing = tstate->c_tracefunc != NULL;
    Py_XDECREF(temp);
    tstate->c_profilefunc = func;
    tstate->c_profileobj = arg;
    /* Flag that tracing or profiling is turned on */
    tstate->use_tracing = (func != NULL) || (tstate->c_tracefunc != NULL);
}

void
PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
{
    PyThreadState *tstate = PyThreadState_GET();
    PyObject *temp = tstate->c_traceobj;
    _Py_TracingPossible += (func != NULL) - (tstate->c_tracefunc != NULL);
    Py_XINCREF(arg);
    tstate->c_tracefunc = NULL;
    tstate->c_traceobj = NULL;
    /* Must make sure that profiling is not ignored if 'temp' is freed */
    tstate->use_tracing = tstate->c_profilefunc != NULL;
    Py_XDECREF(temp);
    tstate->c_tracefunc = func;
    tstate->c_traceobj = arg;
    /* Flag that tracing or profiling is turned on */
    tstate->use_tracing = ((func != NULL)
                           || (tstate->c_profilefunc != NULL));
}
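/* Illustrative sketch (not part of ceval.c): installing a C-level trace
   function through PyEval_SetTrace().  The callback follows the Py_tracefunc
   signature; the function and counter names are hypothetical. */
#if 0
static long n_lines = 0;

static int
count_lines(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
{
    if (what == PyTrace_LINE)
        n_lines++;
    return 0;
}

static void
install_line_counter(void)
{
    /* The second argument is handed back to the callback as 'obj'. */
    PyEval_SetTrace(count_lines, NULL);
}
#endif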
PyObject *
PyEval_GetBuiltins(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return PyThreadState_GET()->interp->builtins;
    else
        return current_frame->f_builtins;
}

PyObject *
PyEval_GetLocals(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return NULL;
    PyFrame_FastToLocals(current_frame);
    return current_frame->f_locals;
}

PyObject *
PyEval_GetGlobals(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return NULL;
    else
        return current_frame->f_globals;
}

PyFrameObject *
PyEval_GetFrame(void)
{
    PyThreadState *tstate = PyThreadState_GET();
    return _PyThreadState_GetFrame(tstate);
}

int
PyEval_GetRestricted(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    return current_frame == NULL ? 0 : PyFrame_IsRestricted(current_frame);
}

int
PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    int result = cf->cf_flags != 0;

    if (current_frame != NULL) {
        const int codeflags = current_frame->f_code->co_flags;
        const int compilerflags = codeflags & PyCF_MASK;
        if (compilerflags) {
            result = 1;
            cf->cf_flags |= compilerflags;
        }
#if 0 /* future keyword */
        if (codeflags & CO_GENERATOR_ALLOWED) {
            result = 1;
            cf->cf_flags |= CO_GENERATOR_ALLOWED;
        }
#endif
    }
    return result;
}

int
Py_FlushLine(void)
{
    PyObject *f = PySys_GetObject("stdout");
    if (f == NULL)
        return 0;
    if (!PyFile_SoftSpace(f, 0))
        return 0;
    return PyFile_WriteString("\n", f);
}
/* External interface to call any callable object.
   The arg must be a tuple or NULL. */

#undef PyEval_CallObject
/* for backward compatibility: export this interface */

PyObject *
PyEval_CallObject(PyObject *func, PyObject *arg)
{
    return PyEval_CallObjectWithKeywords(func, arg, (PyObject *)NULL);
}
#define PyEval_CallObject(func,arg) \
    PyEval_CallObjectWithKeywords(func, arg, (PyObject *)NULL)

PyObject *
PyEval_CallObjectWithKeywords(PyObject *func, PyObject *arg, PyObject *kw)
{
    PyObject *result;

    if (arg == NULL) {
        arg = PyTuple_New(0);
        if (arg == NULL)
            return NULL;
    }
    else if (!PyTuple_Check(arg)) {
        PyErr_SetString(PyExc_TypeError,
                        "argument list must be a tuple");
        return NULL;
    }
    else
        Py_INCREF(arg);

    if (kw != NULL && !PyDict_Check(kw)) {
        PyErr_SetString(PyExc_TypeError,
                        "keyword list must be a dictionary");
        Py_DECREF(arg);
        return NULL;
    }

    result = PyObject_Call(func, arg, kw);
    Py_DECREF(arg);
    return result;
}
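/* Illustrative sketch (not part of ceval.c): calling a Python callable from
   C with one positional argument and one keyword argument through
   PyEval_CallObjectWithKeywords().  The helper name and the "verbose"
   keyword are hypothetical; error checks are abbreviated. */
#if 0
static PyObject *
call_with_kw(PyObject *func, PyObject *pos_arg)
{
    PyObject *result;
    PyObject *args = PyTuple_Pack(1, pos_arg);
    PyObject *kw = PyDict_New();
    if (args == NULL || kw == NULL) {
        Py_XDECREF(args);
        Py_XDECREF(kw);
        return NULL;
    }
    PyDict_SetItemString(kw, "verbose", Py_True);
    result = PyEval_CallObjectWithKeywords(func, args, kw);
    Py_DECREF(args);
    Py_DECREF(kw);
    return result;
}
#endif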
const char *
PyEval_GetFuncName(PyObject *func)
{
    if (PyMethod_Check(func))
        return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func));
    else if (PyFunction_Check(func))
        return PyString_AsString(((PyFunctionObject*)func)->func_name);
    else if (PyCFunction_Check(func))
        return ((PyCFunctionObject*)func)->m_ml->ml_name;
    else if (PyClass_Check(func))
        return PyString_AsString(((PyClassObject*)func)->cl_name);
    else if (PyInstance_Check(func)) {
        return PyString_AsString(
            ((PyInstanceObject*)func)->in_class->cl_name);
    } else {
        return func->ob_type->tp_name;
    }
}

const char *
PyEval_GetFuncDesc(PyObject *func)
{
    if (PyMethod_Check(func))
        return "()";
    else if (PyFunction_Check(func))
        return "()";
    else if (PyCFunction_Check(func))
        return "()";
    else if (PyClass_Check(func))
        return " constructor";
    else if (PyInstance_Check(func)) {
        return " instance";
    } else {
        return " object";
    }
}

static void
err_args(PyObject *func, int flags, int nargs)
{
    if (flags & METH_NOARGS)
        PyErr_Format(PyExc_TypeError,
                     "%.200s() takes no arguments (%d given)",
                     ((PyCFunctionObject *)func)->m_ml->ml_name,
                     nargs);
    else
        PyErr_Format(PyExc_TypeError,
                     "%.200s() takes exactly one argument (%d given)",
                     ((PyCFunctionObject *)func)->m_ml->ml_name,
                     nargs);
}
#define C_TRACE(x, call) \
if (tstate->use_tracing && tstate->c_profilefunc) { \
    if (call_trace(tstate->c_profilefunc, \
                   tstate->c_profileobj, \
                   tstate->frame, PyTrace_C_CALL, \
                   func)) { \
        x = NULL; \
    } \
    else { \
        x = call; \
        if (tstate->c_profilefunc != NULL) { \
            if (x == NULL) { \
                call_trace_protected(tstate->c_profilefunc, \
                                     tstate->c_profileobj, \
                                     tstate->frame, PyTrace_C_EXCEPTION, \
                                     func); \
                /* XXX should pass (type, value, tb) */ \
            } \
            else { \
                if (call_trace(tstate->c_profilefunc, \
                               tstate->c_profileobj, \
                               tstate->frame, PyTrace_C_RETURN, \
                               func)) { \
                    Py_DECREF(x); \
                    x = NULL; \
                } \
            } \
        } \
    } \
} else { \
    x = call; \
}
static PyObject *
call_function(PyObject ***pp_stack, int oparg
#ifdef WITH_TSC
              , uint64* pintr0, uint64* pintr1
#endif
              )
{
    int na = oparg & 0xff;
    int nk = (oparg>>8) & 0xff;
    int n = na + 2 * nk;
    PyObject **pfunc = (*pp_stack) - n - 1;
    PyObject *func = *pfunc;
    PyObject *x, *w;

    /* Always dispatch PyCFunction first, because these are
       presumed to be the most frequent callable object. */
    if (PyCFunction_Check(func) && nk == 0) {
        int flags = PyCFunction_GET_FLAGS(func);
        PyThreadState *tstate = PyThreadState_GET();

        PCALL(PCALL_CFUNCTION);
        if (flags & (METH_NOARGS | METH_O)) {
            PyCFunction meth = PyCFunction_GET_FUNCTION(func);
            PyObject *self = PyCFunction_GET_SELF(func);
            if (flags & METH_NOARGS && na == 0) {
                C_TRACE(x, (*meth)(self,NULL));
            }
            else if (flags & METH_O && na == 1) {
                PyObject *arg = EXT_POP(*pp_stack);
                C_TRACE(x, (*meth)(self,arg));
                Py_DECREF(arg);
            }
            else {
                err_args(func, flags, na);
                x = NULL;
            }
        }
        else {
            PyObject *callargs;
            callargs = load_args(pp_stack, na);
            READ_TIMESTAMP(*pintr0);
            C_TRACE(x, PyCFunction_Call(func,callargs,NULL));
            READ_TIMESTAMP(*pintr1);
            Py_XDECREF(callargs);
        }
    }
    else {
        if (PyMethod_Check(func) && PyMethod_GET_SELF(func) != NULL) {
            /* optimize access to bound methods */
            PyObject *self = PyMethod_GET_SELF(func);
            PCALL(PCALL_METHOD);
            PCALL(PCALL_BOUND_METHOD);
            Py_INCREF(self);
            func = PyMethod_GET_FUNCTION(func);
            Py_INCREF(func);
            Py_DECREF(*pfunc);
            *pfunc = self;
            na++;
            n++;
        }
        else
            Py_INCREF(func);
        READ_TIMESTAMP(*pintr0);
        if (PyFunction_Check(func))
            x = fast_function(func, pp_stack, n, na, nk);
        else
            x = do_call(func, pp_stack, na, nk);
        READ_TIMESTAMP(*pintr1);
        Py_DECREF(func);
    }

    /* Clear the stack of the function object.  Also removes
       the arguments in case they weren't consumed already
       (fast_function() and err_args() leave them on the stack). */
    while ((*pp_stack) > pfunc) {
        w = EXT_POP(*pp_stack);
        Py_DECREF(w);
        PCALL(PCALL_POP);
    }
    return x;
}
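/* Worked example of the oparg decoding above: for a CALL_FUNCTION with
   oparg == 0x0102, na == 2 positional arguments and nk == 1 keyword pair,
   so n == 2 + 2*1 == 4 stack slots sit above the function object. */
#if 0
    int oparg = 0x0102;
    int na = oparg & 0xff;          /* == 2 */
    int nk = (oparg >> 8) & 0xff;   /* == 1 */
    int n  = na + 2 * nk;           /* == 4 */
#endif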
/* The fast_function() function optimizes calls for which no argument
   tuple is necessary; the objects are passed directly from the stack.
   For the simplest case -- a function that takes only positional
   arguments and is called with only positional arguments -- it
   inlines the most primitive frame setup code from
   PyEval_EvalCodeEx(), which vastly reduces the checks that must be
   done before evaluating the frame. */

static PyObject *
fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
{
    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
    PyObject *globals = PyFunction_GET_GLOBALS(func);
    PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
    PyObject **d = NULL;
    int nd = 0;

    PCALL(PCALL_FUNCTION);
    PCALL(PCALL_FAST_FUNCTION);
    if (argdefs == NULL && co->co_argcount == n && nk==0 &&
        co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
        PyFrameObject *f;
        PyObject *retval = NULL;
        PyThreadState *tstate = PyThreadState_GET();
        PyObject **fastlocals, **stack;
        int i;

        PCALL(PCALL_FASTER_FUNCTION);
        assert(globals != NULL);
        /* XXX Perhaps we should create a specialized
           PyFrame_New() that doesn't take locals, but does
           take builtins without sanity checking them. */
        assert(tstate != NULL);
        f = PyFrame_New(tstate, co, globals, NULL);
        if (f == NULL)
            return NULL;

        fastlocals = f->f_localsplus;
        stack = (*pp_stack) - n;

        for (i = 0; i < n; i++) {
            Py_INCREF(*stack);
            fastlocals[i] = *stack++;
        }
        retval = PyEval_EvalFrameEx(f,0);
        ++tstate->recursion_depth;
        Py_DECREF(f);
        --tstate->recursion_depth;
        return retval;
    }
    if (argdefs != NULL) {
        d = &PyTuple_GET_ITEM(argdefs, 0);
        nd = Py_SIZE(argdefs);
    }
    return PyEval_EvalCodeEx(co, globals,
                             (PyObject *)NULL, (*pp_stack)-n, na,
                             (*pp_stack)-2*nk, nk, d, nd,
                             PyFunction_GET_CLOSURE(func));
}
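/* Illustrative sketch (not part of ceval.c): the conditions fast_function()
   checks before taking its frame-inlining shortcut, written as a standalone
   predicate over a function object.  The helper name is hypothetical. */
#if 0
static int
takes_fast_path(PyObject *func, int n, int nk)
{
    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
    return PyFunction_GET_DEFAULTS(func) == NULL &&
           co->co_argcount == n && nk == 0 &&
           co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE);
}
#endif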
static PyObject *
update_keyword_args(PyObject *orig_kwdict, int nk, PyObject ***pp_stack,
                    PyObject *func)
{
    PyObject *kwdict = NULL;
    if (orig_kwdict == NULL)
        kwdict = PyDict_New();
    else {
        kwdict = PyDict_Copy(orig_kwdict);
        Py_DECREF(orig_kwdict);
    }
    if (kwdict == NULL)
        return NULL;
    while (--nk >= 0) {
        int err;
        PyObject *value = EXT_POP(*pp_stack);
        PyObject *key = EXT_POP(*pp_stack);
        if (PyDict_GetItem(kwdict, key) != NULL) {
            PyErr_Format(PyExc_TypeError,
                         "%.200s%s got multiple values "
                         "for keyword argument '%.200s'",
                         PyEval_GetFuncName(func),
                         PyEval_GetFuncDesc(func),
                         PyString_AsString(key));
            Py_DECREF(key);
            Py_DECREF(value);
            Py_DECREF(kwdict);
            return NULL;
        }
        err = PyDict_SetItem(kwdict, key, value);
        Py_DECREF(key);
        Py_DECREF(value);
        if (err) {
            Py_DECREF(kwdict);
            return NULL;
        }
    }
    return kwdict;
}

static PyObject *
update_star_args(int nstack, int nstar, PyObject *stararg,
                 PyObject ***pp_stack)
{
    PyObject *callargs, *w;

    callargs = PyTuple_New(nstack + nstar);
    if (callargs == NULL) {
        return NULL;
    }
    if (nstar) {
        int i;
        for (i = 0; i < nstar; i++) {
            PyObject *a = PyTuple_GET_ITEM(stararg, i);
            Py_INCREF(a);
            PyTuple_SET_ITEM(callargs, nstack + i, a);
        }
    }
    while (--nstack >= 0) {
        w = EXT_POP(*pp_stack);
        PyTuple_SET_ITEM(callargs, nstack, w);
    }
    return callargs;
}

static PyObject *
load_args(PyObject ***pp_stack, int na)
{
    PyObject *args = PyTuple_New(na);
    PyObject *w;

    if (args == NULL)
        return NULL;
    while (--na >= 0) {
        w = EXT_POP(*pp_stack);
        PyTuple_SET_ITEM(args, na, w);
    }
    return args;
}
static PyObject *
do_call(PyObject *func, PyObject ***pp_stack, int na, int nk)
{
    PyObject *callargs = NULL;
    PyObject *kwdict = NULL;
    PyObject *result = NULL;

    if (nk > 0) {
        kwdict = update_keyword_args(NULL, nk, pp_stack, func);
        if (kwdict == NULL)
            goto call_fail;
    }
    callargs = load_args(pp_stack, na);
    if (callargs == NULL)
        goto call_fail;
    /* At this point, we have to look at the type of func to
       update the call stats properly.  Do it here so as to avoid
       exposing the call stats machinery outside ceval.c */
    if (PyFunction_Check(func))
        PCALL(PCALL_FUNCTION);
    else if (PyMethod_Check(func))
        PCALL(PCALL_METHOD);
    else if (PyType_Check(func))
        PCALL(PCALL_TYPE);
    else if (PyCFunction_Check(func))
        PCALL(PCALL_CFUNCTION);
    else
        PCALL(PCALL_OTHER);
    if (PyCFunction_Check(func)) {
        PyThreadState *tstate = PyThreadState_GET();
        C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
    }
    else
        result = PyObject_Call(func, callargs, kwdict);
call_fail:
    Py_XDECREF(callargs);
    Py_XDECREF(kwdict);
    return result;
}

static PyObject *
ext_do_call(PyObject *func, PyObject ***pp_stack, int flags, int na, int nk)
{
    int nstar = 0;
    PyObject *callargs = NULL;
    PyObject *stararg = NULL;
    PyObject *kwdict = NULL;
    PyObject *result = NULL;

    if (flags & CALL_FLAG_KW) {
        kwdict = EXT_POP(*pp_stack);
        if (!PyDict_Check(kwdict)) {
            PyObject *d;
            d = PyDict_New();
            if (d == NULL)
                goto ext_call_fail;
            if (PyDict_Update(d, kwdict) != 0) {
                Py_DECREF(d);
                /* PyDict_Update raises attribute
                 * error (percolated from an attempt
                 * to get 'keys' attribute) instead of
                 * a type error if its second argument
                 * is not a mapping. */
                if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s%.200s argument after ** "
                                 "must be a mapping, not %.200s",
                                 PyEval_GetFuncName(func),
                                 PyEval_GetFuncDesc(func),
                                 kwdict->ob_type->tp_name);
                }
                goto ext_call_fail;
            }
            Py_DECREF(kwdict);
            kwdict = d;
        }
    }
    if (flags & CALL_FLAG_VAR) {
        stararg = EXT_POP(*pp_stack);
        if (!PyTuple_Check(stararg)) {
            PyObject *t = NULL;
            t = PySequence_Tuple(stararg);
            if (t == NULL) {
                if (PyErr_ExceptionMatches(PyExc_TypeError)) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s%.200s argument after * "
                                 "must be a sequence, not %.200s",
                                 PyEval_GetFuncName(func),
                                 PyEval_GetFuncDesc(func),
                                 stararg->ob_type->tp_name);
                }
                goto ext_call_fail;
            }
            Py_DECREF(stararg);
            stararg = t;
        }
        nstar = PyTuple_GET_SIZE(stararg);
    }
    if (nk > 0) {
        kwdict = update_keyword_args(kwdict, nk, pp_stack, func);
        if (kwdict == NULL)
            goto ext_call_fail;
    }
    callargs = update_star_args(na, nstar, stararg, pp_stack);
    if (callargs == NULL)
        goto ext_call_fail;
    /* At this point, we have to look at the type of func to
       update the call stats properly.  Do it here so as to avoid
       exposing the call stats machinery outside ceval.c */
    if (PyFunction_Check(func))
        PCALL(PCALL_FUNCTION);
    else if (PyMethod_Check(func))
        PCALL(PCALL_METHOD);
    else if (PyType_Check(func))
        PCALL(PCALL_TYPE);
    else if (PyCFunction_Check(func))
        PCALL(PCALL_CFUNCTION);
    else
        PCALL(PCALL_OTHER);
    if (PyCFunction_Check(func)) {
        PyThreadState *tstate = PyThreadState_GET();
        C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
    }
    else
        result = PyObject_Call(func, callargs, kwdict);
ext_call_fail:
    Py_XDECREF(callargs);
    Py_XDECREF(kwdict);
    Py_XDECREF(stararg);
    return result;
}
/* Extract a slice index from a PyInt or PyLong or an object with the
   nb_index slot defined, and store in *pi.
   Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
   and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
   Return 0 on error, 1 on success. */
/* Note:  If v is NULL, return success without storing into *pi.  This
   is because _PyEval_SliceIndex() is called by apply_slice(), which can be
   called by the SLICE opcode with v and/or w equal to NULL. */
int
_PyEval_SliceIndex(PyObject *v, Py_ssize_t *pi)
{
    if (v != NULL) {
        Py_ssize_t x;
        if (PyInt_Check(v)) {
            /* XXX(nnorwitz): I think PyInt_AS_LONG is correct,
               however, it looks like it should be AsSsize_t.
               There should be a comment here explaining why. */
            x = PyInt_AS_LONG(v);
        }
        else if (PyIndex_Check(v)) {
            x = PyNumber_AsSsize_t(v, NULL);
            if (x == -1 && PyErr_Occurred())
                return 0;
        }
        else {
            PyErr_SetString(PyExc_TypeError,
                            "slice indices must be integers or "
                            "None or have an __index__ method");
            return 0;
        }
        *pi = x;
    }
    return 1;
}

#define ISINDEX(x) ((x) == NULL || \
                    PyInt_Check(x) || PyLong_Check(x) || PyIndex_Check(x))

static PyObject *
apply_slice(PyObject *u, PyObject *v, PyObject *w) /* return u[v:w] */
{
    PyTypeObject *tp = u->ob_type;
    PySequenceMethods *sq = tp->tp_as_sequence;

    if (sq && sq->sq_slice && ISINDEX(v) && ISINDEX(w)) {
        Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
        if (!_PyEval_SliceIndex(v, &ilow))
            return NULL;
        if (!_PyEval_SliceIndex(w, &ihigh))
            return NULL;
        return PySequence_GetSlice(u, ilow, ihigh);
    }
    else {
        PyObject *slice = PySlice_New(v, w, NULL);
        if (slice != NULL) {
            PyObject *res = PyObject_GetItem(u, slice);
            Py_DECREF(slice);
            return res;
        }
        else
            return NULL;
    }
}
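/* Illustrative sketch (not part of ceval.c): the generic fallback used by
   apply_slice() above, taking integer bounds, building a slice object and
   indexing with it.  The helper name is hypothetical. */
#if 0
static PyObject *
get_slice_generic(PyObject *u, Py_ssize_t low, Py_ssize_t high)
{
    PyObject *res = NULL;
    PyObject *vlow = PyInt_FromSsize_t(low);
    PyObject *vhigh = PyInt_FromSsize_t(high);
    if (vlow != NULL && vhigh != NULL) {
        PyObject *slice = PySlice_New(vlow, vhigh, NULL);
        if (slice != NULL) {
            res = PyObject_GetItem(u, slice);
            Py_DECREF(slice);
        }
    }
    Py_XDECREF(vlow);
    Py_XDECREF(vhigh);
    return res;
}
#endif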
static int
assign_slice(PyObject *u, PyObject *v, PyObject *w, PyObject *x)
    /* u[v:w] = x */
{
    PyTypeObject *tp = u->ob_type;
    PySequenceMethods *sq = tp->tp_as_sequence;

    if (sq && sq->sq_ass_slice && ISINDEX(v) && ISINDEX(w)) {
        Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
        if (!_PyEval_SliceIndex(v, &ilow))
            return -1;
        if (!_PyEval_SliceIndex(w, &ihigh))
            return -1;
        if (x == NULL)
            return PySequence_DelSlice(u, ilow, ihigh);
        else
            return PySequence_SetSlice(u, ilow, ihigh, x);
    }
    else {
        PyObject *slice = PySlice_New(v, w, NULL);
        if (slice != NULL) {
            int res;
            if (x != NULL)
                res = PyObject_SetItem(u, slice, x);
            else
                res = PyObject_DelItem(u, slice);
            Py_DECREF(slice);
            return res;
        }
        else
            return -1;
    }
}
#define Py3kExceptionClass_Check(x)     \
    (PyType_Check((x)) &&               \
     PyType_FastSubclass((PyTypeObject*)(x), Py_TPFLAGS_BASE_EXC_SUBCLASS))

#define CANNOT_CATCH_MSG "catching classes that don't inherit from " \
                         "BaseException is not allowed in 3.x"

static PyObject *
cmp_outcome(int op, register PyObject *v, register PyObject *w)
{
    int res = 0;
    switch (op) {
    case PyCmp_IS:
        res = (v == w);
        break;
    case PyCmp_IS_NOT:
        res = (v != w);
        break;
    case PyCmp_IN:
        res = PySequence_Contains(w, v);
        if (res < 0)
            return NULL;
        break;
    case PyCmp_NOT_IN:
        res = PySequence_Contains(w, v);
        if (res < 0)
            return NULL;
        res = !res;
        break;
    case PyCmp_EXC_MATCH:
        if (PyTuple_Check(w)) {
            Py_ssize_t i, length;
            length = PyTuple_Size(w);
            for (i = 0; i < length; i += 1) {
                PyObject *exc = PyTuple_GET_ITEM(w, i);
                if (PyString_Check(exc)) {
                    int ret_val;
                    ret_val = PyErr_WarnEx(
                        PyExc_DeprecationWarning,
                        "catching of string "
                        "exceptions is deprecated", 1);
                    if (ret_val < 0)
                        return NULL;
                }
                else if (Py_Py3kWarningFlag &&
                         !PyTuple_Check(exc) &&
                         !Py3kExceptionClass_Check(exc))
                {
                    int ret_val;
                    ret_val = PyErr_WarnEx(
                        PyExc_DeprecationWarning,
                        CANNOT_CATCH_MSG, 1);
                    if (ret_val < 0)
                        return NULL;
                }
            }
        }
        else {
            if (PyString_Check(w)) {
                int ret_val;
                ret_val = PyErr_WarnEx(
                    PyExc_DeprecationWarning,
                    "catching of string "
                    "exceptions is deprecated", 1);
                if (ret_val < 0)
                    return NULL;
            }
            else if (Py_Py3kWarningFlag &&
                     !PyTuple_Check(w) &&
                     !Py3kExceptionClass_Check(w))
            {
                int ret_val;
                ret_val = PyErr_WarnEx(
                    PyExc_DeprecationWarning,
                    CANNOT_CATCH_MSG, 1);
                if (ret_val < 0)
                    return NULL;
            }
        }
        res = PyErr_GivenExceptionMatches(v, w);
        break;
    default:
        return PyObject_RichCompare(v, w, op);
    }
    v = res ? Py_True : Py_False;
    Py_INCREF(v);
    return v;
}
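/* Illustrative sketch (not part of ceval.c): the same matching primitive
   cmp_outcome() uses for PyCmp_EXC_MATCH, applied to the currently pending
   exception from C.  The helper name is hypothetical. */
#if 0
static int
pending_matches(PyObject *exc_class_or_tuple)
{
    PyObject *pending = PyErr_Occurred();   /* borrowed, may be NULL */
    if (pending == NULL)
        return 0;
    return PyErr_GivenExceptionMatches(pending, exc_class_or_tuple);
}
#endif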
static PyObject *
import_from(PyObject *v, PyObject *name)
{
    PyObject *x;

    x = PyObject_GetAttr(v, name);
    if (x == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Format(PyExc_ImportError,
                     "cannot import name %.230s",
                     PyString_AsString(name));
    }
    return x;
}

static int
import_all_from(PyObject *locals, PyObject *v)
{
    PyObject *all = PyObject_GetAttrString(v, "__all__");
    PyObject *dict, *name, *value;
    int skip_leading_underscores = 0;
    int pos, err;

    if (all == NULL) {
        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
            return -1; /* Unexpected error */
        PyErr_Clear();
        dict = PyObject_GetAttrString(v, "__dict__");
        if (dict == NULL) {
            if (!PyErr_ExceptionMatches(PyExc_AttributeError))
                return -1;
            PyErr_SetString(PyExc_ImportError,
                "from-import-* object has no __dict__ and no __all__");
            return -1;
        }
        all = PyMapping_Keys(dict);
        Py_DECREF(dict);
        if (all == NULL)
            return -1;
        skip_leading_underscores = 1;
    }

    for (pos = 0, err = 0; ; pos++) {
        name = PySequence_GetItem(all, pos);
        if (name == NULL) {
            if (!PyErr_ExceptionMatches(PyExc_IndexError))
                err = -1;
            else
                PyErr_Clear();
            break;
        }
        if (skip_leading_underscores &&
            PyString_Check(name) &&
            PyString_AS_STRING(name)[0] == '_') {
            Py_DECREF(name);
            continue;
        }
        value = PyObject_GetAttr(v, name);
        if (value == NULL)
            err = -1;
        else if (PyDict_CheckExact(locals))
            err = PyDict_SetItem(locals, name, value);
        else
            err = PyObject_SetItem(locals, name, value);
        Py_DECREF(name);
        Py_XDECREF(value);
        if (err != 0)
            break;
    }
    Py_DECREF(all);
    return err;
}
static PyObject *
build_class(PyObject *methods, PyObject *bases, PyObject *name)
{
    PyObject *metaclass = NULL, *result, *base;

    if (PyDict_Check(methods))
        metaclass = PyDict_GetItemString(methods, "__metaclass__");
    if (metaclass != NULL)
        Py_INCREF(metaclass);
    else if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
        base = PyTuple_GET_ITEM(bases, 0);
        metaclass = PyObject_GetAttrString(base, "__class__");
        if (metaclass == NULL) {
            PyErr_Clear();
            metaclass = (PyObject *)base->ob_type;
            Py_INCREF(metaclass);
        }
    }
    else {
        PyObject *g = PyEval_GetGlobals();
        if (g != NULL && PyDict_Check(g))
            metaclass = PyDict_GetItemString(g, "__metaclass__");
        if (metaclass == NULL)
            metaclass = (PyObject *) &PyClass_Type;
        Py_INCREF(metaclass);
    }
    result = PyObject_CallFunctionObjArgs(metaclass, name, bases, methods,
                                          NULL);
    Py_DECREF(metaclass);
    if (result == NULL && PyErr_ExceptionMatches(PyExc_TypeError)) {
        /* A type error here likely means that the user passed
           in a base that was not a class (such as the random module
           instead of the random.random type).  Help them out by
           augmenting the error message with more information. */

        PyObject *ptype, *pvalue, *ptraceback;

        PyErr_Fetch(&ptype, &pvalue, &ptraceback);
        if (PyString_Check(pvalue)) {
            PyObject *newmsg;
            newmsg = PyString_FromFormat(
                "Error when calling the metaclass bases\n"
                "    %s",
                PyString_AS_STRING(pvalue));
            if (newmsg != NULL) {
                Py_DECREF(pvalue);
                pvalue = newmsg;
            }
        }
        PyErr_Restore(ptype, pvalue, ptraceback);
    }
    return result;
}
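/* Illustrative sketch (not part of ceval.c): the call build_class() ends up
   making -- metaclass(name, bases, dict) -- spelled out for the default
   classic-class metaclass.  The helper name is hypothetical and error
   handling is abbreviated. */
#if 0
static PyObject *
make_classic_class(PyObject *name, PyObject *bases, PyObject *dict)
{
    return PyObject_CallFunctionObjArgs((PyObject *)&PyClass_Type,
                                        name, bases, dict, NULL);
}
#endif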
static int
exec_statement(PyFrameObject *f, PyObject *prog, PyObject *globals,
               PyObject *locals)
{
    int n;
    PyObject *v;
    int plain = 0;

    if (PyTuple_Check(prog) && globals == Py_None && locals == Py_None &&
        ((n = PyTuple_Size(prog)) == 2 || n == 3)) {
        /* Backward compatibility hack */
        globals = PyTuple_GetItem(prog, 1);
        if (n == 3)
            locals = PyTuple_GetItem(prog, 2);
        prog = PyTuple_GetItem(prog, 0);
    }
    if (globals == Py_None) {
        globals = PyEval_GetGlobals();
        if (locals == Py_None) {
            locals = PyEval_GetLocals();
            plain = 1;
        }
        if (!globals || !locals) {
            PyErr_SetString(PyExc_SystemError,
                            "globals and locals cannot be NULL");
            return -1;
        }
    }
    else if (locals == Py_None)
        locals = globals;
    if (!PyString_Check(prog) &&
#ifdef Py_USING_UNICODE
        !PyUnicode_Check(prog) &&
#endif
        !PyCode_Check(prog) &&
        !PyFile_Check(prog)) {
        PyErr_SetString(PyExc_TypeError,
            "exec: arg 1 must be a string, file, or code object");
        return -1;
    }
    if (!PyDict_Check(globals)) {
        PyErr_SetString(PyExc_TypeError,
            "exec: arg 2 must be a dictionary or None");
        return -1;
    }
    if (!PyMapping_Check(locals)) {
        PyErr_SetString(PyExc_TypeError,
            "exec: arg 3 must be a mapping or None");
        return -1;
    }
    if (PyDict_GetItemString(globals, "__builtins__") == NULL)
        PyDict_SetItemString(globals, "__builtins__", f->f_builtins);
    if (PyCode_Check(prog)) {
        if (PyCode_GetNumFree((PyCodeObject *)prog) > 0) {
            PyErr_SetString(PyExc_TypeError,
                "code object passed to exec may not contain free variables");
            return -1;
        }
        v = PyEval_EvalCode((PyCodeObject *) prog, globals, locals);
    }
    else if (PyFile_Check(prog)) {
        FILE *fp = PyFile_AsFile(prog);
        char *name = PyString_AsString(PyFile_Name(prog));
        PyCompilerFlags cf;
        cf.cf_flags = 0;
        if (PyEval_MergeCompilerFlags(&cf))
            v = PyRun_FileFlags(fp, name, Py_file_input, globals,
                                locals, &cf);
        else
            v = PyRun_File(fp, name, Py_file_input, globals,
                           locals);
    }
    else {
        PyObject *tmp = NULL;
        char *str;
        PyCompilerFlags cf;
        cf.cf_flags = 0;
#ifdef Py_USING_UNICODE
        if (PyUnicode_Check(prog)) {
            tmp = PyUnicode_AsUTF8String(prog);
            if (tmp == NULL)
                return -1;
            prog = tmp;
            cf.cf_flags |= PyCF_SOURCE_IS_UTF8;
        }
#endif
        if (PyString_AsStringAndSize(prog, &str, NULL))
            return -1;
        if (PyEval_MergeCompilerFlags(&cf))
            v = PyRun_StringFlags(str, Py_file_input, globals,
                                  locals, &cf);
        else
            v = PyRun_String(str, Py_file_input, globals, locals);
        Py_XDECREF(tmp);
    }
    if (plain)
        PyFrame_LocalsToFast(f, 0);
    if (v == NULL)
        return -1;
    Py_DECREF(v);
    return 0;
}
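/* Illustrative sketch (not part of ceval.c): the string branch of
   exec_statement() above reduced to its core -- compile and run a source
   string against explicit globals/locals, honouring inherited compiler
   flags.  The helper name is hypothetical. */
#if 0
static int
run_source(const char *src, PyObject *globals, PyObject *locals)
{
    PyObject *v;
    PyCompilerFlags cf;
    cf.cf_flags = 0;
    if (PyEval_MergeCompilerFlags(&cf))
        v = PyRun_StringFlags(src, Py_file_input, globals, locals, &cf);
    else
        v = PyRun_String(src, Py_file_input, globals, locals);
    if (v == NULL)
        return -1;
    Py_DECREF(v);
    return 0;
}
#endif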
static void
format_exc_check_arg(PyObject *exc, char *format_str, PyObject *obj)
{
    char *obj_str;

    if (!obj)
        return;

    obj_str = PyString_AsString(obj);
    if (!obj_str)
        return;

    PyErr_Format(exc, format_str, obj_str);
}

static PyObject *
string_concatenate(PyObject *v, PyObject *w,
                   PyFrameObject *f, unsigned char *next_instr)
{
    /* This function implements 'variable += expr' when both arguments
       are strings. */
    Py_ssize_t v_len = PyString_GET_SIZE(v);
    Py_ssize_t w_len = PyString_GET_SIZE(w);
    Py_ssize_t new_len = v_len + w_len;
    if (new_len < 0) {
        PyErr_SetString(PyExc_OverflowError,
                        "strings are too large to concat");
        return NULL;
    }

    if (v->ob_refcnt == 2) {
        /* In the common case, there are 2 references to the value
         * stored in 'variable' when the += is performed: one on the
         * value stack (in 'v') and one still stored in the
         * 'variable'.  We try to delete the variable now to reduce
         * the refcnt to 1. */
        switch (*next_instr) {
        case STORE_FAST:
        {
            int oparg = PEEKARG();
            PyObject **fastlocals = f->f_localsplus;
            if (GETLOCAL(oparg) == v)
                SETLOCAL(oparg, NULL);
            break;
        }
        case STORE_DEREF:
        {
            PyObject **freevars = (f->f_localsplus +
                                   f->f_code->co_nlocals);
            PyObject *c = freevars[PEEKARG()];
            if (PyCell_GET(c) == v)
                PyCell_Set(c, NULL);
            break;
        }
        case STORE_NAME:
        {
            PyObject *names = f->f_code->co_names;
            PyObject *name = GETITEM(names, PEEKARG());
            PyObject *locals = f->f_locals;
            if (PyDict_CheckExact(locals) &&
                PyDict_GetItem(locals, name) == v) {
                if (PyDict_DelItem(locals, name) != 0) {
                    PyErr_Clear();
                }
            }
            break;
        }
        }
    }

    if (v->ob_refcnt == 1 && !PyString_CHECK_INTERNED(v)) {
        /* Now we own the last reference to 'v', so we can resize it
         * in-place. */
        if (_PyString_Resize(&v, new_len) != 0) {
            /* XXX if _PyString_Resize() fails, 'v' has been
             * deallocated so it cannot be put back into
             * 'variable'.  The MemoryError is raised when there
             * is no value in 'variable', which might (very
             * remotely) be a cause of incompatibilities. */
            return NULL;
        }
        /* copy 'w' into the newly allocated area of 'v' */
        memcpy(PyString_AS_STRING(v) + v_len,
               PyString_AS_STRING(w), w_len);
        return v;
    }
    else {
        /* When in-place resizing is not an option. */
        PyString_Concat(&v, w);
        return v;
    }
}
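/* Illustrative sketch (not part of ceval.c): the in-place append that
   string_concatenate() performs when it owns the only reference to 'v'.
   _PyString_Resize() may move the object, which is only safe because no one
   else can hold a pointer to it.  The helper name is hypothetical. */
#if 0
static PyObject *
append_in_place(PyObject *v, const char *tail, Py_ssize_t tail_len)
{
    Py_ssize_t old_len = PyString_GET_SIZE(v);
    if (_PyString_Resize(&v, old_len + tail_len) != 0)
        return NULL;                  /* 'v' is already freed on failure */
    memcpy(PyString_AS_STRING(v) + old_len, tail, tail_len);
    return v;
}
#endif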
#ifdef DYNAMIC_EXECUTION_PROFILE

static PyObject *
getarray(long a[256])
{
    int i;
    PyObject *l = PyList_New(256);
    if (l == NULL) return NULL;
    for (i = 0; i < 256; i++) {
        PyObject *x = PyInt_FromLong(a[i]);
        if (x == NULL) {
            Py_DECREF(l);
            return NULL;
        }
        PyList_SetItem(l, i, x);
    }
    for (i = 0; i < 256; i++)
        a[i] = 0;
    return l;
}

PyObject *
_Py_GetDXProfile(PyObject *self, PyObject *args)
{
#ifndef DXPAIRS
    return getarray(dxp);
#else
    int i;
    PyObject *l = PyList_New(257);
    if (l == NULL) return NULL;
    for (i = 0; i < 257; i++) {
        PyObject *x = getarray(dxpairs[i]);
        if (x == NULL) {
            Py_DECREF(l);
            return NULL;
        }
        PyList_SetItem(l, i, x);