2 /* Execute compiled code */
5 XXX speed up searching for keywords by using a dictionary
9 /* enable more aggressive intra-module optimizations, where available */
10 #define PY_LOCAL_AGGRESSIVE
15 #include "frameobject.h"
18 #include "structmember.h"
24 #define READ_TIMESTAMP(var)
28 typedef unsigned long long uint64;
31 "__ppc__" appears to be the preprocessor definition to detect on OS X, whereas
32 "__powerpc__" appears to be the correct one for Linux with GCC
34 #if defined(__ppc__) || defined (__powerpc__)
36 #define READ_TIMESTAMP(var) ppc_getcounter(&var)
/* Read the PowerPC timebase into *v.  The upper half (mftbu) is read
   twice, bracketing the read of the lower half (mftb); if the two upper
   reads disagree, the low word rolled over mid-read and the sequence is
   retried.  NOTE(review): this extract elides interior lines — the
   "loop:" label targeted by the goto below, the store of the low word,
   and the closing brace are not visible here. */
39 ppc_getcounter(uint64 *v)
41 register unsigned long tbu, tb, tbu2;
44 asm volatile ("mftbu %0" : "=r" (tbu) );
45 asm volatile ("mftb %0" : "=r" (tb) );
46 asm volatile ("mftbu %0" : "=r" (tbu2));
/* Unlikely path: upper word changed between the two reads — retry. */
47 if (__builtin_expect(tbu != tbu2, 0)) goto loop;
49 /* The slightly peculiar way of writing the next lines is
50 compiled better by GCC than any other way I tried. */
51 ((long*)(v))[0] = tbu;
55 #elif defined(__i386__)
57 /* this is for linux/x86 (and probably any other GCC/x86 combo) */
59 #define READ_TIMESTAMP(val) \
60 __asm__ __volatile__("rdtsc" : "=A" (val))
62 #elif defined(__x86_64__)
64 /* for gcc/x86_64, the "A" constraint in DI mode means *either* rax *or* rdx;
65 not edx:eax as it does for i386. Since rdtsc puts its result in edx:eax
66 even in 64-bit mode, we need to use "a" and "d" for the lower and upper
67 32-bit pieces of the result. */
69 #define READ_TIMESTAMP(val) \
70 __asm__ __volatile__("rdtsc" : \
71 "=a" (((int*)&(val))[0]), "=d" (((int*)&(val))[1]));
76 #error "Don't know how to implement timestamp counter for this architecture"
/* Dump per-opcode timestamp-counter statistics to stderr.  Does nothing
   unless the interpreter's tscdump flag is set.  inst1-inst0 measures
   the opcode dispatch, loop1-loop0 the full eval-loop iteration; the
   interruption interval (intr1-intr0) is subtracted from both.
   NOTE(review): the computation of `intr` and the early return after
   the tscdump test are elided in this extract. */
80 void dump_tsc(int opcode, int ticked, uint64 inst0, uint64 inst1,
81 uint64 loop0, uint64 loop1, uint64 intr0, uint64 intr1)
83 uint64 intr, inst, loop;
84 PyThreadState *tstate = PyThreadState_Get();
85 if (!tstate->interp->tscdump)
88 inst = inst1 - inst0 - intr;
89 loop = loop1 - loop0 - intr;
90 fprintf(stderr, "opcode=%03d t=%d inst=%06lld loop=%06lld\n",
91 opcode, ticked, inst, loop);
96 /* Turn this on if your compiler chokes on the big switch: */
97 /* #define CASE_TOO_BIG 1 */
100 /* For debugging the interpreter: */
101 #define LLTRACE 1 /* Low-level trace feature */
102 #define CHECKEXC 1 /* Double-check exception checking */
105 typedef PyObject *(*callproc)(PyObject *, PyObject *, PyObject *);
107 /* Forward declarations */
109 static PyObject * call_function(PyObject ***, int, uint64*, uint64*);
111 static PyObject * call_function(PyObject ***, int);
113 static PyObject * fast_function(PyObject *, PyObject ***, int, int, int);
114 static PyObject * do_call(PyObject *, PyObject ***, int, int);
115 static PyObject * ext_do_call(PyObject *, PyObject ***, int, int, int);
116 static PyObject * update_keyword_args(PyObject *, int, PyObject ***,
118 static PyObject * update_star_args(int, int, PyObject *, PyObject ***);
119 static PyObject * load_args(PyObject ***, int);
120 #define CALL_FLAG_VAR 1
121 #define CALL_FLAG_KW 2
125 static int prtrace(PyObject *, char *);
127 static int call_trace(Py_tracefunc, PyObject *, PyFrameObject *,
129 static int call_trace_protected(Py_tracefunc, PyObject *,
130 PyFrameObject *, int, PyObject *);
131 static void call_exc_trace(Py_tracefunc, PyObject *, PyFrameObject *);
132 static int maybe_call_line_trace(Py_tracefunc, PyObject *,
133 PyFrameObject *, int *, int *, int *);
135 static PyObject * apply_slice(PyObject *, PyObject *, PyObject *);
136 static int assign_slice(PyObject *, PyObject *,
137 PyObject *, PyObject *);
138 static PyObject * cmp_outcome(int, PyObject *, PyObject *);
139 static PyObject * import_from(PyObject *, PyObject *);
140 static int import_all_from(PyObject *, PyObject *);
141 static PyObject * build_class(PyObject *, PyObject *, PyObject *);
142 static int exec_statement(PyFrameObject *,
143 PyObject *, PyObject *, PyObject *);
144 static void set_exc_info(PyThreadState *, PyObject *, PyObject *, PyObject *);
145 static void reset_exc_info(PyThreadState *);
146 static void format_exc_check_arg(PyObject *, char *, PyObject *);
147 static PyObject * string_concatenate(PyObject *, PyObject *,
148 PyFrameObject *, unsigned char *);
149 static PyObject * kwd_as_string(PyObject *);
150 static PyObject * special_lookup(PyObject *, char *, PyObject **);
152 #define NAME_ERROR_MSG \
153 "name '%.200s' is not defined"
154 #define GLOBAL_NAME_ERROR_MSG \
155 "global name '%.200s' is not defined"
156 #define UNBOUNDLOCAL_ERROR_MSG \
157 "local variable '%.200s' referenced before assignment"
158 #define UNBOUNDFREE_ERROR_MSG \
159 "free variable '%.200s' referenced before assignment" \
160 " in enclosing scope"
162 /* Dynamic execution profile */
163 #ifdef DYNAMIC_EXECUTION_PROFILE
165 static long dxpairs[257][256];
166 #define dxp dxpairs[256]
168 static long dxp[256];
172 /* Function call profile */
175 static int pcall[PCALL_NUM];
178 #define PCALL_FUNCTION 1
179 #define PCALL_FAST_FUNCTION 2
180 #define PCALL_FASTER_FUNCTION 3
181 #define PCALL_METHOD 4
182 #define PCALL_BOUND_METHOD 5
183 #define PCALL_CFUNCTION 6
185 #define PCALL_GENERATOR 8
186 #define PCALL_OTHER 9
189 /* Notes about the statistics
193 FAST_FUNCTION means no argument tuple needs to be created.
194 FASTER_FUNCTION means that the fast-path frame setup code is used.
196 If there is a method call where the call can be optimized by changing
197 the argument tuple and calling the function directly, it gets recorded
200 As a result, the relationship among the statistics appears to be
201 PCALL_ALL == PCALL_FUNCTION + PCALL_METHOD - PCALL_BOUND_METHOD +
202 PCALL_CFUNCTION + PCALL_TYPE + PCALL_GENERATOR + PCALL_OTHER
203 PCALL_FUNCTION > PCALL_FAST_FUNCTION > PCALL_FASTER_FUNCTION
204 PCALL_METHOD > PCALL_BOUND_METHOD
207 #define PCALL(POS) pcall[POS]++
/* CALL_PROFILE build: return the eleven function-call profile counters
   (pcall[0..10]) to Python as a tuple of ints.  `self` is the unused
   module object.  See the PCALL_* indices and the statistics notes
   above for what each slot counts. */
210 PyEval_GetCallStats(PyObject *self)
212 return Py_BuildValue("iiiiiiiiiii",
213 pcall[0], pcall[1], pcall[2], pcall[3],
214 pcall[4], pcall[5], pcall[6], pcall[7],
215 pcall[8], pcall[9], pcall[10]);
221 PyEval_GetCallStats(PyObject *self)
234 #include "pythread.h"
236 static PyThread_type_lock interpreter_lock = 0; /* This is the GIL */
237 static PyThread_type_lock pending_lock = 0; /* for pending calls */
238 static long main_thread = 0;
/* Return nonzero iff PyEval_InitThreads() has been called, i.e. the
   GIL (interpreter_lock) has been created. */
241 PyEval_ThreadsInitialized(void)
243 return interpreter_lock != 0;
/* Create the GIL and acquire it in the calling thread.  Idempotent:
   if the lock already exists this returns immediately (the elided body
   of the `if` below).  Also records the caller as the main thread, used
   later to route pending calls to it. */
247 PyEval_InitThreads(void)
249 if (interpreter_lock)
251 interpreter_lock = PyThread_allocate_lock();
252 PyThread_acquire_lock(interpreter_lock, 1);
253 main_thread = PyThread_get_thread_ident();
/* Acquire the GIL unconditionally (blocking).  Caller must ensure the
   lock exists; no thread-state bookkeeping is done here. */
257 PyEval_AcquireLock(void)
259 PyThread_acquire_lock(interpreter_lock, 1);
/* Release the GIL.  Counterpart of PyEval_AcquireLock(); no
   thread-state bookkeeping. */
263 PyEval_ReleaseLock(void)
265 PyThread_release_lock(interpreter_lock);
/* Acquire the GIL and make `tstate` the current thread state.
   Fatal errors: NULL tstate (the elided check before line 272), the
   GIL not yet created, or a different thread state already current
   after the swap.  NOTE(review): the NULL test and the Py_FatalError
   call wrapping line 278 are elided in this extract. */
269 PyEval_AcquireThread(PyThreadState *tstate)
272 Py_FatalError("PyEval_AcquireThread: NULL new thread state");
273 /* Check someone has called PyEval_InitThreads() to create the lock */
274 assert(interpreter_lock);
275 PyThread_acquire_lock(interpreter_lock, 1);
276 if (PyThreadState_Swap(tstate) != NULL)
278 "PyEval_AcquireThread: non-NULL old thread state");
/* Clear the current thread state (which must be `tstate`) and release
   the GIL.  Fatal errors on NULL tstate (elided check) or if some other
   thread state was current. */
282 PyEval_ReleaseThread(PyThreadState *tstate)
285 Py_FatalError("PyEval_ReleaseThread: NULL thread state");
286 if (PyThreadState_Swap(NULL) != tstate)
287 Py_FatalError("PyEval_ReleaseThread: wrong thread state");
288 PyThread_release_lock(interpreter_lock);
291 /* This function is called from PyOS_AfterFork to ensure that newly
292 created child processes don't hold locks referring to threads which
293 are not running in the child process. (This could also be done using
294 pthread_atfork mechanism, at least for the pthreads implementation.) */
/* Called after fork() (from PyOS_AfterFork): the child must not reuse
   locks that threads of the parent may have held.  If threading was
   never initialized this is a no-op (elided early return after the
   test below).  Fresh GIL and pending-call locks are allocated (the
   old ones are deliberately leaked, see the XXX comment), the GIL is
   acquired, the child is recorded as the main thread, and
   threading._after_fork() is invoked to fix up the threading module's
   bookkeeping. */
297 PyEval_ReInitThreads(void)
299 PyObject *threading, *result;
300 PyThreadState *tstate;
302 if (!interpreter_lock)
304 /*XXX Can't use PyThread_free_lock here because it does too
305 much error-checking. Doing this cleanly would require
306 adding a new function to each thread_*.h. Instead, just
307 create a new lock and waste a little bit of memory */
308 interpreter_lock = PyThread_allocate_lock();
309 pending_lock = PyThread_allocate_lock();
310 PyThread_acquire_lock(interpreter_lock, 1);
311 main_thread = PyThread_get_thread_ident();
313 /* Update the threading module with the new state.
315 tstate = PyThreadState_GET();
316 threading = PyMapping_GetItemString(tstate->interp->modules,
318 if (threading == NULL) {
319 /* threading not imported */
/* A failure in _after_fork can't be propagated; report and continue. */
323 result = PyObject_CallMethod(threading, "_after_fork", NULL);
325 PyErr_WriteUnraisable(threading);
328 Py_DECREF(threading);
332 /* Functions save_thread and restore_thread are always defined so
333 dynamically loaded modules needn't be compiled separately for use
334 with and without threads: */
/* Swap out the current thread state (returning it to the caller so it
   can later be restored) and release the GIL if threading is
   initialized.  Fatal error if no thread state was current.  This is
   the Py_BEGIN_ALLOW_THREADS primitive. */
337 PyEval_SaveThread(void)
339 PyThreadState *tstate = PyThreadState_Swap(NULL);
341 Py_FatalError("PyEval_SaveThread: NULL tstate");
343 if (interpreter_lock)
344 PyThread_release_lock(interpreter_lock);
/* Re-acquire the GIL (if threading is initialized) and reinstall the
   thread state previously returned by PyEval_SaveThread().  Fatal
   error on NULL tstate (elided check).  This is the
   Py_END_ALLOW_THREADS primitive. */
350 PyEval_RestoreThread(PyThreadState *tstate)
353 Py_FatalError("PyEval_RestoreThread: NULL tstate");
355 if (interpreter_lock) {
357 PyThread_acquire_lock(interpreter_lock, 1);
361 PyThreadState_Swap(tstate);
365 /* Mechanism whereby asynchronously executing callbacks (e.g. UNIX
366 signal handlers or Mac I/O completion routines) can schedule calls
367 to a function to be called synchronously.
368 The synchronous function is called with one void* argument.
369 It should return 0 for success or -1 for failure -- failure should
370 be accompanied by an exception.
372 If registry succeeds, the registry function returns 0; if it fails
373 (e.g. due to too many pending calls) it returns -1 (without setting
374 an exception condition).
376 Note that because registry may occur from within signal handlers,
377 or other asynchronous events, calling malloc() is unsafe!
380 Any thread can schedule pending calls, but only the main thread
382 There is no facility to schedule calls to a particular thread, but
383 that should be easy to change, should that ever be required. In
384 that case, the static variables here should go into the python
391 /* The WITH_THREAD implementation is thread-safe. It allows
392 scheduling to be made from any thread, and even from an executing
396 #define NPENDINGCALLS 32
400 } pendingcalls[NPENDINGCALLS];
401 static int pendingfirst = 0;
402 static int pendinglast = 0;
403 static volatile int pendingcalls_to_do = 1; /* trigger initialization of lock */
404 static char pendingbusy = 0;
/* WITH_THREAD version.  Schedule func(arg) to be called synchronously
   by the main thread's eval loop.  Safe to call from any thread and
   from signal handlers.  Returns 0 on success, -1 on failure (queue
   full, or the pending_lock could not be acquired after the bounded
   retry loop below).  NOTE(review): declarations of i/j/result, the
   enqueue bookkeeping between lines 435-438, and the return are elided
   in this extract. */
407 Py_AddPendingCall(int (*func)(void *), void *arg)
410 PyThread_type_lock lock = pending_lock;
412 /* try a few times for the lock. Since this mechanism is used
413 * for signal handling (on the main thread), there is a (slim)
414 * chance that a signal is delivered on the same thread while we
415 * hold the lock during the Py_MakePendingCalls() function.
416 * This avoids a deadlock in that case.
417 * Note that signals can be delivered on any thread. In particular,
418 * on Windows, a SIGINT is delivered on a system-created worker
420 * We also check for lock being NULL, in the unlikely case that
421 * this function is called before any bytecode evaluation takes place.
424 for (i = 0; i<100; i++) {
425 if (PyThread_acquire_lock(lock, NOWAIT_LOCK))
/* Ring-buffer enqueue: full when advancing last would hit first. */
433 j = (i + 1) % NPENDINGCALLS;
434 if (j == pendingfirst) {
435 result = -1; /* Queue full */
437 pendingcalls[i].func = func;
438 pendingcalls[i].arg = arg;
441 /* signal main loop */
443 pendingcalls_to_do = 1;
445 PyThread_release_lock(lock);
/* WITH_THREAD version.  Run queued pending calls.  Only the main
   thread services the queue; recursive invocations are refused (the
   `pendingbusy` guard, elided here).  Lazily allocates pending_lock on
   first use.  At most NPENDINGCALLS items are processed per invocation
   to bound work in case callbacks re-add themselves; each item is
   popped under pending_lock but executed after releasing it.
   NOTE(review): the surrounding declarations, loop-exit conditions and
   the callback invocation/error handling after line 487 are elided in
   this extract. */
450 Py_MakePendingCalls(void)
456 /* initial allocation of the lock */
457 pending_lock = PyThread_allocate_lock();
458 if (pending_lock == NULL)
462 /* only service pending calls on main thread */
463 if (main_thread && PyThread_get_thread_ident() != main_thread)
465 /* don't perform recursive pending calls */
469 /* perform a bounded number of calls, in case of recursion */
470 for (i=0; i<NPENDINGCALLS; i++) {
475 /* pop one item off the queue while holding the lock */
476 PyThread_acquire_lock(pending_lock, WAIT_LOCK);
478 if (j == pendinglast) {
479 func = NULL; /* Queue empty */
481 func = pendingcalls[j].func;
482 arg = pendingcalls[j].arg;
483 pendingfirst = (j + 1) % NPENDINGCALLS;
/* Keep the "work to do" flag consistent with queue emptiness. */
485 pendingcalls_to_do = pendingfirst != pendinglast;
486 PyThread_release_lock(pending_lock);
487 /* having released the lock, perform the callback */
498 #else /* if ! defined WITH_THREAD */
501 WARNING! ASYNCHRONOUSLY EXECUTING CODE!
502 This code is used for signal handling in python that isn't built
504 Don't use this implementation when Py_AddPendingCalls() can happen
505 on a different thread!
507 There are two possible race conditions:
508 (1) nested asynchronous calls to Py_AddPendingCall()
509 (2) AddPendingCall() calls made while pending calls are being processed.
511 (1) is very unlikely because typically signal delivery
512 is blocked during signal handling. So it should be impossible.
513 (2) is a real possibility.
514 The current code is safe against (2), but not against (1).
515 The safety against (2) is derived from the fact that only one
516 thread is present, interrupted by signals, and that the critical
517 section is protected with the "busy" variable. On Windows, which
518 delivers SIGINT on a system thread, this does not hold and therefore
519 Windows really shouldn't use this version.
520 The two threads could theoretically wiggle around the "busy" variable.
523 #define NPENDINGCALLS 32
527 } pendingcalls[NPENDINGCALLS];
528 static volatile int pendingfirst = 0;
529 static volatile int pendinglast = 0;
530 static volatile int pendingcalls_to_do = 0;
/* Non-threaded version.  Schedule func(arg) for the main loop; see the
   big WARNING comment above — only safe against re-entry from signal
   handlers via the static `busy` flag, not against nested asynchronous
   calls.  Returns -1 if the queue is full (the early-return visible
   below) — NOTE(review): the busy-flag set/clear, index declarations
   and the success return are elided in this extract. */
533 Py_AddPendingCall(int (*func)(void *), void *arg)
535 static volatile int busy = 0;
537 /* XXX Begin critical section */
542 j = (i + 1) % NPENDINGCALLS;
543 if (j == pendingfirst) {
545 return -1; /* Queue full */
547 pendingcalls[i].func = func;
548 pendingcalls[i].arg = arg;
552 pendingcalls_to_do = 1; /* Signal main loop */
554 /* XXX End critical section */
/* Non-threaded version.  Drain the pending-call ring buffer, clearing
   pendingcalls_to_do up front and re-raising it if a callback fails so
   the main loop retries.  NOTE(review): the recursion guard, loop
   header, the callback invocation and return statements are elided in
   this extract. */
559 Py_MakePendingCalls(void)
565 pendingcalls_to_do = 0;
571 if (i == pendinglast)
572 break; /* Queue empty */
573 func = pendingcalls[i].func;
574 arg = pendingcalls[i].arg;
575 pendingfirst = (i + 1) % NPENDINGCALLS;
578 pendingcalls_to_do = 1; /* We're not done yet */
586 #endif /* WITH_THREAD */
589 /* The interpreter's recursion limit */
591 #ifndef Py_DEFAULT_RECURSION_LIMIT
592 #define Py_DEFAULT_RECURSION_LIMIT 1000
594 static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT;
595 int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT;
/* Return the interpreter's current maximum recursion depth. */
598 Py_GetRecursionLimit(void)
600 return recursion_limit;
/* Set the maximum recursion depth and keep the fast-path trigger
   _Py_CheckRecursionLimit in sync with it (see the comment below on
   how Py_EnterRecursiveCall uses that value). */
604 Py_SetRecursionLimit(int new_limit)
606 recursion_limit = new_limit;
607 _Py_CheckRecursionLimit = recursion_limit;
610 /* the macro Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall()
611 if the recursion_depth reaches _Py_CheckRecursionLimit.
612 If USE_STACKCHECK, the macro decrements _Py_CheckRecursionLimit
613 to guarantee that _Py_CheckRecursiveCall() is regularly called.
614 Without USE_STACKCHECK, there is no need for this. */
/* Slow-path recursion check, invoked by Py_EnterRecursiveCall() once
   recursion_depth reaches _Py_CheckRecursionLimit.  Raises MemoryError
   on C-stack exhaustion (USE_STACKCHECK builds) or RuntimeError when
   the recursion limit is exceeded, undoing the depth increment in both
   cases.  `where` is appended to the error message (format at line
   630).  NOTE(review): the returns and the closing of each branch are
   elided in this extract. */
616 _Py_CheckRecursiveCall(char *where)
618 PyThreadState *tstate = PyThreadState_GET();
620 #ifdef USE_STACKCHECK
621 if (PyOS_CheckStack()) {
622 --tstate->recursion_depth;
623 PyErr_SetString(PyExc_MemoryError, "Stack overflow");
627 if (tstate->recursion_depth > recursion_limit) {
628 --tstate->recursion_depth;
629 PyErr_Format(PyExc_RuntimeError,
630 "maximum recursion depth exceeded%s",
/* Reset the trigger so the fast path works again for deeper limits. */
634 _Py_CheckRecursionLimit = recursion_limit;
638 /* Status code for main loop (reason for stack unwind) */
640 WHY_NOT = 0x0001, /* No error */
641 WHY_EXCEPTION = 0x0002, /* Exception occurred */
642 WHY_RERAISE = 0x0004, /* Exception re-raised by 'finally' */
643 WHY_RETURN = 0x0008, /* 'return' statement */
644 WHY_BREAK = 0x0010, /* 'break' statement */
645 WHY_CONTINUE = 0x0020, /* 'continue' statement */
646 WHY_YIELD = 0x0040 /* 'yield' operator */
649 static enum why_code do_raise(PyObject *, PyObject *, PyObject *);
650 static int unpack_iterable(PyObject *, int, PyObject **);
652 /* Records whether tracing is on for any thread. Counts the number of
653 threads for which tstate->c_tracefunc is non-NULL, so if the value
654 is 0, we know we don't have to check this thread's c_tracefunc.
655 This speeds up the if statement in PyEval_EvalFrameEx() after
657 static int _Py_TracingPossible = 0;
659 /* for manipulating the thread switch and periodic "stuff" - used to be
660 per thread, now just a pair o' globals */
661 int _Py_CheckInterval = 100;
662 volatile int _Py_Ticker = 0; /* so that we hit a "tick" first thing */
/* Evaluate a code object with the given globals/locals and no
   arguments: a thin convenience wrapper that forwards to
   PyEval_EvalCodeEx with empty args, keywords, defaults and closure
   (some of those NULL arguments are elided in this extract). */
665 PyEval_EvalCode(PyCodeObject *co, PyObject *globals, PyObject *locals)
667 return PyEval_EvalCodeEx(co,
669 (PyObject **)NULL, 0,
670 (PyObject **)NULL, 0,
671 (PyObject **)NULL, 0,
676 /* Interpreter main loop */
/* Backward-compatibility wrapper: evaluate a frame with throwflag=0.
   New code should call PyEval_EvalFrameEx directly. */
679 PyEval_EvalFrame(PyFrameObject *f) {
680 /* This is for backward compatibility with extension modules that
681 used this API; core interpreter code should call
682 PyEval_EvalFrameEx() */
683 return PyEval_EvalFrameEx(f, 0);
687 PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
692 register PyObject **stack_pointer; /* Next free slot in value stack */
693 register unsigned char *next_instr;
694 register int opcode; /* Current opcode */
695 register int oparg; /* Current opcode argument, if any */
696 register enum why_code why; /* Reason for block stack unwind */
697 register int err; /* Error status -- nonzero if error */
698 register PyObject *x; /* Result object -- NULL if error */
699 register PyObject *v; /* Temporary objects popped off stack */
700 register PyObject *w;
701 register PyObject *u;
702 register PyObject *t;
703 register PyObject *stream = NULL; /* for PRINT opcodes */
704 register PyObject **fastlocals, **freevars;
705 PyObject *retval = NULL; /* Return value */
706 PyThreadState *tstate = PyThreadState_GET();
709 /* when tracing we set things up so that
711 not (instr_lb <= current_bytecode_offset < instr_ub)
713 is true when the line being executed has changed. The
714 initial values are such as to make this false the first
715 time it is tested. */
716 int instr_ub = -1, instr_lb = 0, instr_prev = -1;
718 unsigned char *first_instr;
721 #if defined(Py_DEBUG) || defined(LLTRACE)
722 /* Make it easier to find out where we are with a debugger */
726 /* Tuple access macros */
729 #define GETITEM(v, i) PyTuple_GET_ITEM((PyTupleObject *)(v), (i))
731 #define GETITEM(v, i) PyTuple_GetItem((v), (i))
735 /* Use Pentium timestamp counter to mark certain events:
736 inst0 -- beginning of switch statement for opcode dispatch
737 inst1 -- end of switch statement (may be skipped)
738 loop0 -- the top of the mainloop
739 loop1 -- place where control returns again to top of mainloop
741 intr1 -- beginning of long interruption
742 intr2 -- end of long interruption
744 Many opcodes call out to helper C functions. In some cases, the
745 time in those functions should be counted towards the time for the
746 opcode, but not in all cases. For example, a CALL_FUNCTION opcode
747 calls another Python function; there's no point in charge all the
748 bytecode executed by the called function to the caller.
750 It's hard to make a useful judgement statically. In the presence
751 of operator overloading, it's impossible to tell if a call will
752 execute new Python code or not.
754 It's a case-by-case judgement. I'll use intr1 for the following
760 CALL_FUNCTION (and friends)
763 uint64 inst0, inst1, loop0, loop1, intr0 = 0, intr1 = 0;
766 READ_TIMESTAMP(inst0);
767 READ_TIMESTAMP(inst1);
768 READ_TIMESTAMP(loop0);
769 READ_TIMESTAMP(loop1);
771 /* shut up the compiler */
775 /* Code access macros */
777 #define INSTR_OFFSET() ((int)(next_instr - first_instr))
778 #define NEXTOP() (*next_instr++)
779 #define NEXTARG() (next_instr += 2, (next_instr[-1]<<8) + next_instr[-2])
780 #define PEEKARG() ((next_instr[2]<<8) + next_instr[1])
781 #define JUMPTO(x) (next_instr = first_instr + (x))
782 #define JUMPBY(x) (next_instr += (x))
784 /* OpCode prediction macros
785 Some opcodes tend to come in pairs thus making it possible to
786 predict the second code when the first is run. For example,
787 GET_ITER is often followed by FOR_ITER. And FOR_ITER is often
788 followed by STORE_FAST or UNPACK_SEQUENCE.
790 Verifying the prediction costs a single high-speed test of a register
791 variable against a constant. If the pairing was good, then the
792 processor's own internal branch predication has a high likelihood of
793 success, resulting in a nearly zero-overhead transition to the
794 next opcode. A successful prediction saves a trip through the eval-loop
795 including its two unpredictable branches, the HAS_ARG test and the
796 switch-case. Combined with the processor's internal branch prediction,
797 a successful PREDICT has the effect of making the two opcodes run as if
798 they were a single new opcode with the bodies combined.
800 If collecting opcode statistics, your choices are to either keep the
801 predictions turned-on and interpret the results as if some opcodes
802 had been combined or turn-off predictions so that the opcode frequency
803 counter updates for both opcodes.
806 #ifdef DYNAMIC_EXECUTION_PROFILE
807 #define PREDICT(op) if (0) goto PRED_##op
809 #define PREDICT(op) if (*next_instr == op) goto PRED_##op
812 #define PREDICTED(op) PRED_##op: next_instr++
813 #define PREDICTED_WITH_ARG(op) PRED_##op: oparg = PEEKARG(); next_instr += 3
815 /* Stack manipulation macros */
817 /* The stack can grow at most MAXINT deep, as co_nlocals and
818 co_stacksize are ints. */
819 #define STACK_LEVEL() ((int)(stack_pointer - f->f_valuestack))
820 #define EMPTY() (STACK_LEVEL() == 0)
821 #define TOP() (stack_pointer[-1])
822 #define SECOND() (stack_pointer[-2])
823 #define THIRD() (stack_pointer[-3])
824 #define FOURTH() (stack_pointer[-4])
825 #define PEEK(n) (stack_pointer[-(n)])
826 #define SET_TOP(v) (stack_pointer[-1] = (v))
827 #define SET_SECOND(v) (stack_pointer[-2] = (v))
828 #define SET_THIRD(v) (stack_pointer[-3] = (v))
829 #define SET_FOURTH(v) (stack_pointer[-4] = (v))
830 #define SET_VALUE(n, v) (stack_pointer[-(n)] = (v))
831 #define BASIC_STACKADJ(n) (stack_pointer += n)
832 #define BASIC_PUSH(v) (*stack_pointer++ = (v))
833 #define BASIC_POP() (*--stack_pointer)
836 #define PUSH(v) { (void)(BASIC_PUSH(v), \
837 lltrace && prtrace(TOP(), "push")); \
838 assert(STACK_LEVEL() <= co->co_stacksize); }
839 #define POP() ((void)(lltrace && prtrace(TOP(), "pop")), \
841 #define STACKADJ(n) { (void)(BASIC_STACKADJ(n), \
842 lltrace && prtrace(TOP(), "stackadj")); \
843 assert(STACK_LEVEL() <= co->co_stacksize); }
844 #define EXT_POP(STACK_POINTER) ((void)(lltrace && \
845 prtrace((STACK_POINTER)[-1], "ext_pop")), \
848 #define PUSH(v) BASIC_PUSH(v)
849 #define POP() BASIC_POP()
850 #define STACKADJ(n) BASIC_STACKADJ(n)
851 #define EXT_POP(STACK_POINTER) (*--(STACK_POINTER))
854 /* Local variable macros */
856 #define GETLOCAL(i) (fastlocals[i])
858 /* The SETLOCAL() macro must not DECREF the local variable in-place and
859 then store the new value; it must copy the old value to a temporary
860 value, then store the new value, and then DECREF the temporary value.
861 This is because it is possible that during the DECREF the frame is
862 accessed by other code (e.g. a __del__ method or gc.collect()) and the
863 variable would be pointing to already-freed memory. */
864 #define SETLOCAL(i, value) do { PyObject *tmp = GETLOCAL(i); \
865 GETLOCAL(i) = value; \
866 Py_XDECREF(tmp); } while (0)
874 if (Py_EnterRecursiveCall(""))
879 if (tstate->use_tracing) {
880 if (tstate->c_tracefunc != NULL) {
881 /* tstate->c_tracefunc, if defined, is a
882 function that will be called on *every* entry
883 to a code block. Its return value, if not
884 None, is a function that will be called at
885 the start of each executed line of code.
886 (Actually, the function must return itself
887 in order to continue tracing.) The trace
888 functions are called with three arguments:
889 a pointer to the current frame, a string
890 indicating why the function is called, and
891 an argument which depends on the situation.
892 The global trace function is also called
893 whenever an exception is detected. */
894 if (call_trace_protected(tstate->c_tracefunc,
896 f, PyTrace_CALL, Py_None)) {
897 /* Trace function raised an error */
898 goto exit_eval_frame;
901 if (tstate->c_profilefunc != NULL) {
902 /* Similar for c_profilefunc, except it needn't
903 return itself and isn't called for "line" events */
904 if (call_trace_protected(tstate->c_profilefunc,
905 tstate->c_profileobj,
906 f, PyTrace_CALL, Py_None)) {
907 /* Profile function raised an error */
908 goto exit_eval_frame;
914 names = co->co_names;
915 consts = co->co_consts;
916 fastlocals = f->f_localsplus;
917 freevars = f->f_localsplus + co->co_nlocals;
918 first_instr = (unsigned char*) PyString_AS_STRING(co->co_code);
919 /* An explanation is in order for the next line.
921 f->f_lasti now refers to the index of the last instruction
922 executed. You might think this was obvious from the name, but
923 this wasn't always true before 2.3! PyFrame_New now sets
924 f->f_lasti to -1 (i.e. the index *before* the first instruction)
925 and YIELD_VALUE doesn't fiddle with f_lasti any more. So this
928 When the PREDICT() macros are enabled, some opcode pairs follow in
929 direct succession without updating f->f_lasti. A successful
930 prediction effectively links the two codes together as if they
931 were a single new opcode; accordingly,f->f_lasti will point to
932 the first code in the pair (for instance, GET_ITER followed by
933 FOR_ITER is effectively a single opcode and f->f_lasti will point
934 at to the beginning of the combined pair.)
936 next_instr = first_instr + f->f_lasti + 1;
937 stack_pointer = f->f_stacktop;
938 assert(stack_pointer != NULL);
939 f->f_stacktop = NULL; /* remains NULL unless yield suspends frame */
942 lltrace = PyDict_GetItemString(f->f_globals, "__lltrace__") != NULL;
944 #if defined(Py_DEBUG) || defined(LLTRACE)
945 filename = PyString_AsString(co->co_filename);
950 x = Py_None; /* Not a reference, just anything non-NULL */
953 if (throwflag) { /* support for generator.throw() */
961 /* Almost surely, the opcode executed a break
962 or a continue, preventing inst1 from being set
963 on the way out of the loop.
965 READ_TIMESTAMP(inst1);
968 dump_tsc(opcode, ticked, inst0, inst1, loop0, loop1,
974 READ_TIMESTAMP(loop0);
976 assert(stack_pointer >= f->f_valuestack); /* else underflow */
977 assert(STACK_LEVEL() <= co->co_stacksize); /* else overflow */
979 /* Do periodic things. Doing this every time through
980 the loop would add too much overhead, so we do it
981 only every Nth instruction. We also do it if
982 ``pendingcalls_to_do'' is set, i.e. when an asynchronous
983 event needs attention (e.g. a signal handler or
984 async I/O handler); see Py_AddPendingCall() and
985 Py_MakePendingCalls() above. */
987 if (--_Py_Ticker < 0) {
988 if (*next_instr == SETUP_FINALLY) {
989 /* Make the last opcode before
990 a try: finally: block uninterruptible. */
991 goto fast_next_opcode;
993 _Py_Ticker = _Py_CheckInterval;
994 tstate->tick_counter++;
998 if (pendingcalls_to_do) {
999 if (Py_MakePendingCalls() < 0) {
1000 why = WHY_EXCEPTION;
1003 if (pendingcalls_to_do)
1004 /* MakePendingCalls() didn't succeed.
1005 Force early re-execution of this
1006 "periodic" code, possibly after
1011 if (interpreter_lock) {
1012 /* Give another thread a chance */
1014 if (PyThreadState_Swap(NULL) != tstate)
1015 Py_FatalError("ceval: tstate mix-up");
1016 PyThread_release_lock(interpreter_lock);
1018 /* Other threads may run now */
1020 PyThread_acquire_lock(interpreter_lock, 1);
1021 if (PyThreadState_Swap(tstate) != NULL)
1022 Py_FatalError("ceval: orphan tstate");
1024 /* Check for thread interrupts */
1026 if (tstate->async_exc != NULL) {
1027 x = tstate->async_exc;
1028 tstate->async_exc = NULL;
1031 why = WHY_EXCEPTION;
1039 f->f_lasti = INSTR_OFFSET();
1041 /* line-by-line tracing support */
1043 if (_Py_TracingPossible &&
1044 tstate->c_tracefunc != NULL && !tstate->tracing) {
1045 /* see maybe_call_line_trace
1046 for expository comments */
1047 f->f_stacktop = stack_pointer;
1049 err = maybe_call_line_trace(tstate->c_tracefunc,
1051 f, &instr_lb, &instr_ub,
1053 /* Reload possibly changed frame fields */
1055 if (f->f_stacktop != NULL) {
1056 stack_pointer = f->f_stacktop;
1057 f->f_stacktop = NULL;
1060 /* trace function raised an exception */
1065 /* Extract opcode and argument */
1068 oparg = 0; /* allows oparg to be stored in a register because
1069 it doesn't have to be remembered across a full loop */
1070 if (HAS_ARG(opcode))
1073 #ifdef DYNAMIC_EXECUTION_PROFILE
1075 dxpairs[lastopcode][opcode]++;
1076 lastopcode = opcode;
1082 /* Instruction tracing */
1085 if (HAS_ARG(opcode)) {
1086 printf("%d: %d, %d\n",
1087 f->f_lasti, opcode, oparg);
1091 f->f_lasti, opcode);
1096 /* Main switch on opcode */
1097 READ_TIMESTAMP(inst0);
1102 It is essential that any operation that fails sets either
1103 x to NULL, err to nonzero, or why to anything but WHY_NOT,
1104 and that no operation that succeeds does this! */
1106 /* case STOP_CODE: this is an error! */
1109 goto fast_next_opcode;
1112 x = GETLOCAL(oparg);
1116 goto fast_next_opcode;
1118 format_exc_check_arg(PyExc_UnboundLocalError,
1119 UNBOUNDLOCAL_ERROR_MSG,
1120 PyTuple_GetItem(co->co_varnames, oparg));
1124 x = GETITEM(consts, oparg);
1127 goto fast_next_opcode;
1129 PREDICTED_WITH_ARG(STORE_FAST);
1133 goto fast_next_opcode;
1138 goto fast_next_opcode;
1145 goto fast_next_opcode;
1154 goto fast_next_opcode;
1165 goto fast_next_opcode;
1171 goto fast_next_opcode;
1182 goto fast_next_opcode;
1183 } else if (oparg == 3) {
1194 goto fast_next_opcode;
1196 Py_FatalError("invalid argument to DUP_TOPX"
1197 " (bytecode corruption?)");
1198 /* Never returns, so don't bother to set why. */
1201 case UNARY_POSITIVE:
1203 x = PyNumber_Positive(v);
1206 if (x != NULL) continue;
1209 case UNARY_NEGATIVE:
1211 x = PyNumber_Negative(v);
1214 if (x != NULL) continue;
1219 err = PyObject_IsTrue(v);
1227 Py_INCREF(Py_False);
1237 x = PyObject_Repr(v);
1240 if (x != NULL) continue;
1245 x = PyNumber_Invert(v);
1248 if (x != NULL) continue;
1254 x = PyNumber_Power(v, w, Py_None);
1258 if (x != NULL) continue;
1261 case BINARY_MULTIPLY:
1264 x = PyNumber_Multiply(v, w);
1268 if (x != NULL) continue;
1272 if (!_Py_QnewFlag) {
1275 x = PyNumber_Divide(v, w);
1279 if (x != NULL) continue;
1282 /* -Qnew is in effect: fall through to
1283 BINARY_TRUE_DIVIDE */
1284 case BINARY_TRUE_DIVIDE:
1287 x = PyNumber_TrueDivide(v, w);
1291 if (x != NULL) continue;
1294 case BINARY_FLOOR_DIVIDE:
1297 x = PyNumber_FloorDivide(v, w);
1301 if (x != NULL) continue;
1307 if (PyString_CheckExact(v))
1308 x = PyString_Format(v, w);
1310 x = PyNumber_Remainder(v, w);
1314 if (x != NULL) continue;
1320 if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) {
1321 /* INLINE: int + int */
1322 register long a, b, i;
1323 a = PyInt_AS_LONG(v);
1324 b = PyInt_AS_LONG(w);
1325 /* cast to avoid undefined behaviour
1327 i = (long)((unsigned long)a + b);
1328 if ((i^a) < 0 && (i^b) < 0)
1330 x = PyInt_FromLong(i);
1332 else if (PyString_CheckExact(v) &&
1333 PyString_CheckExact(w)) {
1334 x = string_concatenate(v, w, f, next_instr);
1335 /* string_concatenate consumed the ref to v */
1336 goto skip_decref_vx;
1340 x = PyNumber_Add(v, w);
1346 if (x != NULL) continue;
1349 case BINARY_SUBTRACT:
1352 if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) {
1353 /* INLINE: int - int */
1354 register long a, b, i;
1355 a = PyInt_AS_LONG(v);
1356 b = PyInt_AS_LONG(w);
1357 /* cast to avoid undefined behaviour
1359 i = (long)((unsigned long)a - b);
1360 if ((i^a) < 0 && (i^~b) < 0)
1362 x = PyInt_FromLong(i);
1366 x = PyNumber_Subtract(v, w);
1371 if (x != NULL) continue;
1377 if (PyList_CheckExact(v) && PyInt_CheckExact(w)) {
1378 /* INLINE: list[int] */
1379 Py_ssize_t i = PyInt_AsSsize_t(w);
1381 i += PyList_GET_SIZE(v);
1382 if (i >= 0 && i < PyList_GET_SIZE(v)) {
1383 x = PyList_GET_ITEM(v, i);
1391 x = PyObject_GetItem(v, w);
1395 if (x != NULL) continue;
1401 x = PyNumber_Lshift(v, w);
1405 if (x != NULL) continue;
1411 x = PyNumber_Rshift(v, w);
1415 if (x != NULL) continue;
1421 x = PyNumber_And(v, w);
1425 if (x != NULL) continue;
1431 x = PyNumber_Xor(v, w);
1435 if (x != NULL) continue;
1441 x = PyNumber_Or(v, w);
1445 if (x != NULL) continue;
1451 err = PyList_Append(v, w);
1454 PREDICT(JUMP_ABSOLUTE);
1461 v = stack_pointer[-oparg];
1462 err = PySet_Add(v, w);
1465 PREDICT(JUMP_ABSOLUTE);
1473 x = PyNumber_InPlacePower(v, w, Py_None);
1477 if (x != NULL) continue;
1480 case INPLACE_MULTIPLY:
1483 x = PyNumber_InPlaceMultiply(v, w);
1487 if (x != NULL) continue;
1490 case INPLACE_DIVIDE:
1491 if (!_Py_QnewFlag) {
1494 x = PyNumber_InPlaceDivide(v, w);
1498 if (x != NULL) continue;
1501 /* -Qnew is in effect: fall through to
1502 INPLACE_TRUE_DIVIDE */
1503 case INPLACE_TRUE_DIVIDE:
1506 x = PyNumber_InPlaceTrueDivide(v, w);
1510 if (x != NULL) continue;
1513 case INPLACE_FLOOR_DIVIDE:
1516 x = PyNumber_InPlaceFloorDivide(v, w);
1520 if (x != NULL) continue;
1523 case INPLACE_MODULO:
1526 x = PyNumber_InPlaceRemainder(v, w);
1530 if (x != NULL) continue;
1536 if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) {
1537 /* INLINE: int + int */
1538 register long a, b, i;
1539 a = PyInt_AS_LONG(v);
1540 b = PyInt_AS_LONG(w);
1542 if ((i^a) < 0 && (i^b) < 0)
1544 x = PyInt_FromLong(i);
1546 else if (PyString_CheckExact(v) &&
1547 PyString_CheckExact(w)) {
1548 x = string_concatenate(v, w, f, next_instr);
1549 /* string_concatenate consumed the ref to v */
1554 x = PyNumber_InPlaceAdd(v, w);
1560 if (x != NULL) continue;
1563 case INPLACE_SUBTRACT:
1566 if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) {
1567 /* INLINE: int - int */
1568 register long a, b, i;
1569 a = PyInt_AS_LONG(v);
1570 b = PyInt_AS_LONG(w);
1572 if ((i^a) < 0 && (i^~b) < 0)
1574 x = PyInt_FromLong(i);
1578 x = PyNumber_InPlaceSubtract(v, w);
1583 if (x != NULL) continue;
1586 case INPLACE_LSHIFT:
1589 x = PyNumber_InPlaceLshift(v, w);
1593 if (x != NULL) continue;
1596 case INPLACE_RSHIFT:
1599 x = PyNumber_InPlaceRshift(v, w);
1603 if (x != NULL) continue;
1609 x = PyNumber_InPlaceAnd(v, w);
1613 if (x != NULL) continue;
1619 x = PyNumber_InPlaceXor(v, w);
1623 if (x != NULL) continue;
1629 x = PyNumber_InPlaceOr(v, w);
1633 if (x != NULL) continue;
1640 if ((opcode-SLICE) & 2)
1644 if ((opcode-SLICE) & 1)
1649 x = apply_slice(u, v, w);
1654 if (x != NULL) continue;
1661 if ((opcode-STORE_SLICE) & 2)
1665 if ((opcode-STORE_SLICE) & 1)
1671 err = assign_slice(u, v, w, t); /* u[v:w] = t */
1676 if (err == 0) continue;
1679 case DELETE_SLICE+0:
1680 case DELETE_SLICE+1:
1681 case DELETE_SLICE+2:
1682 case DELETE_SLICE+3:
1683 if ((opcode-DELETE_SLICE) & 2)
1687 if ((opcode-DELETE_SLICE) & 1)
1692 err = assign_slice(u, v, w, (PyObject *)NULL);
1697 if (err == 0) continue;
1706 err = PyObject_SetItem(v, w, u);
1710 if (err == 0) continue;
1718 err = PyObject_DelItem(v, w);
1721 if (err == 0) continue;
1726 w = PySys_GetObject("displayhook");
1728 PyErr_SetString(PyExc_RuntimeError,
1729 "lost sys.displayhook");
1734 x = PyTuple_Pack(1, v);
1739 w = PyEval_CallObject(w, x);
1750 /* fall through to PRINT_ITEM */
1754 if (stream == NULL || stream == Py_None) {
1755 w = PySys_GetObject("stdout");
1757 PyErr_SetString(PyExc_RuntimeError,
1762 /* PyFile_SoftSpace() can execute arbitrary code
1763 if sys.stdout is an instance with a __getattr__.
1764 If __getattr__ raises an exception, w will
1765 be freed, so we need to prevent that temporarily. */
1767 if (w != NULL && PyFile_SoftSpace(w, 0))
1768 err = PyFile_WriteString(" ", w);
1770 err = PyFile_WriteObject(v, w, Py_PRINT_RAW);
1772 /* XXX move into writeobject() ? */
1773 if (PyString_Check(v)) {
1774 char *s = PyString_AS_STRING(v);
1775 Py_ssize_t len = PyString_GET_SIZE(v);
1777 !isspace(Py_CHARMASK(s[len-1])) ||
1779 PyFile_SoftSpace(w, 1);
1781 #ifdef Py_USING_UNICODE
1782 else if (PyUnicode_Check(v)) {
1783 Py_UNICODE *s = PyUnicode_AS_UNICODE(v);
1784 Py_ssize_t len = PyUnicode_GET_SIZE(v);
1786 !Py_UNICODE_ISSPACE(s[len-1]) ||
1788 PyFile_SoftSpace(w, 1);
1792 PyFile_SoftSpace(w, 1);
1802 case PRINT_NEWLINE_TO:
1804 /* fall through to PRINT_NEWLINE */
1807 if (stream == NULL || stream == Py_None) {
1808 w = PySys_GetObject("stdout");
1810 PyErr_SetString(PyExc_RuntimeError,
1812 why = WHY_EXCEPTION;
1816 /* w.write() may replace sys.stdout, so we
1817 * have to keep our reference to it */
1819 err = PyFile_WriteString("\n", w);
1821 PyFile_SoftSpace(w, 0);
1830 default: switch (opcode) {
1836 u = POP(); /* traceback */
1839 v = POP(); /* value */
1842 w = POP(); /* exc */
1843 case 0: /* Fallthrough */
1844 why = do_raise(w, v, u);
1847 PyErr_SetString(PyExc_SystemError,
1848 "bad RAISE_VARARGS oparg");
1849 why = WHY_EXCEPTION;
1855 if ((x = f->f_locals) != NULL) {
1860 PyErr_SetString(PyExc_SystemError, "no locals");
1866 goto fast_block_end;
1870 f->f_stacktop = stack_pointer;
1879 READ_TIMESTAMP(intr0);
1880 err = exec_statement(f, u, v, w);
1881 READ_TIMESTAMP(intr1);
1889 PyTryBlock *b = PyFrame_BlockPop(f);
1890 while (STACK_LEVEL() > b->b_level) {
1897 PREDICTED(END_FINALLY);
1900 if (PyInt_Check(v)) {
1901 why = (enum why_code) PyInt_AS_LONG(v);
1902 assert(why != WHY_YIELD);
1903 if (why == WHY_RETURN ||
1904 why == WHY_CONTINUE)
1907 else if (PyExceptionClass_Check(v) ||
1908 PyString_Check(v)) {
1911 PyErr_Restore(v, w, u);
1915 else if (v != Py_None) {
1916 PyErr_SetString(PyExc_SystemError,
1917 "'finally' pops bad exception");
1918 why = WHY_EXCEPTION;
1928 x = build_class(u, v, w);
1936 w = GETITEM(names, oparg);
1938 if ((x = f->f_locals) != NULL) {
1939 if (PyDict_CheckExact(x))
1940 err = PyDict_SetItem(x, w, v);
1942 err = PyObject_SetItem(x, w, v);
1944 if (err == 0) continue;
1947 PyErr_Format(PyExc_SystemError,
1948 "no locals found when storing %s",
1953 w = GETITEM(names, oparg);
1954 if ((x = f->f_locals) != NULL) {
1955 if ((err = PyObject_DelItem(x, w)) != 0)
1956 format_exc_check_arg(PyExc_NameError,
1961 PyErr_Format(PyExc_SystemError,
1962 "no locals when deleting %s",
1966 PREDICTED_WITH_ARG(UNPACK_SEQUENCE);
1967 case UNPACK_SEQUENCE:
1969 if (PyTuple_CheckExact(v) &&
1970 PyTuple_GET_SIZE(v) == oparg) {
1971 PyObject **items = \
1972 ((PyTupleObject *)v)->ob_item;
1980 } else if (PyList_CheckExact(v) &&
1981 PyList_GET_SIZE(v) == oparg) {
1982 PyObject **items = \
1983 ((PyListObject *)v)->ob_item;
1989 } else if (unpack_iterable(v, oparg,
1990 stack_pointer + oparg)) {
1993 /* unpack_iterable() raised an exception */
1994 why = WHY_EXCEPTION;
2000 w = GETITEM(names, oparg);
2004 err = PyObject_SetAttr(v, w, u); /* v.w = u */
2007 if (err == 0) continue;
2011 w = GETITEM(names, oparg);
2013 err = PyObject_SetAttr(v, w, (PyObject *)NULL);
2019 w = GETITEM(names, oparg);
2021 err = PyDict_SetItem(f->f_globals, w, v);
2023 if (err == 0) continue;
2027 w = GETITEM(names, oparg);
2028 if ((err = PyDict_DelItem(f->f_globals, w)) != 0)
2029 format_exc_check_arg(
2030 PyExc_NameError, GLOBAL_NAME_ERROR_MSG, w);
2034 w = GETITEM(names, oparg);
2035 if ((v = f->f_locals) == NULL) {
2036 PyErr_Format(PyExc_SystemError,
2037 "no locals when loading %s",
2039 why = WHY_EXCEPTION;
2042 if (PyDict_CheckExact(v)) {
2043 x = PyDict_GetItem(v, w);
2047 x = PyObject_GetItem(v, w);
2048 if (x == NULL && PyErr_Occurred()) {
2049 if (!PyErr_ExceptionMatches(
2056 x = PyDict_GetItem(f->f_globals, w);
2058 x = PyDict_GetItem(f->f_builtins, w);
2060 format_exc_check_arg(
2072 w = GETITEM(names, oparg);
2073 if (PyString_CheckExact(w)) {
2074 /* Inline the PyDict_GetItem() calls.
2075 WARNING: this is an extreme speed hack.
2076 Do not try this at home. */
2077 long hash = ((PyStringObject *)w)->ob_shash;
2081 d = (PyDictObject *)(f->f_globals);
2082 e = d->ma_lookup(d, w, hash);
2093 d = (PyDictObject *)(f->f_builtins);
2094 e = d->ma_lookup(d, w, hash);
2105 goto load_global_error;
2108 /* This is the un-inlined version of the code above */
2109 x = PyDict_GetItem(f->f_globals, w);
2111 x = PyDict_GetItem(f->f_builtins, w);
2114 format_exc_check_arg(
2116 GLOBAL_NAME_ERROR_MSG, w);
2125 x = GETLOCAL(oparg);
2127 SETLOCAL(oparg, NULL);
2130 format_exc_check_arg(
2131 PyExc_UnboundLocalError,
2132 UNBOUNDLOCAL_ERROR_MSG,
2133 PyTuple_GetItem(co->co_varnames, oparg)
2138 x = freevars[oparg];
2141 if (x != NULL) continue;
2145 x = freevars[oparg];
2152 /* Don't stomp existing exception */
2153 if (PyErr_Occurred())
2155 if (oparg < PyTuple_GET_SIZE(co->co_cellvars)) {
2156 v = PyTuple_GET_ITEM(co->co_cellvars,
2158 format_exc_check_arg(
2159 PyExc_UnboundLocalError,
2160 UNBOUNDLOCAL_ERROR_MSG,
2163 v = PyTuple_GET_ITEM(co->co_freevars, oparg -
2164 PyTuple_GET_SIZE(co->co_cellvars));
2165 format_exc_check_arg(PyExc_NameError,
2166 UNBOUNDFREE_ERROR_MSG, v);
2172 x = freevars[oparg];
2178 x = PyTuple_New(oparg);
2180 for (; --oparg >= 0;) {
2182 PyTuple_SET_ITEM(x, oparg, w);
2190 x = PyList_New(oparg);
2192 for (; --oparg >= 0;) {
2194 PyList_SET_ITEM(x, oparg, w);
2202 x = PySet_New(NULL);
2204 for (; --oparg >= 0;) {
2207 err = PySet_Add(x, w);
2221 x = _PyDict_NewPresized((Py_ssize_t)oparg);
2223 if (x != NULL) continue;
2227 w = TOP(); /* key */
2228 u = SECOND(); /* value */
2229 v = THIRD(); /* dict */
2231 assert (PyDict_CheckExact(v));
2232 err = PyDict_SetItem(v, w, u); /* v[w] = u */
2235 if (err == 0) continue;
2239 w = TOP(); /* key */
2240 u = SECOND(); /* value */
2242 v = stack_pointer[-oparg]; /* dict */
2243 assert (PyDict_CheckExact(v));
2244 err = PyDict_SetItem(v, w, u); /* v[w] = u */
2248 PREDICT(JUMP_ABSOLUTE);
2254 w = GETITEM(names, oparg);
2256 x = PyObject_GetAttr(v, w);
2259 if (x != NULL) continue;
2265 if (PyInt_CheckExact(w) && PyInt_CheckExact(v)) {
2266 /* INLINE: cmp(int, int) */
2269 a = PyInt_AS_LONG(v);
2270 b = PyInt_AS_LONG(w);
2272 case PyCmp_LT: res = a < b; break;
2273 case PyCmp_LE: res = a <= b; break;
2274 case PyCmp_EQ: res = a == b; break;
2275 case PyCmp_NE: res = a != b; break;
2276 case PyCmp_GT: res = a > b; break;
2277 case PyCmp_GE: res = a >= b; break;
2278 case PyCmp_IS: res = v == w; break;
2279 case PyCmp_IS_NOT: res = v != w; break;
2280 default: goto slow_compare;
2282 x = res ? Py_True : Py_False;
2287 x = cmp_outcome(oparg, v, w);
2292 if (x == NULL) break;
2293 PREDICT(POP_JUMP_IF_FALSE);
2294 PREDICT(POP_JUMP_IF_TRUE);
2298 w = GETITEM(names, oparg);
2299 x = PyDict_GetItemString(f->f_builtins, "__import__");
2301 PyErr_SetString(PyExc_ImportError,
2302 "__import__ not found");
2308 if (PyInt_AsLong(u) != -1 || PyErr_Occurred())
2312 f->f_locals == NULL ?
2313 Py_None : f->f_locals,
2320 f->f_locals == NULL ?
2321 Py_None : f->f_locals,
2331 READ_TIMESTAMP(intr0);
2333 x = PyEval_CallObject(v, w);
2335 READ_TIMESTAMP(intr1);
2338 if (x != NULL) continue;
2343 PyFrame_FastToLocals(f);
2344 if ((x = f->f_locals) == NULL) {
2345 PyErr_SetString(PyExc_SystemError,
2346 "no locals found during 'import *'");
2349 READ_TIMESTAMP(intr0);
2350 err = import_all_from(x, v);
2351 READ_TIMESTAMP(intr1);
2352 PyFrame_LocalsToFast(f, 0);
2354 if (err == 0) continue;
2358 w = GETITEM(names, oparg);
2360 READ_TIMESTAMP(intr0);
2361 x = import_from(v, w);
2362 READ_TIMESTAMP(intr1);
2364 if (x != NULL) continue;
2369 goto fast_next_opcode;
2371 PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE);
2372 case POP_JUMP_IF_FALSE:
2376 goto fast_next_opcode;
2378 if (w == Py_False) {
2381 goto fast_next_opcode;
2383 err = PyObject_IsTrue(w);
2393 PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE);
2394 case POP_JUMP_IF_TRUE:
2396 if (w == Py_False) {
2398 goto fast_next_opcode;
2403 goto fast_next_opcode;
2405 err = PyObject_IsTrue(w);
2417 case JUMP_IF_FALSE_OR_POP:
2422 goto fast_next_opcode;
2424 if (w == Py_False) {
2426 goto fast_next_opcode;
2428 err = PyObject_IsTrue(w);
2440 case JUMP_IF_TRUE_OR_POP:
2442 if (w == Py_False) {
2445 goto fast_next_opcode;
2449 goto fast_next_opcode;
2451 err = PyObject_IsTrue(w);
2456 else if (err == 0) {
2464 PREDICTED_WITH_ARG(JUMP_ABSOLUTE);
2468 /* Enabling this path speeds-up all while and for-loops by bypassing
2469 the per-loop checks for signals. By default, this should be turned-off
2470 because it prevents detection of a control-break in tight loops like
2471 "while 1: pass". Compile with this option turned-on when you need
2472 the speed-up and do not need break checking inside tight loops (ones
2473 that contain only instructions ending with goto fast_next_opcode).
2475 goto fast_next_opcode;
2481 /* before: [obj]; after [getiter(obj)] */
2483 x = PyObject_GetIter(v);
2493 PREDICTED_WITH_ARG(FOR_ITER);
2495 /* before: [iter]; after: [iter, iter()] *or* [] */
2497 x = (*v->ob_type->tp_iternext)(v);
2500 PREDICT(STORE_FAST);
2501 PREDICT(UNPACK_SEQUENCE);
2504 if (PyErr_Occurred()) {
2505 if (!PyErr_ExceptionMatches(
2506 PyExc_StopIteration))
2510 /* iterator ended normally */
2518 goto fast_block_end;
2521 retval = PyInt_FromLong(oparg);
2527 goto fast_block_end;
2532 /* NOTE: If you add any new block-setup opcodes that
2533 are not try/except/finally handlers, you may need
2534 to update the PyGen_NeedsFinalizing() function.
2537 PyFrame_BlockSetup(f, opcode, INSTR_OFFSET() + oparg,
2543 static PyObject *exit, *enter;
2545 x = special_lookup(w, "__exit__", &exit);
2549 u = special_lookup(w, "__enter__", &enter);
2555 x = PyObject_CallFunctionObjArgs(u, NULL);
2559 /* Setup a finally block (SETUP_WITH as a block is
2560 equivalent to SETUP_FINALLY except it normalizes
2561 the exception) before pushing the result of
2562 __enter__ on the stack. */
2563 PyFrame_BlockSetup(f, SETUP_WITH, INSTR_OFFSET() + oparg,
2572 /* At the top of the stack are 1-3 values indicating
2573 how/why we entered the finally clause:
2575 - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval
2576 - TOP = WHY_*; no retval below it
2577 - (TOP, SECOND, THIRD) = exc_info()
2578 Below them is EXIT, the context.__exit__ bound method.
2579 In the last case, we must call
2580 EXIT(TOP, SECOND, THIRD)
2581 otherwise we must call
2582 EXIT(None, None, None)
2584 In all cases, we remove EXIT from the stack, leaving
2585 the rest in the same order.
2587 In addition, if the stack represents an exception,
2588 *and* the function call returns a 'true' value, we
2589 "zap" this information, to prevent END_FINALLY from
2590 re-raising the exception. (But non-local gotos
2591 should still be resumed.)
2594 PyObject *exit_func;
2602 else if (PyInt_Check(u)) {
2603 switch(PyInt_AS_LONG(u)) {
2606 /* Retval in TOP. */
2607 exit_func = SECOND();
2616 u = v = w = Py_None;
2621 exit_func = THIRD();
2626 /* XXX Not the fastest way to call it... */
2627 x = PyObject_CallFunctionObjArgs(exit_func, u, v, w,
2629 Py_DECREF(exit_func);
2631 break; /* Go to error exit */
2634 err = PyObject_IsTrue(x);
2640 break; /* Go to error exit */
2643 /* There was an exception and a true return */
2651 /* The stack was rearranged to remove EXIT
2652 above. Let END_FINALLY do its thing */
2654 PREDICT(END_FINALLY);
2664 x = call_function(&sp, oparg, &intr0, &intr1);
2666 x = call_function(&sp, oparg);
2675 case CALL_FUNCTION_VAR:
2676 case CALL_FUNCTION_KW:
2677 case CALL_FUNCTION_VAR_KW:
2679 int na = oparg & 0xff;
2680 int nk = (oparg>>8) & 0xff;
2681 int flags = (opcode - CALL_FUNCTION) & 3;
2682 int n = na + 2 * nk;
2683 PyObject **pfunc, *func, **sp;
2685 if (flags & CALL_FLAG_VAR)
2687 if (flags & CALL_FLAG_KW)
2689 pfunc = stack_pointer - n - 1;
2692 if (PyMethod_Check(func)
2693 && PyMethod_GET_SELF(func) != NULL) {
2694 PyObject *self = PyMethod_GET_SELF(func);
2696 func = PyMethod_GET_FUNCTION(func);
2704 READ_TIMESTAMP(intr0);
2705 x = ext_do_call(func, &sp, flags, na, nk);
2706 READ_TIMESTAMP(intr1);
2710 while (stack_pointer > pfunc) {
2721 v = POP(); /* code object */
2722 x = PyFunction_New(v, f->f_globals);
2724 /* XXX Maybe this should be a separate opcode? */
2725 if (x != NULL && oparg > 0) {
2726 v = PyTuple_New(oparg);
2732 while (--oparg >= 0) {
2734 PyTuple_SET_ITEM(v, oparg, w);
2736 err = PyFunction_SetDefaults(x, v);
2744 v = POP(); /* code object */
2745 x = PyFunction_New(v, f->f_globals);
2749 if (PyFunction_SetClosure(x, v) != 0) {
2750 /* Can't happen unless bytecode is corrupt. */
2751 why = WHY_EXCEPTION;
2755 if (x != NULL && oparg > 0) {
2756 v = PyTuple_New(oparg);
2762 while (--oparg >= 0) {
2764 PyTuple_SET_ITEM(v, oparg, w);
2766 if (PyFunction_SetDefaults(x, v) != 0) {
2767 /* Can't happen unless
2768 PyFunction_SetDefaults changes. */
2769 why = WHY_EXCEPTION;
2784 x = PySlice_New(u, v, w);
2789 if (x != NULL) continue;
2794 oparg = oparg<<16 | NEXTARG();
2795 goto dispatch_opcode;
2799 "XXX lineno: %d, opcode: %d\n",
2800 PyFrame_GetLineNumber(f),
2802 PyErr_SetString(PyExc_SystemError, "unknown opcode");
2803 why = WHY_EXCEPTION;
2814 READ_TIMESTAMP(inst1);
2816 /* Quickly continue if no error occurred */
2818 if (why == WHY_NOT) {
2819 if (err == 0 && x != NULL) {
2821 /* This check is expensive! */
2822 if (PyErr_Occurred())
2824 "XXX undetected error\n");
2827 READ_TIMESTAMP(loop1);
2828 continue; /* Normal, fast path */
2833 why = WHY_EXCEPTION;
2838 /* Double-check exception status */
2840 if (why == WHY_EXCEPTION || why == WHY_RERAISE) {
2841 if (!PyErr_Occurred()) {
2842 PyErr_SetString(PyExc_SystemError,
2843 "error return without exception set");
2844 why = WHY_EXCEPTION;
2849 /* This check is expensive! */
2850 if (PyErr_Occurred()) {
2852 sprintf(buf, "Stack unwind with exception "
2853 "set and why=%d", why);
2859 /* Log traceback info if this is a real exception */
2861 if (why == WHY_EXCEPTION) {
2862 PyTraceBack_Here(f);
2864 if (tstate->c_tracefunc != NULL)
2865 call_exc_trace(tstate->c_tracefunc,
2866 tstate->c_traceobj, f);
2869 /* For the rest, treat WHY_RERAISE as WHY_EXCEPTION */
2871 if (why == WHY_RERAISE)
2872 why = WHY_EXCEPTION;
2874 /* Unwind stacks if a (pseudo) exception occurred */
2877 while (why != WHY_NOT && f->f_iblock > 0) {
2878 /* Peek at the current block. */
2879 PyTryBlock *b = &f->f_blockstack[f->f_iblock - 1];
2881 assert(why != WHY_YIELD);
2882 if (b->b_type == SETUP_LOOP && why == WHY_CONTINUE) {
2884 JUMPTO(PyInt_AS_LONG(retval));
2889 /* Now we have to pop the block. */
2892 while (STACK_LEVEL() > b->b_level) {
2896 if (b->b_type == SETUP_LOOP && why == WHY_BREAK) {
2898 JUMPTO(b->b_handler);
2901 if (b->b_type == SETUP_FINALLY ||
2902 (b->b_type == SETUP_EXCEPT &&
2903 why == WHY_EXCEPTION) ||
2904 b->b_type == SETUP_WITH) {
2905 if (why == WHY_EXCEPTION) {
2906 PyObject *exc, *val, *tb;
2907 PyErr_Fetch(&exc, &val, &tb);
2912 /* Make the raw exception data
2913 available to the handler,
2914 so a program can emulate the
2915 Python main loop. Don't do
2916 this for 'finally'. */
2917 if (b->b_type == SETUP_EXCEPT ||
2918 b->b_type == SETUP_WITH) {
2919 PyErr_NormalizeException(
2921 set_exc_info(tstate,
2933 if (why & (WHY_RETURN | WHY_CONTINUE))
2935 v = PyInt_FromLong((long)why);
2939 JUMPTO(b->b_handler);
2942 } /* unwind stack */
2944 /* End the loop if we still have an error (or return) */
2948 READ_TIMESTAMP(loop1);
2952 assert(why != WHY_YIELD);
2953 /* Pop remaining stack entries. */
2959 if (why != WHY_RETURN)
2963 if (tstate->use_tracing) {
2964 if (tstate->c_tracefunc) {
2965 if (why == WHY_RETURN || why == WHY_YIELD) {
2966 if (call_trace(tstate->c_tracefunc,
2967 tstate->c_traceobj, f,
2968 PyTrace_RETURN, retval)) {
2971 why = WHY_EXCEPTION;
2974 else if (why == WHY_EXCEPTION) {
2975 call_trace_protected(tstate->c_tracefunc,
2976 tstate->c_traceobj, f,
2977 PyTrace_RETURN, NULL);
2980 if (tstate->c_profilefunc) {
2981 if (why == WHY_EXCEPTION)
2982 call_trace_protected(tstate->c_profilefunc,
2983 tstate->c_profileobj, f,
2984 PyTrace_RETURN, NULL);
2985 else if (call_trace(tstate->c_profilefunc,
2986 tstate->c_profileobj, f,
2987 PyTrace_RETURN, retval)) {
2990 why = WHY_EXCEPTION;
2995 if (tstate->frame->f_exc_type != NULL)
2996 reset_exc_info(tstate);
2998 assert(tstate->frame->f_exc_value == NULL);
2999 assert(tstate->frame->f_exc_traceback == NULL);
3004 Py_LeaveRecursiveCall();
3005 tstate->frame = f->f_back;
3010 /* This is gonna seem *real weird*, but if you put some other code between
3011 PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
3012 the test in the if statements in Misc/gdbinit (pystack and pystackv). */
/* Evaluate a code object in a new frame.
 *
 * Binds positional args, keyword args, *args/**kwargs and defaults into the
 * frame's fastlocals, sets up cell and free variables, then either returns a
 * new generator (CO_GENERATOR) or runs the frame via PyEval_EvalFrameEx().
 *
 * NOTE(review): this excerpt is elided — the leading number on each line is
 * the original file's line number, and intermediate lines are missing, so
 * several opening/closing braces and statements are not visible here.
 */
3015 PyEval_EvalCodeEx(PyCodeObject *co, PyObject *globals, PyObject *locals,
3016 PyObject **args, int argcount, PyObject **kws, int kwcount,
3017 PyObject **defs, int defcount, PyObject *closure)
3019 register PyFrameObject *f;
3020 register PyObject *retval = NULL;
3021 register PyObject **fastlocals, **freevars;
3022 PyThreadState *tstate = PyThreadState_GET();
/* globals is required; locals may legitimately be NULL (frame supplies it). */
3025 if (globals == NULL) {
3026 PyErr_SetString(PyExc_SystemError,
3027 "PyEval_EvalCodeEx: NULL globals");
3031 assert(tstate != NULL);
3032 assert(globals != NULL);
3033 f = PyFrame_New(tstate, co, globals, locals);
/* fastlocals: co_nlocals slots, followed by cell vars then free vars. */
3037 fastlocals = f->f_localsplus;
3038 freevars = f->f_localsplus + co->co_nlocals;
/* Argument processing is only needed when the code takes arguments. */
3040 if (co->co_argcount > 0 ||
3041 co->co_flags & (CO_VARARGS | CO_VARKEYWORDS) {
3044 PyObject *kwdict = NULL;
/* **kwargs dict lives in the local slot just past argcount (+1 if *args). */
3045 if (co->co_flags & CO_VARKEYWORDS) {
3046 kwdict = PyDict_New();
3049 i = co->co_argcount;
3050 if (co->co_flags & CO_VARARGS)
3052 SETLOCAL(i, kwdict);
/* Too many positional args: error unless *args absorbs the excess. */
3054 if (argcount > co->co_argcount) {
3055 if (!(co->co_flags & CO_VARARGS)) {
3056 PyErr_Format(PyExc_TypeError,
3057 "%.200s() takes %s %d "
3058 "argument%s (%d given)",
3059 PyString_AsString(co->co_name),
3060 defcount ? "at most" : "exactly",
3062 co->co_argcount == 1 ? "" : "s",
3063 argcount + kwcount);
3066 n = co->co_argcount;
/* Copy the declared positional args into their local slots. */
3068 for (i = 0; i < n; i++) {
/* Surplus positionals are packed into the *args tuple. */
3073 if (co->co_flags & CO_VARARGS) {
3074 u = PyTuple_New(argcount - n);
3077 SETLOCAL(co->co_argcount, u);
3078 for (i = n; i < argcount; i++) {
3081 PyTuple_SET_ITEM(u, i-n, x);
/* Bind each keyword arg: match a declared name, or stash in **kwargs. */
3084 for (i = 0; i < kwcount; i++) {
3085 PyObject **co_varnames;
3086 PyObject *keyword = kws[2*i];
3087 PyObject *value = kws[2*i + 1];
3089 if (keyword == NULL || !(PyString_Check(keyword)
3090 #ifdef Py_USING_UNICODE
3091 || PyUnicode_Check(keyword)
3094 PyErr_Format(PyExc_TypeError,
3095 "%.200s() keywords must be strings",
3096 PyString_AsString(co->co_name));
3099 /* Speed hack: do raw pointer compares. As names are
3100 normally interned this should almost always hit. */
3101 co_varnames = ((PyTupleObject *)(co->co_varnames))->ob_item;
3102 for (j = 0; j < co->co_argcount; j++) {
3103 PyObject *nm = co_varnames[j];
3107 /* Slow fallback, just in case */
3108 for (j = 0; j < co->co_argcount; j++) {
3109 PyObject *nm = co_varnames[j];
3110 int cmp = PyObject_RichCompareBool(
3111 keyword, nm, Py_EQ);
/* No declared parameter matched: unexpected kw unless **kwargs exists. */
3117 if (kwdict == NULL) {
3118 PyObject *kwd_str = kwd_as_string(keyword);
3120 PyErr_Format(PyExc_TypeError,
3121 "%.200s() got an unexpected "
3122 "keyword argument '%.400s'",
3123 PyString_AsString(co->co_name),
3124 PyString_AsString(kwd_str));
3129 PyDict_SetItem(kwdict, keyword, value);
/* Slot already filled by a positional arg: duplicate value error. */
3132 if (GETLOCAL(j) != NULL) {
3133 PyObject *kwd_str = kwd_as_string(keyword);
3135 PyErr_Format(PyExc_TypeError,
3136 "%.200s() got multiple "
3137 "values for keyword "
3138 "argument '%.400s'",
3139 PyString_AsString(co->co_name),
3140 PyString_AsString(kwd_str));
/* Fill remaining slots from defaults; m = first slot with a default. */
3148 if (argcount < co->co_argcount) {
3149 int m = co->co_argcount - defcount;
3150 for (i = argcount; i < m; i++) {
3151 if (GETLOCAL(i) == NULL) {
3153 for (j = 0; j < co->co_argcount; j++)
3156 PyErr_Format(PyExc_TypeError,
3157 "%.200s() takes %s %d "
3158 "argument%s (%d given)",
3159 PyString_AsString(co->co_name),
3160 ((co->co_flags & CO_VARARGS) ||
3161 defcount) ? "at least"
3163 m, m == 1 ? "" : "s", given);
3171 for (; i < defcount; i++) {
3172 if (GETLOCAL(m+i) == NULL) {
3173 PyObject *def = defs[i];
/* Code takes no arguments at all, but some were supplied. */
3180 else if (argcount > 0 || kwcount > 0) {
3181 PyErr_Format(PyExc_TypeError,
3182 "%.200s() takes no arguments (%d given)",
3183 PyString_AsString(co->co_name),
3184 argcount + kwcount);
3187 /* Allocate and initialize storage for cell vars, and copy free
3188 vars into frame. This isn't too efficient right now. */
3189 if (PyTuple_GET_SIZE(co->co_cellvars)) {
3190 int i, j, nargs, found;
3191 char *cellname, *argname;
3194 nargs = co->co_argcount;
3195 if (co->co_flags & CO_VARARGS)
3197 if (co->co_flags & CO_VARKEYWORDS)
3200 /* Initialize each cell var, taking into account
3201 cell vars that are initialized from arguments.
3203 Should arrange for the compiler to put cellvars
3204 that are arguments at the beginning of the cellvars
3205 list so that we can march over it more efficiently?
/* Cell vars that shadow an argument wrap that argument's current value. */
3207 for (i = 0; i < PyTuple_GET_SIZE(co->co_cellvars); ++i) {
3208 cellname = PyString_AS_STRING(
3209 PyTuple_GET_ITEM(co->co_cellvars, i));
3211 for (j = 0; j < nargs; j++) {
3212 argname = PyString_AS_STRING(
3213 PyTuple_GET_ITEM(co->co_varnames, j));
3214 if (strcmp(cellname, argname) == 0) {
3215 c = PyCell_New(GETLOCAL(j));
3218 GETLOCAL(co->co_nlocals + i) = c;
3224 c = PyCell_New(NULL);
3227 SETLOCAL(co->co_nlocals + i, c);
/* Free vars come straight from the closure tuple, after the cell vars. */
3231 if (PyTuple_GET_SIZE(co->co_freevars)) {
3233 for (i = 0; i < PyTuple_GET_SIZE(co->co_freevars); ++i) {
3234 PyObject *o = PyTuple_GET_ITEM(closure, i);
3236 freevars[PyTuple_GET_SIZE(co->co_cellvars) + i] = o;
/* Generators: hand the ready frame to a new generator instead of running. */
3240 if (co->co_flags & CO_GENERATOR) {
3241 /* Don't need to keep the reference to f_back, it will be set
3242 * when the generator is resumed. */
3243 Py_XDECREF(f->f_back);
3246 PCALL(PCALL_GENERATOR);
3248 /* Create a new generator that owns the ready to run frame
3249 * and return that as the value. */
3250 return PyGen_New(f);
/* Normal path: run the frame to completion. */
3253 retval = PyEval_EvalFrameEx(f,0);
3255 fail: /* Jump here from prelude on failure */
3257 /* decref'ing the frame can cause __del__ methods to get invoked,
3258 which can call back into Python. While we're done with the
3259 current Python frame (f), the associated C stack is still in use,
3260 so recursion_depth must be boosted for the duration.
3262 assert(tstate != NULL);
3263 ++tstate->recursion_depth;
3265 --tstate->recursion_depth;
/* Look up special method `meth` on the TYPE of `o` (used by SETUP_WITH for
 * __enter__/__exit__). `cache` interns the method-name string across calls.
 * Old-style instances fall back to a plain attribute lookup on the object.
 * Returns a new reference, or NULL with AttributeError set when missing.
 * NOTE(review): excerpt is elided; braces/returns between the visible lines
 * are missing from this listing.
 */
3271 special_lookup(PyObject *o, char *meth, PyObject **cache)
3274 if (PyInstance_Check(o)) {
3276 return PyObject_GetAttrString(o, meth);
3278 return PyObject_GetAttr(o, *cache);
/* New-style path: look on the type, bypassing the instance dict. */
3280 res = _PyObject_LookupSpecial(o, meth, cache);
/* _PyObject_LookupSpecial returns NULL without an exception when the
   attribute simply doesn't exist — convert that to AttributeError. */
3281 if (res == NULL && !PyErr_Occurred()) {
3282 PyErr_SetObject(PyExc_AttributeError, *cache);
/* Coerce a keyword-argument name to a str suitable for error messages.
 * With unicode support, non-str keywords are encoded with the default
 * encoding using "replace" so the message can't fail on bad characters.
 * NOTE(review): excerpt is elided; return statements for the str branch
 * are not visible here.
 */
3290 kwd_as_string(PyObject *kwd) {
3291 #ifdef Py_USING_UNICODE
3292 if (PyString_Check(kwd)) {
3294 assert(PyString_Check(kwd));
3298 #ifdef Py_USING_UNICODE
3300 return _PyUnicode_AsDefaultEncodedString(kwd, "replace");
3305 /* Implementation notes for set_exc_info() and reset_exc_info():
3307 - Below, 'exc_ZZZ' stands for 'exc_type', 'exc_value' and
3308 'exc_traceback'. These always travel together.
3310 - tstate->curexc_ZZZ is the "hot" exception that is set by
3311 PyErr_SetString(), cleared by PyErr_Clear(), and so on.
3313 - Once an exception is caught by an except clause, it is transferred
3314 from tstate->curexc_ZZZ to tstate->exc_ZZZ, from which sys.exc_info()
3315 can pick it up. This is the primary task of set_exc_info().
3316 XXX That can't be right: set_exc_info() doesn't look at tstate->curexc_ZZZ.
3318 - Now let me explain the complicated dance with frame->f_exc_ZZZ.
3320 Long ago, when none of this existed, there were just a few globals:
3321 one set corresponding to the "hot" exception, and one set
3322 corresponding to sys.exc_ZZZ. (Actually, the latter weren't C
3323 globals; they were simply stored as sys.exc_ZZZ. For backwards
3324 compatibility, they still are!) The problem was that in code like
3328 "something that may fail"
3329 except "some exception":
3330 "do something else first"
3331 "print the exception from sys.exc_ZZZ."
3333 if "do something else first" invoked something that raised and caught
3334 an exception, sys.exc_ZZZ were overwritten. That was a frequent
3335 cause of subtle bugs. I fixed this by changing the semantics as
3338 - Within one frame, sys.exc_ZZZ will hold the last exception caught
3341 - But initially, and as long as no exception is caught in a given
3342 frame, sys.exc_ZZZ will hold the last exception caught in the
3343 previous frame (or the frame before that, etc.).
3345 The first bullet fixed the bug in the above example. The second
3346 bullet was for backwards compatibility: it was (and is) common to
3347 have a function that is called when an exception is caught, and to
3348 have that function access the caught exception via sys.exc_ZZZ.
3349 (Example: traceback.print_exc()).
3351 At the same time I fixed the problem that sys.exc_ZZZ weren't
3352 thread-safe, by introducing sys.exc_info() which gets it from tstate;
3353 but that's really a separate improvement.
3355 The reset_exc_info() function in ceval.c restores the tstate->exc_ZZZ
3356 variables to what they were before the current frame was called. The
3357 set_exc_info() function saves them on the frame so that
3358 reset_exc_info() can restore them. The invariant is that
3359 frame->f_exc_ZZZ is NULL iff the current frame never caught an
3360 exception (where "catching" an exception applies only to successful
3361 except clauses); and if the current frame ever caught an exception,
3362 frame->f_exc_ZZZ is the exception that was stored in tstate->exc_ZZZ
3363 at the start of the current frame.
/* Install (type, value, tb) as the thread's sys.exc_info() after an except
 * clause catches an exception. On the FIRST catch in this frame, the
 * thread's previous exc_ZZZ triple is saved into frame->f_exc_ZZZ so
 * reset_exc_info() can restore it when the frame exits (see the long
 * implementation-notes comment above).
 * NOTE(review): excerpt is elided — some INCREF/DECREF lines and braces
 * between the visible lines are missing from this listing.
 */
3368 set_exc_info(PyThreadState *tstate,
3369 PyObject *type, PyObject *value, PyObject *tb)
3371 PyFrameObject *frame = tstate->frame;
3372 PyObject *tmp_type, *tmp_value, *tmp_tb;
3374 assert(type != NULL);
3375 assert(frame != NULL);
3376 if (frame->f_exc_type == NULL) {
3377 assert(frame->f_exc_value == NULL);
3378 assert(frame->f_exc_traceback == NULL);
3379 /* This frame didn't catch an exception before. */
3380 /* Save previous exception of this thread in this frame. */
3381 if (tstate->exc_type == NULL) {
3382 /* XXX Why is this set to Py_None? */
3384 tstate->exc_type = Py_None;
/* Stash the thread's current triple on the frame (new refs via INCREF). */
3386 Py_INCREF(tstate->exc_type);
3387 Py_XINCREF(tstate->exc_value);
3388 Py_XINCREF(tstate->exc_traceback);
3389 frame->f_exc_type = tstate->exc_type;
3390 frame->f_exc_value = tstate->exc_value;
3391 frame->f_exc_traceback = tstate->exc_traceback;
3393 /* Set new exception for this thread. */
/* Swap pattern: hold the old refs in tmp_* so the new values are fully
   installed before the old ones are released. */
3394 tmp_type = tstate->exc_type;
3395 tmp_value = tstate->exc_value;
3396 tmp_tb = tstate->exc_traceback;
3400 tstate->exc_type = type;
3401 tstate->exc_value = value;
3402 tstate->exc_traceback = tb;
3403 Py_XDECREF(tmp_type);
3404 Py_XDECREF(tmp_value);
3406 /* For b/w compatibility */
/* Mirror the triple into sys.exc_type/value/traceback (legacy API). */
3407 PySys_SetObject("exc_type", type);
3408 PySys_SetObject("exc_value", value);
3409 PySys_SetObject("exc_traceback", tb);
/* Undo set_exc_info() when a frame that caught an exception exits: restore
 * the thread's exc_ZZZ triple from frame->f_exc_ZZZ, then clear the frame's
 * copy. Precondition (asserted): this frame previously caught an exception,
 * i.e. frame->f_exc_type != NULL.
 * NOTE(review): excerpt is elided — some DECREF lines and braces between
 * the visible lines are missing from this listing.
 */
3413 reset_exc_info(PyThreadState *tstate)
3415 PyFrameObject *frame;
3416 PyObject *tmp_type, *tmp_value, *tmp_tb;
3418 /* It's a precondition that the thread state's frame caught an
3419 * exception -- verify in a debug build.
3421 assert(tstate != NULL);
3422 frame = tstate->frame;
3423 assert(frame != NULL);
3424 assert(frame->f_exc_type != NULL);
3426 /* Copy the frame's exception info back to the thread state. */
/* Same swap pattern as set_exc_info: new refs taken before old released. */
3427 tmp_type = tstate->exc_type;
3428 tmp_value = tstate->exc_value;
3429 tmp_tb = tstate->exc_traceback;
3430 Py_INCREF(frame->f_exc_type);
3431 Py_XINCREF(frame->f_exc_value);
3432 Py_XINCREF(frame->f_exc_traceback);
3433 tstate->exc_type = frame->f_exc_type;
3434 tstate->exc_value = frame->f_exc_value;
3435 tstate->exc_traceback = frame->f_exc_traceback;
3436 Py_XDECREF(tmp_type);
3437 Py_XDECREF(tmp_value);
3440 /* For b/w compatibility */
/* Keep the legacy sys.exc_* attributes in sync with the restored triple. */
3441 PySys_SetObject("exc_type", frame->f_exc_type);
3442 PySys_SetObject("exc_value", frame->f_exc_value);
3443 PySys_SetObject("exc_traceback", frame->f_exc_traceback);
3445 /* Clear the frame's exception info. */
/* Drop the frame's saved refs; f_exc_type held a strong ref (DECREF),
   value/traceback may be NULL (XDECREF). */
3446 tmp_type = frame->f_exc_type;
3447 tmp_value = frame->f_exc_value;
3448 tmp_tb = frame->f_exc_traceback;
3449 frame->f_exc_type = NULL;
3450 frame->f_exc_value = NULL;
3451 frame->f_exc_traceback = NULL;
3452 Py_DECREF(tmp_type);
3453 Py_XDECREF(tmp_value);
3457 /* Logic for the raise statement (too complicated for inlining).
3458    This *consumes* a reference count to each of its arguments. */
3459 static enum why_code
3460 do_raise(PyObject *type, PyObject *value, PyObject *tb)
     /* Returns a why_code for the eval loop (WHY_EXCEPTION on both the
      * normal "exception set" path and on error paths).  NOTE(review):
      * several interior lines are elided in this view (the bare-`raise`
      * branch guard, INCREFs, and goto targets are not all visible). */
3464     PyThreadState *tstate = PyThreadState_GET();
     /* Bare `raise`: re-raise the thread state's current exception
      * (Py_None if none has been set yet). */
3465     type = tstate->exc_type == NULL ? Py_None : tstate->exc_type;
3466     value = tstate->exc_value;
3467     tb = tstate->exc_traceback;
3473     /* We support the following forms of raise:
3474        raise <class>, <classinstance>
3475        raise <class>, <argument tuple>
3477        raise <class>, <argument>
3478        raise <classinstance>, None
3479        raise <string>, <object>
3480        raise <string>, None
3482        An omitted second argument is the same as None.
3484        In addition, raise <tuple>, <anything> is the same as
3485        raising the tuple's first item (and it better have one!);
3486        this rule is applied recursively.
3488        Finally, an optional third argument can be supplied, which
3489        gives the traceback to be substituted (useful when
3490        re-raising an exception after examining it). */
3492     /* First, check the traceback argument, replacing None with
3494     if (tb == Py_None) {
3498     else if (tb != NULL && !PyTraceBack_Check(tb)) {
3499         PyErr_SetString(PyExc_TypeError,
3500                         "raise: arg 3 must be a traceback or None");
3504     /* Next, replace a missing value with None */
3505     if (value == NULL) {
3510     /* Next, repeatedly, replace a tuple exception with its first item */
3511     while (PyTuple_Check(type) && PyTuple_Size(type) > 0) {
3512         PyObject *tmp = type;
3513         type = PyTuple_GET_ITEM(type, 0);
3518     if (PyExceptionClass_Check(type)) {
     /* Raising a class: normalize to (class, instance, tb). */
3519         PyErr_NormalizeException(&type, &value, &tb);
3520         if (!PyExceptionInstance_Check(value)) {
3521             PyErr_Format(PyExc_TypeError,
3522                          "calling %s() should have returned an instance of "
3523                          "BaseException, not '%s'",
3524                          ((PyTypeObject *)type)->tp_name,
3525                          Py_TYPE(value)->tp_name);
3529     else if (PyExceptionInstance_Check(type)) {
3530         /* Raising an instance.  The value should be a dummy. */
3531         if (value != Py_None) {
3532             PyErr_SetString(PyExc_TypeError,
3533                             "instance exception may not have a separate value");
3537         /* Normalize to raise <class>, <instance> */
3540         type = PyExceptionInstance_Class(type);
3545         /* Not something you can raise.  You get an exception
3546            anyway, just not what you specified :-) */
3547         PyErr_Format(PyExc_TypeError,
3548                      "exceptions must be old-style classes or "
3549                      "derived from BaseException, not %s",
3550                      type->ob_type->tp_name);
3554     assert(PyExceptionClass_Check(type));
     /* -3 mode: warn about old-style (classic-class) exceptions. */
3555     if (Py_Py3kWarningFlag && PyClass_Check(type)) {
3556         if (PyErr_WarnEx(PyExc_DeprecationWarning,
3557                          "exceptions must derive from BaseException "
3562     PyErr_Restore(type, value, tb);
3564     return WHY_EXCEPTION;
3571     return WHY_EXCEPTION;
3574 /* Iterate v argcnt times and store the results on the stack (via decreasing
3575    sp).  Return 1 for success, 0 if error. */
     /* Used by the UNPACK_SEQUENCE opcode for non-tuple/list iterables.
      * NOTE(review): the error-cleanup tail (Error: label, DECREFs) is
      * elided in this view. */
3578 unpack_iterable(PyObject *v, int argcnt, PyObject **sp)
3581     PyObject *it;  /* iter(v) */
3586     it = PyObject_GetIter(v);
3590     for (; i < argcnt; i++) {
3591         w = PyIter_Next(it);
3593         /* Iterator done, via error or exhaustion. */
3594         if (!PyErr_Occurred()) {
3595             PyErr_Format(PyExc_ValueError,
3596                          "need more than %d value%s to unpack",
3597                          i, i == 1 ? "" : "s");
3604     /* We better have exhausted the iterator now. */
3605     w = PyIter_Next(it);
3607     if (PyErr_Occurred())
     /* Iterator yielded an extra item: too many values. */
3613     PyErr_SetString(PyExc_ValueError, "too many values to unpack");
     /* Error path: pop back any items already pushed. */
3616     for (; i > 0; i--, sp++)
/* Debug helper (LLTRACE builds): print a labelled object to stdout.
 * Errors from printing are deliberately swallowed. */
3625 prtrace(PyObject *v, char *str)
3628     if (PyObject_Print(v, stdout, 0) != 0)
3629         PyErr_Clear(); /* Don't know what else to do */
/* Invoke the trace function with a PyTrace_EXCEPTION event.  The current
 * exception is fetched, passed as an (type, value, traceback) tuple, and
 * restored afterwards unless the trace function itself failed. */
3636 call_exc_trace(Py_tracefunc func, PyObject *self, PyFrameObject *f)
3638     PyObject *type, *value, *traceback, *arg;
3640     PyErr_Fetch(&type, &value, &traceback);
3641     if (value == NULL) {
3645     arg = PyTuple_Pack(3, type, value, traceback);
     /* Tuple allocation failed: put the original exception back. */
3647         PyErr_Restore(type, value, traceback);
3650     err = call_trace(func, self, f, PyTrace_EXCEPTION, arg);
3653         PyErr_Restore(type, value, traceback);
3657         Py_XDECREF(traceback);
/* Like call_trace(), but preserves any pending exception across the call.
 * If the trace function succeeds the saved exception is restored; the
 * elided branch presumably handles the failure case. */
3662 call_trace_protected(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
3663                      int what, PyObject *arg)
3665     PyObject *type, *value, *traceback;
3667     PyErr_Fetch(&type, &value, &traceback);
3668     err = call_trace(func, obj, frame, what, arg);
3671         PyErr_Restore(type, value, traceback);
3677         Py_XDECREF(traceback);
/* Core trace dispatch: invoke `func` for event `what`, guarding against
 * re-entrancy via tstate->tracing and temporarily disabling use_tracing so
 * the trace function itself is not traced. */
3683 call_trace(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
3684            int what, PyObject *arg)
3686     register PyThreadState *tstate = frame->f_tstate;
     /* Already inside a trace callback: do nothing (elided return). */
3688     if (tstate->tracing)
3691     tstate->use_tracing = 0;
3692     result = func(obj, frame, what, arg);
     /* Recompute use_tracing; the callback may have changed the hooks. */
3693     tstate->use_tracing = ((tstate->c_tracefunc != NULL)
3694                            || (tstate->c_profilefunc != NULL));
/* Call `func(*args)` with tracing force-enabled (used by pdb's
 * "debug" command / sys.call_tracing).  Saves and restores the thread
 * state's tracing flags around the call. */
3700 _PyEval_CallTracing(PyObject *func, PyObject *args)
3702     PyFrameObject *frame = PyEval_GetFrame();
3703     PyThreadState *tstate = frame->f_tstate;
3704     int save_tracing = tstate->tracing;
3705     int save_use_tracing = tstate->use_tracing;
3708     tstate->tracing = 0;
3709     tstate->use_tracing = ((tstate->c_tracefunc != NULL)
3710                            || (tstate->c_profilefunc != NULL));
3711     result = PyObject_Call(func, args, NULL);
3712     tstate->tracing = save_tracing;
3713     tstate->use_tracing = save_use_tracing;
3717 /* See Objects/lnotab_notes.txt for a description of how tracing works. */
     /* Decide whether the current instruction starts a new source line (or
      * jumps backwards) and, if so, fire a PyTrace_LINE event.  The
      * [*instr_lb, *instr_ub) window caches the bytecode range of the
      * current line to avoid re-scanning co_lnotab on every instruction. */
3719 maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
3720                       PyFrameObject *frame, int *instr_lb, int *instr_ub,
3724     int line = frame->f_lineno;
3726     /* If the last instruction executed isn't in the current
3727        instruction window, reset the window.
3729     if (frame->f_lasti < *instr_lb || frame->f_lasti >= *instr_ub) {
3731         line = _PyCode_CheckLineNumber(frame->f_code, frame->f_lasti,
3733         *instr_lb = bounds.ap_lower;
3734         *instr_ub = bounds.ap_upper;
3736     /* If the last instruction falls at the start of a line or if
3737        it represents a jump backwards, update the frame's line
3738        number and call the trace function. */
3739     if (frame->f_lasti == *instr_lb || frame->f_lasti < *instr_prev) {
3740         frame->f_lineno = line;
3741         result = call_trace(func, obj, frame, PyTrace_LINE, Py_None);
3743     *instr_prev = frame->f_lasti;
/* Install (or clear, with func==NULL) the C-level profile hook for the
 * current thread.  The hook is cleared before the old `temp` object can be
 * freed, so a __del__ triggered by that DECREF cannot re-enter profiling
 * with stale state.  NOTE(review): the Py_XDECREF(temp) line is elided. */
3748 PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
3750     PyThreadState *tstate = PyThreadState_GET();
3751     PyObject *temp = tstate->c_profileobj;
3753     tstate->c_profilefunc = NULL;
3754     tstate->c_profileobj = NULL;
3755     /* Must make sure that tracing is not ignored if 'temp' is freed */
3756     tstate->use_tracing = tstate->c_tracefunc != NULL;
3758     tstate->c_profilefunc = func;
3759     tstate->c_profileobj = arg;
3760     /* Flag that tracing or profiling is turned on */
3761     tstate->use_tracing = (func != NULL) || (tstate->c_tracefunc != NULL);
/* Install (or clear) the C-level trace hook for the current thread.
 * Mirrors PyEval_SetProfile(); additionally maintains the process-wide
 * _Py_TracingPossible count used by the eval loop's fast path. */
3765 PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
3767     PyThreadState *tstate = PyThreadState_GET();
3768     PyObject *temp = tstate->c_traceobj;
     /* +1 when turning tracing on, -1 when turning it off, 0 on replace. */
3769     _Py_TracingPossible += (func != NULL) - (tstate->c_tracefunc != NULL);
3771     tstate->c_tracefunc = NULL;
3772     tstate->c_traceobj = NULL;
3773     /* Must make sure that profiling is not ignored if 'temp' is freed */
3774     tstate->use_tracing = tstate->c_profilefunc != NULL;
3776     tstate->c_tracefunc = func;
3777     tstate->c_traceobj = arg;
3778     /* Flag that tracing or profiling is turned on */
3779     tstate->use_tracing = ((func != NULL)
3780                            || (tstate->c_profilefunc != NULL));
/* Return the builtins dict of the currently executing frame, falling back
 * to the interpreter's builtins when no frame is active (borrowed ref). */
3784 PyEval_GetBuiltins(void)
3786     PyFrameObject *current_frame = PyEval_GetFrame();
3787     if (current_frame == NULL)
3788         return PyThreadState_GET()->interp->builtins;
3790         return current_frame->f_builtins;
/* Return the locals mapping of the current frame (borrowed ref), after
 * syncing fast locals into f_locals; NULL when no frame is executing. */
3794 PyEval_GetLocals(void)
3796     PyFrameObject *current_frame = PyEval_GetFrame();
3797     if (current_frame == NULL)
3799     PyFrame_FastToLocals(current_frame);
3800     return current_frame->f_locals;
/* Return the globals dict of the current frame (borrowed ref), or NULL
 * when no frame is executing (elided return on the NULL branch). */
3804 PyEval_GetGlobals(void)
3806     PyFrameObject *current_frame = PyEval_GetFrame();
3807     if (current_frame == NULL)
3810         return current_frame->f_globals;
/* Return the current thread's executing frame (borrowed ref, may be NULL). */
3814 PyEval_GetFrame(void)
3816     PyThreadState *tstate = PyThreadState_GET();
3817     return _PyThreadState_GetFrame(tstate);
/* True iff the current frame runs in restricted-execution mode; 0 when no
 * frame is active. */
3821 PyEval_GetRestricted(void)
3823     PyFrameObject *current_frame = PyEval_GetFrame();
3824     return current_frame == NULL ? 0 : PyFrame_IsRestricted(current_frame);
/* Merge the current frame's __future__/compiler flags (masked by PyCF_MASK)
 * into *cf.  Returns nonzero when *cf ends up carrying any flags. */
3828 PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
3830     PyFrameObject *current_frame = PyEval_GetFrame();
3831     int result = cf->cf_flags != 0;
3833     if (current_frame != NULL) {
3834         const int codeflags = current_frame->f_code->co_flags;
3835         const int compilerflags = codeflags & PyCF_MASK;
3836         if (compilerflags) {
3838             cf->cf_flags |= compilerflags;
3840 #if 0 /* future keyword */
3841         if (codeflags & CO_GENERATOR_ALLOWED) {
3843             cf->cf_flags |= CO_GENERATOR_ALLOWED;
/* NOTE(review): the function header is elided here; from the body this
 * looks like Py_FlushLine() — write a newline to sys.stdout if its
 * softspace flag was set — TODO confirm against the full file. */
3853     PyObject *f = PySys_GetObject("stdout");
3856     if (!PyFile_SoftSpace(f, 0))
3858     return PyFile_WriteString("\n", f);
3862 /* External interface to call any callable object.
3863    The arg must be a tuple or NULL.  The kw must be a dict or NULL. */
     /* Validates argument types, substitutes an empty tuple for arg==NULL,
      * and delegates to PyObject_Call.  Returns a new reference or NULL. */
3866 PyEval_CallObjectWithKeywords(PyObject *func, PyObject *arg, PyObject *kw)
3871         arg = PyTuple_New(0);
3875     else if (!PyTuple_Check(arg)) {
3876         PyErr_SetString(PyExc_TypeError,
3877                         "argument list must be a tuple");
3883     if (kw != NULL && !PyDict_Check(kw)) {
3884         PyErr_SetString(PyExc_TypeError,
3885                         "keyword list must be a dictionary");
3890     result = PyObject_Call(func, arg, kw);
/* Best-effort printable name for a callable, used in error messages.
 * Bound methods recurse into their underlying function; unknown callables
 * fall back to the type name. */
3896 PyEval_GetFuncName(PyObject *func)
3898     if (PyMethod_Check(func))
3899         return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func));
3900     else if (PyFunction_Check(func))
3901         return PyString_AsString(((PyFunctionObject*)func)->func_name);
3902     else if (PyCFunction_Check(func))
3903         return ((PyCFunctionObject*)func)->m_ml->ml_name;
3904     else if (PyClass_Check(func))
3905         return PyString_AsString(((PyClassObject*)func)->cl_name);
3906     else if (PyInstance_Check(func)) {
3907         return PyString_AsString(
3908             ((PyInstanceObject*)func)->in_class->cl_name);
3910         return func->ob_type->tp_name;
/* Companion of PyEval_GetFuncName(): a short descriptor string ("()",
 * " constructor", " instance", ...) appended after the name in error
 * messages.  NOTE(review): most return values are elided in this view. */
3915 PyEval_GetFuncDesc(PyObject *func)
3917     if (PyMethod_Check(func))
3919     else if (PyFunction_Check(func))
3921     else if (PyCFunction_Check(func))
3923     else if (PyClass_Check(func))
3924         return " constructor";
3925     else if (PyInstance_Check(func)) {
/* Raise a TypeError for a METH_NOARGS/METH_O C function called with the
 * wrong number of positional arguments (`nargs`). */
3933 err_args(PyObject *func, int flags, int nargs)
3935     if (flags & METH_NOARGS)
3936         PyErr_Format(PyExc_TypeError,
3937                      "%.200s() takes no arguments (%d given)",
3938                      ((PyCFunctionObject *)func)->m_ml->ml_name,
3941         PyErr_Format(PyExc_TypeError,
3942                      "%.200s() takes exactly one argument (%d given)",
3943                      ((PyCFunctionObject *)func)->m_ml->ml_name,
/* C_TRACE(x, call): evaluate `call` (a C-function invocation) surrounded
 * by profiler events when profiling is active: PyTrace_C_CALL before, then
 * PyTrace_C_EXCEPTION or PyTrace_C_RETURN depending on the outcome; when
 * no profiler is installed it reduces to plain `x = call`.  NOTE(review):
 * several continuation lines of this macro are elided in this view, so
 * comments are kept outside the backslash-continued body. */
3947 #define C_TRACE(x, call) \
3948 if (tstate->use_tracing && tstate->c_profilefunc) { \
3949     if (call_trace(tstate->c_profilefunc, \
3950         tstate->c_profileobj, \
3951         tstate->frame, PyTrace_C_CALL, \
3957         if (tstate->c_profilefunc != NULL) { \
3959             call_trace_protected(tstate->c_profilefunc, \
3960                 tstate->c_profileobj, \
3961                 tstate->frame, PyTrace_C_EXCEPTION, \
3963                 /* XXX should pass (type, value, tb) */ \
3965             if (call_trace(tstate->c_profilefunc, \
3966                 tstate->c_profileobj, \
3967                 tstate->frame, PyTrace_C_RETURN, \
/* Dispatch helper for the CALL_FUNCTION opcode.  oparg packs the argument
 * counts: low byte = positional (na), next byte = keyword pairs (nk).
 * The callable sits below its n = na + 2*nk stack arguments.  Fast paths:
 * METH_NOARGS / METH_O C functions (no tuple built), then bound methods,
 * then fast_function() for pure-Python functions; everything else goes
 * through do_call().  Afterwards the function and any leftover arguments
 * are popped.  pintr0/pintr1 capture rdtsc timestamps in --with-tsc builds. */
3980 call_function(PyObject ***pp_stack, int oparg
3982                 , uint64* pintr0, uint64* pintr1
3986     int na = oparg & 0xff;
3987     int nk = (oparg>>8) & 0xff;
3988     int n = na + 2 * nk;
3989     PyObject **pfunc = (*pp_stack) - n - 1;
3990     PyObject *func = *pfunc;
3993     /* Always dispatch PyCFunction first, because these are
3994        presumed to be the most frequent callable object.
3996     if (PyCFunction_Check(func) && nk == 0) {
3997         int flags = PyCFunction_GET_FLAGS(func);
3998         PyThreadState *tstate = PyThreadState_GET();
4000         PCALL(PCALL_CFUNCTION);
4001         if (flags & (METH_NOARGS | METH_O)) {
4002             PyCFunction meth = PyCFunction_GET_FUNCTION(func);
4003             PyObject *self = PyCFunction_GET_SELF(func);
4004             if (flags & METH_NOARGS && na == 0) {
4005                 C_TRACE(x, (*meth)(self,NULL));
4007             else if (flags & METH_O && na == 1) {
4008                 PyObject *arg = EXT_POP(*pp_stack);
4009                 C_TRACE(x, (*meth)(self,arg));
     /* Wrong arity for METH_NOARGS/METH_O: raise TypeError. */
4013                 err_args(func, flags, na);
     /* General METH_VARARGS C function: build an argument tuple. */
4019             callargs = load_args(pp_stack, na);
4020             READ_TIMESTAMP(*pintr0);
4021             C_TRACE(x, PyCFunction_Call(func,callargs,NULL));
4022             READ_TIMESTAMP(*pintr1);
4023             Py_XDECREF(callargs);
4026         if (PyMethod_Check(func) && PyMethod_GET_SELF(func) != NULL) {
4027             /* optimize access to bound methods */
4028             PyObject *self = PyMethod_GET_SELF(func);
4029             PCALL(PCALL_METHOD);
4030             PCALL(PCALL_BOUND_METHOD);
     /* Unwrap: call the underlying function with self prepended
      * (the prepend itself is elided in this view). */
4032             func = PyMethod_GET_FUNCTION(func);
4040         READ_TIMESTAMP(*pintr0);
4041         if (PyFunction_Check(func))
4042             x = fast_function(func, pp_stack, n, na, nk);
4044             x = do_call(func, pp_stack, na, nk);
4045         READ_TIMESTAMP(*pintr1);
4049     /* Clear the stack of the function object.  Also removes
4050        the arguments in case they weren't consumed already
4051        (fast_function() and err_args() leave them on the stack).
4053     while ((*pp_stack) > pfunc) {
4054         w = EXT_POP(*pp_stack);
4061 /* The fast_function() function optimize calls for which no argument
4062    tuple is necessary; the objects are passed directly from the stack.
4063    For the simplest case -- a function that takes only positional
4064    arguments and is called with only positional arguments -- it
4065    inlines the most primitive frame setup code from
4066    PyEval_EvalCodeEx(), which vastly reduces the checks that must be
4067    done before evaluating the frame.
4071 fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
4073     PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
4074     PyObject *globals = PyFunction_GET_GLOBALS(func);
4075     PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
4076     PyObject **d = NULL;
4079     PCALL(PCALL_FUNCTION);
4080     PCALL(PCALL_FAST_FUNCTION);
     /* Fastest path: exact positional arity, no kwargs, no defaults, no
      * cell/free vars -- build a frame directly and evaluate it. */
4081     if (argdefs == NULL && co->co_argcount == n && nk==0 &&
4082         co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
4084         PyObject *retval = NULL;
4085         PyThreadState *tstate = PyThreadState_GET();
4086         PyObject **fastlocals, **stack;
4089         PCALL(PCALL_FASTER_FUNCTION);
4090         assert(globals != NULL);
4091         /* XXX Perhaps we should create a specialized
4092            PyFrame_New() that doesn't take locals, but does
4093            take builtins without sanity checking them.
4095         assert(tstate != NULL);
4096         f = PyFrame_New(tstate, co, globals, NULL);
     /* Copy the stack arguments straight into the frame's fast locals;
      * references are transferred (no INCREF visible here). */
4100         fastlocals = f->f_localsplus;
4101         stack = (*pp_stack) - n;
4103         for (i = 0; i < n; i++) {
4105             fastlocals[i] = *stack++;
4107         retval = PyEval_EvalFrameEx(f,0);
     /* NOTE(review): recursion_depth is bumped so the frame DECREF below
      * (elided) doesn't trip the recursion check -- confirm in full file. */
4108         ++tstate->recursion_depth;
4110         --tstate->recursion_depth;
     /* Slow path: fall back to the general PyEval_EvalCodeEx entry. */
4113     if (argdefs != NULL) {
4114         d = &PyTuple_GET_ITEM(argdefs, 0);
4115         nd = Py_SIZE(argdefs);
4117     return PyEval_EvalCodeEx(co, globals,
4118                              (PyObject *)NULL, (*pp_stack)-n, na,
4119                              (*pp_stack)-2*nk, nk, d, nd,
4120                              PyFunction_GET_CLOSURE(func));
/* Pop nk (key, value) pairs off the stack into a dict.  Starts from a copy
 * of orig_kwdict (consuming that reference) or a fresh dict, and raises
 * TypeError on duplicate keyword names.  Returns a new dict or NULL. */
4124 update_keyword_args(PyObject *orig_kwdict, int nk, PyObject ***pp_stack,
4127     PyObject *kwdict = NULL;
4128     if (orig_kwdict == NULL)
4129         kwdict = PyDict_New();
4131         kwdict = PyDict_Copy(orig_kwdict);
4132         Py_DECREF(orig_kwdict);
4138         PyObject *value = EXT_POP(*pp_stack);
4139         PyObject *key = EXT_POP(*pp_stack);
4140         if (PyDict_GetItem(kwdict, key) != NULL) {
4141             PyErr_Format(PyExc_TypeError,
4142                          "%.200s%s got multiple values "
4143                          "for keyword argument '%.200s'",
4144                          PyEval_GetFuncName(func),
4145                          PyEval_GetFuncDesc(func),
4146                          PyString_AsString(key));
4152         err = PyDict_SetItem(kwdict, key, value);
/* Build the final positional-argument tuple for an ext_do_call: nstack
 * values popped from the stack followed by the nstar items of stararg.
 * Returns a new tuple or NULL on allocation failure. */
4164 update_star_args(int nstack, int nstar, PyObject *stararg,
4165                  PyObject ***pp_stack)
4167     PyObject *callargs, *w;
4169     callargs = PyTuple_New(nstack + nstar);
4170     if (callargs == NULL) {
4175     for (i = 0; i < nstar; i++) {
4176         PyObject *a = PyTuple_GET_ITEM(stararg, i);
     /* INCREF of `a` is elided in this view; SET_ITEM steals a ref. */
4178         PyTuple_SET_ITEM(callargs, nstack + i, a);
     /* Stack items are popped in reverse and slotted into place. */
4181     while (--nstack >= 0) {
4182         w = EXT_POP(*pp_stack);
4183         PyTuple_SET_ITEM(callargs, nstack, w);
/* Pop `na` positional arguments off the stack into a new tuple (stack
 * order reversed into call order).  References are stolen by SET_ITEM. */
4189 load_args(PyObject ***pp_stack, int na)
4191     PyObject *args = PyTuple_New(na);
4197         w = EXT_POP(*pp_stack);
4198         PyTuple_SET_ITEM(args, na, w);
/* Generic call path for CALL_FUNCTION when the fast paths don't apply:
 * collect keyword pairs and positional args from the stack, update the
 * PCALL stats by callable type, then invoke via PyCFunction_Call (with
 * C_TRACE profiling) or PyObject_Call. */
4204 do_call(PyObject *func, PyObject ***pp_stack, int na, int nk)
4206     PyObject *callargs = NULL;
4207     PyObject *kwdict = NULL;
4208     PyObject *result = NULL;
4211         kwdict = update_keyword_args(NULL, nk, pp_stack, func);
4215     callargs = load_args(pp_stack, na);
4216     if (callargs == NULL)
4219     /* At this point, we have to look at the type of func to
4220        update the call stats properly.  Do it here so as to avoid
4221        exposing the call stats machinery outside ceval.c
4223     if (PyFunction_Check(func))
4224         PCALL(PCALL_FUNCTION);
4225     else if (PyMethod_Check(func))
4226         PCALL(PCALL_METHOD);
4227     else if (PyType_Check(func))
4229     else if (PyCFunction_Check(func))
4230         PCALL(PCALL_CFUNCTION);
4234     if (PyCFunction_Check(func)) {
4235         PyThreadState *tstate = PyThreadState_GET();
4236         C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
4239         result = PyObject_Call(func, callargs, kwdict);
     /* Cleanup (kwdict DECREF elided in this view). */
4241     Py_XDECREF(callargs);
/* Call path for CALL_FUNCTION_VAR / CALL_FUNCTION_KW / CALL_FUNCTION_VAR_KW:
 * like do_call() but additionally consumes an optional **kwargs mapping
 * (CALL_FLAG_KW) and an optional *args sequence (CALL_FLAG_VAR) from the
 * stack, with friendly TypeErrors naming the callable when those arguments
 * have the wrong type. */
4247 ext_do_call(PyObject *func, PyObject ***pp_stack, int flags, int na, int nk)
4250     PyObject *callargs = NULL;
4251     PyObject *stararg = NULL;
4252     PyObject *kwdict = NULL;
4253     PyObject *result = NULL;
4255     if (flags & CALL_FLAG_KW) {
4256         kwdict = EXT_POP(*pp_stack);
     /* Non-dict **kwargs: copy it into a real dict via PyDict_Update. */
4257         if (!PyDict_Check(kwdict)) {
4262             if (PyDict_Update(d, kwdict) != 0) {
4264                 /* PyDict_Update raises attribute
4265                  * error (percolated from an attempt
4266                  * to get 'keys' attribute) instead of
4267                  * a type error if its second argument
4270                 if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
4271                     PyErr_Format(PyExc_TypeError,
4272                                  "%.200s%.200s argument after ** "
4273                                  "must be a mapping, not %.200s",
4274                                  PyEval_GetFuncName(func),
4275                                  PyEval_GetFuncDesc(func),
4276                                  kwdict->ob_type->tp_name);
4284     if (flags & CALL_FLAG_VAR) {
4285         stararg = EXT_POP(*pp_stack);
     /* Non-tuple *args: coerce through PySequence_Tuple. */
4286         if (!PyTuple_Check(stararg)) {
4288             t = PySequence_Tuple(stararg);
4290                 if (PyErr_ExceptionMatches(PyExc_TypeError)) {
4291                     PyErr_Format(PyExc_TypeError,
4292                                  "%.200s%.200s argument after * "
4293                                  "must be a sequence, not %200s",
4294                                  PyEval_GetFuncName(func),
4295                                  PyEval_GetFuncDesc(func),
4296                                  stararg->ob_type->tp_name);
4303         nstar = PyTuple_GET_SIZE(stararg);
4306         kwdict = update_keyword_args(kwdict, nk, pp_stack, func);
4310     callargs = update_star_args(na, nstar, stararg, pp_stack);
4311     if (callargs == NULL)
4314     /* At this point, we have to look at the type of func to
4315        update the call stats properly.  Do it here so as to avoid
4316        exposing the call stats machinery outside ceval.c
4318     if (PyFunction_Check(func))
4319         PCALL(PCALL_FUNCTION);
4320     else if (PyMethod_Check(func))
4321         PCALL(PCALL_METHOD);
4322     else if (PyType_Check(func))
4324     else if (PyCFunction_Check(func))
4325         PCALL(PCALL_CFUNCTION);
4329     if (PyCFunction_Check(func)) {
4330         PyThreadState *tstate = PyThreadState_GET();
4331         C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
4334         result = PyObject_Call(func, callargs, kwdict);
4336     Py_XDECREF(callargs);
4338     Py_XDECREF(stararg);
4342 /* Extract a slice index from a PyInt or PyLong or an object with the
4343    nb_index slot defined, and store in *pi.
4344    Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
4345    and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
4346    Return 0 on error, 1 on success.
4348 /* Note:  If v is NULL, return success without storing into *pi.  This
4349    is because_PyEval_SliceIndex() is called by apply_slice(), which can be
4350    called by the SLICE opcode with v and/or w equal to NULL.
4353 _PyEval_SliceIndex(PyObject *v, Py_ssize_t *pi)
4357     if (PyInt_Check(v)) {
4358         /* XXX(nnorwitz): I think PyInt_AS_LONG is correct,
4359            however, it looks like it should be AsSsize_t.
4360            There should be a comment here explaining why.
4362         x = PyInt_AS_LONG(v);
4364     else if (PyIndex_Check(v)) {
4365         x = PyNumber_AsSsize_t(v, NULL);
     /* PyNumber_AsSsize_t signals error with -1 + pending exception. */
4366         if (x == -1 && PyErr_Occurred())
     /* Neither int/long nor __index__-capable: TypeError. */
4370         PyErr_SetString(PyExc_TypeError,
4371                         "slice indices must be integers or "
4372                         "None or have an __index__ method");
/* True when x can serve as a slice bound: absent (NULL), an int/long, or
 * any object supporting the __index__ protocol. */
4381 #define ISINDEX(x) ((x) == NULL || \
4382                     PyInt_Check(x) || PyLong_Check(x) || PyIndex_Check(x))
4385 apply_slice(PyObject *u, PyObject *v, PyObject *w) /* return u[v:w] */
     /* Old-style SLICE opcode helper: prefer the type's sq_slice slot when
      * both bounds are index-like, otherwise build a PySlice object and go
      * through the mapping protocol. */
4387     PyTypeObject *tp = u->ob_type;
4388     PySequenceMethods *sq = tp->tp_as_sequence;
4390     if (sq && sq->sq_slice && ISINDEX(v) && ISINDEX(w)) {
4391         Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
4392         if (!_PyEval_SliceIndex(v, &ilow))
4394         if (!_PyEval_SliceIndex(w, &ihigh))
4396         return PySequence_GetSlice(u, ilow, ihigh);
4399         PyObject *slice = PySlice_New(v, w, NULL);
4400         if (slice != NULL) {
4401             PyObject *res = PyObject_GetItem(u, slice);
/* Helper for STORE_SLICE / DELETE_SLICE: u[v:w] = x, or del u[v:w] when
 * x == NULL.  Mirrors apply_slice(): sq_ass_slice fast path for index-like
 * bounds, PySlice + mapping protocol otherwise. */
4411 assign_slice(PyObject *u, PyObject *v, PyObject *w, PyObject *x)
4414     PyTypeObject *tp = u->ob_type;
4415     PySequenceMethods *sq = tp->tp_as_sequence;
4417     if (sq && sq->sq_ass_slice && ISINDEX(v) && ISINDEX(w)) {
4418         Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
4419         if (!_PyEval_SliceIndex(v, &ilow))
4421         if (!_PyEval_SliceIndex(w, &ihigh))
     /* x == NULL means deletion. */
4424             return PySequence_DelSlice(u, ilow, ihigh);
4426             return PySequence_SetSlice(u, ilow, ihigh, x);
4429         PyObject *slice = PySlice_New(v, w, NULL);
4430         if (slice != NULL) {
4433                 res = PyObject_SetItem(u, slice, x);
4435                 res = PyObject_DelItem(u, slice);
/* True when x is a class acceptable as an exception under Python 3 rules,
 * i.e. a type deriving from BaseException.  Used by cmp_outcome() for -3
 * warnings about `except` clauses that 3.x would reject. */
4444 #define Py3kExceptionClass_Check(x)     \
4445     (PyType_Check((x)) &&               \
4446      PyType_FastSubclass((PyTypeObject*)(x), Py_TPFLAGS_BASE_EXC_SUBCLASS))
4448 #define CANNOT_CATCH_MSG "catching classes that don't inherit from " \
4449                          "BaseException is not allowed in 3.x"
/* Implement the COMPARE_OP opcode's non-rich cases: `in` / `not in`
 * (PySequence_Contains) and `exception match` (PyErr_GivenExceptionMatches,
 * with Py3k-warning checks on string exceptions and non-BaseException
 * classes, applied per-item when w is a tuple).  All other ops defer to
 * PyObject_RichCompare.  NOTE(review): case labels, error returns, and the
 * final INCREF of the Py_True/Py_False result are elided in this view. */
4452 cmp_outcome(int op, register PyObject *v, register PyObject *w)
4463         res = PySequence_Contains(w, v);
     /* `not in`: same containment test, result negated (elided). */
4468         res = PySequence_Contains(w, v);
4473     case PyCmp_EXC_MATCH:
4474         if (PyTuple_Check(w)) {
4475             Py_ssize_t i, length;
4476             length = PyTuple_Size(w);
4477             for (i = 0; i < length; i += 1) {
4478                 PyObject *exc = PyTuple_GET_ITEM(w, i);
4479                 if (PyString_Check(exc)) {
4481                     ret_val = PyErr_WarnEx(
4482                         PyExc_DeprecationWarning,
4483                         "catching of string "
4484                         "exceptions is deprecated", 1);
4488                 else if (Py_Py3kWarningFlag &&
4489                          !PyTuple_Check(exc) &&
4490                          !Py3kExceptionClass_Check(exc))
4493                     ret_val = PyErr_WarnEx(
4494                         PyExc_DeprecationWarning,
4495                         CANNOT_CATCH_MSG, 1);
     /* Single (non-tuple) exception specifier: same two warnings. */
4502             if (PyString_Check(w)) {
4504                 ret_val = PyErr_WarnEx(
4505                     PyExc_DeprecationWarning,
4506                     "catching of string "
4507                     "exceptions is deprecated", 1);
4511             else if (Py_Py3kWarningFlag &&
4512                      !PyTuple_Check(w) &&
4513                      !Py3kExceptionClass_Check(w))
4516                 ret_val = PyErr_WarnEx(
4517                     PyExc_DeprecationWarning,
4518                     CANNOT_CATCH_MSG, 1);
4523         res = PyErr_GivenExceptionMatches(v, w);
4526         return PyObject_RichCompare(v, w, op);
4528     v = res ? Py_True : Py_False;
/* IMPORT_FROM helper: getattr(v, name), converting an AttributeError into
 * the friendlier "cannot import name ..." ImportError. */
4534 import_from(PyObject *v, PyObject *name)
4538     x = PyObject_GetAttr(v, name);
4539     if (x == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) {
4540         PyErr_Format(PyExc_ImportError,
4541                      "cannot import name %.230s",
4542                      PyString_AsString(name));
/* IMPORT_STAR helper: copy names from module v into `locals`.  Uses
 * v.__all__ when present; otherwise falls back to v.__dict__'s keys and
 * skips names starting with '_'.  Returns 0/-1 (error paths partly elided
 * in this view). */
4548 import_all_from(PyObject *locals, PyObject *v)
4550     PyObject *all = PyObject_GetAttrString(v, "__all__");
4551     PyObject *dict, *name, *value;
4552     int skip_leading_underscores = 0;
4556         if (!PyErr_ExceptionMatches(PyExc_AttributeError))
4557             return -1; /* Unexpected error */
4559         dict = PyObject_GetAttrString(v, "__dict__");
4561             if (!PyErr_ExceptionMatches(PyExc_AttributeError))
4563             PyErr_SetString(PyExc_ImportError,
4564                             "from-import-* object has no __dict__ and no __all__");
4567         all = PyMapping_Keys(dict);
4571         skip_leading_underscores = 1;
     /* Iterate until PySequence_GetItem raises IndexError (end of list). */
4574     for (pos = 0, err = 0; ; pos++) {
4575         name = PySequence_GetItem(all, pos);
4577             if (!PyErr_ExceptionMatches(PyExc_IndexError))
4583         if (skip_leading_underscores &&
4584             PyString_Check(name) &&
4585             PyString_AS_STRING(name)[0] == '_')
4590         value = PyObject_GetAttr(v, name);
4593         else if (PyDict_CheckExact(locals))
4594             err = PyDict_SetItem(locals, name, value);
4596             err = PyObject_SetItem(locals, name, value);
/* BUILD_CLASS helper: determine the metaclass (methods['__metaclass__'],
 * else type of the first base, else the module-level __metaclass__, else
 * classic PyClass_Type) and call it as metaclass(name, bases, methods).
 * On TypeError, augments the error message to help diagnose bad bases. */
4607 build_class(PyObject *methods, PyObject *bases, PyObject *name)
4609     PyObject *metaclass = NULL, *result, *base;
4611     if (PyDict_Check(methods))
4612         metaclass = PyDict_GetItemString(methods, "__metaclass__");
4613     if (metaclass != NULL)
4614         Py_INCREF(metaclass);
4615     else if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
4616         base = PyTuple_GET_ITEM(bases, 0);
4617         metaclass = PyObject_GetAttrString(base, "__class__");
4618         if (metaclass == NULL) {
     /* Old-style base with no __class__: use its concrete type. */
4620             metaclass = (PyObject *)base->ob_type;
4621             Py_INCREF(metaclass);
4625         PyObject *g = PyEval_GetGlobals();
4626         if (g != NULL && PyDict_Check(g))
4627             metaclass = PyDict_GetItemString(g, "__metaclass__");
4628         if (metaclass == NULL)
4629             metaclass = (PyObject *) &PyClass_Type;
4630         Py_INCREF(metaclass);
4632     result = PyObject_CallFunctionObjArgs(metaclass, name, bases, methods,
4634     Py_DECREF(metaclass);
4635     if (result == NULL && PyErr_ExceptionMatches(PyExc_TypeError)) {
4636         /* A type error here likely means that the user passed
4637            in a base that was not a class (such the random module
4638            instead of the random.random type).  Help them out with
4639            by augmenting the error message with more information.*/
4641         PyObject *ptype, *pvalue, *ptraceback;
4643         PyErr_Fetch(&ptype, &pvalue, &ptraceback);
4644         if (PyString_Check(pvalue)) {
4646             newmsg = PyString_FromFormat(
4647                 "Error when calling the metaclass bases\n"
4649                 PyString_AS_STRING(pvalue));
4650             if (newmsg != NULL) {
4655         PyErr_Restore(ptype, pvalue, ptraceback);
/* Implement the `exec` statement: accept a string, unicode, code object,
 * or open file as `prog` (plus the legacy `exec (prog, globals[, locals])`
 * tuple form), resolve default globals/locals from the current frame,
 * validate argument types, seed __builtins__, run the code, and finally
 * write any changed locals back into the frame's fast locals. */
4661 exec_statement(PyFrameObject *f, PyObject *prog, PyObject *globals,
4668     if (PyTuple_Check(prog) && globals == Py_None && locals == Py_None &&
4669         ((n = PyTuple_Size(prog)) == 2 || n == 3)) {
4670         /* Backward compatibility hack */
4671         globals = PyTuple_GetItem(prog, 1);
4673             locals = PyTuple_GetItem(prog, 2);
4674         prog = PyTuple_GetItem(prog, 0);
4676     if (globals == Py_None) {
4677         globals = PyEval_GetGlobals();
4678         if (locals == Py_None) {
4679             locals = PyEval_GetLocals();
4682         if (!globals || !locals) {
4683             PyErr_SetString(PyExc_SystemError,
4684                             "globals and locals cannot be NULL");
     /* Explicit globals with omitted locals: locals defaults to globals
      * (elided assignment). */
4688     else if (locals == Py_None)
4690     if (!PyString_Check(prog) &&
4691 #ifdef Py_USING_UNICODE
4692         !PyUnicode_Check(prog) &&
4694         !PyCode_Check(prog) &&
4695         !PyFile_Check(prog)) {
4696         PyErr_SetString(PyExc_TypeError,
4697                         "exec: arg 1 must be a string, file, or code object");
4700     if (!PyDict_Check(globals)) {
4701         PyErr_SetString(PyExc_TypeError,
4702                         "exec: arg 2 must be a dictionary or None");
4705     if (!PyMapping_Check(locals)) {
4706         PyErr_SetString(PyExc_TypeError,
4707                         "exec: arg 3 must be a mapping or None");
4710     if (PyDict_GetItemString(globals, "__builtins__") == NULL)
4711         PyDict_SetItemString(globals, "__builtins__", f->f_builtins);
4712     if (PyCode_Check(prog)) {
     /* Code objects with free variables can't be exec'd standalone. */
4713         if (PyCode_GetNumFree((PyCodeObject *)prog) > 0) {
4714             PyErr_SetString(PyExc_TypeError,
4715                             "code object passed to exec may not contain free variables");
4718         v = PyEval_EvalCode((PyCodeObject *) prog, globals, locals);
4720     else if (PyFile_Check(prog)) {
4721         FILE *fp = PyFile_AsFile(prog);
4722         char *name = PyString_AsString(PyFile_Name(prog));
4727         if (PyEval_MergeCompilerFlags(&cf))
4728             v = PyRun_FileFlags(fp, name, Py_file_input, globals,
4731             v = PyRun_File(fp, name, Py_file_input, globals,
4735         PyObject *tmp = NULL;
4739 #ifdef Py_USING_UNICODE
     /* Unicode source: run through UTF-8 and flag it for the parser. */
4740         if (PyUnicode_Check(prog)) {
4741             tmp = PyUnicode_AsUTF8String(prog);
4745             cf.cf_flags |= PyCF_SOURCE_IS_UTF8;
4748         if (PyString_AsStringAndSize(prog, &str, NULL))
4750         if (PyEval_MergeCompilerFlags(&cf))
4751             v = PyRun_StringFlags(str, Py_file_input, globals,
4754             v = PyRun_String(str, Py_file_input, globals, locals);
     /* Propagate exec'd changes back into the frame's fast locals. */
4758     PyFrame_LocalsToFast(f, 0);
/* Raise `exc` with format_str applied to obj's string value (used for
 * NameError/UnboundLocalError messages).  Guard clauses are elided here. */
4766 format_exc_check_arg(PyObject *exc, char *format_str, PyObject *obj)
4773     obj_str = PyString_AsString(obj);
4777     PyErr_Format(exc, format_str, obj_str);
4781 string_concatenate(PyObject *v, PyObject *w,
4782                    PyFrameObject *f, unsigned char *next_instr)
4784     /* This function implements 'variable += expr' when both arguments
     * are strings.  It peeks at the NEXT opcode: if the result is about to
     * be stored back into the same variable, that variable's reference is
     * dropped first so `v` may become uniquely referenced and be resized
     * in place instead of copied. */
4786     Py_ssize_t v_len = PyString_GET_SIZE(v);
4787     Py_ssize_t w_len = PyString_GET_SIZE(w);
4788     Py_ssize_t new_len = v_len + w_len;
     /* new_len overflowed (elided check: new_len < 0). */
4790         PyErr_SetString(PyExc_OverflowError,
4791                         "strings are too large to concat");
4795     if (v->ob_refcnt == 2) {
4796         /* In the common case, there are 2 references to the value
4797          * stored in 'variable' when the += is performed: one on the
4798          * value stack (in 'v') and one still stored in the
4799          * 'variable'.  We try to delete the variable now to reduce
     /* Dispatch on the upcoming store opcode (STORE_FAST / STORE_DEREF /
      * STORE_NAME; case labels elided in this view). */
4802         switch (*next_instr) {
4805             int oparg = PEEKARG();
4806             PyObject **fastlocals = f->f_localsplus;
4807             if (GETLOCAL(oparg) == v)
4808                 SETLOCAL(oparg, NULL);
4813             PyObject **freevars = (f->f_localsplus +
4814                                    f->f_code->co_nlocals);
4815             PyObject *c = freevars[PEEKARG()];
4816             if (PyCell_GET(c) == v)
4817                 PyCell_Set(c, NULL);
4822             PyObject *names = f->f_code->co_names;
4823             PyObject *name = GETITEM(names, PEEKARG());
4824             PyObject *locals = f->f_locals;
4825             if (PyDict_CheckExact(locals) &&
4826                 PyDict_GetItem(locals, name) == v) {
4827                 if (PyDict_DelItem(locals, name) != 0) {
4836     if (v->ob_refcnt == 1 && !PyString_CHECK_INTERNED(v)) {
4837         /* Now we own the last reference to 'v', so we can resize it
4840         if (_PyString_Resize(&v, new_len) != 0) {
4841             /* XXX if _PyString_Resize() fails, 'v' has been
4842              * deallocated so it cannot be put back into
4843              * 'variable'.  The MemoryError is raised when there
4844              * is no value in 'variable', which might (very
4845              * remotely) be a cause of incompatibilities.
4849         /* copy 'w' into the newly allocated area of 'v' */
4850         memcpy(PyString_AS_STRING(v) + v_len,
4851                PyString_AS_STRING(w), w_len);
4855         /* When in-place resizing is not an option. */
4856         PyString_Concat(&v, w);
4861 #ifdef DYNAMIC_EXECUTION_PROFILE
/* Convert a 256-entry opcode-count array into a Python list of ints and
 * reset the counters (the zeroing loop's body and the final return are
 * elided in this view).  Used by _Py_GetDXProfile(). */
4864 getarray(long a[256])
4867     PyObject *l = PyList_New(256);
4868     if (l == NULL) return NULL;
4869     for (i = 0; i < 256; i++) {
4870         PyObject *x = PyInt_FromLong(a[i]);
     /* PyList_SetItem steals the reference to x. */
4875         PyList_SetItem(l, i, x);
     /* Reset the counters after snapshotting. */
4877     for (i = 0; i < 256; i++)
4883 _Py_GetDXProfile(PyObject *self, PyObject *args)
4886 return getarray(dxp);
4889 PyObject *l = PyList_New(257);
4890 if (l == NULL) return NULL;
4891 for (i = 0; i < 257; i++) {
4892 PyObject *x = getarray(dxpairs[i]);
4897 PyList_SetItem(l, i, x);