1/* Frame object implementation */
2
3#include "Python.h"
4#include "pycore_ceval.h" // _PyEval_BuiltinsFromGlobals()
5#include "pycore_moduleobject.h" // _PyModule_GetDict()
6#include "pycore_object.h" // _PyObject_GC_UNTRACK()
7
8#include "frameobject.h" // PyFrameObject
9#include "opcode.h" // EXTENDED_ARG
10#include "structmember.h" // PyMemberDef
11
12#define OFF(x) offsetof(PyFrameObject, x)
13
/* Frame attributes exposed via tp_members.  All but the two trace flags
   are read-only; PY_AUDIT_READ marks f_code reads for auditing. */
static PyMemberDef frame_memberlist[] = {
    {"f_back", T_OBJECT, OFF(f_back), READONLY},
    {"f_code", T_OBJECT, OFF(f_code), READONLY|PY_AUDIT_READ},
    {"f_builtins", T_OBJECT, OFF(f_builtins), READONLY},
    {"f_globals", T_OBJECT, OFF(f_globals), READONLY},
    {"f_trace_lines", T_BOOL, OFF(f_trace_lines), 0},
    {"f_trace_opcodes", T_BOOL, OFF(f_trace_opcodes), 0},
    {NULL} /* Sentinel */
};
23
24static struct _Py_frame_state *
25get_frame_state(void)
26{
27 PyInterpreterState *interp = _PyInterpreterState_GET();
28 return &interp->frame;
29}
30
31
32static PyObject *
33frame_getlocals(PyFrameObject *f, void *closure)
34{
35 if (PyFrame_FastToLocalsWithError(f) < 0)
36 return NULL;
37 Py_INCREF(f->f_locals);
38 return f->f_locals;
39}
40
41int
42PyFrame_GetLineNumber(PyFrameObject *f)
43{
44 assert(f != NULL);
45 if (f->f_lineno != 0) {
46 return f->f_lineno;
47 }
48 else {
49 return PyCode_Addr2Line(f->f_code, f->f_lasti*sizeof(_Py_CODEUNIT));
50 }
51}
52
53static PyObject *
54frame_getlineno(PyFrameObject *f, void *closure)
55{
56 int lineno = PyFrame_GetLineNumber(f);
57 if (lineno < 0) {
58 Py_RETURN_NONE;
59 }
60 else {
61 return PyLong_FromLong(lineno);
62 }
63}
64
65static PyObject *
66frame_getlasti(PyFrameObject *f, void *closure)
67{
68 if (f->f_lasti < 0) {
69 return PyLong_FromLong(-1);
70 }
71 return PyLong_FromLong(f->f_lasti*sizeof(_Py_CODEUNIT));
72}
73
74
75/* Given the index of the effective opcode,
76 scan back to construct the oparg with EXTENDED_ARG */
77static unsigned int
78get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i)
79{
80 _Py_CODEUNIT word;
81 unsigned int oparg = _Py_OPARG(codestr[i]);
82 if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) {
83 oparg |= _Py_OPARG(word) << 8;
84 if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) {
85 oparg |= _Py_OPARG(word) << 16;
86 if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) {
87 oparg |= _Py_OPARG(word) << 24;
88 }
89 }
90 }
91 return oparg;
92}
93
/* Kinds of syntactic block tracked by the block-stack simulation in
   markblocks().  Each entry occupies a BITS_PER_BLOCK-wide field of an
   int64_t "stack" (see push_block/pop_block/top_block below), so the
   values must be non-zero and fit in BITS_PER_BLOCK bits. */
typedef enum kind {
    With = 1,
    Loop = 2,
    Try = 3,
    Except = 4,
} Kind;
100
101#define BITS_PER_BLOCK 3
102
103static inline int64_t
104push_block(int64_t stack, Kind kind)
105{
106 assert(stack < ((int64_t)1)<<(BITS_PER_BLOCK*CO_MAXBLOCKS));
107 return (stack << BITS_PER_BLOCK) | kind;
108}
109
110static inline int64_t
111pop_block(int64_t stack)
112{
113 assert(stack > 0);
114 return stack >> BITS_PER_BLOCK;
115}
116
117static inline Kind
118top_block(int64_t stack)
119{
120 return stack & ((1<<BITS_PER_BLOCK)-1);
121}
122
/* Simulate the block stack at every reachable instruction of code_obj.
   Returns a PyMem-allocated array of len+1 entries: blocks[i] is the
   encoded block stack (see push_block) on entry to instruction i, or -1
   when i was never reached.  Used by frame_setlineno() to decide which
   jumps are legal.  Returns NULL with MemoryError set on failure. */
static int64_t *
markblocks(PyCodeObject *code_obj, int len)
{
    const _Py_CODEUNIT *code =
        (const _Py_CODEUNIT *)PyBytes_AS_STRING(code_obj->co_code);
    int64_t *blocks = PyMem_New(int64_t, len+1);
    int i, j, opcode;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    /* All-ones bytes == -1 in every slot: "unreachable". */
    memset(blocks, -1, (len+1)*sizeof(int64_t));
    blocks[0] = 0;
    /* Iterate to a fixed point: a backward jump can make an earlier
       instruction reachable only after its jump source is visited. */
    int todo = 1;
    while (todo) {
        todo = 0;
        for (i = 0; i < len; i++) {
            int64_t block_stack = blocks[i];
            int64_t except_stack;
            if (block_stack == -1) {
                continue;
            }
            opcode = _Py_OPCODE(code[i]);
            switch (opcode) {
                case JUMP_IF_FALSE_OR_POP:
                case JUMP_IF_TRUE_OR_POP:
                case POP_JUMP_IF_FALSE:
                case POP_JUMP_IF_TRUE:
                case JUMP_IF_NOT_EXC_MATCH:
                    /* Conditional jump: target and fall-through both
                       inherit the current block stack. */
                    j = get_arg(code, i);
                    assert(j < len);
                    if (blocks[j] == -1 && j < i) {
                        /* Newly-reached backward target: rescan. */
                        todo = 1;
                    }
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    blocks[i+1] = block_stack;
                    break;
                case JUMP_ABSOLUTE:
                    /* Unconditional: only the target is reachable. */
                    j = get_arg(code, i);
                    assert(j < len);
                    if (blocks[j] == -1 && j < i) {
                        todo = 1;
                    }
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case SETUP_FINALLY:
                    /* The handler target runs under an Except block; the
                       body that follows runs under a Try block. */
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    except_stack = push_block(block_stack, Except);
                    assert(blocks[j] == -1 || blocks[j] == except_stack);
                    blocks[j] = except_stack;
                    block_stack = push_block(block_stack, Try);
                    blocks[i+1] = block_stack;
                    break;
                case SETUP_WITH:
                case SETUP_ASYNC_WITH:
                    /* Same shape as SETUP_FINALLY, but the body runs
                       under a With block. */
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    except_stack = push_block(block_stack, Except);
                    assert(blocks[j] == -1 || blocks[j] == except_stack);
                    blocks[j] = except_stack;
                    block_stack = push_block(block_stack, With);
                    blocks[i+1] = block_stack;
                    break;
                case JUMP_FORWARD:
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case GET_ITER:
                case GET_AITER:
                    // For-loops get a Loop block, but comprehensions do not.
                    if (_Py_OPCODE(code[i + 1]) != CALL_FUNCTION) {
                        block_stack = push_block(block_stack, Loop);
                    }
                    blocks[i+1] = block_stack;
                    break;
                case FOR_ITER:
                    /* The jump target (loop exit) sees the stack with the
                       Loop block already popped. */
                    blocks[i+1] = block_stack;
                    block_stack = pop_block(block_stack);
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case POP_BLOCK:
                case POP_EXCEPT:
                    block_stack = pop_block(block_stack);
                    blocks[i+1] = block_stack;
                    break;
                case END_ASYNC_FOR:
                    /* Pops two blocks at once. */
                    block_stack = pop_block(pop_block(block_stack));
                    blocks[i+1] = block_stack;
                    break;
                case RETURN_VALUE:
                case RAISE_VARARGS:
                case RERAISE:
                    /* End of block */
                    break;
                default:
                    /* Any other opcode falls through with an unchanged
                       block stack. */
                    blocks[i+1] = block_stack;

            }
        }
    }
    return blocks;
}
234
235static int
236compatible_block_stack(int64_t from_stack, int64_t to_stack)
237{
238 if (to_stack < 0) {
239 return 0;
240 }
241 while(from_stack > to_stack) {
242 from_stack = pop_block(from_stack);
243 }
244 return from_stack == to_stack;
245}
246
247static const char *
248explain_incompatible_block_stack(int64_t to_stack)
249{
250 Kind target_kind = top_block(to_stack);
251 switch(target_kind) {
252 case Except:
253 return "can't jump into an 'except' block as there's no exception";
254 case Try:
255 return "can't jump into the body of a try statement";
256 case With:
257 return "can't jump into the body of a with statement";
258 case Loop:
259 return "can't jump into the body of a for loop";
260 default:
261 Py_UNREACHABLE();
262 }
263}
264
265static int *
266marklines(PyCodeObject *code, int len)
267{
268 PyCodeAddressRange bounds;
269 _PyCode_InitAddressRange(code, &bounds);
270 assert (bounds.ar_end == 0);
271
272 int *linestarts = PyMem_New(int, len);
273 if (linestarts == NULL) {
274 return NULL;
275 }
276 for (int i = 0; i < len; i++) {
277 linestarts[i] = -1;
278 }
279
280 while (PyLineTable_NextAddressRange(&bounds)) {
281 assert(bounds.ar_start/(int)sizeof(_Py_CODEUNIT) < len);
282 linestarts[bounds.ar_start/sizeof(_Py_CODEUNIT)] = bounds.ar_line;
283 }
284 return linestarts;
285}
286
/* Return the smallest entry of lines[0..len) that is >= line,
   or -1 when no entry qualifies. */
static int
first_line_not_before(int *lines, int len, int line)
{
    int best = INT_MAX;
    for (int *p = lines; p != lines + len; p++) {
        if (*p >= line && *p < best) {
            best = *p;
        }
    }
    return best == INT_MAX ? -1 : best;
}
301
302static void
303frame_stack_pop(PyFrameObject *f)
304{
305 assert(f->f_stackdepth >= 0);
306 f->f_stackdepth--;
307 PyObject *v = f->f_valuestack[f->f_stackdepth];
308 Py_DECREF(v);
309}
310
/* Pop the top entry from the frame's block stack and discard any value
   stack items pushed after that block was set up. */
static void
frame_block_unwind(PyFrameObject *f)
{
    assert(f->f_stackdepth >= 0);
    assert(f->f_iblock > 0);
    f->f_iblock--;
    PyTryBlock *b = &f->f_blockstack[f->f_iblock];
    /* b_level is the value-stack depth recorded when the block was
       entered; pop back down to it. */
    intptr_t delta = f->f_stackdepth - b->b_level;
    while (delta > 0) {
        frame_stack_pop(f);
        delta--;
    }
}
324
325
326/* Setter for f_lineno - you can set f_lineno from within a trace function in
327 * order to jump to a given line of code, subject to some restrictions. Most
328 * lines are OK to jump to because they don't make any assumptions about the
329 * state of the stack (obvious because you could remove the line and the code
330 * would still work without any stack errors), but there are some constructs
331 * that limit jumping:
332 *
333 * o Lines with an 'except' statement on them can't be jumped to, because
334 * they expect an exception to be on the top of the stack.
335 * o Lines that live in a 'finally' block can't be jumped from or to, since
336 * we cannot be sure which state the interpreter was in or would be in
337 * during execution of the finally block.
338 * o 'try', 'with' and 'async with' blocks can't be jumped into because
339 * the blockstack needs to be set up before their code runs.
340 * o 'for' and 'async for' loops can't be jumped into because the
341 * iterator needs to be on the stack.
342 * o Jumps cannot be made from within a trace function invoked with a
343 * 'return' or 'exception' event since the eval loop has been exited at
344 * that time.
345 */
/* Setter for f_lineno; see the comment block above for the user-visible
   restrictions.  Returns 0 on success, -1 with an exception set. */
static int
frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignored))
{
    if (p_new_lineno == NULL) {
        PyErr_SetString(PyExc_AttributeError, "cannot delete attribute");
        return -1;
    }
    /* f_lineno must be an integer. */
    if (!PyLong_CheckExact(p_new_lineno)) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno must be an integer");
        return -1;
    }

    /*
     * This code preserves the historical restrictions on
     * setting the line number of a frame.
     * Jumps are forbidden on a 'return' trace event (except after a yield).
     * Jumps from 'call' trace events are also forbidden.
     * In addition, jumps are forbidden when not tracing,
     * as this is a debugging feature.
     */
    switch(f->f_state) {
        case FRAME_CREATED:
            PyErr_Format(PyExc_ValueError,
                         "can't jump from the 'call' trace event of a new frame");
            return -1;
        case FRAME_RETURNED:
        case FRAME_UNWINDING:
        case FRAME_RAISED:
        case FRAME_CLEARED:
            PyErr_SetString(PyExc_ValueError,
                            "can only jump from a 'line' trace event");
            return -1;
        case FRAME_EXECUTING:
        case FRAME_SUSPENDED:
            /* You can only do this from within a trace function, not via
             * _getframe or similar hackery. */
            if (!f->f_trace) {
                PyErr_Format(PyExc_ValueError,
                             "f_lineno can only be set by a trace function");
                return -1;
            }
            break;
    }

    int new_lineno;

    /* Fail if the line falls outside the code block and
       select first line with actual code. */
    int overflow;
    long l_new_lineno = PyLong_AsLongAndOverflow(p_new_lineno, &overflow);
    if (overflow
#if SIZEOF_LONG > SIZEOF_INT
        || l_new_lineno > INT_MAX
        || l_new_lineno < INT_MIN
#endif
    ) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno out of range");
        return -1;
    }
    new_lineno = (int)l_new_lineno;

    if (new_lineno < f->f_code->co_firstlineno) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes before the current code block",
                     new_lineno);
        return -1;
    }

    /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this
       should never overflow. */
    int len = (int)(PyBytes_GET_SIZE(f->f_code->co_code) / sizeof(_Py_CODEUNIT));
    int *lines = marklines(f->f_code, len);
    if (lines == NULL) {
        return -1;
    }

    /* Snap the requested line forward to the first line with code. */
    new_lineno = first_line_not_before(lines, len, new_lineno);
    if (new_lineno < 0) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes after the current code block",
                     (int)l_new_lineno);
        PyMem_Free(lines);
        return -1;
    }

    /* Block stack at every reachable instruction (see markblocks). */
    int64_t *blocks = markblocks(f->f_code, len);
    if (blocks == NULL) {
        PyMem_Free(lines);
        return -1;
    }

    /* Scan all instructions on the target line for one whose block
       stack is compatible with the current one, preferring the deepest
       compatible stack (i.e. the fewest blocks to unwind).  Keep the
       most specific failure message seen along the way. */
    int64_t target_block_stack = -1;
    int64_t best_block_stack = -1;
    int best_addr = -1;
    int64_t start_block_stack = blocks[f->f_lasti];
    const char *msg = "cannot find bytecode for specified line";
    for (int i = 0; i < len; i++) {
        if (lines[i] == new_lineno) {
            target_block_stack = blocks[i];
            if (compatible_block_stack(start_block_stack, target_block_stack)) {
                msg = NULL;
                if (target_block_stack > best_block_stack) {
                    best_block_stack = target_block_stack;
                    best_addr = i;
                }
            }
            else if (msg) {
                if (target_block_stack >= 0) {
                    msg = explain_incompatible_block_stack(target_block_stack);
                }
                else {
                    msg = "code may be unreachable.";
                }
            }
        }
    }
    PyMem_Free(blocks);
    PyMem_Free(lines);
    if (msg != NULL) {
        PyErr_SetString(PyExc_ValueError, msg);
        return -1;
    }

    /* Unwind block stack. */
    while (start_block_stack > best_block_stack) {
        Kind kind = top_block(start_block_stack);
        switch(kind) {
            case Loop:
                /* Discard the iterator the loop keeps on the stack. */
                frame_stack_pop(f);
                break;
            case Try:
                frame_block_unwind(f);
                break;
            case With:
                frame_block_unwind(f);
                // Pop the exit function
                frame_stack_pop(f);
                break;
            case Except:
                PyErr_SetString(PyExc_ValueError,
                                "can't jump out of an 'except' block");
                return -1;
        }
        start_block_stack = pop_block(start_block_stack);
    }

    /* Finally set the new f_lasti and return OK. */
    f->f_lineno = 0;
    f->f_lasti = best_addr;
    return 0;
}
500
501static PyObject *
502frame_gettrace(PyFrameObject *f, void *closure)
503{
504 PyObject* trace = f->f_trace;
505
506 if (trace == NULL)
507 trace = Py_None;
508
509 Py_INCREF(trace);
510
511 return trace;
512}
513
514static int
515frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
516{
517 if (v == Py_None) {
518 v = NULL;
519 }
520 Py_XINCREF(v);
521 Py_XSETREF(f->f_trace, v);
522
523 return 0;
524}
525
526
/* Computed frame attributes: getters for all four, setters only for
   f_lineno (debugger line jumps) and f_trace. */
static PyGetSetDef frame_getsetlist[] = {
    {"f_locals", (getter)frame_getlocals, NULL, NULL},
    {"f_lineno", (getter)frame_getlineno,
                 (setter)frame_setlineno, NULL},
    {"f_trace", (getter)frame_gettrace, (setter)frame_settrace, NULL},
    {"f_lasti", (getter)frame_getlasti, NULL, NULL},
    {0}
};
535
536/* Stack frames are allocated and deallocated at a considerable rate.
537 In an attempt to improve the speed of function calls, we:
538
539 1. Hold a single "zombie" frame on each code object. This retains
540 the allocated and initialised frame object from an invocation of
541 the code object. The zombie is reanimated the next time we need a
542 frame object for that code object. Doing this saves the malloc/
543 realloc required when using a free_list frame that isn't the
544 correct size. It also saves some field initialisation.
545
546 In zombie mode, no field of PyFrameObject holds a reference, but
547 the following fields are still valid:
548
549 * ob_type, ob_size, f_code, f_valuestack;
550
551 * f_locals, f_trace are NULL;
552
553 * f_localsplus does not require re-allocation and
554 the local variables in f_localsplus are NULL.
555
556 2. We also maintain a separate free list of stack frames (just like
557 floats are allocated in a special way -- see floatobject.c). When
558 a stack frame is on the free list, only the following members have
559 a meaning:
560 ob_type == &Frametype
561 f_back next item on free list, or NULL
562 f_stacksize size of value stack
563 ob_size size of localsplus
564 Note that the value and block stacks are preserved -- this can save
565 another malloc() call or two (and two free() calls as well!).
566 Also note that, unlike for integers, each frame object is a
567 malloc'ed object in its own right -- it is only the actual calls to
568 malloc() that we are trying to save here, not the administration.
569 After all, while a typical program may make millions of calls, a
570 call depth of more than 20 or 30 is probably already exceptional
571 unless the program contains run-away recursion. I hope.
572
573 Later, PyFrame_MAXFREELIST was added to bound the # of frames saved on
574 free_list. Else programs creating lots of cyclic trash involving
575 frames could provoke free_list into growing without bound.
576*/
577/* max value for numfree */
578#define PyFrame_MAXFREELIST 200
579
/* Deallocate a frame: release every reference it holds, then recycle
   the frame object itself — either parked on its code object as the
   "zombie" frame or pushed on the per-interpreter free list (see the
   comment block above for both schemes). */
static void _Py_HOT_FUNCTION
frame_dealloc(PyFrameObject *f)
{
    if (_PyObject_GC_IS_TRACKED(f)) {
        _PyObject_GC_UNTRACK(f);
    }

    Py_TRASHCAN_BEGIN(f, frame_dealloc);
    /* Kill all local variables */
    PyObject **valuestack = f->f_valuestack;
    for (PyObject **p = f->f_localsplus; p < valuestack; p++) {
        Py_CLEAR(*p);
    }

    /* Free stack */
    for (int i = 0; i < f->f_stackdepth; i++) {
        Py_XDECREF(f->f_valuestack[i]);
    }
    f->f_stackdepth = 0;

    Py_XDECREF(f->f_back);
    Py_DECREF(f->f_builtins);
    Py_DECREF(f->f_globals);
    Py_CLEAR(f->f_locals);
    Py_CLEAR(f->f_trace);

    /* Recycle the frame object: zombie slot first, then free list. */
    PyCodeObject *co = f->f_code;
    if (co->co_zombieframe == NULL) {
        co->co_zombieframe = f;
    }
    else {
        struct _Py_frame_state *state = get_frame_state();
#ifdef Py_DEBUG
        // frame_dealloc() must not be called after _PyFrame_Fini()
        assert(state->numfree != -1);
#endif
        if (state->numfree < PyFrame_MAXFREELIST) {
            ++state->numfree;
            /* f_back doubles as the free-list link. */
            f->f_back = state->free_list;
            state->free_list = f;
        }
        else {
            PyObject_GC_Del(f);
        }
    }

    /* Drop this frame's reference to its code object. */
    Py_DECREF(co);
    Py_TRASHCAN_END;
}
629
630static inline Py_ssize_t
631frame_nslots(PyFrameObject *frame)
632{
633 PyCodeObject *code = frame->f_code;
634 return (code->co_nlocals
635 + PyTuple_GET_SIZE(code->co_cellvars)
636 + PyTuple_GET_SIZE(code->co_freevars));
637}
638
/* GC traversal: visit every PyObject reference the frame holds —
   fixed fields, the fast-local slots, and the live value stack. */
static int
frame_traverse(PyFrameObject *f, visitproc visit, void *arg)
{
    Py_VISIT(f->f_back);
    Py_VISIT(f->f_code);
    Py_VISIT(f->f_builtins);
    Py_VISIT(f->f_globals);
    Py_VISIT(f->f_locals);
    Py_VISIT(f->f_trace);

    /* locals */
    PyObject **fastlocals = f->f_localsplus;
    for (Py_ssize_t i = frame_nslots(f); --i >= 0; ++fastlocals) {
        Py_VISIT(*fastlocals);
    }

    /* stack */
    for (int i = 0; i < f->f_stackdepth; i++) {
        Py_VISIT(f->f_valuestack[i]);
    }
    return 0;
}
661
/* GC tp_clear: drop the trace function, the fast locals, and the value
   stack.  Does not touch f_back/f_code/f_globals etc. — those are
   released by frame_dealloc. */
static int
frame_tp_clear(PyFrameObject *f)
{
    /* Before anything else, make sure that this frame is clearly marked
     * as being defunct! Else, e.g., a generator reachable from this
     * frame may also point to this frame, believe itself to still be
     * active, and try cleaning up this frame again.
     */
    f->f_state = FRAME_CLEARED;

    Py_CLEAR(f->f_trace);

    /* locals */
    PyObject **fastlocals = f->f_localsplus;
    for (Py_ssize_t i = frame_nslots(f); --i >= 0; ++fastlocals) {
        Py_CLEAR(*fastlocals);
    }

    /* stack */
    for (int i = 0; i < f->f_stackdepth; i++) {
        Py_CLEAR(f->f_valuestack[i]);
    }
    f->f_stackdepth = 0;
    return 0;
}
687
/* frame.clear() method: drop most references held by the frame.
   Refuses to clear a currently-executing frame; an attached generator
   is finalized first so it no longer needs this frame. */
static PyObject *
frame_clear(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
{
    if (_PyFrame_IsExecuting(f)) {
        PyErr_SetString(PyExc_RuntimeError,
                        "cannot clear an executing frame");
        return NULL;
    }
    if (f->f_gen) {
        /* Finalizing the generator detaches it from this frame. */
        _PyGen_Finalize(f->f_gen);
        assert(f->f_gen == NULL);
    }
    (void)frame_tp_clear(f);
    Py_RETURN_NONE;
}
703
704PyDoc_STRVAR(clear__doc__,
705"F.clear(): clear most references held by the frame");
706
707static PyObject *
708frame_sizeof(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
709{
710 Py_ssize_t res, extras, ncells, nfrees;
711
712 PyCodeObject *code = f->f_code;
713 ncells = PyTuple_GET_SIZE(code->co_cellvars);
714 nfrees = PyTuple_GET_SIZE(code->co_freevars);
715 extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
716 /* subtract one as it is already included in PyFrameObject */
717 res = sizeof(PyFrameObject) + (extras-1) * sizeof(PyObject *);
718
719 return PyLong_FromSsize_t(res);
720}
721
722PyDoc_STRVAR(sizeof__doc__,
723"F.__sizeof__() -> size of F in memory, in bytes");
724
725static PyObject *
726frame_repr(PyFrameObject *f)
727{
728 int lineno = PyFrame_GetLineNumber(f);
729 PyCodeObject *code = f->f_code;
730 return PyUnicode_FromFormat(
731 "<frame at %p, file %R, line %d, code %S>",
732 f, code->co_filename, lineno, code->co_name);
733}
734
/* Methods exposed on frame objects: clear() and __sizeof__(). */
static PyMethodDef frame_methods[] = {
    {"clear", (PyCFunction)frame_clear, METH_NOARGS,
     clear__doc__},
    {"__sizeof__", (PyCFunction)frame_sizeof, METH_NOARGS,
     sizeof__doc__},
    {NULL, NULL} /* sentinel */
};
742
/* The type object for frame objects.  Frames participate in GC
   (traverse/clear above) and are deallocated via frame_dealloc, which
   implements the zombie-frame and free-list recycling schemes. */
PyTypeObject PyFrame_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "frame",
    sizeof(PyFrameObject),
    sizeof(PyObject *),
    (destructor)frame_dealloc,                  /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)frame_repr,                       /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    PyObject_GenericSetAttr,                    /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)frame_traverse,               /* tp_traverse */
    (inquiry)frame_tp_clear,                    /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    0,                                          /* tp_iter */
    0,                                          /* tp_iternext */
    frame_methods,                              /* tp_methods */
    frame_memberlist,                           /* tp_members */
    frame_getsetlist,                           /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
};
777
778_Py_IDENTIFIER(__builtins__);
779
/* Obtain a frame object for code: reanimate the code object's zombie
   frame if one is cached, else pop a frame off the free list (growing
   it if too small), else allocate a fresh one.  On success the frame
   has f_valuestack set and all local slots NULLed; most other fields
   are left for the caller to initialize.  NULL on allocation failure. */
static inline PyFrameObject*
frame_alloc(PyCodeObject *code)
{
    PyFrameObject *f = code->co_zombieframe;
    if (f != NULL) {
        /* Zombie frames are already sized and partly initialized for
           exactly this code object. */
        code->co_zombieframe = NULL;
        _Py_NewReference((PyObject *)f);
        assert(f->f_code == code);
        return f;
    }

    Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars);
    Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars);
    Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
    struct _Py_frame_state *state = get_frame_state();
    if (state->free_list == NULL)
    {
        f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras);
        if (f == NULL) {
            return NULL;
        }
    }
    else {
#ifdef Py_DEBUG
        // frame_alloc() must not be called after _PyFrame_Fini()
        assert(state->numfree != -1);
#endif
        assert(state->numfree > 0);
        --state->numfree;
        f = state->free_list;
        state->free_list = state->free_list->f_back;
        /* A recycled frame may be too small for this code object. */
        if (Py_SIZE(f) < extras) {
            PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras);
            if (new_f == NULL) {
                PyObject_GC_Del(f);
                return NULL;
            }
            f = new_f;
        }
        _Py_NewReference((PyObject *)f);
    }

    /* The value stack starts right after the local/cell/free slots. */
    extras = code->co_nlocals + ncells + nfrees;
    f->f_valuestack = f->f_localsplus + extras;
    for (Py_ssize_t i=0; i < extras; i++) {
        f->f_localsplus[i] = NULL;
    }
    return f;
}
829
830
/* Create a new frame from a frame constructor, without GC-tracking it
   (callers that keep the frame must call _PyObject_GC_TRACK).
   Returns NULL on allocation failure. */
PyFrameObject* _Py_HOT_FUNCTION
_PyFrame_New_NoTrack(PyThreadState *tstate, PyFrameConstructor *con, PyObject *locals)
{
    assert(con != NULL);
    assert(con->fc_globals != NULL);
    assert(con->fc_builtins != NULL);
    assert(con->fc_code != NULL);
    assert(locals == NULL || PyMapping_Check(locals));

    PyFrameObject *f = frame_alloc((PyCodeObject *)con->fc_code);
    if (f == NULL) {
        return NULL;
    }

    /* The new frame sits on top of the caller's current frame. */
    f->f_back = (PyFrameObject*)Py_XNewRef(tstate->frame);
    f->f_code = (PyCodeObject *)Py_NewRef(con->fc_code);
    f->f_builtins = Py_NewRef(con->fc_builtins);
    f->f_globals = Py_NewRef(con->fc_globals);
    f->f_locals = Py_XNewRef(locals);
    // f_valuestack initialized by frame_alloc()
    f->f_trace = NULL;
    f->f_stackdepth = 0;
    f->f_trace_lines = 1;
    f->f_trace_opcodes = 0;
    f->f_gen = NULL;
    f->f_lasti = -1;
    f->f_lineno = 0;
    f->f_iblock = 0;
    f->f_state = FRAME_CREATED;
    // f_blockstack and f_localsplus initialized by frame_alloc()
    return f;
}
863
864/* Legacy API */
/* Legacy API */
/* Public frame constructor: builds a frame constructor descriptor from
   the code object and globals, then delegates to _PyFrame_New_NoTrack
   and GC-tracks the result.  Returns NULL with an exception set on
   failure. */
PyFrameObject*
PyFrame_New(PyThreadState *tstate, PyCodeObject *code,
            PyObject *globals, PyObject *locals)
{
    PyObject *builtins = _PyEval_BuiltinsFromGlobals(tstate, globals); // borrowed ref
    if (builtins == NULL) {
        return NULL;
    }
    PyFrameConstructor desc = {
        .fc_globals = globals,
        .fc_builtins = builtins,
        .fc_name = code->co_name,
        /* co_name is used for the qualname too — presumably the best
           available here since the code object carries no separate
           qualname; NOTE(review): confirm against the constructor's
           other callers. */
        .fc_qualname = code->co_name,
        .fc_code = (PyObject *)code,
        .fc_defaults = NULL,
        .fc_kwdefaults = NULL,
        .fc_closure = NULL
    };
    PyFrameObject *f = _PyFrame_New_NoTrack(tstate, &desc, locals);
    if (f) {
        _PyObject_GC_TRACK(f);
    }
    return f;
}
889
890
891/* Block management */
892
893void
894PyFrame_BlockSetup(PyFrameObject *f, int type, int handler, int level)
895{
896 PyTryBlock *b;
897 if (f->f_iblock >= CO_MAXBLOCKS) {
898 Py_FatalError("block stack overflow");
899 }
900 b = &f->f_blockstack[f->f_iblock++];
901 b->b_type = type;
902 b->b_level = level;
903 b->b_handler = handler;
904}
905
906PyTryBlock *
907PyFrame_BlockPop(PyFrameObject *f)
908{
909 PyTryBlock *b;
910 if (f->f_iblock <= 0) {
911 Py_FatalError("block stack underflow");
912 }
913 b = &f->f_blockstack[--f->f_iblock];
914 return b;
915}
916
917/* Convert between "fast" version of locals and dictionary version.
918
919 map and values are input arguments. map is a tuple of strings.
920 values is an array of PyObject*. At index i, map[i] is the name of
921 the variable with value values[i]. The function copies the first
922 nmap variable from map/values into dict. If values[i] is NULL,
923 the variable is deleted from dict.
924
925 If deref is true, then the values being copied are cell variables
926 and the value is extracted from the cell variable before being put
927 in dict.
928 */
929
/* Copy the first nmap name/value pairs into dict (full contract in the
   comment block above).  Returns 0 on success, -1 with an exception
   set on failure. */
static int
map_to_dict(PyObject *map, Py_ssize_t nmap, PyObject *dict, PyObject **values,
            int deref)
{
    Py_ssize_t j;
    assert(PyTuple_Check(map));
    assert(PyDict_Check(dict));
    assert(PyTuple_Size(map) >= nmap);
    for (j=0; j < nmap; j++) {
        PyObject *key = PyTuple_GET_ITEM(map, j);
        PyObject *value = values[j];
        assert(PyUnicode_Check(key));
        if (deref && value != NULL) {
            /* values[j] is a cell object; copy its contents, not it. */
            assert(PyCell_Check(value));
            value = PyCell_GET(value);
        }
        if (value == NULL) {
            /* Unbound variable: remove any stale entry from dict.  A
               missing key is fine; any other error propagates. */
            if (PyObject_DelItem(dict, key) != 0) {
                if (PyErr_ExceptionMatches(PyExc_KeyError))
                    PyErr_Clear();
                else
                    return -1;
            }
        }
        else {
            if (PyObject_SetItem(dict, key, value) != 0)
                return -1;
        }
    }
    return 0;
}
961
962/* Copy values from the "locals" dict into the fast locals.
963
964 dict is an input argument containing string keys representing
965 variables names and arbitrary PyObject* as values.
966
967 map and values are input arguments. map is a tuple of strings.
968 values is an array of PyObject*. At index i, map[i] is the name of
969 the variable with value values[i]. The function copies the first
970 nmap variable from map/values into dict. If values[i] is NULL,
971 the variable is deleted from dict.
972
973 If deref is true, then the values being copied are cell variables
974 and the value is extracted from the cell variable before being put
975 in dict. If clear is true, then variables in map but not in dict
976 are set to NULL in map; if clear is false, variables missing in
977 dict are ignored.
978
979 Exceptions raised while modifying the dict are silently ignored,
980 because there is no good way to report them.
981*/
982
/* Copy values for the first nmap names from dict back into the fast
   slots (full contract in the comment block above).  Errors are
   deliberately swallowed — there is no good way to report them. */
static void
dict_to_map(PyObject *map, Py_ssize_t nmap, PyObject *dict, PyObject **values,
            int deref, int clear)
{
    Py_ssize_t j;
    assert(PyTuple_Check(map));
    assert(PyDict_Check(dict));
    assert(PyTuple_Size(map) >= nmap);
    for (j=0; j < nmap; j++) {
        PyObject *key = PyTuple_GET_ITEM(map, j);
        PyObject *value = PyObject_GetItem(dict, key);
        assert(PyUnicode_Check(key));
        /* We only care about NULLs if clear is true. */
        if (value == NULL) {
            PyErr_Clear();
            if (!clear)
                continue;
        }
        if (deref) {
            /* values[j] is a cell: update its contents in place. */
            assert(PyCell_Check(values[j]));
            if (PyCell_GET(values[j]) != value) {
                if (PyCell_Set(values[j], value) < 0)
                    PyErr_Clear();
            }
        } else if (values[j] != value) {
            Py_XINCREF(value);
            Py_XSETREF(values[j], value);
        }
        Py_XDECREF(value);
    }
}
1014
/* Merge the frame's fast-local slots (including cell and free
   variables) into f->f_locals, creating the dict if needed.
   Returns 0 on success, -1 with an exception set on failure. */
int
PyFrame_FastToLocalsWithError(PyFrameObject *f)
{
    /* Merge fast locals into f->f_locals */
    PyObject *locals, *map;
    PyObject **fast;
    PyCodeObject *co;
    Py_ssize_t j;
    Py_ssize_t ncells, nfreevars;

    if (f == NULL) {
        PyErr_BadInternalCall();
        return -1;
    }
    locals = f->f_locals;
    if (locals == NULL) {
        locals = f->f_locals = PyDict_New();
        if (locals == NULL)
            return -1;
    }
    co = f->f_code;
    map = co->co_varnames;
    if (!PyTuple_Check(map)) {
        PyErr_Format(PyExc_SystemError,
                     "co_varnames must be a tuple, not %s",
                     Py_TYPE(map)->tp_name);
        return -1;
    }
    fast = f->f_localsplus;
    j = PyTuple_GET_SIZE(map);
    /* Defensive clamp: never copy more names than local slots. */
    if (j > co->co_nlocals)
        j = co->co_nlocals;
    if (co->co_nlocals) {
        if (map_to_dict(map, j, locals, fast, 0) < 0)
            return -1;
    }
    ncells = PyTuple_GET_SIZE(co->co_cellvars);
    nfreevars = PyTuple_GET_SIZE(co->co_freevars);
    if (ncells || nfreevars) {
        /* Cell slots live right after the plain locals; deref=1 copies
           the cells' contents rather than the cell objects. */
        if (map_to_dict(co->co_cellvars, ncells,
                        locals, fast + co->co_nlocals, 1))
            return -1;

        /* If the namespace is unoptimized, then one of the
           following cases applies:
           1. It does not contain free variables, because it
              uses import * or is a top-level namespace.
           2. It is a class namespace.
           We don't want to accidentally copy free variables
           into the locals dict used by the class.
        */
        if (co->co_flags & CO_OPTIMIZED) {
            if (map_to_dict(co->co_freevars, nfreevars,
                            locals, fast + co->co_nlocals + ncells, 1) < 0)
                return -1;
        }
    }
    return 0;
}
1074
1075void
1076PyFrame_FastToLocals(PyFrameObject *f)
1077{
1078 int res;
1079
1080 assert(!PyErr_Occurred());
1081
1082 res = PyFrame_FastToLocalsWithError(f);
1083 if (res < 0)
1084 PyErr_Clear();
1085}
1086
/* Write values from f->f_locals back into the fast-local slots (the
   inverse of PyFrame_FastToLocals).  If clear is true, names missing
   from the dict reset their slot to NULL.  Silently does nothing when
   the frame has no locals dict or no varnames tuple; errors during the
   copy are swallowed by dict_to_map. */
void
PyFrame_LocalsToFast(PyFrameObject *f, int clear)
{
    /* Merge f->f_locals into fast locals */
    PyObject *locals, *map;
    PyObject **fast;
    PyObject *error_type, *error_value, *error_traceback;
    PyCodeObject *co;
    Py_ssize_t j;
    Py_ssize_t ncells, nfreevars;
    if (f == NULL)
        return;
    locals = f->f_locals;
    co = f->f_code;
    map = co->co_varnames;
    if (locals == NULL)
        return;
    if (!PyTuple_Check(map))
        return;
    /* Preserve any pending exception across the copy: this function
       must be transparent to the caller's error state. */
    PyErr_Fetch(&error_type, &error_value, &error_traceback);
    fast = f->f_localsplus;
    j = PyTuple_GET_SIZE(map);
    /* Defensive clamp, mirroring PyFrame_FastToLocalsWithError. */
    if (j > co->co_nlocals)
        j = co->co_nlocals;
    if (co->co_nlocals)
        dict_to_map(co->co_varnames, j, locals, fast, 0, clear);
    ncells = PyTuple_GET_SIZE(co->co_cellvars);
    nfreevars = PyTuple_GET_SIZE(co->co_freevars);
    if (ncells || nfreevars) {
        dict_to_map(co->co_cellvars, ncells,
                    locals, fast + co->co_nlocals, 1, clear);
        /* Same test as in PyFrame_FastToLocals() above. */
        if (co->co_flags & CO_OPTIMIZED) {
            dict_to_map(co->co_freevars, nfreevars,
                        locals, fast + co->co_nlocals + ncells, 1,
                        clear);
        }
    }
    PyErr_Restore(error_type, error_value, error_traceback);
}
1127
1128/* Clear out the free list */
1129void
1130_PyFrame_ClearFreeList(PyInterpreterState *interp)
1131{
1132 struct _Py_frame_state *state = &interp->frame;
1133 while (state->free_list != NULL) {
1134 PyFrameObject *f = state->free_list;
1135 state->free_list = state->free_list->f_back;
1136 PyObject_GC_Del(f);
1137 --state->numfree;
1138 }
1139 assert(state->numfree == 0);
1140}
1141
/* Interpreter shutdown: drain the frame free list.  In debug builds,
   poison numfree to -1 so that any later frame_alloc()/frame_dealloc()
   call trips their asserts. */
void
_PyFrame_Fini(PyInterpreterState *interp)
{
    _PyFrame_ClearFreeList(interp);
#ifdef Py_DEBUG
    struct _Py_frame_state *state = &interp->frame;
    state->numfree = -1;
#endif
}
1151
1152/* Print summary info about the state of the optimized allocator */
/* Print summary info about the state of the optimized allocator */
void
_PyFrame_DebugMallocStats(FILE *out)
{
    struct _Py_frame_state *state = get_frame_state();
    _PyDebugAllocatorStats(out,
                           "free PyFrameObject",
                           state->numfree, sizeof(PyFrameObject));
}
1161
1162
1163PyCodeObject *
1164PyFrame_GetCode(PyFrameObject *frame)
1165{
1166 assert(frame != NULL);
1167 PyCodeObject *code = frame->f_code;
1168 assert(code != NULL);
1169 Py_INCREF(code);
1170 return code;
1171}
1172
1173
1174PyFrameObject*
1175PyFrame_GetBack(PyFrameObject *frame)
1176{
1177 assert(frame != NULL);
1178 PyFrameObject *back = frame->f_back;
1179 Py_XINCREF(back);
1180 return back;
1181}
1182
/* Resolve the builtins namespace for a globals dict: use its
   __builtins__ entry when present (unwrapping a module object to its
   dict), otherwise fall back to the thread state's builtins.  Returns
   a borrowed reference, or NULL with an exception set if the dict
   lookup itself failed. */
PyObject*
_PyEval_BuiltinsFromGlobals(PyThreadState *tstate, PyObject *globals)
{
    PyObject *builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__);
    if (builtins) {
        if (PyModule_Check(builtins)) {
            builtins = _PyModule_GetDict(builtins);
            assert(builtins != NULL);
        }
        return builtins;
    }
    /* Missing key vs. lookup error: only the latter propagates. */
    if (PyErr_Occurred()) {
        return NULL;
    }

    return _PyEval_GetBuiltins(tstate);
}
1200