Commit 48b6866

gh-144054: no deferred refcount for untracked (gh-144081)
This reverts gh-144055 and fixes the bug in a different way. Deferred reference counting relies on the object being tracked by the GC; otherwise, the object will live until interpreter shutdown. So take care not to enable deferred reference counting for objects that are untracked. Also, if a tuple has deferred reference counting enabled, don't untrack it.
Parent: 43bb630
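To make the invariant behind this fix concrete, here is a minimal standalone sketch in C. It deliberately does not use CPython's object layout or internal APIs; ToyObject, toy_maybe_untrack, and the boolean fields are hypothetical stand-ins that only model the rule stated above: an object with deferred reference counting enabled must stay tracked by the GC, because only the collector can account for the references that were not counted eagerly.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical model, not CPython's data structures: an object with some
 * references counted lazily ("deferred") and a flag saying whether the
 * cycle collector still visits it. */
typedef struct {
    int refcount;      /* eagerly counted references only */
    bool deferred;     /* some references are only discovered by the GC */
    bool gc_tracked;   /* the GC will visit this object during collection */
} ToyObject;

/* Analogue of the untrack guard: never untrack an object whose
 * deallocation depends on the GC visiting it. */
static bool toy_maybe_untrack(ToyObject *op)
{
    if (op->deferred) {
        /* deferred refcounting only works if the object is tracked */
        return false;
    }
    op->gc_tracked = false;
    return true;
}

int main(void)
{
    ToyObject tup = { .refcount = 1, .deferred = true, .gc_tracked = true };
    /* With the guard, the tuple stays tracked and can be freed by the GC;
     * without it, the object would linger until interpreter shutdown. */
    printf("untracked: %s\n", toy_maybe_untrack(&tup) ? "yes" : "no");
    return 0;
}

The real guard added to gc_maybe_untrack() in the diff below plays the same role, using _PyObject_HasDeferredRefcount() rather than a toy flag.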

2 files changed (+24, -36 lines)

Python/gc_free_threading.c

Lines changed: 23 additions & 35 deletions
@@ -308,18 +308,17 @@ disable_deferred_refcounting(PyObject *op)
         // should also be disabled when we turn off deferred refcounting.
         _PyObject_DisablePerThreadRefcounting(op);
     }
-    if (_PyObject_GC_IS_TRACKED(op)) {
-        // Generators and frame objects may contain deferred references to other
-        // objects. If the pointed-to objects are part of cyclic trash, we may
-        // have disabled deferred refcounting on them and need to ensure that we
-        // use strong references, in case the generator or frame object is
-        // resurrected by a finalizer.
-        if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
-            frame_disable_deferred_refcounting(&((PyGenObject *)op)->gi_iframe);
-        }
-        else if (PyFrame_Check(op)) {
-            frame_disable_deferred_refcounting(((PyFrameObject *)op)->f_frame);
-        }
+
+    // Generators and frame objects may contain deferred references to other
+    // objects. If the pointed-to objects are part of cyclic trash, we may
+    // have disabled deferred refcounting on them and need to ensure that we
+    // use strong references, in case the generator or frame object is
+    // resurrected by a finalizer.
+    if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
+        frame_disable_deferred_refcounting(&((PyGenObject *)op)->gi_iframe);
+    }
+    else if (PyFrame_Check(op)) {
+        frame_disable_deferred_refcounting(((PyFrameObject *)op)->f_frame);
     }
 }

@@ -507,6 +506,10 @@ gc_visit_thread_stacks(PyInterpreterState *interp, struct collection_state *stat
 static bool
 gc_maybe_untrack(PyObject *op)
 {
+    if (_PyObject_HasDeferredRefcount(op)) {
+        // deferred refcounting only works if the object is tracked
+        return false;
+    }
     // Currently we only check for tuples containing only non-GC objects. In
     // theory we could check other immutable objects that contain references
     // to non-GC objects.
@@ -1019,7 +1022,7 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area,
     }
     _PyObject_ASSERT(op, refcount >= 0);

-    if (refcount > 0 && !_PyObject_HasDeferredRefcount(op)) {
+    if (refcount > 0) {
         if (gc_maybe_untrack(op)) {
             gc_restore_refs(op);
             return true;
@@ -1241,30 +1244,19 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area,
         return true;
     }

+    if (state->reason == _Py_GC_REASON_SHUTDOWN) {
+        // Disable deferred refcounting for reachable objects as well during
+        // interpreter shutdown. This ensures that these objects are collected
+        // immediately when their last reference is removed.
+        disable_deferred_refcounting(op);
+    }
+
     // object is reachable, restore `ob_tid`; we're done with these objects
     gc_restore_tid(op);
     gc_clear_alive(op);
     return true;
 }

-// Disable deferred refcounting for reachable objects during interpreter
-// shutdown. This ensures that these objects are collected immediately when
-// their last reference is removed. This needs to consider both tracked and
-// untracked GC objects, since either might have deferred refcounts enabled.
-static bool
-scan_heap_disable_deferred(const mi_heap_t *heap, const mi_heap_area_t *area,
-                           void *block, size_t block_size, void *args)
-{
-    PyObject *op = op_from_block_all_gc(block, args);
-    if (op == NULL) {
-        return true;
-    }
-    if (!_Py_IsImmortal(op)) {
-        disable_deferred_refcounting(op);
-    }
-    return true;
-}
-
 static int
 move_legacy_finalizer_reachable(struct collection_state *state);

@@ -1499,10 +1491,6 @@ deduce_unreachable_heap(PyInterpreterState *interp,
     // Restores ob_tid for reachable objects.
     gc_visit_heaps(interp, &scan_heap_visitor, &state->base);

-    if (state->reason == _Py_GC_REASON_SHUTDOWN) {
-        gc_visit_heaps(interp, &scan_heap_disable_deferred, &state->base);
-    }
-
     if (state->legacy_finalizers.head) {
         // There may be objects reachable from legacy finalizers that are in
         // the unreachable set. We need to mark them as reachable.

Python/specialize.c

Lines changed: 1 addition & 1 deletion
@@ -362,7 +362,7 @@ static uint32_t function_get_version(PyObject *o, int opcode);
 static void
 maybe_enable_deferred_ref_count(PyObject *op)
 {
-    if (!_Py_IsOwnedByCurrentThread(op)) {
+    if (!_Py_IsOwnedByCurrentThread(op) && _PyObject_GC_IS_TRACKED(op)) {
         // For module level variables that are heavily used from multiple
         // threads, deferred reference counting provides good scaling
         // benefits. The downside is that the object will only be deallocated
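The Python/specialize.c hunk above guards the enable side of the same invariant. A complementary sketch, again with hypothetical names rather than CPython's real API, of turning on deferred reference counting only when the object is shared across threads and is tracked by the GC:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical model of the enable-side guard; the field names are
 * stand-ins for _Py_IsOwnedByCurrentThread() and _PyObject_GC_IS_TRACKED(). */
typedef struct {
    bool owned_by_current_thread;
    bool gc_tracked;
    bool deferred;
} ToyObject;

static void toy_maybe_enable_deferred(ToyObject *op)
{
    /* Only defer refcounting for objects the GC will actually visit. */
    if (!op->owned_by_current_thread && op->gc_tracked) {
        op->deferred = true;
    }
}

int main(void)
{
    ToyObject untracked = { .owned_by_current_thread = false, .gc_tracked = false };
    ToyObject tracked = { .owned_by_current_thread = false, .gc_tracked = true };
    toy_maybe_enable_deferred(&untracked);
    toy_maybe_enable_deferred(&tracked);
    printf("untracked deferred: %d, tracked deferred: %d\n",
           (int)untracked.deferred, (int)tracked.deferred);  /* prints 0, 1 */
    return 0;
}

The actual condition in maybe_enable_deferred_ref_count() combines _Py_IsOwnedByCurrentThread() and _PyObject_GC_IS_TRACKED() in the same way.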
