From f8c6d1c106648f53874c9b0ebee1ae7382766dd0 Mon Sep 17 00:00:00 2001
From: Keno Fischer <keno@juliacomputing.com>
Date: Thu, 31 Oct 2024 13:21:46 -0400
Subject: [PATCH] Fix some missing write barriers and add some helpful comments
 (#56396)

I was trying some performance optimization which didn't end up working out,
but in the process I found two missing write barriers and added some helpful
comments for future readers, so that part is probably still useful.
---
 base/runtime_internals.jl |  2 +-
 src/gc-interface.h        |  5 +++++
 src/julia.h               |  6 +++---
 src/module.c              | 10 +++++++---
 4 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/base/runtime_internals.jl b/base/runtime_internals.jl
index ab867f8fcae6d..dd526a24d6494 100644
--- a/base/runtime_internals.jl
+++ b/base/runtime_internals.jl
@@ -248,7 +248,7 @@ binding_kind(m::Module, s::Symbol) = binding_kind(lookup_binding_partition(tls_w
     delete_binding(mod::Module, sym::Symbol)
 
 Force the binding `mod.sym` to be undefined again, allowing it be redefined.
-Note that this operation is very expensive, requirinig a full scan of all code in the system,
+Note that this operation is very expensive, requiring a full scan of all code in the system,
 as well as potential recompilation of any methods that (may) have used binding
 information.
 
diff --git a/src/gc-interface.h b/src/gc-interface.h
index 0b5df17a3b8c5..eb6687d52d9ab 100644
--- a/src/gc-interface.h
+++ b/src/gc-interface.h
@@ -192,6 +192,11 @@ JL_DLLEXPORT void *jl_gc_perm_alloc(size_t sz, int zero, unsigned align,
 // object header must be included in the object size. This object is allocated in an
 // immortal region that is never swept. The second parameter specifies the type of the
 // object being allocated and will be used to set the object header.
+//
+// !!! warning: Because permanently allocated objects are not swept, the GC will not
+//              necessarily mark any objects that would have ordinarily been rooted by
+//              the allocated object. All objects stored in fields of this object
+//              must be either permanently allocated or have other roots.
 struct _jl_value_t *jl_gc_permobj(size_t sz, void *ty) JL_NOTSAFEPOINT;
 
 // ========================================================================= //
diff --git a/src/julia.h b/src/julia.h
index ffd669cd828a4..a710192d5756c 100644
--- a/src/julia.h
+++ b/src/julia.h
@@ -1138,15 +1138,15 @@ JL_DLLEXPORT jl_weakref_t *jl_gc_new_weakref(jl_value_t *value);
 STATIC_INLINE void jl_gc_wb(const void *parent, const void *ptr) JL_NOTSAFEPOINT
 {
     // parent and ptr isa jl_value_t*
-    if (__unlikely(jl_astaggedvalue(parent)->bits.gc == 3 && // parent is old and not in remset
-                   (jl_astaggedvalue(ptr)->bits.gc & 1) == 0)) // ptr is young
+    if (__unlikely(jl_astaggedvalue(parent)->bits.gc == 3 /* GC_OLD_MARKED */ && // parent is old and not in remset
+                   (jl_astaggedvalue(ptr)->bits.gc & 1 /* GC_MARKED */) == 0)) // ptr is young
         jl_gc_queue_root((jl_value_t*)parent);
 }
 
 STATIC_INLINE void jl_gc_wb_back(const void *ptr) JL_NOTSAFEPOINT // ptr isa jl_value_t*
 {
     // if ptr is old
-    if (__unlikely(jl_astaggedvalue(ptr)->bits.gc == 3)) {
+    if (__unlikely(jl_astaggedvalue(ptr)->bits.gc == 3 /* GC_OLD_MARKED */)) {
         jl_gc_queue_root((jl_value_t*)ptr);
     }
 }
diff --git a/src/module.c b/src/module.c
index 9b4d26cc7b000..bdacd487e978d 100644
--- a/src/module.c
+++ b/src/module.c
@@ -37,6 +37,7 @@ jl_binding_partition_t *jl_get_binding_partition(jl_binding_t *b, size_t world)
     _Atomic(jl_binding_partition_t *)*insert = &b->partitions;
     jl_binding_partition_t *bpart = jl_atomic_load_relaxed(insert);
     size_t max_world = (size_t)-1;
+    jl_binding_partition_t *new_bpart = NULL;
     while (1) {
         while (bpart && world < bpart->min_world) {
             insert = &bpart->next;
@@ -46,11 +47,11 @@ jl_binding_partition_t *jl_get_binding_partition(jl_binding_t *b, size_t world)
         }
         if (bpart && world <= jl_atomic_load_relaxed(&bpart->max_world))
             return bpart;
-        jl_binding_partition_t *new_bpart = new_binding_partition();
+        if (!new_bpart)
+            new_bpart = new_binding_partition();
         jl_atomic_store_relaxed(&new_bpart->next, bpart);
         jl_gc_wb_fresh(new_bpart, bpart);
-        if (bpart)
-            new_bpart->min_world = jl_atomic_load_relaxed(&bpart->max_world) + 1;
+        new_bpart->min_world = bpart ? jl_atomic_load_relaxed(&bpart->max_world) + 1 : 0;
         jl_atomic_store_relaxed(&new_bpart->max_world, max_world);
         if (jl_atomic_cmpswap(insert, &bpart, new_bpart)) {
             jl_gc_wb(parent, new_bpart);
@@ -548,6 +549,7 @@ static jl_binding_t *jl_resolve_owner(jl_binding_t *b/*optional*/, jl_module_t *
             // changing, for example if this var is assigned to later.
             if (!jl_atomic_cmpswap(&bpart->restriction, &pku, encode_restriction((jl_value_t*)b2, BINDING_KIND_IMPLICIT)))
                 goto retry;
+            jl_gc_wb(bpart, b2);
             if (b2->deprecated) {
                 b->deprecated = 1; // we will warn about this below, but we might want to warn at the use sites too
                 if (m != jl_main_module && m != jl_base_module &&
@@ -740,6 +742,7 @@ static void module_import_(jl_module_t *to, jl_module_t *from, jl_sym_t *asname,
                 jl_ptr_kind_union_t new_pku = encode_restriction((jl_value_t*)b, (explici != 0) ? BINDING_KIND_IMPORTED : BINDING_KIND_EXPLICIT);
                 if (!jl_atomic_cmpswap(&btopart->restriction, &bto_pku, new_pku))
                     goto retry;
+                jl_gc_wb(btopart, b);
                 bto->deprecated |= b->deprecated; // we already warned about this above, but we might want to warn at the use sites too
             }
             else {
@@ -749,6 +752,7 @@ static void module_import_(jl_module_t *to, jl_module_t *from, jl_sym_t *asname,
                 jl_ptr_kind_union_t new_pku = encode_restriction(decode_restriction_value(bto_pku), (explici != 0) ? BINDING_KIND_IMPORTED : BINDING_KIND_EXPLICIT);
                 if (!jl_atomic_cmpswap(&btopart->restriction, &bto_pku, new_pku))
                     goto retry;
+                // No wb, because the value is unchanged
             }
         }
         else if (jl_bkind_is_some_import(decode_restriction_kind(bto_pku))) {