-rw-r--r--  gc.c                       17
-rw-r--r--  internal/gc.h              11
-rw-r--r--  test/ruby/test_weakmap.rb  12
-rw-r--r--  weakmap.c                   6
4 files changed, 32 insertions, 14 deletions
diff --git a/gc.c b/gc.c
index c8f570940c..c71fdd10ed 100644
--- a/gc.c
+++ b/gc.c
@@ -9799,15 +9799,6 @@ gc_is_moveable_obj(rb_objspace_t *objspace, VALUE obj)
return FALSE;
}
-/* Used in places that could malloc, which can cause the GC to run. We need to
- * temporarily disable the GC to allow the malloc to happen. */
-#define COULD_MALLOC_REGION_START() \
- GC_ASSERT(during_gc); \
- VALUE _already_disabled = rb_gc_disable_no_rest(); \
-
-#define COULD_MALLOC_REGION_END() \
- if (_already_disabled == Qfalse) rb_objspace_gc_enable(objspace);
-
static VALUE
gc_move(rb_objspace_t *objspace, VALUE scan, VALUE free, size_t src_slot_size, size_t slot_size)
{
@@ -9840,11 +9831,11 @@ gc_move(rb_objspace_t *objspace, VALUE scan, VALUE free, size_t src_slot_size, s
if (FL_TEST((VALUE)src, FL_EXIVAR)) {
/* Resizing the st table could cause a malloc */
- COULD_MALLOC_REGION_START();
+ DURING_GC_COULD_MALLOC_REGION_START();
{
rb_mv_generic_ivar((VALUE)src, (VALUE)dest);
}
- COULD_MALLOC_REGION_END();
+ DURING_GC_COULD_MALLOC_REGION_END();
}
st_data_t srcid = (st_data_t)src, id;
@@ -9854,12 +9845,12 @@ gc_move(rb_objspace_t *objspace, VALUE scan, VALUE free, size_t src_slot_size, s
if (st_lookup(objspace->obj_to_id_tbl, srcid, &id)) {
gc_report(4, objspace, "Moving object with seen id: %p -> %p\n", (void *)src, (void *)dest);
/* Resizing the st table could cause a malloc */
- COULD_MALLOC_REGION_START();
+ DURING_GC_COULD_MALLOC_REGION_START();
{
st_delete(objspace->obj_to_id_tbl, &srcid, 0);
st_insert(objspace->obj_to_id_tbl, (st_data_t)dest, id);
}
- COULD_MALLOC_REGION_END();
+ DURING_GC_COULD_MALLOC_REGION_END();
}
/* Move the object */
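Both gc_move() hunks above wrap the same disable/re-enable sequence. As a rough illustration only (not part of the patch; the helper name move_obj_id_entry is hypothetical and the usual gc.c internals are assumed in scope), this is roughly what the bracketed obj_to_id move amounts to once the macros are expanded: rb_gc_disable_no_rest() reports whether GC was already disabled, and GC is re-enabled afterwards only if it had been enabled going in.

/* Sketch only: the expanded shape of the wrapped obj_to_id move. */
static void
move_obj_id_entry(rb_objspace_t *objspace, st_data_t srcid, st_data_t dest, st_data_t id)
{
    assert(rb_during_gc());                            /* only legal while the GC is running */
    VALUE already_disabled = rb_gc_disable_no_rest();  /* Qtrue if GC was already disabled */
    st_delete(objspace->obj_to_id_tbl, &srcid, 0);     /* may rehash */
    st_insert(objspace->obj_to_id_tbl, dest, id);      /* may malloc on resize */
    if (already_disabled == Qfalse) rb_gc_enable();    /* restore only if we turned it off */
}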
diff --git a/internal/gc.h b/internal/gc.h
index 28b82f4196..f8f88a41cb 100644
--- a/internal/gc.h
+++ b/internal/gc.h
@@ -189,6 +189,17 @@ struct rb_objspace; /* in vm_core.h */
# define SIZE_POOL_COUNT 5
#endif
+/* Used in places that could malloc during GC, which can cause the GC to run. We
+ * need to temporarily disable the GC to allow the malloc to happen.
+ * Allocating memory during GC is a bad idea, so use this only when absolutely
+ * necessary. */
+#define DURING_GC_COULD_MALLOC_REGION_START() \
+ assert(rb_during_gc()); \
+ VALUE _already_disabled = rb_gc_disable_no_rest()
+
+#define DURING_GC_COULD_MALLOC_REGION_END() \
+ if (_already_disabled == Qfalse) rb_gc_enable()
+
typedef struct ractor_newobj_size_pool_cache {
struct RVALUE *freelist;
struct heap_page *using_page;
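With the macros now exported from internal/gc.h, any GC-time code that might allocate can bracket the allocation the same way. A minimal usage sketch (the function record_moved is hypothetical, not from the patch; it assumes internal/gc.h and st.h are included):

/* Hypothetical example: record a moved object in a side table while the
 * GC is running.  st_insert() can grow the table, i.e. call malloc, so
 * the insert is bracketed by the new macro pair. */
static void
record_moved(st_table *moved_tbl, st_data_t old_addr, st_data_t new_addr)
{
    DURING_GC_COULD_MALLOC_REGION_START();
    {
        st_insert(moved_tbl, old_addr, new_addr); /* may malloc on resize */
    }
    DURING_GC_COULD_MALLOC_REGION_END();
}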
diff --git a/test/ruby/test_weakmap.rb b/test/ruby/test_weakmap.rb
index b42787ad5e..a30004bce3 100644
--- a/test/ruby/test_weakmap.rb
+++ b/test/ruby/test_weakmap.rb
@@ -223,6 +223,18 @@ class TestWeakMap < Test::Unit::TestCase
assert_equal(val, wm[key])
end;
+
+ assert_separately(["-W0"], <<-'end;')
+ wm = ObjectSpace::WeakMap.new
+
+ ary = 10_000.times.map do
+ o = Object.new
+ wm[o] = 1
+ o
+ end
+
+ GC.verify_compaction_references(expand_heap: true, toward: :empty)
+ end;
end
def test_replaced_values_bug_19531
diff --git a/weakmap.c b/weakmap.c
index d79f5b3f94..3c7fd43f9b 100644
--- a/weakmap.c
+++ b/weakmap.c
@@ -122,7 +122,11 @@ wmap_compact_table_i(st_data_t key, st_data_t val, st_data_t data)
if (key_obj != new_key_obj) {
*(VALUE *)key = new_key_obj;
- st_insert(table, key, val);
+ DURING_GC_COULD_MALLOC_REGION_START();
+ {
+ st_insert(table, key, val);
+ }
+ DURING_GC_COULD_MALLOC_REGION_END();
return ST_DELETE;
}
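The weakmap change follows the same shape during compaction: rb_gc_location() yields an object's new address, and re-inserting the entry under that address can grow the table, so the insert must not re-enter the GC. A condensed sketch of the pattern (hypothetical names, not the exact weakmap.c code):

/* Sketch of a compaction callback that rehashes a moved key; the
 * st_insert() that may allocate is wrapped in the macro pair. */
static int
compact_entry_i(st_data_t key, st_data_t val, st_data_t data)
{
    st_table *table = (st_table *)data;
    VALUE old_obj = (VALUE)key;
    VALUE new_obj = rb_gc_location(old_obj);       /* new address after compaction */

    if (old_obj == new_obj) return ST_CONTINUE;

    DURING_GC_COULD_MALLOC_REGION_START();
    {
        st_insert(table, (st_data_t)new_obj, val); /* may malloc on resize */
    }
    DURING_GC_COULD_MALLOC_REGION_END();
    return ST_DELETE;                              /* drop the entry keyed by the stale address */
}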