about | summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
author    John Hawthorn <john@hawthorn.email>  2020-08-25 23:42:15 -0700
committer Aaron Patterson <aaron.patterson@gmail.com>  2020-09-02 14:54:29 -0700
commit    0b81a484f3453082d28a48968a063fd907daa5b5 (patch)
tree      8de85d3e490af9f92e623ab3fca6a29b151f9475
parent    eada6350332155972f19bad52bd8621f607520a2 (diff)
download  ruby-0b81a484f3453082d28a48968a063fd907daa5b5.tar.gz
Initialize new T_OBJECT as ROBJECT_EMBED
Previously, when an object is first initialized, ROBJECT_EMBED isn't set. This means that for brand new objects, ROBJECT_NUMIV(obj) is 0 and ROBJECT_IV_INDEX_TBL(obj) is NULL. Previously, this combination meant that the inline cache would never be initialized when setting an ivar on an object for the first time since iv_index_tbl was NULL, and if it were it would never be used because ROBJECT_NUMIV was 0. Both cases always fell through to the generic rb_ivar_set which would then set the ROBJECT_EMBED flag and initialize the ivar array. This commit changes rb_class_allocate_instance to set the ROBJECT_EMBED flag on the object initially and to initialize all members of the embedded array to Qundef. This allows the inline cache to be set correctly on first use and to be used on future uses. This moves rb_class_allocate_instance to gc.c, so that it has access to newobj_of. This seems appropriate given that there are other allocating methods in this file (ex. rb_data_object_wrap, rb_imemo_new).
-rw-r--r--  gc.c            7
-rw-r--r--  internal/gc.h   1
-rw-r--r--  object.c        7
3 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/gc.c b/gc.c
index 05d75e16e2..9b453cb6ed 100644
--- a/gc.c
+++ b/gc.c
@@ -2374,6 +2374,13 @@ rb_imemo_new_debug(enum imemo_type type, VALUE v1, VALUE v2, VALUE v3, VALUE v0,
#endif
VALUE
+rb_class_allocate_instance(VALUE klass)
+{
+ VALUE flags = T_OBJECT | ROBJECT_EMBED;
+ return newobj_of(klass, flags, Qundef, Qundef, Qundef, RGENGC_WB_PROTECTED_OBJECT);
+}
+
+VALUE
rb_data_object_wrap(VALUE klass, void *datap, RUBY_DATA_FUNC dmark, RUBY_DATA_FUNC dfree)
{
RUBY_ASSERT_ALWAYS(dfree != (RUBY_DATA_FUNC)1);
diff --git a/internal/gc.h b/internal/gc.h
index 7258fed915..490f42e06a 100644
--- a/internal/gc.h
+++ b/internal/gc.h
@@ -78,6 +78,7 @@ RUBY_ATTR_MALLOC void *rb_xcalloc_mul_add_mul(size_t, size_t, size_t, size_t);
static inline void *ruby_sized_xrealloc_inlined(void *ptr, size_t new_size, size_t old_size) RUBY_ATTR_RETURNS_NONNULL RUBY_ATTR_ALLOC_SIZE((2));
static inline void *ruby_sized_xrealloc2_inlined(void *ptr, size_t new_count, size_t elemsiz, size_t old_count) RUBY_ATTR_RETURNS_NONNULL RUBY_ATTR_ALLOC_SIZE((2, 3));
static inline void ruby_sized_xfree_inlined(void *ptr, size_t size);
+VALUE rb_class_allocate_instance(VALUE klass);
RUBY_SYMBOL_EXPORT_BEGIN
/* gc.c (export) */
diff --git a/object.c b/object.c
index 08fec850d3..a7fb9dd3a0 100644
--- a/object.c
+++ b/object.c
@@ -2102,13 +2102,6 @@ rb_obj_alloc(VALUE klass)
return rb_class_alloc(klass);
}
-static VALUE
-rb_class_allocate_instance(VALUE klass)
-{
- NEWOBJ_OF(obj, struct RObject, klass, T_OBJECT | (RGENGC_WB_PROTECTED_OBJECT ? FL_WB_PROTECTED : 0));
- return (VALUE)obj;
-}
-
/*
* call-seq:
* class.new(args, ...) -> obj