author     卜部昌平 <shyouhei@ruby-lang.org>  2019-10-03 12:26:41 +0900
committer  卜部昌平 <shyouhei@ruby-lang.org>  2019-10-03 12:45:24 +0900
commit     eb92159d72fc711387f7e17ffbaca1678f23fd47 (patch)
tree       13c5177b80fbd50c7113eee5aca5158652f24f1b /class.c
parent     ef697388becedf36966a2edcdcf88baca342b9e2 (diff)
download   ruby-eb92159d72fc711387f7e17ffbaca1678f23fd47.tar.gz
Revert https://github.com/ruby/ruby/pull/2486
This reverts commits: 10d6a3aca7 8ba48c1b85 fba8627dc1 dd883de5ba 6c6a25feca 167e6b48f1 7cb96d41a5 3207979278 595b3c4fdd 1521f7cf89 c11c5e69ac cf33608203 3632a812c0 f56506be0d 86427a3219.

The reason for the revert is that we observe an ABA problem around the inline method cache. When a cache misses, we search for a method entry, and if the entry found is identical to what was cached before, we reuse the cache. But the commits reverted here introduced situations where a method entry is freed and the identical memory region is then used for another method entry. An inline method cache cannot detect that ABA.

Here is code that reproduces such a situation:

```ruby
require 'prime'

class << Integer
  alias org_sqrt sqrt
  def sqrt(n)
    raise
  end

  GC.stress = true
  Prime.each(7*37){} rescue nil # <- Here we populate CC
  class << Object.new; end

  # This adjacent remove-then-alias maneuver
  # frees a method entry, then immediately
  # reuses it for another.
  remove_method :sqrt
  alias sqrt org_sqrt
end

Prime.each(7*37).to_a # <- SEGV
```
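For illustration, here is a minimal, self-contained sketch of why identity-based cache validation cannot distinguish a freed entry from a new one allocated at the same address. The struct and field names below are hypothetical simplifications, not the actual rb_method_entry_t or call-cache layouts.

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical, simplified stand-ins for a method entry and an inline
 * method cache; the real interpreter structures are more involved. */
struct method_entry { const char *label; };
struct inline_cache { uintptr_t cached_me; };

int
main(void)
{
    struct method_entry *me = malloc(sizeof *me);
    me->label = "sqrt (redefined)";

    /* The cache remembers the resolved entry by its identity (address). */
    struct inline_cache ic = { (uintptr_t)me };

    /* remove_method frees the entry ... */
    free(me);

    /* ... and the immediately following alias may allocate a brand-new
     * entry in the very same memory region. */
    struct method_entry *reused = malloc(sizeof *reused);
    reused->label = "sqrt (original)";

    if ((uintptr_t)reused == ic.cached_me) {
        /* ABA: the identities compare equal, so a validity check based on
         * identity alone would treat the stale cache entry as a hit. */
        printf("same address reused; the cache cannot tell the entries apart\n");
    }

    free(reused);
    return 0;
}
```

Whether malloc actually hands back the same region is of course not guaranteed in this toy program; in the interpreter the reuse happens through its own method-entry allocation, which is what the adjacent remove_method/alias in the Ruby snippet above provokes.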
Diffstat (limited to 'class.c')
-rw-r--r--  class.c  45
1 file changed, 12 insertions(+), 33 deletions(-)
diff --git a/class.c b/class.c
index aaf754146d..b4aeb59e25 100644
--- a/class.c
+++ b/class.c
@@ -956,41 +956,25 @@ include_modules_at(const VALUE klass, VALUE c, VALUE module, int search_super)
return method_changed;
}
-typedef struct tuple {
- struct RClass *klass;
- struct RClass *origin;
-} tuple;
-
-static enum rb_id_table_iterator_result
-inject_refined_method(ID *key, VALUE *value, void *data, int _)
-{
- const tuple *ptr = data;
- const rb_method_entry_t *me = *(const rb_method_entry_t **) value;
- const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
- const rb_method_entry_t *new_me =
- rb_method_entry_from_template(
- me, &(rb_method_refined_t) {
- .orig_me = NULL,
- .owner = me->def->body.refined.owner, });
- rb_id_table_insert(RCLASS_M_TBL(ptr->klass), *key, (VALUE)new_me);
- RB_OBJ_WRITTEN(ptr->klass, Qundef, new_me);
- *value = (VALUE)rb_method_entry_clone(orig_me);
- RB_OBJ_WRITTEN(ptr->origin, Qundef, orig_me);
- return ID_TABLE_CONTINUE;
-}
-
static enum rb_id_table_iterator_result
move_refined_method(ID key, VALUE value, void *data)
{
- const tuple *ptr = data;
- const rb_method_entry_t *me = (const rb_method_entry_t *) value;
+ rb_method_entry_t *me = (rb_method_entry_t *) value;
+ VALUE klass = (VALUE)data;
+ struct rb_id_table *tbl = RCLASS_M_TBL(klass);
if (me->def->type == VM_METHOD_TYPE_REFINED) {
if (me->def->body.refined.orig_me) {
- return ID_TABLE_REPLACE;
+ const rb_method_entry_t *orig_me = me->def->body.refined.orig_me, *new_me;
+ RB_OBJ_WRITE(me, &me->def->body.refined.orig_me, NULL);
+ new_me = rb_method_entry_clone(me);
+ rb_id_table_insert(tbl, key, (VALUE)new_me);
+ RB_OBJ_WRITTEN(klass, Qundef, new_me);
+ rb_method_entry_copy(me, orig_me);
+ return ID_TABLE_CONTINUE;
}
else {
- rb_id_table_insert(RCLASS_M_TBL(ptr->klass), key, (VALUE)me);
+ rb_id_table_insert(tbl, key, (VALUE)me);
return ID_TABLE_DELETE;
}
}
@@ -1016,12 +1000,7 @@ rb_prepend_module(VALUE klass, VALUE module)
RCLASS_SET_ORIGIN(klass, origin);
RCLASS_M_TBL(origin) = RCLASS_M_TBL(klass);
RCLASS_M_TBL_INIT(klass);
- rb_id_table_foreach_with_replace_with_key(
- RCLASS_M_TBL(origin),
- move_refined_method,
- inject_refined_method,
- &(tuple) { RCLASS(klass), RCLASS(origin), },
- true);
+ rb_id_table_foreach(RCLASS_M_TBL(origin), move_refined_method, (void *)klass);
}
changed = include_modules_at(klass, klass, module, FALSE);
if (changed < 0)