diff options
author | Koichi Sasada <ko1@atdot.net> | 2020-01-08 16:14:01 +0900 |
---|---|---|
committer | Koichi Sasada <ko1@atdot.net> | 2020-02-22 09:58:59 +0900 |
commit | b9007b6c548f91e88fd3f2ffa23de740431fa969 (patch) | |
tree | 1746393d1c5f704e8dc7e0a458198264062273bf /vm_method.c | |
parent | f2286925f08406bc857f7b03ad6779a5d61443ae (diff) | |
download | ruby-b9007b6c548f91e88fd3f2ffa23de740431fa969.tar.gz |
Introduce disposable call-cache.
This patch contains several ideas:
(1) Disposable inline method cache (IMC) for race-free inline method cache
* Making call-cache (CC) as a RVALUE (GC target object) and allocate new
CC on cache miss.
* This technique allows race-free access from parallel processing
elements like RCU.
(2) Introduce per-Class method cache (pCMC)
* Instead of fixed-size global method cache (GMC), pCMC allows flexible
cache size.
* Caching CCs reduces CC allocation and allows sharing a CC's fast-path
between call-sites with the same call-info (CI).
(3) Invalidate an inline method cache by invalidating corresponding method
entries (MEs)
* Instead of using class serials, we set "invalidated" flag for method
entry itself to represent cache invalidation.
* Compared with using class serials, the impact of method modification
(add/overwrite/delete) is small.
* Updating class serials invalidates all method caches of the class and
its sub-classes.
* The proposed approach invalidates the method cache of only one ME.
See [Feature #16614] for more details.
Diffstat (limited to 'vm_method.c')
-rw-r--r-- | vm_method.c | 630 |
1 files changed, 392 insertions, 238 deletions
diff --git a/vm_method.c b/vm_method.c index 4a82cc3e82..123298c6af 100644 --- a/vm_method.c +++ b/vm_method.c @@ -6,25 +6,6 @@ #define METHOD_DEBUG 0 -#if OPT_GLOBAL_METHOD_CACHE -#ifndef GLOBAL_METHOD_CACHE_SIZE -#define GLOBAL_METHOD_CACHE_SIZE 0x800 -#endif -#define LSB_ONLY(x) ((x) & ~((x) - 1)) -#define POWER_OF_2_P(x) ((x) == LSB_ONLY(x)) -#if !POWER_OF_2_P(GLOBAL_METHOD_CACHE_SIZE) -# error GLOBAL_METHOD_CACHE_SIZE must be power of 2 -#endif -#ifndef GLOBAL_METHOD_CACHE_MASK -#define GLOBAL_METHOD_CACHE_MASK (GLOBAL_METHOD_CACHE_SIZE-1) -#endif - -#define GLOBAL_METHOD_CACHE_KEY(c,m) ((((c)>>3)^(m))&(global_method_cache.mask)) -#define GLOBAL_METHOD_CACHE(c,m) (global_method_cache.entries + GLOBAL_METHOD_CACHE_KEY(c,m)) -#else -#define GLOBAL_METHOD_CACHE(c,m) (rb_bug("global method cache disabled improperly"), NULL) -#endif - static int vm_redefinition_check_flag(VALUE klass); static void rb_vm_check_redefinition_opt_method(const rb_method_entry_t *me, VALUE klass); @@ -37,50 +18,108 @@ static void rb_vm_check_redefinition_opt_method(const rb_method_entry_t *me, VAL #define singleton_undefined idSingleton_method_undefined #define attached id__attached__ -struct cache_entry { - rb_serial_t method_state; - rb_serial_t class_serial; - ID mid; - rb_method_entry_t* me; - VALUE defined_class; -}; - -#if OPT_GLOBAL_METHOD_CACHE -static struct { - unsigned int size; - unsigned int mask; - struct cache_entry *entries; -} global_method_cache = { - GLOBAL_METHOD_CACHE_SIZE, - GLOBAL_METHOD_CACHE_MASK, -}; -#endif - #define ruby_running (GET_VM()->running) /* int ruby_running = 0; */ -static void -rb_class_clear_method_cache(VALUE klass, VALUE arg) +static enum rb_id_table_iterator_result +vm_ccs_dump_i(ID mid, VALUE val, void *data) { - rb_serial_t old_serial = *(rb_serial_t *)arg; - if (RCLASS_SERIAL(klass) > old_serial) { - return; + const struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)val; + fprintf(stderr, " | %s (%d) ", rb_id2name(mid), 
ccs->len); + rp(ccs->cme); + + for (int i=0; i<ccs->len; i++) { + fprintf(stderr, " | [%d] ", i); vm_ci_dump(ccs->entries[i].ci); + rp_m( " | ", ccs->entries[i].cc); } - mjit_remove_class_serial(RCLASS_SERIAL(klass)); - RCLASS_SERIAL(klass) = rb_next_class_serial(); + return ID_TABLE_CONTINUE; +} - if (BUILTIN_TYPE(klass) == T_ICLASS) { - struct rb_id_table *table = RCLASS_CALLABLE_M_TBL(klass); - if (table) { - rb_id_table_clear(table); - } +static void +vm_ccs_dump(VALUE klass, ID target_mid) +{ + struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass); + if (cc_tbl) { + const struct rb_class_cc_entries *ccs; + if (target_mid) { + if (rb_id_table_lookup(cc_tbl, target_mid, (VALUE *)&ccs)) { + fprintf(stderr, " [CCTB] %p\n", cc_tbl); + vm_ccs_dump_i(target_mid, (VALUE)ccs, NULL); + } + } + else { + fprintf(stderr, " [CCTB] %p\n", cc_tbl); + rb_id_table_foreach(cc_tbl, vm_ccs_dump_i, (void *)target_mid); + } } - else { - VM_ASSERT(RCLASS_CALLABLE_M_TBL(klass) == 0); +} + +static enum rb_id_table_iterator_result +vm_cme_dump_i(ID mid, VALUE val, void *data) +{ + ID target_mid = (ID)data; + if (target_mid == 0 || mid == target_mid) { + rp_m(" > ", val); } + return ID_TABLE_CONTINUE; +} - rb_class_foreach_subclass(klass, rb_class_clear_method_cache, arg); +static VALUE +vm_mtbl_dump(VALUE klass, ID target_mid) +{ + fprintf(stderr, "# vm_mtbl\n"); + while (klass) { + rp_m(" -> ", klass); + rb_method_entry_t *me; + + if (RCLASS_M_TBL(klass)) { + if (target_mid != 0) { + if (rb_id_table_lookup(RCLASS_M_TBL(klass), target_mid, (VALUE *)&me)) { + rp_m(" [MTBL] ", me); + } + } + else { + fprintf(stderr, " ## RCLASS_M_TBL (%p)\n", RCLASS_M_TBL(klass)); + rb_id_table_foreach(RCLASS_M_TBL(klass), vm_cme_dump_i, NULL); + } + } + else { + fprintf(stderr, " MTBL: NULL\n"); + } + if (RCLASS_CALLABLE_M_TBL(klass)) { + if (target_mid != 0) { + if (rb_id_table_lookup(RCLASS_CALLABLE_M_TBL(klass), target_mid, (VALUE *)&me)) { + rp_m(" [CM**] ", me); + } + } + else { + fprintf(stderr, " ## 
RCLASS_CALLABLE_M_TBL\n"); + rb_id_table_foreach(RCLASS_CALLABLE_M_TBL(klass), vm_cme_dump_i, NULL); + } + } + if (RCLASS_CC_TBL(klass)) { + vm_ccs_dump(klass, target_mid); + } + klass = RCLASS_SUPER(klass); + } + return Qnil; +} + +void +rb_vm_mtbl_dump(const char *msg, VALUE klass, ID target_mid) +{ + fprintf(stderr, "[%s] ", msg); + vm_mtbl_dump(klass, target_mid); +} + +static inline void +vm_me_invalidate_cache(rb_callable_method_entry_t *cme) +{ + VM_ASSERT(IMEMO_TYPE_P(cme, imemo_ment)); + VM_ASSERT(callable_method_entry_p(cme)); + METHOD_ENTRY_INVALIDATED_SET(cme); + RB_DEBUG_COUNTER_INC(cc_cme_invalidate); } void @@ -89,31 +128,131 @@ rb_clear_constant_cache(void) INC_GLOBAL_CONSTANT_STATE(); } -void -rb_clear_method_cache_by_class(VALUE klass) +static rb_method_entry_t *rb_method_entry_alloc(ID called_id, VALUE owner, VALUE defined_class, const rb_method_definition_t *def); +const rb_method_entry_t * rb_method_entry_clone(const rb_method_entry_t *src_me); +static const rb_callable_method_entry_t *copmplemented_callable_method_entry(VALUE klass, ID id); + +static void +clear_method_cache_by_id_in_class(VALUE klass, ID mid) { - if (klass && klass != Qundef) { - int global = klass == rb_cBasicObject || klass == rb_cObject || klass == rb_mKernel; + VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS)); - RUBY_DTRACE_HOOK(METHOD_CACHE_CLEAR, (global ? 
"global" : rb_class2name(klass))); + if (LIKELY(RCLASS_EXT(klass)->subclasses == NULL)) { + // no subclasses + // check only current class - if (global) { - INC_GLOBAL_METHOD_STATE(); - } - else { - rb_serial_t old_serial = PREV_CLASS_SERIAL(); - rb_class_clear_method_cache(klass, (VALUE)&old_serial); - } + struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass); + struct rb_class_cc_entries *ccs; + + // invalidate CCs + if (cc_tbl && rb_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs)) { + rb_vm_ccs_free(ccs); + rb_id_table_delete(cc_tbl, mid); + RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_ccs); + } + + // remove from callable_m_tbl, if exists + struct rb_id_table *cm_tbl; + if ((cm_tbl = RCLASS_CALLABLE_M_TBL(klass)) != NULL) { + rb_id_table_delete(cm_tbl, mid); + RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_callable); + } + RB_DEBUG_COUNTER_INC(cc_invalidate_leaf); } + else { + const rb_callable_method_entry_t *cme = copmplemented_callable_method_entry(klass, mid); + + if (cme) { + // invalidate cme if found to invalidate the inline method cache. 
+ + if (METHOD_ENTRY_CACHED(cme)) { + // invalidate cc by invalidating cc->cme + VALUE owner = cme->owner; + rb_callable_method_entry_t *new_cme = + (rb_callable_method_entry_t *)rb_method_entry_clone((const rb_method_entry_t *)cme); + struct rb_id_table *mtbl = RCLASS_M_TBL(RCLASS_ORIGIN(owner)); + rb_id_table_insert(mtbl, mid, (VALUE)new_cme); + RB_OBJ_WRITTEN(owner, cme, new_cme); + vm_me_invalidate_cache((rb_callable_method_entry_t *)cme); + + RB_DEBUG_COUNTER_INC(cc_invalidate_tree_cme); + } - if (klass == rb_mKernel) { - rb_subclass_entry_t *entry = RCLASS_EXT(klass)->subclasses; + // invalidate complement tbl + if (METHOD_ENTRY_COMPLEMENTED(cme)) { + VALUE defined_class = cme->defined_class; + struct rb_id_table *cm_tbl = RCLASS_CALLABLE_M_TBL(defined_class); + VM_ASSERT(cm_tbl != NULL); + int r = rb_id_table_delete(cm_tbl, mid); + VM_ASSERT(r == TRUE); (void)r; + RB_DEBUG_COUNTER_INC(cc_invalidate_tree_callable); + } - for (; entry != NULL; entry = entry->next) { - struct rb_id_table *table = RCLASS_CALLABLE_M_TBL(entry->klass); - if (table)rb_id_table_clear(table); - } + RB_DEBUG_COUNTER_INC(cc_invalidate_tree); + } + } +} + +static void +clear_iclass_method_cache_by_id(VALUE iclass, VALUE d) +{ + VM_ASSERT(RB_TYPE_P(iclass, T_ICLASS)); + ID mid = (ID)d; + clear_method_cache_by_id_in_class(iclass, mid); +} + +static void +clear_iclass_method_cache_by_id_for_refinements(VALUE klass, VALUE d) +{ + if (RB_TYPE_P(klass, T_ICLASS)) { + ID mid = (ID)d; + clear_method_cache_by_id_in_class(klass, mid); + } +} + +void +rb_clear_method_cache(VALUE klass_or_module, ID mid) +{ + if (RB_TYPE_P(klass_or_module, T_MODULE)) { + VALUE module = klass_or_module; // alias + + if (FL_TEST(module, RMODULE_IS_REFINEMENT)) { + VALUE refined_class = rb_refinement_module_get_refined_class(module); + rb_clear_method_cache(refined_class, mid); + rb_class_foreach_subclass(refined_class, clear_iclass_method_cache_by_id_for_refinements, mid); + } + rb_class_foreach_subclass(module, 
clear_iclass_method_cache_by_id, mid); + } + else { + clear_method_cache_by_id_in_class(klass_or_module, mid); + } +} + +// gc.c +void rb_cc_table_free(VALUE klass); + +static int +invalidate_all_cc(void *vstart, void *vend, size_t stride, void *data) +{ + VALUE v = (VALUE)vstart; + for (; v != (VALUE)vend; v += stride) { + if (RBASIC(v)->flags) { // liveness check + if (RB_TYPE_P(v, T_CLASS) || + RB_TYPE_P(v, T_ICLASS)) { + if (RCLASS_CC_TBL(v)) { + rb_cc_table_free(v); + } + RCLASS_CC_TBL(v) = NULL; + } + } } + return 0; // continue to iteration +} + +void +rb_clear_method_cache_all(void) +{ + rb_objspace_each_objects(invalidate_all_cc, NULL); } VALUE @@ -138,7 +277,7 @@ rb_add_method_cfunc(VALUE klass, ID mid, VALUE (*func)(ANYARGS), int argc, rb_me rb_method_cfunc_t opt; opt.func = func; opt.argc = argc; - rb_add_method(klass, mid, VM_METHOD_TYPE_CFUNC, &opt, visi); + rb_add_method(klass, mid, VM_METHOD_TYPE_CFUNC, &opt, visi); } else { rb_define_notimplement_method_id(klass, mid, visi); @@ -161,8 +300,13 @@ rb_method_definition_release(rb_method_definition_t *def, int complemented) xfree(def); } else { - if (complemented) def->complemented_count--; - else if (def->alias_count > 0) def->alias_count--; + if (complemented) { + VM_ASSERT(def->complemented_count > 0); + def->complemented_count--; + } + else if (def->alias_count > 0) { + def->alias_count--; + } if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d->%d,%d->%d (dec)\n", (void *)def, rb_id2name(def->original_id), alias_count, def->alias_count, complemented_count, def->complemented_count); @@ -179,20 +323,6 @@ rb_free_method_entry(const rb_method_entry_t *me) static inline rb_method_entry_t *search_method(VALUE klass, ID id, VALUE *defined_class_ptr); extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2); -static inline rb_method_entry_t * -lookup_method_table(VALUE klass, ID id) -{ - st_data_t body; - struct rb_id_table *m_tbl = RCLASS_M_TBL(klass); - - if 
(rb_id_table_lookup(m_tbl, id, &body)) { - return (rb_method_entry_t *) body; - } - else { - return 0; - } -} - static VALUE (*call_cfunc_invoker_func(int argc))(VALUE recv, int argc, const VALUE *, VALUE (*func)(ANYARGS)) { @@ -406,7 +536,11 @@ const rb_method_entry_t * rb_method_entry_clone(const rb_method_entry_t *src_me) { rb_method_entry_t *me = rb_method_entry_alloc(src_me->called_id, src_me->owner, src_me->defined_class, - method_definition_addref(src_me->def)); + method_definition_addref(src_me->def)); + if (METHOD_ENTRY_COMPLEMENTED(src_me)) { + method_definition_addref_complement(src_me->def); + } + METHOD_ENTRY_FLAGS_COPY(me, src_me); return me; } @@ -487,6 +621,20 @@ make_method_entry_refined(VALUE owner, rb_method_entry_t *me) } } +static inline rb_method_entry_t * +lookup_method_table(VALUE klass, ID id) +{ + st_data_t body; + struct rb_id_table *m_tbl = RCLASS_M_TBL(klass); + + if (rb_id_table_lookup(m_tbl, id, &body)) { + return (rb_method_entry_t *) body; + } + else { + return 0; + } +} + void rb_add_refined_method_entry(VALUE refined_class, ID mid) { @@ -494,7 +642,7 @@ rb_add_refined_method_entry(VALUE refined_class, ID mid) if (me) { make_method_entry_refined(refined_class, me); - rb_clear_method_cache_by_class(refined_class); + rb_clear_method_cache(refined_class, mid); } else { rb_add_method(refined_class, mid, VM_METHOD_TYPE_REFINED, 0, METHOD_VISI_PUBLIC); @@ -615,7 +763,7 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil if (def == NULL) def = rb_method_definition_create(type, original_id); rb_method_definition_set(me, def, opts); - rb_clear_method_cache_by_class(klass); + rb_clear_method_cache(klass, mid); /* check mid */ if (klass == rb_cObject) { @@ -737,149 +885,169 @@ rb_get_alloc_func(VALUE klass) return 0; } +const rb_method_entry_t * +rb_method_entry_at(VALUE klass, ID id) +{ + return lookup_method_table(klass, id); +} + static inline rb_method_entry_t* search_method(VALUE klass, ID id, VALUE 
*defined_class_ptr) { rb_method_entry_t *me; + RB_DEBUG_COUNTER_INC(mc_search); + for (; klass; klass = RCLASS_SUPER(klass)) { RB_DEBUG_COUNTER_INC(mc_search_super); - if ((me = lookup_method_table(klass, id)) != 0) break; + if ((me = lookup_method_table(klass, id)) != 0) { + break; + } } - if (defined_class_ptr) - *defined_class_ptr = klass; + if (defined_class_ptr) *defined_class_ptr = klass; + + if (me == NULL) RB_DEBUG_COUNTER_INC(mc_search_notfound); + + VM_ASSERT(me == NULL || !METHOD_ENTRY_INVALIDATED(me)); return me; } -const rb_method_entry_t * -rb_method_entry_at(VALUE klass, ID id) +static rb_method_entry_t * +search_method_protect(VALUE klass, ID id, VALUE *defined_class_ptr) { - return lookup_method_table(klass, id); + rb_method_entry_t *me = search_method(klass, id, defined_class_ptr); + + if (!UNDEFINED_METHOD_ENTRY_P(me)) { + return me; + } + else { + return NULL; + } } -/* - * search method entry without the method cache. - * - * if you need method entry with method cache (normal case), use - * rb_method_entry() simply. 
- */ -static rb_method_entry_t * -method_entry_get_without_cache(VALUE klass, ID id, - VALUE *defined_class_ptr) +MJIT_FUNC_EXPORTED const rb_method_entry_t * +rb_method_entry(VALUE klass, ID id) { - VALUE defined_class; - rb_method_entry_t *me = search_method(klass, id, &defined_class); + return search_method(klass, id, NULL); +} - if (ruby_running) { - if (OPT_GLOBAL_METHOD_CACHE) { - struct cache_entry *ent; - ent = GLOBAL_METHOD_CACHE(klass, id); - ent->class_serial = RCLASS_SERIAL(klass); - ent->method_state = GET_GLOBAL_METHOD_STATE(); - ent->defined_class = defined_class; - ent->mid = id; +static inline const rb_callable_method_entry_t * +prepare_callable_method_entry(VALUE defined_class, ID id, const rb_method_entry_t * const me, int create) +{ + struct rb_id_table *mtbl; + const rb_callable_method_entry_t *cme; - if (UNDEFINED_METHOD_ENTRY_P(me)) { - me = ent->me = NULL; - } - else { - ent->me = me; - } - } - else if (UNDEFINED_METHOD_ENTRY_P(me)) { - me = NULL; - } + if (me) { + if (me->defined_class == 0) { + RB_DEBUG_COUNTER_INC(mc_cme_complement); + VM_ASSERT(RB_TYPE_P(defined_class, T_ICLASS) || RB_TYPE_P(defined_class, T_MODULE)); + VM_ASSERT(me->defined_class == 0); + + mtbl = RCLASS_CALLABLE_M_TBL(defined_class); + + if (mtbl && rb_id_table_lookup(mtbl, id, (VALUE *)&cme)) { + RB_DEBUG_COUNTER_INC(mc_cme_complement_hit); + VM_ASSERT(callable_method_entry_p(cme)); + VM_ASSERT(!METHOD_ENTRY_INVALIDATED(cme)); + } + else if (create) { + if (!mtbl) { + mtbl = RCLASS_EXT(defined_class)->callable_m_tbl = rb_id_table_create(0); + } + cme = rb_method_entry_complement_defined_class(me, me->called_id, defined_class); + rb_id_table_insert(mtbl, id, (VALUE)cme); + VM_ASSERT(callable_method_entry_p(cme)); + } + else { + return NULL; + } + } + else { + cme = (const rb_callable_method_entry_t *)me; + VM_ASSERT(callable_method_entry_p(cme)); + VM_ASSERT(!METHOD_ENTRY_INVALIDATED(cme)); + } + return cme; } - else if (UNDEFINED_METHOD_ENTRY_P(me)) { - me = NULL; + 
else { + return NULL; } +} - if (defined_class_ptr) - *defined_class_ptr = defined_class; - return me; +static const rb_callable_method_entry_t * +copmplemented_callable_method_entry(VALUE klass, ID id) +{ + VALUE defined_class; + rb_method_entry_t *me = search_method_protect(klass, id, &defined_class); + return prepare_callable_method_entry(defined_class, id, me, FALSE); } -static void -verify_method_cache(VALUE klass, ID id, VALUE defined_class, rb_method_entry_t *me) +static const rb_callable_method_entry_t * +cached_callable_method_entry(VALUE klass, ID mid) { - if (!VM_DEBUG_VERIFY_METHOD_CACHE) return; - VALUE actual_defined_class; - rb_method_entry_t *actual_me = - method_entry_get_without_cache(klass, id, &actual_defined_class); + struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass); + struct rb_class_cc_entries *ccs; - if (me != actual_me || defined_class != actual_defined_class) { - rb_bug("method cache verification failed"); + if (cc_tbl && rb_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs)) { + VM_ASSERT(vm_ccs_p(ccs)); + + if (LIKELY(!METHOD_ENTRY_INVALIDATED(ccs->cme))) { + VM_ASSERT(ccs->cme->called_id == mid); + RB_DEBUG_COUNTER_INC(ccs_found); + return ccs->cme; + } + else { + rb_vm_ccs_free(ccs); + rb_id_table_delete(cc_tbl, mid); + } } + return NULL; } -static rb_method_entry_t * -method_entry_get(VALUE klass, ID id, VALUE *defined_class_ptr) +static void +cache_callable_method_entry(VALUE klass, ID mid, const rb_callable_method_entry_t *cme) { - struct cache_entry *ent; - if (!OPT_GLOBAL_METHOD_CACHE) goto nocache; - ent = GLOBAL_METHOD_CACHE(klass, id); - if (ent->method_state == GET_GLOBAL_METHOD_STATE() && - ent->class_serial == RCLASS_SERIAL(klass) && - ent->mid == id) { - verify_method_cache(klass, id, ent->defined_class, ent->me); - if (defined_class_ptr) *defined_class_ptr = ent->defined_class; - RB_DEBUG_COUNTER_INC(mc_global_hit); - return ent->me; - } + struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass); + struct rb_class_cc_entries *ccs; - 
nocache: - RB_DEBUG_COUNTER_INC(mc_global_miss); - return method_entry_get_without_cache(klass, id, defined_class_ptr); -} + if (!cc_tbl) { + cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2); + } -MJIT_FUNC_EXPORTED const rb_method_entry_t * -rb_method_entry(VALUE klass, ID id) -{ - return method_entry_get(klass, id, NULL); + if (rb_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs)) { + VM_ASSERT(ccs->cme == cme); + } + else { + ccs = vm_ccs_create(klass, cme); + rb_id_table_insert(cc_tbl, mid, (VALUE)ccs); + } } static const rb_callable_method_entry_t * -prepare_callable_method_entry(VALUE defined_class, ID id, const rb_method_entry_t *me) +callable_method_entry(VALUE klass, ID mid, VALUE *defined_class_ptr) { - struct rb_id_table *mtbl; - const rb_callable_method_entry_t *cme; + VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS)); + const rb_callable_method_entry_t *cme = cached_callable_method_entry(klass, mid); - if (me && me->defined_class == 0) { - RB_DEBUG_COUNTER_INC(mc_cme_complement); - VM_ASSERT(RB_TYPE_P(defined_class, T_ICLASS) || RB_TYPE_P(defined_class, T_MODULE)); - VM_ASSERT(me->defined_class == 0); - - mtbl = RCLASS_CALLABLE_M_TBL(defined_class); - - if (mtbl && rb_id_table_lookup(mtbl, id, (VALUE *)&me)) { - RB_DEBUG_COUNTER_INC(mc_cme_complement_hit); - cme = (rb_callable_method_entry_t *)me; - VM_ASSERT(callable_method_entry_p(cme)); - } - else { - if (!mtbl) { - mtbl = RCLASS_EXT(defined_class)->callable_m_tbl = rb_id_table_create(0); - } - cme = rb_method_entry_complement_defined_class(me, me->called_id, defined_class); - rb_id_table_insert(mtbl, id, (VALUE)cme); - VM_ASSERT(callable_method_entry_p(cme)); - } + if (cme) { + if (defined_class_ptr != NULL) *defined_class_ptr = cme->defined_class; } else { - cme = (const rb_callable_method_entry_t *)me; - VM_ASSERT(callable_method_entry_p(cme)); + VALUE defined_class; + rb_method_entry_t *me = search_method_protect(klass, mid, &defined_class); + if (defined_class_ptr) 
*defined_class_ptr = defined_class; + cme = prepare_callable_method_entry(defined_class, mid, me, TRUE); + if (cme) cache_callable_method_entry(klass, mid, cme); } return cme; } MJIT_FUNC_EXPORTED const rb_callable_method_entry_t * -rb_callable_method_entry(VALUE klass, ID id) +rb_callable_method_entry(VALUE klass, ID mid) { - VALUE defined_class; - rb_method_entry_t *me = method_entry_get(klass, id, &defined_class); - return prepare_callable_method_entry(defined_class, id, me); + return callable_method_entry(klass, mid, NULL); } static const rb_method_entry_t *resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *defined_class_ptr); @@ -887,7 +1055,7 @@ static const rb_method_entry_t *resolve_refined_method(VALUE refinements, const static const rb_method_entry_t * method_entry_resolve_refinement(VALUE klass, ID id, int with_refinement, VALUE *defined_class_ptr) { - const rb_method_entry_t *me = method_entry_get(klass, id, defined_class_ptr); + const rb_method_entry_t *me = search_method_protect(klass, id, defined_class_ptr); if (me) { if (me->def->type == VM_METHOD_TYPE_REFINED) { @@ -916,9 +1084,15 @@ rb_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr) MJIT_FUNC_EXPORTED const rb_callable_method_entry_t * rb_callable_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr) { - VALUE defined_class, *dcp = defined_class_ptr ? defined_class_ptr : &defined_class; - const rb_method_entry_t *me = method_entry_resolve_refinement(klass, id, TRUE, dcp); - return prepare_callable_method_entry(*dcp, id, me); + const rb_callable_method_entry_t *cme = callable_method_entry(klass, id, defined_class_ptr); + if (cme == NULL || cme->def->type != VM_METHOD_TYPE_REFINED) { + return cme; + } + else { + VALUE defined_class, *dcp = defined_class_ptr ? 
defined_class_ptr : &defined_class; + const rb_method_entry_t *me = method_entry_resolve_refinement(klass, id, TRUE, dcp); + return prepare_callable_method_entry(*dcp, id, me, TRUE); + } } const rb_method_entry_t * @@ -932,7 +1106,7 @@ rb_callable_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_ { VALUE defined_class, *dcp = defined_class_ptr ? defined_class_ptr : &defined_class; const rb_method_entry_t *me = method_entry_resolve_refinement(klass, id, FALSE, dcp); - return prepare_callable_method_entry(*dcp, id, me); + return prepare_callable_method_entry(*dcp, id, me, TRUE); } static const rb_method_entry_t * @@ -945,7 +1119,7 @@ resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *de refinement = find_refinement(refinements, me->owner); if (!NIL_P(refinement)) { - tmp_me = method_entry_get(refinement, me->called_id, defined_class_ptr); + tmp_me = search_method_protect(refinement, me->called_id, defined_class_ptr); if (tmp_me && tmp_me->def->type != VM_METHOD_TYPE_REFINED) { return tmp_me; @@ -963,7 +1137,7 @@ resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *de return 0; } - me = method_entry_get(super, me->called_id, defined_class_ptr); + me = search_method_protect(super, me->called_id, defined_class_ptr); } return me; } @@ -1010,10 +1184,10 @@ remove_method(VALUE klass, ID mid) klass, ID2SYM(mid)); } + rb_clear_method_cache(klass, mid); rb_id_table_delete(RCLASS_M_TBL(klass), mid); rb_vm_check_redefinition_opt_method(me, klass); - rb_clear_method_cache_by_class(klass); if (me->def->type == VM_METHOD_TYPE_REFINED) { rb_add_refined_method_entry(klass, mid); @@ -1069,6 +1243,7 @@ rb_export_method(VALUE klass, ID name, rb_method_visibility_t visi) VALUE origin_class = RCLASS_ORIGIN(klass); me = search_method(origin_class, name, &defined_class); + if (!me && RB_TYPE_P(klass, T_MODULE)) { me = search_method(rb_cObject, name, &defined_class); } @@ -1087,7 +1262,7 @@ rb_export_method(VALUE klass, 
ID name, rb_method_visibility_t visi) if (me->def->type == VM_METHOD_TYPE_REFINED && me->def->body.refined.orig_me) { METHOD_ENTRY_VISI_SET((rb_method_entry_t *)me->def->body.refined.orig_me, visi); } - rb_clear_method_cache_by_class(klass); + rb_clear_method_cache(klass, name); } else { rb_add_method(klass, name, VM_METHOD_TYPE_ZSUPER, 0, visi); @@ -1110,8 +1285,8 @@ rb_method_boundp(VALUE klass, ID id, int ex) me = rb_method_entry_without_refinements(klass, id, NULL); } - if (me != 0) { - if ((ex & ~BOUND_RESPONDS) && + if (me != NULL) { + if ((ex & ~BOUND_RESPONDS) && ((METHOD_ENTRY_VISI(me) == METHOD_VISI_PRIVATE) || ((ex & BOUND_RESPONDS) && (METHOD_ENTRY_VISI(me) == METHOD_VISI_PROTECTED)))) { return 0; @@ -1593,6 +1768,7 @@ rb_alias(VALUE klass, ID alias_name, ID original_name) again: orig_me = search_method(klass, original_name, &defined_class); + if (orig_me && orig_me->def->type == VM_METHOD_TYPE_REFINED) { orig_me = rb_resolve_refined_method(Qnil, orig_me); } @@ -1841,7 +2017,7 @@ rb_mod_ruby2_keywords(int argc, VALUE *argv, VALUE module) !me->def->body.iseq.iseqptr->body->param.flags.has_kw && !me->def->body.iseq.iseqptr->body->param.flags.has_kwrest) { me->def->body.iseq.iseqptr->body->param.flags.ruby2_keywords = 1; - rb_clear_method_cache_by_class(module); + rb_clear_method_cache(module, name); } else { rb_warn("Skipping set of ruby2_keywords flag for %s (method accepts keywords or method does not accept argument splat)", rb_id2name(name)); @@ -1860,7 +2036,7 @@ rb_mod_ruby2_keywords(int argc, VALUE *argv, VALUE module) !iseq->body->param.flags.has_kw && !iseq->body->param.flags.has_kwrest) { iseq->body->param.flags.ruby2_keywords = 1; - rb_clear_method_cache_by_class(module); + rb_clear_method_cache(module, name); } else { rb_warn("Skipping set of ruby2_keywords flag for %s (method accepts keywords or method does not accept argument splat)", rb_id2name(name)); @@ -2061,10 +2237,10 @@ rb_mod_modfunc(int argc, VALUE *argv, VALUE module) int 
rb_method_basic_definition_p(VALUE klass, ID id) { - const rb_method_entry_t *me; + const rb_callable_method_entry_t *cme; if (!klass) return TRUE; /* hidden object cannot be overridden */ - me = rb_method_entry(klass, id); - return (me && METHOD_ENTRY_BASIC(me)) ? TRUE : FALSE; + cme = rb_callable_method_entry(klass, id); + return (cme && METHOD_ENTRY_BASIC(cme)) ? TRUE : FALSE; } #ifdef __GNUC__ #pragma pop_macro("rb_method_basic_definition_p") @@ -2072,10 +2248,8 @@ rb_method_basic_definition_p(VALUE klass, ID id) static VALUE call_method_entry(rb_execution_context_t *ec, VALUE defined_class, VALUE obj, ID id, - const rb_method_entry_t *me, int argc, const VALUE *argv, int kw_splat) + const rb_callable_method_entry_t *cme, int argc, const VALUE *argv, int kw_splat) { - const rb_callable_method_entry_t *cme = - prepare_callable_method_entry(defined_class, id, me); VALUE passed_block_handler = vm_passed_block_handler(ec); VALUE result = rb_vm_call_kw(ec, obj, id, argc, argv, cme, kw_splat); vm_passed_block_handler_set(ec, passed_block_handler); @@ -2088,13 +2262,12 @@ basic_obj_respond_to_missing(rb_execution_context_t *ec, VALUE klass, VALUE obj, { VALUE defined_class, args[2]; const ID rtmid = idRespond_to_missing; - const rb_method_entry_t *const me = - method_entry_get(klass, rtmid, &defined_class); + const rb_callable_method_entry_t *const cme = callable_method_entry(klass, rtmid, &defined_class); - if (!me || METHOD_ENTRY_BASIC(me)) return Qundef; + if (!cme || METHOD_ENTRY_BASIC(cme)) return Qundef; args[0] = mid; args[1] = priv; - return call_method_entry(ec, defined_class, obj, rtmid, me, 2, args, RB_NO_KEYWORDS); + return call_method_entry(ec, defined_class, obj, rtmid, cme, 2, args, RB_NO_KEYWORDS); } static inline int @@ -2120,11 +2293,10 @@ vm_respond_to(rb_execution_context_t *ec, VALUE klass, VALUE obj, ID id, int pri { VALUE defined_class; const ID resid = idRespond_to; - const rb_method_entry_t *const me = - method_entry_get(klass, resid, 
&defined_class); + const rb_callable_method_entry_t *const cme = callable_method_entry(klass, resid, &defined_class); - if (!me) return -1; - if (METHOD_ENTRY_BASIC(me)) { + if (!cme) return -1; + if (METHOD_ENTRY_BASIC(cme)) { return -1; } else { @@ -2135,7 +2307,7 @@ vm_respond_to(rb_execution_context_t *ec, VALUE klass, VALUE obj, ID id, int pri args[0] = ID2SYM(id); args[1] = Qtrue; if (priv) { - argc = rb_method_entry_arity(me); + argc = rb_method_entry_arity((const rb_method_entry_t *)cme); if (argc > 2) { rb_raise(rb_eArgError, "respond_to? must accept 1 or 2 arguments (requires %d)", @@ -2145,7 +2317,7 @@ vm_respond_to(rb_execution_context_t *ec, VALUE klass, VALUE obj, ID id, int pri argc = 2; } else if (!NIL_P(ruby_verbose)) { - VALUE location = rb_method_entry_location(me); + VALUE location = rb_method_entry_location((const rb_method_entry_t *)cme); rb_warn("%"PRIsVALUE"%c""respond_to?(:%"PRIsVALUE") uses" " the deprecated method signature, which takes one parameter", (FL_TEST(klass, FL_SINGLETON) ? 
obj : klass), @@ -2161,7 +2333,7 @@ vm_respond_to(rb_execution_context_t *ec, VALUE klass, VALUE obj, ID id, int pri } } } - result = call_method_entry(ec, defined_class, obj, resid, me, argc, args, RB_NO_KEYWORDS); + result = call_method_entry(ec, defined_class, obj, resid, cme, argc, args, RB_NO_KEYWORDS); return RTEST(result); } } @@ -2246,25 +2418,7 @@ obj_respond_to_missing(VALUE obj, VALUE mid, VALUE priv) void Init_Method(void) { - if (!OPT_GLOBAL_METHOD_CACHE) return; - char *ptr = getenv("RUBY_GLOBAL_METHOD_CACHE_SIZE"); - int val; - - if (ptr != NULL && (val = atoi(ptr)) > 0) { - if ((val & (val - 1)) == 0) { /* ensure val is a power of 2 */ - global_method_cache.size = val; - global_method_cache.mask = val - 1; - } - else { - fprintf(stderr, "RUBY_GLOBAL_METHOD_CACHE_SIZE was set to %d but ignored because the value is not a power of 2.\n", val); - } - } - - global_method_cache.entries = (struct cache_entry *)calloc(global_method_cache.size, sizeof(struct cache_entry)); - if (global_method_cache.entries == NULL) { - fprintf(stderr, "[FATAL] failed to allocate memory\n"); - exit(EXIT_FAILURE); - } + // } void |