-rw-r--r--  method.h        | 1 -
-rw-r--r--  vm_insnhelper.c | 8 ++++----
-rw-r--r--  vm_insnhelper.h | 9 +++++++--
3 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/method.h b/method.h
index 2f4504bfec..16d212a1c8 100644
--- a/method.h
+++ b/method.h
@@ -75,7 +75,6 @@ typedef struct rb_callable_method_entry_struct { /* same fields with rb_method_e
#define METHOD_ENTRY_CACHED_SET(me) ((me)->flags |= IMEMO_FL_USER4)
#define METHOD_ENTRY_INVALIDATED(me) ((me)->flags & IMEMO_FL_USER5)
#define METHOD_ENTRY_INVALIDATED_SET(me) ((me)->flags |= IMEMO_FL_USER5)
-#define METHOD_ENTRY_CACHEABLE(me) !(METHOD_ENTRY_VISI(me) == METHOD_VISI_PROTECTED)
static inline void
METHOD_ENTRY_VISI_SET(rb_method_entry_t *me, rb_method_visibility_t visi)
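
Note on the deleted macro: it made cacheability a property of the method entry alone, so every protected method was barred from the call-cache fastpath. That is the conservative choice, because Ruby's protected check depends on the caller: a protected method may only be invoked when the caller's self is an instance of the method's defining class or module, so the same method entry can be legal to call from one site and illegal from another. A rough model of that check, with illustrative stand-in types rather than the VM's real ones:

    #include <stdbool.h>
    #include <stddef.h>

    /* Stand-in for a class with a superclass chain. */
    struct klass_model { const struct klass_model *super; };

    /* Walks the ancestry chain, as a stand-in for the VM's kind-of
     * check: a protected call is legal only when the method's
     * defining class appears in the caller's ancestry. The result
     * depends on who is calling, not just on the method entry. */
    static bool
    protected_check_passes(const struct klass_model *self_class,
                           const struct klass_model *defined_class)
    {
        for (const struct klass_model *k = self_class; k != NULL; k = k->super)
            if (k == defined_class)
                return true;
        return false;
    }
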
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 248a59f642..da04e3521f 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -2534,12 +2534,12 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
- cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
+ cacheable_ci && vm_call_cacheable(ci, cc));
}
else {
CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
- cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
+ cacheable_ci && vm_call_cacheable(ci, cc));
}
/* initialize opt vars for self-references */
@@ -2567,7 +2567,7 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
- cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
+ cacheable_ci && vm_call_cacheable(ci, cc));
return 0;
}
@@ -2580,7 +2580,7 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
if (klocals[kw_param->num] == INT2FIX(0)) {
/* copy from default_values */
CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
- cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
+ cacheable_ci && vm_call_cacheable(ci, cc));
}
return 0;
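
All four call sites change the same way: the guard handed to CC_SET_FASTPATH now consults the call info (ci) as well as the callable method entry behind the call cache (cc). CC_SET_FASTPATH installs a specialized call function into the cache only when its guard is true; otherwise the site keeps going through the full setup path on every call. A simplified model of that mechanism, with stand-in names and types rather than the VM's:

    #include <stdbool.h>

    /* Stand-in for a per-call-site cache that memoizes which C
     * function implements the call. */
    struct cc_model {
        bool (*call)(void);
    };

    /* Model of CC_SET_FASTPATH(cc, func, enable): the fastpath is
     * installed only when the guard holds, so an uncacheable call
     * never skips its checks on later invocations. */
    static void
    cc_set_fastpath_model(struct cc_model *cc, bool (*func)(void), bool enable)
    {
        if (enable)
            cc->call = func;
    }
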
diff --git a/vm_insnhelper.h b/vm_insnhelper.h
index 126867025f..2199ee37cc 100644
--- a/vm_insnhelper.h
+++ b/vm_insnhelper.h
@@ -252,13 +252,18 @@ THROW_DATA_CONSUMED_SET(struct vm_throw_data *obj)
#define IS_ARGS_KW_OR_KW_SPLAT(ci) (vm_ci_flag(ci) & (VM_CALL_KWARG | VM_CALL_KW_SPLAT))
#define IS_ARGS_KW_SPLAT_MUT(ci) (vm_ci_flag(ci) & VM_CALL_KW_SPLAT_MUT)
+static inline bool
+vm_call_cacheable(const struct rb_callinfo *ci, const struct rb_callcache *cc)
+{
+ return (vm_ci_flag(ci) & VM_CALL_FCALL) ||
+ METHOD_ENTRY_VISI(vm_cc_cme(cc)) != METHOD_VISI_PROTECTED;
+}
/* If this returns true, an optimized function returned by `vm_call_iseq_setup_func`
can be used as a fastpath. */
static inline bool
vm_call_iseq_optimizable_p(const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
- return !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
- METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc));
+ return !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) && vm_call_cacheable(ci, cc);
}
#endif /* RUBY_INSNHELPER_H */
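
Net effect of the new predicate: a call site carrying the VM_CALL_FCALL flag (a receiver-less call such as foo or foo(1)) may now cache even a protected method, while sites with an explicit receiver keep the old behavior. For an fcall the receiver is the caller's self and the method was found through self's own class chain, so the protected check is guaranteed to pass there, which is presumably why such sites are safe to cache. A self-contained sketch contrasting the removed rule with the new one; the flag value and type names below are simplified stand-ins, not the VM's:

    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified stand-ins for the VM's visibility enum and call flags. */
    typedef enum { VISI_PUBLIC, VISI_PRIVATE, VISI_PROTECTED } visi_t;
    #define CALL_FCALL 0x01u  /* receiver-less call site, e.g. foo(1) */

    /* Removed rule: protected methods were never cacheable. */
    static bool
    old_cacheable(visi_t visi)
    {
        return visi != VISI_PROTECTED;
    }

    /* New rule: an fcall site may cache even a protected method. */
    static bool
    new_cacheable(unsigned ci_flags, visi_t visi)
    {
        return (ci_flags & CALL_FCALL) || visi != VISI_PROTECTED;
    }

    int main(void)
    {
        printf("old, protected:         %d\n", old_cacheable(VISI_PROTECTED));             /* 0 */
        printf("new, protected fcall:   %d\n", new_cacheable(CALL_FCALL, VISI_PROTECTED)); /* 1 */
        printf("new, protected w/recv:  %d\n", new_cacheable(0, VISI_PROTECTED));          /* 0 */
        return 0;
    }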