about summary refs log tree commit diff stats
path: root/gc.c
diff options
context:
space:
mode:
author    ko1 <ko1@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>    2015-10-07 11:18:17 +0000
committer ko1 <ko1@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>    2015-10-07 11:18:17 +0000
commit    e2cabc22be830ac2a1871dc6c8107cbd759da7ab (patch)
tree      04a38364b4a115c965e656a1d344394cb433212e /gc.c
parent    9fe491e788720e07cec6c8ec1cb385a347b64ec0 (diff)
download  ruby-e2cabc22be830ac2a1871dc6c8107cbd759da7ab.tar.gz
* gc.c (newobj_of): divide fast path and slow path
to avoid register savings for fast path. This idea is given by Kazuho Oku <kazuho@natadeco.co>. git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@52072 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
Diffstat (limited to 'gc.c')
-rw-r--r--    gc.c    103
1 files changed, 70 insertions, 33 deletions
diff --git a/gc.c b/gc.c
index a3f1e9975c..156e622d7b 100644
--- a/gc.c
+++ b/gc.c
@@ -1655,6 +1655,16 @@ heap_get_freeobj_from_next_freepage(rb_objspace_t *objspace, rb_heap_t *heap)
}
static inline VALUE
+heap_get_freeobj_head(rb_objspace_t *objspace, rb_heap_t *heap)
+{
+ RVALUE *p = heap->freelist;
+ if (LIKELY(p != NULL)) {
+ heap->freelist = p->as.free.next;
+ }
+ return (VALUE)p;
+}
+
+static inline VALUE
heap_get_freeobj(rb_objspace_t *objspace, rb_heap_t *heap)
{
RVALUE *p = heap->freelist;
@@ -1683,44 +1693,17 @@ gc_event_hook_body(rb_thread_t *th, rb_objspace_t *objspace, const rb_event_flag
EXEC_EVENT_HOOK(th, event, th->cfp->self, 0, 0, data);
}
+#define gc_event_hook_needed_p(objspace, event) ((objspace)->hook_events & (event))
+
#define gc_event_hook(objspace, event, data) do { \
- if (UNLIKELY((objspace)->hook_events & (event))) { \
+ if (gc_event_hook_needed_p(objspace, event)) { \
gc_event_hook_body(GET_THREAD(), (objspace), (event), (data)); \
} \
} while (0)
-static VALUE
-newobj_of(VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3)
+static inline VALUE
+newobj_of_init(rb_objspace_t *objspace, VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, VALUE obj, int hook_needed)
{
- rb_objspace_t *objspace = &rb_objspace;
- VALUE obj;
-
-#if GC_DEBUG_STRESS_TO_CLASS
- if (UNLIKELY(stress_to_class)) {
- long i, cnt = RARRAY_LEN(stress_to_class);
- const VALUE *ptr = RARRAY_CONST_PTR(stress_to_class);
- for (i = 0; i < cnt; ++i) {
- if (klass == ptr[i]) rb_memerror();
- }
- }
-#endif
-
- if (UNLIKELY(during_gc || ruby_gc_stressful)) {
- if (during_gc) {
- dont_gc = 1;
- during_gc = 0;
- rb_bug("object allocation during garbage collection phase");
- }
-
- if (ruby_gc_stressful) {
- if (!garbage_collect(objspace, FALSE, FALSE, FALSE, GPR_FLAG_NEWOBJ)) {
- rb_memerror();
- }
- }
- }
-
- obj = heap_get_freeobj(objspace, heap_eden);
-
if (RGENGC_CHECK_MODE > 0) assert(BUILTIN_TYPE(obj) == T_NONE);
/* OBJSETUP */
@@ -1773,7 +1756,10 @@ newobj_of(VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3)
#endif
objspace->total_allocated_objects++;
- gc_event_hook(objspace, RUBY_INTERNAL_EVENT_NEWOBJ, obj);
+
+ if (hook_needed) {
+ gc_event_hook(objspace, RUBY_INTERNAL_EVENT_NEWOBJ, obj);
+ }
gc_report(5, objspace, "newobj: %s\n", obj_info(obj));
#if RGENGC_OLD_NEWOBJ_CHECK > 0
@@ -1798,6 +1784,57 @@ newobj_of(VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3)
return obj;
}
+NOINLINE(static VALUE newobj_of_slowpass(rb_objspace_t *objspace, VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, int hook_needed));
+
+static VALUE
+newobj_of_slowpass(rb_objspace_t *objspace, VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, int hook_needed)
+{
+ VALUE obj;
+
+ if (UNLIKELY(during_gc || ruby_gc_stressful)) {
+ if (during_gc) {
+ dont_gc = 1;
+ during_gc = 0;
+ rb_bug("object allocation during garbage collection phase");
+ }
+
+ if (ruby_gc_stressful) {
+ if (!garbage_collect(objspace, FALSE, FALSE, FALSE, GPR_FLAG_NEWOBJ)) {
+ rb_memerror();
+ }
+ }
+ }
+
+ obj = heap_get_freeobj(objspace, heap_eden);
+ return newobj_of_init(objspace, klass, flags, v1, v2, v3, obj, hook_needed);
+}
+
+static VALUE
+newobj_of(VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3)
+{
+ rb_objspace_t *objspace = &rb_objspace;
+ VALUE obj;
+ int hook_needed = gc_event_hook_needed_p(objspace, RUBY_INTERNAL_EVENT_NEWOBJ);
+
+#if GC_DEBUG_STRESS_TO_CLASS
+ if (UNLIKELY(stress_to_class)) {
+ long i, cnt = RARRAY_LEN(stress_to_class);
+ const VALUE *ptr = RARRAY_CONST_PTR(stress_to_class);
+ for (i = 0; i < cnt; ++i) {
+ if (klass == ptr[i]) rb_memerror();
+ }
+ }
+#endif
+
+ if (LIKELY(!(during_gc || ruby_gc_stressful) && hook_needed == FALSE &&
+ (obj = heap_get_freeobj_head(objspace, heap_eden)) != Qfalse)) {
+ return newobj_of_init(objspace, klass, flags, v1, v2, v3, obj, FALSE);
+ }
+ else {
+ return newobj_of_slowpass(objspace, klass, flags, v1, v2, v3, hook_needed);
+ }
+}
+
VALUE
rb_newobj(void)
{