aboutsummaryrefslogtreecommitdiffstats
path: root/thread_pthread.c
diff options
context:
space:
mode:
authorKoichi Sasada <ko1@atdot.net>2023-10-13 01:14:17 +0900
committerKoichi Sasada <ko1@atdot.net>2023-10-13 09:19:31 +0900
commitcdb36dfe7ddb7cbd7ed95e84b24114c8869a7e5e (patch)
tree2de31772f8288869838c7fa5fff6ba60c342453f /thread_pthread.c
parent2794a8fef65eb16767c2f46f8f5058c10b4591b9 (diff)
downloadruby-cdb36dfe7ddb7cbd7ed95e84b24114c8869a7e5e.tar.gz
fix `native_thread_destroy()` timing
With M:N thread scheduler, the native thread (NT) related resources should be freed when the NT is no longer needed. So calling `native_thread_destroy()` at the end of `thread_cleanup_func()` (at the end of the Ruby thread) is not the correct timing. Call it when the corresponding Ruby thread is collected.
Diffstat (limited to 'thread_pthread.c')
-rw-r--r--thread_pthread.c33
1 file changed, 16 insertions, 17 deletions
diff --git a/thread_pthread.c b/thread_pthread.c
index 9c4310d5c9..1ca805843e 100644
--- a/thread_pthread.c
+++ b/thread_pthread.c
@@ -1717,14 +1717,19 @@ native_thread_assign(struct rb_native_thread *nt, rb_thread_t *th)
}
static void
-native_thread_destroy(rb_thread_t *th)
+native_thread_destroy(struct rb_native_thread *nt)
{
- struct rb_native_thread *nt = th->nt;
+ if (nt) {
+ rb_native_cond_destroy(&nt->cond.readyq);
- rb_native_cond_destroy(&nt->cond.readyq);
+ if (&nt->cond.readyq != &nt->cond.intr) {
+ rb_native_cond_destroy(&nt->cond.intr);
+ }
- if (&nt->cond.readyq != &nt->cond.intr)
- rb_native_cond_destroy(&nt->cond.intr);
+ RB_ALTSTACK_FREE(nt->altstack);
+ ruby_xfree(nt->nt_context);
+ ruby_xfree(nt);
+ }
}
#if defined HAVE_PTHREAD_GETATTR_NP || defined HAVE_PTHREAD_ATTR_GET_NP
@@ -2109,6 +2114,7 @@ static struct rb_native_thread *
native_thread_alloc(void)
{
struct rb_native_thread *nt = ZALLOC(struct rb_native_thread);
+ native_thread_setup(nt);
#if USE_MN_THREADS
nt->nt_context = ruby_xmalloc(sizeof(struct coroutine_context));
@@ -2128,7 +2134,6 @@ native_thread_create_dedicated(rb_thread_t *th)
th->nt->vm = th->vm;
th->nt->running_thread = th;
th->nt->dedicated = 1;
- native_thread_setup(th->nt);
// vm stack
size_t vm_stack_word_size = th->vm->default_params.thread_vm_stack_size / sizeof(VALUE);
@@ -2265,10 +2270,9 @@ rb_threadptr_sched_free(rb_thread_t *th)
{
#if USE_MN_THREADS
if (th->sched.malloc_stack) {
+ // has dedicated
ruby_xfree(th->sched.context_stack);
- RB_ALTSTACK_FREE(th->nt->altstack);
- ruby_xfree(th->nt->nt_context);
- ruby_xfree(th->nt);
+ native_thread_destroy(th->nt);
}
else {
nt_free_stack(th->sched.context_stack);
@@ -2279,17 +2283,12 @@ rb_threadptr_sched_free(rb_thread_t *th)
ruby_xfree(th->sched.context);
VM_ASSERT((th->sched.context = NULL) == NULL);
}
-
- th->nt = NULL;
#else
ruby_xfree(th->sched.context_stack);
-
- struct rb_native_thread *nt = th->nt;
- if (nt) { // TODO: not sure why nt is NULL
- RB_ALTSTACK_FREE(nt->altstack);
- ruby_xfree(nt);
- }
+ native_thread_destroy(th->nt);
#endif
+
+ th->nt = NULL;
}
void