#ifndef RUBY_THREAD_PTHREAD_H
#define RUBY_THREAD_PTHREAD_H
/**********************************************************************

  thread_pthread.h -

  $Author$

  Copyright (C) 2004-2007 Koichi Sasada

**********************************************************************/
#ifdef HAVE_PTHREAD_NP_H
#include <pthread_np.h>
#endif
#define RB_NATIVETHREAD_LOCK_INIT PTHREAD_MUTEX_INITIALIZER
#define RB_NATIVETHREAD_COND_INIT PTHREAD_COND_INITIALIZER
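/*
 * Illustrative sketch (not part of this header): the two initializer macros
 * above are intended for statically allocated native locks and condvars, for
 * example (both variable names below are hypothetical):
 *
 *     static rb_nativethread_lock_t debug_lock = RB_NATIVETHREAD_LOCK_INIT;
 *     static rb_nativethread_cond_t debug_cond = RB_NATIVETHREAD_COND_INIT;
 *
 * Locks and condvars created at runtime would instead be set up with
 * rb_native_mutex_initialize() / rb_native_cond_initialize().
 */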
// per-Thread scheduler helper data
struct rb_thread_sched_item {
    struct {
        struct ccan_list_node ubf;

        // connected to ractor->threads.sched.readyq
        // locked by ractor->threads.sched.lock
        struct ccan_list_node readyq;

        // connected to vm->ractor.sched.timeslice_threads
        // locked by vm->ractor.sched.lock
        struct ccan_list_node timeslice_threads;

        // connected to vm->ractor.sched.running_threads
        // locked by vm->ractor.sched.lock
        struct ccan_list_node running_threads;

        // connected to vm->ractor.sched.zombie_threads
        struct ccan_list_node zombie_threads;
    } node;

    // this data should be protected by timer_th.waiting_lock
    struct {
        enum thread_sched_waiting_flag {
            thread_sched_waiting_none     = 0x00,
            thread_sched_waiting_timeout  = 0x01,
            thread_sched_waiting_io_read  = 0x02,
            thread_sched_waiting_io_write = 0x08,
            thread_sched_waiting_io_force = 0x40, // ignore readable
        } flags;

        struct {
            // should be compatible with hrtime.h
#ifdef MY_RUBY_BUILD_MAY_TIME_TRAVEL
            int128_t timeout;
#else
            uint64_t timeout;
#endif
            int fd; // -1 for timeout only
            int result;
        } data;

        // connected to timer_th.waiting
        struct ccan_list_node node;
    } waiting_reason;

    bool finished;
    bool malloc_stack;
    void *context_stack;
    struct coroutine_context *context;
};
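/*
 * Illustrative sketch (an assumption about intended use, not code taken from
 * this file): waiting_reason.flags is a bitmask of the thread_sched_waiting_*
 * values above, so a thread blocked on a read with a timeout might record:
 *
 *     struct rb_thread_sched_item *si = ...;
 *     si->waiting_reason.flags = thread_sched_waiting_io_read |
 *                                thread_sched_waiting_timeout;
 *     si->waiting_reason.data.fd = watched_fd;    // fd being waited on
 *     si->waiting_reason.data.timeout = deadline; // hrtime-compatible value
 *
 * As the comment above says, such accesses belong under timer_th.waiting_lock.
 */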
struct rb_native_thread {
    rb_atomic_t serial;
    struct rb_vm_struct *vm;

    rb_nativethread_id_t thread_id;

#ifdef RB_THREAD_T_HAS_NATIVE_ID
    int tid;
#endif

    struct rb_thread_struct *running_thread;

    // to control native thread
#if defined(__GLIBC__) || defined(__FreeBSD__)
    union
#else
    /*
     * assume the platform condvars are badly implemented and have a
     * "memory" of which mutex they're associated with
     */
    struct
#endif
    {
        rb_nativethread_cond_t intr;   /* th->interrupt_lock */
        rb_nativethread_cond_t readyq; /* use sched->lock */
    } cond;

#ifdef USE_SIGALTSTACK
    void *altstack;
#endif

    struct coroutine_context *nt_context;
    int dedicated;

    size_t machine_stack_maxsize;
};
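/*
 * Note on the cond member above (an inference, not an API guarantee): with
 * the union, cond.intr and cond.readyq share a single condvar object, which
 * is only safe where the platform condvar keeps no "memory" of the mutex it
 * was last used with; with the struct, each condvar is paired with its own
 * lock:
 *
 *     struct rb_native_thread *nt = ...;
 *     rb_native_cond_signal(&nt->cond.intr);   // waiter held th->interrupt_lock
 *     rb_native_cond_signal(&nt->cond.readyq); // waiter held sched->lock
 */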
#undef except
#undef try
#undef leave
#undef finally

// per-Ractor scheduler
struct rb_thread_sched {
    rb_nativethread_lock_t lock_;
#if VM_CHECK_MODE
    struct rb_thread_struct *lock_owner;
#endif
    struct rb_thread_struct *running; // running thread or NULL
    bool is_running;
    bool is_running_timeslice;
    bool enable_mn_threads;

    struct ccan_list_head readyq;
    int readyq_cnt;

    // ractor scheduling
    struct ccan_list_node grq_node;
};
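/*
 * Illustrative sketch (an assumption, not code taken from this file): readyq
 * links rb_thread_sched_item::node.readyq entries, and both it and readyq_cnt
 * are expected to be touched only while lock_ is held, roughly:
 *
 *     rb_native_mutex_lock(&sched->lock_);
 *     ccan_list_add_tail(&sched->readyq, &th->sched.node.readyq);
 *     sched->readyq_cnt++;
 *     rb_native_mutex_unlock(&sched->lock_);
 *
 * (th->sched is assumed here to be the thread's rb_thread_sched_item.)
 */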
#ifdef RB_THREAD_LOCAL_SPECIFIER
NOINLINE(void rb_current_ec_set(struct rb_execution_context_struct *));
NOINLINE(struct rb_execution_context_struct *rb_current_ec_noinline(void));

# ifdef __APPLE__
// on Darwin, TLS cannot be accessed across .so boundaries
NOINLINE(struct rb_execution_context_struct *rb_current_ec(void));
# else
RUBY_EXTERN RB_THREAD_LOCAL_SPECIFIER struct rb_execution_context_struct *ruby_current_ec;

// for RUBY_DEBUG_LOG()
RUBY_EXTERN RB_THREAD_LOCAL_SPECIFIER rb_atomic_t ruby_nt_serial;
#define RUBY_NT_SERIAL 1
# endif
#else
typedef pthread_key_t native_tls_key_t;

static inline void *
native_tls_get(native_tls_key_t key)
{
    // return value should be checked by caller
    return pthread_getspecific(key);
}

static inline void
native_tls_set(native_tls_key_t key, void *ptr)
{
    if (UNLIKELY(pthread_setspecific(key, ptr) != 0)) {
        rb_bug("pthread_setspecific error");
    }
}

RUBY_EXTERN native_tls_key_t ruby_current_ec_key;
#endif
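/*
 * Usage sketch for the fallback path above (illustrative only): without
 * RB_THREAD_LOCAL_SPECIFIER, the current execution context is stored in a
 * pthread-specific slot keyed by ruby_current_ec_key, so a lookup is roughly:
 *
 *     struct rb_execution_context_struct *ec =
 *         native_tls_get(ruby_current_ec_key);
 *     if (ec == NULL) { ... }  // e.g. a thread not created by Ruby
 *
 * As the comment in native_tls_get() notes, checking the result is the
 * caller's job.
 */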
#endif /* RUBY_THREAD_PTHREAD_H */