#ifndef RUBY_GC_H
#define RUBY_GC_H 1

#include "ruby/ruby.h"

/* SET_MACHINE_STACK_END(p): store the current machine stack pointer into
 * *(p).  Per-architecture inline assembly is used where available; the
 * result tells the conservative stack scan how far the C stack extends
 * (see USE_CONSERVATIVE_STACK_END fallback below). */
#if defined(__x86_64__) && !defined(_ILP32) && defined(__GNUC__)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("movq\t%%rsp, %0" : "=r" (*(p)))
#elif defined(__i386) && defined(__GNUC__)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("movl\t%%esp, %0" : "=r" (*(p)))
#elif (defined(__powerpc__) || defined(__powerpc64__)) && defined(__GNUC__) && !defined(_AIX)
/* On PowerPC, r1 is the stack pointer by ABI convention. */
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mr\t%0, %%r1" : "=r" (*(p)))
#elif (defined(__powerpc__) || defined(__powerpc64__)) && defined(__GNUC__) && defined(_AIX)
/* AIX assembler spells registers as bare numbers ("1" == r1). */
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mr %0,1" : "=r" (*(p)))
#elif defined(__aarch64__) && defined(__GNUC__)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mov\t%0, sp" : "=r" (*(p)))
#else
/* Portable fallback: a non-inlined call whose own frame address
 * approximates the stack end.  NOINLINE is required so the callee's
 * frame really lies beyond the caller's. */
NOINLINE(void rb_gc_set_stack_end(VALUE **stack_end_p));
#define SET_MACHINE_STACK_END(p) rb_gc_set_stack_end(p)
#define USE_CONSERVATIVE_STACK_END
#endif

/* Save thread th's machine context so the GC can scan it:
 * spill callee-saved registers into th->ec->machine.regs via setjmp,
 * then record the current stack pointer in th->ec->machine.stack_end.
 * FLUSH_REGISTER_WINDOWS handles register-window architectures first. */
#define RB_GC_SAVE_MACHINE_CONTEXT(th)                  \
    do {                                                \
        FLUSH_REGISTER_WINDOWS;                         \
        setjmp((th)->ec->machine.regs);                 \
        SET_MACHINE_STACK_END(&(th)->ec->machine.stack_end); \
    } while (0)

/* for GC debug */

/* RUBY_MARK_FREE_DEBUG enables verbose tracing of GC mark/free phases
 * (see the trace macros below); disabled unless defined by the build. */
#ifndef RUBY_MARK_FREE_DEBUG
#define RUBY_MARK_FREE_DEBUG 0
#endif

#if RUBY_MARK_FREE_DEBUG
/* Current nesting depth of the mark/free trace output, in columns. */
extern int ruby_gc_debug_indent;

static inline void
|
2006-12-31 18:02:22 +03:00
|
|
|
rb_gc_debug_indent(void)
|
|
|
|
{
|
2021-09-09 17:21:06 +03:00
|
|
|
ruby_debug_printf("%*s", ruby_gc_debug_indent, "");
|
2006-12-31 18:02:22 +03:00
|
|
|
}
|
|
|
|
|
2010-08-14 10:11:03 +04:00
|
|
|
static inline void
|
2009-10-16 08:40:11 +04:00
|
|
|
rb_gc_debug_body(const char *mode, const char *msg, int st, void *ptr)
|
2006-12-31 18:02:22 +03:00
|
|
|
{
|
|
|
|
if (st == 0) {
|
2007-06-25 06:44:20 +04:00
|
|
|
ruby_gc_debug_indent--;
|
2006-12-31 18:02:22 +03:00
|
|
|
}
|
|
|
|
rb_gc_debug_indent();
|
2021-09-09 17:21:06 +03:00
|
|
|
ruby_debug_printf("%s: %s %s (%p)\n", mode, st ? "->" : "<-", msg, ptr);
|
2007-06-25 06:44:20 +04:00
|
|
|
|
2006-12-31 18:02:22 +03:00
|
|
|
if (st) {
|
2007-06-25 06:44:20 +04:00
|
|
|
ruby_gc_debug_indent++;
|
2006-12-31 18:02:22 +03:00
|
|
|
}
|
2007-06-25 06:44:20 +04:00
|
|
|
|
2006-12-31 18:02:22 +03:00
|
|
|
fflush(stdout);
|
|
|
|
}
|
|
|
|
|
2011-01-18 17:05:20 +03:00
|
|
|
/* Mark/free trace hooks.  They reference a variable named `ptr` that must
 * exist in the caller's scope (the object being marked/freed). */
#define RUBY_MARK_ENTER(msg) rb_gc_debug_body("mark", (msg), 1, ptr)
#define RUBY_MARK_LEAVE(msg) rb_gc_debug_body("mark", (msg), 0, ptr)
#define RUBY_FREE_ENTER(msg) rb_gc_debug_body("free", (msg), 1, ptr)
#define RUBY_FREE_LEAVE(msg) rb_gc_debug_body("free", (msg), 0, ptr)
/* Indented printf-style trace: RUBY_GC_INFO("fmt", ...) */
#define RUBY_GC_INFO rb_gc_debug_indent(), ruby_debug_printf

#else
/* Tracing disabled: all hooks compile away to nothing. */
#define RUBY_MARK_ENTER(msg)
#define RUBY_MARK_LEAVE(msg)
#define RUBY_FREE_ENTER(msg)
#define RUBY_FREE_LEAVE(msg)
/* `if(0)printf` keeps the arguments type-checked but never evaluated. */
#define RUBY_GC_INFO if(0)printf
#endif

/* Mark helpers that skip false/NULL VALUEs.  The argument is captured in
 * a local so it is evaluated exactly once.  The _MOVABLE variant permits
 * the object to be relocated by compaction; the plain variant pins it. */
#define RUBY_MARK_MOVABLE_UNLESS_NULL(ptr) do { \
    VALUE markobj = (ptr); \
    if (RTEST(markobj)) {rb_gc_mark_movable(markobj);} \
} while (0)
#define RUBY_MARK_UNLESS_NULL(ptr) do { \
    VALUE markobj = (ptr); \
    if (RTEST(markobj)) {rb_gc_mark(markobj);} \
} while (0)
/* Free and NULL-out a C pointer.  NOTE: `ptr` is evaluated more than
 * once — do not pass an expression with side effects. */
#define RUBY_FREE_UNLESS_NULL(ptr) if(ptr){ruby_xfree(ptr);(ptr)=NULL;}

#if STACK_GROW_DIRECTION > 0
|
2011-01-18 17:05:20 +03:00
|
|
|
# define STACK_UPPER(x, a, b) (a)
|
2008-06-14 06:59:19 +04:00
|
|
|
#elif STACK_GROW_DIRECTION < 0
|
2011-01-18 17:05:20 +03:00
|
|
|
# define STACK_UPPER(x, a, b) (b)
|
2008-06-14 06:59:19 +04:00
|
|
|
#else
|
|
|
|
RUBY_EXTERN int ruby_stack_grow_direction;
|
2010-04-14 11:29:04 +04:00
|
|
|
int ruby_get_stack_grow_direction(volatile VALUE *addr);
|
2008-06-14 06:59:19 +04:00
|
|
|
# define stack_growup_p(x) ( \
|
|
|
|
(ruby_stack_grow_direction ? \
|
|
|
|
ruby_stack_grow_direction : \
|
|
|
|
ruby_get_stack_grow_direction(x)) > 0)
|
2011-01-18 17:05:20 +03:00
|
|
|
# define STACK_UPPER(x, a, b) (stack_growup_p(x) ? (a) : (b))
|
2008-06-14 06:59:19 +04:00
|
|
|
#endif
|
|
|
|
|
2019-06-03 12:35:03 +03:00
|
|
|
/*
  STACK_GROW_DIR_DETECTION is used with STACK_DIR_UPPER.

  On most normal systems, stacks grow from high address to lower address. In
  this case, STACK_DIR_UPPER(a, b) will return (b), but on exotic systems where
  the stack grows UP (from low address to high address), it will return (a).
*/

#if STACK_GROW_DIRECTION
/* Direction known at compile time: no probe variable needed. */
#define STACK_GROW_DIR_DETECTION
#define STACK_DIR_UPPER(a,b) STACK_UPPER(0, (a), (b))
#else
/* Runtime detection needs a stack-resident local whose address seeds
 * the probe; declare it with STACK_GROW_DIR_DETECTION in the caller. */
#define STACK_GROW_DIR_DETECTION VALUE stack_grow_dir_detection
#define STACK_DIR_UPPER(a,b) STACK_UPPER(&stack_grow_dir_detection, (a), (b))
#endif
#define IS_STACK_DIR_UPPER() STACK_DIR_UPPER(1,0)

/* Debugging helpers returning a human-readable description of obj.
 * rb_raw_obj_info writes into the caller-supplied buff (buff_size bytes)
 * and returns it. */
const char *rb_obj_info(VALUE obj);
const char *rb_raw_obj_info(char *buff, const int buff_size, VALUE obj);

/* NOTE(review): presumably disables GC without first completing pending
 * ("rest") GC work — confirm against the definition in gc.c. */
VALUE rb_gc_disable_no_rest(void);

struct rb_thread_struct;

RUBY_SYMBOL_EXPORT_BEGIN
|
2010-08-14 10:11:03 +04:00
|
|
|
|
2012-10-05 12:14:09 +04:00
|
|
|
/* exports for objspace module */
|
2010-08-14 10:11:03 +04:00
|
|
|
size_t rb_objspace_data_type_memsize(VALUE obj);
|
2012-10-24 04:04:56 +04:00
|
|
|
void rb_objspace_reachable_objects_from(VALUE obj, void (func)(VALUE, void *), void *data);
|
2013-10-15 14:22:33 +04:00
|
|
|
void rb_objspace_reachable_objects_from_root(void (func)(const char *category, VALUE, void *), void *data);
|
2012-10-24 04:04:56 +04:00
|
|
|
int rb_objspace_markable_object_p(VALUE obj);
|
|
|
|
int rb_objspace_internal_object_p(VALUE obj);
|
2014-02-08 11:03:43 +04:00
|
|
|
int rb_objspace_marked_object_p(VALUE obj);
|
2014-07-06 19:42:14 +04:00
|
|
|
int rb_objspace_garbage_object_p(VALUE obj);
|
2012-10-05 12:14:09 +04:00
|
|
|
|
2010-08-14 10:11:03 +04:00
|
|
|
void rb_objspace_each_objects(
|
|
|
|
int (*callback)(void *start, void *end, size_t stride, void *data),
|
|
|
|
void *data);
|
|
|
|
|
2014-04-02 15:59:50 +04:00
|
|
|
void rb_objspace_each_objects_without_setup(
|
|
|
|
int (*callback)(void *, void *, size_t, void *),
|
|
|
|
void *data);
|
|
|
|
|
2013-04-05 14:29:38 +04:00
|
|
|
RUBY_SYMBOL_EXPORT_END
|
2010-08-14 10:11:03 +04:00
|
|
|
|
2008-06-14 06:59:19 +04:00
|
|
|
#endif /* RUBY_GC_H */