Move responsibility of heap walking into Ruby

This commit removes the need for the GC implementation to implement heap
walking; instead, Ruby itself now implements it.
Peter Zhu 2024-08-30 13:05:24 -04:00
Parent a2243ee48b
Commit 3c63a01295
6 changed files with 160 additions and 172 deletions
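
The core of the change is in gc.c: the marking entry points no longer take an objspace argument and instead go through the new RB_GC_MARK_OR_TRAVERSE macro, which either hands the object to the GC implementation (rb_gc_impl_mark and friends) or, when a traversal callback has been installed in vm->gc.mark_func_data (as rb_objspace_reachable_objects_from now does), invokes that callback. Below is a simplified, function-style sketch of that dispatch; the function name is illustrative, and the check_obj/rb_gc_impl_pointer_to_heap_p handling and assertions of the real macro are omitted.

/* Simplified rendering of RB_GC_MARK_OR_TRAVERSE(rb_gc_impl_mark, obj, obj, false)
 * from the diff below; illustrative only, not the literal macro. */
static void
mark_or_traverse(VALUE obj)
{
    if (RB_SPECIAL_CONST_P(obj)) return;

    rb_vm_t *vm = GET_VM();
    if (LIKELY(vm->gc.mark_func_data == NULL)) {
        /* Normal marking: defer to the GC implementation. */
        rb_gc_impl_mark(vm->gc.objspace, obj);
    }
    else {
        /* Heap walking: call the installed callback instead of marking,
         * clearing mark_func_data around the call to avoid re-entry. */
        struct gc_mark_func_data_struct *mark_func_data = vm->gc.mark_func_data;
        vm->gc.mark_func_data = NULL;
        mark_func_data->mark_func(obj, mark_func_data->data);
        vm->gc.mark_func_data = mark_func_data;
    }
}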

gc.c

@@ -187,7 +187,7 @@ rb_gc_event_hook(VALUE obj, rb_event_flag_t event)
void *
rb_gc_get_objspace(void)
{
return GET_VM()->objspace;
return GET_VM()->gc.objspace;
}
void
@@ -337,7 +337,7 @@ void rb_vm_update_references(void *ptr);
#define unless_objspace(objspace) \
void *objspace; \
rb_vm_t *unless_objspace_vm = GET_VM(); \
if (unless_objspace_vm) objspace = unless_objspace_vm->objspace; \
if (unless_objspace_vm) objspace = unless_objspace_vm->gc.objspace; \
else /* return; or objspace will be warned uninitialized */
#define RMOVED(obj) ((struct RMoved *)(obj))
@@ -865,7 +865,7 @@ rb_objspace_alloc(void)
#endif
void *objspace = rb_gc_impl_objspace_alloc();
ruby_current_vm_ptr->objspace = objspace;
ruby_current_vm_ptr->gc.objspace = objspace;
rb_gc_impl_objspace_init(objspace);
@@ -2036,54 +2036,62 @@ ruby_stack_check(void)
/* ==================== Marking ==================== */
static inline void
gc_mark_internal(void *objspace, VALUE obj)
{
if (RB_SPECIAL_CONST_P(obj)) return;
#define RB_GC_MARK_OR_TRAVERSE(func, obj_or_ptr, obj, check_obj) do { \
if (!RB_SPECIAL_CONST_P(obj)) { \
rb_vm_t *vm = GET_VM(); \
if (LIKELY(vm->gc.mark_func_data == NULL)) { \
(func)(vm->gc.objspace, (obj_or_ptr)); \
} \
else if (check_obj ? rb_gc_impl_pointer_to_heap_p(vm->gc.objspace, (const void *)obj) : true) { \
GC_ASSERT(!rb_gc_impl_during_gc_p(vm->gc.objspace)); \
struct gc_mark_func_data_struct *mark_func_data = vm->gc.mark_func_data; \
vm->gc.mark_func_data = NULL; \
mark_func_data->mark_func((obj), mark_func_data->data); \
vm->gc.mark_func_data = mark_func_data; \
} \
} \
} while (0)
rb_gc_impl_mark(objspace, obj);
static inline void
gc_mark_internal(VALUE obj)
{
RB_GC_MARK_OR_TRAVERSE(rb_gc_impl_mark, obj, obj, false);
}
void
rb_gc_mark_movable(VALUE obj)
{
gc_mark_internal(rb_gc_get_objspace(), obj);
gc_mark_internal(obj);
}
void
rb_gc_mark_and_move(VALUE *ptr)
{
if (SPECIAL_CONST_P(*ptr)) return;
rb_gc_impl_mark_and_move(rb_gc_get_objspace(), ptr);
RB_GC_MARK_OR_TRAVERSE(rb_gc_impl_mark_and_move, ptr, *ptr, false);
}
static inline void
gc_mark_and_pin_internal(void *objspace, VALUE obj)
gc_mark_and_pin_internal(VALUE obj)
{
if (RB_SPECIAL_CONST_P(obj)) return;
rb_gc_impl_mark_and_pin(objspace, obj);
RB_GC_MARK_OR_TRAVERSE(rb_gc_impl_mark_and_pin, obj, obj, false);
}
void
rb_gc_mark(VALUE obj)
{
gc_mark_and_pin_internal(rb_gc_get_objspace(), obj);
gc_mark_and_pin_internal(obj);
}
static inline void
gc_mark_maybe_internal(void *objspace, VALUE obj)
gc_mark_maybe_internal(VALUE obj)
{
if (RB_SPECIAL_CONST_P(obj)) return;
rb_gc_impl_mark_maybe(objspace, obj);
RB_GC_MARK_OR_TRAVERSE(rb_gc_impl_mark_maybe, obj, obj, true);
}
void
rb_gc_mark_maybe(VALUE obj)
{
gc_mark_maybe_internal(rb_gc_get_objspace(), obj);
gc_mark_maybe_internal(obj);
}
void
@@ -2098,36 +2106,42 @@ rb_gc_remove_weak(VALUE parent_obj, VALUE *ptr)
rb_gc_impl_remove_weak(rb_gc_get_objspace(), parent_obj, ptr);
}
ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS(static void each_location(register const VALUE *x, register long n, void (*cb)(void *data, VALUE), void *data));
ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS(static void each_location(register const VALUE *x, register long n, void (*cb)(VALUE, void *), void *data));
static void
each_location(register const VALUE *x, register long n, void (*cb)(void *data, VALUE obj), void *data)
each_location(register const VALUE *x, register long n, void (*cb)(VALUE, void *), void *data)
{
VALUE v;
while (n--) {
v = *x;
cb(data, v);
cb(v, data);
x++;
}
}
static void
each_location_ptr(const VALUE *start, const VALUE *end, void (*cb)(void *, VALUE), void *data)
each_location_ptr(const VALUE *start, const VALUE *end, void (*cb)(VALUE, void *), void *data)
{
if (end <= start) return;
each_location(start, end - start, cb, data);
}
static void
gc_mark_maybe_each_location(VALUE obj, void *data)
{
gc_mark_maybe_internal(obj);
}
void
rb_gc_mark_locations(const VALUE *start, const VALUE *end)
{
each_location_ptr(start, end, rb_gc_impl_mark_maybe, rb_gc_get_objspace());
each_location_ptr(start, end, gc_mark_maybe_each_location, NULL);
}
void
rb_gc_mark_values(long n, const VALUE *values)
{
for (long i = 0; i < n; i++) {
gc_mark_internal(rb_gc_get_objspace(), values[i]);
gc_mark_internal(values[i]);
}
}
@@ -2135,16 +2149,14 @@ void
rb_gc_mark_vm_stack_values(long n, const VALUE *values)
{
for (long i = 0; i < n; i++) {
gc_mark_and_pin_internal(rb_gc_get_objspace(), values[i]);
gc_mark_and_pin_internal(values[i]);
}
}
static int
mark_key(st_data_t key, st_data_t value, st_data_t data)
{
void *objspace = (void *)data;
gc_mark_and_pin_internal(objspace, (VALUE)key);
gc_mark_and_pin_internal((VALUE)key);
return ST_CONTINUE;
}
@@ -2160,10 +2172,8 @@ rb_mark_set(st_table *tbl)
static int
mark_keyvalue(st_data_t key, st_data_t value, st_data_t data)
{
void *objspace = (void *)data;
gc_mark_internal(objspace, (VALUE)key);
gc_mark_internal(objspace, (VALUE)value);
gc_mark_internal((VALUE)key);
gc_mark_internal((VALUE)value);
return ST_CONTINUE;
}
@@ -2171,10 +2181,8 @@ mark_keyvalue(st_data_t key, st_data_t value, st_data_t data)
static int
pin_key_pin_value(st_data_t key, st_data_t value, st_data_t data)
{
void *objspace = (void *)data;
gc_mark_and_pin_internal(objspace, (VALUE)key);
gc_mark_and_pin_internal(objspace, (VALUE)value);
gc_mark_and_pin_internal((VALUE)key);
gc_mark_and_pin_internal((VALUE)value);
return ST_CONTINUE;
}
@@ -2182,25 +2190,23 @@ pin_key_pin_value(st_data_t key, st_data_t value, st_data_t data)
static int
pin_key_mark_value(st_data_t key, st_data_t value, st_data_t data)
{
void *objspace = (void *)data;
gc_mark_and_pin_internal(objspace, (VALUE)key);
gc_mark_internal(objspace, (VALUE)value);
gc_mark_and_pin_internal((VALUE)key);
gc_mark_internal((VALUE)value);
return ST_CONTINUE;
}
static void
mark_hash(void *objspace, VALUE hash)
mark_hash(VALUE hash)
{
if (rb_hash_compare_by_id_p(hash)) {
rb_hash_stlike_foreach(hash, pin_key_mark_value, (st_data_t)objspace);
rb_hash_stlike_foreach(hash, pin_key_mark_value, 0);
}
else {
rb_hash_stlike_foreach(hash, mark_keyvalue, (st_data_t)objspace);
rb_hash_stlike_foreach(hash, mark_keyvalue, 0);
}
gc_mark_internal(objspace, RHASH(hash)->ifnone);
gc_mark_internal(RHASH(hash)->ifnone);
}
void
@@ -2208,13 +2214,13 @@ rb_mark_hash(st_table *tbl)
{
if (!tbl) return;
st_foreach(tbl, pin_key_pin_value, (st_data_t)rb_gc_get_objspace());
st_foreach(tbl, pin_key_pin_value, 0);
}
static enum rb_id_table_iterator_result
mark_method_entry_i(VALUE me, void *objspace)
{
gc_mark_internal(objspace, me);
gc_mark_internal(me);
return ID_TABLE_CONTINUE;
}
@@ -2237,22 +2243,13 @@ mark_m_tbl(void *objspace, struct rb_id_table *tbl)
((start) = STACK_END, (end) = STACK_START) : ((start) = STACK_START, (end) = STACK_END+(appendix)))
#endif
struct mark_machine_stack_location_maybe_data {
void *objspace;
#ifdef RUBY_ASAN_ENABLED
const rb_execution_context_t *ec;
#endif
};
static void
gc_mark_machine_stack_location_maybe(void *data, VALUE obj)
gc_mark_machine_stack_location_maybe(VALUE obj, void *data)
{
void *objspace = ((struct mark_machine_stack_location_maybe_data *)data)->objspace;
gc_mark_maybe_internal(objspace, obj);
gc_mark_maybe_internal(obj);
#ifdef RUBY_ASAN_ENABLED
const rb_execution_context_t *ec = ((struct mark_machine_stack_location_maybe_data *)data)->ec;
const rb_execution_context_t *ec = (const rb_execution_context_t *)data;
void *fake_frame_start;
void *fake_frame_end;
bool is_fake_frame = asan_get_fake_stack_extents(
@@ -2261,7 +2258,7 @@ gc_mark_machine_stack_location_maybe(void *data, VALUE obj)
&fake_frame_start, &fake_frame_end
);
if (is_fake_frame) {
each_location_ptr(fake_frame_start, fake_frame_end, rb_gc_impl_mark_maybe, objspace);
each_location_ptr(fake_frame_start, fake_frame_end, gc_mark_maybe_each_location, NULL);
}
#endif
}
@@ -2281,26 +2278,26 @@ rb_mark_locations(void *begin, void *end)
# if defined(__EMSCRIPTEN__)
static void
mark_current_machine_context(void *objspace, rb_execution_context_t *ec)
mark_current_machine_context(rb_execution_context_t *ec)
{
emscripten_scan_stack(rb_mark_locations);
each_location_ptr(rb_stack_range_tmp[0], rb_stack_range_tmp[1], rb_gc_impl_mark_maybe, objspace);
each_location_ptr(rb_stack_range_tmp[0], rb_stack_range_tmp[1], gc_mark_maybe_each_location, NULL);
emscripten_scan_registers(rb_mark_locations);
each_location_ptr(rb_stack_range_tmp[0], rb_stack_range_tmp[1], rb_gc_impl_mark_maybe, objspace);
each_location_ptr(rb_stack_range_tmp[0], rb_stack_range_tmp[1], gc_mark_maybe_each_location, NULL);
}
# else // use Asyncify version
static void
mark_current_machine_context(void *objspace, rb_execution_context_t *ec)
mark_current_machine_context(rb_execution_context_t *ec)
{
VALUE *stack_start, *stack_end;
SET_STACK_END;
GET_STACK_BOUNDS(stack_start, stack_end, 1);
each_location_ptr(stack_start, stack_end, rb_gc_impl_mark_maybe, objspace);
each_location_ptr(stack_start, stack_end, gc_mark_maybe_each_location, NULL);
rb_wasm_scan_locals(rb_mark_locations);
each_location_ptr(rb_stack_range_tmp[0], rb_stack_range_tmp[1], rb_gc_impl_mark_maybe, objspace);
each_location_ptr(rb_stack_range_tmp[0], rb_stack_range_tmp[1], gc_mark_maybe_each_location, NULL);
}
# endif
@@ -2308,7 +2305,7 @@ mark_current_machine_context(void *objspace, rb_execution_context_t *ec)
#else // !defined(__wasm__)
static void
mark_current_machine_context(void *objspace, rb_execution_context_t *ec)
mark_current_machine_context(rb_execution_context_t *ec)
{
union {
rb_jmp_buf j;
@@ -2327,15 +2324,15 @@ mark_current_machine_context(void *objspace, rb_execution_context_t *ec)
SET_STACK_END;
GET_STACK_BOUNDS(stack_start, stack_end, 1);
struct mark_machine_stack_location_maybe_data data = {
.objspace = objspace,
void *data =
#ifdef RUBY_ASAN_ENABLED
.ec = ec
ec;
#else
NULL;
#endif
};
each_location(save_regs_gc_mark.v, numberof(save_regs_gc_mark.v), gc_mark_machine_stack_location_maybe, &data);
each_location_ptr(stack_start, stack_end, gc_mark_machine_stack_location_maybe, &data);
each_location(save_regs_gc_mark.v, numberof(save_regs_gc_mark.v), gc_mark_machine_stack_location_maybe, data);
each_location_ptr(stack_start, stack_end, gc_mark_machine_stack_location_maybe, data);
}
#endif
@@ -2347,24 +2344,22 @@ rb_gc_mark_machine_context(const rb_execution_context_t *ec)
GET_STACK_BOUNDS(stack_start, stack_end, 0);
RUBY_DEBUG_LOG("ec->th:%u stack_start:%p stack_end:%p", rb_ec_thread_ptr(ec)->serial, stack_start, stack_end);
struct mark_machine_stack_location_maybe_data data = {
.objspace = rb_gc_get_objspace(),
void *data =
#ifdef RUBY_ASAN_ENABLED
.ec = ec
ec;
#else
NULL;
#endif
};
each_location_ptr(stack_start, stack_end, gc_mark_machine_stack_location_maybe, &data);
each_location_ptr(stack_start, stack_end, gc_mark_machine_stack_location_maybe, data);
int num_regs = sizeof(ec->machine.regs)/(sizeof(VALUE));
each_location((VALUE*)&ec->machine.regs, num_regs, gc_mark_machine_stack_location_maybe, &data);
each_location((VALUE*)&ec->machine.regs, num_regs, gc_mark_machine_stack_location_maybe, data);
}
static int
rb_mark_tbl_i(st_data_t key, st_data_t value, st_data_t data)
{
void *objspace = (void *)data;
gc_mark_and_pin_internal(objspace, (VALUE)value);
gc_mark_and_pin_internal((VALUE)value);
return ST_CONTINUE;
}
@@ -2374,21 +2369,21 @@ rb_mark_tbl(st_table *tbl)
{
if (!tbl || tbl->num_entries == 0) return;
st_foreach(tbl, rb_mark_tbl_i, (st_data_t)rb_gc_get_objspace());
st_foreach(tbl, rb_mark_tbl_i, 0);
}
static void
gc_mark_tbl_no_pin(void *objspace, st_table *tbl)
gc_mark_tbl_no_pin(st_table *tbl)
{
if (!tbl || tbl->num_entries == 0) return;
st_foreach(tbl, gc_mark_tbl_no_pin_i, (st_data_t)objspace);
st_foreach(tbl, gc_mark_tbl_no_pin_i, 0);
}
void
rb_mark_tbl_no_pin(st_table *tbl)
{
gc_mark_tbl_no_pin(rb_gc_get_objspace(), tbl);
gc_mark_tbl_no_pin(tbl);
}
static enum rb_id_table_iterator_result
@@ -2399,7 +2394,7 @@ mark_cvc_tbl_i(VALUE cvc_entry, void *objspace)
entry = (struct rb_cvar_class_tbl_entry *)cvc_entry;
RUBY_ASSERT(entry->cref == 0 || (BUILTIN_TYPE((VALUE)entry->cref) == T_IMEMO && IMEMO_TYPE_P(entry->cref, imemo_cref)));
gc_mark_internal(objspace, (VALUE)entry->cref);
gc_mark_internal((VALUE)entry->cref);
return ID_TABLE_CONTINUE;
}
@@ -2424,8 +2419,8 @@ mark_const_table_i(VALUE value, void *objspace)
{
const rb_const_entry_t *ce = (const rb_const_entry_t *)value;
gc_mark_internal(objspace, ce->value);
gc_mark_internal(objspace, ce->file);
gc_mark_internal(ce->value);
gc_mark_internal(ce->file);
return ID_TABLE_CONTINUE;
}
@@ -2442,10 +2437,10 @@ rb_gc_mark_roots(void *objspace, const char **categoryp)
MARK_CHECKPOINT("vm");
rb_vm_mark(vm);
if (vm->self) gc_mark_internal(objspace, vm->self);
if (vm->self) gc_mark_internal(vm->self);
MARK_CHECKPOINT("machine_context");
mark_current_machine_context(objspace, ec);
mark_current_machine_context(ec);
MARK_CHECKPOINT("end_proc");
rb_mark_end_proc();
@@ -2500,28 +2495,28 @@ rb_gc_mark_children(void *objspace, VALUE obj)
break;
}
gc_mark_internal(objspace, RBASIC(obj)->klass);
gc_mark_internal(RBASIC(obj)->klass);
switch (BUILTIN_TYPE(obj)) {
case T_CLASS:
if (FL_TEST(obj, FL_SINGLETON)) {
gc_mark_internal(objspace, RCLASS_ATTACHED_OBJECT(obj));
gc_mark_internal(RCLASS_ATTACHED_OBJECT(obj));
}
// Continue to the shared T_CLASS/T_MODULE
case T_MODULE:
if (RCLASS_SUPER(obj)) {
gc_mark_internal(objspace, RCLASS_SUPER(obj));
gc_mark_internal(RCLASS_SUPER(obj));
}
mark_m_tbl(objspace, RCLASS_M_TBL(obj));
mark_cvc_tbl(objspace, obj);
rb_cc_table_mark(obj);
if (rb_shape_obj_too_complex(obj)) {
gc_mark_tbl_no_pin(objspace, (st_table *)RCLASS_IVPTR(obj));
gc_mark_tbl_no_pin((st_table *)RCLASS_IVPTR(obj));
}
else {
for (attr_index_t i = 0; i < RCLASS_IV_COUNT(obj); i++) {
gc_mark_internal(objspace, RCLASS_IVPTR(obj)[i]);
gc_mark_internal(RCLASS_IVPTR(obj)[i]);
}
}
@@ -2529,7 +2524,7 @@ rb_gc_mark_children(void *objspace, VALUE obj)
rb_id_table_foreach_values(RCLASS_CONST_TBL(obj), mark_const_table_i, objspace);
}
gc_mark_internal(objspace, RCLASS_EXT(obj)->classpath);
gc_mark_internal(RCLASS_EXT(obj)->classpath);
break;
case T_ICLASS:
@@ -2537,11 +2532,11 @@ rb_gc_mark_children(void *objspace, VALUE obj)
mark_m_tbl(objspace, RCLASS_M_TBL(obj));
}
if (RCLASS_SUPER(obj)) {
gc_mark_internal(objspace, RCLASS_SUPER(obj));
gc_mark_internal(RCLASS_SUPER(obj));
}
if (RCLASS_INCLUDER(obj)) {
gc_mark_internal(objspace, RCLASS_INCLUDER(obj));
gc_mark_internal(RCLASS_INCLUDER(obj));
}
mark_m_tbl(objspace, RCLASS_CALLABLE_M_TBL(obj));
rb_cc_table_mark(obj);
@@ -2550,19 +2545,19 @@ rb_gc_mark_children(void *objspace, VALUE obj)
case T_ARRAY:
if (ARY_SHARED_P(obj)) {
VALUE root = ARY_SHARED_ROOT(obj);
gc_mark_internal(objspace, root);
gc_mark_internal(root);
}
else {
long len = RARRAY_LEN(obj);
const VALUE *ptr = RARRAY_CONST_PTR(obj);
for (long i = 0; i < len; i++) {
gc_mark_internal(objspace, ptr[i]);
gc_mark_internal(ptr[i]);
}
}
break;
case T_HASH:
mark_hash(objspace, obj);
mark_hash(obj);
break;
case T_STRING:
@@ -2572,10 +2567,10 @@ rb_gc_mark_children(void *objspace, VALUE obj)
* points into the slot of the shared string. There may be code
* using the RSTRING_PTR on the stack, which would pin this
* string but not pin the shared string, causing it to move. */
gc_mark_and_pin_internal(objspace, RSTRING(obj)->as.heap.aux.shared);
gc_mark_and_pin_internal(RSTRING(obj)->as.heap.aux.shared);
}
else {
gc_mark_internal(objspace, RSTRING(obj)->as.heap.aux.shared);
gc_mark_internal(RSTRING(obj)->as.heap.aux.shared);
}
}
break;
@@ -2588,7 +2583,7 @@ rb_gc_mark_children(void *objspace, VALUE obj)
size_t *offset_list = (size_t *)RTYPEDDATA(obj)->type->function.dmark;
for (size_t offset = *offset_list; offset != RUBY_REF_END; offset = *offset_list++) {
gc_mark_internal(objspace, *(VALUE *)((char *)ptr + offset));
gc_mark_internal(*(VALUE *)((char *)ptr + offset));
}
}
else {
@@ -2606,14 +2601,14 @@ rb_gc_mark_children(void *objspace, VALUE obj)
rb_shape_t *shape = rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj));
if (rb_shape_obj_too_complex(obj)) {
gc_mark_tbl_no_pin(objspace, ROBJECT_IV_HASH(obj));
gc_mark_tbl_no_pin(ROBJECT_IV_HASH(obj));
}
else {
const VALUE * const ptr = ROBJECT_IVPTR(obj);
uint32_t len = ROBJECT_IV_COUNT(obj);
for (uint32_t i = 0; i < len; i++) {
gc_mark_internal(objspace, ptr[i]);
gc_mark_internal(ptr[i]);
}
}
@@ -2632,36 +2627,36 @@ rb_gc_mark_children(void *objspace, VALUE obj)
case T_FILE:
if (RFILE(obj)->fptr) {
gc_mark_internal(objspace, RFILE(obj)->fptr->self);
gc_mark_internal(objspace, RFILE(obj)->fptr->pathv);
gc_mark_internal(objspace, RFILE(obj)->fptr->tied_io_for_writing);
gc_mark_internal(objspace, RFILE(obj)->fptr->writeconv_asciicompat);
gc_mark_internal(objspace, RFILE(obj)->fptr->writeconv_pre_ecopts);
gc_mark_internal(objspace, RFILE(obj)->fptr->encs.ecopts);
gc_mark_internal(objspace, RFILE(obj)->fptr->write_lock);
gc_mark_internal(objspace, RFILE(obj)->fptr->timeout);
gc_mark_internal(RFILE(obj)->fptr->self);
gc_mark_internal(RFILE(obj)->fptr->pathv);
gc_mark_internal(RFILE(obj)->fptr->tied_io_for_writing);
gc_mark_internal(RFILE(obj)->fptr->writeconv_asciicompat);
gc_mark_internal(RFILE(obj)->fptr->writeconv_pre_ecopts);
gc_mark_internal(RFILE(obj)->fptr->encs.ecopts);
gc_mark_internal(RFILE(obj)->fptr->write_lock);
gc_mark_internal(RFILE(obj)->fptr->timeout);
}
break;
case T_REGEXP:
gc_mark_internal(objspace, RREGEXP(obj)->src);
gc_mark_internal(RREGEXP(obj)->src);
break;
case T_MATCH:
gc_mark_internal(objspace, RMATCH(obj)->regexp);
gc_mark_internal(RMATCH(obj)->regexp);
if (RMATCH(obj)->str) {
gc_mark_internal(objspace, RMATCH(obj)->str);
gc_mark_internal(RMATCH(obj)->str);
}
break;
case T_RATIONAL:
gc_mark_internal(objspace, RRATIONAL(obj)->num);
gc_mark_internal(objspace, RRATIONAL(obj)->den);
gc_mark_internal(RRATIONAL(obj)->num);
gc_mark_internal(RRATIONAL(obj)->den);
break;
case T_COMPLEX:
gc_mark_internal(objspace, RCOMPLEX(obj)->real);
gc_mark_internal(objspace, RCOMPLEX(obj)->imag);
gc_mark_internal(RCOMPLEX(obj)->real);
gc_mark_internal(RCOMPLEX(obj)->imag);
break;
case T_STRUCT: {
@@ -2669,7 +2664,7 @@ rb_gc_mark_children(void *objspace, VALUE obj)
const VALUE * const ptr = RSTRUCT_CONST_PTR(obj);
for (long i = 0; i < len; i++) {
gc_mark_internal(objspace, ptr[i]);
gc_mark_internal(ptr[i]);
}
break;
@@ -3523,13 +3518,6 @@ ruby_gc_set_params(void)
rb_gc_impl_set_params(rb_gc_get_objspace());
}
void
rb_gc_reachable_objects_from_callback(VALUE obj)
{
rb_ractor_t *cr = GET_RACTOR();
cr->mfd->mark_func(obj, cr->mfd->data);
}
void
rb_objspace_reachable_objects_from(VALUE obj, void (func)(VALUE, void *), void *data)
{
@@ -3538,15 +3526,16 @@ rb_objspace_reachable_objects_from(VALUE obj, void (func)(VALUE, void *), void *
if (rb_gc_impl_during_gc_p(rb_gc_get_objspace())) rb_bug("rb_objspace_reachable_objects_from() is not supported while during GC");
if (!RB_SPECIAL_CONST_P(obj)) {
rb_ractor_t *cr = GET_RACTOR();
rb_vm_t *vm = GET_VM();
struct gc_mark_func_data_struct *prev_mfd = vm->gc.mark_func_data;
struct gc_mark_func_data_struct mfd = {
.mark_func = func,
.data = data,
}, *prev_mfd = cr->mfd;
};
cr->mfd = &mfd;
vm->gc.mark_func_data = &mfd;
rb_gc_mark_children(rb_gc_get_objspace(), obj);
cr->mfd = prev_mfd;
vm->gc.mark_func_data = prev_mfd;
}
}
RB_VM_LOCK_LEAVE();
@@ -3570,19 +3559,22 @@ rb_objspace_reachable_objects_from_root(void (func)(const char *category, VALUE,
{
if (rb_gc_impl_during_gc_p(rb_gc_get_objspace())) rb_bug("rb_gc_impl_objspace_reachable_objects_from_root() is not supported while during GC");
rb_ractor_t *cr = GET_RACTOR();
rb_vm_t *vm = GET_VM();
struct root_objects_data data = {
.func = func,
.data = passing_data,
};
struct gc_mark_func_data_struct *prev_mfd = vm->gc.mark_func_data;
struct gc_mark_func_data_struct mfd = {
.mark_func = root_objects_from,
.data = &data,
}, *prev_mfd = cr->mfd;
};
cr->mfd = &mfd;
vm->gc.mark_func_data = &mfd;
rb_gc_mark_roots(rb_gc_get_objspace(), &data.category);
cr->mfd = prev_mfd;
vm->gc.mark_func_data = prev_mfd;
}
/*

gc/default.c

@@ -4650,32 +4650,29 @@ gc_grey(rb_objspace_t *objspace, VALUE obj)
static void
gc_mark(rb_objspace_t *objspace, VALUE obj)
{
if (RB_LIKELY(during_gc)) {
rgengc_check_relation(objspace, obj);
if (!gc_mark_set(objspace, obj)) return; /* already marked */
GC_ASSERT(during_gc);
if (0) { // for debug GC marking miss
if (objspace->rgengc.parent_object) {
RUBY_DEBUG_LOG("%p (%s) parent:%p (%s)",
(void *)obj, obj_type_name(obj),
(void *)objspace->rgengc.parent_object, obj_type_name(objspace->rgengc.parent_object));
}
else {
RUBY_DEBUG_LOG("%p (%s)", (void *)obj, obj_type_name(obj));
}
rgengc_check_relation(objspace, obj);
if (!gc_mark_set(objspace, obj)) return; /* already marked */
if (0) { // for debug GC marking miss
if (objspace->rgengc.parent_object) {
RUBY_DEBUG_LOG("%p (%s) parent:%p (%s)",
(void *)obj, obj_type_name(obj),
(void *)objspace->rgengc.parent_object, obj_type_name(objspace->rgengc.parent_object));
}
if (RB_UNLIKELY(RB_TYPE_P(obj, T_NONE))) {
rb_obj_info_dump(obj);
rb_bug("try to mark T_NONE object"); /* check here will help debugging */
else {
RUBY_DEBUG_LOG("%p (%s)", (void *)obj, obj_type_name(obj));
}
}
gc_aging(objspace, obj);
gc_grey(objspace, obj);
}
else {
rb_gc_reachable_objects_from_callback(obj);
if (RB_UNLIKELY(RB_TYPE_P(obj, T_NONE))) {
rb_obj_info_dump(obj);
rb_bug("try to mark T_NONE object"); /* check here will help debugging */
}
gc_aging(objspace, obj);
gc_grey(objspace, obj);
}
static inline void

gc/gc.h

@@ -23,7 +23,6 @@ size_t rb_gc_obj_optimal_size(VALUE obj);
void rb_gc_mark_children(void *objspace, VALUE obj);
void rb_gc_update_object_references(void *objspace, VALUE obj);
void rb_gc_update_vm_references(void *objspace);
void rb_gc_reachable_objects_from_callback(VALUE obj);
void rb_gc_event_hook(VALUE obj, rb_event_flag_t event);
void *rb_gc_get_objspace(void);
size_t rb_size_mul_or_raise(size_t x, size_t y, VALUE exc);

ractor_core.h

@@ -188,12 +188,6 @@ struct rb_ractor_struct {
VALUE debug;
void *newobj_cache;
// gc.c rb_objspace_reachable_objects_from
struct gc_mark_func_data_struct {
void *data;
void (*mark_func)(VALUE v, void *data);
} *mfd;
}; // rb_ractor_t is defined in vm_core.h

vm.c

@@ -3099,7 +3099,7 @@ ruby_vm_destruct(rb_vm_t *vm)
}
}
struct rb_objspace *objspace = vm->objspace;
struct rb_objspace *objspace = vm->gc.objspace;
rb_vm_living_threads_init(vm);
ruby_vm_run_at_exit_hooks(vm);
@@ -4209,7 +4209,7 @@ Init_VM(void)
rb_define_global_const("TOPLEVEL_BINDING", rb_binding_new());
#ifdef _WIN32
rb_objspace_gc_enable(vm->objspace);
rb_objspace_gc_enable(vm->gc.objspace);
#endif
}
vm_init_redefined_flag();

vm_core.h

@@ -748,7 +748,13 @@ typedef struct rb_vm_struct {
VALUE coverages, me2counter;
int coverage_mode;
struct rb_objspace *objspace;
struct {
struct rb_objspace *objspace;
struct gc_mark_func_data_struct {
void *data;
void (*mark_func)(VALUE v, void *data);
} *mark_func_data;
} gc;
rb_at_exit_list *at_exit;
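
For context, a caller of rb_objspace_reachable_objects_from (which, after this commit, installs its callback as vm->gc.mark_func_data and then calls rb_gc_mark_children) exercises the new path roughly as follows. The counting callback below is a hypothetical example, not code from this commit.

/* Hypothetical example: count the objects directly reachable from obj.
 * Every child that rb_gc_mark_children() would mark is routed to the
 * callback via vm->gc.mark_func_data instead of the GC implementation. */
static void
count_reachable_i(VALUE child, void *data)
{
    size_t *count = (size_t *)data;
    (*count)++;
}

static size_t
count_reachable(VALUE obj)
{
    size_t count = 0;
    rb_objspace_reachable_objects_from(obj, count_reachable_i, &count);
    return count;
}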