Always enable read barrier even on GC.compact

Some objects can survive the GC that runs before compaction, but then
get collected in the second, compacting GC.  This means we could have
objects that reference T_MOVED slots while they are being freed during
the compacting GC.  If that is the case, we need to invalidate those
"moved" addresses.  Invalidation is done via the read barrier, so we
need to make sure the read barrier is active even during `GC.compact`.
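
For context, the read barrier here is implemented by write-protecting heap
pages and catching the resulting faults.  Below is a minimal, self-contained
sketch of that technique, assuming POSIX mprotect/sigaction; the names and
the fix-up step are illustrative stand-ins, not Ruby's actual
install_handlers()/uninstall_handlers() code.

#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

static char  *page;       /* stand-in for a protected heap page */
static size_t page_size;

/* Read barrier: any access to the protected page traps here.  A real GC
 * would fix up moved references on the page at this point; we just drop
 * the protection so the faulting access can be retried. */
static void
barrier_handler(int sig, siginfo_t *info, void *ctx)
{
    (void)sig; (void)ctx;
    char *addr = (char *)info->si_addr;
    if (addr < page || addr >= page + page_size) {
        abort();  /* a genuine crash, not our barrier */
    }
    /* ...invalidate "moved" addresses on this page here... */
    mprotect(page, page_size, PROT_READ | PROT_WRITE);
}

int
main(void)
{
    page_size = (size_t)sysconf(_SC_PAGESIZE);
    page = mmap(NULL, page_size, PROT_READ | PROT_WRITE,
                MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (page == MAP_FAILED) return 1;
    strcpy(page, "possibly-moved object");

    /* install_handlers() analogue: route SIGSEGV to the barrier */
    struct sigaction sa;
    memset(&sa, 0, sizeof(sa));
    sa.sa_sigaction = barrier_handler;
    sa.sa_flags = SA_SIGINFO;
    sigaction(SIGSEGV, &sa, NULL);

    mprotect(page, page_size, PROT_NONE);  /* arm the barrier */
    printf("%s\n", page);  /* faults once, handler disarms, read succeeds */
    return 0;
}

In gc.c the real handler plays the same role: it unprotects the faulting
page and invalidates the T_MOVED objects it contains.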

This also means we don't actually need to run a separate GC before
compacting; we can do the GC and the compaction in a single step.
Aaron Patterson 2021-01-21 09:52:56 -08:00
Parent a3efbda712
Commit 0130e17a41
GPG key ID: 953170BCB4FFAFC6
1 changed file with 3 additions and 17 deletions

gc.c

@@ -4761,12 +4761,8 @@ gc_compact_finish(rb_objspace_t *objspace, rb_heap_t *heap)
 {
     GC_ASSERT(heap->sweeping_page == heap->compact_cursor);
 
-    /* If this is an explicit compaction (GC.compact), no read barrier was set
-     * so we don't need to unprotect pages or uninstall the SEGV handler */
-    if (!(objspace->flags.during_compacting >> 1)) {
-        gc_unprotect_pages(objspace, heap);
-        uninstall_handlers();
-    }
+    gc_unprotect_pages(objspace, heap);
+    uninstall_handlers();
 
     /* The mutator is allowed to run during incremental sweeping. T_MOVED
      * objects can get pushed on the stack and when the compaction process
@@ -5306,12 +5302,6 @@ gc_compact_start(rb_objspace_t *objspace, rb_heap_t *heap)
     memset(objspace->rcompactor.considered_count_table, 0, T_MASK * sizeof(size_t));
     memset(objspace->rcompactor.moved_count_table, 0, T_MASK * sizeof(size_t));
 
-    /* If this is an explicit compaction (GC.compact), we don't need a read
-     * barrier, so just return early. */
-    if (objspace->flags.during_compacting >> 1) {
-        return;
-    }
-
     /* Set up read barrier for pages containing MOVED objects */
     install_handlers();
 }
@@ -9487,11 +9477,7 @@ heap_check_moved_i(void *vstart, void *vend, size_t stride, void *data)
 static VALUE
 gc_compact(rb_execution_context_t *ec, VALUE self)
 {
-    /* Clear the heap. */
-    gc_start_internal(ec, self, Qtrue, Qtrue, Qtrue, Qfalse);
-
-    /* At this point, all references are live and the mutator is not allowed
-     * to run, so we don't need a read barrier. */
+    /* Run GC with compaction enabled */
     gc_start_internal(ec, self, Qtrue, Qtrue, Qtrue, Qtrue);
 
     return gc_compact_stats(ec, self);
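
The deleted branches above both keyed off `objspace->flags.during_compacting >> 1`,
i.e. the high bit of the flag, which marked an explicit `GC.compact`; with the
read barrier now installed unconditionally, that bit no longer gates anything
here.  A toy illustration of the bit layout those checks imply (the two-bit
field and the exact meaning of each bit are assumptions inferred from the
removed code, not taken from gc.c):

#include <stdio.h>

/* Hypothetical stand-in for the relevant bits of objspace->flags. */
struct flags {
    unsigned int during_compacting : 2;  /* assumed: bit 0 = compacting,
                                          * bit 1 = explicit GC.compact */
};

int
main(void)
{
    struct flags f = { .during_compacting = 3 };  /* explicit compaction */

    printf("compacting? %u\n", f.during_compacting & 1);   /* -> 1 */
    printf("explicit?   %u\n", f.during_compacting >> 1);  /* -> 1, the removed test */
    return 0;
}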