YJIT: Mark and update stubs in invalidated blocks (#9104)

As in the example given in delayed_deallocation(), stubs can be hit
even if the block housing them is invalidated. Mark them so we don't
work with invalidated ISeqs when hitting these stubs.
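The idea behind the fix, in brief: invalidated blocks are kept reachable through a dead_blocks list on the iseq payload and are still walked during GC marking, but the object references embedded in their generated code are skipped, since a dead block never runs again. A rough, self-contained sketch of that shape (Payload, Block, mark_movable, and mark_object_at below are simplified stand-ins, not YJIT's actual types or GC hooks):

    // Simplified stand-ins for YJIT's IseqPayload and Block.
    struct Block {
        iseq: usize,              // stand-in for the owning ISEQ pointer
        gc_obj_offsets: Vec<u32>, // offsets of object references in generated code
    }

    struct Payload {
        live_blocks: Vec<Block>,
        // Invalidated blocks are retained here rather than freed right away,
        // because stubs may still return into them.
        dead_blocks: Vec<Block>,
    }

    // Hypothetical GC hooks standing in for rb_gc_mark_movable and friends.
    fn mark_movable(_iseq: usize) {}
    fn mark_object_at(_offset: u32) {}

    // Mark a block's ISEQ; only live blocks also get the object references in
    // their generated code marked, since dead blocks will never execute again.
    fn mark_block(block: &Block, dead: bool) {
        mark_movable(block.iseq);
        if !dead {
            for &offset in &block.gc_obj_offsets {
                mark_object_at(offset);
            }
        }
    }

    fn mark_payload(payload: &Payload) {
        for block in &payload.live_blocks {
            mark_block(block, false);
        }
        // Dead blocks are marked too, so stubs pointing at them stay valid.
        for block in &payload.dead_blocks {
            mark_block(block, true);
        }
    }

    fn main() {
        let payload = Payload {
            live_blocks: vec![Block { iseq: 1, gc_obj_offsets: vec![0, 8] }],
            dead_blocks: vec![Block { iseq: 2, gc_obj_offsets: vec![16] }],
        };
        mark_payload(&payload);
    }

The same split shows up in the compaction path below: dead blocks still get their ISeq references updated so stubs can find the right code, but their generated code is left untouched.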
Alan Wu 2023-12-04 10:13:40 -05:00 committed by GitHub
Parent f40727f4aa
Commit b5a62eb9ab
2 changed files with 121 additions and 49 deletions

View file

@@ -1,3 +1,25 @@
# regression test for GC marking stubs in invalidated code
assert_normal_exit %q{
  garbage = Array.new(10_000) { [] } # create garbage to cause iseq movement
  eval(<<~RUBY)
    def foo(n, garbage)
      if n == 2
        # 1.times.each to create a cfunc frame to preserve the JIT frame
        # which will return to a stub housed in an invalidated block
        return 1.times.each do
          Object.define_method(:foo) {}
          garbage.clear
          GC.verify_compaction_references(toward: :empty, expand_heap: true)
        end
      end

      foo(n + 1, garbage)
    end
  RUBY

  foo(1, garbage)
}
# regression test for callee block handler overlapping with arguments
assert_equal '3', %q{
def foo(_req, *args) = args.last

View file

@@ -1174,7 +1174,19 @@ pub extern "C" fn rb_yjit_iseq_mark(payload: *mut c_void) {
for block in versions {
// SAFETY: all blocks inside version_map are initialized.
let block = unsafe { block.as_ref() };
mark_block(block, cb, false);
}
}
// Mark dead blocks, since there could be stubs pointing at them
for blockref in &payload.dead_blocks {
// SAFETY: dead blocks come from version_map, which only have initialized blocks
let block = unsafe { blockref.as_ref() };
mark_block(block, cb, true);
}
return;
fn mark_block(block: &Block, cb: &CodeBlock, dead: bool) {
unsafe { rb_gc_mark_movable(block.iseq.get().into()) };
// Mark method entry dependencies
@@ -1187,7 +1199,17 @@ pub extern "C" fn rb_yjit_iseq_mark(payload: *mut c_void) {
let branch = unsafe { branch.as_ref() };
for target in branch.targets.iter() {
// SAFETY: no mutation inside unsafe
let target_iseq = unsafe { target.ref_unchecked().as_ref().map(|target| target.get_blockid().iseq) };
let target_iseq = unsafe {
target.ref_unchecked().as_ref().and_then(|target| {
// Avoid get_blockid() on blockref. Can be dangling on dead blocks,
// and the iseq housing the block already naturally handles it.
if target.get_block().is_some() {
None
} else {
Some(target.get_blockid().iseq)
}
})
};
if let Some(target_iseq) = target_iseq {
unsafe { rb_gc_mark_movable(target_iseq.into()) };
@@ -1195,7 +1217,9 @@ pub extern "C" fn rb_yjit_iseq_mark(payload: *mut c_void) {
}
}
// Walk over references to objects in generated code.
// Mark references to objects in generated code.
// Skip for dead blocks since they shouldn't run.
if !dead {
for offset in block.gc_obj_offsets.iter() {
let value_address: *const u8 = cb.get_ptr(offset.as_usize()).raw_ptr(cb);
// Creating an unaligned pointer is well defined unlike in C.
@@ -1241,7 +1265,26 @@ pub extern "C" fn rb_yjit_iseq_update_references(payload: *mut c_void) {
for version in versions {
// SAFETY: all blocks inside version_map are initialized
let block = unsafe { version.as_ref() };
block_update_references(block, cb, false);
}
}
// Update dead blocks, since there could be stubs pointing at them
for blockref in &payload.dead_blocks {
// SAFETY: dead blocks come from version_map, which only have initialized blocks
let block = unsafe { blockref.as_ref() };
block_update_references(block, cb, true);
}
// Note that we would have returned already if YJIT is off.
cb.mark_all_executable();
CodegenGlobals::get_outlined_cb()
.unwrap()
.mark_all_executable();
return;
fn block_update_references(block: &Block, cb: &mut CodeBlock, dead: bool) {
block.iseq.set(unsafe { rb_gc_location(block.iseq.get().into()) }.as_iseq());
// Update method entry dependencies
@@ -1251,7 +1294,37 @@ pub extern "C" fn rb_yjit_iseq_update_references(payload: *mut c_void) {
cme_dep.set(new_cme);
}
// Walk over references to objects in generated code.
// Update outgoing branch entries
for branch in block.outgoing.iter() {
let branch = unsafe { branch.as_ref() };
for target in branch.targets.iter() {
// SAFETY: no mutation inside unsafe
let current_iseq = unsafe {
target.ref_unchecked().as_ref().and_then(|target| {
// Avoid get_blockid() on blockref. Can be dangling on dead blocks,
// and the iseq housing the block already naturally handles it.
if target.get_block().is_some() {
None
} else {
Some(target.get_blockid().iseq)
}
})
};
if let Some(current_iseq) = current_iseq {
let updated_iseq = unsafe { rb_gc_location(current_iseq.into()) }
.as_iseq();
// SAFETY: the Cell::set is not on the reference given out
// by ref_unchecked.
unsafe { target.ref_unchecked().as_ref().unwrap().set_iseq(updated_iseq) };
}
}
}
// Update references to objects in generated code.
// Skip for dead blocks since they shouldn't run and
// so there is no potential of writing over invalidation jumps
if !dead {
for offset in block.gc_obj_offsets.iter() {
let offset_to_value = offset.as_usize();
let value_code_ptr = cb.get_ptr(offset_to_value);
@@ -1272,32 +1345,9 @@ pub extern "C" fn rb_yjit_iseq_update_references(payload: *mut c_void) {
}
}
}
// Update outgoing branch entries
for branch in block.outgoing.iter() {
let branch = unsafe { branch.as_ref() };
for target in branch.targets.iter() {
// SAFETY: no mutation inside unsafe
let current_iseq = unsafe { target.ref_unchecked().as_ref().map(|target| target.get_blockid().iseq) };
if let Some(current_iseq) = current_iseq {
let updated_iseq = unsafe { rb_gc_location(current_iseq.into()) }
.as_iseq();
// SAFETY: the Cell::set is not on the reference given out
// by ref_unchecked.
unsafe { target.ref_unchecked().as_ref().unwrap().set_iseq(updated_iseq) };
}
}
}
}
}
// Note that we would have returned already if YJIT is off.
cb.mark_all_executable();
CodegenGlobals::get_outlined_cb()
.unwrap()
.mark_all_executable();
}
}
/// Get all blocks for a particular place in an iseq.
@@ -3268,9 +3318,9 @@ pub fn invalidate_block_version(blockref: &BlockRef) {
// invalidated branch pointers. Example:
// def foo(n)
// if n == 2
// # 1.times{} to use a cfunc to avoid exiting from the
// # frame which will use the retained return address
// return 1.times { Object.define_method(:foo) {} }
// # 1.times.each to create a cfunc frame to preserve the JIT frame
// # which will return to a stub housed in an invalidated block
// return 1.times.each { Object.define_method(:foo) {} }
// end
//
// foo(n + 1)