Mirror of https://github.com/github/ruby.git
YJIT: Invalidate redefined methods only through cme (#6734)
Co-authored-by: Alan Wu <alansi.xingwu@shopify.com>
Parent: 9751b54971
Commit: 1125274c4e
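In summary, as reflected in the diff below: YJIT stops tracking method-lookup stability per (class, method ID). The `method_lookup` table in `Invariants`, the `rb_yjit_method_lookup_change()` callback, and the `receiver_klass` field of `CmeDependency` are removed, and `clear_method_cache_by_id_in_class()` instead calls `rb_yjit_cme_invalidate()` on the callable method entries it is about to drop (from the call-cache table and from `callable_m_tbl`). Block invalidation for redefined methods therefore flows only through the cme.

A minimal standalone sketch of the resulting scheme, using toy types instead of YJIT's `BlockRef` and cme pointers (illustrative only, not the repository's code):

use std::collections::{HashMap, HashSet};

// Toy stand-ins for YJIT's real types (BlockRef, *const rb_callable_method_entry_t).
type BlockId = u32;
type CmeId = u32;

#[derive(Default)]
struct ToyInvariants {
    // One map, keyed only by the callable method entry: cme -> blocks assuming it stays valid.
    // The old per-(class, method ID) `method_lookup` table has no counterpart here.
    cme_validity: HashMap<CmeId, HashSet<BlockId>>,
}

impl ToyInvariants {
    // Analogue of assume_method_lookup_stable(jit, ocb, cme): register the block under the cme.
    fn assume_stable(&mut self, cme: CmeId, block: BlockId) {
        self.cme_validity.entry(cme).or_default().insert(block);
    }

    // Analogue of rb_yjit_cme_invalidate(cme): drop every block that depended on this cme.
    fn cme_invalidate(&mut self, cme: CmeId) -> Vec<BlockId> {
        self.cme_validity
            .remove(&cme)
            .map(|set| set.into_iter().collect())
            .unwrap_or_default()
    }
}

fn main() {
    let mut inv = ToyInvariants::default();
    inv.assume_stable(7, 100); // block 100 compiled a call through cme 7
    inv.assume_stable(7, 101);
    // Redefining the method invalidates its cme; both blocks are found through it alone.
    assert_eq!(inv.cme_invalidate(7).len(), 2);
    assert!(inv.cme_invalidate(7).is_empty());
    println!("toy cme-keyed invalidation works");
}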
vm_method.c

@@ -187,6 +187,7 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
     // invalidate CCs
     if (cc_tbl && rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
         struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
+        rb_yjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
         if (NIL_P(ccs->cme->owner)) invalidate_negative_cache(mid);
         rb_vm_ccs_free(ccs);
         rb_id_table_delete(cc_tbl, mid);
@@ -196,6 +197,10 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
     // remove from callable_m_tbl, if exists
     struct rb_id_table *cm_tbl;
     if ((cm_tbl = RCLASS_CALLABLE_M_TBL(klass)) != NULL) {
+        VALUE cme;
+        if (rb_yjit_enabled_p() && rb_id_table_lookup(cm_tbl, mid, &cme)) {
+            rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
+        }
         rb_id_table_delete(cm_tbl, mid);
         RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_callable);
     }
@@ -255,8 +260,6 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
         }
     }
     RB_VM_LOCK_LEAVE();
-
-    rb_yjit_method_lookup_change(klass, mid);
 }
 
 static void

yjit.h

@@ -28,7 +28,6 @@
 bool rb_yjit_enabled_p(void);
 unsigned rb_yjit_call_threshold(void);
 void rb_yjit_invalidate_all_method_lookup_assumptions(void);
-void rb_yjit_method_lookup_change(VALUE klass, ID mid);
 void rb_yjit_cme_invalidate(rb_callable_method_entry_t *cme);
 void rb_yjit_collect_vm_usage_insn(int insn);
 void rb_yjit_collect_binding_alloc(void);
@@ -51,7 +50,6 @@ void rb_yjit_tracing_invalidate_all(void);
 static inline bool rb_yjit_enabled_p(void) { return false; }
 static inline unsigned rb_yjit_call_threshold(void) { return UINT_MAX; }
 static inline void rb_yjit_invalidate_all_method_lookup_assumptions(void) {}
-static inline void rb_yjit_method_lookup_change(VALUE klass, ID mid) {}
 static inline void rb_yjit_cme_invalidate(rb_callable_method_entry_t *cme) {}
 static inline void rb_yjit_collect_vm_usage_insn(int insn) {}
 static inline void rb_yjit_collect_binding_alloc(void) {}

yjit/src/codegen.rs

@@ -3895,7 +3895,7 @@ fn jit_obj_respond_to(
     // Invalidate this block if method lookup changes for the method being queried. This works
     // both for the case where a method does or does not exist, as for the latter we asked for a
     // "negative CME" earlier.
-    assume_method_lookup_stable(jit, ocb, recv_class, target_cme);
+    assume_method_lookup_stable(jit, ocb, target_cme);
 
     // Generate a side exit
     let side_exit = get_side_exit(jit, ocb, ctx);
@@ -5291,7 +5291,7 @@ fn gen_send_general(
 
     // Register block for invalidation
     //assert!(cme->called_id == mid);
-    assume_method_lookup_stable(jit, ocb, comptime_recv_klass, cme);
+    assume_method_lookup_stable(jit, ocb, cme);
 
     // To handle the aliased method case (VM_METHOD_TYPE_ALIAS)
     loop {
@@ -5470,7 +5470,7 @@ fn gen_send_general(
 
     flags |= VM_CALL_FCALL | VM_CALL_OPT_SEND;
 
-    assume_method_lookup_stable(jit, ocb, comptime_recv_klass, cme);
+    assume_method_lookup_stable(jit, ocb, cme);
 
     let (known_class, type_mismatch_exit) = {
         if compile_time_name.string_p() {
@@ -5891,8 +5891,8 @@ fn gen_invokesuper(
 
     // We need to assume that both our current method entry and the super
     // method entry we invoke remain stable
-    assume_method_lookup_stable(jit, ocb, current_defined_class, me);
-    assume_method_lookup_stable(jit, ocb, comptime_superclass, cme);
+    assume_method_lookup_stable(jit, ocb, me);
+    assume_method_lookup_stable(jit, ocb, cme);
 
     // Method calls may corrupt types
     ctx.clear_local_types();

yjit/src/core.rs

@@ -373,7 +373,6 @@ impl Branch {
 // help to remove all pointers to this block in the system.
 #[derive(Debug)]
 pub struct CmeDependency {
-    pub receiver_klass: VALUE,
     pub callee_cme: *const rb_callable_method_entry_t,
 }
 
@@ -636,7 +635,6 @@ pub extern "C" fn rb_yjit_iseq_mark(payload: *mut c_void) {
 
     // Mark method entry dependencies
     for cme_dep in &block.cme_dependencies {
-        unsafe { rb_gc_mark_movable(cme_dep.receiver_klass) };
         unsafe { rb_gc_mark_movable(cme_dep.callee_cme.into()) };
     }
 
@@ -692,7 +690,6 @@ pub extern "C" fn rb_yjit_iseq_update_references(payload: *mut c_void) {
 
     // Update method entry dependencies
     for cme_dep in &mut block.cme_dependencies {
-        cme_dep.receiver_klass = unsafe { rb_gc_location(cme_dep.receiver_klass) };
         cme_dep.callee_cme = unsafe { rb_gc_location(cme_dep.callee_cme.into()) }.as_cme();
     }
 
@@ -889,7 +886,6 @@ fn add_block_version(blockref: &BlockRef, cb: &CodeBlock) {
     // contains new references to Ruby objects. Run write barriers.
     let iseq: VALUE = block.blockid.iseq.into();
     for dep in block.iter_cme_deps() {
-        obj_written!(iseq, dep.receiver_klass);
         obj_written!(iseq, dep.callee_cme.into());
     }
 
@@ -1009,11 +1005,9 @@ impl Block {
     /// dependencies for this block.
     pub fn add_cme_dependency(
        &mut self,
-       receiver_klass: VALUE,
        callee_cme: *const rb_callable_method_entry_t,
     ) {
        self.cme_dependencies.push(CmeDependency {
-           receiver_klass,
            callee_cme,
        });
     }

yjit/src/invariants.rs

@@ -25,11 +25,6 @@ pub struct Invariants {
     /// Tracks block assumptions about callable method entry validity.
     cme_validity: HashMap<*const rb_callable_method_entry_t, HashSet<BlockRef>>,
 
-    /// Tracks block assumptions about method lookup. Maps a class to a table of
-    /// method ID points to a set of blocks. While a block `b` is in the table,
-    /// b->callee_cme == rb_callable_method_entry(klass, mid).
-    method_lookup: HashMap<VALUE, HashMap<ID, HashSet<BlockRef>>>,
-
     /// A map from a class and its associated basic operator to a set of blocks
     /// that are assuming that that operator is not redefined. This is used for
     /// quick access to all of the blocks that are making this assumption when
@@ -68,7 +63,6 @@ impl Invariants {
         unsafe {
             INVARIANTS = Some(Invariants {
                 cme_validity: HashMap::new(),
-                method_lookup: HashMap::new(),
                 basic_operator_blocks: HashMap::new(),
                 block_basic_operators: HashMap::new(),
                 single_ractor: HashSet::new(),
@@ -124,34 +118,20 @@ pub fn assume_bop_not_redefined(
 pub fn assume_method_lookup_stable(
     jit: &mut JITState,
     ocb: &mut OutlinedCb,
-    receiver_klass: VALUE,
     callee_cme: *const rb_callable_method_entry_t,
 ) {
-    // RUBY_ASSERT(rb_callable_method_entry(receiver_klass, cme->called_id) == cme);
-    // RUBY_ASSERT_ALWAYS(RB_TYPE_P(receiver_klass, T_CLASS) || RB_TYPE_P(receiver_klass, T_ICLASS));
-    // RUBY_ASSERT_ALWAYS(!rb_objspace_garbage_object_p(receiver_klass));
-
     jit_ensure_block_entry_exit(jit, ocb);
 
     let block = jit.get_block();
     block
         .borrow_mut()
-        .add_cme_dependency(receiver_klass, callee_cme);
+        .add_cme_dependency(callee_cme);
 
     Invariants::get_instance()
         .cme_validity
         .entry(callee_cme)
         .or_default()
         .insert(block.clone());
-
-    let mid = unsafe { (*callee_cme).called_id };
-    Invariants::get_instance()
-        .method_lookup
-        .entry(receiver_klass)
-        .or_default()
-        .entry(mid)
-        .or_default()
-        .insert(block);
 }
 
 // Checks rb_method_basic_definition_p and registers the current block for invalidation if method
@@ -166,7 +146,7 @@ pub fn assume_method_basic_definition(
 ) -> bool {
     if unsafe { rb_method_basic_definition_p(klass, mid) } != 0 {
         let cme = unsafe { rb_callable_method_entry(klass, mid) };
-        assume_method_lookup_stable(jit, ocb, klass, cme);
+        assume_method_lookup_stable(jit, ocb, cme);
         true
     } else {
         false
@@ -272,31 +252,6 @@ pub extern "C" fn rb_yjit_cme_invalidate(callee_cme: *const rb_callable_method_e
     });
 }
 
-/// Callback for when rb_callable_method_entry(klass, mid) is going to change.
-/// Invalidate blocks that assume stable method lookup of `mid` in `klass` when this happens.
-/// This needs to be wrapped on the C side with RB_VM_LOCK_ENTER().
-#[no_mangle]
-pub extern "C" fn rb_yjit_method_lookup_change(klass: VALUE, mid: ID) {
-    // If YJIT isn't enabled, do nothing
-    if !yjit_enabled_p() {
-        return;
-    }
-
-    with_vm_lock(src_loc!(), || {
-        Invariants::get_instance()
-            .method_lookup
-            .entry(klass)
-            .and_modify(|deps| {
-                if let Some(deps) = deps.remove(&mid) {
-                    for block in &deps {
-                        invalidate_block_version(block);
-                        incr_counter!(invalidate_method_lookup);
-                    }
-                }
-            });
-    });
-}
-
 /// Callback for then Ruby is about to spawn a ractor. In that case we need to
 /// invalidate every block that is assuming single ractor mode.
 #[no_mangle]
@@ -387,16 +342,6 @@ pub extern "C" fn rb_yjit_root_mark() {
 
         unsafe { rb_gc_mark(cme) };
     }
-
-    // Mark class and iclass objects
-    for klass in invariants.method_lookup.keys() {
-        // TODO: This is a leak. Unused blocks linger in the table forever, preventing the
-        // callee class they speculate on from being collected.
-        // We could do a bespoke weak reference scheme on classes similar to
-        // the interpreter's call cache. See finalizer for T_CLASS and cc_table_free().
-
-        unsafe { rb_gc_mark(*klass) };
-    }
 }
 
 /// Remove all invariant assumptions made by the block by removing the block as
@@ -413,14 +358,6 @@ pub fn block_assumptions_free(blockref: &BlockRef) {
         if let Some(blockset) = invariants.cme_validity.get_mut(&dep.callee_cme) {
             blockset.remove(blockref);
         }
-
-        // Remove tracking for lookup stability
-        if let Some(id_to_block_set) = invariants.method_lookup.get_mut(&dep.receiver_klass) {
-            let mid = unsafe { (*dep.callee_cme).called_id };
-            if let Some(block_set) = id_to_block_set.get_mut(&mid) {
-                block_set.remove(&blockref);
-            }
-        }
     }
 }
 
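Because a block's recorded dependencies now carry only the cme, tearing a block down (block_assumptions_free in the last hunk above) only has to clean the single cme-keyed table; the second lookup into a per-class, per-method-ID table is gone. A minimal standalone sketch of that bookkeeping, again with toy types in place of YJIT's `BlockRef` and cme pointers (illustrative only, not the repository's code):

use std::collections::{HashMap, HashSet};

// Toy stand-ins for YJIT's real types.
type BlockId = u32;
type CmeId = u32;

// After this change a block only remembers which cmes it depends on,
// mirroring the slimmed-down CmeDependency { callee_cme } record.
struct ToyBlock {
    id: BlockId,
    cme_deps: Vec<CmeId>,
}

// Analogue of block_assumptions_free: unregister the block from the single
// cme-keyed table (the old method_lookup cleanup has no counterpart here).
fn free_block_assumptions(cme_validity: &mut HashMap<CmeId, HashSet<BlockId>>, block: &ToyBlock) {
    for dep in &block.cme_deps {
        if let Some(set) = cme_validity.get_mut(dep) {
            set.remove(&block.id);
        }
    }
}

fn main() {
    let mut cme_validity: HashMap<CmeId, HashSet<BlockId>> = HashMap::new();
    let block = ToyBlock { id: 1, cme_deps: vec![7, 8] };
    // Registration side: one entry per cme the block depends on.
    for dep in &block.cme_deps {
        cme_validity.entry(*dep).or_default().insert(block.id);
    }
    free_block_assumptions(&mut cme_validity, &block);
    assert!(cme_validity.values().all(|set| set.is_empty()));
    println!("block removed from every cme it depended on");
}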