Get rid of dependency on rb_call_cache

Alan Wu 2021-03-17 19:07:20 -04:00
Parent: 827cae94b3
Commit: 84ab77ba59
8 changed files with 238 additions and 97 deletions
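In short, YJIT stops keying its invalidation bookkeeping on individual call cache objects. Each compiled block instead records the receiver class and callable method entry (cme) it speculates on, and two global tables index blocks by those: method_lookup_dependency maps a class to a per-method-ID set of blocks, and cme_validity_dependency maps a cme to a set of blocks. The toy program below is only an illustrative sketch of that idea; strings and a flat array stand in for VALUE, ID, st_table, and rb_id_table, and the toy_* names are made up for this note. The committed implementation appears in the diffs further down.

#include <stdio.h>
#include <string.h>

// Toy stand-ins for YJIT's bookkeeping. The real code keys an st_table by the
// receiver class (VALUE), an rb_id_table by method ID, and a second st_table
// by the callable method entry; here strings and one flat array stand in.
typedef struct {
    const char *name;
    int invalidated;
} toy_block_t;

typedef struct {
    const char *klass;   // stand-in for block_t::receiver_klass
    const char *mid;     // stand-in for the method ID (cme->called_id)
    const char *cme;     // stand-in for block_t::callee_cme
    toy_block_t *block;  // compiled block that made the assumption
} toy_dependency_t;

static toy_dependency_t deps[16];
static int dep_count;

// Mirrors assume_method_lookup_stable(): record that `block` assumes lookup of
// (klass, mid) keeps resolving to `cme` and that `cme` stays valid.
static void toy_assume_lookup_stable(const char *klass, const char *mid,
                                     const char *cme, toy_block_t *block)
{
    deps[dep_count++] = (toy_dependency_t){ klass, mid, cme, block };
}

// Mirrors rb_yjit_method_lookup_change(klass, mid): the lookup result changed,
// so every block that assumed it was stable must be invalidated.
static void toy_method_lookup_change(const char *klass, const char *mid)
{
    for (int i = 0; i < dep_count; i++) {
        if (strcmp(deps[i].klass, klass) == 0 && strcmp(deps[i].mid, mid) == 0) {
            deps[i].block->invalidated = 1;
        }
    }
}

// Mirrors rb_yjit_cme_invalidate(cme): the method entry itself became invalid.
static void toy_cme_invalidate(const char *cme)
{
    for (int i = 0; i < dep_count; i++) {
        if (strcmp(deps[i].cme, cme) == 0) {
            deps[i].block->invalidated = 1;
        }
    }
}

int main(void)
{
    toy_block_t b1 = { "block speculating on Foo#bar", 0 };
    toy_assume_lookup_stable("Foo", "bar", "Foo#bar cme", &b1);

    toy_method_lookup_change("Foo", "bar"); // e.g. Foo#bar was redefined
    printf("%s -> invalidated = %d\n", b1.name, b1.invalidated); // prints 1

    toy_cme_invalidate("Foo#bar cme"); // would also have invalidated it
    return 0;
}

Keying invalidation on (class, method ID) and on cme validity, rather than on a particular call cache, is what lets the commit drop the YJIT hook from rb_vm_cc_invalidate(): a call cache being discarded no longer has to throw away generated code.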

View file

@@ -17068,6 +17068,7 @@ yjit_iface.$(OBJEXT): {$(VPATH)}config.h
 yjit_iface.$(OBJEXT): {$(VPATH)}darray.h
 yjit_iface.$(OBJEXT): {$(VPATH)}debug_counter.h
 yjit_iface.$(OBJEXT): {$(VPATH)}defines.h
+yjit_iface.$(OBJEXT): {$(VPATH)}gc.h
 yjit_iface.$(OBJEXT): {$(VPATH)}id.h
 yjit_iface.$(OBJEXT): {$(VPATH)}id_table.h
 yjit_iface.$(OBJEXT): {$(VPATH)}insns.def

View file

@@ -122,7 +122,6 @@ rb_vm_cc_invalidate(const struct rb_callcache *cc)
     VM_ASSERT(cc->klass != 0); // should be enable
 
     *(VALUE *)&cc->klass = 0;
-    rb_yjit_method_lookup_change((VALUE)cc);
     RB_DEBUG_COUNTER_INC(cc_ent_invalidate);
 }
 
@@ -134,7 +133,8 @@ vm_cme_invalidate(rb_callable_method_entry_t *cme)
     VM_ASSERT(callable_method_entry_p(cme));
     METHOD_ENTRY_INVALIDATED_SET(cme);
     RB_DEBUG_COUNTER_INC(cc_cme_invalidate);
-    rb_yjit_method_lookup_change((VALUE)cme);
+
+    rb_yjit_cme_invalidate((VALUE)cme);
 }
 
 void
@@ -228,6 +228,8 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
             invalidate_negative_cache(mid);
         }
     }
+
+    rb_yjit_method_lookup_change(klass, mid);
 }
 
 static void
@@ -295,6 +297,8 @@ void
 rb_clear_method_cache_all(void)
 {
     rb_objspace_each_objects(invalidate_all_cc, NULL);
+
+    rb_yjit_invalidate_all_method_lookup_assumptions();
 }
 
 void

yjit.h (4 changes)
View file

@@ -47,8 +47,10 @@ bool rb_yjit_enabled_p(void);
 unsigned rb_yjit_call_threshold(void);
 RUBY_SYMBOL_EXPORT_END
 
+void rb_yjit_invalidate_all_method_lookup_assumptions(void);
+void rb_yjit_method_lookup_change(VALUE klass, ID mid);
+void rb_yjit_cme_invalidate(VALUE cme);
 void rb_yjit_collect_vm_usage_insn(int insn);
-void rb_yjit_method_lookup_change(VALUE cme_or_cc);
 void rb_yjit_compile_iseq(const rb_iseq_t *iseq, rb_execution_context_t *ec);
 void rb_yjit_init(struct rb_yjit_options *options);
 void rb_yjit_bop_redefined(VALUE klass, const rb_method_entry_t *me, enum ruby_basic_operators bop);

View file

@@ -1398,7 +1398,7 @@ gen_oswb_cfunc(jitstate_t* jit, ctx_t* ctx, struct rb_call_data * cd, const rb_c
     x86opnd_t klass_opnd = mem_opnd(64, REG0, offsetof(struct RBasic, klass));
 
     // FIXME: This leaks when st_insert raises NoMemoryError
-    assume_method_lookup_stable(cd->cc, cme, jit->block);
+    assume_method_lookup_stable(cd->cc->klass, cme, jit->block);
 
     // Bail if receiver class is different from compile-time call cache class
     jit_mov_gc_ptr(jit, cb, REG1, (VALUE)cd->cc->klass);
@@ -1630,7 +1630,7 @@ gen_oswb_iseq(jitstate_t* jit, ctx_t* ctx, struct rb_call_data * cd, const rb_ca
     // Pointer to the klass field of the receiver &(recv->klass)
     x86opnd_t klass_opnd = mem_opnd(64, REG0, offsetof(struct RBasic, klass));
 
-    assume_method_lookup_stable(cd->cc, cme, jit->block);
+    assume_method_lookup_stable(cd->cc->klass, cme, jit->block);
 
     // Bail if receiver class is different from compile-time call cache class
     jit_mov_gc_ptr(jit, cb, REG1, (VALUE)cd->cc->klass);

View file

@@ -209,8 +209,8 @@ add_block_version(blockid_t blockid, block_t* block)
 {
     // By writing the new block to the iseq, the iseq now
     // contains new references to Ruby objects. Run write barriers.
-    RB_OBJ_WRITTEN(iseq, Qundef, block->dependencies.cc);
-    RB_OBJ_WRITTEN(iseq, Qundef, block->dependencies.cme);
+    RB_OBJ_WRITTEN(iseq, Qundef, block->receiver_klass);
+    RB_OBJ_WRITTEN(iseq, Qundef, block->callee_cme);
 
     // Run write barriers for all objects in generated code.
     uint32_t *offset_element;

View file

@@ -127,11 +127,10 @@ typedef struct yjit_block_version
     // Offsets for GC managed objects in the mainline code block
     int32_array_t gc_object_offsets;
 
-    // GC managed objects that this block depend on
-    struct {
-        VALUE cc;
-        VALUE cme;
-    } dependencies;
+    // In case this block is invalidated, these two pieces of info
+    // help to remove all pointers to this block in the system.
+    VALUE receiver_klass;
+    VALUE callee_cme;
 
     // Index one past the last instruction in the iseq
     uint32_t end_idx;
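The receiver_klass and callee_cme fields replace the old dependencies.cc/dependencies.cme pair, and the new comment states their purpose: when a block is invalidated or freed, YJIT must be able to find and remove every table entry that points at it. The snippet below is a minimal, self-contained sketch of that unlink step; the toy_* names and flat arrays are illustrative stand-ins only, while the committed version (yjit_unlink_method_lookup_dependency, further down in this commit) does the same walk over st_table and rb_id_table keyed by the stored class and cme.

#include <stdio.h>

// Toy stand-ins: strings play the role of the VALUE receiver class and the
// callable method entry recorded on the block.
typedef struct {
    const char *receiver_klass; // what block_t::receiver_klass records
    const char *callee_cme;     // what block_t::callee_cme records
} toy_block_t;

typedef struct { const char *key; toy_block_t *block; } toy_entry_t;

static toy_entry_t lookup_deps[8]; // stand-in for method_lookup_dependency
static toy_entry_t cme_deps[8];    // stand-in for cme_validity_dependency
static int lookup_count, cme_count;

// Register the block under both keys, as assume_method_lookup_stable() does.
static void toy_register(toy_block_t *block)
{
    lookup_deps[lookup_count++] = (toy_entry_t){ block->receiver_klass, block };
    cme_deps[cme_count++] = (toy_entry_t){ block->callee_cme, block };
}

// Mirrors yjit_unlink_method_lookup_dependency(): because the block stored
// both keys, it can locate and drop every table entry that points to it.
static void toy_unlink(toy_block_t *block)
{
    for (int i = 0; i < lookup_count; i++) {
        if (lookup_deps[i].block == block) {
            lookup_deps[i] = lookup_deps[--lookup_count];
            break;
        }
    }
    for (int i = 0; i < cme_count; i++) {
        if (cme_deps[i].block == block) {
            cme_deps[i] = cme_deps[--cme_count];
            break;
        }
    }
}

int main(void)
{
    toy_block_t b = { "Foo", "Foo#bar cme" };
    toy_register(&b);
    printf("entries before unlink: %d\n", lookup_count + cme_count); // prints 2
    toy_unlink(&b); // block is being freed; no dangling pointers remain
    printf("entries after unlink: %d\n", lookup_count + cme_count);  // prints 0
    return 0;
}

The real implementation additionally prunes a per-method block set once it becomes empty, so a class entry does not accumulate dead method IDs.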

View file

@@ -5,6 +5,7 @@
 #include "vm_sync.h"
 #include "vm_callinfo.h"
 #include "builtin.h"
+#include "gc.h"
 #include "internal/compile.h"
 #include "internal/class.h"
 #include "insns_info.inc"
@@ -141,39 +142,6 @@ struct yjit_root_struct {
     int unused; // empty structs are not legal in C99
 };
 
-static void
-block_array_shuffle_remove(rb_yjit_block_array_t blocks, block_t *to_remove) {
-    block_t **elem;
-    rb_darray_foreach(blocks, i, elem) {
-        if (*elem == to_remove) {
-            // Remove the current element by moving the last element here then popping.
-            *elem = rb_darray_get(blocks, rb_darray_size(blocks) - 1);
-            rb_darray_pop_back(blocks);
-            break;
-        }
-    }
-}
-
-// Map cme_or_cc => [block]
-static st_table *method_lookup_dependency;
-
-static int
-add_lookup_dependency_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
-{
-    block_t *new_block = (block_t *)data;
-    rb_yjit_block_array_t blocks = NULL;
-    if (existing) {
-        blocks = (rb_yjit_block_array_t)*value;
-    }
-    if (!rb_darray_append(&blocks, new_block)) {
-        rb_bug("yjit: failed to add method lookup dependency"); // TODO: we could bail out of compiling instead
-    }
-    *value = (st_data_t)blocks;
-    return ST_CONTINUE;
-}
-
 // Hash table of BOP blocks
 static st_table *blocks_assuming_bops;
@@ -191,16 +159,95 @@ assume_bop_not_redefined(block_t *block, int redefined_flag, enum ruby_basic_ope
     }
 }
 
-// Remember that the currently compiling block is only valid while cme and cc are valid
-void
-assume_method_lookup_stable(const struct rb_callcache *cc, const rb_callable_method_entry_t *cme, block_t *block)
-{
-    RUBY_ASSERT(block != NULL);
-    RUBY_ASSERT(block->dependencies.cc == 0 && block->dependencies.cme == 0);
-    st_update(method_lookup_dependency, (st_data_t)cme, add_lookup_dependency_i, (st_data_t)block);
-    block->dependencies.cme = (VALUE)cme;
-    st_update(method_lookup_dependency, (st_data_t)cc, add_lookup_dependency_i, (st_data_t)block);
-    block->dependencies.cc = (VALUE)cc;
+// Map klass => id_table[mid, set of blocks]
+// While a block `b` is in the table, b->callee_cme == rb_callable_method_entry(klass, mid).
+// See assume_method_lookup_stable()
+static st_table *method_lookup_dependency;
+
+// For adding to method_lookup_dependency data with st_update
+struct lookup_dependency_insertion {
+    block_t *block;
+    ID mid;
+};
+
+// Map cme => set of blocks
+// See assume_method_lookup_stable()
+static st_table *cme_validity_dependency;
+
+static int
+add_cme_validity_dependency_i(st_data_t *key, st_data_t *value, st_data_t new_block, int existing)
+{
+    st_table *block_set;
+    if (existing) {
+        block_set = (st_table *)*value;
+    }
+    else {
+        // Make the set and put it into cme_validity_dependency
+        block_set = st_init_numtable();
+        *value = (st_data_t)block_set;
+    }
+
+    // Put block into set
+    st_insert(block_set, new_block, 1);
+
+    return ST_CONTINUE;
+}
+
+static int
+add_lookup_dependency_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
+{
+    struct lookup_dependency_insertion *info = (void *)data;
+
+    // Find or make an id table
+    struct rb_id_table *id2blocks;
+    if (existing) {
+        id2blocks = (void *)*value;
+    }
+    else {
+        // Make an id table and put it into the st_table
+        id2blocks = rb_id_table_create(1);
+        *value = (st_data_t)id2blocks;
+    }
+
+    // Find or make a block set
+    st_table *block_set;
+    {
+        VALUE blocks;
+        if (rb_id_table_lookup(id2blocks, info->mid, &blocks)) {
+            // Take existing set
+            block_set = (st_table *)blocks;
+        }
+        else {
+            // Make new block set and put it into the id table
+            block_set = st_init_numtable();
+            rb_id_table_insert(id2blocks, info->mid, (VALUE)block_set);
+        }
+    }
+
+    st_insert(block_set, (st_data_t)info->block, 1);
+
+    return ST_CONTINUE;
+}
+
+// Remember that a block assumes that rb_callable_method_entry(receiver_klass, mid) == cme and that
+// cme is valid.
+// When either of these assumptions becomes invalid, rb_yjit_method_lookup_change() or
+// rb_yjit_cme_invalidate() invalidates the block.
+void
+assume_method_lookup_stable(VALUE receiver_klass, const rb_callable_method_entry_t *cme, block_t *block)
+{
+    RUBY_ASSERT(!block->receiver_klass && !block->callee_cme);
+    RUBY_ASSERT(cme_validity_dependency);
+    RUBY_ASSERT(method_lookup_dependency);
+    RUBY_ASSERT_ALWAYS(RB_TYPE_P(receiver_klass, T_CLASS));
+    RUBY_ASSERT_ALWAYS(!rb_objspace_garbage_object_p(receiver_klass));
+
+    block->callee_cme = (VALUE)cme;
+    st_update(cme_validity_dependency, (st_data_t)cme, add_cme_validity_dependency_i, (st_data_t)block);
+
+    block->receiver_klass = receiver_klass;
+    struct lookup_dependency_insertion info = { block, cme->called_id };
+    st_update(method_lookup_dependency, (st_data_t)receiver_klass, add_lookup_dependency_i, (st_data_t)&info);
 }
 
 static st_table *blocks_assuming_single_ractor_mode;
@@ -227,19 +274,15 @@ assume_stable_global_constant_state(block_t *block) {
 }
 
 static int
-yjit_root_mark_i(st_data_t k, st_data_t v, st_data_t ignore)
+mark_keys_movable_i(st_data_t k, st_data_t v, st_data_t ignore)
 {
-    // Lifetime notes: cc and cme get added in pairs into the table. One of
-    // them should become invalid before dying. When one of them invalidate we
-    // remove the pair from the table. Blocks remove themself from the table
-    // when they die.
     rb_gc_mark_movable((VALUE)k);
     return ST_CONTINUE;
 }
 
 static int
-method_lookup_dep_table_update_keys(st_data_t *key, st_data_t *value, st_data_t argp, int existing)
+table_update_keys_i(st_data_t *key, st_data_t *value, st_data_t argp, int existing)
 {
     *key = rb_gc_location(rb_gc_location((VALUE)*key));
@@ -257,7 +300,13 @@ static void
 yjit_root_update_references(void *ptr)
 {
     if (method_lookup_dependency) {
-        if (st_foreach_with_replace(method_lookup_dependency, replace_all, method_lookup_dep_table_update_keys, 0)) {
+        if (st_foreach_with_replace(method_lookup_dependency, replace_all, table_update_keys_i, 0)) {
+            RUBY_ASSERT(false);
+        }
+    }
+    if (cme_validity_dependency) {
+        if (st_foreach_with_replace(cme_validity_dependency, replace_all, table_update_keys_i, 0)) {
             RUBY_ASSERT(false);
         }
     }
@@ -270,7 +319,15 @@ static void
 yjit_root_mark(void *ptr)
 {
     if (method_lookup_dependency) {
-        st_foreach(method_lookup_dependency, yjit_root_mark_i, 0);
+        // TODO: This is a leak. Unused blocks linger in the table forever, preventing the
+        // callee class they speculate on from being collected.
+        // We could do a bespoke weak reference scheme on classes similar to
+        // the interpreter's call cache. See finalizer for T_CLASS and cc_table_free().
+        st_foreach(method_lookup_dependency, mark_keys_movable_i, 0);
+    }
+
+    if (cme_validity_dependency) {
+        st_foreach(cme_validity_dependency, mark_keys_movable_i, 0);
     }
 }
@@ -288,7 +345,6 @@ yjit_root_memsize(const void *ptr)
 }
 
 // Custom type for interacting with the GC
-// TODO: compaction support
 // TODO: make this write barrier protected
 static const rb_data_type_t yjit_root_type = {
     "yjit_root",
@@ -296,55 +352,130 @@ static const rb_data_type_t yjit_root_type = {
     0, 0, RUBY_TYPED_FREE_IMMEDIATELY
 };
 
-// Callback when cme or cc become invalid
-void
-rb_yjit_method_lookup_change(VALUE cme_or_cc)
+static int
+block_set_invalidate_i(st_data_t key, st_data_t v, st_data_t ignore)
 {
-    if (!method_lookup_dependency)
-        return;
+    block_t *version = (block_t *)key;
+
+    invalidate_block_version(version);
+
+    return ST_CONTINUE;
+}
+
+// Callback for when rb_callable_method_entry(klass, mid) is going to change.
+// Invalidate blocks that assume stable method lookup of `mid` in `klass` when this happens.
+void
+rb_yjit_method_lookup_change(VALUE klass, ID mid)
+{
+    if (!method_lookup_dependency) return;
 
     RB_VM_LOCK_ENTER();
 
-    RUBY_ASSERT(IMEMO_TYPE_P(cme_or_cc, imemo_ment) || IMEMO_TYPE_P(cme_or_cc, imemo_callcache));
-
-    // Invalidate all regions that depend on the cme or cc
-    st_data_t key = (st_data_t)cme_or_cc, image;
-    if (st_delete(method_lookup_dependency, &key, &image)) {
-        rb_yjit_block_array_t array = (void *)image;
-        block_t **elem;
-
-        rb_darray_foreach(array, i, elem) {
-            invalidate_block_version(*elem);
+    st_data_t image;
+    st_data_t key = (st_data_t)klass;
+    if (st_lookup(method_lookup_dependency, key, &image)) {
+        struct rb_id_table *id2blocks = (void *)image;
+        VALUE blocks;
+
+        // Invalidate all blocks in method_lookup_dependency[klass][mid]
+        if (rb_id_table_lookup(id2blocks, mid, &blocks)) {
+            rb_id_table_delete(id2blocks, mid);
+
+            st_table *block_set = (st_table *)blocks;
+            st_foreach(block_set, block_set_invalidate_i, 0);
+            st_free_table(block_set);
         }
-
-        rb_darray_free(array);
     }
 
     RB_VM_LOCK_LEAVE();
 }
 
+// Callback for when a cme becomes invalid.
+// Invalidate all blocks that depend on cme being valid.
+void
+rb_yjit_cme_invalidate(VALUE cme)
+{
+    if (!cme_validity_dependency) return;
+
+    RUBY_ASSERT(IMEMO_TYPE_P(cme, imemo_ment));
+
+    RB_VM_LOCK_ENTER();
+
+    // Delete the block set from the table
+    st_data_t cme_as_st_data = (st_data_t)cme;
+    st_data_t blocks;
+    if (st_delete(cme_validity_dependency, &cme_as_st_data, &blocks)) {
+        st_table *block_set = (st_table *)blocks;
+
+        // Invalidate each block
+        st_foreach(block_set, block_set_invalidate_i, 0);
+
+        st_free_table(block_set);
+    }
+
+    RB_VM_LOCK_LEAVE();
+}
+
+// For dealing with refinements
+void
+rb_yjit_invalidate_all_method_lookup_assumptions(void)
+{
+    // TODO: implement
+}
+
 // Remove a block from the method lookup dependency table
 static void
-remove_method_lookup_dependency(VALUE cc_or_cme, block_t *block)
+remove_method_lookup_dependency(block_t *block)
 {
-    st_data_t key = (st_data_t)cc_or_cme, image;
+    if (!block->receiver_klass) return;
+    RUBY_ASSERT(block->callee_cme); // callee_cme should be set when receiver_klass is set
+
+    st_data_t image;
+    st_data_t key = (st_data_t)block->receiver_klass;
     if (st_lookup(method_lookup_dependency, key, &image)) {
-        rb_yjit_block_array_t array = (void *)image;
-
-        block_array_shuffle_remove(array, block);
-
-        if (rb_darray_size(array) == 0) {
-            st_delete(method_lookup_dependency, &key, NULL);
-            rb_darray_free(array);
+        struct rb_id_table *id2blocks = (void *)image;
+        const rb_callable_method_entry_t *cme = (void *)block->callee_cme;
+        ID mid = cme->called_id;
+
+        // Find block set
+        VALUE blocks;
+        if (rb_id_table_lookup(id2blocks, mid, &blocks)) {
+            st_table *block_set = (st_table *)blocks;
+
+            // Remove block from block set
+            st_data_t block_as_st_data = (st_data_t)block;
+            (void)st_delete(block_set, &block_as_st_data, NULL);
+
+            if (block_set->num_entries == 0) {
+                // Block set now empty. Remove from id table.
+                rb_id_table_delete(id2blocks, mid);
+                st_free_table(block_set);
+            }
         }
     }
 }
 
+// Remove a block from cme_validity_dependency
+static void
+remove_cme_validity_dependency(block_t *block)
+{
+    if (!block->callee_cme) return;
+
+    st_data_t blocks;
+    if (st_lookup(cme_validity_dependency, block->callee_cme, &blocks)) {
+        st_table *block_set = (st_table *)blocks;
+
+        st_data_t block_as_st_data = (st_data_t)block;
+        (void)st_delete(block_set, &block_as_st_data, NULL);
+    }
+}
+
 void
 yjit_unlink_method_lookup_dependency(block_t *block)
 {
-    if (block->dependencies.cc) remove_method_lookup_dependency(block->dependencies.cc, block);
-    if (block->dependencies.cme) remove_method_lookup_dependency(block->dependencies.cme, block);
+    remove_method_lookup_dependency(block);
+    remove_cme_validity_dependency(block);
 }
 
 void
@@ -715,8 +846,8 @@ rb_yjit_iseq_mark(const struct rb_iseq_constant_body *body)
         block_t *block = rb_darray_get(version_array, block_idx);
 
         rb_gc_mark_movable((VALUE)block->blockid.iseq);
-        rb_gc_mark_movable(block->dependencies.cc);
-        rb_gc_mark_movable(block->dependencies.cme);
+        rb_gc_mark_movable(block->receiver_klass);
+        rb_gc_mark_movable(block->callee_cme);
 
         // Walk over references to objects in generated code.
         uint32_t *offset_element;
@@ -743,8 +874,8 @@ rb_yjit_iseq_update_references(const struct rb_iseq_constant_body *body)
         block->blockid.iseq = (const rb_iseq_t *)rb_gc_location((VALUE)block->blockid.iseq);
 
-        block->dependencies.cc = rb_gc_location(block->dependencies.cc);
-        block->dependencies.cme = rb_gc_location(block->dependencies.cme);
+        block->receiver_klass = rb_gc_location(block->receiver_klass);
+        block->callee_cme = rb_gc_location(block->callee_cme);
 
         // Walk over references to objects in generated code.
         uint32_t *offset_element;
@@ -782,12 +913,14 @@ rb_yjit_iseq_free(const struct rb_iseq_constant_body *body)
     rb_darray_free(body->yjit_blocks);
 }
 
-bool rb_yjit_enabled_p(void)
+bool
+rb_yjit_enabled_p(void)
 {
     return rb_yjit_opts.yjit_enabled;
 }
 
-unsigned rb_yjit_call_threshold(void)
+unsigned
+rb_yjit_call_threshold(void)
 {
     return rb_yjit_opts.call_threshold;
 }
@@ -795,8 +928,7 @@ unsigned rb_yjit_call_threshold(void)
 
 void
 rb_yjit_init(struct rb_yjit_options *options)
 {
-    if (!yjit_scrape_successful || !PLATFORM_SUPPORTED_P)
-    {
+    if (!yjit_scrape_successful || !PLATFORM_SUPPORTED_P) {
        return;
     }
@@ -839,8 +971,11 @@ rb_yjit_init(struct rb_yjit_options *options)
         rb_block_call(rb_mKernel, rb_intern("at_exit"), 0, NULL, at_exit_print_stats, Qfalse);
     }
 
-    // Initialize the GC hooks
+    // Make dependency tables
     method_lookup_dependency = st_init_numtable();
+    cme_validity_dependency = st_init_numtable();
+
+    // Initialize the GC hooks
     struct yjit_root_struct *root;
     VALUE yjit_root = TypedData_Make_Struct(0, struct yjit_root_struct, &yjit_root_type, root);
     rb_gc_register_mark_object(yjit_root);

View file

@@ -75,7 +75,7 @@ void check_cfunc_dispatch(VALUE receiver, struct rb_call_data *cd, void *callee,
 bool cfunc_needs_frame(const rb_method_cfunc_t *cfunc);
 
 RBIMPL_ATTR_NODISCARD() bool assume_bop_not_redefined(block_t *block, int redefined_flag, enum ruby_basic_operators bop);
-void assume_method_lookup_stable(const struct rb_callcache *cc, const rb_callable_method_entry_t *cme, block_t* block);
+void assume_method_lookup_stable(VALUE receiver_klass, const rb_callable_method_entry_t *cme, block_t *block);
 RBIMPL_ATTR_NODISCARD() bool assume_single_ractor_mode(block_t *block);
 RBIMPL_ATTR_NODISCARD() bool assume_stable_global_constant_state(block_t *block);