Mirror of https://github.com/github/ruby.git

Adjust indents [ci skip]

Parent: 504e388525
Commit: 70bc8cc6c2
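The hunks in this commit adjust indentation and brace placement in code added for object shapes: function definitions get the return type and the opening brace on their own lines, "switch (x)" gains a space before the parenthesis and its case labels move to CRuby's two-space offset, and cuddled "} else {" is split across two lines. A condensed before/after sketch of those conventions (an illustrative example, not code taken from the commit):

    /* before */
    static int lookup(int key) {
        switch(key) {
            case 1:
                return 10;
            default:
                return 0;
        }
    }

    /* after */
    static int
    lookup(int key)
    {
        switch (key) {
          case 1:
            return 10;
          default:
            return 0;
        }
    }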
shape.c (126 lines changed)
@@ -10,7 +10,8 @@
  * Shape getters
  */
 static rb_shape_t*
-rb_shape_get_root_shape(void) {
+rb_shape_get_root_shape(void)
+{
     return GET_VM()->root_shape;
 }

@@ -21,12 +22,14 @@ rb_shape_id(rb_shape_t * shape)
 }

 static rb_shape_t*
-rb_shape_get_frozen_root_shape(void) {
+rb_shape_get_frozen_root_shape(void)
+{
     return GET_VM()->frozen_root_shape;
 }

 bool
-rb_shape_root_shape_p(rb_shape_t* shape) {
+rb_shape_root_shape_p(rb_shape_t* shape)
+{
     return shape == rb_shape_get_root_shape();
 }

@@ -90,7 +93,8 @@ rb_shape_get_shape(VALUE obj)
 }

 static rb_shape_t *
-rb_shape_lookup_id(rb_shape_t* shape, ID id, enum shape_type shape_type) {
+rb_shape_lookup_id(rb_shape_t* shape, ID id, enum shape_type shape_type)
+{
     while (shape->parent_id != INVALID_SHAPE_ID) {
         if (shape->edge_name == id) {
             // If the shape type is different, we don't

@@ -136,25 +140,25 @@ get_next_shape_internal(rb_shape_t* shape, ID id, VALUE obj, enum shape_type sha

             new_shape->type = (uint8_t)shape_type;

-            switch(shape_type) {
-                case SHAPE_IVAR:
-                    new_shape->iv_count = rb_shape_get_shape_by_id(new_shape->parent_id)->iv_count + 1;
+            switch (shape_type) {
+              case SHAPE_IVAR:
+                new_shape->iv_count = rb_shape_get_shape_by_id(new_shape->parent_id)->iv_count + 1;

-                    // Check if we should update max_iv_count on the object's class
-                    if (BUILTIN_TYPE(obj) == T_OBJECT) {
-                        VALUE klass = rb_obj_class(obj);
-                        if (new_shape->iv_count > RCLASS_EXT(klass)->max_iv_count) {
-                            RCLASS_EXT(klass)->max_iv_count = new_shape->iv_count;
-                        }
+                // Check if we should update max_iv_count on the object's class
+                if (BUILTIN_TYPE(obj) == T_OBJECT) {
+                    VALUE klass = rb_obj_class(obj);
+                    if (new_shape->iv_count > RCLASS_EXT(klass)->max_iv_count) {
+                        RCLASS_EXT(klass)->max_iv_count = new_shape->iv_count;
+                    }
                 }
-                    break;
-                case SHAPE_IVAR_UNDEF:
-                case SHAPE_FROZEN:
-                    new_shape->iv_count = rb_shape_get_shape_by_id(new_shape->parent_id)->iv_count;
-                    break;
-                case SHAPE_ROOT:
-                    rb_bug("Unreachable");
-                    break;
+                break;
+              case SHAPE_IVAR_UNDEF:
+              case SHAPE_FROZEN:
+                new_shape->iv_count = rb_shape_get_shape_by_id(new_shape->parent_id)->iv_count;
+                break;
+              case SHAPE_ROOT:
+                rb_bug("Unreachable");
+                break;
             }

             rb_id_table_insert(shape->edges, id, (VALUE)new_shape);

@@ -199,13 +203,13 @@ rb_shape_transition_shape_frozen(VALUE obj)
     rb_shape_t* next_shape;

     if (shape == rb_shape_get_root_shape()) {
-        switch(BUILTIN_TYPE(obj)) {
-            case T_OBJECT:
-            case T_CLASS:
-            case T_MODULE:
-                break;
-            default:
-                return;
+        switch (BUILTIN_TYPE(obj)) {
+          case T_OBJECT:
+          case T_CLASS:
+          case T_MODULE:
+            break;
+          default:
+            return;
         }
         next_shape = rb_shape_get_frozen_root_shape();
     }

@@ -239,22 +243,23 @@ rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id)
 }

 bool
-rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t *value) {
+rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t *value)
+{
     while (shape->parent_id != INVALID_SHAPE_ID) {
         if (shape->edge_name == id) {
             enum shape_type shape_type;
             shape_type = (enum shape_type)shape->type;

-            switch(shape_type) {
-                case SHAPE_IVAR:
-                    RUBY_ASSERT(shape->iv_count > 0);
-                    *value = shape->iv_count - 1;
-                    return true;
-                case SHAPE_IVAR_UNDEF:
-                case SHAPE_ROOT:
-                    return false;
-                case SHAPE_FROZEN:
-                    rb_bug("Ivar should not exist on frozen transition\n");
+            switch (shape_type) {
+              case SHAPE_IVAR:
+                RUBY_ASSERT(shape->iv_count > 0);
+                *value = shape->iv_count - 1;
+                return true;
+              case SHAPE_IVAR_UNDEF:
+              case SHAPE_ROOT:
+                return false;
+              case SHAPE_FROZEN:
+                rb_bug("Ivar should not exist on frozen transition\n");
             }
         }
         shape = rb_shape_get_shape_by_id(shape->parent_id);

@@ -313,14 +318,16 @@ static const rb_data_type_t shape_data_type = {
 };

 static VALUE
-rb_wrapped_shape_id(VALUE self) {
+rb_wrapped_shape_id(VALUE self)
+{
     rb_shape_t * shape;
     TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
     return INT2NUM(rb_shape_id(shape));
 }

 static VALUE
-rb_shape_type(VALUE self) {
+rb_shape_type(VALUE self)
+{
     rb_shape_t * shape;
     TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
     return INT2NUM(shape->type);

@@ -339,16 +346,20 @@ rb_shape_parent_id(VALUE self)
     }
 }

-static VALUE parse_key(ID key) {
+static VALUE
+parse_key(ID key)
+{
     if ((key & RUBY_ID_INTERNAL) == RUBY_ID_INTERNAL) {
         return LONG2NUM(key);
-    } else {
+    }
+    else {
         return ID2SYM(key);
     }
 }

 static VALUE
-rb_shape_t_to_rb_cShape(rb_shape_t *shape) {
+rb_shape_t_to_rb_cShape(rb_shape_t *shape)
+{
     union { const rb_shape_t *in; void *out; } deconst;
     VALUE res;
     deconst.in = shape;

@@ -357,7 +368,8 @@ rb_shape_t_to_rb_cShape(rb_shape_t *shape) {
     return res;
 }

-static enum rb_id_table_iterator_result rb_edges_to_hash(ID key, VALUE value, void *ref)
+static enum rb_id_table_iterator_result
+rb_edges_to_hash(ID key, VALUE value, void *ref)
 {
     rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_shape_t_to_rb_cShape((rb_shape_t*)value));
     return ID_TABLE_CONTINUE;

@@ -428,15 +440,21 @@ rb_shape_parent(VALUE self)
     }
 }

-VALUE rb_shape_debug_shape(VALUE self, VALUE obj) {
+VALUE
+rb_shape_debug_shape(VALUE self, VALUE obj)
+{
     return rb_shape_t_to_rb_cShape(rb_shape_get_shape(obj));
 }

-VALUE rb_shape_root_shape(VALUE self) {
+VALUE
+rb_shape_root_shape(VALUE self)
+{
     return rb_shape_t_to_rb_cShape(rb_shape_get_root_shape());
 }

-VALUE rb_shape_frozen_root_shape(VALUE self) {
+VALUE
+rb_shape_frozen_root_shape(VALUE self)
+{
     return rb_shape_t_to_rb_cShape(rb_shape_get_frozen_root_shape());
 }

@@ -456,7 +474,9 @@ static VALUE edges(struct rb_id_table* edges)
     return hash;
 }

-VALUE rb_obj_shape(rb_shape_t* shape) {
+VALUE
+rb_obj_shape(rb_shape_t* shape)
+{
     VALUE rb_shape = rb_hash_new();

     rb_hash_aset(rb_shape, ID2SYM(rb_intern("id")), INT2NUM(rb_shape_id(shape)));

@@ -473,11 +493,15 @@ VALUE rb_obj_shape(rb_shape_t* shape) {
     return rb_shape;
 }

-static VALUE shape_transition_tree(VALUE self) {
+static VALUE
+shape_transition_tree(VALUE self)
+{
     return rb_obj_shape(rb_shape_get_root_shape());
 }

-static VALUE next_shape_id(VALUE self) {
+static VALUE
+next_shape_id(VALUE self)
+{
     return INT2NUM(GET_VM()->next_shape_id);
 }
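The vm_insnhelper.c hunks below repeat the same two adjustments inside switch statements: case labels sit two spaces inside the switch with their bodies another two spaces in, and "} else {" becomes a closing brace and an else on separate lines. A minimal sketch of the target layout (hypothetical code, not taken from the diff):

    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            mark_exivar(obj);
        }
        else {
            return Qnil;
        }
        break;
      default:
        break;
    }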
vm_insnhelper.c (253 lines changed)
@@ -1130,30 +1130,31 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
 #endif

     switch (BUILTIN_TYPE(obj)) {
-        case T_OBJECT:
-            ivar_list = ROBJECT_IVPTR(obj);
-            VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
+      case T_OBJECT:
+        ivar_list = ROBJECT_IVPTR(obj);
+        VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

 #if !SHAPE_IN_BASIC_FLAGS
-            shape_id = ROBJECT_SHAPE_ID(obj);
+        shape_id = ROBJECT_SHAPE_ID(obj);
 #endif
-            break;
-        case T_CLASS:
-        case T_MODULE:
-            {
-                goto general_path;
-            }
-        default:
-            if (FL_TEST_RAW(obj, FL_EXIVAR)) {
-                struct gen_ivtbl *ivtbl;
-                rb_gen_ivtbl_get(obj, id, &ivtbl);
+        break;
+      case T_CLASS:
+      case T_MODULE:
+        {
+            goto general_path;
+        }
+      default:
+        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
+            struct gen_ivtbl *ivtbl;
+            rb_gen_ivtbl_get(obj, id, &ivtbl);
 #if !SHAPE_IN_BASIC_FLAGS
-                shape_id = ivtbl->shape_id;
+            shape_id = ivtbl->shape_id;
 #endif
-                ivar_list = ivtbl->ivptr;
-            } else {
-                return Qnil;
-            }
+            ivar_list = ivtbl->ivptr;
+        }
+        else {
+            return Qnil;
+        }
     }

     shape_id_t cached_id;

@@ -1172,7 +1173,7 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
         vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
     }

-    if(LIKELY(cached_id == shape_id)) {
+    if (LIKELY(cached_id == shape_id)) {
         if (index == ATTR_INDEX_NOT_SET) {
             return Qnil;
         }

@@ -1185,14 +1186,16 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
         if (is_attr) {
             if (cached_id != INVALID_SHAPE_ID) {
                 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
-            } else {
+            }
+            else {
                 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
             }
         }
         else {
             if (cached_id != INVALID_SHAPE_ID) {
                 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
-            } else {
+            }
+            else {
                 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
             }
         }

@@ -1264,69 +1267,69 @@ vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic,
 #if OPT_IC_FOR_IVAR
     switch (BUILTIN_TYPE(obj)) {
       case T_OBJECT:
-      {
-        rb_check_frozen_internal(obj);
+        {
+            rb_check_frozen_internal(obj);

-        attr_index_t index;
+            attr_index_t index;

-        uint32_t num_iv = ROBJECT_NUMIV(obj);
-        rb_shape_t* shape = rb_shape_get_shape(obj);
-        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
+            uint32_t num_iv = ROBJECT_NUMIV(obj);
+            rb_shape_t* shape = rb_shape_get_shape(obj);
+            shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);

-        rb_shape_t* next_shape = rb_shape_get_next(shape, obj, id);
+            rb_shape_t* next_shape = rb_shape_get_next(shape, obj, id);

-        if (shape != next_shape) {
-            RUBY_ASSERT(next_shape->parent_id == rb_shape_id(shape));
-            rb_shape_set_shape(obj, next_shape);
-            next_shape_id = ROBJECT_SHAPE_ID(obj);
-        }
-
-        if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
-            if (index >= MAX_IVARS) {
-                rb_raise(rb_eArgError, "too many instance variables");
-            }
-
-            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
-        }
-        else {
-            rb_bug("Didn't find instance variable %s\n", rb_id2name(id));
-        }
-
-        // Ensure the IV buffer is wide enough to store the IV
-        if (UNLIKELY(index >= num_iv)) {
-            RUBY_ASSERT(index == num_iv);
-            rb_init_iv_list(obj);
-        }
-
-        VALUE *ptr = ROBJECT_IVPTR(obj);
-        RB_OBJ_WRITE(obj, &ptr[index], val);
-        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
-
-        return val;
+            if (shape != next_shape) {
+                RUBY_ASSERT(next_shape->parent_id == rb_shape_id(shape));
+                rb_shape_set_shape(obj, next_shape);
+                next_shape_id = ROBJECT_SHAPE_ID(obj);
+            }
+
+            if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
+                if (index >= MAX_IVARS) {
+                    rb_raise(rb_eArgError, "too many instance variables");
+                }
+
+                populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
+            }
+            else {
+                rb_bug("Didn't find instance variable %s\n", rb_id2name(id));
+            }
+
+            // Ensure the IV buffer is wide enough to store the IV
+            if (UNLIKELY(index >= num_iv)) {
+                RUBY_ASSERT(index == num_iv);
+                rb_init_iv_list(obj);
+            }
+
+            VALUE *ptr = ROBJECT_IVPTR(obj);
+            RB_OBJ_WRITE(obj, &ptr[index], val);
+            RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
+
+            return val;
         }
       case T_CLASS:
       case T_MODULE:
-            break;
+        break;
       default:
-      {
-        rb_ivar_set(obj, id, val);
-        shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
-        rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
-        attr_index_t index;
+        {
+            rb_ivar_set(obj, id, val);
+            shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
+            rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
+            attr_index_t index;

-        if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
-            if (index >= MAX_IVARS) {
-                rb_raise(rb_eArgError, "too many instance variables");
-            }
-
-            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
-        }
-        else {
-            rb_bug("didn't find the id\n");
-        }
-
-        return val;
+            if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
+                if (index >= MAX_IVARS) {
+                    rb_raise(rb_eArgError, "too many instance variables");
+                }
+
+                populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
+            }
+            else {
+                rb_bug("didn't find the id\n");
+            }
+
+            return val;
+        }
     }
 #endif
     RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

@@ -1399,45 +1402,46 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
 #if OPT_IC_FOR_IVAR
     switch (BUILTIN_TYPE(obj)) {
       case T_OBJECT:
-      {
-        VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
-
-        shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
-
-        if (LIKELY(shape_id == dest_shape_id)) {
-            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
-            VM_ASSERT(!rb_ractor_shareable_p(obj));
-        }
-        else if (dest_shape_id != INVALID_SHAPE_ID) {
-            rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
-            shape_id_t source_shape_id = dest_shape->parent_id;
-            if (shape_id == source_shape_id && dest_shape->edge_name == id && dest_shape->type == SHAPE_IVAR) {
-                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
-                VM_ASSERT(!rb_ractor_shareable_p(obj));
-                if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
-                    rb_init_iv_list(obj);
-                }
-
-                ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
-
-                RUBY_ASSERT(rb_shape_get_next(rb_shape_get_shape_by_id(source_shape_id), obj, id) == dest_shape);
-                RUBY_ASSERT(index < ROBJECT_NUMIV(obj));
-            }
-            else {
-                break;
-            }
-        } else {
-            break;
-        }
-
-        VALUE *ptr = ROBJECT_IVPTR(obj);
-
-        RB_OBJ_WRITE(obj, &ptr[index], val);
-
-        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
-        return val;
-      }
-      break;
+        {
+            VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
+
+            shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
+
+            if (LIKELY(shape_id == dest_shape_id)) {
+                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
+                VM_ASSERT(!rb_ractor_shareable_p(obj));
+            }
+            else if (dest_shape_id != INVALID_SHAPE_ID) {
+                rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
+                shape_id_t source_shape_id = dest_shape->parent_id;
+                if (shape_id == source_shape_id && dest_shape->edge_name == id && dest_shape->type == SHAPE_IVAR) {
+                    RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
+                    if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
+                        rb_init_iv_list(obj);
+                    }
+
+                    ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
+
+                    RUBY_ASSERT(rb_shape_get_next(rb_shape_get_shape_by_id(source_shape_id), obj, id) == dest_shape);
+                    RUBY_ASSERT(index < ROBJECT_NUMIV(obj));
+
+                }
+                else {
+                    break;
+                }
+            }
+            else {
+                break;
+            }
+
+            VALUE *ptr = ROBJECT_IVPTR(obj);
+
+            RB_OBJ_WRITE(obj, &ptr[index], val);
+
+            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
+            return val;
+        }
+        break;
       case T_CLASS:
       case T_MODULE:
         RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);

@@ -1547,14 +1551,14 @@ vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC i

     if (UNLIKELY(vm_setivar(obj, id, val, dest_shape_id, index) == Qundef)) {
         switch (BUILTIN_TYPE(obj)) {
-            case T_OBJECT:
-            case T_CLASS:
-            case T_MODULE:
-                break;
-            default:
-                if (vm_setivar_default(obj, id, val, dest_shape_id, index) != Qundef) {
-                    return;
-                }
+          case T_OBJECT:
+          case T_CLASS:
+          case T_MODULE:
+            break;
+          default:
+            if (vm_setivar_default(obj, id, val, dest_shape_id, index) != Qundef) {
+                return;
+            }
         }
         vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
     }

@@ -3275,17 +3279,17 @@ vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, cons
     VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
     if (res == Qundef) {
         switch (BUILTIN_TYPE(obj)) {
-            case T_OBJECT:
-            case T_CLASS:
-            case T_MODULE:
-                break;
-            default:
-            {
-                res = vm_setivar_default(obj, id, val, dest_shape_id, index);
-                if (res != Qundef) {
-                    return res;
-                }
+          case T_OBJECT:
+          case T_CLASS:
+          case T_MODULE:
+            break;
+          default:
+            {
+                res = vm_setivar_default(obj, id, val, dest_shape_id, index);
+                if (res != Qundef) {
+                    return res;
+                }
            }
         }
         res = vm_setivar_slowpath_attr(obj, id, val, cc);
     }

@@ -3895,7 +3899,8 @@ vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, st
             VM_CALL_METHOD_ATTR(v,
                                 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
-        } else {
+        }
+        else {
             cc = &((struct rb_callcache) {
                 .flags = T_IMEMO |
                     (imemo_callcache << FL_USHIFT) |