This is a variation of the `defined` instruction, for use when we
are checking for an instance variable. Splitting this out as a
separate instruction lets us skip some checks, and it also allows
us to use an instance variable cache, letting shape analysis
speed up the operation further.
This commit is contained in:
Ole Friis Østergaard 2023-02-28 13:44:57 +01:00 committed by John Hawthorn
Parent dc1e6573f2
Commit 1a3f8e1c9f
3 changed files: 28 additions and 13 deletions

View file

@ -5460,9 +5460,8 @@ defined_expr0(rb_iseq_t *iseq, LINK_ANCHOR *const ret,
#define PUSH_VAL(type) (needstr == Qfalse ? Qtrue : rb_iseq_defined_string(type))
case NODE_IVAR:
ADD_INSN(ret, line_node, putnil);
ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_IVAR),
ID2SYM(node->nd_vid), PUSH_VAL(DEFINED_IVAR));
ADD_INSN3(ret, line_node, defined_ivar,
ID2SYM(node->nd_vid), get_ivar_ic_value(iseq,node->nd_vid), PUSH_VAL(DEFINED_IVAR));
return;
case NODE_GVAR:

View file

@ -701,6 +701,20 @@ defined
}
}
/* defined?(@foo) */
/* Specialized form of `defined` for instance variables (defined?(@foo)).
 * Operands: the ivar name `id`, an inline ivar cache `ic` so shape analysis
 * can speed up the lookup, and `pushval` — the value to push when the ivar
 * is defined (Qtrue, or the "instance-variable" string when a string result
 * was requested; see PUSH_VAL in compile.c).
 * Pushes `pushval` if the ivar is set on self, Qnil otherwise. */
DEFINE_INSN
defined_ivar
(ID id, IVC ic, VALUE pushval)
()
(VALUE val)
// attr bool leaf = false;
{
    val = Qnil;
    /* Qundef is passed as the default so an unset ivar is distinguishable
     * from one legitimately holding nil. */
    if (vm_getivar(GET_SELF(), id, GET_ISEQ(), ic, NULL, FALSE, Qundef) != Qundef) {
        val = pushval;
    }
}
/* check `target' matches `pattern'.
`flag & VM_CHECKMATCH_TYPE_MASK' describe how to check pattern.
VM_CHECKMATCH_TYPE_WHEN: ignore target and check pattern is truthy.

View file

@ -1143,9 +1143,9 @@ fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, in
#define ATTR_INDEX_NOT_SET (attr_index_t)-1
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int));
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int, VALUE));
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, VALUE default_value)
{
#if OPT_IC_FOR_IVAR
VALUE val = Qundef;
@ -1153,7 +1153,7 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
VALUE * ivar_list;
if (SPECIAL_CONST_P(obj)) {
return Qnil;
return default_value;
}
#if SHAPE_IN_BASIC_FLAGS
@ -1200,7 +1200,7 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
ivar_list = ivtbl->ivptr;
}
else {
return Qnil;
return default_value;
}
}
@ -1218,7 +1218,7 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
if (index == ATTR_INDEX_NOT_SET) {
return Qnil;
return default_value;
}
val = ivar_list[index];
@ -1260,7 +1260,7 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
if (!rb_id_table_lookup(ROBJECT_IV_HASH(obj), id, &val)) {
val = Qnil;
val = default_value;
}
}
else {
@ -1281,13 +1281,15 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
vm_ic_attr_index_initialize(ic, shape_id);
}
val = Qnil;
val = default_value;
}
}
}
RUBY_ASSERT(!UNDEF_P(val));
if (default_value != Qundef) {
RUBY_ASSERT(!UNDEF_P(val));
}
return val;
@ -1572,7 +1574,7 @@ rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID
/* Read instance variable `id` from `obj`, using the inline cache `ic`.
 * Returns Qnil when the ivar is unset — normal Ruby read semantics —
 * by passing Qnil as vm_getivar's default_value.
 * NOTE(review): the diff residue here showed both the pre- and post-change
 * return statements fused; only the post-change call (with the trailing
 * default_value argument) is kept. */
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
}
static inline void
@ -3459,7 +3461,7 @@ vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_call
const struct rb_callcache *cc = calling->cc;
RB_DEBUG_COUNTER_INC(ccf_ivar);
cfp->sp -= 1;
VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
return ivar;
}