* method.h: make rb_method_entry_t a VALUE.

Motivation and new data structure are described in [Bug #11203].
  This patch also solves the following issues.
  * [Bug #11200] Memory leak of method entries
  * [Bug #11046] __callee__ returns incorrect method name in orphan
                 proc
* test/ruby/test_method.rb: add a test for [Bug #11046].
* vm_core.h: remove rb_control_frame_t::me. me is now located on the
  value stack.
* vm_core.h, gc.c, vm_method.c: remove the unlinked_method... code
  because method entries are now simple VALUEs.
* method.h: Now all method entries have their own independent method
  definitions. Strictly speaking, this change is not essential,
  but it prepares for future changes.
  * rb_method_entry_t::flag is moved to rb_method_definition_t::flag.
  * rb_method_definition_t::alias_count is now
    rb_method_definition_t::alias_count_ptr, a pointer to the counter.
* vm_core.h, vm_insnhelper.c (rb_vm_frame_method_entry): added to
  search the current method entry from the value stack.
* vm_insnhelper.c (VM_CHECK_MODE): introduced to enable/disable
  assertions.
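
The heart of the change: each frame's ep[-1] slot now holds a single
VALUE that is either empty (Qfalse), a cref, or a method entry, both of
the latter being T_IMEMO objects, and rb_vm_frame_method_entry() recovers
the current method entry by walking the ep chain (see the vm_insnhelper.c
hunks below). The following standalone C sketch only models that lookup
idea; the names used here (slot_kind, frame_t, frame_method_entry) are
illustrative stand-ins, not the actual MRI types or API.

    #include <stdio.h>
    #include <stddef.h>

    /* Simplified model of the per-frame slot that now stores either
     * "nothing" (Qfalse), a cref (lexical scope) or a method entry. */
    typedef enum { SLOT_NONE, SLOT_CREF, SLOT_MENT } slot_kind;

    typedef struct {
        slot_kind kind;
        const char *method_name;  /* stands in for the imemo_ment payload */
    } slot_t;

    typedef struct frame {
        slot_t slot;              /* stands in for ep[-1] */
        struct frame *prev;       /* stands in for VM_EP_PREV_EP(ep) */
    } frame_t;

    /* Walk outward from the innermost frame until a method entry is
     * found, the way rb_vm_frame_method_entry() walks the ep chain. */
    static const char *
    frame_method_entry(const frame_t *f)
    {
        while (f) {
            if (f->slot.kind == SLOT_MENT) return f->slot.method_name;
            f = f->prev;
        }
        return NULL;  /* no enclosing method (e.g. top level) */
    }

    int
    main(void)
    {
        /* A block frame has no method entry of its own, but its chain
         * still reaches the aliased method's frame, which is roughly
         * why __callee__ can report :alias_foo for an orphan proc
         * ([Bug #11046]) in the test added below. */
        frame_t method_frame = { { SLOT_MENT, "alias_foo" }, NULL };
        frame_t block_frame  = { { SLOT_NONE, NULL }, &method_frame };

        printf("callee: %s\n", frame_method_entry(&block_frame));
        return 0;
    }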



git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@50728 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
ko1 2015-06-02 04:20:30 +00:00
Parent: ae042f21fb
Commit: 57b817f4c5
20 changed files with 849 additions and 641 deletions

ChangeLog

@ -1,3 +1,36 @@
Tue Jun 2 12:43:46 2015 Koichi Sasada <ko1@atdot.net>
* method.h: make rb_method_entry_t a VALUE.
Motivation and new data structure are described in [Bug #11203].
This patch also solves the following issues.
* [Bug #11200] Memory leak of method entries
* [Bug #11046] __callee__ returns incorrect method name in orphan
proc
* test/ruby/test_method.rb: add a test for [Bug #11046].
* vm_core.h: remove rb_control_frame_t::me. me is now located on the
value stack.
* vm_core.h, gc.c, vm_method.c: remove the unlinked_method... code
because method entries are now simple VALUEs.
* method.h: Now all method entries have their own independent method
definitions. Strictly speaking, this change is not essential,
but it prepares for future changes.
* rb_method_entry_t::flag is moved to rb_method_definition_t::flag.
* rb_method_definition_t::alias_count is now
rb_method_definition_t::alias_count_ptr, a pointer to the counter.
* vm_core.h, vm_insnhelper.c (rb_vm_frame_method_entry): added to
search the current method entry from the value stack.
* vm_insnhelper.c (VM_CHECK_MODE): introduced to enable/disable
assertions.
Tue Jun 2 10:46:36 2015 Eric Wong <e@80x24.org>
* test/socket/test_nonblock.rb: new test for sendmsg_nonblock

class.c

@ -243,18 +243,21 @@ rb_class_new(VALUE super)
static void
clone_method(VALUE klass, ID mid, const rb_method_entry_t *me)
{
VALUE newiseqval;
if (me->def && me->def->type == VM_METHOD_TYPE_ISEQ) {
rb_iseq_t *iseq;
rb_cref_t *new_cref;
newiseqval = rb_iseq_clone(me->def->body.iseq_body.iseq->self, klass);
GetISeqPtr(newiseqval, iseq);
rb_vm_rewrite_cref_stack(me->def->body.iseq_body.cref, me->klass, klass, &new_cref);
rb_add_method_iseq(klass, mid, iseq, new_cref, me->flag);
RB_GC_GUARD(newiseqval);
if (me->def) {
if (me->def->type == VM_METHOD_TYPE_ISEQ) {
VALUE newiseqval;
rb_cref_t *new_cref;
newiseqval = rb_iseq_clone(me->def->body.iseq.iseqval, klass);
rb_vm_rewrite_cref_stack(me->def->body.iseq.cref, me->klass, klass, &new_cref);
rb_add_method_iseq(klass, mid, newiseqval, new_cref, me->def->flag);
RB_GC_GUARD(newiseqval);
}
else {
rb_method_entry_set(klass, mid, me, me->def->flag);
}
}
else {
rb_method_entry_set(klass, mid, me, me->flag);
rb_bug("clone_method: unsupported");
}
}
@ -904,11 +907,9 @@ move_refined_method(st_data_t key, st_data_t value, st_data_t data)
if (me->def->body.orig_me) {
rb_method_entry_t *orig_me = me->def->body.orig_me, *new_me;
me->def->body.orig_me = NULL;
new_me = ALLOC(rb_method_entry_t);
*new_me = *me;
new_me = rb_method_entry_clone(me);
st_add_direct(tbl, key, (st_data_t) new_me);
*me = *orig_me;
xfree(orig_me);
rb_method_entry_copy(me, orig_me);
return ST_CONTINUE;
}
else {
@ -1125,7 +1126,7 @@ method_entry_i(st_data_t key, st_data_t value, st_data_t data)
type = -1; /* none */
}
else {
type = VISI(me->flag);
type = VISI(me->def->flag);
}
st_add_direct(arg->list, key, type);
}

cont.c

@ -1208,7 +1208,6 @@ fiber_init(VALUE fibval, VALUE proc)
th->cfp->iseq = 0;
th->cfp->proc = 0;
th->cfp->block_iseq = 0;
th->cfp->me = 0;
th->tag = 0;
th->local_storage = st_init_numtable();
th->local_storage_recursive_hash = Qnil;
@ -1256,7 +1255,7 @@ rb_fiber_start(void)
cont->value = Qnil;
th->errinfo = Qnil;
th->root_lep = rb_vm_ep_local_ep(proc->block.ep);
th->root_svar = Qnil;
th->root_svar = Qfalse;
fib->status = RUNNING;
cont->value = rb_vm_invoke_proc(th, proc, argc, argv, 0);

eval.c

@ -747,10 +747,11 @@ void
rb_raise_jump(VALUE mesg, VALUE cause)
{
rb_thread_t *th = GET_THREAD();
rb_control_frame_t *cfp = th->cfp;
VALUE klass = cfp->me->klass;
const rb_control_frame_t *cfp = th->cfp;
const rb_method_entry_t *me = rb_vm_frame_method_entry(cfp);
VALUE klass = me->klass;
VALUE self = cfp->self;
ID mid = cfp->me->called_id;
ID mid = me->called_id;
th->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(th->cfp);
EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, self, mid, klass, Qnil);
@ -922,15 +923,14 @@ rb_ensure(VALUE (*b_proc)(ANYARGS), VALUE data1, VALUE (*e_proc)(ANYARGS), VALUE
}
static const rb_method_entry_t *
method_entry_of_iseq(rb_control_frame_t *cfp, rb_iseq_t *iseq)
method_entry_of_iseq(const rb_control_frame_t *cfp, const rb_iseq_t *iseq)
{
rb_thread_t *th = GET_THREAD();
rb_control_frame_t *cfp_limit;
const rb_control_frame_t *cfp_limit;
cfp_limit = (rb_control_frame_t *)(th->stack + th->stack_size);
while (cfp_limit > cfp) {
if (cfp->iseq == iseq)
return cfp->me;
if (cfp->iseq == iseq) return rb_vm_frame_method_entry(cfp); /* TODO: fix me */
cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}
return 0;
@ -940,9 +940,11 @@ static ID
frame_func_id(rb_control_frame_t *cfp)
{
const rb_method_entry_t *me_local;
rb_iseq_t *iseq = cfp->iseq;
if (cfp->me) {
return cfp->me->def->original_id;
const rb_iseq_t *iseq = cfp->iseq;
const rb_method_entry_t *me = rb_vm_frame_method_entry(cfp);
if (me) {
return me->def->original_id;
}
while (iseq) {
if (RUBY_VM_IFUNC_P(iseq)) {
@ -952,7 +954,6 @@ frame_func_id(rb_control_frame_t *cfp)
}
me_local = method_entry_of_iseq(cfp, iseq);
if (me_local) {
cfp->me = me_local;
return me_local->def->original_id;
}
if (iseq->defined_method_id) {
@ -970,9 +971,11 @@ static ID
frame_called_id(rb_control_frame_t *cfp)
{
const rb_method_entry_t *me_local;
rb_iseq_t *iseq = cfp->iseq;
if (cfp->me) {
return cfp->me->called_id;
const rb_iseq_t *iseq = cfp->iseq;
const rb_method_entry_t *me = rb_vm_frame_method_entry(cfp);
if (me) {
return me->called_id;
}
while (iseq) {
if (RUBY_VM_IFUNC_P(iseq)) {
@ -982,7 +985,6 @@ frame_called_id(rb_control_frame_t *cfp)
}
me_local = method_entry_of_iseq(cfp, iseq);
if (me_local) {
cfp->me = me_local;
return me_local->called_id;
}
if (iseq->defined_method_id) {
@ -1488,9 +1490,8 @@ top_using(VALUE self, VALUE module)
const rb_cref_t *cref = rb_vm_cref();
rb_control_frame_t *prev_cfp = previous_frame(GET_THREAD());
if (CREF_NEXT(cref) || (prev_cfp && prev_cfp->me)) {
rb_raise(rb_eRuntimeError,
"main.using is permitted only at toplevel");
if (CREF_NEXT(cref) || (prev_cfp && rb_vm_frame_method_entry(prev_cfp))) {
rb_raise(rb_eRuntimeError, "main.using is permitted only at toplevel");
}
rb_using_module(cref, module);
return self;

gc.c

@ -403,6 +403,7 @@ typedef struct RVALUE {
struct vm_throw_data throw_data;
struct vm_ifunc ifunc;
struct MEMO memo;
struct rb_method_entry_struct ment;
} imemo;
struct {
struct RBasic basic;
@ -1923,21 +1924,10 @@ is_pointer_to_heap(rb_objspace_t *objspace, void *ptr)
return FALSE;
}
static int
free_method_entry_i(st_data_t key, st_data_t value, st_data_t data)
{
rb_method_entry_t *me = (rb_method_entry_t *)value;
if (!me->mark) {
rb_free_method_entry(me);
}
return ST_CONTINUE;
}
static void
rb_free_m_tbl(st_table *tbl)
{
if (tbl) {
st_foreach(tbl, free_method_entry_i, 0);
st_free_table(tbl);
}
}
@ -2106,7 +2096,6 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
break;
case T_RATIONAL:
case T_COMPLEX:
case T_IMEMO:
break;
case T_ICLASS:
/* Basically , T_ICLASS shares table with the module */
@ -2149,6 +2138,14 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
}
break;
case T_IMEMO:
{
if (imemo_type(obj) == imemo_ment) {
rb_free_method_entry(&RANY(obj)->as.imemo.ment);
}
}
break;
default:
rb_bug("gc_sweep(): unknown data type 0x%x(%p) 0x%"PRIxVALUE,
BUILTIN_TYPE(obj), (void*)obj, RBASIC(obj)->flags);
@ -3406,11 +3403,6 @@ gc_sweep_start(rb_objspace_t *objspace)
gc_stat_transition(objspace, gc_stat_sweeping);
/* sweep unlinked method entries */
if (GET_VM()->unlinked_method_entry_list) {
rb_sweep_method_entry(GET_VM());
}
/* sometimes heap_allocatable_pages is not 0 */
heap_pages_swept_slots = heap_allocatable_pages * HEAP_OBJ_LIMIT;
total_limit_slot = objspace_available_slots(objspace);
@ -3938,13 +3930,10 @@ mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me)
gc_mark(objspace, me->klass);
again:
if (!def) return;
switch (def->type) {
case VM_METHOD_TYPE_ISEQ:
gc_mark(objspace, def->body.iseq_body.iseq->self);
gc_mark(objspace, (VALUE)def->body.iseq_body.cref);
gc_mark(objspace, def->body.iseq.iseqval);
gc_mark(objspace, (VALUE)def->body.iseq.cref);
break;
case VM_METHOD_TYPE_ATTRSET:
case VM_METHOD_TYPE_IVAR:
@ -3954,13 +3943,10 @@ mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me)
gc_mark(objspace, def->body.proc);
break;
case VM_METHOD_TYPE_ALIAS:
mark_method_entry(objspace, def->body.alias.original_me);
gc_mark(objspace, (VALUE)def->body.alias.original_me);
return;
case VM_METHOD_TYPE_REFINED:
if (def->body.orig_me) {
def = def->body.orig_me->def;
goto again;
}
gc_mark(objspace, (VALUE)def->body.orig_me);
break;
case VM_METHOD_TYPE_CFUNC:
case VM_METHOD_TYPE_ZSUPER:
@ -3972,18 +3958,12 @@ mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me)
}
}
void
rb_mark_method_entry(const rb_method_entry_t *me)
{
mark_method_entry(&rb_objspace, me);
}
static int
mark_method_entry_i(st_data_t key, st_data_t value, st_data_t data)
{
const rb_method_entry_t *me = (const rb_method_entry_t *)value;
VALUE me = (VALUE)value;
struct mark_tbl_arg *arg = (void*)data;
mark_method_entry(arg->objspace, me);
gc_mark(arg->objspace, me);
return ST_CONTINUE;
}
@ -4295,7 +4275,7 @@ gc_mark_children(rb_objspace_t *objspace, VALUE obj)
gc_mark(objspace, RANY(obj)->as.imemo.cref.refinements);
return;
case imemo_svar:
gc_mark(objspace, (VALUE)RANY(obj)->as.imemo.svar.cref);
gc_mark(objspace, RANY(obj)->as.imemo.svar.cref_or_me);
gc_mark(objspace, RANY(obj)->as.imemo.svar.lastline);
gc_mark(objspace, RANY(obj)->as.imemo.svar.backref);
gc_mark(objspace, RANY(obj)->as.imemo.svar.others);
@ -4311,6 +4291,9 @@ gc_mark_children(rb_objspace_t *objspace, VALUE obj)
gc_mark(objspace, RANY(obj)->as.imemo.memo.v2);
gc_mark_maybe(objspace, RANY(obj)->as.imemo.memo.u3.value);
return;
case imemo_ment:
mark_method_entry(objspace, &RANY(obj)->as.imemo.ment);
return;
default:
rb_bug("T_IMEMO: unreachable");
}
@ -4600,9 +4583,6 @@ gc_mark_roots(rb_objspace_t *objspace, const char **categoryp)
MARK_CHECKPOINT("generic_ivars");
rb_mark_generic_ivar_tbl();
MARK_CHECKPOINT("live_method_entries");
rb_gc_mark_unlinked_live_method_entries(th->vm);
if (stress_to_class) rb_gc_mark(stress_to_class);
MARK_CHECKPOINT("finish");
@ -8953,10 +8933,14 @@ obj_info(VALUE obj)
IMEMO_NAME(throw_data);
IMEMO_NAME(ifunc);
IMEMO_NAME(memo);
IMEMO_NAME(ment);
default: rb_bug("unknown IMEMO");
#undef IMEMO_NAME
}
snprintf(buff, OBJ_INFO_BUFFERS_SIZE, "%s %s", buff, imemo_name);
if (imemo_type(obj) == imemo_ment) {
snprintf(buff, OBJ_INFO_BUFFERS_SIZE, "%s (type: %d)", buff, RANY(obj)->as.imemo.ment.def->type);
}
}
default:
break;

insns.def

@ -760,8 +760,9 @@ defined
const rb_method_entry_t *me = rb_method_entry(klass, SYM2ID(obj), 0);
if (me) {
if (!(me->flag & NOEX_PRIVATE)) {
if (!((me->flag & NOEX_PROTECTED) &&
const rb_method_definition_t *def = me->def;
if (!(def->flag & NOEX_PRIVATE)) {
if (!((def->flag & NOEX_PROTECTED) &&
!rb_obj_is_kind_of(GET_SELF(),
rb_class_real(klass)))) {
expr_type = DEFINED_METHOD;
@ -1015,9 +1016,9 @@ defineclass
vm_push_frame(th, class_iseq, VM_FRAME_MAGIC_CLASS,
klass, 0,
VM_ENVVAL_BLOCK_PTR(GET_BLOCK_PTR()),
vm_cref_push(th, klass, NOEX_PUBLIC, NULL),
(VALUE)vm_cref_push(th, klass, NOEX_PUBLIC, NULL),
class_iseq->iseq_encoded, GET_SP(),
class_iseq->local_size, 0, class_iseq->stack_max);
class_iseq->local_size, class_iseq->stack_max);
RESTORE_REGS();
NEXT_INSN();

internal.h

@ -535,6 +535,7 @@ enum imemo_type {
imemo_throw_data = 3,
imemo_ifunc = 4,
imemo_memo = 5,
imemo_ment = 6,
imemo_mask = 0x07
};
@ -558,7 +559,7 @@ typedef struct rb_cref_struct {
struct vm_svar {
VALUE flags;
const rb_cref_t * const cref;
const VALUE cref_or_me;
const VALUE lastline;
const VALUE backref;
const VALUE others;
@ -615,6 +616,8 @@ struct MEMO {
#define NEW_MEMO_FOR(type, value) \
((value) = rb_ary_tmp_new_fill(type_roomof(type, VALUE)), MEMO_FOR(type, value))
/* ment is in method.h */
/* global variable */
struct rb_global_entry {

method.h

@ -44,6 +44,14 @@ typedef enum {
/* method data type */
typedef struct rb_method_entry_struct {
VALUE flags;
VALUE reserved;
struct rb_method_definition_struct * const def;
ID called_id;
const VALUE klass; /* should be marked */
} rb_method_entry_t;
typedef enum {
VM_METHOD_TYPE_ISEQ,
VM_METHOD_TYPE_CFUNC,
@ -61,6 +69,14 @@ typedef enum {
END_OF_ENUMERATION(VM_METHOD_TYPE)
} rb_method_type_t;
typedef struct rb_iseq_struct rb_iseq_t;
typedef struct rb_method_iseq_struct {
rb_iseq_t * const iseqptr; /* should be separated from iseqval */
rb_cref_t * const cref; /* should be marked */
const VALUE iseqval; /* should be marked */
} rb_method_iseq_t;
typedef struct rb_method_cfunc_struct {
VALUE (*func)(ANYARGS);
VALUE (*invoker)(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv);
@ -69,25 +85,21 @@ typedef struct rb_method_cfunc_struct {
typedef struct rb_method_attr_struct {
ID id;
const VALUE location;
const VALUE location; /* should be marked */
} rb_method_attr_t;
typedef struct rb_method_alias_struct {
const struct rb_method_entry_struct *original_me; /* original_me->klass is original owner */
} rb_method_alias_t;
typedef struct rb_iseq_struct rb_iseq_t;
typedef struct rb_method_definition_struct {
rb_method_flag_t flag;
rb_method_type_t type; /* method type */
int alias_count;
int *alias_count_ptr;
ID original_id;
union {
struct {
rb_iseq_t *const iseq; /* should be marked */
rb_cref_t *cref;
} iseq_body;
rb_method_iseq_t iseq;
rb_method_cfunc_t cfunc;
rb_method_attr_t attr;
rb_method_alias_t alias;
@ -102,26 +114,13 @@ typedef struct rb_method_definition_struct {
} body;
} rb_method_definition_t;
typedef struct rb_method_entry_struct {
rb_method_flag_t flag;
char mark;
rb_method_definition_t *def;
ID called_id;
VALUE klass; /* should be marked */
} rb_method_entry_t;
struct unlinked_method_entry_list_entry {
struct unlinked_method_entry_list_entry *next;
rb_method_entry_t *me;
};
#define UNDEFINED_METHOD_ENTRY_P(me) (!(me) || !(me)->def || (me)->def->type == VM_METHOD_TYPE_UNDEF)
#define UNDEFINED_REFINED_METHOD_P(def) \
((def)->type == VM_METHOD_TYPE_REFINED && \
UNDEFINED_METHOD_ENTRY_P((def)->body.orig_me))
void rb_add_method_cfunc(VALUE klass, ID mid, VALUE (*func)(ANYARGS), int argc, rb_method_flag_t noex);
void rb_add_method_iseq(VALUE klass, ID mid, rb_iseq_t *iseq, rb_cref_t *cref, rb_method_flag_t noex);
void rb_add_method_iseq(VALUE klass, ID mid, VALUE iseq, rb_cref_t *cref, rb_method_flag_t noex);
rb_method_entry_t *rb_add_method(VALUE klass, ID mid, rb_method_type_t type, void *option, rb_method_flag_t noex);
rb_method_entry_t *rb_method_entry(VALUE klass, ID id, VALUE *define_class_ptr);
rb_method_entry_t *rb_method_entry_at(VALUE obj, ID id);
@ -145,8 +144,11 @@ VALUE rb_method_entry_location(const rb_method_entry_t *me);
VALUE rb_mod_method_location(VALUE mod, ID id);
VALUE rb_obj_method_location(VALUE obj, ID id);
void rb_mark_method_entry(const rb_method_entry_t *me);
void rb_free_method_entry(const rb_method_entry_t *me);
void rb_sweep_method_entry(void *vm);
rb_method_entry_t *rb_method_entry_create(ID called_id, VALUE klass, rb_method_definition_t *def);
rb_method_entry_t *rb_method_entry_clone(const rb_method_entry_t *me);
void rb_method_entry_copy(rb_method_entry_t *dst, rb_method_entry_t *src);
#endif /* METHOD_H */

proc.c

@ -21,8 +21,7 @@ struct METHOD {
VALUE rclass;
VALUE defined_class;
ID id;
rb_method_entry_t *me;
struct unlinked_method_entry_list_entry *ume;
rb_method_entry_t * const me;
};
VALUE rb_cUnboundMethod;
@ -1100,18 +1099,12 @@ bm_mark(void *ptr)
rb_gc_mark(data->defined_class);
rb_gc_mark(data->rclass);
rb_gc_mark(data->recv);
if (data->me) rb_mark_method_entry(data->me);
rb_gc_mark((VALUE)data->me);
}
static void
bm_free(void *ptr)
{
struct METHOD *data = ptr;
struct unlinked_method_entry_list_entry *ume = data->ume;
data->me->mark = 0;
ume->me = data->me;
ume->next = GET_VM()->unlinked_method_entry_list;
GET_VM()->unlinked_method_entry_list = ume;
xfree(ptr);
}
@ -1167,22 +1160,13 @@ mnew_missing(VALUE rclass, VALUE klass, VALUE obj, ID id, ID rid, VALUE mclass)
data->defined_class = klass;
data->id = rid;
me = ALLOC(rb_method_entry_t);
data->me = me;
me->flag = 0;
me->mark = 0;
me->called_id = id;
me->klass = klass;
me->def = 0;
def = ALLOC(rb_method_definition_t);
me->def = def;
def = ZALLOC(rb_method_definition_t);
def->flag = 0;
def->type = VM_METHOD_TYPE_MISSING;
def->original_id = id;
def->alias_count = 0;
data->ume = ALLOC(struct unlinked_method_entry_list_entry);
data->me->def->alias_count++;
me = rb_method_entry_create(id, klass, def);
RB_OBJ_WRITE(method, &data->me, me);
OBJ_INFECT(method, klass);
@ -1210,13 +1194,13 @@ mnew_internal(const rb_method_entry_t *me, VALUE defined_class, VALUE klass,
}
def = me->def;
if (flag == NOEX_UNDEF) {
flag = me->flag;
flag = def->flag;
if (scope && (flag & NOEX_MASK) != NOEX_PUBLIC) {
if (!error) return Qnil;
rb_print_inaccessible(klass, id, flag & NOEX_MASK);
}
}
if (def && def->type == VM_METHOD_TYPE_ZSUPER) {
if (def->type == VM_METHOD_TYPE_ZSUPER) {
klass = RCLASS_SUPER(defined_class);
id = def->original_id;
me = rb_method_entry_without_refinements(klass, id, &defined_class);
@ -1236,13 +1220,8 @@ mnew_internal(const rb_method_entry_t *me, VALUE defined_class, VALUE klass,
data->rclass = rclass;
data->defined_class = defined_class;
data->id = rid;
data->me = ALLOC(rb_method_entry_t);
*data->me = *me;
data->ume = ALLOC(struct unlinked_method_entry_list_entry);
data->me->def->alias_count++;
RB_OBJ_WRITE(method, &data->me, rb_method_entry_clone(me));
OBJ_INFECT(method, klass);
return method;
}
@ -1364,12 +1343,9 @@ method_unbind(VALUE obj)
&method_data_type, data);
data->recv = Qundef;
data->id = orig->id;
data->me = ALLOC(rb_method_entry_t);
*data->me = *orig->me;
if (orig->me->def) orig->me->def->alias_count++;
RB_OBJ_WRITE(method, &data->me, rb_method_entry_clone(orig->me));
data->rclass = orig->rclass;
data->defined_class = orig->defined_class;
data->ume = ALLOC(struct unlinked_method_entry_list_entry);
OBJ_INFECT(method, obj);
return method;
@ -1832,12 +1808,11 @@ method_clone(VALUE self)
TypedData_Get_Struct(self, struct METHOD, &method_data_type, orig);
clone = TypedData_Make_Struct(CLASS_OF(self), struct METHOD, &method_data_type, data);
CLONESETUP(clone, self);
*data = *orig;
data->me = ALLOC(rb_method_entry_t);
*data->me = *orig->me;
if (data->me->def) data->me->def->alias_count++;
data->ume = ALLOC(struct unlinked_method_entry_list_entry);
data->recv = orig->recv;
data->rclass = orig->rclass;
data->defined_class = orig->defined_class;
data->id = orig->id;
RB_OBJ_WRITE(clone, &data->me, rb_method_entry_clone(orig->me));
return clone;
}
@ -2020,10 +1995,11 @@ umethod_bind(VALUE method, VALUE recv)
}
method = TypedData_Make_Struct(rb_cMethod, struct METHOD, &method_data_type, bound);
*bound = *data;
bound->me = ALLOC(rb_method_entry_t);
*bound->me = *data->me;
if (bound->me->def) bound->me->def->alias_count++;
bound->recv = data->recv;
bound->rclass = data->rclass;
bound->defined_class = data->defined_class;
bound->id = data->id;
RB_OBJ_WRITE(method, &bound->me, rb_method_entry_clone(data->me));
rclass = CLASS_OF(recv);
if (BUILTIN_TYPE(bound->defined_class) == T_MODULE) {
VALUE ic = rb_class_search_ancestor(rclass, bound->defined_class);
@ -2036,7 +2012,6 @@ umethod_bind(VALUE method, VALUE recv)
}
bound->recv = recv;
bound->rclass = rclass;
data->ume = ALLOC(struct unlinked_method_entry_list_entry);
return method;
}
@ -2071,7 +2046,8 @@ rb_method_entry_min_max_arity(const rb_method_entry_t *me, int *max)
case VM_METHOD_TYPE_BMETHOD:
return rb_proc_min_max_arity(def->body.proc, max);
case VM_METHOD_TYPE_ISEQ: {
rb_iseq_t *iseq = def->body.iseq_body.iseq;
rb_iseq_t *iseq;
GetISeqPtr(def->body.iseq.iseqval, iseq);
return rb_iseq_min_max_arity(iseq, max);
}
case VM_METHOD_TYPE_UNDEF:
@ -2207,7 +2183,11 @@ method_def_iseq(const rb_method_definition_t *def)
{
switch (def->type) {
case VM_METHOD_TYPE_ISEQ:
return def->body.iseq_body.iseq;
{
rb_iseq_t *iseq;
GetISeqPtr(def->body.iseq.iseqval, iseq);
return iseq;
}
case VM_METHOD_TYPE_BMETHOD:
return get_proc_iseq(def->body.proc, 0);
case VM_METHOD_TYPE_ALIAS:
@ -2240,7 +2220,7 @@ method_cref(VALUE method)
again:
switch (def->type) {
case VM_METHOD_TYPE_ISEQ:
return def->body.iseq_body.cref;
return def->body.iseq.cref;
case VM_METHOD_TYPE_ALIAS:
def = def->body.alias.original_me->def;
goto again;
@ -2675,6 +2655,7 @@ proc_curry(int argc, const VALUE *argv, VALUE self)
else {
sarity = FIX2INT(arity);
if (rb_proc_lambda_p(self)) {
bp();
rb_check_arity(sarity, min_arity, max_arity);
}
}

struct.c

@ -176,9 +176,8 @@ define_aref_method(VALUE nstr, VALUE name, VALUE off)
{
rb_control_frame_t *FUNC_FASTCALL(rb_vm_opt_struct_aref)(rb_thread_t *, rb_control_frame_t *);
VALUE iseqval = rb_method_for_self_aref(name, off, rb_vm_opt_struct_aref);
rb_iseq_t *iseq = DATA_PTR(iseqval);
rb_add_method_iseq(nstr, SYM2ID(name), iseq, NULL, NOEX_PUBLIC);
rb_add_method_iseq(nstr, SYM2ID(name), iseqval, NULL, NOEX_PUBLIC);
RB_GC_GUARD(iseqval);
}
@ -187,9 +186,8 @@ define_aset_method(VALUE nstr, VALUE name, VALUE off)
{
rb_control_frame_t *FUNC_FASTCALL(rb_vm_opt_struct_aset)(rb_thread_t *, rb_control_frame_t *);
VALUE iseqval = rb_method_for_self_aset(name, off, rb_vm_opt_struct_aset);
rb_iseq_t *iseq = DATA_PTR(iseqval);
rb_add_method_iseq(nstr, SYM2ID(name), iseq, NULL, NOEX_PUBLIC);
rb_add_method_iseq(nstr, SYM2ID(name), iseqval, NULL, NOEX_PUBLIC);
RB_GC_GUARD(iseqval);
}

test/ruby/test_method.rb

@ -124,6 +124,11 @@ class TestMethod < Test::Unit::TestCase
assert_nil(eval("class TestCallee; __callee__; end"))
end
def test_orphan_callee
c = Class.new{def foo; proc{__callee__}; end; alias alias_foo foo}
assert_equal(:alias_foo, c.new.alias_foo.call, '[Bug #11046]')
end
def test_method_in_define_method_block
bug4606 = '[ruby-core:35386]'
c = Class.new do

thread.c

@ -580,7 +580,7 @@ thread_start_func_2(rb_thread_t *th, VALUE *stack_start, VALUE *register_stack_s
GetProcPtr(th->first_proc, proc);
th->errinfo = Qnil;
th->root_lep = rb_vm_ep_local_ep(proc->block.ep);
th->root_svar = Qnil;
th->root_svar = Qfalse;
EXEC_EVENT_HOOK(th, RUBY_EVENT_THREAD_BEGIN, th->self, 0, 0, Qundef);
th->value = rb_vm_invoke_proc(th, proc, (int)RARRAY_LEN(args), RARRAY_CONST_PTR(args), 0);
EXEC_EVENT_HOOK(th, RUBY_EVENT_THREAD_END, th->self, 0, 0, Qundef);

vm.c

@ -252,8 +252,8 @@ vm_set_top_stack(rb_thread_t *th, VALUE iseqval)
vm_push_frame(th, iseq, VM_FRAME_MAGIC_TOP | VM_FRAME_FLAG_FINISH,
th->top_self, rb_cObject,
VM_ENVVAL_BLOCK_PTR(0),
vm_cref_new_toplevel(th),
iseq->iseq_encoded, th->cfp->sp, iseq->local_size, 0, iseq->stack_max);
(VALUE)vm_cref_new_toplevel(th), /* cref or me */
iseq->iseq_encoded, th->cfp->sp, iseq->local_size, iseq->stack_max);
}
static void
@ -265,9 +265,9 @@ vm_set_eval_stack(rb_thread_t * th, VALUE iseqval, const rb_cref_t *cref, rb_blo
vm_push_frame(th, iseq, VM_FRAME_MAGIC_EVAL | VM_FRAME_FLAG_FINISH,
base_block->self, base_block->klass,
VM_ENVVAL_PREV_EP_PTR(base_block->ep),
cref,
(VALUE)cref, /* cref or me */
iseq->iseq_encoded,
th->cfp->sp, iseq->local_size, 0, iseq->stack_max);
th->cfp->sp, iseq->local_size, iseq->stack_max);
}
static void
@ -339,7 +339,8 @@ void
rb_vm_pop_cfunc_frame(void)
{
rb_thread_t *th = GET_THREAD();
const rb_method_entry_t *me = th->cfp->me;
const rb_method_entry_t *me = rb_vm_frame_method_entry(th->cfp);
EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, th->cfp->self, me->called_id, me->klass, Qnil);
RUBY_DTRACE_CMETHOD_RETURN_HOOK(th, me->klass, me->called_id);
vm_pop_frame(th);
@ -510,7 +511,7 @@ vm_make_env_each(const rb_thread_t *const th, rb_control_frame_t *const cfp,
}
if (!RUBY_VM_NORMAL_ISEQ_P(cfp->iseq)) {
local_size = 2;
local_size = 2; /* specva + cref/me */
}
else {
local_size = cfp->iseq->local_size;
@ -816,7 +817,7 @@ invoke_block_from_c(rb_thread_t *th, const rb_block_t *block,
int i, opt_pc, arg_size = iseq->param.size;
int type = block_proc_is_lambda(block->proc) ? VM_FRAME_MAGIC_LAMBDA : VM_FRAME_MAGIC_BLOCK;
const rb_method_entry_t *me = th->passed_bmethod_me;
th->passed_bmethod_me = 0;
th->passed_bmethod_me = NULL;
cfp = th->cfp;
for (i=0; i<argc; i++) {
@ -831,10 +832,10 @@ invoke_block_from_c(rb_thread_t *th, const rb_block_t *block,
vm_push_frame(th, iseq, type | VM_FRAME_FLAG_FINISH | VM_FRAME_FLAG_BMETHOD,
self, defined_class,
VM_ENVVAL_PREV_EP_PTR(block->ep),
cref,
(VALUE)me, /* cref or method (TODO: can we ignore cref?) */
iseq->iseq_encoded + opt_pc,
cfp->sp + arg_size, iseq->local_size - arg_size,
me, iseq->stack_max);
iseq->stack_max);
RUBY_DTRACE_METHOD_ENTRY_HOOK(th, me->klass, me->called_id);
EXEC_EVENT_HOOK(th, RUBY_EVENT_CALL, self, me->called_id, me->klass, Qnil);
@ -843,10 +844,10 @@ invoke_block_from_c(rb_thread_t *th, const rb_block_t *block,
vm_push_frame(th, iseq, type | VM_FRAME_FLAG_FINISH,
self, defined_class,
VM_ENVVAL_PREV_EP_PTR(block->ep),
cref,
(VALUE)cref, /* cref or method */
iseq->iseq_encoded + opt_pc,
cfp->sp + arg_size, iseq->local_size - arg_size,
0, iseq->stack_max);
iseq->stack_max);
}
ret = vm_exec(th);
@ -1349,7 +1350,9 @@ hook_before_rewind(rb_thread_t *th, rb_control_frame_t *cfp)
case VM_FRAME_MAGIC_LAMBDA:
if (VM_FRAME_TYPE_BMETHOD_P(th->cfp)) {
EXEC_EVENT_HOOK(th, RUBY_EVENT_B_RETURN, th->cfp->self, 0, 0, Qnil);
EXEC_EVENT_HOOK_AND_POP_FRAME(th, RUBY_EVENT_RETURN, th->cfp->self, th->cfp->me->called_id, th->cfp->me->klass, Qnil);
EXEC_EVENT_HOOK_AND_POP_FRAME(th, RUBY_EVENT_RETURN, th->cfp->self,
rb_vm_frame_method_entry(th->cfp)->called_id,
rb_vm_frame_method_entry(th->cfp)->klass, Qnil);
}
else {
EXEC_EVENT_HOOK_AND_POP_FRAME(th, RUBY_EVENT_B_RETURN, th->cfp->self, 0, 0, Qnil);
@ -1495,9 +1498,10 @@ vm_exec(rb_thread_t *th)
while (th->cfp->pc == 0 || th->cfp->iseq == 0) {
if (UNLIKELY(VM_FRAME_TYPE(th->cfp) == VM_FRAME_MAGIC_CFUNC)) {
const rb_method_entry_t *me = th->cfp->me;
EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, th->cfp->self, me->called_id, me->klass, Qnil);
RUBY_DTRACE_METHOD_RETURN_HOOK(th, me->klass, me->called_id);
EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, th->cfp->self,
rb_vm_frame_method_entry(th->cfp)->called_id, rb_vm_frame_method_entry(th->cfp)->klass, Qnil);
RUBY_DTRACE_METHOD_RETURN_HOOK(th, rb_vm_frame_method_entry(th->cfp)->klass,
rb_vm_frame_method_entry(th->cfp)->called_id);
}
th->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(th->cfp);
}
@ -1662,11 +1666,11 @@ vm_exec(rb_thread_t *th)
vm_push_frame(th, catch_iseq, VM_FRAME_MAGIC_RESCUE,
cfp->self, cfp->klass,
VM_ENVVAL_PREV_EP_PTR(cfp->ep),
NULL,
0, /* cref or me */
catch_iseq->iseq_encoded,
cfp->sp + 1 /* push value */,
catch_iseq->local_size - 1,
cfp->me, catch_iseq->stack_max);
catch_iseq->stack_max);
state = 0;
th->state = 0;
@ -1726,9 +1730,11 @@ int
rb_vm_control_frame_id_and_class(const rb_control_frame_t *cfp, ID *idp, VALUE *klassp)
{
rb_iseq_t *iseq = cfp->iseq;
if (!iseq && cfp->me) {
if (idp) *idp = cfp->me->def->original_id;
if (klassp) *klassp = cfp->me->klass;
const rb_method_entry_t *me = rb_vm_frame_method_entry(cfp);
if (!iseq && me) { /* TODO: me should know all */
if (idp) *idp = me->def->original_id;
if (klassp) *klassp = me->klass;
return 1;
}
while (iseq) {
@ -1766,6 +1772,7 @@ VALUE
rb_thread_current_status(const rb_thread_t *th)
{
const rb_control_frame_t *cfp = th->cfp;
const rb_method_entry_t *me;
VALUE str = Qnil;
if (cfp->iseq != 0) {
@ -1776,10 +1783,10 @@ rb_thread_current_status(const rb_thread_t *th)
iseq->location.path, line_no, iseq->location.label);
}
}
else if (cfp->me->def->original_id) {
else if ((me = rb_vm_frame_method_entry(cfp)) && me->def->original_id) {
str = rb_sprintf("`%"PRIsVALUE"#%"PRIsVALUE"' (cfunc)",
rb_class_path(cfp->me->klass),
rb_id2str(cfp->me->def->original_id));
rb_class_path(me->klass),
rb_id2str(me->def->original_id));
}
return str;
@ -1795,8 +1802,9 @@ rb_vm_call_cfunc(VALUE recv, VALUE (*func)(VALUE), VALUE arg,
VALUE val;
vm_push_frame(th, DATA_PTR(iseqval), VM_FRAME_MAGIC_TOP | VM_FRAME_FLAG_FINISH,
recv, CLASS_OF(recv), VM_ENVVAL_BLOCK_PTR(blockptr), vm_cref_new_toplevel(th),
0, reg_cfp->sp, 1, 0, 0);
recv, CLASS_OF(recv), VM_ENVVAL_BLOCK_PTR(blockptr),
(VALUE)vm_cref_new_toplevel(th), /* cref or me */
0, reg_cfp->sp, 1, 0);
val = (*func)(arg);
@ -2075,12 +2083,6 @@ rb_thread_mark(void *ptr)
if (iseq) {
rb_gc_mark(RUBY_VM_NORMAL_ISEQ_P(iseq) ? iseq->self : (VALUE)iseq);
}
if (cfp->me) {
/* bitmap marking `me' does not seem worth the trouble:
* [ruby-core:64340] [ruby-core:64341] */
((rb_method_entry_t *)cfp->me)->mark = 1;
rb_mark_method_entry(cfp->me);
}
cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}
}
@ -2232,16 +2234,16 @@ th_init(rb_thread_t *th, VALUE self)
th->cfp = (void *)(th->stack + th->stack_size);
vm_push_frame(th, 0 /* dummy iseq */, VM_FRAME_MAGIC_DUMMY | VM_FRAME_FLAG_FINISH,
Qnil /* dummy self */, Qnil /* dummy klass */, VM_ENVVAL_BLOCK_PTR(0),
NULL /* dummy cref */,
0 /* dummy pc */, th->stack, 1, 0, 0);
vm_push_frame(th, 0 /* dummy iseq */, VM_FRAME_MAGIC_DUMMY | VM_FRAME_FLAG_FINISH /* dummy frame */,
Qnil /* dummy self */, Qnil /* dummy klass */, VM_ENVVAL_BLOCK_PTR(0) /* dummy block ptr */,
0 /* dummy cref/me */,
0 /* dummy pc */, th->stack, 1, 0);
th->status = THREAD_RUNNABLE;
th->errinfo = Qnil;
th->last_status = Qnil;
th->waiting_fd = -1;
th->root_svar = Qnil;
th->root_svar = Qfalse;
th->local_storage_recursive_hash = Qnil;
th->local_storage_recursive_hash_for_trace = Qnil;
#ifdef NON_SCALAR_THREAD_ID
@ -2266,7 +2268,7 @@ ruby_thread_init(VALUE self)
th->top_wrapper = 0;
th->top_self = rb_vm_top_self();
th->root_svar = Qnil;
th->root_svar = Qfalse;
return self;
}
@ -2304,11 +2306,11 @@ vm_define_method(rb_thread_t *th, VALUE obj, ID id, VALUE iseqval,
/* dup */
RB_OBJ_WRITE(miseq->self, &miseq->klass, klass);
miseq->defined_method_id = id;
rb_add_method_iseq(klass, id, miseq, cref, noex);
rb_add_method_iseq(klass, id, iseqval, cref, noex);
if (!is_singleton && noex == NOEX_MODFUNC) {
klass = rb_singleton_class(klass);
rb_add_method_iseq(klass, id, miseq, cref, NOEX_PUBLIC);
rb_add_method_iseq(klass, id, iseqval, cref, NOEX_PUBLIC);
}
}

vm_args.c

@ -689,8 +689,9 @@ raise_argument_error(rb_thread_t *th, const rb_iseq_t *iseq, const VALUE exc)
VALUE at;
if (iseq) {
vm_push_frame(th, iseq, VM_FRAME_MAGIC_DUMMY, Qnil /* self */, Qnil /* klass */, Qnil /* specval*/, NULL /* cref */,
iseq->iseq_encoded, th->cfp->sp, 0 /* local_size */, 0 /* me */, 0 /* stack_max */);
vm_push_frame(th, iseq, VM_FRAME_MAGIC_DUMMY, Qnil /* self */, Qnil /* klass */,
VM_ENVVAL_BLOCK_PTR(0) /* specval*/, Qfalse /* me or cref */,
iseq->iseq_encoded, th->cfp->sp, 1 /* local_size (cref/me) */, 0 /* stack_max */);
at = rb_vm_backtrace_object();
vm_pop_frame(th);
}

vm_backtrace.c

@ -465,7 +465,8 @@ backtrace_each(rb_thread_t *th,
}
}
else if (RUBYVM_CFUNC_FRAME_P(cfp)) {
ID mid = cfp->me->def ? cfp->me->def->original_id : cfp->me->called_id;
const rb_method_entry_t *me = rb_vm_frame_method_entry(cfp);
ID mid = me->def->original_id;
iter_cfunc(arg, cfp, mid);
}

vm_core.h

@ -440,8 +440,6 @@ typedef struct rb_vm_struct {
VALUE verbose, debug, orig_progname, progname;
VALUE coverages;
struct unlinked_method_entry_list_entry *unlinked_method_entry_list;
VALUE defined_module_hash;
#if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
@ -513,10 +511,9 @@ typedef struct rb_control_frame_struct {
VALUE *ep; /* cfp[6] / block[2] */
rb_iseq_t *block_iseq; /* cfp[7] / block[3] */
VALUE proc; /* cfp[8] / block[4] */
const rb_method_entry_t *me;/* cfp[9] */
#if VM_DEBUG_BP_CHECK
VALUE *bp_check; /* cfp[10] */
VALUE *bp_check; /* cfp[9] */
#endif
} rb_control_frame_t;
@ -954,7 +951,6 @@ void rb_vm_gvl_destroy(rb_vm_t *vm);
VALUE rb_vm_call(rb_thread_t *th, VALUE recv, VALUE id, int argc,
const VALUE *argv, const rb_method_entry_t *me,
VALUE defined_class);
void rb_gc_mark_unlinked_live_method_entries(void *pvm);
void rb_thread_start_timer_thread(void);
void rb_thread_stop_timer_thread(int);
@ -1001,6 +997,8 @@ int rb_autoloading_value(VALUE mod, ID id, VALUE* value);
void rb_vm_rewrite_cref_stack(rb_cref_t *node, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr);
const rb_method_entry_t *rb_vm_frame_method_entry(const rb_control_frame_t *cfp);
#define sysstack_error GET_VM()->special_exceptions[ruby_error_sysstack]
#define RUBY_CONST_ASSERT(expr) (1/!!(expr)) /* expr must be a compile-time constant */

vm_dump.c

@ -36,6 +36,8 @@ control_frame_dump(rb_thread_t *th, rb_control_frame_t *cfp)
const char *magic, *iseq_name = "-", *selfstr = "-", *biseq_name = "-";
VALUE tmp;
const rb_method_entry_t *me;
if (cfp->block_iseq != 0 && !RUBY_VM_IFUNC_P(cfp->block_iseq)) {
biseq_name = ""; /* RSTRING(cfp->block_iseq->location.label)->ptr; */
}
@ -105,8 +107,8 @@ control_frame_dump(rb_thread_t *th, rb_control_frame_t *cfp)
}
}
}
else if (cfp->me) {
iseq_name = rb_id2name(cfp->me->def->original_id);
else if ((me = rb_vm_frame_method_entry(cfp)) != NULL) {
iseq_name = rb_id2name(me->def->original_id);
snprintf(posbuf, MAX_POSBUF, ":%s", iseq_name);
line = -1;
}

vm_eval.c

@ -120,8 +120,8 @@ vm_call0_cfunc_with_frame(rb_thread_t* th, rb_call_info_t *ci, const VALUE *argv
rb_control_frame_t *reg_cfp = th->cfp;
vm_push_frame(th, 0, VM_FRAME_MAGIC_CFUNC, recv, defined_class,
VM_ENVVAL_BLOCK_PTR(blockptr), NULL /* cref */,
0, reg_cfp->sp, 1, me, 0);
VM_ENVVAL_BLOCK_PTR(blockptr), (VALUE)me,
0, reg_cfp->sp, 1, 0);
if (len >= 0) rb_check_arity(argc, len, len);
@ -200,8 +200,7 @@ vm_call0_body(rb_thread_t* th, rb_call_info_t *ci, const VALUE *argv)
case VM_METHOD_TYPE_REFINED:
{
const rb_method_type_t type = ci->me->def->type;
if (type == VM_METHOD_TYPE_REFINED &&
ci->me->def->body.orig_me) {
if (type == VM_METHOD_TYPE_REFINED && ci->me->def->body.orig_me) {
ci->me = ci->me->def->body.orig_me;
goto again;
}
@ -283,7 +282,7 @@ vm_call_super(rb_thread_t *th, int argc, const VALUE *argv)
klass = RCLASS_ORIGIN(cfp->klass);
klass = RCLASS_SUPER(klass);
id = cfp->me->def->original_id;
id = rb_vm_frame_method_entry(cfp)->def->original_id;
me = rb_method_entry(klass, id, &klass);
if (!me) {
return method_missing(recv, id, argc, argv, NOEX_SUPER);
@ -392,7 +391,7 @@ check_funcall_respond_to(rb_thread_t *th, VALUE klass, VALUE recv, ID mid)
VALUE defined_class;
const rb_method_entry_t *me = rb_method_entry(klass, idRespond_to, &defined_class);
if (me && !(me->flag & NOEX_BASIC)) {
if (me && !(me->def->flag & NOEX_BASIC)) {
const rb_block_t *passed_block = th->passed_block;
VALUE args[2], result;
int arity = rb_method_entry_arity(me);
@ -575,7 +574,7 @@ rb_method_call_status(rb_thread_t *th, const rb_method_entry_t *me, call_type sc
}
klass = me->klass;
oid = me->def->original_id;
noex = me->flag;
noex = me->def->flag;
if (oid != idMethodMissing) {
/* receiver specified form for private method */

vm_insnhelper.c

@ -16,6 +16,14 @@
#include "probes.h"
#include "probes_helper.h"
/*
* Enable check mode.
* 1: enable local assertions.
*/
#ifndef VM_CHECK_MODE
#define VM_CHECK_MODE 0
#endif
/* control stack frame */
#ifndef INLINE
@ -39,6 +47,40 @@ vm_stackoverflow(void)
rb_exc_raise(ruby_vm_sysstack_error_copy());
}
#if VM_CHECK_MODE > 0
static void
check_frame(int magic, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me)
{
if (req_block && !VM_ENVVAL_BLOCK_PTR_P(specval)) {
rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
}
if (!req_block && VM_ENVVAL_BLOCK_PTR_P(specval)) {
rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
}
if (req_me) {
if (!RB_TYPE_P(cref_or_me, T_IMEMO) || imemo_type(cref_or_me) != imemo_ment) {
rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
}
}
else {
if (req_cref && (!RB_TYPE_P(cref_or_me, T_IMEMO) || imemo_type(cref_or_me) != imemo_cref)) {
rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
}
else { /* cref or Qfalse */
if (cref_or_me != Qfalse && (!RB_TYPE_P(cref_or_me, T_IMEMO) || imemo_type(cref_or_me) != imemo_cref)) {
if ((magic == VM_FRAME_MAGIC_LAMBDA || magic == VM_FRAME_MAGIC_IFUNC) && (RB_TYPE_P(cref_or_me, T_IMEMO) && imemo_type(cref_or_me) == imemo_ment)) {
/* ignore */
}
else {
rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
}
}
}
}
}
#endif
static inline rb_control_frame_t *
vm_push_frame(rb_thread_t *th,
const rb_iseq_t *iseq,
@ -46,16 +88,37 @@ vm_push_frame(rb_thread_t *th,
VALUE self,
VALUE klass,
VALUE specval,
const rb_cref_t *cref,
VALUE cref_or_me,
const VALUE *pc,
VALUE *sp,
int local_size,
const rb_method_entry_t *me,
int stack_max)
{
rb_control_frame_t *const cfp = th->cfp - 1;
int i;
#if VM_CHECK_MODE > 0
int magic = (int)(type & VM_FRAME_MAGIC_MASK);
#define CHECK(magic, req_block, req_me, req_cref) case magic: check_frame(magic, req_block, req_me, req_cref, specval, cref_or_me); break;
switch (magic) {
/* BLK ME CREF */
CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE);
CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE);
CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE);
CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE);
CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE);
CHECK(VM_FRAME_MAGIC_PROC, FALSE, FALSE, FALSE);
CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE);
CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE);
CHECK(VM_FRAME_MAGIC_LAMBDA, FALSE, FALSE, FALSE);
CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE);
CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE);
default:
rb_bug("vm_push_frame: unknown type (%x)", magic);
}
#endif
/* check stack overflow */
CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
@ -69,7 +132,7 @@ vm_push_frame(rb_thread_t *th,
}
/* set special val */
sp[-1] = (VALUE)cref;
sp[-1] = cref_or_me; /* Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
sp[ 0] = specval;
/* setup vm control frame stack */
@ -85,7 +148,7 @@ vm_push_frame(rb_thread_t *th,
cfp->self = self;
cfp->block_iseq = 0;
cfp->proc = 0;
cfp->me = me;
if (klass) {
cfp->klass = klass;
}
@ -144,16 +207,37 @@ rb_error_arity(int argc, int min, int max)
static inline struct vm_svar **
lep_svar_place(rb_thread_t *th, const VALUE *lep)
{
const VALUE *svar;
const VALUE *svar_place;
if (lep && (th == NULL || th->root_lep != lep)) {
svar = &lep[-1];
svar_place = &lep[-1];
}
else {
svar = &th->root_svar;
svar_place = &th->root_svar;
}
return (struct vm_svar **)svar;
#if VM_CHECK_MODE > 0
{
VALUE svar = *svar_place;
if (svar != Qfalse) {
if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
switch (imemo_type(svar)) {
case imemo_svar:
case imemo_cref:
case imemo_ment:
goto okay;
default:
break; /* fall through */
}
}
rb_bug("lep_svar_place: unknown type: %s", rb_obj_info(svar));
}
okay:;
}
#endif
return (struct vm_svar **)svar_place;
}
static VALUE
@ -162,8 +246,7 @@ lep_svar_get(rb_thread_t *th, const VALUE *lep, rb_num_t key)
struct vm_svar ** const svar_place = lep_svar_place(th, lep);
const struct vm_svar *const svar = *svar_place;
if (NIL_P((VALUE)svar)) return Qnil;
if (RB_TYPE_P((VALUE)svar, T_IMEMO) && imemo_type((VALUE)svar) == imemo_cref) return Qnil;
if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;
switch (key) {
case VM_SVAR_LASTLINE:
@ -184,9 +267,9 @@ lep_svar_get(rb_thread_t *th, const VALUE *lep, rb_num_t key)
}
static struct vm_svar *
svar_new(const rb_cref_t *cref)
svar_new(VALUE obj)
{
return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, (VALUE)cref);
return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}
static void
@ -195,12 +278,8 @@ lep_svar_set(rb_thread_t *th, VALUE *lep, rb_num_t key, VALUE val)
struct vm_svar **svar_place = lep_svar_place(th, lep);
struct vm_svar *svar = *svar_place;
if (NIL_P((VALUE)svar)) {
svar = *svar_place = svar_new(NULL);
}
else if (RB_TYPE_P((VALUE)svar, T_IMEMO) && imemo_type((VALUE)svar) == imemo_cref) {
const rb_cref_t *cref = (rb_cref_t *)svar;
svar = *svar_place = svar_new(cref);
if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
svar = *svar_place = svar_new((VALUE)svar);
}
switch (key) {
@ -257,38 +336,126 @@ vm_getspecial(rb_thread_t *th, VALUE *lep, rb_num_t key, rb_num_t type)
return val;
}
static rb_cref_t *
lep_cref(const VALUE *ep)
static rb_method_entry_t *
check_method_entry(VALUE obj, int can_be_svar)
{
const VALUE svar = ep[-1];
if (obj == Qfalse) return NULL;
if (!svar) {
#if VM_CHECK_MODE > 0
if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
#endif
switch (imemo_type(obj)) {
case imemo_ment:
return (rb_method_entry_t *)obj;
case imemo_cref:
return NULL;
case imemo_svar:
if (can_be_svar) {
return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
}
default:
#if VM_CHECK_MODE > 0
rb_bug("check_method_entry: svar should not be there:");
#endif
return NULL;
}
else if (RB_TYPE_P((VALUE)svar, T_IMEMO) && imemo_type(svar) == imemo_cref) {
return (rb_cref_t *)svar;
}
const rb_method_entry_t *
rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
{
VALUE *ep = cfp->ep;
rb_method_entry_t *me;
while (!VM_EP_LEP_P(ep)) {
if ((me = check_method_entry(ep[-1], FALSE)) != NULL) return me;
ep = VM_EP_PREV_EP(ep);
}
else {
return (rb_cref_t *)((struct vm_svar *)svar)->cref;
return check_method_entry(ep[-1], TRUE);
}
static rb_cref_t *
method_entry_cref(rb_method_entry_t *me)
{
switch (me->def->type) {
case VM_METHOD_TYPE_ISEQ:
return me->def->body.iseq.cref;
default:
return NULL;
}
}
static rb_cref_t *
vm_get_cref0(const VALUE *ep)
check_cref(VALUE obj, int can_be_svar)
{
while (!VM_EP_LEP_P(ep)) {
if (ep[-1]) {
return (rb_cref_t *)ep[-1];
if (obj == Qfalse) return NULL;
#if VM_CHECK_MODE > 0
if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
#endif
switch (imemo_type(obj)) {
case imemo_ment:
return method_entry_cref((rb_method_entry_t *)obj);
case imemo_cref:
return (rb_cref_t *)obj;
case imemo_svar:
if (can_be_svar) {
return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
}
ep = VM_EP_PREV_EP(ep);
default:
#if VM_CHECK_MODE > 0
rb_bug("check_method_entry: svar should not be there:");
#endif
return NULL;
}
return lep_cref(ep);
}
rb_cref_t *
static rb_cref_t *
vm_env_cref(const VALUE *ep)
{
rb_cref_t *cref;
while (!VM_EP_LEP_P(ep)) {
if ((cref = check_cref(ep[-1], FALSE)) != NULL) return cref;
ep = VM_EP_PREV_EP(ep);
}
return check_cref(ep[-1], TRUE);
}
static int
is_cref(const VALUE v, int can_be_svar)
{
if (RB_TYPE_P(v, T_IMEMO)) {
switch (imemo_type(v)) {
case imemo_cref:
return TRUE;
case imemo_svar:
if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
default:
break;
}
}
return FALSE;
}
static int
vm_env_cref_by_cref(const VALUE *ep)
{
while (!VM_EP_LEP_P(ep)) {
if (is_cref(ep[-1], FALSE)) return TRUE;
ep = VM_EP_PREV_EP(ep);
}
return is_cref(ep[-1], TRUE);
}
static rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
rb_cref_t *cref = vm_get_cref0(ep);
rb_cref_t *cref = vm_env_cref(ep);
if (cref == 0) {
rb_bug("rb_vm_get_cref: unreachable");
@ -326,13 +493,13 @@ vm_cref_push(rb_thread_t *th, VALUE klass, int noex, rb_block_t *blockptr)
rb_cref_t *cref = NULL;
if (blockptr) {
prev_cref = vm_get_cref0(blockptr->ep);
prev_cref = vm_env_cref(blockptr->ep);
}
else {
rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(th, th->cfp);
if (cfp) {
prev_cref = vm_get_cref0(cfp->ep);
prev_cref = vm_env_cref(cfp->ep);
}
}
cref = vm_cref_new(klass, noex, prev_cref);
@ -1148,17 +1315,29 @@ vm_callee_setup_arg(rb_thread_t *th, rb_call_info_t *ci, const rb_iseq_t *iseq,
CI_SET_FASTPATH(ci,
(UNLIKELY(ci->flag & VM_CALL_TAILCALL) ? vm_call_iseq_setup_tailcall : vm_call_iseq_setup_normal),
(!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) && !(ci->me->flag & NOEX_PROTECTED)));
(!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) && !(ci->me->def->flag & NOEX_PROTECTED)));
}
else {
ci->aux.opt_pc = setup_parameters_complex(th, iseq, ci, argv, arg_setup_method);
}
}
static rb_iseq_t *
def_iseq_ptr(rb_method_definition_t *def)
{
#if VM_CHECK_MODE > 0
rb_iseq_t *iseq;
if (def->type != VM_METHOD_TYPE_ISEQ) rb_bug("def_iseq_ptr: not iseq (%d)", def->type);
GetISeqPtr(def->body.iseq.iseqval, iseq);
if (def->body.iseq.iseqptr != iseq) rb_bug("def_iseq_ptr: ???.");
#endif
return def->body.iseq.iseqptr;
}
static VALUE
vm_call_iseq_setup(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
{
vm_callee_setup_arg(th, ci, ci->me->def->body.iseq_body.iseq, cfp->sp - ci->argc);
vm_callee_setup_arg(th, ci, def_iseq_ptr(ci->me->def), cfp->sp - ci->argc);
return vm_call_iseq_setup_2(th, cfp, ci);
}
@ -1178,7 +1357,8 @@ vm_call_iseq_setup_normal(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info
{
int i, local_size;
VALUE *argv = cfp->sp - ci->argc;
rb_iseq_t *iseq = ci->me->def->body.iseq_body.iseq;
const rb_method_entry_t *me = ci->me;
rb_iseq_t *iseq = def_iseq_ptr(me->def);
VALUE *sp = argv + iseq->param.size;
/* clear local variables (arg_size...local_size) */
@ -1187,8 +1367,8 @@ vm_call_iseq_setup_normal(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info
}
vm_push_frame(th, iseq, VM_FRAME_MAGIC_METHOD, ci->recv, ci->defined_class,
VM_ENVVAL_BLOCK_PTR(ci->blockptr), ci->me->def->body.iseq_body.cref,
iseq->iseq_encoded + ci->aux.opt_pc, sp, 0, ci->me, iseq->stack_max);
VM_ENVVAL_BLOCK_PTR(ci->blockptr), (VALUE)me,
iseq->iseq_encoded + ci->aux.opt_pc, sp, 0, iseq->stack_max);
cfp->sp = argv - 1 /* recv */;
return Qundef;
@ -1199,7 +1379,8 @@ vm_call_iseq_setup_tailcall(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_in
{
int i;
VALUE *argv = cfp->sp - ci->argc;
rb_iseq_t *iseq = ci->me->def->body.iseq_body.iseq;
const rb_method_entry_t *me = ci->me;
rb_iseq_t *iseq = def_iseq_ptr(me->def);
VALUE *src_argv = argv;
VALUE *sp_orig, *sp;
VALUE finish_flag = VM_FRAME_TYPE_FINISH_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
@ -1226,8 +1407,8 @@ vm_call_iseq_setup_tailcall(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_in
vm_push_frame(th, iseq, VM_FRAME_MAGIC_METHOD | finish_flag,
ci->recv, ci->defined_class,
VM_ENVVAL_BLOCK_PTR(ci->blockptr), ci->me->def->body.iseq_body.cref,
iseq->iseq_encoded + ci->aux.opt_pc, sp, 0, ci->me, iseq->stack_max);
VM_ENVVAL_BLOCK_PTR(ci->blockptr), (VALUE)me,
iseq->iseq_encoded + ci->aux.opt_pc, sp, 0, iseq->stack_max);
cfp->sp = sp_orig;
return Qundef;
@ -1408,9 +1589,9 @@ vm_call_cfunc_with_frame(rb_thread_t *th, rb_control_frame_t *reg_cfp, rb_call_i
RUBY_DTRACE_CMETHOD_ENTRY_HOOK(th, me->klass, me->called_id);
EXEC_EVENT_HOOK(th, RUBY_EVENT_C_CALL, recv, me->called_id, me->klass, Qundef);
vm_push_frame(th, 0, VM_FRAME_MAGIC_CFUNC, recv, defined_class,
VM_ENVVAL_BLOCK_PTR(blockptr), NULL /* cref */,
0, th->cfp->sp, 1, me, 0);
vm_push_frame(th, NULL, VM_FRAME_MAGIC_CFUNC, recv, defined_class,
VM_ENVVAL_BLOCK_PTR(blockptr), (VALUE)me,
0, th->cfp->sp, 1, 0);
if (len >= 0) rb_check_arity(argc, len, len);
@ -1478,7 +1659,7 @@ vm_call_cfunc(rb_thread_t *th, rb_control_frame_t *reg_cfp, rb_call_info_t *ci)
RUBY_DTRACE_CMETHOD_ENTRY_HOOK(th, me->klass, me->called_id);
EXEC_EVENT_HOOK(th, RUBY_EVENT_C_CALL, recv, me->called_id, me->klass, Qnil);
if (!(ci->me->flag & NOEX_PROTECTED) &&
if (!(ci->me->def->flag & NOEX_PROTECTED) &&
!(ci->flag & VM_CALL_ARGS_SPLAT) &&
!(ci->kw_arg != NULL)) {
CI_SET_FASTPATH(ci, vm_call_cfunc_latter, 1);
@ -1499,8 +1680,8 @@ rb_vm_call_cfunc_push_frame(rb_thread_t *th)
th->passed_ci = 0;
vm_push_frame(th, 0, VM_FRAME_MAGIC_CFUNC, ci->recv, ci->defined_class,
VM_ENVVAL_BLOCK_PTR(ci->blockptr), NULL /* cref */,
0, th->cfp->sp + ci->aux.inc_sp, 1, me, 0);
VM_ENVVAL_BLOCK_PTR(ci->blockptr), (VALUE)me /* cref */,
0, th->cfp->sp + ci->aux.inc_sp, 1, 0);
if (ci->call != vm_call_general) {
ci->call = vm_call_cfunc_with_frame;
@ -1734,7 +1915,7 @@ vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
start_method_dispatch:
if (ci->me != 0) {
if ((ci->me->flag == 0)) {
if ((ci->me->def->flag == 0)) {
VALUE klass;
normal_method_dispatch:
@ -1826,9 +2007,10 @@ vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
me = rb_method_entry(refinement, ci->mid, &defined_class);
if (me) {
if (ci->call == vm_call_super_method) {
rb_control_frame_t *top_cfp = current_method_entry(th, cfp);
if (top_cfp->me &&
rb_method_definition_eq(me->def, top_cfp->me->def)) {
const rb_control_frame_t *top_cfp = current_method_entry(th, cfp);
const rb_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
if (top_me && rb_method_definition_eq(me->def, top_me->def)) {
goto no_refinement_dispatch;
}
}
@ -1861,7 +2043,7 @@ vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
}
else {
int noex_safe;
if (!(ci->flag & VM_CALL_FCALL) && (ci->me->flag & NOEX_MASK) & NOEX_PRIVATE) {
if (!(ci->flag & VM_CALL_FCALL) && (ci->me->def->flag & NOEX_MASK) & NOEX_PRIVATE) {
int stat = NOEX_PRIVATE;
if (ci->flag & VM_CALL_VCALL) {
@ -1871,7 +2053,7 @@ vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
CI_SET_FASTPATH(ci, vm_call_method_missing, 1);
return vm_call_method_missing(th, cfp, ci);
}
else if (!(ci->flag & VM_CALL_OPT_SEND) && (ci->me->flag & NOEX_MASK) & NOEX_PROTECTED) {
else if (!(ci->flag & VM_CALL_OPT_SEND) && (ci->me->def->flag & NOEX_MASK) & NOEX_PROTECTED) {
enable_fastpath = 0;
if (!rb_obj_is_kind_of(cfp->self, ci->defined_class)) {
ci->aux.missing_reason = NOEX_PROTECTED;
@ -1881,7 +2063,7 @@ vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
goto normal_method_dispatch;
}
}
else if ((noex_safe = NOEX_SAFE(ci->me->flag)) > th->safe_level && (noex_safe > 2)) {
else if ((noex_safe = NOEX_SAFE(ci->me->def->flag)) > th->safe_level && (noex_safe > 2)) {
rb_raise(rb_eSecurityError, "calling insecure method: %"PRIsVALUE, rb_id2str(ci->mid));
}
else {
@ -1941,6 +2123,8 @@ vm_super_outside(void)
static int
vm_search_superclass(rb_control_frame_t *reg_cfp, rb_iseq_t *iseq, VALUE sigval, rb_call_info_t *ci)
{
const rb_method_entry_t *me;
while (iseq && !iseq->klass) {
iseq = iseq->parent_iseq;
}
@ -1974,12 +2158,15 @@ vm_search_superclass(rb_control_frame_t *reg_cfp, rb_iseq_t *iseq, VALUE sigval,
}
}
me = rb_vm_frame_method_entry(lcfp);
/* temporary measure for [Bug #2420] [Bug #3136] */
if (!lcfp->me) {
if (!me) {
fprintf(stderr, "kore?\n");
return -1;
}
ci->mid = lcfp->me->def->original_id;
ci->mid = me->def->original_id;
ci->klass = vm_search_normal_superclass(lcfp->klass);
}
else {
@ -2042,7 +2229,7 @@ vm_search_super_method(rb_thread_t *th, rb_control_frame_t *reg_cfp, rb_call_inf
iseq = iseq->parent_iseq;
}
if (ci->me && ci->me->def->type == VM_METHOD_TYPE_ISEQ && ci->me->def->body.iseq_body.iseq == iseq) {
if (ci->me && ci->me->def->type == VM_METHOD_TYPE_ISEQ && def_iseq_ptr(ci->me->def) == iseq) {
ci->klass = RCLASS_SUPER(ci->defined_class);
ci->me = rb_method_entry(ci->klass, ci->mid, &ci->defined_class);
}
@ -2073,6 +2260,8 @@ vm_yield_with_cfunc(rb_thread_t *th, const rb_block_t *block,
const struct vm_ifunc *ifunc = (struct vm_ifunc *)block->iseq;
VALUE val, arg, blockarg;
int lambda = block_proc_is_lambda(block->proc);
const rb_method_entry_t *me = th->passed_bmethod_me;
th->passed_bmethod_me = NULL;
if (lambda) {
arg = rb_ary_new4(argc, argv);
@ -2098,8 +2287,8 @@ vm_yield_with_cfunc(rb_thread_t *th, const rb_block_t *block,
vm_push_frame(th, (rb_iseq_t *)ifunc, VM_FRAME_MAGIC_IFUNC,
self, defined_class,
VM_ENVVAL_PREV_EP_PTR(block->ep), NULL /* cref */,
0, th->cfp->sp, 1, th->passed_bmethod_me, 0);
VM_ENVVAL_PREV_EP_PTR(block->ep), (VALUE)me,
0, th->cfp->sp, 1, 0);
val = (*ifunc->func) (arg, ifunc->data, argc, argv, blockarg);
@ -2154,10 +2343,10 @@ vm_invoke_block(rb_thread_t *th, rb_control_frame_t *reg_cfp, rb_call_info_t *ci
is_lambda ? VM_FRAME_MAGIC_LAMBDA : VM_FRAME_MAGIC_BLOCK,
block->self,
block->klass,
VM_ENVVAL_PREV_EP_PTR(block->ep), NULL /* cref */,
VM_ENVVAL_PREV_EP_PTR(block->ep), 0,
iseq->iseq_encoded + opt_pc,
rsp + arg_size,
iseq->local_size - arg_size, 0, iseq->stack_max);
iseq->local_size - arg_size, iseq->stack_max);
return Qundef;
}

vm_method.c

@ -2,6 +2,8 @@
* This file is included by vm.c
*/
#define METHOD_DEBUG 0
#if OPT_GLOBAL_METHOD_CACHE
#ifndef GLOBAL_METHOD_CACHE_SIZE
#define GLOBAL_METHOD_CACHE_SIZE 0x800
@ -110,7 +112,7 @@ rb_f_notimplement(int argc, const VALUE *argv, VALUE obj)
static void
rb_define_notimplement_method_id(VALUE mod, ID id, rb_method_flag_t noex)
{
rb_add_method(mod, id, VM_METHOD_TYPE_NOTIMPLEMENTED, 0, noex);
rb_add_method(mod, id, VM_METHOD_TYPE_NOTIMPLEMENTED, (void *)1, noex);
}
void
@ -129,80 +131,33 @@ rb_add_method_cfunc(VALUE klass, ID mid, VALUE (*func)(ANYARGS), int argc, rb_me
}
static void
rb_unlink_method_entry(rb_method_entry_t *me)
rb_method_definition_release(rb_method_definition_t *def)
{
struct unlinked_method_entry_list_entry *ume = ALLOC(struct unlinked_method_entry_list_entry);
ume->me = me;
ume->next = GET_VM()->unlinked_method_entry_list;
GET_VM()->unlinked_method_entry_list = ume;
}
void
rb_gc_mark_unlinked_live_method_entries(void *pvm)
{
rb_vm_t *vm = pvm;
struct unlinked_method_entry_list_entry *ume = vm->unlinked_method_entry_list;
while (ume) {
if (ume->me->mark) {
rb_mark_method_entry(ume->me);
}
ume = ume->next;
}
}
void
rb_sweep_method_entry(void *pvm)
{
rb_vm_t *vm = pvm;
struct unlinked_method_entry_list_entry **prev_ume = &vm->unlinked_method_entry_list, *ume = *prev_ume, *curr_ume;
while (ume) {
if (ume->me->mark) {
ume->me->mark = 0;
prev_ume = &ume->next;
ume = *prev_ume;
if (def != NULL) {
if (def->alias_count_ptr == NULL) {
if (METHOD_DEBUG) fprintf(stderr, " %p-%s:NULL\n", def, rb_id2name(def->original_id));
}
else {
rb_free_method_entry(ume->me);
int *iptr = def->alias_count_ptr;
curr_ume = ume;
ume = ume->next;
*prev_ume = ume;
xfree(curr_ume);
}
}
}
static void
release_method_definition(rb_method_definition_t *def)
{
if (def == 0) return;
if (def->alias_count == 0) {
switch (def->type) {
case VM_METHOD_TYPE_REFINED:
if (def->body.orig_me) rb_free_method_entry(def->body.orig_me);
break;
case VM_METHOD_TYPE_ALIAS:
if (!def->body.alias.original_me) rb_free_method_entry(def->body.alias.original_me);
break;
default:
break;
if (*iptr == 0) {
if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d\n", def, rb_id2name(def->original_id), *iptr);
xfree(iptr);
}
else {
if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d->%d\n", def, rb_id2name(def->original_id), *iptr, *iptr-1);
*iptr -= 1;
}
}
xfree(def);
}
else if (def->alias_count > 0) {
def->alias_count--;
}
}
void
rb_free_method_entry(const rb_method_entry_t *me)
{
release_method_definition(me->def);
xfree((void *)me);
rb_method_definition_release(me->def);
}
static inline rb_method_entry_t *search_method(VALUE klass, ID id, VALUE *defined_class_ptr);
@ -221,203 +176,6 @@ lookup_method_table(VALUE klass, ID id)
}
}
static void
make_method_entry_refined(rb_method_entry_t *me)
{
rb_method_definition_t *new_def;
if (me->def && me->def->type == VM_METHOD_TYPE_REFINED)
return;
new_def = ALLOC(rb_method_definition_t);
new_def->type = VM_METHOD_TYPE_REFINED;
new_def->original_id = me->called_id;
new_def->alias_count = 0;
new_def->body.orig_me = ALLOC(rb_method_entry_t);
*new_def->body.orig_me = *me;
rb_vm_check_redefinition_opt_method(me, me->klass);
if (me->def) me->def->alias_count++;
me->flag = NOEX_WITH_SAFE(NOEX_PUBLIC);
me->def = new_def;
}
void
rb_add_refined_method_entry(VALUE refined_class, ID mid)
{
rb_method_entry_t *me = lookup_method_table(refined_class, mid);
if (me) {
make_method_entry_refined(me);
rb_clear_method_cache_by_class(refined_class);
}
else {
rb_add_method(refined_class, mid, VM_METHOD_TYPE_REFINED, 0, NOEX_PUBLIC);
}
}
static rb_method_entry_t *
rb_method_entry_make(VALUE klass, ID mid, rb_method_type_t type,
rb_method_definition_t *def, rb_method_flag_t noex,
VALUE defined_class)
{
rb_method_entry_t *me;
#if NOEX_NOREDEF
VALUE rklass;
#endif
st_table *mtbl;
st_data_t data;
int make_refined = 0;
if (NIL_P(klass)) {
klass = rb_cObject;
}
if (!FL_TEST(klass, FL_SINGLETON) &&
type != VM_METHOD_TYPE_NOTIMPLEMENTED &&
type != VM_METHOD_TYPE_ZSUPER) {
switch (mid) {
case idInitialize:
case idInitialize_copy:
case idInitialize_clone:
case idInitialize_dup:
case idRespond_to_missing:
noex |= NOEX_PRIVATE;
}
}
rb_frozen_class_p(klass);
#if NOEX_NOREDEF
rklass = klass;
#endif
if (FL_TEST(klass, RMODULE_IS_REFINEMENT)) {
VALUE refined_class =
rb_refinement_module_get_refined_class(klass);
rb_add_refined_method_entry(refined_class, mid);
}
if (type == VM_METHOD_TYPE_REFINED) {
rb_method_entry_t *old_me =
lookup_method_table(RCLASS_ORIGIN(klass), mid);
if (old_me) rb_vm_check_redefinition_opt_method(old_me, klass);
}
else {
klass = RCLASS_ORIGIN(klass);
}
mtbl = RCLASS_M_TBL(klass);
/* check re-definition */
if (st_lookup(mtbl, mid, &data)) {
rb_method_entry_t *old_me = (rb_method_entry_t *)data;
rb_method_definition_t *old_def = old_me->def;
if (rb_method_definition_eq(old_def, def)) return old_me;
#if NOEX_NOREDEF
if (old_me->flag & NOEX_NOREDEF) {
rb_raise(rb_eTypeError, "cannot redefine %"PRIsVALUE"#%"PRIsVALUE,
rb_class_name(rklass), rb_id2str(mid));
}
#endif
rb_vm_check_redefinition_opt_method(old_me, klass);
if (old_def->type == VM_METHOD_TYPE_REFINED)
make_refined = 1;
if (RTEST(ruby_verbose) &&
type != VM_METHOD_TYPE_UNDEF &&
old_def->alias_count == 0 &&
old_def->type != VM_METHOD_TYPE_UNDEF &&
old_def->type != VM_METHOD_TYPE_ZSUPER &&
old_def->type != VM_METHOD_TYPE_ALIAS) {
const rb_iseq_t *iseq = 0;
rb_warning("method redefined; discarding old %"PRIsVALUE, rb_id2str(mid));
switch (old_def->type) {
case VM_METHOD_TYPE_ISEQ:
iseq = old_def->body.iseq_body.iseq;
break;
case VM_METHOD_TYPE_BMETHOD:
iseq = rb_proc_get_iseq(old_def->body.proc, 0);
break;
default:
break;
}
if (iseq && !NIL_P(iseq->location.path)) {
int line = iseq->line_info_table ? FIX2INT(rb_iseq_first_lineno(iseq->self)) : 0;
rb_compile_warning(RSTRING_PTR(iseq->location.path), line,
"previous definition of %"PRIsVALUE" was here",
rb_id2str(old_def->original_id));
}
}
rb_unlink_method_entry(old_me);
}
me = ALLOC(rb_method_entry_t);
rb_clear_method_cache_by_class(klass);
me->flag = NOEX_WITH_SAFE(noex);
me->mark = 0;
me->called_id = mid;
RB_OBJ_WRITE(klass, &me->klass, defined_class);
me->def = def;
if (def) {
def->alias_count++;
switch(def->type) {
case VM_METHOD_TYPE_ISEQ:
RB_OBJ_WRITTEN(klass, Qundef, def->body.iseq_body.iseq->self);
RB_OBJ_WRITTEN(klass, Qundef, def->body.iseq_body.cref);
break;
case VM_METHOD_TYPE_IVAR:
RB_OBJ_WRITTEN(klass, Qundef, def->body.attr.location);
break;
case VM_METHOD_TYPE_BMETHOD:
RB_OBJ_WRITTEN(klass, Qundef, def->body.proc);
break;
default:;
/* ignore */
}
}
/* check mid */
if (klass == rb_cObject && mid == idInitialize) {
rb_warn("redefining Object#initialize may cause infinite loop");
}
/* check mid */
if (mid == object_id || mid == id__send__) {
if (type == VM_METHOD_TYPE_ISEQ && search_method(klass, mid, 0)) {
rb_warn("redefining `%s' may cause serious problems", rb_id2name(mid));
}
}
if (make_refined) {
make_method_entry_refined(me);
}
st_insert(mtbl, mid, (st_data_t) me);
return me;
}
#define CALL_METHOD_HOOK(klass, hook, mid) do { \
const VALUE arg = ID2SYM(mid); \
VALUE recv_class = (klass); \
ID hook_id = (hook); \
if (FL_TEST((klass), FL_SINGLETON)) { \
recv_class = rb_ivar_get((klass), attached); \
hook_id = singleton_##hook; \
} \
rb_funcall2(recv_class, hook_id, 1, &arg); \
} while (0)
static void
method_added(VALUE klass, ID mid)
{
if (ruby_running) {
CALL_METHOD_HOOK(klass, added, mid);
}
}
static VALUE
(*call_cfunc_invoker_func(int argc))(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *)
{
@@ -453,111 +211,364 @@ setup_method_cfunc_struct(rb_method_cfunc_t *cfunc, VALUE (*func)(), int argc)
cfunc->invoker = call_cfunc_invoker_func(argc);
}
rb_method_entry_t *
rb_add_method0(VALUE klass, ID mid, rb_method_type_t type, void *opts, rb_method_flag_t noex, rb_cref_t *cref)
static void
def_obj_write(VALUE *ptr, VALUE val)
{
rb_thread_t *th;
rb_control_frame_t *cfp;
int line;
rb_method_entry_t *me = rb_method_entry_make(klass, mid, type, 0, noex, klass);
rb_method_definition_t *def = ALLOC(rb_method_definition_t);
*ptr = val;
}
if (me->def && me->def->type == VM_METHOD_TYPE_REFINED) {
me->def->body.orig_me->def = def;
}
else {
me->def = def;
}
static void
rb_method_definition_set(rb_method_definition_t *def, void *opts)
{
#define DEF_OBJ_WRITE(ptr, val) def_obj_write((VALUE *)(ptr), (VALUE)(val))
switch (def->type) {
case VM_METHOD_TYPE_ISEQ:
{
rb_method_iseq_t *iseq_body = (rb_method_iseq_t *)opts;
rb_cref_t *method_cref, *cref = iseq_body->cref;
rb_iseq_t *iseq;
GetISeqPtr(iseq_body->iseqval, iseq);
if (0 && cref) vm_cref_dump("rb_add_method0", cref);
/* setup iseq first (before invoking GC) */
DEF_OBJ_WRITE(&def->body.iseq.iseqval, iseq_body->iseqval);
DEF_OBJ_WRITE(&def->body.iseq.iseqptr, iseq);
def->type = type;
def->original_id = mid;
def->alias_count = 0;
if (0) vm_cref_dump("rb_method_definition_create", cref);
switch (type) {
case VM_METHOD_TYPE_ISEQ: {
rb_iseq_t *iseq = (rb_iseq_t *)opts;
rb_cref_t *private_cref;
if (cref) {
method_cref = cref;
}
else {
method_cref = vm_cref_new_toplevel(GET_THREAD()); /* TODO: can we reuse? */
}
*(rb_iseq_t **)&def->body.iseq_body.iseq = iseq;
RB_OBJ_WRITTEN(klass, Qundef, iseq->self); /* should be set iseq before newobj */
def->body.iseq_body.cref = NULL;
private_cref = vm_cref_new_toplevel(GET_THREAD()); /* TODO: CREF should be shared with other methods */
if (cref) COPY_CREF(private_cref, cref);
CREF_VISI_SET(private_cref, NOEX_PUBLIC);
RB_OBJ_WRITE(klass, &def->body.iseq_body.cref, private_cref);
break;
}
DEF_OBJ_WRITE(&def->body.iseq.cref, method_cref);
return;
}
case VM_METHOD_TYPE_CFUNC:
{
rb_method_cfunc_t *cfunc = (rb_method_cfunc_t *)opts;
setup_method_cfunc_struct(&def->body.cfunc, cfunc->func, cfunc->argc);
return;
}
break;
case VM_METHOD_TYPE_ATTRSET:
case VM_METHOD_TYPE_IVAR:
def->body.attr.id = (ID)(VALUE)opts;
RB_OBJ_WRITE(klass, &def->body.attr.location, Qfalse);
th = GET_THREAD();
cfp = rb_vm_get_ruby_level_next_cfp(th, th->cfp);
if (cfp && (line = rb_vm_get_sourceline(cfp))) {
VALUE location = rb_ary_new3(2, cfp->iseq->location.path, INT2FIX(line));
RB_OBJ_WRITE(klass, &def->body.attr.location, rb_ary_freeze(location));
{
rb_thread_t *th = GET_THREAD();
rb_control_frame_t *cfp;
int line;
def->body.attr.id = (ID)(VALUE)opts;
cfp = rb_vm_get_ruby_level_next_cfp(th, th->cfp);
if (cfp && (line = rb_vm_get_sourceline(cfp))) {
VALUE location = rb_ary_new3(2, cfp->iseq->location.path, INT2FIX(line));
DEF_OBJ_WRITE(&def->body.attr.location, rb_ary_freeze(location));
}
else {
assert(def->body.attr.location == 0);
}
return;
}
break;
case VM_METHOD_TYPE_BMETHOD:
RB_OBJ_WRITE(klass, &def->body.proc, (VALUE)opts);
break;
DEF_OBJ_WRITE(&def->body.proc, (VALUE)opts);
return;
case VM_METHOD_TYPE_NOTIMPLEMENTED:
setup_method_cfunc_struct(&def->body.cfunc, rb_f_notimplement, -1);
break;
return;
case VM_METHOD_TYPE_OPTIMIZED:
def->body.optimize_type = (enum method_optimized_type)opts;
break;
return;
case VM_METHOD_TYPE_REFINED:
DEF_OBJ_WRITE(&def->body.orig_me, (rb_method_entry_t *)opts);
return;
case VM_METHOD_TYPE_ALIAS:
DEF_OBJ_WRITE(&def->body.alias.original_me, (rb_method_entry_t *)opts);
return;
case VM_METHOD_TYPE_ZSUPER:
case VM_METHOD_TYPE_UNDEF:
case VM_METHOD_TYPE_MISSING:
return;
}
#undef DEF_OBJ_WRITE
rb_bug("rb_add_method: unsupported method type (%d)\n", def->type);
}
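/* rb_method_definition_create(): allocate a zero-filled definition, record
 * flag/type/original_id, and fill in the body via rb_method_definition_set()
 * when opts is supplied; alias_count_ptr starts out NULL (not yet shared). */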
static rb_method_definition_t *
rb_method_definition_create(rb_method_flag_t flag, rb_method_type_t type, ID mid, void *opts)
{
rb_method_definition_t *def = ZALLOC(rb_method_definition_t);
/* def->alias_count_ptr = NULL; already cleared */
def->flag = flag;
def->type = type;
def->original_id = mid;
if (opts != NULL) rb_method_definition_set(def, opts);
return def;
}
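/* rb_method_definition_reset(): attach a definition to a method entry,
 * firing write barriers (RB_OBJ_WRITTEN) for every object the definition
 * body references before storing the def pointer into the entry. */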
static void
rb_method_definition_reset(rb_method_entry_t *me, rb_method_definition_t *def)
{
switch(def->type) {
case VM_METHOD_TYPE_ISEQ:
RB_OBJ_WRITTEN(me, Qundef, def->body.iseq.iseqval);
RB_OBJ_WRITTEN(me, Qundef, def->body.iseq.cref);
break;
case VM_METHOD_TYPE_IVAR:
RB_OBJ_WRITTEN(me, Qundef, def->body.attr.location);
break;
case VM_METHOD_TYPE_BMETHOD:
RB_OBJ_WRITTEN(me, Qundef, def->body.proc);
break;
case VM_METHOD_TYPE_REFINED:
def->body.orig_me = (rb_method_entry_t *) opts;
RB_OBJ_WRITTEN(me, Qundef, def->body.orig_me);
break;
default:
rb_bug("rb_add_method: unsupported method type (%d)\n", type);
case VM_METHOD_TYPE_ALIAS:
RB_OBJ_WRITTEN(me, Qundef, def->body.alias.original_me);
break;
default:;
/* ignore */
}
*(rb_method_definition_t **)&me->def = def;
}
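/* rb_method_definition_clone(): a clone owns its own definition struct with a
 * byte-copy of the body, but shares the alias counter with its source; the
 * counter is allocated lazily on the first clone, bumped for each sharer, and
 * rb_method_definition_release() frees it once it drops back to zero. */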
static rb_method_definition_t *
rb_method_definition_clone(rb_method_definition_t *src_def)
{
int *iptr = src_def->alias_count_ptr;
rb_method_definition_t *def = rb_method_definition_create(src_def->flag, src_def->type, src_def->original_id, NULL);
memcpy(&def->body, &src_def->body, sizeof(def->body));
def->alias_count_ptr = src_def->alias_count_ptr;
if (!src_def->alias_count_ptr) {
iptr = def->alias_count_ptr = src_def->alias_count_ptr = ALLOC(int);
*iptr = 0;
}
*iptr += 1;
if (METHOD_DEBUG) fprintf(stderr, "+%p-%s:%d\n", src_def, rb_id2name(src_def->original_id), *iptr);
return def;
}
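/* rb_method_entry_create(): method entries are now imemo objects (imemo_ment),
 * i.e. plain VALUEs managed by the GC; the definition is installed through
 * rb_method_definition_reset() so the write barriers are applied. */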
rb_method_entry_t *
rb_method_entry_create(ID called_id, VALUE klass, rb_method_definition_t *def)
{
rb_method_entry_t *me = (rb_method_entry_t *)rb_imemo_new(imemo_ment, (VALUE)NULL, (VALUE)called_id, (VALUE)klass, 0);
rb_method_definition_reset(me, def);
assert(def != NULL);
return me;
}
rb_method_entry_t *
rb_method_entry_clone(const rb_method_entry_t *src_me)
{
rb_method_entry_t *me = rb_method_entry_create(src_me->called_id, src_me->klass, rb_method_definition_clone(src_me->def));
return me;
}
void
rb_method_entry_copy(rb_method_entry_t *dst, rb_method_entry_t *src)
{
rb_method_definition_reset(dst, rb_method_definition_clone(src->def));
dst->called_id = src->called_id;
RB_OBJ_WRITE((VALUE)dst, &dst->klass, src->klass);
}
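/* make_method_entry_refined(): clone the current entry, store the clone as
 * orig_me inside a fresh VM_METHOD_TYPE_REFINED definition (forced public),
 * and reset the entry to that wrapper definition. */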
static void
make_method_entry_refined(rb_method_entry_t *me)
{
rb_method_definition_t *new_def;
if (me->def->type == VM_METHOD_TYPE_REFINED) return;
rb_vm_check_redefinition_opt_method(me, me->klass);
new_def = rb_method_definition_create(NOEX_WITH_SAFE(NOEX_PUBLIC), VM_METHOD_TYPE_REFINED, me->called_id, rb_method_entry_clone(me));
rb_method_definition_reset(me, new_def);
}
void
rb_add_refined_method_entry(VALUE refined_class, ID mid)
{
rb_method_entry_t *me = lookup_method_table(refined_class, mid);
if (me) {
make_method_entry_refined(me);
rb_clear_method_cache_by_class(refined_class);
}
else {
rb_add_method(refined_class, mid, VM_METHOD_TYPE_REFINED, 0, NOEX_PUBLIC);
}
}
static rb_method_entry_t *
rb_method_entry_make(VALUE klass, ID mid, rb_method_type_t type, rb_method_definition_t *def, rb_method_flag_t noex, VALUE defined_class)
{
rb_method_entry_t *me;
#if NOEX_NOREDEF
VALUE rklass;
#endif
st_table *mtbl;
st_data_t data;
int make_refined = 0;
if (NIL_P(klass)) {
klass = rb_cObject;
}
if (!FL_TEST(klass, FL_SINGLETON) &&
type != VM_METHOD_TYPE_NOTIMPLEMENTED &&
type != VM_METHOD_TYPE_ZSUPER) {
switch (mid) {
case idInitialize:
case idInitialize_copy:
case idInitialize_clone:
case idInitialize_dup:
case idRespond_to_missing:
noex |= NOEX_PRIVATE;
}
}
rb_frozen_class_p(klass);
#if NOEX_NOREDEF
rklass = klass;
#endif
if (FL_TEST(klass, RMODULE_IS_REFINEMENT)) {
VALUE refined_class =
rb_refinement_module_get_refined_class(klass);
rb_add_refined_method_entry(refined_class, mid);
}
if (type == VM_METHOD_TYPE_REFINED) {
rb_method_entry_t *old_me = lookup_method_table(RCLASS_ORIGIN(klass), mid);
if (old_me) rb_vm_check_redefinition_opt_method(old_me, klass);
}
else {
klass = RCLASS_ORIGIN(klass);
}
mtbl = RCLASS_M_TBL(klass);
/* check re-definition */
if (st_lookup(mtbl, mid, &data)) {
rb_method_entry_t *old_me = (rb_method_entry_t *)data;
rb_method_definition_t *old_def = old_me->def;
if (rb_method_definition_eq(old_def, def)) return old_me;
#if NOEX_NOREDEF
if (old_me->flag & NOEX_NOREDEF) {
rb_raise(rb_eTypeError, "cannot redefine %"PRIsVALUE"#%"PRIsVALUE,
rb_class_name(rklass), rb_id2str(mid));
}
#endif
rb_vm_check_redefinition_opt_method(old_me, klass);
if (old_def->type == VM_METHOD_TYPE_REFINED)
make_refined = 1;
if (RTEST(ruby_verbose) &&
type != VM_METHOD_TYPE_UNDEF &&
(old_def->alias_count_ptr == NULL || *old_def->alias_count_ptr == 0) &&
old_def->type != VM_METHOD_TYPE_UNDEF &&
old_def->type != VM_METHOD_TYPE_ZSUPER &&
old_def->type != VM_METHOD_TYPE_ALIAS) {
const rb_iseq_t *iseq = 0;
rb_warning("method redefined; discarding old %"PRIsVALUE, rb_id2str(mid));
switch (old_def->type) {
case VM_METHOD_TYPE_ISEQ:
iseq = def_iseq_ptr(old_def);
break;
case VM_METHOD_TYPE_BMETHOD:
iseq = rb_proc_get_iseq(old_def->body.proc, 0);
break;
default:
break;
}
if (iseq && !NIL_P(iseq->location.path)) {
int line = iseq->line_info_table ? FIX2INT(rb_iseq_first_lineno(iseq->self)) : 0;
rb_compile_warning(RSTRING_PTR(iseq->location.path), line,
"previous definition of %"PRIsVALUE" was here",
rb_id2str(old_def->original_id));
}
}
}
me = rb_method_entry_create(mid, defined_class, def);
def->flag = NOEX_WITH_SAFE(noex);
rb_clear_method_cache_by_class(klass);
/* check mid */
if (klass == rb_cObject && mid == idInitialize) {
rb_warn("redefining Object#initialize may cause infinite loop");
}
/* check mid */
if (mid == object_id || mid == id__send__) {
if (type == VM_METHOD_TYPE_ISEQ && search_method(klass, mid, 0)) {
rb_warn("redefining `%s' may cause serious problems", rb_id2name(mid));
}
}
if (make_refined) {
make_method_entry_refined(me);
}
st_insert(mtbl, mid, (st_data_t) me);
RB_OBJ_WRITTEN(klass, Qundef, (VALUE)me);
return me;
}
#define CALL_METHOD_HOOK(klass, hook, mid) do { \
const VALUE arg = ID2SYM(mid); \
VALUE recv_class = (klass); \
ID hook_id = (hook); \
if (FL_TEST((klass), FL_SINGLETON)) { \
recv_class = rb_ivar_get((klass), attached); \
hook_id = singleton_##hook; \
} \
rb_funcall2(recv_class, hook_id, 1, &arg); \
} while (0)
static void
method_added(VALUE klass, ID mid)
{
if (ruby_running) {
CALL_METHOD_HOOK(klass, added, mid);
}
}
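/* rb_add_method(): build the definition first and hand it to
 * rb_method_entry_make(); if the resulting entry is a refinement wrapper,
 * the wrapped orig_me receives the new definition instead, and
 * method_added() is skipped for UNDEF/REFINED types. */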
rb_method_entry_t *
rb_add_method(VALUE klass, ID mid, rb_method_type_t type, void *opts, rb_method_flag_t noex)
{
rb_method_definition_t *def = rb_method_definition_create(noex, type, mid, opts);
rb_method_entry_t *me = rb_method_entry_make(klass, mid, type, def, noex, klass);
if (me->def->type == VM_METHOD_TYPE_REFINED && me->def->body.orig_me) { /* TODO: really needed? */
rb_method_definition_reset(me->def->body.orig_me, def);
}
if (type != VM_METHOD_TYPE_UNDEF && type != VM_METHOD_TYPE_REFINED) {
method_added(klass, mid);
}
return me;
}
rb_method_entry_t *
rb_add_method(VALUE klass, ID mid, rb_method_type_t type, void *opts, rb_method_flag_t noex)
{
return rb_add_method0(klass, mid, type, opts, noex, NULL);
}
void
rb_add_method_iseq(VALUE klass, ID mid, rb_iseq_t *iseq, rb_cref_t *cref, rb_method_flag_t noex)
rb_add_method_iseq(VALUE klass, ID mid, VALUE iseqval, rb_cref_t *cref, rb_method_flag_t noex)
{
rb_add_method0(klass, mid, VM_METHOD_TYPE_ISEQ, iseq, noex, cref);
}
static rb_method_entry_t *
method_entry_set0(VALUE klass, ID mid, rb_method_type_t type,
rb_method_definition_t *def, rb_method_flag_t noex, VALUE defined_class)
{
rb_method_entry_t *newme = rb_method_entry_make(klass, mid, type, def, noex, defined_class);
method_added(klass, mid);
return newme;
rb_method_iseq_t iseq_body = {NULL, cref, iseqval};
rb_add_method(klass, mid, VM_METHOD_TYPE_ISEQ, &iseq_body, noex);
}
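/* method_entry_set(): copy an existing entry into klass by cloning its
 * definition, so the new entry owns its own def and only the alias counter
 * is shared with the source entry. */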
static rb_method_entry_t *
method_entry_set(VALUE klass, ID mid, const rb_method_entry_t *me,
rb_method_flag_t noex, VALUE defined_class)
{
rb_method_type_t type = me->def ? me->def->type : VM_METHOD_TYPE_UNDEF;
return method_entry_set0(klass, mid, type, me->def, noex, defined_class);
rb_method_entry_t *newme = rb_method_entry_make(klass, mid, me->def->type, rb_method_definition_clone(me->def), noex, defined_class);
method_added(klass, mid);
return newme;
}
rb_method_entry_t *
@@ -752,8 +763,9 @@ rb_method_entry_with_refinements(VALUE klass, ID id,
me = rb_resolve_refined_method(refinements, me, &defined_class);
}
if (defined_class_ptr)
*defined_class_ptr = defined_class;
if (defined_class_ptr) *defined_class_ptr = defined_class;
return me;
}
@@ -803,7 +815,6 @@ remove_method(VALUE klass, ID mid)
rb_vm_check_redefinition_opt_method(me, klass);
rb_clear_method_cache_by_class(klass);
rb_unlink_method_entry(me);
if (me->def->type == VM_METHOD_TYPE_REFINED) {
rb_add_refined_method_entry(klass, mid);
@@ -867,14 +878,14 @@ rb_export_method(VALUE klass, ID name, rb_method_flag_t noex)
rb_print_undef(klass, name, 0);
}
if (me->flag != noex) {
if (me->def->flag != noex) {
rb_vm_check_redefinition_opt_method(me, klass);
if (klass == defined_class ||
RCLASS_ORIGIN(klass) == defined_class) {
me->flag = noex;
if (me->def->type == VM_METHOD_TYPE_REFINED) {
me->def->body.orig_me->flag = noex;
if (klass == defined_class || RCLASS_ORIGIN(klass) == defined_class) {
me->def->flag = noex;
if (me->def->type == VM_METHOD_TYPE_REFINED && me->def->body.orig_me) {
me->def->body.orig_me->def->flag = noex;
}
rb_clear_method_cache_by_class(klass);
}
@@ -891,13 +902,15 @@ rb_method_boundp(VALUE klass, ID id, int ex)
rb_method_entry_without_refinements(klass, id, 0);
if (me != 0) {
rb_method_definition_t *def = me->def;
if ((ex & ~NOEX_RESPONDS) &&
((me->flag & NOEX_PRIVATE) ||
((ex & NOEX_RESPONDS) && (me->flag & NOEX_PROTECTED)))) {
((def->flag & NOEX_PRIVATE) ||
((ex & NOEX_RESPONDS) && (def->flag & NOEX_PROTECTED)))) {
return 0;
}
if (!me->def) return 0;
if (me->def->type == VM_METHOD_TYPE_NOTIMPLEMENTED) {
if (def->type == VM_METHOD_TYPE_NOTIMPLEMENTED) {
if (ex & NOEX_RESPONDS) return 2;
return 0;
}
@@ -911,7 +924,15 @@ extern ID rb_check_attr_id(ID id);
static int
rb_frame_visibility_test(rb_method_flag_t flag)
{
return CREF_VISI(rb_vm_cref()) & flag;
rb_thread_t *th = GET_THREAD();
rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(th, th->cfp);
if (!vm_env_cref_by_cref(cfp->ep)) {
return NOEX_PUBLIC & flag;
}
else {
return CREF_VISI(rb_vm_cref()) & flag;
}
}
static int
@@ -1113,8 +1134,7 @@ check_definition(VALUE mod, VALUE mid, rb_method_flag_t noex)
if (!id) return Qfalse;
me = rb_method_entry_without_refinements(mod, id, 0);
if (me) {
if (VISI_CHECK(me->flag, noex))
return Qtrue;
if (VISI_CHECK(me->def->flag, noex)) return Qtrue;
}
return Qfalse;
}
@@ -1261,7 +1281,7 @@ rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_defini
switch (d1->type) {
case VM_METHOD_TYPE_ISEQ:
return d1->body.iseq_body.iseq == d2->body.iseq_body.iseq;
return d1->body.iseq.iseqval == d2->body.iseq.iseqval;
case VM_METHOD_TYPE_CFUNC:
return
d1->body.cfunc.func == d2->body.cfunc.func &&
@@ -1296,7 +1316,7 @@ rb_hash_method_definition(st_index_t hash, const rb_method_definition_t *def)
switch (def->type) {
case VM_METHOD_TYPE_ISEQ:
return rb_hash_uint(hash, (st_index_t)def->body.iseq_body.iseq);
return rb_hash_uint(hash, (st_index_t)def->body.iseq.iseqval);
case VM_METHOD_TYPE_CFUNC:
hash = rb_hash_uint(hash, (st_index_t)def->body.cfunc.func);
return rb_hash_uint(hash, def->body.cfunc.argc);
@@ -1358,16 +1378,15 @@ rb_alias(VALUE klass, ID name, ID def)
if (orig_me->def->type == VM_METHOD_TYPE_ZSUPER) {
klass = RCLASS_SUPER(klass);
def = orig_me->def->original_id;
flag = orig_me->flag;
flag = orig_me->def->flag;
goto again;
}
if (flag == NOEX_UNDEF) flag = orig_me->flag;
if (flag == NOEX_UNDEF) flag = orig_me->def->flag;
if (defined_class != target_klass) { /* inter class/module alias */
VALUE real_owner;
rb_method_entry_t *new_orig_me;
rb_method_definition_t *def;
rb_method_entry_t *alias_me;
if (RB_TYPE_P(defined_class, T_ICLASS)) {
defined_class = real_owner = RBASIC_CLASS(defined_class);
@@ -1376,25 +1395,15 @@ rb_alias(VALUE klass, ID name, ID def)
real_owner = defined_class;
}
/* make new me */
new_orig_me = ALLOC(rb_method_entry_t);
*new_orig_me = *orig_me;
new_orig_me->called_id = name;
/* make alias def */
def = ALLOC(rb_method_definition_t);
def->type = VM_METHOD_TYPE_ALIAS;
def->original_id = orig_me->called_id;
def->alias_count = -1; /* will be increment at method_entry_set0() */
def->body.alias.original_me = new_orig_me;
if (new_orig_me->def) new_orig_me->def->alias_count++;
/* make copy */
method_entry_set0(target_klass, name, VM_METHOD_TYPE_ALIAS, def, flag, defined_class);
/* make method entry */
alias_me = rb_add_method(target_klass, name, VM_METHOD_TYPE_ALIAS, rb_method_entry_clone(orig_me), flag);
RB_OBJ_WRITE(alias_me, &alias_me->klass, defined_class);
alias_me->def->original_id = orig_me->called_id;
*(ID *)&alias_me->def->body.alias.original_me->called_id = name;
}
else {
method_entry_set(target_klass, name, orig_me, flag, defined_class);
}
method_entry_set(target_klass, name, orig_me, flag, defined_class);
}
}
/*
@@ -1690,8 +1699,7 @@ int
rb_method_basic_definition_p(VALUE klass, ID id)
{
const rb_method_entry_t *me = rb_method_entry(klass, id, 0);
if (me && (me->flag & NOEX_BASIC))
return 1;
if (me && (me->def->flag & NOEX_BASIC)) return 1;
return 0;
}