/* -*- C -*-
  insns.def - YARV instruction definitions

  $Author: $
  created at: 04/01/01 01:17:55 JST

  Copyright (C) 2004-2007 Koichi Sasada
  Massive rewrite by @shyouhei in 2017.
 */

/* Some comments about this file's contents:

   - The new format aims to be editable by the C editor of your choice;
     your mileage might vary of course.

   - Each instruction is in the following format:

     DEFINE_INSN
     instruction_name
     (type operand, type operand, ..)
     (pop_values, ..)
     (return values ..)
     // attr type name contents..
     {
         .. // insn body
     }

   - Unlike the old format, which was line-oriented, you can now place
     newlines and comments freely.

   - `DEFINE_INSN` is a keyword.

   - An instruction name must be a valid C identifier.

   - Operands, pop values, and return values are a series of either
     variable declarations, the keyword `void`, or the keyword `...`.
     They are much like C function declarations.

   - Attribute pragmas are optional, and can include arbitrary C
     expressions.  You can write anything there, but as of this writing
     the supported attributes are:

     * sp_inc: Used to dynamically calculate the sp increase in
       `insn_stack_increase`.

     * handles_sp: If it is true, the VM deals with sp in the insn.
       The default is whether the instruction takes an ISEQ operand or not.

     * leaf: indicates that the instruction is "leaf", i.e. it does not
       introduce a new stack frame on top of it.
       If an instruction handles sp, it can never be a leaf.

   - Attributes can access operands, but not stack (push/pop) variables.

   - An instruction's body is a pure C block, copied verbatim into the
     generated C source code.
 */
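
/* Illustrative sketch only (not a real instruction): how the attribute
 * pragmas described above fit together.  The name `example_insn` and its
 * helper are hypothetical; every real use appears further below.
 *
 *   DEFINE_INSN
 *   example_insn
 *   (rb_num_t num)             // one operand, visible to the attrs
 *   (...)                      // pops a variable number of values
 *   (VALUE val)                // pushes one value
 *   // attr rb_snum_t sp_inc = 1 - (rb_snum_t)num;  // pop num, push 1
 *   // attr bool leaf = false;                      // body may call methods / raise
 *   // attr bool handles_sp = false;
 *   {
 *       val = example_helper(num, STACK_ADDR_FROM_TOP(num));
 *   }
 */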

/* nop */
DEFINE_INSN
nop
()
()
()
{
    /* none */
}

/**********************************************************/
/* deal with variables                                    */
/**********************************************************/

/* Get a local variable (pointed to by `idx' and `level').
   'level' indicates the nesting depth from the current block. */
DEFINE_INSN
getlocal
(lindex_t idx, rb_num_t level)
()
(VALUE val)
{
    val = *(vm_get_ep(GET_EP(), level) - idx);
    RB_DEBUG_COUNTER_INC(lvar_get);
    (void)RB_DEBUG_COUNTER_INC_IF(lvar_get_dynamic, level > 0);
}

/* Set a local variable (pointed to by 'idx') to val.
   'level' indicates the nesting depth from the current block. */
DEFINE_INSN
setlocal
(lindex_t idx, rb_num_t level)
(VALUE val)
()
{
    vm_env_write(vm_get_ep(GET_EP(), level), -(int)idx, val);
    RB_DEBUG_COUNTER_INC(lvar_set);
    (void)RB_DEBUG_COUNTER_INC_IF(lvar_set_dynamic, level > 0);
}

/* Get a block parameter. */
DEFINE_INSN
getblockparam
(lindex_t idx, rb_num_t level)
()
(VALUE val)
{
    const VALUE *ep = vm_get_ep(GET_EP(), level);
    VM_ASSERT(VM_ENV_LOCAL_P(ep));

    if (!VM_ENV_FLAGS(ep, VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM)) {
        val = rb_vm_bh_to_procval(ec, VM_ENV_BLOCK_HANDLER(ep));
        vm_env_write(ep, -(int)idx, val);
        VM_ENV_FLAGS_SET(ep, VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM);
    }
    else {
        val = *(ep - idx);
        RB_DEBUG_COUNTER_INC(lvar_get);
        (void)RB_DEBUG_COUNTER_INC_IF(lvar_get_dynamic, level > 0);
    }
}

/* Set a block parameter. */
DEFINE_INSN
setblockparam
(lindex_t idx, rb_num_t level)
(VALUE val)
()
{
    const VALUE *ep = vm_get_ep(GET_EP(), level);
    VM_ASSERT(VM_ENV_LOCAL_P(ep));

    vm_env_write(ep, -(int)idx, val);
    RB_DEBUG_COUNTER_INC(lvar_set);
    (void)RB_DEBUG_COUNTER_INC_IF(lvar_set_dynamic, level > 0);

    VM_ENV_FLAGS_SET(ep, VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM);
}
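
/* The `level` operand used by the four instructions above is resolved by
 * vm_get_ep(): starting from the current frame's EP, it follows the parent
 * environment link once per level.  A conceptual sketch (simplified; the
 * real definition lives in vm_insnhelper.c):
 *
 *   static const VALUE *
 *   sketch_get_ep(const VALUE *ep, rb_num_t level)
 *   {
 *       while (level-- > 0) {
 *           ep = VM_ENV_PREV_EP(ep);   // step out to the enclosing scope
 *       }
 *       return ep;                     // locals are then at *(ep - idx)
 *   }
 */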

/* Get a special proxy object that only responds to the `call` method,
   if the block parameter represents an iseq/ifunc block.  Otherwise,
   same as `getblockparam`. */
DEFINE_INSN
getblockparamproxy
(lindex_t idx, rb_num_t level)
()
(VALUE val)
{
    const VALUE *ep = vm_get_ep(GET_EP(), level);
    VM_ASSERT(VM_ENV_LOCAL_P(ep));

    if (!VM_ENV_FLAGS(ep, VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM)) {
        VALUE block_handler = VM_ENV_BLOCK_HANDLER(ep);

        if (block_handler) {
            switch (vm_block_handler_type(block_handler)) {
              case block_handler_type_iseq:
              case block_handler_type_ifunc:
                val = rb_block_param_proxy;
                break;
              case block_handler_type_symbol:
                val = rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
                goto INSN_LABEL(set);
              case block_handler_type_proc:
                val = VM_BH_TO_PROC(block_handler);
                goto INSN_LABEL(set);
              default:
                VM_UNREACHABLE(getblockparamproxy);
            }
        }
        else {
            val = Qnil;
          INSN_LABEL(set):
            vm_env_write(ep, -(int)idx, val);
            VM_ENV_FLAGS_SET(ep, VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM);
        }
    }
    else {
        val = *(ep - idx);
        RB_DEBUG_COUNTER_INC(lvar_get);
        (void)RB_DEBUG_COUNTER_INC_IF(lvar_get_dynamic, level > 0);
    }
}

/* Get value of special local variable ($~, $_, ..). */
DEFINE_INSN
getspecial
(rb_num_t key, rb_num_t type)
()
(VALUE val)
/* `$~ = MatchData.allocate; $&` can raise. */
// attr bool leaf = (type == 0) ? true : false;
{
    val = vm_getspecial(ec, GET_LEP(), key, type);
}

/* Set value of special local variable ($~, $_, ...) to obj. */
DEFINE_INSN
setspecial
(rb_num_t key)
(VALUE obj)
()
{
    lep_svar_set(ec, GET_LEP(), key, obj);
}

/* Get value of instance variable id of self. */
DEFINE_INSN
getinstancevariable
(ID id, IVC ic)
()
(VALUE val)
/* A Ractor crashes when it accesses class/module-level instance variables. */
// attr bool leaf = false; /* has IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR() */
{
    val = vm_getinstancevariable(GET_ISEQ(), GET_SELF(), id, ic);
}

/* Set value of instance variable id of self to val. */
DEFINE_INSN
setinstancevariable
(ID id, IVC ic)
(VALUE val)
()
// attr bool leaf = false; /* has rb_check_frozen() */
{
    vm_setinstancevariable(GET_ISEQ(), GET_SELF(), id, val, ic);
}

/* Get value of class variable id of klass as val. */
DEFINE_INSN
getclassvariable
(ID id, ICVARC ic)
()
(VALUE val)
/* "class variable access from toplevel" warning can be hooked. */
// attr bool leaf = false; /* has rb_warning() */
{
    rb_control_frame_t *cfp = GET_CFP();
    val = vm_getclassvariable(GET_ISEQ(), cfp, id, ic);
}

/* Set value of class variable id of klass as val. */
DEFINE_INSN
setclassvariable
(ID id, ICVARC ic)
(VALUE val)
()
/* "class variable access from toplevel" warning can be hooked. */
// attr bool leaf = false; /* has rb_warning() */
{
    vm_ensure_not_refinement_module(GET_SELF());
    vm_setclassvariable(GET_ISEQ(), GET_CFP(), id, val, ic);
}

DEFINE_INSN
opt_getconstant_path
(IC ic)
()
(VALUE val)
// attr bool leaf = false; /* may autoload or raise */
{
    val = rb_vm_opt_getconstant_path(ec, GET_CFP(), ic);
}

/* Get the constant id.  If klass is Qnil and allow_nil is Qtrue, the
   constant is looked up in the current scope.  Otherwise, get the
   constant under the klass class or module. */
DEFINE_INSN
getconstant
(ID id)
(VALUE klass, VALUE allow_nil)
(VALUE val)
/* getconstant can kick autoload */
// attr bool leaf = false; /* has rb_autoload_load() */
{
    val = vm_get_ev_const(ec, klass, id, allow_nil == Qtrue, 0);
}

/* Set the constant id under the cbase class or module. */
DEFINE_INSN
setconstant
(ID id)
(VALUE val, VALUE cbase)
()
/* Assigning an object to a constant is basically a leaf operation.
 * The problem is, assigning a Module instance to a constant _names_
 * that module.  Naming involves string manipulations, which are
 * method calls. */
// attr bool leaf = false; /* has StringValue() */
{
    vm_check_if_namespace(cbase);
    vm_ensure_not_refinement_module(GET_SELF());
    rb_const_set(cbase, id, val);
}

/* get global variable id. */
DEFINE_INSN
getglobal
(ID gid)
()
(VALUE val)
// attr bool leaf = false;
{
    val = rb_gvar_get(gid);
}

/* set global variable id to val. */
DEFINE_INSN
setglobal
(ID gid)
(VALUE val)
()
// attr bool leaf = false;
{
    rb_gvar_set(gid, val);
}

/**********************************************************/
/* deal with values                                       */
/**********************************************************/

/* put nil to stack. */
DEFINE_INSN
putnil
()
()
(VALUE val)
{
    val = Qnil;
}

/* put self. */
DEFINE_INSN
putself
()
()
(VALUE val)
{
    val = GET_SELF();
}

/* put some object, i.e. Fixnum, true, false, nil, and so on. */
DEFINE_INSN
putobject
(VALUE val)
()
(VALUE val)
{
    /* */
}

/* put special object.  "value_type" is for expansion. */
DEFINE_INSN
putspecialobject
(rb_num_t value_type)
()
(VALUE val)
// attr bool leaf = (value_type == VM_SPECIAL_OBJECT_VMCORE); /* others may raise when allocating singleton */
{
    enum vm_special_object_type type;

    type = (enum vm_special_object_type)value_type;
    val = vm_get_special_object(GET_EP(), type);
}

/* put string val.  string will be copied. */
DEFINE_INSN
putstring
(VALUE str)
()
(VALUE val)
{
    val = rb_ec_str_resurrect(ec, str, false);
}

/* put chilled string val.  string will be copied but frozen in the future. */
DEFINE_INSN
putchilledstring
(VALUE str)
()
(VALUE val)
{
    val = rb_ec_str_resurrect(ec, str, true);
}

/* concatenate strings and push the result. */
DEFINE_INSN
concatstrings
(rb_num_t num)
(...)
(VALUE val)
/* This instruction can concat UTF-8 and binary strings, resulting in
 * Encoding::CompatibilityError. */
// attr bool leaf = false; /* has rb_enc_cr_str_buf_cat() */
// attr rb_snum_t sp_inc = 1 - (rb_snum_t)num;
{
    val = rb_str_concat_literals(num, STACK_ADDR_FROM_TOP(num));
}

/* Convert the result to a string if it is not already one.  This is
   used as a backup if to_s does not return a string. */
DEFINE_INSN
anytostring
()
(VALUE val, VALUE str)
(VALUE val)
{
    val = rb_obj_as_string_result(str, val);
}

/* compile str to Regexp and push it.  opt is the option for the Regexp. */
DEFINE_INSN
toregexp
(rb_num_t opt, rb_num_t cnt)
(...)
(VALUE val)
/* This instruction can raise RegexpError, thus can call
 * RegexpError#initialize */
// attr bool leaf = false;
// attr rb_snum_t sp_inc = 1 - (rb_snum_t)cnt;
{
    const VALUE ary = rb_ary_tmp_new_from_values(0, cnt, STACK_ADDR_FROM_TOP(cnt));
    val = rb_reg_new_ary(ary, (int)opt);
    rb_ary_clear(ary);
}

/* intern str to Symbol and push it. */
DEFINE_INSN
intern
()
(VALUE str)
(VALUE sym)
{
    sym = rb_str_intern(str);
}

/* put new array initialized with num values on the stack. */
DEFINE_INSN
newarray
(rb_num_t num)
(...)
(VALUE val)
// attr rb_snum_t sp_inc = 1 - (rb_snum_t)num;
{
    val = rb_ec_ary_new_from_values(ec, num, STACK_ADDR_FROM_TOP(num));
}

/* push hash onto array unless the hash is empty (as empty keyword
   splats should be ignored). */
DEFINE_INSN
pushtoarraykwsplat
()
(VALUE ary, VALUE hash)
(VALUE ary)
{
    if (!RHASH_EMPTY_P(hash)) {
        rb_ary_push(ary, hash);
    }
}

/* dup array */
DEFINE_INSN
duparray
(VALUE ary)
()
(VALUE val)
{
    RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
    val = rb_ary_resurrect(ary);
}

/* dup hash */
DEFINE_INSN
duphash
(VALUE hash)
()
(VALUE val)
{
    RUBY_DTRACE_CREATE_HOOK(HASH, RHASH_SIZE(hash) << 1);
    val = rb_hash_resurrect(hash);
}

/* If TOS is an array, expand it to num objects.
   If the length of the array is less than num, push nils to fill.
   If it is greater than num, the exceeding elements are dropped.
   If TOS is not an array, push num - 1 nils.
   If flag is non-zero, push an array of the remaining elements.
   flag: 0x01 - rest args array
   flag: 0x02 - for postarg
   flag: 0x04 - reverse? */
DEFINE_INSN
expandarray
(rb_num_t num, rb_num_t flag)
(..., VALUE ary)
(...)
// attr bool handles_sp = true;
// attr bool leaf = false; /* has rb_check_array_type() */
// attr rb_snum_t sp_inc = (rb_snum_t)num - 1 + (flag & 1 ? 1 : 0);
{
    vm_expandarray(GET_CFP(), ary, num, (int)flag);
}
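
/* Worked example (derived from the description above, not taken from a
 * disassembly): with num = 3, flag = 0x01 and TOS = [:a, :b], the array
 * is popped, values for the three required targets are pushed (:a, :b
 * and a filling nil), and an empty rest array is pushed for the 0x01
 * flag.  The net stack change matches the sp_inc attribute:
 * num - 1 + 1 = 3.  With flag = 0 and a five-element array, only the
 * first three elements are used and the exceeding two are dropped. */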

/* concat two arrays, without modifying the first array.
 * attempts to convert both objects to arrays using to_a. */
DEFINE_INSN
concatarray
()
(VALUE ary1, VALUE ary2)
(VALUE ary)
// attr bool leaf = false; /* has rb_check_array_type() */
{
    ary = vm_concat_array(ary1, ary2);
}

/* concat the second array onto the first array.
 * the first operand must already be an array.
 * attempts to convert the second object to an array using to_a. */
DEFINE_INSN
concattoarray
()
(VALUE ary1, VALUE ary2)
(VALUE ary)
// attr bool leaf = false; /* has rb_check_array_type() */
{
    ary = vm_concat_to_array(ary1, ary2);
}

/* push the given number of objects onto the array located directly
   beneath them on the stack. */
DEFINE_INSN
pushtoarray
(rb_num_t num)
(...)
(VALUE val)
// attr rb_snum_t sp_inc = -(rb_snum_t)num;
{
    const VALUE *objp = STACK_ADDR_FROM_TOP(num);
    val = rb_ary_cat(*(objp-1), objp, num);
}

/* call to_a on ary in order to splat it */
DEFINE_INSN
splatarray
(VALUE flag)
(VALUE ary)
(VALUE obj)
// attr bool leaf = false; /* has rb_check_array_type() */
{
    obj = vm_splat_array(flag, ary);
}

/* call to_hash on the value to be keyword-splatted, before the block
   operand is converted */
DEFINE_INSN
splatkw
()
(VALUE hash, VALUE block)
(VALUE obj, VALUE block)
// attr bool leaf = false; /* has rb_to_hash_type() */
{
    if (NIL_P(hash)) {
        obj = Qnil;
    }
    else {
        obj = rb_to_hash_type(hash);
    }
}

/* put new Hash from num elements.  num must be an even number
   (alternating keys and values). */
DEFINE_INSN
newhash
(rb_num_t num)
(...)
(VALUE val)
// attr bool leaf = false; /* has rb_hash_key_str() */
// attr rb_snum_t sp_inc = 1 - (rb_snum_t)num;
{
    RUBY_DTRACE_CREATE_HOOK(HASH, num);

    if (num) {
        val = rb_hash_new_with_size(num / 2);
        rb_hash_bulk_insert(num, STACK_ADDR_FROM_TOP(num), val);
    }
    else {
        val = rb_hash_new();
    }
}

/* put new Range object (Range.new(low, high, flag)). */
DEFINE_INSN
newrange
(rb_num_t flag)
(VALUE low, VALUE high)
(VALUE val)
/* rb_range_new() exercises the "bad value for range" check. */
// attr bool leaf = false; /* see also: range.c:range_init() */
{
    val = rb_range_new(low, high, (int)flag);
}

/**********************************************************/
/* deal with stack operation                              */
/**********************************************************/

/* pop from stack. */
DEFINE_INSN
pop
()
(VALUE val)
()
{
    (void)val;
    /* none */
}

/* duplicate stack top. */
DEFINE_INSN
dup
()
(VALUE val)
(VALUE val1, VALUE val2)
{
    val1 = val2 = val;
}

/* duplicate stack top n elements */
DEFINE_INSN
dupn
(rb_num_t n)
(...)
(...)
// attr rb_snum_t sp_inc = n;
{
    void *dst = GET_SP();
    void *src = STACK_ADDR_FROM_TOP(n);

    MEMCPY(dst, src, VALUE, n);
}

/* swap top 2 vals */
DEFINE_INSN
swap
()
(VALUE val, VALUE obj)
(VALUE obj, VALUE val)
{
    /* none */
}

/* reverse the order of the stack top N values. */
DEFINE_INSN
opt_reverse
(rb_num_t n)
(...)
(...)
// attr rb_snum_t sp_inc = 0;
{
    rb_num_t i;
    VALUE *sp = STACK_ADDR_FROM_TOP(n);

    for (i = 0; i < n/2; i++) {
        VALUE v0 = sp[i];
        VALUE v1 = sp[n - i - 1];
        sp[i] = v1;
        sp[n - i - 1] = v0;
    }
}

/**********************************************************/
/* deal with control flow 1: class/module and method definition */
/**********************************************************/

/* enter class definition scope.  if super is Qfalse, and the class
   "klass" is already defined, it's reopened.  Otherwise, define the
   "klass" class. */
DEFINE_INSN
defineclass
(ID id, ISEQ class_iseq, rb_num_t flags)
(VALUE cbase, VALUE super)
(VALUE val)
{
    VALUE klass = vm_find_or_create_class_by_id(id, flags, cbase, super);

    rb_iseq_check(class_iseq);

    /* enter scope */
    vm_push_frame(ec, class_iseq, VM_FRAME_MAGIC_CLASS | VM_ENV_FLAG_LOCAL, klass,
                  GET_BLOCK_HANDLER(),
                  (VALUE)vm_cref_push(ec, klass, NULL, FALSE, FALSE),
                  ISEQ_BODY(class_iseq)->iseq_encoded, GET_SP(),
                  ISEQ_BODY(class_iseq)->local_table_size,
                  ISEQ_BODY(class_iseq)->stack_max);
    RESTORE_REGS();
    NEXT_INSN();
}

DEFINE_INSN
definemethod
(ID id, ISEQ iseq)
()
()
{
    vm_define_method(ec, Qnil, id, (VALUE)iseq, FALSE);
}

DEFINE_INSN
definesmethod
(ID id, ISEQ iseq)
(VALUE obj)
()
{
    vm_define_method(ec, obj, id, (VALUE)iseq, TRUE);
}

/**********************************************************/
/* deal with control flow 2: method/iterator              */
/**********************************************************/

/* invoke method. */
DEFINE_INSN
send
(CALL_DATA cd, ISEQ blockiseq)
(...)
(VALUE val)
// attr rb_snum_t sp_inc = sp_inc_of_sendish(cd->ci);
// attr rb_snum_t comptime_sp_inc = sp_inc_of_sendish(ci);
{
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
    val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    JIT_EXEC(ec, val);

    if (UNDEF_P(val)) {
        RESTORE_REGS();
        NEXT_INSN();
    }
}
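
/* Note on the calling convention shared by send and the other "sendish"
 * instructions below: roughly speaking, vm_sendish() returns Qundef when
 * it has pushed a new control frame (an ISeq-defined callee) instead of
 * computing a result inline; RESTORE_REGS()/NEXT_INSN() then resume the
 * dispatch loop inside that callee, whose eventual `leave` pushes the
 * return value.  Otherwise val already holds the result and execution
 * simply continues. */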

/* invoke method, forwarding the caller's arguments (...). */
DEFINE_INSN
sendforward
(CALL_DATA cd, ISEQ blockiseq)
(...)
(VALUE val)
// attr rb_snum_t sp_inc = sp_inc_of_sendish(cd->ci);
// attr rb_snum_t comptime_sp_inc = sp_inc_of_sendish(ci);
{
    struct rb_forwarding_call_data adjusted_cd;
    struct rb_callinfo adjusted_ci;

    VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, 0, &adjusted_cd, &adjusted_ci);

    val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
    JIT_EXEC(ec, val);

    if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
        RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
    }

    if (UNDEF_P(val)) {
        RESTORE_REGS();
        NEXT_INSN();
    }
}

/* Invoke method without block */
DEFINE_INSN
opt_send_without_block
(CALL_DATA cd)
(...)
(VALUE val)
// attr bool handles_sp = true;
// attr rb_snum_t sp_inc = sp_inc_of_sendish(cd->ci);
// attr rb_snum_t comptime_sp_inc = sp_inc_of_sendish(ci);
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    JIT_EXEC(ec, val);

    if (UNDEF_P(val)) {
        RESTORE_REGS();
        NEXT_INSN();
    }
}

/* Convert object to a string using to_s or equivalent. */
DEFINE_INSN
objtostring
(CALL_DATA cd)
(VALUE recv)
(VALUE val)
// attr bool leaf = false;
{
    val = vm_objtostring(GET_ISEQ(), recv, cd);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

DEFINE_INSN
opt_ary_freeze
(VALUE ary, CALL_DATA cd)
()
(VALUE val)
{
    val = vm_opt_ary_freeze(ary, BOP_FREEZE, idFreeze);

    if (UNDEF_P(val)) {
        RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
        PUSH(rb_ary_resurrect(ary));
        CALL_SIMPLE_METHOD();
    }
}

DEFINE_INSN
opt_hash_freeze
(VALUE hash, CALL_DATA cd)
()
(VALUE val)
{
    val = vm_opt_hash_freeze(hash, BOP_FREEZE, idFreeze);

    if (UNDEF_P(val)) {
        RUBY_DTRACE_CREATE_HOOK(HASH, RHASH_SIZE(hash) << 1);
        PUSH(rb_hash_resurrect(hash));
        CALL_SIMPLE_METHOD();
    }
}

DEFINE_INSN
opt_str_freeze
(VALUE str, CALL_DATA cd)
()
(VALUE val)
{
    val = vm_opt_str_freeze(str, BOP_FREEZE, idFreeze);

    if (UNDEF_P(val)) {
        PUSH(rb_str_resurrect(str));
        CALL_SIMPLE_METHOD();
    }
}

/* optimized nil? */
DEFINE_INSN
opt_nil_p
(CALL_DATA cd)
(VALUE recv)
(VALUE val)
{
    val = vm_opt_nil_p(GET_ISEQ(), cd, recv);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

DEFINE_INSN
opt_str_uminus
(VALUE str, CALL_DATA cd)
()
(VALUE val)
{
    val = vm_opt_str_freeze(str, BOP_UMINUS, idUMinus);

    if (UNDEF_P(val)) {
        PUSH(rb_str_resurrect(str));
        CALL_SIMPLE_METHOD();
    }
}

DEFINE_INSN
opt_newarray_send
(rb_num_t num, rb_num_t method)
(...)
(VALUE val)
/* This instruction typically makes no function calls.  But it compares
 * array contents with each other by nature.  That part could call
 * methods when necessary.  There is no way to detect such method calls
 * beforehand, so we have to mark it as not leaf. */
// attr bool leaf = false; /* has rb_funcall() */
// attr rb_snum_t sp_inc = 1 - (rb_snum_t)num;
// attr rb_snum_t comptime_sp_inc = 1 - (rb_snum_t)num;
{
    switch (method) {
      case VM_OPT_NEWARRAY_SEND_HASH:
        val = vm_opt_newarray_hash(ec, num, STACK_ADDR_FROM_TOP(num));
        break;
      case VM_OPT_NEWARRAY_SEND_MIN:
        val = vm_opt_newarray_min(ec, num, STACK_ADDR_FROM_TOP(num));
        break;
      case VM_OPT_NEWARRAY_SEND_MAX:
        val = vm_opt_newarray_max(ec, num, STACK_ADDR_FROM_TOP(num));
        break;
      case VM_OPT_NEWARRAY_SEND_PACK:
        val = vm_opt_newarray_pack_buffer(ec, (long)num-1, STACK_ADDR_FROM_TOP(num), TOPN(0), Qundef);
        break;
      case VM_OPT_NEWARRAY_SEND_PACK_BUFFER:
        val = vm_opt_newarray_pack_buffer(ec, (long)num-2, STACK_ADDR_FROM_TOP(num), TOPN(1), TOPN(0));
        break;
      default:
        rb_bug("unreachable");
    }
}

/* super(args) # args.size => num */
DEFINE_INSN
invokesuper
(CALL_DATA cd, ISEQ blockiseq)
(...)
(VALUE val)
// attr rb_snum_t sp_inc = sp_inc_of_sendish(cd->ci);
// attr rb_snum_t comptime_sp_inc = sp_inc_of_sendish(ci);
{
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
    val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    JIT_EXEC(ec, val);

    if (UNDEF_P(val)) {
        RESTORE_REGS();
        NEXT_INSN();
    }
}

/* super(args) # args.size => num, forwarding the caller's arguments (...) */
DEFINE_INSN
invokesuperforward
(CALL_DATA cd, ISEQ blockiseq)
(...)
(VALUE val)
// attr rb_snum_t sp_inc = sp_inc_of_sendish(cd->ci);
// attr rb_snum_t comptime_sp_inc = sp_inc_of_sendish(ci);
{
    struct rb_forwarding_call_data adjusted_cd;
    struct rb_callinfo adjusted_ci;

    VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, 1, &adjusted_cd, &adjusted_ci);

    val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
    JIT_EXEC(ec, val);

    if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
        RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
    }

    if (UNDEF_P(val)) {
        RESTORE_REGS();
        NEXT_INSN();
    }
}

/* yield(args) */
DEFINE_INSN
invokeblock
(CALL_DATA cd)
(...)
(VALUE val)
// attr bool handles_sp = true;
// attr rb_snum_t sp_inc = sp_inc_of_invokeblock(cd->ci);
// attr rb_snum_t comptime_sp_inc = sp_inc_of_invokeblock(ci);
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
    JIT_EXEC(ec, val);

    if (UNDEF_P(val)) {
        RESTORE_REGS();
        NEXT_INSN();
    }
}

/* return from this scope. */
DEFINE_INSN
leave
()
(VALUE val)
(VALUE val)
/* This may be surprising, but when leaving a frame we check for
 * interrupts.  If there are any, they are executed on top of the
 * current execution context, which amounts to a method call. */
// attr bool leaf = false; /* has rb_threadptr_execute_interrupts() */
// attr bool handles_sp = true;
{
    if (OPT_CHECKED_RUN) {
        const VALUE *const bp = vm_base_ptr(GET_CFP());
        if (GET_SP() != bp) {
            vm_stack_consistency_error(ec, GET_CFP(), bp);
        }
    }

    if (vm_pop_frame(ec, GET_CFP(), GET_EP())) {
#if OPT_CALL_THREADED_CODE
        rb_ec_thread_ptr(ec)->retval = val;
        return 0;
#else
        return val;
#endif
    }
    else {
        RESTORE_REGS();
    }
}

/**********************************************************/
/* deal with control flow 3: exception                    */
/**********************************************************/

/* longjump */
DEFINE_INSN
throw
(rb_num_t throw_state)
(VALUE throwobj)
(VALUE val)
/* Same discussion as leave. */
// attr bool leaf = false; /* has rb_threadptr_execute_interrupts() */
{
    val = vm_throw(ec, GET_CFP(), throw_state, throwobj);
    THROW_EXCEPTION(val);
    /* unreachable */
}

/**********************************************************/
/* deal with control flow 4: local jump                   */
/**********************************************************/

/* set PC to (PC + dst). */
DEFINE_INSN
jump
(OFFSET dst)
()
()
/* Same discussion as leave. */
// attr bool leaf = leafness_of_check_ints; /* has rb_threadptr_execute_interrupts() */
{
    RUBY_VM_CHECK_INTS(ec);
    JUMP(dst);
}

/* if val is not false or nil, set PC to (PC + dst). */
DEFINE_INSN
branchif
(OFFSET dst)
(VALUE val)
()
/* Same discussion as jump. */
// attr bool leaf = leafness_of_check_ints; /* has rb_threadptr_execute_interrupts() */
{
    if (RTEST(val)) {
        RUBY_VM_CHECK_INTS(ec);
        JUMP(dst);
    }
}

/* if val is false or nil, set PC to (PC + dst). */
DEFINE_INSN
branchunless
(OFFSET dst)
(VALUE val)
()
/* Same discussion as jump. */
// attr bool leaf = leafness_of_check_ints; /* has rb_threadptr_execute_interrupts() */
{
    if (!RTEST(val)) {
        RUBY_VM_CHECK_INTS(ec);
        JUMP(dst);
    }
}

/* if val is nil, set PC to (PC + dst). */
DEFINE_INSN
branchnil
(OFFSET dst)
(VALUE val)
()
/* Same discussion as jump. */
// attr bool leaf = leafness_of_check_ints; /* has rb_threadptr_execute_interrupts() */
{
    if (NIL_P(val)) {
        RUBY_VM_CHECK_INTS(ec);
        JUMP(dst);
    }
}

/**********************************************************/
/* for optimize                                           */
/**********************************************************/

/* run iseq only once */
DEFINE_INSN
once
(ISEQ iseq, ISE ise)
()
(VALUE val)
{
    val = vm_once_dispatch(ec, iseq, ise);
}

/* case dispatcher, jump by table if possible */
DEFINE_INSN
opt_case_dispatch
(CDHASH hash, OFFSET else_offset)
(..., VALUE key)
()
// attr rb_snum_t sp_inc = -1;
{
    OFFSET dst = vm_case_dispatch(hash, else_offset, key);

    if (dst) {
        JUMP(dst);
    }
}

/** simple functions */

/* optimized X+Y. */
DEFINE_INSN
opt_plus
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_plus(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}
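
/* The opt_* arithmetic/comparison instructions above and below all share
 * the pattern just shown: a helper tries an inline fast path and returns
 * Qundef when it cannot handle the operands, in which case
 * CALL_SIMPLE_METHOD() re-dispatches through the ordinary method call
 * machinery.  A minimal sketch of such a fast path (simplified; the real
 * helpers are the vm_opt_* functions in vm_insnhelper.c):
 *
 *   static VALUE
 *   sketch_opt_plus(VALUE recv, VALUE obj)
 *   {
 *       // Fast path: both operands are Fixnums and Integer#+ has not
 *       // been redefined; otherwise punt back to the generic call.
 *       if (FIXNUM_P(recv) && FIXNUM_P(obj) &&
 *           BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
 *           return rb_fix_plus_fix(recv, obj);
 *       }
 *       return Qundef;  // caller falls back to CALL_SIMPLE_METHOD()
 *   }
 */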

/* optimized X-Y. */
DEFINE_INSN
opt_minus
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_minus(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X*Y. */
DEFINE_INSN
opt_mult
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_mult(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X/Y. */
DEFINE_INSN
opt_div
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
/* In case of division by zero, it raises.  Thus
 * ZeroDivisionError#initialize is called. */
// attr bool leaf = false;
{
    val = vm_opt_div(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X%Y. */
DEFINE_INSN
opt_mod
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
/* Same discussion as opt_div. */
// attr bool leaf = false;
{
    val = vm_opt_mod(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X==Y. */
DEFINE_INSN
opt_eq
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = opt_equality(GET_ISEQ(), recv, obj, cd);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X!=Y. */
DEFINE_INSN
opt_neq
(CALL_DATA cd_eq, CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_neq(GET_ISEQ(), cd, cd_eq, recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X<Y. */
DEFINE_INSN
opt_lt
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_lt(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X<=Y. */
DEFINE_INSN
opt_le
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_le(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X>Y. */
DEFINE_INSN
opt_gt
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_gt(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X>=Y. */
DEFINE_INSN
opt_ge
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_ge(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* << */
DEFINE_INSN
opt_ltlt
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
/* This instruction can append an integer, as a codepoint, to a
 * string.  Then what happens if that codepoint does not exist in the
 * string's encoding?  Of course an exception.  That's not a leaf. */
// attr bool leaf = false; /* has "invalid codepoint" exception */
{
    val = vm_opt_ltlt(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X&Y. */
DEFINE_INSN
opt_and
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_and(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized X|Y. */
DEFINE_INSN
opt_or
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
{
    val = vm_opt_or(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* [] */
DEFINE_INSN
opt_aref
(CALL_DATA cd)
(VALUE recv, VALUE obj)
(VALUE val)
/* This is complicated.  In case of a hash, vm_opt_aref() resorts to
 * rb_hash_aref().  If `recv` does not have the key `obj`, that function
 * then calls the hash's default_proc.  This is a method call, so
 * opt_aref is (surprisingly) not leaf. */
// attr bool leaf = false; /* has rb_funcall() */ /* calls #yield */
{
    val = vm_opt_aref(recv, obj);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* recv[obj] = set */
DEFINE_INSN
opt_aset
(CALL_DATA cd)
(VALUE recv, VALUE obj, VALUE set)
(VALUE val)
/* This is a different story from opt_aref: vm_opt_aset() may resort
 * to rb_hash_aset(), which calls #hash on `obj`. */
// attr bool leaf = false; /* has rb_funcall() */ /* calls #hash */
{
    val = vm_opt_aset(recv, obj, set);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* recv[str] = set */
DEFINE_INSN
opt_aset_with
(VALUE key, CALL_DATA cd)
(VALUE recv, VALUE val)
(VALUE val)
/* Same discussion as opt_aset. */
// attr bool leaf = false; /* has rb_funcall() */ /* calls #hash */
{
    VALUE tmp = vm_opt_aset_with(recv, key, val);

    if (!UNDEF_P(tmp)) {
        val = tmp;
    }
    else {
        TOPN(0) = rb_str_resurrect(key);
        PUSH(val);
        CALL_SIMPLE_METHOD();
    }
}

/* recv[str] */
DEFINE_INSN
opt_aref_with
(VALUE key, CALL_DATA cd)
(VALUE recv)
(VALUE val)
/* Same discussion as opt_aref. */
// attr bool leaf = false; /* has rb_funcall() */ /* calls #yield */
{
    val = vm_opt_aref_with(recv, key);

    if (UNDEF_P(val)) {
        PUSH(rb_str_resurrect(key));
        CALL_SIMPLE_METHOD();
    }
}
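
/* opt_aset_with and opt_aref_with are variants of opt_aset/opt_aref used
 * when the key is a string literal: the literal is carried as an operand,
 * so the fast path can avoid materializing a fresh string.  Only on the
 * fallback path is the key resurrected with rb_str_resurrect() and pushed
 * before re-dispatching through CALL_SIMPLE_METHOD(). */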

/* optimized length */
DEFINE_INSN
opt_length
(CALL_DATA cd)
(VALUE recv)
(VALUE val)
{
    val = vm_opt_length(recv, BOP_LENGTH);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized size */
DEFINE_INSN
opt_size
(CALL_DATA cd)
(VALUE recv)
(VALUE val)
{
    val = vm_opt_length(recv, BOP_SIZE);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized empty? */
DEFINE_INSN
opt_empty_p
(CALL_DATA cd)
(VALUE recv)
(VALUE val)
{
    val = vm_opt_empty_p(recv);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized succ */
DEFINE_INSN
opt_succ
(CALL_DATA cd)
(VALUE recv)
(VALUE val)
{
    val = vm_opt_succ(recv);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized not */
DEFINE_INSN
opt_not
(CALL_DATA cd)
(VALUE recv)
(VALUE val)
{
    val = vm_opt_not(GET_ISEQ(), cd, recv);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* optimized regexp match 2 */
DEFINE_INSN
opt_regexpmatch2
(CALL_DATA cd)
(VALUE obj2, VALUE obj1)
(VALUE val)
// attr bool leaf = false; /* match_at() has rb_thread_check_ints() */
{
    val = vm_opt_regexpmatch2(obj2, obj1);

    if (UNDEF_P(val)) {
        CALL_SIMPLE_METHOD();
    }
}

/* call specific function with args */
DEFINE_INSN
invokebuiltin
(RB_BUILTIN bf)
(...)
(VALUE val)
// attr bool leaf = false; /* anything can happen inside */
// attr rb_snum_t sp_inc = 1 - bf->argc;
{
    val = vm_invoke_builtin(ec, reg_cfp, bf, STACK_ADDR_FROM_TOP(bf->argc));
}

/* call specific function with args (same parameters) */
DEFINE_INSN
opt_invokebuiltin_delegate
(RB_BUILTIN bf, rb_num_t index)
()
(VALUE val)
// attr bool leaf = false; /* anything can happen inside */
{
    val = vm_invoke_builtin_delegate(ec, reg_cfp, bf, (unsigned int)index);
}

/* call specific function with args (same parameters) and leave */
DEFINE_INSN
opt_invokebuiltin_delegate_leave
(RB_BUILTIN bf, rb_num_t index)
()
(VALUE val)
// attr bool leaf = false; /* anything can happen inside */
{
    val = vm_invoke_builtin_delegate(ec, reg_cfp, bf, (unsigned int)index);

    /* leave fastpath */
    /* TracePoint :return events fall back from this insn to opt_invokebuiltin_delegate */
    if (vm_pop_frame(ec, GET_CFP(), GET_EP())) {
#if OPT_CALL_THREADED_CODE
        rb_ec_thread_ptr(ec)->retval = val;
        return 0;
#else
        return val;
#endif
    }
    else {
        RESTORE_REGS();
    }
}

/* BLT */
DEFINE_INSN_IF(SUPPORT_JOKE)
bitblt
()
()
(VALUE ret)
{
    ret = rb_str_new2("a bit of bacon, lettuce and tomato");
}

/* The Answer to Life, the Universe, and Everything */
DEFINE_INSN_IF(SUPPORT_JOKE)
answer
()
()
(VALUE ret)
{
    ret = INT2FIX(42);
}