Mirror of https://github.com/github/ruby.git
Remove splatarray true -> splatarray false peephole optimization
The compiler now uses splatarray false for all cases that would previously have been optimized, so this is all dead code.
This commit is contained in:
Parent: 3de20efc30
Commit: 2c79a7641f

compile.c: 149 lines changed (all deletions)
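The claim in the commit message can be spot-checked on a build that includes this change by disassembling one of the call shapes the removed comments mention, such as f(1, *a). A minimal sketch (CRuby only, since it relies on RubyVM::InstructionSequence; the method name f and the local a are arbitrary):

    # Compile one of the call shapes the removed peephole used to rewrite,
    # then dump the bytecode. RubyVM::InstructionSequence is CRuby-specific.
    iseq = RubyVM::InstructionSequence.compile("a = []; f(1, *a)")
    puts iseq.disasm
    # Per the commit message, the dump should already show "splatarray false"
    # for this call site, leaving no "splatarray true" for the old peephole
    # rule to rewrite.

If such a call still compiled to splatarray true, the removed code below would not have been dead.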
@@ -3237,34 +3237,6 @@ ci_argc_set(const rb_iseq_t *iseq, const struct rb_callinfo *ci, int argc)
     return nci;
 }
 
-static bool
-optimize_args_splat_no_copy(rb_iseq_t *iseq, INSN *insn, LINK_ELEMENT *niobj,
-        unsigned int set_flags, unsigned int unset_flags, unsigned int remove_flags)
-{
-    LINK_ELEMENT *iobj = (LINK_ELEMENT *)insn;
-    if ((set_flags & VM_CALL_ARGS_BLOCKARG) && (set_flags & VM_CALL_KW_SPLAT) &&
-        IS_NEXT_INSN_ID(niobj, splatkw)) {
-        niobj = niobj->next;
-    }
-    if (!IS_NEXT_INSN_ID(niobj, send) && !IS_NEXT_INSN_ID(niobj, invokesuper)) {
-        return false;
-    }
-    niobj = niobj->next;
-
-    const struct rb_callinfo *ci = (const struct rb_callinfo *)OPERAND_AT(niobj, 0);
-    unsigned int flags = vm_ci_flag(ci);
-    if ((flags & set_flags) == set_flags && !(flags & unset_flags)) {
-        RUBY_ASSERT(flags & VM_CALL_ARGS_SPLAT_MUT);
-        OPERAND_AT(iobj, 0) = Qfalse;
-        const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
-            flags & ~(VM_CALL_ARGS_SPLAT_MUT|remove_flags), vm_ci_argc(ci), vm_ci_kwarg(ci));
-        RB_OBJ_WRITTEN(iseq, ci, nci);
-        OPERAND_AT(niobj, 0) = (VALUE)nci;
-        return true;
-    }
-    return false;
-}
-
 static int
 iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcallopt)
 {
@@ -3937,127 +3909,6 @@ iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcal
         }
     }
 
-    if (IS_INSN_ID(iobj, splatarray) && OPERAND_AT(iobj, 0) == Qtrue) {
-        LINK_ELEMENT *niobj = &iobj->link;
-
-        /*
-         * Eliminate array allocation for f(1, *a)
-         *
-         * splatarray true
-         * send ARGS_SPLAT and not KW_SPLAT|ARGS_BLOCKARG
-         * =>
-         * splatarray false
-         * send
-         */
-        if (optimize_args_splat_no_copy(iseq, iobj, niobj,
-            VM_CALL_ARGS_SPLAT, VM_CALL_KW_SPLAT|VM_CALL_ARGS_BLOCKARG, 0)) goto optimized_splat;
-
-        if (IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable)) {
-            niobj = niobj->next;
-
-            /*
-             * Eliminate array allocation for f(1, *a, &lvar) and f(1, *a, &@iv)
-             *
-             * splatarray true
-             * getlocal / getinstancevariable
-             * send ARGS_SPLAT|ARGS_BLOCKARG and not KW_SPLAT
-             * =>
-             * splatarray false
-             * getlocal / getinstancevariable
-             * send
-             */
-            if (optimize_args_splat_no_copy(iseq, iobj, niobj,
-                VM_CALL_ARGS_SPLAT|VM_CALL_ARGS_BLOCKARG, VM_CALL_KW_SPLAT, 0)) goto optimized_splat;
-
-            /*
-             * Eliminate array allocation for f(*a, **lvar) and f(*a, **@iv)
-             *
-             * splatarray true
-             * getlocal / getinstancevariable
-             * send ARGS_SPLAT|KW_SPLAT and not ARGS_BLOCKARG
-             * =>
-             * splatarray false
-             * getlocal / getinstancevariable
-             * send
-             */
-            if (optimize_args_splat_no_copy(iseq, iobj, niobj,
-                VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT, VM_CALL_ARGS_BLOCKARG, 0)) goto optimized_splat;
-
-            if (IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable) ||
-                IS_NEXT_INSN_ID(niobj, getblockparamproxy)) {
-                niobj = niobj->next;
-
-                /*
-                 * Eliminate array allocation for f(*a, **lvar, &{arg,lvar,@iv})
-                 *
-                 * splatarray true
-                 * getlocal / getinstancevariable
-                 * getlocal / getinstancevariable / getblockparamproxy
-                 * send ARGS_SPLAT|KW_SPLAT|ARGS_BLOCKARG
-                 * =>
-                 * splatarray false
-                 * getlocal / getinstancevariable
-                 * getlocal / getinstancevariable / getblockparamproxy
-                 * send
-                 */
-                optimize_args_splat_no_copy(iseq, iobj, niobj,
-                    VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_ARGS_BLOCKARG, 0, 0);
-            }
-        } else if (IS_NEXT_INSN_ID(niobj, getblockparamproxy)) {
-            /*
-             * Eliminate array allocation for f(1, *a, &arg)
-             *
-             * splatarray true
-             * getblockparamproxy
-             * send ARGS_SPLAT|ARGS_BLOCKARG and not KW_SPLAT
-             * =>
-             * splatarray false
-             * getblockparamproxy
-             * send
-             */
-            optimize_args_splat_no_copy(iseq, iobj, niobj,
-                VM_CALL_ARGS_SPLAT|VM_CALL_ARGS_BLOCKARG, VM_CALL_KW_SPLAT, 0);
-        } else if (IS_NEXT_INSN_ID(niobj, duphash)) {
-            niobj = niobj->next;
-
-            /*
-             * Eliminate array allocation for f(*a, kw: 1)
-             *
-             * splatarray true
-             * duphash
-             * send ARGS_SPLAT|KW_SPLAT|KW_SPLAT_MUT and not ARGS_BLOCKARG
-             * =>
-             * splatarray false
-             * duphash
-             * send
-             */
-            if (optimize_args_splat_no_copy(iseq, iobj, niobj,
-                VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT, VM_CALL_ARGS_BLOCKARG, 0)) {
-
-                goto optimized_splat;
-            }
-
-            if (IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable) ||
-                IS_NEXT_INSN_ID(niobj, getblockparamproxy)) {
-                /*
-                 * Eliminate array allocation for f(*a, kw: 1, &{arg,lvar,@iv})
-                 *
-                 * splatarray true
-                 * duphash
-                 * getlocal / getinstancevariable / getblockparamproxy
-                 * send ARGS_SPLAT|KW_SPLAT|KW_SPLAT_MUT|ARGS_BLOCKARG
-                 * =>
-                 * splatarray false
-                 * duphash
-                 * getlocal / getinstancevariable / getblockparamproxy
-                 * send
-                 */
-                optimize_args_splat_no_copy(iseq, iobj, niobj->next,
-                    VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT|VM_CALL_ARGS_BLOCKARG, 0, 0);
-            }
-        }
-    }
-  optimized_splat:
     if (IS_INSN_ID(iobj, splatarray) && OPERAND_AT(iobj, 0) == false) {
         LINK_ELEMENT *niobj = &iobj->link;
         if (IS_NEXT_INSN_ID(niobj, duphash)) {