Mirror of https://github.com/mozilla/pjs.git

Bug 561359 - Predication of method optimization is too dynamic, causing "Assertion failure: &shape.methodObject() == &prev.toObject()". r=dvander.

--HG--
extra : rebase_source : 0a101adb7b237c56bde65ff7af3fdfcee0ad13df

This commit is contained in:
Parent: 2d3dda6542
Commit: 81c001571c
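Background: SpiderMonkey's joined function object optimization lets the compiler reuse a single function object for a lambda instead of cloning one per evaluation, but only while no script can observe the sharing. A hedged JavaScript illustration of the identity rule that has to be preserved (the identifiers below are illustrative, not taken from this patch):

    // Each evaluation of a function expression must appear to yield a fresh
    // object; the engine may share ("join") the underlying object only while
    // scripts cannot tell the difference.
    function makeObj() {
        return { m: function () { return 0; } };
    }
    print(makeObj().m === makeObj().m);  // false: the two methods need distinct identities

The patch makes the joinability decision depend on compile-time facts (see the JSFunctionBox::joinable() change below) instead of runtime parent and scope-chain checks, and adds the two regression tests that follow.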
@@ -0,0 +1,4 @@
+for (let z = 0; z < 2; z++) {
+    with ({x: function () {}})
+        f = x;
+}
@@ -0,0 +1,8 @@
+function f(s) {
+    var obj = {m: function () { return a; }};
+    eval(s);
+    return obj;
+}
+var obj = f("var a = 'right';");
+var a = 'wrong';
+assertEq(obj.m(), 'right');
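The two hunks above add shell regression tests: the first exercises a lambda that escapes through a with block, the second a method lambda whose enclosing scope is changed by eval. They rely on the SpiderMonkey shell's assertEq; a minimal stand-in for running them elsewhere might look like this (an assumption; the real builtin also accepts an optional message and reports richer diagnostics):

    function assertEq(actual, expected) {
        // Rough equivalent of the shell builtin: throw if the values differ.
        if (actual !== expected)
            throw new Error("Assertion failed: got " + actual + ", expected " + expected);
    }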
@@ -234,8 +234,7 @@ JSCompartment::wrap(JSContext *cx, Value *vp)
     /*
      * Wrappers should really be parented to the wrapped parent of the wrapped
      * object, but in that case a wrapped global object would have a NULL
-     * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead
-     ,
+     * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
      * we parent all wrappers to the global object in their home compartment.
      * This loses us some transparency, and is generally very cheesy.
      */
@@ -5014,7 +5014,7 @@ BEGIN_CASE(JSOP_LAMBDA)
        if (fun->isNullClosure()) {
            parent = &regs.fp()->scopeChain();

-            if (obj->getParent() == parent) {
+            if (fun->joinable()) {
                jsbytecode *pc2 = AdvanceOverBlockchainOp(regs.pc + JSOP_LAMBDA_LENGTH);
                JSOp op2 = JSOp(*pc2);

@@ -5049,8 +5049,7 @@ BEGIN_CASE(JSOP_LAMBDA)
                    fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(pc2 - regs.pc)));
                    break;
                }
-            } else if (fun->joinable()) {
-                if (op2 == JSOP_CALL) {
+            } else if (op2 == JSOP_CALL) {
                /*
                 * Array.prototype.sort and String.prototype.replace are
                 * optimized as if they are special form. We know that they
@@ -5085,7 +5084,6 @@ BEGIN_CASE(JSOP_LAMBDA)
                        break;
                    }
                }
-            }
        } else {
            parent = GetScopeChainFast(cx, regs.fp(), JSOP_LAMBDA, JSOP_LAMBDA_LENGTH);
            if (!parent)
@@ -311,7 +311,9 @@ bool
 JSFunctionBox::joinable() const
 {
     return function()->isNullClosure() &&
-           !(tcflags & (TCF_FUN_USES_ARGUMENTS | TCF_FUN_USES_OWN_NAME));
+           (tcflags & (TCF_FUN_USES_ARGUMENTS |
+                       TCF_FUN_USES_OWN_NAME |
+                       TCF_COMPILE_N_GO)) == TCF_COMPILE_N_GO;
 }

 bool
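With the tightened predicate above, joinability is decided from compile-time flags alone: a null closure in compile-and-go code that uses neither the arguments object nor its own name. A hedged sketch of how those last two conditions show up at the source level (illustrative only, not part of the patch):

    var a = { m: function ()  { return 1; } };                  // candidate for joining
    var b = { m: function f() { return f; } };                  // uses its own name (TCF_FUN_USES_OWN_NAME)
    var c = { m: function ()  { return arguments.length; } };   // uses arguments (TCF_FUN_USES_ARGUMENTS)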
@@ -4434,8 +4436,6 @@ CloneParseTree(JSParseNode *opn, JSTreeContext *tc)

 #endif /* JS_HAS_DESTRUCTURING */

-extern const char js_with_statement_str[];
-
 static JSParseNode *
 ContainsStmt(JSParseNode *pn, TokenKind tt)
 {
@@ -15456,7 +15456,8 @@ TraceRecorder::record_JSOP_LAMBDA()
      * JSOP_INITMETHOD logic governing the early ARECORD_CONTINUE returns below
      * must agree with the corresponding break-from-do-while(0) logic there.
      */
-    if (fun->isNullClosure() && fun->getParent() == &cx->fp()->scopeChain()) {
+    if (fun->isNullClosure()) {
+        if (fun->joinable()) {
            jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs().pc + JSOP_LAMBDA_LENGTH);
            JSOp op2 = JSOp(*pc2);

@@ -15472,14 +15473,13 @@ TraceRecorder::record_JSOP_LAMBDA()
            stack(0, w.immpObjGC(fun));
            return ARECORD_CONTINUE;
        }
-    } else if (fun->joinable()) {
-        if (op2 == JSOP_CALL) {
+    } else if (op2 == JSOP_CALL) {
        /*
         * Array.prototype.sort and String.prototype.replace are
         * optimized as if they are special form. We know that they
-         * won't leak the joined function object in obj, therefore
-         * we don't need to clone that compiler- created function
-         * object for identity/mutation reasons.
+         * won't leak the joined function object in obj, therefore we
+         * don't need to clone that compiler-created function object
+         * for identity/mutation reasons.
         */
        int iargc = GET_ARGC(pc2);

@@ -2465,19 +2465,25 @@ mjit::Compiler::generateMethod()
         JSObjStubFun stub = stubs::Lambda;
         uint32 uses = 0;

-        jsbytecode *pc2 = AdvanceOverBlockchainOp(PC + JSOP_LAMBDA_LENGTH);
+        jsbytecode *pc2 = NULL;
+        if (fun->joinable()) {
+            pc2 = AdvanceOverBlockchainOp(PC + JSOP_LAMBDA_LENGTH);
             JSOp next = JSOp(*pc2);

             if (next == JSOP_INITMETHOD) {
-                stub = stubs::LambdaForInit;
+                stub = stubs::LambdaJoinableForInit;
             } else if (next == JSOP_SETMETHOD) {
-                stub = stubs::LambdaForSet;
+                stub = stubs::LambdaJoinableForSet;
                 uses = 1;
-            } else if (fun->joinable()) {
-                if (next == JSOP_CALL) {
+            } else if (next == JSOP_CALL) {
+                int iargc = GET_ARGC(pc2);
+                if (iargc == 1 || iargc == 2) {
                     stub = stubs::LambdaJoinableForCall;
                     uses = frame.frameSlots();
+                }
             } else if (next == JSOP_NULL) {
-                stub = stubs::LambdaJoinableForNull;
+                pc2 += JSOP_NULL_LENGTH;
+                if (JSOp(*pc2) == JSOP_CALL && GET_ARGC(pc2) == 0)
+                    stub = stubs::LambdaJoinableForNull;
             }
         }
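The stub selection above keys off the bytecode that consumes the joinable lambda. For the JSOP_CALL cases with one or two arguments, the comment kept in stubs::LambdaJoinableForCall (below) names the two natives involved; in source terms the patterns look roughly like this (a sketch, not taken from the patch):

    // Callbacks passed directly to these natives are known not to escape, so
    // the joined function object does not need to be cloned for them.
    [3, 1, 2].sort(function (x, y) { return x - y; });               // argc == 1, Array.prototype.sort
    "ab".replace(/a/, function (m) { return m.toUpperCase(); });     // argc == 2, String.prototype.replace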
@@ -1481,28 +1481,23 @@ stubs::RegExp(VMFrame &f, JSObject *regex)
 }

 JSObject * JS_FASTCALL
-stubs::LambdaForInit(VMFrame &f, JSFunction *fun)
+stubs::LambdaJoinableForInit(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
     jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
-        fun->setMethodAtom(f.script()->getAtom(GET_SLOTNO(nextpc)));
-        return obj;
-    }
-    return Lambda(f, fun);
+    JS_ASSERT(fun->joinable());
+    fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(nextpc)));
+    return fun;
 }

 JSObject * JS_FASTCALL
-stubs::LambdaForSet(VMFrame &f, JSFunction *fun)
+stubs::LambdaJoinableForSet(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
+    JS_ASSERT(fun->joinable());
     jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
     const Value &lref = f.regs.sp[-1];
     if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) {
-            fun->setMethodAtom(f.script()->getAtom(GET_SLOTNO(nextpc)));
-            return obj;
-        }
+        fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(nextpc)));
+        return fun;
     }
     return Lambda(f, fun);
 }
@@ -1510,22 +1505,21 @@ stubs::LambdaForSet(VMFrame &f, JSFunction *fun)
 JSObject * JS_FASTCALL
 stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
+    JS_ASSERT(fun->joinable());
     jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
+
     /*
-     * Array.prototype.sort and String.prototype.replace are
-     * optimized as if they are special form. We know that they
-     * won't leak the joined function object in obj, therefore
-     * we don't need to clone that compiler- created function
-     * object for identity/mutation reasons.
+     * Array.prototype.sort and String.prototype.replace are optimized as if
+     * they are special form. We know that they won't leak the joined function
+     * object fun, therefore we don't need to clone that compiler-created
+     * function object for identity/mutation reasons.
      */
     int iargc = GET_ARGC(nextpc);

     /*
-     * Note that we have not yet pushed obj as the final argument,
-     * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
-     * is the callee for this JSOP_CALL.
+     * Note that we have not yet pushed fun as the final argument, so
+     * regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)], is the callee
+     * for this JSOP_CALL.
      */
     const Value &cref = f.regs.sp[1 - (iargc + 2)];
     JSObject *callee;
@@ -1536,10 +1530,9 @@ stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun)

         if (native) {
             if (iargc == 1 && native == array_sort)
-                return obj;
+                return fun;
             if (iargc == 2 && native == str_replace)
-                return obj;
+                return fun;
         }
-        }
     }
     return Lambda(f, fun);
@@ -1548,23 +1541,13 @@ stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun)
 JSObject * JS_FASTCALL
 stubs::LambdaJoinableForNull(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
-    jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
-        jsbytecode *pc2 = nextpc + JSOP_NULL_LENGTH;
-        JSOp op2 = JSOp(*pc2);
-
-        if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0)
-            return obj;
-    }
-    return Lambda(f, fun);
+    JS_ASSERT(fun->joinable());
+    return fun;
 }

 JSObject * JS_FASTCALL
 stubs::Lambda(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
-
     JSObject *parent;
     if (fun->isNullClosure()) {
         parent = &f.fp()->scopeChain();
@@ -1574,7 +1557,7 @@ stubs::Lambda(VMFrame &f, JSFunction *fun)
         THROWV(NULL);
     }

-    obj = CloneFunctionObject(f.cx, fun, parent, true);
+    JSObject *obj = CloneFunctionObject(f.cx, fun, parent, true);
     if (!obj)
         THROWV(NULL);

@@ -156,8 +156,8 @@ JSObject * JS_FASTCALL DefLocalFun(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL DefLocalFun_FC(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL RegExp(VMFrame &f, JSObject *regex);
 JSObject * JS_FASTCALL Lambda(VMFrame &f, JSFunction *fun);
-JSObject * JS_FASTCALL LambdaForInit(VMFrame &f, JSFunction *fun);
-JSObject * JS_FASTCALL LambdaForSet(VMFrame &f, JSFunction *fun);
+JSObject * JS_FASTCALL LambdaJoinableForInit(VMFrame &f, JSFunction *fun);
+JSObject * JS_FASTCALL LambdaJoinableForSet(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL LambdaJoinableForCall(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL LambdaJoinableForNull(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL FlatLambda(VMFrame &f, JSFunction *fun);
|