Bug 561359 - Predication of method optimization is too dynamic, causing "Assertion failure: &shape.methodObject() == &prev.toObject()". r=dvander.

--HG--
extra : rebase_source : 0a101adb7b237c56bde65ff7af3fdfcee0ad13df
Jason Orendorff 2011-08-30 12:55:46 -05:00
Parent 2d3dda6542
Commit 81c001571c
9 changed files with 170 additions and 172 deletions

View file

@@ -0,0 +1,4 @@
+for (let z = 0; z < 2; z++) {
+    with ({x: function () {}})
+        f = x;
+}

View file

@@ -0,0 +1,8 @@
+function f(s) {
+    var obj = {m: function () { return a; }};
+    eval(s);
+    return obj;
+}
+var obj = f("var a = 'right';");
+var a = 'wrong';
+assertEq(obj.m(), 'right');

View file

@@ -234,8 +234,7 @@ JSCompartment::wrap(JSContext *cx, Value *vp)
     /*
      * Wrappers should really be parented to the wrapped parent of the wrapped
      * object, but in that case a wrapped global object would have a NULL
-     * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead
-,
+     * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
      * we parent all wrappers to the global object in their home compartment.
      * This loses us some transparency, and is generally very cheesy.
      */

View file

@@ -5014,7 +5014,7 @@ BEGIN_CASE(JSOP_LAMBDA)
         if (fun->isNullClosure()) {
            parent = &regs.fp()->scopeChain();

-            if (obj->getParent() == parent) {
+            if (fun->joinable()) {
                jsbytecode *pc2 = AdvanceOverBlockchainOp(regs.pc + JSOP_LAMBDA_LENGTH);
                JSOp op2 = JSOp(*pc2);

@@ -5049,41 +5049,39 @@ BEGIN_CASE(JSOP_LAMBDA)
                     fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(pc2 - regs.pc)));
                     break;
                 }
-            } else if (fun->joinable()) {
-                if (op2 == JSOP_CALL) {
-                    /*
-                     * Array.prototype.sort and String.prototype.replace are
-                     * optimized as if they are special form. We know that they
-                     * won't leak the joined function object in obj, therefore
-                     * we don't need to clone that compiler- created function
-                     * object for identity/mutation reasons.
-                     */
-                    int iargc = GET_ARGC(pc2);
-
-                    /*
-                     * Note that we have not yet pushed obj as the final argument,
-                     * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
-                     * is the callee for this JSOP_CALL.
-                     */
-                    const Value &cref = regs.sp[1 - (iargc + 2)];
-                    JSObject *callee;
-
-                    if (IsFunctionObject(cref, &callee)) {
-                        JSFunction *calleeFun = callee->getFunctionPrivate();
-                        if (Native native = calleeFun->maybeNative()) {
-                            if ((iargc == 1 && native == array_sort) ||
-                                (iargc == 2 && native == str_replace)) {
-                                break;
-                            }
-                        }
-                    }
-                } else if (op2 == JSOP_NULL) {
-                    pc2 += JSOP_NULL_LENGTH;
-                    op2 = JSOp(*pc2);
-                    if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0)
-                        break;
-                }
+            } else if (op2 == JSOP_CALL) {
+                /*
+                 * Array.prototype.sort and String.prototype.replace are
+                 * optimized as if they are special form. We know that they
+                 * won't leak the joined function object in obj, therefore
+                 * we don't need to clone that compiler-created function
+                 * object for identity/mutation reasons.
+                 */
+                int iargc = GET_ARGC(pc2);
+
+                /*
+                 * Note that we have not yet pushed obj as the final argument,
+                 * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
+                 * is the callee for this JSOP_CALL.
+                 */
+                const Value &cref = regs.sp[1 - (iargc + 2)];
+                JSObject *callee;
+
+                if (IsFunctionObject(cref, &callee)) {
+                    JSFunction *calleeFun = callee->getFunctionPrivate();
+                    if (Native native = calleeFun->maybeNative()) {
+                        if ((iargc == 1 && native == array_sort) ||
+                            (iargc == 2 && native == str_replace)) {
+                            break;
+                        }
+                    }
+                }
+            } else if (op2 == JSOP_NULL) {
+                pc2 += JSOP_NULL_LENGTH;
+                op2 = JSOp(*pc2);
+                if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0)
+                    break;
             }
         }
     } else {

View file

@@ -311,7 +311,9 @@ bool
 JSFunctionBox::joinable() const
 {
     return function()->isNullClosure() &&
-           !(tcflags & (TCF_FUN_USES_ARGUMENTS | TCF_FUN_USES_OWN_NAME));
+           (tcflags & (TCF_FUN_USES_ARGUMENTS |
+                       TCF_FUN_USES_OWN_NAME |
+                       TCF_COMPILE_N_GO)) == TCF_COMPILE_N_GO;
 }

 bool
@@ -4434,8 +4436,6 @@ CloneParseTree(JSParseNode *opn, JSTreeContext *tc)
 #endif /* JS_HAS_DESTRUCTURING */

-extern const char js_with_statement_str[];
-
 static JSParseNode *
 ContainsStmt(JSParseNode *pn, TokenKind tt)
 {

View file

@@ -154,7 +154,7 @@ using namespace js::tjit;

 /* Implement embedder-specific nanojit members. */

 /*
  * Nanojit requires infallible allocations most of the time. We satisfy this by
  * reserving some space in each allocator which is used as a fallback if
  * rt->calloc_() fails. Ideally this reserve space should be big enough to allow
@@ -2433,7 +2433,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, TraceMonitor *tm,
         LIns* counterValue = w.ldiVolatile(counterPtr);
         LIns* test = w.ltiN(counterValue, LOOP_COUNT_MAX);
         LIns *branch = w.jfUnoptimizable(test);

         /*
          * stiVolatile() uses ACCSET_STORE_ANY; If LICM is implemented
          * (bug 545406) this counter will need its own region.
          */
@@ -2924,7 +2924,7 @@ ContainsUnrechableGCThingImpl(JSContext *cx, TreeFragment *f)
     if (f->visiting)
         return false;
     f->visiting = true;

     if (!f->code())
         return false;
@@ -2972,7 +2972,7 @@ ClearVisitingFlag(TreeFragment *f)
  * Recursively check if the fragment and its dependent and linked trees has
  * dead GC things. As the trees can point to each other we use the visiting
  * flag to detect already visited fragments. The flag is cleared after we
  * walked the whole graph in the separated ClearVisitingFlag function.
  */
 static bool
 ContainsUnrechableGCThing(JSContext *cx, TreeFragment *f)
@@ -2994,7 +2994,7 @@ TraceMonitor::sweep(JSContext *cx)
         recorderTree = recorder->getTree();
         shouldAbortRecording = HasUnreachableGCThings(cx, recorderTree);
     }

     for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
         TreeFragment** fragp = &vmfragments[i];
         while (TreeFragment* frag = *fragp) {
@@ -3008,7 +3008,7 @@ TraceMonitor::sweep(JSContext *cx)
                 fragp = &frag->next;
                 continue;
             }

             debug_only_printf(LC_TMTracer,
                               "TreeFragment peer %p has dead gc thing."
                               "Disconnecting tree %p with ip %p\n",
@@ -4381,7 +4381,7 @@ TraceRecorder::snapshot(ExitType exitType)
     } else if (pendingSpecializedNative &&
                (pendingSpecializedNative->flags & JSTN_RETURN_NULLABLE_OBJ)) {
         typemap[stackSlots - 1] = JSVAL_TYPE_OBJORNULL;
     }

     /* Now restore the the original pc (after which early returns are ok). */
     if (resumeAfter) {
@@ -4852,7 +4852,7 @@ class SlotMap : public SlotVisitorBase
             JS_ASSERT(info.type == JSVAL_TYPE_INT32 || info.type == JSVAL_TYPE_DOUBLE);

             /*
              * This should only happen if the slot has a trivial conversion, i.e.
              * IsPromotedInt32() is true. We check this.
              *
              * Note that getFromTracker() will return NULL if the slot was
              * never used, in which case we don't do the check. We could
@@ -4862,7 +4862,7 @@ class SlotMap : public SlotVisitorBase
              */
             LIns* ins = mRecorder.getFromTrackerImpl(info.vp);
             JS_ASSERT_IF(ins, IsPromotedInt32(ins));
         } else
 #endif
         if (info.lastCheck == TypeCheck_Demote) {
             JS_ASSERT(info.type == JSVAL_TYPE_INT32 || info.type == JSVAL_TYPE_DOUBLE);
@@ -4918,7 +4918,7 @@ class DefaultSlotMap : public SlotMap
     DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr)
     {
     }

     virtual ~DefaultSlotMap()
     {
     }
@@ -6268,7 +6268,7 @@ IsEntryTypeCompatible(const Value &v, JSValueType type)
     } else if (v.isDouble()) {
         int32_t _;
         ok = (type == JSVAL_TYPE_DOUBLE) ||
              (type == JSVAL_TYPE_INT32 && JSDOUBLE_IS_INT32(v.toDouble(), &_));
     } else if (v.isObject()) {
@@ -6579,7 +6579,7 @@ TracerState::~TracerState()
         JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == cx->compartment);
         JS_THREAD_DATA(cx)->onTraceCompartment = NULL;
     }

     traceMonitor->tracerState = prev;
     traceMonitor->tracecx = NULL;
 }
@@ -6738,7 +6738,7 @@ ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
                       CurrentLine(cx),
                       js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs().pc]);
 #endif

 #ifdef JS_METHODJIT
     if (cx->methodJitEnabled) {
         if (lr->exitType == LOOP_EXIT && f->iters < MIN_LOOP_ITERS
@@ -7331,7 +7331,7 @@ TraceRecorder::monitorRecording(JSOp op)
     JS_ASSERT(!addPropShapeBefore);

     JS_ASSERT(traceMonitor == cx->compartment->traceMonitor());

     TraceMonitor &localtm = *traceMonitor;
     debug_only_stmt( JSContext *localcx = cx; )
     assertInsideLoop();
@@ -7927,7 +7927,7 @@ PurgeScriptFragments(TraceMonitor* tm, JSScript* script)
                       "Purging fragments for JSScript %p.\n", (void*)script);

     /* A recorder script is being evaluated and can not be destroyed or GC-ed. */
     JS_ASSERT_IF(tm->recorder,
                  JS_UPTRDIFF(tm->recorder->getTree()->ip, script->code) >= script->length);

     for (LoopProfileMap::Enum e(*tm->loopProfiles); !e.empty(); e.popFront()) {
@@ -8422,13 +8422,13 @@ TraceRecorder::tryToDemote(LOpcode op, jsdouble v0, jsdouble v1, LIns* s0, LIns*
      * at record-time, and the oracle doesn't direct us otherwise, we
      * speculatively emit a demoted (integer) operation, betting that at
      * runtime we will get integer results again.
      *
      * We also have to protect against various edge cases. For example,
      * to protect against overflow we emit a guard that will inform the oracle
      * on overflow and cause a non-demoted trace to be attached that uses
      * floating-point math for this operation; the exception to this case is
      * if the operands guarantee that the result will be an integer (e.g.
      * z = d0 * d1 with 0 <= (d0|d1) <= 0xffff guarantees z <= fffe0001).
      */
     if (!oracle || oracle->isInstructionUndemotable(cx->regs().pc) ||
@@ -8499,7 +8499,7 @@ TraceRecorder::tryToDemote(LOpcode op, jsdouble v0, jsdouble v1, LIns* s0, LIns*
     }

     /*
      * A would-be negative zero result can only occur if we have
      * mul(0, -n) or mul(-n, 0), where n != 0. In particular, a multiply
      * where one operand is a positive immediate cannot result in negative
      * zero.
@@ -9733,7 +9733,7 @@ TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, const Value &v, LIn
 }

 void
 TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& slots_ins,
                                const Value &v, LIns* v_ins)
 {
     if (!slots_ins)
@@ -10116,7 +10116,7 @@ TraceRecorder::getThis(LIns*& this_ins)
         return RECORD_CONTINUE;
     }

     JS_ASSERT(fp->callee().getGlobal() == globalObj);

     Value& thisv = fp->thisValue();
     if (thisv.isObject() || fp->fun()->inStrictMode()) {
@@ -10359,7 +10359,7 @@ TraceRecorder::putActivationObjects()
     if (nslots) {
         slots_ins = w.allocp(sizeof(Value) * nslots);
         for (int i = 0; i < nslots; ++i) {
             box_value_into(fp->slots()[i], get(&fp->slots()[i]),
                            AllocSlotsAddress(slots_ins, i));
         }
     } else {
@@ -12603,7 +12603,7 @@ GetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp)
         SetBuiltinError(tm);
         return false;
     }

     return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUEPTR,
@@ -13085,7 +13085,7 @@ SetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, JSB
     }

     return WasBuiltinSuccessful(tm);
 }

 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByName,
                      CONTEXT, OBJECT, STRINGPTR, VALUEPTR, BOOL,
                      0, ACCSET_STORE_ANY)
@@ -13225,7 +13225,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
         JSObject* tarray = js::TypedArray::getTypedArray(obj);

         // The index was on the stack and is therefore a LIR float; force it to
         // be an integer.
         CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));

         // Ensure idx >= 0 && idx < length (by using uint32)
@@ -13291,7 +13291,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
             // Do nothing, this is already a float
             break;
           default:
             JS_NOT_REACHED("Unknown typed array type in tracer");
         }

         switch (js::TypedArray::getType(tarray)) {
@@ -13315,7 +13315,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
             w.stdTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
             break;
           default:
             JS_NOT_REACHED("Unknown typed array type in tracer");
         }
     } else if (idx.toInt32() < 0 || !obj->isDenseArray()) {
         CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v,
@@ -13326,7 +13326,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
         VMSideExit* mismatchExit = snapshot(MISMATCH_EXIT);

         // Make sure the array is actually dense.
         if (!obj->isDenseArray())
             return ARECORD_STOP;
         guardDenseArray(obj_ins, branchExit);
@@ -15456,30 +15456,30 @@ TraceRecorder::record_JSOP_LAMBDA()
      * JSOP_INITMETHOD logic governing the early ARECORD_CONTINUE returns below
      * must agree with the corresponding break-from-do-while(0) logic there.
      */
-    if (fun->isNullClosure() && fun->getParent() == &cx->fp()->scopeChain()) {
-        jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs().pc + JSOP_LAMBDA_LENGTH);
-        JSOp op2 = JSOp(*pc2);
+    if (fun->isNullClosure()) {
+        if (fun->joinable()) {
+            jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs().pc + JSOP_LAMBDA_LENGTH);
+            JSOp op2 = JSOp(*pc2);

-        if (op2 == JSOP_INITMETHOD) {
-            stack(0, w.immpObjGC(fun));
-            return ARECORD_CONTINUE;
-        }
+            if (op2 == JSOP_INITMETHOD) {
+                stack(0, w.immpObjGC(fun));
+                return ARECORD_CONTINUE;
+            }

-        if (op2 == JSOP_SETMETHOD) {
-            Value lval = stackval(-1);
-            if (!lval.isPrimitive() && lval.toObject().canHaveMethodBarrier()) {
-                stack(0, w.immpObjGC(fun));
-                return ARECORD_CONTINUE;
-            }
-        } else if (fun->joinable()) {
-            if (op2 == JSOP_CALL) {
-                /*
-                 * Array.prototype.sort and String.prototype.replace are
-                 * optimized as if they are special form. We know that they
-                 * won't leak the joined function object in obj, therefore
-                 * we don't need to clone that compiler- created function
-                 * object for identity/mutation reasons.
-                 */
-                int iargc = GET_ARGC(pc2);
+            if (op2 == JSOP_SETMETHOD) {
+                Value lval = stackval(-1);
+                if (!lval.isPrimitive() && lval.toObject().canHaveMethodBarrier()) {
+                    stack(0, w.immpObjGC(fun));
+                    return ARECORD_CONTINUE;
+                }
+            } else if (op2 == JSOP_CALL) {
+                /*
+                 * Array.prototype.sort and String.prototype.replace are
+                 * optimized as if they are special form. We know that they
+                 * won't leak the joined function object in obj, therefore we
+                 * don't need to clone that compiler-created function object
+                 * for identity/mutation reasons.
+                 */
+                int iargc = GET_ARGC(pc2);
@@ -16734,7 +16734,7 @@ RecordTracePoint(JSContext* cx, TraceMonitor* tm, bool* blacklist, bool execAllo
             return TPA_Error;

         JS_ASSERT(!cx->isExceptionPending());

         return TPA_RanStuff;
     }
@@ -17094,7 +17094,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op)
     if (op == JSOP_LOOKUPSWITCH)
         branchMultiplier *= GET_UINT16(pc + JUMP_OFFSET_LEN);

     if (numAllOps >= MAX_PROFILE_OPS) {
         debug_only_print0(LC_TMProfiler, "Profiling complete (maxops)\n");
         tm->profile->decide(cx);
@@ -17165,7 +17165,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op)
     } else {
         stackClear();
     }

     return ProfContinue;
 }
@@ -17214,7 +17214,7 @@ LoopProfile::isCompilationUnprofitable(JSContext *cx, uintN goodOps)
     if (goodOps <= 22 && allOps[OP_FWDJUMP])
         return true;

     /* Ensure that inner loops aren't fleeting. */
     for (uintN i=0; i<numInnerLoops; i++) {
         LoopProfile *prof = LookupLoopProfile(traceMonitor, innerLoops[i].top);
@@ -17231,7 +17231,7 @@ LoopProfile::decide(JSContext *cx)
 {
     bool wasUndecided = undecided;
     bool wasTraceOK = traceOK;

     profiled = true;
     traceOK = false;
     undecided = false;
@@ -17364,7 +17364,7 @@ AbortProfiling(JSContext *cx)
 {
     JS_ASSERT(TRACE_PROFILER(cx));
     LoopProfile *prof = TRACE_PROFILER(cx);

     debug_only_print0(LC_TMProfiler, "Profiling complete (aborted)\n");
     prof->profiled = true;
     prof->traceOK = false;

View file

@@ -110,7 +110,7 @@ mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript, bool isConstructi
     traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
 #if defined JS_POLYIC
     pics(CompilerAllocPolicy(cx, *thisFromCtor())),
     getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
     setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
@@ -870,7 +870,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
     JSC::ExecutableAllocator::makeWritable(result, codeSize);
     masm.executableCopy(result);
     stubcc.masm.executableCopy(result + masm.size());

     JSC::LinkBuffer fullCode(result, codeSize);
     JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
@@ -927,7 +927,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
     }

     jit->pcLengths = pcLengths;

     /*
      * WARNING: mics(), callICs() et al depend on the ordering of these
      * variable-length sections. See JITScript's declaration for details.
      */
@@ -1107,7 +1107,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
                                         fullCode.locationOf(callICs[i].funGuard);
         jitCallICs[i].joinPointOffset = offset;
         JS_ASSERT(jitCallICs[i].joinPointOffset == offset);

         /* Compute the OOL call offset. */
         offset = stubCode.locationOf(callICs[i].oolCall) -
                  stubCode.locationOf(callICs[i].slowPathStart);
@@ -1167,7 +1167,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
         if (equalityICs[i].jumpToStub.isSet())
             jitEqualityICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
         jitEqualityICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);

         stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
     }
@@ -1204,7 +1204,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
         jitTraceICs[i].loopCounterStart = hotloop;
         jitTraceICs[i].loopCounter = hotloop < prevCount ? 1 : hotloop - prevCount;
 #endif

         stubCode.patch(traceICs[i].addrLabel, &jitTraceICs[i]);
     }
 #endif /* JS_MONOIC */
@@ -1565,7 +1565,7 @@ mjit::Compiler::generateMethod()
     /**********************
      * BEGIN COMPILER OPS *
      **********************/

         lastPC = PC;
@@ -1936,7 +1936,7 @@ mjit::Compiler::generateMethod()
             frame.pop();
             pushSyncedEntry(0);
           }
         END_CASE(JSOP_DELPROP)

         BEGIN_CASE(JSOP_DELELEM)
         {
@@ -2465,20 +2465,26 @@ mjit::Compiler::generateMethod()
             JSObjStubFun stub = stubs::Lambda;
             uint32 uses = 0;

-            jsbytecode *pc2 = AdvanceOverBlockchainOp(PC + JSOP_LAMBDA_LENGTH);
-            JSOp next = JSOp(*pc2);
-
-            if (next == JSOP_INITMETHOD) {
-                stub = stubs::LambdaForInit;
-            } else if (next == JSOP_SETMETHOD) {
-                stub = stubs::LambdaForSet;
-                uses = 1;
-            } else if (fun->joinable()) {
-                if (next == JSOP_CALL) {
-                    stub = stubs::LambdaJoinableForCall;
-                    uses = frame.frameSlots();
-                } else if (next == JSOP_NULL) {
-                    stub = stubs::LambdaJoinableForNull;
+            jsbytecode *pc2 = NULL;
+            if (fun->joinable()) {
+                pc2 = AdvanceOverBlockchainOp(PC + JSOP_LAMBDA_LENGTH);
+                JSOp next = JSOp(*pc2);
+
+                if (next == JSOP_INITMETHOD) {
+                    stub = stubs::LambdaJoinableForInit;
+                } else if (next == JSOP_SETMETHOD) {
+                    stub = stubs::LambdaJoinableForSet;
+                    uses = 1;
+                } else if (next == JSOP_CALL) {
+                    int iargc = GET_ARGC(pc2);
+                    if (iargc == 1 || iargc == 2) {
+                        stub = stubs::LambdaJoinableForCall;
+                        uses = frame.frameSlots();
+                    }
+                } else if (next == JSOP_NULL) {
+                    pc2 += JSOP_NULL_LENGTH;
+                    if (JSOp(*pc2) == JSOP_CALL && GET_ARGC(pc2) == 0)
+                        stub = stubs::LambdaJoinableForNull;
                 }
             }
@@ -2712,7 +2718,7 @@ mjit::Compiler::generateMethod()
     /**********************
      * END COMPILER OPS *
      **********************/

         if (cx->typeInferenceEnabled() && PC == lastPC + analyze::GetBytecodeLength(lastPC)) {
             /*
@@ -4031,7 +4037,7 @@ mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const
         }
     } else {
         double ld, rd;

         /* These should be infallible w/ primitives. */
         JS_ALWAYS_TRUE(ToNumber(cx, lhs, &ld));
         JS_ALWAYS_TRUE(ToNumber(cx, rhs, &rd));
@@ -4525,7 +4531,7 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
     RETURN_IF_OOM(false);

     /*
      * Initialize op labels. We use GetPropLabels here because we have the same patching
      * requirements for CallProp.
      */
@@ -4559,7 +4565,7 @@ mjit::Compiler::jsop_callprop_str(JSAtom *atom)
 {
     if (!globalObj) {
         jsop_callprop_slow(atom);
         return true;
     }

     /*
@@ -4622,7 +4628,7 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
     JS_ASSERT(top->isTypeKnown());
     JS_ASSERT(top->getKnownType() == JSVAL_TYPE_OBJECT);

     RESERVE_IC_SPACE(masm);

     pic.pc = PC;
@@ -4683,7 +4689,7 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
     frame.storeRegs(-2, shapeReg, objReg, knownPushedType(0));
     BarrierState barrier = testBarrier(shapeReg, objReg);

     /*
      * Assert correctness of hardcoded offsets.
      * No type guard: type is asserted.
      */
@@ -5420,7 +5426,7 @@ mjit::Compiler::jsop_this()
 {
     frame.pushThis();

     /*
      * In strict mode code, we don't wrap 'this'.
      * In direct-call eval code, we wrapped 'this' before entering the eval.
      * In global code, 'this' is always an object.
@@ -5840,7 +5846,7 @@ mjit::Compiler::jsop_getgname(uint32 index)
         masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
         Address address(objReg, slot);

         /* Allocate any register other than objReg. */
         RegisterID treg = frame.allocReg();
         /* After dreg is loaded, it's safe to clobber objReg. */
@@ -6138,7 +6144,7 @@ mjit::Compiler::jsop_instanceof()
         OOL_STUBCALL(stubs::InstanceOf, REJOIN_FALLTHROUGH);
         firstSlow = stubcc.masm.jump();
     }

     /* This is sadly necessary because the error case needs the object. */
     frame.dup();

View file

@@ -1481,28 +1481,23 @@ stubs::RegExp(VMFrame &f, JSObject *regex)
 }

 JSObject * JS_FASTCALL
-stubs::LambdaForInit(VMFrame &f, JSFunction *fun)
+stubs::LambdaJoinableForInit(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
     jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
-        fun->setMethodAtom(f.script()->getAtom(GET_SLOTNO(nextpc)));
-        return obj;
-    }
-    return Lambda(f, fun);
+    JS_ASSERT(fun->joinable());
+    fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(nextpc)));
+    return fun;
 }

 JSObject * JS_FASTCALL
-stubs::LambdaForSet(VMFrame &f, JSFunction *fun)
+stubs::LambdaJoinableForSet(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
+    JS_ASSERT(fun->joinable());
     jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
-        const Value &lref = f.regs.sp[-1];
-        if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) {
-            fun->setMethodAtom(f.script()->getAtom(GET_SLOTNO(nextpc)));
-            return obj;
-        }
+    const Value &lref = f.regs.sp[-1];
+    if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) {
+        fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(nextpc)));
+        return fun;
     }
     return Lambda(f, fun);
 }
@@ -1510,36 +1505,34 @@ stubs::LambdaForSet(VMFrame &f, JSFunction *fun)
 JSObject * JS_FASTCALL
 stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
+    JS_ASSERT(fun->joinable());
     jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
-        /*
-         * Array.prototype.sort and String.prototype.replace are
-         * optimized as if they are special form. We know that they
-         * won't leak the joined function object in obj, therefore
-         * we don't need to clone that compiler- created function
-         * object for identity/mutation reasons.
-         */
-        int iargc = GET_ARGC(nextpc);
-
-        /*
-         * Note that we have not yet pushed obj as the final argument,
-         * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
-         * is the callee for this JSOP_CALL.
-         */
-        const Value &cref = f.regs.sp[1 - (iargc + 2)];
-        JSObject *callee;
-
-        if (IsFunctionObject(cref, &callee)) {
-            JSFunction *calleeFun = callee->getFunctionPrivate();
-            Native native = calleeFun->maybeNative();
-
-            if (native) {
-                if (iargc == 1 && native == array_sort)
-                    return obj;
-                if (iargc == 2 && native == str_replace)
-                    return obj;
-            }
+
+    /*
+     * Array.prototype.sort and String.prototype.replace are optimized as if
+     * they are special form. We know that they won't leak the joined function
+     * object fun, therefore we don't need to clone that compiler-created
+     * function object for identity/mutation reasons.
+     */
+    int iargc = GET_ARGC(nextpc);
+
+    /*
+     * Note that we have not yet pushed fun as the final argument, so
+     * regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)], is the callee
+     * for this JSOP_CALL.
+     */
+    const Value &cref = f.regs.sp[1 - (iargc + 2)];
+    JSObject *callee;
+
+    if (IsFunctionObject(cref, &callee)) {
+        JSFunction *calleeFun = callee->getFunctionPrivate();
+        Native native = calleeFun->maybeNative();
+
+        if (native) {
+            if (iargc == 1 && native == array_sort)
+                return fun;
+            if (iargc == 2 && native == str_replace)
+                return fun;
         }
     }
     return Lambda(f, fun);
@@ -1548,23 +1541,13 @@ stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun)
 JSObject * JS_FASTCALL
 stubs::LambdaJoinableForNull(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
-    jsbytecode *nextpc = (jsbytecode *) f.scratch;
-    if (fun->isNullClosure() && obj->getParent() == &f.fp()->scopeChain()) {
-        jsbytecode *pc2 = nextpc + JSOP_NULL_LENGTH;
-        JSOp op2 = JSOp(*pc2);
-        if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0)
-            return obj;
-    }
-    return Lambda(f, fun);
+    JS_ASSERT(fun->joinable());
+    return fun;
 }

 JSObject * JS_FASTCALL
 stubs::Lambda(VMFrame &f, JSFunction *fun)
 {
-    JSObject *obj = fun;
-
     JSObject *parent;
     if (fun->isNullClosure()) {
         parent = &f.fp()->scopeChain();
@@ -1574,7 +1557,7 @@ stubs::Lambda(VMFrame &f, JSFunction *fun)
             THROWV(NULL);
     }

-    obj = CloneFunctionObject(f.cx, fun, parent, true);
+    JSObject *obj = CloneFunctionObject(f.cx, fun, parent, true);
     if (!obj)
         THROWV(NULL);

View file

@@ -156,8 +156,8 @@ JSObject * JS_FASTCALL DefLocalFun(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL DefLocalFun_FC(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL RegExp(VMFrame &f, JSObject *regex);
 JSObject * JS_FASTCALL Lambda(VMFrame &f, JSFunction *fun);
-JSObject * JS_FASTCALL LambdaForInit(VMFrame &f, JSFunction *fun);
-JSObject * JS_FASTCALL LambdaForSet(VMFrame &f, JSFunction *fun);
+JSObject * JS_FASTCALL LambdaJoinableForInit(VMFrame &f, JSFunction *fun);
+JSObject * JS_FASTCALL LambdaJoinableForSet(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL LambdaJoinableForCall(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL LambdaJoinableForNull(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL FlatLambda(VMFrame &f, JSFunction *fun);