Optimize FrameState for large linear scripts (bug 591836, r=dmandelin).

* * *
Remove FrameState::base (bug 591836 part 1, r=dmandelin).
* * *
New register pinning invariants (bug 591836 part 2, r=dmandelin).
* * *
Remove FrameState::tosFe() (bug 591836 part 3, r=dmandelin).
* * *
New copy order invariant (bug 591836 part 4, r=dmandelin).
* * *
Optimize immutable frame syncing (bug 591836 part 5, r=dmandelin).
* * *
Optimize frame merging (bug 591836 part 6, r=dmandelin).
* * *
Optimize copying frame entries (bug 591836 part 7, r=dmandelin).
* * *
Optimize mutable frame syncing (bug 591836 part 8, r=dmandelin).
* * *
Optimize syncing in try blocks (bug 591836 part 9, r=dmandelin).
This commit is contained in:
David Anderson 2010-09-21 18:34:42 -07:00
Parent 31e63ea8d9
Commit 2e4ff06ce2
10 changed files: 684 additions and 394 deletions

View file

@ -602,7 +602,7 @@ mjit::Compiler::generateMethod()
OpcodeStatus &opinfo = analysis[PC]; OpcodeStatus &opinfo = analysis[PC];
frame.setInTryBlock(opinfo.inTryBlock); frame.setInTryBlock(opinfo.inTryBlock);
if (opinfo.nincoming || opinfo.trap) { if (opinfo.nincoming || opinfo.trap) {
frame.forgetEverything(opinfo.stackDepth); frame.syncAndForgetEverything(opinfo.stackDepth);
opinfo.safePoint = true; opinfo.safePoint = true;
} }
jumpMap[uint32(PC - script->code)] = masm.label(); jumpMap[uint32(PC - script->code)] = masm.label();
@ -683,7 +683,7 @@ mjit::Compiler::generateMethod()
BEGIN_CASE(JSOP_GOTO) BEGIN_CASE(JSOP_GOTO)
{ {
/* :XXX: this isn't really necessary if we follow the branch. */ /* :XXX: this isn't really necessary if we follow the branch. */
frame.forgetEverything(); frame.syncAndForgetEverything();
Jump j = masm.jump(); Jump j = masm.jump();
jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC)); jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC));
} }
@ -786,7 +786,7 @@ mjit::Compiler::generateMethod()
/* Branch is never taken, don't bother doing anything. */ /* Branch is never taken, don't bother doing anything. */
if (result) { if (result) {
frame.forgetEverything(); frame.syncAndForgetEverything();
Jump j = masm.jump(); Jump j = masm.jump();
jumpAndTrace(j, target); jumpAndTrace(j, target);
} }
@ -1105,10 +1105,10 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_AND) END_CASE(JSOP_AND)
BEGIN_CASE(JSOP_TABLESWITCH) BEGIN_CASE(JSOP_TABLESWITCH)
frame.forgetEverything(); frame.syncAndForgetEverything();
masm.move(ImmPtr(PC), Registers::ArgReg1); masm.move(ImmPtr(PC), Registers::ArgReg1);
/* prepareStubCall() is not needed due to forgetEverything() */ /* prepareStubCall() is not needed due to syncAndForgetEverything() */
stubCall(stubs::TableSwitch); stubCall(stubs::TableSwitch);
frame.pop(); frame.pop();
@ -1118,10 +1118,10 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_TABLESWITCH) END_CASE(JSOP_TABLESWITCH)
BEGIN_CASE(JSOP_LOOKUPSWITCH) BEGIN_CASE(JSOP_LOOKUPSWITCH)
frame.forgetEverything(); frame.syncAndForgetEverything();
masm.move(ImmPtr(PC), Registers::ArgReg1); masm.move(ImmPtr(PC), Registers::ArgReg1);
/* prepareStubCall() is not needed due to forgetEverything() */ /* prepareStubCall() is not needed due to syncAndForgetEverything() */
stubCall(stubs::LookupSwitch); stubCall(stubs::LookupSwitch);
frame.pop(); frame.pop();
@ -1213,12 +1213,23 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_GETLOCAL) END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_SETLOCAL) BEGIN_CASE(JSOP_SETLOCAL)
BEGIN_CASE(JSOP_SETLOCALPOP) {
frame.storeLocal(GET_SLOTNO(PC)); jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
if (op == JSOP_SETLOCALPOP) bool pop = JSOp(*next) == JSOP_POP && !analysis[next].nincoming;
frame.storeLocal(GET_SLOTNO(PC), pop);
if (pop) {
frame.pop(); frame.pop();
PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
break;
}
}
END_CASE(JSOP_SETLOCAL) END_CASE(JSOP_SETLOCAL)
BEGIN_CASE(JSOP_SETLOCALPOP)
frame.storeLocal(GET_SLOTNO(PC), true);
frame.pop();
END_CASE(JSOP_SETLOCALPOP)
BEGIN_CASE(JSOP_UINT16) BEGIN_CASE(JSOP_UINT16)
frame.push(Value(Int32Value((int32_t) GET_UINT16(PC)))); frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
END_CASE(JSOP_UINT16) END_CASE(JSOP_UINT16)
@ -1361,7 +1372,7 @@ mjit::Compiler::generateMethod()
if (fun) { if (fun) {
JSLocalKind localKind = fun->lookupLocal(cx, inner->atom, NULL); JSLocalKind localKind = fun->lookupLocal(cx, inner->atom, NULL);
if (localKind != JSLOCAL_NONE) if (localKind != JSLOCAL_NONE)
frame.forgetEverything(); frame.syncAndForgetEverything();
} }
prepareStubCall(Uses(0)); prepareStubCall(Uses(0));
@ -1428,6 +1439,7 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_LAMBDA) END_CASE(JSOP_LAMBDA)
BEGIN_CASE(JSOP_TRY) BEGIN_CASE(JSOP_TRY)
frame.syncAndForgetEverything();
END_CASE(JSOP_TRY) END_CASE(JSOP_TRY)
BEGIN_CASE(JSOP_GETFCSLOT) BEGIN_CASE(JSOP_GETFCSLOT)
@ -1552,7 +1564,7 @@ mjit::Compiler::generateMethod()
/* For now, don't bother doing anything for this opcode. */ /* For now, don't bother doing anything for this opcode. */
JSObject *obj = script->getObject(fullAtomIndex(PC)); JSObject *obj = script->getObject(fullAtomIndex(PC));
frame.forgetEverything(); frame.syncAndForgetEverything();
masm.move(ImmPtr(obj), Registers::ArgReg1); masm.move(ImmPtr(obj), Registers::ArgReg1);
uint32 n = js_GetEnterBlockStackDefs(cx, script, PC); uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
stubCall(stubs::EnterBlock); stubCall(stubs::EnterBlock);
@ -1821,14 +1833,14 @@ mjit::Compiler::emitReturn()
/* There will always be a call object. */ /* There will always be a call object. */
prepareStubCall(Uses(0)); prepareStubCall(Uses(0));
stubCall(stubs::PutCallObject); stubCall(stubs::PutCallObject);
frame.throwaway(); frame.discardFrame();
} else { } else {
/* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */ /* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
Jump putObjs = masm.branchTest32(Assembler::NonZero, Jump putObjs = masm.branchTest32(Assembler::NonZero,
Address(JSFrameReg, JSStackFrame::offsetOfFlags()), Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ)); Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
stubcc.linkExit(putObjs, Uses(frame.frameDepth())); stubcc.linkExit(putObjs, Uses(frame.frameDepth()));
frame.throwaway(); frame.discardFrame();
stubcc.leave(); stubcc.leave();
stubcc.call(stubs::PutActivationObjects); stubcc.call(stubs::PutActivationObjects);
@ -1914,6 +1926,8 @@ mjit::Compiler::interruptCheckHelper()
Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag); Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
#endif #endif
frame.freeReg(reg);
frame.sync(stubcc.masm, Uses(0)); frame.sync(stubcc.masm, Uses(0));
stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1); stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
stubcc.call(stubs::Interrupt); stubcc.call(stubs::Interrupt);
@ -1923,8 +1937,6 @@ mjit::Compiler::interruptCheckHelper()
#ifdef JS_THREADSAFE #ifdef JS_THREADSAFE
stubcc.linkRejoin(noInterrupt); stubcc.linkRejoin(noInterrupt);
#endif #endif
frame.freeReg(reg);
} }
void void
@ -2016,7 +2028,9 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
* registers we've preserved. * registers we've preserved.
*/ */
frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2)); frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
frame.resetRegState(); frame.unpinKilledReg(dataReg);
if (typeReg.isSet())
frame.unpinKilledReg(typeReg.reg());
Registers tempRegs; Registers tempRegs;
@ -2280,7 +2294,7 @@ mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
} else { } else {
JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE); JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
frame.forgetEverything(); frame.syncAndForgetEverything();
Assembler::Condition cond = (fused == JSOP_IFEQ) Assembler::Condition cond = (fused == JSOP_IFEQ)
? Assembler::Zero ? Assembler::Zero
: Assembler::NonZero; : Assembler::NonZero;
@ -3680,7 +3694,7 @@ mjit::Compiler::iterMore()
/* Get props_cursor, test */ /* Get props_cursor, test */
RegisterID T2 = frame.allocReg(); RegisterID T2 = frame.allocReg();
frame.forgetEverything(); frame.syncAndForgetEverything();
masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2); masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1); masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1);
Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1); Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1);
@ -3917,8 +3931,7 @@ mjit::Compiler::jsop_setgname(uint32 index)
mic.shape); mic.shape);
masm.move(ImmPtr(obj), objReg); masm.move(ImmPtr(obj), objReg);
} else { } else {
objReg = frame.tempRegForData(objFe); objReg = frame.copyDataIntoReg(objFe);
frame.pinReg(objReg);
RegisterID reg = frame.allocReg(); RegisterID reg = frame.allocReg();
masm.loadShape(objReg, reg); masm.loadShape(objReg, reg);
@ -4012,8 +4025,7 @@ mjit::Compiler::jsop_setgname(uint32 index)
JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, masm.label())); JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, masm.label()));
#endif #endif
if (objFe->isConstant()) frame.freeReg(objReg);
frame.freeReg(objReg);
frame.popn(2); frame.popn(2);
if (mic.u.name.dataConst) { if (mic.u.name.dataConst) {
frame.push(v); frame.push(v);

View file

@ -1000,7 +1000,7 @@ mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, jsbytecode *tar
frame.pop(); frame.pop();
frame.pop(); frame.pop();
frame.throwaway(); frame.discardFrame();
/* Start of the slow path for equality stub call. */ /* Start of the slow path for equality stub call. */
Label stubCall = stubcc.masm.label(); Label stubCall = stubcc.masm.label();
@ -1286,7 +1286,7 @@ mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, jsbytecode *targe
stubcc.call(stub); stubcc.call(stub);
frame.popn(2); frame.popn(2);
frame.forgetEverything(); frame.syncAndForgetEverything();
Jump j = masm.branchDouble(dblCond, fpLeft, fpRight); Jump j = masm.branchDouble(dblCond, fpLeft, fpRight);
@ -1453,7 +1453,12 @@ mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, jsbytecode *target,
frame.pinReg(reg.reg()); frame.pinReg(reg.reg());
frame.popn(2); frame.popn(2);
frame.forgetEverything();
frame.syncAndKillEverything();
frame.unpinKilledReg(cmpReg);
if (reg.isSet())
frame.unpinKilledReg(reg.reg());
frame.syncAndForgetEverything();
/* Operands could have been reordered, so use cmpOp. */ /* Operands could have been reordered, so use cmpOp. */
Assembler::Condition i32Cond; Assembler::Condition i32Cond;

View file

@ -660,7 +660,7 @@ mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, jsbytecode *target, JSOp f
*/ */
if (target) { if (target) {
frame.forgetEverything(); frame.syncAndForgetEverything();
if ((op == JSOP_EQ && fused == JSOP_IFNE) || if ((op == JSOP_EQ && fused == JSOP_IFNE) ||
(op == JSOP_NE && fused == JSOP_IFEQ)) { (op == JSOP_NE && fused == JSOP_IFEQ)) {
@ -907,8 +907,7 @@ mjit::Compiler::booleanJumpScript(JSOp op, jsbytecode *target)
type.setReg(frame.copyTypeIntoReg(fe)); type.setReg(frame.copyTypeIntoReg(fe));
data.setReg(frame.copyDataIntoReg(fe)); data.setReg(frame.copyDataIntoReg(fe));
/* :FIXME: Can something more lightweight be used? */ frame.syncAndForgetEverything();
frame.forgetEverything();
Assembler::Condition cond = (op == JSOP_IFNE || op == JSOP_OR) Assembler::Condition cond = (op == JSOP_IFNE || op == JSOP_OR)
? Assembler::NonZero ? Assembler::NonZero
@ -995,7 +994,7 @@ mjit::Compiler::jsop_ifneq(JSOp op, jsbytecode *target)
if (op == JSOP_IFEQ) if (op == JSOP_IFEQ)
b = !b; b = !b;
if (b) { if (b) {
frame.forgetEverything(); frame.syncAndForgetEverything();
jumpAndTrace(masm.jump(), target); jumpAndTrace(masm.jump(), target);
} }
return; return;
@ -1015,7 +1014,7 @@ mjit::Compiler::jsop_andor(JSOp op, jsbytecode *target)
/* Short-circuit. */ /* Short-circuit. */
if ((op == JSOP_OR && b == JS_TRUE) || if ((op == JSOP_OR && b == JS_TRUE) ||
(op == JSOP_AND && b == JS_FALSE)) { (op == JSOP_AND && b == JS_FALSE)) {
frame.forgetEverything(); frame.syncAndForgetEverything();
jumpAndTrace(masm.jump(), target); jumpAndTrace(masm.jump(), target);
} }

View file

@ -128,6 +128,7 @@ class FrameEntry
void track(uint32 index) { void track(uint32 index) {
clear(); clear();
index_ = index; index_ = index;
tracked = true;
} }
void clear() { void clear() {
@ -210,6 +211,14 @@ class FrameEntry
copy = fe; copy = fe;
} }
inline bool isTracked() const {
return tracked;
}
inline void untrack() {
tracked = false;
}
private: private:
JSValueType knownType; JSValueType knownType;
jsval_layout v_; jsval_layout v_;
@ -219,7 +228,8 @@ class FrameEntry
FrameEntry *copy; FrameEntry *copy;
bool copied; bool copied;
bool isNumber; bool isNumber;
char padding[2]; bool tracked;
char padding[1];
}; };
} /* namespace mjit */ } /* namespace mjit */

View file

@ -43,16 +43,13 @@
namespace js { namespace js {
namespace mjit { namespace mjit {
inline FrameEntry * inline void
FrameState::addToTracker(uint32 index) FrameState::addToTracker(FrameEntry *fe)
{ {
JS_ASSERT(!base[index]); JS_ASSERT(!fe->isTracked());
FrameEntry *fe = &entries[index];
base[index] = fe;
fe->track(tracker.nentries); fe->track(tracker.nentries);
tracker.add(fe); tracker.add(fe);
JS_ASSERT(tracker.nentries <= script->nslots); JS_ASSERT(tracker.nentries <= script->nslots);
return fe;
} }
inline FrameEntry * inline FrameEntry *
@ -60,9 +57,9 @@ FrameState::peek(int32 depth)
{ {
JS_ASSERT(depth < 0); JS_ASSERT(depth < 0);
JS_ASSERT(sp + depth >= spBase); JS_ASSERT(sp + depth >= spBase);
FrameEntry *fe = sp[depth]; FrameEntry *fe = &sp[depth];
if (!fe) { if (!fe->isTracked()) {
fe = addToTracker(indexOf(depth)); addToTracker(fe);
fe->resetSynced(); fe->resetSynced();
} }
return fe; return fe;
@ -89,11 +86,13 @@ inline JSC::MacroAssembler::RegisterID
FrameState::allocReg() FrameState::allocReg()
{ {
RegisterID reg; RegisterID reg;
if (!freeRegs.empty()) if (!freeRegs.empty()) {
reg = freeRegs.takeAnyReg(); reg = freeRegs.takeAnyReg();
else } else {
reg = evictSomeReg(); reg = evictSomeReg();
regstate[reg].fe = NULL; regstate[reg].forget();
}
return reg; return reg;
} }
@ -101,11 +100,13 @@ inline JSC::MacroAssembler::RegisterID
FrameState::allocReg(uint32 mask) FrameState::allocReg(uint32 mask)
{ {
RegisterID reg; RegisterID reg;
if (freeRegs.hasRegInMask(mask)) if (freeRegs.hasRegInMask(mask)) {
reg = freeRegs.takeRegInMask(mask); reg = freeRegs.takeRegInMask(mask);
else } else {
reg = evictSomeReg(mask); reg = evictSomeReg(mask);
regstate[reg].fe = NULL; regstate[reg].forget();
}
return reg; return reg;
} }
@ -113,11 +114,15 @@ inline JSC::MacroAssembler::RegisterID
FrameState::allocReg(FrameEntry *fe, RematInfo::RematType type) FrameState::allocReg(FrameEntry *fe, RematInfo::RematType type)
{ {
RegisterID reg; RegisterID reg;
if (!freeRegs.empty()) if (!freeRegs.empty()) {
reg = freeRegs.takeAnyReg(); reg = freeRegs.takeAnyReg();
else } else {
reg = evictSomeReg(); reg = evictSomeReg();
regstate[reg] = RegisterState(fe, type); regstate[reg].forget();
}
regstate[reg].associate(fe, type);
return reg; return reg;
} }
@ -162,8 +167,8 @@ FrameState::pop()
{ {
JS_ASSERT(sp > spBase); JS_ASSERT(sp > spBase);
FrameEntry *fe = *--sp; FrameEntry *fe = --sp;
if (!fe) if (!fe->isTracked())
return; return;
forgetAllRegs(fe); forgetAllRegs(fe);
@ -172,7 +177,8 @@ FrameState::pop()
inline void inline void
FrameState::freeReg(RegisterID reg) FrameState::freeReg(RegisterID reg)
{ {
JS_ASSERT(regstate[reg].fe == NULL); JS_ASSERT(!regstate[reg].usedBy());
freeRegs.putReg(reg); freeRegs.putReg(reg);
} }
@ -183,28 +189,30 @@ FrameState::forgetReg(RegisterID reg)
* Important: Do not touch the fe here. We can peephole optimize away * Important: Do not touch the fe here. We can peephole optimize away
* loads and stores by re-using the contents of old FEs. * loads and stores by re-using the contents of old FEs.
*/ */
JS_ASSERT_IF(regstate[reg].fe, !regstate[reg].fe->isCopy()); JS_ASSERT_IF(regstate[reg].fe(), !regstate[reg].fe()->isCopy());
freeRegs.putReg(reg);
if (!regstate[reg].isPinned()) {
regstate[reg].forget();
freeRegs.putReg(reg);
}
} }
inline void inline void
FrameState::forgetEverything(uint32 newStackDepth) FrameState::syncAndForgetEverything(uint32 newStackDepth)
{ {
forgetEverything(); syncAndForgetEverything();
sp = spBase + newStackDepth; sp = spBase + newStackDepth;
} }
inline FrameEntry * inline FrameEntry *
FrameState::rawPush() FrameState::rawPush()
{ {
JS_ASSERT(unsigned(sp - base) < nargs + script->nslots); JS_ASSERT(unsigned(sp - entries) < nargs + script->nslots);
sp++; if (!sp->isTracked())
addToTracker(sp);
if (FrameEntry *fe = sp[-1]) return sp++;
return fe;
return addToTracker(&sp[-1] - base);
} }
inline void inline void
@ -217,10 +225,9 @@ FrameState::push(const Value &v)
inline void inline void
FrameState::pushSynced() FrameState::pushSynced()
{ {
if (sp->isTracked())
sp->resetSynced();
sp++; sp++;
if (FrameEntry *fe = sp[-1])
fe->resetSynced();
} }
inline void inline void
@ -242,7 +249,7 @@ FrameState::pushSynced(JSValueType type, RegisterID reg)
fe->data.sync(); fe->data.sync();
fe->setType(type); fe->setType(type);
fe->data.setRegister(reg); fe->data.setRegister(reg);
regstate[reg] = RegisterState(fe, RematInfo::DATA); regstate[reg].associate(fe, RematInfo::DATA);
} }
inline void inline void
@ -281,8 +288,8 @@ FrameState::pushRegs(RegisterID type, RegisterID data)
fe->resetUnsynced(); fe->resetUnsynced();
fe->type.setRegister(type); fe->type.setRegister(type);
fe->data.setRegister(data); fe->data.setRegister(data);
regstate[type] = RegisterState(fe, RematInfo::TYPE); regstate[type].associate(fe, RematInfo::TYPE);
regstate[data] = RegisterState(fe, RematInfo::DATA); regstate[data].associate(fe, RematInfo::DATA);
} }
inline void inline void
@ -295,7 +302,7 @@ FrameState::pushTypedPayload(JSValueType type, RegisterID payload)
fe->resetUnsynced(); fe->resetUnsynced();
fe->setType(type); fe->setType(type);
fe->data.setRegister(payload); fe->data.setRegister(payload);
regstate[payload] = RegisterState(fe, RematInfo::DATA); regstate[payload].associate(fe, RematInfo::DATA);
} }
inline void inline void
@ -320,7 +327,7 @@ FrameState::pushNumber(MaybeRegisterID payload, bool asInt32)
if (payload.isSet()) { if (payload.isSet()) {
fe->data.unsync(); fe->data.unsync();
fe->data.setRegister(payload.reg()); fe->data.setRegister(payload.reg());
regstate[payload.reg()] = RegisterState(fe, RematInfo::DATA); regstate[payload.reg()].associate(fe, RematInfo::DATA);
} else { } else {
fe->data.setMemory(); fe->data.setMemory();
} }
@ -339,7 +346,7 @@ FrameState::pushInt32(RegisterID payload)
fe->isNumber = true; fe->isNumber = true;
fe->data.unsync(); fe->data.unsync();
fe->data.setRegister(payload); fe->data.setRegister(payload);
regstate[payload] = RegisterState(fe, RematInfo::DATA); regstate[payload].associate(fe, RematInfo::DATA);
} }
inline void inline void
@ -362,13 +369,13 @@ FrameState::pushUntypedPayload(JSValueType type, RegisterID payload)
fe->setNotCopied(); fe->setNotCopied();
fe->setCopyOf(NULL); fe->setCopyOf(NULL);
fe->data.setRegister(payload); fe->data.setRegister(payload);
regstate[payload] = RegisterState(fe, RematInfo::DATA); regstate[payload].associate(fe, RematInfo::DATA);
} }
inline JSC::MacroAssembler::RegisterID inline JSC::MacroAssembler::RegisterID
FrameState::tempRegForType(FrameEntry *fe, RegisterID fallback) FrameState::tempRegForType(FrameEntry *fe, RegisterID fallback)
{ {
JS_ASSERT(regstate[fallback].fe == NULL); JS_ASSERT(!regstate[fallback].fe());
if (fe->isCopy()) if (fe->isCopy())
fe = fe->copyOf(); fe = fe->copyOf();
@ -435,7 +442,7 @@ FrameState::tempRegInMaskForData(FrameEntry *fe, uint32 mask)
return old; return old;
/* Keep the old register pinned. */ /* Keep the old register pinned. */
regstate[old].fe = NULL; regstate[old].forget();
reg = allocReg(mask); reg = allocReg(mask);
masm.move(old, reg); masm.move(old, reg);
freeReg(old); freeReg(old);
@ -443,7 +450,7 @@ FrameState::tempRegInMaskForData(FrameEntry *fe, uint32 mask)
reg = allocReg(mask); reg = allocReg(mask);
masm.loadPayload(addressOf(fe), reg); masm.loadPayload(addressOf(fe), reg);
} }
regstate[reg] = RegisterState(fe, RematInfo::DATA); regstate[reg].associate(fe, RematInfo::DATA);
fe->data.setRegister(reg); fe->data.setRegister(reg);
return reg; return reg;
} }
@ -618,28 +625,31 @@ inline FrameEntry *
FrameState::getLocal(uint32 slot) FrameState::getLocal(uint32 slot)
{ {
uint32 index = nargs + slot; uint32 index = nargs + slot;
if (FrameEntry *fe = base[index]) FrameEntry *fe = &entries[index];
return fe; if (!fe->isTracked()) {
FrameEntry *fe = addToTracker(index); addToTracker(fe);
fe->resetSynced(); fe->resetSynced();
}
return fe; return fe;
} }
inline void inline void
FrameState::pinReg(RegisterID reg) FrameState::pinReg(RegisterID reg)
{ {
JS_ASSERT(!freeRegs.hasReg(reg)); regstate[reg].pin();
JS_ASSERT(regstate[reg].fe);
regstate[reg].save = regstate[reg].fe;
regstate[reg].fe = NULL;
} }
inline void inline void
FrameState::unpinReg(RegisterID reg) FrameState::unpinReg(RegisterID reg)
{ {
JS_ASSERT(!freeRegs.hasReg(reg)); regstate[reg].unpin();
JS_ASSERT(!regstate[reg].fe); }
regstate[reg].fe = regstate[reg].save;
inline void
FrameState::unpinKilledReg(RegisterID reg)
{
regstate[reg].unpinUnsafe();
freeRegs.putReg(reg);
} }
inline void inline void
@ -651,12 +661,6 @@ FrameState::forgetAllRegs(FrameEntry *fe)
forgetReg(fe->data.reg()); forgetReg(fe->data.reg());
} }
inline FrameEntry *
FrameState::tosFe() const
{
return &entries[uint32(sp - base)];
}
inline void inline void
FrameState::swapInTracker(FrameEntry *lhs, FrameEntry *rhs) FrameState::swapInTracker(FrameEntry *lhs, FrameEntry *rhs)
{ {
@ -711,8 +715,8 @@ FrameState::pushLocal(uint32 n)
* SETLOCAL equivocation of stack slots, and let expressions, just * SETLOCAL equivocation of stack slots, and let expressions, just
* weakly assert on the fixed local vars. * weakly assert on the fixed local vars.
*/ */
FrameEntry *fe = base[localIndex(n)]; FrameEntry *fe = &locals[n];
if (fe && n < script->nfixed) { if (fe->isTracked() && n < script->nfixed) {
JS_ASSERT(fe->type.inMemory()); JS_ASSERT(fe->type.inMemory());
JS_ASSERT(fe->data.inMemory()); JS_ASSERT(fe->data.inMemory());
} }

View file

@ -64,19 +64,17 @@ FrameState::init(uint32 nargs)
uint32 nslots = script->nslots + nargs; uint32 nslots = script->nslots + nargs;
if (!nslots) { if (!nslots) {
sp = spBase = locals = args = base = NULL; sp = spBase = locals = args = NULL;
return true; return true;
} }
uint32 nlocals = script->nslots; eval = script->usesEval || cx->compartment->debugMode;
if ((eval = script->usesEval || cx->compartment->debugMode))
nlocals = 0;
uint8 *cursor = (uint8 *)cx->malloc(sizeof(FrameEntry) * nslots + // entries[] size_t totalBytes = sizeof(FrameEntry) * nslots + // entries[]
sizeof(FrameEntry *) * nslots + // base[] sizeof(FrameEntry *) * nslots + // tracker.entries
sizeof(FrameEntry *) * nslots + // tracker.entries[] (eval ? 0 : sizeof(uint32) * nslots); // closedVars[]
sizeof(uint32) * nlocals // escaping[]
); uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
if (!cursor) if (!cursor)
return false; return false;
@ -86,22 +84,21 @@ FrameState::init(uint32 nargs)
entries = (FrameEntry *)cursor; entries = (FrameEntry *)cursor;
cursor += sizeof(FrameEntry) * nslots; cursor += sizeof(FrameEntry) * nslots;
base = (FrameEntry **)cursor; args = entries;
args = base; locals = args + nargs;
locals = base + nargs;
spBase = locals + script->nfixed; spBase = locals + script->nfixed;
sp = spBase; sp = spBase;
memset(base, 0, sizeof(FrameEntry *) * nslots);
cursor += sizeof(FrameEntry *) * nslots;
tracker.entries = (FrameEntry **)cursor; tracker.entries = (FrameEntry **)cursor;
cursor += sizeof(FrameEntry *) * nslots; cursor += sizeof(FrameEntry *) * nslots;
if (nlocals) { if (!eval && nslots) {
escaping = (uint32 *)cursor; escaping = (uint32 *)cursor;
memset(escaping, 0, sizeof(uint32) * nlocals); cursor += sizeof(uint32) * nslots;
} }
JS_ASSERT(reinterpret_cast<uint8 *>(entries) + totalBytes == cursor);
return true; return true;
} }
@ -110,19 +107,20 @@ FrameState::takeReg(RegisterID reg)
{ {
if (freeRegs.hasReg(reg)) { if (freeRegs.hasReg(reg)) {
freeRegs.takeReg(reg); freeRegs.takeReg(reg);
JS_ASSERT(!regstate[reg].usedBy());
} else { } else {
JS_ASSERT(regstate[reg].fe); JS_ASSERT(regstate[reg].fe());
evictReg(reg); evictReg(reg);
regstate[reg].forget();
} }
regstate[reg].fe = NULL;
} }
void void
FrameState::evictReg(RegisterID reg) FrameState::evictReg(RegisterID reg)
{ {
FrameEntry *fe = regstate[reg].fe; FrameEntry *fe = regstate[reg].fe();
if (regstate[reg].type == RematInfo::TYPE) { if (regstate[reg].type() == RematInfo::TYPE) {
if (!fe->type.synced()) { if (!fe->type.synced()) {
syncType(fe, addressOf(fe), masm); syncType(fe, addressOf(fe), masm);
fe->type.sync(); fe->type.sync();
@ -153,7 +151,7 @@ FrameState::evictSomeReg(uint32 mask)
continue; continue;
/* Register is not owned by the FrameState. */ /* Register is not owned by the FrameState. */
FrameEntry *fe = regstate[i].fe; FrameEntry *fe = regstate[i].fe();
if (!fe) if (!fe)
continue; continue;
@ -163,11 +161,11 @@ FrameState::evictSomeReg(uint32 mask)
#endif #endif
fallback = reg; fallback = reg;
if (regstate[i].type == RematInfo::TYPE && fe->type.synced()) { if (regstate[i].type() == RematInfo::TYPE && fe->type.synced()) {
fe->type.setMemory(); fe->type.setMemory();
return fallback; return fallback;
} }
if (regstate[i].type == RematInfo::DATA && fe->data.synced()) { if (regstate[i].type() == RematInfo::DATA && fe->data.synced()) {
fe->data.setMemory(); fe->data.setMemory();
return fallback; return fallback;
} }
@ -181,24 +179,42 @@ FrameState::evictSomeReg(uint32 mask)
void void
FrameState::forgetEverything() FrameState::syncAndForgetEverything()
{ {
syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth())); syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
forgetEverything();
throwaway();
} }
void void
FrameState::throwaway() FrameState::resetInternalState()
{ {
for (uint32 i = 0; i < tracker.nentries; i++) for (uint32 i = 0; i < tracker.nentries; i++)
base[indexOfFe(tracker[i])] = NULL; tracker[i]->untrack();
tracker.reset(); tracker.reset();
freeRegs.reset(); freeRegs.reset();
} }
void
FrameState::discardFrame()
{
resetInternalState();
memset(regstate, 0, sizeof(regstate));
}
void
FrameState::forgetEverything()
{
resetInternalState();
#ifdef DEBUG
for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
JS_ASSERT(!regstate[i].usedBy());
}
#endif
}
void void
FrameState::storeTo(FrameEntry *fe, Address address, bool popped) FrameState::storeTo(FrameEntry *fe, Address address, bool popped)
{ {
@ -253,46 +269,49 @@ FrameState::assertValidRegisterState() const
{ {
Registers checkedFreeRegs; Registers checkedFreeRegs;
FrameEntry *tos = tosFe();
for (uint32 i = 0; i < tracker.nentries; i++) { for (uint32 i = 0; i < tracker.nentries; i++) {
FrameEntry *fe = tracker[i]; FrameEntry *fe = tracker[i];
if (fe >= tos) if (fe >= sp)
continue; continue;
JS_ASSERT(i == fe->trackerIndex()); JS_ASSERT(i == fe->trackerIndex());
JS_ASSERT_IF(fe->isCopy(), JS_ASSERT_IF(fe->isCopy(),
fe->trackerIndex() > fe->copyOf()->trackerIndex()); fe->trackerIndex() > fe->copyOf()->trackerIndex());
JS_ASSERT_IF(fe->isCopy(), fe > fe->copyOf());
JS_ASSERT_IF(fe->isCopy(), !fe->type.inRegister() && !fe->data.inRegister()); JS_ASSERT_IF(fe->isCopy(), !fe->type.inRegister() && !fe->data.inRegister());
JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < tos); JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < sp);
JS_ASSERT_IF(fe->isCopy(), fe->copyOf()->isCopied()); JS_ASSERT_IF(fe->isCopy(), fe->copyOf()->isCopied());
if (fe->isCopy()) if (fe->isCopy())
continue; continue;
if (fe->type.inRegister()) { if (fe->type.inRegister()) {
checkedFreeRegs.takeReg(fe->type.reg()); checkedFreeRegs.takeReg(fe->type.reg());
JS_ASSERT(regstate[fe->type.reg()].fe == fe); JS_ASSERT(regstate[fe->type.reg()].fe() == fe);
} }
if (fe->data.inRegister()) { if (fe->data.inRegister()) {
checkedFreeRegs.takeReg(fe->data.reg()); checkedFreeRegs.takeReg(fe->data.reg());
JS_ASSERT(regstate[fe->data.reg()].fe == fe); JS_ASSERT(regstate[fe->data.reg()].fe() == fe);
} }
} }
JS_ASSERT(checkedFreeRegs == freeRegs); JS_ASSERT(checkedFreeRegs == freeRegs);
for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
JS_ASSERT(!regstate[i].isPinned());
JS_ASSERT_IF(regstate[i].fe(), !freeRegs.hasReg(RegisterID(i)));
JS_ASSERT_IF(regstate[i].fe(), regstate[i].fe()->isTracked());
}
} }
#endif #endif
void void
FrameState::syncFancy(Assembler &masm, Registers avail, uint32 resumeAt, FrameState::syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
FrameEntry *bottom) const FrameEntry *bottom) const
{ {
/* :TODO: can be resumeAt? */ reifier.reset(&masm, avail, resumeAt, bottom);
reifier.reset(&masm, avail, tracker.nentries, bottom);
FrameEntry *tos = tosFe(); for (FrameEntry *fe = resumeAt; fe >= bottom; fe--) {
for (uint32 i = resumeAt; i < tracker.nentries; i--) { if (!fe->isTracked())
FrameEntry *fe = tracker[i];
if (fe >= tos)
continue; continue;
reifier.sync(fe); reifier.sync(fe);
@ -302,6 +321,29 @@ FrameState::syncFancy(Assembler &masm, Registers avail, uint32 resumeAt,
void void
FrameState::sync(Assembler &masm, Uses uses) const FrameState::sync(Assembler &masm, Uses uses) const
{ {
if (!entries)
return;
/* Sync all registers up-front. */
for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
RegisterID reg = RegisterID(i);
FrameEntry *fe = regstate[reg].usedBy();
if (!fe)
continue;
JS_ASSERT(fe->isTracked());
if (regstate[reg].type() == RematInfo::DATA) {
JS_ASSERT(fe->data.reg() == reg);
if (!fe->data.synced())
syncData(fe, addressOf(fe), masm);
} else {
JS_ASSERT(fe->type.reg() == reg);
if (!fe->type.synced())
syncType(fe, addressOf(fe), masm);
}
}
/* /*
* Keep track of free registers using a bitmask. If we have to drop into * Keep track of free registers using a bitmask. If we have to drop into
* syncFancy(), then this mask will help avoid eviction. * syncFancy(), then this mask will help avoid eviction.
@ -309,35 +351,33 @@ FrameState::sync(Assembler &masm, Uses uses) const
Registers avail(freeRegs); Registers avail(freeRegs);
Registers temp(Registers::TempRegs); Registers temp(Registers::TempRegs);
FrameEntry *tos = tosFe(); FrameEntry *bottom = sp - uses.nuses;
FrameEntry *bottom = tos - uses.nuses;
if (inTryBlock) for (FrameEntry *fe = sp - 1; fe >= bottom; fe--) {
bottom = NULL; if (!fe->isTracked())
for (uint32 i = tracker.nentries - 1; i < tracker.nentries; i--) {
FrameEntry *fe = tracker[i];
if (fe >= tos)
continue; continue;
Address address = addressOf(fe); Address address = addressOf(fe);
if (!fe->isCopy()) { if (!fe->isCopy()) {
/* Keep track of registers that can be clobbered. */ /*
if (fe->data.inRegister()) * If this |fe| has registers, track them as available. They've
* already been synced. Otherwise, see if a constant needs to be
* synced.
*/
if (fe->data.inRegister()) {
avail.putReg(fe->data.reg()); avail.putReg(fe->data.reg());
if (fe->type.inRegister()) } else if (!fe->data.synced()) {
avail.putReg(fe->type.reg());
/* Sync. */
if (!fe->data.synced() && (fe->data.inRegister() || fe >= bottom)) {
syncData(fe, address, masm); syncData(fe, address, masm);
if (fe->isConstant()) if (fe->isConstant())
continue; continue;
} }
if (!fe->type.synced() && (fe->type.inRegister() || fe >= bottom))
syncType(fe, addressOf(fe), masm); if (fe->type.inRegister())
} else if (fe >= bottom) { avail.putReg(fe->type.reg());
else if (!fe->type.synced())
syncType(fe, address, masm);
} else {
FrameEntry *backing = fe->copyOf(); FrameEntry *backing = fe->copyOf();
JS_ASSERT(backing != fe); JS_ASSERT(backing != fe);
JS_ASSERT(!backing->isConstant() && !fe->isConstant()); JS_ASSERT(!backing->isConstant() && !fe->isConstant());
@ -348,7 +388,7 @@ FrameState::sync(Assembler &masm, Uses uses) const
*/ */
if ((!fe->type.synced() && !backing->type.inRegister()) || if ((!fe->type.synced() && !backing->type.inRegister()) ||
(!fe->data.synced() && !backing->data.inRegister())) { (!fe->data.synced() && !backing->data.inRegister())) {
syncFancy(masm, avail, i, bottom); syncFancy(masm, avail, fe, bottom);
return; return;
} }
@ -371,93 +411,130 @@ FrameState::sync(Assembler &masm, Uses uses) const
void void
FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore) FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
{ {
/* Backwards, so we can allocate registers to backing slots better. */ FrameEntry *spStop = sp - ignore.nuses;
FrameEntry *tos = tosFe();
FrameEntry *bottom = tos - uses.nuses;
tos -= ignore.nuses; /* Sync all kill-registers up-front. */
Registers search(kill.freeMask & ~freeRegs.freeMask);
while (!search.empty()) {
RegisterID reg = search.takeAnyReg();
FrameEntry *fe = regstate[reg].usedBy();
if (!fe || fe >= spStop)
continue;
if (inTryBlock) JS_ASSERT(fe->isTracked());
bottom = NULL;
for (uint32 i = tracker.nentries - 1; i < tracker.nentries; i--) { if (regstate[reg].type() == RematInfo::DATA) {
FrameEntry *fe = tracker[i]; JS_ASSERT(fe->data.reg() == reg);
if (fe >= tos) if (!fe->data.synced()) {
syncData(fe, addressOf(fe), masm);
fe->data.sync();
}
} else {
JS_ASSERT(fe->type.reg() == reg);
if (!fe->type.synced()) {
syncType(fe, addressOf(fe), masm);
fe->type.sync();
}
}
}
uint32 maxvisits = tracker.nentries;
FrameEntry *bottom = sp - uses.nuses;
for (FrameEntry *fe = sp - 1; fe >= bottom && maxvisits; fe--) {
if (!fe->isTracked())
continue;
maxvisits--;
if (fe >= spStop)
continue; continue;
Address address = addressOf(fe); Address address = addressOf(fe);
FrameEntry *backing = fe; FrameEntry *backing = fe;
if (fe->isCopy()) {
if (!inTryBlock && fe < bottom) if (fe->isCopy())
continue;
backing = fe->copyOf(); backing = fe->copyOf();
}
JS_ASSERT_IF(i == 0, !fe->isCopy()); if (!fe->data.synced()) {
bool killData = fe->data.inRegister() && kill.hasReg(fe->data.reg());
if (!fe->data.synced() && (killData || fe >= bottom)) {
if (backing != fe && backing->data.inMemory()) if (backing != fe && backing->data.inMemory())
tempRegForData(backing); tempRegForData(backing);
syncData(backing, address, masm); syncData(backing, address, masm);
fe->data.sync(); fe->data.sync();
if (fe->isConstant() && !fe->type.synced()) if (fe->isConstant() && !fe->type.synced()) {
fe->type.sync(); fe->type.sync();
} } else if (fe->data.inRegister() && kill.hasReg(fe->data.reg())) {
if (killData) {
JS_ASSERT(backing == fe);
JS_ASSERT(fe->data.synced());
if (regstate[fe->data.reg()].fe)
forgetReg(fe->data.reg()); forgetReg(fe->data.reg());
fe->data.setMemory(); fe->data.setMemory();
}
} }
bool killType = fe->type.inRegister() && kill.hasReg(fe->type.reg()); if (!fe->type.synced()) {
if (!fe->type.synced() && (killType || fe >= bottom)) {
if (backing != fe && backing->type.inMemory()) if (backing != fe && backing->type.inMemory())
tempRegForType(backing); tempRegForType(backing);
syncType(backing, address, masm); syncType(backing, address, masm);
fe->type.sync(); fe->type.sync();
} if (fe->type.inRegister() && kill.hasReg(fe->type.reg())) {
if (killType) {
JS_ASSERT(backing == fe);
JS_ASSERT(fe->type.synced());
if (regstate[fe->type.reg()].fe)
forgetReg(fe->type.reg()); forgetReg(fe->type.reg());
fe->type.setMemory(); fe->type.setMemory();
}
} }
} }
}
void /*
FrameState::resetRegState() * Anything still alive at this point is guaranteed to be synced. However,
{ * it is necessary to evict temporary registers.
freeRegs = Registers(); */
search = Registers(kill.freeMask & ~freeRegs.freeMask);
while (!search.empty()) {
RegisterID reg = search.takeAnyReg();
FrameEntry *fe = regstate[reg].usedBy();
if (!fe || fe >= spStop)
continue;
JS_ASSERT(fe->isTracked());
if (regstate[reg].type() == RematInfo::DATA) {
JS_ASSERT(fe->data.reg() == reg);
JS_ASSERT(fe->data.synced());
fe->data.setMemory();
} else {
JS_ASSERT(fe->type.reg() == reg);
JS_ASSERT(fe->type.synced());
fe->type.setMemory();
}
forgetReg(reg);
}
} }
void void
FrameState::merge(Assembler &masm, Changes changes) const FrameState::merge(Assembler &masm, Changes changes) const
{ {
FrameEntry *tos = tosFe(); Registers search(Registers::AvailRegs & ~freeRegs.freeMask);
Registers temp(Registers::TempRegs);
for (uint32 i = 0; i < tracker.nentries; i++) { while (!search.empty()) {
FrameEntry *fe = tracker[i]; RegisterID reg = search.peekReg();
if (fe >= tos) FrameEntry *fe = regstate[reg].usedBy();
continue;
/* Copies do not have registers. */ if (!fe) {
if (fe->isCopy()) { search.takeReg(reg);
JS_ASSERT(!fe->data.inRegister());
JS_ASSERT(!fe->type.inRegister());
continue; continue;
} }
if (fe->data.inRegister() && fe->type.inRegister()) if (fe->data.inRegister() && fe->type.inRegister()) {
search.takeReg(fe->data.reg());
search.takeReg(fe->type.reg());
masm.loadValueAsComponents(addressOf(fe), fe->type.reg(), fe->data.reg()); masm.loadValueAsComponents(addressOf(fe), fe->type.reg(), fe->data.reg());
else if (fe->data.inRegister()) } else {
masm.loadPayload(addressOf(fe), fe->data.reg()); if (fe->data.inRegister()) {
else if (fe->type.inRegister()) search.takeReg(fe->data.reg());
masm.loadTypeTag(addressOf(fe), fe->type.reg()); masm.loadPayload(addressOf(fe), fe->data.reg());
}
if (fe->type.inRegister()) {
search.takeReg(fe->type.reg());
masm.loadTypeTag(addressOf(fe), fe->type.reg());
}
}
} }
} }
@ -488,9 +565,9 @@ FrameState::copyDataIntoReg(FrameEntry *fe, RegisterID hint)
reg = allocReg(); reg = allocReg();
masm.move(hint, reg); masm.move(hint, reg);
fe->data.setRegister(reg); fe->data.setRegister(reg);
regstate[reg] = regstate[hint]; regstate[reg].associate(regstate[hint].fe(), RematInfo::DATA);
} }
regstate[hint].fe = NULL; regstate[hint].forget();
} else { } else {
pinReg(reg); pinReg(reg);
takeReg(hint); takeReg(hint);
@ -513,7 +590,7 @@ FrameState::copyDataIntoReg(Assembler &masm, FrameEntry *fe)
if (!fe->data.synced()) if (!fe->data.synced())
syncData(fe, addressOf(fe), masm); syncData(fe, addressOf(fe), masm);
fe->data.setMemory(); fe->data.setMemory();
regstate[reg].fe = NULL; regstate[reg].forget();
} else { } else {
RegisterID newReg = allocReg(); RegisterID newReg = allocReg();
masm.move(reg, newReg); masm.move(reg, newReg);
@ -546,7 +623,7 @@ FrameState::copyTypeIntoReg(FrameEntry *fe)
if (!fe->type.synced()) if (!fe->type.synced())
syncType(fe, addressOf(fe), masm); syncType(fe, addressOf(fe), masm);
fe->type.setMemory(); fe->type.setMemory();
regstate[reg].fe = NULL; regstate[reg].forget();
} else { } else {
RegisterID newReg = allocReg(); RegisterID newReg = allocReg();
masm.move(reg, newReg); masm.move(reg, newReg);
@ -633,7 +710,7 @@ FrameState::ownRegForType(FrameEntry *fe)
syncType(backing, addressOf(backing), masm); syncType(backing, addressOf(backing), masm);
reg = backing->type.reg(); reg = backing->type.reg();
backing->type.setMemory(); backing->type.setMemory();
moveOwnership(reg, NULL); regstate[reg].forget();
} else { } else {
reg = allocReg(); reg = allocReg();
masm.move(backing->type.reg(), reg); masm.move(backing->type.reg(), reg);
@ -643,10 +720,11 @@ FrameState::ownRegForType(FrameEntry *fe)
if (fe->type.inRegister()) { if (fe->type.inRegister()) {
reg = fe->type.reg(); reg = fe->type.reg();
/* Remove ownership of this register. */ /* Remove ownership of this register. */
JS_ASSERT(regstate[reg].fe == fe); JS_ASSERT(regstate[reg].fe() == fe);
JS_ASSERT(regstate[reg].type == RematInfo::TYPE); JS_ASSERT(regstate[reg].type() == RematInfo::TYPE);
regstate[reg].fe = NULL; regstate[reg].forget();
fe->type.invalidate(); fe->type.invalidate();
} else { } else {
JS_ASSERT(fe->type.inMemory()); JS_ASSERT(fe->type.inMemory());
@ -676,7 +754,7 @@ FrameState::ownRegForData(FrameEntry *fe)
syncData(backing, addressOf(backing), masm); syncData(backing, addressOf(backing), masm);
reg = backing->data.reg(); reg = backing->data.reg();
backing->data.setMemory(); backing->data.setMemory();
moveOwnership(reg, NULL); regstate[reg].forget();
} else { } else {
reg = allocReg(); reg = allocReg();
masm.move(backing->data.reg(), reg); masm.move(backing->data.reg(), reg);
@ -696,9 +774,9 @@ FrameState::ownRegForData(FrameEntry *fe)
if (fe->data.inRegister()) { if (fe->data.inRegister()) {
reg = fe->data.reg(); reg = fe->data.reg();
/* Remove ownership of this register. */ /* Remove ownership of this register. */
JS_ASSERT(regstate[reg].fe == fe); JS_ASSERT(regstate[reg].fe() == fe);
JS_ASSERT(regstate[reg].type == RematInfo::DATA); JS_ASSERT(regstate[reg].type() == RematInfo::DATA);
regstate[reg].fe = NULL; regstate[reg].forget();
fe->data.invalidate(); fe->data.invalidate();
} else { } else {
JS_ASSERT(fe->data.inMemory()); JS_ASSERT(fe->data.inMemory());
@ -739,34 +817,14 @@ FrameState::pushCopyOf(uint32 index)
} }
FrameEntry * FrameEntry *
FrameState::uncopy(FrameEntry *original) FrameState::walkTrackerForUncopy(FrameEntry *original)
{ {
JS_ASSERT(original->isCopied());
/*
* Copies have two critical invariants:
* 1) The backing store precedes all copies in the tracker.
* 2) The backing store of a copy cannot be popped from the stack
* while the copy is still live.
*
* Maintaining this invariant iteratively is kind of hard, so we choose
* the "lowest" copy in the frame up-front.
*
* For example, if the stack is:
* [A, B, C, D]
* And the tracker has:
* [A, D, C, B]
*
* If B, C, and D are copies of A - we will walk the tracker to the end
* and select D, not B (see bug 583684).
*/
uint32 firstCopy = InvalidIndex; uint32 firstCopy = InvalidIndex;
FrameEntry *tos = tosFe();
FrameEntry *bestFe = NULL; FrameEntry *bestFe = NULL;
uint32 ncopies = 0; uint32 ncopies = 0;
for (uint32 i = 0; i < tracker.nentries; i++) { for (uint32 i = original->trackerIndex() + 1; i < tracker.nentries; i++) {
FrameEntry *fe = tracker[i]; FrameEntry *fe = tracker[i];
if (fe >= tos) if (fe >= sp)
continue; continue;
if (fe->isCopy() && fe->copyOf() == original) { if (fe->isCopy() && fe->copyOf() == original) {
if (firstCopy == InvalidIndex) { if (firstCopy == InvalidIndex) {
@ -782,12 +840,12 @@ FrameState::uncopy(FrameEntry *original)
if (!ncopies) { if (!ncopies) {
JS_ASSERT(firstCopy == InvalidIndex); JS_ASSERT(firstCopy == InvalidIndex);
JS_ASSERT(!bestFe); JS_ASSERT(!bestFe);
original->copied = false;
return NULL; return NULL;
} }
JS_ASSERT(firstCopy != InvalidIndex); JS_ASSERT(firstCopy != InvalidIndex);
JS_ASSERT(bestFe); JS_ASSERT(bestFe);
JS_ASSERT(bestFe > original);
/* Mark all extra copies as copies of the new backing index. */ /* Mark all extra copies as copies of the new backing index. */
bestFe->setCopyOf(NULL); bestFe->setCopyOf(NULL);
@ -795,7 +853,7 @@ FrameState::uncopy(FrameEntry *original)
bestFe->setCopied(); bestFe->setCopied();
for (uint32 i = firstCopy; i < tracker.nentries; i++) { for (uint32 i = firstCopy; i < tracker.nentries; i++) {
FrameEntry *other = tracker[i]; FrameEntry *other = tracker[i];
if (other >= tos || other == bestFe) if (other >= sp || other == bestFe)
continue; continue;
/* The original must be tracked before copies. */ /* The original must be tracked before copies. */
@ -820,7 +878,80 @@ FrameState::uncopy(FrameEntry *original)
bestFe->setNotCopied(); bestFe->setNotCopied();
} }
FrameEntry *fe = bestFe; return bestFe;
}
FrameEntry *
FrameState::walkFrameForUncopy(FrameEntry *original)
{
FrameEntry *bestFe = NULL;
uint32 ncopies = 0;
/* It's only necessary to visit as many FEs are being tracked. */
uint32 maxvisits = tracker.nentries;
for (FrameEntry *fe = original + 1; fe < sp && maxvisits; fe++) {
if (!fe->isTracked())
continue;
maxvisits--;
if (fe->isCopy() && fe->copyOf() == original) {
if (!bestFe) {
bestFe = fe;
bestFe->setCopyOf(NULL);
} else {
fe->setCopyOf(bestFe);
if (fe->trackerIndex() < bestFe->trackerIndex())
swapInTracker(bestFe, fe);
}
ncopies++;
}
}
if (ncopies)
bestFe->setCopied();
return bestFe;
}
FrameEntry *
FrameState::uncopy(FrameEntry *original)
{
JS_ASSERT(original->isCopied());
/*
* Copies have three critical invariants:
* 1) The backing store precedes all copies in the tracker.
* 2) The backing store precedes all copies in the FrameState.
* 3) The backing store of a copy cannot be popped from the stack
* while the copy is still live.
*
* Maintaining this invariant iteratively is kind of hard, so we choose
* the "lowest" copy in the frame up-front.
*
* For example, if the stack is:
* [A, B, C, D]
* And the tracker has:
* [A, D, C, B]
*
* If B, C, and D are copies of A - we will walk the tracker to the end
* and select B, not D (see bug 583684).
*
* Note: |tracker.nentries <= (nslots + nargs)|. However, this walk is
* sub-optimal if |tracker.nentries - original->trackerIndex() > sp - original|.
* With large scripts this may be a problem worth investigating. Note that
* the tracker is walked twice, so we multiply by 2 for pessimism.
*/
FrameEntry *fe;
if ((tracker.nentries - original->trackerIndex()) * 2 > uint32(sp - original))
fe = walkFrameForUncopy(original);
else
fe = walkTrackerForUncopy(original);
if (!fe) {
original->setNotCopied();
return NULL;
}
/* /*
* Switch the new backing store to the old backing store. During * Switch the new backing store to the old backing store. During
@ -837,7 +968,7 @@ FrameState::uncopy(FrameEntry *original)
tempRegForType(original); tempRegForType(original);
fe->type.inherit(original->type); fe->type.inherit(original->type);
if (fe->type.inRegister()) if (fe->type.inRegister())
moveOwnership(fe->type.reg(), fe); regstate[fe->type.reg()].reassociate(fe);
} else { } else {
JS_ASSERT(fe->isTypeKnown()); JS_ASSERT(fe->isTypeKnown());
JS_ASSERT(fe->getKnownType() == original->getKnownType()); JS_ASSERT(fe->getKnownType() == original->getKnownType());
@ -846,7 +977,7 @@ FrameState::uncopy(FrameEntry *original)
tempRegForData(original); tempRegForData(original);
fe->data.inherit(original->data); fe->data.inherit(original->data);
if (fe->data.inRegister()) if (fe->data.inRegister())
moveOwnership(fe->data.reg(), fe); regstate[fe->data.reg()].reassociate(fe);
return fe; return fe;
} }
@ -854,52 +985,86 @@ FrameState::uncopy(FrameEntry *original)
void void
FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange) FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
{ {
FrameEntry *localFe = getLocal(n); FrameEntry *local = getLocal(n);
bool cacheable = !eval && !escaping[n];
if (!popGuaranteed && !cacheable) { storeTop(local, popGuaranteed, typeChange);
JS_ASSERT_IF(base[localIndex(n)] && (!eval || n < script->nfixed),
entries[localIndex(n)].type.inMemory() && bool closed = eval || escaping[n];
entries[localIndex(n)].data.inMemory()); if (closed || inTryBlock) {
Address local(JSFrameReg, sizeof(JSStackFrame) + n * sizeof(Value)); /* Ensure that the local variable remains synced. */
storeTo(peek(-1), local, false); if (local->isCopy()) {
forgetAllRegs(getLocal(n)); FrameEntry *backing = local->copyOf();
localFe->resetSynced(); if (!local->data.synced()) {
return; if (backing->data.inMemory())
tempRegForData(backing);
syncData(backing, addressOf(local), masm);
}
if (!local->type.synced()) {
if (backing->type.inMemory())
tempRegForType(backing);
syncType(backing, addressOf(local), masm);
}
} else if (local->isConstant()) {
if (!local->data.synced())
syncData(local, addressOf(local), masm);
} else {
if (!local->data.synced()) {
syncData(local, addressOf(local), masm);
local->data.sync();
}
if (!local->type.synced()) {
syncType(local, addressOf(local), masm);
local->type.sync();
}
if (closed)
forgetEntry(local);
}
if (closed)
local->resetSynced();
} }
}
bool wasSynced = localFe->type.synced(); void
FrameState::forgetEntry(FrameEntry *fe)
{
if (fe->isCopied()) {
uncopy(fe);
if (!fe->isCopied())
forgetAllRegs(fe);
} else {
forgetAllRegs(fe);
}
}
void
FrameState::storeTop(FrameEntry *target, bool popGuaranteed, bool typeChange)
{
bool wasSynced = target->type.synced();
/* Detect something like (x = x) which is a no-op. */ /* Detect something like (x = x) which is a no-op. */
FrameEntry *top = peek(-1); FrameEntry *top = peek(-1);
if (top->isCopy() && top->copyOf() == localFe) { if (top->isCopy() && top->copyOf() == target) {
JS_ASSERT(localFe->isCopied()); JS_ASSERT(target->isCopied());
return; return;
} }
/* Completely invalidate the local variable. */ /* Completely invalidate the local variable. */
if (localFe->isCopied()) { forgetEntry(target);
uncopy(localFe); target->resetUnsynced();
if (!localFe->isCopied())
forgetAllRegs(localFe);
} else {
forgetAllRegs(localFe);
}
localFe->resetUnsynced();
/* Constants are easy to propagate. */ /* Constants are easy to propagate. */
if (top->isConstant()) { if (top->isConstant()) {
localFe->setCopyOf(NULL); target->setCopyOf(NULL);
localFe->setNotCopied(); target->setNotCopied();
localFe->setConstant(Jsvalify(top->getValue())); target->setConstant(Jsvalify(top->getValue()));
return; return;
} }
/* /*
* When dealing with copies, there are two important invariants: * When dealing with copies, there are three important invariants:
* *
* 1) The backing store precedes all copies in the tracker. * 1) The backing store precedes all copies in the tracker.
* 2) The backing store precedes all copies in the FrameState.
* 2) The backing store of a local is never a stack slot, UNLESS the local * 2) The backing store of a local is never a stack slot, UNLESS the local
* variable itself is a stack slot (blocks) that precedes the stack * variable itself is a stack slot (blocks) that precedes the stack
* slot. * slot.
@ -909,24 +1074,23 @@ FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
* condition does not hold, force it to hold by swapping in-place. * condition does not hold, force it to hold by swapping in-place.
*/ */
FrameEntry *backing = top; FrameEntry *backing = top;
bool copied = false;
if (top->isCopy()) { if (top->isCopy()) {
backing = top->copyOf(); backing = top->copyOf();
JS_ASSERT(backing->trackerIndex() < top->trackerIndex()); JS_ASSERT(backing->trackerIndex() < top->trackerIndex());
uint32 backingIndex = indexOfFe(backing); if (backing < target) {
uint32 tol = uint32(spBase - base);
if (backingIndex < tol || backingIndex < localIndex(n)) {
/* local.idx < backing.idx means local cannot be a copy yet */ /* local.idx < backing.idx means local cannot be a copy yet */
if (localFe->trackerIndex() < backing->trackerIndex()) if (target->trackerIndex() < backing->trackerIndex())
swapInTracker(backing, localFe); swapInTracker(backing, target);
localFe->setNotCopied(); target->setNotCopied();
localFe->setCopyOf(backing); target->setCopyOf(backing);
if (backing->isTypeKnown()) if (backing->isTypeKnown())
localFe->setType(backing->getKnownType()); target->setType(backing->getKnownType());
else else
localFe->type.invalidate(); target->type.invalidate();
localFe->data.invalidate(); target->data.invalidate();
localFe->isNumber = backing->isNumber; target->isNumber = backing->isNumber;
return; return;
} }
@ -948,13 +1112,14 @@ FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
* but even so there's a quick workaround. We take all copies of the * but even so there's a quick workaround. We take all copies of the
* backing fe, and redirect them to be copies of the destination. * backing fe, and redirect them to be copies of the destination.
*/ */
FrameEntry *tos = tosFe();
for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) { for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
FrameEntry *fe = tracker[i]; FrameEntry *fe = tracker[i];
if (fe >= tos) if (fe >= sp)
continue; continue;
if (fe->isCopy() && fe->copyOf() == backing) if (fe->isCopy() && fe->copyOf() == backing) {
fe->setCopyOf(localFe); fe->setCopyOf(target);
copied = true;
}
} }
} }
backing->setNotCopied(); backing->setNotCopied();
@ -964,50 +1129,50 @@ FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
* consistent ordering - all copies of |backing| are tracked after * consistent ordering - all copies of |backing| are tracked after
* |backing|. Transitively, only one swap is needed. * |backing|. Transitively, only one swap is needed.
*/ */
if (backing->trackerIndex() < localFe->trackerIndex()) if (backing->trackerIndex() < target->trackerIndex())
swapInTracker(backing, localFe); swapInTracker(backing, target);
/* /*
* Move the backing store down - we spill registers here, but we could be * Move the backing store down - we spill registers here, but we could be
* smarter and re-use the type reg. * smarter and re-use the type reg.
*/ */
RegisterID reg = tempRegForData(backing); RegisterID reg = tempRegForData(backing);
localFe->data.setRegister(reg); target->data.setRegister(reg);
moveOwnership(reg, localFe); regstate[reg].reassociate(target);
if (typeChange) { if (typeChange) {
if (backing->isTypeKnown()) { if (backing->isTypeKnown()) {
localFe->setType(backing->getKnownType()); target->setType(backing->getKnownType());
} else { } else {
RegisterID reg = tempRegForType(backing); RegisterID reg = tempRegForType(backing);
localFe->type.setRegister(reg); target->type.setRegister(reg);
moveOwnership(reg, localFe); regstate[reg].reassociate(target);
} }
} else { } else {
if (!wasSynced) if (!wasSynced)
masm.storeTypeTag(ImmType(backing->getKnownType()), addressOf(localFe)); masm.storeTypeTag(ImmType(backing->getKnownType()), addressOf(target));
localFe->type.setMemory(); target->type.setMemory();
} }
if (!backing->isTypeKnown()) if (!backing->isTypeKnown())
backing->type.invalidate(); backing->type.invalidate();
backing->data.invalidate(); backing->data.invalidate();
backing->setCopyOf(localFe); backing->setCopyOf(target);
backing->isNumber = localFe->isNumber; backing->isNumber = target->isNumber;
localFe->setCopied();
if (!cacheable) { JS_ASSERT(top->copyOf() == target);
/* TODO: x64 optimization */
if (!localFe->type.synced())
syncType(localFe, addressOf(localFe), masm);
if (!localFe->data.synced())
syncData(localFe, addressOf(localFe), masm);
forgetAllRegs(localFe);
localFe->type.setMemory();
localFe->data.setMemory();
}
JS_ASSERT(top->copyOf() == localFe); /*
* Right now, |backing| is a copy of |target| (note the reversal), but
* |target| is not marked as copied. This is an optimization so uncopy()
* may avoid frame traversal.
*
* There are two cases where we must set the copy bit, however:
* - The fixup phase redirected more copies to |target|.
* - An immediate pop is not guaranteed.
*/
if (copied || !popGuaranteed)
target->setCopied();
} }
void void
@ -1015,7 +1180,7 @@ FrameState::shimmy(uint32 n)
{ {
JS_ASSERT(sp - n >= spBase); JS_ASSERT(sp - n >= spBase);
int32 depth = 0 - int32(n); int32 depth = 0 - int32(n);
storeLocal(uint32(&sp[depth - 1] - locals), true); storeTop(&sp[depth - 1], true);
popn(n); popn(n);
} }
@ -1024,7 +1189,7 @@ FrameState::shift(int32 n)
{ {
JS_ASSERT(n < 0); JS_ASSERT(n < 0);
JS_ASSERT(sp + n - 1 >= spBase); JS_ASSERT(sp + n - 1 >= spBase);
storeLocal(uint32(&sp[n - 1] - locals), true); storeTop(&sp[n - 1], true);
pop(); pop();
} }

Просмотреть файл

@ -175,22 +175,103 @@ class FrameState
uint32 nentries; uint32 nentries;
}; };
/*
* Some RegisterState invariants.
*
* If |fe| is non-NULL, |save| is NULL.
* If |save| is non-NULL, |fe| is NULL.
* That is, both |fe| and |save| cannot be non-NULL.
*
* If either |fe| or |save| is non-NULL, the register is not in freeRegs.
* If both |fe| and |save| are NULL, the register is either in freeRegs,
* or owned by the compiler.
*/
struct RegisterState { struct RegisterState {
RegisterState() RegisterState() : fe_(NULL), save_(NULL)
{ } { }
RegisterState(FrameEntry *fe, RematInfo::RematType type) RegisterState(FrameEntry *fe, RematInfo::RematType type)
: fe(fe), type(type) : fe_(fe), save_(NULL), type_(type)
{ } {
JS_ASSERT(!save_);
}
bool isPinned() const {
assertConsistency();
return !!save_;
}
void assertConsistency() const {
JS_ASSERT_IF(fe_, !save_);
JS_ASSERT_IF(save_, !fe_);
}
FrameEntry *fe() const {
assertConsistency();
return fe_;
}
RematInfo::RematType type() const {
assertConsistency();
return type_;
}
FrameEntry *usedBy() const {
if (fe_)
return fe_;
return save_;
}
void associate(FrameEntry *fe, RematInfo::RematType type) {
JS_ASSERT(!fe_);
JS_ASSERT(!save_);
fe_ = fe;
type_ = type;
JS_ASSERT(!save_);
}
/* Change ownership. */
void reassociate(FrameEntry *fe) {
assertConsistency();
JS_ASSERT(fe);
fe_ = fe;
}
/* Unassociate this register from the FE. */
void forget() {
JS_ASSERT(fe_);
fe_ = NULL;
JS_ASSERT(!save_);
}
void pin() {
assertConsistency();
save_ = fe_;
fe_ = NULL;
}
void unpin() {
assertConsistency();
fe_ = save_;
save_ = NULL;
}
void unpinUnsafe() {
assertConsistency();
save_ = NULL;
}
private:
/* FrameEntry owning this register, or NULL if not owned by a frame. */ /* FrameEntry owning this register, or NULL if not owned by a frame. */
FrameEntry *fe; FrameEntry *fe_;
/* Hack - simplifies register allocation for pairs. */ /* Hack - simplifies register allocation for pairs. */
FrameEntry *save; FrameEntry *save_;
/* Part of the FrameEntry that owns the FE. */ /* Part of the FrameEntry that owns the FE. */
RematInfo::RematType type; RematInfo::RematType type_;
}; };
public: public:
@ -505,9 +586,10 @@ class FrameState
void storeTo(FrameEntry *fe, Address address, bool popHint); void storeTo(FrameEntry *fe, Address address, bool popHint);
/* /*
* Stores the top stack slot back to a local variable. * Stores the top stack slot back to a slot.
*/ */
void storeLocal(uint32 n, bool popGuaranteed = false, bool typeChange = true); void storeLocal(uint32 n, bool popGuaranteed = false, bool typeChange = true);
void storeTop(FrameEntry *target, bool popGuaranteed = false, bool typeChange = true);
/* /*
* Restores state from a slow path. * Restores state from a slow path.
@ -526,28 +608,34 @@ class FrameState
void syncAndKill(Registers kill, Uses uses, Uses ignored); void syncAndKill(Registers kill, Uses uses, Uses ignored);
void syncAndKill(Registers kill, Uses uses) { syncAndKill(kill, uses, Uses(0)); } void syncAndKill(Registers kill, Uses uses) { syncAndKill(kill, uses, Uses(0)); }
/* /* Syncs and kills everything. */
* Reset the register state. void syncAndKillEverything() {
*/ syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
void resetRegState(); }
/* /*
* Clear all tracker entries, syncing all outstanding stores in the process. * Clear all tracker entries, syncing all outstanding stores in the process.
* The stack depth is in case some merge points' edges did not immediately * The stack depth is in case some merge points' edges did not immediately
* precede the current instruction. * precede the current instruction.
*/ */
inline void forgetEverything(uint32 newStackDepth); inline void syncAndForgetEverything(uint32 newStackDepth);
/* /*
* Same as above, except the stack depth is not changed. This is used for * Same as above, except the stack depth is not changed. This is used for
* branching opcodes. * branching opcodes.
*/ */
void forgetEverything(); void syncAndForgetEverything();
/* /*
* Throw away the entire frame state, without syncing anything. * Throw away the entire frame state, without syncing anything.
* This can only be called after a syncAndKill() against all registers.
*/ */
void throwaway(); void forgetEverything();
/*
* Discard the entire framestate forcefully.
*/
void discardFrame();
/* /*
* Mark an existing slot with a type. * Mark an existing slot with a type.
@ -603,8 +691,9 @@ class FrameState
/* /*
* Marks a register such that it cannot be spilled by the register * Marks a register such that it cannot be spilled by the register
* allocator. Any pinned registers must be unpinned at the end of the op. * allocator. Any pinned registers must be unpinned at the end of the op,
* Note: This function should only be used on registers tied to FEs. * no matter what. In addition, pinReg() can only be used on registers
* which are associated with FrameEntries.
*/ */
inline void pinReg(RegisterID reg); inline void pinReg(RegisterID reg);
@ -613,6 +702,11 @@ class FrameState
*/ */
inline void unpinReg(RegisterID reg); inline void unpinReg(RegisterID reg);
/*
* Same as unpinReg(), but does not restore the FrameEntry.
*/
inline void unpinKilledReg(RegisterID reg);
/* /*
* Dups the top item on the stack. * Dups the top item on the stack.
*/ */
@ -639,7 +733,6 @@ class FrameState
*/ */
uint32 stackDepth() const { return sp - spBase; } uint32 stackDepth() const { return sp - spBase; }
uint32 frameDepth() const { return stackDepth() + script->nfixed; } uint32 frameDepth() const { return stackDepth() + script->nfixed; }
inline FrameEntry *tosFe() const;
#ifdef DEBUG #ifdef DEBUG
void assertValidRegisterState() const; void assertValidRegisterState() const;
@ -687,7 +780,7 @@ class FrameState
RegisterID evictSomeReg(uint32 mask); RegisterID evictSomeReg(uint32 mask);
void evictReg(RegisterID reg); void evictReg(RegisterID reg);
inline FrameEntry *rawPush(); inline FrameEntry *rawPush();
inline FrameEntry *addToTracker(uint32 index); inline void addToTracker(FrameEntry *fe);
inline void syncType(const FrameEntry *fe, Address to, Assembler &masm) const; inline void syncType(const FrameEntry *fe, Address to, Assembler &masm) const;
inline void syncData(const FrameEntry *fe, Address to, Assembler &masm) const; inline void syncData(const FrameEntry *fe, Address to, Assembler &masm) const;
inline FrameEntry *getLocal(uint32 slot); inline FrameEntry *getLocal(uint32 slot);
@ -695,9 +788,10 @@ class FrameState
inline void swapInTracker(FrameEntry *lhs, FrameEntry *rhs); inline void swapInTracker(FrameEntry *lhs, FrameEntry *rhs);
inline uint32 localIndex(uint32 n); inline uint32 localIndex(uint32 n);
void pushCopyOf(uint32 index); void pushCopyOf(uint32 index);
void syncFancy(Assembler &masm, Registers avail, uint32 resumeAt, void syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
FrameEntry *bottom) const; FrameEntry *bottom) const;
inline bool tryFastDoubleLoad(FrameEntry *fe, FPRegisterID fpReg, Assembler &masm) const; inline bool tryFastDoubleLoad(FrameEntry *fe, FPRegisterID fpReg, Assembler &masm) const;
void resetInternalState();
/* /*
* "Uncopies" the backing store of a FrameEntry that has been copied. The * "Uncopies" the backing store of a FrameEntry that has been copied. The
@ -708,25 +802,29 @@ class FrameState
* Later addition: uncopy() returns the first copy found. * Later addition: uncopy() returns the first copy found.
*/ */
FrameEntry *uncopy(FrameEntry *original); FrameEntry *uncopy(FrameEntry *original);
FrameEntry *walkTrackerForUncopy(FrameEntry *original);
FrameEntry *walkFrameForUncopy(FrameEntry *original);
/*
* All registers in the FE are forgotten. If it is copied, it is uncopied
* beforehand.
*/
void forgetEntry(FrameEntry *fe);
FrameEntry *entryFor(uint32 index) const { FrameEntry *entryFor(uint32 index) const {
JS_ASSERT(base[index]); JS_ASSERT(entries[index].isTracked());
return &entries[index]; return &entries[index];
} }
void moveOwnership(RegisterID reg, FrameEntry *newFe) {
regstate[reg].fe = newFe;
}
RegisterID evictSomeReg() { RegisterID evictSomeReg() {
return evictSomeReg(Registers::AvailRegs); return evictSomeReg(Registers::AvailRegs);
} }
uint32 indexOf(int32 depth) { uint32 indexOf(int32 depth) {
return uint32((sp + depth) - base); return uint32((sp + depth) - entries);
} }
uint32 indexOfFe(FrameEntry *fe) { uint32 indexOfFe(FrameEntry *fe) const {
return uint32(fe - entries); return uint32(fe - entries);
} }
@ -742,20 +840,17 @@ class FrameState
/* Cache of FrameEntry objects. */ /* Cache of FrameEntry objects. */
FrameEntry *entries; FrameEntry *entries;
/* Base pointer of the FrameEntry vector. */
FrameEntry **base;
/* Base pointer for arguments. */ /* Base pointer for arguments. */
FrameEntry **args; FrameEntry *args;
/* Base pointer for local variables. */ /* Base pointer for local variables. */
FrameEntry **locals; FrameEntry *locals;
/* Base pointer for the stack. */ /* Base pointer for the stack. */
FrameEntry **spBase; FrameEntry *spBase;
/* Dynamic stack pointer. */ /* Dynamic stack pointer. */
FrameEntry **sp; FrameEntry *sp;
/* Vector of tracked slot indexes. */ /* Vector of tracked slot indexes. */
Tracker tracker; Tracker tracker;

Просмотреть файл

@ -45,7 +45,7 @@ using namespace js;
using namespace js::mjit; using namespace js::mjit;
ImmutableSync::ImmutableSync(JSContext *cx, const FrameState &frame) ImmutableSync::ImmutableSync(JSContext *cx, const FrameState &frame)
: cx(cx), entries(NULL), frame(frame) : cx(cx), entries(NULL), frame(frame), generation(0)
{ {
} }
@ -57,19 +57,18 @@ ImmutableSync::~ImmutableSync()
bool bool
ImmutableSync::init(uint32 nentries) ImmutableSync::init(uint32 nentries)
{ {
entries = (SyncEntry *)cx->malloc(sizeof(SyncEntry) * nentries); entries = (SyncEntry *)cx->calloc(sizeof(SyncEntry) * nentries);
return !!entries; return !!entries;
} }
void void
ImmutableSync::reset(Assembler *masm, Registers avail, uint32 n, ImmutableSync::reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom)
FrameEntry *bottom)
{ {
this->avail = avail; this->avail = avail;
this->nentries = n;
this->masm = masm; this->masm = masm;
this->top = top;
this->bottom = bottom; this->bottom = bottom;
memset(entries, 0, sizeof(SyncEntry) * nentries); this->generation++;
memset(regs, 0, sizeof(regs)); memset(regs, 0, sizeof(regs));
} }
@ -92,17 +91,10 @@ ImmutableSync::allocReg()
if (!regs[i]) { if (!regs[i]) {
/* If the frame does not own this register, take it! */ /* If the frame does not own this register, take it! */
FrameEntry *fe = frame.regstate[i].fe; FrameEntry *fe = frame.regstate[i].fe();
if (!fe) if (!fe)
return reg; return reg;
/*
* The Reifier does not own this register, but the frame does.
* This must mean that we've not yet processed this entry, and
* that it's data has not been clobbered.
*/
JS_ASSERT(fe->trackerIndex() < nentries);
evictFromFrame = i; evictFromFrame = i;
/* /*
@ -115,18 +107,14 @@ ImmutableSync::allocReg()
} }
if (evictFromFrame != FrameState::InvalidIndex) { if (evictFromFrame != FrameState::InvalidIndex) {
FrameEntry *fe = frame.regstate[evictFromFrame].fe; FrameEntry *fe = frame.regstate[evictFromFrame].fe();
SyncEntry &e = entryFor(fe); SyncEntry &e = entryFor(fe);
if (frame.regstate[evictFromFrame].type == RematInfo::TYPE) { if (frame.regstate[evictFromFrame].type() == RematInfo::TYPE) {
JS_ASSERT(!e.typeClobbered); JS_ASSERT(!e.typeClobbered);
e.typeSynced = true;
e.typeClobbered = true; e.typeClobbered = true;
masm->storeTypeTag(fe->type.reg(), frame.addressOf(fe));
} else { } else {
JS_ASSERT(!e.dataClobbered); JS_ASSERT(!e.dataClobbered);
e.dataSynced = true;
e.dataClobbered = true; e.dataClobbered = true;
masm->storePayload(fe->data.reg(), frame.addressOf(fe));
} }
return RegisterID(evictFromFrame); return RegisterID(evictFromFrame);
} }
@ -150,39 +138,38 @@ ImmutableSync::allocReg()
inline ImmutableSync::SyncEntry & inline ImmutableSync::SyncEntry &
ImmutableSync::entryFor(FrameEntry *fe) ImmutableSync::entryFor(FrameEntry *fe)
{ {
JS_ASSERT(fe->trackerIndex() < nentries); JS_ASSERT(fe <= top);
return entries[fe->trackerIndex()]; SyncEntry &e = entries[frame.indexOfFe(fe)];
if (e.generation != generation)
e.reset(generation);
return e;
} }
void void
ImmutableSync::sync(FrameEntry *fe) ImmutableSync::sync(FrameEntry *fe)
{ {
JS_ASSERT(nentries); #ifdef DEBUG
top = fe;
#endif
if (fe->isCopy()) if (fe->isCopy())
syncCopy(fe); syncCopy(fe);
else else
syncNormal(fe); syncNormal(fe);
nentries--;
} }
bool bool
ImmutableSync::shouldSyncType(FrameEntry *fe, SyncEntry &e) ImmutableSync::shouldSyncType(FrameEntry *fe, SyncEntry &e)
{ {
if (fe->type.inRegister() && !e.typeClobbered) /* Registers are synced up-front. */
return true; return !fe->type.synced() && !fe->type.inRegister();
if (e.hasTypeReg)
return true;
return frame.inTryBlock || fe >= bottom;
} }
bool bool
ImmutableSync::shouldSyncData(FrameEntry *fe, SyncEntry &e) ImmutableSync::shouldSyncData(FrameEntry *fe, SyncEntry &e)
{ {
if (fe->data.inRegister() && !e.dataClobbered) /* Registers are synced up-front. */
return true; return !fe->data.synced() && !fe->data.inRegister();
if (e.hasDataReg)
return true;
return frame.inTryBlock || fe >= bottom;
} }
JSC::MacroAssembler::RegisterID JSC::MacroAssembler::RegisterID
@ -216,8 +203,7 @@ ImmutableSync::ensureDataReg(FrameEntry *fe, SyncEntry &e)
void void
ImmutableSync::syncCopy(FrameEntry *fe) ImmutableSync::syncCopy(FrameEntry *fe)
{ {
if (!frame.inTryBlock && fe < bottom) JS_ASSERT(fe >= bottom);
return;
FrameEntry *backing = fe->copyOf(); FrameEntry *backing = fe->copyOf();
SyncEntry &e = entryFor(backing); SyncEntry &e = entryFor(backing);
@ -254,7 +240,7 @@ ImmutableSync::syncNormal(FrameEntry *fe)
e.type = fe->getKnownType(); e.type = fe->getKnownType();
} }
if (!fe->data.synced() && !e.dataSynced && shouldSyncData(fe, e)) { if (shouldSyncData(fe, e)) {
if (fe->isConstant()) { if (fe->isConstant()) {
masm->storeValue(fe->getValue(), addr); masm->storeValue(fe->getValue(), addr);
return; return;
@ -262,7 +248,7 @@ ImmutableSync::syncNormal(FrameEntry *fe)
masm->storePayload(ensureDataReg(fe, e), addr); masm->storePayload(ensureDataReg(fe, e), addr);
} }
if (!fe->type.synced() && !e.typeSynced && shouldSyncType(fe, e)) { if (shouldSyncType(fe, e)) {
if (e.learnedType) if (e.learnedType)
masm->storeTypeTag(ImmType(e.type), addr); masm->storeTypeTag(ImmType(e.type), addr);
else else
@ -272,14 +258,14 @@ ImmutableSync::syncNormal(FrameEntry *fe)
if (e.hasDataReg) { if (e.hasDataReg) {
avail.putReg(e.dataReg); avail.putReg(e.dataReg);
regs[e.dataReg] = NULL; regs[e.dataReg] = NULL;
} else if (!e.dataClobbered && fe->data.inRegister() && frame.regstate[fe->data.reg()].fe) { } else if (!e.dataClobbered && fe->data.inRegister() && frame.regstate[fe->data.reg()].fe()) {
avail.putReg(fe->data.reg()); avail.putReg(fe->data.reg());
} }
if (e.hasTypeReg) { if (e.hasTypeReg) {
avail.putReg(e.typeReg); avail.putReg(e.typeReg);
regs[e.typeReg] = NULL; regs[e.typeReg] = NULL;
} else if (!e.typeClobbered && fe->type.inRegister() && frame.regstate[fe->type.reg()].fe) { } else if (!e.typeClobbered && fe->type.inRegister() && frame.regstate[fe->type.reg()].fe()) {
avail.putReg(fe->type.reg()); avail.putReg(fe->type.reg());
} }
} }

Просмотреть файл

@ -70,16 +70,24 @@ class ImmutableSync
* *
* They are separated for readability. * They are separated for readability.
*/ */
bool dataSynced; uint32 generation;
bool typeSynced;
bool dataClobbered; bool dataClobbered;
bool typeClobbered; bool typeClobbered;
RegisterID dataReg;
RegisterID typeReg;
bool hasDataReg; bool hasDataReg;
bool hasTypeReg; bool hasTypeReg;
bool learnedType; bool learnedType;
RegisterID dataReg;
RegisterID typeReg;
JSValueType type; JSValueType type;
void reset(uint32 gen) {
dataClobbered = false;
typeClobbered = false;
hasDataReg = false;
hasTypeReg = false;
learnedType = false;
generation = gen;
}
}; };
public: public:
@ -87,8 +95,7 @@ class ImmutableSync
~ImmutableSync(); ~ImmutableSync();
bool init(uint32 nentries); bool init(uint32 nentries);
void reset(Assembler *masm, Registers avail, uint32 n, void reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom);
FrameEntry *bottom);
void sync(FrameEntry *fe); void sync(FrameEntry *fe);
private: private:
@ -111,7 +118,9 @@ class ImmutableSync
Registers avail; Registers avail;
Assembler *masm; Assembler *masm;
SyncEntry *regs[Assembler::TotalRegisters]; SyncEntry *regs[Assembler::TotalRegisters];
FrameEntry *top;
FrameEntry *bottom; FrameEntry *bottom;
uint32 generation;
}; };
} /* namespace mjit */ } /* namespace mjit */

Просмотреть файл

@ -190,11 +190,16 @@ struct Registers {
return !(freeMask & mask); return !(freeMask & mask);
} }
RegisterID takeAnyReg() { RegisterID peekReg() {
JS_ASSERT(!empty()); JS_ASSERT(!empty());
int ireg; int ireg;
JS_FLOOR_LOG2(ireg, freeMask); JS_FLOOR_LOG2(ireg, freeMask);
RegisterID reg = (RegisterID)ireg; RegisterID reg = (RegisterID)ireg;
return reg;
}
RegisterID takeAnyReg() {
RegisterID reg = peekReg();
takeReg(reg); takeReg(reg);
return reg; return reg;
} }