[INFER] Rewrite CompileFunction as an UncachedNew/UncachedCall wrapper, bug 660850.

Brian Hackett 2011-05-31 10:31:39 -07:00
Parent d473be683c
Commit 37b15c8920
4 changed files with 27 additions and 73 deletions

View file

@@ -280,8 +280,14 @@ stubs::FixupArity(VMFrame &f, uint32 nactual)
return newfp;
}
struct ResetStubRejoin {
VMFrame &f;
ResetStubRejoin(VMFrame &f) : f(f) {}
~ResetStubRejoin() { f.stubRejoin = 0; }
};
void * JS_FASTCALL
stubs::CompileFunction(VMFrame &f, uint32 nactual)
stubs::CompileFunction(VMFrame &f, uint32 argc)
{
/*
* Note: the stubRejoin kind for the frame was written before the call, and
@@ -289,73 +295,12 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
* IC stub will not handle cases where we recompiled or threw).
*/
JS_ASSERT_IF(f.cx->typeInferenceEnabled(), f.stubRejoin);
ResetStubRejoin reset(f);
/*
* We have a partially constructed frame. That's not really good enough to
* compile though because we could throw, so get a full, adjusted frame.
*/
JSContext *cx = f.cx;
StackFrame *fp = f.fp();
bool isConstructing = f.fp()->isConstructing();
f.regs.popPartialFrame((Value *)f.fp());
/*
* Since we can only use members set by initCallFrameCallerHalf,
* we must carefully extract the callee from the nactual.
*/
JSObject &callee = fp->formalArgsEnd()[-(int(nactual) + 2)].toObject();
JSFunction *fun = callee.getFunctionPrivate();
JSScript *script = fun->script();
/*
* FixupArity/RemovePartialFrame expect to be called after the early
* prologue.
*/
fp->initCallFrameEarlyPrologue(fun, nactual);
if (nactual != fp->numFormalArgs()) {
fp = (StackFrame *)FixupArity(f, nactual);
if (!fp) {
f.stubRejoin = 0;
return NULL;
}
}
CallArgs args = CallArgsFromArgv(fp->numFormalArgs(), fp->formalArgs());
cx->typeMonitorCall(args, fp->isConstructing());
/* Finish frame initialization. */
fp->initCallFrameLatePrologue();
/* These would have been initialized by the prologue. */
f.regs.prepareToRun(fp, script);
if (fun->isHeavyweight() && !js::CreateFunCallObject(cx, fp)) {
f.stubRejoin = 0;
THROWV(NULL);
}
CompileStatus status = CanMethodJIT(cx, script, fp, CompileRequest_JIT);
if (status == Compile_Okay) {
void *entry = script->getJIT(fp->isConstructing())->invokeEntry;
/* Same constraint on fp as UncachedInlineCall. */
f.regs.popFrame((Value *) f.regs.fp());
f.stubRejoin = 0;
return entry;
}
/* Force computation of the previous PC, as Interpret will clear it. */
fp->prev()->pc(cx, fp);
/* Function did not compile... interpret it. */
JSBool ok = Interpret(cx, fp);
InlineReturn(f);
f.stubRejoin = 0;
if (!ok)
THROWV(NULL);
return NULL;
return isConstructing ? UncachedNew(f, argc) : UncachedCall(f, argc);
}
static inline bool
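
The core of the rewrite is visible above: ResetStubRejoin is an RAII guard that clears f.stubRejoin on every return path out of the wrapper (including the paths through UncachedNew/UncachedCall that recompile or throw), so the new body no longer needs an explicit `f.stubRejoin = 0` before each exit. Below is a minimal, self-contained C++ sketch of that guard-plus-dispatch shape; it uses stand-in types and helper names (Frame, ResetRejoin, uncachedNew, uncachedCall) rather than the real VMFrame and uncached-call machinery.

#include <cstdint>
#include <cstdio>

// Stand-in for VMFrame: only the fields the sketch needs.
struct Frame {
    uint32_t stubRejoin;   // rejoin state written by the IC before the call
    bool isConstructing;
};

// RAII guard mirroring ResetStubRejoin: clears the rejoin state on every
// return path, whether the call compiles, interprets, or throws.
struct ResetRejoin {
    Frame &f;
    explicit ResetRejoin(Frame &f) : f(f) {}
    ~ResetRejoin() { f.stubRejoin = 0; }
};

// Stand-ins for UncachedNew / UncachedCall.
void *uncachedNew(Frame &, uint32_t argc)  { std::printf("new,  argc=%u\n", static_cast<unsigned>(argc)); return nullptr; }
void *uncachedCall(Frame &, uint32_t argc) { std::printf("call, argc=%u\n", static_cast<unsigned>(argc)); return nullptr; }

// Shape of the rewritten stub: one guard, one dispatch.
void *compileFunction(Frame &f, uint32_t argc) {
    ResetRejoin reset(f);
    return f.isConstructing ? uncachedNew(f, argc) : uncachedCall(f, argc);
}

int main() {
    Frame f{/*stubRejoin=*/1, /*isConstructing=*/false};
    compileFunction(f, 2);
    std::printf("stubRejoin after return: %u\n", static_cast<unsigned>(f.stubRejoin)); // prints 0
}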

View file

@@ -667,20 +667,25 @@ class CallCompiler : public BaseCompiler
/* Try and compile. On success we get back the nmap pointer. */
void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
DataLabelPtr inlined;
if (ic.frameSize.isStatic()) {
masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
masm.fallibleVMCall(cx->typeInferenceEnabled(),
compilePtr, NULL, NULL, ic.frameSize.staticLocalSlots());
compilePtr, f.regs.pc, &inlined, ic.frameSize.staticLocalSlots());
} else {
masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), Registers::ArgReg1);
masm.fallibleVMCall(cx->typeInferenceEnabled(),
compilePtr, NULL, NULL, -1);
compilePtr, f.regs.pc, &inlined, -1);
}
Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
Registers::ReturnReg);
masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.sp)), JSFrameReg);
/* Compute the value of ncode to use at this call site. */
uint8 *ncode = (uint8 *) f.jit()->code.m_code.executableAddress() + ic.call->codeOffset;
masm.storePtr(ImmPtr(ncode), Address(JSFrameReg, StackFrame::offsetOfNcode()));
masm.jump(Registers::ReturnReg);
hasCode.linkTo(masm.label(), &masm);
@@ -708,6 +713,11 @@ class CallCompiler : public BaseCompiler
JaegerSpew(JSpew_PICs, "generated CALL stub %p (%d bytes)\n", cs.executableAddress(),
masm.size());
if (f.regs.inlined()) {
JSC::LinkBuffer code((uint8 *) cs.executableAddress(), masm.size());
code.patch(inlined, f.regs.inlined());
}
Repatcher repatch(from);
JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
repatch.relink(oolJump, cs);
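
The CALL IC change above threads the current pc and a DataLabelPtr named `inlined` through fallibleVMCall, then patches that label with f.regs.inlined() once the stub is linked, so the VM call knows which inlined call site it was made from. As a rough, self-contained model of that emit-placeholder-then-patch idiom (generic C++, not the JSC assembler or LinkBuffer API):

#include <cstdint>
#include <cstring>
#include <vector>

// Toy code buffer: emit a pointer-sized placeholder now, patch it once the
// real value (here, the inlined call-site pointer) is known.
struct PatchableBuffer {
    std::vector<uint8_t> bytes;

    // Emit a null placeholder and return its offset (like recording a DataLabelPtr).
    size_t emitPointerPlaceholder() {
        size_t offset = bytes.size();
        void *placeholder = nullptr;
        const uint8_t *p = reinterpret_cast<const uint8_t *>(&placeholder);
        bytes.insert(bytes.end(), p, p + sizeof(placeholder));
        return offset;
    }

    // Overwrite the placeholder in place (like patching after linking).
    void patchPointer(size_t offset, void *value) {
        std::memcpy(bytes.data() + offset, &value, sizeof(value));
    }
};

int main() {
    PatchableBuffer stub;
    size_t inlinedSlot = stub.emitPointerPlaceholder(); // placeholder baked into the stub
    int callSite = 0;                                   // stands in for f.regs.inlined()
    stub.patchPointer(inlinedSlot, &callSite);          // only done when the call site is inlined
    return 0;
}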

View file

@@ -246,9 +246,9 @@ Recompiler::expandInlineFrames(JSContext *cx, StackFrame *fp, mjit::CallSite *in
StackFrame *innerfp = expandInlineFrameChain(cx, fp, inner);
/* Check if the VMFrame returns into the inlined frame. */
if (f->stubRejoin && (f->stubRejoin & 0x1) && f->regs.fp()->prev() == fp) {
if (f->stubRejoin && (f->stubRejoin & 0x1) && f->fp() == fp) {
/* The VMFrame is calling CompileFunction. */
fp->prev()->setRejoin(StubRejoin((RejoinState) f->stubRejoin));
innerfp->setRejoin(StubRejoin((RejoinState) f->stubRejoin));
*frameAddr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
}
if (*frameAddr == codeStart + inlined->codeOffset) {
@@ -417,8 +417,8 @@ Recompiler::recompile(bool resetUses)
patchNative(cx, script->jitNormal, fp, fp->pc(cx, NULL), NULL, rejoin);
} else if (rejoin) {
/* Recompilation triggered by CompileFunction. */
if (fp->prev()->script() == script) {
fp->prev()->setRejoin(StubRejoin(rejoin));
if (fp->script() == script) {
fp->setRejoin(StubRejoin(rejoin));
*addr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
}
} else if (script->jitCtor && script->jitCtor->isValidCode(*addr)) {

View file

@@ -1061,7 +1061,6 @@ class FrameRegs
void popPartialFrame(Value *newsp) {
sp = newsp;
fp_ = fp_->prev();
inlined_ = NULL;
}
/* For InternalInterpret: */