[JAEGER] ICs for scripted calls (bug 587698, r=dmandelin).

This commit is contained in:
David Anderson 2010-08-27 17:50:53 -07:00
Parent: be26fccdba
Commit: 0c9dc1d142
20 changed files: 1581 additions and 527 deletions

Просмотреть файл

@ -46,6 +46,8 @@ include $(topsrcdir)/config/rules.mk
# Disabled due to timeouts.
# test_bug563329.html
# Disabled due to lack of present support for JSD in JM
# test_bug448602.html
_TEST_FILES = \
test_bug226361.xhtml \
bug226361_iframe.xhtml \
@ -72,7 +74,6 @@ _TEST_FILES = \
test_bug426082.html \
test_bug443985.html \
test_bug447736.html \
test_bug448602.html \
test_bug450876.html \
test_bug456273.html \
test_bug457672.html \

Просмотреть файл

@ -159,6 +159,13 @@ public:
return !m_value;
}
// Byte distance between two code pointers (this - other), as a signed
// pointer difference; used to compute sizes/offsets of generated code.
// NOTE(review): only m_value is asserted non-null here — other.m_value is
// used in the subtraction unchecked. Confirm callers never pass a null
// rhs, or assert other.m_value as well.
ptrdiff_t operator -(const MacroAssemblerCodePtr &other) const
{
JS_ASSERT(m_value);
return reinterpret_cast<uint8 *>(m_value) -
reinterpret_cast<uint8 *>(other.m_value);
}
private:
void* m_value;
};

Просмотреть файл

@ -76,6 +76,11 @@ enum JSFrameFlags {
JSFRAME_SPECIAL = JSFRAME_DEBUGGER | JSFRAME_EVAL
};
namespace js { namespace mjit {
class Compiler;
class InlineFrameAssembler;
} }
/*
* JS stack frame, may be allocated on the C stack by native callers. Always
* allocated on cx->stackPool for calls from the interpreter to an interpreted
@ -289,6 +294,10 @@ struct JSStackFrame
blockChain = obj;
}
// Byte offset of the blockChain member within JSStackFrame, so JIT-generated
// code can store to the slot directly without going through the accessor.
static size_t offsetBlockChain() {
return offsetof(JSStackFrame, blockChain);
}
/* IMacroPC accessors. */
bool hasIMacroPC() const { return flags & JSFRAME_IN_IMACRO; }
@ -335,6 +344,10 @@ struct JSStackFrame
annotation = annot;
}
// Byte offset of the annotation member within JSStackFrame, for direct
// stores from JIT-generated frame-setup code.
static size_t offsetAnnotation() {
return offsetof(JSStackFrame, annotation);
}
/* Debugger hook data accessors */
bool hasHookData() const {
@ -354,6 +367,10 @@ struct JSStackFrame
hookData = data;
}
// Byte offset of the debugger hookData member within JSStackFrame, for
// direct stores from JIT-generated frame-setup code.
static size_t offsetHookData() {
return offsetof(JSStackFrame, hookData);
}
/* Version accessors */
JSVersion getCallerVersion() const {
@ -364,6 +381,10 @@ struct JSStackFrame
callerVersion = version;
}
// Byte offset of the callerVersion member within JSStackFrame, for direct
// stores from JIT-generated frame-setup code.
static size_t offsetCallerVersion() {
return offsetof(JSStackFrame, callerVersion);
}
/* Script accessors */
bool hasScript() const {
@ -410,6 +431,10 @@ struct JSStackFrame
return fun;
}
// Byte offset of the fun (callee function) member within JSStackFrame, for
// direct stores from JIT-generated frame-setup code.
static size_t offsetFunction() {
return offsetof(JSStackFrame, fun);
}
size_t numFormalArgs() const {
JS_ASSERT(!isEvalFrame());
return getFunction()->nargs;

Просмотреть файл

@ -1320,6 +1320,11 @@ js_TraceScript(JSTracer *trc, JSScript *script)
if (IS_GC_MARKING_TRACER(trc) && script->filename)
js_MarkScriptFilename(script->filename);
#ifdef JS_METHODJIT
if (script->jit)
mjit::TraceScriptCache(trc, script);
#endif
}
JSBool

Просмотреть файл

@ -180,6 +180,7 @@ namespace ic {
# endif
# if defined JS_MONOIC
struct MICInfo;
struct CallICInfo;
# endif
}
struct CallSite;
@ -263,6 +264,7 @@ struct JSScript {
# endif
# if defined JS_MONOIC
js::mjit::ic::MICInfo *mics; /* MICs in this script. */
js::mjit::ic::CallICInfo *callICs; /* CallICs in this script. */
# endif
bool isValidJitCode(void *jcode);

Просмотреть файл

@ -136,11 +136,13 @@ class BaseAssembler : public JSC::MacroAssembler
/* Register pair storing returned type/data for calls. */
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = JSC::X86Registers::ecx;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::X86Registers::edx;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = JSC::X86Registers::ecx;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::X86Registers::edx;
static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::X86Registers::ecx;
#elif defined(JS_CPU_ARM)
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = JSC::ARMRegisters::r2;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegisters::r1;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = JSC::ARMRegisters::r2;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegisters::r1;
static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::ARMRegisters::r1;
#endif
size_t distanceOf(Label l) {
@ -257,6 +259,10 @@ static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegister
/* VMFrame -> ArgReg0 */
setupVMFrame();
return wrapCall(pfun);
}
Call wrapCall(void *pfun) {
#ifdef JS_METHODJIT_PROFILE_STUBS
push(Registers::ArgReg0);
push(Registers::ArgReg1);
@ -299,6 +305,10 @@ static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegister
move(MacroAssembler::stackPointerRegister, Registers::ArgReg0);
}
// Emit a bare (unlinked) call instruction and return its Call handle so the
// target can be linked/patched later.
Call call() {
return JSC::MacroAssembler::call();
}
Call call(void *fun) {
Call cl = JSC::MacroAssembler::call();
@ -322,6 +332,11 @@ static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegister
#endif
}
// Spill the native return address (currently held in |reg|) into the
// current stack frame's ncode slot, so it survives across the call.
void saveReturnAddress(RegisterID reg)
{
storePtr(reg, Address(JSFrameReg, offsetof(JSStackFrame, ncode)));
}
void finalize(uint8 *ncode) {
JSC::JITCode jc(ncode, size());
JSC::CodeBlock cb(jc);
@ -332,24 +347,13 @@ static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegister
repatchBuffer.relink(JSC::CodeLocationCall(cp), callPatches[i].fun);
}
}
/*
* Write a jump instruction at source which goes to target, clobbering any
* instructions already at source. Can't use a patch/link buffer here
* as there is no original instruction we are setting the target for.
*/
#ifdef JS_CPU_X86
// Overwrite the bytes at |source| with a 5-byte x86 JMP rel32 to |target|.
// The rel32 operand is relative to the end of the jump instruction
// (source + 5). Expressed as pointer arithmetic with a named cast instead
// of the previous C-style pointer->int casts, which obscured intent and
// are only valid where int can hold a pointer.
static void insertJump(uint8 *source, const uint8 *target) {
    source[0] = 0xE9; /* JSC::X86Assembler::OP_JMP_rel32; */
    *reinterpret_cast<int*>(source + 1) = static_cast<int>(target - (source + 5));
}
#endif
};
/* Save some typing. */
static const JSC::MacroAssembler::RegisterID JSFrameReg = BaseAssembler::JSFrameReg;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = BaseAssembler::JSReturnReg_Type;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = BaseAssembler::JSReturnReg_Data;
static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = BaseAssembler::JSParamReg_Argc;
} /* namespace mjit */
} /* namespace js */

Просмотреть файл

@ -51,6 +51,7 @@
#include "assembler/assembler/LinkBuffer.h"
#include "FrameState-inl.h"
#include "jsscriptinlines.h"
#include "InlineFrameAssembler.h"
#include "jsautooplen.h"
@ -76,6 +77,7 @@ mjit::Compiler::Compiler(JSContext *cx, JSScript *script, JSFunction *fun, JSObj
branchPatches(ContextAllocPolicy(cx)),
#if defined JS_MONOIC
mics(ContextAllocPolicy(cx)),
callICs(ContextAllocPolicy(cx)),
#endif
#if defined JS_POLYIC
pics(ContextAllocPolicy(cx)),
@ -130,6 +132,13 @@ mjit::Compiler::Compile()
prof.start();
#endif
/* Initialize PC early so stub calls in the prologue can be fallible. */
PC = script->code;
#ifdef JS_METHODJIT
script->debugMode = cx->compartment->debugMode;
#endif
CHECK_STATUS(generatePrologue());
CHECK_STATUS(generateMethod());
CHECK_STATUS(generateEpilogue());
@ -168,16 +177,15 @@ mjit::TryCompile(JSContext *cx, JSScript *script, JSFunction *fun, JSObject *sco
return status;
}
void
mjit::Compiler::saveReturnAddress()
JSC::MacroAssembler::RegisterID
mjit::Compiler::takeHWReturnAddress(Assembler &masm)
{
#ifndef JS_CPU_ARM
JS_STATIC_ASSERT(JSParamReg_Argc != Registers::ReturnReg);
masm.pop(Registers::ReturnReg);
restoreFrameRegs(masm);
masm.storePtr(Registers::ReturnReg, Address(JSFrameReg, offsetof(JSStackFrame, ncode)));
return Registers::ReturnReg;
#else
restoreFrameRegs(masm);
masm.storePtr(JSC::ARMRegisters::lr, Address(JSFrameReg, offsetof(JSStackFrame, ncode)));
return JSC::ARMRegisters::lr;
#endif
}
@ -186,7 +194,9 @@ mjit::Compiler::generatePrologue()
{
invokeLabel = masm.label();
saveReturnAddress();
RegisterID retAddr = takeHWReturnAddress(masm);
restoreFrameRegs(masm);
masm.saveReturnAddress(retAddr);
/*
* If there is no function, then this can only be called via JaegerShot(),
@ -194,8 +204,81 @@ mjit::Compiler::generatePrologue()
*/
if (fun) {
Jump j = masm.jump();
/*
* Entry point #2: The caller has partially constructed a frame, and
* either argc >= nargs or the arity check has corrected the frame.
*/
invokeLabel = masm.label();
saveReturnAddress();
RegisterID retAddr = takeHWReturnAddress(masm);
masm.saveReturnAddress(retAddr);
Label fastPath = masm.label();
/* Store these early on so slow paths can access them. */
masm.storePtr(ImmPtr(script), Address(JSFrameReg, JSStackFrame::offsetScript()));
masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
{
/*
* Entry point #3: The caller has partially constructed a frame,
* but argc might be != nargs, so an arity check might be called.
*
* This loops back to entry point #2.
*/
arityLabel = stubcc.masm.label();
RegisterID retAddr = takeHWReturnAddress(stubcc.masm);
stubcc.masm.saveReturnAddress(retAddr);
Jump argMatch = stubcc.masm.branch32(Assembler::AboveOrEqual, JSParamReg_Argc,
Imm32(fun->nargs));
stubcc.crossJump(argMatch, fastPath);
/* Slow path - call the arity check function. Returns new fp. */
stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetFunction()));
stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
stubcc.call(stubs::CheckArity);
stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
stubcc.crossJump(stubcc.masm.jump(), fastPath);
}
/*
* Guard that there is enough stack space. Note we include the size of
* a second frame, to ensure we can create a frame from call sites.
*/
masm.addPtr(Imm32((script->nslots + VALUES_PER_STACK_FRAME * 2) * sizeof(Value)),
JSFrameReg,
Registers::ReturnReg);
Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
FrameAddress(offsetof(VMFrame, stackLimit)));
/* If the stack check fails... */
{
stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetFunction()));
stubcc.call(stubs::CheckStackQuota);
stubcc.crossJump(stubcc.masm.jump(), masm.label());
}
/* Easy frame members. Hard ones are in caller. */
masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetFunction()));
masm.storePtr(ImmPtr(NULL), Address(JSFrameReg, JSStackFrame::offsetCallObj()));
masm.storePtr(ImmPtr(NULL), Address(JSFrameReg, JSStackFrame::offsetArgsObj()));
masm.storeValue(UndefinedValue(), Address(JSFrameReg, JSStackFrame::offsetReturnValue()));
masm.storePtr(ImmPtr(NULL), Address(JSFrameReg, JSStackFrame::offsetAnnotation()));
masm.storePtr(ImmPtr(NULL), Address(JSFrameReg, JSStackFrame::offsetBlockChain()));
if (script->debugMode)
masm.storePtr(ImmPtr(NULL), Address(JSFrameReg, JSStackFrame::offsetHookData()));
#ifdef DEBUG
masm.storePtr(ImmPtr(JSStackFrame::sInvalidPC),
Address(JSFrameReg, offsetof(JSStackFrame, savedPC)));
#endif
/* :TODO: This is entirely wrong. */
masm.store32(Imm32(cx->version),
Address(JSFrameReg, JSStackFrame::offsetCallerVersion()));
/* Set cx->fp */
masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), Registers::ReturnReg);
/* Set locals to undefined. */
for (uint32 i = 0; i < script->nfixed; i++) {
@ -256,6 +339,7 @@ mjit::Compiler::finishThisUp()
sizeof(void *) * script->length +
#if defined JS_MONOIC
sizeof(ic::MICInfo) * mics.length() +
sizeof(ic::CallICInfo) * callICs.length() +
#endif
#if defined JS_POLYIC
sizeof(ic::PICInfo) * pics.length() +
@ -300,6 +384,9 @@ mjit::Compiler::finishThisUp()
}
}
if (fun)
script->jit->arityCheck = stubCode.locationOf(arityLabel).executableAddress();
#if defined JS_MONOIC
script->jit->nMICs = mics.length();
if (mics.length()) {
@ -325,17 +412,6 @@ mjit::Compiler::finishThisUp()
script->mics[i].patchValueOffset = mics[i].patchValueOffset;
#endif
break;
case ic::MICInfo::CALL:
script->mics[i].frameDepth = mics[i].frameDepth;
script->mics[i].knownObject = fullCode.locationOf(mics[i].knownObject);
script->mics[i].callEnd = fullCode.locationOf(mics[i].callEnd);
script->mics[i].stubEntry = stubCode.locationOf(mics[i].stubEntry);
script->mics[i].dataReg = mics[i].dataReg;
script->mics[i].u.generated = false;
/* FALLTHROUGH */
case ic::MICInfo::EMPTYCALL:
script->mics[i].argc = mics[i].argc;
break;
case ic::MICInfo::TRACER: {
uint32 offs = uint32(mics[i].jumpTarget - script->code);
JS_ASSERT(jumpMap[offs].isValid());
@ -350,6 +426,64 @@ mjit::Compiler::finishThisUp()
JS_NOT_REACHED("Bad MIC kind");
}
}
script->jit->nCallICs = callICs.length();
if (callICs.length()) {
script->callICs = (ic::CallICInfo *)cursor;
cursor += sizeof(ic::CallICInfo) * callICs.length();
} else {
script->callICs = NULL;
}
for (size_t i = 0; i < callICs.length(); i++) {
script->callICs[i].reset();
script->callICs[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
script->callICs[i].funJump = fullCode.locationOf(callICs[i].funJump);
script->callICs[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);
/* Compute the hot call offset. */
uint32 offset = fullCode.locationOf(callICs[i].hotCall) -
fullCode.locationOf(callICs[i].funGuard);
script->callICs[i].hotCallOffset = offset;
JS_ASSERT(script->callICs[i].hotCallOffset == offset);
/* Compute the join point offset. */
offset = fullCode.locationOf(callICs[i].joinPoint) -
fullCode.locationOf(callICs[i].funGuard);
script->callICs[i].joinPointOffset = offset;
JS_ASSERT(script->callICs[i].joinPointOffset == offset);
/* Compute the OOL call offset. */
offset = stubCode.locationOf(callICs[i].oolCall) -
stubCode.locationOf(callICs[i].slowPathStart);
script->callICs[i].oolCallOffset = offset;
JS_ASSERT(script->callICs[i].oolCallOffset == offset);
/* Compute the OOL jump offset. */
offset = stubCode.locationOf(callICs[i].oolJump) -
stubCode.locationOf(callICs[i].slowPathStart);
script->callICs[i].oolJumpOffset = offset;
JS_ASSERT(script->callICs[i].oolJumpOffset == offset);
/* Compute the slow join point offset. */
offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
stubCode.locationOf(callICs[i].slowPathStart);
script->callICs[i].slowJoinOffset = offset;
JS_ASSERT(script->callICs[i].slowJoinOffset == offset);
/* Compute the join point offset for continuing on the hot path. */
offset = stubCode.locationOf(callICs[i].hotPathLabel) -
stubCode.locationOf(callICs[i].funGuard);
script->callICs[i].hotPathOffset = offset;
JS_ASSERT(script->callICs[i].hotPathOffset == offset);
script->callICs[i].argc = callICs[i].argc;
script->callICs[i].funObjReg = callICs[i].funObjReg;
script->callICs[i].funPtrReg = callICs[i].funPtrReg;
script->callICs[i].frameDepth = callICs[i].frameDepth;
script->callICs[i].isConstantThis = callICs[i].isConstantThis;
script->callICs[i].constantThis = callICs[i].constantThis;
}
#endif /* JS_MONOIC */
#if defined JS_POLYIC
@ -437,10 +571,6 @@ mjit::Compiler::finishThisUp()
JS_ASSERT(size_t(cursor - (uint8*)script->jit) == totalBytes);
#ifdef JS_METHODJIT
script->debugMode = cx->compartment->debugMode;
#endif
return Compile_Okay;
}
@ -468,7 +598,6 @@ CompileStatus
mjit::Compiler::generateMethod()
{
mjit::AutoScriptRetrapper trapper(cx, script);
PC = script->code;
for (;;) {
JSOp op = JSOp(*PC);
@ -1714,6 +1843,49 @@ mjit::Compiler::interruptCheckHelper()
stubcc.rejoin(Changes(0));
}
// After a |new| call returns, guard that the result is an object. On the
// out-of-line path (callee returned a primitive), reload the stored |thisv|
// from the frame into the JS return registers — so the constructed object,
// not the primitive, becomes the result — then rejoin the fast path.
void
mjit::Compiler::emitPrimitiveTestForNew(uint32 argc)
{
Jump primitive = masm.testPrimitive(Assembler::Equal, JSReturnReg_Type);
stubcc.linkExitDirect(primitive, stubcc.masm.label());
/* |thisv| sits one slot above the argc arguments on the frame. */
FrameEntry *fe = frame.peek(-int(argc + 1));
Address thisv(frame.addressOf(fe));
stubcc.masm.loadTypeTag(thisv, JSReturnReg_Type);
stubcc.masm.loadPayload(thisv, JSReturnReg_Data);
Jump primFix = stubcc.masm.jump();
stubcc.crossJump(primFix, masm.label());
}
// Emit a call without a call IC: sync the frame, invoke the UncachedNew /
// UncachedCall stub, and if the stub hands back a JIT code pointer, call it
// directly. A NULL return means the callee was already run (e.g. via the
// interpreter), so the inline call is skipped on the out-of-line path.
// Result lands in the JS return register pair.
void
mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
{
RegisterID r0 = Registers::ReturnReg;
VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
/* argc args + callee + |this| are consumed by the call. */
frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
prepareStubCall(Uses(argc + 2));
masm.move(Imm32(argc), Registers::ArgReg1);
stubCall(stub);
ADD_CALLSITE(false);
/* Stub returns JIT'd code to call, or NULL if the call already completed. */
Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
masm.call(r0);
ADD_CALLSITE(false);
if (callingNew)
emitPrimitiveTestForNew(argc);
frame.popn(argc + 2);
frame.takeReg(JSReturnReg_Type);
frame.takeReg(JSReturnReg_Data);
frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
stubcc.rejoin(Changes(0));
}
/* See MonoIC.cpp, CallCompiler for more information on call ICs. */
void
mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
{
@ -1721,53 +1893,41 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
interruptCheckHelper();
FrameEntry *fe = frame.peek(-int(argc + 2));
bool typeKnown = fe->isTypeKnown();
if (typeKnown && fe->getKnownType() != JSVAL_TYPE_OBJECT) {
#ifdef JS_MONOIC
/*
* Make an otherwise empty MIC to hold the argument count.
* This can't be a fast native so the rest of the MIC won't be used.
*/
MICGenInfo mic(ic::MICInfo::EMPTYCALL);
mic.entry = masm.label();
mic.argc = argc;
mics.append(mic);
#endif
prepareStubCall(Uses(argc + 2));
VoidPtrStubUInt32 stub = callingNew ? stubs::SlowNew : stubs::SlowCall;
#ifdef JS_MONOIC
masm.move(Imm32(mics.length() - 1), Registers::ArgReg1);
#else
masm.move(Imm32(argc), Registers::ArgReg1);
#endif
masm.stubCall(stub, PC, frame.stackDepth() + script->nfixed);
ADD_CALLSITE(false);
frame.popn(argc + 2);
frame.pushSynced();
/* Currently, we don't support constant functions. */
if (fe->isNotType(JSVAL_TYPE_OBJECT) || script->debugMode || fe->isConstant()) {
emitUncachedCall(argc, callingNew);
return;
}
#ifdef JS_MONOIC
MICGenInfo mic(ic::MICInfo::CALL);
mic.entry = masm.label();
mic.argc = argc;
mic.frameDepth = frame.frameDepth() - argc - 2;
#endif
FrameEntry *thisvFe = frame.peek(-int(argc + 1));
Address thisvAddr = frame.addressOf(thisvFe);
MaybeRegisterID typeReg;
RegisterID data = frame.tempRegForData(fe);
frame.pinReg(data);
CallGenInfo callIC(argc);
uint32 callICIndex = callICs.length();
Address addr = frame.addressOf(fe);
if (!typeKnown) {
if (!frame.shouldAvoidTypeRemat(fe)) {
typeReg = frame.tempRegForType(fe);
frame.pinReg(typeReg.reg());
}
/*
* Save constant |this| to optimize thisv stores for common call cases
* like CALL[LOCAL, GLOBAL, ARG] which push NULL.
*/
callIC.isConstantThis = false;
if (thisvFe->isConstant()) {
callIC.isConstantThis = true;
callIC.constantThis = thisvFe->getValue();
}
callIC.frameDepth = frame.frameDepth();
/* Grab type and data registers up-front. */
MaybeRegisterID typeReg;
frame.ensureFullRegs(fe);
if (!fe->isTypeKnown()) {
typeReg = frame.tempRegForType(fe);
frame.pinReg(typeReg.reg());
}
RegisterID dataReg = frame.tempRegForData(fe);
frame.pinReg(dataReg);
/*
* We rely on the fact that syncAndKill() is not allowed to touch the
@ -1776,120 +1936,164 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
frame.resetRegState();
Label invoke = stubcc.masm.label();
Registers tempRegs;
#ifdef JS_MONOIC
mic.stubEntry = invoke;
mic.dataReg = data;
#endif
Jump j;
if (!typeKnown) {
if (!typeReg.isSet())
j = masm.testObject(Assembler::NotEqual, frame.addressOf(fe));
else
j = masm.testObject(Assembler::NotEqual, typeReg.reg());
stubcc.linkExit(j, Uses(argc + 2));
}
#ifdef JS_MONOIC
mic.knownObject = masm.label();
#endif
j = masm.testFunction(Assembler::NotEqual, data);
stubcc.linkExit(j, Uses(argc + 2));
stubcc.leave();
#ifdef JS_MONOIC
stubcc.masm.move(Imm32(mics.length()), Registers::ArgReg1);
#else
stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
#endif
stubcc.call(callingNew ? stubs::SlowNew : stubs::SlowCall);
ADD_CALLSITE(true);
/* Get function private pointer. */
masm.loadFunctionPrivate(data, data);
frame.takeReg(data);
RegisterID t0 = frame.allocReg();
RegisterID t1 = frame.allocReg();
/* Test if the function is interpreted, and if not, take a slow path. */
{
masm.load16(Address(data, offsetof(JSFunction, flags)), t0);
masm.move(t0, t1);
masm.and32(Imm32(JSFUN_KINDMASK), t1);
Jump notInterp = masm.branch32(Assembler::Below, t1, Imm32(JSFUN_INTERPRETED));
stubcc.linkExitDirect(notInterp, invoke);
}
/* Test if it's not got compiled code. */
Address scriptAddr(data, offsetof(JSFunction, u) + offsetof(JSFunction::U::Scripted, script));
masm.loadPtr(scriptAddr, data);
Jump notCompiled = masm.branchPtr(Assembler::BelowOrEqual,
Address(data, offsetof(JSScript, ncode)),
ImmIntPtr(1));
{
stubcc.linkExitDirect(notCompiled, invoke);
}
frame.freeReg(t0);
frame.freeReg(t1);
frame.freeReg(data);
/* Scripted call. */
masm.move(Imm32(argc), Registers::ArgReg1);
masm.stubCall(callingNew ? stubs::New : stubs::Call,
PC, frame.stackDepth() + script->nfixed);
Jump invokeCallDone;
{
/*
* Stub call returns a pointer to JIT'd code, or NULL.
*
* If the function could not be JIT'd, it was already invoked using
* js_Interpret() or js_Invoke(). In that case, the stack frame has
* already been popped. We don't have to do any extra work.
*/
Jump j = stubcc.masm.branchTestPtr(Assembler::NonZero, Registers::ReturnReg, Registers::ReturnReg);
stubcc.crossJump(j, masm.label());
if (callingNew)
invokeCallDone = stubcc.masm.jump();
}
/* Fast-path: return address contains scripted call. */
masm.call(Registers::ReturnReg);
#if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
masm.callLabel = masm.label();
#endif
ADD_CALLSITE(false);
/* Test the type if necessary. Failing this always takes a really slow path. */
MaybeJump notObjectJump;
if (typeReg.isSet())
notObjectJump = masm.testObject(Assembler::NotEqual, typeReg.reg());
/*
* Functions invoked with |new| can return, for some reason, primitive
* values. Just deal with this here.
* Ensure that dataReg stays in a register which won't be clobbered
* by the intervening call to NewObject.
*/
if (callingNew) {
Jump primitive = masm.testPrimitive(Assembler::Equal, JSReturnReg_Type);
stubcc.linkExitDirect(primitive, stubcc.masm.label());
FrameEntry *fe = frame.peek(-int(argc + 1));
Address thisv(frame.addressOf(fe));
stubcc.masm.loadTypeTag(thisv, JSReturnReg_Type);
stubcc.masm.loadPayload(thisv, JSReturnReg_Data);
Jump primFix = stubcc.masm.jump();
stubcc.crossJump(primFix, masm.label());
invokeCallDone.linkTo(stubcc.masm.label(), &stubcc.masm);
if (callingNew && !(Registers::maskReg(dataReg) & Registers::SavedRegs)) {
RegisterID reg = Registers(Registers::SavedRegs).takeAnyReg();
masm.move(dataReg, reg);
dataReg = reg;
}
tempRegs.takeReg(dataReg);
RegisterID t0 = tempRegs.takeAnyReg();
RegisterID t1 = tempRegs.takeAnyReg();
/*
* Guard on the callee identity. This misses on the first run. If the
* callee is scripted, compiled/compilable, and argc == nargs, then this
* guard is patched, and the compiled code address is baked in.
*/
Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, dataReg, callIC.funGuard);
callIC.funJump = j;
Jump oolCallDone;
Jump rejoin1, rejoin2;
{
stubcc.linkExitDirect(j, stubcc.masm.label());
callIC.slowPathStart = stubcc.masm.label();
/*
* Test if the callee is even a function. If this doesn't match, we
* take a _really_ slow path later.
*/
Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, dataReg);
/* Test if the function is scripted. */
stubcc.masm.loadFunctionPrivate(dataReg, t0);
stubcc.masm.load16(Address(t0, offsetof(JSFunction, flags)), t1);
stubcc.masm.and32(Imm32(JSFUN_KINDMASK), t1);
Jump isNative = stubcc.masm.branch32(Assembler::Below, t1, Imm32(JSFUN_INTERPRETED));
/* Create the new object. This requires some fiddling to save the two values. */
if (callingNew) {
void *pfun = stubcc.masm.getCallTarget(JS_FUNC_TO_DATA_PTR(void *, stubs::NewObject));
stubcc.masm.storePtr(ImmPtr(PC),
FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, pc)));
stubcc.masm.fixScriptStack(frame.frameDepth());
stubcc.masm.setupVMFrame();
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
/* Need to stay 16-byte aligned on x86/x64. */
stubcc.masm.subPtr(Imm32(8), JSC::MacroAssembler::stackPointerRegister);
#endif
stubcc.masm.push(dataReg);
stubcc.masm.push(t0);
stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
stubcc.masm.wrapCall(pfun);
stubcc.masm.pop(t0);
stubcc.masm.pop(dataReg);
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
stubcc.masm.addPtr(Imm32(8), JSC::MacroAssembler::stackPointerRegister);
#endif
}
/*
* No-op jump that gets re-patched. This is so ArgReg1 won't be
* clobbered, with the added bonus that the generated stub doesn't
* need to pop its own return address.
*/
Jump toPatch = stubcc.masm.jump();
toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
callIC.oolJump = toPatch;
/* At this point the function is definitely scripted. Call the link routine. */
stubcc.masm.move(Imm32(callICIndex), Registers::ArgReg1);
callIC.oolCall = stubcc.call(callingNew ? ic::New : ic::Call);
callIC.funObjReg = dataReg;
callIC.funPtrReg = t0;
/*
* The IC call either returns NULL, meaning call completed, or a
* function pointer to jump to. Caveat: Must restore JSFrameReg
* because a new frame has been pushed.
*
* This function only executes once. If hit, it will generate a stub
* to compile and execute calls on demand.
*/
rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
Registers::ReturnReg);
stubcc.masm.move(Imm32(argc), JSParamReg_Argc);
stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
stubcc.masm.call(Registers::ReturnReg);
oolCallDone = stubcc.masm.jump();
/* Catch-all case, for natives this will turn into a MIC. */
if (notObjectJump.isSet())
stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
isNative.linkTo(stubcc.masm.label(), &stubcc.masm);
stubcc.masm.move(Imm32(callICIndex), Registers::ArgReg1);
stubcc.call(callingNew ? ic::NativeNew : ic::NativeCall);
rejoin2 = stubcc.masm.jump();
}
/*
* If the call site goes to a closure over the same function, it will
* generate an out-of-line stub that joins back here.
*/
callIC.hotPathLabel = masm.label();
/* If calling |new|, make sure to allocate a new object. */
if (callingNew) {
prepareStubCall(Uses(argc + 2));
masm.move(Imm32(argc), Registers::ArgReg1);
stubCall(stubs::NewObject);
}
uint32 flags = 0;
if (callingNew)
flags |= JSFRAME_CONSTRUCTING;
InlineFrameAssembler inlFrame(masm, callIC, PC, flags);
inlFrame.assemble();
callIC.hotCall = masm.call();
stubcc.crossJump(oolCallDone, masm.label());
callIC.joinPoint = masm.label();
/*
* Functions invoked with |new| can return primitive values.
* Just deal with this here.
*/
if (callingNew)
emitPrimitiveTestForNew(argc);
frame.popn(argc + 2);
frame.takeReg(JSReturnReg_Type);
frame.takeReg(JSReturnReg_Data);
frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
callIC.slowJoinPoint = stubcc.masm.label();
rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
stubcc.rejoin(Changes(0));
#ifdef JS_MONOIC
mic.callEnd = masm.label();
mics.append(mic);
callICs.append(callIC);
#else
emitUncachedCall(argc, callingNew);
#endif
}

Просмотреть файл

@ -90,11 +90,6 @@ class Compiler
Call call;
ic::MICInfo::Kind kind;
jsbytecode *jumpTarget;
uint32 argc;
uint32 frameDepth;
Label knownObject;
Label callEnd;
JSC::MacroAssembler::RegisterID dataReg;
Jump traceHint;
MaybeJump slowTraceHint;
union {
@ -107,6 +102,36 @@ class Compiler
} tracer;
} u;
};
/* InlineFrameAssembler wants to see this. */
public:
// Compile-time bookkeeping for one call IC site. Labels/jumps recorded here
// are resolved into offsets and stored into the runtime ic::CallICInfo in
// finishThisUp(). See CallICInfo for per-member semantics.
struct CallGenInfo {
CallGenInfo(uint32 argc)
: argc(argc), constantThis(UndefinedValue())
{ }
/*
* These members map to members in CallICInfo. See that structure for
* more comments.
*/
uint32 argc;
DataLabelPtr funGuard;          // patchable callee-identity guard
Jump funJump;                   // jump taken when the guard misses
Call hotCall;                   // inline (fast path) call instruction
Call oolCall;                   // out-of-line call to the link routine
Label joinPoint;                // where fast and slow paths rejoin
Label slowJoinPoint;
Label slowPathStart;
Label hotPathLabel;             // re-entry point for closure stubs
Jump oolJump;                   // no-op jump, re-patched at link time
RegisterID funObjReg;           // register holding the callee object
RegisterID funPtrReg;           // register holding the callee's JSFunction
uint32 frameDepth;
bool isConstantThis;            // |thisv| known constant at compile time?
Value constantThis;             // the constant, if isConstantThis
};
private:
#endif
#if defined JS_POLYIC
@ -184,6 +209,7 @@ class Compiler
js::Vector<BranchPatch, 64> branchPatches;
#if defined JS_MONOIC
js::Vector<MICGenInfo, 64> mics;
js::Vector<CallGenInfo, 64> callICs;
#endif
#if defined JS_POLYIC
js::Vector<PICGenInfo, 64> pics;
@ -193,6 +219,7 @@ class Compiler
js::Vector<uint32, 16> escapingList;
StubCompiler stubcc;
Label invokeLabel;
Label arityLabel;
bool addTraceHints;
public:
@ -225,7 +252,7 @@ class Compiler
void addCallSite(uint32 id, bool stub);
/* Emitting helpers. */
void saveReturnAddress();
RegisterID takeHWReturnAddress(Assembler &masm);
void restoreReturnAddress(Assembler &masm);
void restoreFrameRegs(Assembler &masm);
void emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused);
@ -246,6 +273,8 @@ class Compiler
void emitReturn();
void dispatchCall(VoidPtrStubUInt32 stub, uint32 argc);
void interruptCheckHelper();
void emitUncachedCall(uint32 argc, bool callingNew);
void emitPrimitiveTestForNew(uint32 argc);
void inlineCallHelper(uint32 argc, bool callingNew);
void jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index);
void jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index);
@ -353,6 +382,7 @@ class Compiler
STUB_CALL_TYPE(VoidPtrStubPC);
STUB_CALL_TYPE(VoidVpStub);
STUB_CALL_TYPE(VoidStubPC);
STUB_CALL_TYPE(BoolStubUInt32);
#undef STUB_CALL_TYPE
void prepareStubCall(Uses uses);

Просмотреть файл

@ -1065,6 +1065,34 @@ FrameState::allocForSameBinary(FrameEntry *fe, JSOp op, BinaryAlloc &alloc)
unpinReg(alloc.lhsType.reg());
}
// Ensure the entry has both its type and data remat'd in registers (unless
// one half is already known/in a register such that nothing need be loaded).
// Pins the half that is already in a register while loading the other, so
// the load cannot evict it.
// NOTE(review): |backing| is computed from copyOf() but never used below —
// every subsequent test reads |fe| directly. This looks like a latent bug
// (copies would not get their backing entry loaded); confirm against how
// tempRegForData/tempRegForType handle copies before relying on it.
void
FrameState::ensureFullRegs(FrameEntry *fe)
{
FrameEntry *backing = fe;
if (fe->isCopy())
backing = fe->copyOf();
if (!fe->type.inMemory()) {
/* Type is known or already in a register; make sure data is too. */
if (fe->data.inRegister())
return;
if (fe->type.inRegister())
pinReg(fe->type.reg());
if (fe->data.inMemory())
tempRegForData(fe);
if (fe->type.inRegister())
unpinReg(fe->type.reg());
} else if (!fe->data.inMemory()) {
/* Data is in a register; make sure type is too. */
if (fe->type.inRegister())
return;
if (fe->data.inRegister())
pinReg(fe->data.reg());
if (fe->type.inMemory())
tempRegForType(fe);
if (fe->data.inRegister())
unpinReg(fe->data.reg());
}
}
void
FrameState::allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAlloc &alloc,
bool needsResult)

Просмотреть файл

@ -421,6 +421,9 @@ class FrameState
void allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAlloc &alloc,
bool resultNeeded = true);
/* Ensures that an FE has both type and data remat'd in registers. */
void ensureFullRegs(FrameEntry *fe);
/*
* Similar to allocForBinary, except works when the LHS and RHS have the
* same backing FE. Only a reduced subset of BinaryAlloc is used:

Просмотреть файл

@ -0,0 +1,170 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
* May 28, 2008.
*
* The Initial Developer of the Original Code is
* Brendan Eich <brendan@mozilla.org>
*
* Contributor(s):
* David Anderson <danderson@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#if !defined jsjaeger_inl_frame_asm_h__ && defined JS_METHODJIT && defined JS_MONOIC
#define jsjaeger_inl_frame_asm_h__
#include "assembler/assembler/MacroAssembler.h"
#include "assembler/assembler/CodeLocation.h"
#include "methodjit/MethodJIT.h"
#include "CodeGenIncludes.h"
namespace js {
namespace mjit {
/*
* This is used for emitting code to inline callee-side frame creation.
* Specifically, it initializes the following members:
*
* savedPC
* argc
* flags
* scopeChain
* argv
* thisv
* down
*
* Once finished, JSFrameReg is advanced to be the new fp.
*/
class InlineFrameAssembler {
    typedef JSC::MacroAssembler::RegisterID RegisterID;
    typedef JSC::MacroAssembler::Address Address;
    typedef JSC::MacroAssembler::Imm32 Imm32;
    typedef JSC::MacroAssembler::ImmPtr ImmPtr;

    Assembler &masm;            // assembler the frame-creation code is emitted into
    bool isConstantThis;        // Is |thisv| constant?
    Value constantThis;         // If so, this is the value.
    uint32 frameDepth;          // script->nfixed + stack depth at caller call site
    uint32 argc;                // number of args being passed to the function
    RegisterID funObjReg;       // register containing the function object (callee)
    jsbytecode *pc;             // bytecode location at the caller call site
    uint32 flags;               // frame flags

  public:
    /*
     * Register state, so consumers of this class can restrict which registers
     * can and can't be clobbered.
     */
    Registers tempRegs;

    /*
     * Build from a call IC: the call-site parameters were recorded in the IC
     * at compile time; the pc comes from the current interpreter regs.
     */
    InlineFrameAssembler(Assembler &masm, JSContext *cx, ic::CallICInfo &ic, uint32 flags)
      : masm(masm), flags(flags)
    {
        isConstantThis = ic.isConstantThis;
        constantThis = ic.constantThis;
        frameDepth = ic.frameDepth;
        argc = ic.argc;
        funObjReg = ic.funObjReg;
        pc = cx->regs->pc;

        /* Reserve the registers the IC is already using. */
        tempRegs.takeReg(ic.funPtrReg);
        tempRegs.takeReg(funObjReg);
    }

    /*
     * Build from the compiler's per-call-site info; the pc is supplied
     * explicitly since there is no runtime context here.
     */
    InlineFrameAssembler(Assembler &masm, Compiler::CallGenInfo &gen, jsbytecode *pc, uint32 flags)
      : masm(masm), pc(pc), flags(flags)
    {
        isConstantThis = gen.isConstantThis;
        constantThis = gen.constantThis;
        frameDepth = gen.frameDepth;
        argc = gen.argc;
        funObjReg = gen.funObjReg;

        tempRegs.takeReg(funObjReg);
    }

    /*
     * Emit the inline frame-creation code. On exit, JSFrameReg points at the
     * new (partially initialized) frame; see the class comment for the list
     * of members that are filled in.
     */
    void assemble()
    {
        /* Helper for addressing fields of the new frame, which lives at
         * JSFrameReg + baseOffset (above the caller's fixed slots + stack). */
        struct AdjustedFrame {
            AdjustedFrame(uint32 baseOffset)
              : baseOffset(baseOffset)
            { }

            uint32 baseOffset;

            Address addrOf(uint32 offset) {
                return Address(JSFrameReg, baseOffset + offset);
            }
        };

        RegisterID t0 = tempRegs.takeAnyReg();

        /* Note: savedPC goes into the down frame. */
        masm.storePtr(ImmPtr(pc), Address(JSFrameReg, offsetof(JSStackFrame, savedPC)));

        AdjustedFrame adj(sizeof(JSStackFrame) + frameDepth * sizeof(Value));
        masm.store32(Imm32(argc), adj.addrOf(offsetof(JSStackFrame, argc)));
        masm.store32(Imm32(flags), adj.addrOf(offsetof(JSStackFrame, flags)));

        /* scopeChain = callee->parent. */
        masm.loadPtr(Address(funObjReg, offsetof(JSObject, parent)), t0);
        masm.storePtr(t0, adj.addrOf(JSStackFrame::offsetScopeChain()));

        /* argv points argc Values below the new frame's base. */
        masm.addPtr(Imm32(adj.baseOffset - (argc * sizeof(Value))), JSFrameReg, t0);
        masm.storePtr(t0, adj.addrOf(offsetof(JSStackFrame, argv)));

        Address targetThis = adj.addrOf(JSStackFrame::offsetThisValue());
        if (isConstantThis) {
            masm.storeValue(constantThis, targetThis);
        } else {
            /* |thisv| sits one Value below argv (t0 still holds argv). */
            Address thisvAddr = Address(t0, -int32(sizeof(Value) * 1));
#ifdef JS_NUNBOX32
            /* 32-bit boxing: copy payload and type tag separately. */
            RegisterID t1 = tempRegs.takeAnyReg();
            masm.loadPayload(thisvAddr, t1);
            masm.storePayload(t1, targetThis);
            masm.loadTypeTag(thisvAddr, t1);
            masm.storeTypeTag(t1, targetThis);
            tempRegs.putReg(t1);
#elif JS_PUNBOX64
            /* 64-bit boxing: the whole Value is a single pointer-sized word. */
            masm.loadPtr(thisvAddr, t0);
            masm.storePtr(t0, targetThis);
#endif
        }

        masm.storePtr(JSFrameReg, adj.addrOf(offsetof(JSStackFrame, down)));

        /* Adjust JSFrameReg. Callee fills in the rest. */
        masm.addPtr(Imm32(sizeof(JSStackFrame) + sizeof(Value) * frameDepth), JSFrameReg);

        tempRegs.putReg(t0);
    }
};
} /* namespace mjit */
} /* namespace js */
#endif /* jsjaeger_inl_frame_asm_h__ */

Просмотреть файл

@ -181,14 +181,243 @@ top:
return NULL;
}
static inline void
FixVMFrame(VMFrame &f, JSStackFrame *fp)
static bool
InlineReturn(VMFrame &f, JSBool ok)
{
JS_ASSERT(f.fp() == fp->down);
f.fp() = fp;
JSContext *cx = f.cx;
JSStackFrame *fp = f.regs.fp;
JS_ASSERT(f.fp() != f.entryFp);
JS_ASSERT(!fp->hasBlockChain());
JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->getScopeChain(), 0));
// Marker for debug support.
if (JS_UNLIKELY(fp->hasHookData())) {
JSInterpreterHook hook;
JSBool status;
hook = cx->debugHooks->callHook;
if (hook) {
/*
* Do not pass &ok directly as exposing the address inhibits
* optimizations and uninitialised warnings.
*/
status = ok;
hook(cx, fp, JS_FALSE, &status, fp->getHookData());
ok = (status == JS_TRUE);
// CHECK_INTERRUPT_HANDLER();
}
}
fp->putActivationObjects(cx);
/* :TODO: version stuff */
if (fp->flags & JSFRAME_CONSTRUCTING && fp->getReturnValue().isPrimitive())
fp->setReturnValue(fp->getThisValue());
Value *newsp = fp->argv - 1;
cx->stack().popInlineFrame(cx, fp, fp->down);
cx->regs->sp = newsp;
cx->regs->sp[-1] = fp->getReturnValue();
JS_ASSERT(cx->regs->pc != JSStackFrame::sInvalidPC);
return ok;
}
static inline bool
/*
 * Create the object for a scripted |new|: fetch callee.prototype and allocate
 * a fresh object with it as proto, leaving the result in the |this| slot.
 * vp[0] is the callee (must be a function object), vp[1] is the |this| slot.
 */
JSBool JS_FASTCALL
stubs::NewObject(VMFrame &f, uint32 argc)
{
    JSContext *cx = f.cx;
    Value *vp = f.regs.sp - (argc + 2);     // vp[0] = callee, vp[1] = |this|, vp[2..] = args

    JSObject *funobj = &vp[0].toObject();
    JS_ASSERT(funobj->isFunction());

    /* Look up callee.prototype; the result lands in vp[1]. */
    jsid id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
    if (!funobj->getProperty(cx, id, &vp[1]))
        THROWV(JS_FALSE);

    /* A non-object .prototype (e.g. a primitive) means use the default proto. */
    JSObject *proto = vp[1].isObject() ? &vp[1].toObject() : NULL;
    JSObject *obj = NewNonFunction<WithProto::Class>(cx, &js_ObjectClass, proto, funobj->getParent());
    if (!obj)
        THROWV(JS_FALSE);

    /* Leave the new object in the |this| slot for the constructor call. */
    vp[1].setObject(*obj);

    return JS_TRUE;
}
/*
 * Generic (non-IC) call path: invoke the callee through the interpreter's
 * Invoke. vp[0] = callee, vp[1] = |this|, vp[2..] = the argc arguments.
 */
void JS_FASTCALL
stubs::SlowCall(VMFrame &f, uint32 argc)
{
    Value *vp = f.regs.sp - (argc + 2);

    if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0))
        THROW();
}
/*
 * Generic (non-IC) |new| path: run the full InvokeConstructor machinery on
 * the args already laid out on the stack (vp[0] = callee, vp[1] = |this|).
 */
void JS_FASTCALL
stubs::SlowNew(VMFrame &f, uint32 argc)
{
    JSContext *cx = f.cx;
    Value *vp = f.regs.sp - (argc + 2);

    if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc)))
        THROW();
}
/*
 * Pop a half-pushed callee frame, restoring the VMFrame registers (pc, sp,
 * fp) to the caller's frame. Used when a speculatively created frame must be
 * abandoned (arity fixup, stack-quota failure, empty script, etc.).
 */
static inline void
RemovePartialFrame(VMFrame &f)
{
    /* Unwind the half-pushed frame. */
    f.regs.pc = f.fp()->down->savedPC;
    f.regs.sp = f.fp()->argv + f.fp()->argc;
#ifdef DEBUG
    /* Poison the caller's savedPC so stale reads trip debug asserts. */
    f.fp()->down->savedPC = JSStackFrame::sInvalidPC;
#endif
    f.regs.fp = f.fp()->down;
}
/*
 * Ensure the stack has room for the new frame's slots. On failure, the
 * half-pushed frame is unwound, over-recursion is reported, and we throw.
 */
void JS_FASTCALL
stubs::CheckStackQuota(VMFrame &f)
{
    uint32 nslots = f.fp()->getScript()->nslots;
    if (JS_UNLIKELY(!f.ensureSpace(0, nslots))) {
        RemovePartialFrame(f);
        js_ReportOverRecursed(f.cx);
        THROW();
    }
}
/*
 * Fix up a call where argc < fun->nargs: relocate the half-pushed frame so
 * that the missing formals can be filled with |undefined| beneath it.
 * Returns the relocated frame, or NULL on over-recursion (after throwing).
 */
void * JS_FASTCALL
stubs::CheckArity(VMFrame &f)
{
    JSContext *cx = f.cx;
    JSStackFrame *fp = f.fp();
    uint32 argc = fp->argc;
    JSFunction *fun = fp->getFunction();

    JS_ASSERT(argc < fun->nargs);

    /*
     * Grossssss! *move* the stack frame. If this ends up being perf-critical,
     * we can figure out how to spot-optimize it. As the frame shrinks it will
     * matter less.
     */
    /* Save everything the old frame carried before unwinding it. */
    uint32 flags = fp->flags;
    JSObject *scopeChain = fp->getScopeChain();
    Value *argv = fp->argv;
    JSStackFrame *down = fp->down;
    void *ncode = fp->ncode;

    /* Pop the inline frame. */
    RemovePartialFrame(f);

    uint32 missing = fun->nargs - argc;

    /* Include an extra stack frame for callees. */
    if (!f.ensureSpace(missing, fun->u.i.script->nslots + VALUES_PER_STACK_FRAME)) {
        js_ReportOverRecursed(cx);
        THROWV(NULL);
    }

#ifdef DEBUG
    down->savedPC = f.regs.pc;
#endif

    /* Pad the missing formals with undefined, then rebuild the frame above. */
    SetValueRangeToUndefined(f.regs.sp, missing);
    f.regs.sp += missing;
    JSStackFrame *newfp = (JSStackFrame *)f.regs.sp;
    newfp->argc = argc;
    newfp->setFunction(fun);
    newfp->flags = flags;
    newfp->argv = argv;
    newfp->setScopeChain(scopeChain);
    newfp->down = down;
    newfp->ncode = ncode;
    newfp->setThisValue(argv[-1]);

    return newfp;
}
/*
 * Finish pushing a callee frame and JIT-compile its script if possible.
 * Returns the script's JIT entry point on success; NULL when there is
 * nothing to run (empty script, or the script was interpreted instead);
 * throws (returning NULL) on error.
 */
void * JS_FASTCALL
stubs::CompileFunction(VMFrame &f)
{
    /*
     * We have a partially constructed frame. That's not really good enough to
     * compile though because we could throw, so get a full, adjusted frame.
     */
    JSContext *cx = f.cx;
    JSStackFrame *fp = f.fp();
    uint32 argc = fp->argc;

    JSObject *obj = &fp->argv[-2].toObject();   // argv[-2] is the callee slot
    JSFunction *fun = obj->getFunctionPrivate();
    JSScript *script = fun->u.i.script;

    bool callingNew = !!(fp->flags & JSFRAME_CONSTRUCTING);

    /* Empty script does nothing. */
    if (script->isEmpty()) {
        RemovePartialFrame(f);
        /* For |new| the result is the already-created |this|; else undefined. */
        if (callingNew)
            f.regs.sp[argc - 2] = f.regs.sp[argc - 1];
        else
            f.regs.sp[argc - 2].setUndefined();
        return NULL;
    }

    /* CheckArity expects fun to be set. */
    fp->setFunction(fun);

    if (argc < fun->nargs) {
        fp = (JSStackFrame *)CheckArity(f);
        if (!fp)
            return NULL;
    }

    /* Fill in the frame members the fast path left uninitialized. */
    fp->setCallObj(NULL);
    fp->setArgsObj(NULL);
    fp->setBlockChain(NULL);
    fp->setHookData(NULL);
    fp->setAnnotation(NULL);
    fp->setCallerVersion(fp->down->getCallerVersion());
    fp->setScript(script);
    fp->clearReturnValue();

#ifdef DEBUG
    fp->savedPC = JSStackFrame::sInvalidPC;
#endif

    SetValueRangeToUndefined(f.regs.sp, script->nfixed);

    f.regs.fp = fp;

    if (fun->isHeavyweight() && !js_GetCallObject(cx, fp))
        THROWV(NULL);

    CompileStatus status = CanMethodJIT(cx, script, fun, fp->getScopeChain());
    if (status == Compile_Okay)
        return script->jit->invoke;

    /* Function did not compile... interpret it. */
    JSBool ok = Interpret(cx, fp);
    InlineReturn(f, ok);

    if (!ok)
        THROWV(NULL);

    return NULL;
}
/* Preserved for when calls need to be slow (debug mode, no ICs) */
static bool
CreateFrame(VMFrame &f, uint32 flags, uint32 argc)
{
JSContext *cx = f.cx;
@ -257,19 +486,18 @@ CreateFrame(VMFrame &f, uint32 flags, uint32 argc)
if (JSInterpreterHook hook = cx->debugHooks->callHook) {
newfp->setHookData(hook(cx, fp, JS_TRUE, 0,
cx->debugHooks->callHookData));
// CHECK_INTERRUPT_HANDLER();
} else {
newfp->setHookData(NULL);
}
stack.pushInlineFrame(cx, fp, cx->regs->pc, newfp);
FixVMFrame(f, newfp);
f.regs.fp = newfp;
return true;
}
static inline bool
InlineCall(VMFrame &f, uint32 flags, void **pret, uint32 argc)
UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, uint32 argc)
{
if (!CreateFrame(f, flags, argc))
return false;
@ -301,81 +529,55 @@ InlineCall(VMFrame &f, uint32 flags, void **pret, uint32 argc)
return ok;
}
static bool
InlineReturn(VMFrame &f, JSBool ok)
void * JS_FASTCALL
stubs::UncachedNew(VMFrame &f, uint32 argc)
{
JSContext *cx = f.cx;
JSStackFrame *fp = cx->fp();
JS_ASSERT(f.fp() == cx->fp());
JS_ASSERT(f.fp() != f.entryFp);
Value *vp = f.regs.sp - (argc + 2);
JS_ASSERT(!fp->hasBlockChain());
JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->getScopeChain(), 0));
JSObject *obj;
if (IsFunctionObject(*vp, &obj)) {
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, obj);
// Marker for debug support.
if (JS_UNLIKELY(fp->hasHookData())) {
JSInterpreterHook hook;
JSBool status;
if (fun->isInterpreted()) {
JSScript *script = fun->u.i.script;
if (!stubs::NewObject(f, argc))
THROWV(NULL);
hook = cx->debugHooks->callHook;
if (hook) {
/*
* Do not pass &ok directly as exposing the address inhibits
* optimizations and uninitialised warnings.
*/
status = ok;
hook(cx, fp, JS_FALSE, &status, fp->getHookData());
ok = (status == JS_TRUE);
// CHECK_INTERRUPT_HANDLER();
if (script->isEmpty()) {
vp[0] = vp[1];
return NULL;
}
void *ret;
if (!UncachedInlineCall(f, JSFRAME_CONSTRUCTING, &ret, argc))
THROWV(NULL);
return ret;
}
if (fun->isFastConstructor()) {
vp[1].setMagic(JS_FAST_CONSTRUCTOR);
FastNative fn = (FastNative)fun->u.n.native;
if (!fn(cx, argc, vp))
THROWV(NULL);
JS_ASSERT(!vp->isPrimitive());
return NULL;
}
}
fp->putActivationObjects(cx);
if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc)))
THROWV(NULL);
/* :TODO: version stuff */
if (fp->flags & JSFRAME_CONSTRUCTING && fp->getReturnValue().isPrimitive())
fp->setReturnValue(fp->getThisValue());
Value *newsp = fp->argv - 1;
cx->stack().popInlineFrame(cx, fp, fp->down);
f.fp() = cx->fp();
cx->regs->sp = newsp;
cx->regs->sp[-1] = fp->getReturnValue();
return ok;
}
static inline JSObject *
InlineConstruct(VMFrame &f, uint32 argc)
{
JSContext *cx = f.cx;
Value *vp = f.regs.sp - (argc + 2);
JSObject *funobj = &vp[0].toObject();
JS_ASSERT(funobj->isFunction());
jsid id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
if (!funobj->getProperty(cx, id, &vp[1]))
return NULL;
JSObject *proto = vp[1].isObject() ? &vp[1].toObject() : NULL;
return NewNonFunction<WithProto::Class>(cx, &js_ObjectClass, proto, funobj->getParent());
return NULL;
}
void * JS_FASTCALL
stubs::SlowCall(VMFrame &f, uint32 argc)
stubs::UncachedCall(VMFrame &f, uint32 argc)
{
JSContext *cx = f.cx;
#ifdef JS_MONOIC
ic::MICInfo &mic = f.fp()->getScript()->mics[argc];
argc = mic.argc;
#endif
Value *vp = f.regs.sp - (argc + 2);
JSObject *obj;
@ -391,19 +593,13 @@ stubs::SlowCall(VMFrame &f, uint32 argc)
return NULL;
}
if (!InlineCall(f, 0, &ret, argc))
if (!UncachedInlineCall(f, 0, &ret, argc))
THROWV(NULL);
return ret;
}
if (fun->isFastNative()) {
#ifdef JS_MONOIC
#ifdef JS_CPU_X86
ic::CallFastNative(cx, f.fp()->getScript(), mic, fun, false);
#endif
#endif
FastNative fn = (FastNative)fun->u.n.native;
if (!fn(cx, argc, vp))
THROWV(NULL);
@ -417,65 +613,6 @@ stubs::SlowCall(VMFrame &f, uint32 argc)
return NULL;
}
void * JS_FASTCALL
stubs::SlowNew(VMFrame &f, uint32 argc)
{
JSContext *cx = f.cx;
#ifdef JS_MONOIC
ic::MICInfo &mic = f.fp()->getScript()->mics[argc];
argc = mic.argc;
#endif
Value *vp = f.regs.sp - (argc + 2);
JSObject *obj;
if (IsFunctionObject(*vp, &obj)) {
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, obj);
if (fun->isInterpreted()) {
JSScript *script = fun->u.i.script;
JSObject *obj2 = InlineConstruct(f, argc);
if (!obj2)
THROWV(NULL);
if (script->isEmpty()) {
vp[0].setObject(*obj2);
return NULL;
}
void *ret;
vp[1].setObject(*obj2);
if (!InlineCall(f, JSFRAME_CONSTRUCTING, &ret, argc))
THROWV(NULL);
return ret;
}
if (fun->isFastConstructor()) {
#ifdef JS_MONOIC
#ifdef JS_CPU_X86
ic::CallFastNative(cx, f.fp()->getScript(), mic, fun, true);
#endif
#endif
vp[1].setMagic(JS_FAST_CONSTRUCTOR);
FastNative fn = (FastNative)fun->u.n.native;
if (!fn(cx, argc, vp))
THROWV(NULL);
JS_ASSERT(!vp->isPrimitive());
return NULL;
}
}
if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc)))
THROWV(NULL);
return NULL;
}
static inline bool
CreateLightFrame(VMFrame &f, uint32 flags, uint32 argc)
{
@ -538,40 +675,10 @@ CreateLightFrame(VMFrame &f, uint32 flags, uint32 argc)
#endif
newfp->down = fp;
fp->savedPC = f.regs.pc;
FixVMFrame(f, newfp);
return true;
}
/*
* stubs::Call is guaranteed to be called on a scripted call with JIT'd code.
*/
void * JS_FASTCALL
stubs::Call(VMFrame &f, uint32 argc)
{
if (!CreateLightFrame(f, 0, argc))
THROWV(NULL);
return f.fp()->getScript()->ncode;
}
/*
* stubs::New is guaranteed to be called on a scripted call with JIT'd code.
*/
void * JS_FASTCALL
stubs::New(VMFrame &f, uint32 argc)
{
JSObject *obj = InlineConstruct(f, argc);
if (!obj)
THROWV(NULL);
f.regs.sp[-int(argc + 1)].setObject(*obj);
if (!CreateLightFrame(f, JSFRAME_CONSTRUCTING, argc))
THROWV(NULL);
return f.fp()->getScript()->ncode;
}
void JS_FASTCALL
stubs::PutCallObject(VMFrame &f)
{

Просмотреть файл

@ -66,11 +66,13 @@ struct Registers {
# else
static const RegisterID ArgReg0 = JSC::X86Registers::edi;
static const RegisterID ArgReg1 = JSC::X86Registers::esi;
static const RegisterID ArgReg2 = JSC::X86Registers::edx;
# endif
#elif JS_CPU_ARM
static const RegisterID ReturnReg = JSC::ARMRegisters::r0;
static const RegisterID ArgReg0 = JSC::ARMRegisters::r0;
static const RegisterID ArgReg1 = JSC::ARMRegisters::r1;
static const RegisterID ArgReg2 = JSC::ARMRegisters::r2;
#endif
static const RegisterID StackPointer = JSC::MacroAssembler::stackPointerRegister;

Просмотреть файл

@ -751,7 +751,19 @@ EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, void *safePoint)
JSStackFrame *checkFp = fp;
#endif
Value *stackLimit = cx->stack().makeStackLimit(reinterpret_cast<Value*>(fp));
Value *fpAsVp = reinterpret_cast<Value*>(fp);
StackSpace &stack = cx->stack();
Value *stackLimit = stack.makeStackLimit(fpAsVp);
/*
* We ensure that there is always enough space to speculatively create a
* stack frame. By passing nslots = 0, we ensure only sizeof(JSStackFrame).
*/
if (fpAsVp + VALUES_PER_STACK_FRAME >= stackLimit &&
!stack.ensureSpace(cx, fpAsVp, cx->regs->sp, stackLimit, 0)) {
js_ReportOutOfScriptQuota(cx);
return false;
}
JSAutoResolveFlags rf(cx, JSRESOLVE_INFER);
JSBool ok = JaegerTrampoline(cx, fp, code, stackLimit, safePoint);
@ -811,6 +823,7 @@ mjit::ReleaseScriptCode(JSContext *cx, JSScript *script)
#endif
script->jit->execPool->release();
script->jit->execPool = NULL;
// Releasing the execPool takes care of releasing the code.
script->ncode = NULL;
@ -820,7 +833,14 @@ mjit::ReleaseScriptCode(JSContext *cx, JSScript *script)
Destroy(script->pics[i].execPools);
}
#endif
#if defined JS_MONOIC
for (uint32 i = 0; i < script->jit->nCallICs; i++)
script->callICs[i].releasePools();
#endif
cx->free(script->jit);
// The recompiler may call ReleaseScriptCode, in which case it
// will get called again when the script is destroyed, so we
// must protect against calling ReleaseScriptCode twice.
@ -828,6 +848,27 @@ mjit::ReleaseScriptCode(JSContext *cx, JSScript *script)
}
}
/*
 * GC tracing hook for a method-JIT'd script: mark the objects and values
 * cached inside its call ICs so the collector keeps them alive.
 */
void
mjit::TraceScriptCache(JSTracer *trc, JSScript *script)
{
#ifdef JS_MONOIC
    uint32 numCallICs = script->jit->nCallICs;
    for (uint32 i = 0; i < numCallICs; i++) {
        ic::CallICInfo &ic = script->callICs[i];
        /* The object guarded by the fast path (scripted callee). */
        if (ic.fastGuardedObject) {
            JS_SET_TRACING_NAME(trc, "callIC fun");
            Mark(trc, ic.fastGuardedObject, JSTRACE_OBJECT);
        }
        /* The object guarded by the native-call stub, if one was generated. */
        if (ic.fastGuardedNative) {
            JS_SET_TRACING_NAME(trc, "callIC native");
            Mark(trc, ic.fastGuardedNative, JSTRACE_OBJECT);
        }
        /* A constant |this| baked into the IC must also be marked. */
        if (ic.isConstantThis)
            MarkValue(trc, ic.constantThis, "callIC this");
    }
#endif
}
#ifdef JS_METHODJIT_PROFILE_STUBS
void JS_FASTCALL
mjit::ProfileStubCall(VMFrame &f)
@ -841,6 +882,6 @@ bool
VMFrame::slowEnsureSpace(uint32 nslots)
{
return cx->stack().ensureSpace(cx, reinterpret_cast<Value*>(entryFp), regs.sp,
stackLimit, nslots);
stackLimit, nslots + VALUES_PER_STACK_FRAME);
}

Просмотреть файл

@ -167,6 +167,7 @@ typedef JSString * (JS_FASTCALL *JSStrStub)(VMFrame &);
typedef JSString * (JS_FASTCALL *JSStrStubUInt32)(VMFrame &, uint32);
typedef void (JS_FASTCALL *VoidStubJSObj)(VMFrame &, JSObject *);
typedef void (JS_FASTCALL *VoidStubPC)(VMFrame &, jsbytecode *);
typedef JSBool (JS_FASTCALL *BoolStubUInt32)(VMFrame &f, uint32);
#define JS_UNJITTABLE_METHOD (reinterpret_cast<void*>(1))
@ -180,11 +181,13 @@ struct JITScript {
uint32 nCallSites;
#ifdef JS_MONOIC
uint32 nMICs; /* number of MonoICs */
uint32 nCallICs; /* number of call ICs */
#endif
#ifdef JS_POLYIC
uint32 nPICs; /* number of PolyICs */
#endif
void *invoke; /* invoke address */
void *arityCheck; /* arity check address */
uint32 *escaping; /* list of escaping slots */
uint32 nescaping; /* number of escaping slots */
};
@ -208,6 +211,9 @@ ProfileStubCall(VMFrame &f);
CompileStatus
TryCompile(JSContext *cx, JSScript *script, JSFunction *fun, JSObject *scopeChain);
void
TraceScriptCache(JSTracer *trc, JSScript *script);
void
ReleaseScriptCode(JSContext *cx, JSScript *script);

Просмотреть файл

@ -41,18 +41,30 @@
#include "jsnum.h"
#include "MonoIC.h"
#include "StubCalls.h"
#include "StubCalls-inl.h"
#include "assembler/assembler/LinkBuffer.h"
#include "assembler/assembler/RepatchBuffer.h"
#include "assembler/assembler/MacroAssembler.h"
#include "assembler/assembler/CodeLocation.h"
#include "CodeGenIncludes.h"
#include "methodjit/Compiler.h"
#include "InlineFrameAssembler.h"
#include "jsobj.h"
#include "jsobjinlines.h"
#include "jsscopeinlines.h"
#include "jsscriptinlines.h"
using namespace js;
using namespace js::mjit;
using namespace js::mjit::ic;
typedef JSC::MacroAssembler::RegisterID RegisterID;
typedef JSC::MacroAssembler::Address Address;
typedef JSC::MacroAssembler::Jump Jump;
typedef JSC::MacroAssembler::Imm32 Imm32;
typedef JSC::MacroAssembler::ImmPtr ImmPtr;
typedef JSC::MacroAssembler::Call Call;
#if defined JS_MONOIC
static void
@ -202,131 +214,508 @@ ic::SetGlobalName(VMFrame &f, uint32 index)
GetStubForSetGlobalName(f)(f, atom);
}
/*
 * Slow-path trampoline taken when a call IC misses: recover the call site's
 * argc from the IC and forward to the generic SlowCall stub.
 */
static void * JS_FASTCALL
SlowCallFromIC(VMFrame &f, uint32 index)
{
    CallICInfo &callIC = f.fp()->getScript()->callICs[index];
    stubs::SlowCall(f, callIC.argc);
    return NULL;
}
/*
 * Slow-path trampoline taken when a |new| call IC misses: recover the call
 * site's argc from the IC and forward to the generic SlowNew stub.
 */
static void * JS_FASTCALL
SlowNewFromIC(VMFrame &f, uint32 index)
{
    JSScript *oldscript = f.fp()->getScript();
    CallICInfo &ic = oldscript->callICs[index];
    stubs::SlowNew(f, ic.argc);
    return NULL;
}
/*
* Calls have an inline path and an out-of-line path. The inline path is used
* in the fastest case: the method has JIT'd code, and |argc == nargs|.
*
* The inline path and OOL path are separated by a guard on the identity of
* the callee object. This guard starts as NULL and always fails on the first
* hit. On the OOL path, the callee is verified to be both a function and a
* scripted function. If these conditions hold, |ic::Call| is invoked.
*
* |ic::Call| first ensures that the callee has JIT code. If it doesn't, the
* call to |ic::Call| is patched to a slow path. If it does have JIT'd code,
* the following cases can occur:
*
* 1) args != nargs: The call to |ic::Call| is patched with a dynamically
* generated stub. This stub inlines a path that looks like:
* ----
* push frame
* if (callee is not compiled) {
* Compile(callee);
* }
* call callee->arityLabel
*
* The arity label is a special entry point for correcting frames for
* arity mismatches.
*
* 2) args == nargs, and the inline call site was not patched yet.
* The guard dividing the two paths is patched to guard on the given
* function object identity, and the proceeding call is patched to
* directly call the JIT code.
*
* 3) args == nargs, and the inline call site was patched already.
* A small stub is created which extends the original guard to also
* guard on the JSFunction lying underneath the function object.
*
* If the OOL path does not have a scripted function, but does have a
* scripted native, then a small stub is generated which inlines the native
* invocation.
*/
class CallCompiler
{
VMFrame &f;
JSContext *cx;
CallICInfo &ic;
Value *vp;
bool callingNew;
public:
    /*
     * Bind the compiler to the current VM frame and the IC being serviced.
     * vp is computed from sp: vp[0] = callee, vp[1] = |this|, vp[2..] = args.
     */
    CallCompiler(VMFrame &f, CallICInfo &ic, bool callingNew)
      : f(f), cx(f.cx), ic(ic), vp(f.regs.sp - (ic.argc + 2)), callingNew(callingNew)
    {
    }
    /*
     * Allocate an executable pool of at least |size| bytes for a generated
     * stub, recording it in the IC's pool slot |index| so it can be released
     * with the IC. Reports OOM and returns NULL on failure.
     */
    JSC::ExecutablePool *poolForSize(size_t size, CallICInfo::PoolIndex index)
    {
        mjit::ThreadData *jm = &JS_METHODJIT_DATA(cx);
        JSC::ExecutablePool *ep = jm->execPool->poolForSize(size);
        if (!ep) {
            js_ReportOutOfMemory(f.cx);
            return NULL;
        }
        /* Each slot holds at most one pool per IC. */
        JS_ASSERT(!ic.pools[index]);
        ic.pools[index] = ep;
        return ep;
    }
inline void pushFrameFromCaller(JSObject *scopeChain, uint32 flags)
{
JSStackFrame *fp = (JSStackFrame *)f.regs.sp;
fp->argc = ic.argc;
fp->argv = vp + 2;
fp->flags = flags;
fp->setScopeChain(scopeChain);
fp->setThisValue(vp[1]);
fp->down = f.fp();
fp->savedPC = f.regs.pc;
fp->down->savedPC = f.regs.pc;
#ifdef DEBUG
fp->savedPC = JSStackFrame::sInvalidPC;
#endif
f.regs.fp = fp;
}
    /*
     * Generate the out-of-line stub used when the callee is scripted but the
     * fast inline path cannot be taken (e.g. argc != nargs or the callee is
     * not yet compiled). Returns false on OOM.
     */
    bool generateFullCallStub(JSScript *script, uint32 flags)
    {
        /*
         * Create a stub that works with arity mismatches. Like the fast-path,
         * this allocates a frame on the caller side, but also performs extra
         * checks for compilability. Perhaps this should be a separate, shared
         * trampoline, but for now we generate it dynamically.
         */
        Assembler masm;
        InlineFrameAssembler inlFrame(masm, cx, ic, flags);
        RegisterID t0 = inlFrame.tempRegs.takeAnyReg();

        /* Generate the inline frame creation. */
        inlFrame.assemble();

        /* funPtrReg is still valid. Check if a compilation is needed. */
        Address scriptAddr(ic.funPtrReg, offsetof(JSFunction, u) +
                           offsetof(JSFunction::U::Scripted, script));
        masm.loadPtr(scriptAddr, t0);

        /*
         * Test if script->nmap is NULL - same as checking ncode, but faster
         * here since ncode has two failure modes and we need to load out of
         * nmap anyway.
         */
        masm.loadPtr(Address(t0, offsetof(JSScript, jit)), t0);
        Jump hasCode = masm.branchTestPtr(Assembler::NonZero, t0, t0);

        /* Try and compile. On success we get back the nmap pointer. */
        masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
        JSC::MacroAssembler::Call tryCompile =
            masm.stubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction),
                          script->code, ic.frameDepth);

        /* NULL return from CompileFunction means "nothing to run" or error. */
        Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                              Registers::ReturnReg);

        masm.call(Registers::ReturnReg);
        Jump done = masm.jump();

        hasCode.linkTo(masm.label(), &masm);

        /* Get nmap[ARITY], set argc, call. */
        masm.move(Imm32(ic.argc), JSParamReg_Argc);
        masm.loadPtr(Address(t0, offsetof(JITScript, arityCheck)), t0);
        masm.call(t0);

        /* Rejoin with the fast path. */
        Jump rejoin = masm.jump();

        /* Worst case - function didn't compile. */
        notCompiled.linkTo(masm.label(), &masm);
        masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
        notCompiled = masm.jump();

        JSC::ExecutablePool *ep = poolForSize(masm.size(), CallICInfo::Pool_ScriptStub);
        if (!ep)
            return false;

        /* Link the stub's exits back into the IC's recorded labels. */
        JSC::LinkBuffer buffer(&masm, ep);
        buffer.link(rejoin, ic.funGuard.labelAtOffset(ic.joinPointOffset));
        buffer.link(done, ic.funGuard.labelAtOffset(ic.joinPointOffset));
        buffer.link(notCompiled, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
        buffer.link(tryCompile,
                    JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction)));
        JSC::CodeLocationLabel cs = buffer.finalizeCodeAddendum();

        JaegerSpew(JSpew_PICs, "generated CALL stub %p (%d bytes)\n", cs.executableAddress(),
                   masm.size());

        /* Redirect the OOL jump into the freshly generated stub. */
        JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
        uint8 *start = (uint8 *)oolJump.executableAddress();
        JSC::RepatchBuffer repatch(start - 32, 64);
        repatch.relink(oolJump, cs);

        return true;
    }
void patchInlinePath(JSScript *script, JSObject *obj)
{
/* Very fast path. */
uint8 *start = (uint8 *)ic.funGuard.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
ic.fastGuardedObject = obj;
repatch.repatch(ic.funGuard, obj);
repatch.relink(ic.funGuard.callAtOffset(ic.hotCallOffset),
JSC::FunctionPtr(script->ncode));
JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %)\n", start, ic.fastGuardedObject);
}
    /*
     * Generate the stub used once the inline path is already patched for a
     * different object: guard on the underlying JSFunction instead of object
     * identity, so closures sharing one function still hit the fast path.
     * Returns false on OOM.
     */
    bool generateStubForClosures(JSObject *obj)
    {
        /* Slightly less fast path - guard on fun->getFunctionPrivate() instead. */
        Assembler masm;

        Registers tempRegs;
        tempRegs.takeReg(ic.funObjReg);

        RegisterID t0 = tempRegs.takeAnyReg();

        /* Guard that it's actually a function object. */
        Jump claspGuard = masm.branchPtr(Assembler::NotEqual,
                                         Address(ic.funObjReg, offsetof(JSObject, clasp)),
                                         ImmPtr(&js_FunctionClass));

        /* Guard that it's the same function. */
        JSFunction *fun = obj->getFunctionPrivate();
        masm.loadFunctionPrivate(ic.funObjReg, t0);
        Jump funGuard = masm.branchPtr(Assembler::NotEqual, t0, ImmPtr(fun));
        Jump done = masm.jump();

        JSC::ExecutablePool *ep = poolForSize(masm.size(), CallICInfo::Pool_ClosureStub);
        if (!ep)
            return false;

        /* Failed guards fall back to the slow path; success rejoins the hot path. */
        JSC::LinkBuffer buffer(&masm, ep);
        buffer.link(claspGuard, ic.slowPathStart);
        buffer.link(funGuard, ic.slowPathStart);
        buffer.link(done, ic.funGuard.labelAtOffset(ic.hotPathOffset));
        JSC::CodeLocationLabel cs = buffer.finalizeCodeAddendum();

        JaegerSpew(JSpew_PICs, "generated CALL closure stub %p (%d bytes)\n",
                   cs.executableAddress(), masm.size());

        /* Splice the new stub in front of the existing guard chain. */
        uint8 *start = (uint8 *)ic.funJump.executableAddress();
        JSC::RepatchBuffer repatch(start - 32, 64);
        repatch.relink(ic.funJump, cs);

        /* Retarget funJump for future ICs. */
        ic.funJump = buffer.locationOf(funGuard);

        ic.hasJsFunCheck = true;

        return true;
    }
bool generateNativeStub()
{
Value *vp = f.regs.sp - (ic.argc + 2);
JSObject *obj;
if (!IsFunctionObject(*vp, &obj))
return false;
JSFunction *fun = obj->getFunctionPrivate();
if ((!callingNew && !fun->isFastNative()) || (callingNew && !fun->isFastConstructor()))
return false;
if (callingNew)
vp[1].setMagic(JS_FAST_CONSTRUCTOR);
FastNative fn = (FastNative)fun->u.n.native;
if (!fn(cx, ic.argc, vp))
THROWV(true);
/* Right now, take slow-path for IC misses. */
if (ic.fastGuardedNative)
return true;
/* Native MIC needs to warm up first. */
if (!ic.hit) {
ic.hit = true;
return true;
}
/* Generate fast-path for calling this native. */
Assembler masm;
/* Guard on the function object identity, for now. */
Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(obj));
Registers tempRegs;
#ifndef JS_CPU_X86
tempRegs.takeReg(Registers::ArgReg0);
tempRegs.takeReg(Registers::ArgReg1);
tempRegs.takeReg(Registers::ArgReg2);
#endif
RegisterID t0 = tempRegs.takeAnyReg();
/* Store pc. */
masm.storePtr(ImmPtr(cx->regs->pc),
FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, pc)));
/* Store sp. */
uint32 spOffset = sizeof(JSStackFrame) + ic.frameDepth * sizeof(Value);
masm.addPtr(Imm32(spOffset), JSFrameReg, t0);
masm.storePtr(t0, FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, sp)));
/* Grab cx early on to avoid stack mucking on x86. */
#ifdef JS_CPU_X86
RegisterID cxReg = tempRegs.takeAnyReg();
#else
RegisterID cxReg = Registers::ArgReg0;
#endif
masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
ic::NativeCallCompiler::NativeCallCompiler()
: jumps(SystemAllocPolicy())
{}
void
ic::NativeCallCompiler::finish(JSScript *script, uint8 *start, uint8 *fallthrough)
{
/* Add a jump to fallthrough. */
Jump fallJump = masm.jump();
addLink(fallJump, fallthrough);
uint8 *result = (uint8 *)script->jit->execPool->alloc(masm.size());
JSC::ExecutableAllocator::makeWritable(result, masm.size());
masm.executableCopy(result);
/* Overwrite start with a jump to the call buffer. */
BaseAssembler::insertJump(start, result);
/* Patch all calls with the correct target. */
masm.finalize(result);
/* Patch all jumps with the correct target. */
JSC::LinkBuffer linkmasm(result, masm.size());
for (size_t i = 0; i < jumps.length(); i++)
linkmasm.link(jumps[i].from, JSC::CodeLocationLabel(jumps[i].to));
}
void
ic::CallFastNative(JSContext *cx, JSScript *script, MICInfo &mic, JSFunction *fun, bool isNew)
{
if (mic.u.generated) {
/* Already generated a MIC at this site, don't make another one. */
return;
}
mic.u.generated = true;
JS_ASSERT(fun->isFastNative());
if (isNew)
JS_ASSERT(fun->isFastConstructor());
FastNative fn = (FastNative)fun->u.n.native;
typedef JSC::MacroAssembler::ImmPtr ImmPtr;
typedef JSC::MacroAssembler::Imm32 Imm32;
typedef JSC::MacroAssembler::Address Address;
typedef JSC::MacroAssembler::Jump Jump;
uint8 *start = (uint8*) mic.knownObject.executableAddress();
uint8 *stubEntry = (uint8*) mic.stubEntry.executableAddress();
uint8 *fallthrough = (uint8*) mic.callEnd.executableAddress();
NativeCallCompiler ncc;
Jump differentFunction = ncc.masm.branchPtr(Assembler::NotEqual, mic.dataReg, ImmPtr(fun));
ncc.addLink(differentFunction, stubEntry);
/* Manually construct the X86 stack. TODO: get a more portable way of doing this. */
/* Register to use for filling in the fast native's arguments. */
JSC::MacroAssembler::RegisterID temp = mic.dataReg;
/* Store the pc, which is the same as for the current slow call. */
ncc.masm.storePtr(ImmPtr(cx->regs->pc),
FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, pc)));
/* Store sp. */
uint32 spOffset = sizeof(JSStackFrame) + (mic.frameDepth + mic.argc + 2) * sizeof(jsval);
ncc.masm.addPtr(Imm32(spOffset), JSFrameReg, temp);
ncc.masm.storePtr(temp, FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, sp)));
/* Make room for the three arguments on the stack, preserving stack register alignment. */
const uint32 stackAdjustment = 16;
ncc.masm.sub32(Imm32(stackAdjustment), JSC::X86Registers::esp);
/* Compute and push vp */
uint32 vpOffset = sizeof(JSStackFrame) + mic.frameDepth * sizeof(jsval);
ncc.masm.addPtr(Imm32(vpOffset), JSFrameReg, temp);
ncc.masm.storePtr(temp, Address(JSC::X86Registers::esp, 0x8));
if (isNew) {
/* Mark vp[1] as magic. */
ncc.masm.storeValue(MagicValue(JS_FAST_CONSTRUCTOR), Address(temp, sizeof(Value)));
}
/* Push argc */
ncc.masm.store32(Imm32(mic.argc), Address(JSC::X86Registers::esp, 0x4));
/* Push cx. The VMFrame is homed at the stack register, so adjust for the amount we pushed. */
ncc.masm.loadPtr(FrameAddress(stackAdjustment + offsetof(VMFrame, cx)), temp);
ncc.masm.storePtr(temp, Address(JSC::X86Registers::esp, 0));
/* Do the call. */
ncc.masm.call(JS_FUNC_TO_DATA_PTR(void *, fn));
/* Restore stack. */
ncc.masm.add32(Imm32(stackAdjustment), JSC::X86Registers::esp);
#if defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)
// Usually JaegerThrowpoline got called from return address.
// So in JaegerThrowpoline without fastcall, esp was added by 8.
// If we just want to jump there, we need to sub esp by 8 first.
ncc.masm.sub32(Imm32(8), JSC::X86Registers::esp);
#ifdef JS_CPU_X86
/* x86's stack should be 16-byte aligned. */
masm.subPtr(Imm32(16), Assembler::stackPointerRegister);
#endif
/* Check if the call is throwing, and jump to the throwpoline. */
Jump hasException =
ncc.masm.branchTest32(Assembler::Zero, Registers::ReturnReg, Registers::ReturnReg);
ncc.addLink(hasException, JS_FUNC_TO_DATA_PTR(uint8 *, JaegerThrowpoline));
/* Compute vp. */
#ifdef JS_CPU_X86
RegisterID vpReg = t0;
#else
RegisterID vpReg = Registers::ArgReg2;
#endif
uint32 vpOffset = sizeof(JSStackFrame) + (ic.frameDepth - ic.argc - 2) * sizeof(Value);
masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
#if defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)
ncc.masm.add32(Imm32(8), JSC::X86Registers::esp);
/* Mark vp[1] as magic for |new|. */
if (callingNew)
masm.storeValue(MagicValue(JS_FAST_CONSTRUCTOR), Address(vpReg, sizeof(Value)));
#ifdef JS_CPU_X86
masm.storePtr(vpReg, Address(Assembler::stackPointerRegister, 8));
#endif
/* Load *vp into the return register pair. */
Address rval(JSFrameReg, vpOffset);
ncc.masm.loadPayload(rval, JSReturnReg_Data);
ncc.masm.loadTypeTag(rval, JSReturnReg_Type);
/* Push argc. */
#ifdef JS_CPU_X86
masm.store32(Imm32(ic.argc), Address(Assembler::stackPointerRegister, 4));
#else
masm.move(Imm32(ic.argc), Registers::ArgReg1);
#endif
ncc.finish(script, start, fallthrough);
/* Push cx. */
#ifdef JS_CPU_X86
masm.storePtr(cxReg, Address(Assembler::stackPointerRegister, 0));
#endif
/* Make the call. */
Assembler::Call call = masm.call();
#ifdef JS_CPU_X86
masm.addPtr(Imm32(16), Assembler::stackPointerRegister);
#endif
#if defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)
// Usually JaegerThrowpoline got called from return address.
// So in JaegerThrowpoline without fastcall, esp was added by 8.
// If we just want to jump there, we need to sub esp by 8 first.
masm.subPtr(Imm32(8), Assembler::stackPointerRegister);
#endif
Jump hasException = masm.branchTest32(Assembler::Zero, Registers::ReturnReg,
Registers::ReturnReg);
#if defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)
// Usually JaegerThrowpoline got called from return address.
// So in JaegerThrowpoline without fastcall, esp was added by 8.
// If we just want to jump there, we need to sub esp by 8 first.
masm.addPtr(Imm32(8), Assembler::stackPointerRegister);
#endif
Jump done = masm.jump();
JSC::ExecutablePool *ep = poolForSize(masm.size(), CallICInfo::Pool_NativeStub);
if (!ep)
THROWV(true);
JSC::LinkBuffer buffer(&masm, ep);
buffer.link(done, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
buffer.link(call, JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, fun->u.n.native)));
buffer.link(hasException, JSC::CodeLocationLabel(JS_FUNC_TO_DATA_PTR(void *, JaegerThrowpoline)));
buffer.link(funGuard, ic.slowPathStart);
JSC::CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated native CALL stub %p (%d bytes)\n",
cs.executableAddress(), masm.size());
uint8 *start = (uint8 *)ic.funJump.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
repatch.relink(ic.funJump, cs);
ic.fastGuardedNative = obj;
/* Retarget funJump for future ICs. */
ic.funJump = buffer.locationOf(funGuard);
return true;
}
void *update()
{
JSObject *obj;
if (!IsFunctionObject(*vp, &obj) || !(cx->options & JSOPTION_METHODJIT)) {
/* Ugh. Can't do anything with this! */
if (callingNew)
stubs::SlowNew(f, ic.argc);
else
stubs::SlowCall(f, ic.argc);
return NULL;
}
JSFunction *fun = obj->getFunctionPrivate();
JSObject *scopeChain = obj->getParent();
/* The slow path guards against natives. */
JS_ASSERT(fun->isInterpreted());
JSScript *script = fun->u.i.script;
if (!script->ncode && !script->isEmpty()) {
if (mjit::TryCompile(cx, script, fun, scopeChain) == Compile_Error)
THROWV(NULL);
}
JS_ASSERT(script->isEmpty() || script->ncode);
if (script->ncode == JS_UNJITTABLE_METHOD || script->isEmpty()) {
/* This should always go to a slow path, sadly. */
JSC::CodeLocationCall oolCall = ic.slowPathStart.callAtOffset(ic.oolCallOffset);
uint8 *start = (uint8 *)oolCall.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
JSC::FunctionPtr fptr = callingNew
? JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowNewFromIC))
: JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowCallFromIC));
repatch.relink(oolCall, fptr);
if (callingNew)
stubs::SlowNew(f, ic.argc);
else
stubs::SlowCall(f, ic.argc);
return NULL;
}
uint32 flags = callingNew ? JSFRAME_CONSTRUCTING : 0;
if (callingNew)
stubs::NewObject(f, ic.argc);
if (!ic.hit) {
if (ic.argc < fun->nargs) {
if (!generateFullCallStub(script, flags))
THROWV(NULL);
} else {
if (!ic.fastGuardedObject) {
patchInlinePath(script, obj);
} else if (!ic.hasJsFunCheck &&
ic.fastGuardedObject->getFunctionPrivate() == fun) {
if (!generateStubForClosures(obj))
THROWV(NULL);
} else {
if (!generateFullCallStub(script, flags))
THROWV(NULL);
}
}
} else {
ic.hit = true;
}
/* We'll still return to the OOL path, so make sure a frame exists. */
pushFrameFromCaller(scopeChain, flags);
if (ic.argc >= fun->nargs)
return script->ncode;
return script->jit->arityCheck;
}
};
void * JS_FASTCALL
ic::Call(VMFrame &f, uint32 index)
{
JSScript *oldscript = f.fp()->getScript();
CallICInfo &ic = oldscript->callICs[index];
CallCompiler cc(f, ic, false);
return cc.update();
}
#endif /* JS_CPU_X86 */
void * JS_FASTCALL
ic::New(VMFrame &f, uint32 index)
{
JSScript *oldscript = f.fp()->getScript();
CallICInfo &ic = oldscript->callICs[index];
CallCompiler cc(f, ic, true);
return cc.update();
}
void JS_FASTCALL
ic::NativeCall(VMFrame &f, uint32 index)
{
JSScript *oldscript = f.fp()->getScript();
CallICInfo &ic = oldscript->callICs[index];
CallCompiler cc(f, ic, false);
if (!cc.generateNativeStub())
stubs::SlowCall(f, ic.argc);
}
void JS_FASTCALL
ic::NativeNew(VMFrame &f, uint32 index)
{
JSScript *oldscript = f.fp()->getScript();
CallICInfo &ic = oldscript->callICs[index];
CallCompiler cc(f, ic, true);
if (!cc.generateNativeStub())
stubs::SlowNew(f, ic.argc);
}
void
ic::PurgeMICs(JSContext *cx, JSScript *script)
@ -351,8 +740,6 @@ ic::PurgeMICs(JSContext *cx, JSScript *script)
*/
break;
}
case ic::MICInfo::CALL:
case ic::MICInfo::EMPTYCALL:
case ic::MICInfo::TRACER:
/* Nothing to patch! */
break;

Просмотреть файл

@ -70,8 +70,6 @@ struct MICInfo {
{
GET,
SET,
CALL,
EMPTYCALL, /* placeholder call which cannot refer to a fast native */
TRACER
};
@ -89,19 +87,12 @@ struct MICInfo {
uint32 patchValueOffset;
#endif
/* Used by CALL. */
uint32 argc;
uint32 frameDepth;
JSC::CodeLocationLabel knownObject;
JSC::CodeLocationLabel callEnd;
JSC::MacroAssembler::RegisterID dataReg;
/* Used by TRACER. */
JSC::CodeLocationJump traceHint;
JSC::CodeLocationJump slowTraceHint;
/* Used by all MICs. */
Kind kind : 4;
Kind kind : 3;
union {
/* Used by GET/SET. */
struct {
@ -109,8 +100,6 @@ struct MICInfo {
bool typeConst : 1;
bool dataConst : 1;
} name;
/* Used by CALL. */
bool generated;
/* Used by TRACER. */
bool hasSlowTraceHint;
} u;
@ -119,48 +108,84 @@ struct MICInfo {
void JS_FASTCALL GetGlobalName(VMFrame &f, uint32 index);
void JS_FASTCALL SetGlobalName(VMFrame &f, uint32 index);
#ifdef JS_CPU_X86
/* See MonoIC.cpp, CallCompiler for more information on call ICs. */
struct CallICInfo {
typedef JSC::MacroAssembler::RegisterID RegisterID;
/* Compiler for generating fast paths for a MIC'ed native call. */
class NativeCallCompiler
{
typedef JSC::MacroAssembler::Jump Jump;
enum PoolIndex {
Pool_ScriptStub,
Pool_ClosureStub,
Pool_NativeStub,
struct Patch {
Patch(Jump from, uint8 *to)
: from(from), to(to)
{ }
Jump from;
uint8 *to;
Total_Pools
};
public:
Assembler masm;
JSC::ExecutablePool *pools[Total_Pools];
/* Used for rooting and reification. */
JSObject *fastGuardedObject;
JSObject *fastGuardedNative;
Value constantThis;
uint32 argc : 16;
uint32 frameDepth : 16;
/* Function object identity guard. */
JSC::CodeLocationDataLabelPtr funGuard;
/* Starting point for all slow call paths. */
JSC::CodeLocationLabel slowPathStart;
/* Inline to OOL jump, redirected by stubs. */
JSC::CodeLocationJump funJump;
/* Offset to inline scripted call, from funGuard. */
uint32 hotCallOffset : 8;
uint32 joinPointOffset : 8;
/* Out of line slow call. */
uint32 oolCallOffset : 8;
/* Jump to patch for out-of-line scripted calls. */
uint32 oolJumpOffset : 8;
/* Offset for deep-fun check to rejoin at. */
uint32 hotPathOffset : 8;
/* Join point for all slow call paths. */
uint32 slowJoinOffset : 9;
RegisterID funObjReg : 5;
RegisterID funPtrReg : 5;
bool isConstantThis : 1;
bool hit : 1;
bool hasJsFunCheck : 1;
inline void reset() {
fastGuardedObject = NULL;
fastGuardedNative = NULL;
hit = false;
hasJsFunCheck = false;
pools[0] = pools[1] = pools[2] = NULL;
}
inline void releasePools() {
releasePool(Pool_ScriptStub);
releasePool(Pool_ClosureStub);
releasePool(Pool_NativeStub);
}
private:
/* :TODO: oom check */
Vector<Patch, 8, SystemAllocPolicy> jumps;
public:
NativeCallCompiler();
size_t size() { return masm.size(); }
uint8 *buffer() { return masm.buffer(); }
/* Exit from the call path to target. */
void addLink(Jump j, uint8 *target) { jumps.append(Patch(j, target)); }
/*
* Finish up this native, and add an incoming jump from start
* and an outgoing jump to fallthrough.
*/
void finish(JSScript *script, uint8 *start, uint8 *fallthrough);
inline void releasePool(PoolIndex index) {
if (pools[index])
pools[index]->release();
}
};
void CallFastNative(JSContext *cx, JSScript *script, MICInfo &mic, JSFunction *fun, bool isNew);
#endif /* JS_CPU_X86 */
void * JS_FASTCALL New(VMFrame &f, uint32 index);
void * JS_FASTCALL Call(VMFrame &f, uint32 index);
void JS_FASTCALL NativeNew(VMFrame &f, uint32 index);
void JS_FASTCALL NativeCall(VMFrame &f, uint32 index);
void PurgeMICs(JSContext *cx, JSScript *script);

Просмотреть файл

@ -59,14 +59,16 @@ void JS_FASTCALL InitElem(VMFrame &f, uint32 last);
void JS_FASTCALL InitProp(VMFrame &f, JSAtom *atom);
void JS_FASTCALL InitMethod(VMFrame &f, JSAtom *atom);
void * JS_FASTCALL Call(VMFrame &f, uint32 argc);
void * JS_FASTCALL New(VMFrame &f, uint32 argc);
void * JS_FASTCALL SlowNew(VMFrame &f, uint32 argc);
void * JS_FASTCALL SlowCall(VMFrame &f, uint32 argc);
JSObject * JS_FASTCALL NewObject(VMFrame &f);
void JS_FASTCALL CheckStackQuota(VMFrame &f);
void * JS_FASTCALL CheckArity(VMFrame &f);
void * JS_FASTCALL CompileFunction(VMFrame &f);
void JS_FASTCALL SlowNew(VMFrame &f, uint32 argc);
void JS_FASTCALL SlowCall(VMFrame &f, uint32 argc);
void * JS_FASTCALL UncachedNew(VMFrame &f, uint32 argc);
void * JS_FASTCALL UncachedCall(VMFrame &f, uint32 argc);
JSBool JS_FASTCALL NewObject(VMFrame &f, uint32 argc);
void JS_FASTCALL Throw(VMFrame &f);
void * JS_FASTCALL LookupSwitch(VMFrame &f, jsbytecode *pc);
void * JS_FASTCALL TableSwitch(VMFrame &f, jsbytecode *origPc);
void JS_FASTCALL PutCallObject(VMFrame &f);
void JS_FASTCALL PutArgsObject(VMFrame &f);
void JS_FASTCALL CopyThisv(VMFrame &f);
@ -78,6 +80,9 @@ void * JS_FASTCALL InvokeTracer(VMFrame &f, uint32 index);
void * JS_FASTCALL InvokeTracer(VMFrame &f);
#endif
void * JS_FASTCALL LookupSwitch(VMFrame &f, jsbytecode *pc);
void * JS_FASTCALL TableSwitch(VMFrame &f, jsbytecode *origPc);
void JS_FASTCALL BindName(VMFrame &f);
JSObject * JS_FASTCALL BindGlobalName(VMFrame &f);
void JS_FASTCALL SetName(VMFrame &f, JSAtom *atom);

Просмотреть файл

@ -54,6 +54,13 @@ namespace mjit {
pool = NULL; \
JS_END_MACRO
typedef JSC::MacroAssembler::Address Address;
typedef JSC::MacroAssembler::Label Label;
typedef JSC::MacroAssembler::Jump Jump;
typedef JSC::MacroAssembler::ImmPtr ImmPtr;
typedef JSC::MacroAssembler::Imm32 Imm32;
typedef JSC::MacroAssembler::Address Address;
bool
TrampolineCompiler::compile()
{

Просмотреть файл

@ -49,11 +49,6 @@ namespace mjit {
class TrampolineCompiler
{
typedef Assembler::Label Label;
typedef Assembler::Jump Jump;
typedef Assembler::ImmPtr ImmPtr;
typedef Assembler::Imm32 Imm32;
typedef Assembler::Address Address;
typedef bool (*TrampolineGenerator)(Assembler &masm);
public: