Bug 1416572 - Use a single JitCode instance for all VMFunction wrappers. r=nbp

This commit is contained in:
Jan de Mooij 2017-11-14 10:45:00 +01:00
Родитель 2e25120626
Коммит e24d7b18cd
27 изменённых файлов: 152 добавлений и 244 удалений

Просмотреть файл

@@ -149,10 +149,7 @@ BaselineCacheIRCompiler::callVM(MacroAssembler& masm, const VMFunction& fun)
{
MOZ_ASSERT(inStubFrame_);
JitCode* code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
if (!code)
return false;
uint8_t* code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
MOZ_ASSERT(fun.expectTailCall == NonTailCall);
MOZ_ASSERT(engine_ == ICStubEngine::Baseline);

Просмотреть файл

@@ -4867,9 +4867,7 @@ BaselineCompiler::emit_JSOP_RESUME()
pushArg(genObj);
pushArg(scratch2);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(GeneratorThrowInfo);
if (!code)
return false;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(GeneratorThrowInfo);
// Create the frame descriptor.
masm.subStackPtrFrom(scratch1);
@@ -4884,7 +4882,7 @@ BaselineCompiler::emit_JSOP_RESUME()
#ifndef JS_CODEGEN_ARM64
masm.push(ImmWord(0));
#endif
masm.jump(code);
masm.jump(ImmPtr(code));
}
// If the generator script has no JIT code, call into the VM.

Просмотреть файл

@@ -37,6 +37,7 @@
#include "jit/JitCompartment.h"
#include "jit/JitSpewer.h"
#include "jit/LICM.h"
#include "jit/Linker.h"
#include "jit/LIR.h"
#include "jit/LoopUnroller.h"
#include "jit/Lowering.h"
@@ -203,6 +204,7 @@ JitRuntime::JitRuntime(JSRuntime* rt)
invalidator_(nullptr),
debugTrapHandler_(nullptr),
baselineDebugModeOSRHandler_(nullptr),
functionWrapperCode_(nullptr),
functionWrappers_(nullptr),
preventBackedgePatching_(false),
jitcodeGlobalTable_(nullptr)
@@ -334,11 +336,27 @@ JitRuntime::initialize(JSContext* cx, AutoLockForExclusiveAccess& lock)
if (!freeStub_)
return false;
JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
for (VMFunction* fun = VMFunction::functions; fun; fun = fun->next) {
JitSpew(JitSpew_Codegen, "# VM function wrapper");
if (!generateVMWrapper(cx, *fun))
{
JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
MacroAssembler masm;
for (VMFunction* fun = VMFunction::functions; fun; fun = fun->next) {
JitSpew(JitSpew_Codegen, "# VM function wrapper");
if (!generateVMWrapper(cx, masm, *fun))
return false;
}
Linker linker(masm);
AutoFlushICache afc("VMWrappers");
functionWrapperCode_ = linker.newCode<NoGC>(cx, OTHER_CODE);
if (!functionWrapperCode_)
return false;
#ifdef JS_ION_PERF
writePerfSpewerJitCodeProfile(functionWrapperCode_, "VMWrappers");
#endif
#ifdef MOZ_VTUNE
vtune::MarkStub(functionWrapperCode_, "VMWrappers");
#endif
}
JitSpew(JitSpew_Codegen, "# Emitting lazy link stub");
@@ -718,15 +736,20 @@ JitRuntime::getBailoutTable(const FrameSizeClass& frameClass) const
return bailoutTables_.ref()[frameClass.classId()];
}
JitCode*
uint8_t*
JitRuntime::getVMWrapper(const VMFunction& f) const
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
MOZ_ASSERT(functionWrapperCode_);
JitRuntime::VMWrapperMap::Ptr p = functionWrappers_->readonlyThreadsafeLookup(&f);
MOZ_ASSERT(p);
return p->value();
uint32_t offset = p->value();
MOZ_ASSERT(offset < functionWrapperCode_->instructionsSize());
return functionWrapperCode_->raw() + offset;
}
template <AllowGC allowGC>

Просмотреть файл

@@ -358,15 +358,13 @@ IonCacheIRCompiler::callVM(MacroAssembler& masm, const VMFunction& fun)
{
MOZ_ASSERT(calledPrepareVMCall_);
JitCode* code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
if (!code)
return false;
uint8_t* code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
uint32_t frameSize = fun.explicitStackSlots() * sizeof(void*);
uint32_t descriptor = MakeFrameDescriptor(frameSize, JitFrame_IonICCall,
ExitFrameLayout::Size());
masm.Push(Imm32(descriptor));
masm.callJit(code);
masm.callJit(ImmPtr(code));
// Remove rest of the frame left on the stack. We remove the return address
// which is implicitly poped when returning.

Просмотреть файл

@@ -135,8 +135,12 @@ class JitRuntime
ExclusiveAccessLockWriteOnceData<JitCode*> baselineDebugModeOSRHandler_;
ExclusiveAccessLockWriteOnceData<void*> baselineDebugModeOSRHandlerNoFrameRegPopAddr_;
// Map VMFunction addresses to the JitCode of the wrapper.
using VMWrapperMap = HashMap<const VMFunction*, JitCode*>;
// Code for all VMFunction wrappers.
ExclusiveAccessLockWriteOnceData<JitCode*> functionWrapperCode_;
// Map VMFunction addresses to the offset of the wrapper in
// functionWrapperCode_.
using VMWrapperMap = HashMap<const VMFunction*, uint32_t>;
ExclusiveAccessLockWriteOnceData<VMWrapperMap*> functionWrappers_;
// If true, the signal handler to interrupt Ion code should not attempt to
@@ -161,7 +165,7 @@ class JitRuntime
JitCode* generateFreeStub(JSContext* cx);
JitCode* generateDebugTrapHandler(JSContext* cx);
JitCode* generateBaselineDebugModeOSRHandler(JSContext* cx, uint32_t* noFrameRegPopOffsetOut);
JitCode* generateVMWrapper(JSContext* cx, const VMFunction& f);
bool generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f);
bool generateTLEventVM(JSContext* cx, MacroAssembler& masm, const VMFunction& f, bool enter);
@@ -224,7 +228,7 @@ class JitRuntime
return preventBackedgePatching_;
}
JitCode* getVMWrapper(const VMFunction& f) const;
uint8_t* getVMWrapper(const VMFunction& f) const;
JitCode* debugTrapHandler(JSContext* cx);
JitCode* getBaselineDebugModeOSRHandler(JSContext* cx);
void* getBaselineDebugModeOSRHandlerAddress(JSContext* cx, bool popFrameReg);

Просмотреть файл

@@ -1188,7 +1188,7 @@ TraceJitExitFrame(JSTracer* trc, const JSJitFrameIter& frame)
return;
}
TraceRoot(trc, footer->addressOfJitCode(), "ion-exit-code");
MOZ_ASSERT(frame.exitFrame()->isWrapperExit());
const VMFunction* f = footer->function();
if (f == nullptr)

Просмотреть файл

@@ -514,6 +514,7 @@ enum class ExitFrameToken : uint8_t
IonOOLPropertyOp = 0x6,
IonOOLSetterOp = 0x7,
IonOOLProxy = 0x8,
VMFunction = 0xFD,
LazyLink = 0xFE,
Bare = 0xFF
};
@@ -529,6 +530,7 @@ class ExitFrameLayout : public CommonFrameLayout
// Pushed for "bare" fake exit frames that have no GC things on stack to be
// traced.
static JitCode* BareToken() { return (JitCode*)ExitFrameToken::Bare; }
static JitCode* VMFunctionToken() { return (JitCode*)ExitFrameToken::VMFunction; }
static inline size_t Size() {
return sizeof(ExitFrameLayout);
@@ -551,7 +553,7 @@ class ExitFrameLayout : public CommonFrameLayout
}
inline bool isWrapperExit() {
return footer()->function() != nullptr;
return footer()->jitCode() == VMFunctionToken();
}
inline bool isBareExit() {
return footer()->jitCode() == BareToken();

Просмотреть файл

@@ -233,6 +233,14 @@ MacroAssembler::callJit(JitCode* callee)
return currentOffset();
}
uint32_t
MacroAssembler::callJit(ImmPtr code)
{
AutoProfilerCallInstrumentation profiler(*this);
call(code);
return currentOffset();
}
void
MacroAssembler::makeFrameDescriptor(Register frameSizeReg, FrameType type, uint32_t headerSize)
{
@ -308,8 +316,7 @@ void
MacroAssembler::enterExitFrame(Register cxreg, Register scratch, const VMFunction* f)
{
linkExitFrame(cxreg, scratch);
// Push the JitCode pointer. (Keep the code alive, when on the stack)
PushStubCode();
Push(Imm32(int32_t(ExitFrameToken::VMFunction)));
// Push VMFunction pointer, to mark arguments.
Push(ImmPtr(f));
}

Просмотреть файл

@@ -651,6 +651,7 @@ class MacroAssembler : public MacroAssemblerSpecific
inline uint32_t callJitNoProfiler(Register callee);
inline uint32_t callJit(Register callee);
inline uint32_t callJit(JitCode* code);
inline uint32_t callJit(ImmPtr code);
// The frame descriptor is the second field of all Jit frames, pushed before
// calling the Jit function. It is a composite value defined in JitFrames.h

Просмотреть файл

@ -554,10 +554,7 @@ ICStubCompiler::getStubCode()
bool
ICStubCompiler::tailCallVM(const VMFunction& fun, MacroAssembler& masm)
{
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
if (!code)
return false;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
MOZ_ASSERT(fun.expectTailCall == TailCall);
uint32_t argSize = fun.explicitStackSlots() * sizeof(void*);
if (engine_ == Engine::Baseline) {
@ -574,10 +571,7 @@ ICStubCompiler::callVM(const VMFunction& fun, MacroAssembler& masm)
{
MOZ_ASSERT(inStubFrame_);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
if (!code)
return false;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
MOZ_ASSERT(fun.expectTailCall == NonTailCall);
MOZ_ASSERT(engine_ == Engine::Baseline);

Просмотреть файл

@ -664,6 +664,11 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void jump(JitCode* code) {
branch(code);
}
void jump(ImmPtr code) {
ScratchRegisterScope scratch(asMasm());
movePtr(code, scratch);
ma_bx(scratch);
}
void jump(Register reg) {
ma_bx(reg);
}

Просмотреть файл

@ -79,7 +79,7 @@ EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
}
inline void
EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
{
// We assume during this that R0 and R1 have been pushed, and that R2 is
// unused.
@ -105,11 +105,11 @@ EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
masm.makeFrameDescriptor(r0, JitFrame_BaselineJS, ExitFrameLayout::Size());
masm.push(r0);
masm.push(lr);
masm.branch(target);
masm.jump(ImmPtr(target));
}
inline void
EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
{
// We assume during this that R0 and R1 have been pushed, and that R2 is
// unused.
@ -127,7 +127,7 @@ EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
masm.makeFrameDescriptor(r0, JitFrame_IonJS, ExitFrameLayout::Size());
masm.push(r0);
masm.push(lr);
masm.branch(target);
masm.jump(ImmPtr(target));
}
inline void
@ -143,11 +143,11 @@ EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32
}
inline void
EmitBaselineCallVM(JitCode* target, MacroAssembler& masm)
EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
{
EmitBaselineCreateStubFrameDescriptor(masm, r0, ExitFrameLayout::Size());
masm.push(r0);
masm.call(target);
masm.call(ImmPtr(target));
}
// Size of vales pushed by EmitEnterStubFrame.

Просмотреть файл

@@ -761,17 +761,15 @@ JitRuntime::generateBailoutHandler(JSContext* cx)
return code;
}
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
if (p)
return p->value();
// Generate a separated code for the wrapper.
MacroAssembler masm(cx);
masm.flushBuffer();
uint32_t wrapperOffset = masm.currentOffset();
AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
@ -842,7 +840,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
}
if (!generateTLEnterVM(cx, masm, f))
return nullptr;
return false;
masm.setupUnalignedABICall(regs.getAny());
masm.passABIArg(cxreg);
@ -882,7 +880,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
masm.callWithABI(f.wrapped, MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
if (!generateTLExitVM(cx, masm, f))
return nullptr;
return false;
// Test for failure.
switch (f.failType()) {
@ -937,22 +935,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));
Linker linker(masm);
AutoFlushICache afc("VMWrapper");
JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
if (!wrapper)
return nullptr;
// linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
// use relookupOrAdd instead of add.
if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
return nullptr;
#ifdef JS_ION_PERF
writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif
return wrapper;
return functionWrappers_->putNew(&f, wrapperOffset);
}
JitCode*
@ -1015,10 +998,7 @@ JitRuntime::generateDebugTrapHandler(JSContext* cx)
masm.movePtr(ImmPtr(nullptr), ICStubReg);
EmitBaselineEnterStubFrame(masm, scratch2);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
if (!code)
return nullptr;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
masm.push(lr);
masm.push(scratch1);
EmitBaselineCallVM(code, masm);

Просмотреть файл

@ -658,6 +658,11 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
void jump(JitCode* code) {
branch(code);
}
void jump(ImmPtr code) {
syncStackPtr();
BufferOffset loc = b(-1); // The jump target will be patched by executableCopy().
addPendingJump(loc, code, Relocation::HARDCODED);
}
void jump(RepatchLabel* label) {
MOZ_CRASH("jump (repatchlabel)");
}

Просмотреть файл

@ -79,7 +79,7 @@ EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
}
inline void
EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
{
// We assume that R0 has been pushed, and R2 is unused.
MOZ_ASSERT(R2 == ValueOperand(r0));
@ -108,11 +108,11 @@ EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
// ICTailCallReg (lr) already contains the return address (as we keep
// it there through the stub calls).
masm.branch(target);
masm.jump(ImmPtr(target));
}
inline void
EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
{
MOZ_CRASH("Not implemented yet.");
}
@ -130,11 +130,11 @@ EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32
}
inline void
EmitBaselineCallVM(JitCode* target, MacroAssembler& masm)
EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
{
EmitBaselineCreateStubFrameDescriptor(masm, r0, ExitFrameLayout::Size());
masm.push(r0);
masm.call(target);
masm.call(ImmPtr(target));
}
// Size of values pushed by EmitEnterStubFrame.

Просмотреть файл

@ -556,16 +556,14 @@ JitRuntime::generateBailoutHandler(JSContext* cx)
return code;
}
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
if (p)
return p->value();
MacroAssembler masm(cx);
masm.flushBuffer();
uint32_t wrapperOffset = masm.currentOffset();
// Avoid conflicts with argument registers while discarding the result after
// the function call.
@ -645,7 +643,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
}
if (!generateTLEnterVM(cx, masm, f))
return nullptr;
return false;
masm.setupUnalignedABICall(regs.getAny());
masm.passABIArg(reg_cx);
@ -684,7 +682,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
masm.callWithABI(f.wrapped, MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
if (!generateTLExitVM(cx, masm, f))
return nullptr;
return false;
// SP is used to transfer stack across call boundaries.
if (!masm.GetStackPointer64().Is(vixl::sp))
@ -746,22 +744,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));
Linker linker(masm);
AutoFlushICache afc("VMWrapper");
JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
if (!wrapper)
return nullptr;
#ifdef JS_ION_PERF
writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif
// linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
// use relookupOrAdd instead of add.
if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
return nullptr;
return wrapper;
return functionWrappers_->putNew(&f, wrapperOffset);
}
JitCode*
@ -821,10 +804,7 @@ JitRuntime::generateDebugTrapHandler(JSContext* cx)
masm.movePtr(ImmPtr(nullptr), ICStubReg);
EmitBaselineEnterStubFrame(masm, scratch2);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
if (!code)
return nullptr;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
masm.asVIXL().Push(vixl::lr, ARMRegister(scratch1, 64));
EmitBaselineCallVM(code, masm);

Просмотреть файл

@ -78,7 +78,7 @@ EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
}
inline void
EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
{
Register scratch = R2.scratchReg();
@ -106,7 +106,7 @@ EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
}
inline void
EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
{
Register scratch = R2.scratchReg();
@ -135,7 +135,7 @@ EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32
}
inline void
EmitBaselineCallVM(JitCode* target, MacroAssembler& masm)
EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
{
Register scratch = R2.scratchReg();
EmitBaselineCreateStubFrameDescriptor(masm, scratch, ExitFrameLayout::Size());

Просмотреть файл

@ -714,16 +714,14 @@ JitRuntime::generateBailoutHandler(JSContext* cx)
return code;
}
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
if (p)
return p->value();
MacroAssembler masm(cx);
masm.flushBuffer();
uint32_t wrapperOffset = masm.currentOffset();
AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
@ -800,7 +798,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
masm.movePtr(StackPointer, doubleArgs);
if (!generateTLEnterVM(cx, masm, f))
return nullptr;
return false;
masm.setupAlignedABICall();
masm.passABIArg(cxreg);
@ -852,7 +850,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
masm.callWithABI(f.wrapped, MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
if (!generateTLExitVM(cx, masm, f))
return nullptr;
return false;
// Test for failure.
switch (f.failType()) {
@ -915,22 +913,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
f.explicitStackSlots() * sizeof(uintptr_t) +
f.extraValuesToPop * sizeof(Value)));
Linker linker(masm);
AutoFlushICache afc("VMWrapper");
JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
if (!wrapper)
return nullptr;
// linker.newCode may trigger a GC and sweep functionWrappers_ so we have
// to use relookupOrAdd instead of add.
if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
return nullptr;
#ifdef JS_ION_PERF
writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif
return wrapper;
return functionWrappers_->putNew(&f, wrapperOffset);
}
JitCode*
@ -994,9 +977,7 @@ JitRuntime::generateDebugTrapHandler(JSContext* cx)
masm.movePtr(ImmPtr(nullptr), ICStubReg);
EmitBaselineEnterStubFrame(masm, scratch2);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
if (!code)
return nullptr;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));

Просмотреть файл

@ -687,16 +687,14 @@ JitRuntime::generateBailoutHandler(JSContext* cx)
return code;
}
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
if (p)
return p->value();
MacroAssembler masm(cx);
masm.flushBuffer();
uint32_t wrapperOffset = masm.currentOffset();
AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
@ -764,7 +762,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
}
if (!generateTLEnterVM(cx, masm, f))
return nullptr;
return false;
masm.setupUnalignedABICall(regs.getAny());
masm.passABIArg(cxreg);
@ -801,7 +799,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
masm.callWithABI(f.wrapped, MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
if (!generateTLExitVM(cx, masm, f))
return nullptr;
return false;
// Test for failure.
switch (f.failType()) {
@ -863,22 +861,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));
Linker linker(masm);
AutoFlushICache afc("VMWrapper");
JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
if (!wrapper)
return nullptr;
// linker.newCode may trigger a GC and sweep functionWrappers_ so we have
// to use relookupOrAdd instead of add.
if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
return nullptr;
#ifdef JS_ION_PERF
writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif
return wrapper;
return functionWrappers_->putNew(&f, wrapperOffset);
}
JitCode*
@ -942,9 +925,7 @@ JitRuntime::generateDebugTrapHandler(JSContext* cx)
masm.movePtr(ImmPtr(nullptr), ICStubReg);
EmitBaselineEnterStubFrame(masm, scratch2);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
if (!code)
return nullptr;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));

Просмотреть файл

@ -21,12 +21,13 @@ JitCode* JitRuntime::generateInvalidator(JSContext*) { MOZ_CRASH(); }
JitCode* JitRuntime::generateArgumentsRectifier(JSContext*, void**) { MOZ_CRASH(); }
JitCode* JitRuntime::generateBailoutTable(JSContext*, uint32_t) { MOZ_CRASH(); }
JitCode* JitRuntime::generateBailoutHandler(JSContext*) { MOZ_CRASH(); }
JitCode* JitRuntime::generateVMWrapper(JSContext*, const VMFunction&) { MOZ_CRASH(); }
JitCode* JitRuntime::generatePreBarrier(JSContext*, MIRType) { MOZ_CRASH(); }
JitCode* JitRuntime::generateDebugTrapHandler(JSContext*) { MOZ_CRASH(); }
JitCode* JitRuntime::generateExceptionTailStub(JSContext*, void*) { MOZ_CRASH(); }
JitCode* JitRuntime::generateBailoutTailStub(JSContext*) { MOZ_CRASH(); }
bool JitRuntime::generateVMWrapper(JSContext*, MacroAssembler&, const VMFunction&) { MOZ_CRASH(); }
FrameSizeClass FrameSizeClass::FromDepth(uint32_t) { MOZ_CRASH(); }
FrameSizeClass FrameSizeClass::ClassLimit() { MOZ_CRASH(); }
uint32_t FrameSizeClass::frameSize() const { MOZ_CRASH(); }

Просмотреть файл

@ -58,9 +58,7 @@ BaselineCompilerShared::prepareVMCall()
bool
BaselineCompilerShared::callVM(const VMFunction& fun, CallVMPhase phase)
{
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
if (!code)
return false;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
#ifdef DEBUG
// Assert prepareVMCall() has been called.
@ -125,7 +123,7 @@ BaselineCompilerShared::callVM(const VMFunction& fun, CallVMPhase phase)
}
MOZ_ASSERT(fun.expectTailCall == NonTailCall);
// Perform the call.
masm.call(code);
masm.call(ImmPtr(code));
uint32_t callOffset = masm.currentOffset();
masm.pop(BaselineFrameReg);

Просмотреть файл

@ -1366,11 +1366,7 @@ CodeGeneratorShared::callVM(const VMFunction& fun, LInstruction* ins, const Regi
#endif
// Get the wrapper of the VM function.
JitCode* wrapper = gen->jitRuntime()->getVMWrapper(fun);
if (!wrapper) {
masm.setOOM();
return;
}
uint8_t* wrapper = gen->jitRuntime()->getVMWrapper(fun);
#ifdef CHECK_OSIPOINT_REGISTERS
if (shouldVerifyOsiPointRegs(ins->safepoint()))
@ -1392,7 +1388,7 @@ CodeGeneratorShared::callVM(const VMFunction& fun, LInstruction* ins, const Regi
// when returning from the call. Failures are handled with exceptions based
// on the return value of the C functions. To guard the outcome of the
// returned value, use another LIR instruction.
uint32_t callOffset = masm.callJit(wrapper);
uint32_t callOffset = masm.callJit(ImmPtr(wrapper));
markSafepointAt(callOffset, ins);
// Remove rest of the frame left on the stack. We remove the return address

Просмотреть файл

@ -70,7 +70,7 @@ EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
}
inline void
EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
{
ScratchRegisterScope scratch(masm);
@ -88,11 +88,11 @@ EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS, ExitFrameLayout::Size());
masm.push(scratch);
masm.push(ICTailCallReg);
masm.jmp(target);
masm.jmp(ImmPtr(target));
}
inline void
EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
{
// For tail calls, find the already pushed JitFrame_IonJS signifying the
// end of the Ion frame. Retrieve the length of the frame and repush
@ -109,7 +109,7 @@ EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
masm.makeFrameDescriptor(scratch, JitFrame_IonJS, ExitFrameLayout::Size());
masm.push(scratch);
masm.push(ICTailCallReg);
masm.jmp(target);
masm.jmp(ImmPtr(target));
}
inline void
@ -125,12 +125,12 @@ EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32
}
inline void
EmitBaselineCallVM(JitCode* target, MacroAssembler& masm)
EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
{
ScratchRegisterScope scratch(masm);
EmitBaselineCreateStubFrameDescriptor(masm, scratch, ExitFrameLayout::Size());
masm.push(scratch);
masm.call(target);
masm.call(ImmPtr(target));
}
// Size of vales pushed by EmitEnterStubFrame.

Просмотреть файл

@ -651,17 +651,14 @@ JitRuntime::generateBailoutHandler(JSContext* cx)
return code;
}
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
if (p)
return p->value();
// Generate a separated code for the wrapper.
MacroAssembler masm;
masm.flushBuffer();
uint32_t wrapperOffset = masm.currentOffset();
// Avoid conflicts with argument registers while discarding the result after
// the function call.
@ -732,7 +729,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
}
if (!generateTLEnterVM(cx, masm, f))
return nullptr;
return false;
masm.setupUnalignedABICall(regs.getAny());
masm.passABIArg(cxreg);
@ -768,7 +765,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
masm.callWithABI(f.wrapped, MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
if (!generateTLExitVM(cx, masm, f))
return nullptr;
return false;
// Test for failure.
switch (f.failType()) {
@ -826,24 +823,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));
Linker linker(masm);
JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
if (!wrapper)
return nullptr;
#ifdef JS_ION_PERF
writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif
#ifdef MOZ_VTUNE
vtune::MarkStub(wrapper, "VMWrapper");
#endif
// linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
// use relookupOrAdd instead of add.
if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
return nullptr;
return wrapper;
return functionWrappers_->putNew(&f, wrapperOffset);
}
JitCode*
@ -911,10 +891,7 @@ JitRuntime::generateDebugTrapHandler(JSContext* cx)
masm.movePtr(ImmPtr(nullptr), ICStubReg);
EmitBaselineEnterStubFrame(masm, scratch3);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
if (!code)
return nullptr;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
masm.push(scratch1);
masm.push(scratch2);
EmitBaselineCallVM(code, masm);

Просмотреть файл

@ -541,6 +541,9 @@ class MacroAssemblerX86Shared : public Assembler
void jump(JitCode* code) {
jmp(code);
}
void jump(ImmPtr code) {
jmp(code);
}
void jump(RepatchLabel* label) {
jmp(label);
}

Просмотреть файл

@ -71,7 +71,7 @@ EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
}
inline void
EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
{
// We assume during this that R0 and R1 have been pushed.
@ -89,11 +89,11 @@ EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
masm.makeFrameDescriptor(eax, JitFrame_BaselineJS, ExitFrameLayout::Size());
masm.push(eax);
masm.push(ICTailCallReg);
masm.jmp(target);
masm.jmp(ImmPtr(target));
}
inline void
EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
{
// For tail calls, find the already pushed JitFrame_IonJS signifying the
// end of the Ion frame. Retrieve the length of the frame and repush
@ -108,7 +108,7 @@ EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
masm.makeFrameDescriptor(eax, JitFrame_IonJS, ExitFrameLayout::Size());
masm.push(eax);
masm.push(ICTailCallReg);
masm.jmp(target);
masm.jmp(ImmPtr(target));
}
inline void
@ -124,11 +124,11 @@ EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32
}
inline void
EmitBaselineCallVM(JitCode* target, MacroAssembler& masm)
EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
{
EmitBaselineCreateStubFrameDescriptor(masm, eax, ExitFrameLayout::Size());
masm.push(eax);
masm.call(target);
masm.call(ImmPtr(target));
}
// Size of vales pushed by EmitEnterStubFrame.

Просмотреть файл

@ -682,17 +682,14 @@ JitRuntime::generateBailoutHandler(JSContext* cx)
return code;
}
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
if (p)
return p->value();
// Generate a separated code for the wrapper.
MacroAssembler masm;
masm.flushBuffer();
uint32_t wrapperOffset = masm.currentOffset();
// Avoid conflicts with argument registers while discarding the result after
// the function call.
@ -756,7 +753,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
}
if (!generateTLEnterVM(cx, masm, f))
return nullptr;
return false;
masm.setupUnalignedABICall(regs.getAny());
masm.passABIArg(cxreg);
@ -799,7 +796,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
masm.callWithABI(f.wrapped, MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
if (!generateTLExitVM(cx, masm, f))
return nullptr;
return false;
// Test for failure.
switch (f.failType()) {
@ -852,24 +849,7 @@ JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));
Linker linker(masm);
JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
if (!wrapper)
return nullptr;
#ifdef JS_ION_PERF
writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif
#ifdef MOZ_VTUNE
vtune::MarkStub(wrapper, "VMWrapper");
#endif
// linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
// use relookupOrAdd instead of add.
if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
return nullptr;
return wrapper;
return functionWrappers_->putNew(&f, wrapperOffset);
}
JitCode*
@ -942,10 +922,7 @@ JitRuntime::generateDebugTrapHandler(JSContext* cx)
masm.movePtr(ImmPtr(nullptr), ICStubReg);
EmitBaselineEnterStubFrame(masm, scratch3);
JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
if (!code)
return nullptr;
uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
masm.push(scratch1);
masm.push(scratch2);
EmitBaselineCallVM(code, masm);