Bug 1027885 - OdinMonkey: set up AsmJSFrame and use instead of exitFP for exit stubs (r=dougc)

This commit is contained in:
Luke Wagner 2014-07-21 10:57:29 -05:00
Родитель 3414c83d98
Коммит 260c370ed0
23 изменённых файлов: 338 добавлений и 299 удалений

Просмотреть файл

@ -5866,31 +5866,17 @@ CheckModuleReturn(ModuleCompiler &m)
return true;
}
// Load the AsmJSActivation* (stored in the module's global data at
// activationGlobalDataOffset) into |reg|. x86/x64 have no pinned global-data
// register, so the load is emitted with a patch label and recorded as an
// AsmJSGlobalAccess for later patching; other platforms (ARM/MIPS) read it
// through the pinned GlobalReg.
static void
LoadAsmJSActivationIntoRegister(MacroAssembler &masm, Register reg)
{
#if defined(JS_CODEGEN_X64)
// RIP-relative load, patched with the global-data offset.
CodeOffsetLabel label = masm.loadRipRelativeInt64(reg);
masm.append(AsmJSGlobalAccess(label, AsmJSModule::activationGlobalDataOffset()));
#elif defined(JS_CODEGEN_X86)
// Absolute-address load, patched with the global-data address.
CodeOffsetLabel label = masm.movlWithPatch(PatchedAbsoluteAddress(), reg);
masm.append(AsmJSGlobalAccess(label, AsmJSModule::activationGlobalDataOffset()));
#else
masm.loadPtr(Address(GlobalReg, AsmJSModule::activationGlobalDataOffset()), reg);
#endif
}
static void
AssertStackAlignment(MacroAssembler &masm)
{
JS_ASSERT((AsmJSFrameSize + masm.framePushed()) % StackAlignment == 0);
#ifdef DEBUG
Label ok;
JS_ASSERT(IsPowerOfTwo(StackAlignment));
masm.branchTestPtr(Assembler::Zero, StackPointer, Imm32(StackAlignment - 1), &ok);
masm.breakpoint();
masm.bind(&ok);
#endif
JS_ASSERT((sizeof(AsmJSFrame) + masm.framePushed()) % StackAlignment == 0);
masm.assertStackAlignment();
}
// Convenience wrapper: compute the stack decrement that keeps sp
// StackAlignment-aligned for a call, accounting for the enclosing AsmJSFrame
// plus whatever masm has already pushed. Delegates to the size_t overload.
static unsigned
StackDecrementForCall(MacroAssembler &masm, unsigned bytesToPush)
{
return StackDecrementForCall(sizeof(AsmJSFrame) + masm.framePushed(), bytesToPush);
}
template <class VectorT>
@ -5903,15 +5889,6 @@ StackArgBytes(const VectorT &argTypes)
return iter.stackBytesConsumedSoFar();
}
// Returns the smallest decrement >= bytesToPush such that
// (alreadyPushed + decrement) is a multiple of StackAlignment, i.e. sp is
// call-aligned after the decrement.
static unsigned
StackDecrementForCall(MacroAssembler &masm, unsigned bytesToPush)
{
// Include extra padding so that, after pushing the bytesToPush,
// the stack is aligned for a call instruction.
unsigned alreadyPushed = AsmJSFrameSize + masm.framePushed();
return AlignBytes(alreadyPushed + bytesToPush, StackAlignment) - alreadyPushed;
}
template <class VectorT>
static unsigned
StackDecrementForCall(MacroAssembler &masm, const VectorT &argTypes, unsigned extraBytes = 0)
@ -5947,23 +5924,11 @@ static bool
GenerateEntry(ModuleCompiler &m, const AsmJSModule::ExportedFunction &exportedFunc)
{
MacroAssembler &masm = m.masm();
GenerateAsmJSEntryPrologue(masm);
// In contrast to the system ABI, the Ion convention is that all registers
// are clobbered by calls. Thus, we must save the caller's non-volatile
// registers.
//
// NB: GenerateExits assumes that masm.framePushed() == 0 before
// PushRegsInMask(NonVolatileRegs).
masm.setFramePushed(0);
// See AsmJSFrameSize comment in Assembler-shared.h.
#if defined(JS_CODEGEN_ARM)
masm.push(lr);
#endif // JS_CODEGEN_ARM
#if defined(JS_CODEGEN_MIPS)
masm.push(ra);
#endif
masm.PushRegsInMask(NonVolatileRegs);
JS_ASSERT(masm.framePushed() == FramePushedAfterSave);
@ -5986,14 +5951,14 @@ GenerateEntry(ModuleCompiler &m, const AsmJSModule::ExportedFunction &exportedFu
// right after the (C++) caller's non-volatile registers were saved so that
// they can be restored.
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
LoadAsmJSActivationIntoRegister(masm, activation);
masm.loadAsmJSActivation(activation);
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfErrorRejoinSP()));
// Get 'argv' into a non-arg register and save it on the stack.
Register argv = ABIArgGenerator::NonArgReturnVolatileReg0;
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg1;
#if defined(JS_CODEGEN_X86)
masm.loadPtr(Address(StackPointer, AsmJSFrameSize + masm.framePushed()), argv);
masm.loadPtr(Address(StackPointer, sizeof(AsmJSFrame) + masm.framePushed()), argv);
#else
masm.movePtr(IntArgReg0, argv);
#endif
@ -6056,11 +6021,10 @@ GenerateEntry(ModuleCompiler &m, const AsmJSModule::ExportedFunction &exportedFu
// Restore clobbered non-volatile registers of the caller.
masm.PopRegsInMask(NonVolatileRegs);
JS_ASSERT(masm.framePushed() == 0);
masm.move32(Imm32(true), ReturnReg);
masm.ret();
GenerateAsmJSEntryEpilogue(masm);
return true;
}
@ -6077,11 +6041,10 @@ FillArgumentArray(ModuleCompiler &m, const VarTypeVector &argTypes,
case ABIArg::GPR:
masm.storeValue(JSVAL_TYPE_INT32, i->gpr(), dstAddr);
break;
case ABIArg::FPU: {
masm.canonicalizeDouble(i->fpu());
masm.storeDouble(i->fpu(), dstAddr);
break;
}
case ABIArg::FPU:
masm.canonicalizeDouble(i->fpu());
masm.storeDouble(i->fpu(), dstAddr);
break;
case ABIArg::Stack:
if (i.mirType() == MIRType_Int32) {
Address src(StackPointer, offsetToCallerStackArgs + i->offsetFromArgBase());
@ -6108,22 +6071,9 @@ GenerateFFIInterpreterExit(ModuleCompiler &m, const ModuleCompiler::ExitDescript
unsigned exitIndex, Label *throwLabel)
{
MacroAssembler &masm = m.masm();
masm.align(CodeAlignment);
m.setInterpExitOffset(exitIndex);
masm.setFramePushed(0);
// See AsmJSFrameSize comment in Assembler-shared.h.
#if defined(JS_CODEGEN_ARM)
masm.push(lr);
#elif defined(JS_CODEGEN_MIPS)
masm.push(ra);
#endif
// Store the frame pointer in AsmJSActivation::exitFP for stack unwinding.
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
LoadAsmJSActivationIntoRegister(masm, activation);
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfExitFP()));
JS_ASSERT(masm.framePushed() == 0);
// Argument types for InvokeFromAsmJS_*:
MIRType typeArray[] = { MIRType_Pointer, // exitDatum
MIRType_Int32, // argc
MIRType_Pointer }; // argv
@ -6136,12 +6086,15 @@ GenerateFFIInterpreterExit(ModuleCompiler &m, const ModuleCompiler::ExitDescript
// padding between argv and retaddr ensures that sp is aligned.
unsigned offsetToArgv = AlignBytes(StackArgBytes(invokeArgTypes), sizeof(double));
unsigned argvBytes = Max<size_t>(1, exit.sig().args().length()) * sizeof(Value);
unsigned stackDec = StackDecrementForCall(masm, offsetToArgv + argvBytes);
masm.reserveStack(stackDec);
unsigned framePushed = StackDecrementForCall(masm, offsetToArgv + argvBytes);
// Emit prologue code.
m.setInterpExitOffset(exitIndex);
GenerateAsmJSFFIExitPrologue(masm, framePushed);
// Fill the argument array.
unsigned offsetToCallerStackArgs = AsmJSFrameSize + masm.framePushed();
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg1;
unsigned offsetToCallerStackArgs = sizeof(AsmJSFrame) + masm.framePushed();
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg0;
FillArgumentArray(m, exit.sig().args(), offsetToArgv, offsetToCallerStackArgs, scratch);
// Prepare the arguments for the call to InvokeFromAsmJS_*.
@ -6195,15 +6148,7 @@ GenerateFFIInterpreterExit(ModuleCompiler &m, const ModuleCompiler::ExitDescript
break;
}
// Note: the caller is IonMonkey code which means there are no non-volatile
// registers to restore.
masm.freeStack(stackDec);
// Clear exitFP before the frame is destroyed.
LoadAsmJSActivationIntoRegister(masm, activation);
masm.storePtr(ImmWord(0), Address(activation, AsmJSActivation::offsetOfExitFP()));
masm.ret();
GenerateAsmJSFFIExitEpilogue(masm, framePushed);
}
// On ARM/MIPS, we need to include an extra word of space at the top of the
@ -6221,25 +6166,6 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
unsigned exitIndex, Label *throwLabel)
{
MacroAssembler &masm = m.masm();
masm.align(CodeAlignment);
m.setIonExitOffset(exitIndex);
masm.setFramePushed(0);
// See AsmJSFrameSize comment in Assembler-shared.h.
#if defined(JS_CODEGEN_ARM)
masm.push(lr);
#elif defined(JS_CODEGEN_MIPS)
masm.push(ra);
#endif
// 'callee' stays live throughout much of the Ion exit and 'scratch' is
// constantly clobbered.
Register callee = ABIArgGenerator::NonArgReturnVolatileReg0;
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg1;
// Store the frame pointer in AsmJSActivation::exitFP for stack unwinding.
LoadAsmJSActivationIntoRegister(masm, scratch);
masm.storePtr(StackPointer, Address(scratch, AsmJSActivation::offsetOfExitFP()));
// Even though the caller has saved volatile registers, we still need to
// save/restore globally-pinned asm.js registers at Ion calls since Ion does
@ -6273,9 +6199,11 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
unsigned totalCoerceBytes = offsetToCoerceArgv + sizeof(Value) + savedRegBytes;
unsigned coerceFrameSize = StackDecrementForCall(masm, totalCoerceBytes);
// Allocate a frame large enough for both of the above calls.
unsigned framePushed = Max(ionFrameSize, coerceFrameSize);
masm.reserveStack(framePushed);
// Emit prologue code.
m.setIonExitOffset(exitIndex);
GenerateAsmJSFFIExitPrologue(masm, framePushed);
// 1. Descriptor
size_t argOffset = offsetToIonArgs;
@ -6284,6 +6212,8 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
argOffset += sizeof(size_t);
// 2. Callee
Register callee = ABIArgGenerator::NonArgReturnVolatileReg0; // live until call
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg1; // clobbered
// 2.1. Get ExitDatum
unsigned globalDataOffset = m.module().exitIndexToGlobalDataOffset(exitIndex);
@ -6316,7 +6246,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
argOffset += sizeof(Value);
// 5. Fill the arguments
unsigned offsetToCallerStackArgs = framePushed + AsmJSFrameSize;
unsigned offsetToCallerStackArgs = framePushed + sizeof(AsmJSFrame);
FillArgumentArray(m, exit.sig().args(), argOffset, offsetToCallerStackArgs, scratch);
argOffset += exit.sig().args().length() * sizeof(Value);
JS_ASSERT(argOffset == offsetToIonArgs + ionArgBytes);
@ -6356,7 +6286,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
size_t offsetOfJitTop = offsetof(JSRuntime, mainThread) + offsetof(PerThreadData, jitTop);
size_t offsetOfJitJSContext = offsetof(JSRuntime, mainThread) +
offsetof(PerThreadData, jitJSContext);
LoadAsmJSActivationIntoRegister(masm, reg0);
masm.loadAsmJSActivation(reg0);
masm.loadPtr(Address(reg0, AsmJSActivation::offsetOfContext()), reg3);
masm.loadPtr(Address(reg3, JSContext::offsetOfRuntime()), reg0);
masm.loadPtr(Address(reg0, offsetOfActivation), reg1);
@ -6432,13 +6362,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
masm.loadPtr(Address(StackPointer, savedGlobalOffset), GlobalReg);
#endif
masm.freeStack(framePushed);
// Clear exitFP before the frame is destroyed.
LoadAsmJSActivationIntoRegister(masm, scratch);
masm.storePtr(ImmWord(0), Address(scratch, AsmJSActivation::offsetOfExitFP()));
masm.ret();
GenerateAsmJSFFIExitEpilogue(masm, framePushed);
if (oolConvert.used()) {
masm.bind(&oolConvert);
@ -6495,39 +6419,6 @@ GenerateFFIExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit, u
GenerateFFIIonExit(m, exit, exitIndex, throwLabel);
}
// The stack-overflow exit is called when the stack limit has definitely been
// exceeded. In this case, we can clobber everything since we are about to pop
// all the frames.
// Generate the out-of-line stack-overflow exit, bound to the module's
// stackOverflowLabel. It records sp in AsmJSActivation::exitFP so C++ can
// unwind the stack, aligns sp, calls ReportOverRecursed, then jumps to
// throwLabel (which pops all frames); exitFP is cleared first to keep the
// unwinder from seeing a dead frame.
static bool
GenerateStackOverflowExit(ModuleCompiler &m, Label *throwLabel)
{
MacroAssembler &masm = m.masm();
masm.align(CodeAlignment);
masm.bind(&m.stackOverflowLabel());
// For the benefit of AssertStackAlignment.
masm.setFramePushed(0);
// Store the frame pointer in AsmJSActivation::exitFP for stack unwinding.
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
LoadAsmJSActivationIntoRegister(masm, activation);
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfExitFP()));
// Even without arguments, various platforms require stack adjustment.
masm.reserveStack(ComputeByteAlignment(AsmJSFrameSize + ShadowStackSpace, StackAlignment));
AssertStackAlignment(masm);
masm.call(AsmJSImmPtr(AsmJSImm_ReportOverRecursed));
// Clear exitFP before the frame is destroyed.
LoadAsmJSActivationIntoRegister(masm, activation);
masm.storePtr(ImmWord(0), Address(activation, AsmJSActivation::offsetOfExitFP()));
// Don't worry about restoring the stack; throwLabel will pop everything.
masm.jump(throwLabel);
return !masm.oom();
}
static const RegisterSet AllRegsExceptSP =
RegisterSet(GeneralRegisterSet(Registers::AllMask &
~(uint32_t(1) << Registers::StackPointer)),
@ -6560,7 +6451,7 @@ GenerateAsyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg0;
// Store resumePC into the reserved space.
LoadAsmJSActivationIntoRegister(masm, scratch);
masm.loadAsmJSActivation(scratch);
masm.loadPtr(Address(scratch, AsmJSActivation::offsetOfResumePC()), scratch);
masm.storePtr(scratch, Address(StackPointer, masm.framePushed() + sizeof(void*)));
@ -6571,7 +6462,7 @@ GenerateAsyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
if (ShadowStackSpace)
masm.subPtr(Imm32(ShadowStackSpace), StackPointer);
AssertStackAlignment(masm);
masm.assertStackAlignment();
masm.call(AsmJSImmPtr(AsmJSImm_HandleExecutionInterrupt));
masm.branchIfFalseBool(ReturnReg, throwLabel);
@ -6597,13 +6488,14 @@ GenerateAsyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
masm.ma_and(StackPointer, StackPointer, Imm32(~(StackAlignment - 1)));
// Store resumePC into the reserved space.
LoadAsmJSActivationIntoRegister(masm, IntArgReg0);
masm.loadAsmJSActivation(IntArgReg0);
masm.loadPtr(Address(IntArgReg0, AsmJSActivation::offsetOfResumePC()), IntArgReg1);
masm.storePtr(IntArgReg1, Address(s0, masm.framePushed()));
// The MIPS ABI requires reserving stack space for registers $a0 to $a3.
masm.subPtr(Imm32(4 * sizeof(intptr_t)), StackPointer);
masm.assertStackAlignment();
masm.call(AsmJSImm_HandleExecutionInterrupt);
masm.addPtr(Imm32(4 * sizeof(intptr_t)), StackPointer);
@ -6633,12 +6525,15 @@ GenerateAsyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
masm.ma_and(Imm32(~7), sp, sp);
// Store resumePC into the return PC stack slot.
LoadAsmJSActivationIntoRegister(masm, IntArgReg0);
masm.loadAsmJSActivation(IntArgReg0);
masm.loadPtr(Address(IntArgReg0, AsmJSActivation::offsetOfResumePC()), IntArgReg1);
masm.storePtr(IntArgReg1, Address(r6, 14 * sizeof(uint32_t*)));
masm.PushRegsInMask(RegisterSet(GeneralRegisterSet(0), FloatRegisterSet(FloatRegisters::AllDoubleMask))); // save all FP registers
masm.assertStackAlignment();
masm.call(AsmJSImm_HandleExecutionInterrupt);
masm.branchIfFalseBool(ReturnReg, throwLabel);
// Restore the machine state to before the interrupt. This will set the pc!
@ -6672,10 +6567,6 @@ GenerateAsyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
return !masm.oom();
}
static const RegisterSet VolatileRegs =
RegisterSet(GeneralRegisterSet(Registers::ArgRegMask),
FloatRegisterSet(FloatRegisters::VolatileMask));
static bool
GenerateSyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
{
@ -6692,13 +6583,6 @@ GenerateSyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
masm.push(ra);
#endif
// Record sp in the AsmJSActivation for stack unwinding.
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
LoadAsmJSActivationIntoRegister(masm, activation);
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfExitFP()));
masm.PushRegsInMask(VolatileRegs);
unsigned stackDec = StackDecrementForCall(masm, ShadowStackSpace);
masm.reserveStack(stackDec);
@ -6707,11 +6591,6 @@ GenerateSyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
masm.branchIfFalseBool(ReturnReg, throwLabel);
masm.freeStack(stackDec);
masm.PopRegsInMask(VolatileRegs);
// Clear exitFP before the frame is destroyed.
LoadAsmJSActivationIntoRegister(masm, activation);
masm.storePtr(ImmWord(0), Address(activation, AsmJSActivation::offsetOfExitFP()));
JS_ASSERT(masm.framePushed() == 0);
masm.ret();
@ -6724,22 +6603,26 @@ GenerateSyncInterruptExit(ModuleCompiler &m, Label *throwLabel)
// 2. PopRegsInMask to restore the caller's non-volatile registers.
// 3. Return (to CallAsmJS).
static bool
GenerateThrowExit(ModuleCompiler &m, Label *throwLabel)
GenerateThrowStub(ModuleCompiler &m, Label *throwLabel)
{
MacroAssembler &masm = m.masm();
masm.align(CodeAlignment);
masm.bind(throwLabel);
// We are about to pop all frames in this AsmJSActivation. Set fp to null to
// maintain the invariant that fp is either null or pointing to a valid
// frame.
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
LoadAsmJSActivationIntoRegister(masm, activation);
masm.loadAsmJSActivation(activation);
masm.storePtr(ImmWord(0), Address(activation, AsmJSActivation::offsetOfFP()));
masm.setFramePushed(FramePushedAfterSave);
masm.loadPtr(Address(activation, AsmJSActivation::offsetOfErrorRejoinSP()), StackPointer);
masm.PopRegsInMask(NonVolatileRegs);
JS_ASSERT(masm.framePushed() == 0);
masm.mov(ImmWord(0), ReturnReg);
masm.addPtr(Imm32(AsmJSFrameBytesAfterReturnAddress), StackPointer);
masm.ret();
return !masm.oom();
@ -6766,15 +6649,15 @@ GenerateStubs(ModuleCompiler &m)
return false;
}
if (m.stackOverflowLabel().used() && !GenerateStackOverflowExit(m, &throwLabel))
return false;
if (m.stackOverflowLabel().used())
GenerateAsmJSStackOverflowExit(m.masm(), &m.stackOverflowLabel(), &throwLabel);
if (!GenerateAsyncInterruptExit(m, &throwLabel))
return false;
if (m.syncInterruptLabel().used() && !GenerateSyncInterruptExit(m, &throwLabel))
return false;
if (!GenerateThrowExit(m, &throwLabel))
if (!GenerateThrowStub(m, &throwLabel))
return false;
return true;

Просмотреть файл

@ -8,26 +8,27 @@
#include "jit/AsmJS.h"
#include "jit/AsmJSModule.h"
#include "jit/IonMacroAssembler.h"
using namespace js;
using namespace js::jit;
/*****************************************************************************/
// AsmJSFrameIterator implementation
static void *
ReturnAddressFromFP(uint8_t *fp)
{
// In asm.js code, the "frame" consists of a single word: the saved
// return address of the caller.
static_assert(AsmJSFrameSize == sizeof(void*), "Frame size mismatch");
return *(uint8_t**)fp;
return reinterpret_cast<AsmJSFrame*>(fp)->returnAddress;
}
AsmJSFrameIterator::AsmJSFrameIterator(const AsmJSActivation &activation)
: module_(&activation.module()),
fp_(activation.exitFP())
fp_(activation.fp())
{
if (!fp_)
return;
settle(ReturnAddressFromFP(fp_));
settle();
}
void
@ -35,13 +36,15 @@ AsmJSFrameIterator::operator++()
{
JS_ASSERT(!done());
fp_ += callsite_->stackDepth();
settle(ReturnAddressFromFP(fp_));
settle();
}
void
AsmJSFrameIterator::settle(void *returnAddress)
AsmJSFrameIterator::settle()
{
const AsmJSModule::CodeRange *codeRange = module_->lookupCodeRange(ReturnAddressFromFP(fp_));
void *returnAddress = ReturnAddressFromFP(fp_);
const AsmJSModule::CodeRange *codeRange = module_->lookupCodeRange(returnAddress);
JS_ASSERT(codeRange);
codeRange_ = codeRange;
@ -73,3 +76,152 @@ AsmJSFrameIterator::computeLine(uint32_t *column) const
return callsite_->line();
}
/*****************************************************************************/
// Prologue/epilogue code generation
// Ensure the return address is on the stack on every platform, so that each
// prologue starts with the same AsmJSFrame layout: ARM/MIPS calls leave the
// return address in lr/ra and must push it explicitly; x86/x64's call
// instruction already pushed it.
static void
PushRetAddr(MacroAssembler &masm)
{
#if defined(JS_CODEGEN_ARM)
masm.push(lr);
#elif defined(JS_CODEGEN_MIPS)
masm.push(ra);
#else
// The x86/x64 call instruction pushes the return address.
#endif
}
// Emit the prologue of a normal (internal) asm.js function: push the return
// address, reserve framePushed plus the remaining AsmJSFrame bytes, and
// optionally emit the stack-overflow check. |maybeOverflowThunk| is null when
// the caller decided the over-recursion check can be omitted.
void
js::GenerateAsmJSFunctionPrologue(MacroAssembler &masm, unsigned framePushed,
Label *maybeOverflowThunk, Label *overflowExit)
{
// When not in profiling mode, the only way to observe fp (i.e.,
// AsmJSActivation::fp) is to call out to C++ so, as an optimization, we
// don't update fp. Technically, the interrupt exit can observe fp at an
// arbitrary pc, but we don't care about providing an accurate stack in this
// case. We still need to reserve space for the saved frame pointer, though,
// to maintain the AsmJSFrame layout.
PushRetAddr(masm);
masm.subPtr(Imm32(framePushed + AsmJSFrameBytesAfterReturnAddress), StackPointer);
masm.setFramePushed(framePushed);
// Overflow checks are omitted by CodeGenerator in some cases (leaf
// functions with small framePushed). Perform overflow-checking after
// pushing framePushed to catch cases with really large frames.
if (maybeOverflowThunk) {
// If framePushed is zero, we don't need a thunk: the thunk exists only
// to pop framePushed before reaching the overflow exit, so branch to
// the exit directly in that case.
Label *target = framePushed ? maybeOverflowThunk : overflowExit;
masm.branchPtr(Assembler::AboveOrEqual,
AsmJSAbsoluteAddress(AsmJSImm_StackLimit),
StackPointer,
target);
}
}
// Emit the epilogue of a normal asm.js function, plus (when used) the
// out-of-line overflow thunk targeted by the prologue's stack-limit branch.
void
js::GenerateAsmJSFunctionEpilogue(MacroAssembler &masm, unsigned framePushed,
Label *maybeOverflowThunk, Label *overflowExit)
{
// Inverse of GenerateAsmJSFunctionPrologue:
JS_ASSERT(masm.framePushed() == framePushed);
masm.addPtr(Imm32(framePushed + AsmJSFrameBytesAfterReturnAddress), StackPointer);
masm.ret();
// Reset assembler bookkeeping for any code emitted after the return
// (such as the thunk below).
masm.setFramePushed(0);
if (maybeOverflowThunk && maybeOverflowThunk->used()) {
// The general throw stub assumes that only sizeof(AsmJSFrame) bytes
// have been pushed. The overflow check occurs after incrementing by
// framePushed, so pop that before jumping to the overflow exit.
masm.bind(maybeOverflowThunk);
masm.addPtr(Imm32(framePushed), StackPointer);
masm.jump(overflowExit);
}
}
// Out-of-line stub shared by all function prologues' overflow checks: record
// fp so C++ can unwind, align the stack, report over-recursion, then jump to
// the throw stub. There is no return path from here.
void
js::GenerateAsmJSStackOverflowExit(MacroAssembler &masm, Label *overflowExit, Label *throwLabel)
{
masm.bind(overflowExit);
// If we reach here via the non-profiling prologue, AsmJSActivation::fp has
// not been updated. To enable stack unwinding from C++, store to it now. If
// we reached here via the profiling prologue, we'll just store the same
// value again. Do not update AsmJSFrame::callerFP as it is not necessary in
// the non-profiling case (there is no return path from this point) and, in
// the profiling case, it is already correct.
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
masm.loadAsmJSActivation(activation);
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfFP()));
// Prepare the stack for calling C++.
if (unsigned stackDec = StackDecrementForCall(sizeof(AsmJSFrame), ShadowStackSpace))
masm.subPtr(Imm32(stackDec), StackPointer);
// No need to restore the stack; the throw stub pops everything.
masm.assertStackAlignment();
masm.call(AsmJSImmPtr(AsmJSImm_ReportOverRecursed));
masm.jump(throwLabel);
}
// Prologue of the C++-to-asm.js entry trampoline: build an AsmJSFrame (return
// address plus reserved bytes) without touching AsmJSActivation::fp.
void
js::GenerateAsmJSEntryPrologue(MacroAssembler &masm)
{
// Stack-unwinding stops at the entry prologue, so there is no need to
// update AsmJSActivation::fp. Furthermore, on ARM/MIPS, GlobalReg is not
// yet initialized, so we can't even if we wanted to.
PushRetAddr(masm);
// Reserve the rest of the AsmJSFrame without storing to it.
masm.subPtr(Imm32(AsmJSFrameBytesAfterReturnAddress), StackPointer);
masm.setFramePushed(0);
}
// Epilogue of the C++-to-asm.js entry trampoline: pop the reserved AsmJSFrame
// bytes and return to the C++ caller.
void
js::GenerateAsmJSEntryEpilogue(MacroAssembler &masm)
{
// Inverse of GenerateAsmJSEntryPrologue:
JS_ASSERT(masm.framePushed() == 0);
masm.addPtr(Imm32(AsmJSFrameBytesAfterReturnAddress), StackPointer);
masm.ret();
// Reset assembler bookkeeping for any code emitted after the return.
masm.setFramePushed(0);
}
// Prologue for FFI exit stubs (calls from asm.js out to C++/Ion). C++ stack
// unwinding starts from AsmJSActivation::fp, so this prologue links a new
// frame: it pushes the activation's current fp (filling in the AsmJSFrame)
// and points AsmJSActivation::fp at this frame before reserving framePushed
// bytes for the stub's outgoing arguments.
void
js::GenerateAsmJSFFIExitPrologue(MacroAssembler &masm, unsigned framePushed)
{
PushRetAddr(masm);
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
masm.loadAsmJSActivation(activation);
Address fp(activation, AsmJSActivation::offsetOfFP());
// Save the caller's fp on the stack, then record this frame as the new fp.
masm.push(fp);
masm.storePtr(StackPointer, fp);
if (framePushed)
masm.subPtr(Imm32(framePushed), StackPointer);
masm.setFramePushed(framePushed);
}
// Epilogue for FFI exit stubs: restore AsmJSActivation::fp from the value
// saved by the prologue, unlinking this frame, then return.
void
js::GenerateAsmJSFFIExitEpilogue(MacroAssembler &masm, unsigned framePushed)
{
// Inverse of GenerateAsmJSFFIExitPrologue:
JS_ASSERT(masm.framePushed() == framePushed);
if (framePushed)
masm.addPtr(Imm32(framePushed), StackPointer);
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
masm.loadAsmJSActivation(activation);
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
// x86/x64 can pop directly into memory, restoring fp in one instruction.
masm.pop(Operand(activation, AsmJSActivation::offsetOfFP()));
#else
// Other platforms pop through a scratch register.
Register fp = ABIArgGenerator::NonArgReturnVolatileReg1;
masm.pop(fp);
masm.storePtr(fp, Address(activation, AsmJSActivation::offsetOfFP()));
#endif
masm.ret();
// Reset assembler bookkeeping for any code emitted after the return.
masm.setFramePushed(0);
}

Просмотреть файл

@ -12,12 +12,13 @@
#include <stdint.h>
class JSAtom;
struct JSContext;
namespace js {
class AsmJSActivation;
class AsmJSModule;
namespace jit { struct CallSite; }
namespace jit { struct CallSite; class MacroAssembler; class Label; }
// Iterates over the frames of a single AsmJSActivation.
class AsmJSFrameIterator
@ -30,7 +31,7 @@ class AsmJSFrameIterator
// nested classes, so use void* to avoid pulling in all of AsmJSModule.h.
const void *codeRange_;
void settle(void *returnAddress);
void settle();
public:
explicit AsmJSFrameIterator() : module_(nullptr) {}
@ -41,6 +42,29 @@ class AsmJSFrameIterator
unsigned computeLine(uint32_t *column) const;
};
/******************************************************************************/
// Prologue/epilogue code generation.
void
GenerateAsmJSFunctionPrologue(jit::MacroAssembler &masm, unsigned framePushed,
jit::Label *maybeOverflowThunk, jit::Label *overflowExit);
void
GenerateAsmJSFunctionEpilogue(jit::MacroAssembler &masm, unsigned framePushed,
jit::Label *maybeOverflowThunk, jit::Label *overflowExit);
void
GenerateAsmJSStackOverflowExit(jit::MacroAssembler &masm, jit::Label *overflowExit,
jit::Label *throwLabel);
void
GenerateAsmJSEntryPrologue(jit::MacroAssembler &masm);
void
GenerateAsmJSEntryEpilogue(jit::MacroAssembler &masm);
void
GenerateAsmJSFFIExitPrologue(jit::MacroAssembler &masm, unsigned framePushed);
void
GenerateAsmJSFFIExitEpilogue(jit::MacroAssembler &masm, unsigned framePushed);
} // namespace js
#endif // jit_AsmJSFrameIterator_h

Просмотреть файл

@ -925,6 +925,7 @@ class AsmJSModule
pod.funcPtrTableAndExitBytes_;
}
static unsigned activationGlobalDataOffset() {
JS_STATIC_ASSERT(jit::AsmJSActivationGlobalDataOffset == 0);
return 0;
}
AsmJSActivation *&activation() const {

Просмотреть файл

@ -6486,25 +6486,25 @@ CodeGenerator::generateAsmJS(Label *stackOverflowLabel)
{
IonSpew(IonSpew_Codegen, "# Emitting asm.js code");
// AsmJS doesn't do profiler instrumentation.
// AsmJS doesn't do SPS instrumentation.
sps_.disable();
// The caller (either another asm.js function or the external-entry
// trampoline) has placed all arguments in registers and on the stack
// according to the system ABI. The MAsmJSParameters which represent these
// parameters have been useFixed()ed to these ABI-specified positions.
// Thus, there is nothing special to do in the prologue except (possibly)
// bump the stack.
if (!generateAsmJSPrologue(stackOverflowLabel))
return false;
Label overflowThunk;
Label *maybeOverflowThunk = omitOverRecursedCheck() ? nullptr : &overflowThunk;
GenerateAsmJSFunctionPrologue(masm, frameSize(), maybeOverflowThunk, stackOverflowLabel);
if (!generateBody())
return false;
if (!generateEpilogue())
return false;
masm.bind(&returnLabel_);
GenerateAsmJSFunctionEpilogue(masm, frameSize(), maybeOverflowThunk, stackOverflowLabel);
#if defined(JS_ION_PERF)
// Note the end of the inline code and start of the OOL code.
gen->perfSpewer().noteEndInlineCode(masm);
#endif
if (!generateOutOfLineCode())
return false;
@ -8571,7 +8571,7 @@ CodeGenerator::visitAsmJSCall(LAsmJSCall *ins)
if (mir->spIncrement())
masm.freeStack(mir->spIncrement());
JS_ASSERT((AsmJSFrameSize + masm.framePushed()) % StackAlignment == 0);
JS_ASSERT((sizeof(AsmJSFrame) + masm.framePushed()) % StackAlignment == 0);
#ifdef DEBUG
Label ok;
@ -8814,7 +8814,7 @@ CodeGenerator::visitAsmJSInterruptCheck(LAsmJSInterruptCheck *lir)
Label rejoin;
masm.branch32(Assembler::Equal, scratch, Imm32(0), &rejoin);
{
uint32_t stackFixup = ComputeByteAlignment(masm.framePushed() + AsmJSFrameSize,
uint32_t stackFixup = ComputeByteAlignment(masm.framePushed() + sizeof(AsmJSFrame),
StackAlignment);
masm.reserveStack(stackFixup);
masm.call(lir->funcDesc(), lir->interruptExit());

Просмотреть файл

@ -1425,6 +1425,16 @@ class MacroAssembler : public MacroAssemblerSpecific
JS_ASSERT(framePushed() == aic.initialStack);
PopRegsInMask(liveRegs);
}
// Debug-only runtime check that StackPointer is StackAlignment-aligned:
// emits a breakpoint trap on misalignment. Emits nothing in release builds.
void assertStackAlignment() {
#ifdef DEBUG
Label ok;
JS_ASSERT(IsPowerOfTwo(StackAlignment));
branchTestPtr(Assembler::Zero, StackPointer, Imm32(StackAlignment - 1), &ok);
breakpoint();
bind(&ok);
#endif
}
};
static inline Assembler::DoubleCondition
@ -1497,6 +1507,13 @@ JSOpToCondition(JSOp op, bool isSigned)
}
}
// Returns the number of bytes to subtract from sp so that, with
// bytesAlreadyPushed already on the stack, sp is StackAlignment-aligned after
// the decrement (i.e. result >= bytesToPush and bytesAlreadyPushed + result
// is a multiple of StackAlignment).
static inline size_t
StackDecrementForCall(size_t bytesAlreadyPushed, size_t bytesToPush)
{
return bytesToPush +
ComputeByteAlignment(bytesAlreadyPushed + bytesToPush, StackAlignment);
}
} // namespace jit
} // namespace js

Просмотреть файл

@ -48,37 +48,14 @@ CodeGeneratorARM::generatePrologue()
return true;
}
// ARM asm.js function prologue (pre-AsmJSFrame scheme): push lr, check the
// stack limit, then reserve frameDepth_ bytes. Always returns true.
bool
CodeGeneratorARM::generateAsmJSPrologue(Label *stackOverflowLabel)
{
JS_ASSERT(gen->compilingAsmJS());
// See comment in Assembler-shared.h about AsmJSFrameSize.
masm.push(lr);
// The asm.js over-recursed handler wants to be able to assume that SP
// points to the return address, so perform the check after pushing lr but
// before pushing frameDepth.
if (!omitOverRecursedCheck()) {
masm.branchPtr(Assembler::AboveOrEqual,
AsmJSAbsoluteAddress(AsmJSImm_StackLimit),
StackPointer,
stackOverflowLabel);
}
// Note that this automatically sets MacroAssembler::framePushed().
masm.reserveStack(frameDepth_);
masm.checkStackAlignment();
return true;
}
bool
CodeGeneratorARM::generateEpilogue()
{
JS_ASSERT(!gen->compilingAsmJS());
masm.bind(&returnLabel_);
#ifdef JS_TRACE_LOGGING
if (!gen->compilingAsmJS() && gen->info().executionMode() == SequentialExecution) {
if (gen->info().executionMode() == SequentialExecution) {
if (!emitTracelogStopEvent(TraceLogger::IonMonkey))
return false;
if (!emitTracelogScriptStop())
@ -86,10 +63,7 @@ CodeGeneratorARM::generateEpilogue()
}
#endif
if (gen->compilingAsmJS())
masm.freeStack(frameDepth_);
else
masm.freeStack(frameSize());
masm.freeStack(frameSize());
JS_ASSERT(masm.framePushed() == 0);
masm.pop(pc);
masm.flushBuffer();

Просмотреть файл

@ -75,7 +75,6 @@ class CodeGeneratorARM : public CodeGeneratorShared
protected:
bool generatePrologue();
bool generateAsmJSPrologue(Label *stackOverflowLabel);
bool generateEpilogue();
bool generateOutOfLineCode();

Просмотреть файл

@ -3938,7 +3938,7 @@ MacroAssemblerARMCompat::callWithABIPre(uint32_t *stackAdjust, bool callFromAsmJ
if (UseHardFpABI())
*stackAdjust += 2*((usedFloatSlots_ > NumFloatArgRegs) ? usedFloatSlots_ - NumFloatArgRegs : 0) * sizeof(intptr_t);
#endif
uint32_t alignmentAtPrologue = callFromAsmJS ? AsmJSFrameSize : 0;
uint32_t alignmentAtPrologue = callFromAsmJS ? sizeof(AsmJSFrame) : 0;
if (!dynamicAlignment_) {
*stackAdjust += ComputeByteAlignment(framePushed_ + *stackAdjust + alignmentAtPrologue,

Просмотреть файл

@ -1606,6 +1606,10 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
#endif
// Load the current AsmJSActivation* from asm.js global data (reachable via
// the pinned GlobalReg) into |dest|.
void loadAsmJSActivation(Register dest) {
loadPtr(Address(GlobalReg, AsmJSActivationGlobalDataOffset), dest);
}
};
typedef MacroAssemblerARMCompat MacroAssemblerSpecific;

Просмотреть файл

@ -47,37 +47,14 @@ CodeGeneratorMIPS::generatePrologue()
return true;
}
// MIPS asm.js function prologue (pre-AsmJSFrame scheme): push ra, check the
// stack limit, then reserve frameDepth_ bytes. Always returns true.
bool
CodeGeneratorMIPS::generateAsmJSPrologue(Label *stackOverflowLabel)
{
JS_ASSERT(gen->compilingAsmJS());
// See comment in Assembler-shared.h about AsmJSFrameSize.
masm.push(ra);
// The asm.js over-recursed handler wants to be able to assume that SP
// points to the return address, so perform the check after pushing ra but
// before pushing frameDepth.
if (!omitOverRecursedCheck()) {
masm.branchPtr(Assembler::AboveOrEqual,
AsmJSAbsoluteAddress(AsmJSImm_StackLimit),
StackPointer,
stackOverflowLabel);
}
// Note that this automatically sets MacroAssembler::framePushed().
masm.reserveStack(frameDepth_);
masm.checkStackAlignment();
return true;
}
bool
CodeGeneratorMIPS::generateEpilogue()
{
MOZ_ASSERT(!gen->compilingAsmJS());
masm.bind(&returnLabel_);
#ifdef JS_TRACE_LOGGING
if (!gen->compilingAsmJS() && gen->info().executionMode() == SequentialExecution) {
if (gen->info().executionMode() == SequentialExecution) {
if (!emitTracelogStopEvent(TraceLogger::IonMonkey))
return false;
if (!emitTracelogScriptStop())
@ -85,10 +62,7 @@ CodeGeneratorMIPS::generateEpilogue()
}
#endif
if (gen->compilingAsmJS())
masm.freeStack(frameDepth_);
else
masm.freeStack(frameSize());
masm.freeStack(frameSize());
JS_ASSERT(masm.framePushed() == 0);
masm.ret();
return true;

Просмотреть файл

@ -104,7 +104,6 @@ class CodeGeneratorMIPS : public CodeGeneratorShared
protected:
bool generatePrologue();
bool generateAsmJSPrologue(Label *stackOverflowLabel);
bool generateEpilogue();
bool generateOutOfLineCode();

Просмотреть файл

@ -3265,7 +3265,7 @@ MacroAssemblerMIPSCompat::callWithABIPre(uint32_t *stackAdjust, bool callFromAsm
usedArgSlots_ * sizeof(intptr_t) :
NumIntArgRegs * sizeof(intptr_t);
uint32_t alignmentAtPrologue = callFromAsmJS ? AsmJSFrameSize : 0;
uint32_t alignmentAtPrologue = callFromAsmJS ? sizeof(AsmJSFrame) : 0;
if (dynamicAlignment_) {
*stackAdjust += ComputeByteAlignment(*stackAdjust, StackAlignment);

Просмотреть файл

@ -1274,6 +1274,10 @@ public:
void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
Label *label);
#endif
// Loads the current AsmJSActivation* into |dest|. On MIPS the activation
// pointer is kept at a fixed offset from the global-data register, so a
// simple register-relative load is sufficient.
void loadAsmJSActivation(Register dest) {
    Address activationSlot(GlobalReg, AsmJSActivationGlobalDataOffset);
    loadPtr(activationSlot, dest);
}
};
typedef MacroAssemblerMIPSCompat MacroAssemblerSpecific;

Просмотреть файл

@ -621,11 +621,28 @@ struct CallSite : public CallSiteDesc
typedef Vector<CallSite, 0, SystemAllocPolicy> CallSiteVector;
// As an invariant across architectures, within asm.js code:
// $sp % StackAlignment = (AsmJSFrameSize + masm.framePushed) % StackAlignment
// AsmJSFrameSize is 1 word, for the return address pushed by the call (or, in
// the case of ARM/MIPS, by the first instruction of the prologue). This means
// masm.framePushed never includes the pushed return address.
static const uint32_t AsmJSFrameSize = sizeof(void*);
// $sp % StackAlignment = (sizeof(AsmJSFrame) + masm.framePushed) % StackAlignment
// Thus, AsmJSFrame represents the bytes pushed after the call (which occurred
// with a StackAlignment-aligned StackPointer) that are not included in
// masm.framePushed.
// Layout of the bytes pushed at a call boundary in asm.js code that are NOT
// accounted for by masm.framePushed (see the stack-alignment invariant above).
struct AsmJSFrame
{
    // The caller's saved frame pointer. In non-profiling mode, internal
    // asm.js-to-asm.js calls don't update fp and thus don't save the caller's
    // frame pointer; the space is reserved, however, so that profiling mode can
    // reuse the same function body without recompiling.
    uint8_t *callerFP;

    // The return address pushed by the call (in the case of ARM/MIPS the return
    // address is pushed by the first instruction of the prologue).
    void *returnAddress;
};

// The stack-alignment math throughout assumes exactly two words; give the
// assert a message that explains the failure instead of the placeholder "?!".
static_assert(sizeof(AsmJSFrame) == 2 * sizeof(void*),
              "AsmJSFrame must be exactly two words (callerFP, returnAddress)");

// Number of bytes in an AsmJSFrame below the return address (i.e., callerFP).
static const uint32_t AsmJSFrameBytesAfterReturnAddress = sizeof(void*);

// A hoisting of AsmJSModule::activationGlobalDataOffset that avoids #including
// AsmJSModule everywhere.
static const unsigned AsmJSActivationGlobalDataOffset = 0;
// Summarizes a heap access made by asm.js code that needs to be patched later
// and/or looked up by the asm.js signal handlers. Different architectures need
@ -795,9 +812,9 @@ class AssemblerShared
}
void append(const CallSiteDesc &desc, size_t currentOffset, size_t framePushed) {
// framePushed does not include AsmJSFrameSize, so add it in here (see
// framePushed does not include sizeof(AsmJSFrame), so add it in here (see
// CallSite::stackDepth).
CallSite callsite(desc, currentOffset, framePushed + AsmJSFrameSize);
CallSite callsite(desc, currentOffset, framePushed + sizeof(AsmJSFrame));
enoughMemory_ &= callsites_.append(callsite);
}
CallSiteVector &&extractCallSites() { return Move(callsites_); }

Просмотреть файл

@ -69,7 +69,7 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator *gen, LIRGraph *graph, Mac
// relies on the a priori stack adjustment (in the prologue) on platforms
// (like x64) which require the stack to be aligned.
if (StackKeptAligned || gen->needsInitialStackAlignment()) {
unsigned alignmentAtCall = AsmJSFrameSize + frameDepth_;
unsigned alignmentAtCall = sizeof(AsmJSFrame) + frameDepth_;
if (unsigned rem = alignmentAtCall % StackAlignment)
frameDepth_ += StackAlignment - rem;
}

Просмотреть файл

@ -153,7 +153,7 @@ class CodeGeneratorShared : public LInstructionVisitor
// For arguments to the current function.
inline int32_t ArgToStackOffset(int32_t slot) const {
return masm.framePushed() +
(gen->compilingAsmJS() ? AsmJSFrameSize : sizeof(IonJSFrameLayout)) +
(gen->compilingAsmJS() ? sizeof(AsmJSFrame) : sizeof(IonJSFrameLayout)) +
slot;
}
@ -224,6 +224,9 @@ class CodeGeneratorShared : public LInstructionVisitor
#endif
public:
// Returns the MIRGenerator driving this code generator (non-owning).
MIRGenerator *mirGen() const {
    return gen;
}
// When appending to runtimeData_, the vector might realloc, leaving pointers
// int the origianl vector stale and unusable. DataPtr acts like a pointer,

Просмотреть файл

@ -49,33 +49,15 @@ CodeGeneratorX86Shared::generatePrologue()
return true;
}
// Emits the asm.js entry prologue for x86/x64: performs the over-recursion
// check (the call instruction has already pushed the return address), then
// reserves the function's frame.
bool
CodeGeneratorX86Shared::generateAsmJSPrologue(Label *stackOverflowLabel)
{
    // Only asm.js compilations use this prologue.
    JS_ASSERT(gen->compilingAsmJS());

    // The asm.js over-recursed handler wants to be able to assume that SP
    // points to the return address, so perform the check before pushing
    // frameDepth.
    if (!omitOverRecursedCheck()) {
        // Branch to the overflow handler if SP has descended past the limit.
        masm.branchPtr(Assembler::AboveOrEqual,
                       AsmJSAbsoluteAddress(AsmJSImm_StackLimit),
                       StackPointer,
                       stackOverflowLabel);
    }

    // Note that this automatically sets MacroAssembler::framePushed().
    masm.reserveStack(frameSize());
    return true;
}
bool
CodeGeneratorX86Shared::generateEpilogue()
{
JS_ASSERT(!gen->compilingAsmJS());
masm.bind(&returnLabel_);
#ifdef JS_TRACE_LOGGING
if (!gen->compilingAsmJS() && gen->info().executionMode() == SequentialExecution) {
if (gen->info().executionMode() == SequentialExecution) {
if (!emitTracelogStopEvent(TraceLogger::IonMonkey))
return false;
if (!emitTracelogScriptStop())

Просмотреть файл

@ -117,7 +117,6 @@ class CodeGeneratorX86Shared : public CodeGeneratorShared
protected:
bool generatePrologue();
bool generateAsmJSPrologue(Label *stackOverflowLabel);
bool generateEpilogue();
bool generateOutOfLineCode();

Просмотреть файл

@ -601,6 +601,11 @@ class Assembler : public AssemblerX86Shared
return CodeOffsetLabel(masm.leaq_rip(dest.code()).offset());
}
// Loads the current AsmJSActivation* into |dest|. On x64 this is a
// RIP-relative load whose displacement is recorded (via AsmJSGlobalAccess)
// so it can be patched to point at the activation slot in global data.
void loadAsmJSActivation(Register dest) {
    CodeOffsetLabel patchSite = loadRipRelativeInt64(dest);
    append(AsmJSGlobalAccess(patchSite, AsmJSActivationGlobalDataOffset));
}
// The below cmpq methods switch the lhs and rhs when it invokes the
// macroassembler to conform with intel standard. When calling this
// function put the left operand on the left as you would expect.

Просмотреть файл

@ -530,6 +530,10 @@ class Assembler : public AssemblerX86Shared
return CodeOffsetLabel(masm.currentOffset());
}
// Loads the current AsmJSActivation* into |dest|. On x86 this is an absolute
// load through a patched address; the patch site is recorded (via
// AsmJSGlobalAccess) so it can later be fixed up to the activation slot.
void loadAsmJSActivation(Register dest) {
    CodeOffsetLabel patchSite = movlWithPatch(PatchedAbsoluteAddress(), dest);
    append(AsmJSGlobalAccess(patchSite, AsmJSActivationGlobalDataOffset));
}
};
// Get a register in which we plan to put a quantity that will be used as an

Просмотреть файл

@ -1688,7 +1688,7 @@ AsmJSActivation::AsmJSActivation(JSContext *cx, AsmJSModule &module)
errorRejoinSP_(nullptr),
profiler_(nullptr),
resumePC_(nullptr),
exitFP_(nullptr)
fp_(nullptr)
{
if (cx->runtime()->spsProfiler.enabled()) {
// Use a profiler string that matches jsMatch regex in
@ -1715,6 +1715,8 @@ AsmJSActivation::~AsmJSActivation()
if (profiler_)
profiler_->exitNative();
JS_ASSERT(fp_ == nullptr);
JS_ASSERT(module_.activation() == this);
module_.activation() = prevAsmJSForModule_;

Просмотреть файл

@ -1479,7 +1479,7 @@ class AsmJSActivation : public Activation
void *errorRejoinSP_;
SPSProfiler *profiler_;
void *resumePC_;
uint8_t *exitFP_;
uint8_t *fp_;
public:
AsmJSActivation(JSContext *cx, AsmJSModule &module);
@ -1489,24 +1489,20 @@ class AsmJSActivation : public Activation
AsmJSModule &module() const { return module_; }
AsmJSActivation *prevAsmJS() const { return prevAsmJS_; }
// Returns a pointer to the base of the innermost stack frame of asm.js code
// in this activation, or null when no asm.js frame is active (fp_ is
// initialized to nullptr in the constructor and asserted null in ~AsmJSActivation).
uint8_t *fp() const { return fp_; }
// Read by JIT code:
static unsigned offsetOfContext() { return offsetof(AsmJSActivation, cx_); }
static unsigned offsetOfResumePC() { return offsetof(AsmJSActivation, resumePC_); }
// Initialized by JIT code:
// Written by JIT code:
static unsigned offsetOfErrorRejoinSP() { return offsetof(AsmJSActivation, errorRejoinSP_); }
static unsigned offsetOfExitFP() { return offsetof(AsmJSActivation, exitFP_); }
static unsigned offsetOfFP() { return offsetof(AsmJSActivation, fp_); }
// Set from SIGSEGV handler:
void setResumePC(void *pc) { resumePC_ = pc; }
// If pc is in C++/Ion code, exitFP points to the innermost asm.js frame
// (the one that called into C++). While in asm.js code, exitFP is either
// null or points to the innermost asm.js frame. Thus, it is always valid to
// unwind a non-null exitFP. The only way C++ can observe a null exitFP is
// asychronous interruption of asm.js execution (viz., via the profiler,
// a signal handler, or the interrupt exit).
uint8_t *exitFP() const { return exitFP_; }
};
// A FrameIter walks over the runtime's stack of JS script activations,