Bug 1774546 part 3 - Store caller frame pointer in CommonFrameLayout. r=iain,rhunt

Now that all frames store the caller's frame pointer, we can add it to `CommonFrameLayout`
and get rid of `FramePointerOffset`.

This also removes `JitFrameLayout::unused_`. This has to happen at the same time so that
`sizeof(JitFrameLayout)` doesn't change (a lot of code depends on that for alignment).

`JitFrameLayout` is now aligned on the frame pointer instead of the return address.
This lets us simplify some of the Wasm stub code (especially for ARM64) where we had
to work around the old aligned-after-return-address invariant.
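
Roughly, the new frame header looks like this (a minimal sketch mirroring the
JitFrames.h hunk below, with a plain pointer standing in for `CalleeToken`;
illustrative only, not the literal declarations):

    #include <cstddef>
    #include <cstdint>

    using CalleeToken = void*;  // stand-in for the real tagged-pointer type

    class CommonFrameLayout {
      uint8_t* callerFramePtr_;  // caller's frame pointer, now part of the header
      uint8_t* returnAddress_;   // pushed by the call instruction
      uintptr_t descriptor_;     // frame type (and argc for JS frames)

     public:
      // The call/callee pop the return address and the saved frame pointer, so
      // these two words are already gone once the callee has returned.
      static constexpr size_t bytesPoppedAfterCall() { return 2 * sizeof(void*); }
    };

    class JitFrameLayout : public CommonFrameLayout {
      CalleeToken calleeToken_;  // the old unused_ padding word is dropped
      // JS::Value argv[] (starting with |this|) follows; sizeof(JitFrameLayout)
      // is unchanged, and the layout is now aligned at the frame pointer
      // instead of after the return address.
    };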

Differential Revision: https://phabricator.services.mozilla.com/D149760
Jan de Mooij 2022-06-22 08:01:15 +00:00
Parent 44a743c598
Commit 5d6240653e
36 changed files with 339 additions and 564 deletions

View file

@ -139,8 +139,6 @@ class MOZ_STACK_CLASS BaselineStackBuilder {
MOZ_ASSERT(!header_);
MOZ_ASSERT(bufferUsed_ == 0);
prevFramePtr_ = frame_->callerFramePtr();
uint8_t* bufferRaw = cx_->pod_calloc<uint8_t>(bufferTotal_);
if (!bufferRaw) {
return false;
@ -228,7 +226,10 @@ class MOZ_STACK_CLASS BaselineStackBuilder {
return excInfo_ && excInfo_->propagatingIonExceptionForDebugMode();
}
void* prevFramePtr() const { return prevFramePtr_; }
void* prevFramePtr() const {
MOZ_ASSERT(prevFramePtr_);
return prevFramePtr_;
}
BufferPointer<BaselineFrame>& blFrame() { return blFrame_.ref(); }
void setNextCallee(JSFunction* nextCallee);
@ -460,20 +461,23 @@ bool BaselineStackBuilder::initFrame() {
MOZ_ASSERT(exprStackSlots_ <= totalFrameSlots);
}
resetFramePushed();
JitSpew(JitSpew_BaselineBailouts, " Unpacking %s:%u:%u",
script_->filename(), script_->lineno(), script_->column());
JitSpew(JitSpew_BaselineBailouts, " [BASELINE-JS FRAME]");
// Write the previous frame pointer value. Record the virtual stack offset at
// this location. Later on, if we end up writing out a BaselineStub frame for
// the next callee, we'll need to save the address.
if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
return false;
// Write the previous frame pointer value. For the outermost frame we reuse
// the value in the JitFrameLayout already on the stack. Record the virtual
// stack offset at this location. Later on, if we end up writing out a
// BaselineStub frame for the next callee, we'll need to save the address.
if (!isOutermostFrame()) {
if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
return false;
}
}
prevFramePtr_ = virtualPointerAtStackOffset(0);
resetFramePushed();
return true;
}
@ -882,8 +886,6 @@ bool BaselineStackBuilder::buildStubFrame(uint32_t frameSize,
// +---------------+
// | ThisV |
// +---------------+
// | ActualArgC |
// +---------------+
// | CalleeToken |
// +---------------+
// | Descr(BLStub) |
@ -993,11 +995,6 @@ bool BaselineStackBuilder::buildStubFrame(uint32_t frameSize,
// rectifier frame, save the framePushed values here for later use.
size_t endOfBaselineStubArgs = framePushed();
// Push unused_ field.
if (!writeWord(JitFrameLayout::UnusedValue, "Unused")) {
return false;
}
// Push callee token (must be a JS Function)
JitSpew(JitSpew_BaselineBailouts, " Callee = %016" PRIx64,
callee.asRawBits());
@ -1021,7 +1018,9 @@ bool BaselineStackBuilder::buildStubFrame(uint32_t frameSize,
if (!writePtr(baselineCallReturnAddr, "ReturnAddr")) {
return false;
}
MOZ_ASSERT(framePushed() % JitStackAlignment == 0);
// The stack must be aligned after the callee pushes the frame pointer.
MOZ_ASSERT((framePushed() + sizeof(void*)) % JitStackAlignment == 0);
// Build a rectifier frame if necessary
if (actualArgc < calleeFun->nargs() &&
@ -1052,8 +1051,6 @@ bool BaselineStackBuilder::buildRectifierFrame(uint32_t actualArgc,
// +---------------+
// | ThisV |
// +---------------+
// | ActualArgC |
// +---------------+
// | CalleeToken |
// +---------------+
// | Descr(Rect) |
@ -1069,14 +1066,6 @@ bool BaselineStackBuilder::buildRectifierFrame(uint32_t actualArgc,
}
prevFramePtr_ = virtualPointerAtStackOffset(0);
#ifdef JS_NUNBOX32
// 32-bit platforms push an extra padding word. Follow the same logic as in
// JitRuntime::generateArgumentsRectifier.
if (!writePtr(prevFramePtr(), "Padding")) {
return false;
}
#endif
// Align the stack based on the number of arguments.
size_t afterFrameSize =
(nextCallee()->nargs() + 1 + pushedNewTarget) * sizeof(Value) +
@ -1112,11 +1101,6 @@ bool BaselineStackBuilder::buildRectifierFrame(uint32_t actualArgc,
memcpy(pointerAtStackOffset<uint8_t>(0).get(), stubArgsEnd.get(),
(actualArgc + 1) * sizeof(Value));
// Push unused_ field.
if (!writeWord(JitFrameLayout::UnusedValue, "Unused")) {
return false;
}
// Push calleeToken again.
if (!writePtr(CalleeToToken(nextCallee(), pushedNewTarget), "CalleeToken")) {
return false;
@ -1137,7 +1121,9 @@ bool BaselineStackBuilder::buildRectifierFrame(uint32_t actualArgc,
if (!writePtr(rectReturnAddr, "ReturnAddr")) {
return false;
}
MOZ_ASSERT(framePushed() % JitStackAlignment == 0);
// The stack must be aligned after the callee pushes the frame pointer.
MOZ_ASSERT((framePushed() + sizeof(void*)) % JitStackAlignment == 0);
return true;
}

View file

@ -496,7 +496,6 @@ bool BaselineCacheIRCompiler::emitCallScriptedGetterShared(
masm.storeICScriptInJSContext(scratch);
}
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.Push(callee);
masm.PushFrameDescriptorForJitCall(FrameType::BaselineStub, /* argc = */ 0);
@ -1579,8 +1578,6 @@ bool BaselineCacheIRCompiler::emitCallScriptedSetterShared(
masm.Push(val);
masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(receiver)));
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
// Push callee.
masm.Push(callee);
@ -2604,6 +2601,7 @@ bool BaselineCacheIRCompiler::emitCallNativeShared(
masm.pushFrameDescriptor(FrameType::BaselineStub);
masm.push(ICTailCallReg);
masm.push(FramePointer);
masm.loadJSContext(scratch);
masm.enterFakeExitFrameForNative(scratch, scratch, isConstructing);
@ -2845,11 +2843,12 @@ void BaselineCacheIRCompiler::updateReturnValue() {
// ...
// Arg0
// ThisVal <---- We want this value.
// argc ^
// Callee token | Skip three stack slots.
// Callee token | Skip two stack slots.
// Frame descriptor v
// [Top of stack]
Address thisAddress(masm.getStackPointer(), 3 * sizeof(size_t));
size_t thisvOffset =
JitFrameLayout::offsetOfThis() - JitFrameLayout::bytesPoppedAfterCall();
Address thisAddress(masm.getStackPointer(), thisvOffset);
masm.loadValue(thisAddress, JSReturnOperand);
#ifdef DEBUG
@ -2901,7 +2900,6 @@ bool BaselineCacheIRCompiler::emitCallScriptedFunction(ObjOperandId calleeId,
// Note that we use Push, not push, so that callJit will align the stack
// properly on ARM.
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.PushCalleeToken(calleeReg, isConstructing);
masm.PushFrameDescriptorForJitCall(FrameType::BaselineStub, argcReg, scratch);
@ -3009,7 +3007,6 @@ bool BaselineCacheIRCompiler::emitCallInlinedFunction(ObjOperandId calleeId,
// Note that we use Push, not push, so that callJit will align the stack
// properly on ARM.
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.PushCalleeToken(calleeReg, isConstructing);
masm.PushFrameDescriptorForJitCall(FrameType::BaselineStub, argcReg, scratch);
@ -3184,7 +3181,6 @@ bool BaselineCacheIRCompiler::emitCloseIterScriptedResult(
masm.pushValue(UndefinedValue());
}
masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(iter)));
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.Push(callee);
masm.PushFrameDescriptorForJitCall(FrameType::BaselineStub, /* argc = */ 0);

View file

@ -674,10 +674,9 @@ void BaselineCompilerCodeGen::computeFrameSize(Register dest) {
template <>
void BaselineInterpreterCodeGen::computeFrameSize(Register dest) {
// dest = FramePointer + BaselineFrame::FramePointerOffset - StackPointer.
// dest := FramePointer - StackPointer.
MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
masm.computeEffectiveAddress(
Address(FramePointer, BaselineFrame::FramePointerOffset), dest);
masm.mov(FramePointer, dest);
masm.subStackPtrFrom(dest);
}
@ -707,10 +706,8 @@ void BaselineInterpreterCodeGen::storeFrameSizeAndPushDescriptor(
uint32_t argSize, Register scratch) {
#ifdef DEBUG
// Store the frame size without VMFunction arguments in debug builds.
// scratch := FramePointer + BaselineFrame::FramePointerOffset - StackPointer
// - argSize.
masm.computeEffectiveAddress(
Address(FramePointer, BaselineFrame::FramePointerOffset), scratch);
// scratch := FramePointer - StackPointer - argSize.
masm.mov(FramePointer, scratch);
masm.subStackPtrFrom(scratch);
masm.sub32(Imm32(argSize), scratch);
masm.store32(scratch, frame.addressOfDebugFrameSize());
@ -1468,10 +1465,9 @@ bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
}
#endif
// Restore the stack pointer so that the return address is on top of
// Restore the stack pointer so that the saved frame pointer is on top of
// the stack.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump into Ion.
masm.loadPtr(Address(osrDataReg, IonOsrTempData::offsetOfBaselineFrame()),
@ -5859,13 +5855,11 @@ bool BaselineCodeGen<Handler>::emit_Resume() {
#ifdef DEBUG
// Update BaselineFrame debugFrameSize field.
masm.computeEffectiveAddress(
Address(FramePointer, BaselineFrame::FramePointerOffset), scratch2);
masm.mov(FramePointer, scratch2);
masm.subStackPtrFrom(scratch2);
masm.store32(scratch2, frame.addressOfDebugFrameSize());
#endif
masm.push(ImmWord(JitFrameLayout::UnusedValue));
masm.PushCalleeToken(callee, /* constructing = */ false);
masm.pushFrameDescriptorForJitCall(FrameType::BaselineJS, /* argc = */ 0);
@ -6327,12 +6321,13 @@ bool BaselineCodeGen<Handler>::emitPrologue() {
#ifdef JS_USE_LINK_REGISTER
// Push link register from generateEnterJIT()'s BLR.
masm.pushReturnAddress();
masm.checkStackAlignment();
#endif
masm.push(FramePointer);
masm.moveStackPtrTo(FramePointer);
masm.checkStackAlignment();
emitProfilerEnterFrame();
masm.subFromStackPtr(Imm32(BaselineFrame::Size()));

View file

@ -24,8 +24,7 @@ class JSJitFrameIter;
// The stack looks like this, fp is the frame pointer:
//
// fp+y arguments
// fp+x JitFrameLayout (frame header)
// fp => saved frame pointer
// fp => JitFrameLayout (frame header)
// fp-x BaselineFrame
// locals
// stack values
@ -74,28 +73,19 @@ class BaselineFrame {
uint32_t flags_;
#ifdef DEBUG
// Size of the frame. Stored in DEBUG builds when calling into C++. This is
// the saved frame pointer (FramePointerOffset) + BaselineFrame::Size() + the
// size of the local and expression stack Values.
// BaselineFrame::Size() + the size of the local and expression stack Values.
//
// We don't store this in release builds because it's redundant with the frame
// size stored in the frame descriptor (frame iterators can compute this value
// from the descriptor). In debug builds it's still useful for assertions.
// size computed from the frame pointers. In debug builds it's still useful
// for assertions.
uint32_t debugFrameSize_;
#else
uint32_t unused_;
#endif
uint32_t loReturnValue_; // If HAS_RVAL, the frame's return value.
uint32_t hiReturnValue_;
#if JS_BITS_PER_WORD == 32
// Ensure frame is 8-byte aligned, see static_assert below.
uint32_t padding_;
#endif
public:
// Distance between the frame pointer and the frame header (return address).
// This is the old frame pointer saved in the prologue.
static const uint32_t FramePointerOffset = sizeof(void*);
[[nodiscard]] bool initForOsr(InterpreterFrame* fp, uint32_t numStackValues);
#ifdef DEBUG
@ -130,9 +120,8 @@ class BaselineFrame {
size_t numValueSlots(size_t frameSize) const {
MOZ_ASSERT(frameSize == debugFrameSize());
MOZ_ASSERT(frameSize >=
BaselineFrame::FramePointerOffset + BaselineFrame::Size());
frameSize -= BaselineFrame::FramePointerOffset + BaselineFrame::Size();
MOZ_ASSERT(frameSize >= BaselineFrame::Size());
frameSize -= BaselineFrame::Size();
MOZ_ASSERT((frameSize % sizeof(Value)) == 0);
return frameSize / sizeof(Value);
@ -148,8 +137,7 @@ class BaselineFrame {
}
static size_t frameSizeForNumValueSlots(size_t numValueSlots) {
return BaselineFrame::FramePointerOffset + BaselineFrame::Size() +
numValueSlots * sizeof(Value);
return BaselineFrame::Size() + numValueSlots * sizeof(Value);
}
Value& unaliasedFormal(
@ -327,20 +315,17 @@ class BaselineFrame {
bool isDebuggerEvalFrame() const { return false; }
JitFrameLayout* framePrefix() const {
uint8_t* fp = (uint8_t*)this + Size() + FramePointerOffset;
uint8_t* fp = (uint8_t*)this + Size();
return (JitFrameLayout*)fp;
}
// Methods below are used by the compiler.
static size_t offsetOfCalleeToken() {
return FramePointerOffset + js::jit::JitFrameLayout::offsetOfCalleeToken();
}
static size_t offsetOfThis() {
return FramePointerOffset + js::jit::JitFrameLayout::offsetOfThis();
return JitFrameLayout::offsetOfCalleeToken();
}
static size_t offsetOfThis() { return JitFrameLayout::offsetOfThis(); }
static size_t offsetOfArg(size_t index) {
return FramePointerOffset +
js::jit::JitFrameLayout::offsetOfActualArg(index);
return JitFrameLayout::offsetOfActualArg(index);
}
static size_t Size() { return sizeof(BaselineFrame); }
@ -396,9 +381,7 @@ class BaselineFrame {
};
// Ensure the frame is 8-byte aligned (required on ARM).
static_assert(((sizeof(BaselineFrame) + BaselineFrame::FramePointerOffset) %
8) == 0,
"frame (including frame pointer) must be 8-byte aligned");
static_assert((sizeof(BaselineFrame) % 8) == 0, "frame must be 8-byte aligned");
} // namespace jit
} // namespace js

View file

@ -1790,8 +1790,10 @@ bool FallbackICCodeCompiler::emitCall(bool isSpread, bool isConstructing) {
// Load passed-in ThisV into R1 just in case it's needed. Need to do this
// before we leave the stub frame since that info will be lost.
// Current stack: [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
masm.loadValue(Address(masm.getStackPointer(), 3 * sizeof(size_t)), R1);
// Current stack: [...., ThisV, CalleeToken, Descriptor ]
size_t thisvOffset =
JitFrameLayout::offsetOfThis() - JitFrameLayout::bytesPoppedAfterCall();
masm.loadValue(Address(masm.getStackPointer(), thisvOffset), R1);
leaveStubFrame(masm);
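
The same arithmetic can be checked in isolation; a self-contained sketch, with
stand-in structs that mirror the new header layout (field names assumed from
the JitFrames.h changes):

    #include <cstddef>
    #include <cstdint>

    struct CommonFrameLayoutSketch {
      uint8_t* callerFramePtr_;
      uint8_t* returnAddress_;
      uintptr_t descriptor_;
    };
    struct JitFrameLayoutSketch : CommonFrameLayoutSketch {
      void* calleeToken_;
      // argv[] (starting with |this|) follows the header, so
      // offsetOfThis() corresponds to sizeof(JitFrameLayoutSketch).
    };

    // After the callee returns, the return address and the saved frame pointer
    // are already off the stack, leaving only Descriptor and CalleeToken
    // between the stack pointer and |this|.
    constexpr size_t bytesPoppedAfterCall = 2 * sizeof(void*);
    static_assert(sizeof(JitFrameLayoutSketch) - bytesPoppedAfterCall ==
                      2 * sizeof(void*),
                  "ThisV sits two slots above the post-call stack pointer");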

View file

@ -8738,20 +8738,18 @@ void CacheIRCompiler::callVMInternal(MacroAssembler& masm, VMFunctionId id) {
if (mode_ == Mode::Ion) {
TrampolinePtr code = cx_->runtime()->jitRuntime()->getVMWrapper(id);
const VMFunctionData& fun = GetVMFunction(id);
uint32_t frameSize = fun.explicitStackSlots() * sizeof(void*) +
IonICCallFrameLayout::FramePointerOffset;
uint32_t frameSize = fun.explicitStackSlots() * sizeof(void*);
masm.PushFrameDescriptor(FrameType::IonICCall);
masm.callJit(code);
// Remove rest of the frame left on the stack. We remove the return address
// which is implicitly popped when returning.
int framePop = sizeof(ExitFrameLayout) - sizeof(void*);
// Pop rest of the exit frame and the arguments left on the stack.
int framePop =
sizeof(ExitFrameLayout) - ExitFrameLayout::bytesPoppedAfterCall();
masm.implicitPop(frameSize + framePop);
// Pop arguments from framePushed and restore the frame pointer.
masm.implicitPop(frameSize + framePop -
IonICCallFrameLayout::FramePointerOffset);
// Pop IonICCallFrameLayout.
masm.Pop(FramePointer);
masm.freeStack(IonICCallFrameLayout::Size());
masm.freeStack(IonICCallFrameLayout::Size() - sizeof(void*));
return;
}

View file

@ -361,12 +361,11 @@ void CodeGenerator::callVMInternal(VMFunctionId id, LInstruction* ins) {
}
#endif
// Remove rest of the frame left on the stack. We remove the return address
// which is implicitly popped when returning.
int framePop = sizeof(ExitFrameLayout) - sizeof(void*);
// Pop arguments from framePushed.
// Pop rest of the exit frame and the arguments left on the stack.
int framePop =
sizeof(ExitFrameLayout) - ExitFrameLayout::bytesPoppedAfterCall();
masm.implicitPop(fun.explicitStackSlots() * sizeof(void*) + framePop);
// Stack is:
// ... frame ...
}
@ -3722,16 +3721,15 @@ void CodeGenerator::visitOsrEntry(LOsrEntry* lir) {
MOZ_ASSERT(masm.framePushed() == frameSize());
masm.setFramePushed(0);
// Frame prologue.
masm.Push(FramePointer);
masm.moveStackPtrTo(FramePointer);
// The Baseline code ensured both the frame pointer and stack pointer point to
// the JitFrameLayout on the stack.
// If profiling, save the current frame pointer to a per-thread global field.
if (isProfilerInstrumentationEnabled()) {
masm.profilerEnterFrame(FramePointer, temp);
}
masm.reserveStack(frameSize() - sizeof(uintptr_t));
masm.reserveStack(frameSize());
MOZ_ASSERT(masm.framePushed() == frameSize());
// Ensure that the Ion frame is properly aligned.
@ -5423,7 +5421,6 @@ void CodeGenerator::visitCallGeneric(LCallGeneric* call) {
masm.freeStack(unusedStack);
// Construct the JitFrameLayout.
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.PushCalleeToken(calleereg, call->mir()->isConstructing());
masm.PushFrameDescriptorForJitCall(FrameType::IonJS, call->numActualArgs());
@ -5458,9 +5455,10 @@ void CodeGenerator::visitCallGeneric(LCallGeneric* call) {
masm.switchToRealm(gen->realm->realmPtr(), ReturnReg);
}
// Increment to remove IonFramePrefix; decrement to fill FrameSizeClass.
// The return address has already been removed from the Ion frame.
int prefixGarbage = sizeof(JitFrameLayout) - sizeof(void*);
// Restore stack pointer: pop JitFrameLayout fields still left on the stack
// and undo the earlier |freeStack(unusedStack)|.
int prefixGarbage =
sizeof(JitFrameLayout) - JitFrameLayout::bytesPoppedAfterCall();
masm.adjustStack(prefixGarbage - unusedStack);
masm.jump(&end);
@ -5528,7 +5526,6 @@ void CodeGenerator::visitCallKnown(LCallKnown* call) {
masm.freeStack(unusedStack);
// Construct the JitFrameLayout.
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.PushCalleeToken(calleereg, call->mir()->isConstructing());
masm.PushFrameDescriptorForJitCall(FrameType::IonJS, call->numActualArgs());
@ -5542,9 +5539,10 @@ void CodeGenerator::visitCallKnown(LCallKnown* call) {
masm.switchToRealm(gen->realm->realmPtr(), ReturnReg);
}
// Increment to remove IonFramePrefix; decrement to fill FrameSizeClass.
// The return address has already been removed from the Ion frame.
int prefixGarbage = sizeof(JitFrameLayout) - sizeof(void*);
// Restore stack pointer: pop JitFrameLayout fields still left on the stack
// and undo the earlier |freeStack(unusedStack)|.
int prefixGarbage =
sizeof(JitFrameLayout) - JitFrameLayout::bytesPoppedAfterCall();
masm.adjustStack(prefixGarbage - unusedStack);
// If the return value of the constructing function is Primitive,
@ -5691,7 +5689,7 @@ void CodeGenerator::emitRestoreStackPointerFromFP() {
MOZ_ASSERT(masm.framePushed() == frameSize());
int32_t offset = -int32_t(frameSize() - JitFrameLayout::FramePointerOffset);
int32_t offset = -int32_t(frameSize());
masm.computeEffectiveAddress(Address(FramePointer, offset),
masm.getStackPointer());
}
@ -5714,9 +5712,8 @@ void CodeGenerator::emitPushArguments(Register argcreg, Register scratch,
// Compute the source and destination offsets into the stack.
Register argvSrcBase = FramePointer;
size_t argvSrcOffset = JitFrameLayout::FramePointerOffset +
JitFrameLayout::offsetOfActualArgs() +
extraFormals * sizeof(JS::Value);
size_t argvSrcOffset =
JitFrameLayout::offsetOfActualArgs() + extraFormals * sizeof(JS::Value);
size_t argvDstOffset = 0;
Register argvIndex = scratch;
@ -5981,7 +5978,6 @@ void CodeGenerator::emitApplyGeneric(T* apply) {
// Knowing that calleereg is a non-native function, load jitcode.
masm.loadJitCodeRaw(calleereg, objreg);
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.PushCalleeToken(calleereg, constructing);
masm.PushFrameDescriptorForJitCall(FrameType::IonJS, argcreg, scratch);
@ -6024,9 +6020,9 @@ void CodeGenerator::emitApplyGeneric(T* apply) {
masm.switchToRealm(gen->realm->realmPtr(), ReturnReg);
}
// Discard JitFrameLayout. The return address has already been removed from
// the Ion frame.
masm.freeStack(sizeof(JitFrameLayout) - sizeof(void*));
// Discard JitFrameLayout fields still left on the stack.
masm.freeStack(sizeof(JitFrameLayout) -
JitFrameLayout::bytesPoppedAfterCall());
masm.jump(&end);
}
@ -10687,6 +10683,7 @@ void JitRuntime::generateLazyLinkStub(MacroAssembler& masm) {
#ifdef JS_USE_LINK_REGISTER
masm.pushReturnAddress();
#endif
masm.Push(FramePointer);
AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
Register temp0 = regs.takeAny();
@ -10704,7 +10701,8 @@ void JitRuntime::generateLazyLinkStub(MacroAssembler& masm) {
masm.callWithABI<Fn, LazyLinkTopActivation>(
MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
masm.leaveExitFrame();
// Discard exit frame and frame pointer.
masm.leaveExitFrame(sizeof(void*));
#ifdef JS_USE_LINK_REGISTER
// Restore the return address such that the emitPrologue function of the
@ -10722,6 +10720,7 @@ void JitRuntime::generateInterpreterStub(MacroAssembler& masm) {
#ifdef JS_USE_LINK_REGISTER
masm.pushReturnAddress();
#endif
masm.Push(FramePointer);
AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
Register temp0 = regs.takeAny();
@ -10740,13 +10739,16 @@ void JitRuntime::generateInterpreterStub(MacroAssembler& masm) {
MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
masm.branchIfFalseBool(ReturnReg, masm.failureLabel());
masm.leaveExitFrame();
// Discard exit frame and frame pointer.
masm.leaveExitFrame(sizeof(void*));
// InvokeFromInterpreterStub stores the return value in argv[0], where the
// caller stored |this|.
masm.loadValue(
Address(masm.getStackPointer(), JitFrameLayout::offsetOfThis()),
JSReturnOperand);
// caller stored |this|. Subtract |sizeof(void*)| for the frame pointer we
// just popped.
masm.loadValue(Address(masm.getStackPointer(),
JitFrameLayout::offsetOfThis() - sizeof(void*)),
JSReturnOperand);
masm.ret();
}

View file

@ -892,13 +892,13 @@ bool IonCacheIRCompiler::emitCallScriptedGetterResult(
masm.movePtr(ImmGCPtr(target), scratch);
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.Push(scratch);
masm.PushFrameDescriptorForJitCall(FrameType::IonICCall, /* argc = */ 0);
// Check stack alignment. Add sizeof(uintptr_t) for the return address.
MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) ==
0);
// Check stack alignment. Add 2 * sizeof(uintptr_t) for the return address and
// frame pointer pushed by the call/callee.
MOZ_ASSERT(
((masm.framePushed() + 2 * sizeof(uintptr_t)) % JitStackAlignment) == 0);
MOZ_ASSERT(target->hasJitEntry());
masm.loadJitCodeRaw(scratch, scratch);
@ -1540,13 +1540,13 @@ bool IonCacheIRCompiler::emitCallScriptedSetter(ObjOperandId receiverId,
masm.movePtr(ImmGCPtr(target), scratch);
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.Push(scratch);
masm.PushFrameDescriptorForJitCall(FrameType::IonICCall, /* argc = */ 1);
// Check stack alignment. Add sizeof(uintptr_t) for the return address.
MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) ==
0);
// Check stack alignment. Add 2 * sizeof(uintptr_t) for the return address and
// frame pointer pushed by the call/callee.
MOZ_ASSERT(
((masm.framePushed() + 2 * sizeof(uintptr_t)) % JitStackAlignment) == 0);
MOZ_ASSERT(target->hasJitEntry());
masm.loadJitCodeRaw(scratch, scratch);
@ -1944,7 +1944,6 @@ bool IonCacheIRCompiler::emitCloseIterScriptedResult(ObjOperandId iterId,
}
masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(iter)));
masm.Push(ImmWord(JitFrameLayout::UnusedValue));
masm.Push(callee);
masm.PushFrameDescriptorForJitCall(FrameType::IonICCall, /* argc = */ 0);

View file

@ -43,8 +43,7 @@ inline JSScript* JSJitProfilingFrameIterator::frameScript() const {
inline BaselineFrame* JSJitFrameIter::baselineFrame() const {
MOZ_ASSERT(isBaselineJS());
return (BaselineFrame*)(fp() - BaselineFrame::FramePointerOffset -
BaselineFrame::Size());
return (BaselineFrame*)(fp() - BaselineFrame::Size());
}
inline uint32_t JSJitFrameIter::baselineFrameNumValueSlots() const {

View file

@ -151,12 +151,7 @@ void JSJitFrameIter::baselineScriptAndPc(JSScript** scriptRes,
Value* JSJitFrameIter::actualArgs() const { return jsFrame()->argv() + 1; }
uint8_t* JSJitFrameIter::prevFp() const {
if (current()->prevType() == FrameType::WasmToJSJit) {
return current()->callerFramePtr();
}
return current()->callerFramePtr() + CommonFrameLayout::FramePointerOffset;
}
uint8_t* JSJitFrameIter::prevFp() const { return current()->callerFramePtr(); }
// Compute the size of a Baseline frame excluding pushed VMFunction arguments or
// callee frame headers. This is used to calculate the number of Value slots in
@ -164,8 +159,7 @@ uint8_t* JSJitFrameIter::prevFp() const {
static uint32_t ComputeBaselineFrameSize(const JSJitFrameIter& frame) {
MOZ_ASSERT(frame.prevType() == FrameType::BaselineJS);
uint32_t frameSize = frame.current()->callerFramePtr() +
CommonFrameLayout::FramePointerOffset - frame.fp();
uint32_t frameSize = frame.current()->callerFramePtr() - frame.fp();
if (frame.isBaselineStub()) {
return frameSize - BaselineStubFrameLayout::Size();
@ -556,11 +550,7 @@ JSJitProfilingFrameIterator::JSJitProfilingFrameIterator(JSContext* cx,
template <typename ReturnType = CommonFrameLayout*>
static inline ReturnType GetPreviousRawFrame(CommonFrameLayout* frame) {
if (frame->prevType() == FrameType::WasmToJSJit) {
return ReturnType(frame->callerFramePtr());
}
static constexpr size_t FPOffset = CommonFrameLayout::FramePointerOffset;
return ReturnType(frame->callerFramePtr() + FPOffset);
return ReturnType(frame->callerFramePtr());
}
JSJitProfilingFrameIterator::JSJitProfilingFrameIterator(
@ -656,9 +646,7 @@ const char* JSJitProfilingFrameIterator::baselineInterpreterLabel() const {
void JSJitProfilingFrameIterator::baselineInterpreterScriptPC(
JSScript** script, jsbytecode** pc, uint64_t* realmID) const {
MOZ_ASSERT(type_ == FrameType::BaselineJS);
BaselineFrame* blFrame =
(BaselineFrame*)(fp_ - BaselineFrame::FramePointerOffset -
BaselineFrame::Size());
BaselineFrame* blFrame = (BaselineFrame*)(fp_ - BaselineFrame::Size());
*script = frameScript();
*pc = (*script)->code();

View file

@ -215,12 +215,11 @@ static void OnLeaveIonFrame(JSContext* cx, const InlineFrameIterator& frame,
Value& rval = rematFrame->returnValue();
MOZ_RELEASE_ASSERT(!rval.isMagic());
// Set both framePointer and stackPointer to the address of the saved frame
// pointer. The profiler's exit frame trampoline will use this frame pointer.
// Set both framePointer and stackPointer to the address of the
// JitFrameLayout.
rfe->kind = ExceptionResumeKind::ForcedReturnIon;
uint8_t* fp = frame.frame().fp() - CommonFrameLayout::FramePointerOffset;
rfe->framePointer = fp;
rfe->stackPointer = fp;
rfe->framePointer = frame.frame().fp();
rfe->stackPointer = frame.frame().fp();
rfe->exception = rval;
act->removeIonFrameRecovery(frame.frame().jsFrame());
@ -354,7 +353,7 @@ static void OnLeaveBaselineFrame(JSContext* cx, const JSJitFrameIter& frame,
bool returnFromThisFrame = jit::DebugEpilogue(cx, baselineFrame, pc, frameOk);
if (returnFromThisFrame) {
rfe->kind = ExceptionResumeKind::ForcedReturnBaseline;
rfe->framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
rfe->framePointer = frame.fp();
rfe->stackPointer = reinterpret_cast<uint8_t*>(baselineFrame);
}
}
@ -363,7 +362,7 @@ static inline void BaselineFrameAndStackPointersFromTryNote(
const TryNote* tn, const JSJitFrameIter& frame, uint8_t** framePointer,
uint8_t** stackPointer) {
JSScript* script = frame.baselineFrame()->script();
*framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
*framePointer = frame.fp();
*stackPointer = *framePointer - BaselineFrame::Size() -
(script->nfixed() + tn->stackDepth) * sizeof(Value);
}
@ -631,10 +630,8 @@ static JitFrameLayout* GetLastProfilingFrame(ResumeFromException* rfe) {
case ExceptionResumeKind::Catch:
case ExceptionResumeKind::Finally:
case ExceptionResumeKind::ForcedReturnBaseline:
case ExceptionResumeKind::ForcedReturnIon: {
uint8_t* fp = rfe->framePointer + CommonFrameLayout::FramePointerOffset;
return reinterpret_cast<JitFrameLayout*>(fp);
}
case ExceptionResumeKind::ForcedReturnIon:
return reinterpret_cast<JitFrameLayout*>(rfe->framePointer);
// When resuming into a bailed-out ion frame, use the bailout info to
// find the frame we are resuming into.
@ -801,7 +798,8 @@ void HandleException(ResumeFromException* rfe) {
if (iter.isJSJit()) {
MOZ_ASSERT(rfe->kind == ExceptionResumeKind::EntryFrame);
rfe->framePointer = iter.asJSJit().current()->callerFramePtr();
rfe->stackPointer = iter.asJSJit().fp();
rfe->stackPointer =
iter.asJSJit().fp() + CommonFrameLayout::offsetOfReturnAddress();
}
}
@ -866,7 +864,7 @@ CalleeToken TraceCalleeToken(JSTracer* trc, CalleeToken token) {
uintptr_t* JitFrameLayout::slotRef(SafepointSlotEntry where) {
if (where.stack) {
return (uintptr_t*)((uint8_t*)this - IonFirstSlotOffset - where.slot);
return (uintptr_t*)((uint8_t*)this - where.slot);
}
return (uintptr_t*)((uint8_t*)argv() + where.slot);
}
@ -2472,10 +2470,9 @@ void AssertJitStackInvariants(JSContext* cx) {
"The rectifier frame should keep the alignment");
size_t expectedFrameSize =
sizeof(void*) /* frame pointer */
+ sizeof(Value) *
(frames.callee()->nargs() + 1 /* |this| argument */ +
frames.isConstructing() /* new.target */) +
sizeof(Value) *
(frames.callee()->nargs() + 1 /* |this| argument */ +
frames.isConstructing() /* new.target */) +
sizeof(JitFrameLayout);
MOZ_RELEASE_ASSERT(frameSize >= expectedFrameSize,
"The frame is large enough to hold all arguments");

View file

@ -203,14 +203,11 @@ inline uint8_t* alignDoubleSpill(uint8_t* pointer) {
// Layout of the frame prefix. This assumes the stack architecture grows down.
// If this is ever not the case, we'll have to refactor.
class CommonFrameLayout {
uint8_t* callerFramePtr_;
uint8_t* returnAddress_;
uintptr_t descriptor_;
public:
// All frames have the caller's frame pointer as first word (pushed after the
// return address is pushed).
static constexpr size_t FramePointerOffset = sizeof(void*);
static constexpr size_t offsetOfDescriptor() {
return offsetof(CommonFrameLayout, descriptor_);
}
@ -231,26 +228,21 @@ class CommonFrameLayout {
uint8_t* returnAddress() const { return returnAddress_; }
void setReturnAddress(uint8_t* addr) { returnAddress_ = addr; }
uint8_t* callerFramePtr() const {
auto* p = reinterpret_cast<const uintptr_t*>(this) - 1;
return reinterpret_cast<uint8_t*>(*p);
uint8_t* callerFramePtr() const { return callerFramePtr_; }
static constexpr size_t offsetOfCallerFramePtr() {
return offsetof(CommonFrameLayout, callerFramePtr_);
}
static constexpr size_t bytesPoppedAfterCall() {
// The return address and frame pointer are popped by the callee/call.
return 2 * sizeof(void*);
}
};
class JitFrameLayout : public CommonFrameLayout {
CalleeToken calleeToken_;
protected: // Silence Clang warning about unused private field.
uintptr_t unused_;
public:
// TODO: removed in the next patch.
static constexpr uintptr_t UnusedValue = 0xbad0bad1;
CalleeToken calleeToken() const {
MOZ_ASSERT(unused_ == UnusedValue);
return calleeToken_;
}
CalleeToken calleeToken() const { return calleeToken_; }
void replaceCalleeToken(CalleeToken calleeToken) {
calleeToken_ = calleeToken;
}
@ -275,15 +267,9 @@ class JitFrameLayout : public CommonFrameLayout {
return (JS::Value*)(this + 1);
}
uintptr_t numActualArgs() const {
MOZ_ASSERT(unused_ == UnusedValue);
return descriptor() >> NUMACTUALARGS_SHIFT;
}
// For IonJS frames: the distance from the JitFrameLayout to the first local
// slot. The caller's frame pointer is stored in this space. 32-bit platforms
// have 4 bytes of padding to ensure doubles are properly aligned.
static constexpr size_t IonFirstSlotOffset = 8;
// Computes a reference to a stack or argument slot, where a slot is a
// distance from the base frame pointer, as would be used for LStackSlot
// or LArgument.
@ -335,14 +321,8 @@ class ExitFooterFrame {
// VMFunctionData*.
uintptr_t data_;
// Saved frame pointer. This must be the last word, so that this overlaps with
// CommonFrameLayout::FramePointerOffset.
protected: // Silence warning about unused private field.
static_assert(CommonFrameLayout::FramePointerOffset == sizeof(void*));
uint8_t* callerFP_;
public:
static inline size_t Size() { return sizeof(ExitFooterFrame); }
static constexpr size_t Size() { return sizeof(ExitFooterFrame); }
void setUnwoundJitExitFrame() {
data_ = uintptr_t(ExitFrameType::UnwoundJit);
}
@ -380,10 +360,6 @@ class ExitFooterFrame {
uint8_t* address = alignedForABI();
return reinterpret_cast<T*>(address - sizeof(T));
}
static constexpr size_t offsetOfCallerFP() {
return offsetof(ExitFooterFrame, callerFP_);
}
};
class NativeExitFrameLayout;
@ -710,15 +686,14 @@ class BaselineStubFrameLayout : public CommonFrameLayout {
// +-----------------------+
// | - Descriptor | => Marks end of FrameType::BaselineJS
// | - Return address |
// | - CallerFramePtr |
// +-----------------------+
// | - FramePtr | Technically these last two fields are not part
// | - StubPtr | of the frame layout.
// +-----------------------+
// | - StubPtr | Technically this last field is not part
// +-----------------------+ of the frame layout.
public:
static_assert(FramePointerOffset == sizeof(void*));
static constexpr size_t ICStubOffset = 2 * sizeof(void*);
static constexpr int ICStubOffsetFromFP = -int(sizeof(void*));
static constexpr size_t ICStubOffset = sizeof(void*);
static constexpr int ICStubOffsetFromFP = -int(ICStubOffset);
static inline size_t Size() { return sizeof(BaselineStubFrameLayout); }

View file

@ -297,9 +297,7 @@ void MacroAssembler::PushFrameDescriptorForJitCall(FrameType type,
}
void MacroAssembler::loadNumActualArgs(Register framePtr, Register dest) {
static constexpr uint32_t Offset =
JitFrameLayout::FramePointerOffset + JitFrameLayout::offsetOfDescriptor();
loadPtr(Address(framePtr, Offset), dest);
loadPtr(Address(framePtr, JitFrameLayout::offsetOfDescriptor()), dest);
rshift32(Imm32(NUMACTUALARGS_SHIFT), dest);
}
@ -335,6 +333,7 @@ uint32_t MacroAssembler::buildFakeExitFrame(Register scratch) {
PushFrameDescriptor(FrameType::IonJS);
uint32_t retAddr = pushFakeReturnAddress(scratch);
Push(FramePointer);
MOZ_ASSERT(framePushed() == initialDepth + ExitFrameLayout::Size());
return retAddr;
@ -347,7 +346,6 @@ void MacroAssembler::enterExitFrame(Register cxreg, Register scratch,
const VMFunctionData* f) {
MOZ_ASSERT(f);
linkExitFrame(cxreg, scratch);
Push(FramePointer);
// Push VMFunction pointer, to mark arguments.
Push(ImmPtr(f));
}
@ -355,7 +353,6 @@ void MacroAssembler::enterExitFrame(Register cxreg, Register scratch,
void MacroAssembler::enterFakeExitFrame(Register cxreg, Register scratch,
ExitFrameType type) {
linkExitFrame(cxreg, scratch);
Push(FramePointer);
Push(Imm32(int32_t(type)));
}

View file

@ -2063,6 +2063,7 @@ void MacroAssembler::generateBailoutTail(Register scratch,
// Enter exit frame for the FinishBailoutToBaseline call.
pushFrameDescriptor(FrameType::BaselineJS);
push(Address(bailoutInfo, offsetof(BaselineBailoutInfo, resumeAddr)));
push(FramePointer);
// No GC things to mark on the stack, push a bare token.
loadJSContext(scratch);
enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);

View file

@ -24,17 +24,16 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
profilerExitFrameTailOffset_ = startTrampolineCode(masm);
masm.bind(profilerExitTail);
// Offset from frame pointer to CommonFrameLayout.
static constexpr size_t FPOffset = CommonFrameLayout::FramePointerOffset;
static constexpr size_t CallerFPOffset =
CommonFrameLayout::offsetOfCallerFramePtr();
// Assert the caller frame's type is one of the types we expect.
auto emitAssertPrevFrameType = [&masm](
Register framePtr, Register scratch,
std::initializer_list<FrameType> types) {
#ifdef DEBUG
masm.loadPtr(
Address(framePtr, FPOffset + CommonFrameLayout::offsetOfDescriptor()),
scratch);
masm.loadPtr(Address(framePtr, CommonFrameLayout::offsetOfDescriptor()),
scratch);
masm.and32(Imm32(FRAMETYPE_MASK), scratch);
Label checkOk;
@ -61,8 +60,7 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
//
// Thus the expected state is:
//
// [JitFrameLayout]
// [CallerFramePtr] <-- FramePointer
// [JitFrameLayout] <-- FramePointer
// [frame contents] <-- StackPointer
//
// The generated jitcode is responsible for overwriting the
@ -134,9 +132,8 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
// Load the frame descriptor into |scratch|, figure out what to do depending
// on its type.
masm.loadPtr(
Address(fpScratch, FPOffset + JitFrameLayout::offsetOfDescriptor()),
scratch);
masm.loadPtr(Address(fpScratch, JitFrameLayout::offsetOfDescriptor()),
scratch);
masm.and32(Imm32(FRAMETYPE_MASK), scratch);
// Handling of each case is dependent on FrameDescriptor.type
@ -171,13 +168,12 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
// Returning directly to a Baseline or Ion frame.
// lastProfilingCallSite := ReturnAddress
masm.loadPtr(
Address(fpScratch, FPOffset + JitFrameLayout::offsetOfReturnAddress()),
scratch);
masm.loadPtr(Address(fpScratch, JitFrameLayout::offsetOfReturnAddress()),
scratch);
masm.storePtr(scratch, lastProfilingCallSite);
// lastProfilingFrame := CallerFrame
masm.loadPtr(Address(fpScratch, 0), scratch);
masm.loadPtr(Address(fpScratch, CallerFPOffset), scratch);
masm.storePtr(scratch, lastProfilingFrame);
masm.moveToStackPtr(FramePointer);
@ -188,17 +184,16 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
// Shared implementation for BaselineStub and IonICCall frames.
auto emitHandleStubFrame = [&](FrameType expectedPrevType) {
// Load pointer to stub frame and assert type of its caller frame.
masm.loadPtr(Address(fpScratch, 0), fpScratch);
masm.loadPtr(Address(fpScratch, CallerFPOffset), fpScratch);
emitAssertPrevFrameType(fpScratch, scratch, {expectedPrevType});
// lastProfilingCallSite := StubFrame.ReturnAddress
masm.loadPtr(Address(fpScratch,
FPOffset + CommonFrameLayout::offsetOfReturnAddress()),
masm.loadPtr(Address(fpScratch, CommonFrameLayout::offsetOfReturnAddress()),
scratch);
masm.storePtr(scratch, lastProfilingCallSite);
// lastProfilingFrame := StubFrame.CallerFrame
masm.loadPtr(Address(fpScratch, 0), scratch);
masm.loadPtr(Address(fpScratch, CallerFPOffset), scratch);
masm.storePtr(scratch, lastProfilingFrame);
masm.moveToStackPtr(FramePointer);
@ -222,7 +217,7 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
{
// There can be multiple previous frame types so just "unwrap" the arguments
// rectifier frame and try again.
masm.loadPtr(Address(fpScratch, 0), fpScratch);
masm.loadPtr(Address(fpScratch, CallerFPOffset), fpScratch);
emitAssertPrevFrameType(fpScratch, scratch,
{FrameType::IonJS, FrameType::BaselineStub,
FrameType::CppToJSJit, FrameType::WasmToJSJit});

View file

@ -1651,7 +1651,7 @@ BufferOffset MacroAssemblerARM::ma_vstr(VFPRegister src, Register base,
bool MacroAssemblerARMCompat::buildOOLFakeExitFrame(void* fakeReturnAddr) {
asMasm().PushFrameDescriptor(FrameType::IonJS); // descriptor_
asMasm().Push(ImmPtr(fakeReturnAddr));
asMasm().Push(FramePointer);
return true;
}

View file

@ -24,7 +24,6 @@ inline void EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm,
// Store frame size without VMFunction arguments for debug assertions.
masm.movePtr(FramePointer, r0);
masm.as_add(r0, r0, Imm8(BaselineFrame::FramePointerOffset));
masm.ma_sub(BaselineStackReg, r0);
masm.sub32(Imm32(argSize), r0);
Address frameSizeAddr(FramePointer,
@ -53,7 +52,6 @@ inline void EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch) {
#ifdef DEBUG
// Compute frame size.
masm.mov(FramePointer, scratch);
masm.as_add(scratch, scratch, Imm8(BaselineFrame::FramePointerOffset));
masm.ma_sub(BaselineStackReg, scratch);
Address frameSizeAddr(FramePointer,

View file

@ -199,8 +199,6 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.bind(&footer);
}
masm.push(ImmWord(JitFrameLayout::UnusedValue));
// Push the callee token.
masm.push(r9);
@ -261,6 +259,7 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
// Enter exit frame.
masm.pushFrameDescriptor(FrameType::BaselineJS);
masm.push(Imm32(0)); // Fake return address.
masm.push(FramePointer);
// No GC things to mark on the stack, push a bare token.
masm.loadJSContext(scratch);
masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);
@ -314,9 +313,9 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
R1.scratchReg());
}
// The callee will push the return address on the stack, thus we check that
// the stack would be aligned once the call is complete.
masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));
// The callee will push the return address and frame pointer on the stack,
// thus we check that the stack would be aligned once the call is complete.
masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));
// Call the function.
masm.callJitNoProfiler(r0);
@ -400,7 +399,6 @@ void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
// Pop the machine state and the dead frame.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
masm.jump(bailoutTail);
@ -420,26 +418,23 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
}
masm.pushReturnAddress();
// Frame prologue. Push extra padding to ensure proper stack alignment.
// Frame prologue.
//
// NOTE: if this changes, fix the Baseline bailout code too!
// See BaselineStackBuilder::calculatePrevFramePtr and
// BaselineStackBuilder::buildRectifierFrame (in BaselineBailouts.cpp).
static_assert(sizeof(Value) == 2 * sizeof(void*));
static_assert(JitStackAlignment == sizeof(Value));
masm.push(FramePointer);
masm.mov(StackPointer, FramePointer);
masm.push(FramePointer); // Padding.
static_assert(JitStackAlignment == sizeof(Value));
// Copy number of actual arguments into r0 and r8.
masm.loadNumActualArgs(FramePointer, r0);
masm.mov(r0, r8);
// Load the number of |undefined|s to push into r6.
constexpr size_t FrameOffset = 2 * sizeof(void*); // Frame pointer + padding.
constexpr size_t TokenOffset =
FrameOffset + RectifierFrameLayout::offsetOfCalleeToken();
masm.ma_ldr(DTRAddr(sp, DtrOffImm(TokenOffset)), r1);
masm.loadPtr(
Address(FramePointer, RectifierFrameLayout::offsetOfCalleeToken()), r1);
{
ScratchRegisterScope scratch(masm);
masm.ma_and(Imm32(CalleeTokenMask), r1, r6, scratch);
@ -452,8 +447,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
{
ScratchRegisterScope scratch(masm);
masm.ma_alu(sp, lsl(r8, 3), r3, OpAdd); // r3 <- sp + nargs * 8
masm.ma_add(r3, Imm32(FrameOffset + sizeof(RectifierFrameLayout)), r3,
scratch);
masm.ma_add(r3, Imm32(sizeof(RectifierFrameLayout)), r3, scratch);
}
{
@ -497,7 +491,6 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
}
// Construct JitFrameLayout.
masm.push(ImmWord(JitFrameLayout::UnusedValue));
masm.ma_push(r1); // callee token
masm.pushFrameDescriptorForJitCall(FrameType::Rectifier, r0, r0);
@ -582,7 +575,6 @@ static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
// Remove both the bailout frame and the topmost Ion frame's stack.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
masm.jump(bailoutTail);
@ -619,12 +611,12 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
// +8 [args] + argPadding
// +0 ExitFrame
//
// We're aligned to an exit frame, so link it up.
// If it isn't a tail call, then the return address needs to be saved
// If it isn't a tail call, then the return address needs to be saved.
// Push the frame pointer to finish the exit frame, then link it up.
if (f.expectTailCall == NonTailCall) {
masm.pushReturnAddress();
}
masm.Push(FramePointer);
masm.loadJSContext(cxreg);
masm.enterExitFrame(cxreg, regs.getAny(), &f);
@ -778,8 +770,11 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
masm.speculationBarrier();
}
masm.leaveExitFrame();
masm.retn(Imm32(sizeof(ExitFrameLayout) +
// Pop ExitFooterFrame and the frame pointer.
masm.leaveExitFrame(sizeof(void*));
// Return. Subtract sizeof(void*) for the frame pointer.
masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));

View file

@ -1613,6 +1613,7 @@ uint32_t MacroAssembler::pushFakeReturnAddress(Register scratch) {
bool MacroAssemblerCompat::buildOOLFakeExitFrame(void* fakeReturnAddr) {
asMasm().PushFrameDescriptor(FrameType::IonJS);
asMasm().Push(ImmPtr(fakeReturnAddr));
asMasm().Push(FramePointer);
return true;
}
@ -2109,8 +2110,11 @@ void MacroAssembler::enterFakeExitFrameForWasm(Register cxreg, Register scratch,
MOZ_RELEASE_ASSERT(sp.Is(GetStackPointer64()));
// SP has to be 16-byte aligned when we do a load/store, so push |type| twice
// and then add 8 bytes to SP. This leaves SP unaligned.
move32(Imm32(int32_t(type)), scratch);
push(FramePointer, scratch);
push(scratch, scratch);
Add(sp, sp, 8);
// Despite the above assertion, it is possible for control to flow from here
// to the code generated by

View file

@ -23,7 +23,6 @@ inline void EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm,
// Store frame size without VMFunction arguments for debug assertions.
masm.Sub(x0, FramePointer64, masm.GetStackPointer64());
masm.Add(w0, w0, Operand(BaselineFrame::FramePointerOffset));
masm.Sub(w0, w0, Operand(argSize));
Address frameSizeAddr(FramePointer,
BaselineFrame::reverseOffsetOfDebugFrameSize());
@ -52,10 +51,7 @@ inline void EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch) {
#ifdef DEBUG
// Compute frame size.
masm.Add(ARMRegister(scratch, 64), FramePointer64,
Operand(BaselineFrame::FramePointerOffset));
masm.Sub(ARMRegister(scratch, 64), ARMRegister(scratch, 64),
masm.GetStackPointer64());
masm.Sub(ARMRegister(scratch, 64), FramePointer64, masm.GetStackPointer64());
Address frameSizeAddr(FramePointer,
BaselineFrame::reverseOffsetOfDebugFrameSize());

View file

@ -97,10 +97,10 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
// JitFrameLayout is as follows (higher is higher in memory):
// N*8 - [ JS argument vector ] (base 16-byte aligned)
// 8 - numActualArgs
// 8 - calleeToken (16-byte aligned)
// 8 - frameDescriptor
// 8 - returnAddress (16-byte aligned, pushed by callee)
// 8 - calleeToken
// 8 - frameDescriptor (16-byte aligned)
// 8 - returnAddress
// 8 - frame pointer (16-byte aligned, pushed by callee)
// Touch frame incrementally (a requirement for Windows).
//
@ -176,14 +176,13 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
MOZ_ASSERT(temps.IsAvailable(ScratchReg64)); // ip0
temps.Exclude(ScratchReg64);
Register scratch = ScratchReg64.asUnsized();
masm.movePtr(ImmWord(JitFrameLayout::UnusedValue), scratch);
masm.push(scratch, reg_callee);
masm.checkStackAlignment();
masm.push(reg_callee);
// Push the descriptor.
masm.unboxInt32(Address(reg_vp, 0x0), scratch);
masm.PushFrameDescriptorForJitCall(FrameType::CppToJSJit, scratch, scratch);
}
masm.checkStackAlignment();
Label osrReturnPoint;
{
@ -221,6 +220,7 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
// Enter exit frame.
masm.pushFrameDescriptor(FrameType::BaselineJS);
masm.push(xzr); // Push xzr for a fake return address.
masm.push(FramePointer);
// No GC things to mark: push a bare token.
masm.loadJSContext(scratch);
masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);
@ -272,6 +272,10 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.movePtr(reg_scope, R1_);
}
// The callee will push the return address and frame pointer on the stack,
// thus we check that the stack would be aligned once the call is complete.
masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));
// Call function.
// Since AArch64 doesn't have the pc register available, the callee must push
// lr.
@ -400,7 +404,6 @@ void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
// Pop the machine state and the dead frame.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
masm.jump(bailoutTail);
@ -427,16 +430,13 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
// NOTE: if this changes, fix the Baseline bailout code too!
// See BaselineStackBuilder::calculatePrevFramePtr and
// BaselineStackBuilder::buildRectifierFrame (in BaselineBailouts.cpp).
static_assert(sizeof(Value) == sizeof(void*));
masm.push(FramePointer);
masm.moveStackPtrTo(FramePointer);
// Load the information that the rectifier needs from the stack.
constexpr size_t FrameOffset = sizeof(void*); // Frame pointer.
constexpr size_t TokenOffset =
FrameOffset + RectifierFrameLayout::offsetOfCalleeToken();
masm.loadNumActualArgs(FramePointer, r0);
masm.Ldr(x1, MemOperand(masm.GetStackPointer64(), TokenOffset));
masm.loadPtr(
Address(FramePointer, RectifierFrameLayout::offsetOfCalleeToken()), r1);
// Extract a JSFunction pointer from the callee token and keep the
// intermediary to avoid later recalculation.
@ -449,18 +449,16 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
"Constructing must be low-order bit");
masm.And(x4, x1, Operand(CalleeToken_FunctionConstructing));
masm.Add(x7, x6, x4);
masm.Add(x7, x7, Operand(1)); // Account for saved frame pointer.
// Copy the number of actual arguments into r8.
masm.mov(r0, r8);
// Calculate the position that our arguments are at before sp gets modified.
masm.Add(x3, masm.GetStackPointer64(), Operand(x8, vixl::LSL, 3));
masm.Add(x3, x3, Operand(FrameOffset + sizeof(RectifierFrameLayout)));
masm.Add(x3, x3, Operand(sizeof(RectifierFrameLayout)));
// Pad to a multiple of 16 bytes. This neglects the |this| value,
// which will also be pushed, because the rest of the frame will
// round off that value. See pushes of |argc|, |callee| and |desc| below.
// If the number of Values without |this| is even, push 8 padding bytes to
// ensure the stack is 16-byte aligned.
Label noPadding;
masm.Tbnz(x7, 0, &noPadding);
masm.asVIXL().Push(xzr);
@ -504,7 +502,6 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
masm.B(&copyLoopTop, Assembler::NotSigned);
}
masm.push(ImmWord(JitFrameLayout::UnusedValue));
masm.push(r1); // Callee token.
masm.pushFrameDescriptorForJitCall(FrameType::Rectifier, r0, r0);
@ -568,7 +565,6 @@ static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
// Remove both the bailout frame and the topmost Ion frame's stack.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
masm.jump(bailoutTail);
@ -614,7 +610,8 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
// +8 descriptor
// +0 returnAddress (pushed by this function, caller sets as lr)
//
// We're aligned to an exit frame, so link it up.
// Push the frame pointer to finish the exit frame, then link it up.
masm.Push(FramePointer);
masm.loadJSContext(reg_cx);
masm.enterExitFrame(reg_cx, regs.getAny(), &f);
@ -779,8 +776,11 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
masm.speculationBarrier();
}
masm.leaveExitFrame();
masm.retn(Imm32(sizeof(ExitFrameLayout) +
// Pop ExitFooterFrame and the frame pointer.
masm.leaveExitFrame(sizeof(void*));
// Return. Subtract sizeof(void*) for the frame pointer.
masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));

View file

@ -42,7 +42,7 @@ void MacroAssembler::clampDoubleToUint8(FloatRegister input, Register output) {
bool MacroAssemblerLOONG64Compat::buildOOLFakeExitFrame(void* fakeReturnAddr) {
asMasm().PushFrameDescriptor(FrameType::IonJS); // descriptor_
asMasm().Push(ImmPtr(fakeReturnAddr));
asMasm().Push(FramePointer);
return true;
}

View file

@ -22,7 +22,6 @@ inline void EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm,
// Compute frame size.
masm.movePtr(FramePointer, scratch);
masm.addPtr(Imm32(BaselineFrame::FramePointerOffset), scratch);
masm.subPtr(BaselineStackReg, scratch);
// Store frame size without VMFunction arguments for debug assertions.
@ -58,7 +57,6 @@ inline void EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch) {
#ifdef DEBUG
// Compute frame size.
masm.movePtr(FramePointer, scratch);
masm.addPtr(Imm32(BaselineFrame::FramePointerOffset), scratch);
masm.subPtr(BaselineStackReg, scratch);
Address frameSizeAddr(FramePointer,

View file

@ -185,7 +185,6 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
}
masm.bind(&footer);
masm.push(ImmWord(JitFrameLayout::UnusedValue));
masm.push(reg_token);
masm.pushFrameDescriptorForJitCall(FrameType::CppToJSJit, s3, s3);
@ -225,12 +224,13 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.subPtr(scratch, StackPointer);
// Enter exit frame.
// Push frame descriptor and fake return address.
masm.reserveStack(2 * sizeof(uintptr_t));
masm.reserveStack(3 * sizeof(uintptr_t));
masm.storePtr(
ImmWord(MakeFrameDescriptor(FrameType::BaselineJS)),
Address(StackPointer, sizeof(uintptr_t))); // Frame descriptor
masm.storePtr(zero, Address(StackPointer, 0)); // fake return address
Address(StackPointer, 2 * sizeof(uintptr_t))); // Frame descriptor
masm.storePtr(
zero, Address(StackPointer, sizeof(uintptr_t))); // fake return address
masm.storePtr(FramePointer, Address(StackPointer, 0));
// No GC things to mark, push a bare token.
masm.loadJSContext(scratch);
@ -289,9 +289,9 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.as_or(R1.scratchReg(), reg_chain, zero);
}
// The call will push the return address on the stack, thus we check that
// the stack would be aligned once the call is complete.
masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));
// The call will push the return address and frame pointer on the stack, thus
// we check that the stack would be aligned once the call is complete.
masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));
// Call the function with pushing return address to stack.
masm.callJitNoProfiler(reg_code);
@ -356,7 +356,6 @@ void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
// Pop the machine state and the dead frame.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
masm.jump(bailoutTail);
@ -396,10 +395,9 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
Register numArgsReg = a5;
// Load |nformals| into numArgsReg.
constexpr size_t FrameOffset = sizeof(void*); // Frame pointer.
constexpr size_t TokenOffset =
FrameOffset + RectifierFrameLayout::offsetOfCalleeToken();
masm.loadPtr(Address(StackPointer, TokenOffset), calleeTokenReg);
masm.loadPtr(
Address(FramePointer, RectifierFrameLayout::offsetOfCalleeToken()),
calleeTokenReg);
masm.mov(calleeTokenReg, numArgsReg);
masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), numArgsReg);
masm.loadFunctionArgCount(numArgsReg, numArgsReg);
@ -421,20 +419,20 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
static_assert(
sizeof(JitFrameLayout) % JitStackAlignment == 0,
"No need to consider the JitFrameLayout for aligning the stack");
static_assert((sizeof(Value) + sizeof(void*)) % JitStackAlignment == 0,
"No need to consider |this| and the frame pointer for "
"aligning the stack");
static_assert(
JitStackAlignment % sizeof(Value) == 0,
"Ensure that we can pad the stack by pushing extra UndefinedValue");
MOZ_ASSERT(mozilla::IsPowerOfTwo(JitStackValueAlignment));
masm.add32(Imm32(JitStackValueAlignment - 1 /* for padding */), numArgsReg);
masm.add32(
Imm32(JitStackValueAlignment - 1 /* for padding */ + 1 /* for |this| */),
numArgsReg);
masm.add32(t2, numArgsReg);
masm.and32(Imm32(~(JitStackValueAlignment - 1)), numArgsReg);
// Load the number of |undefined|s to push into t1.
// Load the number of |undefined|s to push into t1. Subtract 1 for |this|.
masm.as_sub_d(t1, numArgsReg, s3);
masm.sub32(Imm32(1), t1);
// Caller:
// [arg2] [arg1] [this] [ [argc] [callee] [descr] [raddr] ] <- sp
@ -465,11 +463,10 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
// Get the topmost argument.
static_assert(sizeof(Value) == 8, "TimesEight is used to skip arguments");
// Get the topmost argument. We did a push of fp earlier, so be sure to
// account for this in the offset.
// Get the topmost argument.
masm.as_slli_d(t0, s3, 3); // t0 <- nargs * 8
masm.as_add_d(t1, FramePointer, t0); // t1 <- fp(saved sp) + nargs * 8
masm.addPtr(Imm32(sizeof(RectifierFrameLayout) + sizeof(void*)), t1);
masm.addPtr(Imm32(sizeof(RectifierFrameLayout)), t1);
// Push arguments, |nargs| + 1 times (to include |this|).
masm.addPtr(Imm32(1), s3);
@ -497,11 +494,9 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
// thisFrame[numFormals] = prevFrame[argc]
ValueOperand newTarget(t0);
// Load vp[argc]. Add sizeof(Value) for |this| and sizeof(void *) for the
// saved frame pointer.
BaseIndex newTargetSrc(
FramePointer, numActArgsReg, TimesEight,
sizeof(RectifierFrameLayout) + sizeof(Value) + sizeof(void*));
// Load vp[argc]. Add sizeof(Value) for |this|.
BaseIndex newTargetSrc(FramePointer, numActArgsReg, TimesEight,
sizeof(RectifierFrameLayout) + sizeof(Value));
masm.loadValue(newTargetSrc, newTarget);
// Again, 1 for |this|
@ -521,7 +516,6 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
//
// Construct JitFrameLayout.
masm.push(ImmWord(JitFrameLayout::UnusedValue));
masm.push(calleeTokenReg);
masm.pushFrameDescriptorForJitCall(FrameType::Rectifier, numActArgsReg,
numActArgsReg);
@ -590,7 +584,6 @@ static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
// Remove both the bailout frame and the topmost Ion frame's stack.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
masm.jump(bailoutTail);
@ -629,7 +622,8 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
masm.pushReturnAddress();
}
// We're aligned to an exit frame, so link it up.
// Push the frame pointer to finish the exit frame, then link it up.
masm.Push(FramePointer);
masm.loadJSContext(cxreg);
masm.enterExitFrame(cxreg, regs.getAny(), &f);
@ -779,8 +773,11 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
break;
}
masm.leaveExitFrame();
masm.retn(Imm32(sizeof(ExitFrameLayout) +
// Pop ExitFooterFrame and the frame pointer.
masm.leaveExitFrame(sizeof(void*));
// Return. Subtract sizeof(void*) for the frame pointer.
masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));

@ -218,9 +218,8 @@ uint32_t CodeGeneratorShared::ArgToStackOffset(uint32_t slot) const {
uint32_t CodeGeneratorShared::SlotToStackOffset(uint32_t slot) const {
MOZ_ASSERT(slot > 0 && slot <= graph.localSlotsSize());
uint32_t offsetFromBase = offsetOfLocalSlots_ + slot;
MOZ_ASSERT(offsetFromBase <= masm.framePushed());
return masm.framePushed() - offsetFromBase;
MOZ_ASSERT(slot <= masm.framePushed());
return masm.framePushed() - slot;
}
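
A brief sketch (not part of the patch) of the simplified mapping, assuming the usual frame shape where sp == fp - framePushed() after the prologue and `slot` is a byte offset below the frame pointer: the sp-relative offset of a spill slot is now simply framePushed() - slot, with no extra word reserved for the frame pointer.

#include <cstdint>

// slot is a 1-based byte offset below fp; the result is an sp-relative offset.
constexpr uint32_t SlotToStackOffsetSketch(uint32_t framePushed, uint32_t slot) {
  return framePushed - slot;
}

// Hypothetical frame with 48 bytes pushed: the slot 8 bytes below fp sits
// 40 bytes above sp.
static_assert(SlotToStackOffsetSketch(48, 8) == 40, "fp-relative to sp-relative");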
// For argument construction for calls. Argslots are Value-sized.

@ -113,16 +113,11 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator* gen, LIRGraph* graph,
"Trap exit stub needs 16-byte aligned stack pointer");
#endif
} else {
// Reserve space for frame pointer (and padding on 32-bit platforms).
offsetOfLocalSlots_ = JitFrameLayout::IonFirstSlotOffset;
frameDepth_ = offsetOfLocalSlots_;
// Allocate space for local slots (register allocator spills). Round to
// JitStackAlignment, and implicitly to sizeof(Value) as JitStackAlignment
// is a multiple of sizeof(Value). This was originally implemented for
// SIMD.js, but now lets us use faster ABI calls via setupAlignedABICall.
frameDepth_ += graph->localSlotsSize();
frameDepth_ = AlignBytes(frameDepth_, JitStackAlignment);
frameDepth_ = AlignBytes(graph->localSlotsSize(), JitStackAlignment);
// Allocate space for argument Values passed to callee functions.
offsetOfPassedArgSlots_ = frameDepth_;
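
To illustrate the simplified layout computation above, here is a sketch (not part of the patch) with hypothetical numbers, assuming JitStackAlignment == 16 and that it is a multiple of sizeof(Value): spill slots are rounded up to the alignment, and the outgoing-argument Value slots start right after them.

#include <cstdint>

constexpr uint32_t AlignBytesSketch(uint32_t bytes, uint32_t alignment) {
  return (bytes + alignment - 1) & ~(alignment - 1);
}

// Hypothetical graph with 40 bytes of register-allocator spills:
constexpr uint32_t kFrameDepth = AlignBytesSketch(40, 16);      // 48 bytes
constexpr uint32_t kOffsetOfPassedArgSlots = kFrameDepth;       // args follow
static_assert(kFrameDepth == 48 && kOffsetOfPassedArgSlots == 48,
              "spill area rounded up; argument Values appended after it");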
@ -141,20 +136,20 @@ bool CodeGeneratorShared::generatePrologue() {
masm.pushReturnAddress();
#endif
// Frame prologue.
masm.push(FramePointer);
masm.moveStackPtrTo(FramePointer);
// Ensure that the Ion frame is properly aligned.
masm.assertStackAlignment(JitStackAlignment, 0);
// Frame prologue.
masm.Push(FramePointer);
masm.moveStackPtrTo(FramePointer);
// If profiling, save the current frame pointer to a per-thread global field.
if (isProfilerInstrumentationEnabled()) {
masm.profilerEnterFrame(FramePointer, CallTempReg0);
}
// Note that this automatically sets MacroAssembler::framePushed().
masm.reserveStack(frameSize() - sizeof(uintptr_t));
masm.reserveStack(frameSize());
MOZ_ASSERT(masm.framePushed() == frameSize());
masm.checkStackAlignment();
@ -358,7 +353,7 @@ void CodeGeneratorShared::dumpNativeToBytecodeEntry(uint32_t idx) {
static inline int32_t ToStackIndex(LAllocation* a) {
if (a->isStackSlot()) {
MOZ_ASSERT(a->toStackSlot()->slot() >= 1);
return JitFrameLayout::IonFirstSlotOffset + a->toStackSlot()->slot();
return a->toStackSlot()->slot();
}
return -int32_t(sizeof(JitFrameLayout) + a->toArgument()->index());
}

@ -139,10 +139,6 @@ class CodeGeneratorShared : public LElementVisitor {
// spills.
uint32_t frameDepth_;
// Offset in bytes of the stack region reserved for values spilled by the
// register allocator.
uint32_t offsetOfLocalSlots_ = 0;
// Offset in bytes of the stack region reserved for passed argument Values.
uint32_t offsetOfPassedArgSlots_ = 0;

@ -23,7 +23,6 @@ inline void EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm,
// We can assume during this that R0 and R1 have been pushed.
// Store frame size without VMFunction arguments for debug assertions.
masm.movq(FramePointer, scratch);
masm.addq(Imm32(BaselineFrame::FramePointerOffset), scratch);
masm.subq(BaselineStackReg, scratch);
masm.subq(Imm32(argSize), scratch);
Address frameSizeAddr(FramePointer,
@ -48,20 +47,13 @@ inline void EmitBaselineEnterStubFrame(MacroAssembler& masm, Register) {
// this is:
//
// FramePointer
// + BaselineFrame::FramePointerOffset
// - BaselineStackReg
// - sizeof(return address)
//
// The two constants cancel each other out, so we can just calculate
// FramePointer - BaselineStackReg.
static_assert(
BaselineFrame::FramePointerOffset == sizeof(void*),
"FramePointerOffset must be the same as the return address size");
ScratchRegisterScope scratch(masm);
masm.movq(FramePointer, scratch);
masm.subq(BaselineStackReg, scratch);
masm.subq(Imm32(sizeof(void*)), scratch); // Return address.
Address frameSizeAddr(FramePointer,
BaselineFrame::reverseOffsetOfDebugFrameSize());
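
A tiny sketch (not part of the patch) of the debug frame-size computation now that FramePointerOffset is gone, assuming 64-bit pointers and that the call's return address is still on top of the stack: the recorded size is fp - sp minus that return address.

#include <cstdint>

constexpr uint64_t DebugFrameSizeSketch(uint64_t fp, uint64_t sp) {
  return fp - sp - sizeof(void*);  // exclude the freshly pushed return address
}

// Hypothetical frame: fp == 0x2000, sp == 0x1fc0 -> 0x40 - 8 = 56 bytes.
static_assert(DebugFrameSizeSketch(0x2000, 0x1fc0) == 56, "frame data size");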

@ -206,8 +206,6 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.movq(result, reg_argc);
masm.unboxInt32(Operand(reg_argc, 0), reg_argc);
masm.push(ImmWord(JitFrameLayout::UnusedValue));
// Push the callee token.
masm.push(token);
@ -255,6 +253,7 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
// Enter exit frame.
masm.pushFrameDescriptor(FrameType::BaselineJS);
masm.push(Imm32(0)); // Fake return address.
masm.push(FramePointer);
// No GC things to mark, push a bare token.
masm.loadJSContext(scratch);
masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);
@ -306,9 +305,9 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.movq(scopeChain, R1.scratchReg());
}
// The call will push the return address on the stack, thus we check that
// the stack would be aligned once the call is complete.
masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));
// The call instruction pushes the return address and the callee's prologue
// pushes the frame pointer, so we check that the stack will be aligned once
// both are on the stack.
masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));
// Call function.
masm.callJitNoProfiler(reg_code);
@ -438,7 +437,6 @@ void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
// Pop the machine state and the dead frame.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
masm.jmp(bailoutTail);
@ -474,10 +472,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
masm.loadNumActualArgs(FramePointer, r8);
// Load |nformals| into %rcx.
constexpr size_t FrameOffset = sizeof(void*); // Frame pointer.
constexpr size_t TokenOffset =
FrameOffset + RectifierFrameLayout::offsetOfCalleeToken();
masm.loadPtr(Address(rsp, TokenOffset), rax);
masm.loadPtr(Address(rbp, RectifierFrameLayout::offsetOfCalleeToken()), rax);
masm.mov(rax, rcx);
masm.andq(Imm32(uint32_t(CalleeTokenMask)), rcx);
masm.loadFunctionArgCount(rcx, rcx);
@ -499,21 +494,21 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
static_assert(
sizeof(JitFrameLayout) % JitStackAlignment == 0,
"No need to consider the JitFrameLayout for aligning the stack");
static_assert((sizeof(Value) + sizeof(void*)) % JitStackAlignment == 0,
"No need to consider |this| and the frame pointer for "
"aligning the stack");
static_assert(
JitStackAlignment % sizeof(Value) == 0,
"Ensure that we can pad the stack by pushing extra UndefinedValue");
static_assert(IsPowerOfTwo(JitStackValueAlignment),
"must have power of two for masm.andl to do its job");
masm.addl(Imm32(JitStackValueAlignment - 1 /* for padding */), rcx);
masm.addl(
Imm32(JitStackValueAlignment - 1 /* for padding */ + 1 /* for |this| */),
rcx);
masm.addl(rdx, rcx);
masm.andl(Imm32(~(JitStackValueAlignment - 1)), rcx);
// Load the number of |undefined|s to push into %rcx.
masm.subq(r8, rcx);
// Load the number of |undefined|s to push into %rcx. Subtract 1 for |this|.
masm.subl(r8, rcx);
masm.subl(Imm32(1), rcx);
// Caller:
// [arg2] [arg1] [this] [ [argc] [callee] [descr] [raddr] ] <- rsp
@ -542,10 +537,8 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
// Get the topmost argument.
static_assert(sizeof(Value) == 8, "TimesEight is used to skip arguments");
// Get the topmost argument. We did a push of %rbp earlier, so be sure to
// account for this in the offset.
BaseIndex b(FramePointer, r8, TimesEight,
sizeof(RectifierFrameLayout) + sizeof(void*));
// Get the topmost argument.
BaseIndex b(FramePointer, r8, TimesEight, sizeof(RectifierFrameLayout));
masm.lea(Operand(b), rcx);
// Push arguments, |nargs| + 1 times (to include |this|).
@ -571,11 +564,9 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
// thisFrame[numFormals] = prevFrame[argc]
ValueOperand newTarget(r10);
// Load vp[argc]. Add sizeof(Value) for |this| and sizeof(void*) for the
// saved frame pointer.
BaseIndex newTargetSrc(
FramePointer, rdx, TimesEight,
sizeof(RectifierFrameLayout) + sizeof(Value) + sizeof(void*));
// Load vp[argc]. Add sizeof(Value) for |this|.
BaseIndex newTargetSrc(FramePointer, rdx, TimesEight,
sizeof(RectifierFrameLayout) + sizeof(Value));
masm.loadValue(newTargetSrc, newTarget);
// Again, 1 for |this|
@ -595,7 +586,6 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
//
// Construct JitFrameLayout.
masm.push(ImmWord(JitFrameLayout::UnusedValue));
masm.push(rax); // callee token
masm.pushFrameDescriptorForJitCall(FrameType::Rectifier, rdx, rdx);
@ -652,7 +642,6 @@ static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
// Remove both the bailout frame and the topmost Ion frame's stack.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
masm.jmp(bailoutTail);
@ -692,7 +681,8 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
// +8 descriptor
// +0 returnAddress
//
// We're aligned to an exit frame, so link it up.
// Push the frame pointer to finish the exit frame, then link it up.
masm.Push(FramePointer);
masm.loadJSContext(cxreg);
masm.enterExitFrame(cxreg, regs.getAny(), &f);
@ -839,8 +829,11 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
masm.speculationBarrier();
}
masm.leaveExitFrame();
masm.retn(Imm32(sizeof(ExitFrameLayout) +
// Pop ExitFooterFrame and the frame pointer.
masm.leaveExitFrame(sizeof(void*));
// Return. Subtract sizeof(void*) for the frame pointer.
masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));

@ -67,6 +67,7 @@ void MacroAssembler::clampDoubleToUint8(FloatRegister input, Register output) {
bool MacroAssemblerX86Shared::buildOOLFakeExitFrame(void* fakeReturnAddr) {
asMasm().PushFrameDescriptor(FrameType::IonJS);
asMasm().Push(ImmPtr(fakeReturnAddr));
asMasm().Push(FramePointer);
return true;
}

@ -21,7 +21,6 @@ inline void EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm,
// We assume during this that R0 and R1 have been pushed.
// Store frame size without VMFunction arguments for debug assertions.
masm.movl(FramePointer, eax);
masm.addl(Imm32(BaselineFrame::FramePointerOffset), eax);
masm.subl(BaselineStackReg, eax);
masm.subl(Imm32(argSize), eax);
Address frameSizeAddr(FramePointer,
@ -46,19 +45,12 @@ inline void EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch) {
// this is:
//
// FramePointer
// + BaselineFrame::FramePointerOffset
// - BaselineStackReg
// - sizeof(return address)
//
// The two constants cancel each other out, so we can just calculate
// FramePointer - BaselineStackReg.
static_assert(
BaselineFrame::FramePointerOffset == sizeof(void*),
"FramePointerOffset must be the same as the return address size");
masm.movl(FramePointer, scratch);
masm.subl(BaselineStackReg, scratch);
masm.subl(Imm32(sizeof(void*)), scratch); // Return address.
Address frameSizeAddr(FramePointer,
BaselineFrame::reverseOffsetOfDebugFrameSize());

@ -144,8 +144,6 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.mov(Operand(ebp, ARG_RESULT), eax);
masm.unboxInt32(Address(eax, 0x0), eax);
masm.push(ImmWord(JitFrameLayout::UnusedValue));
// Push the callee token.
masm.push(Operand(ebp, ARG_CALLEETOKEN));
@ -199,6 +197,7 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
// Enter exit frame.
masm.pushFrameDescriptor(FrameType::BaselineJS);
masm.push(Imm32(0)); // Fake return address.
masm.push(FramePointer);
// No GC things to mark on the stack, push a bare token.
masm.loadJSContext(scratch);
masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);
@ -248,9 +247,9 @@ void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
masm.loadPtr(Address(ebp, ARG_SCOPECHAIN), R1.scratchReg());
}
// The call will push the return address on the stack, thus we check that
// the stack would be aligned once the call is complete.
masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));
// The call instruction pushes the return address and the callee's prologue
// pushes the frame pointer, so we check that the stack will be aligned once
// both are on the stack.
masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));
/***************************************************************
Call passed-in code, get return value and fill in the
@ -362,7 +361,6 @@ void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
// Pop the machine state and the dead frame.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
masm.jmp(bailoutTail);
@ -384,24 +382,19 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
// Caller:
// [arg2] [arg1] [this] [ [argc] [callee] [descr] [raddr] ] <- esp
// Frame prologue. Push extra padding to ensure proper stack alignment. See
// comments and assertions below.
// Frame prologue.
//
// NOTE: if this changes, fix the Baseline bailout code too!
// See BaselineStackBuilder::calculatePrevFramePtr and
// BaselineStackBuilder::buildRectifierFrame (in BaselineBailouts.cpp).
masm.push(FramePointer);
masm.movl(esp, FramePointer); // Save %esp.
masm.push(FramePointer); // Padding.
// Load argc.
masm.loadNumActualArgs(FramePointer, esi);
// Load the number of |undefined|s to push into %ecx.
constexpr size_t FrameOffset = 2 * sizeof(void*); // Frame pointer + padding.
constexpr size_t TokenOffset =
FrameOffset + RectifierFrameLayout::offsetOfCalleeToken();
masm.loadPtr(Address(esp, TokenOffset), eax);
masm.loadPtr(Address(ebp, RectifierFrameLayout::offsetOfCalleeToken()), eax);
masm.mov(eax, ecx);
masm.andl(Imm32(CalleeTokenMask), ecx);
masm.loadFunctionArgCount(ecx, ecx);
@ -413,16 +406,15 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
static_assert(
sizeof(JitFrameLayout) % JitStackAlignment == 0,
"No need to consider the JitFrameLayout for aligning the stack");
static_assert((sizeof(Value) + 2 * sizeof(void*)) % JitStackAlignment == 0,
"No need to consider |this| and the frame pointer and its "
"padding for aligning the stack");
static_assert(
JitStackAlignment % sizeof(Value) == 0,
"Ensure that we can pad the stack by pushing extra UndefinedValue");
static_assert(IsPowerOfTwo(JitStackValueAlignment),
"must have power of two for masm.andl to do its job");
masm.addl(Imm32(JitStackValueAlignment - 1 /* for padding */), ecx);
masm.addl(
Imm32(JitStackValueAlignment - 1 /* for padding */ + 1 /* for |this| */),
ecx);
// Account for newTarget, if necessary.
static_assert(
@ -434,6 +426,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
masm.andl(Imm32(~(JitStackValueAlignment - 1)), ecx);
masm.subl(esi, ecx);
masm.subl(Imm32(1), ecx); // For |this|.
// Copy the number of actual arguments into edx.
masm.mov(esi, edx);
@ -461,10 +454,8 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
masm.j(Assembler::NonZero, &undefLoopTop);
}
// Get the topmost argument. We did a push of %ebp earlier, so be sure to
// account for this in the offset.
BaseIndex b(FramePointer, esi, TimesEight,
sizeof(RectifierFrameLayout) + sizeof(void*));
// Get the topmost argument.
BaseIndex b(FramePointer, esi, TimesEight, sizeof(RectifierFrameLayout));
masm.lea(Operand(b), ecx);
// Push arguments, |nargs| + 1 times (to include |this|).
@ -488,9 +479,8 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
Imm32(CalleeToken_FunctionConstructing),
&notConstructing);
BaseValueIndex src(
FramePointer, edx,
sizeof(RectifierFrameLayout) + sizeof(Value) + sizeof(void*));
BaseValueIndex src(FramePointer, edx,
sizeof(RectifierFrameLayout) + sizeof(Value));
masm.andl(Imm32(CalleeTokenMask), ebx);
masm.loadFunctionArgCount(ebx, ebx);
@ -506,7 +496,6 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
}
// Construct JitFrameLayout.
masm.push(ImmWord(JitFrameLayout::UnusedValue));
masm.push(eax); // callee token
masm.pushFrameDescriptorForJitCall(FrameType::Rectifier, edx, edx);
@ -563,7 +552,6 @@ static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
// Remove both the bailout frame and the topmost Ion frame's stack.
masm.moveToStackPtr(FramePointer);
masm.pop(FramePointer);
// Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
masm.jmp(bailoutTail);
@ -602,7 +590,8 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
// +4 descriptor
// +0 returnAddress
//
// We're aligned to an exit frame, so link it up.
// Push the frame pointer to finish the exit frame, then link it up.
masm.Push(FramePointer);
masm.loadJSContext(cxreg);
masm.enterExitFrame(cxreg, regs.getAny(), &f);
@ -748,8 +737,11 @@ bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
masm.speculationBarrier();
}
masm.leaveExitFrame();
masm.retn(Imm32(sizeof(ExitFrameLayout) +
// Pop ExitFooterFrame and the frame pointer.
masm.leaveExitFrame(sizeof(void*));
// Return. Subtract sizeof(void*) for the frame pointer.
masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
f.explicitStackSlots() * sizeof(void*) +
f.extraValuesToPop * sizeof(Value)));

@ -84,9 +84,7 @@ class JitActivation : public Activation {
// When profiling is enabled, these fields will be updated to reflect the
// last pushed frame for this activation, and if that frame has been
// left for a call, the native code site of the call.
// Note: lastProfilingFrame_ points to the location of the caller's frame
// pointer: JitFrameLayout* - JitFrameLayout::FramePointerOffset.
mozilla::Atomic<void*, mozilla::Relaxed> lastProfilingFrame_;
mozilla::Atomic<JitFrameLayout*, mozilla::Relaxed> lastProfilingFrame_;
mozilla::Atomic<void*, mozilla::Relaxed> lastProfilingCallSite_;
static_assert(sizeof(mozilla::Atomic<void*, mozilla::Relaxed>) ==
sizeof(void*),
@ -199,23 +197,8 @@ class JitActivation : public Activation {
static size_t offsetOfLastProfilingFrame() {
return offsetof(JitActivation, lastProfilingFrame_);
}
JitFrameLayout* lastProfilingFrame() {
if (lastProfilingFrame_) {
void* frame = lastProfilingFrame_;
uint8_t* fp =
static_cast<uint8_t*>(frame) + JitFrameLayout::FramePointerOffset;
return reinterpret_cast<JitFrameLayout*>(fp);
}
return nullptr;
}
void setLastProfilingFrame(JitFrameLayout* ptr) {
if (ptr) {
lastProfilingFrame_ =
reinterpret_cast<uint8_t*>(ptr) - JitFrameLayout::FramePointerOffset;
} else {
lastProfilingFrame_ = nullptr;
}
}
JitFrameLayout* lastProfilingFrame() { return lastProfilingFrame_; }
void setLastProfilingFrame(JitFrameLayout* ptr) { lastProfilingFrame_ = ptr; }
static size_t offsetOfLastProfilingCallSite() {
return offsetof(JitActivation, lastProfilingCallSite_);

@ -420,39 +420,6 @@ static_assert(FrameWithInstances::sizeOfInstanceFields() == 2 * sizeof(void*),
static_assert(sizeof(Frame) % 16 == 0, "frame is aligned");
#endif
// The JitEntry stub uses the following stack layout:
//
// [JS JIT caller]
// [JitFrameLayout] (frame descriptor, return address, etc)
// [JSJitToWasmFrame] (saved frame pointer)
// [Wasm arguments]
//
// The caller's frame pointer is not yet stored in JitFrameLayout, so
// JSJitToWasmFrame represents this space. On ARM64 it also has padding to
// ensure SP is 16-byte aligned.
class JSJitToWasmFrame {
protected: // Silence warning about unused padding_ field.
#if defined(JS_CODEGEN_ARM64)
uintptr_t padding_;
#endif
uint8_t* callerFP_;
public:
static constexpr uint32_t callerFPOffset() {
return offsetof(JSJitToWasmFrame, callerFP_);
}
// Distance from frame pointer to the JitFrameLayout pushed by the caller.
static constexpr uint32_t jitFrameLayoutOffsetFromFP() {
return sizeof(JSJitToWasmFrame) - callerFPOffset();
}
};
#if defined(JS_CODEGEN_ARM64)
static_assert(sizeof(JSJitToWasmFrame) % 16 == 0, "frame is aligned");
#endif
static_assert(JSJitToWasmFrame::jitFrameLayoutOffsetFromFP() == sizeof(void*),
"fp must point to caller fp followed by return address for "
"native stack walking to work");
} // namespace wasm
} // namespace js
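
For context, a minimal sketch (not the real class definitions, and assuming 64-bit pointers) of why the dedicated JSJitToWasmFrame could be deleted: with the caller's frame pointer stored as the first word of the common frame layout, fp already points at a caller-fp word followed by the return address, which is exactly the shape the removed static_assert above demanded for native stack walking.

#include <cstddef>
#include <cstdint>

struct CommonFrameLayoutSketch {
  uint8_t* callerFramePtr;  // fp points here (offset 0)
  uint8_t* returnAddress;   // offset 8 on 64-bit
  uintptr_t descriptor;     // frame type and flags
};

static_assert(offsetof(CommonFrameLayoutSketch, callerFramePtr) == 0 &&
                  offsetof(CommonFrameLayoutSketch, returnAddress) == 8,
              "caller fp followed by the return address");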

@ -199,8 +199,7 @@ void WasmFrameIter::popFrame() {
//
// The next value of FP is just a regular jit frame used as a marker to
// know that we should transition to a JSJit frame iterator.
unwoundJitCallerFP_ = reinterpret_cast<uint8_t*>(fp_) +
JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
unwoundJitCallerFP_ = reinterpret_cast<uint8_t*>(fp_);
unwoundJitFrameType_ = FrameType::JSJitToWasm;
fp_ = nullptr;
@ -829,8 +828,9 @@ void wasm::GenerateJitEntryPrologue(MacroAssembler& masm,
offsets->begin = masm.currentOffset();
static_assert(BeforePushRetAddr == 0);
// Subtract from SP first as SP must be aligned before offsetting.
masm.Sub(sp, sp, 8 + sizeof(JSJitToWasmFrame));
masm.Str(ARMRegister(lr, 64), MemOperand(sp, sizeof(JSJitToWasmFrame)));
masm.Sub(sp, sp, 16);
static_assert(JitFrameLayout::offsetOfReturnAddress() == 8);
masm.Str(ARMRegister(lr, 64), MemOperand(sp, 8));
#else
// The x86/x64 call instruction pushes the return address.
offsets->begin = masm.currentOffset();
@ -840,25 +840,19 @@ void wasm::GenerateJitEntryPrologue(MacroAssembler& masm,
// Save jit frame pointer, so unwinding from wasm to jit frames is trivial.
#if defined(JS_CODEGEN_ARM64)
masm.Str(ARMRegister(FramePointer, 64),
MemOperand(sp, JSJitToWasmFrame::callerFPOffset()));
static_assert(JitFrameLayout::offsetOfCallerFramePtr() == 0);
masm.Str(ARMRegister(FramePointer, 64), MemOperand(sp, 0));
#else
static_assert(sizeof(JSJitToWasmFrame) == sizeof(uintptr_t));
masm.Push(FramePointer);
#endif
MOZ_ASSERT_IF(!masm.oom(),
PushedFP == masm.currentOffset() - offsets->begin);
#if defined(JS_CODEGEN_ARM64)
masm.Add(ARMRegister(FramePointer, 64), sp,
JSJitToWasmFrame::callerFPOffset());
#else
masm.moveStackPtrTo(FramePointer);
#endif
MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - offsets->begin);
}
masm.setFramePushed(sizeof(JSJitToWasmFrame));
masm.setFramePushed(0);
}
void wasm::GenerateJitEntryEpilogue(MacroAssembler& masm,
@ -868,11 +862,11 @@ void wasm::GenerateJitEntryEpilogue(MacroAssembler& masm,
RegisterOrSP sp = masm.getStackPointer();
AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 5);
masm.loadPtr(Address(sp, sizeof(JSJitToWasmFrame)), lr);
masm.loadPtr(Address(sp, JSJitToWasmFrame::callerFPOffset()), FramePointer);
masm.loadPtr(Address(sp, 8), lr);
masm.loadPtr(Address(sp, 0), FramePointer);
poppedFP = masm.currentOffset();
masm.addToStackPtr(Imm32(8 + sizeof(JSJitToWasmFrame)));
masm.addToStackPtr(Imm32(2 * sizeof(void*)));
// Copy SP into PSP to enforce return-point invariants (SP == PSP).
// `addToStackPtr` won't sync them because SP is the active pointer here.
// For the same reason, we can't use initPseudoStackPtr to do the sync, so
@ -888,8 +882,7 @@ void wasm::GenerateJitEntryEpilogue(MacroAssembler& masm,
AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 2);
# endif
static_assert(sizeof(JSJitToWasmFrame) == sizeof(uintptr_t));
masm.Pop(FramePointer);
masm.pop(FramePointer);
poppedFP = masm.currentOffset();
offsets->ret = masm.currentOffset();
@ -1009,8 +1002,7 @@ void ProfilingFrameIterator::initFromExitFP(const Frame* fp) {
case CodeRange::JitEntry:
callerPC_ = nullptr;
callerFP_ = nullptr;
unwoundJitCallerFP_ =
fp->rawCaller() + JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
unwoundJitCallerFP_ = fp->rawCaller();
break;
case CodeRange::Function:
fp = fp->wasmCaller();
@ -1338,9 +1330,9 @@ bool js::wasm::StartUnwinding(const RegisterState& registers,
}
// Set fixedFP to the address of the JitFrameLayout on the stack.
if (offsetFromEntry < SetFP) {
fixedFP = reinterpret_cast<uint8_t*>(sp) + sizeof(JSJitToWasmFrame);
fixedFP = reinterpret_cast<uint8_t*>(sp);
} else {
fixedFP = fp + JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
fixedFP = fp;
}
fixedPC = nullptr;
break;
@ -1474,8 +1466,7 @@ void ProfilingFrameIterator::operator++() {
MOZ_ASSERT(codeRange_);
if (codeRange_->isJitEntry()) {
unwoundJitCallerFP_ =
callerFP_ + JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
unwoundJitCallerFP_ = callerFP_;
MOZ_ASSERT(!done());
return;
}

@ -927,15 +927,15 @@ static void GenerateJitEntryThrow(MacroAssembler& masm, unsigned frameSize) {
GenerateJitEntryLoadInstance(masm, frameSize);
// The frame pointer is still set to FailFP. Restore it before entering the
// exit frame.
MOZ_ASSERT(frameSize >= JitFrameLayout::FramePointerOffset);
uint32_t offset = frameSize - JitFrameLayout::FramePointerOffset;
masm.loadPtr(Address(masm.getStackPointer(), offset), FramePointer);
masm.freeStack(frameSize);
MoveSPForJitABI(masm);
// The frame pointer is still set to FailFP. Restore it before entering the
// exit frame.
masm.loadPtr(
Address(masm.getStackPointer(), JitFrameLayout::offsetOfCallerFramePtr()),
FramePointer);
masm.loadPtr(Address(InstanceReg, Instance::offsetOfCx()), ScratchIonEntry);
masm.enterFakeExitFrameForWasm(ScratchIonEntry, ScratchIonEntry,
ExitFrameType::WasmGenericJitEntry);
@ -1011,10 +1011,10 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
// left):
// <-- retAddr | descriptor | callee | argc | this | arg1..N
//
// GenerateJitEntryPrologue has additionally pushed a JSJitToWasmFrame storing
// the caller's frame pointer.
// GenerateJitEntryPrologue has additionally pushed the caller's frame
// pointer. The stack pointer is now JitStackAlignment-aligned.
MOZ_ASSERT(masm.framePushed() == sizeof(JSJitToWasmFrame));
MOZ_ASSERT(masm.framePushed() == 0);
unsigned normalBytesNeeded = StackArgBytesForWasmABI(fe.funcType());
@ -1353,7 +1353,7 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
masm.jump(&exception);
masm.bind(&done);
// Un-fixup the stack for the benefit of the assertion below.
masm.setFramePushed(sizeof(JSJitToWasmFrame));
masm.setFramePushed(0);
break;
}
case ValType::Rtt:
@ -1369,7 +1369,7 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
case RefType::Extern:
// Per comment above, the call may have clobbered the instance
// register, so reload since unboxing will need it.
GenerateJitEntryLoadInstance(masm, sizeof(JSJitToWasmFrame));
GenerateJitEntryLoadInstance(masm, 0);
UnboxAnyrefIntoValueReg(masm, InstanceReg, ReturnReg,
JSReturnOperand, WasmJitEntryReturnScratch);
break;
@ -1383,7 +1383,7 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
GenPrintf(DebugChannel::Function, masm, "\n");
MOZ_ASSERT(masm.framePushed() == sizeof(JSJitToWasmFrame));
MOZ_ASSERT(masm.framePushed() == 0);
AssertExpectedSP(masm);
GenerateJitEntryEpilogue(masm, offsets);
@ -1460,12 +1460,11 @@ void wasm::GenerateDirectCallFromJit(MacroAssembler& masm, const FuncExport& fe,
// Push a special frame descriptor that indicates the frame size so we can
// directly iterate from the current JIT frame without an extra call.
*callOffset = masm.buildFakeExitFrame(scratch);
masm.loadJSContext(scratch);
// Note: enterFakeExitFrame pushes an ExitFooterFrame containing the current
// Note: buildFakeExitFrame pushes an ExitFrameLayout containing the current
// frame pointer. We also use this to restore the frame pointer after the
// call.
*callOffset = masm.buildFakeExitFrame(scratch);
masm.loadJSContext(scratch);
masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::DirectWasmJitCall);
// FP := ExitFrameLayout* | ExitOrJitEntryFPTag
masm.moveStackPtrTo(FramePointer);
@ -1669,10 +1668,10 @@ void wasm::GenerateDirectCallFromJit(MacroAssembler& masm, const FuncExport& fe,
GenPrintf(DebugChannel::Function, masm, "\n");
// Restore the frame pointer by loading it from the ExitFooterFrame.
masm.loadPtr(Address(masm.getStackPointer(),
bytesNeeded + ExitFooterFrame::offsetOfCallerFP()),
FramePointer);
// Restore the frame pointer by loading it from the ExitFrameLayout.
size_t fpOffset = bytesNeeded + ExitFooterFrame::Size() +
ExitFrameLayout::offsetOfCallerFramePtr();
masm.loadPtr(Address(masm.getStackPointer(), fpOffset), FramePointer);
// Free args + frame descriptor.
masm.leaveExitFrame(bytesNeeded + ExitFrameLayout::Size());
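
A small sketch (not part of the patch) of the offset arithmetic used to reload the frame pointer above, with hypothetical sizes: from sp we skip the outgoing-argument area and the ExitFooterFrame to reach the ExitFrameLayout, whose caller-fp word now sits at offset 0.

#include <cstdint>

constexpr uint32_t FpReloadOffsetSketch(uint32_t bytesNeeded,
                                        uint32_t exitFooterFrameSize,
                                        uint32_t offsetOfCallerFramePtr) {
  return bytesNeeded + exitFooterFrameSize + offsetOfCallerFramePtr;
}

// e.g. 32 bytes of outgoing args and a hypothetical 16-byte footer:
static_assert(FpReloadOffsetSketch(32, 16, 0) == 48,
              "fp reloaded from sp + 48 in this made-up configuration");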
@ -2261,35 +2260,28 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
// ^
// +-- sp
//
// Unlike most ABIs, the JIT ABI requires that sp be JitStackAlignment-
// aligned *after* pushing the return address.
// The JIT ABI requires that sp be JitStackAlignment-aligned after pushing
// the return address and frame pointer.
static_assert(WasmStackAlignment >= JitStackAlignment, "subsumes");
const unsigned sizeOfInstanceSlot = sizeof(void*);
const unsigned sizeOfRetAddr = sizeof(void*);
const unsigned sizeOfRetAddrAndFP = 2 * sizeof(void*);
const unsigned sizeOfPreFrame =
WasmToJSJitFrameLayout::Size() - sizeOfRetAddr;
WasmToJSJitFrameLayout::Size() - sizeOfRetAddrAndFP;
const unsigned sizeOfThisAndArgs =
(1 + fi.funcType().args().length()) * sizeof(Value);
const unsigned totalJitFrameBytes =
sizeOfRetAddr + sizeOfPreFrame + sizeOfThisAndArgs + sizeOfInstanceSlot;
const unsigned totalJitFrameBytes = sizeOfRetAddrAndFP + sizeOfPreFrame +
sizeOfThisAndArgs + sizeOfInstanceSlot;
const unsigned jitFramePushed =
StackDecrementForCall(JitStackAlignment,
sizeof(Frame), // pushed by prologue
totalJitFrameBytes) -
sizeOfRetAddr;
sizeOfRetAddrAndFP;
// On ARM64 we must align the SP to a 16-byte boundary.
#ifdef JS_CODEGEN_ARM64
const unsigned frameAlignExtra = sizeof(void*);
#else
const unsigned frameAlignExtra = 0;
#endif
GenerateJitExitPrologue(masm, jitFramePushed + frameAlignExtra, offsets);
GenerateJitExitPrologue(masm, jitFramePushed, offsets);
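
To make the sizing above concrete, a hedged numeric sketch (not part of the patch) follows, assuming 64-bit pointers, JitStackAlignment == 16, a 16-byte wasm Frame already pushed by the prologue, a hypothetical 16-byte pre-frame, and that StackDecrementForCall rounds the bytes already pushed plus the bytes to push up to the alignment and returns the difference (its real definition lives elsewhere in the wasm code).

#include <cstdint>

constexpr uint32_t AlignUp(uint32_t v, uint32_t a) {
  return (v + a - 1) & ~(a - 1);
}

// Assumed behaviour of StackDecrementForCall; see the lead-in above.
constexpr uint32_t StackDecrementForCallSketch(uint32_t alignment,
                                               uint32_t alreadyPushed,
                                               uint32_t toPush) {
  return AlignUp(alreadyPushed + toPush, alignment) - alreadyPushed;
}

// Hypothetical import taking 2 arguments: retaddr+fp (16) + a 16-byte
// pre-frame + (1 + 2) Values (24) + the instance slot (8).
constexpr uint32_t kTotalJitFrameBytes = 16 + 16 + 24 + 8;
constexpr uint32_t kJitFramePushed =
    StackDecrementForCallSketch(16, 16, kTotalJitFrameBytes) - 16;

// After the callee pushes the return address and its frame pointer, the
// stack is JitStackAlignment-aligned again.
static_assert((16 + kJitFramePushed + 16) % 16 == 0, "JIT ABI alignment holds");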
// 1. Descriptor.
unsigned argc = fi.funcType().args().length();
size_t argOffset = frameAlignExtra;
size_t argOffset = 0;
uint32_t descriptor =
MakeFrameDescriptorForJitCall(FrameType::WasmToJSJit, argc);
masm.storePtr(ImmWord(uintptr_t(descriptor)),
@ -2300,24 +2292,19 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
// record offset here and set up callee later.
size_t calleeArgOffset = argOffset;
argOffset += sizeof(size_t);
MOZ_ASSERT(argOffset == sizeOfPreFrame);
// 3. unused_ field.
masm.storePtr(ImmWord(JitFrameLayout::UnusedValue),
Address(masm.getStackPointer(), argOffset));
argOffset += sizeof(size_t);
MOZ_ASSERT(argOffset == sizeOfPreFrame + frameAlignExtra);
// 4. |this| value.
// 3. |this| value.
masm.storeValue(UndefinedValue(), Address(masm.getStackPointer(), argOffset));
argOffset += sizeof(Value);
// 5. Fill the arguments.
// 4. Fill the arguments.
Register scratch = ABINonArgReturnReg1; // Repeatedly clobbered
Register scratch2 = ABINonArgReturnReg0; // Reused as callee below
FillArgumentArrayForJitExit(masm, InstanceReg, funcImportIndex, fi.funcType(),
argOffset, scratch, scratch2, throwLabel);
argOffset += fi.funcType().args().length() * sizeof(Value);
MOZ_ASSERT(argOffset == sizeOfThisAndArgs + sizeOfPreFrame + frameAlignExtra);
MOZ_ASSERT(argOffset == sizeOfThisAndArgs + sizeOfPreFrame);
// Preserve instance because the JIT callee clobbers it.
const size_t savedInstanceOffset = argOffset;
@ -2334,36 +2321,28 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
// 2.2. Save callee.
masm.storePtr(callee, Address(masm.getStackPointer(), calleeArgOffset));
// 6. Check if we need to rectify arguments.
// 5. Check if we need to rectify arguments.
masm.loadFunctionArgCount(callee, scratch);
Label rectify;
masm.branch32(Assembler::Above, scratch, Imm32(fi.funcType().args().length()),
&rectify);
// 7. If we haven't rectified arguments, load callee executable entry point.
// 6. If we haven't rectified arguments, load callee executable entry point.
masm.loadJitCodeRaw(callee, callee);
Label rejoinBeforeCall;
masm.bind(&rejoinBeforeCall);
AssertStackAlignment(masm, JitStackAlignment,
sizeOfRetAddr + frameAlignExtra);
AssertStackAlignment(masm, JitStackAlignment, sizeOfRetAddrAndFP);
#ifdef JS_CODEGEN_ARM64
AssertExpectedSP(masm);
// Conform to JIT ABI. Note this doesn't update PSP since SP is the active
// pointer.
masm.addToStackPtr(Imm32(8));
// Manually resync PSP. Omitting this causes eg tests/wasm/import-export.js
// to segfault.
masm.moveStackPtrTo(PseudoStackPointer);
#endif
masm.callJitNoProfiler(callee);
#ifdef JS_CODEGEN_ARM64
// Conform to platform conventions - align the SP.
masm.subFromStackPtr(Imm32(8));
#endif
// Note that there might be a GC thing in the JSReturnOperand now.
// In all the code paths from here:
@ -2376,8 +2355,7 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
// FramePointer, so restore those here. During this sequence of
// instructions, FP can't be trusted by the profiling frame iterator.
offsets->untrustedFPStart = masm.currentOffset();
AssertStackAlignment(masm, JitStackAlignment,
sizeOfRetAddr + frameAlignExtra);
AssertStackAlignment(masm, JitStackAlignment, sizeOfRetAddrAndFP);
masm.loadPtr(Address(masm.getStackPointer(), savedInstanceOffset),
InstanceReg);
@ -2385,17 +2363,12 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
masm.addPtr(Imm32(masm.framePushed()), FramePointer);
offsets->untrustedFPEnd = masm.currentOffset();
// As explained above, the frame was aligned for the JIT ABI such that
// (sp + sizeof(void*)) % JitStackAlignment == 0
// The frame was aligned for the JIT ABI such that
// (sp - 2 * sizeof(void*)) % JitStackAlignment == 0
// But now we possibly want to call one of several different C++ functions,
// so subtract the sizeof(void*) so that sp is aligned for an ABI call.
// so subtract 2 * sizeof(void*) so that sp is aligned for an ABI call.
static_assert(ABIStackAlignment <= JitStackAlignment, "subsumes");
#ifdef JS_CODEGEN_ARM64
// We've already allocated the extra space for frame alignment.
static_assert(sizeOfRetAddr == frameAlignExtra, "ARM64 SP alignment");
#else
masm.reserveStack(sizeOfRetAddr);
#endif
masm.reserveStack(sizeOfRetAddrAndFP);
unsigned nativeFramePushed = masm.framePushed();
AssertStackAlignment(masm, ABIStackAlignment);