diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp
index 5aceb583a381..6e5ac7db272e 100644
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -9845,9 +9845,14 @@ CodeGenerator::generateWasm(wasm::SigIdDesc sigId, wasm::BytecodeOffset trapOffs
 {
     JitSpew(JitSpew_Codegen, "# Emitting wasm code");
 
-    wasm::IsLeaf isLeaf = !gen->needsOverrecursedCheck();
+    wasm::GenerateFunctionPrologue(masm, sigId, mozilla::Nothing(), offsets);
 
-    wasm::GenerateFunctionPrologue(masm, frameSize(), isLeaf, sigId, trapOffset, offsets);
+    if (omitOverRecursedCheck())
+        masm.reserveStack(frameSize());
+    else
+        masm.wasmReserveStackChecked(frameSize(), trapOffset);
+
+    MOZ_ASSERT(masm.framePushed() == frameSize());
 
     if (!generateBody())
         return false;
diff --git a/js/src/jit/MacroAssembler.cpp b/js/src/jit/MacroAssembler.cpp
index 37c3390cd086..cb5ecb6ff7d7 100644
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -3379,6 +3379,37 @@ MacroAssembler::wasmTrap(wasm::Trap trap, wasm::BytecodeOffset bytecodeOffset)
     append(trap, wasm::TrapSite(wasmTrapInstruction().offset(), bytecodeOffset));
 }
 
+void
+MacroAssembler::wasmReserveStackChecked(uint32_t amount, wasm::BytecodeOffset trapOffset)
+{
+    if (!amount)
+        return;
+
+    // If the frame is large, don't bump sp until after the stack limit check so
+    // that the trap handler isn't called with a wild sp.
+
+    if (amount > MAX_UNCHECKED_LEAF_FRAME_SIZE) {
+        Label ok;
+        Register scratch = ABINonArgReg0;
+        moveStackPtrTo(scratch);
+        subPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, stackLimit)), scratch);
+        branchPtr(Assembler::GreaterThan, scratch, Imm32(amount), &ok);
+        wasmTrap(wasm::Trap::StackOverflow, trapOffset);
+        bind(&ok);
+    }
+
+    reserveStack(amount);
+
+    if (amount <= MAX_UNCHECKED_LEAF_FRAME_SIZE) {
+        Label ok;
+        branchStackPtrRhs(Assembler::Below,
+                          Address(WasmTlsReg, offsetof(wasm::TlsData, stackLimit)),
+                          &ok);
+        wasmTrap(wasm::Trap::StackOverflow, trapOffset);
+        bind(&ok);
+    }
+}
+
 void
 MacroAssembler::wasmCallImport(const wasm::CallSiteDesc& desc, const wasm::CalleeDesc& callee)
 {
diff --git a/js/src/jit/MacroAssembler.h b/js/src/jit/MacroAssembler.h
index 1326ac2382e5..78672e94c2ec 100644
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -1529,6 +1529,8 @@ class MacroAssembler : public MacroAssemblerSpecific
 
     void wasmTrap(wasm::Trap trap, wasm::BytecodeOffset bytecodeOffset);
 
+    void wasmReserveStackChecked(uint32_t amount, wasm::BytecodeOffset trapOffset);
+
     // Emit a bounds check against the wasm heap limit, jumping to 'label' if
     // 'cond' holds. Required when WASM_HUGE_MEMORY is not defined. If
     // JitOptions.spectreMaskIndex is true, in speculative executions 'index' is
diff --git a/js/src/wasm/WasmBaselineCompile.cpp b/js/src/wasm/WasmBaselineCompile.cpp
index 5f1c3434abb4..4599859ceb49 100644
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -1178,16 +1178,6 @@ class BaseStackFrame
 
   public:
 
-    void endFunctionPrologue() {
-        MOZ_ASSERT(masm.framePushed() == fixedSize());
-        MOZ_ASSERT(fixedSize() % WasmStackAlignment == 0);
-
-        maxFramePushed_ = localSize_;
-#ifdef RABALDR_CHUNKY_STACK
-        currentFramePushed_ = localSize_;
-#endif
-    }
-
     // Initialize `localInfo` based on the types of `locals` and `args`.
     bool setupLocals(const ValTypeVector& locals, const ValTypeVector& args, bool debugEnabled,
                      LocalVector* localInfo)
@@ -1218,7 +1208,10 @@ class BaseStackFrame
         }
 
         localSize_ = AlignBytes(varHigh_, WasmStackAlignment);
-
+        maxFramePushed_ = localSize_;
+#ifdef RABALDR_CHUNKY_STACK
+        currentFramePushed_ = localSize_;
+#endif
         return true;
     }
 
@@ -1380,7 +1373,7 @@ class BaseStackFrame
 
     // Note the platform scratch register may be used by branchPtr(), so
    // generally tmp must be something else.
-    void allocStack(Register tmp, BytecodeOffset trapOffset) {
+    void checkStack(Register tmp, BytecodeOffset trapOffset) {
        stackAddOffset_ = masm.sub32FromStackPtrWithPatch(tmp);
        Label ok;
        masm.branchPtr(Assembler::Below,
@@ -1390,9 +1383,8 @@ class BaseStackFrame
        masm.bind(&ok);
    }
 
-    void patchAllocStack() {
-        masm.patchSub32FromStackPtr(stackAddOffset_,
-                                    Imm32(int32_t(maxFramePushed_ - localSize_)));
+    void patchCheckStack() {
+        masm.patchSub32FromStackPtr(stackAddOffset_, Imm32(int32_t(maxFramePushed_)));
    }
 
    // Very large frames are implausible, probably an attack.
@@ -3161,26 +3153,27 @@ class BaseCompiler final : public BaseCompilerInterface
    void beginFunction() {
        JitSpew(JitSpew_Codegen, "# Emitting wasm baseline code");
 
-        // We are unconditionally checking for overflow in fr.allocStack(), so
-        // pass IsLeaf = true to avoid a second check in the prologue.
-        IsLeaf isLeaf = true;
-        SigIdDesc sigId = env_.funcSigs[func_.index]->id;
-        BytecodeOffset trapOffset(func_.lineOrBytecode);
-        GenerateFunctionPrologue(masm, fr.fixedSize(), isLeaf, sigId, trapOffset, &offsets_,
-                                 mode_ == CompileMode::Tier1 ? Some(func_.index) : Nothing());
-
-        fr.endFunctionPrologue();
+        GenerateFunctionPrologue(masm,
+                                 env_.funcSigs[func_.index]->id,
+                                 mode_ == CompileMode::Tier1 ? Some(func_.index) : Nothing(),
+                                 &offsets_);
 
+        // Initialize DebugFrame fields before the stack overflow trap so that
+        // we have the invariant that all observable Frames in a debugEnabled
+        // Module have valid DebugFrames.
        if (debugEnabled_) {
-            // Initialize funcIndex and flag fields of DebugFrame.
-            size_t debugFrame = masm.framePushed() - DebugFrame::offsetOfFrame();
+#ifdef JS_CODEGEN_ARM64
+            static_assert(DebugFrame::offsetOfFrame() % WasmStackAlignment == 0, "aligned");
+#endif
+            masm.reserveStack(DebugFrame::offsetOfFrame());
            masm.store32(Imm32(func_.index),
-                         Address(masm.getStackPointer(), debugFrame + DebugFrame::offsetOfFuncIndex()));
+                         Address(masm.getStackPointer(), DebugFrame::offsetOfFuncIndex()));
            masm.storePtr(ImmWord(0),
-                         Address(masm.getStackPointer(), debugFrame + DebugFrame::offsetOfFlagsWord()));
+                         Address(masm.getStackPointer(), DebugFrame::offsetOfFlagsWord()));
        }
 
-        fr.allocStack(ABINonArgReg0, trapOffset);
+        fr.checkStack(ABINonArgReg0, BytecodeOffset(func_.lineOrBytecode));
+        masm.reserveStack(fr.fixedSize() - masm.framePushed());
 
        // Copy arguments from registers to stack.
 
@@ -3276,7 +3269,7 @@ class BaseCompiler final : public BaseCompilerInterface
        if (masm.oom())
            return false;
 
-        fr.patchAllocStack();
+        fr.patchCheckStack();
 
        masm.bind(&returnLabel_);
 
diff --git a/js/src/wasm/WasmFrameIter.cpp b/js/src/wasm/WasmFrameIter.cpp
index d05723642e0c..caab95e8c7fe 100644
--- a/js/src/wasm/WasmFrameIter.cpp
+++ b/js/src/wasm/WasmFrameIter.cpp
@@ -497,9 +497,8 @@ GenerateCallableEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason
 }
 
 void
-wasm::GenerateFunctionPrologue(MacroAssembler& masm, uint32_t framePushed, IsLeaf isLeaf,
-                               const SigIdDesc& sigId, BytecodeOffset trapOffset,
-                               FuncOffsets* offsets, const Maybe<uint32_t>& tier1FuncIndex)
+wasm::GenerateFunctionPrologue(MacroAssembler& masm, const SigIdDesc& sigId,
+                               const Maybe<uint32_t>& tier1FuncIndex, FuncOffsets* offsets)
 {
     // Flush pending pools so they do not get dumped between the 'begin' and
     // 'normalEntry' offsets since the difference must be less than UINT8_MAX
@@ -563,34 +562,7 @@ wasm::GenerateFunctionPrologue(MacroAssembler& masm, uint32_t framePushed, IsLea
 
     offsets->tierEntry = masm.currentOffset();
 
-    // The framePushed value is tier-variant and thus the stack increment must
-    // go after the tiering jump/entry.
-    if (framePushed > 0) {
-        // If the frame is large, don't bump sp until after the stack limit check so
-        // that the trap handler isn't called with a wild sp.
-        if (framePushed > MAX_UNCHECKED_LEAF_FRAME_SIZE) {
-            Label ok;
-            Register scratch = ABINonArgReg0;
-            masm.moveStackPtrTo(scratch);
-            masm.subPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, stackLimit)), scratch);
-            masm.branchPtr(Assembler::GreaterThan, scratch, Imm32(framePushed), &ok);
-            masm.wasmTrap(wasm::Trap::StackOverflow, trapOffset);
-            masm.bind(&ok);
-        }
-
-        masm.reserveStack(framePushed);
-
-        if (framePushed <= MAX_UNCHECKED_LEAF_FRAME_SIZE && !isLeaf) {
-            Label ok;
-            masm.branchStackPtrRhs(Assembler::Below,
-                                   Address(WasmTlsReg, offsetof(wasm::TlsData, stackLimit)),
-                                   &ok);
-            masm.wasmTrap(wasm::Trap::StackOverflow, trapOffset);
-            masm.bind(&ok);
-        }
-    }
-
-    MOZ_ASSERT(masm.framePushed() == framePushed);
+    MOZ_ASSERT(masm.framePushed() == 0);
 }
 
 void
diff --git a/js/src/wasm/WasmFrameIter.h b/js/src/wasm/WasmFrameIter.h
index 77c10f8f8935..faae5cc1767e 100644
--- a/js/src/wasm/WasmFrameIter.h
+++ b/js/src/wasm/WasmFrameIter.h
@@ -217,12 +217,10 @@ GenerateJitExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, Callabl
 void
 GenerateJitEntryPrologue(jit::MacroAssembler& masm, Offsets* offsets);
 
-typedef bool IsLeaf;
-
 void
-GenerateFunctionPrologue(jit::MacroAssembler& masm, uint32_t framePushed, IsLeaf isLeaf,
-                         const SigIdDesc& sigId, BytecodeOffset trapOffset, FuncOffsets* offsets,
-                         const mozilla::Maybe<uint32_t>& tier1FuncIndex = mozilla::Nothing());
+GenerateFunctionPrologue(jit::MacroAssembler& masm, const SigIdDesc& sigId,
+                         const mozilla::Maybe<uint32_t>& tier1FuncIndex,
+                         FuncOffsets* offsets);
 
 void
 GenerateFunctionEpilogue(jit::MacroAssembler& masm, unsigned framePushed, FuncOffsets* offsets);
diff --git a/js/src/wasm/WasmStubs.cpp b/js/src/wasm/WasmStubs.cpp
index ade85ce8716a..ed2ca8e69db4 100644
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -962,11 +962,12 @@ GenerateImportFunction(jit::MacroAssembler& masm, const FuncImport& fi, SigIdDes
                        FuncOffsets* offsets)
 {
     AssertExpectedSP(masm);
-    masm.setFramePushed(0);
+
+    GenerateFunctionPrologue(masm, sigId, Nothing(), offsets);
 
     unsigned framePushed = StackDecrementForCall(masm, WasmStackAlignment, fi.sig().args());
-
-    GenerateFunctionPrologue(masm, framePushed, IsLeaf(false), sigId, BytecodeOffset(0), offsets);
+    masm.wasmReserveStackChecked(framePushed, BytecodeOffset(0));
+    MOZ_ASSERT(masm.framePushed() == framePushed);
 
     // The argument register state is already setup by our caller. We just need
     // to be sure not to clobber it before the call.
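
Note (not part of the patch): the new contract is that GenerateFunctionPrologue no longer
touches sp, so each tier reserves its own frame after the prologue, either unchecked via
reserveStack() or with the overflow check folded in via wasmReserveStackChecked(). A minimal
caller-side sketch using only names introduced above; exampleFramePushed is a placeholder for
the tier-specific frame size:

    // Shared prologue: emits the entry/tier offsets and leaves framePushed() == 0.
    wasm::GenerateFunctionPrologue(masm, sigId, mozilla::Nothing(), offsets);
    MOZ_ASSERT(masm.framePushed() == 0);

    // Reserve the frame, trapping on overflow against TlsData::stackLimit.
    // Frames larger than MAX_UNCHECKED_LEAF_FRAME_SIZE are checked before sp
    // is bumped so the trap handler never sees a wild sp.
    masm.wasmReserveStackChecked(exampleFramePushed, wasm::BytecodeOffset(0));
    MOZ_ASSERT(masm.framePushed() == exampleFramePushed);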