diff --git a/js/src/jit-test/tests/wasm/profiling.js b/js/src/jit-test/tests/wasm/profiling.js
index c8bf8467ebce..5f9a5b43fd53 100644
--- a/js/src/jit-test/tests/wasm/profiling.js
+++ b/js/src/jit-test/tests/wasm/profiling.js
@@ -127,7 +127,7 @@ testError(
     (func (export "") (call $foo))
 )`,
 WebAssembly.RuntimeError,
-["", ">", "1,>", "0,1,>", "trap handling,0,1,>", "inline stub,0,1,>", ""]);
+["", ">", "1,>", "0,1,>", "trap handling,0,1,>", "inline stub,0,1,>", "trap handling,0,1,>", "inline stub,0,1,>", ""]);
 
 testError(
 `(module
@@ -142,7 +142,7 @@ WebAssembly.RuntimeError,
 // Technically we have this one *one-instruction* interval where
 // the caller is lost (the stack with "1,>"). It's annoying to fix and shouldn't
 // mess up profiles in practice so we ignore it.
-["", ">", "0,>", "1,0,>", "1,>", "trap handling,0,>", "inline stub,0,>", ""]);
+["", ">", "0,>", "1,0,>", "1,>", "trap handling,0,>", "inline stub,0,>", "trap handling,0,>", "inline stub,0,>", ""]);
 
 (function() {
 var e = wasmEvalText(`
diff --git a/js/src/jit/MacroAssembler.h b/js/src/jit/MacroAssembler.h
index e898a67d836a..23fea9a8c797 100644
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -529,6 +529,12 @@ class MacroAssembler : public MacroAssemblerSpecific
     static void patchNopToNearJump(uint8_t* jump, uint8_t* target) PER_SHARED_ARCH;
     static void patchNearJumpToNop(uint8_t* jump) PER_SHARED_ARCH;
 
+    // Emit a nop that can be patched to and from a call with an int32
+    // relative displacement.
+    CodeOffset nopPatchableToCall(const wasm::CallSiteDesc& desc) PER_SHARED_ARCH;
+    static void patchNopToCall(uint8_t* callsite, uint8_t* target) PER_SHARED_ARCH;
+    static void patchCallToNop(uint8_t* callsite) PER_SHARED_ARCH;
+
   public:
     // ===============================================================
     // ABI function calls.
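[Editor's note, not part of the patch: a minimal standalone sketch of the patchable-nop mechanism the new MacroAssembler methods expose on x86. The byte values follow the 0F 1F 44 00 00 five-byte nop and E8 call rel32 encodings; names are hypothetical, and real patching additionally needs writable/executable memory and icache maintenance, which the patch handles via AutoWritableJitCode/AutoFlushICache.]

    // Illustrative sketch only -- assumes a writable, executable buffer.
    #include <cstdint>
    #include <cstring>

    namespace sketch {

    constexpr uint8_t kCallRel32 = 0xE8;                          // call rel32
    constexpr uint8_t kNop5[5] = {0x0F, 0x1F, 0x44, 0x00, 0x00};  // five-byte nop

    // `callsite` points just past the five-byte region, i.e. at the return address.
    inline void PatchNopToCall(uint8_t* callsite, uint8_t* target) {
        uint8_t* inst = callsite - 5;
        int32_t rel = int32_t(target - callsite);  // displacement is relative to the next instruction
        inst[0] = kCallRel32;
        memcpy(inst + 1, &rel, sizeof(rel));
    }

    inline void PatchCallToNop(uint8_t* callsite) {
        memcpy(callsite - 5, kNop5, sizeof(kNop5));
    }

    } // namespace sketch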
diff --git a/js/src/jit/arm/MacroAssembler-arm.cpp b/js/src/jit/arm/MacroAssembler-arm.cpp
index e6cce5d880c4..ed576102324b 100644
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -5128,6 +5128,34 @@ MacroAssembler::patchNearJumpToNop(uint8_t* jump)
     new (jump) InstNOP();
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    CodeOffset offset(currentOffset());
+    ma_nop();
+    append(desc, CodeOffset(currentOffset()), framePushed());
+    return offset;
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* call, uint8_t* target)
+{
+    uint8_t* inst = call - 4;
+    MOZ_ASSERT(reinterpret_cast<Instruction*>(inst)->is<InstBLImm>() ||
+               reinterpret_cast<Instruction*>(inst)->is<InstNOP>());
+
+    new (inst) InstBLImm(BOffImm(target - inst), Assembler::Always);
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* call)
+{
+    uint8_t* inst = call - 4;
+    MOZ_ASSERT(reinterpret_cast<Instruction*>(inst)->is<InstBLImm>() ||
+               reinterpret_cast<Instruction*>(inst)->is<InstNOP>());
+    new (inst) InstNOP();
+}
+
 void
 MacroAssembler::pushReturnAddress()
 {
diff --git a/js/src/jit/arm64/MacroAssembler-arm64.cpp b/js/src/jit/arm64/MacroAssembler-arm64.cpp
index 54af3f10a9dd..71816d5ea147 100644
--- a/js/src/jit/arm64/MacroAssembler-arm64.cpp
+++ b/js/src/jit/arm64/MacroAssembler-arm64.cpp
@@ -587,6 +587,25 @@ MacroAssembler::patchNearJumpToNop(uint8_t* jump)
     MOZ_CRASH("NYI");
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    MOZ_CRASH("NYI");
+    return CodeOffset();
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* call, uint8_t* target)
+{
+    MOZ_CRASH("NYI");
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* call)
+{
+    MOZ_CRASH("NYI");
+}
+
 void
 MacroAssembler::pushReturnAddress()
 {
diff --git a/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp b/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
index 29f89584bf10..1c6ba37dc746 100644
--- a/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
+++ b/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
@@ -1678,6 +1678,25 @@ MacroAssembler::call(JitCode* c)
     callJitNoProfiler(ScratchRegister);
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    MOZ_CRASH("NYI");
+    return CodeOffset();
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* call, uint8_t* target)
+{
+    MOZ_CRASH("NYI");
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* call)
+{
+    MOZ_CRASH("NYI");
+}
+
 void
 MacroAssembler::pushReturnAddress()
 {
diff --git a/js/src/jit/x86-shared/Assembler-x86-shared.h b/js/src/jit/x86-shared/Assembler-x86-shared.h
index 3e23b3816f18..9eeb2078b8ed 100644
--- a/js/src/jit/x86-shared/Assembler-x86-shared.h
+++ b/js/src/jit/x86-shared/Assembler-x86-shared.h
@@ -1106,6 +1106,13 @@ class AssemblerX86Shared : public AssemblerShared
         X86Encoding::BaseAssembler::patchJumpToTwoByteNop(jump);
     }
 
+    static void patchFiveByteNopToCall(uint8_t* callsite, uint8_t* target) {
+        X86Encoding::BaseAssembler::patchFiveByteNopToCall(callsite, target);
+    }
+    static void patchCallToFiveByteNop(uint8_t* callsite) {
+        X86Encoding::BaseAssembler::patchCallToFiveByteNop(callsite);
+    }
+
     void breakpoint() {
         masm.int3();
     }
diff --git a/js/src/jit/x86-shared/BaseAssembler-x86-shared.h b/js/src/jit/x86-shared/BaseAssembler-x86-shared.h
index 2f38cbab7042..fd26903803f2 100644
--- a/js/src/jit/x86-shared/BaseAssembler-x86-shared.h
+++ b/js/src/jit/x86-shared/BaseAssembler-x86-shared.h
@@ -116,6 +116,40 @@ public:
         jump[1] = OP_NOP;
     }
 
+    static void patchFiveByteNopToCall(uint8_t* callsite, uint8_t* target)
+    {
+        // Note: the offset is relative to the address of the instruction after
+        // the call, which is five bytes long.
+        uint8_t* inst = callsite - sizeof(int32_t) - 1;
+        // The nop may already have been patched to a call; in that case we
+        // simply overwrite it. See also nop_five.
+        MOZ_ASSERT(inst[0] == OP_NOP_0F || inst[0] == OP_CALL_rel32);
+        MOZ_ASSERT_IF(inst[0] == OP_NOP_0F, inst[1] == OP_NOP_1F &&
+                                            inst[2] == OP_NOP_44 &&
+                                            inst[3] == OP_NOP_00 &&
+                                            inst[4] == OP_NOP_00);
+        inst[0] = OP_CALL_rel32;
+        SetRel32(callsite, target);
+    }
+
+    static void patchCallToFiveByteNop(uint8_t* callsite)
+    {
+        // See also patchFiveByteNopToCall and nop_five.
+        uint8_t* inst = callsite - sizeof(int32_t) - 1;
+        // The call may already have been patched back to a nop.
+        if (inst[0] == OP_NOP_0F) {
+            MOZ_ASSERT(inst[1] == OP_NOP_1F && inst[2] == OP_NOP_44 &&
+                       inst[3] == OP_NOP_00 && inst[4] == OP_NOP_00);
+            return;
+        }
+        MOZ_ASSERT(inst[0] == OP_CALL_rel32);
+        inst[0] = OP_NOP_0F;
+        inst[1] = OP_NOP_1F;
+        inst[2] = OP_NOP_44;
+        inst[3] = OP_NOP_00;
+        inst[4] = OP_NOP_00;
+    }
+
     /*
      * The nop multibytes sequences are directly taken from the Intel's
      * architecture software developer manual.
diff --git a/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp b/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
index 8fc07d17bcc2..140502d14a95 100644
--- a/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
+++ b/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
@@ -741,6 +741,28 @@ MacroAssembler::patchNearJumpToNop(uint8_t* jump)
     Assembler::patchJumpToTwoByteNop(jump);
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    CodeOffset offset(currentOffset());
+    masm.nop_five();
+    append(desc, CodeOffset(currentOffset()), framePushed());
+    MOZ_ASSERT_IF(!oom(), size() - offset.offset() == ToggledCallSize(nullptr));
+    return offset;
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* callsite, uint8_t* target)
+{
+    Assembler::patchFiveByteNopToCall(callsite, target);
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* callsite)
+{
+    Assembler::patchCallToFiveByteNop(callsite);
+}
+
 // ===============================================================
 // Jit Frames.
diff --git a/js/src/wasm/WasmBaselineCompile.cpp b/js/src/wasm/WasmBaselineCompile.cpp
index 0803ff30b5ef..f34831639570 100644
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -2080,6 +2080,11 @@ class BaseCompiler
         labelPool_.free(label);
     }
 
+    void insertBreakablePoint(CallSiteDesc::Kind kind) {
+        const uint32_t offset = iter_.currentOffset();
+        masm.nopPatchableToCall(CallSiteDesc(offset, kind));
+    }
+
     //////////////////////////////////////////////////////////////////////
     //
     // Function prologue and epilogue.
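[Editor's note, not part of the patch: the breakable points emitted by insertBreakablePoint are toggled later by Code::adjustEnterAndLeaveFrameTrapsState (see the WasmCode.cpp hunk further down). A rough sketch of the intended counting semantics, with a hypothetical patchAll callback standing in for the loop over EnterFrame/LeaveFrame call sites:]

    // Illustrative sketch only. Several observers (e.g. Debugger instances) may
    // request the enter/leave-frame traps; the call sites are re-patched only
    // when the request count crosses zero.
    #include <cstdint>
    #include <functional>
    #include <utility>

    class FrameTrapCounter {
        uint32_t counter_ = 0;
        std::function<void(bool)> patchAll_;  // hypothetical: patches every EnterFrame/LeaveFrame site

      public:
        explicit FrameTrapCounter(std::function<void(bool)> patchAll)
          : patchAll_(std::move(patchAll)) {}

        void adjust(bool enabled) {
            bool wasEnabled = counter_ > 0;
            if (enabled)
                ++counter_;
            else
                --counter_;
            bool stillEnabled = counter_ > 0;
            if (wasEnabled != stillEnabled)
                patchAll_(stillEnabled);  // toggle nops <-> calls to the debug trap stub
        }
    };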
@@ -2167,6 +2172,58 @@ class BaseCompiler
             for (int32_t i = varLow_ ; i < varHigh_ ; i += 4)
                 storeToFrameI32(scratch, i + 4);
         }
+
+        if (debugEnabled_)
+            insertBreakablePoint(CallSiteDesc::EnterFrame);
+    }
+
+    void saveResult() {
+        MOZ_ASSERT(debugEnabled_);
+        size_t debugFrameOffset = masm.framePushed() - DebugFrame::offsetOfFrame();
+        Address resultsAddress(StackPointer, debugFrameOffset + DebugFrame::offsetOfResults());
+        switch (func_.sig().ret()) {
+          case ExprType::Void:
+            break;
+          case ExprType::I32:
+            masm.store32(RegI32(ReturnReg), resultsAddress);
+            break;
+          case ExprType::I64:
+            masm.store64(RegI64(ReturnReg64), resultsAddress);
+            break;
+          case ExprType::F64:
+            masm.storeDouble(RegF64(ReturnDoubleReg), resultsAddress);
+            break;
+          case ExprType::F32:
+            masm.storeFloat32(RegF32(ReturnFloat32Reg), resultsAddress);
+            break;
+          default:
+            MOZ_CRASH("Function return type");
+        }
+    }
+
+    void restoreResult() {
+        MOZ_ASSERT(debugEnabled_);
+        size_t debugFrameOffset = masm.framePushed() - DebugFrame::offsetOfFrame();
+        Address resultsAddress(StackPointer, debugFrameOffset + DebugFrame::offsetOfResults());
+        switch (func_.sig().ret()) {
+          case ExprType::Void:
+            break;
+          case ExprType::I32:
+            masm.load32(resultsAddress, RegI32(ReturnReg));
+            break;
+          case ExprType::I64:
+            masm.load64(resultsAddress, RegI64(ReturnReg64));
+            break;
+          case ExprType::F64:
+            masm.loadDouble(resultsAddress, RegF64(ReturnDoubleReg));
+            break;
+          case ExprType::F32:
+            masm.loadFloat32(resultsAddress, RegF32(ReturnFloat32Reg));
+            break;
+          default:
+            MOZ_CRASH("Function return type");
+        }
     }
 
     bool endFunction() {
@@ -2188,6 +2245,14 @@ class BaseCompiler
 
         masm.bind(&returnLabel_);
 
+        if (debugEnabled_) {
+            // Save the return value in DebugFrame::return so the debug trap call
+            // can clobber the registers or modify the value; reload it afterwards.
+            saveResult();
+            insertBreakablePoint(CallSiteDesc::LeaveFrame);
+            restoreResult();
+        }
+
         // Restore the TLS register in case it was overwritten by the function.
         loadFromFramePtr(WasmTlsReg, frameOffsetFromSlot(tlsSlot_, MIRType::Pointer));
diff --git a/js/src/wasm/WasmCode.cpp b/js/src/wasm/WasmCode.cpp
index 4d6657613faf..cdba2782af4c 100644
--- a/js/src/wasm/WasmCode.cpp
+++ b/js/src/wasm/WasmCode.cpp
@@ -346,7 +346,8 @@ CodeRange::CodeRange(Kind kind, Offsets offsets)
     kind_(kind)
 {
     MOZ_ASSERT(begin_ <= end_);
-    MOZ_ASSERT(kind_ == Entry || kind_ == Inline || kind_ == FarJumpIsland);
+    MOZ_ASSERT(kind_ == Entry || kind_ == Inline ||
+               kind_ == FarJumpIsland || kind_ == DebugTrap);
 }
 
 CodeRange::CodeRange(Kind kind, ProfilingOffsets offsets)
@@ -458,7 +459,7 @@ Metadata::serializedSize() const
 uint8_t*
 Metadata::serialize(uint8_t* cursor) const
 {
-    MOZ_ASSERT(!debugEnabled);
+    MOZ_ASSERT(!debugEnabled && debugTrapFarJumpOffsets.empty());
     cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
     cursor = SerializeVector(cursor, funcImports);
     cursor = SerializeVector(cursor, funcExports);
@@ -496,6 +497,7 @@ Metadata::deserialize(const uint8_t* cursor)
         (cursor = DeserializePodVector(cursor, &customSections)) &&
         (cursor = filename.deserialize(cursor));
     debugEnabled = false;
+    debugTrapFarJumpOffsets.clear();
     return cursor;
 }
 
@@ -573,6 +575,7 @@ Code::Code(UniqueCodeSegment segment,
   : segment_(Move(segment)),
     metadata_(&metadata),
     maybeBytecode_(maybeBytecode),
+    enterAndLeaveFrameTrapsCounter_(0),
    profilingEnabled_(false)
 {
     MOZ_ASSERT_IF(metadata_->debugEnabled, maybeBytecode);
@@ -820,6 +823,52 @@ Code::ensureProfilingState(JSRuntime* rt, bool newProfilingEnabled)
     return true;
 }
 
+void
+Code::toggleDebugTrap(uint32_t offset, bool enabled)
+{
+    MOZ_ASSERT(offset);
+    uint8_t* trap = segment_->base() + offset;
+    const Uint32Vector& farJumpOffsets = metadata_->debugTrapFarJumpOffsets;
+    if (enabled) {
+        MOZ_ASSERT(farJumpOffsets.length() > 0);
+        size_t i = 0;
+        while (i < farJumpOffsets.length() && offset < farJumpOffsets[i])
+            i++;
+        if (i >= farJumpOffsets.length() ||
+            (i > 0 && offset - farJumpOffsets[i - 1] < farJumpOffsets[i] - offset))
+            i--;
+        uint8_t* farJump = segment_->base() + farJumpOffsets[i];
+        MacroAssembler::patchNopToCall(trap, farJump);
+    } else {
+        MacroAssembler::patchCallToNop(trap);
+    }
+}
+
+void
+Code::adjustEnterAndLeaveFrameTrapsState(JSContext* cx, bool enabled)
+{
+    MOZ_ASSERT(metadata_->debugEnabled);
+    MOZ_ASSERT_IF(!enabled, enterAndLeaveFrameTrapsCounter_ > 0);
+
+    bool wasEnabled = enterAndLeaveFrameTrapsCounter_ > 0;
+    if (enabled)
+        ++enterAndLeaveFrameTrapsCounter_;
+    else
+        --enterAndLeaveFrameTrapsCounter_;
+    bool stillEnabled = enterAndLeaveFrameTrapsCounter_ > 0;
+    if (wasEnabled == stillEnabled)
+        return;
+
+    AutoWritableJitCode awjc(cx->runtime(), segment_->base(), segment_->codeLength());
+    AutoFlushICache afc("Code::adjustEnterAndLeaveFrameTrapsState");
+    AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->codeLength());
+    for (const CallSite& callSite : metadata_->callSites) {
+        if (callSite.kind() != CallSite::EnterFrame && callSite.kind() != CallSite::LeaveFrame)
+            continue;
+        toggleDebugTrap(callSite.returnAddressOffset(), stillEnabled);
+    }
+}
+
 void
 Code::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                     Metadata::SeenSet* seenMetadata,
diff --git a/js/src/wasm/WasmCode.h b/js/src/wasm/WasmCode.h
index 4a8f1480f3ad..3f1f4fa80466 100644
--- a/js/src/wasm/WasmCode.h
+++ b/js/src/wasm/WasmCode.h
@@ -19,16 +19,19 @@
 #ifndef wasm_code_h
 #define wasm_code_h
 
+#include "js/HashTable.h"
 #include "wasm/WasmTypes.h"
 
 namespace js {
 
 struct AsmJSMetadata;
+class WasmActivation;
 
 namespace wasm {
 
 struct LinkData;
 struct Metadata;
+class FrameIterator;
 
 // A wasm CodeSegment owns the allocated executable code for a wasm module.
 // This allocation also currently includes the global data segment, which allows
@@ -240,6 +243,8 @@ class CodeRange
         ImportJitExit,     // fast-path calling from wasm into JIT code
         ImportInterpExit,  // slow-path calling from wasm into C++ interp
         TrapExit,          // calls C++ to report and jumps to throw stub
+        DebugTrap,         // calls C++ to handle debug events such as
+                           // enter/leave frame or breakpoint
         FarJumpIsland,     // inserted to connect otherwise out-of-range insns
         Inline             // stub that is jumped-to, not called, and thus
                            // replaces/loses preceding innermost frame
@@ -469,6 +474,7 @@ struct Metadata : ShareableBase<Metadata>, MetadataCacheablePod
 
     // Debug-enabled code is not serialized.
     bool debugEnabled;
+    Uint32Vector debugTrapFarJumpOffsets;
 
     bool usesMemory() const { return UsesMemory(memoryUsage); }
     bool hasSharedMemory() const { return memoryUsage == MemoryUsage::Shared; }
@@ -574,8 +580,11 @@ class Code
     const SharedBytes        maybeBytecode_;
     UniqueGeneratedSourceMap maybeSourceMap_;
     CacheableCharsVector     funcLabels_;
+    uint32_t                 enterAndLeaveFrameTrapsCounter_;
     bool                     profilingEnabled_;
 
+    void toggleDebugTrap(uint32_t offset, bool enabled);
+
   public:
     Code(UniqueCodeSegment segment,
          const Metadata& metadata,
@@ -614,6 +623,12 @@ class Code
     bool profilingEnabled() const { return profilingEnabled_; }
     const char* profilingLabel(uint32_t funcIndex) const { return funcLabels_[funcIndex].get(); }
 
+    // The Code can track enter/leave frame events; any such event triggers a
+    // debug trap. The enter-frame events are enabled across all functions,
+    // but the leave-frame events only for a particular function.
+
+    void adjustEnterAndLeaveFrameTrapsState(JSContext* cx, bool enabled);
+
     // about:memory reporting:
 
     void addSizeOfMisc(MallocSizeOf mallocSizeOf,
diff --git a/js/src/wasm/WasmDebugFrame.cpp b/js/src/wasm/WasmDebugFrame.cpp
index d4a8c7d919b7..04e374b7d138 100644
--- a/js/src/wasm/WasmDebugFrame.cpp
+++ b/js/src/wasm/WasmDebugFrame.cpp
@@ -50,7 +50,7 @@ DebugFrame::observeFrame(JSContext* cx)
     if (observing_)
         return;
 
-    // TODO make sure wasm::Code onLeaveFrame traps are on
+    instance()->code().adjustEnterAndLeaveFrameTrapsState(cx, /* enabled = */ true);
     observing_ = true;
 }
 
@@ -60,6 +60,6 @@ DebugFrame::leaveFrame(JSContext* cx)
     if (!observing_)
         return;
 
-    // TODO make sure wasm::Code onLeaveFrame traps are off
+    instance()->code().adjustEnterAndLeaveFrameTrapsState(cx, /* enabled = */ false);
     observing_ = false;
 }
diff --git a/js/src/wasm/WasmFrameIterator.cpp b/js/src/wasm/WasmFrameIterator.cpp
index 5d991301a882..38ccae03d7ee 100644
--- a/js/src/wasm/WasmFrameIterator.cpp
+++ b/js/src/wasm/WasmFrameIterator.cpp
@@ -151,6 +151,7 @@ FrameIterator::settle()
       case CodeRange::ImportJitExit:
       case CodeRange::ImportInterpExit:
      case CodeRange::TrapExit:
+      case CodeRange::DebugTrap:
      case CodeRange::Inline:
      case CodeRange::FarJumpIsland:
        MOZ_CRASH("Should not encounter an exit during iteration");
@@ -240,6 +241,14 @@ FrameIterator::debugFrame() const
     return static_cast<DebugFrame*>(buf);
 }
 
+const CallSite*
+FrameIterator::debugTrapCallsite() const
+{
+    MOZ_ASSERT(!done() && debugEnabled());
+    MOZ_ASSERT(callsite_->kind() == CallSite::EnterFrame || callsite_->kind() == CallSite::LeaveFrame);
+    return callsite_;
+}
+
 /*****************************************************************************/
 // Prologue/epilogue code generation
 
@@ -593,6 +602,7 @@ ProfilingFrameIterator::initFromFP()
       case CodeRange::ImportJitExit:
      case CodeRange::ImportInterpExit:
      case CodeRange::TrapExit:
+      case CodeRange::DebugTrap:
      case CodeRange::Inline:
      case CodeRange::FarJumpIsland:
        MOZ_CRASH("Unexpected CodeRange kind");
@@ -721,6 +731,7 @@ ProfilingFrameIterator::ProfilingFrameIterator(const WasmActivation& activation,
         callerFP_ = nullptr;
         break;
       }
+      case CodeRange::DebugTrap:
      case CodeRange::Inline: {
        // The throw stub clears WasmActivation::fp on it's way out.
        if (!fp) {
@@ -777,6 +788,7 @@ ProfilingFrameIterator::operator++()
      case CodeRange::ImportJitExit:
      case CodeRange::ImportInterpExit:
      case CodeRange::TrapExit:
+      case CodeRange::DebugTrap:
      case CodeRange::Inline:
      case CodeRange::FarJumpIsland:
        stackAddress_ = callerFP_;
@@ -803,6 +815,7 @@ ProfilingFrameIterator::label() const
     const char* importInterpDescription = "slow FFI trampoline (in asm.js)";
     const char* nativeDescription = "native call (in asm.js)";
     const char* trapDescription = "trap handling (in asm.js)";
+    const char* debugTrapDescription = "debug trap handling (in asm.js)";
 
     switch (exitReason_) {
       case ExitReason::None:
@@ -815,6 +828,8 @@ ProfilingFrameIterator::label() const
         return nativeDescription;
      case ExitReason::Trap:
        return trapDescription;
+      case ExitReason::DebugTrap:
+        return debugTrapDescription;
     }
 
     switch (codeRange_->kind()) {
@@ -823,6 +838,7 @@ ProfilingFrameIterator::label() const
      case CodeRange::ImportJitExit:    return importJitDescription;
      case CodeRange::ImportInterpExit: return importInterpDescription;
      case CodeRange::TrapExit:         return trapDescription;
+      case CodeRange::DebugTrap:        return debugTrapDescription;
      case CodeRange::Inline:           return "inline stub (in asm.js)";
      case CodeRange::FarJumpIsland:    return "interstitial (in asm.js)";
     }
diff --git a/js/src/wasm/WasmFrameIterator.h b/js/src/wasm/WasmFrameIterator.h
index e7a11a9a2818..4e53ebcc04cd 100644
--- a/js/src/wasm/WasmFrameIterator.h
+++ b/js/src/wasm/WasmFrameIterator.h
@@ -75,6 +75,7 @@ class FrameIterator
     Instance* instance() const;
     bool debugEnabled() const;
     DebugFrame* debugFrame() const;
+    const CallSite* debugTrapCallsite() const;
 };
 
 // An ExitReason describes the possible reasons for leaving compiled wasm code
@@ -85,7 +86,8 @@ enum class ExitReason : uint32_t
     ImportJit,    // fast-path call directly into JIT code
     ImportInterp, // slow-path call into C++ Invoke()
     Native,       // call to native C++ code (e.g., Math.sin, ToInt32(), interrupt)
-    Trap          // call to trap handler for the trap in WasmActivation::trap
+    Trap,         // call to trap handler for the trap in WasmActivation::trap
+    DebugTrap     // call to debug trap handler
 };
 
 // Iterates over the frames of a single WasmActivation, given an
diff --git a/js/src/wasm/WasmGenerator.cpp b/js/src/wasm/WasmGenerator.cpp
index 7896c4fd176a..3ce162cc2aa1 100644
--- a/js/src/wasm/WasmGenerator.cpp
+++ b/js/src/wasm/WasmGenerator.cpp
@@ -378,6 +378,28 @@ ModuleGenerator::patchCallSites(TrapExitOffsetArray* maybeTrapExits)
             masm_.patchCall(callerOffset, *existingTrapFarJumps[cs.trap()]);
             break;
           }
+          case CallSiteDesc::EnterFrame:
+          case CallSiteDesc::LeaveFrame: {
+            Uint32Vector& jumps = metadata_->debugTrapFarJumpOffsets;
+            if (jumps.empty() ||
+                uint32_t(abs(int32_t(jumps.back()) - int32_t(callerOffset))) >= JumpRange())
+            {
+                Offsets offsets;
+                offsets.begin = masm_.currentOffset();
+                uint32_t jumpOffset = masm_.farJumpWithPatch().offset();
+                offsets.end = masm_.currentOffset();
+                if (masm_.oom())
+                    return false;
+
+                if (!metadata_->codeRanges.emplaceBack(CodeRange::FarJumpIsland, offsets))
+                    return false;
+                if (!debugTrapFarJumps_.emplaceBack(jumpOffset))
+                    return false;
+                if (!jumps.emplaceBack(offsets.begin))
+                    return false;
+            }
+            break;
+          }
         }
     }
 
@@ -385,7 +407,7 @@ ModuleGenerator::patchCallSites(TrapExitOffsetArray* maybeTrapExits)
 }
 
 bool
-ModuleGenerator::patchFarJumps(const TrapExitOffsetArray& trapExits)
+ModuleGenerator::patchFarJumps(const TrapExitOffsetArray& trapExits, const Offsets& debugTrapStub)
 {
     for (CallThunk& callThunk : metadata_->callThunks) {
         uint32_t funcIndex = callThunk.u.funcIndex;
@@ -397,6 +419,9 @@ ModuleGenerator::patchFarJumps(const TrapExitOffsetArray& trapExits)
     for (const TrapFarJump& farJump : masm_.trapFarJumps())
         masm_.patchFarJump(farJump.jump, trapExits[farJump.trap].begin);
 
+    for (uint32_t debugTrapFarJump : debugTrapFarJumps_)
+        masm_.patchFarJump(CodeOffset(debugTrapFarJump), debugTrapStub.begin);
+
     return true;
 }
 
@@ -512,6 +537,7 @@ ModuleGenerator::finishCodegen()
     Offsets unalignedAccessExit;
     Offsets interruptExit;
     Offsets throwStub;
+    Offsets debugTrapStub;
 
     {
         TempAllocator alloc(&lifo_);
@@ -539,6 +565,7 @@ ModuleGenerator::finishCodegen()
         unalignedAccessExit = GenerateUnalignedExit(masm, &throwLabel);
         interruptExit = GenerateInterruptExit(masm, &throwLabel);
         throwStub = GenerateThrowStub(masm, &throwLabel);
+        debugTrapStub = GenerateDebugTrapStub(masm, &throwLabel);
 
         if (masm.oom() || !masm_.asmMergeWith(masm))
             return false;
@@ -588,6 +615,10 @@ ModuleGenerator::finishCodegen()
     if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, throwStub))
         return false;
 
+    debugTrapStub.offsetBy(offsetInWhole);
+    if (!metadata_->codeRanges.emplaceBack(CodeRange::DebugTrap, debugTrapStub))
+        return false;
+
     // Fill in LinkData with the offsets of these stubs.
 
     linkData_.outOfBoundsOffset = outOfBoundsExit.begin;
@@ -600,7 +631,7 @@ ModuleGenerator::finishCodegen()
     if (!patchCallSites(&trapExits))
         return false;
 
-    if (!patchFarJumps(trapExits))
+    if (!patchFarJumps(trapExits, debugTrapStub))
         return false;
 
     // Code-generation is complete!
@@ -1151,6 +1182,7 @@ ModuleGenerator::finish(const ShareableBytes& bytecode)
     metadata_->codeRanges.podResizeToFit();
     metadata_->callSites.podResizeToFit();
     metadata_->callThunks.podResizeToFit();
+    metadata_->debugTrapFarJumpOffsets.podResizeToFit();
 
     // For asm.js, the tables vector is over-allocated (to avoid resize during
     // parallel compilation). Shrink it back down to fit.
@@ -1168,6 +1200,15 @@ ModuleGenerator::finish(const ShareableBytes& bytecode)
     }
 #endif
 
+    // Assert debugTrapFarJumpOffsets are sorted.
+#ifdef DEBUG
+    uint32_t lastOffset = 0;
+    for (uint32_t debugTrapFarJumpOffset : metadata_->debugTrapFarJumpOffsets) {
+        MOZ_ASSERT(debugTrapFarJumpOffset >= lastOffset);
+        lastOffset = debugTrapFarJumpOffset;
+    }
+#endif
+
     if (!finishLinkData(code))
         return nullptr;
 
diff --git a/js/src/wasm/WasmGenerator.h b/js/src/wasm/WasmGenerator.h
index d240b87bf99a..eb8cee18dcca 100644
--- a/js/src/wasm/WasmGenerator.h
+++ b/js/src/wasm/WasmGenerator.h
@@ -234,6 +234,7 @@ class MOZ_STACK_CLASS ModuleGenerator
     Uint32Set    exportedFuncs_;
     uint32_t     lastPatchedCallsite_;
     uint32_t     startOfUnpatchedCallsites_;
+    Uint32Vector debugTrapFarJumps_;
 
     // Parallel compilation
     bool         parallel_;
@@ -254,7 +255,7 @@ class MOZ_STACK_CLASS ModuleGenerator
     const CodeRange& funcCodeRange(uint32_t funcIndex) const;
     uint32_t numFuncImports() const;
     MOZ_MUST_USE bool patchCallSites(TrapExitOffsetArray* maybeTrapExits = nullptr);
-    MOZ_MUST_USE bool patchFarJumps(const TrapExitOffsetArray& trapExits);
+    MOZ_MUST_USE bool patchFarJumps(const TrapExitOffsetArray& trapExits, const Offsets& debugTrapStub);
     MOZ_MUST_USE bool finishTask(CompileTask* task);
     MOZ_MUST_USE bool finishOutstandingTask();
     MOZ_MUST_USE bool finishFuncExports();
diff --git a/js/src/wasm/WasmInstance.cpp b/js/src/wasm/WasmInstance.cpp
index 88af4f0db69e..8d9907bb828f 100644
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -329,7 +329,8 @@ Instance::Instance(JSContext* cx,
     object_(object),
     code_(Move(code)),
     memory_(memory),
-    tables_(Move(tables))
+    tables_(Move(tables)),
+    enterFrameTrapsEnabled_(false)
 {
     MOZ_ASSERT(funcImports.length() == metadata().funcImports.length());
     MOZ_ASSERT(tables_.length() == metadata().tables.length());
@@ -832,6 +833,16 @@ Instance::ensureProfilingState(JSContext* cx, bool newProfilingEnabled)
     return true;
 }
 
+void
+Instance::ensureEnterFrameTrapsState(JSContext* cx, bool enabled)
+{
+    if (enterFrameTrapsEnabled_ == enabled)
+        return;
+
+    code_->adjustEnterAndLeaveFrameTrapsState(cx, enabled);
+    enterFrameTrapsEnabled_ = enabled;
+}
+
 void
 Instance::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                         Metadata::SeenSet* seenMetadata,
diff --git a/js/src/wasm/WasmInstance.h b/js/src/wasm/WasmInstance.h
index 8d6ee0b737cd..0883c21daf59 100644
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -41,6 +41,7 @@ class Instance
     GCPtrWasmMemoryObject memory_;
     SharedTableVector     tables_;
    TlsData               tlsData_;
+    bool                  enterFrameTrapsEnabled_;
 
     // Internal helpers:
     const void** addressOfSigId(const SigIdDesc& sigId) const;
@@ -124,6 +125,11 @@ class Instance
 
     MOZ_MUST_USE bool ensureProfilingState(JSContext* cx, bool enabled);
 
+    // Debug support:
+    bool debugEnabled() const { return code_->metadata().debugEnabled; }
+    bool enterFrameTrapsEnabled() const { return enterFrameTrapsEnabled_; }
+    void ensureEnterFrameTrapsState(JSContext* cx, bool enabled);
+
     // about:memory reporting:
 
     void addSizeOfMisc(MallocSizeOf mallocSizeOf,
diff --git a/js/src/wasm/WasmStubs.cpp b/js/src/wasm/WasmStubs.cpp
index aab8b1d929be..13ef343beae3 100644
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -946,6 +946,10 @@ static const LiveRegisterSet AllRegsExceptSP(
     GeneralRegisterSet(Registers::AllMask & ~(uint32_t(1) << Registers::StackPointer)),
     FloatRegisterSet(FloatRegisters::AllMask));
 
+static const LiveRegisterSet AllAllocatableRegs = LiveRegisterSet(
+    GeneralRegisterSet(Registers::AllocatableMask),
+    FloatRegisterSet(FloatRegisters::AllMask));
+
 // The async interrupt-callback exit is called from arbitrarily-interrupted wasm
 // code. That means we must first save *all* registers and restore *all*
 // registers (except the stack pointer) when we resume. The address to resume to
@@ -1127,6 +1131,11 @@ wasm::GenerateThrowStub(MacroAssembler& masm, Label* throwLabel)
     Offsets offsets;
     offsets.begin = masm.currentOffset();
 
+    masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));
+    if (ShadowStackSpace)
+        masm.subFromStackPtr(Imm32(ShadowStackSpace));
+    masm.call(SymbolicAddress::HandleDebugThrow);
+
     // We are about to pop all frames in this WasmActivation. Set fp to null to
     // maintain the invariant that fp is either null or pointing to a valid
     // frame.
@@ -1146,3 +1155,50 @@ wasm::GenerateThrowStub(MacroAssembler& masm, Label* throwLabel)
     offsets.end = masm.currentOffset();
     return offsets;
 }
+
+// Generate a stub that handles togglable enter/leave frame traps or breakpoints.
+// The trap records the frame pointer (via GenerateExitPrologue) and saves most
+// registers so it does not perturb the code generated by WasmBaselineCompile.
+Offsets
+wasm::GenerateDebugTrapStub(MacroAssembler& masm, Label* throwLabel)
+{
+    masm.haltingAlign(CodeAlignment);
+
+    masm.setFramePushed(0);
+
+    ProfilingOffsets offsets;
+    GenerateExitPrologue(masm, 0, ExitReason::DebugTrap, &offsets);
+
+    // Save all registers used between baseline compiler operations.
+    masm.PushRegsInMask(AllAllocatableRegs);
+
+    uint32_t framePushed = masm.framePushed();
+
+    // This method might be called with an unaligned stack -- align it and
+    // save the old stack pointer at the top.
+    Register scratch = ABINonArgReturnReg0;
+    masm.moveStackPtrTo(scratch);
+    masm.subFromStackPtr(Imm32(sizeof(intptr_t)));
+    masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));
+    masm.storePtr(scratch, Address(masm.getStackPointer(), 0));
+
+    if (ShadowStackSpace)
+        masm.subFromStackPtr(Imm32(ShadowStackSpace));
+    masm.assertStackAlignment(ABIStackAlignment);
+    masm.call(SymbolicAddress::HandleDebugTrap);
+
+    masm.branchIfFalseBool(ReturnReg, throwLabel);
+
+    if (ShadowStackSpace)
+        masm.addToStackPtr(Imm32(ShadowStackSpace));
+    masm.Pop(scratch);
+    masm.moveToStackPtr(scratch);
+
+    masm.setFramePushed(framePushed);
+    masm.PopRegsInMask(AllAllocatableRegs);
+
+    GenerateExitEpilogue(masm, 0, ExitReason::DebugTrap, &offsets);
+
+    offsets.end = masm.currentOffset();
+    return offsets;
+}
diff --git a/js/src/wasm/WasmStubs.h b/js/src/wasm/WasmStubs.h
index d644aa83d969..e2c5a1f20a84 100644
--- a/js/src/wasm/WasmStubs.h
+++ b/js/src/wasm/WasmStubs.h
@@ -58,6 +58,10 @@ GenerateInterruptExit(jit::MacroAssembler& masm, jit::Label* throwLabel);
 extern Offsets
 GenerateThrowStub(jit::MacroAssembler& masm, jit::Label* throwLabel);
 
+extern Offsets
+GenerateDebugTrapStub(jit::MacroAssembler& masm, jit::Label* throwLabel);
+
 } // namespace wasm
 } // namespace js
diff --git a/js/src/wasm/WasmTypes.cpp b/js/src/wasm/WasmTypes.cpp
index 191543e320ed..03bfc0c17436 100644
--- a/js/src/wasm/WasmTypes.cpp
+++ b/js/src/wasm/WasmTypes.cpp
@@ -98,6 +98,52 @@ WasmHandleExecutionInterrupt()
     return success;
 }
 
+static bool
+WasmHandleDebugTrap()
+{
+    WasmActivation* activation = JSRuntime::innermostWasmActivation();
+    JSContext* cx = activation->cx();
+
+    FrameIterator iter(*activation);
+    MOZ_ASSERT(iter.debugEnabled());
+    const CallSite* site = iter.debugTrapCallsite();
+    MOZ_ASSERT(site);
+    if (site->kind() == CallSite::EnterFrame) {
+        if (!iter.instance()->enterFrameTrapsEnabled())
+            return true;
+        DebugFrame* frame = iter.debugFrame();
+        frame->setIsDebuggee();
+        frame->observeFrame(cx);
+        // TODO call onEnterFrame
+        return true;
+    }
+    if (site->kind() == CallSite::LeaveFrame) {
+        DebugFrame* frame = iter.debugFrame();
+        // TODO call onLeaveFrame
+        frame->leaveFrame(cx);
+        return true;
+    }
+    // TODO baseline debug traps
+    MOZ_CRASH();
+    return true;
+}
+
+static void
+WasmHandleDebugThrow()
+{
+    WasmActivation* activation = JSRuntime::innermostWasmActivation();
+    JSContext* cx = activation->cx();
+
+    for (FrameIterator iter(*activation); !iter.done(); ++iter) {
+        if (!iter.debugEnabled())
+            continue;
+
+        DebugFrame* frame = iter.debugFrame();
+        // TODO call onExceptionUnwind and onLeaveFrame
+        frame->leaveFrame(cx);
+    }
+}
+
 static void
 WasmReportTrap(int32_t trapIndex)
 {
@@ -277,6 +323,10 @@ wasm::AddressOf(SymbolicAddress imm, ExclusiveContext* cx)
         return FuncCast(WasmReportOverRecursed, Args_General0);
      case SymbolicAddress::HandleExecutionInterrupt:
        return FuncCast(WasmHandleExecutionInterrupt, Args_General0);
+      case SymbolicAddress::HandleDebugTrap:
+        return FuncCast(WasmHandleDebugTrap, Args_General0);
+      case SymbolicAddress::HandleDebugThrow:
+        return FuncCast(WasmHandleDebugThrow, Args_General0);
      case SymbolicAddress::ReportTrap:
        return FuncCast(WasmReportTrap, Args_General1);
      case SymbolicAddress::ReportOutOfBounds:
diff --git a/js/src/wasm/WasmTypes.h b/js/src/wasm/WasmTypes.h
index 3d83b6d67529..90db4ff82ab7 100644
--- a/js/src/wasm/WasmTypes.h
+++ b/js/src/wasm/WasmTypes.h
@@ -890,14 +890,16 @@ struct TrapOffset
 
 class CallSiteDesc
 {
-    uint32_t lineOrBytecode_ : 30;
-    uint32_t kind_ : 2;
+    uint32_t lineOrBytecode_ : 29;
+    uint32_t kind_ : 3;
   public:
     enum Kind {
         Func,       // pc-relative call to a specific function
        Dynamic,    // dynamic callee called via register
        Symbolic,   // call to a single symbolic callee
-        TrapExit    // call to a trap exit
+        TrapExit,   // call to a trap exit
+        EnterFrame, // call to an enter frame handler
+        LeaveFrame  // call to a leave frame handler
     };
     CallSiteDesc() {}
     explicit CallSiteDesc(Kind kind)
@@ -1014,6 +1016,8 @@ enum class SymbolicAddress
     InterruptUint32,
     ReportOverRecursed,
     HandleExecutionInterrupt,
+    HandleDebugTrap,
+    HandleDebugThrow,
     ReportTrap,
     ReportOutOfBounds,
     ReportUnalignedAccess,