Bug 1772987 part 3 - Save caller FP in JitEntry stub. r=rhunt

The JitEntry stub clobbers the FP register in the prologue (to make stack unwinding work).
We need to change it to restore FP to allow adding frame pointers to Warp frames.

This patch changes the prologue to the standard frame prologue, and the epilogue
now stores the return offset to let the profiler correctly handle the instructions
between popping FP and returning.

ARM64 adds some complexity because the JIT ABI aligns the stack at the return address,
so we need to add a padding word to maintain 16-byte stack alignment. When we add the
`callerFP` to `JitFrameLayout` (similar to `wasm::Frame`) we can hopefully simplify this
a bit.

Differential Revision: https://phabricator.services.mozilla.com/D148487
This commit is contained in:
Jan de Mooij 2022-06-09 06:07:05 +00:00
Parent ceb5751e84
Commit de250ada91
6 changed files with 153 additions and 41 deletions

View file

@ -136,7 +136,7 @@ CodeRange::CodeRange(Kind kind, CallableOffsets offsets)
CodeRange::CodeRange(Kind kind, uint32_t funcIndex, CallableOffsets offsets) CodeRange::CodeRange(Kind kind, uint32_t funcIndex, CallableOffsets offsets)
: begin_(offsets.begin), ret_(offsets.ret), end_(offsets.end), kind_(kind) { : begin_(offsets.begin), ret_(offsets.ret), end_(offsets.end), kind_(kind) {
MOZ_ASSERT(isImportExit() && !isImportJitExit()); MOZ_ASSERT((isImportExit() && !isImportJitExit()) || isJitEntry());
MOZ_ASSERT(begin_ < ret_); MOZ_ASSERT(begin_ < ret_);
MOZ_ASSERT(ret_ < end_); MOZ_ASSERT(ret_ < end_);
u.funcIndex_ = funcIndex; u.funcIndex_ = funcIndex;

View file

@ -343,12 +343,12 @@ class CodeRange {
bool isDebugTrap() const { return kind() == DebugTrap; } bool isDebugTrap() const { return kind() == DebugTrap; }
bool isThunk() const { return kind() == FarJumpIsland; } bool isThunk() const { return kind() == FarJumpIsland; }
// Function, import exits and trap exits have standard callable prologues // Functions, import exits, trap exits and JitEntry stubs have standard
// and epilogues. Asynchronous frame iteration needs to know the offset of // callable prologues and epilogues. Asynchronous frame iteration needs to
// the return instruction to calculate the frame pointer. // know the offset of the return instruction to calculate the frame pointer.
bool hasReturn() const { bool hasReturn() const {
return isFunction() || isImportExit() || isDebugTrap(); return isFunction() || isImportExit() || isDebugTrap() || isJitEntry();
} }
uint32_t ret() const { uint32_t ret() const {
MOZ_ASSERT(hasReturn()); MOZ_ASSERT(hasReturn());

View file

@ -420,6 +420,39 @@ static_assert(FrameWithInstances::sizeOfInstanceFields() == 2 * sizeof(void*),
static_assert(sizeof(Frame) % 16 == 0, "frame is aligned"); static_assert(sizeof(Frame) % 16 == 0, "frame is aligned");
#endif #endif
// The JitEntry stub uses the following stack layout:
//
// [JS JIT caller]
// [JitFrameLayout] (frame descriptor, return address, etc)
// [JSJitToWasmFrame] (saved frame pointer)
// [Wasm arguments]
//
// The caller's frame pointer is not yet stored in JitFrameLayout, so
// JSJitToWasmFrame represents this space. On ARM64 it also has padding to
// ensure SP is 16-byte aligned.
// Scratch frame pushed by the JitEntry stub prologue to save the JS JIT
// caller's frame pointer. Sits between the caller's JitFrameLayout and the
// wasm arguments (see the stack-layout comment above). Once JitFrameLayout
// itself gains a callerFP slot, this class can be removed.
class JSJitToWasmFrame {
// 'protected' rather than 'private' to silence the unused-private-field
// warning for padding_.
protected: // Silence warning about unused padding_ field.
#if defined(JS_CODEGEN_ARM64)
// ARM64 requires SP to be 16-byte aligned, so pad the frame to two words.
uintptr_t padding_;
#endif
uint8_t* callerFP_; // The JS JIT caller's frame pointer.
public:
// Byte offset of the saved caller FP within this frame. The prologue points
// the frame pointer register at this slot.
static constexpr uint32_t callerFPOffset() {
return offsetof(JSJitToWasmFrame, callerFP_);
}
// Distance from frame pointer to the JitFrameLayout pushed by the caller.
static constexpr uint32_t jitFrameLayoutOffsetFromFP() {
return sizeof(JSJitToWasmFrame) - callerFPOffset();
}
};
#if defined(JS_CODEGEN_ARM64)
// The padding_ word above is what makes this hold on ARM64.
static_assert(sizeof(JSJitToWasmFrame) % 16 == 0, "frame is aligned");
#endif
// I.e. callerFP_ must be the last word of the frame, immediately below the
// caller's return address.
static_assert(JSJitToWasmFrame::jitFrameLayoutOffsetFromFP() == sizeof(void*),
"fp must point to caller fp followed by return address for "
"native stack walking to work");
} // namespace wasm } // namespace wasm
} // namespace js } // namespace js

View file

@ -199,7 +199,8 @@ void WasmFrameIter::popFrame() {
// //
// The next value of FP is just a regular jit frame used as a marker to // The next value of FP is just a regular jit frame used as a marker to
// know that we should transition to a JSJit frame iterator. // know that we should transition to a JSJit frame iterator.
unwoundJitCallerFP_ = reinterpret_cast<uint8_t*>(fp_); unwoundJitCallerFP_ = reinterpret_cast<uint8_t*>(fp_) +
JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
unwoundJitFrameType_ = FrameType::JSJitToWasm; unwoundJitFrameType_ = FrameType::JSJitToWasm;
fp_ = nullptr; fp_ = nullptr;
@ -346,17 +347,20 @@ static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1; static const unsigned PushedFP = 1;
static const unsigned SetFP = 4; static const unsigned SetFP = 4;
static const unsigned PoppedFP = 0; static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_X86) #elif defined(JS_CODEGEN_X86)
static const unsigned PushedRetAddr = 0; static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1; static const unsigned PushedFP = 1;
static const unsigned SetFP = 3; static const unsigned SetFP = 3;
static const unsigned PoppedFP = 0; static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_ARM) #elif defined(JS_CODEGEN_ARM)
static const unsigned BeforePushRetAddr = 0; static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 4; static const unsigned PushedRetAddr = 4;
static const unsigned PushedFP = 8; static const unsigned PushedFP = 8;
static const unsigned SetFP = 12; static const unsigned SetFP = 12;
static const unsigned PoppedFP = 0; static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_ARM64) #elif defined(JS_CODEGEN_ARM64)
// On ARM64 we do not use push or pop; the prologues and epilogues are // On ARM64 we do not use push or pop; the prologues and epilogues are
// structured differently due to restrictions on SP alignment. Even so, // structured differently due to restrictions on SP alignment. Even so,
@ -367,6 +371,7 @@ static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 12; static const unsigned PushedFP = 12;
static const unsigned SetFP = 16; static const unsigned SetFP = 16;
static const unsigned PoppedFP = 4; static const unsigned PoppedFP = 4;
static const unsigned PoppedFPJitEntry = 8;
static_assert(BeforePushRetAddr == 0, "Required by StartUnwinding"); static_assert(BeforePushRetAddr == 0, "Required by StartUnwinding");
static_assert(PushedFP > PushedRetAddr, "Required by StartUnwinding"); static_assert(PushedFP > PushedRetAddr, "Required by StartUnwinding");
#elif defined(JS_CODEGEN_MIPS64) #elif defined(JS_CODEGEN_MIPS64)
@ -385,10 +390,10 @@ static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1; static const unsigned PushedFP = 1;
static const unsigned SetFP = 2; static const unsigned SetFP = 2;
static const unsigned PoppedFP = 3; static const unsigned PoppedFP = 3;
static const unsigned PoppedFPJitEntry = 4;
#else #else
# error "Unknown architecture!" # error "Unknown architecture!"
#endif #endif
static constexpr unsigned SetJitEntryFP = PushedRetAddr + SetFP - PushedFP;
static void LoadActivation(MacroAssembler& masm, const Register& dest) { static void LoadActivation(MacroAssembler& masm, const Register& dest) {
// WasmCall pushes a JitActivation. // WasmCall pushes a JitActivation.
@ -800,13 +805,15 @@ void wasm::GenerateJitExitEpilogue(MacroAssembler& masm, unsigned framePushed,
MOZ_ASSERT(masm.framePushed() == 0); MOZ_ASSERT(masm.framePushed() == 0);
} }
void wasm::GenerateJitEntryPrologue(MacroAssembler& masm, Offsets* offsets) { void wasm::GenerateJitEntryPrologue(MacroAssembler& masm,
CallableOffsets* offsets) {
masm.haltingAlign(CodeAlignment); masm.haltingAlign(CodeAlignment);
{ {
// Push the return address.
#if defined(JS_CODEGEN_ARM) #if defined(JS_CODEGEN_ARM)
AutoForbidPoolsAndNops afp(&masm, AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 2); /* number of instructions in scope = */ 3);
offsets->begin = masm.currentOffset(); offsets->begin = masm.currentOffset();
static_assert(BeforePushRetAddr == 0); static_assert(BeforePushRetAddr == 0);
masm.push(lr); masm.push(lr);
@ -818,13 +825,12 @@ void wasm::GenerateJitEntryPrologue(MacroAssembler& masm, Offsets* offsets) {
masm.push(ra); masm.push(ra);
#elif defined(JS_CODEGEN_ARM64) #elif defined(JS_CODEGEN_ARM64)
AutoForbidPoolsAndNops afp(&masm, AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 3); /* number of instructions in scope = */ 4);
offsets->begin = masm.currentOffset(); offsets->begin = masm.currentOffset();
static_assert(BeforePushRetAddr == 0); static_assert(BeforePushRetAddr == 0);
// Subtract from SP first as SP must be aligned before offsetting. // Subtract from SP first as SP must be aligned before offsetting.
masm.Sub(sp, sp, 8); masm.Sub(sp, sp, 8 + sizeof(JSJitToWasmFrame));
masm.storePtr(lr, Address(masm.getStackPointer(), 0)); masm.Str(ARMRegister(lr, 64), MemOperand(sp, sizeof(JSJitToWasmFrame)));
masm.adjustFrame(8);
#else #else
// The x86/x64 call instruction pushes the return address. // The x86/x64 call instruction pushes the return address.
offsets->begin = masm.currentOffset(); offsets->begin = masm.currentOffset();
@ -833,27 +839,63 @@ void wasm::GenerateJitEntryPrologue(MacroAssembler& masm, Offsets* offsets) {
PushedRetAddr == masm.currentOffset() - offsets->begin); PushedRetAddr == masm.currentOffset() - offsets->begin);
// Save jit frame pointer, so unwinding from wasm to jit frames is trivial. // Save jit frame pointer, so unwinding from wasm to jit frames is trivial.
masm.moveStackPtrTo(FramePointer); #if defined(JS_CODEGEN_ARM64)
masm.Str(ARMRegister(FramePointer, 64),
MemOperand(sp, JSJitToWasmFrame::callerFPOffset()));
#else
static_assert(sizeof(JSJitToWasmFrame) == sizeof(uintptr_t));
masm.Push(FramePointer);
#endif
MOZ_ASSERT_IF(!masm.oom(), MOZ_ASSERT_IF(!masm.oom(),
SetJitEntryFP == masm.currentOffset() - offsets->begin); PushedFP == masm.currentOffset() - offsets->begin);
#if defined(JS_CODEGEN_ARM64)
masm.Add(ARMRegister(FramePointer, 64), sp,
JSJitToWasmFrame::callerFPOffset());
#else
masm.moveStackPtrTo(FramePointer);
#endif
MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - offsets->begin);
} }
masm.setFramePushed(0); masm.setFramePushed(sizeof(JSJitToWasmFrame));
} }
void wasm::GenerateJitEntryEpilogue(MacroAssembler& masm) { void wasm::GenerateJitEntryEpilogue(MacroAssembler& masm,
CallableOffsets* offsets) {
DebugOnly<uint32_t> poppedFP;
#ifdef JS_CODEGEN_ARM64 #ifdef JS_CODEGEN_ARM64
masm.loadPtr(Address(sp, 0), lr); RegisterOrSP sp = masm.getStackPointer();
masm.addToStackPtr(Imm32(8)); AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 5);
masm.loadPtr(Address(sp, sizeof(JSJitToWasmFrame)), lr);
masm.loadPtr(Address(sp, JSJitToWasmFrame::callerFPOffset()), FramePointer);
poppedFP = masm.currentOffset();
masm.addToStackPtr(Imm32(8 + sizeof(JSJitToWasmFrame)));
// Copy SP into PSP to enforce return-point invariants (SP == PSP). // Copy SP into PSP to enforce return-point invariants (SP == PSP).
// `addToStackPtr` won't sync them because SP is the active pointer here. // `addToStackPtr` won't sync them because SP is the active pointer here.
// For the same reason, we can't use initPseudoStackPtr to do the sync, so // For the same reason, we can't use initPseudoStackPtr to do the sync, so
// we have to do it "by hand". Omitting this causes many tests to segfault. // we have to do it "by hand". Omitting this causes many tests to segfault.
masm.moveStackPtrTo(PseudoStackPointer); masm.moveStackPtrTo(PseudoStackPointer);
masm.abiret();
offsets->ret = masm.currentOffset();
masm.Ret(ARMRegister(lr, 64));
masm.setFramePushed(0);
#else #else
// Forbid pools for the same reason as described in GenerateCallablePrologue.
# if defined(JS_CODEGEN_ARM)
AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 2);
# endif
static_assert(sizeof(JSJitToWasmFrame) == sizeof(uintptr_t));
masm.Pop(FramePointer);
poppedFP = masm.currentOffset();
offsets->ret = masm.currentOffset();
masm.ret(); masm.ret();
#endif #endif
MOZ_ASSERT_IF(!masm.oom(), PoppedFPJitEntry == offsets->ret - poppedFP);
} }
/*****************************************************************************/ /*****************************************************************************/
@ -967,7 +1009,8 @@ void ProfilingFrameIterator::initFromExitFP(const Frame* fp) {
case CodeRange::JitEntry: case CodeRange::JitEntry:
callerPC_ = nullptr; callerPC_ = nullptr;
callerFP_ = nullptr; callerFP_ = nullptr;
unwoundJitCallerFP_ = fp->rawCaller(); unwoundJitCallerFP_ =
fp->rawCaller() + JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
break; break;
case CodeRange::Function: case CodeRange::Function:
fp = fp->wasmCaller(); fp = fp->wasmCaller();
@ -1284,15 +1327,38 @@ bool js::wasm::StartUnwinding(const RegisterState& registers,
return false; return false;
} }
#endif #endif
fixedFP =
offsetFromEntry < SetJitEntryFP ? reinterpret_cast<uint8_t*>(sp) : fp;
fixedPC = nullptr;
// On the error return path, FP might be set to FailFP. Ignore these // On the error return path, FP might be set to FailFP. Ignore these
// transient frames. // transient frames.
if (intptr_t(fixedFP) == (FailFP & ~ExitOrJitEntryFPTag)) { if (intptr_t(fp) == (FailFP & ~ExitOrJitEntryFPTag)) {
return false; return false;
} }
// Set fixedFP to the address of the JitFrameLayout on the stack.
if (offsetFromEntry < PushedFP) {
// On ARM64, we allocate the JSJitToWasmFrame before storing the return
// address so it's already on the stack. On other architectures this
// happens as part of pushing FP.
#if defined(JS_CODEGEN_ARM64)
fixedFP = reinterpret_cast<uint8_t*>(sp) + sizeof(JSJitToWasmFrame);
#else
fixedFP = reinterpret_cast<uint8_t*>(sp);
#endif
} else if (offsetFromEntry < SetFP) {
fixedFP = reinterpret_cast<uint8_t*>(sp) + sizeof(JSJitToWasmFrame);
} else if (offsetInCode >= codeRange->ret() - PoppedFPJitEntry &&
offsetInCode <= codeRange->ret()) {
// We've popped FP but still have to return. Similar to the
// |offsetFromEntry < PushedRetAddr| case above, the JIT frame may be
// incomplete on some platforms if we already popped the return address,
// so we return false.
#if defined(JS_CODEGEN_ARM64)
return false;
#else
fixedFP = reinterpret_cast<uint8_t*>(sp);
#endif
} else {
fixedFP = fp + JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
}
fixedPC = nullptr;
break; break;
case CodeRange::Throw: case CodeRange::Throw:
// The throw stub executes a small number of instructions before popping // The throw stub executes a small number of instructions before popping
@ -1424,7 +1490,8 @@ void ProfilingFrameIterator::operator++() {
MOZ_ASSERT(codeRange_); MOZ_ASSERT(codeRange_);
if (codeRange_->isJitEntry()) { if (codeRange_->isJitEntry()) {
unwoundJitCallerFP_ = callerFP_; unwoundJitCallerFP_ =
callerFP_ + JSJitToWasmFrame::jitFrameLayoutOffsetFromFP();
MOZ_ASSERT(!done()); MOZ_ASSERT(!done());
return; return;
} }

View file

@ -225,8 +225,10 @@ void GenerateJitExitPrologue(jit::MacroAssembler& masm, unsigned framePushed,
void GenerateJitExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, void GenerateJitExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed,
CallableOffsets* offsets); CallableOffsets* offsets);
void GenerateJitEntryPrologue(jit::MacroAssembler& masm, Offsets* offsets); void GenerateJitEntryPrologue(jit::MacroAssembler& masm,
void GenerateJitEntryEpilogue(jit::MacroAssembler& masm); CallableOffsets* offsets);
void GenerateJitEntryEpilogue(jit::MacroAssembler& masm,
CallableOffsets* offsets);
void GenerateFunctionPrologue(jit::MacroAssembler& masm, void GenerateFunctionPrologue(jit::MacroAssembler& masm,
const TypeIdDesc& funcTypeId, const TypeIdDesc& funcTypeId,

View file

@ -992,7 +992,7 @@ static void GenerateBigIntInitialization(MacroAssembler& masm,
static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex, static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
const FuncExport& fe, const Maybe<ImmPtr>& funcPtr, const FuncExport& fe, const Maybe<ImmPtr>& funcPtr,
Offsets* offsets) { CallableOffsets* offsets) {
AutoCreatedBy acb(masm, "GenerateJitEntry"); AutoCreatedBy acb(masm, "GenerateJitEntry");
AssertExpectedSP(masm); AssertExpectedSP(masm);
@ -1004,6 +1004,11 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
// The jit caller has set up the following stack layout (sp grows to the // The jit caller has set up the following stack layout (sp grows to the
// left): // left):
// <-- retAddr | descriptor | callee | argc | this | arg1..N // <-- retAddr | descriptor | callee | argc | this | arg1..N
//
// GenerateJitEntryPrologue has additionally pushed a JSJitToWasmFrame storing
// the caller's frame pointer.
MOZ_ASSERT(masm.framePushed() == sizeof(JSJitToWasmFrame));
unsigned normalBytesNeeded = StackArgBytesForWasmABI(fe.funcType()); unsigned normalBytesNeeded = StackArgBytesForWasmABI(fe.funcType());
@ -1017,12 +1022,14 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
// Note the jit caller ensures the stack is aligned *after* the call // Note the jit caller ensures the stack is aligned *after* the call
// instruction. // instruction.
unsigned frameSize = StackDecrementForCall(WasmStackAlignment, unsigned frameSizeExclFP = StackDecrementForCall(
masm.framePushed(), bytesNeeded); WasmStackAlignment, masm.framePushed(), bytesNeeded);
// Reserve stack space for wasm ABI arguments, set up like this: // Reserve stack space for wasm ABI arguments, set up like this:
// <-- ABI args | padding // <-- ABI args | padding
masm.reserveStack(frameSize); masm.reserveStack(frameSizeExclFP);
uint32_t frameSize = masm.framePushed();
GenerateJitEntryLoadInstance(masm, frameSize); GenerateJitEntryLoadInstance(masm, frameSize);
@ -1295,7 +1302,7 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
masm.branchPtr(Assembler::Equal, FramePointer, Imm32(FailFP), &exception); masm.branchPtr(Assembler::Equal, FramePointer, Imm32(FailFP), &exception);
// Pop arguments. // Pop arguments.
masm.freeStack(frameSize); masm.freeStack(frameSizeExclFP);
GenPrintf(DebugChannel::Function, masm, "wasm-function[%d]; returns ", GenPrintf(DebugChannel::Function, masm, "wasm-function[%d]; returns ",
fe.funcIndex()); fe.funcIndex());
@ -1336,11 +1343,11 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
masm.jump(&done); masm.jump(&done);
masm.bind(&fail); masm.bind(&fail);
// Fixup the stack for the exception tail so that we can share it. // Fixup the stack for the exception tail so that we can share it.
masm.reserveStack(frameSize); masm.reserveStack(frameSizeExclFP);
masm.jump(&exception); masm.jump(&exception);
masm.bind(&done); masm.bind(&done);
// Un-fixup the stack for the benefit of the assertion below. // Un-fixup the stack for the benefit of the assertion below.
masm.setFramePushed(0); masm.setFramePushed(sizeof(JSJitToWasmFrame));
break; break;
} }
case ValType::Rtt: case ValType::Rtt:
@ -1356,7 +1363,7 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
case RefType::Extern: case RefType::Extern:
// Per comment above, the call may have clobbered the instance // Per comment above, the call may have clobbered the instance
// register, so reload since unboxing will need it. // register, so reload since unboxing will need it.
GenerateJitEntryLoadInstance(masm, /* frameSize */ 0); GenerateJitEntryLoadInstance(masm, sizeof(JSJitToWasmFrame));
UnboxAnyrefIntoValueReg(masm, InstanceReg, ReturnReg, UnboxAnyrefIntoValueReg(masm, InstanceReg, ReturnReg,
JSReturnOperand, WasmJitEntryReturnScratch); JSReturnOperand, WasmJitEntryReturnScratch);
break; break;
@ -1370,9 +1377,10 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
GenPrintf(DebugChannel::Function, masm, "\n"); GenPrintf(DebugChannel::Function, masm, "\n");
MOZ_ASSERT(masm.framePushed() == 0); MOZ_ASSERT(masm.framePushed() == sizeof(JSJitToWasmFrame));
AssertExpectedSP(masm); AssertExpectedSP(masm);
GenerateJitEntryEpilogue(masm); GenerateJitEntryEpilogue(masm, offsets);
MOZ_ASSERT(masm.framePushed() == 0); MOZ_ASSERT(masm.framePushed() == 0);
// Generate an OOL call to the C++ conversion path. // Generate an OOL call to the C++ conversion path.
@ -3011,10 +3019,12 @@ bool wasm::GenerateEntryStubs(MacroAssembler& masm, size_t funcExportIndex,
return true; return true;
} }
if (!GenerateJitEntry(masm, funcExportIndex, fe, callee, &offsets)) { CallableOffsets jitOffsets;
if (!GenerateJitEntry(masm, funcExportIndex, fe, callee, &jitOffsets)) {
return false; return false;
} }
if (!codeRanges->emplaceBack(CodeRange::JitEntry, fe.funcIndex(), offsets)) { if (!codeRanges->emplaceBack(CodeRange::JitEntry, fe.funcIndex(),
jitOffsets)) {
return false; return false;
} }