Backed out changeset a4d1e2c3ba9e (bug 1639153) for causing bustages in WasmFrameIter.cpp

CLOSED TREE
Mihai Alexandru Michis 2020-06-08 12:07:09 +03:00
Parent 0356cded2f
Commit 51f5a7ede6
17 changed files with 49 additions and 152 deletions

View file

@@ -295,11 +295,6 @@ static_assert(JitStackAlignment % SimdMemoryAlignment == 0,
static const uint32_t WasmStackAlignment = SimdMemoryAlignment;
static const uint32_t WasmTrapInstructionLength = 4;
-// The offsets are dynamically asserted during
-// code generation in the prologue/epilogue.
-static constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
-static constexpr uint32_t WasmCheckedTailEntryOffset = 16u;
static const Scale ScalePointer = TimesFour;
class Instruction;

View file

@@ -172,11 +172,6 @@ static_assert(CodeAlignment % SimdMemoryAlignment == 0,
static const uint32_t WasmStackAlignment = SimdMemoryAlignment;
static const uint32_t WasmTrapInstructionLength = 4;
-// The offsets are dynamically asserted during
-// code generation in the prologue/epilogue.
-static constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
-static constexpr uint32_t WasmCheckedTailEntryOffset = 32u;
class Assembler : public vixl::Assembler {
public:
Assembler() : vixl::Assembler() {}

View file

@@ -154,11 +154,6 @@ static constexpr uint32_t SimdMemoryAlignment = 8;
static constexpr uint32_t WasmStackAlignment = SimdMemoryAlignment;
static const uint32_t WasmTrapInstructionLength = 4;
-// The offsets are dynamically asserted during
-// code generation in the prologue/epilogue.
-static constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
-static constexpr uint32_t WasmCheckedTailEntryOffset = 16u;
static constexpr Scale ScalePointer = TimesFour;
class Assembler : public AssemblerMIPSShared {

View file

@@ -188,11 +188,6 @@ static constexpr uint32_t SimdMemoryAlignment = 16;
static constexpr uint32_t WasmStackAlignment = SimdMemoryAlignment;
static const uint32_t WasmTrapInstructionLength = 4;
-// The offsets are dynamically asserted during
-// code generation in the prologue/epilogue.
-static constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
-static constexpr uint32_t WasmCheckedTailEntryOffset = 16u;
static constexpr Scale ScalePointer = TimesEight;
class Assembler : public AssemblerMIPSShared {

View file

@@ -21,11 +21,6 @@ static const uint32_t SimdMemoryAlignment =
static const uint32_t WasmStackAlignment = 8;
static const uint32_t WasmTrapInstructionLength = 0;
-// The offsets are dynamically asserted during
-// code generation in the prologue/epilogue.
-static constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
-static constexpr uint32_t WasmCheckedTailEntryOffset = 1u;
class Registers {
public:
enum RegisterID {

View file

@@ -268,11 +268,6 @@ static_assert(JitStackAlignment % SimdMemoryAlignment == 0,
static constexpr uint32_t WasmStackAlignment = SimdMemoryAlignment;
static constexpr uint32_t WasmTrapInstructionLength = 2;
-// The offsets are dynamically asserted during
-// code generation in the prologue/epilogue.
-static constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
-static constexpr uint32_t WasmCheckedTailEntryOffset = 16u;
static constexpr Scale ScalePointer = TimesEight;
} // namespace jit

View file

@@ -179,11 +179,6 @@ static_assert(JitStackAlignment % SimdMemoryAlignment == 0,
static constexpr uint32_t WasmStackAlignment = SimdMemoryAlignment;
static constexpr uint32_t WasmTrapInstructionLength = 2;
-// The offsets are dynamically asserted during
-// code generation in the prologue/epilogue.
-static constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
-static constexpr uint32_t WasmCheckedTailEntryOffset = 16u;
struct ImmTag : public Imm32 {
explicit ImmTag(JSValueTag mask) : Imm32(int32_t(mask)) {}
};
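
The seven hunks above delete the same pair of constants from each per-architecture Assembler header. As a rough standalone illustration (not code from the tree), the invariant those constants had to satisfy is the one checked by the static_asserts deleted from GenerateFunctionPrologue further down, assuming x64-style 16-byte code alignment:

    #include <cstdint>

    // Hypothetical sketch: CodeAlignment and the two offsets mirror the
    // x64 values in the patch being backed out.
    constexpr uint32_t CodeAlignment = 16;
    constexpr uint32_t WasmCheckedCallEntryOffset = 0u;
    constexpr uint32_t WasmCheckedTailEntryOffset = 16u;

    // Both entries must land on code-alignment boundaries so the prologue
    // generator can pad up to them with nops.
    static_assert(WasmCheckedCallEntryOffset % CodeAlignment == 0,
                  "code aligned");
    static_assert(WasmCheckedTailEntryOffset % CodeAlignment == 0,
                  "code aligned");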

View file

@@ -685,7 +685,7 @@ bool LazyStubTier::createMany(const Uint32Vector& funcExportIndices,
fe.funcType().temporarilyUnsupportedReftypeForEntry();
numExpectedRanges += (unsupportedType ? 1 : 2);
void* calleePtr =
-moduleSegmentBase + metadata.codeRange(fe).funcUncheckedCallEntry();
+moduleSegmentBase + metadata.codeRange(fe).funcNormalEntry();
Maybe<ImmPtr> callee;
callee.emplace(calleePtr, ImmPtr::NoCheckToken());
if (!GenerateEntryStubs(masm, funcExportIndex, fe, callee,

View file

@@ -223,7 +223,7 @@ static bool GenerateCraneliftCode(WasmMacroAssembler& masm,
#ifdef DEBUG
// Check code offsets.
-MOZ_ASSERT(offset.value() >= offsets->uncheckedCallEntry);
+MOZ_ASSERT(offset.value() >= offsets->normalEntry);
MOZ_ASSERT(offset.value() < offsets->ret);
MOZ_ASSERT(metadata.module_bytecode_offset != 0);

View file

@@ -556,91 +556,50 @@ static void GenerateCallableEpilogue(MacroAssembler& masm, unsigned framePushed,
MOZ_ASSERT_IF(!masm.oom(), PoppedTLSReg == *ret - poppedTlsReg);
}
-static void EnsureOffset(MacroAssembler& masm, uint32_t base,
-uint32_t targetOffset) {
-MOZ_ASSERT(targetOffset % CodeAlignment == 0);
-MOZ_ASSERT(masm.currentOffset() - base <= targetOffset);
-while (masm.currentOffset() - base < targetOffset) {
-masm.nopAlign(CodeAlignment);
-if (masm.currentOffset() - base < targetOffset) {
-masm.nop();
-}
-}
-MOZ_ASSERT(masm.currentOffset() - base == targetOffset);
-}
void wasm::GenerateFunctionPrologue(MacroAssembler& masm,
const FuncTypeIdDesc& funcTypeId,
const Maybe<uint32_t>& tier1FuncIndex,
FuncOffsets* offsets) {
-// These constants reflect statically-determined offsets
-// between a function's checked call entry and a tail's entry.
-static_assert(WasmCheckedCallEntryOffset % CodeAlignment == 0,
-"code aligned");
-static_assert(WasmCheckedTailEntryOffset % CodeAlignment == 0,
-"code aligned");
// Flush pending pools so they do not get dumped between the 'begin' and
-// 'uncheckedCallEntry' offsets since the difference must be less than
-// UINT8_MAX to be stored in CodeRange::funcbeginToUncheckedCallEntry_.
+// 'normalEntry' offsets since the difference must be less than UINT8_MAX
+// to be stored in CodeRange::funcBeginToNormalEntry_.
masm.flushBuffer();
masm.haltingAlign(CodeAlignment);
-// We are going to generate the next code layout:
-// ---------------------------------------------
-// checked call entry: callable prologue
-// checked tail entry: check signature
-// jump functionBody
-// unchecked call entry: callable prologue
-// functionBody
-// -----------------------------------------------
-// checked call entry - used for call_indirect when we have to check the
-// signature.
-// checked tail entry - used by trampolines which already had pushed Frame
-// on the callees behalf.
-// unchecked call entry - used for regular direct same-instance calls.
+// The table entry falls through into the normal entry after it has checked
+// the signature.
+Label normalEntry;
-Label functionBody;
-// Generate checked call entry. The BytecodeOffset of the trap is fixed up to
-// be the bytecode offset of the callsite by JitActivation::startWasmTrap.
+// Generate table entry. The BytecodeOffset of the trap is fixed up to be
+// the bytecode offset of the callsite by JitActivation::startWasmTrap.
offsets->begin = masm.currentOffset();
-MOZ_ASSERT(masm.currentOffset() - offsets->begin ==
-WasmCheckedCallEntryOffset);
-uint32_t dummy;
-GenerateCallablePrologue(masm, &dummy);
-EnsureOffset(masm, offsets->begin, WasmCheckedTailEntryOffset);
switch (funcTypeId.kind()) {
case FuncTypeIdDescKind::Global: {
Register scratch = WasmTableCallScratchReg0;
masm.loadWasmGlobalPtr(funcTypeId.globalDataOffset(), scratch);
masm.branchPtr(Assembler::Condition::Equal, WasmTableCallSigReg, scratch,
-&functionBody);
+&normalEntry);
masm.wasmTrap(Trap::IndirectCallBadSig, BytecodeOffset(0));
break;
}
case FuncTypeIdDescKind::Immediate: {
masm.branch32(Assembler::Condition::Equal, WasmTableCallSigReg,
-Imm32(funcTypeId.immediate()), &functionBody);
+Imm32(funcTypeId.immediate()), &normalEntry);
masm.wasmTrap(Trap::IndirectCallBadSig, BytecodeOffset(0));
break;
}
case FuncTypeIdDescKind::None:
-masm.jump(&functionBody);
break;
}
-// The checked entries might have generated a small constant pool in case of
+// The table entry might have generated a small constant pool in case of
// immediate comparison.
masm.flushBuffer();
-// Generate unchecked call entry:
+// Generate normal entry:
masm.nopAlign(CodeAlignment);
-GenerateCallablePrologue(masm, &offsets->uncheckedCallEntry);
-masm.bind(&functionBody);
+masm.bind(&normalEntry);
+GenerateCallablePrologue(masm, &offsets->normalEntry);
// Tiering works as follows. The Code owns a jumpTable, which has one
// pointer-sized element for each function up to the largest funcIndex in
@@ -916,22 +875,6 @@ static void AssertCallerFP(DebugOnly<bool> fpWasTagged, Frame* const fp,
reinterpret_cast<Frame*>(sp)->callerFP);
}
-static bool isSignatureCheckFail(uint32_t offsetInCode,
-const CodeRange* codeRange) {
-if (!codeRange->isFunction()) {
-return false;
-}
-// checked call entry: 1. push Frame
-// 2. set FP
-// 3. signature check <--- check if we are here.
-// 4. jump 7
-// unchecked call entry: 5. push Frame
-// 6. set FP
-// 7. function's code
-return offsetInCode < codeRange->funcUncheckedCallEntry() &&
-(offsetInCode - codeRange->funcCheckedCallEntry()) > SetFP;
-}
bool js::wasm::StartUnwinding(const RegisterState& registers,
UnwindState* unwindState, bool* unwoundCaller) {
// Shorthands.
@@ -973,18 +916,17 @@ bool js::wasm::StartUnwinding(const RegisterState& registers,
MOZ_ASSERT(offsetInCode >= codeRange->begin());
MOZ_ASSERT(offsetInCode < codeRange->end());
-// Compute the offset of the pc from the (unchecked call) entry of the code
-// range. The checked call entry and the unchecked call entry have common
-// prefix, so pc before signature check in the checked call entry is
-// equivalent to the pc of the unchecked-call-entry. Thus, we can simplify the
-// below case analysis by redirecting all pc-in-checked-call-entry before
-// signature check cases to the pc-at-unchecked-call-entry case.
+// Compute the offset of the pc from the (normal) entry of the code range.
+// The stack state of the pc for the entire table-entry is equivalent to
+// that of the first pc of the normal-entry. Thus, we can simplify the below
+// case analysis by redirecting all pc-in-table-entry cases to the
+// pc-at-normal-entry case.
uint32_t offsetFromEntry;
if (codeRange->isFunction()) {
-if (offsetInCode < codeRange->funcUncheckedCallEntry()) {
-offsetFromEntry = offsetInCode - codeRange->funcCheckedCallEntry();
+if (offsetInCode < codeRange->funcNormalEntry()) {
+offsetFromEntry = 0;
} else {
-offsetFromEntry = offsetInCode - codeRange->funcUncheckedCallEntry();
+offsetFromEntry = offsetInCode - codeRange->funcNormalEntry();
}
} else {
offsetFromEntry = offsetInCode - codeRange->begin();
@@ -1102,15 +1044,6 @@ bool js::wasm::StartUnwinding(const RegisterState& registers,
return false;
}
}
-if (isSignatureCheckFail(offsetInCode, codeRange)) {
-// Frame have been pushed and FP has been set.
-fixedFP = fp->callerFP;
-fixedPC = fp->returnAddress;
-AssertMatchesCallSite(fixedPC, fixedFP);
-break;
-}
// Not in the prologue/epilogue.
fixedPC = pc;
fixedFP = fp;
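
The unwinder hunks above are the heart of the backout: with the checked entries gone, any pc inside the table entry can be treated as sitting at the very start of the normal entry, and the separate signature-check-failure case disappears. A minimal sketch of the restored mapping, with plain parameters standing in for the CodeRange accessors:

    #include <cstdint>

    // Sketch of the restored offsetFromEntry computation; normalEntry and
    // rangeBegin stand in for codeRange->funcNormalEntry() and
    // codeRange->begin().
    uint32_t OffsetFromEntry(uint32_t offsetInCode, bool isFunction,
                             uint32_t normalEntry, uint32_t rangeBegin) {
      if (!isFunction) {
        return offsetInCode - rangeBegin;
      }
      // Every pc inside the table entry behaves like the first pc of the
      // normal entry, so it maps to offset 0.
      return offsetInCode < normalEntry ? 0 : offsetInCode - normalEntry;
    }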

View file

@@ -519,7 +519,7 @@ bool ModuleGenerator::linkCallSites() {
case CallSiteDesc::Func: {
if (funcIsCompiled(target.funcIndex())) {
uint32_t calleeOffset =
-funcCodeRange(target.funcIndex()).funcUncheckedCallEntry();
+funcCodeRange(target.funcIndex()).funcNormalEntry();
if (InRange(callerOffset, calleeOffset)) {
masm_.patchCall(callerOffset, calleeOffset);
break;
@@ -945,7 +945,7 @@ bool ModuleGenerator::finishCodegen() {
for (CallFarJump far : callFarJumps_) {
masm_.patchFarJump(far.jump,
-funcCodeRange(far.funcIndex).funcUncheckedCallEntry());
+funcCodeRange(far.funcIndex).funcNormalEntry());
}
for (CodeOffset farJump : debugTrapFarJumps_) {
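
Both hunks in this file retarget link-time patching at the normal entry: callees within reach of the caller are patched to a direct call, and everything else is routed through a far-jump island aimed at the same entry. A rough sketch of the reachability test that drives the choice (the signed 32-bit displacement here is an assumption, not taken from the tree):

    #include <cstdint>

    // Hypothetical stand-in for the InRange check used by linkCallSites.
    bool InRange(uint32_t callerOffset, uint32_t calleeOffset) {
      // Assume a call instruction with a signed 32-bit displacement.
      int64_t delta = int64_t(calleeOffset) - int64_t(callerOffset);
      return delta >= INT32_MIN && delta <= INT32_MAX;
    }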

View file

@@ -1021,7 +1021,7 @@ bool Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
// This element is a wasm function imported from another
// instance. To preserve the === function identity required by
// the JS embedding spec, we must set the element to the
-// imported function's underlying CodeRange.funcCheckedCallEntry and
+// imported function's underlying CodeRange.funcTableEntry and
// Instance so that future Table.get()s produce the same
// function object as was imported.
WasmInstanceObject* calleeInstanceObj =
@@ -1031,14 +1031,13 @@ bool Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
const CodeRange& calleeCodeRange =
calleeInstanceObj->getExportedFunctionCodeRange(fun, calleeTier);
void* code = calleeInstance.codeBase(calleeTier) +
-calleeCodeRange.funcCheckedCallEntry();
+calleeCodeRange.funcTableEntry();
table.setFuncRef(dstOffset + i, code, &calleeInstance);
continue;
}
}
-void* code =
-codeBaseTier +
-codeRanges[funcToCodeRange[funcIndex]].funcCheckedCallEntry();
+void* code = codeBaseTier +
+codeRanges[funcToCodeRange[funcIndex]].funcTableEntry();
table.setFuncRef(dstOffset + i, code, this);
}
}
@@ -1473,8 +1472,8 @@ bool Instance::init(JSContext* cx, const JSFunctionVector& funcImports,
calleeInstanceObj->getExportedFunctionCodeRange(f, calleeTier);
import.tls = calleeInstance.tlsData();
import.realm = f->realm();
-import.code = calleeInstance.codeBase(calleeTier) +
-codeRange.funcUncheckedCallEntry();
+import.code =
+calleeInstance.codeBase(calleeTier) + codeRange.funcNormalEntry();
import.jitScript = nullptr;
} else if (void* thunk = MaybeGetBuiltinThunk(f, fi.funcType())) {
import.tls = tlsData();
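
The Instance hunks restore the rule for choosing a code pointer's entry point: slots written into a Table use the signature-checking table entry, while imports resolved at instantiation time jump straight to the normal entry. A small illustration of that rule, with hypothetical names:

    #include <cstdint>

    enum class CallPath { TableIndirect, DirectImport };

    // Hypothetical helper: call_indirect must verify the callee's signature
    // dynamically, so table slots hold the table entry; direct imports were
    // type-checked at instantiation and may skip the check.
    uint32_t EntryOffsetFor(CallPath path, uint32_t tableEntry,
                            uint32_t normalEntry) {
      return path == CallPath::TableIndirect ? tableEntry : normalEntry;
    }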

View file

@@ -468,7 +468,7 @@ bool Module::extractCode(JSContext* cx, Tier tier,
return false;
}
-value.setNumber((uint32_t)p.funcUncheckedCallEntry());
+value.setNumber((uint32_t)p.funcNormalEntry());
if (!JS_DefineProperty(cx, segment, "funcBodyBegin", value,
JSPROP_ENUMERATE)) {
return false;

View file

@@ -1532,7 +1532,7 @@ void wasm::GenerateDirectCallFromJit(MacroAssembler& masm, const FuncExport& fe,
const CodeTier& codeTier = inst.code().codeTier(inst.code().bestTier());
const MetadataTier& metadata = codeTier.metadata();
const CodeRange& codeRange = metadata.codeRange(fe);
-void* callee = codeTier.segment().base() + codeRange.funcUncheckedCallEntry();
+void* callee = codeTier.segment().base() + codeRange.funcNormalEntry();
masm.assertStackAlignment(WasmStackAlignment);
masm.callJit(ImmPtr(callee));

View file

@@ -209,7 +209,7 @@ void Table::fillFuncRef(uint32_t index, uint32_t fillCount, FuncRef ref,
const MetadataTier& metadata = instance.metadata(tier);
const CodeRange& codeRange =
metadata.codeRange(metadata.lookupFuncExport(funcIndex));
-void* code = instance.codeBase(tier) + codeRange.funcCheckedCallEntry();
+void* code = instance.codeBase(tier) + codeRange.funcTableEntry();
for (uint32_t i = index, end = index + fillCount; i != end; i++) {
setFuncRef(i, code, &instance);
}

View file

@@ -848,7 +848,7 @@ CodeRange::CodeRange(Kind kind, uint32_t funcIndex, Offsets offsets)
: begin_(offsets.begin), ret_(0), end_(offsets.end), kind_(kind) {
u.funcIndex_ = funcIndex;
u.func.lineOrBytecode_ = 0;
-u.func.beginToUncheckedCallEntry_ = 0;
+u.func.beginToNormalEntry_ = 0;
u.func.beginToTierEntry_ = 0;
MOZ_ASSERT(isEntry());
MOZ_ASSERT(begin_ <= end_);
@@ -877,7 +877,7 @@ CodeRange::CodeRange(Kind kind, uint32_t funcIndex, CallableOffsets offsets)
MOZ_ASSERT(ret_ < end_);
u.funcIndex_ = funcIndex;
u.func.lineOrBytecode_ = 0;
-u.func.beginToUncheckedCallEntry_ = 0;
+u.func.beginToNormalEntry_ = 0;
u.func.beginToTierEntry_ = 0;
}
@@ -904,11 +904,11 @@ CodeRange::CodeRange(uint32_t funcIndex, uint32_t funcLineOrBytecode,
kind_(Function) {
MOZ_ASSERT(begin_ < ret_);
MOZ_ASSERT(ret_ < end_);
-MOZ_ASSERT(offsets.uncheckedCallEntry - begin_ <= UINT8_MAX);
+MOZ_ASSERT(offsets.normalEntry - begin_ <= UINT8_MAX);
MOZ_ASSERT(offsets.tierEntry - begin_ <= UINT8_MAX);
u.funcIndex_ = funcIndex;
u.func.lineOrBytecode_ = funcLineOrBytecode;
-u.func.beginToUncheckedCallEntry_ = offsets.uncheckedCallEntry - begin_;
+u.func.beginToNormalEntry_ = offsets.normalEntry - begin_;
u.func.beginToTierEntry_ = offsets.tierEntry - begin_;
}

View file

@@ -2321,14 +2321,14 @@ struct JitExitOffsets : CallableOffsets {
struct FuncOffsets : CallableOffsets {
MOZ_IMPLICIT FuncOffsets()
-: CallableOffsets(), uncheckedCallEntry(0), tierEntry(0) {}
+: CallableOffsets(), normalEntry(0), tierEntry(0) {}
-// Function CodeRanges have a checked call entry which takes an extra
-// signature argument which is checked against the callee's signature before
-// falling through to the normal prologue. The checked call entry is thus at
-// the beginning of the CodeRange and the unchecked call entry is at some
-// offset after the checked call entry.
-uint32_t uncheckedCallEntry;
+// Function CodeRanges have a table entry which takes an extra signature
+// argument which is checked against the callee's signature before falling
+// through to the normal prologue. The table entry is thus at the beginning
+// of the CodeRange and the normal entry is at some offset after the table
+// entry.
+uint32_t normalEntry;
// The tierEntry is the point within a function to which the patching code
// within a Tier-1 function jumps. It could be the instruction following
@@ -2369,7 +2369,7 @@ class CodeRange {
union {
struct {
uint32_t lineOrBytecode_;
-uint8_t beginToUncheckedCallEntry_;
+uint8_t beginToNormalEntry_;
uint8_t beginToTierEntry_;
} func;
struct {
@@ -2456,13 +2456,13 @@ class CodeRange {
// known signature) and one for table calls (which involves dynamic
// signature checking).
-uint32_t funcCheckedCallEntry() const {
+uint32_t funcTableEntry() const {
MOZ_ASSERT(isFunction());
return begin_;
}
-uint32_t funcUncheckedCallEntry() const {
+uint32_t funcNormalEntry() const {
MOZ_ASSERT(isFunction());
-return begin_ + u.func.beginToUncheckedCallEntry_;
+return begin_ + u.func.beginToNormalEntry_;
}
uint32_t funcTierEntry() const {
MOZ_ASSERT(isFunction());
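
These accessors restore the compact encoding that the constructor asserts above rely on: the normal entry is stored as a byte-sized delta from begin_ rather than as an absolute offset. A self-contained simplification (not the real CodeRange) of the scheme:

    #include <cassert>
    #include <cstdint>

    // Simplified stand-in for CodeRange's entry-offset encoding.
    struct MiniCodeRange {
      uint32_t begin_;
      uint8_t beginToNormalEntry_;  // delta from begin_, not absolute

      MiniCodeRange(uint32_t begin, uint32_t normalEntry) : begin_(begin) {
        // Mirrors MOZ_ASSERT(offsets.normalEntry - begin_ <= UINT8_MAX).
        assert(normalEntry - begin <= UINT8_MAX);
        beginToNormalEntry_ = uint8_t(normalEntry - begin);
      }

      uint32_t funcTableEntry() const { return begin_; }
      uint32_t funcNormalEntry() const { return begin_ + beginToNormalEntry_; }
    };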