Bug 1268024: Rename MAsmJSHeapAccess to MWasmMemoryAccess; r=luke

MozReview-Commit-ID: 1N1UlhhkFSu

--HG--
extra : rebase_source : 06a02edf8ff1536e52a093f7604f5ba3a73920e3
Benjamin Bouvier 2016-06-17 17:19:42 +02:00
Parent e804377707
Commit 1539666ef1
10 changed files with 65 additions and 65 deletions

View file

@@ -2691,7 +2691,7 @@ class BaseCompiler
 // Cloned from MIRGraph.cpp, merge somehow?
-bool needsBoundsCheckBranch(const MAsmJSHeapAccess& access) const {
+bool needsBoundsCheckBranch(const MWasmMemoryAccess& access) const {
 // A heap access needs a bounds-check branch if we're not relying on signal
 // handlers to catch errors, and if it's not proven to be within bounds.
 // We use signal-handlers on x64, but on x86 there isn't enough address
@@ -2719,7 +2719,7 @@ class BaseCompiler
 }
 #endif
-void loadHeap(const MAsmJSHeapAccess& access, RegI32 ptr, AnyReg dest) {
+void loadHeap(const MWasmMemoryAccess& access, RegI32 ptr, AnyReg dest) {
 #if defined(JS_CODEGEN_X64)
 // CodeGeneratorX64::visitAsmJSLoadHeap()
@@ -2750,7 +2750,7 @@ class BaseCompiler
 #endif
 }
-void storeHeap(const MAsmJSHeapAccess& access, RegI32 ptr, AnyReg src) {
+void storeHeap(const MWasmMemoryAccess& access, RegI32 ptr, AnyReg src) {
 #if defined(JS_CODEGEN_X64)
 // CodeGeneratorX64::visitAsmJSStoreHeap()
@@ -5026,7 +5026,7 @@ BaseCompiler::emitLoad(ValType type, Scalar::Type viewType)
 // TODO / OPTIMIZE: Disable bounds checking on constant accesses
 // below the minimum heap length.
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setOffset(addr.offset);
 access.setAlign(addr.align);
@@ -5074,7 +5074,7 @@ BaseCompiler::emitStore(ValType resultType, Scalar::Type viewType)
 // TODO / OPTIMIZE: Disable bounds checking on constant accesses
 // below the minimum heap length.
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setOffset(addr.offset);
 access.setAlign(addr.align);
@@ -5349,7 +5349,7 @@ BaseCompiler::emitStoreWithCoercion(ValType resultType, Scalar::Type viewType)
 // TODO / OPTIMIZE: Disable bounds checking on constant accesses
 // below the minimum heap length.
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setOffset(addr.offset);
 access.setAlign(addr.align);
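
The bounds-check policy quoted in the first hunk above reduces to a small predicate. A minimal sketch of that decision, with hypothetical stand-ins for what the real code derives from the build target and from analysis of the access:

#include <cstdio>

// Hypothetical input: the real code gets this from analysis of the access.
struct AccessInfo {
    bool provenInBounds;  // statically known to be within the heap bounds
};

// Emit an explicit bounds-check branch only when the platform cannot rely
// on signal handlers (x64 reserves enough address space for that; x86
// cannot) and the access is not already proven in bounds.
static bool needsBoundsCheckBranch(const AccessInfo& access, bool usesSignalHandlers)
{
    return !usesSignalHandlers && !access.provenInBounds;
}

int main()
{
    AccessInfo access{false};
    std::printf("x64-style (signal handlers): %d\n", needsBoundsCheckBranch(access, true));
    std::printf("x86-style (explicit branch): %d\n", needsBoundsCheckBranch(access, false));
    return 0;
}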

View file

@@ -589,7 +589,7 @@ class FunctionCompiler
 curBlock_->setSlot(info().localSlot(slot), def);
 }
-MDefinition* loadHeap(MDefinition* base, const MAsmJSHeapAccess& access)
+MDefinition* loadHeap(MDefinition* base, const MWasmMemoryAccess& access)
 {
 if (inDeadCode())
 return nullptr;
@@ -600,7 +600,7 @@ class FunctionCompiler
 return load;
 }
-MDefinition* loadSimdHeap(MDefinition* base, const MAsmJSHeapAccess& access)
+MDefinition* loadSimdHeap(MDefinition* base, const MWasmMemoryAccess& access)
 {
 if (inDeadCode())
 return nullptr;
@@ -612,7 +612,7 @@ class FunctionCompiler
 return load;
 }
-void storeHeap(MDefinition* base, const MAsmJSHeapAccess& access, MDefinition* v)
+void storeHeap(MDefinition* base, const MWasmMemoryAccess& access, MDefinition* v)
 {
 if (inDeadCode())
 return;
@@ -623,7 +623,7 @@ class FunctionCompiler
 curBlock_->add(store);
 }
-void storeSimdHeap(MDefinition* base, const MAsmJSHeapAccess& access, MDefinition* v)
+void storeSimdHeap(MDefinition* base, const MWasmMemoryAccess& access, MDefinition* v)
 {
 if (inDeadCode())
 return;
@@ -634,7 +634,7 @@ class FunctionCompiler
 curBlock_->add(store);
 }
-MDefinition* atomicLoadHeap(MDefinition* base, const MAsmJSHeapAccess& access)
+MDefinition* atomicLoadHeap(MDefinition* base, const MWasmMemoryAccess& access)
 {
 if (inDeadCode())
 return nullptr;
@@ -644,7 +644,7 @@ class FunctionCompiler
 return load;
 }
-void atomicStoreHeap(MDefinition* base, const MAsmJSHeapAccess& access,
+void atomicStoreHeap(MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* v)
 {
 if (inDeadCode())
@@ -654,7 +654,7 @@ class FunctionCompiler
 curBlock_->add(store);
 }
-MDefinition* atomicCompareExchangeHeap(MDefinition* base, const MAsmJSHeapAccess& access,
+MDefinition* atomicCompareExchangeHeap(MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* oldv, MDefinition* newv)
 {
 if (inDeadCode())
@@ -666,7 +666,7 @@ class FunctionCompiler
 return cas;
 }
-MDefinition* atomicExchangeHeap(MDefinition* base, const MAsmJSHeapAccess& access,
+MDefinition* atomicExchangeHeap(MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* value)
 {
 if (inDeadCode())
@@ -679,7 +679,7 @@ class FunctionCompiler
 }
 MDefinition* atomicBinopHeap(js::jit::AtomicOp op,
-MDefinition* base, const MAsmJSHeapAccess& access,
+MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* v)
 {
 if (inDeadCode())
@@ -2080,8 +2080,8 @@ enum class IsAtomic {
 };
 static bool
-SetHeapAccessOffset(FunctionCompiler& f, uint32_t offset, MAsmJSHeapAccess* access, MDefinition** base,
-IsAtomic atomic = IsAtomic::No)
+SetHeapAccessOffset(FunctionCompiler& f, uint32_t offset, MWasmMemoryAccess* access,
+MDefinition** base, IsAtomic atomic = IsAtomic::No)
 {
 // TODO Remove this after implementing non-wraparound offset semantics.
 uint32_t endOffset = offset + access->byteSize();
@@ -2107,7 +2107,7 @@ EmitLoad(FunctionCompiler& f, ValType type, Scalar::Type viewType)
 if (!f.iter().readLoad(type, Scalar::byteSize(viewType), &addr))
 return false;
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2126,7 +2126,7 @@ EmitStore(FunctionCompiler& f, ValType resultType, Scalar::Type viewType)
 if (!f.iter().readStore(resultType, Scalar::byteSize(viewType), &addr, &value))
 return false;
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2152,7 +2152,7 @@ EmitStoreWithCoercion(FunctionCompiler& f, ValType resultType, Scalar::Type view
 else
 MOZ_CRASH("unexpected coerced store");
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2229,7 +2229,7 @@ EmitAtomicsLoad(FunctionCompiler& f)
 if (!f.iter().readAtomicLoad(&addr, &viewType))
 return false;
-MAsmJSHeapAccess access(viewType, 0, MembarBeforeLoad, MembarAfterLoad);
+MWasmMemoryAccess access(viewType, 0, MembarBeforeLoad, MembarAfterLoad);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2249,7 +2249,7 @@ EmitAtomicsStore(FunctionCompiler& f)
 if (!f.iter().readAtomicStore(&addr, &viewType, &value))
 return false;
-MAsmJSHeapAccess access(viewType, 0, MembarBeforeStore, MembarAfterStore);
+MWasmMemoryAccess access(viewType, 0, MembarBeforeStore, MembarAfterStore);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2271,7 +2271,7 @@ EmitAtomicsBinOp(FunctionCompiler& f)
 if (!f.iter().readAtomicBinOp(&addr, &viewType, &op, &value))
 return false;
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2292,7 +2292,7 @@ EmitAtomicsCompareExchange(FunctionCompiler& f)
 if (!f.iter().readAtomicCompareExchange(&addr, &viewType, &oldValue, &newValue))
 return false;
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2312,7 +2312,7 @@ EmitAtomicsExchange(FunctionCompiler& f)
 if (!f.iter().readAtomicExchange(&addr, &viewType, &value))
 return false;
-MAsmJSHeapAccess access(viewType);
+MWasmMemoryAccess access(viewType);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2540,7 +2540,7 @@ EmitSimdLoad(FunctionCompiler& f, ValType resultType, unsigned numElems)
 if (!f.iter().readLoad(resultType, Scalar::byteSize(viewType), &addr))
 return false;
-MAsmJSHeapAccess access(viewType, numElems);
+MWasmMemoryAccess access(viewType, numElems);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;
@@ -2565,7 +2565,7 @@ EmitSimdStore(FunctionCompiler& f, ValType resultType, unsigned numElems)
 if (!f.iter().readStore(resultType, Scalar::byteSize(viewType), &addr, &value))
 return false;
-MAsmJSHeapAccess access(viewType, numElems);
+MWasmMemoryAccess access(viewType, numElems);
 access.setAlign(addr.align);
 MDefinition* base = addr.base;

View file

@@ -100,9 +100,9 @@ AnalyzeLsh(TempAllocator& alloc, MLsh* lsh)
 last->block()->insertAfter(last, eaddr);
 }
-template<typename MAsmJSHeapAccessType>
+template<typename MWasmMemoryAccessType>
 bool
-EffectiveAddressAnalysis::tryAddDisplacement(MAsmJSHeapAccessType* ins, int32_t o)
+EffectiveAddressAnalysis::tryAddDisplacement(MWasmMemoryAccessType* ins, int32_t o)
 {
 // Compute the new offset. Check for overflow.
 uint32_t oldOffset = ins->offset();
@@ -127,9 +127,9 @@ EffectiveAddressAnalysis::tryAddDisplacement(MAsmJSHeapAccessType* ins, int32_t
 return true;
 }
-template<typename MAsmJSHeapAccessType>
+template<typename MWasmMemoryAccessType>
 void
-EffectiveAddressAnalysis::analyzeAsmHeapAccess(MAsmJSHeapAccessType* ins)
+EffectiveAddressAnalysis::analyzeAsmHeapAccess(MWasmMemoryAccessType* ins)
 {
 MDefinition* base = ins->base();
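
The "// Compute the new offset. Check for overflow." step in tryAddDisplacement above can be sketched as follows. This is an assumed, simplified shape, not the full pass, which also validates the folded offset against MIRGenerator::foldableOffsetRange():

#include <cstdint>

// Simplified sketch: fold a constant displacement `o` into a 32-bit access
// offset, refusing when the sum cannot be represented in the offset field.
static bool tryAddDisplacement(uint32_t* offset, int32_t o)
{
    // Widen to 64 bits so the addition itself cannot wrap.
    int64_t newOffset = int64_t(*offset) + int64_t(o);
    if (newOffset < 0 || newOffset > int64_t(UINT32_MAX))
        return false;  // would not fit; leave the access unchanged
    *offset = uint32_t(newOffset);
    return true;
}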

View file

@@ -19,11 +19,11 @@ class EffectiveAddressAnalysis
 MIRGenerator* mir_;
 MIRGraph& graph_;
-template<typename MAsmJSHeapAccessType>
-MOZ_MUST_USE bool tryAddDisplacement(MAsmJSHeapAccessType* ins, int32_t o);
+template<typename MWasmMemoryAccessType>
+MOZ_MUST_USE bool tryAddDisplacement(MWasmMemoryAccessType* ins, int32_t o);
-template<typename MAsmJSHeapAccessType>
-void analyzeAsmHeapAccess(MAsmJSHeapAccessType* ins);
+template<typename MWasmMemoryAccessType>
+void analyzeAsmHeapAccess(MWasmMemoryAccessType* ins);
 public:
 EffectiveAddressAnalysis(MIRGenerator* mir, MIRGraph& graph)

View file

@@ -12912,7 +12912,7 @@ class MAsmJSNeg
 }
 };
-class MAsmJSHeapAccess
+class MWasmMemoryAccess
 {
 uint32_t offset_;
 uint32_t align_;
@@ -12923,9 +12923,9 @@ class MAsmJSHeapAccess
 MemoryBarrierBits barrierAfter_;
 public:
-explicit MAsmJSHeapAccess(Scalar::Type accessType, unsigned numSimdElems = 0,
-MemoryBarrierBits barrierBefore = MembarNobits,
-MemoryBarrierBits barrierAfter = MembarNobits)
+explicit MWasmMemoryAccess(Scalar::Type accessType, unsigned numSimdElems = 0,
+MemoryBarrierBits barrierBefore = MembarNobits,
+MemoryBarrierBits barrierAfter = MembarNobits)
 : offset_(0),
 align_(Scalar::byteSize(accessType)),
 accessType_(accessType),
@@ -12958,12 +12958,12 @@ class MAsmJSHeapAccess
 class MAsmJSLoadHeap
 : public MUnaryInstruction,
-public MAsmJSHeapAccess,
+public MWasmMemoryAccess,
 public NoTypePolicy::Data
 {
-MAsmJSLoadHeap(MDefinition* base, const MAsmJSHeapAccess& access)
+MAsmJSLoadHeap(MDefinition* base, const MWasmMemoryAccess& access)
 : MUnaryInstruction(base),
-MAsmJSHeapAccess(access)
+MWasmMemoryAccess(access)
 {
 if (access.barrierBefore()|access.barrierAfter())
 setGuard(); // Not removable
@@ -12995,13 +12995,13 @@ class MAsmJSLoadHeap
 class MAsmJSStoreHeap
 : public MBinaryInstruction,
-public MAsmJSHeapAccess,
+public MWasmMemoryAccess,
 public NoTypePolicy::Data
 {
-MAsmJSStoreHeap(MDefinition* base, const MAsmJSHeapAccess& access,
+MAsmJSStoreHeap(MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* v)
 : MBinaryInstruction(base, v),
-MAsmJSHeapAccess(access)
+MWasmMemoryAccess(access)
 {
 if (access.barrierBefore()|access.barrierAfter())
 setGuard(); // Not removable
@@ -13022,13 +13022,13 @@ class MAsmJSStoreHeap
 class MAsmJSCompareExchangeHeap
 : public MTernaryInstruction,
-public MAsmJSHeapAccess,
+public MWasmMemoryAccess,
 public NoTypePolicy::Data
 {
-MAsmJSCompareExchangeHeap(MDefinition* base, const MAsmJSHeapAccess& access,
+MAsmJSCompareExchangeHeap(MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* oldv, MDefinition* newv)
 : MTernaryInstruction(base, oldv, newv),
-MAsmJSHeapAccess(access)
+MWasmMemoryAccess(access)
 {
 setGuard(); // Not removable
 setResultType(MIRType::Int32);
@@ -13049,13 +13049,13 @@ class MAsmJSCompareExchangeHeap
 class MAsmJSAtomicExchangeHeap
 : public MBinaryInstruction,
-public MAsmJSHeapAccess,
+public MWasmMemoryAccess,
 public NoTypePolicy::Data
 {
-MAsmJSAtomicExchangeHeap(MDefinition* base, const MAsmJSHeapAccess& access,
+MAsmJSAtomicExchangeHeap(MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* value)
 : MBinaryInstruction(base, value),
-MAsmJSHeapAccess(access)
+MWasmMemoryAccess(access)
 {
 setGuard(); // Not removable
 setResultType(MIRType::Int32);
@@ -13075,15 +13075,15 @@ class MAsmJSAtomicExchangeHeap
 class MAsmJSAtomicBinopHeap
 : public MBinaryInstruction,
-public MAsmJSHeapAccess,
+public MWasmMemoryAccess,
 public NoTypePolicy::Data
 {
 AtomicOp op_;
-MAsmJSAtomicBinopHeap(AtomicOp op, MDefinition* base, const MAsmJSHeapAccess& access,
+MAsmJSAtomicBinopHeap(AtomicOp op, MDefinition* base, const MWasmMemoryAccess& access,
 MDefinition* v)
 : MBinaryInstruction(base, v),
-MAsmJSHeapAccess(access),
+MWasmMemoryAccess(access),
 op_(op)
 {
 setGuard(); // Not removable
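
The MIR.h hunks above show the renamed class's role: MWasmMemoryAccess is a plain mixin carrying access metadata (offset, alignment, access type, barriers) that each heap-access instruction inherits alongside its MUnaryInstruction/MBinaryInstruction base. A minimal self-contained sketch of that pattern, using simplified stand-in types rather than the real SpiderMonkey ones:

#include <cstdint>
#include <cstdio>

// Simplified stand-ins for Scalar::Type and its byte-size helper.
enum class ScalarType { Int32, Float64 };
static uint32_t byteSize(ScalarType t) { return t == ScalarType::Int32 ? 4 : 8; }

// The mixin: access metadata only, no instruction behavior of its own.
class MWasmMemoryAccess
{
    uint32_t offset_ = 0;
    uint32_t align_;
    ScalarType accessType_;

  public:
    explicit MWasmMemoryAccess(ScalarType accessType)
      : align_(byteSize(accessType)), accessType_(accessType)
    {}

    uint32_t offset() const { return offset_; }
    uint32_t align() const { return align_; }
    ScalarType accessType() const { return accessType_; }
    void setOffset(uint32_t offset) { offset_ = offset; }
    void setAlign(uint32_t align) { align_ = align; }
};

// Stand-in for MUnaryInstruction; a node inherits the mixin next to it,
// mirroring `class MAsmJSLoadHeap : public MUnaryInstruction,
//                                   public MWasmMemoryAccess, ...` above.
struct InstructionStub {};

class LoadHeapStub : public InstructionStub, public MWasmMemoryAccess
{
  public:
    explicit LoadHeapStub(const MWasmMemoryAccess& access)
      : MWasmMemoryAccess(access)
    {}
};

int main()
{
    // Same shape as the Emit* helpers: build the descriptor, then the node.
    MWasmMemoryAccess access(ScalarType::Int32);
    access.setOffset(16);
    access.setAlign(4);

    LoadHeapStub load(access);
    std::printf("offset=%u align=%u\n", load.offset(), load.align());
    return 0;
}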

View file

@@ -222,8 +222,8 @@ class MIRGenerator
 public:
 const JitCompileOptions options;
-bool needsAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* access) const;
-size_t foldableOffsetRange(const MAsmJSHeapAccess* access) const;
+bool needsAsmJSBoundsCheckBranch(const MWasmMemoryAccess* access) const;
+size_t foldableOffsetRange(const MWasmMemoryAccess* access) const;
 size_t foldableOffsetRange(bool accessNeedsBoundsCheck, bool atomic) const;
 private:

View file

@@ -109,7 +109,7 @@ MIRGenerator::addAbortedPreliminaryGroup(ObjectGroup* group)
 }
 bool
-MIRGenerator::needsAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* access) const
+MIRGenerator::needsAsmJSBoundsCheckBranch(const MWasmMemoryAccess* access) const
 {
 // A heap access needs a bounds-check branch if we're not relying on signal
 // handlers to catch errors, and if it's not proven to be within bounds.
@@ -124,7 +124,7 @@ MIRGenerator::needsAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* access) const
 }
 size_t
-MIRGenerator::foldableOffsetRange(const MAsmJSHeapAccess* access) const
+MIRGenerator::foldableOffsetRange(const MWasmMemoryAccess* access) const
 {
 return foldableOffsetRange(access->needsBoundsCheck(), access->isAtomicAccess());
 }

View file

@@ -925,7 +925,7 @@ CodeGeneratorX64::visitAsmJSStoreHeap(LAsmJSStoreHeap* ins)
 }
 static void
-MaybeAddAtomicsBoundsCheck(MacroAssemblerX64& masm, MAsmJSHeapAccess* mir, Register ptr)
+MaybeAddAtomicsBoundsCheck(MacroAssemblerX64& masm, MWasmMemoryAccess* mir, Register ptr)
 {
 if (!mir->needsBoundsCheck())
 return;

View file

@@ -428,7 +428,7 @@ CodeGeneratorX86Shared::visitOffsetBoundsCheck(OffsetBoundsCheck* oolCheck)
 }
 void
-CodeGeneratorX86Shared::emitAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* access,
+CodeGeneratorX86Shared::emitAsmJSBoundsCheckBranch(const MWasmMemoryAccess* access,
 const MInstruction* mir,
 Register ptr, Label* maybeFail)
 {
@@ -467,7 +467,7 @@ CodeGeneratorX86Shared::emitAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* acces
 }
 bool
-CodeGeneratorX86Shared::maybeEmitThrowingAsmJSBoundsCheck(const MAsmJSHeapAccess* access,
+CodeGeneratorX86Shared::maybeEmitThrowingAsmJSBoundsCheck(const MWasmMemoryAccess* access,
 const MInstruction* mir,
 const LAllocation* ptr)
 {
@@ -518,7 +518,7 @@ CodeGeneratorX86Shared::maybeEmitAsmJSStoreBoundsCheck(const MAsmJSStoreHeap* mi
 }
 void
-CodeGeneratorX86Shared::cleanupAfterAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* access,
+CodeGeneratorX86Shared::cleanupAfterAsmJSBoundsCheckBranch(const MWasmMemoryAccess* access,
 Register ptr)
 {
 // Clean up after performing a heap access checked by a branch.

View file

@@ -95,13 +95,13 @@ class CodeGeneratorX86Shared : public CodeGeneratorShared
 private:
 void
-emitAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* mir, const MInstruction* ins,
+emitAsmJSBoundsCheckBranch(const MWasmMemoryAccess* mir, const MInstruction* ins,
 Register ptr, Label* fail);
 public:
 // For SIMD and atomic loads and stores (which throw on out-of-bounds):
 bool
-maybeEmitThrowingAsmJSBoundsCheck(const MAsmJSHeapAccess* mir, const MInstruction* ins,
+maybeEmitThrowingAsmJSBoundsCheck(const MWasmMemoryAccess* mir, const MInstruction* ins,
 const LAllocation* ptr);
 // For asm.js plain and atomic loads that possibly require a bounds check:
@@ -113,7 +113,7 @@ class CodeGeneratorX86Shared : public CodeGeneratorShared
 bool
 maybeEmitAsmJSStoreBoundsCheck(const MAsmJSStoreHeap* mir, LAsmJSStoreHeap* ins, Label** rejoin);
-void cleanupAfterAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* mir, Register ptr);
+void cleanupAfterAsmJSBoundsCheckBranch(const MWasmMemoryAccess* mir, Register ptr);
 NonAssertingLabel deoptLabel_;