Bug 1478616 - Generalize the wasm write barrier. r=bbouvier

We need to generalize the barrier to handle not just globals, but also
fields in structures.  To do this we pass the location of the store
(ie the Cell**) to the C++ barrier machinery, not the global index.

--HG--
extra : rebase_source : e036ab3500cfa838f8a7dcbfade6dcc5d5765e85
This commit is contained in:
Lars T Hansen 2018-07-26 15:54:49 +02:00
Parent 5ec72ee62e
Commit aa46adb8f5
5 changed files with 99 additions and 151 deletions

View file

@@ -1891,6 +1891,7 @@ class BaseCompiler final : public BaseCompilerInterface
ValTypeVector SigF_;
MIRTypeVector SigP_;
MIRTypeVector SigPI_;
MIRTypeVector SigPL_;
MIRTypeVector SigPII_;
MIRTypeVector SigPIII_;
MIRTypeVector SigPIIL_;
@@ -5614,6 +5615,97 @@ class BaseCompiler final : public BaseCompilerInterface
masm.wasmTrap(t, bytecodeOffset());
}
////////////////////////////////////////////////////////////
//
// Object support.
#ifdef ENABLE_WASM_GC
// This emits a GC pre-write barrier. The pre-barrier is needed when we
// replace a member field with a new value, and the previous field value
// might have no other referents, and incremental GC is ongoing. The field
// might belong to an object or be a stack slot or a register or a heap
// allocated value.
//
// let obj = { field: previousValue };
// obj.field = newValue; // previousValue must be marked with a pre-barrier.
//
// The `valueAddr` is the address of the location that we are about to
// update. This function preserves that register.
void emitPreBarrier(RegPtr valueAddr) {
Label skipBarrier;
// The pre-barrier stub reached via the instance expects the cell address
// in the fixed register PreBarrierReg, so the caller must pass it there.
MOZ_ASSERT(valueAddr == PreBarrierReg);
ScratchPtr scratch(*this);
// If no incremental GC has started, we don't need the barrier.
masm.loadWasmTlsRegFromFrame(scratch);
masm.loadPtr(Address(scratch, offsetof(TlsData, addressOfNeedsIncrementalBarrier)), scratch);
// Test the low bit of the needs-incremental-barrier flag word.
masm.branchTest32(Assembler::Zero, Address(scratch, 0), Imm32(0x1), &skipBarrier);
// If the previous value is null, we don't need the barrier.
masm.loadPtr(Address(valueAddr, 0), scratch);
masm.branchTestPtr(Assembler::Zero, scratch, scratch, &skipBarrier);
// Call the barrier. This assumes PreBarrierReg contains the address of
// the stored value.
//
// PreBarrierReg is volatile and is preserved by the barrier.
// `scratch` is reloaded from the frame here because the flag test above
// clobbered it.
masm.loadWasmTlsRegFromFrame(scratch);
masm.loadPtr(Address(scratch, offsetof(TlsData, instance)), scratch);
masm.loadPtr(Address(scratch, Instance::offsetOfPreBarrierCode()), scratch);
masm.call(scratch);
masm.bind(&skipBarrier);
}
// This emits a GC post-write barrier. This is needed to ensure that the GC
// is aware of slots of tenured things containing references to nursery
// values. Pass None for object when the field's owner object is known to
// be tenured or heap-allocated.
// NOTE(review): "heap-allocated" here presumably means "not itself a GC
// thing" (e.g. a global cell living in TLS data) -- confirm against the
// callers; with no `object` the owner-in-nursery fast path is simply
// skipped, which is conservative but correct.
//
// This frees the register `valueAddr`.
void emitPostBarrier(const Maybe<RegPtr>& object, RegPtr otherScratch, RegPtr valueAddr, RegPtr setValue) {
Label skipBarrier;
// If the pointer being stored is null, no barrier.
masm.branchTestPtr(Assembler::Zero, setValue, setValue, &skipBarrier);
// If there is a containing object and it is in the nursery, no barrier.
if (object)
masm.branchPtrInNurseryChunk(Assembler::Equal, *object, otherScratch, &skipBarrier);
// If the pointer being stored is to a tenured object, no barrier.
masm.branchPtrInNurseryChunk(Assembler::NotEqual, setValue, otherScratch, &skipBarrier);
// Need a barrier.
uint32_t bytecodeOffset = iter_.lastOpcodeOffset();
// The `valueAddr` is a raw pointer to the cell within some GC object or
// TLS area, and we guarantee that the GC will not run while the
// postbarrier call is active, so push a uintptr_t value.
// Pushing `valueAddr` transfers it to the value stack; the instance call
// pops (consumes) it, which is why this function frees the register.
# ifdef JS_64BIT
pushI64(RegI64(Register64(valueAddr)));
emitInstanceCall(bytecodeOffset, SigPL_, ExprType::Void, SymbolicAddress::PostBarrier);
# else
pushI32(RegI32(valueAddr));
emitInstanceCall(bytecodeOffset, SigPI_, ExprType::Void, SymbolicAddress::PostBarrier);
# endif
masm.bind(&skipBarrier);
}
#endif
// Emit a fully barriered store of the pointer `value` into the cell at
// `valueAddr`: GC pre-write barrier, the store itself, then the GC
// post-write barrier.
//
// `object`, if present, is the GC object containing the cell; pass
// Nothing() when the owner is known to be tenured or is not itself a GC
// object (e.g. a global cell in TLS data). Consumes `valueAddr` (the
// post-barrier's instance call pops it); `value` is left for the caller
// to free.
//
// NOTE(review): emitPreBarrier/emitPostBarrier are declared only under
// ENABLE_WASM_GC (the #endif above closes that region), so this helper
// must carry the same guard or non-GC builds fail to compile.
#ifdef ENABLE_WASM_GC
void emitBarrieredStore(const Maybe<RegPtr>& object, RegPtr valueAddr, RegPtr value) {
    // The pre-barrier reads the OLD value through valueAddr, so it must
    // run before the store; it preserves valueAddr.
    emitPreBarrier(valueAddr);
    // The actual store.
    masm.storePtr(value, Address(valueAddr, 0));
    // The post-barrier inspects the NEW value; it consumes valueAddr.
    RegPtr otherScratch = needRef();
    emitPostBarrier(object, otherScratch, valueAddr, value);
    freeRef(otherScratch);
}
#endif
////////////////////////////////////////////////////////////
//
// Machinery for optimized conditional branches.
@@ -5721,81 +5813,6 @@ class BaseCompiler final : public BaseCompilerInterface
masm.branch64(c, lhs, rhs, l);
}
#ifdef ENABLE_WASM_GC
// The following couple of functions emit a GC pre-write barrier. This is
// needed when we replace a member field with a new value, and the previous
// field value might have no other referents. The field might belong to an
// object or be a stack slot or a register or a heap allocated value.
//
// let obj = { field: previousValue };
// obj.field = newValue; // previousValue must be marked with a pre-barrier.
//
// Implementing a pre-barrier looks like this:
// - call `testNeedPreBarrier` with a fresh label.
// - user code must put the address of the field we're about to clobber in
// PreBarrierReg (to avoid explicit pushing/popping).
// - call `emitPreBarrier`, which binds the label.
// Part one of the protocol above: emit the "is an incremental GC active?"
// check, branching to `skipBarrier` when no barrier is needed.
void testNeedPreBarrier(Label* skipBarrier) {
// The label must be fresh; it is bound later by emitPreBarrier.
MOZ_ASSERT(!skipBarrier->used());
MOZ_ASSERT(!skipBarrier->bound());
// If no incremental GC has started, we don't need the barrier.
ScratchPtr scratch(*this);
masm.loadWasmTlsRegFromFrame(scratch);
masm.loadPtr(Address(scratch, offsetof(TlsData, addressOfNeedsIncrementalBarrier)), scratch);
masm.branchTest32(Assembler::Zero, Address(scratch, 0), Imm32(0x1), skipBarrier);
}
// Part two of the protocol: emit the null-check of the previous value and
// the call to the pre-barrier stub, then bind `skipBarrier`. `valueAddr`
// must already hold the address of the cell being clobbered.
void emitPreBarrier(RegPtr valueAddr, Label* skipBarrier) {
// The barrier stub expects the cell address in the fixed PreBarrierReg.
MOZ_ASSERT(valueAddr == PreBarrierReg);
// If the previous value is null, we don't need the barrier.
ScratchPtr scratch(*this);
masm.loadPtr(Address(valueAddr, 0), scratch);
masm.branchTestPtr(Assembler::Zero, scratch, scratch, skipBarrier);
// Call the barrier. This assumes PreBarrierReg contains the address of
// the stored value.
masm.loadWasmTlsRegFromFrame(scratch);
masm.loadPtr(Address(scratch, offsetof(TlsData, instance)), scratch);
masm.loadPtr(Address(scratch, Instance::offsetOfPreBarrierCode()), scratch);
masm.call(scratch);
masm.bind(skipBarrier);
}
// This emits a GC post-write barrier. This is needed to ensure that the GC
// is aware of slots of tenured things containing references to nursery
// values. Pass None for object when the field's owner object is known to
// be tenured or heap-allocated.
// In this (old) version the slot's identity is passed to C++ as a packed
// PostBarrierArg (currently only a global index) rather than its address.
void emitPostBarrier(const Maybe<RegPtr>& object, RegPtr setValue, PostBarrierArg arg) {
Label skipBarrier;
// If the set value is null, no barrier.
masm.branchTestPtr(Assembler::Zero, setValue, setValue, &skipBarrier);
RegPtr scratch = needRef();
if (object) {
// If the object value isn't tenured, no barrier.
masm.branchPtrInNurseryChunk(Assembler::Equal, *object, scratch, &skipBarrier);
}
// If the set value is tenured, no barrier.
masm.branchPtrInNurseryChunk(Assembler::NotEqual, setValue, scratch, &skipBarrier);
freeRef(scratch);
// Need a barrier.
uint32_t bytecodeOffset = iter_.lastOpcodeOffset();
// Push the packed argument word as an i32; Instance::postBarrier decodes it.
pushI32(arg.rawPayload());
emitInstanceCall(bytecodeOffset, SigPI_, ExprType::Void, SymbolicAddress::PostBarrier);
masm.bind(&skipBarrier);
}
#endif
// Emit a conditional branch that optionally and optimally cleans up the CPU
// stack before we branch.
//
@@ -8438,27 +8455,14 @@ BaseCompiler::emitSetGlobal()
}
#ifdef ENABLE_WASM_GC
case ValType::AnyRef: {
Label skipBarrier;
testNeedPreBarrier(&skipBarrier);
RegPtr valueAddr(PreBarrierReg);
needRef(valueAddr);
{
ScratchI32 tmp(*this);
masm.computeEffectiveAddress(addressOfGlobalVar(global, tmp), valueAddr);
}
emitPreBarrier(valueAddr, &skipBarrier);
freeRef(valueAddr);
RegPtr rv = popRef();
{
// Actual store.
ScratchI32 tmp(*this);
masm.storePtr(rv, addressOfGlobalVar(global, tmp));
}
emitPostBarrier(Nothing(), rv, PostBarrierArg::Global(id));
emitBarrieredStore(Nothing(), valueAddr, rv); // Consumes valueAddr
freeRef(rv);
break;
}
@@ -10260,6 +10264,8 @@ BaseCompiler::init()
return false;
if (!SigPI_.append(MIRType::Pointer) || !SigPI_.append(MIRType::Int32))
return false;
if (!SigPL_.append(MIRType::Pointer) || !SigPL_.append(MIRType::Int64))
return false;
if (!SigPII_.append(MIRType::Pointer) || !SigPII_.append(MIRType::Int32) ||
!SigPII_.append(MIRType::Int32))
{

View file

@@ -678,7 +678,6 @@ AddressOf(SymbolicAddress imm, ABIFunctionType* abiType)
#ifdef ENABLE_WASM_GC
case SymbolicAddress::PostBarrier:
*abiType = Args_General2;
static_assert(sizeof(PostBarrierArg) == sizeof(uint32_t), "passed arg is a u32");
return FuncCast(Instance::postBarrier, *abiType);
#endif
#if defined(JS_CODEGEN_MIPS32)

View file

@@ -467,25 +467,10 @@ Instance::memFill(Instance* instance, uint32_t byteOffset, uint32_t value, uint3
#ifdef ENABLE_WASM_GC
// NOTE(review): this hunk shows the removed and the replacement bodies of
// Instance::postBarrier with the diff's +/- markers stripped: the
// PostBarrierArg signature and the switch below are the OLD code; the
// gc::Cell** signature and the two final lines are the NEW code.
/* static */ void
Instance::postBarrier(Instance* instance, PostBarrierArg arg)
Instance::postBarrier(Instance* instance, gc::Cell** location)
{
// OLD body: decode the packed argument into the address of the global's
// cell, chasing one indirection for indirect globals.
gc::Cell** cell = nullptr;
switch (arg.type()) {
case PostBarrierArg::Type::Global: {
const GlobalDesc& global = instance->metadata().globals[arg.globalIndex()];
MOZ_ASSERT(!global.isConstant());
MOZ_ASSERT(global.type().isRefOrAnyRef());
uint8_t* globalAddr = instance->globalData() + global.offset();
if (global.isIndirect())
globalAddr = *(uint8_t**)globalAddr;
MOZ_ASSERT(*(JSObject**)globalAddr, "shouldn't call postbarrier if null");
cell = (gc::Cell**) globalAddr;
break;
}
}
MOZ_ASSERT(cell);
TlsContext.get()->runtime()->gc.storeBuffer().putCell(cell);
// NEW body: jit code now passes the cell's address directly, so we just
// record it in the generational GC's store buffer.
MOZ_ASSERT(location);
TlsContext.get()->runtime()->gc.storeBuffer().putCell(location);
}
#endif // ENABLE_WASM_GC

View file

@@ -180,7 +180,7 @@ class Instance
static int32_t memCopy(Instance* instance, uint32_t destByteOffset, uint32_t srcByteOffset, uint32_t len);
static int32_t memFill(Instance* instance, uint32_t byteOffset, uint32_t value, uint32_t len);
#ifdef ENABLE_WASM_GC
static void postBarrier(Instance* instance, PostBarrierArg arg);
static void postBarrier(Instance* instance, gc::Cell** location);
#endif
};

View file

@@ -2375,48 +2375,6 @@ class DebugFrame
static void alignmentStaticAsserts();
};
# ifdef ENABLE_WASM_GC
// A packed format for an argument to the Instance::postBarrier function.
//
// The value is a single uint32_t: the low bit carries the Type tag and
// the upper 31 bits carry the type-specific payload (currently only a
// global index).
class PostBarrierArg
{
  public:
    enum class Type {
        Global = 0x0,
        Last = Global
    };

  private:
    uint32_t type_ : 1;     // Tag identifying how to interpret payload_.
    uint32_t payload_ : 31; // Type-specific data; a global index for Global.

    PostBarrierArg(uint32_t data, Type tag)
      : type_(uint32_t(tag)),
        payload_(data)
    {
        // The payload must fit in 31 bits and the tag must be a valid Type.
        MOZ_ASSERT(data < (UINT32_MAX >> 1));
        MOZ_ASSERT(uint32_t(tag) <= uint32_t(Type::Last));
    }

  public:
    // Build an argument naming the global at `globalIndex`.
    static PostBarrierArg Global(uint32_t globalIndex) {
        return PostBarrierArg(globalIndex, Type::Global);
    }

    // Decode the tag.
    Type type() const {
        MOZ_ASSERT(type_ <= uint32_t(Type::Last));
        return Type(type_);
    }

    // Decode the payload; only meaningful when type() is Type::Global.
    uint32_t globalIndex() const {
        MOZ_ASSERT(type() == Type::Global);
        return payload_;
    }

    // The raw word handed to C++: payload in the high bits, tag in bit 0.
    uint32_t rawPayload() const {
        return (payload_ << 1) | type_;
    }
};
# endif
} // namespace wasm
} // namespace js