Bug 1790626 - wasm: Use pointer to TypeDef instead of type index in PackedType. r=yury

This commit switches the representation of PackedType (and therefore ValType/FieldType)
to use a *TypeDef pointer instead of a type index.

There are several changes here:
  1. PackedTypeCode is always 64-bits now to pack the pointer. This penalizes 32-bit
     platforms, but we likely don't care about them enough to try to optimize this.
  2. RefType::TypeIndex is renamed to RefType::TypeRef
  3. RefType::typeIndex() is renamed to RefType::typeDef()
  4. TypeContext now stores a map from *TypeDef to original type index for printing errors
  5. Decoding a RefType now stores a *TypeDef instead of type index
  6. We now just transfer the SharedTypeContext from ModuleEnvironment to Metadata instead
     of copying definitions. This is needed for sharing the indexOf map.
  7. We now manually serialize/deserialize TypeContext
  8. TypeContext now stores SharedTypeDef in its vector instead of TypeDef; this is
     needed so that *TypeDef pointers are not invalidated when the vector is resized
     (asm.js resizes it)
  9. The initialization of TypeContext is refactored to keep the indexOf map in sync
     as new types are added (asm.js needs this)
  10. We now manually serialize/deserialize PackedTypeCode using a new SerializedTypeCode
  11. Serialization now needs a TypeContext in order to get the index of type definitions
  12. Deserialization now constructs a TypeContext, and uses that when deserializing
      ValType/RefType/FieldType

Differential Revision: https://phabricator.services.mozilla.com/D157387
This commit is contained in:
Ryan Hunt 2022-10-26 14:48:08 +00:00
Родитель cfc3265f69
Коммит 70509cb3c2
28 изменённых файлов: 610 добавлений и 652 удалений

Просмотреть файл

@ -17313,7 +17313,7 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
break;
case wasm::RefType::Func:
case wasm::RefType::Eq:
case wasm::RefType::TypeIndex:
case wasm::RefType::TypeRef:
MOZ_CRASH("unexpected argument type when calling from ion to wasm");
}
break;
@ -17390,7 +17390,7 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
// API to do so.
MOZ_ASSERT(lir->mir()->type() == MIRType::Value);
break;
case wasm::RefType::TypeIndex:
case wasm::RefType::TypeRef:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
}
break;

Просмотреть файл

@ -1941,13 +1941,18 @@ class MOZ_STACK_CLASS ModuleValidator : public ModuleValidatorShared {
}
*sigIndex = moduleEnv_.types->length();
return moduleEnv_.types->append(std::move(sig));
MutableTypeDef typeDef = moduleEnv_.types->addType();
if (!typeDef) {
return false;
}
*typeDef = std::move(sig);
return true;
}
bool declareSig(FuncType&& sig, uint32_t* sigIndex) {
SigSet::AddPtr p = sigSet_.lookupForAdd(sig);
if (p) {
*sigIndex = p->sigIndex();
MOZ_ASSERT(moduleEnv_.types->funcType(*sigIndex) == sig);
MOZ_ASSERT(moduleEnv_.types->type(*sigIndex).funcType() == sig);
return true;
}
@ -2128,15 +2133,15 @@ class MOZ_STACK_CLASS ModuleValidator : public ModuleValidatorShared {
uint32_t funcIndex = r.front().value();
uint32_t funcTypeIndex = r.front().key().sigIndex();
MOZ_ASSERT(!moduleEnv_.funcs[funcIndex].type);
moduleEnv_.funcs[funcIndex] =
FuncDesc(&moduleEnv_.types->funcType(funcTypeIndex), funcTypeIndex);
moduleEnv_.funcs[funcIndex] = FuncDesc(
&moduleEnv_.types->type(funcTypeIndex).funcType(), funcTypeIndex);
}
for (const Func& func : funcDefs_) {
uint32_t funcIndex = funcImportMap_.count() + func.funcDefIndex();
uint32_t funcTypeIndex = func.sigIndex();
MOZ_ASSERT(!moduleEnv_.funcs[funcIndex].type);
moduleEnv_.funcs[funcIndex] =
FuncDesc(&moduleEnv_.types->funcType(funcTypeIndex), funcTypeIndex);
moduleEnv_.funcs[funcIndex] = FuncDesc(
&moduleEnv_.types->type(funcTypeIndex).funcType(), funcTypeIndex);
}
for (const Export& exp : moduleEnv_.exports) {
if (exp.kind() != DefinitionKind::Function) {
@ -4036,7 +4041,8 @@ static bool CheckFunctionSignature(ModuleValidator<Unit>& m, ParseNode* usepn,
return m.addFuncDef(name, usepn->pn_pos.begin, std::move(sig), func);
}
const FuncType& existingSig = m.env().types->funcType(existing->sigIndex());
const FuncType& existingSig =
m.env().types->type(existing->sigIndex()).funcType();
if (!CheckSignatureAgainstExisting(m, usepn, sig, existingSig)) {
return false;
@ -4110,7 +4116,7 @@ static bool CheckFuncPtrTableAgainstExisting(ModuleValidator<Unit>& m,
}
if (!CheckSignatureAgainstExisting(
m, usepn, sig, m.env().types->funcType(table.sigIndex()))) {
m, usepn, sig, m.env().types->type(table.sigIndex()).funcType())) {
return false;
}
@ -5948,7 +5954,8 @@ static bool CheckReturnType(FunctionValidatorShared& f, ParseNode* usepn,
if (f.returnedType() != type) {
return f.failf(usepn, "%s incompatible with previous return of type %s",
ToString(type).get(), ToString(f.returnedType()).get());
ToString(type, nullptr).get(),
ToString(f.returnedType(), nullptr).get());
}
return true;
@ -6274,7 +6281,7 @@ static bool CheckFuncPtrTable(ModuleValidator<Unit>& m, ParseNode* decl) {
elem, "function-pointer table's elements must be names of functions");
}
const FuncType& funcSig = m.env().types->funcType(func->sigIndex());
const FuncType& funcSig = m.env().types->type(func->sigIndex()).funcType();
if (sig) {
if (*sig != funcSig) {
return m.fail(elem, "all functions in table must have same signature");

Просмотреть файл

@ -226,10 +226,9 @@ class Encoder {
[[nodiscard]] bool writeVarS64(int64_t i) { return writeVarS<int64_t>(i); }
[[nodiscard]] bool writeValType(ValType type) {
static_assert(size_t(TypeCode::Limit) <= UINT8_MAX, "fits");
if (type.isTypeIndex()) {
return writeFixedU8(uint8_t(TypeCode::NullableRef)) &&
writeVarU32(type.refType().typeIndex());
}
// writeValType is only used by asm.js, which doesn't use type
// references
MOZ_RELEASE_ASSERT(!type.isTypeRef(), "NYI");
TypeCode tc = type.packed().typeCode();
MOZ_ASSERT(size_t(tc) < size_t(TypeCode::Limit));
return writeFixedU8(uint8_t(tc));
@ -492,39 +491,25 @@ class Decoder {
// Value and reference types
[[nodiscard]] ValType uncheckedReadValType();
template <class T>
[[nodiscard]] bool readPackedType(uint32_t numTypes,
const FeatureArgs& features, T* type);
[[nodiscard]] ValType uncheckedReadValType(const TypeContext& types);
template <class T>
[[nodiscard]] bool readPackedType(const TypeContext& types,
const FeatureArgs& features, T* type);
[[nodiscard]] bool readValType(uint32_t numTypes, const FeatureArgs& features,
ValType* type);
[[nodiscard]] bool readValType(const TypeContext& types,
const FeatureArgs& features, ValType* type);
[[nodiscard]] bool readFieldType(uint32_t numTypes,
const FeatureArgs& features,
FieldType* type);
[[nodiscard]] bool readFieldType(const TypeContext& types,
const FeatureArgs& features,
FieldType* type);
[[nodiscard]] bool readHeapType(uint32_t numTypes,
const FeatureArgs& features, bool nullable,
RefType* type);
[[nodiscard]] bool readHeapType(const TypeContext& types,
const FeatureArgs& features, bool nullable,
RefType* type);
[[nodiscard]] bool readRefType(uint32_t numTypes, const FeatureArgs& features,
RefType* type);
[[nodiscard]] bool readRefType(const TypeContext& types,
const FeatureArgs& features, RefType* type);
[[nodiscard]] bool validateTypeIndex(const TypeContext& types,
const FeatureArgs& features,
RefType type);
// Instruction opcode
@ -545,7 +530,6 @@ class Decoder {
#endif
[[nodiscard]] bool readRefNull(const TypeContext& types,
const FeatureArgs& features, RefType* type);
[[nodiscard]] bool readRefNull(const FeatureArgs& features, RefType* type);
// See writeBytes comment.
@ -647,7 +631,7 @@ class Decoder {
// Value and reference types
inline ValType Decoder::uncheckedReadValType() {
inline ValType Decoder::uncheckedReadValType(const TypeContext& types) {
uint8_t code = uncheckedReadFixedU8();
switch (code) {
case uint8_t(TypeCode::FuncRef):
@ -666,7 +650,8 @@ inline ValType Decoder::uncheckedReadValType() {
}
int32_t x = uncheckedReadVarS32();
return RefType::fromTypeIndex(x, nullable);
const TypeDef* typeDef = &types.type(x);
return RefType::fromTypeDef(typeDef, nullable);
}
default:
return ValType::fromNonRefTypeCode(TypeCode(code));
@ -674,7 +659,7 @@ inline ValType Decoder::uncheckedReadValType() {
}
template <class T>
inline bool Decoder::readPackedType(uint32_t numTypes,
inline bool Decoder::readPackedType(const TypeContext& types,
const FeatureArgs& features, T* type) {
static_assert(uint8_t(TypeCode::Limit) <= UINT8_MAX, "fits");
uint8_t code;
@ -706,7 +691,7 @@ inline bool Decoder::readPackedType(uint32_t numTypes,
}
bool nullable = code == uint8_t(TypeCode::NullableRef);
RefType refType;
if (!readHeapType(numTypes, features, nullable, &refType)) {
if (!readHeapType(types, features, nullable, &refType)) {
return false;
}
*type = refType;
@ -736,40 +721,19 @@ inline bool Decoder::readPackedType(uint32_t numTypes,
}
return fail("bad type");
}
template <class T>
inline bool Decoder::readPackedType(const TypeContext& types,
const FeatureArgs& features, T* type) {
if (!readPackedType(types.length(), features, type)) {
return false;
}
if (type->isTypeIndex() &&
!validateTypeIndex(types, features, type->refType())) {
return false;
}
return true;
}
inline bool Decoder::readValType(uint32_t numTypes, const FeatureArgs& features,
ValType* type) {
return readPackedType<ValType>(numTypes, features, type);
}
inline bool Decoder::readValType(const TypeContext& types,
const FeatureArgs& features, ValType* type) {
return readPackedType<ValType>(types, features, type);
}
inline bool Decoder::readFieldType(uint32_t numTypes,
const FeatureArgs& features,
FieldType* type) {
return readPackedType<FieldType>(numTypes, features, type);
}
inline bool Decoder::readFieldType(const TypeContext& types,
const FeatureArgs& features,
FieldType* type) {
return readPackedType<FieldType>(types, features, type);
}
inline bool Decoder::readHeapType(uint32_t numTypes,
inline bool Decoder::readHeapType(const TypeContext& types,
const FeatureArgs& features, bool nullable,
RefType* type) {
uint8_t nextByte;
@ -804,40 +768,18 @@ inline bool Decoder::readHeapType(uint32_t numTypes,
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
if (features.functionReferences) {
int32_t x;
if (!readVarS32(&x) || x < 0 || uint32_t(x) >= numTypes ||
if (!readVarS32(&x) || x < 0 || uint32_t(x) >= types.length() ||
uint32_t(x) >= MaxTypeIndex) {
return fail("invalid heap type index");
}
*type = RefType::fromTypeIndex(x, nullable);
const TypeDef* typeDef = &types.type(x);
*type = RefType::fromTypeDef(typeDef, nullable);
return true;
}
#endif
return fail("invalid heap type");
}
inline bool Decoder::readHeapType(const TypeContext& types,
const FeatureArgs& features, bool nullable,
RefType* type) {
if (!readHeapType(types.length(), features, nullable, type)) {
return false;
}
if (type->isTypeIndex() && !validateTypeIndex(types, features, *type)) {
return false;
}
return true;
}
inline bool Decoder::readRefType(uint32_t numTypes, const FeatureArgs& features,
RefType* type) {
ValType valType;
if (!readValType(numTypes, features, &valType)) {
return false;
}
if (!valType.isRefType()) {
return fail("bad type");
}
*type = valType.refType();
return true;
}
inline bool Decoder::readRefType(const TypeContext& types,
const FeatureArgs& features, RefType* type) {
ValType valType;
@ -850,23 +792,6 @@ inline bool Decoder::readRefType(const TypeContext& types,
*type = valType.refType();
return true;
}
inline bool Decoder::validateTypeIndex(const TypeContext& types,
const FeatureArgs& features,
RefType type) {
MOZ_ASSERT(type.isTypeIndex());
if (features.gc && (types[type.typeIndex()].isStructType() ||
types[type.typeIndex()].isArrayType())) {
return true;
}
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
if (features.functionReferences && types[type.typeIndex()].isFuncType()) {
return true;
}
#endif
return fail("type index references an invalid type");
}
// Instruction opcode
@ -948,10 +873,6 @@ inline bool Decoder::readRefNull(const TypeContext& types,
return readHeapType(types, features, true, type);
}
inline bool Decoder::readRefNull(const FeatureArgs& features, RefType* type) {
return readHeapType(MaxTypes, features, true, type);
}
} // namespace wasm
} // namespace js

Просмотреть файл

@ -847,7 +847,7 @@ static int32_t CoerceInPlace_JitEntry(int funcExportIndex, Instance* instance,
break;
case RefType::Func:
case RefType::Eq:
case RefType::TypeIndex:
case RefType::TypeRef:
// Guarded against by temporarilyUnsupportedReftypeForEntry()
MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
}

Просмотреть файл

@ -704,7 +704,7 @@ void LazyStubTier::addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code,
}
size_t Metadata::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
return SizeOfVectorExcludingThis(types, mallocSizeOf) +
return types->sizeOfExcludingThis(mallocSizeOf) +
globals.sizeOfExcludingThis(mallocSizeOf) +
tables.sizeOfExcludingThis(mallocSizeOf) +
tags.sizeOfExcludingThis(mallocSizeOf) +

Просмотреть файл

@ -374,7 +374,7 @@ WASM_DECLARE_CACHEABLE_POD(MetadataCacheablePod)
using ModuleHash = uint8_t[8];
struct Metadata : public ShareableBase<Metadata>, public MetadataCacheablePod {
TypeDefVector types;
SharedTypeContext types;
uint32_t typeIdsOffsetStart;
GlobalDescVector globals;
TableDescVector tables;
@ -407,16 +407,16 @@ struct Metadata : public ShareableBase<Metadata>, public MetadataCacheablePod {
}
const FuncType& getFuncImportType(const FuncImport& funcImport) const {
return types[funcImport.typeIndex()].funcType();
return types->type(funcImport.typeIndex()).funcType();
}
const FuncType& getFuncExportType(const FuncExport& funcExport) const {
return types[funcExport.typeIndex()].funcType();
return types->type(funcExport.typeIndex()).funcType();
}
size_t debugNumFuncs() const { return debugFuncTypeIndices.length(); }
const FuncType& debugFuncType(uint32_t funcIndex) const {
MOZ_ASSERT(debugEnabled);
return types[debugFuncTypeIndices[funcIndex]].funcType();
return types->type(debugFuncTypeIndices[funcIndex]).funcType();
}
// AsmJSMetadata derives Metadata iff isAsmJS(). Mostly this distinction is

Просмотреть файл

@ -107,10 +107,10 @@ static constexpr TypeCode LowestPrimitiveTypeCode = TypeCode::I16;
static constexpr TypeCode AbstractReferenceTypeCode = TypeCode::ExternRef;
// A type code used to represent (ref null? typeindex) whether or not the type
// A type code used to represent (ref null? T) whether or not the type
// is encoded with 'Ref' or 'NullableRef'.
static constexpr TypeCode AbstractReferenceTypeIndexCode = TypeCode::Ref;
static constexpr TypeCode AbstractTypeRefCode = TypeCode::Ref;
// A wasm::Trap represents a wasm-defined trap that can occur during execution
// which triggers a WebAssembly.RuntimeError. Generated code may jump to a Trap

Просмотреть файл

@ -372,6 +372,7 @@ void DebugState::ensureEnterFrameTrapsState(JSContext* cx, Instance* instance,
bool DebugState::debugGetLocalTypes(uint32_t funcIndex, ValTypeVector* locals,
size_t* argsLength,
StackResults* stackResults) {
const TypeContext& types = *metadata().types;
const FuncType& funcType = metadata().debugFuncType(funcIndex);
const ValTypeVector& args = funcType.args();
const ValTypeVector& results = funcType.results();
@ -392,7 +393,7 @@ bool DebugState::debugGetLocalTypes(uint32_t funcIndex, ValTypeVector* locals,
Decoder d(bytecode().begin() + offsetInModule, bytecode().end(),
offsetInModule,
/* error = */ nullptr);
return DecodeValidatedLocalEntries(d, locals);
return DecodeValidatedLocalEntries(types, d, locals);
}
bool DebugState::getGlobal(Instance& instance, uint32_t globalIndex,

Просмотреть файл

@ -75,7 +75,7 @@ class DebugFrame {
case RefType::Func:
case RefType::Extern:
case RefType::Eq:
case RefType::TypeIndex:
case RefType::TypeRef:
return;
}
}

Просмотреть файл

@ -36,8 +36,8 @@ class TaggedValue {
PointerKind1 = 2,
PointerKind2 = 3
};
using PackedRepr = uintptr_t;
static_assert(std::is_same<PackedTypeCode::PackedRepr, uint32_t>(),
using PackedRepr = uint64_t;
static_assert(std::is_same<PackedTypeCode::PackedRepr, uint64_t>(),
"can use pointer tagging with PackedTypeCode");
private:
@ -116,7 +116,8 @@ class ResultType {
InvalidKind = Tagged::PointerKind2,
};
ResultType(Kind kind, uintptr_t imm) : tagged_(Tagged::Kind(kind), imm) {}
ResultType(Kind kind, Tagged::PackedRepr imm)
: tagged_(Tagged::Kind(kind), imm) {}
explicit ResultType(const ValTypeVector* ptr)
: tagged_(Tagged::Kind(VectorKind), ptr) {}
@ -135,7 +136,9 @@ class ResultType {
public:
ResultType() : tagged_(Tagged::Kind(InvalidKind), nullptr) {}
static ResultType Empty() { return ResultType(EmptyKind, uintptr_t(0)); }
static ResultType Empty() {
return ResultType(EmptyKind, Tagged::PackedRepr(0));
}
static ResultType Single(ValType vt) {
return ResultType(SingleKind, vt.bitsUnsafe());
}
@ -233,7 +236,8 @@ class BlockType {
FuncResultsKind = Tagged::PointerKind2
};
BlockType(Kind kind, uintptr_t imm) : tagged_(Tagged::Kind(kind), imm) {}
BlockType(Kind kind, Tagged::PackedRepr imm)
: tagged_(Tagged::Kind(kind), imm) {}
BlockType(Kind kind, const FuncType& type)
: tagged_(Tagged::Kind(kind), &type) {}
@ -251,7 +255,7 @@ class BlockType {
PackedTypeCode::invalid().bits()) {}
static BlockType VoidToVoid() {
return BlockType(VoidToVoidKind, uintptr_t(0));
return BlockType(VoidToVoidKind, Tagged::PackedRepr(0));
}
static BlockType VoidToSingle(ValType vt) {
return BlockType(VoidToSingleKind, vt.bitsUnsafe());

Просмотреть файл

@ -409,7 +409,7 @@ bool WasmGcObject::loadValue(JSContext* cx, const RttValue::PropOffset& offset,
// like to access it so we erase (ref T) with eqref when loading. This is
// safe as (ref T) <: eqref and we're not in the writing case where we
// would need to perform a type check.
if (type.isTypeIndex()) {
if (type.isTypeRef()) {
type = RefType::fromTypeCode(TypeCode::EqRef, true);
}

Просмотреть файл

@ -290,17 +290,8 @@ bool ModuleGenerator::init(Metadata* maybeAsmJSMetadata) {
moduleEnv_->offsetOfFuncImportInstanceData(i));
}
// Copy type definitions to metadata
if (!metadata_->types.resize(moduleEnv_->types->length())) {
return false;
}
for (uint32_t i = 0; i < moduleEnv_->types->length(); i++) {
const TypeDef& typeDef = (*moduleEnv_->types)[i];
if (!metadata_->types[i].clone(typeDef)) {
return false;
}
}
// Share type definitions with metadata
metadata_->types = moduleEnv_->types;
// Accumulate all exported functions:
// - explicitly marked as such;

Просмотреть файл

@ -242,7 +242,8 @@ class MOZ_STACK_CLASS InitExprInterpreter {
: features(FeatureArgs::build(cx, FeatureOptions())),
stack(cx),
globalImportValues(globalImportValues),
instanceObj(cx, instanceObj) {}
instanceObj(cx, instanceObj),
types(instanceObj->instance().metadata().types) {}
bool evaluate(JSContext* cx, Decoder& d);
@ -256,6 +257,7 @@ class MOZ_STACK_CLASS InitExprInterpreter {
RootedValVector stack;
const ValVector& globalImportValues;
Rooted<WasmInstanceObject*> instanceObj;
SharedTypeContext types;
Instance& instance() { return instanceObj->instance(); }
@ -350,8 +352,8 @@ class MOZ_STACK_CLASS InitExprInterpreter {
return false;
}
const StructType& structType =
instance().metadata().types[typeIndex].structType();
const TypeDef& typeDef = instance().metadata().types->type(typeIndex);
const StructType& structType = typeDef.structType();
uint32_t fieldIndex = structType.fields_.length();
while (fieldIndex-- > 0) {
@ -360,7 +362,7 @@ class MOZ_STACK_CLASS InitExprInterpreter {
stack.popBack();
}
return pushRef(RefType::fromTypeIndex(typeIndex, false),
return pushRef(RefType::fromTypeDef(&typeDef, false),
AnyRef::fromJSObject(structObj));
}
@ -370,7 +372,8 @@ class MOZ_STACK_CLASS InitExprInterpreter {
return false;
}
return pushRef(RefType::fromTypeIndex(typeIndex, false),
const TypeDef& typeDef = instance().metadata().types->type(typeIndex);
return pushRef(RefType::fromTypeDef(&typeDef, false),
AnyRef::fromJSObject(structObj));
}
@ -391,7 +394,8 @@ class MOZ_STACK_CLASS InitExprInterpreter {
arrayObj->fillVal(val, 0, len);
stack.popBack();
return pushRef(RefType::fromTypeIndex(typeIndex, false),
const TypeDef& typeDef = instance().metadata().types->type(typeIndex);
return pushRef(RefType::fromTypeDef(&typeDef, false),
AnyRef::fromJSObject(arrayObj));
}
@ -402,7 +406,8 @@ class MOZ_STACK_CLASS InitExprInterpreter {
return false;
}
return pushRef(RefType::fromTypeIndex(typeIndex, false),
const TypeDef& typeDef = instance().metadata().types->type(typeIndex);
return pushRef(RefType::fromTypeDef(&typeDef, false),
AnyRef::fromJSObject(arrayObj));
}
@ -418,7 +423,8 @@ class MOZ_STACK_CLASS InitExprInterpreter {
stack.popBack();
}
return pushRef(RefType::fromTypeIndex(typeIndex, false),
const TypeDef& typeDef = instance().metadata().types->type(typeIndex);
return pushRef(RefType::fromTypeDef(&typeDef, false),
AnyRef::fromJSObject(arrayObj));
}
#endif // ENABLE_WASM_GC
@ -493,7 +499,7 @@ bool InitExprInterpreter::evaluate(JSContext* cx, Decoder& d) {
}
case uint16_t(Op::RefNull): {
RefType type;
if (!d.readRefNull(features, &type)) {
if (!d.readRefNull(*types, features, &type)) {
return false;
}
CHECK(evalRefNull(type));

Просмотреть файл

@ -895,8 +895,7 @@ bool Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
#ifdef ENABLE_WASM_GC
RttValue* Instance::rttCanon(uint32_t typeIndex) const {
const TypeIdDesc& typeId = metadata().typeIds[typeIndex];
return *(RttValue**)addressOfTypeId(typeId);
return *(RttValue**)addressOfTypeId(typeIndex);
}
#endif // ENABLE_WASM_GC
@ -1713,24 +1712,18 @@ bool Instance::init(JSContext* cx, const JSFunctionVector& funcImports,
}
// Allocate in the global type sets for structural type checks
if (!metadata().types.empty()) {
const SharedTypeContext& types = metadata().types;
if (!types->empty()) {
#ifdef ENABLE_WASM_GC
if (GcAvailable(cx)) {
// Transfer and allocate type objects for the struct types in the module
MutableTypeContext tycx = js_new<TypeContext>();
if (!tycx || !tycx->clone(metadata().types)) {
return false;
}
for (uint32_t typeIndex = 0; typeIndex < metadata().types.length();
typeIndex++) {
const TypeDef& typeDef = metadata().types[typeIndex];
for (uint32_t typeIndex = 0; typeIndex < types->length(); typeIndex++) {
const TypeDef& typeDef = types->type(typeIndex);
if ((!typeDef.isStructType() && !typeDef.isArrayType())) {
continue;
}
Rooted<RttValue*> rttValue(
cx, RttValue::rttCanon(cx, TypeHandle(tycx, typeIndex)));
cx, RttValue::rttCanon(cx, TypeHandle(types, typeIndex)));
if (!rttValue) {
return false;
}
@ -1748,9 +1741,8 @@ bool Instance::init(JSContext* cx, const JSFunctionVector& funcImports,
ExclusiveData<FuncTypeIdSet>::Guard lockedFuncTypeIdSet =
funcTypeIdSet.lock();
for (uint32_t typeIndex = 0; typeIndex < metadata().types.length();
typeIndex++) {
const TypeDef& typeDef = metadata().types[typeIndex];
for (uint32_t typeIndex = 0; typeIndex < types->length(); typeIndex++) {
const TypeDef& typeDef = types->type(typeIndex);
switch (typeDef.kind()) {
case TypeDefKind::Func: {
const FuncType& funcType = typeDef.funcType();
@ -1847,13 +1839,13 @@ bool Instance::init(JSContext* cx, const JSFunctionVector& funcImports,
Instance::~Instance() {
realm_->wasm.unregisterInstance(*this);
if (!metadata().types.empty()) {
const SharedTypeContext& types = metadata().types;
if (!types->empty()) {
ExclusiveData<FuncTypeIdSet>::Guard lockedFuncTypeIdSet =
funcTypeIdSet.lock();
for (uint32_t typeIndex = 0; typeIndex < metadata().types.length();
typeIndex++) {
const TypeDef& typeDef = metadata().types[typeIndex];
for (uint32_t typeIndex = 0; typeIndex < types->length(); typeIndex++) {
const TypeDef& typeDef = types->type(typeIndex);
if (!typeDef.isFuncType()) {
continue;
}
@ -1954,9 +1946,9 @@ void Instance::tracePrivate(JSTracer* trc) {
TraceNullableEdge(trc, &memory_, "wasm buffer");
#ifdef ENABLE_WASM_GC
if (hasGcTypes_) {
for (uint32_t typeIndex = 0; typeIndex < metadata().types.length();
for (uint32_t typeIndex = 0; typeIndex < metadata().types->length();
typeIndex++) {
const TypeDef& typeDef = metadata().types[typeIndex];
const TypeDef& typeDef = metadata().types->type(typeIndex);
if (!typeDef.isStructType() && !typeDef.isArrayType()) {
continue;
}
@ -2396,7 +2388,7 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args,
}
if (type.isRefRepr()) {
// Ensure we don't have a temporarily unsupported Ref type in callExport
MOZ_RELEASE_ASSERT(!type.isTypeIndex());
MOZ_RELEASE_ASSERT(!type.isTypeRef());
void* ptr = *reinterpret_cast<void**>(rawArgLoc);
// Store in rooted array until no more GC is possible.
RootedAnyRef ref(cx, AnyRef::fromCompiledCode(ptr));

Просмотреть файл

@ -143,7 +143,7 @@ bool wasm::CompileIntrinsicModule(JSContext* cx,
ReportOutOfMemory(cx);
return false;
}
(*moduleEnv.types)[funcIndex] = TypeDef(std::move(type));
(*moduleEnv.types)[funcIndex] = std::move(type);
}
// Add (func (type $i)) declarations. Do this after all types have been added

Просмотреть файл

@ -4020,7 +4020,7 @@ static bool EmitGetGlobal(FunctionCompiler& f) {
MOZ_ASSERT(value.ref().isNull());
result = f.nullRefConstant();
break;
case RefType::TypeIndex:
case RefType::TypeRef:
MOZ_CRASH("unexpected reference type in EmitGetGlobal");
}
break;

Просмотреть файл

@ -1003,7 +1003,8 @@ static JSString* UTF8CharsToString(JSContext* cx, const char* chars) {
const ValTypeVector& valTypes) {
Rooted<ArrayObject*> arrayObj(cx, NewDenseEmptyArray(cx));
for (ValType valType : valTypes) {
RootedString type(cx, UTF8CharsToString(cx, ToString(valType).get()));
RootedString type(cx,
UTF8CharsToString(cx, ToString(valType, nullptr).get()));
if (!type) {
return nullptr;
}
@ -1039,7 +1040,8 @@ static JSObject* TableTypeToObject(JSContext* cx, RefType type,
uint32_t initial, Maybe<uint32_t> maximum) {
Rooted<IdValueVector> props(cx, IdValueVector(cx));
RootedString elementType(cx, UTF8CharsToString(cx, ToString(type).get()));
RootedString elementType(
cx, UTF8CharsToString(cx, ToString(type, nullptr).get()));
if (!elementType || !props.append(IdValuePair(NameToId(cx->names().element),
StringValue(elementType)))) {
ReportOutOfMemory(cx);
@ -1127,7 +1129,8 @@ static JSObject* GlobalTypeToObject(JSContext* cx, ValType type,
return nullptr;
}
RootedString valueType(cx, UTF8CharsToString(cx, ToString(type).get()));
RootedString valueType(cx,
UTF8CharsToString(cx, ToString(type, nullptr).get()));
if (!valueType || !props.append(IdValuePair(NameToId(cx->names().value),
StringValue(valueType)))) {
ReportOutOfMemory(cx);
@ -4195,7 +4198,7 @@ JSFunction* WasmFunctionCreate(JSContext* cx, HandleFunction func,
return nullptr;
}
FuncType funcType = FuncType(std::move(params), std::move(results));
(*moduleEnv.types)[0] = TypeDef(std::move(funcType));
(*moduleEnv.types)[0] = std::move(funcType);
// Add an (import (func ...))
FuncDesc funcDesc = FuncDesc(&(*moduleEnv.types)[0].funcType(), 0);

Просмотреть файл

@ -867,10 +867,12 @@ inline bool OpIter<Policy>::checkIsSubtypeOf(ResultType params,
template <typename Policy>
inline bool OpIter<Policy>::checkIsSubtypeOf(uint32_t actualTypeIndex,
uint32_t expectedTypeIndex) {
return CheckIsSubtypeOf(
d_, env_, lastOpcodeOffset(),
ValType(RefType::fromTypeIndex(actualTypeIndex, true)),
ValType(RefType::fromTypeIndex(expectedTypeIndex, true)), &cache_);
const TypeDef& actualTypeDef = env_.types->type(actualTypeIndex);
const TypeDef& expectedTypeDef = env_.types->type(expectedTypeIndex);
return CheckIsSubtypeOf(d_, env_, lastOpcodeOffset(),
ValType(RefType::fromTypeDef(&actualTypeDef, true)),
ValType(RefType::fromTypeDef(&expectedTypeDef, true)),
&cache_);
}
#endif
@ -989,7 +991,7 @@ inline bool OpIter<Policy>::popWithRefType(Value* value, StackType* type) {
return true;
}
UniqueChars actualText = ToString(type->valType());
UniqueChars actualText = ToString(type->valType(), env_.types);
if (!actualText) {
return false;
}
@ -1142,11 +1144,12 @@ inline bool OpIter<Policy>::readBlockType(BlockType* type) {
return fail("invalid block type type index");
}
if (!env_.types->isFuncType(x)) {
const TypeDef* typeDef = &env_.types->type(x);
if (!typeDef->isFuncType()) {
return fail("block type type index must be func type");
}
*type = BlockType::Func(env_.types->funcType(x));
*type = BlockType::Func(typeDef->funcType());
return true;
}
@ -1416,7 +1419,7 @@ inline bool OpIter<Policy>::checkCastedBranchValueAndPush(
// Check we at least have one type in the branch target type, which will take
// the casted type.
if (branchTargetType->length() < 1) {
UniqueChars expectedText = ToString(castedToType);
UniqueChars expectedText = ToString(castedToType, env_.types);
if (!expectedText) {
return false;
}
@ -2254,7 +2257,8 @@ inline bool OpIter<Policy>::readRefFunc(uint32_t* funcIndex) {
// validation of the call_ref instruction.
if (env_.functionReferencesEnabled()) {
const uint32_t typeIndex = env_.funcs[*funcIndex].typeIndex;
return push(RefType::fromTypeIndex(typeIndex, false));
const TypeDef& typeDef = env_.types->type(typeIndex);
return push(RefType::fromTypeDef(&typeDef, false));
}
#endif
return push(RefType::func());
@ -2446,11 +2450,11 @@ inline bool OpIter<Policy>::readCallIndirect(uint32_t* funcTypeIndex,
return false;
}
if (!env_.types->isFuncType(*funcTypeIndex)) {
const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
if (!typeDef.isFuncType()) {
return fail("expected signature type");
}
const FuncType& funcType = env_.types->funcType(*funcTypeIndex);
const FuncType& funcType = typeDef.funcType();
#ifdef WASM_PRIVATE_REFTYPES
if (env_.tables[*tableIndex].isImportedOrExported &&
@ -2477,19 +2481,18 @@ inline bool OpIter<Policy>::readCallRef(const FuncType** funcType,
return false;
}
if (!popWithType(ValType(RefType::fromTypeIndex(funcTypeIndex, true)),
callee)) {
const TypeDef& typeDef = env_.types->type(funcTypeIndex);
*funcType = &typeDef.funcType();
if (!popWithType(ValType(RefType::fromTypeDef(&typeDef, true)), callee)) {
return false;
}
const FuncType* funcType_ = &env_.types->funcType(funcTypeIndex);
*funcType = funcType_;
if (!popCallArgs(funcType_->args(), argValues)) {
if (!popCallArgs((*funcType)->args(), argValues)) {
return false;
}
return push(ResultType::Vector(funcType_->results()));
return push(ResultType::Vector((*funcType)->results()));
}
#endif
@ -2537,11 +2540,11 @@ inline bool OpIter<Policy>::readOldCallIndirect(uint32_t* funcTypeIndex,
return fail("signature index out of range");
}
if (!env_.types->isFuncType(*funcTypeIndex)) {
const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
if (!typeDef.isFuncType()) {
return fail("expected signature type");
}
const FuncType& funcType = env_.types->funcType(*funcTypeIndex);
const FuncType& funcType = typeDef.funcType();
if (!popCallArgs(funcType.args(), argValues)) {
return false;
@ -2967,8 +2970,8 @@ inline bool OpIter<Policy>::readGcTypeIndex(uint32_t* typeIndex) {
return fail("type index out of range");
}
if (!env_.types->isStructType(*typeIndex) &&
!env_.types->isArrayType(*typeIndex)) {
if (!env_.types->type(*typeIndex).isStructType() &&
!env_.types->type(*typeIndex).isArrayType()) {
return fail("not a gc type");
}
@ -2985,7 +2988,7 @@ inline bool OpIter<Policy>::readStructTypeIndex(uint32_t* typeIndex) {
return fail("type index out of range");
}
if (!env_.types->isStructType(*typeIndex)) {
if (!env_.types->type(*typeIndex).isStructType()) {
return fail("not a struct type");
}
@ -3002,7 +3005,7 @@ inline bool OpIter<Policy>::readArrayTypeIndex(uint32_t* typeIndex) {
return fail("type index out of range");
}
if (!env_.types->isArrayType(*typeIndex)) {
if (!env_.types->type(*typeIndex).isArrayType()) {
return fail("not an array type");
}
@ -3019,7 +3022,7 @@ inline bool OpIter<Policy>::readFuncTypeIndex(uint32_t* typeIndex) {
return fail("type index out of range");
}
if (!env_.types->isFuncType(*typeIndex)) {
if (!env_.types->type(*typeIndex).isFuncType()) {
return fail("not an func type");
}
@ -3051,21 +3054,23 @@ inline bool OpIter<Policy>::readStructNew(uint32_t* typeIndex,
return false;
}
const StructType& str = env_.types->structType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const StructType& structType = typeDef.structType();
if (!argValues->resize(str.fields_.length())) {
if (!argValues->resize(structType.fields_.length())) {
return false;
}
static_assert(MaxStructFields <= INT32_MAX, "Or we iloop below");
for (int32_t i = str.fields_.length() - 1; i >= 0; i--) {
if (!popWithType(str.fields_[i].type.widenToValType(), &(*argValues)[i])) {
for (int32_t i = structType.fields_.length() - 1; i >= 0; i--) {
if (!popWithType(structType.fields_[i].type.widenToValType(),
&(*argValues)[i])) {
return false;
}
}
return push(RefType::fromTypeIndex(*typeIndex, false));
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3076,13 +3081,14 @@ inline bool OpIter<Policy>::readStructNewDefault(uint32_t* typeIndex) {
return false;
}
const StructType& str = env_.types->structType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const StructType& structType = typeDef.structType();
if (!str.isDefaultable()) {
if (!structType.isDefaultable()) {
return fail("struct must be defaultable");
}
return push(RefType::fromTypeIndex(*typeIndex, false));
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3097,13 +3103,14 @@ inline bool OpIter<Policy>::readStructGet(uint32_t* typeIndex,
return false;
}
const StructType& structType = env_.types->structType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const StructType& structType = typeDef.structType();
if (!readFieldIndex(fieldIndex, structType)) {
return false;
}
if (!popWithType(RefType::fromTypeIndex(*typeIndex, true), ptr)) {
if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
return false;
}
@ -3131,7 +3138,8 @@ inline bool OpIter<Policy>::readStructSet(uint32_t* typeIndex,
return false;
}
const StructType& structType = env_.types->structType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const StructType& structType = typeDef.structType();
if (!readFieldIndex(fieldIndex, structType)) {
return false;
@ -3146,7 +3154,7 @@ inline bool OpIter<Policy>::readStructSet(uint32_t* typeIndex,
return fail("field is not mutable");
}
if (!popWithType(RefType::fromTypeIndex(*typeIndex, true), ptr)) {
if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
return false;
}
@ -3162,17 +3170,18 @@ inline bool OpIter<Policy>::readArrayNew(uint32_t* typeIndex,
return false;
}
const ArrayType& arr = env_.types->arrayType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const ArrayType& arrayType = typeDef.arrayType();
if (!popWithType(ValType::I32, numElements)) {
return false;
}
if (!popWithType(arr.elementType_.widenToValType(), argValue)) {
if (!popWithType(arrayType.elementType_.widenToValType(), argValue)) {
return false;
}
return push(RefType::fromTypeIndex(*typeIndex, false));
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3184,7 +3193,8 @@ inline bool OpIter<Policy>::readArrayNewFixed(uint32_t* typeIndex,
return false;
}
const ArrayType& arrayType = env_.types->arrayType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const ArrayType& arrayType = typeDef.arrayType();
if (!readVarU32(numElements)) {
return false;
@ -3200,7 +3210,7 @@ inline bool OpIter<Policy>::readArrayNewFixed(uint32_t* typeIndex,
}
}
return push(RefType::fromTypeIndex(*typeIndex, false));
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3212,17 +3222,18 @@ inline bool OpIter<Policy>::readArrayNewDefault(uint32_t* typeIndex,
return false;
}
const ArrayType& arr = env_.types->arrayType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const ArrayType& arrayType = typeDef.arrayType();
if (!popWithType(ValType::I32, numElements)) {
return false;
}
if (!arr.elementType_.isDefaultable()) {
if (!arrayType.elementType_.isDefaultable()) {
return fail("array must be defaultable");
}
return push(RefType::fromTypeIndex(*typeIndex, false));
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3239,7 +3250,8 @@ inline bool OpIter<Policy>::readArrayNewData(uint32_t* typeIndex,
return fail("unable to read segment index");
}
const ArrayType& arrayType = env_.types->arrayType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const ArrayType& arrayType = typeDef.arrayType();
FieldType elemType = arrayType.elementType_;
if (!elemType.isNumber() && !elemType.isPacked() && !elemType.isVector()) {
return fail("element type must be i8/i16/i32/i64/f32/f64/v128");
@ -3258,7 +3270,7 @@ inline bool OpIter<Policy>::readArrayNewData(uint32_t* typeIndex,
return false;
}
return push(RefType::fromTypeIndex(*typeIndex, false));
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3275,7 +3287,8 @@ inline bool OpIter<Policy>::readArrayNewElem(uint32_t* typeIndex,
return fail("unable to read segment index");
}
const ArrayType& arrayType = env_.types->arrayType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const ArrayType& arrayType = typeDef.arrayType();
FieldType dstElemType = arrayType.elementType_;
if (!dstElemType.isRefType()) {
return fail("element type is not a reftype");
@ -3298,7 +3311,7 @@ inline bool OpIter<Policy>::readArrayNewElem(uint32_t* typeIndex,
return false;
}
return push(RefType::fromTypeIndex(*typeIndex, false));
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3311,13 +3324,14 @@ inline bool OpIter<Policy>::readArrayGet(uint32_t* typeIndex,
return false;
}
const ArrayType& arrayType = env_.types->arrayType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const ArrayType& arrayType = typeDef.arrayType();
if (!popWithType(ValType::I32, index)) {
return false;
}
if (!popWithType(RefType::fromTypeIndex(*typeIndex, true), ptr)) {
if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
return false;
}
@ -3343,7 +3357,8 @@ inline bool OpIter<Policy>::readArraySet(uint32_t* typeIndex, Value* val,
return false;
}
const ArrayType& arrayType = env_.types->arrayType(*typeIndex);
const TypeDef& typeDef = env_.types->type(*typeIndex);
const ArrayType& arrayType = typeDef.arrayType();
if (!arrayType.isMutable_) {
return fail("array is not mutable");
@ -3357,7 +3372,7 @@ inline bool OpIter<Policy>::readArraySet(uint32_t* typeIndex, Value* val,
return false;
}
if (!popWithType(RefType::fromTypeIndex(*typeIndex, true), ptr)) {
if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
return false;
}
@ -3372,7 +3387,8 @@ inline bool OpIter<Policy>::readArrayLen(uint32_t* typeIndex, Value* ptr) {
return false;
}
if (!popWithType(RefType::fromTypeIndex(*typeIndex, true), ptr)) {
const TypeDef& typeDef = env_.types->type(*typeIndex);
if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
return false;
}
@ -3401,8 +3417,10 @@ inline bool OpIter<Policy>::readArrayCopy(int32_t* elemSize,
// types. Reject if:
// * the dst array is not of mutable type
// * the element types are incompatible
const ArrayType& dstArrayType = env_.types->arrayType(dstTypeIndex);
const ArrayType& srcArrayType = env_.types->arrayType(srcTypeIndex);
const TypeDef& dstTypeDef = env_.types->type(dstTypeIndex);
const ArrayType& dstArrayType = dstTypeDef.arrayType();
const TypeDef& srcTypeDef = env_.types->type(srcTypeIndex);
const ArrayType& srcArrayType = srcTypeDef.arrayType();
FieldType dstElemType = dstArrayType.elementType_;
FieldType srcElemType = srcArrayType.elementType_;
if (!dstArrayType.isMutable_) {
@ -3426,13 +3444,13 @@ inline bool OpIter<Policy>::readArrayCopy(int32_t* elemSize,
if (!popWithType(ValType::I32, srcIndex)) {
return false;
}
if (!popWithType(RefType::fromTypeIndex(srcTypeIndex, true), srcArray)) {
if (!popWithType(RefType::fromTypeDef(&srcTypeDef, true), srcArray)) {
return false;
}
if (!popWithType(ValType::I32, dstIndex)) {
return false;
}
if (!popWithType(RefType::fromTypeIndex(dstTypeIndex, true), dstArray)) {
if (!popWithType(RefType::fromTypeDef(&dstTypeDef, true), dstArray)) {
return false;
}
@ -3466,7 +3484,8 @@ inline bool OpIter<Policy>::readRefCast(uint32_t* typeIndex, Value* ref) {
return false;
}
return push(RefType::fromTypeIndex(*typeIndex, false));
const TypeDef& typeDef = env_.types->type(*typeIndex);
return push(RefType::fromTypeDef(&typeDef, false));
}
template <typename Policy>
@ -3489,7 +3508,8 @@ inline bool OpIter<Policy>::readBrOnCast(uint32_t* relativeDepth,
// The casted to type is a non-nullable reference to the type index
// specified as an immediate.
ValType castedToType(RefType::fromTypeIndex(*typeIndex, false));
const TypeDef& typeDef = env_.types->type(*typeIndex);
ValType castedToType(RefType::fromTypeDef(&typeDef, false));
return checkCastedBranchValueAndPush(*relativeDepth, castedFromType,
castedToType, branchTargetType, values);

Просмотреть файл

@ -414,33 +414,55 @@ CoderResult CodeShareableBytes(Coder<mode>& coder,
// WasmValType.h
/* static */
SerializableTypeCode SerializableTypeCode::serialize(PackedTypeCode ptc,
const TypeContext& types) {
SerializableTypeCode stc = {};
stc.typeCode = PackedRepr(ptc.typeCode());
stc.typeIndex = ptc.typeDef() ? types.indexOf(*ptc.typeDef())
: SerializableTypeCode::NoTypeIndex;
stc.nullable = ptc.isNullable();
return stc;
}
PackedTypeCode SerializableTypeCode::deserialize(const TypeContext& types) {
if (typeIndex == SerializableTypeCode::NoTypeIndex) {
return PackedTypeCode::pack(TypeCode(typeCode), nullable);
}
const TypeDef* typeDef = &types.type(typeIndex);
return PackedTypeCode::pack(TypeCode(typeCode), typeDef, nullable);
}
template <CoderMode mode>
CoderResult CodeValType(Coder<mode>& coder, CoderArg<mode, ValType> item) {
CoderResult CodePackedTypeCode(Coder<mode>& coder,
CoderArg<mode, PackedTypeCode> item) {
if constexpr (mode == MODE_DECODE) {
return coder.readBytes((void*)item, sizeof(ValType));
SerializableTypeCode stc;
MOZ_TRY(CodePod(coder, &stc));
*item = stc.deserialize(*coder.types_);
return Ok();
} else if constexpr (mode == MODE_SIZE) {
return coder.writeBytes(nullptr, sizeof(SerializableTypeCode));
} else {
return coder.writeBytes((const void*)item, sizeof(ValType));
SerializableTypeCode stc =
SerializableTypeCode::serialize(*item, *coder.types_);
return CodePod(coder, &stc);
}
}
CoderResult CodeFieldType(Coder<MODE_DECODE>& coder, FieldType* item) {
return coder.readBytes((void*)item, sizeof(FieldType));
template <CoderMode mode>
CoderResult CodeValType(Coder<mode>& coder, CoderArg<mode, ValType> item) {
return CodePackedTypeCode(coder, item->addressOfPacked());
}
template <CoderMode mode>
CoderResult CodeFieldType(Coder<mode>& coder, const FieldType* item) {
STATIC_ASSERT_ENCODING_OR_SIZING;
return coder.writeBytes((const void*)item, sizeof(FieldType));
}
CoderResult CodeRefType(Coder<MODE_DECODE>& coder, RefType* item) {
return coder.readBytes((void*)item, sizeof(RefType));
CoderResult CodeFieldType(Coder<mode>& coder, CoderArg<mode, FieldType> item) {
return CodePackedTypeCode(coder, item->addressOfPacked());
}
template <CoderMode mode>
CoderResult CodeRefType(Coder<mode>& coder, const RefType* item) {
STATIC_ASSERT_ENCODING_OR_SIZING;
return coder.writeBytes((const void*)item, sizeof(RefType));
CoderResult CodeRefType(Coder<mode>& coder, CoderArg<mode, RefType> item) {
return CodePackedTypeCode(coder, item->addressOfPacked());
}
// WasmValue.h
@ -476,7 +498,7 @@ CoderResult CodeInitExpr(Coder<mode>& coder, CoderArg<mode, InitExpr> item) {
template <CoderMode mode>
CoderResult CodeFuncType(Coder<mode>& coder, CoderArg<mode, FuncType> item) {
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::FuncType, 208);
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::FuncType, 344);
MOZ_TRY((CodeVector<mode, ValType, &CodeValType<mode>>(coder, &item->args_)));
MOZ_TRY(
(CodeVector<mode, ValType, &CodeValType<mode>>(coder, &item->results_)));
@ -505,7 +527,7 @@ CoderResult CodeStructType(Coder<mode>& coder,
template <CoderMode mode>
CoderResult CodeArrayType(Coder<mode>& coder, CoderArg<mode, ArrayType> item) {
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::ArrayType, 48);
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::ArrayType, 16);
MOZ_TRY(CodeFieldType(coder, &item->elementType_));
MOZ_TRY(CodePod(coder, &item->isMutable_));
return Ok();
@ -513,7 +535,7 @@ CoderResult CodeArrayType(Coder<mode>& coder, CoderArg<mode, ArrayType> item) {
template <CoderMode mode>
CoderResult CodeTypeDef(Coder<mode>& coder, CoderArg<mode, TypeDef> item) {
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::TypeDef, 216);
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::TypeDef, 360);
// TypeDef is a tagged union that begins with kind = None. This implies that
// we must manually initialize the variant that we decode.
if constexpr (mode == MODE_DECODE) {
@ -551,6 +573,28 @@ CoderResult CodeTypeDef(Coder<mode>& coder, CoderArg<mode, TypeDef> item) {
return Ok();
}
template <CoderMode mode>
CoderResult CodeTypeContext(Coder<mode>& coder,
CoderArg<mode, TypeContext> item) {
// Subsequent decoding needs to reference type definitions
if constexpr (mode == MODE_DECODE) {
MOZ_ASSERT(!coder.types_);
coder.types_ = item;
}
size_t length = item->length();
MOZ_TRY(CodePod(coder, &length));
if constexpr (mode == MODE_DECODE) {
if (!item->addTypes(length)) {
return Err(OutOfMemory());
}
}
for (uint32_t typeIndex = 0; typeIndex < item->length(); typeIndex++) {
MOZ_TRY(CodeTypeDef(coder, &item->type(typeIndex)));
}
return Ok();
}
// WasmModuleTypes.h
template <CoderMode mode>
@ -586,7 +630,7 @@ CoderResult CodeGlobalDesc(Coder<mode>& coder,
template <CoderMode mode>
CoderResult CodeTagType(Coder<mode>& coder, CoderArg<mode, TagType> item) {
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::TagType, 168);
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::TagType, 232);
MOZ_TRY(
(CodeVector<mode, ValType, &CodeValType<mode>>(coder, &item->argTypes_)));
MOZ_TRY(CodePodVector(coder, &item->argOffsets_));
@ -640,7 +684,7 @@ CoderResult CodeCustomSection(Coder<mode>& coder,
template <CoderMode mode>
CoderResult CodeTableDesc(Coder<mode>& coder, CoderArg<mode, TableDesc> item) {
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::TableDesc, 48);
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::TableDesc, 32);
MOZ_TRY(CodeRefType(coder, &item->elemType));
MOZ_TRY(CodePod(coder, &item->isImportedOrExported));
MOZ_TRY(CodePod(coder, &item->isAsmJS));
@ -868,15 +912,17 @@ CoderResult CodeMetadataTier(Coder<mode>& coder,
template <CoderMode mode>
CoderResult CodeMetadata(Coder<mode>& coder,
CoderArg<mode, wasm::Metadata> item) {
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::Metadata, 464);
WASM_VERIFY_SERIALIZATION_FOR_SIZE(wasm::Metadata, 400);
if constexpr (mode == MODE_ENCODE) {
// Serialization doesn't handle asm.js or debug enabled modules
MOZ_ASSERT(!item->debugEnabled && item->debugFuncTypeIndices.empty());
MOZ_ASSERT(!item->isAsmJS());
}
MOZ_TRY(Magic(coder, Marker::Metadata));
MOZ_TRY(CodePod(coder, &item->pod()));
MOZ_TRY((CodeVector<mode, TypeDef, &CodeTypeDef<mode>>(coder, &item->types)));
MOZ_TRY((CodeRefPtr<mode, const TypeContext, &CodeTypeContext>(
coder, &item->types)));
MOZ_TRY((CodePod(coder, &item->typeIdsOffsetStart)));
MOZ_TRY((CodeVector<mode, GlobalDesc, &CodeGlobalDesc<mode>>(
coder, &item->globals)));
@ -889,6 +935,7 @@ CoderResult CodeMetadata(Coder<mode>& coder,
MOZ_TRY(CodeCacheableChars(coder, &item->sourceMapURL));
if constexpr (mode == MODE_DECODE) {
// Initialize debugging state to disabled
item->debugEnabled = false;
item->debugFuncTypeIndices.clear();
}
@ -1075,7 +1122,7 @@ CoderResult CodeModule(Coder<mode>& coder, CoderArg<mode, Module> item,
static bool GetSerializedSize(const Module& module, const LinkData& linkData,
size_t* size) {
Coder<MODE_SIZE> coder;
Coder<MODE_SIZE> coder(module.metadata().types.get());
auto result = CodeModule(coder, &module, linkData);
if (result.isErr()) {
return false;
@ -1099,7 +1146,8 @@ bool Module::serialize(const LinkData& linkData, Bytes* bytes) const {
return false;
}
Coder<MODE_ENCODE> coder(bytes->begin(), serializedSize);
Coder<MODE_ENCODE> coder(metadata().types.get(), bytes->begin(),
serializedSize);
CoderResult result = CodeModule(coder, this, linkData);
if (result.isErr()) {
// An error is an OOM, return false
@ -1129,7 +1177,7 @@ void Module::initGCMallocBytesExcludingCode() {
// calculate a value. We consume all errors, as they can only be overflow and
// can be ignored until the end.
constexpr CoderMode MODE = MODE_SIZE;
Coder<MODE> coder;
Coder<MODE> coder(metadata().types.get());
(void)CodeVector<MODE, Import, &CodeImport<MODE>>(coder, &imports_);
(void)CodeVector<MODE, Export, &CodeExport<MODE>>(coder, &exports_);
(void)CodeVector<MODE, SharedDataSegment,

Просмотреть файл

@ -31,6 +31,8 @@
namespace js {
namespace wasm {
class TypeContext;
// [SMDOC] "Module serialization"
//
// A wasm::Module may be serialized to a binary format that allows for quick
@ -129,8 +131,13 @@ struct Coder;
// A Coder<MODE_SIZE> computes the total encoded size of a module
template <>
struct Coder<MODE_SIZE> {
Coder() : size_(0) {}
explicit Coder(const TypeContext* types) : types_(types), size_(0) {}
// The types of the module that we're going to encode. This is required in
// order to encode the original index of types that we encounter.
const TypeContext* types_;
// The current size of buffer required to serialize this module.
mozilla::CheckedInt<size_t> size_;
// This function shares a signature with MODE_ENCODE to allow functions to be
@ -142,9 +149,16 @@ struct Coder<MODE_SIZE> {
// A Coder<MODE_ENCODE> holds the buffer being written to
template <>
struct Coder<MODE_ENCODE> {
Coder(uint8_t* start, size_t length) : buffer_(start), end_(start + length) {}
Coder(const TypeContext* types, uint8_t* start, size_t length)
: types_(types), buffer_(start), end_(start + length) {}
// The types of the module that we're encoding. This is required in
// order to encode the original index of types that we encounter.
const TypeContext* types_;
// The current position in the buffer we're writing to.
uint8_t* buffer_;
// The end position in the buffer we're writing to.
const uint8_t* end_;
CoderResult writeBytes(const void* src, size_t length);
@ -154,9 +168,15 @@ struct Coder<MODE_ENCODE> {
template <>
struct Coder<MODE_DECODE> {
Coder(const uint8_t* start, size_t length)
: buffer_(start), end_(start + length) {}
: types_(nullptr), buffer_(start), end_(start + length) {}
// The types of the module that we're decoding. This is null until the types
// of this module are decoded.
const TypeContext* types_;
// The current position in the buffer we're reading from.
const uint8_t* buffer_;
// The end position in the buffer we're reading from.
const uint8_t* end_;
CoderResult readBytes(void* dest, size_t length);

Просмотреть файл

@ -1182,7 +1182,7 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
}
case RefType::Func:
case RefType::Eq:
case RefType::TypeIndex: {
case RefType::TypeRef: {
// Guarded against by temporarilyUnsupportedReftypeForEntry()
MOZ_CRASH("unexpected argument type when calling from the jit");
}
@ -1362,7 +1362,7 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
UnboxAnyrefIntoValueReg(masm, InstanceReg, ReturnReg,
JSReturnOperand, WasmJitEntryReturnScratch);
break;
case RefType::TypeIndex:
case RefType::TypeRef:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
}
break;
@ -1646,7 +1646,7 @@ void wasm::GenerateDirectCallFromJit(MacroAssembler& masm, const FuncExport& fe,
UnboxAnyrefIntoValueReg(masm, InstanceReg, ReturnReg,
JSReturnOperand, WasmJitEntryReturnScratch);
break;
case wasm::RefType::TypeIndex:
case wasm::RefType::TypeRef:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
}
break;
@ -2211,7 +2211,7 @@ static bool GenerateImportInterpExit(MacroAssembler& masm, const FuncImport& fi,
funcImportIndex);
GenPrintPtr(DebugChannel::Import, masm, ReturnReg);
break;
case RefType::TypeIndex:
case RefType::TypeRef:
MOZ_CRASH("No Ref support here yet");
}
break;
@ -2411,7 +2411,7 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
break;
case RefType::Func:
case RefType::Eq:
case RefType::TypeIndex:
case RefType::TypeRef:
MOZ_CRASH("typed reference returned by import (jit exit) NYI");
}
break;
@ -2516,7 +2516,7 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
break;
case RefType::Func:
case RefType::Eq:
case RefType::TypeIndex:
case RefType::TypeRef:
MOZ_CRASH("Unsupported convert type");
}
break;

Просмотреть файл

@ -56,7 +56,7 @@ static bool IsImmediateValType(ValType vt) {
case RefType::Extern:
case RefType::Eq:
return true;
case RefType::TypeIndex:
case RefType::TypeRef:
return false;
}
break;
@ -85,7 +85,7 @@ static unsigned EncodeImmediateValType(ValType vt) {
return 6;
case RefType::Eq:
return 7;
case RefType::TypeIndex:
case RefType::TypeRef:
break;
}
break;
@ -284,21 +284,20 @@ TypeResult TypeContext::isRefEquivalent(RefType first, RefType second,
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
if (features_.functionReferences) {
// second references must have the same nullability to be equal
// References must have the same nullability to be equal
if (first.isNullable() != second.isNullable()) {
return TypeResult::False;
}
// Non type-index references are equal if they have the same kind
if (!first.isTypeIndex() && !second.isTypeIndex() &&
if (!first.isTypeRef() && !second.isTypeRef() &&
first.kind() == second.kind()) {
return TypeResult::True;
}
// Type-index references can be equal
if (first.isTypeIndex() && second.isTypeIndex()) {
return isTypeIndexEquivalent(first.typeIndex(), second.typeIndex(),
cache);
if (first.isTypeRef() && second.isTypeRef()) {
return isTypeDefEquivalent(first.typeDef(), second.typeDef(), cache);
}
}
#endif
@ -306,26 +305,26 @@ TypeResult TypeContext::isRefEquivalent(RefType first, RefType second,
}
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
TypeResult TypeContext::isTypeIndexEquivalent(uint32_t firstIndex,
uint32_t secondIndex,
TypeCache* cache) const {
TypeResult TypeContext::isTypeDefEquivalent(const TypeDef* first,
const TypeDef* second,
TypeCache* cache) const {
MOZ_ASSERT(features_.functionReferences);
// Anything's equal to itself.
if (firstIndex == secondIndex) {
if (first == second) {
return TypeResult::True;
}
# ifdef ENABLE_WASM_GC
if (features_.gc) {
// A struct may be equal to a struct
if (isStructType(firstIndex) && isStructType(secondIndex)) {
return isStructEquivalent(firstIndex, secondIndex, cache);
if (first->isStructType() && second->isStructType()) {
return isStructEquivalent(first, second, cache);
}
// An array may be equal to an array
if (isArrayType(firstIndex) && isArrayType(secondIndex)) {
return isArrayEquivalent(firstIndex, secondIndex, cache);
if (first->isArrayType() && second->isArrayType()) {
return isArrayEquivalent(first, second, cache);
}
}
# endif
@ -335,32 +334,32 @@ TypeResult TypeContext::isTypeIndexEquivalent(uint32_t firstIndex,
#endif
#ifdef ENABLE_WASM_GC
TypeResult TypeContext::isStructEquivalent(uint32_t firstIndex,
uint32_t secondIndex,
TypeResult TypeContext::isStructEquivalent(const TypeDef* first,
const TypeDef* second,
TypeCache* cache) const {
if (cache->isEquivalent(firstIndex, secondIndex)) {
if (cache->isEquivalent(first, second)) {
return TypeResult::True;
}
const StructType& subType = structType(firstIndex);
const StructType& superType = structType(secondIndex);
const StructType& firstStruct = first->structType();
const StructType& secondStruct = second->structType();
// Structs must have the same number of fields to be equal
if (subType.fields_.length() != superType.fields_.length()) {
if (firstStruct.fields_.length() != secondStruct.fields_.length()) {
return TypeResult::False;
}
// Assume these structs are equal while checking fields. If any field is
// not equal then we remove the assumption.
if (!cache->markEquivalent(firstIndex, secondIndex)) {
if (!cache->markEquivalent(first, second)) {
return TypeResult::OOM;
}
for (uint32_t i = 0; i < superType.fields_.length(); i++) {
TypeResult result = isStructFieldEquivalent(subType.fields_[i],
superType.fields_[i], cache);
for (uint32_t i = 0; i < secondStruct.fields_.length(); i++) {
TypeResult result = isStructFieldEquivalent(firstStruct.fields_[i],
secondStruct.fields_[i], cache);
if (result != TypeResult::True) {
cache->unmarkEquivalent(firstIndex, secondIndex);
cache->unmarkEquivalent(first, second);
return result;
}
}
@ -378,25 +377,25 @@ TypeResult TypeContext::isStructFieldEquivalent(const StructField first,
return isEquivalent(first.type, second.type, cache);
}
TypeResult TypeContext::isArrayEquivalent(uint32_t firstIndex,
uint32_t secondIndex,
TypeResult TypeContext::isArrayEquivalent(const TypeDef* firstDef,
const TypeDef* secondDef,
TypeCache* cache) const {
if (cache->isEquivalent(firstIndex, secondIndex)) {
if (cache->isEquivalent(firstDef, secondDef)) {
return TypeResult::True;
}
const ArrayType& subType = arrayType(firstIndex);
const ArrayType& superType = arrayType(secondIndex);
const ArrayType& firstArray = firstDef->arrayType();
const ArrayType& secondArray = secondDef->arrayType();
// Assume these arrays are equal while checking fields. If the array
// element is not equal then we remove the assumption.
if (!cache->markEquivalent(firstIndex, secondIndex)) {
if (!cache->markEquivalent(firstDef, secondDef)) {
return TypeResult::OOM;
}
TypeResult result = isArrayElementEquivalent(subType, superType, cache);
TypeResult result = isArrayElementEquivalent(firstArray, secondArray, cache);
if (result != TypeResult::True) {
cache->unmarkEquivalent(firstIndex, secondIndex);
cache->unmarkEquivalent(firstDef, secondDef);
}
return result;
}
@ -430,31 +429,32 @@ TypeResult TypeContext::isRefSubtypeOf(RefType subType, RefType superType,
}
// Non type-index references are subtypes if they have the same kind
if (!subType.isTypeIndex() && !superType.isTypeIndex() &&
if (!subType.isTypeRef() && !superType.isTypeRef() &&
subType.kind() == superType.kind()) {
return TypeResult::True;
}
// Structs are subtypes of eqref
if (isStructType(subType) && superType.isEq()) {
if (subType.isTypeRef() && subType.typeDef()->isStructType() &&
superType.isEq()) {
return TypeResult::True;
}
// Arrays are subtypes of eqref
if (isArrayType(subType) && superType.isEq()) {
if (subType.isTypeRef() && subType.typeDef()->isArrayType() &&
superType.isEq()) {
return TypeResult::True;
}
// The ref T <: funcref when T = func-type rule
if (subType.isTypeIndex() && types_[subType.typeIndex()].isFuncType() &&
// Funcs are subtypes of funcref
if (subType.isTypeRef() && subType.typeDef()->isFuncType() &&
superType.isFunc()) {
return TypeResult::True;
}
// Type-index references can be subtypes
if (subType.isTypeIndex() && superType.isTypeIndex()) {
return isTypeIndexSubtypeOf(subType.typeIndex(), superType.typeIndex(),
cache);
if (subType.isTypeRef() && superType.isTypeRef()) {
return isTypeDefSubtypeOf(subType.typeDef(), superType.typeDef(), cache);
}
}
#endif
@ -462,9 +462,9 @@ TypeResult TypeContext::isRefSubtypeOf(RefType subType, RefType superType,
}
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
TypeResult TypeContext::isTypeIndexSubtypeOf(uint32_t subType,
uint32_t superType,
TypeCache* cache) const {
TypeResult TypeContext::isTypeDefSubtypeOf(const TypeDef* subType,
const TypeDef* superType,
TypeCache* cache) const {
MOZ_ASSERT(features_.functionReferences);
// Anything's a subtype of itself.
@ -475,12 +475,12 @@ TypeResult TypeContext::isTypeIndexSubtypeOf(uint32_t subType,
# ifdef ENABLE_WASM_GC
if (features_.gc) {
// Structs may be subtypes of structs
if (isStructType(subType) && isStructType(superType)) {
if (subType->isStructType() && superType->isStructType()) {
return isStructSubtypeOf(subType, superType, cache);
}
// Arrays may be subtypes of arrays
if (isArrayType(subType) && isArrayType(superType)) {
if (subType->isArrayType() && superType->isArrayType()) {
return isArraySubtypeOf(subType, superType, cache);
}
}
@ -490,32 +490,32 @@ TypeResult TypeContext::isTypeIndexSubtypeOf(uint32_t subType,
#endif
#ifdef ENABLE_WASM_GC
TypeResult TypeContext::isStructSubtypeOf(uint32_t subTypeIndex,
uint32_t superTypeIndex,
TypeResult TypeContext::isStructSubtypeOf(const TypeDef* subType,
const TypeDef* superType,
TypeCache* cache) const {
if (cache->isSubtypeOf(subTypeIndex, superTypeIndex)) {
if (cache->isSubtypeOf(subType, superType)) {
return TypeResult::True;
}
const StructType& subType = structType(subTypeIndex);
const StructType& superType = structType(superTypeIndex);
const StructType& subStruct = subType->structType();
const StructType& superStruct = superType->structType();
// A subtype must have at least as many fields as its supertype
if (subType.fields_.length() < superType.fields_.length()) {
if (subStruct.fields_.length() < superStruct.fields_.length()) {
return TypeResult::False;
}
// Assume these structs are subtypes while checking fields. If any field
// fails a check then we remove the assumption.
if (!cache->markSubtypeOf(subTypeIndex, superTypeIndex)) {
if (!cache->markSubtypeOf(subType, superType)) {
return TypeResult::OOM;
}
for (uint32_t i = 0; i < superType.fields_.length(); i++) {
TypeResult result =
isStructFieldSubtypeOf(subType.fields_[i], superType.fields_[i], cache);
for (uint32_t i = 0; i < superStruct.fields_.length(); i++) {
TypeResult result = isStructFieldSubtypeOf(subStruct.fields_[i],
superStruct.fields_[i], cache);
if (result != TypeResult::True) {
cache->unmarkSubtypeOf(subTypeIndex, superTypeIndex);
cache->unmarkSubtypeOf(subType, superType);
return result;
}
}
@ -536,25 +536,25 @@ TypeResult TypeContext::isStructFieldSubtypeOf(const StructField subType,
return TypeResult::False;
}
TypeResult TypeContext::isArraySubtypeOf(uint32_t subTypeIndex,
uint32_t superTypeIndex,
TypeResult TypeContext::isArraySubtypeOf(const TypeDef* subType,
const TypeDef* superType,
TypeCache* cache) const {
if (cache->isSubtypeOf(subTypeIndex, superTypeIndex)) {
if (cache->isSubtypeOf(subType, superType)) {
return TypeResult::True;
}
const ArrayType& subType = arrayType(subTypeIndex);
const ArrayType& superType = arrayType(superTypeIndex);
const ArrayType& subArray = subType->arrayType();
const ArrayType& superArray = superType->arrayType();
// Assume these arrays are subtypes while checking elements. If the elements
// fail the check then we remove the assumption.
if (!cache->markSubtypeOf(subTypeIndex, superTypeIndex)) {
if (!cache->markSubtypeOf(subType, superType)) {
return TypeResult::OOM;
}
TypeResult result = isArrayElementSubtypeOf(subType, superType, cache);
TypeResult result = isArrayElementSubtypeOf(subArray, superArray, cache);
if (result != TypeResult::True) {
cache->unmarkSubtypeOf(subTypeIndex, superTypeIndex);
cache->unmarkSubtypeOf(subType, superType);
}
return result;
}

Просмотреть файл

@ -20,6 +20,9 @@
#define wasm_type_def_h
#include "mozilla/CheckedInt.h"
#include "mozilla/HashTable.h"
#include "js/RefCounted.h"
#include "wasm/WasmCodegenConstants.h"
#include "wasm/WasmCompileArgs.h"
@ -78,7 +81,7 @@ class FuncType {
}
}
for (ValType result : results()) {
if (result.isTypeIndex()) {
if (result.isTypeRef()) {
return true;
}
}
@ -180,12 +183,12 @@ class FuncType {
#ifdef WASM_PRIVATE_REFTYPES
bool exposesTypeIndex() const {
for (const ValType& arg : args()) {
if (arg.isTypeIndex()) {
if (arg.isTypeRef()) {
return true;
}
}
for (const ValType& result : results()) {
if (result.isTypeIndex()) {
if (result.isTypeRef()) {
return true;
}
}
@ -328,7 +331,7 @@ enum class TypeDefKind : uint8_t {
Array,
};
class TypeDef {
class TypeDef : public AtomicRefCounted<TypeDef> {
TypeDefKind kind_;
union {
FuncType funcType_;
@ -380,43 +383,25 @@ class TypeDef {
}
}
TypeDef& operator=(TypeDef&& that) noexcept {
TypeDef& operator=(FuncType&& that) noexcept {
MOZ_ASSERT(isNone());
switch (that.kind_) {
case TypeDefKind::Func:
new (&funcType_) FuncType(std::move(that.funcType_));
break;
case TypeDefKind::Struct:
new (&structType_) StructType(std::move(that.structType_));
break;
case TypeDefKind::Array:
new (&arrayType_) ArrayType(std::move(that.arrayType_));
break;
case TypeDefKind::None:
break;
}
kind_ = that.kind_;
kind_ = TypeDefKind::Func;
new (&funcType_) FuncType(std::move(that));
return *this;
}
[[nodiscard]] bool clone(const TypeDef& src) {
TypeDef& operator=(StructType&& that) noexcept {
MOZ_ASSERT(isNone());
kind_ = src.kind_;
switch (src.kind_) {
case TypeDefKind::Func:
new (&funcType_) FuncType();
return funcType_.clone(src.funcType());
case TypeDefKind::Struct:
new (&structType_) StructType();
return structType_.clone(src.structType());
case TypeDefKind::Array:
new (&arrayType_) ArrayType(src.arrayType());
return true;
case TypeDefKind::None:
break;
}
MOZ_ASSERT_UNREACHABLE();
return false;
kind_ = TypeDefKind::Struct;
new (&structType_) StructType(std::move(that));
return *this;
}
TypeDef& operator=(ArrayType&& that) noexcept {
MOZ_ASSERT(isNone());
kind_ = TypeDefKind::Array;
new (&arrayType_) ArrayType(std::move(that));
return *this;
}
TypeDefKind kind() const { return kind_; }
@ -463,7 +448,15 @@ class TypeDef {
WASM_DECLARE_FRIEND_SERIALIZE(TypeDef);
};
using SharedTypeDef = RefPtr<const TypeDef>;
using MutableTypeDef = RefPtr<TypeDef>;
using TypeDefVector = Vector<TypeDef, 0, SystemAllocPolicy>;
using MutableTypeDefVector = Vector<MutableTypeDef, 0, SystemAllocPolicy>;
using TypeDefToModuleIndexMap =
HashMap<const TypeDef*, uint32_t, PointerHasher<const TypeDef*>,
SystemAllocPolicy>;
// A type cache maintains a cache of equivalence and subtype relations between
// wasm types. This is required for the computation of equivalence and subtyping
@ -473,22 +466,42 @@ using TypeDefVector = Vector<TypeDef, 0, SystemAllocPolicy>;
// which may be shared between multiple threads.
class TypeCache {
using TypeIndex = uint32_t;
using TypePair = uint64_t;
using TypeSet = HashSet<TypePair, DefaultHasher<TypePair>, SystemAllocPolicy>;
struct TypePair {
const TypeDef* first;
const TypeDef* second;
// Generates a hash key for the ordered pair (a, b).
static constexpr TypePair makeOrderedPair(TypeIndex a, TypeIndex b) {
return (TypePair(a) << 32) | TypePair(b);
}
constexpr TypePair(const TypeDef* first, const TypeDef* second)
: first(first), second(second) {}
// Generates a hash key for the unordered pair (a, b).
static constexpr TypePair makeUnorderedPair(TypeIndex a, TypeIndex b) {
if (a < b) {
return (TypePair(a) << 32) | TypePair(b);
// Generates a hash key for the ordered pair (a, b).
static constexpr TypePair ordered(const TypeDef* a, const TypeDef* b) {
return TypePair(a, b);
}
return (TypePair(b) << 32) | TypePair(a);
}
// Generates a hash key for the unordered pair (a, b).
static constexpr TypePair unordered(const TypeDef* a, const TypeDef* b) {
if (a < b) {
return TypePair(a, b);
}
return TypePair(b, a);
}
HashNumber hash() const {
HashNumber hn = 0;
hn = mozilla::AddToHash(hn, first);
hn = mozilla::AddToHash(hn, second);
return hn;
}
bool operator==(const TypePair& rhs) const {
return first == rhs.first && second == rhs.second;
}
};
struct TypePairHashPolicy {
using Lookup = const TypePair&;
static HashNumber hash(Lookup pair) { return pair.hash(); }
static bool match(const TypePair& lhs, Lookup rhs) { return lhs == rhs; }
};
using TypeSet = HashSet<TypePair, TypePairHashPolicy, SystemAllocPolicy>;
TypeSet equivalence_;
TypeSet subtype_;
@ -497,30 +510,30 @@ class TypeCache {
TypeCache() = default;
// Mark `a` as equivalent to `b` in the equivalence cache.
[[nodiscard]] bool markEquivalent(TypeIndex a, TypeIndex b) {
return equivalence_.put(makeUnorderedPair(a, b));
[[nodiscard]] bool markEquivalent(const TypeDef* a, const TypeDef* b) {
return equivalence_.put(TypePair::unordered(a, b));
}
// Unmark `a` as equivalent to `b` in the equivalence cache
void unmarkEquivalent(TypeIndex a, TypeIndex b) {
equivalence_.remove(makeUnorderedPair(a, b));
void unmarkEquivalent(const TypeDef* a, const TypeDef* b) {
equivalence_.remove(TypePair::unordered(a, b));
}
// Check if `a` is equivalent to `b` in the equivalence cache
bool isEquivalent(TypeIndex a, TypeIndex b) {
return equivalence_.has(makeUnorderedPair(a, b));
bool isEquivalent(const TypeDef* a, const TypeDef* b) {
return equivalence_.has(TypePair::unordered(a, b));
}
// Mark `a` as a subtype of `b` in the subtype cache
[[nodiscard]] bool markSubtypeOf(TypeIndex a, TypeIndex b) {
return subtype_.put(makeOrderedPair(a, b));
[[nodiscard]] bool markSubtypeOf(const TypeDef* a, const TypeDef* b) {
return subtype_.put(TypePair::ordered(a, b));
}
// Unmark `a` as a subtype of `b` in the subtype cache
void unmarkSubtypeOf(TypeIndex a, TypeIndex b) {
subtype_.remove(makeOrderedPair(a, b));
void unmarkSubtypeOf(const TypeDef* a, const TypeDef* b) {
subtype_.remove(TypePair::ordered(a, b));
}
// Check if `a` is a subtype of `b` in the subtype cache
bool isSubtypeOf(TypeIndex a, TypeIndex b) {
return subtype_.has(makeOrderedPair(a, b));
bool isSubtypeOf(const TypeDef* a, const TypeDef* b) {
return subtype_.has(TypePair::ordered(a, b));
}
};
@ -537,28 +550,16 @@ enum class TypeResult {
class TypeContext : public AtomicRefCounted<TypeContext> {
FeatureArgs features_;
TypeDefVector types_;
MutableTypeDefVector types_;
TypeDefToModuleIndexMap moduleIndices_;
public:
TypeContext() = default;
TypeContext(const FeatureArgs& features, TypeDefVector&& types)
: features_(features), types_(std::move(types)) {}
[[nodiscard]] bool clone(const TypeDefVector& source) {
MOZ_ASSERT(types_.length() == 0);
if (!types_.resize(source.length())) {
return false;
}
for (uint32_t i = 0; i < source.length(); i++) {
if (!types_[i].clone(source[i])) {
return false;
}
}
return true;
}
explicit TypeContext(const FeatureArgs& features) : features_(features) {}
size_t sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
return types_.sizeOfExcludingThis(mallocSizeOf);
return types_.sizeOfExcludingThis(mallocSizeOf) +
moduleIndices_.shallowSizeOfExcludingThis(mallocSizeOf);
}
// Disallow copy, allow move initialization
@ -567,74 +568,39 @@ class TypeContext : public AtomicRefCounted<TypeContext> {
TypeContext(TypeContext&&) = delete;
TypeContext& operator=(TypeContext&&) = delete;
TypeDef& type(uint32_t index) { return types_[index]; }
const TypeDef& type(uint32_t index) const { return types_[index]; }
[[nodiscard]] MutableTypeDef addType() {
MutableTypeDef typeDef = js_new<TypeDef>();
if (!typeDef || !types_.append(typeDef) ||
!moduleIndices_.put(typeDef.get(), types_.length())) {
return nullptr;
}
return typeDef;
}
TypeDef& operator[](uint32_t index) { return types_[index]; }
const TypeDef& operator[](uint32_t index) const { return types_[index]; }
[[nodiscard]] bool addTypes(uint32_t length) {
for (uint32_t typeIndex = 0; typeIndex < length; typeIndex++) {
if (!addType()) {
return false;
}
}
return true;
}
TypeDef& type(uint32_t index) { return *types_[index]; }
const TypeDef& type(uint32_t index) const { return *types_[index]; }
TypeDef& operator[](uint32_t index) { return *types_[index]; }
const TypeDef& operator[](uint32_t index) const { return *types_[index]; }
bool empty() const { return types_.empty(); }
uint32_t length() const { return types_.length(); }
template <typename U>
[[nodiscard]] bool append(U&& typeDef) {
return types_.append(std::forward<U>(typeDef));
}
[[nodiscard]] bool resize(uint32_t length) { return types_.resize(length); }
// Map from type definition to index
// // Map from type definition to index
uint32_t indexOf(const TypeDef& typeDef) const {
const TypeDef* elem = &typeDef;
MOZ_ASSERT(elem >= types_.begin() && elem < types_.end());
return elem - types_.begin();
}
// FuncType accessors
bool isFuncType(uint32_t index) const { return types_[index].isFuncType(); }
bool isFuncType(RefType t) const {
return t.isTypeIndex() && isFuncType(t.typeIndex());
}
FuncType& funcType(uint32_t index) { return types_[index].funcType(); }
const FuncType& funcType(uint32_t index) const {
return types_[index].funcType();
}
FuncType& funcType(RefType t) { return funcType(t.typeIndex()); }
const FuncType& funcType(RefType t) const { return funcType(t.typeIndex()); }
// StructType accessors
bool isStructType(uint32_t index) const {
return types_[index].isStructType();
}
bool isStructType(RefType t) const {
return t.isTypeIndex() && isStructType(t.typeIndex());
}
StructType& structType(uint32_t index) { return types_[index].structType(); }
const StructType& structType(uint32_t index) const {
return types_[index].structType();
}
StructType& structType(RefType t) { return structType(t.typeIndex()); }
const StructType& structType(RefType t) const {
return structType(t.typeIndex());
}
// StructType accessors
bool isArrayType(uint32_t index) const { return types_[index].isArrayType(); }
bool isArrayType(RefType t) const {
return t.isTypeIndex() && isArrayType(t.typeIndex());
}
ArrayType& arrayType(uint32_t index) { return types_[index].arrayType(); }
const ArrayType& arrayType(uint32_t index) const {
return types_[index].arrayType();
}
ArrayType& arrayType(RefType t) { return arrayType(t.typeIndex()); }
const ArrayType& arrayType(RefType t) const {
return arrayType(t.typeIndex());
auto moduleIndex = moduleIndices_.readonlyThreadsafeLookup(&typeDef);
MOZ_RELEASE_ASSERT(moduleIndex.found());
return moduleIndex->value();
}
// Type equivalence
@ -657,16 +623,16 @@ class TypeContext : public AtomicRefCounted<TypeContext> {
TypeResult isRefEquivalent(RefType first, RefType second,
TypeCache* cache) const;
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
TypeResult isTypeIndexEquivalent(uint32_t firstIndex, uint32_t secondIndex,
TypeCache* cache) const;
TypeResult isTypeDefEquivalent(const TypeDef* first, const TypeDef* second,
TypeCache* cache) const;
#endif
#ifdef ENABLE_WASM_GC
TypeResult isStructEquivalent(uint32_t firstIndex, uint32_t secondIndex,
TypeResult isStructEquivalent(const TypeDef* first, const TypeDef* second,
TypeCache* cache) const;
TypeResult isStructFieldEquivalent(const StructField first,
const StructField second,
TypeCache* cache) const;
TypeResult isArrayEquivalent(uint32_t firstIndex, uint32_t secondIndex,
TypeResult isArrayEquivalent(const TypeDef* first, const TypeDef* second,
TypeCache* cache) const;
TypeResult isArrayElementEquivalent(const ArrayType& first,
const ArrayType& second,
@ -693,18 +659,18 @@ class TypeContext : public AtomicRefCounted<TypeContext> {
TypeResult isRefSubtypeOf(RefType subType, RefType superType,
TypeCache* cache) const;
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
TypeResult isTypeIndexSubtypeOf(uint32_t subTypeIndex,
uint32_t superTypeIndex,
TypeCache* cache) const;
TypeResult isTypeDefSubtypeOf(const TypeDef* subType,
const TypeDef* superType,
TypeCache* cache) const;
#endif
#ifdef ENABLE_WASM_GC
TypeResult isStructSubtypeOf(uint32_t subTypeIndex, uint32_t superTypeIndex,
TypeResult isStructSubtypeOf(const TypeDef* subType, const TypeDef* superType,
TypeCache* cache) const;
TypeResult isStructFieldSubtypeOf(const StructField subType,
const StructField superType,
TypeCache* cache) const;
TypeResult isArraySubtypeOf(uint32_t subTypeIndex, uint32_t superTypeIndex,
TypeResult isArraySubtypeOf(const TypeDef* subType, const TypeDef* superType,
TypeCache* cache) const;
TypeResult isArrayElementSubtypeOf(const ArrayType& subType,
const ArrayType& superType,

Просмотреть файл

@ -87,9 +87,9 @@ bool wasm::ToRefType(JSContext* cx, JSLinearString* typeLinearStr,
return true;
}
UniqueChars wasm::ToString(RefType type) {
UniqueChars wasm::ToString(RefType type, const TypeContext* types) {
// Try to emit a shorthand version first
if (type.isNullable() && !type.isTypeIndex()) {
if (type.isNullable() && !type.isTypeRef()) {
const char* literal = nullptr;
switch (type.kind()) {
case RefType::Func:
@ -101,8 +101,11 @@ UniqueChars wasm::ToString(RefType type) {
case RefType::Eq:
literal = "eqref";
break;
case RefType::TypeIndex:
MOZ_ASSERT_UNREACHABLE();
case RefType::TypeRef: {
uint32_t typeIndex = types->indexOf(*type.typeDef());
return JS_smprintf("(ref %s%d)", type.isNullable() ? "null " : "",
typeIndex);
}
}
return DuplicateString(literal);
}
@ -119,16 +122,20 @@ UniqueChars wasm::ToString(RefType type) {
case RefType::Eq:
heapType = "eq";
break;
case RefType::TypeIndex:
case RefType::TypeRef: {
uint32_t typeIndex = types->indexOf(*type.typeDef());
return JS_smprintf("(ref %s%d)", type.isNullable() ? "null " : "",
type.typeIndex());
typeIndex);
}
}
return JS_smprintf("(ref %s%s)", type.isNullable() ? "null " : "", heapType);
}
UniqueChars wasm::ToString(ValType type) { return ToString(type.fieldType()); }
UniqueChars wasm::ToString(ValType type, const TypeContext* types) {
return ToString(type.fieldType(), types);
}
UniqueChars wasm::ToString(FieldType type) {
UniqueChars wasm::ToString(FieldType type, const TypeContext* types) {
const char* literal = nullptr;
switch (type.kind()) {
case FieldType::I8:
@ -153,11 +160,12 @@ UniqueChars wasm::ToString(FieldType type) {
literal = "f64";
break;
case FieldType::Ref:
return ToString(type.refType());
return ToString(type.refType(), types);
}
return DuplicateString(literal);
}
UniqueChars wasm::ToString(const Maybe<ValType>& type) {
return type ? ToString(type.ref()) : JS_smprintf("%s", "void");
UniqueChars wasm::ToString(const Maybe<ValType>& type,
const TypeContext* types) {
return type ? ToString(type.ref(), types) : JS_smprintf("%s", "void");
}

Просмотреть файл

@ -33,33 +33,40 @@ namespace wasm {
using mozilla::Maybe;
// A PackedTypeCode represents any value type in an compact POD format.
class TypeDef;
class TypeContext;
// A PackedTypeCode represents any value type.
union PackedTypeCode {
public:
using PackedRepr = uint32_t;
using PackedRepr = uint64_t;
private:
static constexpr size_t TypeCodeBits = 8;
static constexpr size_t TypeIndexBits = 20;
static constexpr size_t NullableBits = 1;
static constexpr size_t TypeCodeBits = 8;
static constexpr size_t TypeDefBits = 48;
static constexpr size_t PointerTagBits = 2;
static_assert(TypeCodeBits + TypeIndexBits + NullableBits + PointerTagBits <=
static_assert(NullableBits + TypeCodeBits + TypeDefBits + PointerTagBits <=
(sizeof(PackedRepr) * 8),
"enough bits");
static_assert(MaxTypes < (1 << TypeIndexBits), "enough bits");
PackedRepr bits_;
struct {
PackedRepr typeCode_ : TypeCodeBits;
PackedRepr typeIndex_ : TypeIndexBits;
PackedRepr nullable_ : NullableBits;
PackedRepr typeCode_ : TypeCodeBits;
// A pointer to the TypeDef this type references. We use 48-bits for this,
// and rely on system memory allocators not allocating outside of this
// range. This is also assumed by JS::Value, and so should be safe here.
PackedRepr typeDef_ : TypeDefBits;
// Reserve the bottom two bits for use as a tagging scheme for BlockType
// and ResultType, which can encode a ValType inside themselves in special
// cases.
PackedRepr pointerTag_ : PointerTagBits;
};
public:
static constexpr uint32_t NoTypeCode = (1 << TypeCodeBits) - 1;
static constexpr uint32_t NoTypeIndex = (1 << TypeIndexBits) - 1;
static constexpr PackedRepr NoTypeCode = ((uint64_t)1 << TypeCodeBits) - 1;
static PackedTypeCode invalid() {
PackedTypeCode ptc = {};
@ -73,27 +80,26 @@ union PackedTypeCode {
return ptc;
}
static constexpr PackedTypeCode pack(TypeCode tc, uint32_t refTypeIndex,
bool isNullable) {
static PackedTypeCode pack(TypeCode tc, const TypeDef* typeDef,
bool isNullable) {
MOZ_ASSERT(uint32_t(tc) <= ((1 << TypeCodeBits) - 1));
MOZ_ASSERT_IF(tc != AbstractReferenceTypeIndexCode,
refTypeIndex == NoTypeIndex);
MOZ_ASSERT_IF(tc == AbstractReferenceTypeIndexCode,
refTypeIndex <= MaxTypeIndex);
MOZ_ASSERT_IF(tc != AbstractTypeRefCode, typeDef == nullptr);
MOZ_ASSERT_IF(tc == AbstractTypeRefCode, typeDef != nullptr);
// Double check that the type definition was allocated within 48-bits, as
// noted above.
MOZ_ASSERT((uint64_t)typeDef <= ((uint64_t)1 << TypeDefBits) - 1);
PackedTypeCode ptc = {};
ptc.typeCode_ = PackedRepr(tc);
ptc.typeIndex_ = refTypeIndex;
ptc.typeDef_ = (uintptr_t)typeDef;
ptc.nullable_ = isNullable;
return ptc;
}
static constexpr PackedTypeCode pack(TypeCode tc, bool nullable) {
return pack(tc, PackedTypeCode::NoTypeIndex, nullable);
static PackedTypeCode pack(TypeCode tc, bool nullable) {
return pack(tc, nullptr, nullable);
}
static constexpr PackedTypeCode pack(TypeCode tc) {
return pack(tc, PackedTypeCode::NoTypeIndex, false);
}
static PackedTypeCode pack(TypeCode tc) { return pack(tc, nullptr, false); }
bool isValid() const { return typeCode_ != NoTypeCode; }
@ -130,14 +136,9 @@ union PackedTypeCode {
// Return whether this type is represented by a reference at runtime.
bool isRefRepr() const { return typeCode() < LowestPrimitiveTypeCode; }
uint32_t typeIndex() const {
const TypeDef* typeDef() const {
MOZ_ASSERT(isValid());
return uint32_t(typeIndex_);
}
uint32_t typeIndexUnchecked() const {
MOZ_ASSERT(isValid());
return uint32_t(typeIndex_);
return (const TypeDef*)(uintptr_t)typeDef_;
}
bool isNullable() const {
@ -160,7 +161,42 @@ union PackedTypeCode {
}
};
static_assert(sizeof(PackedTypeCode) == sizeof(uint32_t), "packed");
static_assert(sizeof(PackedTypeCode) == sizeof(uint64_t), "packed");
// A SerializableTypeCode represents any value type in a form that can be
// serialized and deserialized.
union SerializableTypeCode {
using PackedRepr = uintptr_t;
static constexpr size_t NullableBits = 1;
static constexpr size_t TypeCodeBits = 8;
static constexpr size_t TypeIndexBits = 20;
PackedRepr bits;
struct {
PackedRepr nullable : NullableBits;
PackedRepr typeCode : TypeCodeBits;
PackedRepr typeIndex : TypeIndexBits;
};
WASM_CHECK_CACHEABLE_POD(bits);
static constexpr PackedRepr NoTypeIndex = (1 << TypeIndexBits) - 1;
static_assert(NullableBits + TypeCodeBits + TypeIndexBits <=
(sizeof(PackedRepr) * 8),
"enough bits");
static_assert(NoTypeIndex < (1 << TypeIndexBits), "enough bits");
static_assert(MaxTypes < NoTypeIndex, "enough bits");
// Defined in WasmSerialize.cpp
static inline SerializableTypeCode serialize(PackedTypeCode ptc,
const TypeContext& types);
inline PackedTypeCode deserialize(const TypeContext& types);
};
WASM_DECLARE_CACHEABLE_POD(SerializableTypeCode);
static_assert(sizeof(SerializableTypeCode) == sizeof(uintptr_t), "packed");
// An enum that describes the representation classes for tables; The table
// element type is mapped into this by Table::repr().
@ -176,7 +212,7 @@ class RefType {
Func = uint8_t(TypeCode::FuncRef),
Extern = uint8_t(TypeCode::ExternRef),
Eq = uint8_t(TypeCode::EqRef),
TypeIndex = uint8_t(AbstractReferenceTypeIndexCode)
TypeRef = uint8_t(AbstractTypeRefCode)
};
private:
@ -188,10 +224,10 @@ class RefType {
case TypeCode::FuncRef:
case TypeCode::ExternRef:
case TypeCode::EqRef:
MOZ_ASSERT(ptc_.typeIndex() == PackedTypeCode::NoTypeIndex);
MOZ_ASSERT(ptc_.typeDef() == nullptr);
return true;
case AbstractReferenceTypeIndexCode:
MOZ_ASSERT(ptc_.typeIndex() != PackedTypeCode::NoTypeIndex);
case AbstractTypeRefCode:
MOZ_ASSERT(ptc_.typeDef() != nullptr);
return true;
default:
return false;
@ -203,9 +239,8 @@ class RefType {
MOZ_ASSERT(isValid());
}
RefType(uint32_t refTypeIndex, bool nullable)
: ptc_(PackedTypeCode::pack(AbstractReferenceTypeIndexCode, refTypeIndex,
nullable)) {
RefType(const TypeDef* typeDef, bool nullable)
: ptc_(PackedTypeCode::pack(AbstractTypeRefCode, typeDef, nullable)) {
MOZ_ASSERT(isValid());
}
@ -214,19 +249,21 @@ class RefType {
explicit RefType(PackedTypeCode ptc) : ptc_(ptc) { MOZ_ASSERT(isValid()); }
static RefType fromTypeCode(TypeCode tc, bool nullable) {
MOZ_ASSERT(tc != AbstractReferenceTypeIndexCode);
MOZ_ASSERT(tc != AbstractTypeRefCode);
return RefType(Kind(tc), nullable);
}
static RefType fromTypeIndex(uint32_t refTypeIndex, bool nullable) {
return RefType(refTypeIndex, nullable);
static RefType fromTypeDef(const TypeDef* typeDef, bool nullable) {
return RefType(typeDef, nullable);
}
Kind kind() const { return Kind(ptc_.typeCode()); }
uint32_t typeIndex() const { return ptc_.typeIndex(); }
const TypeDef* typeDef() const { return ptc_.typeDef(); }
PackedTypeCode packed() const { return ptc_; }
PackedTypeCode* addressOfPacked() { return &ptc_; }
const PackedTypeCode* addressOfPacked() const { return &ptc_; }
static RefType func() { return RefType(Func, true); }
static RefType extern_() { return RefType(Extern, true); }
@ -235,7 +272,7 @@ class RefType {
bool isFunc() const { return kind() == RefType::Func; }
bool isExtern() const { return kind() == RefType::Extern; }
bool isEq() const { return kind() == RefType::Eq; }
bool isTypeIndex() const { return kind() == RefType::TypeIndex; }
bool isTypeRef() const { return kind() == RefType::TypeRef; }
bool isNullable() const { return bool(ptc_.isNullable()); }
RefType asNonNullable() const { return RefType(ptc_.asNonNullable()); }
@ -247,7 +284,7 @@ class RefType {
case RefType::Extern:
case RefType::Eq:
return TableRepr::Ref;
case RefType::TypeIndex:
case RefType::TypeRef:
MOZ_CRASH("NYI");
}
MOZ_CRASH("switch is exhaustive");
@ -289,7 +326,7 @@ class FieldTypeTraits {
case TypeCode::EqRef:
#endif
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
case AbstractReferenceTypeIndexCode:
case AbstractTypeRefCode:
#endif
return true;
default:
@ -359,7 +396,7 @@ class ValTypeTraits {
case TypeCode::EqRef:
#endif
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
case AbstractReferenceTypeIndexCode:
case AbstractTypeRefCode:
#endif
return true;
default:
@ -406,7 +443,7 @@ class PackedType : public T {
PackedTypeCode tc_;
explicit PackedType(TypeCode c) : tc_(PackedTypeCode::pack(c)) {
MOZ_ASSERT(c != AbstractReferenceTypeIndexCode);
MOZ_ASSERT(c != AbstractTypeRefCode);
MOZ_ASSERT(isValid());
}
@ -471,7 +508,7 @@ class PackedType : public T {
return PackedType(tc);
}
static PackedType fromBitsUnsafe(uint64_t bits) {
static PackedType fromBitsUnsafe(PackedTypeCode::PackedRepr bits) {
return PackedType(PackedTypeCode::fromBits(bits));
}
@ -494,8 +531,10 @@ class PackedType : public T {
MOZ_ASSERT(isValid());
return tc_;
}
PackedTypeCode* addressOfPacked() { return &tc_; }
const PackedTypeCode* addressOfPacked() const { return &tc_; }
uint64_t bitsUnsafe() const {
PackedTypeCode::PackedRepr bitsUnsafe() const {
MOZ_ASSERT(isValid());
return tc_.bits();
}
@ -514,9 +553,7 @@ class PackedType : public T {
bool isEqRef() const { return tc_.typeCode() == TypeCode::EqRef; }
bool isTypeIndex() const {
return tc_.typeCode() == AbstractReferenceTypeIndexCode;
}
bool isTypeRef() const { return tc_.typeCode() == AbstractTypeRefCode; }
bool isRefRepr() const { return tc_.isRefRepr(); }
@ -526,7 +563,7 @@ class PackedType : public T {
// Returns whether the type has a representation in JS.
bool isExposable() const {
#if defined(ENABLE_WASM_SIMD) || defined(ENABLE_WASM_GC)
return !(kind() == Kind::V128 || isTypeIndex());
return !(kind() == Kind::V128 || isTypeRef());
#else
return true;
#endif
@ -534,7 +571,7 @@ class PackedType : public T {
bool isNullable() const { return tc_.isNullable(); }
uint32_t typeIndex() const { return tc_.typeIndex(); }
const TypeDef* typeDef() const { return tc_.typeDef(); }
Kind kind() const { return Kind(tc_.typeCodeAbstracted()); }
@ -691,11 +728,11 @@ extern bool ToValType(JSContext* cx, HandleValue v, ValType* out);
extern bool ToRefType(JSContext* cx, JSLinearString* typeLinearStr,
RefType* out);
extern UniqueChars ToString(RefType type);
extern UniqueChars ToString(ValType type);
extern UniqueChars ToString(FieldType type);
extern UniqueChars ToString(const Maybe<ValType>& type);
extern UniqueChars ToString(RefType type, const TypeContext* types);
extern UniqueChars ToString(ValType type, const TypeContext* types);
extern UniqueChars ToString(FieldType type, const TypeContext* types);
extern UniqueChars ToString(const Maybe<ValType>& type,
const TypeContext* types);
} // namespace wasm
} // namespace js

Просмотреть файл

@ -118,14 +118,15 @@ bool wasm::DecodeLocalEntries(Decoder& d, const TypeContext& types,
return true;
}
bool wasm::DecodeValidatedLocalEntries(Decoder& d, ValTypeVector* locals) {
bool wasm::DecodeValidatedLocalEntries(const TypeContext& types, Decoder& d,
ValTypeVector* locals) {
uint32_t numLocalEntries;
MOZ_ALWAYS_TRUE(d.readVarU32(&numLocalEntries));
for (uint32_t i = 0; i < numLocalEntries; i++) {
uint32_t count = d.uncheckedReadVarU32();
MOZ_ASSERT(MaxLocals - locals->length() >= count);
if (!locals->appendN(d.uncheckedReadValType(), count)) {
if (!locals->appendN(d.uncheckedReadValType(types), count)) {
return false;
}
}
@ -142,12 +143,12 @@ bool wasm::CheckIsSubtypeOf(Decoder& d, const ModuleEnvironment& env,
case TypeResult::True:
return true;
case TypeResult::False: {
UniqueChars actualText = ToString(actual);
UniqueChars actualText = ToString(actual, env.types);
if (!actualText) {
return false;
}
UniqueChars expectedText = ToString(expected);
UniqueChars expectedText = ToString(expected, env.types);
if (!expectedText) {
return false;
}
@ -1466,30 +1467,6 @@ static bool DecodePreamble(Decoder& d) {
return true;
}
enum class TypeState { None, Gc, ForwardGc, Func };
using TypeStateVector = Vector<TypeState, 0, SystemAllocPolicy>;
template <class T>
static bool ValidateTypeState(Decoder& d, TypeStateVector* typeState, T type) {
if (!type.isTypeIndex()) {
return true;
}
uint32_t refTypeIndex = type.refType().typeIndex();
switch ((*typeState)[refTypeIndex]) {
case TypeState::None:
(*typeState)[refTypeIndex] = TypeState::ForwardGc;
break;
case TypeState::Gc:
case TypeState::ForwardGc:
break;
case TypeState::Func:
return d.fail("ref does not reference a gc type");
}
return true;
}
#ifdef WASM_PRIVATE_REFTYPES
static bool FuncTypeIsJSCompatible(Decoder& d, const FuncType& ft) {
if (ft.exposesTypeIndex()) {
@ -1500,24 +1477,13 @@ static bool FuncTypeIsJSCompatible(Decoder& d, const FuncType& ft) {
#endif
static bool DecodeValTypeVector(Decoder& d, ModuleEnvironment* env,
TypeStateVector* typeState, uint32_t count,
ValTypeVector* valTypes) {
uint32_t count, ValTypeVector* valTypes) {
if (!valTypes->resize(count)) {
return false;
}
for (uint32_t i = 0; i < count; i++) {
if (!d.readValType(env->types->length(), env->features, &(*valTypes)[i])) {
return false;
}
#ifdef ENABLE_WASM_FUNCTION_REFERENCES
if (env->functionReferencesEnabled()) {
// Disable validatation of param/result types for functions.
// ValidateTypeState rejects only TypeState::Func, which is needed.
continue;
}
#endif
if (!ValidateTypeState(d, typeState, (*valTypes)[i])) {
if (!d.readValType(*env->types, env->features, &(*valTypes)[i])) {
return false;
}
}
@ -1525,7 +1491,7 @@ static bool DecodeValTypeVector(Decoder& d, ModuleEnvironment* env,
}
static bool DecodeFuncType(Decoder& d, ModuleEnvironment* env,
TypeStateVector* typeState, uint32_t typeIndex) {
uint32_t typeIndex) {
uint32_t numArgs;
if (!d.readVarU32(&numArgs)) {
return d.fail("bad number of function args");
@ -1534,7 +1500,7 @@ static bool DecodeFuncType(Decoder& d, ModuleEnvironment* env,
return d.fail("too many arguments in signature");
}
ValTypeVector args;
if (!DecodeValTypeVector(d, env, typeState, numArgs, &args)) {
if (!DecodeValTypeVector(d, env, numArgs, &args)) {
return false;
}
@ -1546,33 +1512,23 @@ static bool DecodeFuncType(Decoder& d, ModuleEnvironment* env,
return d.fail("too many returns in signature");
}
ValTypeVector results;
if (!DecodeValTypeVector(d, env, typeState, numResults, &results)) {
if (!DecodeValTypeVector(d, env, numResults, &results)) {
return false;
}
if ((*typeState)[typeIndex] != TypeState::None) {
return d.fail("function type entry referenced as gc");
}
FuncType funcType = FuncType(std::move(args), std::move(results));
(*env->types)[typeIndex] = TypeDef(std::move(funcType));
(*typeState)[typeIndex] = TypeState::Func;
(*env->types)[typeIndex] = std::move(funcType);
return true;
}
static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
TypeStateVector* typeState, uint32_t typeIndex) {
uint32_t typeIndex) {
if (!env->gcEnabled()) {
return d.fail("Structure types not enabled");
}
if ((*typeState)[typeIndex] != TypeState::None &&
(*typeState)[typeIndex] != TypeState::ForwardGc) {
return d.fail("gc type entry referenced as function");
}
uint32_t numFields;
if (!d.readVarU32(&numFields)) {
return d.fail("Bad number of fields");
@ -1588,8 +1544,7 @@ static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
}
for (uint32_t i = 0; i < numFields; i++) {
if (!d.readPackedType(env->types->length(), env->features,
&fields[i].type)) {
if (!d.readFieldType(*env->types, env->features, &fields[i].type)) {
return false;
}
@ -1601,10 +1556,6 @@ static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
return d.fail("garbage flag bits");
}
fields[i].isMutable = flags & uint8_t(FieldFlags::Mutable);
if (!ValidateTypeState(d, typeState, fields[i].type)) {
return false;
}
}
StructType structType = StructType(std::move(fields));
@ -1614,25 +1565,19 @@ static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
return d.fail("too many fields in struct");
}
(*env->types)[typeIndex] = TypeDef(std::move(structType));
(*typeState)[typeIndex] = TypeState::Gc;
(*env->types)[typeIndex] = std::move(structType);
return true;
}
static bool DecodeArrayType(Decoder& d, ModuleEnvironment* env,
TypeStateVector* typeState, uint32_t typeIndex) {
uint32_t typeIndex) {
if (!env->gcEnabled()) {
return d.fail("gc types not enabled");
}
if ((*typeState)[typeIndex] != TypeState::None &&
(*typeState)[typeIndex] != TypeState::ForwardGc) {
return d.fail("gc type entry referenced as function");
}
FieldType elementType;
if (!d.readFieldType(env->types->length(), env->features, &elementType)) {
if (!d.readFieldType(*env->types, env->features, &elementType)) {
return false;
}
@ -1645,12 +1590,7 @@ static bool DecodeArrayType(Decoder& d, ModuleEnvironment* env,
}
bool isMutable = flags & uint8_t(FieldFlags::Mutable);
if (!ValidateTypeState(d, typeState, elementType)) {
return false;
}
(*env->types)[typeIndex] = TypeDef(ArrayType(elementType, isMutable));
(*typeState)[typeIndex] = TypeState::Gc;
(*env->types)[typeIndex] = ArrayType(elementType, isMutable);
return true;
}
@ -1677,11 +1617,6 @@ static bool DecodeTypeSection(Decoder& d, ModuleEnvironment* env) {
return false;
}
TypeStateVector typeState;
if (!typeState.appendN(TypeState::None, numTypes)) {
return false;
}
for (uint32_t typeIndex = 0; typeIndex < numTypes; typeIndex++) {
uint8_t form;
if (!d.readFixedU8(&form)) {
@ -1690,17 +1625,17 @@ static bool DecodeTypeSection(Decoder& d, ModuleEnvironment* env) {
switch (form) {
case uint8_t(TypeCode::Func):
if (!DecodeFuncType(d, env, &typeState, typeIndex)) {
if (!DecodeFuncType(d, env, typeIndex)) {
return false;
}
break;
case uint8_t(TypeCode::Struct):
if (!DecodeStructType(d, env, &typeState, typeIndex)) {
if (!DecodeStructType(d, env, typeIndex)) {
return false;
}
break;
case uint8_t(TypeCode::Array):
if (!DecodeArrayType(d, env, &typeState, typeIndex)) {
if (!DecodeArrayType(d, env, typeIndex)) {
return false;
}
break;
@ -1881,7 +1816,7 @@ static bool GlobalIsJSCompatible(Decoder& d, ValType type) {
case RefType::Extern:
case RefType::Eq:
break;
case RefType::TypeIndex:
case RefType::TypeRef:
#ifdef WASM_PRIVATE_REFTYPES
return d.fail("cannot expose indexed reference type");
#else
@ -1958,7 +1893,7 @@ static bool DecodeMemoryTypeAndLimits(Decoder& d, ModuleEnvironment* env) {
#ifdef WASM_PRIVATE_REFTYPES
static bool TagIsJSCompatible(Decoder& d, const ValTypeVector& type) {
for (auto t : type) {
if (t.isTypeIndex()) {
if (t.isTypeRef()) {
return d.fail("cannot expose indexed reference type");
}
}
@ -2019,12 +1954,13 @@ static bool DecodeImport(Decoder& d, ModuleEnvironment* env) {
return false;
}
#ifdef WASM_PRIVATE_REFTYPES
if (!FuncTypeIsJSCompatible(d, env->types->funcType(funcTypeIndex))) {
if (!FuncTypeIsJSCompatible(d,
env->types->type(funcTypeIndex).funcType())) {
return false;
}
#endif
if (!env->funcs.append(
FuncDesc(&env->types->funcType(funcTypeIndex), funcTypeIndex))) {
if (!env->funcs.append(FuncDesc(
&env->types->type(funcTypeIndex).funcType(), funcTypeIndex))) {
return false;
}
if (env->funcs.length() > MaxFuncs) {
@ -2161,7 +2097,7 @@ static bool DecodeFunctionSection(Decoder& d, ModuleEnvironment* env) {
return false;
}
env->funcs.infallibleAppend(
FuncDesc(&env->types->funcType(funcTypeIndex), funcTypeIndex));
FuncDesc(&env->types->type(funcTypeIndex).funcType(), funcTypeIndex));
}
return d.finishSection(*range, "function");

Просмотреть файл

@ -121,11 +121,8 @@ struct ModuleEnvironment {
}
bool initTypes(uint32_t numTypes) {
types = js_new<TypeContext>(features, TypeDefVector());
if (!types) {
return false;
}
return types->resize(numTypes);
types = js_new<TypeContext>(features);
return types && types->addTypes(numTypes);
}
void declareFuncExported(uint32_t funcIndex, bool eager, bool canRefFunc) {
@ -234,7 +231,8 @@ using ValidatingOpIter = OpIter<ValidatingPolicy>;
// This performs no validation; the local entries must already have been
// validated by an earlier pass.
[[nodiscard]] bool DecodeValidatedLocalEntries(Decoder& d,
[[nodiscard]] bool DecodeValidatedLocalEntries(const TypeContext& types,
Decoder& d,
ValTypeVector* locals);
// This validates the entries.

Просмотреть файл

@ -137,7 +137,7 @@ bool wasm::CheckRefType(JSContext* cx, RefType targetType, HandleValue v,
return false;
}
break;
case RefType::TypeIndex:
case RefType::TypeRef:
MOZ_CRASH("temporarily unsupported Ref type");
}
return true;
@ -399,7 +399,7 @@ bool wasm::ToWebAssemblyValue(JSContext* cx, HandleValue val, FieldType type,
case RefType::Eq:
return ToWebAssemblyValue_eqref<Debug>(cx, val, (void**)loc,
mustWrite64);
case RefType::TypeIndex:
case RefType::TypeRef:
break;
}
}
@ -525,7 +525,7 @@ bool wasm::ToJSValue(JSContext* cx, const void* src, FieldType type,
case RefType::Eq:
return ToJSValue_anyref<Debug>(
cx, *reinterpret_cast<void* const*>(src), dst);
case RefType::TypeIndex:
case RefType::TypeRef:
break;
}
}