Bug 1478632 - wasm simd, part 2: the v128 type. r=rhunt

We add V128 to the ValType enum and handle it in all switches
throughout the engine.  In many cases we insert MOZ_CRASH() placeholders
that will be resolved in later patches.

Note that we're currently using MIRType::Int8x16 to represent V128.  This
will change later, when we do the Ion work.
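
For illustration, the recurring pattern in this patch looks roughly like
the following sketch; the surrounding switch is hypothetical, and only the
V128 arm is what the patch adds:

  switch (type.kind()) {
    case ValType::I32:
    case ValType::I64:
    case ValType::F32:
    case ValType::F64:
      // existing handling, unchanged
      break;
    case ValType::V128:
      MOZ_CRASH("NYI");  // placeholder, resolved by later SIMD patches
    case ValType::Ref:
      // existing handling, unchanged
      break;
  }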

Differential Revision: https://phabricator.services.mozilla.com/D57941
Lars T Hansen 2020-05-05 08:22:20 +00:00
Parent 414fee387b
Commit db273436b1
44 changed files with 508 additions and 71 deletions


@ -267,6 +267,7 @@ bool perform(JSContext* cx, HandleValue objv, HandleValue idxv, Args... args) {
case Scalar::Uint8Clamped:
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
break;
}
MOZ_CRASH("Unsupported TypedArray type");


@ -248,6 +248,7 @@ uint32_t ScalarTypeDescr::alignment(Type t) {
JS_FOR_EACH_SCALAR_TYPE_REPR(NUMERIC_TYPE_TO_STRING)
#undef NUMERIC_TYPE_TO_STRING
case Scalar::Int64:
case Scalar::V128:
case Scalar::MaxTypedArrayViewType:
break;
}
@ -296,6 +297,7 @@ bool ScalarTypeDescr::call(JSContext* cx, unsigned argc, Value* vp) {
JS_FOR_EACH_SCALAR_BIGINT_TYPE_REPR(BIGINT_CALL)
#undef BIGINT_CALL
case Scalar::Int64:
case Scalar::V128:
case Scalar::MaxTypedArrayViewType:
MOZ_CRASH();
}
@ -959,10 +961,16 @@ StructTypeDescr* StructMetaTypeDescr::createFromArrays(
return nullptr;
}
CheckedInt32 offset = layout.addField(
fieldProps[i].alignAsInt64 ? ScalarTypeDescr::alignment(Scalar::Int64)
: fieldType->alignment(),
fieldType->size());
CheckedInt32 offset;
if (fieldProps[i].alignAsInt64) {
offset = layout.addField(ScalarTypeDescr::alignment(Scalar::Int64),
fieldType->size());
} else if (fieldProps[i].alignAsV128) {
offset = layout.addField(ScalarTypeDescr::alignment(Scalar::V128),
fieldType->size());
} else {
offset = layout.addField(fieldType->alignment(), fieldType->size());
}
if (!offset.isValid()) {
JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
JSMSG_TYPEDOBJECT_TOO_BIG);


@ -421,9 +421,10 @@ class ArrayTypeDescr : public ComplexTypeDescr {
};
struct StructFieldProps {
StructFieldProps() : isMutable(0), alignAsInt64(0) {}
StructFieldProps() : isMutable(0), alignAsInt64(0), alignAsV128(0) {}
uint32_t isMutable : 1;
uint32_t alignAsInt64 : 1;
uint32_t alignAsV128 : 1;
};
/*


@ -32,7 +32,7 @@ var instance = wasmEvalText(`(module
(function() {
// In ion-eager mode, make sure we don't try to inline a function that
// takes or returns i64 arguments.
assertErrorMessage(() => instance.add_two_i64(0n, 1n), TypeError, /cannot pass i64 to or from JS/);
assertErrorMessage(() => instance.add_two_i64(0n, 1n), TypeError, /cannot pass i64 or v128 to or from JS/);
})();
enableGeckoProfiling();
@ -69,7 +69,7 @@ function main() {
[''] // the jit path wasn't taken (interpreter/baseline only).
]);
assertEq(caught.message, 'cannot pass i64 to or from JS');
assertEq(caught.message, 'cannot pass i64 or v128 to or from JS');
let stack = caught.stack.split('\n');


@ -3414,6 +3414,7 @@ OperandId SetPropIRGenerator::emitNumericGuard(ValOperandId valId,
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
break;
}
MOZ_CRASH("Unsupported TypedArray type");


@ -3722,6 +3722,7 @@ bool CacheIRCompiler::emitStoreTypedElement(ObjOperandId objId,
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
MOZ_CRASH("Unsupported TypedArray type");
}
@ -3991,6 +3992,7 @@ bool CacheIRCompiler::emitStoreTypedObjectScalarProperty(
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
MOZ_CRASH("Unsupported TypedArray type");
}


@ -13970,6 +13970,7 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
argMir = ToMIRType(sig.args()[i]);
break;
case wasm::ValType::I64:
case wasm::ValType::V128:
MOZ_CRASH("unexpected argument type when calling from ion to wasm");
case wasm::ValType::Ref:
switch (sig.args()[i].refTypeKind()) {
@ -14037,6 +14038,7 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
MOZ_ASSERT(ToFloatRegister(lir->output()) == ReturnDoubleReg);
break;
case wasm::ValType::I64:
case wasm::ValType::V128:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
case wasm::ValType::Ref:
switch (results[0].refTypeKind()) {


@ -551,6 +551,8 @@ static inline size_t MIRTypeToSize(MIRType type) {
return 4;
case MIRType::Double:
return 8;
case MIRType::Int8x16:
return 16;
case MIRType::Pointer:
case MIRType::RefOrNull:
return sizeof(uintptr_t);
@ -689,6 +691,8 @@ static inline MIRType ScalarTypeToMIRType(Scalar::Type type) {
case Scalar::BigInt64:
case Scalar::BigUint64:
MOZ_CRASH("NYI");
case Scalar::V128:
return MIRType::Int8x16;
case Scalar::MaxTypedArrayViewType:
break;
}


@ -4073,6 +4073,8 @@ IonBuilder::InliningResult IonBuilder::inlineWasmCall(CallInfo& callInfo,
case wasm::ValType::F64:
conversion = MToDouble::New(alloc(), arg);
break;
case wasm::ValType::V128:
MOZ_CRASH("impossible per above check");
case wasm::ValType::Ref:
switch (sig.args()[i].refTypeKind()) {
case wasm::RefType::Any:


@ -1746,6 +1746,7 @@ static Range* GetTypedArrayRange(TempAllocator& alloc, Scalar::Type type) {
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::Int64:
case Scalar::V128:
case Scalar::Float32:
case Scalar::Float64:
case Scalar::MaxTypedArrayViewType:


@ -353,6 +353,7 @@ void MacroAssemblerCompat::wasmLoadImpl(const wasm::MemoryAccessDesc& access,
case Scalar::Uint8Clamped:
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::V128:
case Scalar::MaxTypedArrayViewType:
MOZ_CRASH("unexpected array type");
}
@ -412,6 +413,7 @@ void MacroAssemblerCompat::wasmStoreImpl(const wasm::MemoryAccessDesc& access,
case Scalar::Uint8Clamped:
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::V128:
case Scalar::MaxTypedArrayViewType:
MOZ_CRASH("unexpected array type");
}


@ -380,6 +380,7 @@ void CodeGeneratorX64::wasmStore(const wasm::MemoryAccessDesc& access,
masm.movl(cst, dstAddr);
break;
case Scalar::Int64:
case Scalar::V128:
case Scalar::Float32:
case Scalar::Float64:
case Scalar::Uint8Clamped:


@ -213,6 +213,7 @@ void LIRGenerator::visitWasmStore(MWasmStore* ins) {
break;
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::V128:
case Scalar::Uint8Clamped:
case Scalar::MaxTypedArrayViewType:
MOZ_CRASH("unexpected array type");


@ -623,13 +623,16 @@ void MacroAssembler::wasmLoad(const wasm::MemoryAccessDesc& access,
case Scalar::Float64:
loadDouble(srcAddr, out.fpu());
break;
case Scalar::V128:
vmovups(srcAddr, out.fpu());
break;
case Scalar::Int64:
MOZ_CRASH("int64 loads must use load64");
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::Uint8Clamped:
case Scalar::MaxTypedArrayViewType:
MOZ_CRASH("unexpected array type");
MOZ_CRASH("unexpected scalar type for wasmLoad");
}
memoryBarrierAfter(access.sync());
@ -665,12 +668,13 @@ void MacroAssembler::wasmLoadI64(const wasm::MemoryAccessDesc& access,
break;
case Scalar::Float32:
case Scalar::Float64:
MOZ_CRASH("non-int64 loads should use load()");
case Scalar::V128:
MOZ_CRASH("float loads must use wasmLoad");
case Scalar::Uint8Clamped:
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::MaxTypedArrayViewType:
MOZ_CRASH("unexpected array type");
MOZ_CRASH("unexpected scalar type for wasmLoadI64");
}
memoryBarrierAfter(access.sync());
@ -703,6 +707,9 @@ void MacroAssembler::wasmStore(const wasm::MemoryAccessDesc& access,
case Scalar::Float64:
storeUncanonicalizedDouble(value.fpu(), dstAddr);
break;
case Scalar::V128:
vmovups(value.fpu(), dstAddr);
break;
case Scalar::Uint8Clamped:
case Scalar::BigInt64:
case Scalar::BigUint64:


@ -415,6 +415,7 @@ void CodeGeneratorX86Shared::visitOutOfLineLoadTypedArrayOutOfBounds(
case Scalar::Int64:
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::V128:
case Scalar::MaxTypedArrayViewType:
MOZ_CRASH("unexpected array type");
case Scalar::Float32:


@ -342,6 +342,7 @@ void LIRGenerator::visitAsmJSStoreHeap(MAsmJSStoreHeap* ins) {
limitAlloc, memoryBaseAlloc);
break;
case Scalar::Int64:
case Scalar::V128:
MOZ_CRASH("NYI");
case Scalar::Uint8Clamped:
case Scalar::BigInt64:


@ -386,6 +386,7 @@ void LIRGenerator::visitWasmStore(MWasmStore* ins) {
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::MaxTypedArrayViewType:
case Scalar::V128:
MOZ_CRASH("unexpected array type");
}


@ -632,6 +632,7 @@ void MacroAssembler::wasmLoad(const wasm::MemoryAccessDesc& access,
vmovsd(srcAddr, out.fpu());
break;
case Scalar::Int64:
case Scalar::V128:
case Scalar::Uint8Clamped:
case Scalar::BigInt64:
case Scalar::BigUint64:
@ -705,6 +706,7 @@ void MacroAssembler::wasmLoadI64(const wasm::MemoryAccessDesc& access,
case Scalar::Float32:
case Scalar::Float64:
MOZ_CRASH("non-int64 loads should use load()");
case Scalar::V128:
case Scalar::Uint8Clamped:
case Scalar::BigInt64:
case Scalar::BigUint64:
@ -748,6 +750,7 @@ void MacroAssembler::wasmStore(const wasm::MemoryAccessDesc& access,
case Scalar::MaxTypedArrayViewType:
case Scalar::BigInt64:
case Scalar::BigUint64:
case Scalar::V128:
MOZ_CRASH("unexpected type");
}


@ -422,7 +422,7 @@ MSG_DEF(JSMSG_WASM_BAD_ELEMENT_GENERALIZED, 0, JSEXN_TYPEERR, "\"element\" prope
MSG_DEF(JSMSG_WASM_BAD_IMPORT_ARG, 0, JSEXN_TYPEERR, "second argument must be an object")
MSG_DEF(JSMSG_WASM_BAD_IMPORT_FIELD, 1, JSEXN_TYPEERR, "import object field '{0}' is not an Object")
MSG_DEF(JSMSG_WASM_BAD_FUNCREF_VALUE, 0, JSEXN_TYPEERR, "can only pass WebAssembly exported functions to funcref")
MSG_DEF(JSMSG_WASM_BAD_I64_TYPE, 0, JSEXN_TYPEERR, "cannot pass i64 to or from JS")
MSG_DEF(JSMSG_WASM_BAD_VAL_TYPE, 1, JSEXN_TYPEERR, "cannot pass {0} to or from JS")
MSG_DEF(JSMSG_WASM_BAD_GLOBAL_TYPE, 0, JSEXN_TYPEERR, "bad type for a WebAssembly.Global")
MSG_DEF(JSMSG_WASM_NO_TRANSFER, 0, JSEXN_TYPEERR, "cannot transfer WebAssembly/asm.js ArrayBuffer")
MSG_DEF(JSMSG_WASM_TEXT_FAIL, 1, JSEXN_SYNTAXERR, "wasm text error: {0}")


@ -1412,6 +1412,7 @@ enum Type {
MaxTypedArrayViewType,
Int64,
V128,
};
static inline size_t byteSize(Type atype) {
@ -1432,6 +1433,8 @@ static inline size_t byteSize(Type atype) {
case BigInt64:
case BigUint64:
return 8;
case V128:
return 16;
case MaxTypedArrayViewType:
break;
}
@ -1453,6 +1456,7 @@ static inline bool isSignedIntType(Type atype) {
case Float32:
case Float64:
case BigUint64:
case V128:
return false;
case MaxTypedArrayViewType:
break;
@ -1475,6 +1479,7 @@ static inline bool isBigIntType(Type atype) {
case Uint32:
case Float32:
case Float64:
case V128:
return false;
case MaxTypedArrayViewType:
break;


@ -91,6 +91,7 @@ bool TypedArrayObject::convertForSideEffect(JSContext* cx,
}
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
MOZ_CRASH("Unsupported TypedArray type");
}
MOZ_ASSERT_UNREACHABLE("Invalid scalar type");
@ -2073,6 +2074,7 @@ bool TypedArrayObject::getElement<CanGC>(JSContext* cx, uint32_t index,
#undef GET_ELEMENT
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
break;
}
@ -2097,6 +2099,7 @@ bool TypedArrayObject::getElementPure(uint32_t index, Value* vp) {
#undef GET_ELEMENT
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
break;
}
@ -2124,6 +2127,7 @@ bool TypedArrayObject::getElements(JSContext* cx,
#undef GET_ELEMENTS
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
break;
}
@ -2496,6 +2500,7 @@ bool js::SetTypedArrayElement(JSContext* cx, Handle<TypedArrayObject*> obj,
#undef SET_TYPED_ARRAY_ELEMENT
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
break;
}
@ -2555,6 +2560,7 @@ bool js::DefineTypedArrayElement(JSContext* cx, HandleObject obj,
#undef DEFINE_TYPED_ARRAY_ELEMENT
case Scalar::MaxTypedArrayViewType:
case Scalar::Int64:
case Scalar::V128:
break;
}


@ -1354,7 +1354,8 @@ class MOZ_STACK_CLASS JS_HAZ_ROOTED ModuleValidatorShared {
compilerEnv_(CompileMode::Once, Tier::Optimized, OptimizedBackend::Ion,
DebugEnabled::False, /* multi value */ false,
/* ref types */ false, /* gc types */ false,
/* huge memory */ false, /* bigint */ false),
/* huge memory */ false, /* bigint */ false,
/* v128 */ false),
env_(&compilerEnv_, Shareable::False, ModuleKind::AsmJS) {
compilerEnv_.computeParameters();
env_.minMemoryLength = RoundUpToNextValidAsmJSHeapLength(0);
@ -6548,6 +6549,8 @@ static bool ValidateGlobalVariable(JSContext* cx, const AsmJSGlobal& global,
}
case ValType::I64:
MOZ_CRASH("int64");
case ValType::V128:
MOZ_CRASH("v128");
case ValType::F32: {
float f;
if (!RoundFloat32(cx, v, &f)) {


@ -2178,6 +2178,8 @@ struct Stk {
case ValType::I64:
k = Stk::MemI64;
break;
case ValType::V128:
MOZ_CRASH("NYI");
case ValType::F32:
k = Stk::MemF32;
break;
@ -3084,6 +3086,8 @@ class BaseCompiler final : public BaseCompilerInterface {
case ValType::I64:
needI64(RegI64(result.gpr64()));
break;
case ValType::V128:
MOZ_CRASH("NYI");
case ValType::F32:
if (which == RegKind::All) {
needF32(RegF32(result.fpr()));
@ -3120,6 +3124,8 @@ class BaseCompiler final : public BaseCompilerInterface {
case ValType::I64:
freeI64(RegI64(result.gpr64()));
break;
case ValType::V128:
MOZ_CRASH("NYI");
case ValType::F32:
if (which == RegKind::All) {
freeF32(RegF32(result.fpr()));
@ -3165,6 +3171,8 @@ class BaseCompiler final : public BaseCompilerInterface {
case ValType::I64:
MOZ_ASSERT(isAvailableI64(RegI64(result.gpr64())));
break;
case ValType::V128:
MOZ_CRASH("NYI");
case ValType::F32:
MOZ_ASSERT(isAvailableF32(RegF32(result.fpr())));
break;
@ -4110,6 +4118,8 @@ class BaseCompiler final : public BaseCompilerInterface {
case ValType::Ref:
popRef(RegPtr(result.gpr()));
break;
case ValType::V128:
MOZ_CRASH("NYI");
}
}
}
@ -4312,6 +4322,8 @@ class BaseCompiler final : public BaseCompilerInterface {
case ValType::I64:
pushI64(RegI64(result.gpr64()));
break;
case ValType::V128:
MOZ_CRASH("NYI");
case ValType::F32:
pushF32(RegF32(result.fpr()));
break;
@ -4818,6 +4830,8 @@ class BaseCompiler final : public BaseCompilerInterface {
masm.storePtr(RegPtr(result.gpr()), dest);
break;
}
case ValType::V128:
MOZ_CRASH("NYI");
}
registerResultIdx++;
}
@ -4856,6 +4870,8 @@ class BaseCompiler final : public BaseCompilerInterface {
case ValType::Ref:
masm.loadPtr(src, RegPtr(result.gpr()));
break;
case ValType::V128:
MOZ_CRASH("NYI");
}
}
}
@ -5086,6 +5102,9 @@ class BaseCompiler final : public BaseCompilerInterface {
}
break;
}
case ValType::V128: {
MOZ_CRASH("NYI");
}
case ValType::F64: {
ABIArg argLoc = call->abi.next(MIRType::Double);
switch (argLoc.kind()) {
@ -9685,6 +9704,8 @@ bool BaseCompiler::emitGetLocal() {
case ValType::I64:
pushLocalI64(slot);
break;
case ValType::V128:
MOZ_CRASH("NYI");
case ValType::F64:
pushLocalF64(slot);
break;
@ -9751,6 +9772,9 @@ bool BaseCompiler::emitSetOrTeeLocal(uint32_t slot) {
}
break;
}
case ValType::V128: {
MOZ_CRASH("NYI");
}
case ValType::Ref: {
RegPtr rv = popRef();
syncLocal(slot);


@ -519,10 +519,16 @@ static void* WasmHandleTrap() {
MOZ_CRASH("unexpected trap");
}
static void WasmReportInt64JSCall() {
static void WasmReportInt64OrV128JSCall() {
JSContext* cx = TlsContext.get();
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_I64_TYPE);
JSMSG_WASM_BAD_VAL_TYPE,
#ifdef ENABLE_WASM_SIMD
"i64 or v128"
#else
"i64"
#endif
);
}
static int32_t CoerceInPlace_ToInt32(Value* rawVal) {
@ -643,8 +649,11 @@ static int32_t CoerceInPlace_JitEntry(int funcExportIndex, TlsData* tlsData,
break;
}
#endif
case ValType::V128: {
// Guarded against by hasV128ArgOrRet()
MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
}
default: {
// Guarded against by temporarilyUnsupportedReftypeForEntry()
MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
}
}
@ -796,9 +805,9 @@ void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
case SymbolicAddress::HandleTrap:
*abiType = Args_General0;
return FuncCast(WasmHandleTrap, *abiType);
case SymbolicAddress::ReportInt64JSCall:
case SymbolicAddress::ReportInt64OrV128JSCall:
*abiType = Args_General0;
return FuncCast(WasmReportInt64JSCall, *abiType);
return FuncCast(WasmReportInt64OrV128JSCall, *abiType);
case SymbolicAddress::CallImport_Void:
*abiType = MakeABIFunctionType(
ArgType_Int32,
@ -814,6 +823,9 @@ void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
ArgType_Int32,
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
return FuncCast(Instance::callImport_i64, *abiType);
case SymbolicAddress::CallImport_V128:
*abiType = Args_General4;
return FuncCast(Instance::callImport_v128, *abiType);
case SymbolicAddress::CallImport_F64:
*abiType = MakeABIFunctionType(
ArgType_Int32,
@ -1137,6 +1149,7 @@ bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
case SymbolicAddress::CallImport_Void: // GenerateImportInterpExit
case SymbolicAddress::CallImport_I32:
case SymbolicAddress::CallImport_I64:
case SymbolicAddress::CallImport_V128:
case SymbolicAddress::CallImport_F64:
case SymbolicAddress::CallImport_FuncRef:
case SymbolicAddress::CallImport_AnyRef:
@ -1203,7 +1216,7 @@ bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
case SymbolicAddress::WaitI64:
case SymbolicAddress::Wake:
case SymbolicAddress::CoerceInPlace_JitEntry:
case SymbolicAddress::ReportInt64JSCall:
case SymbolicAddress::ReportInt64OrV128JSCall:
case SymbolicAddress::MemCopy:
case SymbolicAddress::MemCopyShared:
case SymbolicAddress::DataDrop:


@ -613,6 +613,11 @@ bool LazyStubSegment::addStubs(size_t codeLength,
codeRanges_.back().offsetBy(offsetInSegment);
i++;
#ifdef ENABLE_WASM_SIMD
if (funcExports[funcExportIndex].funcType().hasV128ArgOrRet()) {
continue;
}
#endif
if (funcExports[funcExportIndex]
.funcType()
.temporarilyUnsupportedReftypeForEntry()) {
@ -674,8 +679,13 @@ bool LazyStubTier::createMany(const Uint32Vector& funcExportIndices,
DebugOnly<uint32_t> numExpectedRanges = 0;
for (uint32_t funcExportIndex : funcExportIndices) {
const FuncExport& fe = funcExports[funcExportIndex];
numExpectedRanges +=
fe.funcType().temporarilyUnsupportedReftypeForEntry() ? 1 : 2;
// Entries with unsupported types get only the interp exit
bool unsupportedType =
#ifdef ENABLE_WASM_SIMD
fe.funcType().hasV128ArgOrRet() ||
#endif
fe.funcType().temporarilyUnsupportedReftypeForEntry();
numExpectedRanges += (unsupportedType ? 1 : 2);
void* calleePtr =
moduleSegmentBase + metadata.codeRange(fe).funcNormalEntry();
Maybe<ImmPtr> callee;
@ -757,10 +767,14 @@ bool LazyStubTier::createMany(const Uint32Vector& funcExportIndices,
MOZ_ALWAYS_TRUE(
exports_.insert(exports_.begin() + exportIndex, std::move(lazyExport)));
// Functions with unsupported reftypes in their sig have only one entry
// Functions with unsupported types in their sig have only one entry
// (interp). All other functions get an extra jit entry.
interpRangeIndex +=
fe.funcType().temporarilyUnsupportedReftypeForEntry() ? 1 : 2;
bool unsupportedType =
#ifdef ENABLE_WASM_SIMD
fe.funcType().hasV128ArgOrRet() ||
#endif
fe.funcType().temporarilyUnsupportedReftypeForEntry();
interpRangeIndex += (unsupportedType ? 1 : 2);
}
return true;
@ -781,12 +795,19 @@ bool LazyStubTier::createOne(uint32_t funcExportIndex,
const UniqueLazyStubSegment& segment = stubSegments_[stubSegmentIndex];
const CodeRangeVector& codeRanges = segment->codeRanges();
// Functions that have unsupported reftypes in their sig don't get a jit
// Functions that have unsupported types in their sig don't get a jit
// entry.
if (codeTier.metadata()
.funcExports[funcExportIndex]
.funcType()
.temporarilyUnsupportedReftypeForEntry()) {
.temporarilyUnsupportedReftypeForEntry()
#ifdef ENABLE_WASM_SIMD
|| codeTier.metadata()
.funcExports[funcExportIndex]
.funcType()
.hasV128ArgOrRet()
#endif
) {
MOZ_ASSERT(codeRanges.length() >= 1);
MOZ_ASSERT(codeRanges.back().isInterpEntry());
return true;


@ -238,9 +238,13 @@ class FuncExport {
}
bool canHaveJitEntry() const {
return !funcType_.temporarilyUnsupportedReftypeForEntry() &&
!funcType_.temporarilyUnsupportedResultCountForJitEntry() &&
JitOptions.enableWasmJitEntry;
return
#ifdef ENABLE_WASM_SIMD
!funcType_.hasV128ArgOrRet() &&
#endif
!funcType_.temporarilyUnsupportedReftypeForEntry() &&
!funcType_.temporarilyUnsupportedResultCountForJitEntry() &&
JitOptions.enableWasmJitEntry;
}
bool clone(const FuncExport& src) {
@ -359,6 +363,9 @@ struct Metadata : public ShareableBase<Metadata>, public MetadataCacheablePod {
// Feature flag that gets copied from ModuleEnvironment for BigInt support.
bool bigIntEnabled;
// Feature flag that gets copied from ModuleEnvironment for SIMD support.
bool v128Enabled;
explicit Metadata(ModuleKind kind = ModuleKind::Wasm)
: MetadataCacheablePod(kind), debugEnabled(false), debugHash() {}
virtual ~Metadata() = default;


@ -126,6 +126,7 @@ SharedCompileArgs CompileArgs::build(JSContext* cx,
target->hugeMemory = wasm::IsHugeMemoryEnabled();
target->bigIntEnabled = wasm::I64BigIntConversionAvailable(cx);
target->multiValuesEnabled = wasm::MultiValuesAvailable(cx);
target->v128Enabled = wasm::SimdAvailable(cx);
Log(cx, "available wasm compilers: tier1=%s tier2=%s",
baseline ? "baseline" : "none",
@ -421,13 +422,11 @@ static bool TieringBeneficial(uint32_t codeSize) {
CompilerEnvironment::CompilerEnvironment(const CompileArgs& args)
: state_(InitialWithArgs), args_(&args) {}
CompilerEnvironment::CompilerEnvironment(CompileMode mode, Tier tier,
OptimizedBackend optimizedBackend,
DebugEnabled debugEnabled,
bool multiValueConfigured,
bool refTypesConfigured,
bool gcTypesConfigured,
bool hugeMemory, bool bigIntConfigured)
CompilerEnvironment::CompilerEnvironment(
CompileMode mode, Tier tier, OptimizedBackend optimizedBackend,
DebugEnabled debugEnabled, bool multiValueConfigured,
bool refTypesConfigured, bool gcTypesConfigured, bool hugeMemory,
bool bigIntConfigured, bool v128Configured)
: state_(InitialWithModeTierDebug),
mode_(mode),
tier_(tier),
@ -437,7 +436,8 @@ CompilerEnvironment::CompilerEnvironment(CompileMode mode, Tier tier,
gcTypes_(gcTypesConfigured),
multiValues_(multiValueConfigured),
hugeMemory_(hugeMemory),
bigInt_(bigIntConfigured) {}
bigInt_(bigIntConfigured),
v128_(v128Configured) {}
void CompilerEnvironment::computeParameters() {
MOZ_ASSERT(state_ == InitialWithModeTierDebug);
@ -463,6 +463,7 @@ void CompilerEnvironment::computeParameters(Decoder& d) {
bool hugeMemory = args_->hugeMemory;
bool bigIntEnabled = args_->bigIntEnabled;
bool multiValuesEnabled = args_->multiValuesEnabled;
bool v128Enabled = args_->v128Enabled;
bool hasSecondTier = ionEnabled || craneliftEnabled;
MOZ_ASSERT_IF(debugEnabled, baselineEnabled);
@ -498,6 +499,8 @@ void CompilerEnvironment::computeParameters(Decoder& d) {
hugeMemory_ = hugeMemory;
bigInt_ = bigIntEnabled;
multiValues_ = multiValuesEnabled;
v128_ = v128Enabled;
state_ = Computed;
}
@ -604,6 +607,7 @@ void wasm::CompileTier2(const CompileArgs& args, const Bytes& bytecode,
#endif
bool multiValueConfigured = args.multiValuesEnabled;
bool bigIntConfigured = args.bigIntEnabled;
bool v128Configured = args.v128Enabled;
OptimizedBackend optimizedBackend = args.craneliftEnabled
? OptimizedBackend::Cranelift
@ -612,7 +616,7 @@ void wasm::CompileTier2(const CompileArgs& args, const Bytes& bytecode,
CompilerEnvironment compilerEnv(
CompileMode::Tier2, Tier::Optimized, optimizedBackend,
DebugEnabled::False, multiValueConfigured, refTypesConfigured,
gcTypesConfigured, args.hugeMemory, bigIntConfigured);
gcTypesConfigured, args.hugeMemory, bigIntConfigured, v128Configured);
ModuleEnvironment env(&compilerEnv, args.sharedMemoryEnabled
? Shareable::True


@ -61,6 +61,7 @@ struct CompileArgs : ShareableBase<CompileArgs> {
bool hugeMemory;
bool bigIntEnabled;
bool multiValuesEnabled;
bool v128Enabled;
// CompileArgs has two constructors:
//
@ -86,7 +87,8 @@ struct CompileArgs : ShareableBase<CompileArgs> {
gcEnabled(false),
hugeMemory(false),
bigIntEnabled(false),
multiValuesEnabled(false) {}
multiValuesEnabled(false),
v128Enabled(false) {}
};
// Return the estimated compiled (machine) code size for the given bytecode size


@ -53,10 +53,11 @@ enum class TypeCode {
// non-special-purpose) types are added here then you MUST update
// LowestPrimitiveTypeCode, below.
I32 = 0x7f, // SLEB128(-0x01)
I64 = 0x7e, // SLEB128(-0x02)
F32 = 0x7d, // SLEB128(-0x03)
F64 = 0x7c, // SLEB128(-0x04)
I32 = 0x7f, // SLEB128(-0x01)
I64 = 0x7e, // SLEB128(-0x02)
F32 = 0x7d, // SLEB128(-0x03)
F64 = 0x7c, // SLEB128(-0x04)
V128 = 0x7b, // SLEB128(-0x05)
// A function pointer with any signature
FuncRef = 0x70, // SLEB128(-0x10)
@ -86,7 +87,7 @@ enum class TypeCode {
// UnpackTypeCodeTypeAbstracted(). If primitive typecodes are added below any
// reference typecode then the logic in that function MUST change.
static constexpr TypeCode LowestPrimitiveTypeCode = TypeCode::F64;
static constexpr TypeCode LowestPrimitiveTypeCode = TypeCode::V128;
enum class FuncTypeIdDescKind { None, Immediate, Global };


@ -392,6 +392,13 @@ bool DebugState::getGlobal(Instance& instance, uint32_t globalIndex,
// scheme, to make the pointer recognizable without revealing it.
vp.set(MagicValue(JS_OPTIMIZED_OUT));
break;
case ValType::V128:
// Debugger must be updated to handle this, and should be updated to
// handle i64 in any case.
vp.set(MagicValue(JS_OPTIMIZED_OUT));
break;
default:
MOZ_CRASH("Global constant type");
}
return true;
}
@ -424,6 +431,15 @@ bool DebugState::getGlobal(Instance& instance, uint32_t globalIndex,
vp.set(MagicValue(JS_OPTIMIZED_OUT));
break;
}
case ValType::V128: {
// Just hide it. See above.
vp.set(MagicValue(JS_OPTIMIZED_OUT));
break;
}
default: {
MOZ_CRASH("Global variable type");
break;
}
}
return true;
}


@ -1260,6 +1260,7 @@ static const char* ThunkedNativeToDescription(SymbolicAddress func) {
case SymbolicAddress::CallImport_Void:
case SymbolicAddress::CallImport_I32:
case SymbolicAddress::CallImport_I64:
case SymbolicAddress::CallImport_V128:
case SymbolicAddress::CallImport_F64:
case SymbolicAddress::CallImport_FuncRef:
case SymbolicAddress::CallImport_AnyRef:
@ -1359,8 +1360,8 @@ static const char* ThunkedNativeToDescription(SymbolicAddress func) {
return "call to native wake (in wasm)";
case SymbolicAddress::CoerceInPlace_JitEntry:
return "out-of-line coercion for jit entry arguments (in wasm)";
case SymbolicAddress::ReportInt64JSCall:
return "jit call to int64 wasm function";
case SymbolicAddress::ReportInt64OrV128JSCall:
return "jit call to int64 or v128 wasm function";
case SymbolicAddress::MemCopy:
case SymbolicAddress::MemCopyShared:
return "call to native memory.copy function";


@ -1102,6 +1102,7 @@ SharedMetadata ModuleGenerator::finishMetadata(const Bytes& bytecode) {
metadata_->funcNames = std::move(env_->funcNames);
metadata_->omitsBoundsChecks = env_->hugeMemoryEnabled();
metadata_->bigIntEnabled = env_->bigIntEnabled();
metadata_->v128Enabled = env_->v128Enabled();
// Copy over additional debug information.


@ -209,6 +209,8 @@ static bool ToWebAssemblyValue(JSContext* cx, HandleValue val, ValType type,
return ToWebAssemblyValue_f32<Debug>(cx, val, (float*)loc);
case ValType::F64:
return ToWebAssemblyValue_f64<Debug>(cx, val, (double*)loc);
case ValType::V128:
MOZ_CRASH("unexpected v128 in ToWebAssemblyValue");
case ValType::Ref:
switch (type.refTypeKind()) {
case RefType::Func:
@ -297,6 +299,8 @@ static bool ToJSValue(JSContext* cx, const void* src, ValType type,
case ValType::F64:
return ToJSValue_f64<Debug>(cx, *reinterpret_cast<const double*>(src),
dst);
case ValType::V128:
MOZ_CRASH("unhandled type in ToJSValue");
case ValType::Ref:
switch (type.refTypeKind()) {
case RefType::Func:
@ -418,9 +422,13 @@ bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
return false;
}
if (fi.funcType().hasI64ArgOrRet() && !I64BigIntConversionAvailable(cx)) {
if ((fi.funcType().hasI64ArgOrRet() && !I64BigIntConversionAvailable(cx))
#ifdef ENABLE_WASM_SIMD
|| fi.funcType().hasV128ArgOrRet()
#endif
) {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_I64_TYPE);
JSMSG_WASM_BAD_VAL_TYPE, "i64 or v128");
return false;
}
@ -478,6 +486,11 @@ bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
return true;
}
#ifdef ENABLE_WASM_SIMD
// Should have been guarded earlier
MOZ_ASSERT(!fi.funcType().hasV128ArgOrRet());
#endif
// Functions with unsupported reference types in signature don't have a jit
// exit at the moment.
if (fi.funcType().temporarilyUnsupportedReftypeForExit()) {
@ -523,6 +536,8 @@ bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
#else
MOZ_CRASH("NYI");
#endif
case ValType::V128:
MOZ_CRASH("Not needed per spec");
case ValType::F32:
if (!argTypes->hasType(TypeSet::DoubleType())) {
return true;
@ -612,11 +627,20 @@ Instance::callImport_i64(Instance* instance, int32_t funcImportIndex,
return ToWebAssemblyValue_i64(cx, rval, (int64_t*)argv);
#else
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_I64_TYPE);
JSMSG_WASM_BAD_VAL_TYPE, "i64");
return false;
#endif
}
/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_v128(Instance* instance, int32_t funcImportIndex,
int32_t argc, uint64_t* argv) {
JSContext* cx = TlsContext.get();
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_VAL_TYPE, "v128");
return false;
}
/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_f64(Instance* instance, int32_t funcImportIndex,
int32_t argc, uint64_t* argv) {
@ -1412,6 +1436,11 @@ void CopyValPostBarriered(uint8_t* dst, const Val& src) {
memcpy(dst, &x, sizeof(x));
break;
}
case ValType::V128: {
V128 x = src.v128();
memcpy(dst, &x, sizeof(x));
break;
}
case ValType::Ref: {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
@ -2100,9 +2129,13 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
return false;
}
if (funcType->hasI64ArgOrRet() && !I64BigIntConversionAvailable(cx)) {
if ((funcType->hasI64ArgOrRet() && !I64BigIntConversionAvailable(cx))
#ifdef ENABLE_WASM_SIMD
|| funcType->hasV128ArgOrRet()
#endif
) {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_I64_TYPE);
JSMSG_WASM_BAD_VAL_TYPE, "i64 or v128");
return false;
}

Просмотреть файл

@ -187,6 +187,7 @@ class Instance {
static int32_t callImport_void(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_i32(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_i64(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_v128(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_f64(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_anyref(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_nullref(Instance*, int32_t, int32_t, uint64_t*);


@ -194,6 +194,8 @@ class FunctionCompiler {
case ValType::I64:
ins = MConstant::NewInt64(alloc(), 0);
break;
case ValType::V128:
return iter().fail("Ion has no SIMD support yet");
case ValType::F32:
ins = MConstant::New(alloc(), Float32Value(0.f), MIRType::Float32);
break;
@ -1160,6 +1162,8 @@ class FunctionCompiler {
def = MWasmRegisterResult::New(alloc(), MIRType::RefOrNull,
result.gpr());
break;
case wasm::ValType::V128:
return this->iter().fail("Ion has no SIMD support yet");
}
} else {
MOZ_ASSERT(stackResultArea);
@ -2344,6 +2348,8 @@ static bool EmitGetGlobal(FunctionCompiler& f) {
case ValType::F64:
result = f.constant(value.f64());
break;
case ValType::V128:
return f.iter().fail("Ion has no SIMD support yet");
case ValType::Ref:
switch (value.type().refTypeKind()) {
case RefType::Func:


@ -406,6 +406,9 @@ static bool ToWebAssemblyValue(JSContext* cx, ValType targetType, HandleValue v,
}
break;
}
case ValType::V128: {
break;
}
}
MOZ_CRASH("unexpected import value type, caller must guard");
}
@ -447,6 +450,8 @@ static bool ToJSValue(JSContext* cx, const Val& val, MutableHandleValue out) {
break;
}
break;
case ValType::V128:
break;
}
MOZ_CRASH("unexpected type when translating to a JS value");
}
@ -608,6 +613,12 @@ bool js::wasm::GetImports(JSContext* cx, const Module& module,
return false;
}
if (global.type() == ValType::V128) {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_VAL_TYPE, "v128");
return false;
}
if (!ToWebAssemblyValue(cx, global.type(), v, &val)) {
return false;
}
@ -2752,6 +2763,7 @@ void WasmGlobalObject::trace(JSTracer* trc, JSObject* obj) {
case ValType::F32:
case ValType::I64:
case ValType::F64:
case ValType::V128:
break;
case ValType::Ref:
switch (global->type().refTypeKind()) {
@ -2839,6 +2851,9 @@ WasmGlobalObject* WasmGlobalObject::create(JSContext* cx, HandleVal hval,
MOZ_CRASH("Ref NYI");
}
break;
case ValType::V128:
cell->v128 = val.v128();
break;
}
obj->initReservedSlot(TYPE_SLOT,
Int32Value(int32_t(val.type().bitsUnsafe())));
@ -2908,6 +2923,10 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
StringEqualsLiteral(typeLinearStr, "i64")) {
globalType = ValType::I64;
#endif
#ifdef ENABLE_WASM_SIMD
} else if (SimdAvailable(cx) && StringEqualsLiteral(typeLinearStr, "v128")) {
globalType = ValType::V128;
#endif
#ifdef ENABLE_WASM_REFTYPES
} else if (ReftypesAvailable(cx) &&
StringEqualsLiteral(typeLinearStr, "funcref")) {
@ -2944,6 +2963,9 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
case ValType::F64:
globalVal = Val(double(0.0));
break;
case ValType::V128:
globalVal = Val(V128());
break;
case ValType::Ref:
switch (globalType.refTypeKind()) {
case RefType::Func:
@ -2963,6 +2985,11 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
RootedValue valueVal(cx, args.get(1));
if (!valueVal.isUndefined() ||
(args.length() >= 2 && globalType.isReference())) {
if (globalType == ValType::V128) {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_VAL_TYPE, "v128");
return false;
}
if (!ToWebAssemblyValue(cx, globalType, valueVal, &globalVal)) {
return false;
}
@ -2999,6 +3026,10 @@ bool WasmGlobalObject::valueGetterImpl(JSContext* cx, const CallArgs& args) {
case ValType::F64:
args.thisv().toObject().as<WasmGlobalObject>().value(cx, args.rval());
return true;
case ValType::V128:
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_VAL_TYPE, "v128");
return false;
case ValType::I64:
#ifdef ENABLE_WASM_BIGINT
if (I64BigIntConversionAvailable(cx)) {
@ -3007,7 +3038,7 @@ bool WasmGlobalObject::valueGetterImpl(JSContext* cx, const CallArgs& args) {
}
#endif
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_I64_TYPE);
JSMSG_WASM_BAD_VAL_TYPE, "i64");
return false;
case ValType::Ref:
switch (
@ -3045,9 +3076,10 @@ bool WasmGlobalObject::valueSetterImpl(JSContext* cx, const CallArgs& args) {
return false;
}
if (global->type() == ValType::I64 && !I64BigIntConversionAvailable(cx)) {
if ((global->type() == ValType::I64 && !I64BigIntConversionAvailable(cx)) ||
global->type() == ValType::V128) {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_I64_TYPE);
JSMSG_WASM_BAD_VAL_TYPE, "i64 or v128");
return false;
}
@ -3108,6 +3140,8 @@ void WasmGlobalObject::setVal(JSContext* cx, wasm::HandleVal hval) {
cell->i64 = val.i64();
#endif
break;
case ValType::V128:
MOZ_CRASH("unexpected v128 when setting global's value");
case ValType::Ref:
switch (this->type().refTypeKind()) {
case RefType::Func:
@ -3145,6 +3179,9 @@ void WasmGlobalObject::val(MutableHandleVal outval) const {
case ValType::I64:
outval.set(Val(uint64_t(cell->i64)));
return;
case ValType::V128:
outval.set(Val(cell->v128));
return;
case ValType::F32:
outval.set(Val(cell->f32));
return;


@ -238,8 +238,9 @@ class WasmGlobalObject : public NativeObject {
int64_t i64;
float f32;
double f64;
wasm::V128 v128;
wasm::AnyRef ref;
Cell() : i64(0) {}
Cell() : v128() {}
~Cell() = default;
};


@ -1194,6 +1194,14 @@ static bool MakeStructField(JSContext* cx, const ValType& v, bool isMutable,
t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
Scalar::Int32);
break;
case ValType::V128:
// Align for v128 but allocate only an int32, three more int32 allocations
// will follow immediately. JS will see four immutable int32 values but
// wasm knows it's a single v128. See makeStructTypeDescrs(), below.
props.alignAsV128 = true;
t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
Scalar::Int32);
break;
case ValType::F32:
t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
Scalar::Float32);
@ -1292,6 +1300,28 @@ bool Module::makeStructTypeDescrs(
&ids, &fieldTypeObjs, &fieldProps)) {
return false;
}
} else if (v.kind() == ValType::V128) {
// Ditto v128 fields. These turn into four adjacent i32 fields, using
// the standard xyzw convention.
sf.isMutable = false;
allowConstruct = false;
if (!MakeStructField(cx, ValType::V128, sf.isMutable, "_%d_x", k, &ids,
&fieldTypeObjs, &fieldProps)) {
return false;
}
if (!MakeStructField(cx, ValType::I32, sf.isMutable, "_%d_y", k, &ids,
&fieldTypeObjs, &fieldProps)) {
return false;
}
if (!MakeStructField(cx, ValType::I32, sf.isMutable, "_%d_z", k, &ids,
&fieldTypeObjs, &fieldProps)) {
return false;
}
if (!MakeStructField(cx, ValType::I32, sf.isMutable, "_%d_w", k++, &ids,
&fieldTypeObjs, &fieldProps)) {
return false;
}
} else {
// TypedObjects don't yet have a sufficient notion of type
// constraints on TypedObject properties. Thus we handle fields


@ -54,6 +54,10 @@ static uint32_t ResultStackSize(ValType type) {
return ABIResult::StackSizeOfFloat;
case ValType::F64:
return ABIResult::StackSizeOfDouble;
#ifdef ENABLE_WASM_SIMD
case ValType::V128:
return ABIResult::StackSizeOfV128;
#endif
case ValType::Ref:
return ABIResult::StackSizeOfPtr;
default:
@ -85,6 +89,11 @@ void ABIResultIter::settleRegister(ValType type) {
case ValType::Ref:
cur_ = ABIResult(type, ReturnReg);
break;
#ifdef ENABLE_WASM_SIMD
case ValType::V128:
cur_ = ABIResult(type, ReturnSimd128Reg);
break;
#endif
default:
MOZ_CRASH("Unexpected result type");
}
@ -302,6 +311,9 @@ static void SetupABIArguments(MacroAssembler& masm, const FuncExport& fe,
Register argv, Register scratch) {
// Copy parameters out of argv and into the registers/stack-slots specified by
// the system ABI.
//
// SetupABIArguments are only used for C++ -> wasm calls through callExport(),
// and V128 and Ref types (other than anyref) are not currently allowed.
ArgTypeVector args(fe.funcType());
for (ABIArgIter iter(args); !iter.done(); iter++) {
unsigned argOffset = iter.index() * sizeof(ExportArg);
@ -341,6 +353,13 @@ static void SetupABIArguments(MacroAssembler& masm, const FuncExport& fe,
case MIRType::Float32:
masm.loadFloat32(src, iter->fpu());
break;
case MIRType::Int8x16:
#ifdef ENABLE_WASM_SIMD
masm.loadUnalignedSimd128Float(src, iter->fpu());
break;
#else
MOZ_CRASH("V128 not supported in SetupABIArguments");
#endif
default:
MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("unexpected FPU type");
break;
@ -378,6 +397,9 @@ static void SetupABIArguments(MacroAssembler& masm, const FuncExport& fe,
iter->offsetFromArgBase()));
break;
}
case MIRType::Int8x16: {
MOZ_CRASH("SIMD stack argument");
}
case MIRType::StackResults: {
MOZ_ASSERT(args.isSyntheticStackResultPointerArg(iter.index()));
masm.loadPtr(src, scratch);
@ -412,6 +434,13 @@ static void StoreRegisterResult(MacroAssembler& masm, const FuncExport& fe,
case ValType::I64:
masm.store64(result.gpr64(), Address(loc, 0));
break;
case ValType::V128:
#ifdef ENABLE_WASM_SIMD
masm.storeUnalignedSimd128Float(result.fpr(), Address(loc, 0));
break;
#else
MOZ_CRASH("V128 not supported in StoreABIReturn");
#endif
case ValType::F32:
masm.canonicalizeFloat(result.fpr());
masm.storeFloat32(result.fpr(), Address(loc, 0));
@ -923,9 +952,13 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
GenerateJitEntryLoadTls(masm, frameSize);
if (fe.funcType().hasI64ArgOrRet() && !bigIntEnabled) {
if ((fe.funcType().hasI64ArgOrRet() && !bigIntEnabled)
#ifdef ENABLE_WASM_SIMD
|| fe.funcType().hasV128ArgOrRet()
#endif
) {
CallSymbolicAddress(masm, !fe.hasEagerStubs(),
SymbolicAddress::ReportInt64JSCall);
SymbolicAddress::ReportInt64OrV128JSCall);
GenerateJitEntryThrow(masm, frameSize);
return FinishOffsets(masm, offsets);
}
@ -1083,6 +1116,10 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
}
break;
}
case ValType::V128: {
// Guarded against by hasV128ArgOrRet()
MOZ_CRASH("unexpected argument type when calling from the jit");
}
default: {
MOZ_CRASH("unexpected argument type when calling from the jit");
}
@ -1232,6 +1269,9 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
MOZ_CRASH("unexpected return type when calling from ion to wasm");
#endif
}
case ValType::V128: {
MOZ_CRASH("unexpected return type when calling from ion to wasm");
}
case ValType::Ref: {
switch (results[0].refTypeKind()) {
case RefType::Func:
@ -1533,6 +1573,7 @@ void wasm::GenerateDirectCallFromJit(MacroAssembler& masm, const FuncExport& fe,
}
break;
case wasm::ValType::I64:
case wasm::ValType::V128:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
}
}
@ -1684,7 +1725,6 @@ static void FillArgumentArrayForExit(
break;
#endif
case ABIArg::FPU: {
MOZ_ASSERT(IsFloatingPointType(type));
FloatRegister srcReg = i->fpu();
if (type == MIRType::Double) {
if (toValue) {
@ -1698,8 +1738,7 @@ static void FillArgumentArrayForExit(
GenPrintF64(DebugChannel::Import, masm, srcReg);
masm.storeDouble(srcReg, dst);
}
} else {
MOZ_ASSERT(type == MIRType::Float32);
} else if (type == MIRType::Float32) {
if (toValue) {
// JS::Values can't store Float32, so convert to a Double.
ScratchDoubleScope fpscratch(masm);
@ -1712,6 +1751,21 @@ static void FillArgumentArrayForExit(
GenPrintF32(DebugChannel::Import, masm, srcReg);
masm.storeFloat32(srcReg, dst);
}
} else if (type == MIRType::Int8x16) {
// The value should never escape; the call will be stopped later as
// the import is being called. But we should generate something sane
// here for the boxed case since a debugger or the stack walker may
// observe something.
ScratchDoubleScope dscratch(masm);
masm.loadConstantDouble(0, dscratch);
GenPrintF64(DebugChannel::Import, masm, dscratch);
if (toValue) {
masm.boxDouble(dscratch, dst);
} else {
masm.storeDouble(dscratch, dst);
}
} else {
MOZ_CRASH("Unknown MIRType in wasm exit stub");
}
break;
}
@ -1756,6 +1810,15 @@ static void FillArgumentArrayForExit(
masm.canonicalizeDouble(dscratch);
GenPrintF64(DebugChannel::Import, masm, dscratch);
masm.boxDouble(dscratch, dst);
} else if (type == MIRType::Int8x16) {
// The value should never escape; the call will be stopped later as
// the import is being called. But we should generate something
// sane here for the boxed case since a debugger or the stack walker
// may observe something.
ScratchDoubleScope dscratch(masm);
masm.loadConstantDouble(0, dscratch);
GenPrintF64(DebugChannel::Import, masm, dscratch);
masm.boxDouble(dscratch, dst);
} else {
MOZ_CRASH(
"FillArgumentArrayForExit, ABIArg::Stack: unexpected type");
@ -1988,6 +2051,11 @@ static bool GenerateImportInterpExit(MacroAssembler& masm, const FuncImport& fi,
funcImportIndex);
GenPrintI64(DebugChannel::Import, masm, ReturnReg64);
break;
case ValType::V128:
// Note, CallImport_V128 currently always throws.
masm.call(SymbolicAddress::CallImport_V128);
masm.jump(throwLabel);
break;
case ValType::F32:
masm.call(SymbolicAddress::CallImport_F64);
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
@ -2246,6 +2314,10 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
masm.breakpoint();
#endif
break;
case ValType::V128:
// Unreachable as callImport should not call the stub.
masm.breakpoint();
break;
case ValType::F32:
masm.convertValueToFloat(JSReturnOperand, ReturnFloat32Reg,
&oolConvert);
@ -2718,6 +2790,13 @@ bool wasm::GenerateEntryStubs(MacroAssembler& masm, size_t funcExportIndex,
return true;
}
#ifdef ENABLE_WASM_SIMD
// SIMD spec requires JS calls to exports with V128 in the signature to throw.
if (fe.funcType().hasV128ArgOrRet()) {
return true;
}
#endif
// Returning multiple values to JS JIT code not yet implemented (see
// bug 1595031).
if (fe.funcType().temporarilyUnsupportedResultCountForJitEntry()) {
@ -2764,6 +2843,14 @@ bool wasm::GenerateStubs(const ModuleEnvironment& env,
return false;
}
#ifdef ENABLE_WASM_SIMD
// SIMD spec requires calls to JS functions with V128 in the signature to
// throw.
if (fi.funcType().hasV128ArgOrRet()) {
continue;
}
#endif
if (fi.funcType().temporarilyUnsupportedReftypeForExit()) {
continue;
}


@ -59,6 +59,9 @@ class ABIResult {
case ValType::Ref:
MOZ_ASSERT(loc_ == Location::Gpr);
break;
case ValType::V128:
MOZ_ASSERT(loc_ == Location::Fpr);
break;
}
#endif
}
@ -84,6 +87,9 @@ class ABIResult {
static constexpr size_t StackSizeOfFloat = sizeof(double);
#endif
static constexpr size_t StackSizeOfDouble = sizeof(double);
#ifdef ENABLE_WASM_SIMD
static constexpr size_t StackSizeOfV128 = sizeof(V128);
#endif
ABIResult(ValType type, Register gpr)
: type_(type), loc_(Location::Gpr), gpr_(gpr) {


@ -79,6 +79,9 @@ Val::Val(const LitVal& val) {
case ValType::F64:
u.f64_ = val.f64();
return;
case ValType::V128:
u.v128_ = val.v128();
return;
case ValType::Ref:
u.ref_ = val.ref();
return;
@ -254,6 +257,7 @@ static bool IsImmediateType(ValType vt) {
case ValType::I64:
case ValType::F32:
case ValType::F64:
case ValType::V128:
return true;
case ValType::Ref:
switch (vt.refTypeKind()) {
@ -280,14 +284,16 @@ static unsigned EncodeImmediateType(ValType vt) {
return 2;
case ValType::F64:
return 3;
case ValType::V128:
return 4;
case ValType::Ref:
switch (vt.refTypeKind()) {
case RefType::Func:
return 4;
case RefType::Any:
return 5;
case RefType::Null:
case RefType::Any:
return 6;
case RefType::Null:
return 7;
case RefType::TypeIndex:
break;
}
@ -708,6 +714,11 @@ bool DebugFrame::getLocal(uint32_t localIndex, MutableHandleValue vp) {
case jit::MIRType::RefOrNull:
vp.set(ObjectOrNullValue(*(JSObject**)dataPtr));
break;
#ifdef ENABLE_WASM_SIMD
case jit::MIRType::Int8x16:
vp.set(NumberValue(0));
break;
#endif
default:
MOZ_CRASH("local type");
}


@ -429,6 +429,7 @@ class ValType {
case TypeCode::I64:
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::V128:
case TypeCode::AnyRef:
case TypeCode::FuncRef:
case TypeCode::NullRef:
@ -446,6 +447,7 @@ class ValType {
I64 = uint8_t(TypeCode::I64),
F32 = uint8_t(TypeCode::F32),
F64 = uint8_t(TypeCode::F64),
V128 = uint8_t(TypeCode::V128),
Ref = uint8_t(TypeCode::OptRef),
};
@ -490,6 +492,9 @@ class ValType {
case jit::MIRType::Double:
tc_ = PackTypeCode(TypeCode::F64);
break;
case jit::MIRType::Int8x16:
tc_ = PackTypeCode(TypeCode::V128);
break;
default:
MOZ_CRASH("ValType(MIRType): unexpected type");
}
@ -502,6 +507,7 @@ class ValType {
case TypeCode::I64:
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::V128:
break;
default:
MOZ_CRASH("Bad type code");
@ -600,6 +606,28 @@ class ValType {
bool operator!=(Kind that) const { return !(*this == that); }
};
struct V128 {
uint8_t bytes[16];
V128() { memset(bytes, 0, sizeof(bytes)); }
template <typename T>
T extractLane(int lane) {
T result;
MOZ_ASSERT(lane < 16 / sizeof(T));
memcpy(&result, bytes + sizeof(T) * lane, sizeof(T));
return result;
}
template <typename T>
void insertLane(int lane, T value) {
MOZ_ASSERT(lane < 16 / sizeof(T));
memcpy(bytes + sizeof(T) * lane, &value, sizeof(T));
}
};
static_assert(sizeof(V128) == 16, "Invariant");
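// Illustrative sketch, not part of this patch: lanes are indexed by element
// size, so the same V128 can be viewed as 16 x u8, 8 x u16, 4 x u32, or
// 2 x u64. For example (assuming a little-endian host):
//
//   V128 v;
//   v.insertLane<uint32_t>(3, 0x01020304);     // writes bytes 12..15
//   uint32_t hi = v.extractLane<uint32_t>(3);  // hi == 0x01020304
//   uint8_t lo = v.extractLane<uint8_t>(12);   // lo == 0x04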
// The dominant use of this data type is for locals and args, and profiling
// with ZenGarden and Tanks suggests an initial size of 16 minimises heap
// allocation, both in terms of blocks and bytes.
@ -615,6 +643,8 @@ static inline unsigned SizeOf(ValType vt) {
case ValType::I64:
case ValType::F64:
return 8;
case ValType::V128:
return 16;
case ValType::Ref:
return sizeof(intptr_t);
}
@ -636,6 +666,8 @@ static inline jit::MIRType ToMIRType(ValType vt) {
return jit::MIRType::Float32;
case ValType::F64:
return jit::MIRType::Double;
case ValType::V128:
return jit::MIRType::Int8x16;
case ValType::Ref:
return jit::MIRType::RefOrNull;
}
@ -654,6 +686,8 @@ static inline const char* ToCString(ValType type) {
return "i32";
case ValType::I64:
return "i64";
case ValType::V128:
return "v128";
case ValType::F32:
return "f32";
case ValType::F64:
@ -946,6 +980,7 @@ class LitVal {
float f32_;
double f64_;
AnyRef ref_;
V128 v128_;
} u;
public:
@ -969,12 +1004,14 @@ class LitVal {
u.f64_ = 0;
break;
}
case ValType::Kind::V128: {
new (&u.v128_) V128();
break;
}
case ValType::Kind::Ref: {
u.ref_ = AnyRef::null();
break;
}
default:
MOZ_CRASH();
}
}
@ -984,6 +1021,8 @@ class LitVal {
explicit LitVal(float f32) : type_(ValType::F32) { u.f32_ = f32; }
explicit LitVal(double f64) : type_(ValType::F64) { u.f64_ = f64; }
explicit LitVal(V128 v128) : type_(ValType::V128) { u.v128_ = v128; }
explicit LitVal(ValType type, AnyRef any) : type_(type) {
MOZ_ASSERT(type.isReference());
MOZ_ASSERT(any.isNull(),
@ -1014,6 +1053,10 @@ class LitVal {
MOZ_ASSERT(type_.isReference());
return u.ref_;
}
const V128& v128() const {
MOZ_ASSERT(type_ == ValType::V128);
return u.v128_;
}
};
// A Val is a LitVal that can contain (non-null) pointers to GC things. All Vals
@ -1030,6 +1073,7 @@ class MOZ_NON_PARAM Val : public LitVal {
explicit Val(uint64_t i64) : LitVal(i64) {}
explicit Val(float f32) : LitVal(f32) {}
explicit Val(double f64) : LitVal(f64) {}
explicit Val(V128 v128) : LitVal(v128) {}
explicit Val(ValType type, AnyRef val) : LitVal(type, AnyRef::null()) {
MOZ_ASSERT(type.isReference());
u.ref_ = val;
@ -1119,8 +1163,24 @@ class FuncType {
bool temporarilyUnsupportedResultCountForJitExit() const {
return results().length() > MaxResultsForJitExit;
}
// For JS->wasm jit entries, AnyRef parameters and returns are allowed,
// as are all reference types apart from TypeIndex.
#ifdef ENABLE_WASM_SIMD
bool hasV128ArgOrRet() const {
for (ValType arg : args()) {
if (arg == ValType::V128) {
return true;
}
}
for (ValType result : results()) {
if (result == ValType::V128) {
return true;
}
}
return false;
}
#endif
// For JS->wasm jit entries, AnyRef parameters and returns are allowed, as are
// all reference types apart from TypeIndex. V128 types are excluded per spec
// but are guarded against separately.
bool temporarilyUnsupportedReftypeForEntry() const {
for (ValType arg : args()) {
if (arg.isReference() && !arg.isAnyRef()) {
@ -1135,7 +1195,8 @@ class FuncType {
return false;
}
// For inlined JS->wasm jit entries, AnyRef parameters and returns are
// allowed, as are all reference types apart from TypeIndex.
// allowed, as are all reference types apart from TypeIndex. V128 types are
// excluded per spec but are guarded against separately.
bool temporarilyUnsupportedReftypeForInlineEntry() const {
for (ValType arg : args()) {
if (arg.isReference() && !arg.isAnyRef()) {
@ -1150,7 +1211,8 @@ class FuncType {
return false;
}
// For wasm->JS jit exits, AnyRef parameters and returns are allowed, as are
// reference type parameters of all types except TypeIndex.
// reference type parameters of all types except TypeIndex. V128 types are
// excluded per spec but are guarded against separately.
bool temporarilyUnsupportedReftypeForExit() const {
for (ValType arg : args()) {
if (arg.isTypeIndex()) {
@ -2592,10 +2654,11 @@ enum class SymbolicAddress {
HandleDebugTrap,
HandleThrow,
HandleTrap,
ReportInt64JSCall,
ReportInt64OrV128JSCall,
CallImport_Void,
CallImport_I32,
CallImport_I64,
CallImport_V128,
CallImport_F64,
CallImport_FuncRef,
CallImport_AnyRef,
@ -3193,6 +3256,9 @@ class DebugFrame {
AnyRef anyref_;
float f32_;
double f64_;
#ifdef ENABLE_WASM_SIMD
V128 v128_;
#endif
#ifdef DEBUG
// Should we add a new value representation, this will remind us to update
// SpilledRegisterResult.
@ -3202,6 +3268,7 @@ class DebugFrame {
case ValType::I64:
case ValType::F32:
case ValType::F64:
case ValType::V128:
return;
case ValType::Ref:
switch (type.refTypeKind()) {


@ -1405,6 +1405,9 @@ static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
case ValType::F64:
offset = layout.addScalar(Scalar::Float64);
break;
case ValType::V128:
offset = layout.addScalar(Scalar::V128);
break;
case ValType::Ref:
switch (fields[i].type.refTypeKind()) {
case RefType::TypeIndex:
@ -1659,6 +1662,7 @@ static bool GlobalIsJSCompatible(Decoder& d, ValType type) {
case ValType::F32:
case ValType::F64:
case ValType::I64:
case ValType::V128:
break;
case ValType::Ref:
switch (type.refTypeKind()) {
@ -2943,11 +2947,12 @@ bool wasm::Validate(JSContext* cx, const ShareableBytes& bytecode,
bool multiValueConfigured = MultiValuesAvailable(cx);
bool hugeMemory = false;
bool bigIntConfigured = I64BigIntConversionAvailable(cx);
bool v128Configured = SimdAvailable(cx);
CompilerEnvironment compilerEnv(
CompileMode::Once, Tier::Optimized, OptimizedBackend::Ion,
DebugEnabled::False, multiValueConfigured, refTypesConfigured,
gcTypesConfigured, hugeMemory, bigIntConfigured);
gcTypesConfigured, hugeMemory, bigIntConfigured, v128Configured);
ModuleEnvironment env(
&compilerEnv,
cx->realm()->creationOptions().getSharedMemoryAndAtomicsEnabled()


@ -74,6 +74,7 @@ struct CompilerEnvironment {
bool multiValues_;
bool hugeMemory_;
bool bigInt_;
bool v128_;
};
};
@ -89,7 +90,8 @@ struct CompilerEnvironment {
OptimizedBackend optimizedBackend,
DebugEnabled debugEnabled, bool multiValueConfigured,
bool refTypesConfigured, bool gcTypesConfigured,
bool hugeMemory, bool bigIntConfigured);
bool hugeMemory, bool bigIntConfigured,
bool v128Configured);
// Compute any remaining compilation parameters.
void computeParameters(Decoder& d);
@ -136,6 +138,10 @@ struct CompilerEnvironment {
MOZ_ASSERT(isComputed());
return bigInt_;
}
bool v128() const {
MOZ_ASSERT(isComputed());
return v128_;
}
};
// ModuleEnvironment contains all the state necessary to process or render
@ -209,6 +215,7 @@ struct ModuleEnvironment {
bool refTypesEnabled() const { return compilerEnv->refTypes(); }
bool multiValuesEnabled() const { return compilerEnv->multiValues(); }
bool bigIntEnabled() const { return compilerEnv->bigInt(); }
bool v128Enabled() const { return compilerEnv->v128(); }
bool usesMemory() const { return memoryUsage != MemoryUsage::None; }
bool usesSharedMemory() const { return memoryUsage == MemoryUsage::Shared; }
bool isAsmJS() const { return kind == ModuleKind::AsmJS; }
@ -681,6 +688,9 @@ class Decoder {
case uint8_t(TypeCode::F32):
case uint8_t(TypeCode::F64):
case uint8_t(TypeCode::I64):
#ifdef ENABLE_WASM_SIMD
case uint8_t(TypeCode::V128):
#endif
*type = ValType::fromNonRefTypeCode(TypeCode(code));
return true;
#ifdef ENABLE_WASM_REFTYPES