spirv-val: Fix Int64Atomics check (#4192)

This commit is contained in:
sfricke-samsung 2021-03-18 07:50:16 -07:00 committed by GitHub
Parent 4f498774db
Commit a611be7782
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 79 additions and 72 deletions
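The check is moved so that it keys off the atomic's data type (the pointee type of the Pointer operand) instead of the Result Type, and it now applies in every target environment rather than only under Vulkan. As a rough illustration, the following hypothetical OpenCL-flavoured module (a minimal sketch, not taken from the tests below; names such as %var and %device are invented here, and it assumes the usual spirv-as / spirv-val workflow) should now be rejected with "AtomicLoad: 64-bit atomics require the Int64Atomics capability" unless OpCapability Int64Atomics is declared:

  OpCapability Kernel
  OpCapability Addresses
  OpCapability Int64
  ; no OpCapability Int64Atomics -- the relocated check should flag the load below
  OpMemoryModel Physical64 OpenCL
  OpEntryPoint Kernel %main "main"
  %void    = OpTypeVoid
  %u32     = OpTypeInt 32 0
  %u64     = OpTypeInt 64 0
  %fn_void = OpTypeFunction %void
  %ptr_u64 = OpTypePointer Workgroup %u64
  %var     = OpVariable %ptr_u64 Workgroup
  %device  = OpConstant %u32 1   ; Scope Device
  %relaxed = OpConstant %u32 0   ; MemorySemantics None/Relaxed
  %main    = OpFunction %void None %fn_void
  %entry   = OpLabel
  %val     = OpAtomicLoad %u64 %var %device %relaxed
             OpReturn
             OpFunctionEnd

Declaring OpCapability Int64Atomics is expected to make the same module validate, as exercised by the AtomicLoadInt64KernelSuccess test added below.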


@@ -151,45 +151,6 @@ spv_result_t AtomicsPass(ValidationState_t& _, const Instruction* inst) {
<< ": expected Result Type to be int or float scalar type";
}
}
if (spvIsVulkanEnv(_.context()->target_env) &&
(_.GetBitWidth(result_type) != 32 &&
(_.GetBitWidth(result_type) != 64 ||
!_.HasCapability(SpvCapabilityInt64ImageEXT)))) {
switch (opcode) {
case SpvOpAtomicSMin:
case SpvOpAtomicUMin:
case SpvOpAtomicSMax:
case SpvOpAtomicUMax:
case SpvOpAtomicAnd:
case SpvOpAtomicOr:
case SpvOpAtomicXor:
case SpvOpAtomicIAdd:
case SpvOpAtomicISub:
case SpvOpAtomicFAddEXT:
case SpvOpAtomicLoad:
case SpvOpAtomicStore:
case SpvOpAtomicExchange:
case SpvOpAtomicIIncrement:
case SpvOpAtomicIDecrement:
case SpvOpAtomicCompareExchangeWeak:
case SpvOpAtomicCompareExchange: {
if (_.GetBitWidth(result_type) == 64 &&
_.IsIntScalarType(result_type) &&
!_.HasCapability(SpvCapabilityInt64Atomics))
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": 64-bit atomics require the Int64Atomics "
"capability";
} break;
default:
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": according to the Vulkan spec atomic Result Type "
"needs "
"to be a 32-bit int scalar type";
}
}
}
uint32_t operand_index =
@@ -204,6 +165,14 @@ spv_result_t AtomicsPass(ValidationState_t& _, const Instruction* inst) {
<< ": expected Pointer to be of type OpTypePointer";
}
// Can't use result_type because OpAtomicStore doesn't have a result
if (_.GetBitWidth(data_type) == 64 && _.IsIntScalarType(data_type) &&
!_.HasCapability(SpvCapabilityInt64Atomics)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": 64-bit atomics require the Int64Atomics capability";
}
// Validate storage class against universal rules
if (!IsStorageClassAllowedByUniversalRules(storage_class)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
@@ -226,15 +195,6 @@ spv_result_t AtomicsPass(ValidationState_t& _, const Instruction* inst) {
"PhysicalStorageBuffer.";
}
// Can't use result_type because OpAtomicStore doesn't have a result
if (opcode == SpvOpAtomicStore && _.GetBitWidth(data_type) == 64 &&
_.IsIntScalarType(data_type) &&
!_.HasCapability(SpvCapabilityInt64Atomics)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": 64-bit atomics require the Int64Atomics "
"capability";
}
} else if (storage_class == SpvStorageClassFunction) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
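As the in-code comment notes, OpAtomicStore produces no result id, which is why the relocated check reads data_type from the pointer's pointee type rather than from result_type. A hedged sketch of the store case (same hypothetical module conventions as the earlier example) that should pass validation once the capability is declared:

  OpCapability Kernel
  OpCapability Addresses
  OpCapability Int64
  OpCapability Int64Atomics        ; with this declared, the 64-bit store below validates
  OpMemoryModel Physical64 OpenCL
  OpEntryPoint Kernel %main "main"
  %void    = OpTypeVoid
  %u32     = OpTypeInt 32 0
  %u64     = OpTypeInt 64 0
  %fn_void = OpTypeFunction %void
  %ptr_u64 = OpTypePointer Workgroup %u64
  %var     = OpVariable %ptr_u64 Workgroup
  %device  = OpConstant %u32 1     ; Scope Device
  %relaxed = OpConstant %u32 0     ; MemorySemantics None/Relaxed
  %u64_1   = OpConstant %u64 1
  %main    = OpFunction %void None %fn_void
  %entry   = OpLabel
             OpAtomicStore %var %device %relaxed %u64_1   ; no result id; the check uses the pointee type
             OpReturn
             OpFunctionEnd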


@@ -222,7 +222,6 @@ TEST_F(ValidateAtomics, AtomicLoadShaderSuccess) {
const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
%val3 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
)";
CompileSuccessfully(GenerateShaderCode(body));
@@ -233,13 +232,30 @@ TEST_F(ValidateAtomics, AtomicLoadKernelSuccess) {
const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
%val3 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
TEST_F(ValidateAtomics, AtomicLoadInt64ShaderSuccess) {
const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
)";
CompileSuccessfully(GenerateShaderCode(body, "OpCapability Int64Atomics\n"));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
TEST_F(ValidateAtomics, AtomicLoadInt64KernelSuccess) {
const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
)";
CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
TEST_F(ValidateAtomics, AtomicLoadInt32VulkanSuccess) {
const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
@@ -555,6 +571,19 @@ TEST_F(ValidateAtomics, AtomicLoadVulkanInt64) {
"AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
}
TEST_F(ValidateAtomics, AtomicLoadKernelInt64) {
const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
}
TEST_F(ValidateAtomics, AtomicStoreVulkanInt64) {
const std::string body = R"(
OpAtomicStore %u64_var %device %relaxed %u64_1
@@ -568,6 +597,19 @@ OpAtomicStore %u64_var %device %relaxed %u64_1
"AtomicStore: 64-bit atomics require the Int64Atomics capability"));
}
TEST_F(ValidateAtomics, AtomicStoreKernelInt64) {
const std::string body = R"(
OpAtomicStore %u64_var %device %relaxed %u64_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicStore: 64-bit atomics require the Int64Atomics capability"));
}
TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64Success) {
const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
@@ -1260,7 +1302,7 @@ TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotInt32Pointer) {
%val1 = OpAtomicFlagTestAndSet %bool %u64_var %device %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
@@ -1334,7 +1376,7 @@ TEST_F(ValidateAtomics, AtomicFlagClearNotInt32Pointer) {
OpAtomicFlagClear %u64_var %device %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),


@@ -5815,6 +5815,11 @@ static const std::string capabilities_and_extensions_image64 = R"(
OpCapability Int64ImageEXT
OpExtension "SPV_EXT_shader_image_int64"
)";
static const std::string capabilities_and_extensions_image64_atomic = R"(
OpCapability Int64Atomics
OpCapability Int64ImageEXT
OpExtension "SPV_EXT_shader_image_int64"
)";
static const std::string declarations_image64 = R"(
%type_image_u64_buffer_0002_r64ui = OpTypeImage %u64 Buffer 0 0 0 2 R64ui
%ptr_Image_u64 = OpTypePointer Image %u64
@@ -5854,11 +5859,11 @@ TEST_F(ValidateImage, ImageTexelPointer64Success) {
%sum = OpAtomicIAdd %u64 %texel_ptr %u32_1 %u32_0 %u64_1
)";
CompileSuccessfully(GenerateShaderCode(body,
capabilities_and_extensions_image64,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3,
"GLSL450", declarations_image64)
.c_str());
CompileSuccessfully(
GenerateShaderCode(body, capabilities_and_extensions_image64_atomic,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3, "GLSL450",
declarations_image64)
.c_str());
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
@@ -5868,11 +5873,11 @@ TEST_F(ValidateImage, ImageTexelPointer64ResultTypeNotPointer) {
%sum = OpAtomicIAdd %u64 %texel_ptr %u32_1 %u32_0 %u64_1
)";
CompileSuccessfully(GenerateShaderCode(body,
capabilities_and_extensions_image64,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3,
"GLSL450", declarations_image64)
.c_str());
CompileSuccessfully(
GenerateShaderCode(body, capabilities_and_extensions_image64_atomic,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3, "GLSL450",
declarations_image64)
.c_str());
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Expected Result Type to be OpTypePointer"));
@@ -5884,11 +5889,11 @@ TEST_F(ValidateImage, ImageTexelPointer64ResultTypeNotImageClass) {
%sum = OpAtomicIAdd %u64 %texel_ptr %u32_1 %u32_0 %u64_1
)";
CompileSuccessfully(GenerateShaderCode(body,
capabilities_and_extensions_image64,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3,
"GLSL450", declarations_image64)
.c_str());
CompileSuccessfully(
GenerateShaderCode(body, capabilities_and_extensions_image64_atomic,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3, "GLSL450",
declarations_image64)
.c_str());
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Expected Result Type to be OpTypePointer whose "
@@ -5901,11 +5906,11 @@ TEST_F(ValidateImage, ImageTexelPointer64SampleNotZeroForImageWithMSZero) {
%sum = OpAtomicIAdd %u64 %texel_ptr %u32_1 %u32_0 %u64_1
)";
CompileSuccessfully(GenerateShaderCode(body,
capabilities_and_extensions_image64,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3,
"GLSL450", declarations_image64)
.c_str());
CompileSuccessfully(
GenerateShaderCode(body, capabilities_and_extensions_image64_atomic,
"Fragment", "", SPV_ENV_UNIVERSAL_1_3, "GLSL450",
declarations_image64)
.c_str());
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Expected Sample for Image with MS 0 to be a valid "