1 // Copyright (c) 2017 Google Inc.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include <sstream>
16 #include <string>
17
18 #include "gmock/gmock.h"
19 #include "test/unit_spirv.h"
20 #include "test/val/val_fixtures.h"
21
22 namespace spvtools {
23 namespace val {
24 namespace {
25
26 using ::testing::HasSubstr;
27 using ::testing::Not;
28
29 using ValidateAtomics = spvtest::ValidateBase<bool>;
30
// Assembles a complete SPIR-V fragment-shader module for the atomics tests.
// The module declares the Shader capability plus |capabilities_and_extensions|,
// a Logical addressing model with the given |memory_model|, a fixed set of
// 32-bit types, scope and memory-semantics constants, and Workgroup variables,
// then caller-supplied |definitions|; |body| is spliced into the %main
// function between OpLabel and OpReturn.
std::string GenerateShaderCodeImpl(
    const std::string& body, const std::string& capabilities_and_extensions,
    const std::string& definitions, const std::string& memory_model) {
  std::ostringstream ss;
  ss << R"(
OpCapability Shader
)";
  ss << capabilities_and_extensions;
  ss << "OpMemoryModel Logical " << memory_model << "\n";
  // Common types, scope ids (CrossDevice..QueueFamilyKHR) and memory-semantics
  // masks (e.g. 6 = Acquire|Release, two separate bits; 8 = AcquireRelease).
  ss << R"(
OpEntryPoint Fragment %main "main"
OpExecutionMode %main OriginUpperLeft
%void = OpTypeVoid
%func = OpTypeFunction %void
%bool = OpTypeBool
%f32 = OpTypeFloat 32
%u32 = OpTypeInt 32 0
%f32vec4 = OpTypeVector %f32 4

%f32_0 = OpConstant %f32 0
%f32_1 = OpConstant %f32 1
%u32_0 = OpConstant %u32 0
%u32_1 = OpConstant %u32 1
%f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0

%cross_device = OpConstant %u32 0
%device = OpConstant %u32 1
%workgroup = OpConstant %u32 2
%subgroup = OpConstant %u32 3
%invocation = OpConstant %u32 4
%queuefamily = OpConstant %u32 5

%relaxed = OpConstant %u32 0
%acquire = OpConstant %u32 2
%release = OpConstant %u32 4
%acquire_release = OpConstant %u32 8
%acquire_and_release = OpConstant %u32 6
%sequentially_consistent = OpConstant %u32 16
%acquire_release_uniform_workgroup = OpConstant %u32 328

%f32_ptr = OpTypePointer Workgroup %f32
%f32_var = OpVariable %f32_ptr Workgroup

%u32_ptr = OpTypePointer Workgroup %u32
%u32_var = OpVariable %u32_ptr Workgroup

%f32vec4_ptr = OpTypePointer Workgroup %f32vec4
%f32vec4_var = OpVariable %f32vec4_ptr Workgroup

%f32_ptr_function = OpTypePointer Function %f32
)";
  ss << definitions;
  ss << R"(
%main = OpFunction %void None %func
%main_entry = OpLabel
)";
  ss << body;
  ss << R"(
OpReturn
OpFunctionEnd)";

  return ss.str();
}
94
GenerateShaderCode(const std::string & body,const std::string & capabilities_and_extensions="",const std::string & memory_model="GLSL450")95 std::string GenerateShaderCode(
96 const std::string& body,
97 const std::string& capabilities_and_extensions = "",
98 const std::string& memory_model = "GLSL450") {
99 const std::string defintions = R"(
100 %u64 = OpTypeInt 64 0
101 %s64 = OpTypeInt 64 1
102
103 %u64_1 = OpConstant %u64 1
104 %s64_1 = OpConstant %s64 1
105
106 %u64_ptr = OpTypePointer Workgroup %u64
107 %s64_ptr = OpTypePointer Workgroup %s64
108 %u64_var = OpVariable %u64_ptr Workgroup
109 %s64_var = OpVariable %s64_ptr Workgroup
110 )";
111 return GenerateShaderCodeImpl(
112 body, "OpCapability Int64\n" + capabilities_and_extensions, defintions,
113 memory_model);
114 }
115
GenerateWebGPUShaderCode(const std::string & body,const std::string & capabilities_and_extensions="")116 std::string GenerateWebGPUShaderCode(
117 const std::string& body,
118 const std::string& capabilities_and_extensions = "") {
119 const std::string vulkan_memory_capability = R"(
120 OpCapability VulkanMemoryModelDeviceScopeKHR
121 OpCapability VulkanMemoryModelKHR
122 )";
123 const std::string vulkan_memory_extension = R"(
124 OpExtension "SPV_KHR_vulkan_memory_model"
125 )";
126 return GenerateShaderCodeImpl(body,
127 vulkan_memory_capability +
128 capabilities_and_extensions +
129 vulkan_memory_extension,
130 "", "VulkanKHR");
131 }
132
// Assembles a complete OpenCL-style kernel module (Physical32/OpenCL memory
// model) with the common types, scope and memory-semantics constants, and
// Workgroup/UniformConstant/Image variables used by the kernel tests; |body|
// is spliced into %main.
std::string GenerateKernelCode(
    const std::string& body,
    const std::string& capabilities_and_extensions = "") {
  std::ostringstream ss;
  ss << R"(
OpCapability Addresses
OpCapability Kernel
OpCapability Linkage
OpCapability Int64
)";

  ss << capabilities_and_extensions;
  // Memory-semantics masks: 6 = Acquire|Release (two separate bits),
  // 8 = AcquireRelease; 328 and 1288 add UniformMemory/AtomicCounterMemory
  // plus WorkgroupMemory bits.
  ss << R"(
OpMemoryModel Physical32 OpenCL
%void = OpTypeVoid
%func = OpTypeFunction %void
%bool = OpTypeBool
%f32 = OpTypeFloat 32
%u32 = OpTypeInt 32 0
%u64 = OpTypeInt 64 0
%f32vec4 = OpTypeVector %f32 4

%f32_0 = OpConstant %f32 0
%f32_1 = OpConstant %f32 1
%u32_0 = OpConstant %u32 0
%u32_1 = OpConstant %u32 1
%u64_1 = OpConstant %u64 1
%f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0

%cross_device = OpConstant %u32 0
%device = OpConstant %u32 1
%workgroup = OpConstant %u32 2
%subgroup = OpConstant %u32 3
%invocation = OpConstant %u32 4

%relaxed = OpConstant %u32 0
%acquire = OpConstant %u32 2
%release = OpConstant %u32 4
%acquire_release = OpConstant %u32 8
%acquire_and_release = OpConstant %u32 6
%sequentially_consistent = OpConstant %u32 16
%acquire_release_uniform_workgroup = OpConstant %u32 328
%acquire_release_atomic_counter_workgroup = OpConstant %u32 1288

%f32_ptr = OpTypePointer Workgroup %f32
%f32_var = OpVariable %f32_ptr Workgroup

%u32_ptr = OpTypePointer Workgroup %u32
%u32_var = OpVariable %u32_ptr Workgroup

%u64_ptr = OpTypePointer Workgroup %u64
%u64_var = OpVariable %u64_ptr Workgroup

%f32vec4_ptr = OpTypePointer Workgroup %f32vec4
%f32vec4_var = OpVariable %f32vec4_ptr Workgroup

%f32_ptr_function = OpTypePointer Function %f32
%f32_ptr_uniformconstant = OpTypePointer UniformConstant %f32
%f32_uc_var = OpVariable %f32_ptr_uniformconstant UniformConstant

%f32_ptr_image = OpTypePointer Image %f32
%f32_im_var = OpVariable %f32_ptr_image Image

%main = OpFunction %void None %func
%main_entry = OpLabel
)";

  ss << body;

  ss << R"(
OpReturn
OpFunctionEnd)";

  return ss.str();
}
208
TEST_F(ValidateAtomics, AtomicLoadShaderSuccess) {
  // 32- and 64-bit integer loads with Relaxed/Acquire/SequentiallyConsistent
  // semantics validate in the universal shader environment.
  CompileSuccessfully(GenerateShaderCode(R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
%val3 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
219
TEST_F(ValidateAtomics, AtomicLoadKernelSuccess) {
  // Kernels may atomically load float as well as integer values.
  CompileSuccessfully(GenerateKernelCode(R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
%val3 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
230
TEST_F(ValidateAtomics, AtomicLoadInt32VulkanSuccess) {
  // 32-bit integer atomic loads are valid in the Vulkan 1.0 environment.
  CompileSuccessfully(GenerateShaderCode(R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
)"),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
240
TEST_F(ValidateAtomics, AtomicLoadFloatVulkan) {
  // Vulkan restricts OpAtomicLoad result types to integer scalars.
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
%val2 = OpAtomicLoad %f32 %f32_var %workgroup %acquire
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("expected Result Type to be int scalar type"));
}
252
TEST_F(ValidateAtomics, AtomicLoadInt64WithCapabilityVulkanSuccess) {
  // With Int64Atomics declared, 64-bit atomic loads validate under Vulkan.
  CompileSuccessfully(
      GenerateShaderCode(R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
)",
                         "OpCapability Int64Atomics\n"),
      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
263
TEST_F(ValidateAtomics, AtomicLoadInt64WithoutCapabilityVulkan) {
  // 64-bit atomic loads are rejected when Int64Atomics is not declared.
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("64-bit atomics require the Int64Atomics capability"));
}
275
TEST_F(ValidateAtomics, AtomicStoreOpenCLFunctionPointerStorageTypeSuccess) {
  // OpenCL permits atomic stores through Function-storage pointers.
  CompileSuccessfully(GenerateKernelCode(R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)"),
                      SPV_ENV_OPENCL_1_2);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_OPENCL_1_2));
}
285
TEST_F(ValidateAtomics, AtomicStoreVulkanFunctionPointerStorageType) {
  // With the Shader capability, atomic stores to Function storage are
  // forbidden.
  const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: Function storage class forbidden when "
                        "the Shader capability is declared."));
}
298
299 // TODO(atgoo@github.com): the corresponding check fails Vulkan CTS,
300 // reenable once fixed.
TEST_F(ValidateAtomics, DISABLED_AtomicLoadVulkanSubgroup) {
  // Subgroup memory scope should be rejected in the Vulkan environment
  // (disabled: see the TODO above — the check fails Vulkan CTS).
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %subgroup %acquire
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: in Vulkan environment memory scope is "
                        "limited to Device, Workgroup and Invocation"));
}
312
TEST_F(ValidateAtomics, AtomicLoadVulkanRelease) {
  // OpAtomicLoad may not use Release semantics under the Vulkan rules.
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %release
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}
325
TEST_F(ValidateAtomics, AtomicLoadVulkanAcquireRelease) {
  // OpAtomicLoad may not use AcquireRelease semantics under the Vulkan rules.
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %acquire_release
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}
338
TEST_F(ValidateAtomics, AtomicLoadVulkanSequentiallyConsistent) {
  // OpAtomicLoad may not use SequentiallyConsistent semantics under Vulkan.
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}
351
TEST_F(ValidateAtomics, AtomicLoadShaderFloat) {
  // Shader modules may not use a float result type for OpAtomicLoad.
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: "
                        "expected Result Type to be int scalar type"));
}
363
TEST_F(ValidateAtomics, AtomicLoadVulkanInt64) {
  // A 64-bit atomic load without Int64Atomics fails under Vulkan.
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
}
376
TEST_F(ValidateAtomics, AtomicLoadWebGPUSuccess) {
  // WebGPU accepts atomic loads with QueueFamilyKHR scope and Relaxed
  // semantics.
  CompileSuccessfully(GenerateWebGPUShaderCode(R"(
%val1 = OpAtomicLoad %u32 %u32_var %queuefamily %relaxed
)"),
                      SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_WEBGPU_0));
}
385
TEST_F(ValidateAtomics, AtomicLoadWebGPUNonQueueFamilyFailure) {
  // WebGPU only allows the QueueFamilyKHR memory scope for atomics.
  const std::string body = R"(
%val3 = OpAtomicLoad %u32 %u32_var %invocation %relaxed
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Scope is limited to QueueFamilyKHR for "
                        "OpAtomic* operations"));
}
397
TEST_F(ValidateAtomics, AtomicLoadWebGPUNonRelaxedFailure) {
  // WebGPU only allows Relaxed (no bits set) memory semantics for atomics.
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %queuefamily %acquire
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("no bits may be set for Memory Semantics of OpAtomic* "
                        "instructions"));
}
409
TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64Success) {
  // Every atomic opcode on 64-bit signed and unsigned integers validates
  // under Vulkan once the Int64Atomics capability is declared.
  const std::string spirv = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
%val2 = OpAtomicUMax %u64 %u64_var %device %relaxed %u64_1
%val3 = OpAtomicSMin %u64 %u64_var %device %relaxed %u64_1
%val4 = OpAtomicSMax %u64 %u64_var %device %relaxed %u64_1
%val5 = OpAtomicAnd %u64 %u64_var %device %relaxed %u64_1
%val6 = OpAtomicOr %u64 %u64_var %device %relaxed %u64_1
%val7 = OpAtomicXor %u64 %u64_var %device %relaxed %u64_1
%val8 = OpAtomicIAdd %u64 %u64_var %device %relaxed %u64_1
%val9 = OpAtomicExchange %u64 %u64_var %device %relaxed %u64_1
%val10 = OpAtomicCompareExchange %u64 %u64_var %device %relaxed %relaxed %u64_1 %u64_1

%val11 = OpAtomicUMin %s64 %s64_var %device %relaxed %s64_1
%val12 = OpAtomicUMax %s64 %s64_var %device %relaxed %s64_1
%val13 = OpAtomicSMin %s64 %s64_var %device %relaxed %s64_1
%val14 = OpAtomicSMax %s64 %s64_var %device %relaxed %s64_1
%val15 = OpAtomicAnd %s64 %s64_var %device %relaxed %s64_1
%val16 = OpAtomicOr %s64 %s64_var %device %relaxed %s64_1
%val17 = OpAtomicXor %s64 %s64_var %device %relaxed %s64_1
%val18 = OpAtomicIAdd %s64 %s64_var %device %relaxed %s64_1
%val19 = OpAtomicExchange %s64 %s64_var %device %relaxed %s64_1
%val20 = OpAtomicCompareExchange %s64 %s64_var %device %relaxed %relaxed %s64_1 %s64_1

%val21 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val22 = OpAtomicLoad %s64 %s64_var %device %relaxed

OpAtomicStore %u64_var %device %relaxed %u64_1
OpAtomicStore %s64_var %device %relaxed %s64_1
)";

  CompileSuccessfully(GenerateShaderCode(spirv, "OpCapability Int64Atomics\n"),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
445
TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64MissingCapability) {
  // A 64-bit OpAtomicUMin requires the Int64Atomics capability under Vulkan.
  const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicUMin: 64-bit atomics require the Int64Atomics capability"));
}
458
TEST_F(ValidateAtomics, AtomicLoadWrongResultType) {
  // Vector result types are rejected even in kernels (scalars only).
  const std::string body = R"(
%val1 = OpAtomicLoad %f32vec4 %f32vec4_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: "
                        "expected Result Type to be int or float scalar type"));
}
470
TEST_F(ValidateAtomics, AtomicLoadWrongPointerType) {
  // Passing the pointer *type* (%f32_ptr) instead of a variable is an
  // invalid-ID error.
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_ptr %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 27[%_ptr_Workgroup_float] cannot be a type"));
}
481
TEST_F(ValidateAtomics, AtomicLoadWrongPointerDataType) {
  // The pointee type (%f32) must match the Result Type (%u32 here).
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: "
                "expected Pointer to point to a value of type Result Type"));
}
494
TEST_F(ValidateAtomics, AtomicLoadWrongScopeType) {
  // The scope operand must be a 32-bit integer, not a float constant.
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %f32_1 %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: expected scope to be a 32-bit int"));
}
505
TEST_F(ValidateAtomics, AtomicLoadWrongMemorySemanticsType) {
  // The memory-semantics operand must be a 32-bit integer (64-bit rejected).
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %u64_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: expected Memory Semantics to be a 32-bit int"));
}
517
TEST_F(ValidateAtomics, AtomicStoreKernelSuccess) {
  // Kernels may atomically store both float and integer values.
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
OpAtomicStore %u32_var %subgroup %release %u32_1
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
527
TEST_F(ValidateAtomics, AtomicStoreShaderSuccess) {
  // Integer atomic stores with Release/SequentiallyConsistent semantics
  // validate in the universal shader environment.
  CompileSuccessfully(GenerateShaderCode(R"(
OpAtomicStore %u32_var %device %release %u32_1
OpAtomicStore %u32_var %subgroup %sequentially_consistent %u32_1
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
537
TEST_F(ValidateAtomics, AtomicStoreVulkanSuccess) {
  // Release-semantics integer stores are allowed by the Vulkan rules.
  CompileSuccessfully(GenerateShaderCode(R"(
OpAtomicStore %u32_var %device %release %u32_1
)"),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
546
TEST_F(ValidateAtomics, AtomicStoreVulkanAcquire) {
  // OpAtomicStore may not use Acquire semantics under the Vulkan rules.
  const std::string body = R"(
OpAtomicStore %u32_var %device %acquire %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}
559
TEST_F(ValidateAtomics, AtomicStoreVulkanAcquireRelease) {
  // OpAtomicStore may not use AcquireRelease semantics under the Vulkan rules.
  const std::string body = R"(
OpAtomicStore %u32_var %device %acquire_release %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}
572
TEST_F(ValidateAtomics, AtomicStoreVulkanSequentiallyConsistent) {
  // OpAtomicStore may not use SequentiallyConsistent semantics under Vulkan.
  const std::string body = R"(
OpAtomicStore %u32_var %device %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}
585
TEST_F(ValidateAtomics, AtomicStoreWebGPUSuccess) {
  // WebGPU accepts atomic stores with QueueFamilyKHR scope and Relaxed
  // semantics.
  CompileSuccessfully(GenerateWebGPUShaderCode(R"(
OpAtomicStore %u32_var %queuefamily %relaxed %u32_1
)"),
                      SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_WEBGPU_0));
}
TEST_F(ValidateAtomics, AtomicStoreWebGPUNonQueueFamilyFailure) {
  // WebGPU only allows the QueueFamilyKHR memory scope for atomics.
  const std::string body = R"(
OpAtomicStore %u32_var %workgroup %relaxed %u32_1
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Scope is limited to QueueFamilyKHR for "
                        "OpAtomic* operations"));
}
605
TEST_F(ValidateAtomics, AtomicStoreWebGPUNonRelaxedFailure) {
  // WebGPU only allows Relaxed (no bits set) memory semantics for atomics.
  const std::string body = R"(
OpAtomicStore %u32_var %queuefamily %release %u32_1
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("no bits may be set for Memory Semantics of OpAtomic* "
                        "instructions"));
}
617
TEST_F(ValidateAtomics, AtomicStoreWrongPointerType) {
  // The pointer operand must be an OpTypePointer value; a float constant
  // (%f32_1) is rejected.
  const std::string body = R"(
OpAtomicStore %f32_1 %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Pointer to be of type OpTypePointer"));
}
629
TEST_F(ValidateAtomics, AtomicStoreWrongPointerDataType) {
  // The pointee must be an int or float scalar; a vector pointee is rejected.
  const std::string body = R"(
OpAtomicStore %f32vec4_var %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: "
                "expected Pointer to be a pointer to int or float scalar "
                "type"));
}
643
TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageTypeForOpenCL) {
  // Image storage class is not in the OpenCL-permitted set for atomics.
  const std::string body = R"(
OpAtomicStore %f32_im_var %device %relaxed %f32_1
)";

  // Compile for the same OpenCL environment that the validation below
  // targets (consistent with AtomicStoreOpenCLFunctionPointerStorageType-
  // Success above, which passes the environment to both calls).
  CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_OPENCL_1_2);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_OPENCL_1_2));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: storage class must be Function, Workgroup, "
                "CrossWorkGroup or Generic in the OpenCL environment."));
}
656
TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageType) {
  // UniformConstant storage is forbidden for atomics by the universal rules.
  const std::string body = R"(
OpAtomicStore %f32_uc_var %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: storage class forbidden by universal "
                        "validation rules."));
}
668
TEST_F(ValidateAtomics, AtomicStoreWrongScopeType) {
  // The scope operand must be a 32-bit integer; the expected diagnostic also
  // pins the disassembly of the offending instruction.
  const std::string body = R"(
OpAtomicStore %f32_var %f32_1 %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: expected scope to be a 32-bit int\n  "
                        "OpAtomicStore %28 %float_1 %uint_0_1 %float_1\n"));
}
680
TEST_F(ValidateAtomics, AtomicStoreWrongMemorySemanticsType) {
  // The memory-semantics operand must be a 32-bit integer, not a float.
  const std::string body = R"(
OpAtomicStore %f32_var %device %f32_1 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Memory Semantics to be a 32-bit int"));
}
692
TEST_F(ValidateAtomics, AtomicStoreWrongValueType) {
  // The stored value's type (%u32) must match the pointee type (%f32).
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: "
                "expected Value type and the type pointed to by Pointer to "
                "be the same"));
}
706
TEST_F(ValidateAtomics, AtomicExchangeShaderSuccess) {
  // Integer OpAtomicExchange validates in the universal shader environment.
  CompileSuccessfully(GenerateShaderCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
716
TEST_F(ValidateAtomics, AtomicExchangeKernelSuccess) {
  // Kernels may exchange both float and integer values atomically.
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
728
TEST_F(ValidateAtomics, AtomicExchangeShaderFloat) {
  // Shader modules may not use a float result type for OpAtomicExchange.
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Result Type to be int scalar type"));
}
741
TEST_F(ValidateAtomics, AtomicExchangeWrongResultType) {
  // Vector result types are rejected even in kernels (scalars only).
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32vec4 %f32vec4_var %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Result Type to be int or float scalar type"));
}
754
TEST_F(ValidateAtomics, AtomicExchangeWrongPointerType) {
  // Passing the pointer *type* (%f32vec4_ptr) instead of a variable is an
  // invalid-ID error.
  const std::string body = R"(
%val2 = OpAtomicExchange %f32 %f32vec4_ptr %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a "
                        "type"));
}
766
TEST_F(ValidateAtomics, AtomicExchangeWrongPointerDataType) {
  // The pointee type (%f32vec4) must match the Result Type (%f32 here).
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32 %f32vec4_var %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicExchange: "
                "expected Pointer to point to a value of type Result Type"));
}
780
TEST_F(ValidateAtomics, AtomicExchangeWrongScopeType) {
  // The scope operand must be a 32-bit integer, not a float constant.
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %f32_1 %relaxed %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: expected scope to be a 32-bit int"));
}
792
TEST_F(ValidateAtomics, AtomicExchangeWrongMemorySemanticsType) {
  // The memory-semantics operand must be a 32-bit integer, not a float.
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %f32_1 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicExchange: expected Memory Semantics to be a 32-bit int"));
}
806
TEST_F(ValidateAtomics, AtomicExchangeWrongValueType) {
  // The exchanged value's type (%u32) must match the Result Type (%f32).
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Value to be of type Result Type"));
}
819
// Well-formed integer compare-exchange validates under the Shader capability.
TEST_F(ValidateAtomics, AtomicCompareExchangeShaderSuccess) {
  CompileSuccessfully(GenerateShaderCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
829
// Kernels accept compare-exchange on both float and integer data.
TEST_F(ValidateAtomics, AtomicCompareExchangeKernelSuccess) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
841
// Shaders (unlike kernels) reject a float Result Type for compare-exchange.
TEST_F(ValidateAtomics, AtomicCompareExchangeShaderFloat) {
  CompileSuccessfully(GenerateShaderCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val1 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Result Type to be "
                        "int scalar type"));
}
854
// A vector Result Type is never allowed for compare-exchange.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongResultType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %f32vec4 %f32vec4_var %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Result Type to be "
                        "int or float scalar type"));
}
867
// Passing a pointer *type* (not a pointer value) as the Pointer operand is an
// invalid-ID error caught before atomics-specific checks run.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerType) {
  CompileSuccessfully(GenerateKernelCode(R"(
%val2 = OpAtomicCompareExchange %f32 %f32vec4_ptr %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a type"));
}
879
// Pointer's pointee (v4float) does not match the scalar f32 Result Type.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerDataType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %f32 %f32vec4_var %device %relaxed %relaxed %f32_0 %f32_1
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Pointer to point to "
                        "a value of type Result Type"));
}
893
// Scope operand is a float constant instead of a 32-bit int.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongScopeType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %f32_1 %relaxed %relaxed %f32_0 %f32_0
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicCompareExchange: expected scope to be a 32-bit int"));
}
906
// "Equal" Memory Semantics operand is a float constant instead of a 32-bit int.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType1) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %f32_1 %relaxed %f32_0 %f32_0
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
                        "be a 32-bit int"));
}
919
// "Unequal" Memory Semantics operand is a float constant instead of a
// 32-bit int.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType2) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %f32_1 %f32_0 %f32_0
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
                        "be a 32-bit int"));
}
932
// Release semantics are not allowed on the Unequal (failure) path.
TEST_F(ValidateAtomics, AtomicCompareExchangeUnequalRelease) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %release %f32_0 %f32_0
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: Memory Semantics Release and "
                        "AcquireRelease cannot be used for operand Unequal"));
}
945
// Value operand is u32 while Result Type is f32.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongValueType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %u32_0 %f32_1
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Value to be of type "
                        "Result Type"));
}
958
// Comparator operand is u32 while Result Type is f32.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongComparatorType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %u32_1
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Comparator to be of "
                        "type Result Type"));
}
971
// Well-formed integer OpAtomicCompareExchangeWeak validates in a kernel.
TEST_F(ValidateAtomics, AtomicCompareExchangeWeakSuccess) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
981
// The weak form requires an integer Result Type even in kernels.
TEST_F(ValidateAtomics, AtomicCompareExchangeWeakWrongResultType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchangeWeak %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchangeWeak: expected Result Type to "
                        "be int scalar type"));
}
994
// Every arithmetic/bitwise atomic opcode validates with int operands.
TEST_F(ValidateAtomics, AtomicArithmeticsSuccess) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release
%val2 = OpAtomicIDecrement %u32 %u32_var %device %acquire_release
%val3 = OpAtomicIAdd %u32 %u32_var %device %acquire_release %u32_1
%val4 = OpAtomicISub %u32 %u32_var %device %acquire_release %u32_1
%val5 = OpAtomicUMin %u32 %u32_var %device %acquire_release %u32_1
%val6 = OpAtomicUMax %u32 %u32_var %device %acquire_release %u32_1
%val7 = OpAtomicSMin %u32 %u32_var %device %sequentially_consistent %u32_1
%val8 = OpAtomicSMax %u32 %u32_var %device %sequentially_consistent %u32_1
%val9 = OpAtomicAnd %u32 %u32_var %device %sequentially_consistent %u32_1
%val10 = OpAtomicOr %u32 %u32_var %device %sequentially_consistent %u32_1
%val11 = OpAtomicXor %u32 %u32_var %device %sequentially_consistent %u32_1
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
1014
// OpAtomicFlagClear / OpAtomicFlagTestAndSet validate on a 32-bit int pointer.
TEST_F(ValidateAtomics, AtomicFlagsSuccess) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicFlagClear %u32_var %device %release
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %relaxed
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
1024
// Result Type of OpAtomicFlagTestAndSet must be bool, not u32.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongResultType) {
  CompileSuccessfully(GenerateKernelCode(R"(
%val1 = OpAtomicFlagTestAndSet %u32 %u32_var %device %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: expected Result Type to be "
                        "bool scalar type"));
}
1036
// Pointer operand is a plain constant, not an OpTypePointer value.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotPointer) {
  CompileSuccessfully(GenerateKernelCode(R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_1 %device %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: expected Pointer to be of type "
                        "OpTypePointer"));
}
1048
// Pointee is f32; atomic flags require a 32-bit int pointee.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotIntPointer) {
  CompileSuccessfully(GenerateKernelCode(R"(
%val1 = OpAtomicFlagTestAndSet %bool %f32_var %device %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: expected Pointer to point to a "
                        "value of 32-bit int type"));
}
1061
// Pointee is a 64-bit int; only a 32-bit int pointee is accepted.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotInt32Pointer) {
  CompileSuccessfully(GenerateKernelCode(R"(
%val1 = OpAtomicFlagTestAndSet %bool %u64_var %device %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: expected Pointer to point to a "
                        "value of 32-bit int type"));
}
1074
// Scope operand is a 64-bit int constant instead of a 32-bit int.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongScopeType) {
  CompileSuccessfully(GenerateKernelCode(R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %u64_1 %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: expected scope to be a 32-bit "
                        "int"));
}
1086
// Memory Semantics operand is a 64-bit int constant instead of a 32-bit int.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongMemorySemanticsType) {
  CompileSuccessfully(GenerateKernelCode(R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %u64_1
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: expected Memory Semantics to "
                        "be a 32-bit int"));
}
1098
// Acquire semantics are forbidden on OpAtomicFlagClear.
TEST_F(ValidateAtomics, AtomicFlagClearAcquire) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicFlagClear %u32_var %device %acquire
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics Acquire and AcquireRelease cannot "
                        "be used with AtomicFlagClear"));
}
1110
// Pointer operand is a plain constant, not an OpTypePointer value.
TEST_F(ValidateAtomics, AtomicFlagClearNotPointer) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicFlagClear %u32_1 %device %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: expected Pointer to be of type "
                        "OpTypePointer"));
}
1122
// Pointee is f32; atomic flags require a 32-bit int pointee.
TEST_F(ValidateAtomics, AtomicFlagClearNotIntPointer) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicFlagClear %f32_var %device %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: expected Pointer to point to a "
                        "value of 32-bit int type"));
}
1135
// Pointee is a 64-bit int; only a 32-bit int pointee is accepted.
TEST_F(ValidateAtomics, AtomicFlagClearNotInt32Pointer) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicFlagClear %u64_var %device %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: expected Pointer to point to a "
                        "value of 32-bit int type"));
}
1148
// Scope operand is a 64-bit int; the expected diagnostic also pins the
// disassembled instruction text.
TEST_F(ValidateAtomics, AtomicFlagClearWrongScopeType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicFlagClear %u32_var %u64_1 %relaxed
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: expected scope to be a 32-bit "
                        "int\n OpAtomicFlagClear %30 %ulong_1 %uint_0_1\n"));
}
1160
// Memory Semantics operand is a 64-bit int constant instead of a 32-bit int.
TEST_F(ValidateAtomics, AtomicFlagClearWrongMemorySemanticsType) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicFlagClear %u32_var %device %u64_1
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: expected Memory Semantics to be a "
                        "32-bit int"));
}
1173
// Acquire and Release set together (rather than the AcquireRelease bit) is an
// invalid Memory Semantics combination.
TEST_F(ValidateAtomics, AtomicIIncrementAcquireAndRelease) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_and_release
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: Memory Semantics can have at most "
                        "one of the following bits set: Acquire, Release, "
                        "AcquireRelease or SequentiallyConsistent"));
}
1187
// UniformMemory semantics are accepted when the Shader capability is present.
TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsShader) {
  CompileSuccessfully(GenerateShaderCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
1197
// The same UniformMemory semantics must be rejected in a kernel, which lacks
// the Shader capability.
TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsKernel) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
)"));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: Memory Semantics UniformMemory "
                        "requires capability Shader"));
}
1210
1211 // Lack of the AtomicStorage capability is intentionally ignored, see
1212 // https://github.com/KhronosGroup/glslang/issues/1618 for the reasoning why.
TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsNoCapability) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device
%acquire_release_atomic_counter_workgroup
)"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
1223
// AtomicCounterMemory semantics validate when AtomicStorage is declared.
TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsWithCapability) {
  CompileSuccessfully(GenerateKernelCode(R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_atomic_counter_workgroup
)",
                                         "OpCapability AtomicStorage\n"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
1233
// SequentiallyConsistent on OpAtomicLoad is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicLoad) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%ld = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1252
// SequentiallyConsistent on OpAtomicStore is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicStore) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
OpAtomicStore %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1271
// SequentiallyConsistent on OpAtomicExchange is banned under VulkanKHR.
TEST_F(ValidateAtomics,
       VulkanMemoryModelBanSequentiallyConsistentAtomicExchange) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%ex = OpAtomicExchange %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1291
// SequentiallyConsistent on the Equal semantics operand is banned.
TEST_F(ValidateAtomics,
       VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %sequentially_consistent %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1311
// SequentiallyConsistent on the Unequal semantics operand is banned.
TEST_F(ValidateAtomics,
       VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %relaxed %sequentially_consistent %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1331
// SequentiallyConsistent on OpAtomicIIncrement is banned under VulkanKHR.
TEST_F(ValidateAtomics,
       VulkanMemoryModelBanSequentiallyConsistentAtomicIIncrement) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%inc = OpAtomicIIncrement %u32 %u32_var %workgroup %sequentially_consistent
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1351
// SequentiallyConsistent on OpAtomicIDecrement is banned under VulkanKHR.
TEST_F(ValidateAtomics,
       VulkanMemoryModelBanSequentiallyConsistentAtomicIDecrement) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%dec = OpAtomicIDecrement %u32 %u32_var %workgroup %sequentially_consistent
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1371
// SequentiallyConsistent on OpAtomicIAdd is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicIAdd) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%add = OpAtomicIAdd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1390
// SequentiallyConsistent on OpAtomicISub is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicISub) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%sub = OpAtomicISub %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1409
// SequentiallyConsistent on OpAtomicSMin is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMin) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%min = OpAtomicSMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1428
// SequentiallyConsistent on OpAtomicUMin is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMin) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%min = OpAtomicUMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1447
// SequentiallyConsistent on OpAtomicSMax is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMax) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%max = OpAtomicSMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1466
// SequentiallyConsistent on OpAtomicUMax is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMax) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%max = OpAtomicUMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1485
// SequentiallyConsistent on OpAtomicAnd is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicAnd) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%and = OpAtomicAnd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1504
// SequentiallyConsistent on OpAtomicOr is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicOr) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%or = OpAtomicOr %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1523
// SequentiallyConsistent on OpAtomicXor is banned under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicXor) {
  const std::string vmm = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  const std::string spirv = R"(
%xor = OpAtomicXor %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(spirv, vmm, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}
1542
// Semantics value 4100 (Release | OutputMemoryKHR) needs VulkanMemoryModelKHR;
// this module only declares Shader.
TEST_F(ValidateAtomics, OutputMemoryKHRRequiresVulkanMemoryModelKHR) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 4100
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: Memory Semantics OutputMemoryKHR "
                        "requires capability VulkanMemoryModelKHR"));
}
1569
// Semantics value 8196 (Release | MakeAvailableKHR) needs VulkanMemoryModelKHR;
// this module only declares Shader.
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresVulkanMemoryModelKHR) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8196
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: Memory Semantics MakeAvailableKHR "
                        "requires capability VulkanMemoryModelKHR"));
}
1596
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresVulkanMemoryModelKHR) {
  // %semantics = 16386 = 0x4002: the 0x4000 bit (MakeVisibleKHR per the
  // expected diagnostic) is used without the VulkanMemoryModelKHR capability.
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: Memory Semantics MakeVisibleKHR "
                        "requires capability VulkanMemoryModelKHR"));
}
1623
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresReleaseSemantics) {
  // %semantics = 8448 = 0x2100: has the MakeAvailableKHR bit but neither
  // Release nor AcquireRelease, which the validator requires alongside it.
  const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8448
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: MakeAvailableKHR Memory Semantics also "
                        "requires either Release or AcquireRelease Memory "
                        "Semantics"));
}
1654
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresAcquireSemantics) {
  // %semantics = 16640 = 0x4100: has the MakeVisibleKHR bit but neither
  // Acquire nor AcquireRelease, which the validator requires alongside it.
  const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16640
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: MakeVisibleKHR Memory Semantics also "
                        "requires either Acquire or AcquireRelease Memory "
                        "Semantics"));
}
1685
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresStorageSemantics) {
  // %semantics = 8196 = 0x2004: MakeAvailableKHR plus an ordering bit, but no
  // storage-class bits, which the validator requires here.
  const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8196
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: expected Memory Semantics to include a "
                        "storage class"));
}
1716
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresStorageSemantics) {
  // %semantics = 16386 = 0x4002: MakeVisibleKHR plus an ordering bit, but no
  // storage-class bits, which the validator requires here.
  const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: expected Memory Semantics to include a "
                        "storage class"));
}
1747
TEST_F(ValidateAtomics, VulkanMemoryModelAllowsQueueFamilyKHR) {
  // QueueFamilyKHR scope is accepted when the Vulkan memory model is enabled.
  const std::string extensions = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body, extensions, "VulkanKHR"),
                      SPV_ENV_VULKAN_1_1);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_1));
}
1762
TEST_F(ValidateAtomics, NonVulkanMemoryModelDisallowsQueueFamilyKHR) {
  // Same scope as the test above, but without the Vulkan memory model
  // capability: validation must fail.
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_1);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_1));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicAnd: Memory Scope QueueFamilyKHR requires "
                        "capability VulkanMemoryModelKHR\n %42 = OpAtomicAnd "
                        "%uint %29 %uint_5 %uint_0_1 %uint_1\n"));
}
1775
TEST_F(ValidateAtomics, SemanticsSpecConstantShader) {
  // With the Shader capability, a Memory Semantics operand produced by
  // OpSpecConstant (rather than OpConstant) must be rejected.
  const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Memory Semantics ids must be OpConstant when Shader "
                "capability is present"));
}
1802
TEST_F(ValidateAtomics, SemanticsSpecConstantKernel) {
  // Same spec-constant semantics operand as the Shader variant above, but
  // under the Kernel capability it validates cleanly.
  const std::string text = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
1825
TEST_F(ValidateAtomics, ScopeSpecConstantShader) {
  // With the Shader capability, a Memory Scope operand produced by
  // OpSpecConstant (rather than OpConstant) must be rejected.
  const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Scope ids must be OpConstant when Shader capability "
                        "is present"));
}
1853
TEST_F(ValidateAtomics, ScopeSpecConstantKernel) {
  // Same spec-constant scope operand as the Shader variant above, but under
  // the Kernel capability it validates cleanly.
  const std::string text = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
1876
TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeBad) {
  // Device scope under VulkanKHR additionally needs the
  // VulkanMemoryModelDeviceScopeKHR capability, which is omitted here.
  const std::string extensions = R"(OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body, extensions, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Use of device scope with VulkanKHR memory model "
                        "requires the VulkanMemoryModelDeviceScopeKHR "
                        "capability"));
}
1895
TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeGood) {
  // Adding VulkanMemoryModelDeviceScopeKHR makes device scope legal under the
  // VulkanKHR memory model.
  const std::string extensions = R"(OpCapability VulkanMemoryModelKHR
OpCapability VulkanMemoryModelDeviceScopeKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body, extensions, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
}
1910
TEST_F(ValidateAtomics, WebGPUCrossDeviceMemoryScopeBad) {
  // CrossDevice scope is rejected in the WebGPU environment.
  const std::string code = R"(
%val1 = OpAtomicLoad %u32 %u32_var %cross_device %relaxed
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(code), SPV_ENV_WEBGPU_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("in WebGPU environment Memory Scope is limited to "
                        "QueueFamilyKHR for OpAtomic* operations"));
}
1922
TEST_F(ValidateAtomics, WebGPUDeviceMemoryScopeBad) {
  // Device scope is rejected in the WebGPU environment.
  const std::string code = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(code), SPV_ENV_WEBGPU_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("in WebGPU environment Memory Scope is limited to "
                        "QueueFamilyKHR for OpAtomic* operations"));
}
1934
TEST_F(ValidateAtomics, WebGPUWorkgroupMemoryScopeBad) {
  // Workgroup scope is rejected in the WebGPU environment.
  const std::string code = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %relaxed
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(code), SPV_ENV_WEBGPU_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("in WebGPU environment Memory Scope is limited to "
                        "QueueFamilyKHR for OpAtomic* operations"));
}
1946
TEST_F(ValidateAtomics, WebGPUSubgroupMemoryScopeBad) {
  // Subgroup scope is rejected in the WebGPU environment.
  const std::string code = R"(
%val1 = OpAtomicLoad %u32 %u32_var %subgroup %relaxed
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(code), SPV_ENV_WEBGPU_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("in WebGPU environment Memory Scope is limited to "
                        "QueueFamilyKHR for OpAtomic* operations"));
}
1958
TEST_F(ValidateAtomics, WebGPUInvocationMemoryScopeBad) {
  // Invocation scope is rejected in the WebGPU environment.
  const std::string code = R"(
%val1 = OpAtomicLoad %u32 %u32_var %invocation %relaxed
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(code), SPV_ENV_WEBGPU_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("in WebGPU environment Memory Scope is limited to "
                        "QueueFamilyKHR for OpAtomic* operations"));
}
1970
TEST_F(ValidateAtomics, WebGPUQueueFamilyMemoryScopeGood) {
  // QueueFamilyKHR is the one scope the WebGPU environment accepts for
  // atomics.
  const std::string code = R"(
%val1 = OpAtomicLoad %u32 %u32_var %queuefamily %relaxed
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(code), SPV_ENV_WEBGPU_0);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_WEBGPU_0));
}
1979
TEST_F(ValidateAtomics, CompareExchangeWeakV13ValV14Good) {
  // A module assembled for SPIR-V 1.3 may still be validated in a 1.4
  // environment without triggering the version restriction.
  const std::string code = R"(
%val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(code), SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
}
1988
TEST_F(ValidateAtomics, CompareExchangeWeakV14Bad) {
  // Assembled as SPIR-V 1.4, where OpAtomicCompareExchangeWeak is no longer
  // allowed.
  const std::string code = R"(
%val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(code), SPV_ENV_UNIVERSAL_1_4);
  EXPECT_EQ(SPV_ERROR_WRONG_VERSION,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchangeWeak requires SPIR-V version "
                        "1.3 or earlier"));
}
2002
TEST_F(ValidateAtomics, CompareExchangeVolatileMatch) {
  // %volatile = 32768 = 0x8000. Using the same constant for both the Equal
  // and Unequal semantics operands validates cleanly.
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
2029
TEST_F(ValidateAtomics, CompareExchangeVolatileMismatch) {
  // The Equal operand lacks the 0x8000 (Volatile) bit while the Unequal
  // operand has it; the validator requires the two settings to agree.
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%non_volatile = OpConstant %int 0
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %non_volatile %volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Volatile mask setting must match for Equal and Unequal "
                        "memory semantics"));
}
2060
TEST_F(ValidateAtomics, CompareExchangeVolatileMismatchCooperativeMatrix) {
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpCapability CooperativeMatrixNV
OpExtension "SPV_KHR_vulkan_memory_model"
OpExtension "SPV_NV_cooperative_matrix"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpSpecConstant %int 32768
%non_volatile = OpSpecConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %non_volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";

  // Both semantics operands are spec constants, so their final values are
  // unknown at validation time: the Volatile-mask match check cannot fire and
  // the module is accepted.
  CompileSuccessfully(text);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
2091
TEST_F(ValidateAtomics, VolatileRequiresVulkanMemoryModel) {
  // %volatile = 32768 = 0x8000: the Volatile semantics bit is used under
  // GLSL450, without the VulkanMemoryModelKHR capability.
  const std::string text = R"(
OpCapability Shader
OpCapability Linkage
OpMemoryModel Logical GLSL450
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%ld = OpAtomicLoad %int %wg_var %workgroup %volatile
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics Volatile requires capability "
                        "VulkanMemoryModelKHR"));
}
2119
TEST_F(ValidateAtomics, CooperativeMatrixSemanticsMustBeConstant) {
  // With CooperativeMatrixNV present, an OpUndef Memory Semantics operand
  // (not a constant instruction) must be rejected.
  const std::string text = R"(
OpCapability Shader
OpCapability Linkage
OpCapability CooperativeMatrixNV
OpExtension "SPV_NV_cooperative_matrix"
OpMemoryModel Logical GLSL450
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%undef = OpUndef %int
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%ld = OpAtomicLoad %int %wg_var %workgroup %undef
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Memory Semantics must be a constant instruction when "
                "CooperativeMatrixNV capability is present"));
}
2149
2150 } // namespace
2151 } // namespace val
2152 } // namespace spvtools
2153