// Copyright (c) 2017 Google Inc.
// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights
// reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Validates correctness of atomic SPIR-V instructions.

#include "source/opcode.h"
#include "source/spirv_target_env.h"
#include "source/util/bitutils.h"
#include "source/val/instruction.h"
#include "source/val/validate.h"
#include "source/val/validate_memory_semantics.h"
#include "source/val/validate_scopes.h"
#include "source/val/validation_state.h"

namespace {

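// Helper predicates that classify atomic opcodes by the kind of Result Type
// (if any) they permit. They are consulted by AtomicsPass below.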
bool IsStorageClassAllowedByUniversalRules(spv::StorageClass storage_class) {
  switch (storage_class) {
    case spv::StorageClass::Uniform:
    case spv::StorageClass::StorageBuffer:
    case spv::StorageClass::Workgroup:
    case spv::StorageClass::CrossWorkgroup:
    case spv::StorageClass::Generic:
    case spv::StorageClass::AtomicCounter:
    case spv::StorageClass::Image:
    case spv::StorageClass::Function:
    case spv::StorageClass::PhysicalStorageBuffer:
    case spv::StorageClass::TaskPayloadWorkgroupEXT:
      return true;
    default:
      return false;
  }
}

bool HasReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicStore:
    case spv::Op::OpAtomicFlagClear:
      return false;
    default:
      return true;
  }
}

bool HasOnlyFloatReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicFAddEXT:
    case spv::Op::OpAtomicFMinEXT:
    case spv::Op::OpAtomicFMaxEXT:
      return true;
    default:
      return false;
  }
}

bool HasOnlyIntReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicCompareExchange:
    case spv::Op::OpAtomicCompareExchangeWeak:
    case spv::Op::OpAtomicIIncrement:
    case spv::Op::OpAtomicIDecrement:
    case spv::Op::OpAtomicIAdd:
    case spv::Op::OpAtomicISub:
    case spv::Op::OpAtomicSMin:
    case spv::Op::OpAtomicUMin:
    case spv::Op::OpAtomicSMax:
    case spv::Op::OpAtomicUMax:
    case spv::Op::OpAtomicAnd:
    case spv::Op::OpAtomicOr:
    case spv::Op::OpAtomicXor:
      return true;
    default:
      return false;
  }
}

bool HasIntOrFloatReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicLoad:
    case spv::Op::OpAtomicExchange:
      return true;
    default:
      return false;
  }
}

bool HasOnlyBoolReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicFlagTestAndSet:
      return true;
    default:
      return false;
  }
}

}  // namespace

namespace spvtools {
namespace val {

// Validates correctness of atomic instructions.
spv_result_t AtomicsPass(ValidationState_t& _, const Instruction* inst) {
  const spv::Op opcode = inst->opcode();
  switch (opcode) {
    case spv::Op::OpAtomicLoad:
    case spv::Op::OpAtomicStore:
    case spv::Op::OpAtomicExchange:
    case spv::Op::OpAtomicFAddEXT:
    case spv::Op::OpAtomicCompareExchange:
    case spv::Op::OpAtomicCompareExchangeWeak:
    case spv::Op::OpAtomicIIncrement:
    case spv::Op::OpAtomicIDecrement:
    case spv::Op::OpAtomicIAdd:
    case spv::Op::OpAtomicISub:
    case spv::Op::OpAtomicSMin:
    case spv::Op::OpAtomicUMin:
    case spv::Op::OpAtomicFMinEXT:
    case spv::Op::OpAtomicSMax:
    case spv::Op::OpAtomicUMax:
    case spv::Op::OpAtomicFMaxEXT:
    case spv::Op::OpAtomicAnd:
    case spv::Op::OpAtomicOr:
    case spv::Op::OpAtomicXor:
    case spv::Op::OpAtomicFlagTestAndSet:
    case spv::Op::OpAtomicFlagClear: {
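      // Typical SPIR-V assembly forms handled here, for reference:
      //   %old = OpAtomicIAdd %int %ptr %scope %semantics %value
      //          OpAtomicStore %ptr %scope %semantics %value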
      const uint32_t result_type = inst->type_id();

      // Validate the Result Type first, so later checks against the pointee
      // type can simply compare it with the Result Type (where applicable).
      if (HasReturnType(opcode)) {
        if (HasOnlyFloatReturnType(opcode) &&
            (!(_.HasCapability(spv::Capability::AtomicFloat16VectorNV) &&
               _.IsFloat16Vector2Or4Type(result_type)) &&
             !_.IsFloatScalarType(result_type))) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be float scalar type";
        } else if (HasOnlyIntReturnType(opcode) &&
                   !_.IsIntScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be integer scalar type";
        } else if (HasIntOrFloatReturnType(opcode) &&
                   !_.IsFloatScalarType(result_type) &&
                   !(opcode == spv::Op::OpAtomicExchange &&
                     _.HasCapability(spv::Capability::AtomicFloat16VectorNV) &&
                     _.IsFloat16Vector2Or4Type(result_type)) &&
                   !_.IsIntScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be integer or float scalar type";
        } else if (HasOnlyBoolReturnType(opcode) &&
                   !_.IsBoolScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be bool scalar type";
        }
      }

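      // In the validator's operand list, Result Type and Result <id> occupy
      // operand slots 0 and 1, so Pointer is operand 2 for result-producing
      // atomics and operand 0 for OpAtomicStore/OpAtomicFlagClear.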
      uint32_t operand_index = HasReturnType(opcode) ? 2 : 0;
      const uint32_t pointer_type = _.GetOperandTypeId(inst, operand_index++);
      uint32_t data_type = 0;
      spv::StorageClass storage_class;
      if (!_.GetPointerTypeInfo(pointer_type, &data_type, &storage_class)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": expected Pointer to be a pointer type";
      }

      // If the pointer is an untyped pointer, get the data type elsewhere.
      if (data_type == 0) {
        switch (opcode) {
          case spv::Op::OpAtomicLoad:
          case spv::Op::OpAtomicExchange:
          case spv::Op::OpAtomicFAddEXT:
          case spv::Op::OpAtomicCompareExchange:
          case spv::Op::OpAtomicCompareExchangeWeak:
          case spv::Op::OpAtomicIIncrement:
          case spv::Op::OpAtomicIDecrement:
          case spv::Op::OpAtomicIAdd:
          case spv::Op::OpAtomicISub:
          case spv::Op::OpAtomicSMin:
          case spv::Op::OpAtomicUMin:
          case spv::Op::OpAtomicFMinEXT:
          case spv::Op::OpAtomicSMax:
          case spv::Op::OpAtomicUMax:
          case spv::Op::OpAtomicFMaxEXT:
          case spv::Op::OpAtomicAnd:
          case spv::Op::OpAtomicOr:
          case spv::Op::OpAtomicXor:
            data_type = inst->type_id();
            break;
          case spv::Op::OpAtomicFlagTestAndSet:
          case spv::Op::OpAtomicFlagClear:
            return _.diag(SPV_ERROR_INVALID_ID, inst)
                   << "Untyped pointers are not supported by atomic flag "
                      "instructions";
          case spv::Op::OpAtomicStore:
            data_type = _.FindDef(inst->GetOperandAs<uint32_t>(3))->type_id();
            break;
          default:
            break;
        }
      }

      // Can't use result_type here because OpAtomicStore has no result; check
      // the pointee type (data_type) instead.
      if (_.IsIntScalarType(data_type) && _.GetBitWidth(data_type) == 64 &&
          !_.HasCapability(spv::Capability::Int64Atomics)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": 64-bit atomics require the Int64Atomics capability";
      }

      // Validate the storage class against the universal rules.
      if (!IsStorageClassAllowedByUniversalRules(storage_class)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": storage class forbidden by universal validation rules.";
      }

      // Then the Shader rules.
      if (_.HasCapability(spv::Capability::Shader)) {
        // Vulkan environment rule
        if (spvIsVulkanEnv(_.context()->target_env)) {
          if ((storage_class != spv::StorageClass::Uniform) &&
              (storage_class != spv::StorageClass::StorageBuffer) &&
              (storage_class != spv::StorageClass::Workgroup) &&
              (storage_class != spv::StorageClass::Image) &&
              (storage_class != spv::StorageClass::PhysicalStorageBuffer) &&
              (storage_class != spv::StorageClass::TaskPayloadWorkgroupEXT)) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << _.VkErrorID(4686) << spvOpcodeString(opcode)
                   << ": Vulkan spec only allows storage classes for atomics "
                      "to be: Uniform, Workgroup, Image, StorageBuffer, "
                      "PhysicalStorageBuffer or TaskPayloadWorkgroupEXT.";
          }
        } else if (storage_class == spv::StorageClass::Function) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": Function storage class forbidden when the Shader "
                    "capability is declared.";
        }

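        // Float add/min/max atomics additionally need a width-specific
        // capability: AtomicFloat{16,32,64}AddEXT or
        // AtomicFloat{16,32,64}MinMaxEXT, with AtomicFloat16VectorNV covering
        // 2- and 4-component 16-bit float vectors.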
        if (opcode == spv::Op::OpAtomicFAddEXT) {
          // The Result Type was already validated as a float type above.
          if (_.GetBitWidth(result_type) == 16) {
            if (_.IsFloat16Vector2Or4Type(result_type)) {
              if (!_.HasCapability(spv::Capability::AtomicFloat16VectorNV))
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float vector atomics require the "
                          "AtomicFloat16VectorNV capability";
            } else {
              if (!_.HasCapability(spv::Capability::AtomicFloat16AddEXT)) {
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float add atomics require the AtomicFloat16AddEXT "
                          "capability";
              }
            }
          }
          if ((_.GetBitWidth(result_type) == 32) &&
              (!_.HasCapability(spv::Capability::AtomicFloat32AddEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float add atomics require the AtomicFloat32AddEXT "
                      "capability";
          }
          if ((_.GetBitWidth(result_type) == 64) &&
              (!_.HasCapability(spv::Capability::AtomicFloat64AddEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float add atomics require the AtomicFloat64AddEXT "
                      "capability";
          }
        } else if (opcode == spv::Op::OpAtomicFMinEXT ||
                   opcode == spv::Op::OpAtomicFMaxEXT) {
          if (_.GetBitWidth(result_type) == 16) {
            if (_.IsFloat16Vector2Or4Type(result_type)) {
              if (!_.HasCapability(spv::Capability::AtomicFloat16VectorNV))
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float vector atomics require the "
                          "AtomicFloat16VectorNV capability";
            } else {
              if (!_.HasCapability(spv::Capability::AtomicFloat16MinMaxEXT)) {
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float min/max atomics require the "
                          "AtomicFloat16MinMaxEXT capability";
              }
            }
          }
          if ((_.GetBitWidth(result_type) == 32) &&
              (!_.HasCapability(spv::Capability::AtomicFloat32MinMaxEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float min/max atomics require the "
                      "AtomicFloat32MinMaxEXT capability";
          }
          if ((_.GetBitWidth(result_type) == 64) &&
              (!_.HasCapability(spv::Capability::AtomicFloat64MinMaxEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float min/max atomics require the "
                      "AtomicFloat64MinMaxEXT capability";
          }
        }
      }

      // And finally the OpenCL environment rules.
      if (spvIsOpenCLEnv(_.context()->target_env)) {
        if ((storage_class != spv::StorageClass::Function) &&
            (storage_class != spv::StorageClass::Workgroup) &&
            (storage_class != spv::StorageClass::CrossWorkgroup) &&
            (storage_class != spv::StorageClass::Generic)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": storage class must be Function, Workgroup, "
                    "CrossWorkgroup or Generic in the OpenCL environment.";
        }

        if (_.context()->target_env == SPV_ENV_OPENCL_1_2) {
          if (storage_class == spv::StorageClass::Generic) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << "Storage class cannot be Generic in the OpenCL 1.2 "
                      "environment";
          }
        }
      }

      // Where the pointee type may legitimately differ from the Result Type
      // (atomic flag instructions and OpAtomicStore), apply dedicated checks;
      // otherwise the pointee type must match the Result Type.
      if (opcode == spv::Op::OpAtomicFlagTestAndSet ||
          opcode == spv::Op::OpAtomicFlagClear) {
        if (!_.IsIntScalarType(data_type) || _.GetBitWidth(data_type) != 32) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Pointer to point to a value of 32-bit integer "
                    "type";
        }
      } else if (opcode == spv::Op::OpAtomicStore) {
        if (!_.IsFloatScalarType(data_type) && !_.IsIntScalarType(data_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Pointer to be a pointer to integer or float "
                 << "scalar type";
        }
      } else if (data_type != result_type) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": expected Pointer to point to a value of type Result "
                  "Type";
      }

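      // The remaining operands are Memory Scope, one Memory Semantics operand
      // (two, Equal and Unequal, for the compare-exchange opcodes), and then
      // any Value/Comparator operands.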
      auto memory_scope = inst->GetOperandAs<const uint32_t>(operand_index++);
      if (auto error = ValidateMemoryScope(_, inst, memory_scope)) {
        return error;
      }

      const auto equal_semantics_index = operand_index++;
      if (auto error = ValidateMemorySemantics(_, inst, equal_semantics_index,
                                               memory_scope))
        return error;

      if (opcode == spv::Op::OpAtomicCompareExchange ||
          opcode == spv::Op::OpAtomicCompareExchangeWeak) {
        const auto unequal_semantics_index = operand_index++;
        if (auto error = ValidateMemorySemantics(
                _, inst, unequal_semantics_index, memory_scope))
          return error;

        // Volatile bits must match for equal and unequal semantics. Previous
        // checks guarantee they are 32-bit constants, but we need to recheck
        // whether they are evaluatable constants.
        bool is_int32 = false;
        bool is_equal_const = false;
        bool is_unequal_const = false;
        uint32_t equal_value = 0;
        uint32_t unequal_value = 0;
        std::tie(is_int32, is_equal_const, equal_value) = _.EvalInt32IfConst(
            inst->GetOperandAs<uint32_t>(equal_semantics_index));
        std::tie(is_int32, is_unequal_const, unequal_value) =
            _.EvalInt32IfConst(
                inst->GetOperandAs<uint32_t>(unequal_semantics_index));
        if (is_equal_const && is_unequal_const &&
            ((equal_value & uint32_t(spv::MemorySemanticsMask::Volatile)) ^
             (unequal_value & uint32_t(spv::MemorySemanticsMask::Volatile)))) {
          return _.diag(SPV_ERROR_INVALID_ID, inst)
                 << "Volatile mask setting must match for Equal and Unequal "
                    "memory semantics";
        }
      }

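      // Check the Value (and Comparator) operands where present.
      // OpAtomicStore's Value operand is at fixed index 3; for the other
      // value-carrying atomics it is the next operand after the semantics.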
      if (opcode == spv::Op::OpAtomicStore) {
        const uint32_t value_type = _.GetOperandTypeId(inst, 3);
        if (value_type != data_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Value type and the type pointed to by "
                    "Pointer to be the same";
        }
      } else if (opcode != spv::Op::OpAtomicLoad &&
                 opcode != spv::Op::OpAtomicIIncrement &&
                 opcode != spv::Op::OpAtomicIDecrement &&
                 opcode != spv::Op::OpAtomicFlagTestAndSet &&
                 opcode != spv::Op::OpAtomicFlagClear) {
        const uint32_t value_type = _.GetOperandTypeId(inst, operand_index++);
        if (value_type != result_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Value to be of type Result Type";
        }
      }

      if (opcode == spv::Op::OpAtomicCompareExchange ||
          opcode == spv::Op::OpAtomicCompareExchangeWeak) {
        const uint32_t comparator_type =
            _.GetOperandTypeId(inst, operand_index++);
        if (comparator_type != result_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Comparator to be of type Result Type";
        }
      }

      break;
    }

    default:
      break;
  }

  return SPV_SUCCESS;
}

}  // namespace val
}  // namespace spvtools