// Copyright (c) 2017 Google Inc.
// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights
// reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Validates correctness of atomic SPIR-V instructions.
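// For reference, a typical value-returning atomic has this operand layout
// (illustrative SPIR-V assembly; the ids are hypothetical):
//   %old = OpAtomicIAdd %u32 %ptr %scope %semantics %value
// The pass below checks the Result Type, the pointee type and storage class
// of the Pointer operand, and the Memory Scope / Memory Semantics operands.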

#include "source/val/validate.h"

#include "source/diagnostic.h"
#include "source/opcode.h"
#include "source/spirv_target_env.h"
#include "source/util/bitutils.h"
#include "source/val/instruction.h"
#include "source/val/validate_memory_semantics.h"
#include "source/val/validate_scopes.h"
#include "source/val/validation_state.h"

namespace {

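// Returns true if the storage class is one that any atomic may legally
// operate on under the universal (environment-independent) rules. For
// example, a pointer in the Private storage class is rejected here, while
// Workgroup and StorageBuffer pointers are accepted.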
bool IsStorageClassAllowedByUniversalRules(uint32_t storage_class) {
  switch (storage_class) {
    case SpvStorageClassUniform:
    case SpvStorageClassStorageBuffer:
    case SpvStorageClassWorkgroup:
    case SpvStorageClassCrossWorkgroup:
    case SpvStorageClassGeneric:
    case SpvStorageClassAtomicCounter:
    case SpvStorageClassImage:
    case SpvStorageClassFunction:
    case SpvStorageClassPhysicalStorageBufferEXT:
      return true;
    default:
      return false;
  }
}

}  // namespace

namespace spvtools {
namespace val {

// Validates correctness of atomic instructions.
spv_result_t AtomicsPass(ValidationState_t& _, const Instruction* inst) {
  const SpvOp opcode = inst->opcode();
  const uint32_t result_type = inst->type_id();
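  // Only OpAtomicLoad, OpAtomicStore, OpAtomicExchange and OpAtomicFAddEXT
  // may operate on floating-point values; every other atomic handled below
  // requires an integer (or, for the flag instructions, a boolean) type.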
  bool is_atomic_float_opcode = false;
  if (opcode == SpvOpAtomicLoad || opcode == SpvOpAtomicStore ||
      opcode == SpvOpAtomicFAddEXT || opcode == SpvOpAtomicExchange) {
    is_atomic_float_opcode = true;
  }
  switch (opcode) {
    case SpvOpAtomicLoad:
    case SpvOpAtomicStore:
    case SpvOpAtomicExchange:
    case SpvOpAtomicFAddEXT:
    case SpvOpAtomicCompareExchange:
    case SpvOpAtomicCompareExchangeWeak:
    case SpvOpAtomicIIncrement:
    case SpvOpAtomicIDecrement:
    case SpvOpAtomicIAdd:
    case SpvOpAtomicISub:
    case SpvOpAtomicSMin:
    case SpvOpAtomicUMin:
    case SpvOpAtomicSMax:
    case SpvOpAtomicUMax:
    case SpvOpAtomicAnd:
    case SpvOpAtomicOr:
    case SpvOpAtomicXor:
    case SpvOpAtomicFlagTestAndSet:
    case SpvOpAtomicFlagClear: {
      if (_.HasCapability(SpvCapabilityKernel) &&
          (opcode == SpvOpAtomicLoad || opcode == SpvOpAtomicExchange ||
           opcode == SpvOpAtomicCompareExchange)) {
        if (!_.IsFloatScalarType(result_type) &&
            !_.IsIntScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be int or float scalar type";
        }
      } else if (opcode == SpvOpAtomicFlagTestAndSet) {
        if (!_.IsBoolScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be bool scalar type";
        }
      } else if (opcode == SpvOpAtomicFlagClear || opcode == SpvOpAtomicStore) {
        assert(result_type == 0);
      } else {
        if (_.IsFloatScalarType(result_type)) {
          if (is_atomic_float_opcode) {
            if (opcode == SpvOpAtomicFAddEXT) {
              if ((_.GetBitWidth(result_type) == 32) &&
                  (!_.HasCapability(SpvCapabilityAtomicFloat32AddEXT))) {
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float add atomics require the "
                          "AtomicFloat32AddEXT capability";
              }
              if ((_.GetBitWidth(result_type) == 64) &&
                  (!_.HasCapability(SpvCapabilityAtomicFloat64AddEXT))) {
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float add atomics require the "
                          "AtomicFloat64AddEXT capability";
              }
            }
          } else {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": expected Result Type to be int scalar type";
          }
        } else if (_.IsIntScalarType(result_type) &&
                   opcode == SpvOpAtomicFAddEXT) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be float scalar type";
        } else if (!_.IsFloatScalarType(result_type) &&
                   !_.IsIntScalarType(result_type)) {
          switch (opcode) {
            case SpvOpAtomicFAddEXT:
              return _.diag(SPV_ERROR_INVALID_DATA, inst)
                     << spvOpcodeString(opcode)
                     << ": expected Result Type to be float scalar type";
            case SpvOpAtomicIIncrement:
            case SpvOpAtomicIDecrement:
            case SpvOpAtomicIAdd:
            case SpvOpAtomicISub:
            case SpvOpAtomicSMin:
            case SpvOpAtomicSMax:
            case SpvOpAtomicUMin:
            case SpvOpAtomicUMax:
              return _.diag(SPV_ERROR_INVALID_DATA, inst)
                     << spvOpcodeString(opcode)
                     << ": expected Result Type to be integer scalar type";
            default:
              return _.diag(SPV_ERROR_INVALID_DATA, inst)
                     << spvOpcodeString(opcode)
                     << ": expected Result Type to be int or float scalar "
                        "type";
          }
        }

        if (spvIsVulkanEnv(_.context()->target_env) &&
            (_.GetBitWidth(result_type) != 32 &&
             (_.GetBitWidth(result_type) != 64 ||
              !_.HasCapability(SpvCapabilityInt64ImageEXT)))) {
          switch (opcode) {
            case SpvOpAtomicSMin:
            case SpvOpAtomicUMin:
            case SpvOpAtomicSMax:
            case SpvOpAtomicUMax:
            case SpvOpAtomicAnd:
            case SpvOpAtomicOr:
            case SpvOpAtomicXor:
            case SpvOpAtomicIAdd:
            case SpvOpAtomicISub:
            case SpvOpAtomicFAddEXT:
            case SpvOpAtomicLoad:
            case SpvOpAtomicStore:
            case SpvOpAtomicExchange:
            case SpvOpAtomicIIncrement:
            case SpvOpAtomicIDecrement:
            case SpvOpAtomicCompareExchangeWeak:
            case SpvOpAtomicCompareExchange: {
              if (_.GetBitWidth(result_type) == 64 &&
                  _.IsIntScalarType(result_type) &&
                  !_.HasCapability(SpvCapabilityInt64Atomics))
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": 64-bit atomics require the Int64Atomics "
                          "capability";
            } break;
            default:
              return _.diag(SPV_ERROR_INVALID_DATA, inst)
                     << spvOpcodeString(opcode)
                     << ": according to the Vulkan spec atomic Result Type "
                        "needs to be a 32-bit int scalar type";
          }
        }
      }

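      // OpAtomicStore and OpAtomicFlagClear have no Result Type or Result
      // <id>, so their Pointer is operand 0; for every other atomic the
      // first two operands are the Result Type and Result <id>, and the
      // Pointer is operand 2.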
      uint32_t operand_index =
          opcode == SpvOpAtomicFlagClear || opcode == SpvOpAtomicStore ? 0 : 2;
      const uint32_t pointer_type = _.GetOperandTypeId(inst, operand_index++);

      uint32_t data_type = 0;
      uint32_t storage_class = 0;
      if (!_.GetPointerTypeInfo(pointer_type, &data_type, &storage_class)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": expected Pointer to be of type OpTypePointer";
      }

      // Validate storage class against universal rules
      if (!IsStorageClassAllowedByUniversalRules(storage_class)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": storage class forbidden by universal validation rules.";
      }

      // Then Shader rules
      if (_.HasCapability(SpvCapabilityShader)) {
        if (spvIsVulkanEnv(_.context()->target_env)) {
          if ((storage_class != SpvStorageClassUniform) &&
              (storage_class != SpvStorageClassStorageBuffer) &&
              (storage_class != SpvStorageClassWorkgroup) &&
              (storage_class != SpvStorageClassImage) &&
              (storage_class != SpvStorageClassPhysicalStorageBuffer)) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << _.VkErrorID(4686) << spvOpcodeString(opcode)
                   << ": Vulkan spec only allows storage classes for atomics "
                      "to be: Uniform, Workgroup, Image, StorageBuffer, or "
                      "PhysicalStorageBuffer.";
          }
        } else if (storage_class == SpvStorageClassFunction) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": Function storage class forbidden when the Shader "
                    "capability is declared.";
        }
      }

      // And finally OpenCL environment rules
      if (spvIsOpenCLEnv(_.context()->target_env)) {
        if ((storage_class != SpvStorageClassFunction) &&
            (storage_class != SpvStorageClassWorkgroup) &&
            (storage_class != SpvStorageClassCrossWorkgroup) &&
            (storage_class != SpvStorageClassGeneric)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": storage class must be Function, Workgroup, "
                    "CrossWorkgroup, or Generic in the OpenCL environment.";
        }

        if (_.context()->target_env == SPV_ENV_OPENCL_1_2) {
          if (storage_class == SpvStorageClassGeneric) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << "Storage class cannot be Generic in the OpenCL 1.2 "
                      "environment";
          }
        }
      }

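      // Check the pointee type against what the opcode requires: the flag
      // instructions operate on a 32-bit integer, OpAtomicStore accepts an
      // int or float scalar, and every other atomic must point to a value
      // of exactly the Result Type.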
      if (opcode == SpvOpAtomicFlagTestAndSet ||
          opcode == SpvOpAtomicFlagClear) {
        if (!_.IsIntScalarType(data_type) || _.GetBitWidth(data_type) != 32) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Pointer to point to a value of 32-bit int "
                    "type";
        }
      } else if (opcode == SpvOpAtomicStore) {
        if (!_.IsFloatScalarType(data_type) && !_.IsIntScalarType(data_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Pointer to be a pointer to int or float "
                    "scalar type";
        }
      } else {
        if (data_type != result_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Pointer to point to a value of type Result "
                    "Type";
        }
      }

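      // The Memory Scope and Memory Semantics operands are integer <id>s,
      // e.g. (illustrative SPIR-V assembly, hypothetical ids):
      //   %device  = OpConstant %u32 1   ; Scope Device
      //   %relaxed = OpConstant %u32 0   ; MemorySemantics None
      // Their detailed rules live in the shared helpers declared in
      // validate_scopes.h and validate_memory_semantics.h.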
      auto memory_scope = inst->GetOperandAs<const uint32_t>(operand_index++);
      if (auto error = ValidateMemoryScope(_, inst, memory_scope)) {
        return error;
      }

      const auto equal_semantics_index = operand_index++;
      if (auto error = ValidateMemorySemantics(_, inst, equal_semantics_index))
        return error;

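      // OpAtomicCompareExchange(Weak) carries a second (Unequal) semantics
      // operand, e.g. (illustrative SPIR-V assembly, hypothetical ids):
      //   %old = OpAtomicCompareExchange %u32 %ptr %scope %eq_sem %uneq_sem
      //          %value %comparator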
      if (opcode == SpvOpAtomicCompareExchange ||
          opcode == SpvOpAtomicCompareExchangeWeak) {
        const auto unequal_semantics_index = operand_index++;
        if (auto error =
                ValidateMemorySemantics(_, inst, unequal_semantics_index))
          return error;

        // The Volatile bit must match between the Equal and Unequal
        // semantics. Previous checks guarantee they are 32-bit constants,
        // but we still need to check whether they are evaluatable constants.
        bool is_int32 = false;
        bool is_equal_const = false;
        bool is_unequal_const = false;
        uint32_t equal_value = 0;
        uint32_t unequal_value = 0;
        std::tie(is_int32, is_equal_const, equal_value) = _.EvalInt32IfConst(
            inst->GetOperandAs<uint32_t>(equal_semantics_index));
        std::tie(is_int32, is_unequal_const, unequal_value) =
            _.EvalInt32IfConst(
                inst->GetOperandAs<uint32_t>(unequal_semantics_index));
        if (is_equal_const && is_unequal_const &&
            ((equal_value & SpvMemorySemanticsVolatileMask) ^
             (unequal_value & SpvMemorySemanticsVolatileMask))) {
          return _.diag(SPV_ERROR_INVALID_ID, inst)
                 << "Volatile mask setting must match for Equal and Unequal "
                    "memory semantics";
        }
      }

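      // OpAtomicStore takes its Value at a fixed operand index (Pointer,
      // Scope, Semantics, Value); the remaining value-carrying atomics take
      // the Value (and, for compare-exchange, the Comparator) after the
      // semantics operands consumed above.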
      if (opcode == SpvOpAtomicStore) {
        const uint32_t value_type = _.GetOperandTypeId(inst, 3);
        if (value_type != data_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Value type and the type pointed to by "
                    "Pointer to be the same";
        }
      } else if (opcode != SpvOpAtomicLoad && opcode != SpvOpAtomicIIncrement &&
                 opcode != SpvOpAtomicIDecrement &&
                 opcode != SpvOpAtomicFlagTestAndSet &&
                 opcode != SpvOpAtomicFlagClear) {
        const uint32_t value_type = _.GetOperandTypeId(inst, operand_index++);
        if (value_type != result_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Value to be of type Result Type";
        }
      }

      if (opcode == SpvOpAtomicCompareExchange ||
          opcode == SpvOpAtomicCompareExchangeWeak) {
        const uint32_t comparator_type =
            _.GetOperandTypeId(inst, operand_index++);
        if (comparator_type != result_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Comparator to be of type Result Type";
        }
      }

      break;
    }

    default:
      break;
  }

  return SPV_SUCCESS;
}

}  // namespace val
}  // namespace spvtools