/*------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2019 The Khronos Group Inc.
 * Copyright (c) 2019 Google Inc.
 * Copyright (c) 2017 Codeplay Software Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */ /*!
 * \file
 * \brief Subgroup clustered operation tests
 */ /*--------------------------------------------------------------------*/

#include "vktSubgroupsClusteredTests.hpp"
#include "vktSubgroupsScanHelpers.hpp"
#include "vktSubgroupsTestsUtils.hpp"

#include <string>
#include <vector>

using namespace tcu;
using namespace std;
using namespace vk;
using namespace vkt;

namespace
{
enum OpType
{
    OPTYPE_CLUSTERED_ADD = 0,
    OPTYPE_CLUSTERED_MUL,
    OPTYPE_CLUSTERED_MIN,
    OPTYPE_CLUSTERED_MAX,
    OPTYPE_CLUSTERED_AND,
    OPTYPE_CLUSTERED_OR,
    OPTYPE_CLUSTERED_XOR,
    OPTYPE_CLUSTERED_LAST
};

struct CaseDefinition
{
    Operator            op;
    VkShaderStageFlags  shaderStage;
    VkFormat            format;
    de::SharedPtr<bool> geometryPointSizeSupported;
    deBool              requiredSubgroupSize;
};

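// Maps the clustered operation type onto the generic Operator used by the shared scan helpers.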
static Operator getOperator (OpType opType)
{
    switch (opType)
    {
        case OPTYPE_CLUSTERED_ADD:  return OPERATOR_ADD;
        case OPTYPE_CLUSTERED_MUL:  return OPERATOR_MUL;
        case OPTYPE_CLUSTERED_MIN:  return OPERATOR_MIN;
        case OPTYPE_CLUSTERED_MAX:  return OPERATOR_MAX;
        case OPTYPE_CLUSTERED_AND:  return OPERATOR_AND;
        case OPTYPE_CLUSTERED_OR:   return OPERATOR_OR;
        case OPTYPE_CLUSTERED_XOR:  return OPERATOR_XOR;
        default:                    TCU_THROW(InternalError, "Unsupported op type");
    }
}

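// Result-check callback for the single-stage (framebuffer) tests: every invocation is expected to
// have written the reference value 1 (see the generated "tempRes = tempResult ? 1 : 0" below).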
static bool checkVertexPipelineStages (const void*         internalData,
                                       vector<const void*> datas,
                                       deUint32            width,
                                       deUint32)
{
    DE_UNREF(internalData);

    return subgroups::check(datas, width, 1);
}

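// Result-check callback for the compute and mesh shading tests, again expecting the reference value 1.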
static bool checkComputeOrMesh (const void*         internalData,
                                vector<const void*> datas,
                                const deUint32      numWorkgroups[3],
                                const deUint32      localSize[3],
                                deUint32)
{
    DE_UNREF(internalData);

    return subgroups::checkComputeOrMesh(datas, numWorkgroups, localSize, 1);
}

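// Returns the GLSL built-in name for the clustered reduction, e.g. "subgroupClusteredAdd".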
string getOpTypeName (Operator op)
{
    return getScanOpName("subgroupClustered", "", op, SCAN_REDUCE);
}

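// Extensions the generated shaders need: the clustered and ballot subgroup extensions plus any
// extension required by the tested format.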
string getExtHeader (CaseDefinition& caseDef)
{
    return "#extension GL_KHR_shader_subgroup_clustered: enable\n"
           "#extension GL_KHR_shader_subgroup_ballot: enable\n" +
           subgroups::getAdditionalExtensionForFormat(caseDef.format);
}

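// Builds the GLSL test body. For every power-of-two cluster size up to the maximum supported
// subgroup size it emits a block that computes the clustered reduction with the built-in
// operation and recomputes the same reduction manually over the active invocations of each
// cluster (using the ballot mask), then compares the two results.
//
// Illustrative shape of one emitted block (clusterSize 4, uint addition):
//
//   {
//     const uint clusterSize = 4;
//     if (clusterSize <= gl_SubgroupSize)
//     {
//       uint op = subgroupClusteredAdd(data[gl_SubgroupInvocationID], clusterSize);
//       for (uint clusterOffset = 0; clusterOffset < gl_SubgroupSize; clusterOffset += clusterSize)
//       {
//         // Accumulate a reference value over the active invocations of this cluster and
//         // compare it against the built-in result for the current invocation.
//       }
//     }
//   }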
string getTestSrc (CaseDefinition& caseDef)
{
    const string  formatName  = subgroups::getFormatNameForGLSL(caseDef.format);
    const string  opTypeName  = getOpTypeName(caseDef.op);
    const string  identity    = getIdentity(caseDef.op, caseDef.format);
    const string  opOperation = getOpOperation(caseDef.op, caseDef.format, "ref", "data[index]");
    const string  compare     = getCompare(caseDef.op, caseDef.format, "ref", "op");
    ostringstream bdy;

    bdy << "  bool tempResult = true;\n"
        << "  uvec4 mask = subgroupBallot(true);\n";

    for (deUint32 i = 1; i <= subgroups::maxSupportedSubgroupSize(); i *= 2)
    {
        bdy << "  {\n"
            << "    const uint clusterSize = " << i << ";\n"
            << "    if (clusterSize <= gl_SubgroupSize)\n"
            << "    {\n"
            << "      " << formatName << " op = "
            << opTypeName + "(data[gl_SubgroupInvocationID], clusterSize);\n"
            << "      for (uint clusterOffset = 0; clusterOffset < gl_SubgroupSize; clusterOffset += clusterSize)\n"
            << "      {\n"
            << "        " << formatName << " ref = "
            << identity << ";\n"
            << "        for (uint index = clusterOffset; index < (clusterOffset + clusterSize); index++)\n"
            << "        {\n"
            << "          if (subgroupBallotBitExtract(mask, index))\n"
            << "          {\n"
            << "            ref = " << opOperation << ";\n"
            << "          }\n"
            << "        }\n"
            << "        if ((clusterOffset <= gl_SubgroupInvocationID) && (gl_SubgroupInvocationID < (clusterOffset + clusterSize)))\n"
            << "        {\n"
            << "          if (!" << compare << ")\n"
            << "          {\n"
            << "            tempResult = false;\n"
            << "          }\n"
            << "        }\n"
            << "      }\n"
            << "    }\n"
            << "  }\n"
            << "  tempRes = tempResult ? 1 : 0;\n";
    }

    return bdy.str();
}

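// Registers the shader sources for the single-stage framebuffer variants (SPIR-V 1.3).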
void initFrameBufferPrograms (SourceCollections& programCollection, CaseDefinition caseDef)
{
    const ShaderBuildOptions buildOptions (programCollection.usedVulkanVersion, SPIRV_VERSION_1_3, 0u);
    const string             extHeader = getExtHeader(caseDef);
    const string             testSrc   = getTestSrc(caseDef);

    subgroups::initStdFrameBufferPrograms(programCollection, buildOptions, caseDef.shaderStage, caseDef.format, *caseDef.geometryPointSizeSupported, extHeader, testSrc, "");
}

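// Registers the shader sources for the full-pipeline variants; ray tracing and mesh shading
// stages require SPIR-V 1.4.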
void initPrograms (SourceCollections& programCollection, CaseDefinition caseDef)
{
#ifndef CTS_USES_VULKANSC
    const bool spirv14required = (isAllRayTracingStages(caseDef.shaderStage) || isAllMeshShadingStages(caseDef.shaderStage));
#else
    const bool spirv14required = false;
#endif // CTS_USES_VULKANSC
    const SpirvVersion       spirvVersion = spirv14required ? SPIRV_VERSION_1_4 : SPIRV_VERSION_1_3;
    const ShaderBuildOptions buildOptions (programCollection.usedVulkanVersion, spirvVersion, 0u, spirv14required);
    const string             extHeader    = getExtHeader(caseDef);
    const string             testSrc      = getTestSrc(caseDef);

    subgroups::initStdPrograms(programCollection, buildOptions, caseDef.shaderStage, caseDef.format, *caseDef.geometryPointSizeSupported, extHeader, testSrc, "");
}

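// Checks every feature and extension the case relies on and throws NotSupportedError if anything is missing.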
void supportedCheck (Context& context, CaseDefinition caseDef)
{
    if (!subgroups::isSubgroupSupported(context))
        TCU_THROW(NotSupportedError, "Subgroup operations are not supported");

    if (!subgroups::isSubgroupFeatureSupportedForDevice(context, VK_SUBGROUP_FEATURE_CLUSTERED_BIT))
        TCU_THROW(NotSupportedError, "Device does not support subgroup clustered operations");

    if (!subgroups::isFormatSupportedForDevice(context, caseDef.format))
        TCU_THROW(NotSupportedError, "Device does not support the specified format in subgroup operations");

    if (caseDef.requiredSubgroupSize)
    {
        context.requireDeviceFunctionality("VK_EXT_subgroup_size_control");

#ifndef CTS_USES_VULKANSC
        const VkPhysicalDeviceSubgroupSizeControlFeatures&   subgroupSizeControlFeatures   = context.getSubgroupSizeControlFeatures();
        const VkPhysicalDeviceSubgroupSizeControlProperties& subgroupSizeControlProperties = context.getSubgroupSizeControlProperties();
#else
        const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT&   subgroupSizeControlFeatures   = context.getSubgroupSizeControlFeaturesEXT();
        const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& subgroupSizeControlProperties = context.getSubgroupSizeControlPropertiesEXT();
#endif // CTS_USES_VULKANSC

        if (subgroupSizeControlFeatures.subgroupSizeControl == DE_FALSE)
            TCU_THROW(NotSupportedError, "Device does not support varying subgroup sizes nor required subgroup size");

        if (subgroupSizeControlFeatures.computeFullSubgroups == DE_FALSE)
            TCU_THROW(NotSupportedError, "Device does not support full subgroups in compute shaders");

        if ((subgroupSizeControlProperties.requiredSubgroupSizeStages & caseDef.shaderStage) != caseDef.shaderStage)
            TCU_THROW(NotSupportedError, "Required subgroup size is not supported for shader stage");
    }

    *caseDef.geometryPointSizeSupported = subgroups::isTessellationAndGeometryPointSizeSupported(context);

#ifndef CTS_USES_VULKANSC
    if (isAllRayTracingStages(caseDef.shaderStage))
    {
        context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
    }
    else if (isAllMeshShadingStages(caseDef.shaderStage))
    {
        context.requireDeviceCoreFeature(DEVICE_CORE_FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS);
        context.requireDeviceFunctionality("VK_EXT_mesh_shader");

        if ((caseDef.shaderStage & VK_SHADER_STAGE_TASK_BIT_EXT) != 0u)
        {
            const auto& features = context.getMeshShaderFeaturesEXT();
            if (!features.taskShader)
                TCU_THROW(NotSupportedError, "Task shaders not supported");
        }
    }
#endif // CTS_USES_VULKANSC

    subgroups::supportedCheckShader(context, caseDef.shaderStage);
}

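// Framebuffer variant: input data is provided through a UBO and the result is checked per
// vertex-pipeline invocation.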
TestStatus noSSBOtest (Context& context, const CaseDefinition caseDef)
{
    const subgroups::SSBOData inputData =
    {
        subgroups::SSBOData::InitializeNonZero, // InputDataInitializeType initializeType;
        subgroups::SSBOData::LayoutStd140,      // InputDataLayoutType     layout;
        caseDef.format,                         // vk::VkFormat            format;
        subgroups::maxSupportedSubgroupSize(),  // vk::VkDeviceSize        numElements;
        subgroups::SSBOData::BindingUBO,        // BindingType             bindingType;
    };

    switch (caseDef.shaderStage)
    {
        case VK_SHADER_STAGE_VERTEX_BIT:                  return subgroups::makeVertexFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages);
        case VK_SHADER_STAGE_GEOMETRY_BIT:                return subgroups::makeGeometryFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages);
        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:    return subgroups::makeTessellationEvaluationFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, caseDef.shaderStage);
        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return subgroups::makeTessellationEvaluationFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, caseDef.shaderStage);
        default:                                          TCU_THROW(InternalError, "Unhandled shader stage");
    }
}

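// SSBO variant covering compute, mesh, all-graphics and ray tracing stages. For compute and mesh
// cases with requiredSubgroupSize set, the test is re-run for every power-of-two subgroup size in
// [minSubgroupSize, maxSubgroupSize].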
TestStatus test (Context& context, const CaseDefinition caseDef)
{
    const bool isCompute = isAllComputeStages(caseDef.shaderStage);
#ifndef CTS_USES_VULKANSC
    const bool isMesh    = isAllMeshShadingStages(caseDef.shaderStage);
#else
    const bool isMesh    = false;
#endif // CTS_USES_VULKANSC
    DE_ASSERT(!(isCompute && isMesh));

    if (isCompute || isMesh)
    {
#ifndef CTS_USES_VULKANSC
        const VkPhysicalDeviceSubgroupSizeControlProperties& subgroupSizeControlProperties = context.getSubgroupSizeControlProperties();
#else
        const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& subgroupSizeControlProperties = context.getSubgroupSizeControlPropertiesEXT();
#endif // CTS_USES_VULKANSC
        TestLog& log = context.getTestContext().getLog();

        subgroups::SSBOData inputData;
        inputData.format         = caseDef.format;
        inputData.layout         = subgroups::SSBOData::LayoutStd430;
        inputData.numElements    = subgroups::maxSupportedSubgroupSize();
        inputData.initializeType = subgroups::SSBOData::InitializeNonZero;

        if (caseDef.requiredSubgroupSize == DE_FALSE)
        {
            if (isCompute)
                return subgroups::makeComputeTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh);
            else
                return subgroups::makeMeshTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh);
        }

        log << TestLog::Message << "Testing required subgroup size range [" << subgroupSizeControlProperties.minSubgroupSize << ", "
            << subgroupSizeControlProperties.maxSubgroupSize << "]" << TestLog::EndMessage;

        // According to the spec, requiredSubgroupSize must be a power-of-two integer.
        for (deUint32 size = subgroupSizeControlProperties.minSubgroupSize; size <= subgroupSizeControlProperties.maxSubgroupSize; size *= 2)
        {
            TestStatus result (QP_TEST_RESULT_INTERNAL_ERROR, "Internal Error");

            if (isCompute)
                result = subgroups::makeComputeTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh, size);
            else
                result = subgroups::makeMeshTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh, size);

            if (result.getCode() != QP_TEST_RESULT_PASS)
            {
                log << TestLog::Message << "subgroupSize " << size << " failed" << TestLog::EndMessage;
                return result;
            }
        }

        return TestStatus::pass("OK");
    }
    else if (isAllGraphicsStages(caseDef.shaderStage))
    {
        const VkShaderStageFlags  stages    = subgroups::getPossibleGraphicsSubgroupStages(context, caseDef.shaderStage);
        const subgroups::SSBOData inputData =
        {
            subgroups::SSBOData::InitializeNonZero, // InputDataInitializeType initializeType;
            subgroups::SSBOData::LayoutStd430,      // InputDataLayoutType     layout;
            caseDef.format,                         // vk::VkFormat            format;
            subgroups::maxSupportedSubgroupSize(),  // vk::VkDeviceSize        numElements;
            subgroups::SSBOData::BindingSSBO,       // BindingType             bindingType;
            4u,                                     // deUint32                binding;
            stages,                                 // vk::VkShaderStageFlags  stages;
        };

        return subgroups::allStages(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, stages);
    }
#ifndef CTS_USES_VULKANSC
    else if (isAllRayTracingStages(caseDef.shaderStage))
    {
        const VkShaderStageFlags  stages    = subgroups::getPossibleRayTracingSubgroupStages(context, caseDef.shaderStage);
        const subgroups::SSBOData inputData =
        {
            subgroups::SSBOData::InitializeNonZero, // InputDataInitializeType initializeType;
            subgroups::SSBOData::LayoutStd430,      // InputDataLayoutType     layout;
            caseDef.format,                         // vk::VkFormat            format;
            subgroups::maxSupportedSubgroupSize(),  // vk::VkDeviceSize        numElements;
            subgroups::SSBOData::BindingSSBO,       // BindingType             bindingType;
            6u,                                     // deUint32                binding;
            stages,                                 // vk::VkShaderStageFlags  stages;
        };

        return subgroups::allRayTracingStages(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, stages);
    }
#endif // CTS_USES_VULKANSC
    else
        TCU_THROW(InternalError, "Unknown stage or invalid stage set");
}
} // namespace

namespace vkt
{
namespace subgroups
{
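// Builds the "clustered" test group: graphics, compute, framebuffer and (outside Vulkan SC)
// ray tracing and mesh shading sub-groups, producing case names such as
// clustered.compute.subgroupclusteredadd_uint_requiredsubgroupsize.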
TestCaseGroup* createSubgroupsClusteredTests (TestContext& testCtx)
{
    de::MovePtr<TestCaseGroup> group            (new TestCaseGroup(testCtx, "clustered", "Subgroup clustered category tests"));
    de::MovePtr<TestCaseGroup> graphicGroup     (new TestCaseGroup(testCtx, "graphics", "Subgroup clustered category tests: graphics"));
    de::MovePtr<TestCaseGroup> computeGroup     (new TestCaseGroup(testCtx, "compute", "Subgroup clustered category tests: compute"));
    de::MovePtr<TestCaseGroup> framebufferGroup (new TestCaseGroup(testCtx, "framebuffer", "Subgroup clustered category tests: framebuffer"));
#ifndef CTS_USES_VULKANSC
    de::MovePtr<TestCaseGroup> raytracingGroup  (new TestCaseGroup(testCtx, "ray_tracing", "Subgroup clustered category tests: ray tracing"));
    de::MovePtr<TestCaseGroup> meshGroup        (new TestCaseGroup(testCtx, "mesh", "Subgroup clustered category tests: mesh shading"));
#endif // CTS_USES_VULKANSC
    const VkShaderStageFlags fbStages[] =
    {
        VK_SHADER_STAGE_VERTEX_BIT,
        VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
        VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
        VK_SHADER_STAGE_GEOMETRY_BIT,
    };
#ifndef CTS_USES_VULKANSC
    const VkShaderStageFlags meshStages[] =
    {
        VK_SHADER_STAGE_MESH_BIT_EXT,
        VK_SHADER_STAGE_TASK_BIT_EXT,
    };
#endif // CTS_USES_VULKANSC
    const deBool boolValues[] =
    {
        DE_FALSE,
        DE_TRUE
    };

    {
        const vector<VkFormat> formats = subgroups::getAllFormats();

        for (size_t formatIndex = 0; formatIndex < formats.size(); ++formatIndex)
        {
            const VkFormat format     = formats[formatIndex];
            const string   formatName = subgroups::getFormatNameForGLSL(format);
            const bool     isBool     = subgroups::isFormatBool(format);
            const bool     isFloat    = subgroups::isFormatFloat(format);

            for (int opTypeIndex = 0; opTypeIndex < OPTYPE_CLUSTERED_LAST; ++opTypeIndex)
            {
                const OpType   opType      = static_cast<OpType>(opTypeIndex);
                const Operator op          = getOperator(opType);
                const bool     isBitwiseOp = (op == OPERATOR_AND || op == OPERATOR_OR || op == OPERATOR_XOR);

                // Skip float formats for the bitwise operations.
                if (isFloat && isBitwiseOp)
                    continue;

                // Skip bool formats when it's not a bitwise operation.
                if (isBool && !isBitwiseOp)
                    continue;

                const string name = de::toLower(getOpTypeName(op)) + "_" + formatName;

                for (size_t groupSizeNdx = 0; groupSizeNdx < DE_LENGTH_OF_ARRAY(boolValues); ++groupSizeNdx)
                {
                    const deBool requiredSubgroupSize = boolValues[groupSizeNdx];
                    const string testName             = name + (requiredSubgroupSize ? "_requiredsubgroupsize" : "");
                    const CaseDefinition caseDef =
                    {
                        op,                            // Operator             op;
                        VK_SHADER_STAGE_COMPUTE_BIT,   // VkShaderStageFlags   shaderStage;
                        format,                        // VkFormat             format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool>  geometryPointSizeSupported;
                        requiredSubgroupSize,          // deBool               requiredSubgroupSize;
                    };

                    addFunctionCaseWithPrograms(computeGroup.get(), testName, "", supportedCheck, initPrograms, test, caseDef);
                }

#ifndef CTS_USES_VULKANSC
                for (size_t groupSizeNdx = 0; groupSizeNdx < DE_LENGTH_OF_ARRAY(boolValues); ++groupSizeNdx)
                {
                    for (const auto& stage : meshStages)
                    {
                        const deBool requiredSubgroupSize = boolValues[groupSizeNdx];
                        const string testName             = name + (requiredSubgroupSize ? "_requiredsubgroupsize" : "") + "_" + getShaderStageName(stage);
                        const CaseDefinition caseDef =
                        {
                            op,                            // Operator             op;
                            stage,                         // VkShaderStageFlags   shaderStage;
                            format,                        // VkFormat             format;
                            de::SharedPtr<bool>(new bool), // de::SharedPtr<bool>  geometryPointSizeSupported;
                            requiredSubgroupSize,          // deBool               requiredSubgroupSize;
                        };

                        addFunctionCaseWithPrograms(meshGroup.get(), testName, "", supportedCheck, initPrograms, test, caseDef);
                    }
                }
#endif // CTS_USES_VULKANSC

                {
                    const CaseDefinition caseDef =
                    {
                        op,                            // Operator             op;
                        VK_SHADER_STAGE_ALL_GRAPHICS,  // VkShaderStageFlags   shaderStage;
                        format,                        // VkFormat             format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool>  geometryPointSizeSupported;
                        DE_FALSE                       // deBool               requiredSubgroupSize;
                    };

                    addFunctionCaseWithPrograms(graphicGroup.get(), name, "", supportedCheck, initPrograms, test, caseDef);
                }

                for (int stageIndex = 0; stageIndex < DE_LENGTH_OF_ARRAY(fbStages); ++stageIndex)
                {
                    const CaseDefinition caseDef =
                    {
                        op,                            // Operator             op;
                        fbStages[stageIndex],          // VkShaderStageFlags   shaderStage;
                        format,                        // VkFormat             format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool>  geometryPointSizeSupported;
                        DE_FALSE                       // deBool               requiredSubgroupSize;
                    };
                    const string testName = name + "_" + getShaderStageName(caseDef.shaderStage);

                    addFunctionCaseWithPrograms(framebufferGroup.get(), testName, "", supportedCheck, initFrameBufferPrograms, noSSBOtest, caseDef);
                }
            }
        }
    }

#ifndef CTS_USES_VULKANSC
    {
        const vector<VkFormat> formats = subgroups::getAllRayTracingFormats();

        for (size_t formatIndex = 0; formatIndex < formats.size(); ++formatIndex)
        {
            const VkFormat format     = formats[formatIndex];
            const string   formatName = subgroups::getFormatNameForGLSL(format);
            const bool     isBool     = subgroups::isFormatBool(format);
            const bool     isFloat    = subgroups::isFormatFloat(format);

            for (int opTypeIndex = 0; opTypeIndex < OPTYPE_CLUSTERED_LAST; ++opTypeIndex)
            {
                const OpType   opType      = static_cast<OpType>(opTypeIndex);
                const Operator op          = getOperator(opType);
                const bool     isBitwiseOp = (op == OPERATOR_AND || op == OPERATOR_OR || op == OPERATOR_XOR);

                // Skip float formats for the bitwise operations.
                if (isFloat && isBitwiseOp)
                    continue;

                // Skip bool formats when it's not a bitwise operation.
                if (isBool && !isBitwiseOp)
                    continue;

                {
                    const string name = de::toLower(getOpTypeName(op)) + "_" + formatName;
                    const CaseDefinition caseDef =
                    {
                        op,                            // Operator             op;
                        SHADER_STAGE_ALL_RAY_TRACING,  // VkShaderStageFlags   shaderStage;
                        format,                        // VkFormat             format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool>  geometryPointSizeSupported;
                        DE_FALSE                       // deBool               requiredSubgroupSize;
                    };

                    addFunctionCaseWithPrograms(raytracingGroup.get(), name, "", supportedCheck, initPrograms, test, caseDef);
                }
            }
        }
    }
#endif // CTS_USES_VULKANSC

    group->addChild(graphicGroup.release());
    group->addChild(computeGroup.release());
    group->addChild(framebufferGroup.release());
#ifndef CTS_USES_VULKANSC
    group->addChild(raytracingGroup.release());
    group->addChild(meshGroup.release());
#endif // CTS_USES_VULKANSC

    return group.release();
}
} // subgroups
} // vkt