/*-------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2021 The Khronos Group Inc.
 * Copyright (c) 2021 Valve Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Tests for VK_VALVE_mutable_descriptor_type and VK_EXT_mutable_descriptor_type.
 *//*--------------------------------------------------------------------*/
#include "vktBindingMutableTests.hpp"
#include "vktTestCase.hpp"
#include "vktTestGroupUtil.hpp"
#include "vktCustomInstancesDevices.hpp"

#include "tcuCommandLine.hpp"

#include "vkDefs.hpp"
#include "vkRefUtil.hpp"
#include "vkQueryUtil.hpp"
#include "vkImageWithMemory.hpp"
#include "vkBufferWithMemory.hpp"
#include "vkTypeUtil.hpp"
#include "vkObjUtil.hpp"
#include "vkBarrierUtil.hpp"
#include "vkCmdUtil.hpp"
#include "vkBuilderUtil.hpp"
#include "vkRayTracingUtil.hpp"

#include "deUniquePtr.hpp"
#include "deSTLUtil.hpp"
#include "deStringUtil.hpp"

#include <vector>
#include <algorithm>
#include <iterator>
#include <set>
#include <sstream>
#include <limits>

namespace vkt
{
namespace BindingModel
{

namespace
{

using namespace vk;

de::SharedPtr<Move<vk::VkDevice>> g_singletonDevice;

VkDevice getDevice (Context& context)
{
    if (!g_singletonDevice)
    {
        const float queuePriority = 1.0f;

        // Create a universal queue that supports graphics and compute.
        const VkDeviceQueueCreateInfo queueParams
        {
            VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // VkStructureType             sType;
            DE_NULL,                                    // const void*                 pNext;
            0u,                                         // VkDeviceQueueCreateFlags    flags;
            context.getUniversalQueueFamilyIndex(),     // deUint32                    queueFamilyIndex;
            1u,                                         // deUint32                    queueCount;
            &queuePriority                              // const float*                pQueuePriorities;
        };

        // \note Extensions in core are not explicitly enabled even though
        //       they are in the extension list advertised to tests.
        const auto& extensionPtrs = context.getDeviceCreationExtensions();

        VkPhysicalDeviceAccelerationStructureFeaturesKHR accelerationStructureFeatures = initVulkanStructure();
        VkPhysicalDeviceBufferDeviceAddressFeatures      bufferDeviceAddressFeatures   = initVulkanStructure();
        VkPhysicalDeviceRayTracingPipelineFeaturesKHR    rayTracingPipelineFeatures    = initVulkanStructure();
        VkPhysicalDeviceRayQueryFeaturesKHR              rayQueryFeatures              = initVulkanStructure();
        VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutableDescriptorTypeFeatures = initVulkanStructure();
        VkPhysicalDeviceDescriptorIndexingFeatures       descriptorIndexingFeatures    = initVulkanStructure();
        VkPhysicalDeviceFeatures2                        features2                     = initVulkanStructure();

        const auto addFeatures = makeStructChainAdder(&features2);

        if (context.isDeviceFunctionalitySupported("VK_KHR_acceleration_structure"))
            addFeatures(&accelerationStructureFeatures);

        if (context.isDeviceFunctionalitySupported("VK_KHR_buffer_device_address"))
            addFeatures(&bufferDeviceAddressFeatures);

        if (context.isDeviceFunctionalitySupported("VK_KHR_ray_tracing_pipeline"))
            addFeatures(&rayTracingPipelineFeatures);

        if (context.isDeviceFunctionalitySupported("VK_KHR_ray_query"))
            addFeatures(&rayQueryFeatures);

        if (context.isDeviceFunctionalitySupported("VK_VALVE_mutable_descriptor_type") || context.isDeviceFunctionalitySupported("VK_EXT_mutable_descriptor_type"))
            addFeatures(&mutableDescriptorTypeFeatures);

        if (context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing"))
            addFeatures(&descriptorIndexingFeatures);

        context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);
        features2.features.robustBufferAccess = VK_FALSE; // Disable robustness features.

        const VkDeviceCreateInfo deviceCreateInfo
        {
            VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,       // VkStructureType                    sType;
            &features2,                                 // const void*                        pNext;
            (VkDeviceCreateFlags)0u,                    // VkDeviceCreateFlags                flags;
            1u,                                         // deUint32                           queueCreateInfoCount;
            &queueParams,                               // const VkDeviceQueueCreateInfo*     pQueueCreateInfos;
            0u,                                         // deUint32                           enabledLayerCount;
            nullptr,                                    // const char* const*                 ppEnabledLayerNames;
            de::sizeU32(extensionPtrs),                 // deUint32                           enabledExtensionCount;
            de::dataOrNull(extensionPtrs),              // const char* const*                 ppEnabledExtensionNames;
            DE_NULL,                                    // const VkPhysicalDeviceFeatures*    pEnabledFeatures;
        };

        Move<VkDevice> device = createCustomDevice(context.getTestContext().getCommandLine().isValidationEnabled(),
                                                   context.getPlatformInterface(), context.getInstance(),
                                                   context.getInstanceInterface(), context.getPhysicalDevice(),
                                                   &deviceCreateInfo);
        g_singletonDevice = de::SharedPtr<Move<VkDevice>>(new Move<VkDevice>(device));
    }

    return g_singletonDevice->get();
}
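
// Illustrative sketch (not part of the test logic): after the feature-chain
// setup above, features2 heads a pNext chain that contains one entry per
// supported extension, e.g. with everything supported:
//
//   features2 -> descriptorIndexingFeatures -> mutableDescriptorTypeFeatures
//             -> rayQueryFeatures -> ... -> nullptr   (exact order may vary)
//
// getPhysicalDeviceFeatures2() fills in every structure in that chain, and the
// same chain is handed to vkCreateDevice via deviceCreateInfo.pNext, enabling
// exactly the features the implementation reported as supported.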

deUint32 getDescriptorNumericValue (deUint32 iteration, deUint32 bindingIdx, deUint32 descriptorIdx = 0u)
{
    // When assigning numeric values for the descriptor contents, each descriptor will get 0x5aIIBBDD, where II is an octet
    // containing the iteration index, BB is an octet containing the binding index and DD is the descriptor index inside that
    // binding.
    constexpr deUint32 kNumericValueBase = 0x5a000000u;

    return (kNumericValueBase | ((iteration & 0xFFu) << 16) | ((bindingIdx & 0xFFu) << 8) | (descriptorIdx & 0xFFu));
}
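
// Illustrative sketch (not used by the tests): unpacking a value produced by
// getDescriptorNumericValue() back into its components.
//
//   const deUint32 value         = getDescriptorNumericValue(2u, 3u, 4u); // 0x5a020304u
//   const deUint32 iteration     = ((value >> 16) & 0xFFu);               // 2
//   const deUint32 bindingIdx    = ((value >> 8) & 0xFFu);                // 3
//   const deUint32 descriptorIdx = (value & 0xFFu);                       // 4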

deUint16 getAccelerationStructureOffsetX (deUint32 descriptorNumericValue)
{
    // Keep the lowest 16 bits (binding and descriptor idx) as the offset.
    return static_cast<deUint16>(descriptorNumericValue);
}

// Value that will be stored in the output buffer to signal success reading values.
deUint32 getExpectedOutputBufferValue ()
{
    return 2u;
}

// This value will be stored in an image to be sampled when checking descriptors containing samplers alone.
deUint32 getExternalSampledImageValue ()
{
    return 0x41322314u;
}

// Value that will be ORed with the descriptor value before writing.
deUint32 getStoredValueMask ()
{
    return 0xFF000000u;
}

VkFormat getDescriptorImageFormat ()
{
    return VK_FORMAT_R32_UINT;
}

VkExtent3D getDefaultExtent ()
{
    return makeExtent3D(1u, 1u, 1u);
}

// Convert value to hexadecimal.
std::string toHex (deUint32 val)
{
    std::ostringstream s;
    s << "0x" << std::hex << val << "u";
    return s.str();
}

// Returns the list of descriptor types that cannot be part of a mutable descriptor.
std::vector<VkDescriptorType> getForbiddenMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
    };
}

// Returns the list of descriptor types that are mandatory for the extension.
std::vector<VkDescriptorType> getMandatoryMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
        VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
    };
}
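
// Illustrative sketch (not used directly here): how a type list like the one
// above is typically plugged into a descriptor set layout. Each mutable binding
// gets one VkMutableDescriptorTypeListEXT entry, chained through
// VkMutableDescriptorTypeCreateInfoEXT into the layout create info.
//
//   const auto mandatoryTypes = getMandatoryMutableTypes();
//
//   VkMutableDescriptorTypeListEXT typeList;
//   typeList.descriptorTypeCount = de::sizeU32(mandatoryTypes);
//   typeList.pDescriptorTypes    = de::dataOrNull(mandatoryTypes);
//
//   VkMutableDescriptorTypeCreateInfoEXT mutableInfo = initVulkanStructure();
//   mutableInfo.mutableDescriptorTypeListCount = 1u; // One list per binding.
//   mutableInfo.pMutableDescriptorTypeLists    = &typeList;
//   // mutableInfo is then chained into VkDescriptorSetLayoutCreateInfo::pNext.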

// This helps quickly transform a vector of descriptor types into a bitmask, which makes it easier to check some conditions.
enum DescriptorTypeFlagBits
{
    DTFB_SAMPLER                    = (1 << 0),
    DTFB_COMBINED_IMAGE_SAMPLER     = (1 << 1),
    DTFB_SAMPLED_IMAGE              = (1 << 2),
    DTFB_STORAGE_IMAGE              = (1 << 3),
    DTFB_UNIFORM_TEXEL_BUFFER       = (1 << 4),
    DTFB_STORAGE_TEXEL_BUFFER       = (1 << 5),
    DTFB_UNIFORM_BUFFER             = (1 << 6),
    DTFB_STORAGE_BUFFER             = (1 << 7),
    DTFB_UNIFORM_BUFFER_DYNAMIC     = (1 << 8),
    DTFB_STORAGE_BUFFER_DYNAMIC     = (1 << 9),
    DTFB_INPUT_ATTACHMENT           = (1 << 10),
    DTFB_INLINE_UNIFORM_BLOCK_EXT   = (1 << 11),
    DTFB_ACCELERATION_STRUCTURE_KHR = (1 << 12),
    DTFB_ACCELERATION_STRUCTURE_NV  = (1 << 13),
    DTFB_MUTABLE                    = (1 << 14),
};

using DescriptorTypeFlags = deUint32;

// Convert type to its corresponding flag bit.
DescriptorTypeFlagBits toDescriptorTypeFlagBit (VkDescriptorType descriptorType)
{
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:                    return DTFB_SAMPLER;
    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:     return DTFB_COMBINED_IMAGE_SAMPLER;
    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:              return DTFB_SAMPLED_IMAGE;
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:              return DTFB_STORAGE_IMAGE;
    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:       return DTFB_UNIFORM_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:       return DTFB_STORAGE_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:             return DTFB_UNIFORM_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:             return DTFB_STORAGE_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:     return DTFB_UNIFORM_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:     return DTFB_STORAGE_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:           return DTFB_INPUT_ATTACHMENT;
    case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:   return DTFB_INLINE_UNIFORM_BLOCK_EXT;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: return DTFB_ACCELERATION_STRUCTURE_KHR;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:  return DTFB_ACCELERATION_STRUCTURE_NV;
    case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:                return DTFB_MUTABLE;
    default: break;
    }

    // Unreachable.
    DE_ASSERT(false);
    return DTFB_SAMPLER;
}

// Convert vector of descriptor types to a bitfield.
DescriptorTypeFlags toDescriptorTypeFlags (const std::vector<VkDescriptorType>& types)
{
    DescriptorTypeFlags result = 0u;
    for (const auto& t : types)
        result |= toDescriptorTypeFlagBit(t);
    return result;
}

// Convert bitfield to vector of descriptor types.
std::vector<VkDescriptorType> toDescriptorTypeVector (DescriptorTypeFlags bitfield)
{
    std::vector<VkDescriptorType> result;

    if (bitfield & DTFB_SAMPLER)                    result.push_back(VK_DESCRIPTOR_TYPE_SAMPLER);
    if (bitfield & DTFB_COMBINED_IMAGE_SAMPLER)     result.push_back(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (bitfield & DTFB_SAMPLED_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
    if (bitfield & DTFB_STORAGE_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
    if (bitfield & DTFB_UNIFORM_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
    if (bitfield & DTFB_STORAGE_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    if (bitfield & DTFB_STORAGE_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
    if (bitfield & DTFB_STORAGE_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
    if (bitfield & DTFB_INPUT_ATTACHMENT)           result.push_back(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    if (bitfield & DTFB_INLINE_UNIFORM_BLOCK_EXT)   result.push_back(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_KHR) result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_NV)  result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
    if (bitfield & DTFB_MUTABLE)                    result.push_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT);

    return result;
}
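
// Illustrative sketch: converting to flags and back normalizes a type list,
// removing duplicates and imposing the canonical bit order used above.
//
//   const std::vector<VkDescriptorType> types =
//   {
//       VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
//       VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
//       VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, // Duplicate.
//   };
//   const auto normalized = toDescriptorTypeVector(toDescriptorTypeFlags(types));
//   // normalized == { SAMPLED_IMAGE, STORAGE_BUFFER }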

// How to create the source set when copying descriptors from another set.
// * MUTABLE means to transform bindings into mutable bindings.
// * NONMUTABLE means to transform bindings into non-mutable bindings.
// * NO_SOURCE means no source set is used (descriptors are written directly into the destination set).
enum class SourceSetStrategy
{
    MUTABLE = 0,
    NONMUTABLE,
    NO_SOURCE,
};

enum class PoolMutableStrategy
{
    KEEP_TYPES = 0,
    EXPAND_TYPES,
    NO_TYPES,
};

// Type of information that's present in VkWriteDescriptorSet.
enum class WriteType
{
    IMAGE_INFO = 0,
    BUFFER_INFO,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE_INFO,
};

struct WriteInfo
{
    WriteType writeType;
    union
    {
        VkDescriptorImageInfo                        imageInfo;
        VkDescriptorBufferInfo                       bufferInfo;
        VkBufferView                                 bufferView;
        VkWriteDescriptorSetAccelerationStructureKHR asInfo;
    };

    explicit WriteInfo (const VkDescriptorImageInfo& info_)
        : writeType(WriteType::IMAGE_INFO)
        , imageInfo(info_)
    {}

    explicit WriteInfo (const VkDescriptorBufferInfo& info_)
        : writeType(WriteType::BUFFER_INFO)
        , bufferInfo(info_)
    {}

    explicit WriteInfo (VkBufferView view_)
        : writeType(WriteType::BUFFER_VIEW)
        , bufferView(view_)
    {}

    explicit WriteInfo (const VkWriteDescriptorSetAccelerationStructureKHR& asInfo_)
        : writeType(WriteType::ACCELERATION_STRUCTURE_INFO)
        , asInfo(asInfo_)
    {}
};
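
// Illustrative sketch (assumed usage): a WriteInfo feeds exactly one of the
// pointer members of VkWriteDescriptorSet, depending on its writeType.
//
//   VkWriteDescriptorSet write = initVulkanStructure();
//   switch (info.writeType)
//   {
//   case WriteType::IMAGE_INFO:                  write.pImageInfo       = &info.imageInfo;  break;
//   case WriteType::BUFFER_INFO:                 write.pBufferInfo      = &info.bufferInfo; break;
//   case WriteType::BUFFER_VIEW:                 write.pTexelBufferView = &info.bufferView; break;
//   case WriteType::ACCELERATION_STRUCTURE_INFO: write.pNext            = &info.asInfo;     break; // Chained, not a member.
//   }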

// Resource backing up a single binding.
enum class ResourceType
{
    SAMPLER = 0,
    IMAGE,
    COMBINED_IMAGE_SAMPLER,
    BUFFER,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE,
};

// Type of resource backing up a particular descriptor type.
ResourceType toResourceType (VkDescriptorType descriptorType)
{
    ResourceType resourceType = ResourceType::SAMPLER;
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:
        resourceType = ResourceType::SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
        resourceType = ResourceType::COMBINED_IMAGE_SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
        resourceType = ResourceType::IMAGE;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
        resourceType = ResourceType::BUFFER_VIEW;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        resourceType = ResourceType::BUFFER;
        break;

    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
        resourceType = ResourceType::ACCELERATION_STRUCTURE;
        break;

    default:
        DE_ASSERT(false);
        break;
    }

    return resourceType;
}

bool isShaderWritable (VkDescriptorType descriptorType)
{
    return (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
            descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
}

Move<VkSampler> makeDefaultSampler (const DeviceInterface& vkd, VkDevice device)
{
    const VkSamplerCreateInfo samplerCreateInfo = {
        VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,  // VkStructureType         sType;
        nullptr,                                // const void*             pNext;
        0u,                                     // VkSamplerCreateFlags    flags;
        VK_FILTER_NEAREST,                      // VkFilter                magFilter;
        VK_FILTER_NEAREST,                      // VkFilter                minFilter;
        VK_SAMPLER_MIPMAP_MODE_NEAREST,         // VkSamplerMipmapMode     mipmapMode;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode    addressModeU;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode    addressModeV;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode    addressModeW;
        0.f,                                    // float                   mipLodBias;
        VK_FALSE,                               // VkBool32                anisotropyEnable;
        1.f,                                    // float                   maxAnisotropy;
        VK_FALSE,                               // VkBool32                compareEnable;
        VK_COMPARE_OP_ALWAYS,                   // VkCompareOp             compareOp;
        0.f,                                    // float                   minLod;
        0.f,                                    // float                   maxLod;
        VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,  // VkBorderColor           borderColor;
        VK_FALSE,                               // VkBool32                unnormalizedCoordinates;
    };

    return createSampler(vkd, device, &samplerCreateInfo);
}

de::MovePtr<ImageWithMemory> makeDefaultImage (const DeviceInterface& vkd, VkDevice device, Allocator& alloc)
{
    const auto extent = makeExtent3D(1u, 1u, 1u);
    const VkImageUsageFlags usageFlags = (
        VK_IMAGE_USAGE_SAMPLED_BIT
        | VK_IMAGE_USAGE_STORAGE_BIT
        | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_TRANSFER_SRC_BIT
        | VK_IMAGE_USAGE_TRANSFER_DST_BIT);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,    // VkStructureType          sType;
        nullptr,                                // const void*              pNext;
        0u,                                     // VkImageCreateFlags       flags;
        VK_IMAGE_TYPE_2D,                       // VkImageType              imageType;
        getDescriptorImageFormat(),             // VkFormat                 format;
        extent,                                 // VkExtent3D               extent;
        1u,                                     // deUint32                 mipLevels;
        1u,                                     // deUint32                 arrayLayers;
        VK_SAMPLE_COUNT_1_BIT,                  // VkSampleCountFlagBits    samples;
        VK_IMAGE_TILING_OPTIMAL,                // VkImageTiling            tiling;
        usageFlags,                             // VkImageUsageFlags        usage;
        VK_SHARING_MODE_EXCLUSIVE,              // VkSharingMode            sharingMode;
        0u,                                     // deUint32                 queueFamilyIndexCount;
        nullptr,                                // const deUint32*          pQueueFamilyIndices;
        VK_IMAGE_LAYOUT_UNDEFINED,              // VkImageLayout            initialLayout;
    };
    return de::MovePtr<ImageWithMemory>(new ImageWithMemory(vkd, device, alloc, imageCreateInfo, MemoryRequirement::Any));
}

Move<VkImageView> makeDefaultImageView (const DeviceInterface& vkd, VkDevice device, VkImage image)
{
    const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
    return makeImageView(vkd, device, image, VK_IMAGE_VIEW_TYPE_2D, getDescriptorImageFormat(), subresourceRange);
}

de::MovePtr<BufferWithMemory> makeDefaultBuffer (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 numElements = 1u)
{
    const VkBufferUsageFlags bufferUsage = (
        VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
        | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_TRANSFER_SRC_BIT
        | VK_BUFFER_USAGE_TRANSFER_DST_BIT);

    const auto bufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numElements));

    const auto bufferCreateInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

    return de::MovePtr<BufferWithMemory>(new BufferWithMemory(vkd, device, alloc, bufferCreateInfo, MemoryRequirement::HostVisible));
}

Move<VkBufferView> makeDefaultBufferView (const DeviceInterface& vkd, VkDevice device, VkBuffer buffer)
{
    const auto bufferOffset = static_cast<VkDeviceSize>(0);
    const auto bufferSize   = static_cast<VkDeviceSize>(sizeof(deUint32));

    return makeBufferView(vkd, device, buffer, getDescriptorImageFormat(), bufferOffset, bufferSize);
}

struct AccelerationStructureData
{
    using TLASPtr = de::MovePtr<TopLevelAccelerationStructure>;
    using BLASPtr = de::MovePtr<BottomLevelAccelerationStructure>;

    TLASPtr tlas;
    BLASPtr blas;

    void swap (AccelerationStructureData& other)
    {
        auto myTlasPtr = tlas.release();
        auto myBlasPtr = blas.release();

        auto otherTlasPtr = other.tlas.release();
        auto otherBlasPtr = other.blas.release();

        tlas = TLASPtr(otherTlasPtr);
        blas = BLASPtr(otherBlasPtr);

        other.tlas = TLASPtr(myTlasPtr);
        other.blas = BLASPtr(myBlasPtr);
    }

    AccelerationStructureData () : tlas(), blas() {}

    AccelerationStructureData (AccelerationStructureData&& other)
        : AccelerationStructureData()
    {
        swap(other);
    }

    AccelerationStructureData& operator= (AccelerationStructureData&& other)
    {
        swap(other);
        return *this;
    }
};
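
// The swap-based move operations above let AccelerationStructureData be
// returned by value and stored in containers without ever copying the
// underlying de::MovePtr members. A minimal usage sketch:
//
//   AccelerationStructureData a = makeDefaultAccelerationStructure(vkd, device, cmdBuffer, alloc, true, 0);
//   AccelerationStructureData b = std::move(a); // a's tlas and blas are now empty.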

AccelerationStructureData makeDefaultAccelerationStructure (const DeviceInterface& vkd, VkDevice device, VkCommandBuffer cmdBuffer, Allocator& alloc, bool triangles, deUint16 offsetX)
{
    AccelerationStructureData data;

    // Triangle around (offsetX, 0) with depth 5.0.
    const float middleX = static_cast<float>(offsetX);
    const float leftX   = middleX - 0.5f;
    const float rightX  = middleX + 0.5f;
    const float topY    = 0.5f;
    const float bottomY = -0.5f;
    const float depth   = 5.0f;

    std::vector<tcu::Vec3> vertices;

    if (triangles)
    {
        vertices.reserve(3u);
        vertices.emplace_back(middleX, topY, depth);
        vertices.emplace_back(rightX, bottomY, depth);
        vertices.emplace_back(leftX, bottomY, depth);
    }
    else
    {
        vertices.reserve(2u);
        vertices.emplace_back(leftX, bottomY, depth);
        vertices.emplace_back(rightX, topY, depth);
    }

    data.tlas = makeTopLevelAccelerationStructure();
    data.blas = makeBottomLevelAccelerationStructure();

    VkGeometryInstanceFlagsKHR instanceFlags = 0u;
    if (triangles)
        instanceFlags |= VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR;

    data.blas->addGeometry(vertices, triangles, VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR);
    data.blas->createAndBuild(vkd, device, cmdBuffer, alloc);

    de::SharedPtr<BottomLevelAccelerationStructure> blasSharedPtr (data.blas.release());
    data.tlas->setInstanceCount(1u);
    data.tlas->addInstance(blasSharedPtr, identityMatrix3x4, 0u, 0xFFu, 0u, instanceFlags);
    data.tlas->createAndBuild(vkd, device, cmdBuffer, alloc);

    return data;
}

const auto kShaderAccess = (VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT);

struct Resource
{
    VkDescriptorType              descriptorType;
    ResourceType                  resourceType;
    Move<VkSampler>               sampler;
    de::MovePtr<ImageWithMemory>  imageWithMemory;
    Move<VkImageView>             imageView;
    de::MovePtr<BufferWithMemory> bufferWithMemory;
    Move<VkBufferView>            bufferView;
    AccelerationStructureData     asData;
    deUint32                      initialValue;

    Resource (VkDescriptorType descriptorType_, const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, bool useAABBs, deUint32 initialValue_, deUint32 numElements = 1u)
        : descriptorType   (descriptorType_)
        , resourceType     (toResourceType(descriptorType))
        , sampler          ()
        , imageWithMemory  ()
        , imageView        ()
        , bufferWithMemory ()
        , bufferView       ()
        , asData           ()
        , initialValue     (initialValue_)
    {
        if (numElements != 1u)
            DE_ASSERT(resourceType == ResourceType::BUFFER);

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            sampler = makeDefaultSampler(vkd, device);
            break;

        case ResourceType::IMAGE:
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            sampler         = makeDefaultSampler(vkd, device);
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::BUFFER:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc, numElements);
            break;

        case ResourceType::BUFFER_VIEW:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc);
            bufferView       = makeDefaultBufferView(vkd, device, bufferWithMemory->get());
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
                const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
                const auto cmdBuffer    = cmdBufferPtr.get();
                const bool triangles    = !useAABBs;

                beginCommandBuffer(vkd, cmdBuffer);
                asData = makeDefaultAccelerationStructure(vkd, device, cmdBuffer, alloc, triangles, getAccelerationStructureOffsetX(initialValue));
                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        if (imageWithMemory || bufferWithMemory)
        {
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(initialValue));
                const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
                const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto& bufferAlloc = stagingBuffer.getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                deMemcpy(bufferData, &initialValue, sizeof(initialValue));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Transition and copy image.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));

                // Switch image to TRANSFER_DST_OPTIMAL before copying data to it.
                const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    0u, VK_ACCESS_TRANSFER_WRITE_BIT,
                    VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy data to image.
                vkd.cmdCopyBufferToImage(cmdBuffer, stagingBuffer.get(), imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);

                // Switch image to the GENERAL layout before reading or writing to it from shaders.
                const auto postTransferBarrier = makeImageMemoryBarrier(
                    VK_ACCESS_TRANSFER_WRITE_BIT, kShaderAccess,
                    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &postTransferBarrier);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }

            if (bufferWithMemory)
            {
                auto& bufferAlloc = bufferWithMemory->getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                const std::vector<deUint32> bufferValues(numElements, initialValue);
                deMemcpy(bufferData, bufferValues.data(), de::dataSize(bufferValues));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure host writes happen before shader reads/writes. Note: in theory this barrier is not needed,
                // as submitting the command buffer already makes prior host writes visible to the device.
                const auto hostToShaderBarrier = makeMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, kShaderAccess);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    1u, &hostToShaderBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
        }
    }

    // Remove problematic copy constructor.
    Resource (const Resource&) = delete;

    // Make it movable.
    Resource (Resource&& other) noexcept
        : descriptorType   (other.descriptorType)
        , resourceType     (other.resourceType)
        , sampler          (other.sampler)
        , imageWithMemory  (other.imageWithMemory.release())
        , imageView        (other.imageView)
        , bufferWithMemory (other.bufferWithMemory.release())
        , bufferView       (other.bufferView)
        , asData           (std::move(other.asData))
        , initialValue     (other.initialValue)
    {}

    ~Resource ()
    {}

    WriteInfo makeWriteInfo () const
    {
        using WriteInfoPtr = de::MovePtr<WriteInfo>;

        WriteInfoPtr writeInfo;

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_UNDEFINED };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::IMAGE:
            {
                const VkDescriptorImageInfo imageInfo = { DE_NULL, imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::BUFFER:
            {
                const VkDescriptorBufferInfo bufferInfo = { bufferWithMemory->get(), 0ull, static_cast<VkDeviceSize>(sizeof(deUint32)) };
                writeInfo = WriteInfoPtr(new WriteInfo(bufferInfo));
            }
            break;

        case ResourceType::BUFFER_VIEW:
            writeInfo = WriteInfoPtr(new WriteInfo(bufferView.get()));
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                VkWriteDescriptorSetAccelerationStructureKHR asWrite = initVulkanStructure();
                asWrite.accelerationStructureCount = 1u;
                asWrite.pAccelerationStructures    = asData.tlas.get()->getPtr();
                writeInfo = WriteInfoPtr(new WriteInfo(asWrite));
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        return *writeInfo;
    }

    tcu::Maybe<deUint32> getStoredValue (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 position = 0u) const
    {
        if (position != 0u)
            DE_ASSERT(static_cast<bool>(bufferWithMemory));

        if (imageWithMemory || bufferWithMemory)
        {
            // Command pool and buffer.
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                deUint32 result;
                const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(result));
                const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_DST_BIT);
                const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto& bufferAlloc = stagingBuffer.getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                // Copy image value to staging buffer.
                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure shader accesses happen before transfers and prepare image for transfer.
                const auto colorResourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    kShaderAccess, VK_ACCESS_TRANSFER_READ_BIT,
                    VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                    imageWithMemory->get(), colorResourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy image contents to staging buffer.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
                vkd.cmdCopyImageToBuffer(cmdBuffer, imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, stagingBuffer.get(), 1u, &copyRegion);

                // Make sure writes are visible from the host.
                const auto postTransferBarrier = makeMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u, &postTransferBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                // Get value from staging buffer.
                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData, sizeof(result));
                return tcu::just(result);
            }

            if (bufferWithMemory)
            {
                auto& bufferAlloc = bufferWithMemory->getAllocation();
                auto  bufferData  = reinterpret_cast<const char*>(bufferAlloc.getHostPtr());
                deUint32 result;

                // Make sure shader writes are visible from the host.
                beginCommandBuffer(vkd, cmdBuffer);

                const auto shaderToHostBarrier = makeMemoryBarrier(kShaderAccess, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
                    1u, &shaderToHostBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData + sizeof(deUint32) * static_cast<size_t>(position), sizeof(result));
                return tcu::just(result);
            }
        }

        return tcu::Nothing;
    }
};
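
// Illustrative usage sketch (not part of the tests as written): creating a
// storage-buffer resource with a known initial value and reading it back after
// a shader has ORed in the stored-value mask, as the check code below does.
//
//   const auto value = getDescriptorNumericValue(iteration, bindingIdx);
//   Resource res (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue,
//                 false /*useAABBs*/, value);
//   // ... write the descriptor, run the pipeline ...
//   const auto stored = res.getStoredValue(vkd, device, alloc, qIndex, queue);
//   if (stored && stored.get() != (value | getStoredValueMask()))
//       TCU_FAIL("Unexpected value in storage buffer");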

struct BindingInterface
{
    virtual ~BindingInterface () {}

    // Minimum number of iterations to test all mutable types.
    virtual deUint32 maxTypes () const = 0;

    // Types that will be used by the binding at a given iteration.
    virtual std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const = 0;

    // Binding's main type.
    virtual VkDescriptorType mainType () const = 0;

    // Binding's list of mutable types, if present.
    virtual std::vector<VkDescriptorType> mutableTypes () const = 0;

    // Descriptor count in the binding.
    virtual size_t size () const = 0;

    // Is the binding an array binding?
    virtual bool isArray () const = 0;

    // Is the binding an unbounded array?
    virtual bool isUnbounded () const = 0;

    // Will the binding use different descriptor types in a given iteration?
    virtual bool needsAliasing (deUint32 iteration) const
    {
        const auto typesVec = typesAtIteration(iteration);
        std::set<VkDescriptorType> descTypes(begin(typesVec), end(typesVec));
        return (descTypes.size() > 1u);
    }

    // Will the binding need aliasing on any iteration up to a given number?
    virtual bool needsAliasingUpTo (deUint32 numIterations) const
    {
        std::vector<bool> needsAliasingFlags;
        needsAliasingFlags.reserve(numIterations);

        for (deUint32 iter = 0u; iter < numIterations; ++iter)
            needsAliasingFlags.push_back(needsAliasing(iter));

        return std::any_of(begin(needsAliasingFlags), end(needsAliasingFlags), [] (bool f) { return f; });
    }

private:
    virtual bool hasDescriptorType (deUint32 iteration, VkDescriptorType descriptorType) const
    {
        const auto typesVec = typesAtIteration(iteration);
        return (std::find(begin(typesVec), end(typesVec), descriptorType) != end(typesVec));
    }

public:
    // Convert one particular binding to a mutable or non-mutable equivalent binding, returning the equivalent binding.
    virtual de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const = 0;
    virtual de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const = 0;

    // Create resources needed to back up this binding.
    virtual std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const = 0;

    // Get GLSL binding declarations. Note: no array size means no array, if size is < 0 it means unbounded array.
    virtual std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const = 0;

    // Get GLSL statements to check this binding.
    virtual std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const = 0;
};

// Represents a single binding that will be used in a test.
class SingleBinding : public BindingInterface
{
private:
    VkDescriptorType              type;            // The descriptor type.
    std::vector<VkDescriptorType> mutableTypesVec; // The types that will be used for each iteration of a test if mutable.

public:
    SingleBinding (VkDescriptorType type_, std::vector<VkDescriptorType> mutableTypes_)
        : type            (type_)
        , mutableTypesVec (std::move(mutableTypes_))
    {
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        const auto kBeginForbidden = begin(kForbiddenMutableTypes);
        const auto kEndForbidden   = end(kForbiddenMutableTypes);

        // For release builds.
        DE_UNREF(kBeginForbidden);
        DE_UNREF(kEndForbidden);

        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
        {
            DE_ASSERT(mutableTypesVec.empty());
        }
        else
        {
            DE_ASSERT(!mutableTypesVec.empty());
            DE_ASSERT(std::none_of(begin(mutableTypesVec), end(mutableTypesVec),
                                   [&kBeginForbidden, &kEndForbidden] (VkDescriptorType t) -> bool {
                                       return std::find(kBeginForbidden, kEndForbidden, t) != kEndForbidden;
                                   }));
        }
    }

    deUint32 maxTypes () const override
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
            return 1u;
        const auto vecSize = mutableTypesVec.size();
        DE_ASSERT(vecSize <= std::numeric_limits<deUint32>::max());
        return static_cast<deUint32>(vecSize);
    }

    VkDescriptorType typeAtIteration (deUint32 iteration) const
    {
        return typesAtIteration(iteration)[0];
    }

    std::vector<VkDescriptorType> usedTypes () const
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
            return std::vector<VkDescriptorType>(1u, type);
        return mutableTypesVec;
    }

    std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
    {
        const auto typesVec = usedTypes();
        return std::vector<VkDescriptorType>(1u, typesVec[static_cast<size_t>(iteration) % typesVec.size()]);
    }
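
    // Illustrative sketch: for a mutable binding, the active type cycles through
    // the mutable type list, one type per iteration.
    //
    //   const SingleBinding binding (VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
    //                                { VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE });
    //   // binding.typeAtIteration(0) == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
    //   // binding.typeAtIteration(1) == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
    //   // binding.typeAtIteration(2) == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE (wraps around)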

    VkDescriptorType mainType () const override
    {
        return type;
    }

    std::vector<VkDescriptorType> mutableTypes () const override
    {
        return mutableTypesVec;
    }

    size_t size () const override
    {
        return size_t{1u};
    }

    bool isArray () const override
    {
        return false;
    }

    bool isUnbounded () const override
    {
        return false;
    }

    de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
    {
        DE_UNREF(iteration);

        static const auto kMandatoryMutableTypeFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
        if (type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
        {
            const auto descFlags = toDescriptorTypeFlags(mutableTypesVec);
            return de::MovePtr<BindingInterface>(new SingleBinding(type, toDescriptorTypeVector(descFlags)));
        }

        // Make sure it's not a forbidden mutable type.
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        DE_ASSERT(std::find(begin(kForbiddenMutableTypes), end(kForbiddenMutableTypes), type) == end(kForbiddenMutableTypes));

        // Convert the binding to mutable using a wider set of descriptor types if possible, including the binding type.
        const auto descFlags = (kMandatoryMutableTypeFlags | toDescriptorTypeFlagBit(type));

        return de::MovePtr<BindingInterface>(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, toDescriptorTypeVector(descFlags)));
    }

    de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
    {
        return de::MovePtr<BindingInterface>(new SingleBinding(typeAtIteration(iteration), std::vector<VkDescriptorType>()));
    }

    std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
    {
        const auto descriptorType = typeAtIteration(iteration);

        std::vector<Resource> resources;
        resources.emplace_back(descriptorType, vkd, device, alloc, qIndex, queue, useAABBs, baseValue);
        return resources;
    }

    std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
    {
        const auto        descriptorType = typeAtIteration(iteration);
        const std::string arraySuffix    = ((static_cast<bool>(arraySize)) ? ((arraySize.get() < 0) ? "[]" : ("[" + de::toString(arraySize.get()) + "]")) : "");
        const std::string layoutAttribs  = "set=" + de::toString(setNum) + ", binding=" + de::toString(bindingNum);
        const std::string bindingSuffix  = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
        const std::string nameSuffix     = bindingSuffix + arraySuffix;
        std::ostringstream declarations;

        declarations << "layout (";

        switch (descriptorType)
        {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            declarations << layoutAttribs << ") uniform sampler sampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            declarations << layoutAttribs << ") uniform usampler2D combinedSampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            declarations << layoutAttribs << ") uniform utexture2D sampledImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            declarations << layoutAttribs << ") uniform uboBlock" << bindingSuffix << " { uint val; } ubo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            declarations << layoutAttribs << ") buffer sboBlock" << bindingSuffix << " { uint val; } ssbo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            declarations << layoutAttribs << ") uniform utextureBuffer uniformTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            declarations << layoutAttribs << ", r32ui) uniform uimageBuffer storageTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            declarations << layoutAttribs << ", r32ui) uniform uimage2D storageImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            declarations << layoutAttribs << ", input_attachment_index=" << inputAttachmentIdx << ") uniform usubpassInput inputAttachment" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
            declarations << layoutAttribs << ") uniform accelerationStructureEXT accelerationStructure" << nameSuffix;
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        declarations << ";\n";

        return declarations.str();
    }
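
    // For example, a storage buffer at set 0, binding 1 with an array size of 4
    // makes the code above produce the following declaration:
    //
    //   layout (set=0, binding=1) buffer sboBlock_0_1 { uint val; } ssbo_0_1[4];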

    std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
    {
        const auto        descriptorType = typeAtIteration(iteration);
        const std::string bindingSuffix  = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);

        std::string indexSuffix;
        if (arrayIndex)
        {
            indexSuffix = de::toString(arrayIndex.get());
            if (usePushConstants)
                indexSuffix += " + pc.zero";
            indexSuffix = "[" + indexSuffix + "]";
        }

        const std::string nameSuffix         = bindingSuffix + indexSuffix;
        const std::string baseValue          = toHex(baseValue_);
        const std::string externalImageValue = toHex(getExternalSampledImageValue());
        const std::string mask               = toHex(getStoredValueMask());

        std::ostringstream checks;

        // Note: all of these depend on an external anyError uint variable.
        switch (descriptorType)
        {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            // Note this depends on an "externalSampledImage" binding.
            checks << "    {\n";
            checks << "        uint readValue = texture(usampler2D(externalSampledImage, sampler" << nameSuffix << "), vec2(0, 0)).r;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << externalImageValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            checks << "    {\n";
            checks << "        uint readValue = texture(combinedSampler" << nameSuffix << ", vec2(0, 0)).r;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            // Note this depends on an "externalSampler" binding.
            checks << "    {\n";
            checks << "        uint readValue = texture(usampler2D(sampledImage" << nameSuffix << ", externalSampler), vec2(0, 0)).r;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = ubo" << nameSuffix << ".val;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = ssbo" << nameSuffix << ".val;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            // Check writes.
            checks << "        ssbo" << nameSuffix << ".val = (readValue | " << mask << ");\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = texelFetch(uniformTexel" << nameSuffix << ", 0).x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = imageLoad(storageTexel" << nameSuffix << ", 0).x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "        readValue |= " << mask << ";\n";
            // Check writes.
            checks << "        imageStore(storageTexel" << nameSuffix << ", 0, uvec4(readValue, 0, 0, 0));\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            checks << "    {\n";
            checks << "        uint readValue = imageLoad(storageImage" << nameSuffix << ", ivec2(0, 0)).x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "        readValue |= " << mask << ";\n";
            // Check writes.
            checks << "        imageStore(storageImage" << nameSuffix << ", ivec2(0, 0), uvec4(readValue, 0, 0, 0));\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            checks << "    {\n";
            checks << "        uint readValue = subpassLoad(inputAttachment" << nameSuffix << ").x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
            checks << "    {\n";
            checks << "        const uint cullMask = 0xFF;\n";
            checks << "        const vec3 origin = vec3(" << getAccelerationStructureOffsetX(baseValue_) << ".0, 0.0, 0.0);\n";
            checks << "        const vec3 direction = vec3(0.0, 0.0, 1.0);\n";
            checks << "        const float tmin = 1.0;\n";
            checks << "        const float tmax = 10.0;\n";
            checks << "        uint candidateFound = 0u;\n";
            checks << "        rayQueryEXT rq;\n";
            checks << "        rayQueryInitializeEXT(rq, accelerationStructure" << nameSuffix << ", gl_RayFlagsNoneEXT, cullMask, origin, tmin, direction, tmax);\n";
            checks << "        while (rayQueryProceedEXT(rq)) {\n";
            checks << "            const uint candidateType = rayQueryGetIntersectionTypeEXT(rq, false);\n";
            checks << "            if (candidateType == gl_RayQueryCandidateIntersectionTriangleEXT || candidateType == gl_RayQueryCandidateIntersectionAABBEXT) {\n";
            checks << "                candidateFound = 1u;\n";
            checks << "            }\n";
            checks << "        }\n";
            checks << "        anyError |= ((candidateFound == 1u) ? 0u : 1u);\n";
            checks << "    }\n";
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        return checks.str();
    }
};
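
// For reference, the block generated above for a uniform buffer at set 0,
// binding 1 on iteration 0, assuming the base value is
// getDescriptorNumericValue(0u, 1u) == 0x5a000100u, looks like this:
//
//     {
//         uint readValue = ubo_0_1.val;
//         debugPrintfEXT("iteration-0_0_1: 0x%xu\n", readValue);
//         anyError |= ((readValue == 0x5a000100u) ? 0u : 1u);
//     }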
1298
1299 // Represents an array of bindings. Individual bindings are stored as SingleBindings because each one of them may take a different
1300 // type in each iteration (i.e. they can all have different descriptor type vectors).
1301 class ArrayBinding : public BindingInterface
1302 {
1303 private:
1304 bool unbounded;
1305 std::vector<SingleBinding> bindings;
1306
1307 public:
ArrayBinding(bool unbounded_,std::vector<SingleBinding> bindings_)1308 ArrayBinding (bool unbounded_, std::vector<SingleBinding> bindings_)
1309 : unbounded (unbounded_)
1310 , bindings (std::move(bindings_))
1311 {
1312 // We need to check all single bindings have the same effective type, even if mutable descriptors have different orders.
1313 DE_ASSERT(!bindings.empty());
1314
1315 std::set<VkDescriptorType> basicTypes;
1316 std::set<DescriptorTypeFlags> bindingTypes;
1317
1318 for (const auto& b : bindings)
1319 {
1320 basicTypes.insert(b.mainType());
1321 bindingTypes.insert(toDescriptorTypeFlags(b.usedTypes()));
1322 }
1323
1324 DE_ASSERT(basicTypes.size() == 1u);
1325 DE_ASSERT(bindingTypes.size() == 1u);
1326
1327 // For release builds.
1328 DE_UNREF(basicTypes);
1329 DE_UNREF(bindingTypes);
1330 }
1331
maxTypes() const1332 deUint32 maxTypes () const override
1333 {
1334 // Each binding may have the same effective type but a different number of iterations due to repeated types.
1335 std::vector<size_t> bindingSizes;
1336 bindingSizes.reserve(bindings.size());
1337
1338 std::transform(begin(bindings), end(bindings), std::back_inserter(bindingSizes),
1339 [] (const SingleBinding& b) { return b.usedTypes().size(); });
1340
1341 const auto maxElement = std::max_element(begin(bindingSizes), end(bindingSizes));
1342 DE_ASSERT(maxElement != end(bindingSizes));
1343 DE_ASSERT(*maxElement <= std::numeric_limits<deUint32>::max());
1344 return static_cast<deUint32>(*maxElement);
1345 }
1346
typesAtIteration(deUint32 iteration) const1347 std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
1348 {
1349 std::vector<VkDescriptorType> result;
1350 result.reserve(bindings.size());
1351
1352 for (const auto& b : bindings)
1353 result.push_back(b.typeAtIteration(iteration));
1354
1355 return result;
1356 }
1357
mainType() const1358 VkDescriptorType mainType () const override
1359 {
1360 return bindings[0].mainType();
1361 }
1362
mutableTypes() const1363 std::vector<VkDescriptorType> mutableTypes () const override
1364 {
1365 return bindings[0].mutableTypes();
1366 }
1367
size() const1368 size_t size () const override
1369 {
1370 return bindings.size();
1371 }
1372
isArray() const1373 bool isArray () const override
1374 {
1375 return true;
1376 }
1377
isUnbounded() const1378 bool isUnbounded () const override
1379 {
1380 return unbounded;
1381 }
1382
toMutable(deUint32 iteration) const1383 de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
1384 {
1385 // Replicate the first binding once converted, as all are equivalent.
1386 const auto firstBindingPtr = bindings[0].toMutable(iteration);
1387 const auto firstBinding = *dynamic_cast<SingleBinding*>(firstBindingPtr.get());
1388 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1389
1390 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1391 }
1392
toNonMutable(deUint32 iteration) const1393 de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
1394 {
1395 // Make sure this binding can be converted to nonmutable for a given iteration.
1396 DE_ASSERT(!needsAliasing(iteration));
1397
1398 // We could use each SingleBinding's toNonMutable(), but this is the same.
1399 const auto descType = bindings[0].typeAtIteration(iteration);
1400 const SingleBinding firstBinding (descType, std::vector<VkDescriptorType>());
1401 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1402
1403 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1404 }
1405
1406 std::vector<Resource> createResources (
1407 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
1408 deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
1409 {
1410 std::vector<Resource> resources;
1411 const auto numBindings = static_cast<deUint32>(bindings.size());
1412
1413 for (deUint32 i = 0u; i < numBindings; ++i)
1414 {
1415 auto resourceVec = bindings[i].createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, baseValue + i);
1416 resources.emplace_back(std::move(resourceVec[0]));
1417 }
1418
1419 return resources;
1420 }
1421
1422 // We will ignore the array size parameter.
1423 std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
1424 {
1425 const auto descriptorCount = bindings.size();
1426 const auto arraySizeVal = (isUnbounded() ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(descriptorCount)));
1427
1428 DE_UNREF(arraySize);
1429 DE_ASSERT(descriptorCount < static_cast<size_t>(std::numeric_limits<deInt32>::max()));
1430
1431 // Maybe a single declaration is enough.
1432 if (!needsAliasing(iteration))
1433 return bindings[0].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1434
1435 // Aliasing needed. Avoid reusing types.
1436 const auto descriptorTypes = typesAtIteration(iteration);
1437 std::set<VkDescriptorType> usedTypes;
1438 std::ostringstream declarations;
1439
1440 for (size_t descriptorIdx = 0u; descriptorIdx < descriptorCount; ++descriptorIdx)
1441 {
1442 const auto& descriptorType = descriptorTypes[descriptorIdx];
1443 if (usedTypes.count(descriptorType) > 0)
1444 continue;
1445
1446 usedTypes.insert(descriptorType);
1447 declarations << bindings[descriptorIdx].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1448 }
1449
1450 return declarations.str();
1451 }
1452
1453 std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1454 {
1455 DE_ASSERT(!arrayIndex);
1456 DE_UNREF(arrayIndex); // For release builds.
1457
1458 std::ostringstream checks;
1459 const auto numDescriptors = static_cast<deUint32>(bindings.size());
1460
1461 for (deUint32 descriptorIdx = 0u; descriptorIdx < numDescriptors; ++descriptorIdx)
1462 {
1463 const auto& binding = bindings[descriptorIdx];
1464 checks << binding.glslCheckStatements(iteration, setNum, bindingNum, baseValue_ + descriptorIdx, tcu::just(descriptorIdx), usePushConstants);
1465 }
1466
1467 return checks.str();
1468 }
1469 };
1470
1471 class DescriptorSet;
1472
1473 using DescriptorSetPtr = de::SharedPtr<DescriptorSet>;
1474
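// Represents a full descriptor set under test: an ordered collection of bindings, plus helpers to create the
// matching descriptor set layout, descriptor pool, backing resources and descriptor updates for each iteration.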
1475 class DescriptorSet
1476 {
1477 public:
1478 using BindingInterfacePtr = de::MovePtr<BindingInterface>;
1479 using BindingPtrVector = std::vector<BindingInterfacePtr>;
1480
1481 private:
1482 BindingPtrVector bindings;
1483
1484 public:
1485 explicit DescriptorSet (BindingPtrVector& bindings_)
1486 : bindings(std::move(bindings_))
1487 {
1488 DE_ASSERT(!bindings.empty());
1489 }
1490
1491 size_t numBindings () const
1492 {
1493 return bindings.size();
1494 }
1495
1496 const BindingInterface* getBinding (size_t bindingIdx) const
1497 {
1498 return bindings.at(bindingIdx).get();
1499 }
1500
1501 // Maximum number of descriptor types used by any binding in the set.
1502 deUint32 maxTypes () const
1503 {
1504 std::vector<deUint32> maxSizes;
1505 maxSizes.reserve(bindings.size());
1506
1507 std::transform(begin(bindings), end(bindings), std::back_inserter(maxSizes),
1508 [] (const BindingInterfacePtr& b) { return b->maxTypes(); });
1509
1510 const auto maxElement = std::max_element(begin(maxSizes), end(maxSizes));
1511 DE_ASSERT(maxElement != end(maxSizes));
1512 return *maxElement;
1513 }
1514
1515 // Create another descriptor set that can be the source for copies when setting descriptor values.
1516 DescriptorSetPtr genSourceSet (SourceSetStrategy strategy, deUint32 iteration) const
1517 {
1518 BindingPtrVector newBindings;
1519 for (const auto& b : bindings)
1520 {
1521 if (strategy == SourceSetStrategy::MUTABLE)
1522 newBindings.push_back(b->toMutable(iteration));
1523 else
1524 newBindings.push_back(b->toNonMutable(iteration));
1525 }
1526
1527 return DescriptorSetPtr(new DescriptorSet(newBindings));
1528 }
1529
1530 // Makes a descriptor pool that can be used when allocating descriptors for this set.
1531 Move<VkDescriptorPool> makeDescriptorPool (const DeviceInterface& vkd, VkDevice device, PoolMutableStrategy strategy, VkDescriptorPoolCreateFlags flags) const
1532 {
1533 std::vector<VkDescriptorPoolSize> poolSizes;
1534 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1535 std::vector<VkMutableDescriptorTypeListEXT> mutableTypeLists;
1536
1537 // Make vector element addresses stable.
1538 const auto bindingCount = numBindings();
1539 poolSizes.reserve(bindingCount);
1540 mutableTypesVec.reserve(bindingCount);
1541 mutableTypeLists.reserve(bindingCount);
1542
1543 for (const auto& b : bindings)
1544 {
1545 const auto mainType = b->mainType();
1546 const VkDescriptorPoolSize poolSize = {
1547 mainType,
1548 static_cast<deUint32>(b->size()),
1549 };
1550 poolSizes.push_back(poolSize);
1551
1552 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1553 {
1554 if (mainType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
1555 {
1556 if (strategy == PoolMutableStrategy::KEEP_TYPES)
1557 {
1558 mutableTypesVec.emplace_back(b->mutableTypes());
1559 }
1560 else
1561 {
1562 // Expand the type list with the mandatory types.
1563 static const auto mandatoryTypesFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
1564 const auto bindingTypes = toDescriptorTypeVector(mandatoryTypesFlags | toDescriptorTypeFlags(b->mutableTypes()));
1565
1566 mutableTypesVec.emplace_back(bindingTypes);
1567 }
1568
1569 const auto& lastVec = mutableTypesVec.back();
1570 const VkMutableDescriptorTypeListEXT typeList = { static_cast<deUint32>(lastVec.size()), de::dataOrNull(lastVec) };
1571 mutableTypeLists.push_back(typeList);
1572 }
1573 else
1574 {
1575 const VkMutableDescriptorTypeListEXT typeList = { 0u, nullptr };
1576 mutableTypeLists.push_back(typeList);
1577 }
1578 }
1579 else if (strategy == PoolMutableStrategy::NO_TYPES)
1580 ; // Do nothing, we will not use any type list.
1581 else
1582 DE_ASSERT(false);
1583 }
1584
1585 VkDescriptorPoolCreateInfo poolCreateInfo = initVulkanStructure();
1586
1587 poolCreateInfo.maxSets = 1u;
1588 poolCreateInfo.flags = flags;
1589 poolCreateInfo.poolSizeCount = static_cast<deUint32>(poolSizes.size());
1590 poolCreateInfo.pPoolSizes = de::dataOrNull(poolSizes);
1591
1592 VkMutableDescriptorTypeCreateInfoEXT mutableInfo = initVulkanStructure();
1593
1594 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1595 {
1596 mutableInfo.mutableDescriptorTypeListCount = static_cast<deUint32>(mutableTypeLists.size());
1597 mutableInfo.pMutableDescriptorTypeLists = de::dataOrNull(mutableTypeLists);
1598 poolCreateInfo.pNext = &mutableInfo;
1599 }
1600
1601 return createDescriptorPool(vkd, device, &poolCreateInfo);
1602 }
1603
1604 private:
1605 // Building the descriptor set layout create info structure is cumbersome, so we'll reuse the same procedure to check support
1606 // and create the layout. This structure contains the result. "supported" is created as an enum to avoid the Move<> to bool
1607 // conversion cast in the constructors.
1608 struct DescriptorSetLayoutResult
1609 {
1610 enum class LayoutSupported { NO = 0, YES };
1611
1612 LayoutSupported supported;
1613 Move<VkDescriptorSetLayout> layout;
1614
1615 explicit DescriptorSetLayoutResult (Move<VkDescriptorSetLayout>&& layout_)
1616 : supported (LayoutSupported::YES)
1617 , layout (layout_)
1618 {}
1619
1620 explicit DescriptorSetLayoutResult (LayoutSupported supported_)
1621 : supported (supported_)
1622 , layout ()
1623 {}
1624 };
1625
1626 DescriptorSetLayoutResult makeOrCheckDescriptorSetLayout (bool checkOnly, const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1627 {
1628 const auto numIterations = maxTypes();
1629 std::vector<VkDescriptorSetLayoutBinding> bindingsVec;
1630 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1631 std::vector<VkMutableDescriptorTypeListEXT> mutableTypeLists;
1632
1633 // Make vector element addresses stable.
1634 const auto bindingCount = numBindings();
1635 bindingsVec.reserve(bindingCount);
1636 mutableTypesVec.reserve(bindingCount);
1637 mutableTypeLists.reserve(bindingCount);
1638
1639 for (size_t bindingIdx = 0u; bindingIdx < bindings.size(); ++bindingIdx)
1640 {
1641 const auto& binding = bindings[bindingIdx];
1642 const auto mainType = binding->mainType();
1643
1644 const VkDescriptorSetLayoutBinding layoutBinding = {
1645 static_cast<deUint32>(bindingIdx), // deUint32 binding;
1646 mainType, // VkDescriptorType descriptorType;
1647 static_cast<deUint32>(binding->size()), // deUint32 descriptorCount;
1648 stageFlags, // VkShaderStageFlags stageFlags;
1649 nullptr, // const VkSampler* pImmutableSamplers;
1650 };
1651 bindingsVec.push_back(layoutBinding);
1652
1653 // This list may be empty for non-mutable types, which is fine.
1654 mutableTypesVec.push_back(binding->mutableTypes());
1655 const auto& lastVec = mutableTypesVec.back();
1656
1657 const VkMutableDescriptorTypeListEXT typeList = {
1658 static_cast<deUint32>(lastVec.size()), // deUint32 descriptorTypeCount;
1659 de::dataOrNull(lastVec), // const VkDescriptorType* pDescriptorTypes;
1660 };
1661 mutableTypeLists.push_back(typeList);
1662 }
1663
1664 // Make sure to include the variable descriptor count and/or update-after-bind binding flags.
1665 const bool updateAfterBind = ((createFlags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) != 0u);
1666 bool lastIsUnbounded = false;
1667 bool aliasingNeeded = false;
1668 std::vector<bool> bindingNeedsAliasing(bindings.size(), false);
1669
1670 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1671 {
1672 if (bindingIdx < bindings.size() - 1)
1673 DE_ASSERT(!bindings[bindingIdx]->isUnbounded());
1674 else
1675 lastIsUnbounded = bindings[bindingIdx]->isUnbounded();
1676
1677 if (bindings[bindingIdx]->needsAliasingUpTo(numIterations))
1678 {
1679 bindingNeedsAliasing[bindingIdx] = true;
1680 aliasingNeeded = true;
1681 }
1682 }
1683
1684 using FlagsCreateInfoPtr = de::MovePtr<VkDescriptorSetLayoutBindingFlagsCreateInfo>;
1685 using BindingFlagsVecPtr = de::MovePtr<std::vector<VkDescriptorBindingFlags>>;
1686
1687 FlagsCreateInfoPtr flagsCreateInfo;
1688 BindingFlagsVecPtr bindingFlagsVec;
1689
1690 if (updateAfterBind || lastIsUnbounded || aliasingNeeded)
1691 {
1692 flagsCreateInfo = FlagsCreateInfoPtr(new VkDescriptorSetLayoutBindingFlagsCreateInfo);
1693 *flagsCreateInfo = initVulkanStructure();
1694
1695 bindingFlagsVec = BindingFlagsVecPtr(new std::vector<VkDescriptorBindingFlags>(bindingsVec.size(), (updateAfterBind ? VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT : 0)));
1696 if (lastIsUnbounded)
1697 bindingFlagsVec->back() |= VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT;
1698
1699 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1700 {
1701 if (bindingNeedsAliasing[bindingIdx])
1702 bindingFlagsVec->at(bindingIdx) |= VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
1703 }
1704
1705 flagsCreateInfo->bindingCount = static_cast<deUint32>(bindingFlagsVec->size());
1706 flagsCreateInfo->pBindingFlags = de::dataOrNull(*bindingFlagsVec);
1707 }
1708
1709 const VkMutableDescriptorTypeCreateInfoEXT createInfoMutable = {
1710 VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
1711 flagsCreateInfo.get(),
1712 static_cast<deUint32>(mutableTypeLists.size()),
1713 de::dataOrNull(mutableTypeLists),
1714 };
1715
1716 const VkDescriptorSetLayoutCreateInfo layoutCreateInfo = {
1717 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, // VkStructureType sType;
1718 &createInfoMutable, // const void* pNext;
1719 createFlags, // VkDescriptorSetLayoutCreateFlags flags;
1720 static_cast<deUint32>(bindingsVec.size()), // deUint32 bindingCount;
1721 de::dataOrNull(bindingsVec), // const VkDescriptorSetLayoutBinding* pBindings;
1722 };
1723
1724 if (checkOnly)
1725 {
1726 VkDescriptorSetLayoutSupport support = initVulkanStructure();
1727 vkd.getDescriptorSetLayoutSupport(device, &layoutCreateInfo, &support);
1728 DescriptorSetLayoutResult result((support.supported == VK_TRUE) ? DescriptorSetLayoutResult::LayoutSupported::YES
1729 : DescriptorSetLayoutResult::LayoutSupported::NO);
1730 return result;
1731 }
1732 else
1733 {
1734 DescriptorSetLayoutResult result(createDescriptorSetLayout(vkd, device, &layoutCreateInfo));
1735 return result;
1736 }
1737 }
1738
1739 public:
1740 Move<VkDescriptorSetLayout> makeDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1741 {
1742 return makeOrCheckDescriptorSetLayout(false /*checkOnly*/, vkd, device, stageFlags, createFlags).layout;
1743 }
1744
1745 bool checkDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1746 {
1747 return (makeOrCheckDescriptorSetLayout(true /*checkOnly*/, vkd, device, stageFlags, createFlags).supported == DescriptorSetLayoutResult::LayoutSupported::YES);
1748 }
1749
1750 size_t numDescriptors () const
1751 {
1752 size_t total = 0;
1753 for (const auto& b : bindings)
1754 total += b->size();
1755 return total;
1756 }
1757
1758 std::vector<Resource> createResources (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 iteration, bool useAABBs) const
1759 {
1760 // Create resources for each binding.
1761 std::vector<Resource> result;
1762 result.reserve(numDescriptors());
1763
1764 const auto bindingsCount = static_cast<deUint32>(bindings.size());
1765
1766 for (deUint32 bindingIdx = 0u; bindingIdx < bindingsCount; ++bindingIdx)
1767 {
1768 const auto& binding = bindings[bindingIdx];
1769 auto bindingResources = binding->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, getDescriptorNumericValue(iteration, bindingIdx));
1770
1771 for (auto& resource : bindingResources)
1772 result.emplace_back(std::move(resource));
1773 }
1774
1775 return result;
1776 }
1777
1778 // Updates a descriptor set with the given resources. Note: the set must have been created with a layout that's compatible with this object.
1779 void updateDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet set, deUint32 iteration, const std::vector<Resource>& resources) const
1780 {
1781 // Make sure the number of resources is correct.
1782 const auto numResources = resources.size();
1783 DE_ASSERT(numDescriptors() == numResources);
1784
1785 std::vector<VkWriteDescriptorSet> descriptorWrites;
1786 descriptorWrites.reserve(numResources);
1787
1788 std::vector<VkDescriptorImageInfo> imageInfoVec;
1789 std::vector<VkDescriptorBufferInfo> bufferInfoVec;
1790 std::vector<VkBufferView> bufferViewVec;
1791 std::vector<VkWriteDescriptorSetAccelerationStructureKHR> asWriteVec;
1792 size_t resourceIdx = 0;
1793
1794 // We'll be storing pointers to elements of these vectors as we're appending elements, so we need their addresses to be stable.
1795 imageInfoVec.reserve(numResources);
1796 bufferInfoVec.reserve(numResources);
1797 bufferViewVec.reserve(numResources);
1798 asWriteVec.reserve(numResources);
1799
1800 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1801 {
1802 const auto& binding = bindings[bindingIdx];
1803 const auto descriptorTypes = binding->typesAtIteration(iteration);
1804
1805 for (size_t descriptorIdx = 0; descriptorIdx < binding->size(); ++descriptorIdx)
1806 {
1807 // Make sure the resource type matches the expected value.
1808 const auto& resource = resources[resourceIdx];
1809 const auto& descriptorType = descriptorTypes[descriptorIdx];
1810
1811 DE_ASSERT(resource.descriptorType == descriptorType);
1812
1813 // Obtain the descriptor write info for the resource.
1814 const auto writeInfo = resource.makeWriteInfo();
1815
1816 switch (writeInfo.writeType)
1817 {
1818 case WriteType::IMAGE_INFO: imageInfoVec.push_back(writeInfo.imageInfo); break;
1819 case WriteType::BUFFER_INFO: bufferInfoVec.push_back(writeInfo.bufferInfo); break;
1820 case WriteType::BUFFER_VIEW: bufferViewVec.push_back(writeInfo.bufferView); break;
1821 case WriteType::ACCELERATION_STRUCTURE_INFO: asWriteVec.push_back(writeInfo.asInfo); break;
1822 default: DE_ASSERT(false); break;
1823 }
1824
1825 // Add a new VkWriteDescriptorSet struct or extend the last one with more info. This helps us exercise different implementation code paths.
1826 bool extended = false;
1827
1828 if (!descriptorWrites.empty() && descriptorIdx > 0)
1829 {
1830 auto& last = descriptorWrites.back();
1831 if (last.dstSet == set /* this should always be true */ &&
1832 last.dstBinding == bindingIdx && (last.dstArrayElement + last.descriptorCount) == descriptorIdx &&
1833 last.descriptorType == descriptorType &&
1834 writeInfo.writeType != WriteType::ACCELERATION_STRUCTURE_INFO)
1835 {
1836 // The new write should be in the same vector (imageInfoVec, bufferInfoVec or bufferViewVec) so increasing the count works.
1837 ++last.descriptorCount;
1838 extended = true;
1839 }
1840 }
1841
1842 if (!extended)
1843 {
1844 const VkWriteDescriptorSet write = {
1845 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1846 ((writeInfo.writeType == WriteType::ACCELERATION_STRUCTURE_INFO) ? &asWriteVec.back() : nullptr),
1847 set,
1848 static_cast<deUint32>(bindingIdx),
1849 static_cast<deUint32>(descriptorIdx),
1850 1u,
1851 descriptorType,
1852 (writeInfo.writeType == WriteType::IMAGE_INFO ? &imageInfoVec.back() : nullptr),
1853 (writeInfo.writeType == WriteType::BUFFER_INFO ? &bufferInfoVec.back() : nullptr),
1854 (writeInfo.writeType == WriteType::BUFFER_VIEW ? &bufferViewVec.back() : nullptr),
1855 };
1856 descriptorWrites.push_back(write);
1857 }
1858
1859 ++resourceIdx;
1860 }
1861 }
1862
1863 // Finally, update descriptor set with all the writes.
1864 vkd.updateDescriptorSets(device, static_cast<deUint32>(descriptorWrites.size()), de::dataOrNull(descriptorWrites), 0u, nullptr);
1865 }
1866
1867 // Copies between descriptor sets. They must be compatible and related to this set.
1868 void copyDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet srcSet, VkDescriptorSet dstSet) const
1869 {
1870 std::vector<VkCopyDescriptorSet> copies;
1871
1872 for (size_t bindingIdx = 0; bindingIdx < numBindings(); ++bindingIdx)
1873 {
1874 const auto& binding = getBinding(bindingIdx);
1875 const auto bindingNumber = static_cast<deUint32>(bindingIdx);
1876 const auto descriptorCount = static_cast<deUint32>(binding->size());
1877
1878 const VkCopyDescriptorSet copy =
1879 {
1880 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
1881 nullptr,
1882 // set, binding, array element.
1883 srcSet, bindingNumber, 0u,
1884 dstSet, bindingNumber, 0u,
1885 descriptorCount,
1886 };
1887
1888 copies.push_back(copy);
1889 }
1890
1891 vkd.updateDescriptorSets(device, 0u, nullptr, static_cast<deUint32>(copies.size()), de::dataOrNull(copies));
1892 }
1893
1894 // Does any binding in the set need aliasing in a given iteration?
1895 bool needsAliasing (deUint32 iteration) const
1896 {
1897 std::vector<bool> aliasingNeededFlags;
1898 aliasingNeededFlags.reserve(bindings.size());
1899
1900 std::transform(begin(bindings), end(bindings), std::back_inserter(aliasingNeededFlags),
1901 [iteration] (const BindingInterfacePtr& b) { return b->needsAliasing(iteration); });
1902 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1903 }
1904
1905 // Does any binding in the set need aliasing in any iteration?
1906 bool needsAnyAliasing () const
1907 {
1908 const auto numIterations = maxTypes();
1909 std::vector<bool> aliasingNeededFlags (numIterations, false);
1910
1911 for (deUint32 iteration = 0; iteration < numIterations; ++iteration)
1912 aliasingNeededFlags[iteration] = needsAliasing(iteration);
1913
1914 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1915 }
1916
1917 // Is the last binding an unbounded array?
1918 bool lastBindingIsUnbounded () const
1919 {
1920 if (bindings.empty())
1921 return false;
1922 return bindings.back()->isUnbounded();
1923 }
1924
1925 // Get the variable descriptor count for the last binding if any.
1926 tcu::Maybe<deUint32> getVariableDescriptorCount () const
1927 {
1928 if (lastBindingIsUnbounded())
1929 return tcu::just(static_cast<deUint32>(bindings.back()->size()));
1930 return tcu::Nothing;
1931 }
1932
1933 // Check if the set contains a descriptor of the given type at the given iteration.
1934 bool containsTypeAtIteration (VkDescriptorType descriptorType, deUint32 iteration) const
1935 {
1936 return std::any_of(begin(bindings), end(bindings),
1937 [descriptorType, iteration] (const BindingInterfacePtr& b) {
1938 const auto types = b->typesAtIteration(iteration);
1939 return de::contains(begin(types), end(types), descriptorType);
1940 });
1941 }
1942
1943 // Is any binding an array?
1944 bool hasArrays () const
1945 {
1946 return std::any_of(begin(bindings), end(bindings), [] (const BindingInterfacePtr& b) { return b->isArray(); });
1947 }
1948 };
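// The sketch below illustrates the intended DescriptorSet call sequence at a glance; it is not executed anywhere.
// Names such as vkd, device, alloc, qIndex, queue and vkSet are stand-ins for the usual context objects obtained
// elsewhere in this file.
//
//	DescriptorSetPtr ds = ...; // Built from SingleBinding/ArrayBinding objects.
//	const auto layout    = ds->makeDescriptorSetLayout(vkd, device, VK_SHADER_STAGE_COMPUTE_BIT, 0u);
//	const auto pool      = ds->makeDescriptorPool(vkd, device, PoolMutableStrategy::KEEP_TYPES, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
//	const auto resources = ds->createResources(vkd, device, alloc, qIndex, queue, 0u /*iteration*/, false /*useAABBs*/);
//	ds->updateDescriptorSet(vkd, device, vkSet, 0u /*iteration*/, resources); // vkSet: a VkDescriptorSet allocated using pool and layout.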
1949
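// Whether descriptor values are set by writing to the destination set directly or by copying them from a source set.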
1950 enum class UpdateType
1951 {
1952 WRITE = 0,
1953 COPY,
1954 };
1955
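// Kind of source descriptor set used when updating with copies: a regular set, a host-only set or no source set at all.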
1956 enum class SourceSetType
1957 {
1958 NORMAL = 0,
1959 HOST_ONLY,
1960 NO_SOURCE,
1961 };
1962
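// Moment at which the descriptor set contents are updated: normally (before binding) or after the set has been bound.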
1963 enum class UpdateMoment
1964 {
1965 NORMAL = 0,
1966 UPDATE_AFTER_BIND,
1967 };
1968
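// Shader stage from which the descriptor contents will be verified.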
1969 enum class TestingStage
1970 {
1971 COMPUTE = 0,
1972 VERTEX,
1973 TESS_EVAL,
1974 TESS_CONTROL,
1975 GEOMETRY,
1976 FRAGMENT,
1977 RAY_GEN,
1978 INTERSECTION,
1979 ANY_HIT,
1980 CLOSEST_HIT,
1981 MISS,
1982 CALLABLE,
1983 };
1984
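// How array bindings are indexed from the shaders: with constant indices, with push constant values, or no arrays at all.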
1985 enum class ArrayAccessType
1986 {
1987 CONSTANT = 0,
1988 PUSH_CONSTANT,
1989 NO_ARRAY,
1990 };
1991
1992 // Are we testing a ray tracing pipeline stage?
1993 bool isRayTracingStage (TestingStage stage)
1994 {
1995 switch (stage)
1996 {
1997 case TestingStage::RAY_GEN:
1998 case TestingStage::INTERSECTION:
1999 case TestingStage::ANY_HIT:
2000 case TestingStage::CLOSEST_HIT:
2001 case TestingStage::MISS:
2002 case TestingStage::CALLABLE:
2003 return true;
2004 default:
2005 break;
2006 }
2007
2008 return false;
2009 }
2010
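// Parameters describing a single test case: the descriptor set under test plus one value for each test dimension above.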
2011 struct TestParams
2012 {
2013 DescriptorSetPtr descriptorSet;
2014 UpdateType updateType;
2015 SourceSetStrategy sourceSetStrategy;
2016 SourceSetType sourceSetType;
2017 PoolMutableStrategy poolMutableStrategy;
2018 UpdateMoment updateMoment;
2019 ArrayAccessType arrayAccessType;
2020 TestingStage testingStage;
2021
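// Shader stage flags matching the testing stage.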
2022 VkShaderStageFlags getStageFlags () const
2023 {
2024 VkShaderStageFlags flags = 0u;
2025
2026 switch (testingStage)
2027 {
2028 case TestingStage::COMPUTE: flags |= VK_SHADER_STAGE_COMPUTE_BIT; break;
2029 case TestingStage::VERTEX: flags |= VK_SHADER_STAGE_VERTEX_BIT; break;
2030 case TestingStage::TESS_EVAL: flags |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; break;
2031 case TestingStage::TESS_CONTROL: flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; break;
2032 case TestingStage::GEOMETRY: flags |= VK_SHADER_STAGE_GEOMETRY_BIT; break;
2033 case TestingStage::FRAGMENT: flags |= VK_SHADER_STAGE_FRAGMENT_BIT; break;
2034 case TestingStage::RAY_GEN: flags |= VK_SHADER_STAGE_RAYGEN_BIT_KHR; break;
2035 case TestingStage::INTERSECTION: flags |= VK_SHADER_STAGE_INTERSECTION_BIT_KHR; break;
2036 case TestingStage::ANY_HIT: flags |= VK_SHADER_STAGE_ANY_HIT_BIT_KHR; break;
2037 case TestingStage::CLOSEST_HIT: flags |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR; break;
2038 case TestingStage::MISS: flags |= VK_SHADER_STAGE_MISS_BIT_KHR; break;
2039 case TestingStage::CALLABLE: flags |= VK_SHADER_STAGE_CALLABLE_BIT_KHR; break;
2040 default:
2041 DE_ASSERT(false);
2042 break;
2043 }
2044
2045 return flags;
2046 }
2047
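// Pipeline stage in which the tested shader writes its results, for synchronization purposes.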
2048 VkPipelineStageFlags getPipelineWriteStage () const
2049 {
2050 VkPipelineStageFlags flags = 0u;
2051
2052 switch (testingStage)
2053 {
2054 case TestingStage::COMPUTE: flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break;
2055 case TestingStage::VERTEX: flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; break;
2056 case TestingStage::TESS_EVAL: flags |= VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT; break;
2057 case TestingStage::TESS_CONTROL: flags |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT; break;
2058 case TestingStage::GEOMETRY: flags |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT; break;
2059 case TestingStage::FRAGMENT: flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break;
2060 case TestingStage::RAY_GEN: // fallthrough
2061 case TestingStage::INTERSECTION: // fallthrough
2062 case TestingStage::ANY_HIT: // fallthrough
2063 case TestingStage::CLOSEST_HIT: // fallthrough
2064 case TestingStage::MISS: // fallthrough
2065 case TestingStage::CALLABLE: flags |= VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR; break;
2066 default:
2067 DE_ASSERT(false);
2068 break;
2069 }
2070
2071 return flags;
2072 }
2073
2074 private:
2075 VkDescriptorSetLayoutCreateFlags getLayoutCreateFlags (bool isSourceSet) const
2076 {
2077 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2078 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2079
2080 VkDescriptorSetLayoutCreateFlags createFlags = 0u;
2081
2082 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2083 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT;
2084
2085 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2086 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT;
2087
2088 return createFlags;
2089 }
2090
2091 public:
2092 VkDescriptorSetLayoutCreateFlags getSrcLayoutCreateFlags () const
2093 {
2094 return getLayoutCreateFlags(true);
2095 }
2096
2097 VkDescriptorSetLayoutCreateFlags getDstLayoutCreateFlags () const
2098 {
2099 return getLayoutCreateFlags(false);
2100 }
2101
2102 private:
2103 VkDescriptorPoolCreateFlags getPoolCreateFlags (bool isSourceSet) const
2104 {
2105 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2106 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2107
2108 VkDescriptorPoolCreateFlags poolCreateFlags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
2109
2110 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2111 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT;
2112
2113 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2114 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT;
2115
2116 return poolCreateFlags;
2117 }
2118
2119 public:
2120 VkDescriptorPoolCreateFlags getSrcPoolCreateFlags () const
2121 {
2122 return getPoolCreateFlags(true);
2123 }
2124
2125 VkDescriptorPoolCreateFlags getDstPoolCreateFlags () const
2126 {
2127 return getPoolCreateFlags(false);
2128 }
2129
2130 VkPipelineBindPoint getBindPoint () const
2131 {
2132 if (testingStage == TestingStage::COMPUTE)
2133 return VK_PIPELINE_BIND_POINT_COMPUTE;
2134 if (isRayTracingStage(testingStage))
2135 return VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
2136 return VK_PIPELINE_BIND_POINT_GRAPHICS;
2137 }
2138 };
2139
2140 class MutableTypesTest : public TestCase
2141 {
2142 public:
2143 MutableTypesTest (tcu::TestContext& testCtx, const std::string& name, const TestParams& params)
2144 : TestCase(testCtx, name)
2145 , m_params(params)
2146 {}
2147
2148 ~MutableTypesTest () override = default;
2149
2150 void initPrograms (vk::SourceCollections& programCollection) const override;
2151 TestInstance* createInstance (Context& context) const override;
2152 void checkSupport (Context& context) const override;
2153
2154 private:
2155 TestParams m_params;
2156 };
2157
2158 class MutableTypesInstance : public TestInstance
2159 {
2160 public:
2161 MutableTypesInstance (Context& context, const TestParams& params)
2162 : TestInstance (context)
2163 , m_params (params)
2164 {}
2165
2166 ~MutableTypesInstance () override = default;
2167
2168 tcu::TestStatus iterate () override;
2169
2170 private:
2171 TestParams m_params;
2172 };
2173
2174 // Check if a descriptor set contains a given descriptor type in any iteration up to maxTypes().
2175 bool containsAnyDescriptorType (const DescriptorSet& descriptorSet, VkDescriptorType descriptorType)
2176 {
2177 const auto numIterations = descriptorSet.maxTypes();
2178
2179 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2180 {
2181 if (descriptorSet.containsTypeAtIteration(descriptorType, iter))
2182 return true;
2183 }
2184
2185 return false;
2186 }
2187
2188 // Check if testing this descriptor set needs an external image (for sampler descriptors).
2189 bool needsExternalImage (const DescriptorSet& descriptorSet)
2190 {
2191 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLER);
2192 }
2193
2194 // Check if testing this descriptor set needs an external sampler (for sampled images).
2195 bool needsExternalSampler (const DescriptorSet& descriptorSet)
2196 {
2197 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
2198 }
2199
2200 // Check if this descriptor set contains input attachments.
2201 bool usesInputAttachments (const DescriptorSet& descriptorSet)
2202 {
2203 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2204 }
2205
2206 // Check if this descriptor set contains acceleration structures.
2207 bool usesAccelerationStructures (const DescriptorSet& descriptorSet)
2208 {
2209 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
2210 }
2211
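// Name used to store the verification shader for a given iteration in the program collection.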
2212 std::string shaderName (deUint32 iteration)
2213 {
2214 return ("iteration-" + de::toString(iteration));
2215 }
2216
2217 void MutableTypesTest::initPrograms (vk::SourceCollections& programCollection) const
2218 {
2219 const bool usePushConstants = (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT);
2220 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
2221 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
2222 const bool rayQueries = usesAccelerationStructures(*m_params.descriptorSet);
2223 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2224 const auto numIterations = m_params.descriptorSet->maxTypes();
2225 const auto numBindings = m_params.descriptorSet->numBindings();
2226 const vk::ShaderBuildOptions rtBuildOptions (programCollection.usedVulkanVersion, vk::SPIRV_VERSION_1_4, 0u, true);
2227
2228 // Extra set and bindings for external resources.
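// Note descriptor set 0 contains the bindings under test, while set 1 below holds auxiliary resources: the output
// buffer used for verification and, when needed, an external sampled image and sampler.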
2229 std::ostringstream extraSet;
2230 deUint32 extraBindings = 0u;
2231
2232 extraSet << "layout (set=1, binding=" << extraBindings++ << ") buffer OutputBufferBlock { uint value[" << numIterations << "]; } outputBuffer;\n";
2233 if (useExternalImage)
2234 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform utexture2D externalSampledImage;\n";
2235 if (useExternalSampler)
2236 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform sampler externalSampler;\n";
2237 // The extra binding below will be declared in the "passthrough" ray generation shader.
2238 #if 0
2239 if (rayTracing)
2240 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform accelerationStructureEXT externalAS;\n";
2241 #endif
2242
2243 // Common vertex preamble.
2244 std::ostringstream vertexPreamble;
2245 vertexPreamble
2246 << "vec2 vertexPositions[3] = vec2[](\n"
2247 << " vec2(0.0, -0.5),\n"
2248 << " vec2(0.5, 0.5),\n"
2249 << " vec2(-0.5, 0.5)\n"
2250 << ");\n"
2251 ;
2252
2253 // Vertex shader body common statements.
2254 std::ostringstream vertexBodyCommon;
2255 vertexBodyCommon << " gl_Position = vec4(vertexPositions[gl_VertexIndex], 0.0, 1.0);\n";
2256
2257 // Common tessellation control preamble.
2258 std::ostringstream tescPreamble;
2259 tescPreamble
2260 << "layout (vertices=3) out;\n"
2261 << "in gl_PerVertex\n"
2262 << "{\n"
2263 << " vec4 gl_Position;\n"
2264 << "} gl_in[gl_MaxPatchVertices];\n"
2265 << "out gl_PerVertex\n"
2266 << "{\n"
2267 << " vec4 gl_Position;\n"
2268 << "} gl_out[];\n"
2269 ;
2270
2271 // Common tessellation control body.
2272 std::ostringstream tescBodyCommon;
2273 tescBodyCommon
2274 << " gl_TessLevelInner[0] = 1.0;\n"
2275 << " gl_TessLevelInner[1] = 1.0;\n"
2276 << " gl_TessLevelOuter[0] = 1.0;\n"
2277 << " gl_TessLevelOuter[1] = 1.0;\n"
2278 << " gl_TessLevelOuter[2] = 1.0;\n"
2279 << " gl_TessLevelOuter[3] = 1.0;\n"
2280 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2281 ;
2282
2283 // Common tessellation evaluation preamble.
2284 std::ostringstream tesePreamble;
2285 tesePreamble
2286 << "layout (triangles, fractional_odd_spacing, cw) in;\n"
2287 << "in gl_PerVertex\n"
2288 << "{\n"
2289 << " vec4 gl_Position;\n"
2290 << "} gl_in[gl_MaxPatchVertices];\n"
2291 << "out gl_PerVertex\n"
2292 << "{\n"
2293 << " vec4 gl_Position;\n"
2294 << "};\n"
2295 ;
2296
2297 // Common tessellation evaluation body.
2298 std::ostringstream teseBodyCommon;
2299 teseBodyCommon
2300 << " gl_Position = (gl_TessCoord.x * gl_in[0].gl_Position) +\n"
2301 << " (gl_TessCoord.y * gl_in[1].gl_Position) +\n"
2302 << " (gl_TessCoord.z * gl_in[2].gl_Position);\n"
2303 ;
2304
2305 // Shader preamble.
2306 std::ostringstream preamble;
2307
2308 preamble
2309 << "#version 460\n"
2310 << "#extension GL_EXT_nonuniform_qualifier : enable\n"
2311 << "#extension GL_EXT_debug_printf : enable\n"
2312 << (rayTracing ? "#extension GL_EXT_ray_tracing : enable\n" : "")
2313 << (rayQueries ? "#extension GL_EXT_ray_query : enable\n" : "")
2314 << "\n"
2315 ;
2316
2317 if (m_params.testingStage == TestingStage::VERTEX)
2318 {
2319 preamble << vertexPreamble.str();
2320 }
2321 else if (m_params.testingStage == TestingStage::COMPUTE)
2322 {
2323 preamble
2324 << "layout (local_size_x=1, local_size_y=1, local_size_z=1) in;\n"
2325 << "\n"
2326 ;
2327 }
2328 else if (m_params.testingStage == TestingStage::GEOMETRY)
2329 {
2330 preamble
2331 << "layout (triangles) in;\n"
2332 << "layout (triangle_strip, max_vertices=3) out;\n"
2333 << "in gl_PerVertex\n"
2334 << "{\n"
2335 << " vec4 gl_Position;\n"
2336 << "} gl_in[3];\n"
2337 << "out gl_PerVertex\n"
2338 << "{\n"
2339 << " vec4 gl_Position;\n"
2340 << "};\n"
2341 ;
2342 }
2343 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2344 {
2345 preamble << tescPreamble.str();
2346 }
2347 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2348 {
2349 preamble << tesePreamble.str();
2350 }
2351 else if (m_params.testingStage == TestingStage::CALLABLE)
2352 {
2353 preamble << "layout (location=0) callableDataInEXT float unusedCallableData;\n";
2354 }
2355 else if (m_params.testingStage == TestingStage::CLOSEST_HIT ||
2356 m_params.testingStage == TestingStage::ANY_HIT ||
2357 m_params.testingStage == TestingStage::MISS)
2358 {
2359 preamble << "layout (location=0) rayPayloadInEXT float unusedRayPayload;\n";
2360 }
2361 else if (m_params.testingStage == TestingStage::INTERSECTION)
2362 {
2363 preamble << "hitAttributeEXT vec3 hitAttribute;\n";
2364 }
2365
2366 preamble << extraSet.str();
2367 if (usePushConstants)
2368 preamble << "layout (push_constant, std430) uniform PushConstantBlock { uint zero; } pc;\n";
2369 preamble << "\n";
2370
2371 // We need to create a shader per iteration.
2372 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2373 {
2374 // Shader preamble.
2375 std::ostringstream shader;
2376 shader << preamble.str();
2377
2378 deUint32 inputAttachmentCount = 0u;
2379
2380 // Descriptor declarations for this iteration.
2381 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2382 {
2383 DE_ASSERT(bindingIdx <= std::numeric_limits<deUint32>::max());
2384
2385 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2386 const auto bindingTypes = binding->typesAtIteration(iter);
2387 const auto hasInputAttachment = de::contains(begin(bindingTypes), end(bindingTypes), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2388 const auto isArray = binding->isArray();
2389 const auto isUnbounded = binding->isUnbounded();
2390 const auto bindingSize = binding->size();
2391
2392 // If the binding is an input attachment, make sure it's not an array.
2393 DE_ASSERT(!hasInputAttachment || !isArray);
2394
2395 // Make sure the descriptor count fits a deInt32 if needed.
2396 DE_ASSERT(!isArray || isUnbounded || bindingSize <= static_cast<size_t>(std::numeric_limits<deInt32>::max()));
2397
2398 const auto arraySize = (isArray ? (isUnbounded ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(bindingSize)))
2399 : tcu::Nothing);
2400
2401 shader << binding->glslDeclarations(iter, 0u, static_cast<deUint32>(bindingIdx), inputAttachmentCount, arraySize);
2402
2403 if (hasInputAttachment)
2404 ++inputAttachmentCount;
2405 }
2406
2407 // Main body.
2408 shader
2409 << "\n"
2410 << "void main() {\n"
2411 // This checks if we are the first invocation to arrive here, so the checks are executed only once.
2412 << " const uint flag = atomicCompSwap(outputBuffer.value[" << iter << "], 0u, 1u);\n"
2413 << " if (flag == 0u) {\n"
2414 << " uint anyError = 0u;\n"
2415 ;
2416
2417 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2418 {
2419 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2420 const auto idx32 = static_cast<deUint32>(bindingIdx);
2421 shader << binding->glslCheckStatements(iter, 0u, idx32, getDescriptorNumericValue(iter, idx32), tcu::Nothing, usePushConstants);
2422 }
2423
2424 shader
2425 << " if (anyError == 0u) {\n"
2426 << " atomicAdd(outputBuffer.value[" << iter << "], 1u);\n"
2427 << " }\n"
2428 << " }\n" // Closes if (flag == 0u).
2429 ;
2430
2431 if (m_params.testingStage == TestingStage::VERTEX)
2432 {
2433 shader << vertexBodyCommon.str();
2434 }
2435 else if (m_params.testingStage == TestingStage::GEOMETRY)
2436 {
2437 shader
2438 << " gl_Position = gl_in[0].gl_Position; EmitVertex();\n"
2439 << " gl_Position = gl_in[1].gl_Position; EmitVertex();\n"
2440 << " gl_Position = gl_in[2].gl_Position; EmitVertex();\n"
2441 ;
2442 }
2443 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2444 {
2445 shader << tescBodyCommon.str();
2446 }
2447 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2448 {
2449 shader << teseBodyCommon.str();
2450 }
2451
2452 shader
2453 << "}\n" // End of main().
2454 ;
2455
2456 {
2457 const auto shaderNameStr = shaderName(iter);
2458 const auto shaderStr = shader.str();
2459 auto& glslSource = programCollection.glslSources.add(shaderNameStr);
2460
2461 if (m_params.testingStage == TestingStage::COMPUTE)
2462 glslSource << glu::ComputeSource(shaderStr);
2463 else if (m_params.testingStage == TestingStage::VERTEX)
2464 glslSource << glu::VertexSource(shaderStr);
2465 else if (m_params.testingStage == TestingStage::FRAGMENT)
2466 glslSource << glu::FragmentSource(shaderStr);
2467 else if (m_params.testingStage == TestingStage::GEOMETRY)
2468 glslSource << glu::GeometrySource(shaderStr);
2469 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2470 glslSource << glu::TessellationControlSource(shaderStr);
2471 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2472 glslSource << glu::TessellationEvaluationSource(shaderStr);
2473 else if (m_params.testingStage == TestingStage::RAY_GEN)
2474 glslSource << glu::RaygenSource(updateRayTracingGLSL(shaderStr));
2475 else if (m_params.testingStage == TestingStage::INTERSECTION)
2476 glslSource << glu::IntersectionSource(updateRayTracingGLSL(shaderStr));
2477 else if (m_params.testingStage == TestingStage::ANY_HIT)
2478 glslSource << glu::AnyHitSource(updateRayTracingGLSL(shaderStr));
2479 else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
2480 glslSource << glu::ClosestHitSource(updateRayTracingGLSL(shaderStr));
2481 else if (m_params.testingStage == TestingStage::MISS)
2482 glslSource << glu::MissSource(updateRayTracingGLSL(shaderStr));
2483 else if (m_params.testingStage == TestingStage::CALLABLE)
2484 glslSource << glu::CallableSource(updateRayTracingGLSL(shaderStr));
2485 else
2486 DE_ASSERT(false);
2487
2488 if (rayTracing || rayQueries)
2489 glslSource << rtBuildOptions;
2490 }
2491 }
2492
2493 if (m_params.testingStage == TestingStage::FRAGMENT
2494 || m_params.testingStage == TestingStage::GEOMETRY
2495 || m_params.testingStage == TestingStage::TESS_CONTROL
2496 || m_params.testingStage == TestingStage::TESS_EVAL)
2497 {
2498 // Add passthrough vertex shader that works for points.
2499 std::ostringstream vertPassthrough;
2500 vertPassthrough
2501 << "#version 460\n"
2502 << "out gl_PerVertex\n"
2503 << "{\n"
2504 << " vec4 gl_Position;\n"
2505 << "};\n"
2506 << vertexPreamble.str()
2507 << "void main() {\n"
2508 << vertexBodyCommon.str()
2509 << "}\n"
2510 ;
2511 programCollection.glslSources.add("vert") << glu::VertexSource(vertPassthrough.str());
2512 }
2513
2514 if (m_params.testingStage == TestingStage::TESS_CONTROL)
2515 {
2516 // Add passthrough tessellation evaluation shader.
2517 std::ostringstream tesePassthrough;
2518 tesePassthrough
2519 << "#version 460\n"
2520 << tesePreamble.str()
2521 << "void main (void)\n"
2522 << "{\n"
2523 << teseBodyCommon.str()
2524 << "}\n"
2525 ;
2526
2527 programCollection.glslSources.add("tese") << glu::TessellationEvaluationSource(tesePassthrough.str());
2528 }
2529
2530 if (m_params.testingStage == TestingStage::TESS_EVAL)
2531 {
2532 // Add passthrough tessellation control shader.
2533 std::ostringstream tescPassthrough;
2534 tescPassthrough
2535 << "#version 460\n"
2536 << tescPreamble.str()
2537 << "void main (void)\n"
2538 << "{\n"
2539 << tescBodyCommon.str()
2540 << "}\n"
2541 ;
2542
2543 programCollection.glslSources.add("tesc") << glu::TessellationControlSource(tescPassthrough.str());
2544 }
2545
2546 if (rayTracing && m_params.testingStage != TestingStage::RAY_GEN)
2547 {
2548 // Add a "passthrough" ray generation shader.
2549 std::ostringstream rgen;
2550 rgen
2551 << "#version 460 core\n"
2552 << "#extension GL_EXT_ray_tracing : require\n"
2553 << "layout (set=1, binding=" << extraBindings << ") uniform accelerationStructureEXT externalAS;\n"
2554 << ((m_params.testingStage == TestingStage::CALLABLE)
2555 ? "layout (location=0) callableDataEXT float unusedCallableData;\n"
2556 : "layout (location=0) rayPayloadEXT float unusedRayPayload;\n")
2557 << "\n"
2558 << "void main()\n"
2559 << "{\n"
2560 ;
2561
2562 if (m_params.testingStage == TestingStage::INTERSECTION
2563 || m_params.testingStage == TestingStage::ANY_HIT
2564 || m_params.testingStage == TestingStage::CLOSEST_HIT
2565 || m_params.testingStage == TestingStage::MISS)
2566 {
2567 // We need to trace rays in this case to get hits or misses.
2568 const auto zDir = ((m_params.testingStage == TestingStage::MISS) ? "-1.0" : "1.0");
2569
2570 rgen
2571 << " const uint cullMask = 0xFF;\n"
2572 << " const float tMin = 1.0;\n"
2573 << " const float tMax = 10.0;\n"
2574 << " const vec3 origin = vec3(0.0, 0.0, 0.0);\n"
2575 << " const vec3 direction = vec3(0.0, 0.0, " << zDir << ");\n"
2576 << " traceRayEXT(externalAS, gl_RayFlagsNoneEXT, cullMask, 0, 0, 0, origin, tMin, direction, tMax, 0);\n"
2577 ;
2578
2579 }
2580 else if (m_params.testingStage == TestingStage::CALLABLE)
2581 {
2582 rgen << " executeCallableEXT(0, 0);\n";
2583 }
2584
2585 // End of main().
2586 rgen << "}\n";
2587
2588 programCollection.glslSources.add("rgen") << glu::RaygenSource(updateRayTracingGLSL(rgen.str())) << rtBuildOptions;
2589
2590 // Intersection shaders will ignore the intersection, so we need a passthrough miss shader.
2591 if (m_params.testingStage == TestingStage::INTERSECTION)
2592 {
2593 std::ostringstream miss;
2594 miss
2595 << "#version 460 core\n"
2596 << "#extension GL_EXT_ray_tracing : require\n"
2597 << "layout (location=0) rayPayloadEXT float unusedRayPayload;\n"
2598 << "\n"
2599 << "void main()\n"
2600 << "{\n"
2601 << "}\n"
2602 ;
2603
2604 programCollection.glslSources.add("miss") << glu::MissSource(updateRayTracingGLSL(miss.str())) << rtBuildOptions;
2605 }
2606 }
2607 }
2608
2609 TestInstance* MutableTypesTest::createInstance (Context& context) const
2610 {
2611 return new MutableTypesInstance(context, m_params);
2612 }
2613
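// Throws NotSupportedError unless the descriptorBindingPartiallyBound feature is available.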
2614 void requirePartiallyBound (Context& context)
2615 {
2616 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2617 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2618 if (!indexingFeatures.descriptorBindingPartiallyBound)
2619 TCU_THROW(NotSupportedError, "Partially bound bindings not supported");
2620 }
2621
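// Throws NotSupportedError unless the descriptorBindingVariableDescriptorCount feature is available.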
2622 void requireVariableDescriptorCount (Context& context)
2623 {
2624 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2625 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2626 if (!indexingFeatures.descriptorBindingVariableDescriptorCount)
2627 TCU_THROW(NotSupportedError, "Variable descriptor count not supported");
2628 }
2629
2630 // Calculates the set of used descriptor types for a given set and iteration count, for bindings matching a predicate.
2631 std::set<VkDescriptorType> getUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations, bool (*predicate)(const BindingInterface* binding))
2632 {
2633 std::set<VkDescriptorType> usedDescriptorTypes;
2634
2635 for (size_t bindingIdx = 0; bindingIdx < descriptorSet.numBindings(); ++bindingIdx)
2636 {
2637 const auto bindingPtr = descriptorSet.getBinding(bindingIdx);
2638 if (predicate(bindingPtr))
2639 {
2640 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2641 {
2642 const auto descTypes = bindingPtr->typesAtIteration(iter);
2643 usedDescriptorTypes.insert(begin(descTypes), end(descTypes));
2644 }
2645 }
2646 }
2647
2648 return usedDescriptorTypes;
2649 }
2650
2651 std::set<VkDescriptorType> getAllUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2652 {
2653 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface*) { return true; });
2654 }
2655
2656 std::set<VkDescriptorType> getUsedArrayDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2657 {
2658 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface* b) { return b->isArray(); });
2659 }
2660
2661 // Are we testing a vertex pipeline stage?
2662 bool isVertexStage (TestingStage stage)
2663 {
2664 switch (stage)
2665 {
2666 case TestingStage::VERTEX:
2667 case TestingStage::TESS_CONTROL:
2668 case TestingStage::TESS_EVAL:
2669 case TestingStage::GEOMETRY:
2670 return true;
2671 default:
2672 break;
2673 }
2674
2675 return false;
2676 }
2677
2678 void MutableTypesTest::checkSupport (Context& context) const
2679 {
2680 if (!context.isDeviceFunctionalitySupported("VK_VALVE_mutable_descriptor_type") &&
2681 !context.isDeviceFunctionalitySupported("VK_EXT_mutable_descriptor_type"))
2683 TCU_THROW(NotSupportedError, "VK_VALVE_mutable_descriptor_type or VK_EXT_mutable_descriptor_type is not supported");
2684
2685 VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutableDescriptorType = initVulkanStructure();
2686 VkPhysicalDeviceFeatures2KHR features2 = initVulkanStructure(&mutableDescriptorType);
2687
2688 context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);
2689
2690 if (!mutableDescriptorType.mutableDescriptorType)
2691 TCU_THROW(NotSupportedError, "mutableDescriptorType feature is not supported");
2692
2693 // Check ray tracing if needed.
2694 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2695
2696 if (rayTracing)
2697 {
2698 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2699 context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
2700 }
2701
2702 // Check if ray queries are needed. Ray queries are used to verify acceleration structure descriptors.
2703 const bool rayQueriesNeeded = usesAccelerationStructures(*m_params.descriptorSet);
2704 if (rayQueriesNeeded)
2705 {
2706 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2707 context.requireDeviceFunctionality("VK_KHR_ray_query");
2708 }
2709
2710 // We'll use iterations to check each mutable type, as needed.
2711 const auto numIterations = m_params.descriptorSet->maxTypes();
2712
2713 if (m_params.descriptorSet->lastBindingIsUnbounded())
2714 requireVariableDescriptorCount(context);
2715
2716 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2717 {
2718 if (m_params.descriptorSet->needsAliasing(iter))
2719 {
2720 requirePartiallyBound(context);
2721 break;
2722 }
2723 }
2724
2725 if (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2726 {
2727 // Check update after bind for each used descriptor type.
2728 const auto& usedDescriptorTypes = getAllUsedDescriptorTypes(*m_params.descriptorSet, numIterations);
2729 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2730
2731 for (const auto& descType : usedDescriptorTypes)
2732 {
2733 switch (descType)
2734 {
2735 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2736 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2737 if (!indexingFeatures.descriptorBindingUniformBufferUpdateAfterBind)
2738 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform buffers");
2739 break;
2740
2741 case VK_DESCRIPTOR_TYPE_SAMPLER:
2742 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2743 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2744 if (!indexingFeatures.descriptorBindingSampledImageUpdateAfterBind)
2745 TCU_THROW(NotSupportedError, "Update-after-bind not supported for samplers and sampled images");
2746 break;
2747
2748 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2749 if (!indexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
2750 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage images");
2751 break;
2752
2753 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2754 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2755 if (!indexingFeatures.descriptorBindingStorageBufferUpdateAfterBind)
2756 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage buffers");
2757 break;
2758
2759 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2760 if (!indexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind)
2761 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform texel buffers");
2762 break;
2763
2764 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2765 if (!indexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind)
2766 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage texel buffers");
2767 break;
2768
2769 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2770 TCU_THROW(InternalError, "Tests do not support update-after-bind with input attachments");
2771
2772 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
2773 {
2774 // Just in case we ever mix some of these in.
2775 context.requireDeviceFunctionality("VK_EXT_inline_uniform_block");
2776 const auto& iubFeatures = context.getInlineUniformBlockFeatures();
2777 if (!iubFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind)
2778 TCU_THROW(NotSupportedError, "Update-after-bind not supported for inline uniform blocks");
2779 }
2780 break;
2781
2782 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2783 {
2784 // Just in case we ever mix some of these in.
2785 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2786 const auto& asFeatures = context.getAccelerationStructureFeatures();
2787 if (!asFeatures.descriptorBindingAccelerationStructureUpdateAfterBind)
2788 TCU_THROW(NotSupportedError, "Update-after-bind not supported for acceleration structures");
2789 }
2790 break;
2791
2792 case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
2793 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_EXT in list of used descriptor types");
2794
2795 default:
2796 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2797 }
2798 }
2799 }
2800
2801 if (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT)
2802 {
2803 // These require dynamically uniform indices.
2804 const auto& usedDescriptorTypes = getUsedArrayDescriptorTypes(*m_params.descriptorSet, numIterations);
2805 const auto& features = context.getDeviceFeatures();
2806 const auto descriptorIndexingSupported = context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing");
2807 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2808
2809 for (const auto& descType : usedDescriptorTypes)
2810 {
2811 switch (descType)
2812 {
2813 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2814 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2815 if (!features.shaderUniformBufferArrayDynamicIndexing)
2816 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform buffers");
2817 break;
2818
2819 case VK_DESCRIPTOR_TYPE_SAMPLER:
2820 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2821 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2822 if (!features.shaderSampledImageArrayDynamicIndexing)
2823 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for samplers and sampled images");
2824 break;
2825
2826 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2827 if (!features.shaderStorageImageArrayDynamicIndexing)
2828 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage images");
2829 break;
2830
2831 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2832 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2833 if (!features.shaderStorageBufferArrayDynamicIndexing)
2834 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage buffers");
2835 break;
2836
2837 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2838 if (!descriptorIndexingSupported || !indexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing)
2839 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform texel buffers");
2840 break;
2841
2842 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2843 if (!descriptorIndexingSupported || !indexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing)
2844 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage texel buffers");
2845 break;
2846
2847 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2848 if (!descriptorIndexingSupported || !indexingFeatures.shaderInputAttachmentArrayDynamicIndexing)
2849 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for input attachments");
2850 break;
2851
2852 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2853 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2854 break;
2855
2856 case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
2857 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_EXT in list of used array descriptor types");
2858
2859 default:
2860 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2861 }
2862 }
2863 }
2864
2865 // Check layout support.
2866 {
2867 const auto& vkd = context.getDeviceInterface();
2868 const auto device = getDevice(context);
2869 const auto stageFlags = m_params.getStageFlags();
2870
2871 {
2872 const auto layoutCreateFlags = m_params.getDstLayoutCreateFlags();
2873 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2874
2875 if (!supported)
2876 TCU_THROW(NotSupportedError, "Required descriptor set layout not supported");
2877 }
2878
2879 if (m_params.updateType == UpdateType::COPY)
2880 {
2881 const auto layoutCreateFlags = m_params.getSrcLayoutCreateFlags();
2882 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2883
2884 if (!supported)
2885 TCU_THROW(NotSupportedError, "Required descriptor set layout for source set not supported");
2886
2887 // Check specific layouts for the different source sets are supported.
2888 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2889 {
2890 const auto srcSet = m_params.descriptorSet->genSourceSet(m_params.sourceSetStrategy, iter);
2891 const auto srcLayoutSupported = srcSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2892
2893 if (!srcLayoutSupported)
2894 TCU_THROW(NotSupportedError, "Descriptor set layout for source set at iteration " + de::toString(iter) + " not supported");
2895 }
2896 }
2897 }
2898
2899 // Check supported stores and stages.
2900 const bool vertexStage = isVertexStage(m_params.testingStage);
2901 const bool fragmentStage = (m_params.testingStage == TestingStage::FRAGMENT);
2902 const bool geometryStage = (m_params.testingStage == TestingStage::GEOMETRY);
2903 const bool tessellation = (m_params.testingStage == TestingStage::TESS_CONTROL || m_params.testingStage == TestingStage::TESS_EVAL);
2904
2905 const auto& features = context.getDeviceFeatures();
2906
2907 if (vertexStage && !features.vertexPipelineStoresAndAtomics)
2908 TCU_THROW(NotSupportedError, "Vertex pipeline stores and atomics not supported");
2909
2910 if (fragmentStage && !features.fragmentStoresAndAtomics)
2911 TCU_THROW(NotSupportedError, "Fragment shader stores and atomics not supported");
2912
2913 if (geometryStage && !features.geometryShader)
2914 TCU_THROW(NotSupportedError, "Geometry shader not supported");
2915
2916 if (tessellation && !features.tessellationShader)
2917 TCU_THROW(NotSupportedError, "Tessellation shaders not supported");
2918 }
2919
2920 // What to do at each iteration step. Used to apply UPDATE_AFTER_BIND or not.
2921 enum class Step
2922 {
2923 UPDATE = 0,
2924 BIND,
2925 };
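// In the iteration loop of MutableTypesInstance::iterate() below, the two steps are sequenced as
// {UPDATE, BIND} for normal updates and {BIND, UPDATE} when testing update-after-bind:
//
//	const Step steps[] = {
//		(updateAfterBind ? Step::BIND : Step::UPDATE),
//		(updateAfterBind ? Step::UPDATE : Step::BIND)
//	};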
2926
2927 // Create render pass.
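// The render pass declares input attachments only: results are verified through storage buffers
// rather than color attachments, so the subpass has no color or depth/stencil attachments.
// Input attachments stay in VK_IMAGE_LAYOUT_GENERAL and use VK_ATTACHMENT_LOAD_OP_LOAD so that
// their previously-written contents are preserved when the render pass begins.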
2928 Move<VkRenderPass> buildRenderPass (const DeviceInterface& vkd, VkDevice device, const std::vector<Resource>& resources)
2929 {
2930 const auto imageFormat = getDescriptorImageFormat();
2931
2932 std::vector<VkAttachmentDescription> attachmentDescriptions;
2933 std::vector<VkAttachmentReference> attachmentReferences;
2934 std::vector<deUint32> attachmentIndices;
2935
2936 for (const auto& resource : resources)
2937 {
2938 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2939 {
2940 const auto nextIndex = static_cast<deUint32>(attachmentDescriptions.size());
2941
2942 const VkAttachmentDescription description = {
2943 0u, // VkAttachmentDescriptionFlags flags;
2944 imageFormat, // VkFormat format;
2945 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
2946 VK_ATTACHMENT_LOAD_OP_LOAD, // VkAttachmentLoadOp loadOp;
2947 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp storeOp;
2948 VK_ATTACHMENT_LOAD_OP_DONT_CARE, // VkAttachmentLoadOp stencilLoadOp;
2949 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp stencilStoreOp;
2950 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout initialLayout;
2951 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout finalLayout;
2952 };
2953
2954 const VkAttachmentReference reference = { nextIndex, VK_IMAGE_LAYOUT_GENERAL };
2955
2956 attachmentIndices.push_back(nextIndex);
2957 attachmentDescriptions.push_back(description);
2958 attachmentReferences.push_back(reference);
2959 }
2960 }
2961
2962 const auto attachmentCount = static_cast<deUint32>(attachmentDescriptions.size());
2963 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentIndices.size()));
2964 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentReferences.size()));
2965
2966 const VkSubpassDescription subpassDescription =
2967 {
2968 0u, // VkSubpassDescriptionFlags flags;
2969 VK_PIPELINE_BIND_POINT_GRAPHICS, // VkPipelineBindPoint pipelineBindPoint;
2970 attachmentCount, // deUint32 inputAttachmentCount;
2971 de::dataOrNull(attachmentReferences), // const VkAttachmentReference* pInputAttachments;
2972 0u, // deUint32 colorAttachmentCount;
2973 nullptr, // const VkAttachmentReference* pColorAttachments;
2974 nullptr, // const VkAttachmentReference* pResolveAttachments;
2975 nullptr, // const VkAttachmentReference* pDepthStencilAttachment;
2976 0u, // deUint32 preserveAttachmentCount;
2977 nullptr, // const deUint32* pPreserveAttachments;
2978 };
2979
2980 const VkRenderPassCreateInfo renderPassCreateInfo =
2981 {
2982 VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, // VkStructureType sType;
2983 nullptr, // const void* pNext;
2984 0u, // VkRenderPassCreateFlags flags;
2985 static_cast<deUint32>(attachmentDescriptions.size()), // deUint32 attachmentCount;
2986 de::dataOrNull(attachmentDescriptions), // const VkAttachmentDescription* pAttachments;
2987 1u, // deUint32 subpassCount;
2988 &subpassDescription, // const VkSubpassDescription* pSubpasses;
2989 0u, // deUint32 dependencyCount;
2990 nullptr, // const VkSubpassDependency* pDependencies;
2991 };
2992
2993 return createRenderPass(vkd, device, &renderPassCreateInfo);
2994 }
2995
2996 // Create a graphics pipeline.
2997 Move<VkPipeline> buildGraphicsPipeline (const DeviceInterface& vkd, VkDevice device, VkPipelineLayout pipelineLayout,
2998 VkShaderModule vertModule,
2999 VkShaderModule tescModule,
3000 VkShaderModule teseModule,
3001 VkShaderModule geomModule,
3002 VkShaderModule fragModule,
3003 VkRenderPass renderPass)
3004 {
3005 const auto extent = getDefaultExtent();
3006 const std::vector<VkViewport> viewports (1u, makeViewport(extent));
3007 const std::vector<VkRect2D> scissors (1u, makeRect2D(extent));
3008 const auto hasTess = (tescModule != DE_NULL || teseModule != DE_NULL);
3009 const auto topology = (hasTess ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
3010
3011
3012 const VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo = initVulkanStructure();
3013
3014 const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo = {
3015 VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, // VkStructureType sType;
3016 nullptr, // const void* pNext;
3017 0u, // VkPipelineInputAssemblyStateCreateFlags flags;
3018 topology, // VkPrimitiveTopology topology;
3019 VK_FALSE, // VkBool32 primitiveRestartEnable;
3020 };
3021
3022 const VkPipelineTessellationStateCreateInfo tessellationStateCreateInfo = {
3023 VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, // VkStructureType sType;
3024 nullptr, // const void* pNext;
3025 0u, // VkPipelineTessellationStateCreateFlags flags;
3026 (hasTess ? 3u : 0u), // deUint32 patchControlPoints;
3027 };
3028
3029 const VkPipelineViewportStateCreateInfo viewportStateCreateInfo = {
3030 VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, // VkStructureType sType;
3031 nullptr, // const void* pNext;
3032 0u, // VkPipelineViewportStateCreateFlags flags;
3033 static_cast<deUint32>(viewports.size()), // deUint32 viewportCount;
3034 de::dataOrNull(viewports), // const VkViewport* pViewports;
3035 static_cast<deUint32>(scissors.size()), // deUint32 scissorCount;
3036 de::dataOrNull(scissors), // const VkRect2D* pScissors;
3037 };
3038
3039 const VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo = {
3040 VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, // VkStructureType sType;
3041 nullptr, // const void* pNext;
3042 0u, // VkPipelineRasterizationStateCreateFlags flags;
3043 VK_FALSE, // VkBool32 depthClampEnable;
3044 (fragModule == DE_NULL ? VK_TRUE : VK_FALSE), // VkBool32 rasterizerDiscardEnable;
3045 VK_POLYGON_MODE_FILL, // VkPolygonMode polygonMode;
3046 VK_CULL_MODE_NONE, // VkCullModeFlags cullMode;
3047 VK_FRONT_FACE_CLOCKWISE, // VkFrontFace frontFace;
3048 VK_FALSE, // VkBool32 depthBiasEnable;
3049 0.0f, // float depthBiasConstantFactor;
3050 0.0f, // float depthBiasClamp;
3051 0.0f, // float depthBiasSlopeFactor;
3052 1.0f, // float lineWidth;
3053 };
3054
3055 const VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo = {
3056 VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, // VkStructureType sType;
3057 nullptr, // const void* pNext;
3058 0u, // VkPipelineMultisampleStateCreateFlags flags;
3059 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits rasterizationSamples;
3060 VK_FALSE, // VkBool32 sampleShadingEnable;
3061 1.0f, // float minSampleShading;
3062 nullptr, // const VkSampleMask* pSampleMask;
3063 VK_FALSE, // VkBool32 alphaToCoverageEnable;
3064 VK_FALSE, // VkBool32 alphaToOneEnable;
3065 };
3066
3067 const VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo = initVulkanStructure();
3068
3069 const VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo = initVulkanStructure();
3070
3071 return makeGraphicsPipeline(vkd, device, pipelineLayout,
3072 vertModule, tescModule, teseModule, geomModule, fragModule,
3073 renderPass, 0u, &vertexInputStateCreateInfo, &inputAssemblyStateCreateInfo,
3074 (hasTess ? &tessellationStateCreateInfo : nullptr), &viewportStateCreateInfo,
3075 &rasterizationStateCreateInfo, &multisampleStateCreateInfo,
3076 &depthStencilStateCreateInfo, &colorBlendStateCreateInfo, nullptr);
3077 }
3078
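// Create a framebuffer for the render pass above. Input attachment views are gathered from the
// resource vector in the same order buildRenderPass used, so attachment indices match between
// the render pass and the framebuffer.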
3079 Move<VkFramebuffer> buildFramebuffer (const DeviceInterface& vkd, VkDevice device, VkRenderPass renderPass, const std::vector<Resource>& resources)
3080 {
3081 const auto extent = getDefaultExtent();
3082
3083 std::vector<VkImageView> inputAttachments;
3084 for (const auto& resource : resources)
3085 {
3086 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
3087 inputAttachments.push_back(resource.imageView.get());
3088 }
3089
3090 const VkFramebufferCreateInfo framebufferCreateInfo =
3091 {
3092 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, // VkStructureType sType;
3093 nullptr, // const void* pNext;
3094 0u, // VkFramebufferCreateFlags flags;
3095 renderPass, // VkRenderPass renderPass;
3096 static_cast<deUint32>(inputAttachments.size()), // deUint32 attachmentCount;
3097 de::dataOrNull(inputAttachments), // const VkImageView* pAttachments;
3098 extent.width, // deUint32 width;
3099 extent.height, // deUint32 height;
3100 extent.depth, // deUint32 layers;
3101 };
3102
3103 return createFramebuffer(vkd, device, &framebufferCreateInfo);
3104 }
3105
3106 tcu::TestStatus MutableTypesInstance::iterate ()
3107 {
3108 const auto& vki = m_context.getInstanceInterface();
3109 const auto& vkd = m_context.getDeviceInterface();
3110 const auto device = getDevice(m_context);
3111 const auto physDev = m_context.getPhysicalDevice();
3112 const auto qIndex = m_context.getUniversalQueueFamilyIndex();
3113 const auto queue = getDeviceQueue(vkd, device, qIndex, 0u);
3114
3115 SimpleAllocator alloc(vkd, device, getPhysicalDeviceMemoryProperties(vki, physDev));
3116
3117 const auto& paramSet = m_params.descriptorSet;
3118 const auto numIterations = paramSet->maxTypes();
3119 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
3120 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
3121 const auto stageFlags = m_params.getStageFlags();
3122 const bool srcSetNeeded = (m_params.updateType == UpdateType::COPY);
3123 const bool updateAfterBind = (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND);
3124 const auto bindPoint = m_params.getBindPoint();
3125 const bool rayTracing = isRayTracingStage(m_params.testingStage);
3126 const bool useAABBs = (m_params.testingStage == TestingStage::INTERSECTION);
3127
3128 // Resources for each iteration.
3129 std::vector<std::vector<Resource>> allResources;
3130 allResources.reserve(numIterations);
3131
3132 // Command pool.
3133 const auto cmdPool = makeCommandPool(vkd, device, qIndex);
3134
3135 // Descriptor pool and set for the active (dst) descriptor set.
3136 const auto dstPoolFlags = m_params.getDstPoolCreateFlags();
3137 const auto dstLayoutFlags = m_params.getDstLayoutCreateFlags();
3138
3139 const auto dstPool = paramSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, dstPoolFlags);
3140 const auto dstLayout = paramSet->makeDescriptorSetLayout(vkd, device, stageFlags, dstLayoutFlags);
3141 const auto varCount = paramSet->getVariableDescriptorCount();
3142
3143 using VariableCountInfoPtr = de::MovePtr<VkDescriptorSetVariableDescriptorCountAllocateInfo>;
3144
3145 VariableCountInfoPtr dstVariableCountInfo;
3146 if (varCount)
3147 {
3148 dstVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3149 *dstVariableCountInfo = initVulkanStructure();
3150
3151 dstVariableCountInfo->descriptorSetCount = 1u;
3152 dstVariableCountInfo->pDescriptorCounts = &(varCount.get());
3153 }
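	// When the set uses a variable descriptor count binding, the actual count must be provided at
	// allocation time through VkDescriptorSetVariableDescriptorCountAllocateInfo, which
	// makeDescriptorSet is expected to chain into VkDescriptorSetAllocateInfo::pNext.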
3154 const auto dstSet = makeDescriptorSet(vkd, device, dstPool.get(), dstLayout.get(), dstVariableCountInfo.get());
3155
3156 // Source pool and set (optional).
3157 const auto srcPoolFlags = m_params.getSrcPoolCreateFlags();
3158 const auto srcLayoutFlags = m_params.getSrcLayoutCreateFlags();
3159 DescriptorSetPtr iterationSrcSet;
3160 Move<VkDescriptorPool> srcPool;
3161 Move<VkDescriptorSetLayout> srcLayout;
3162 Move<VkDescriptorSet> srcSet;
3163
3164 // Extra set for external resources and output buffer.
3165 std::vector<Resource> extraResources;
3166 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue, useAABBs, 0u, numIterations);
3167 if (useExternalImage)
3168 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, vkd, device, alloc, qIndex, queue, useAABBs, getExternalSampledImageValue());
3169 if (useExternalSampler)
3170 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLER, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
3171 if (rayTracing)
3172 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
3173
3174 Move<VkDescriptorPool> extraPool;
3175 {
3176 DescriptorPoolBuilder poolBuilder;
3177 poolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
3178 if (useExternalImage)
3179 poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
3180 if (useExternalSampler)
3181 poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLER);
3182 if (rayTracing)
3183 poolBuilder.addType(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
3184 extraPool = poolBuilder.build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3185 }
3186
3187 Move<VkDescriptorSetLayout> extraLayout;
3188 {
3189 DescriptorSetLayoutBuilder layoutBuilder;
3190 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, stageFlags, nullptr);
3191 if (useExternalImage)
3192 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1u, stageFlags, nullptr);
3193 if (useExternalSampler)
3194 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLER, 1u, stageFlags, nullptr);
3195 if (rayTracing)
3196 {
3197 // The extra acceleration structure is used from the ray generation shader only.
3198 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 1u, VK_SHADER_STAGE_RAYGEN_BIT_KHR, nullptr);
3199 }
3200 extraLayout = layoutBuilder.build(vkd, device);
3201 }
3202
3203 const auto extraSet = makeDescriptorSet(vkd, device, extraPool.get(), extraLayout.get());
3204
3205 // Update extra set.
3206 using DescriptorBufferInfoPtr = de::MovePtr<VkDescriptorBufferInfo>;
3207 using DescriptorImageInfoPtr = de::MovePtr<VkDescriptorImageInfo>;
3208 using DescriptorASInfoPtr = de::MovePtr<VkWriteDescriptorSetAccelerationStructureKHR>;
3209
3210 deUint32 bindingCount = 0u;
3211 DescriptorBufferInfoPtr bufferInfoPtr;
3212 DescriptorImageInfoPtr imageInfoPtr;
3213 DescriptorImageInfoPtr samplerInfoPtr;
3214 DescriptorASInfoPtr asWriteInfoPtr;
3215
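	// The output buffer holds one deUint32 per iteration; the value stored for iteration N is read
	// back with getStoredValue(..., N) during verification at the end of each iteration.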
3216 const auto outputBufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numIterations));
3217 bufferInfoPtr = DescriptorBufferInfoPtr(new VkDescriptorBufferInfo(makeDescriptorBufferInfo(extraResources[bindingCount++].bufferWithMemory->get(), 0ull, outputBufferSize)));
3218 if (useExternalImage)
3219 imageInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(DE_NULL, extraResources[bindingCount++].imageView.get(), VK_IMAGE_LAYOUT_GENERAL)));
3220 if (useExternalSampler)
3221 samplerInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(extraResources[bindingCount++].sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_GENERAL)));
3222 if (rayTracing)
3223 {
3224 asWriteInfoPtr = DescriptorASInfoPtr(new VkWriteDescriptorSetAccelerationStructureKHR);
3225 *asWriteInfoPtr = initVulkanStructure();
3226 asWriteInfoPtr->accelerationStructureCount = 1u;
3227 asWriteInfoPtr->pAccelerationStructures = extraResources[bindingCount++].asData.tlas.get()->getPtr();
3228 }
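	// Note acceleration structure descriptors are not written through pImageInfo or pBufferInfo:
	// the handles travel in a VkWriteDescriptorSetAccelerationStructureKHR structure that is
	// chained into VkWriteDescriptorSet::pNext, which is the form passed to the update builder below.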
3229
3230 {
3231 bindingCount = 0u;
3232 DescriptorSetUpdateBuilder updateBuilder;
3233 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, bufferInfoPtr.get());
3234 if (useExternalImage)
3235 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, imageInfoPtr.get());
3236 if (useExternalSampler)
3237 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLER, samplerInfoPtr.get());
3238 if (rayTracing)
3239 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, asWriteInfoPtr.get());
3240 updateBuilder.update(vkd, device);
3241 }
3242
3243 // Push constants.
3244 const deUint32 zero = 0u;
3245 const VkPushConstantRange pcRange = {stageFlags, 0u /*offset*/, static_cast<deUint32>(sizeof(zero)) /*size*/ };
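	// For variants using ArrayAccessType::PUSH_CONSTANT, this value is what the shaders use as the
	// dynamically uniform array index; a zero is pushed before every dispatch/draw/trace below.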
3246
3247 // Needed for some test variants.
3248 Move<VkShaderModule> vertPassthrough;
3249 Move<VkShaderModule> tesePassthrough;
3250 Move<VkShaderModule> tescPassthrough;
3251 Move<VkShaderModule> rgenPassthrough;
3252 Move<VkShaderModule> missPassthrough;
3253
3254 if (m_params.testingStage == TestingStage::FRAGMENT
3255 || m_params.testingStage == TestingStage::GEOMETRY
3256 || m_params.testingStage == TestingStage::TESS_CONTROL
3257 || m_params.testingStage == TestingStage::TESS_EVAL)
3258 {
3259 vertPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("vert"), 0u);
3260 }
3261
3262 if (m_params.testingStage == TestingStage::TESS_CONTROL)
3263 {
3264 tesePassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tese"), 0u);
3265 }
3266
3267 if (m_params.testingStage == TestingStage::TESS_EVAL)
3268 {
3269 tescPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tesc"), 0u);
3270 }
3271
3272 if (m_params.testingStage == TestingStage::CLOSEST_HIT
3273 || m_params.testingStage == TestingStage::ANY_HIT
3274 || m_params.testingStage == TestingStage::INTERSECTION
3275 || m_params.testingStage == TestingStage::MISS
3276 || m_params.testingStage == TestingStage::CALLABLE)
3277 {
3278 rgenPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("rgen"), 0u);
3279 }
3280
3281 if (m_params.testingStage == TestingStage::INTERSECTION)
3282 {
3283 missPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("miss"), 0u);
3284 }
3285
3286 for (deUint32 iteration = 0u; iteration < numIterations; ++iteration)
3287 {
3288 // Generate source set for the current iteration.
3289 if (srcSetNeeded)
3290 {
3291 // Free previous descriptor set before rebuilding the pool.
3292 srcSet = Move<VkDescriptorSet>();
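			// (Resetting the Move wrapper first matters: otherwise it would attempt to free the old
			// set after its pool has been replaced, using a dead pool handle.)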
3293 iterationSrcSet = paramSet->genSourceSet(m_params.sourceSetStrategy, iteration);
3294 srcPool = iterationSrcSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, srcPoolFlags);
3295 srcLayout = iterationSrcSet->makeDescriptorSetLayout(vkd, device, stageFlags, srcLayoutFlags);
3296
3297 const auto srcVarCount = iterationSrcSet->getVariableDescriptorCount();
3298 VariableCountInfoPtr srcVariableCountInfo;
3299
3300 if (srcVarCount)
3301 {
3302 srcVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3303 *srcVariableCountInfo = initVulkanStructure();
3304
3305 srcVariableCountInfo->descriptorSetCount = 1u;
3306 srcVariableCountInfo->pDescriptorCounts = &(srcVarCount.get());
3307 }
3308
3309 srcSet = makeDescriptorSet(vkd, device, srcPool.get(), srcLayout.get(), srcVariableCountInfo.get());
3310 }
3311
3312 // Set layouts and sets used in the pipeline.
3313 const std::vector<VkDescriptorSetLayout> setLayouts = {dstLayout.get(), extraLayout.get()};
3314 const std::vector<VkDescriptorSet> usedSets = {dstSet.get(), extraSet.get()};
3315
3316 // Create resources.
3317 allResources.emplace_back(paramSet->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs));
3318 const auto& resources = allResources.back();
3319
3320 // Make pipeline for the current iteration.
3321 const auto pipelineLayout = makePipelineLayout(vkd, device, static_cast<deUint32>(setLayouts.size()), de::dataOrNull(setLayouts), 1u, &pcRange);
3322 const auto moduleName = shaderName(iteration);
3323 const auto shaderModule = createShaderModule(vkd, device, m_context.getBinaryCollection().get(moduleName), 0u);
3324
3325 Move<VkPipeline> pipeline;
3326 Move<VkRenderPass> renderPass;
3327 Move<VkFramebuffer> framebuffer;
3328
3329 deUint32 shaderGroupHandleSize = 0u;
3330 deUint32 shaderGroupBaseAlignment = 1u;
3331
3332 de::MovePtr<BufferWithMemory> raygenSBT;
3333 de::MovePtr<BufferWithMemory> missSBT;
3334 de::MovePtr<BufferWithMemory> hitSBT;
3335 de::MovePtr<BufferWithMemory> callableSBT;
3336
3337 VkStridedDeviceAddressRegionKHR raygenSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3338 VkStridedDeviceAddressRegionKHR missSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3339 VkStridedDeviceAddressRegionKHR hitSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3340 VkStridedDeviceAddressRegionKHR callableSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3341
3342 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3343 pipeline = makeComputePipeline(vkd, device, pipelineLayout.get(), shaderModule.get());
3344 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
3345 {
3346 VkShaderModule vertModule = DE_NULL;
3347 VkShaderModule teseModule = DE_NULL;
3348 VkShaderModule tescModule = DE_NULL;
3349 VkShaderModule geomModule = DE_NULL;
3350 VkShaderModule fragModule = DE_NULL;
3351
3352 if (m_params.testingStage == TestingStage::VERTEX)
3353 vertModule = shaderModule.get();
3354 else if (m_params.testingStage == TestingStage::FRAGMENT)
3355 {
3356 vertModule = vertPassthrough.get();
3357 fragModule = shaderModule.get();
3358 }
3359 else if (m_params.testingStage == TestingStage::GEOMETRY)
3360 {
3361 vertModule = vertPassthrough.get();
3362 geomModule = shaderModule.get();
3363 }
3364 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
3365 {
3366 vertModule = vertPassthrough.get();
3367 teseModule = tesePassthrough.get();
3368 tescModule = shaderModule.get();
3369 }
3370 else if (m_params.testingStage == TestingStage::TESS_EVAL)
3371 {
3372 vertModule = vertPassthrough.get();
3373 tescModule = tescPassthrough.get();
3374 teseModule = shaderModule.get();
3375 }
3376 else
3377 DE_ASSERT(false);
3378
3379 renderPass = buildRenderPass(vkd, device, resources);
3380 pipeline = buildGraphicsPipeline(vkd, device, pipelineLayout.get(), vertModule, tescModule, teseModule, geomModule, fragModule, renderPass.get());
3381 framebuffer = buildFramebuffer(vkd, device, renderPass.get(), resources);
3382 }
3383 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3384 {
3385 const auto rayTracingPipeline = de::newMovePtr<RayTracingPipeline>();
3386 const auto rayTracingPropertiesKHR = makeRayTracingProperties(vki, physDev);
3387 shaderGroupHandleSize = rayTracingPropertiesKHR->getShaderGroupHandleSize();
3388 shaderGroupBaseAlignment = rayTracingPropertiesKHR->getShaderGroupBaseAlignment();
3389
3390 VkShaderModule rgenModule = DE_NULL;
3391 VkShaderModule isecModule = DE_NULL;
3392 VkShaderModule ahitModule = DE_NULL;
3393 VkShaderModule chitModule = DE_NULL;
3394 VkShaderModule missModule = DE_NULL;
3395 VkShaderModule callModule = DE_NULL;
3396
3397 const deUint32 rgenGroup = 0u;
3398 deUint32 hitGroup = 0u;
3399 deUint32 missGroup = 0u;
3400 deUint32 callGroup = 0u;
3401
3402 if (m_params.testingStage == TestingStage::RAY_GEN)
3403 {
3404 rgenModule = shaderModule.get();
3405 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3406 }
3407 else if (m_params.testingStage == TestingStage::INTERSECTION)
3408 {
3409 hitGroup = 1u;
3410 missGroup = 2u;
3411 rgenModule = rgenPassthrough.get();
3412 missModule = missPassthrough.get();
3413 isecModule = shaderModule.get();
3414 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3415 rayTracingPipeline->addShader(VK_SHADER_STAGE_INTERSECTION_BIT_KHR, isecModule, hitGroup);
3416 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3417 }
3418 else if (m_params.testingStage == TestingStage::ANY_HIT)
3419 {
3420 hitGroup = 1u;
3421 rgenModule = rgenPassthrough.get();
3422 ahitModule = shaderModule.get();
3423 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3424 rayTracingPipeline->addShader(VK_SHADER_STAGE_ANY_HIT_BIT_KHR, ahitModule, hitGroup);
3425 }
3426 else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
3427 {
3428 hitGroup = 1u;
3429 rgenModule = rgenPassthrough.get();
3430 chitModule = shaderModule.get();
3431 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3432 rayTracingPipeline->addShader(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, chitModule, hitGroup);
3433 }
3434 else if (m_params.testingStage == TestingStage::MISS)
3435 {
3436 missGroup = 1u;
3437 rgenModule = rgenPassthrough.get();
3438 missModule = shaderModule.get();
3439 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3440 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3441 }
3442 else if (m_params.testingStage == TestingStage::CALLABLE)
3443 {
3444 callGroup = 1u;
3445 rgenModule = rgenPassthrough.get();
3446 callModule = shaderModule.get();
3447 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3448 rayTracingPipeline->addShader(VK_SHADER_STAGE_CALLABLE_BIT_KHR, callModule, callGroup);
3449 }
3450 else
3451 DE_ASSERT(false);
3452
3453 pipeline = rayTracingPipeline->createPipeline(vkd, device, pipelineLayout.get());
3454
3455 raygenSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, rgenGroup, 1u);
3456 raygenSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, raygenSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3457
3458 if (missGroup > 0u)
3459 {
3460 missSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, missGroup, 1u);
3461 missSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, missSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3462 }
3463
3464 if (hitGroup > 0u)
3465 {
3466 hitSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, hitGroup, 1u);
3467 hitSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, hitSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3468 }
3469
3470 if (callGroup > 0u)
3471 {
3472 callableSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, callGroup, 1u);
3473 callableSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, callableSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3474 }
3475 }
3476 else
3477 DE_ASSERT(false);
3478
3479 // Command buffer for the current iteration.
3480 const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3481 const auto cmdBuffer = cmdBufferPtr.get();
3482
3483 beginCommandBuffer(vkd, cmdBuffer);
3484
3485 const Step steps[] = {
3486 (updateAfterBind ? Step::BIND : Step::UPDATE),
3487 (updateAfterBind ? Step::UPDATE : Step::BIND)
3488 };
3489
3490 for (const auto& step : steps)
3491 {
3492 if (step == Step::BIND)
3493 {
3494 vkd.cmdBindPipeline(cmdBuffer, bindPoint, pipeline.get());
3495 vkd.cmdBindDescriptorSets(cmdBuffer, bindPoint, pipelineLayout.get(), 0u, static_cast<deUint32>(usedSets.size()), de::dataOrNull(usedSets), 0u, nullptr);
3496 }
3497 else // Step::UPDATE
3498 {
3499 if (srcSetNeeded)
3500 {
3501 // Note: these operations need to be called on paramSet and not iterationSrcSet. The latter is a
3502 // compatible set with correct, compatible bindings, but when a binding has been changed from
3503 // non-mutable to mutable or to an extended mutable type, the descriptor type lists of the mutable
3504 // bindings in iterationSrcSet are not in iteration order like they are in the original set, so they
3505 // must not be used when updating or copying sets.
3506 paramSet->updateDescriptorSet(vkd, device, srcSet.get(), iteration, resources);
3507 paramSet->copyDescriptorSet(vkd, device, srcSet.get(), dstSet.get());
3508 }
3509 else
3510 {
3511 paramSet->updateDescriptorSet(vkd, device, dstSet.get(), iteration, resources);
3512 }
3513 }
3514 }
3515
3516 // Run shader.
3517 vkd.cmdPushConstants(cmdBuffer, pipelineLayout.get(), stageFlags, 0u, static_cast<deUint32>(sizeof(zero)), &zero);
3518
3519 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3520 vkd.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
3521 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
3522 {
3523 const auto extent = getDefaultExtent();
3524 const auto renderArea = makeRect2D(extent);
3525
3526 beginRenderPass(vkd, cmdBuffer, renderPass.get(), framebuffer.get(), renderArea);
3527 vkd.cmdDraw(cmdBuffer, 3u, 1u, 0u, 0u);
3528 endRenderPass(vkd, cmdBuffer);
3529 }
3530 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3531 {
3532 vkd.cmdTraceRaysKHR(cmdBuffer, &raygenSBTRegion, &missSBTRegion, &hitSBTRegion, &callableSBTRegion, 1u, 1u, 1u);
3533 }
3534 else
3535 DE_ASSERT(false);
3536
3537 endCommandBuffer(vkd, cmdBuffer);
3538 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
3539
3540 // Verify output buffer.
3541 {
3542 const auto outputBufferVal = extraResources[0].getStoredValue(vkd, device, alloc, qIndex, queue, iteration);
3543 DE_ASSERT(static_cast<bool>(outputBufferVal));
3544
3545 const auto expectedValue = getExpectedOutputBufferValue();
3546 if (outputBufferVal.get() != expectedValue)
3547 {
3548 std::ostringstream msg;
3549 msg << "Iteration " << iteration << ": unexpected value found in output buffer (expected " << expectedValue << " and found " << outputBufferVal.get() << ")";
3550 TCU_FAIL(msg.str());
3551 }
3552 }
3553
3554 // Verify descriptor writes.
3555 {
3556 size_t resourcesOffset = 0;
3557 const auto writeMask = getStoredValueMask();
3558 const auto numBindings = paramSet->numBindings();
3559
3560 for (deUint32 bindingIdx = 0u; bindingIdx < numBindings; ++bindingIdx)
3561 {
3562 const auto binding = paramSet->getBinding(bindingIdx);
3563 const auto bindingTypes = binding->typesAtIteration(iteration);
3564
3565 for (size_t descriptorIdx = 0; descriptorIdx < bindingTypes.size(); ++descriptorIdx)
3566 {
3567 const auto& descriptorType = bindingTypes[descriptorIdx];
3568 if (!isShaderWritable(descriptorType))
3569 continue;
3570
3571 const auto& resource = resources[resourcesOffset + descriptorIdx];
3572 const auto initialValue = resource.initialValue;
3573 const auto storedValuePtr = resource.getStoredValue(vkd, device, alloc, qIndex, queue);
3574
3575 DE_ASSERT(static_cast<bool>(storedValuePtr));
3576 const auto storedValue = storedValuePtr.get();
3577 const auto expectedValue = (initialValue | writeMask);
3578 if (expectedValue != storedValue)
3579 {
3580 std::ostringstream msg;
3581 msg << "Iteration " << iteration << ": descriptor at binding " << bindingIdx << " index " << descriptorIdx
3582 << " with type " << de::toString(descriptorType) << " contains unexpected value " << std::hex
3583 << storedValue << " (expected " << expectedValue << ")";
3584 TCU_FAIL(msg.str());
3585 }
3586 }
3587
3588 resourcesOffset += bindingTypes.size();
3589 }
3590 }
3591 }
3592
3593 return tcu::TestStatus::pass("Pass");
3594 }
3595
3596 using GroupPtr = de::MovePtr<tcu::TestCaseGroup>;
3597
3598 void createMutableTestVariants (tcu::TestContext& testCtx, tcu::TestCaseGroup* parentGroup, const DescriptorSetPtr& descriptorSet, const std::vector<TestingStage>& stagesToTest)
3599 {
3600 const struct
3601 {
3602 UpdateType updateType;
3603 const char* name;
3604 } updateTypes[] = {
3605 {UpdateType::WRITE, "update_write"},
3606 {UpdateType::COPY, "update_copy"},
3607 };
3608
3609 const struct
3610 {
3611 SourceSetStrategy sourceSetStrategy;
3612 const char* name;
3613 } sourceStrategies[] = {
3614 {SourceSetStrategy::MUTABLE, "mutable_source"},
3615 {SourceSetStrategy::NONMUTABLE, "nonmutable_source"},
3616 {SourceSetStrategy::NO_SOURCE, "no_source"},
3617 };
3618
3619 const struct
3620 {
3621 SourceSetType sourceSetType;
3622 const char* name;
3623 } sourceTypes[] = {
3624 {SourceSetType::NORMAL, "normal_source"},
3625 {SourceSetType::HOST_ONLY, "host_only_source"},
3626 {SourceSetType::NO_SOURCE, "no_source"},
3627 };
3628
3629 const struct
3630 {
3631 PoolMutableStrategy poolMutableStrategy;
3632 const char* name;
3633 } poolStrategies[] = {
3634 {PoolMutableStrategy::KEEP_TYPES, "pool_same_types"},
3635 {PoolMutableStrategy::NO_TYPES, "pool_no_types"},
3636 {PoolMutableStrategy::EXPAND_TYPES, "pool_expand_types"},
3637 };
3638
3639 const struct
3640 {
3641 UpdateMoment updateMoment;
3642 const char* name;
3643 } updateMoments[] = {
3644 {UpdateMoment::NORMAL, "pre_update"},
3645 {UpdateMoment::UPDATE_AFTER_BIND, "update_after_bind"},
3646 };
3647
3648 const struct
3649 {
3650 ArrayAccessType arrayAccessType;
3651 const char* name;
3652 } arrayAccessTypes[] = {
3653 {ArrayAccessType::CONSTANT, "index_constant"},
3654 {ArrayAccessType::PUSH_CONSTANT, "index_push_constant"},
3655 {ArrayAccessType::NO_ARRAY, "no_array"},
3656 };
3657
3658 const struct StageAndName
3659 {
3660 TestingStage testingStage;
3661 const char* name;
3662 } testStageList[] = {
3663 {TestingStage::COMPUTE, "comp"},
3664 {TestingStage::VERTEX, "vert"},
3665 {TestingStage::TESS_CONTROL, "tesc"},
3666 {TestingStage::TESS_EVAL, "tese"},
3667 {TestingStage::GEOMETRY, "geom"},
3668 {TestingStage::FRAGMENT, "frag"},
3669 {TestingStage::RAY_GEN, "rgen"},
3670 {TestingStage::INTERSECTION, "isec"},
3671 {TestingStage::ANY_HIT, "ahit"},
3672 {TestingStage::CLOSEST_HIT, "chit"},
3673 {TestingStage::MISS, "miss"},
3674 {TestingStage::CALLABLE, "call"},
3675 };
3676
3677 const bool hasArrays = descriptorSet->hasArrays();
3678 const bool hasInputAttachments = usesInputAttachments(*descriptorSet);
3679
3680 for (const auto& ut : updateTypes)
3681 {
3682 GroupPtr updateGroup(new tcu::TestCaseGroup(testCtx, ut.name));
3683
3684 for (const auto& srcStrategy : sourceStrategies)
3685 {
3686 // Skip combinations that make no sense.
3687 if (ut.updateType == UpdateType::WRITE && srcStrategy.sourceSetStrategy != SourceSetStrategy::NO_SOURCE)
3688 continue;
3689
3690 if (ut.updateType == UpdateType::COPY && srcStrategy.sourceSetStrategy == SourceSetStrategy::NO_SOURCE)
3691 continue;
3692
3693 if (srcStrategy.sourceSetStrategy == SourceSetStrategy::NONMUTABLE && descriptorSet->needsAnyAliasing())
3694 continue;
3695
3696 GroupPtr srcStrategyGroup(new tcu::TestCaseGroup(testCtx, srcStrategy.name));
3697
3698 for (const auto& srcType : sourceTypes)
3699 {
3700 // Skip combinations that make no sense.
3701 if (ut.updateType == UpdateType::WRITE && srcType.sourceSetType != SourceSetType::NO_SOURCE)
3702 continue;
3703
3704 if (ut.updateType == UpdateType::COPY && srcType.sourceSetType == SourceSetType::NO_SOURCE)
3705 continue;
3706
3707 GroupPtr srcTypeGroup(new tcu::TestCaseGroup(testCtx, srcType.name));
3708
3709 for (const auto& poolStrategy : poolStrategies)
3710 {
3711 GroupPtr poolStrategyGroup(new tcu::TestCaseGroup(testCtx, poolStrategy.name));
3712
3713 for (const auto& moment : updateMoments)
3714 {
3715 //if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && srcType.sourceSetType == SourceSetType::HOST_ONLY)
3716 // continue;
3717
3718 if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && hasInputAttachments)
3719 continue;
3720
3721 GroupPtr momentGroup(new tcu::TestCaseGroup(testCtx, moment.name));
3722
3723 for (const auto& accessType : arrayAccessTypes)
3724 {
3725 // Skip combinations that make no sense.
3726 if (hasArrays && accessType.arrayAccessType == ArrayAccessType::NO_ARRAY)
3727 continue;
3728
3729 if (!hasArrays && accessType.arrayAccessType != ArrayAccessType::NO_ARRAY)
3730 continue;
3731
3732 GroupPtr accessTypeGroup(new tcu::TestCaseGroup(testCtx, accessType.name));
3733
3734 for (const auto& testStage : stagesToTest)
3735 {
3736 const auto beginItr = std::begin(testStageList);
3737 const auto endItr = std::end(testStageList);
3738 const auto iter = std::find_if(beginItr, endItr, [testStage] (const StageAndName& ts) { return ts.testingStage == testStage; });
3739
3740 DE_ASSERT(iter != endItr);
3741 const auto& stage = *iter;
3742
3743 if (hasInputAttachments && stage.testingStage != TestingStage::FRAGMENT)
3744 continue;
3745
3746 TestParams params = {
3747 descriptorSet,
3748 ut.updateType,
3749 srcStrategy.sourceSetStrategy,
3750 srcType.sourceSetType,
3751 poolStrategy.poolMutableStrategy,
3752 moment.updateMoment,
3753 accessType.arrayAccessType,
3754 stage.testingStage,
3755 };
3756
3757 accessTypeGroup->addChild(new MutableTypesTest(testCtx, stage.name, params));
3758 }
3759
3760 momentGroup->addChild(accessTypeGroup.release());
3761 }
3762
3763 poolStrategyGroup->addChild(momentGroup.release());
3764 }
3765
3766 srcTypeGroup->addChild(poolStrategyGroup.release());
3767 }
3768
3769 srcStrategyGroup->addChild(srcTypeGroup.release());
3770 }
3771
3772 updateGroup->addChild(srcStrategyGroup.release());
3773 }
3774
3775 parentGroup->addChild(updateGroup.release());
3776 }
3777 }
3778
3779 }
3780
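// Helper to derive a test group name from a descriptor type by dropping the enum prefix and
// lowercasing, e.g. VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER -> "uniform_buffer".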
3781 std::string descriptorTypeStr (VkDescriptorType descriptorType)
3782 {
3783 static const auto prefixLen = std::string("VK_DESCRIPTOR_TYPE_").size();
3784 return de::toLower(de::toString(descriptorType).substr(prefixLen));
3785 }
3786
3787 static void createChildren (tcu::TestCaseGroup* testGroup);
3788
3789 static void cleanupGroup (tcu::TestCaseGroup* testGroup)
3790 {
3791 DE_UNREF(testGroup);
3792 // Destroy singleton objects.
3793 g_singletonDevice.clear();
3794 }
3795
3796 tcu::TestCaseGroup* createDescriptorMutableTests (tcu::TestContext& testCtx)
3797 {
3798 return createTestGroup(testCtx, "mutable_descriptor", createChildren, cleanupGroup);
3799 }
3800
3801 void createChildren (tcu::TestCaseGroup* mainGroup)
3802 {
3803 tcu::TestContext& testCtx = mainGroup->getTestContext();
3804
3805 const VkDescriptorType basicDescriptorTypes[] = {
3806 VK_DESCRIPTOR_TYPE_SAMPLER,
3807 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
3808 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
3809 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
3810 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
3811 VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
3812 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
3813 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
3814 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
3815 VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
3816 };
3817
3818 static const auto mandatoryTypes = getMandatoryMutableTypes();
3819
3820 using StageVec = std::vector<TestingStage>;
3821
3822 const StageVec allStages =
3823 {
3824 TestingStage::COMPUTE,
3825 TestingStage::VERTEX,
3826 TestingStage::TESS_CONTROL,
3827 TestingStage::TESS_EVAL,
3828 TestingStage::GEOMETRY,
3829 TestingStage::FRAGMENT,
3830 TestingStage::RAY_GEN,
3831 TestingStage::INTERSECTION,
3832 TestingStage::ANY_HIT,
3833 TestingStage::CLOSEST_HIT,
3834 TestingStage::MISS,
3835 TestingStage::CALLABLE,
3836 };
3837
3838 const StageVec reducedStages =
3839 {
3840 TestingStage::COMPUTE,
3841 TestingStage::VERTEX,
3842 TestingStage::FRAGMENT,
3843 TestingStage::RAY_GEN,
3844 };
3845
3846 const StageVec computeOnly =
3847 {
3848 TestingStage::COMPUTE,
3849 };
3850
3851 // Basic tests with a single mutable descriptor.
3852 {
3853 GroupPtr singleCases(new tcu::TestCaseGroup(testCtx, "single"));
3854
3855 for (const auto& descriptorType : basicDescriptorTypes)
3856 {
3857 const auto groupName = descriptorTypeStr(descriptorType);
3858 const std::vector<VkDescriptorType> actualTypes(1u, descriptorType);
3859
3860 DescriptorSetPtr setPtr;
3861 {
3862 DescriptorSet::BindingPtrVector setBindings;
3863 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, actualTypes));
3864 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3865 }
3866
3867 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str()));
3868 createMutableTestVariants(testCtx, subGroup.get(), setPtr, allStages);
3869
3870 singleCases->addChild(subGroup.release());
3871 }
3872
3873 // Case with a single descriptor that iterates several types.
3874 {
3875 DescriptorSetPtr setPtr;
3876 {
3877 DescriptorSet::BindingPtrVector setBindings;
3878 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mandatoryTypes));
3879 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3880 }
3881
3882 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "all_mandatory"));
3883 createMutableTestVariants(testCtx, subGroup.get(), setPtr, reducedStages);
3884
3885 singleCases->addChild(subGroup.release());
3886 }
3887
3888 // Cases that try to verify switching from any descriptor type to any other is possible.
3889 {
3890 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "switches"));
3891
3892 for (const auto& initialDescriptorType : basicDescriptorTypes)
3893 {
3894 for (const auto& finalDescriptorType : basicDescriptorTypes)
3895 {
3896 if (initialDescriptorType == finalDescriptorType)
3897 continue;
3898
3899 const std::vector<VkDescriptorType> mutableTypes { initialDescriptorType, finalDescriptorType };
3900 DescriptorSet::BindingPtrVector setBindings;
3901 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mutableTypes));
3902
3903 DescriptorSetPtr setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3904
3905 const auto groupName = descriptorTypeStr(initialDescriptorType) + "_" + descriptorTypeStr(finalDescriptorType);
3906 GroupPtr combinationGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str()));
3907 createMutableTestVariants(testCtx, combinationGroup.get(), setPtr, reducedStages);
3908 subGroup->addChild(combinationGroup.release());
3909 }
3910 }
3911
3912 singleCases->addChild(subGroup.release());
3913 }
3914
3915 mainGroup->addChild(singleCases.release());
3916 }
3917
3918 // Cases with a single non-mutable descriptor. This provides some basic checks to verify copying to non-mutable bindings works.
3919 {
3920 GroupPtr singleNonMutableGroup (new tcu::TestCaseGroup(testCtx, "single_nonmutable"));
3921
3922 for (const auto& descriptorType : basicDescriptorTypes)
3923 {
3924 DescriptorSet::BindingPtrVector bindings;
3925 bindings.emplace_back(new SingleBinding(descriptorType, std::vector<VkDescriptorType>()));
3926 DescriptorSetPtr descriptorSet (new DescriptorSet(bindings));
3927
3928 const auto groupName = descriptorTypeStr(descriptorType);
3929 GroupPtr descGroup (new tcu::TestCaseGroup(testCtx, groupName.c_str()));
3930
3931 createMutableTestVariants(testCtx, descGroup.get(), descriptorSet, reducedStages);
3932 singleNonMutableGroup->addChild(descGroup.release());
3933 }
3934
3935 mainGroup->addChild(singleNonMutableGroup.release());
3936 }
3937
3938 const struct {
3939 bool unbounded;
3940 const char* name;
3941 } unboundedCases[] = {
3942 {false, "constant_size"},
3943 {true, "unbounded"},
3944 };
3945
3946 const struct {
3947 bool aliasing;
3948 const char* name;
3949 } aliasingCases[] = {
3950 {false, "noaliasing"},
3951 {true, "aliasing"},
3952 };
3953
3954 const struct {
3955 bool oneArrayOnly;
3956 bool mixNonMutable;
3957 const char* groupName;
3958 } arrayCountGroups[] = {
3959 // Tests using an array of mutable descriptors
3960 {true, false, "one_array"},
3961 // Tests using multiple arrays of mutable descriptors
3962 {false, false, "multiple_arrays"},
3963 // Tests using multiple arrays of mutable descriptors mixed with arrays of nonmutable ones
3964 {false, true, "multiple_arrays_mixed"},
3965 };
3966
3967 for (const auto& variant : arrayCountGroups)
3968 {
3969 GroupPtr arrayGroup(new tcu::TestCaseGroup(testCtx, variant.groupName));
3970
3971 for (const auto& unboundedCase : unboundedCases)
3972 {
3973 GroupPtr unboundedGroup(new tcu::TestCaseGroup(testCtx, unboundedCase.name));
3974
3975 for (const auto& aliasingCase : aliasingCases)
3976 {
3977 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name));
3978
3979 DescriptorSet::BindingPtrVector setBindings;
3980
3981 // Prepare descriptors for this test variant.
3982 for (size_t mandatoryTypesRotation = 0; mandatoryTypesRotation < mandatoryTypes.size(); ++mandatoryTypesRotation)
3983 {
3984 const bool isLastBinding = (variant.oneArrayOnly || mandatoryTypesRotation == mandatoryTypes.size() - 1u);
3985 const bool isUnbounded = (unboundedCase.unbounded && isLastBinding);
3986
3987 // Create a rotation of the mandatory types for each mutable array binding.
3988 auto mandatoryTypesVector = mandatoryTypes;
3989 {
3990 const auto beginPtr = &mandatoryTypesVector[0];
3991 const auto endPtr = beginPtr + mandatoryTypesVector.size();
3992 std::rotate(beginPtr, &mandatoryTypesVector[mandatoryTypesRotation], endPtr);
3993 }
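				// std::rotate makes the element at mandatoryTypesRotation the new front, e.g. rotating
				// {A, B, C, D} at index 1 yields {B, C, D, A}, so each successive array binding starts
				// its type sequence at a different mandatory type.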
3994
3995 std::vector<SingleBinding> arrayBindings;
3996
3997 if (aliasingCase.aliasing)
3998 {
3999 // With aliasing, the descriptor types rotate in each descriptor.
4000 for (size_t typeIdx = 0; typeIdx < mandatoryTypesVector.size(); ++typeIdx)
4001 {
4002 auto rotatedTypes = mandatoryTypesVector;
4003 const auto beginPtr = &rotatedTypes[0];
4004 const auto endPtr = beginPtr + rotatedTypes.size();
4005
4006 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
4007
4008 arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes);
4009 }
4010 }
4011 else
4012 {
4013 // Without aliasing, all descriptors use the same type at the same time.
4014 const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mandatoryTypesVector);
4015 arrayBindings.resize(mandatoryTypesVector.size(), noAliasingBinding);
4016 }
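				// Illustrative sketch of the difference for three types {A, B, C}: with aliasing the
				// array descriptors iterate as
				//	desc 0: A, B, C
				//	desc 1: B, C, A
				//	desc 2: C, A, B
				// so a given iteration mixes active types across the array, while without aliasing
				// every descriptor follows the same A, B, C sequence.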
4017
4018 setBindings.emplace_back(new ArrayBinding(isUnbounded, arrayBindings));
4019
4020 if (variant.mixNonMutable && !isUnbounded)
4021 {
4022 // Create a non-mutable array binding interleaved with the other ones.
4023 const SingleBinding nonMutableBinding(mandatoryTypes[mandatoryTypesRotation], std::vector<VkDescriptorType>());
4024 std::vector<SingleBinding> nonMutableBindings(mandatoryTypes.size(), nonMutableBinding);
4025 setBindings.emplace_back(new ArrayBinding(false, nonMutableBindings));
4026 }
4027
4028 if (variant.oneArrayOnly)
4029 break;
4030 }
4031
4032 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
4033 createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);
4034
4035 unboundedGroup->addChild(aliasingGroup.release());
4036 }
4037
4038 arrayGroup->addChild(unboundedGroup.release());
4039 }
4040
4041 mainGroup->addChild(arrayGroup.release());
4042 }
4043
4044 // Cases with a single mutable binding followed by an array of mutable bindings.
4045 // The array will use a single type beyond the mandatory ones.
4046 {
4047 GroupPtr singleAndArrayGroup(new tcu::TestCaseGroup(testCtx, "single_and_array"));
4048
4049 for (const auto& descriptorType : basicDescriptorTypes)
4050 {
4051 // Input attachments will not use arrays.
4052 if (descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
4053 continue;
4054
4055 if (de::contains(begin(mandatoryTypes), end(mandatoryTypes), descriptorType))
4056 continue;
4057
4058 const auto groupName = descriptorTypeStr(descriptorType);
4059 GroupPtr descTypeGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str()));
4060
4061 for (const auto& aliasingCase : aliasingCases)
4062 {
4063 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name));
4064
4065 DescriptorSet::BindingPtrVector setBindings;
4066 std::vector<SingleBinding> arrayBindings;
4067
4068 // Add single type beyond the mandatory ones.
4069 auto arrayBindingDescTypes = mandatoryTypes;
4070 arrayBindingDescTypes.push_back(descriptorType);
4071
4072 // Single mutable descriptor as the first binding.
4073 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, arrayBindingDescTypes));
4074
4075 // Descriptor array as the second binding.
4076 if (aliasingCase.aliasing)
4077 {
4078 // With aliasing, the descriptor types rotate in each descriptor.
4079 for (size_t typeIdx = 0; typeIdx < arrayBindingDescTypes.size(); ++typeIdx)
4080 {
4081 auto rotatedTypes = arrayBindingDescTypes;
4082 const auto beginPtr = &rotatedTypes[0];
4083 const auto endPtr = beginPtr + rotatedTypes.size();
4084
4085 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
4086
4087 arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes);
4088 }
4089 }
4090 else
4091 {
4092 // Without aliasing, all descriptors use the same type at the same time.
4093 const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, arrayBindingDescTypes);
4094 arrayBindings.resize(arrayBindingDescTypes.size(), noAliasingBinding);
4095 }
4096
4097 // Second binding: array binding.
4098 setBindings.emplace_back(new ArrayBinding(false/*unbounded*/, arrayBindings));
4099
4100 // Create set and test variants.
4101 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
4102 createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);
4103
4104 descTypeGroup->addChild(aliasingGroup.release());
4105 }
4106
4107 singleAndArrayGroup->addChild(descTypeGroup.release());
4108 }
4109
4110 mainGroup->addChild(singleAndArrayGroup.release());
4111 }
4112
4113 // Cases with several mutable non-array bindings.
4114 {
4115 GroupPtr multipleGroup (new tcu::TestCaseGroup(testCtx, "multiple"));
4116 GroupPtr mutableOnlyGroup (new tcu::TestCaseGroup(testCtx, "mutable_only"));
4117 GroupPtr mixedGroup (new tcu::TestCaseGroup(testCtx, "mixed"));
4118
4119 // Each descriptor will have a different type in every iteration, like in the one_array aliasing case.
4120 for (int groupIdx = 0; groupIdx < 2; ++groupIdx)
4121 {
4122 const bool mixed = (groupIdx == 1);
4123 DescriptorSet::BindingPtrVector setBindings;
4124
4125 for (size_t typeIdx = 0; typeIdx < mandatoryTypes.size(); ++typeIdx)
4126 {
4127 auto rotatedTypes = mandatoryTypes;
4128 const auto beginPtr = &rotatedTypes[0];
4129 const auto endPtr = beginPtr + rotatedTypes.size();
4130
4131 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
4132 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes));
4133
4134 // Additional non-mutable binding interleaved with the mutable ones.
4135 if (mixed)
4136 setBindings.emplace_back(new SingleBinding(rotatedTypes[0], std::vector<VkDescriptorType>()));
4137 }
4138 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
4139
4140 const auto dstGroup = (mixed ? mixedGroup.get() : mutableOnlyGroup.get());
4141 createMutableTestVariants(testCtx, dstGroup, descriptorSet, computeOnly);
4142 }
4143
4144 multipleGroup->addChild(mutableOnlyGroup.release());
4145 multipleGroup->addChild(mixedGroup.release());
4146 mainGroup->addChild(multipleGroup.release());
4147 }
4148 }
4149
4150 } // BindingModel
4151 } // vkt
4152