• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*------------------------------------------------------------------------
2  * Vulkan Conformance Tests
3  * ------------------------
4  *
5  * Copyright (c) 2022 The Khronos Group Inc.
6  * Copyright (C) 2022 Advanced Micro Devices, Inc.
7  *
8  * Licensed under the Apache License, Version 2.0 (the "License");
9  * you may not use this file except in compliance with the License.
10  * You may obtain a copy of the License at
11  *
12  *      http://www.apache.org/licenses/LICENSE-2.0
13  *
14  * Unless required by applicable law or agreed to in writing, software
15  * distributed under the License is distributed on an "AS IS" BASIS,
16  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17  * See the License for the specific language governing permissions and
18  * limitations under the License.
19  *
20  *//*!
21  * \file vktBindingDescriptorBufferTests.cpp
22  * \brief Descriptor buffer (extension) tests
23  *//*--------------------------------------------------------------------*/
24 
25 #include "deSharedPtr.hpp"
26 #include "deUniquePtr.hpp"
27 #include "deRandom.hpp"
28 #include "tcuCommandLine.hpp"
29 #include "vktBindingDescriptorBufferTests.hpp"
30 #include "vktTestCaseUtil.hpp"
31 #include "vktTestGroupUtil.hpp"
32 #include "vktCustomInstancesDevices.hpp"
33 #include "vkBuilderUtil.hpp"
34 #include "vkCmdUtil.hpp"
35 #include "vkMemUtil.hpp"
36 #include "vkObjUtil.hpp"
37 #include "vkQueryUtil.hpp"
38 #include "vkRefUtil.hpp"
39 #include "vkStrUtil.hpp"
40 #include "vkTypeUtil.hpp"
41 #include "vkImageUtil.hpp"
42 #include "vkRayTracingUtil.hpp"
43 
44 #include <algorithm>
45 
46 // The defines below can be changed for debugging purposes, otherwise keep them as is.
47 
48 #define DEBUG_FORCE_STAGED_UPLOAD false         // false - prefer direct write to device-local memory
49 #define DEBUG_MIX_DIRECT_AND_STAGED_UPLOAD true // true  - use some staged uploads to test new access flag
50 
51 // Workaround a framework script bug.
52 #ifndef VK_PIPELINE_STAGE_2_TRANSFER_BIT
53 #define VK_PIPELINE_STAGE_2_TRANSFER_BIT VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR
54 #endif
55 
56 namespace vkt
57 {
58 namespace BindingModel
59 {
60 namespace
61 {
62 using namespace vk;
63 using de::MovePtr;
64 using de::SharedPtr;
65 using de::UniquePtr;
66 
67 constexpr uint32_t INDEX_INVALID    = ~0u;
68 constexpr uint32_t OFFSET_UNUSED    = ~0u;
69 constexpr uint32_t HASH_MASK_FOR_AS = (1u << 19) - 1;
70 
71 constexpr uint32_t ConstResultBufferDwords     = 0x4;    // uvec4
72 constexpr uint32_t ConstInlineBlockDwords      = 0x40;   // 256 B spec minimum
73 constexpr uint32_t ConstUniformBufferDwords    = 0x1000; // 16 KiB spec minimum
74 constexpr uint32_t ConstTexelBufferElements    = 512;
75 constexpr uint32_t ConstMaxDescriptorArraySize = 3;   // at most define N-element descriptor arrays
76 constexpr uint32_t ConstRobustBufferAlignment  = 256; // 256 is the worst-case alignment required by UBOs in robustness2
77 constexpr uint32_t ConstChecksPerBuffer        = 4;   // when verifying data in buffers, do at most N comparisons;
78                                                       // this is to avoid excessive shader execution time
79 
80 constexpr VkComponentMapping ComponentMappingIdentity = {
81     VK_COMPONENT_SWIZZLE_IDENTITY,
82     VK_COMPONENT_SWIZZLE_IDENTITY,
83     VK_COMPONENT_SWIZZLE_IDENTITY,
84     VK_COMPONENT_SWIZZLE_IDENTITY,
85 };
86 
87 template <typename T>
u32(const T & value)88 inline uint32_t u32(const T &value)
89 {
90     return static_cast<uint32_t>(value);
91 }
92 
93 template <typename T, typename... args_t>
newMovePtr(args_t &&...args)94 inline de::MovePtr<T> newMovePtr(args_t &&...args)
95 {
96     return de::MovePtr<T>(new T(::std::forward<args_t>(args)...));
97 }
98 
99 template <typename T>
reset(Move<T> & ptr)100 inline void reset(Move<T> &ptr)
101 {
102     ptr = Move<T>();
103 }
104 
105 template <typename T>
reset(MovePtr<T> & ptr)106 inline void reset(MovePtr<T> &ptr)
107 {
108     ptr.clear();
109 }
110 
111 template <typename T>
makeSharedUniquePtr()112 inline SharedPtr<UniquePtr<T>> makeSharedUniquePtr()
113 {
114     return SharedPtr<UniquePtr<T>>(new UniquePtr<T>(new T()));
115 }
116 
offsetPtr(void * ptr,VkDeviceSize offset)117 inline void *offsetPtr(void *ptr, VkDeviceSize offset)
118 {
119     return reinterpret_cast<char *>(ptr) + offset;
120 }
121 
offsetPtr(const void * ptr,VkDeviceSize offset)122 inline const void *offsetPtr(const void *ptr, VkDeviceSize offset)
123 {
124     return reinterpret_cast<const char *>(ptr) + offset;
125 }
126 
127 // Calculate the byte offset of ptr from basePtr.
128 // This can be useful if an object at ptr is suballocated from a larger allocation at basePtr, for example.
basePtrOffsetOf(const void * basePtr,const void * ptr)129 inline std::size_t basePtrOffsetOf(const void *basePtr, const void *ptr)
130 {
131     DE_ASSERT(basePtr <= ptr);
132     return static_cast<std::size_t>(static_cast<const uint8_t *>(ptr) - static_cast<const uint8_t *>(basePtr));
133 }
134 
getShaderGroupHandleSize(const InstanceInterface & vki,const VkPhysicalDevice physicalDevice)135 uint32_t getShaderGroupHandleSize(const InstanceInterface &vki, const VkPhysicalDevice physicalDevice)
136 {
137     de::MovePtr<RayTracingProperties> rayTracingPropertiesKHR;
138 
139     rayTracingPropertiesKHR = makeRayTracingProperties(vki, physicalDevice);
140 
141     return rayTracingPropertiesKHR->getShaderGroupHandleSize();
142 }
143 
getShaderGroupBaseAlignment(const InstanceInterface & vki,const VkPhysicalDevice physicalDevice)144 uint32_t getShaderGroupBaseAlignment(const InstanceInterface &vki, const VkPhysicalDevice physicalDevice)
145 {
146     de::MovePtr<RayTracingProperties> rayTracingPropertiesKHR;
147 
148     rayTracingPropertiesKHR = makeRayTracingProperties(vki, physicalDevice);
149 
150     return rayTracingPropertiesKHR->getShaderGroupBaseAlignment();
151 }
152 
getVkBuffer(const de::MovePtr<BufferWithMemory> & buffer)153 VkBuffer getVkBuffer(const de::MovePtr<BufferWithMemory> &buffer)
154 {
155     VkBuffer result = (buffer.get() == nullptr) ? VK_NULL_HANDLE : buffer->get();
156 
157     return result;
158 }
159 
makeStridedDeviceAddressRegion(const DeviceInterface & vkd,const VkDevice device,VkBuffer buffer,VkDeviceSize size)160 VkStridedDeviceAddressRegionKHR makeStridedDeviceAddressRegion(const DeviceInterface &vkd, const VkDevice device,
161                                                                VkBuffer buffer, VkDeviceSize size)
162 {
163     const VkDeviceSize sizeFixed = ((buffer == VK_NULL_HANDLE) ? 0ull : size);
164 
165     return makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, buffer, 0), sizeFixed, sizeFixed);
166 }
167 
getAccelerationStructureDeviceAddress(DeviceDriver & deviceDriver,VkDevice device,VkAccelerationStructureKHR accelerationStructure)168 VkDeviceAddress getAccelerationStructureDeviceAddress(DeviceDriver &deviceDriver, VkDevice device,
169                                                       VkAccelerationStructureKHR accelerationStructure)
170 {
171     const VkAccelerationStructureDeviceAddressInfoKHR addressInfo = {
172         VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, // VkStructureType                sType
173         nullptr,                                                          // const void*                    pNext
174         accelerationStructure // VkAccelerationStructureKHR    accelerationStructure
175     };
176     const VkDeviceAddress deviceAddress = deviceDriver.getAccelerationStructureDeviceAddressKHR(device, &addressInfo);
177 
178     DE_ASSERT(deviceAddress != 0);
179 
180     return deviceAddress;
181 }
182 
183 // Used to distinguish different test implementations.
184 enum class TestVariant : uint32_t
185 {
186     SINGLE,                      // basic quick check for descriptor/shader combinations
187     MULTIPLE,                    // multiple buffer bindings with various descriptor types
188     MAX,                         // verify max(Sampler/Resource)DescriptorBufferBindings
189     EMBEDDED_IMMUTABLE_SAMPLERS, // various usages of embedded immutable samplers
190     PUSH_DESCRIPTOR,             // use push descriptors and descriptor buffer at the same time
191     PUSH_TEMPLATE,               // use push descriptor template and descriptor buffer at the same time
192     ROBUST_BUFFER_ACCESS,        // robust buffer access
193     ROBUST_NULL_DESCRIPTOR,      // robustness2 with null descriptor
194     CAPTURE_REPLAY,              // capture and replay capability with descriptor buffers
195     MUTABLE_DESCRIPTOR_TYPE,     // use VK_EXT_mutable_descriptor_type
196     YCBCR_SAMPLER,               // use VK_KHR_sampler_ycbcr_conversion
197 };
198 
199 // Optional; Used to add variations for a specific test case.
200 enum class SubCase : uint32_t
201 {
202     NONE,                               // no sub case, i.e. a baseline test case
203     IMMUTABLE_SAMPLERS,                 // treat all samplers as immutable
204     CAPTURE_REPLAY_CUSTOM_BORDER_COLOR, // in capture/replay tests, test VK_EXT_custom_border_color interaction
205     SINGLE_BUFFER,       // use push descriptors and descriptor buffer at the same time using single buffer
206     YCBCR_SAMPLER_ARRAY, // a more complex case with arrayed combined image samplers
207 };
208 
209 // Indicates residency of resource on GPU memory
210 enum class ResourceResidency : uint32_t
211 {
212     TRADITIONAL,      // descriptor buffer resource bound to memory in traditional way
213     SPARSE_BINDING,   // descriptor buffer is sparse resource fully bound to memory
214     SPARSE_RESIDENCY, // descriptor buffer is sparse resource not fully bound to memory
215 };
216 
217 // A simplified descriptor binding, used to define the test case behavior at a high level.
218 struct SimpleBinding
219 {
220     uint32_t set;
221     uint32_t binding;
222     VkDescriptorType type;
223     uint32_t count;
224     uint32_t inputAttachmentIndex;
225 
226     bool isResultBuffer;             // binding used for compute buffer results
227     bool isEmbeddedImmutableSampler; // binding used as immutable embedded sampler
228     bool isRayTracingAS;             // binding used for raytracing acceleration structure
229 };
230 
231 // Scan simple bindings for the binding with the compute and ray tracing shader's result storage buffer.
getResultBufferIndex(const std::vector<SimpleBinding> & simpleBindings)232 uint32_t getResultBufferIndex(const std::vector<SimpleBinding> &simpleBindings)
233 {
234     bool found                 = false;
235     uint32_t resultBufferIndex = 0;
236 
237     for (const auto &sb : simpleBindings)
238     {
239         if (sb.isResultBuffer)
240         {
241             found = true;
242 
243             break;
244         }
245 
246         ++resultBufferIndex;
247     }
248 
249     if (!found)
250     {
251         resultBufferIndex = INDEX_INVALID;
252     }
253 
254     return resultBufferIndex;
255 }
256 
257 // Scan simple bindings for the binding with the ray tracing acceleration structure
getRayTracingASIndex(const std::vector<SimpleBinding> & simpleBindings)258 uint32_t getRayTracingASIndex(const std::vector<SimpleBinding> &simpleBindings)
259 {
260     uint32_t ndx    = 0;
261     uint32_t result = INDEX_INVALID;
262 
263     for (const auto &sb : simpleBindings)
264     {
265         if (sb.isRayTracingAS)
266         {
267             result = ndx;
268 
269             break;
270         }
271 
272         ++ndx;
273     }
274 
275     DE_ASSERT(result != INDEX_INVALID);
276 
277     return result;
278 }
279 
280 // A mask of descriptor types, with opaque mapping of VkDescriptorType to bits.
281 // Use the provided functions to get/set bits.
282 typedef uint32_t DescriptorMask;
283 
maskCheck(DescriptorMask mask,VkDescriptorType type)284 inline bool maskCheck(DescriptorMask mask, VkDescriptorType type)
285 {
286     DE_ASSERT(static_cast<uint32_t>(type) < 32);
287     return (mask & (1u << static_cast<uint32_t>(type))) != 0;
288 }
289 
maskSet(DescriptorMask * pMask,VkDescriptorType type)290 inline void maskSet(DescriptorMask *pMask, VkDescriptorType type)
291 {
292     DE_ASSERT(static_cast<uint32_t>(type) < 32);
293     *pMask |= (1u << static_cast<uint32_t>(type));
294 }
295 
makeDescriptorMask(const std::initializer_list<VkDescriptorType> & types)296 DescriptorMask makeDescriptorMask(const std::initializer_list<VkDescriptorType> &types)
297 {
298     DescriptorMask mask = 0u;
299     for (const auto &t : types)
300     {
301         maskSet(&mask, t);
302     }
303     return mask;
304 }
305 
getDescriptorMaskTypes(DescriptorMask inputMask)306 std::vector<VkDescriptorType> getDescriptorMaskTypes(DescriptorMask inputMask)
307 {
308     static const VkDescriptorType consideredTypes[]{
309         VK_DESCRIPTOR_TYPE_SAMPLER,
310         VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
311         VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
312         VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
313         VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
314         VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
315         VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
316         VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
317         VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
318     };
319 
320     std::vector<VkDescriptorType> types;
321 
322     for (const auto &type : consideredTypes)
323     {
324         uint32_t typeMask = 0u;
325         maskSet(&typeMask, type);
326 
327         if ((inputMask & typeMask) != 0)
328         {
329             types.emplace_back(type);
330             inputMask &= ~typeMask; // clear the bit corresponding to this descriptor type
331         }
332 
333         if (inputMask == 0)
334         {
335             // Early exit
336             break;
337         }
338     }
339 
340     // Ensure that all bits were accounted for
341     DE_ASSERT(inputMask == 0);
342 
343     return types;
344 }
345 
346 // The parameters for a test case (with the exclusion of simple bindings).
347 // Not all values are used by every test variant.
348 struct TestParams
349 {
350     uint32_t hash;               // a value used to "salt" results in memory to get unique values per test case
351     TestVariant variant;         // general type of the test case
352     SubCase subcase;             // a variation of the specific test case
353     VkShaderStageFlagBits stage; // which shader makes use of the bindings
354     VkQueueFlagBits queue;       // which queue to use for the access
355     uint32_t bufferBindingCount; // number of buffer bindings to create
356     uint32_t setsPerBuffer;      // how may sets to put in one buffer binding
357     bool useMaintenance5;        // should we use VkPipelineCreateFlagBits2KHR
358 
359     // Basic, null descriptor, capture/replay, or ycbcr sampler test
360     VkDescriptorType descriptor; // descriptor type under test
361 
362     // Max bindings test and to check the supported limits in other cases
363     uint32_t samplerBufferBindingCount;
364     uint32_t resourceBufferBindingCount;
365 
366     // Max embedded immutable samplers test
367     uint32_t embeddedImmutableSamplerBufferBindingCount;
368     uint32_t embeddedImmutableSamplersPerBuffer;
369 
370     // Push descriptors
371     uint32_t pushDescriptorSetIndex; // which descriptor set is updated with push descriptor/template
372 
373     // Mutable descriptor type
374     DescriptorMask mutableDescriptorTypes; // determines the descriptor types for VkMutableDescriptorTypeListEXT
375 
376     bool commands2; // Use vkCmd* commands from VK_KHR_maintenance6
377 
378     ResourceResidency resourceResidency; // Create descriptor buffer as sparse resource
379 
isComputevkt::BindingModel::__anon3929bd8b0111::TestParams380     bool isCompute() const
381     {
382         return stage == VK_SHADER_STAGE_COMPUTE_BIT;
383     }
384 
isGraphicsvkt::BindingModel::__anon3929bd8b0111::TestParams385     bool isGraphics() const
386     {
387         return (stage & VK_SHADER_STAGE_ALL_GRAPHICS) != 0;
388     }
389 
isGeometryvkt::BindingModel::__anon3929bd8b0111::TestParams390     bool isGeometry() const
391     {
392         return stage == VK_SHADER_STAGE_GEOMETRY_BIT;
393     }
394 
isTessellationvkt::BindingModel::__anon3929bd8b0111::TestParams395     bool isTessellation() const
396     {
397         return (stage & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)) != 0;
398     }
399 
isPushDescriptorTestvkt::BindingModel::__anon3929bd8b0111::TestParams400     bool isPushDescriptorTest() const
401     {
402         return (variant == TestVariant::PUSH_DESCRIPTOR) || (variant == TestVariant::PUSH_TEMPLATE);
403     }
404 
isAccelerationStructurevkt::BindingModel::__anon3929bd8b0111::TestParams405     bool isAccelerationStructure() const
406     {
407         return descriptor == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
408     }
409 
isRayTracingvkt::BindingModel::__anon3929bd8b0111::TestParams410     bool isRayTracing() const
411     {
412         return isAllRayTracingStages(stage);
413     }
414 
415     // The resource accessed via this descriptor type has capture/replay enabled.
isCaptureReplayDescriptorvkt::BindingModel::__anon3929bd8b0111::TestParams416     bool isCaptureReplayDescriptor(VkDescriptorType otherType) const
417     {
418         return (variant == TestVariant::CAPTURE_REPLAY) && (descriptor == otherType);
419     }
420 
isAccelerationStructureOptionalvkt::BindingModel::__anon3929bd8b0111::TestParams421     bool isAccelerationStructureOptional() const
422     {
423         switch (variant)
424         {
425         case TestVariant::MULTIPLE:
426         case TestVariant::PUSH_DESCRIPTOR:
427         case TestVariant::PUSH_TEMPLATE:
428         case TestVariant::MUTABLE_DESCRIPTOR_TYPE:
429             return true;
430         default:
431             return false;
432         }
433     }
434 
isAccelerationStructureObligatoryvkt::BindingModel::__anon3929bd8b0111::TestParams435     bool isAccelerationStructureObligatory() const
436     {
437         switch (variant)
438         {
439         case TestVariant::SINGLE:
440         case TestVariant::ROBUST_NULL_DESCRIPTOR:
441         case TestVariant::CAPTURE_REPLAY:
442             return isAccelerationStructure();
443         default:
444             return false;
445         }
446     }
447 
448     // Update the hash field. Must be called after changing the value of any other parameters.
updateHashvkt::BindingModel::__anon3929bd8b0111::TestParams449     void updateHash(uint32_t basehash)
450     {
451         hash = deUint32Hash(basehash);
452 
453         hash = isAccelerationStructure() ? (basehash & HASH_MASK_FOR_AS) : basehash;
454     }
455 };
456 
457 // A convenience holder for a buffer-related data.
458 struct BufferAlloc
459 {
460     VkDeviceSize size             = 0;
461     VkDeviceAddress deviceAddress = 0; // non-zero if used
462     VkBufferUsageFlags usage      = 0;
463     uint64_t opaqueCaptureAddress = 0;
464 
465     Move<VkBuffer> buffer;
466     MovePtr<Allocation> alloc;
467 
468     BufferAlloc()              = default;
469     BufferAlloc(BufferAlloc &) = delete;
470 
loadDeviceAddressvkt::BindingModel::__anon3929bd8b0111::BufferAlloc471     void loadDeviceAddress(const DeviceInterface &vk, VkDevice device)
472     {
473         VkBufferDeviceAddressInfo bdaInfo = initVulkanStructure();
474         bdaInfo.buffer                    = *buffer;
475 
476         deviceAddress = vk.getBufferDeviceAddress(device, &bdaInfo);
477     }
478 };
479 
480 using BufferAllocPtr = SharedPtr<BufferAlloc>;
481 
482 // A convenience holder for image-related data.
483 struct ImageAlloc
484 {
485     VkImageCreateInfo info        = {};
486     VkDeviceSize sizeBytes        = 0;
487     VkImageLayout layout          = VK_IMAGE_LAYOUT_UNDEFINED; // layout used when image is accessed
488     uint64_t opaqueCaptureAddress = 0;
489 
490     Move<VkImage> image;
491     Move<VkImageView> imageView;
492     MovePtr<Allocation> alloc;
493 
494     ImageAlloc()             = default;
495     ImageAlloc(ImageAlloc &) = delete;
496 };
497 
498 using ImageAllocPtr = SharedPtr<ImageAlloc>;
499 
500 // A descriptor binding with supporting data.
501 class Binding
502 {
503 public:
504     uint32_t binding;
505     VkDescriptorType descriptorType; // always the actual descriptor type (i.e. even with mutable)
506     uint32_t descriptorCount;
507     VkShaderStageFlags stageFlags;
508 
509     VkDeviceSize offset;
510     uint32_t inputAttachmentIndex; // if used
511     bool isResultBuffer;           // used with compute shaders
512     bool isRayTracingAS;           // used with raytracing shaders
513     bool isMutableType;            // used with MUTABLE_DESCRIPTOR_TYPE cases
514 
isTestableDescriptor() const515     bool isTestableDescriptor() const
516     {
517         return !isRayTracingAS && !isResultBuffer;
518     }
519 
520     // Index into the vector of resources in the main test class, if used.
521     // It's an array, because a binding may have several arrayed descriptors.
522     uint32_t perBindingResourceIndex[ConstMaxDescriptorArraySize];
523 
524     // An array of immutable samplers, if used by the binding.
525     VkSampler immutableSamplers[ConstMaxDescriptorArraySize];
526 
Binding()527     Binding()
528         : binding(0)
529         , descriptorType(VK_DESCRIPTOR_TYPE_SAMPLER)
530         , descriptorCount(0)
531         , stageFlags(0)
532         , offset(0)
533         , inputAttachmentIndex(0)
534         , isResultBuffer(false)
535         , isRayTracingAS(false)
536         , isMutableType(false)
537     {
538         for (uint32_t i = 0; i < DE_LENGTH_OF_ARRAY(perBindingResourceIndex); ++i)
539         {
540             perBindingResourceIndex[i] = INDEX_INVALID;
541             immutableSamplers[i]       = VK_NULL_HANDLE;
542         }
543     }
544 };
545 
546 // Get an array of descriptor bindings, this is used in descriptor set layout creation.
getDescriptorSetLayoutBindings(const std::vector<Binding> & allBindings)547 std::vector<VkDescriptorSetLayoutBinding> getDescriptorSetLayoutBindings(const std::vector<Binding> &allBindings)
548 {
549     std::vector<VkDescriptorSetLayoutBinding> result;
550     result.reserve(allBindings.size());
551 
552     for (auto &binding : allBindings)
553     {
554         VkDescriptorSetLayoutBinding dslBinding{};
555         dslBinding.binding         = binding.binding;
556         dslBinding.descriptorType  = binding.descriptorType;
557         dslBinding.descriptorCount = binding.descriptorCount;
558         dslBinding.stageFlags      = binding.stageFlags;
559 
560         if (binding.immutableSamplers[0] != VK_NULL_HANDLE)
561         {
562             dslBinding.pImmutableSamplers = binding.immutableSamplers;
563         }
564 
565         if (binding.isMutableType)
566         {
567             dslBinding.descriptorType = VK_DESCRIPTOR_TYPE_MUTABLE_EXT;
568         }
569 
570         DE_ASSERT(dslBinding.descriptorCount != 0);
571         DE_ASSERT(dslBinding.stageFlags != 0);
572 
573         result.emplace_back(dslBinding);
574     }
575 
576     return result;
577 }
578 
579 // Descriptor data used with push descriptors (regular and templates).
580 struct PushDescriptorData
581 {
582     VkDescriptorImageInfo imageInfos[ConstMaxDescriptorArraySize];
583     VkDescriptorBufferInfo bufferInfos[ConstMaxDescriptorArraySize];
584     VkBufferView texelBufferViews[ConstMaxDescriptorArraySize];
585     VkAccelerationStructureKHR accelerationStructures[ConstMaxDescriptorArraySize];
586 };
587 
588 // A convenience holder for a descriptor set layout and its bindings.
589 struct DescriptorSetLayoutHolder
590 {
591     std::vector<Binding> bindings;
592 
593     Move<VkDescriptorSetLayout> layout;
594     VkDeviceSize sizeOfLayout         = 0;
595     uint32_t bufferIndex              = INDEX_INVALID;
596     VkDeviceSize bufferOffset         = 0;
597     VkDeviceSize stagingBufferOffset  = OFFSET_UNUSED;
598     bool hasEmbeddedImmutableSamplers = false;
599     bool usePushDescriptors           = false; // instead of descriptor buffer
600 
601     DescriptorSetLayoutHolder()                            = default;
602     DescriptorSetLayoutHolder(DescriptorSetLayoutHolder &) = delete;
603 };
604 
605 using DSLPtr = SharedPtr<UniquePtr<DescriptorSetLayoutHolder>>;
606 
607 // Get an array of descriptor set layouts.
getDescriptorSetLayouts(const std::vector<DSLPtr> & dslPtrs)608 std::vector<VkDescriptorSetLayout> getDescriptorSetLayouts(const std::vector<DSLPtr> &dslPtrs)
609 {
610     std::vector<VkDescriptorSetLayout> result;
611     result.reserve(dslPtrs.size());
612 
613     for (auto &pDsl : dslPtrs)
614     {
615         result.emplace_back((**pDsl).layout.get());
616     }
617 
618     return result;
619 }
620 
621 // A helper struct to keep descriptor's underlying resource data.
622 // This is intended to be flexible and support a mix of buffer/image/sampler, depending on the binding type.
623 struct ResourceHolder
624 {
625     BufferAlloc buffer;
626     ImageAlloc image;
627     Move<VkSampler> sampler;
628     Move<VkSamplerYcbcrConversion> samplerYcbcrConversion;
629     Move<VkBufferView> bufferView;
630     SharedPtr<BottomLevelAccelerationStructure> rtBlas;
631     MovePtr<TopLevelAccelerationStructure> rtTlas;
632 
633     struct
634     {
635         std::vector<uint8_t> bufferData;
636         std::vector<uint8_t> imageData;
637         std::vector<uint8_t> imageViewData;
638         std::vector<uint8_t> samplerData;
639         std::vector<uint8_t> accelerationStructureDataBlas;
640         std::vector<uint8_t> accelerationStructureDataTlas;
641     } captureReplay;
642 
643     ResourceHolder()                 = default;
644     ResourceHolder(ResourceHolder &) = delete;
645 };
646 
647 using ResourcePtr = SharedPtr<UniquePtr<ResourceHolder>>;
648 
649 // Used in test case name generation.
toString(VkQueueFlagBits queue)650 std::string toString(VkQueueFlagBits queue)
651 {
652     switch (queue)
653     {
654     case VK_QUEUE_GRAPHICS_BIT:
655         return "graphics";
656     case VK_QUEUE_COMPUTE_BIT:
657         return "compute";
658 
659     default:
660         DE_ASSERT(false);
661         break;
662     }
663     return "";
664 }
665 
666 // Used in test case name generation.
toString(VkDescriptorType type)667 std::string toString(VkDescriptorType type)
668 {
669     switch (type)
670     {
671     case VK_DESCRIPTOR_TYPE_SAMPLER:
672         return "sampler";
673     case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
674         return "combined_image_sampler";
675     case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
676         return "sampled_image";
677     case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
678         return "storage_image";
679     case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
680         return "uniform_texel_buffer";
681     case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
682         return "storage_texel_buffer";
683     case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
684         return "uniform_buffer";
685     case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
686         return "storage_buffer";
687     case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
688         return "input_attachment";
689     case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
690         return "inline_uniform_block";
691     case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
692         return "acceleration_structure";
693 
694     default:
695         DE_ASSERT(false);
696         break;
697     }
698     return "";
699 }
700 
701 // Used in test case name generation.
toString(VkShaderStageFlagBits stage)702 std::string toString(VkShaderStageFlagBits stage)
703 {
704     switch (stage)
705     {
706     case VK_SHADER_STAGE_VERTEX_BIT:
707         return "vert";
708     case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
709         return "tesc";
710     case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
711         return "tese";
712     case VK_SHADER_STAGE_GEOMETRY_BIT:
713         return "geom";
714     case VK_SHADER_STAGE_FRAGMENT_BIT:
715         return "frag";
716     case VK_SHADER_STAGE_COMPUTE_BIT:
717         return "comp";
718     case VK_SHADER_STAGE_RAYGEN_BIT_KHR:
719         return "rgen";
720     case VK_SHADER_STAGE_ANY_HIT_BIT_KHR:
721         return "ahit";
722     case VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR:
723         return "chit";
724     case VK_SHADER_STAGE_MISS_BIT_KHR:
725         return "miss";
726     case VK_SHADER_STAGE_INTERSECTION_BIT_KHR:
727         return "sect";
728     case VK_SHADER_STAGE_CALLABLE_BIT_KHR:
729         return "call";
730 
731     default:
732         DE_ASSERT(false);
733         break;
734     }
735 
736     return "";
737 }
738 
739 // Used in test case name generation.
getCaseNameUpdateHash(TestParams & params,uint32_t baseHash)740 std::string getCaseNameUpdateHash(TestParams &params, uint32_t baseHash)
741 {
742     std::ostringstream str;
743 
744     str << toString(params.queue) << "_" << toString(params.stage);
745 
746     if ((params.variant == TestVariant::SINGLE) || (params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR) ||
747         (params.variant == TestVariant::CAPTURE_REPLAY))
748     {
749         str << "_" << toString(params.descriptor);
750 
751         if (params.subcase == SubCase::CAPTURE_REPLAY_CUSTOM_BORDER_COLOR)
752         {
753             str << "_custom_border_color";
754         }
755     }
756     else if (params.variant == TestVariant::MULTIPLE)
757     {
758         str << "_buffers" << params.bufferBindingCount << "_sets" << params.setsPerBuffer;
759     }
760     else if (params.variant == TestVariant::MAX)
761     {
762         str << "_sampler" << params.samplerBufferBindingCount << "_resource" << params.resourceBufferBindingCount;
763     }
764     else if (params.variant == TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS)
765     {
766         str << "_buffers" << params.embeddedImmutableSamplerBufferBindingCount << "_samplers"
767             << params.embeddedImmutableSamplersPerBuffer;
768     }
769     else if (params.isPushDescriptorTest())
770     {
771         str << "_sets" << (params.bufferBindingCount + 1) << "_push_set" << params.pushDescriptorSetIndex
772             << ((params.subcase == SubCase::SINGLE_BUFFER) ? "_single_buffer" : "");
773     }
774     else if (params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE)
775     {
776         str << "_type_mask" << params.mutableDescriptorTypes;
777     }
778 
779     if (params.subcase == SubCase::IMMUTABLE_SAMPLERS)
780     {
781         str << "_imm_samplers";
782     }
783     else if (params.subcase == SubCase::YCBCR_SAMPLER_ARRAY)
784     {
785         str << "_array";
786     }
787 
788     if (params.commands2)
789     {
790         str << "_commands_2";
791     }
792 
793     params.updateHash(baseHash ^ deStringHash(str.str().c_str()));
794 
795     return str.str();
796 }
797 
798 // Used by shaders to identify a specific binding.
packBindingArgs(uint32_t set,uint32_t binding,uint32_t arrayIndex)799 uint32_t packBindingArgs(uint32_t set, uint32_t binding, uint32_t arrayIndex)
800 {
801     DE_ASSERT(set < 0x40);
802     DE_ASSERT(binding < 0x40);
803     DE_ASSERT(arrayIndex < 0x80);
804 
805     return (arrayIndex << 12) | ((set & 0x3Fu) << 6) | (binding & 0x3Fu);
806 }
807 
808 // Used by shaders to identify a specific binding.
unpackBindingArgs(uint32_t packed,uint32_t * pOutSet,uint32_t * pBinding,uint32_t * pArrayIndex)809 void unpackBindingArgs(uint32_t packed, uint32_t *pOutSet, uint32_t *pBinding, uint32_t *pArrayIndex)
810 {
811     if (pBinding != nullptr)
812     {
813         *pBinding = packed & 0x3Fu;
814     }
815     if (pOutSet != nullptr)
816     {
817         *pOutSet = (packed >> 6) & 0x3Fu;
818     }
819     if (pArrayIndex != nullptr)
820     {
821         *pArrayIndex = (packed >> 12) & 0x7Fu;
822     }
823 }
824 
825 // The expected data read through a descriptor. Try to get a unique value per test and binding.
getExpectedData(uint32_t hash,uint32_t set,uint32_t binding,uint32_t arrayIndex=0)826 uint32_t getExpectedData(uint32_t hash, uint32_t set, uint32_t binding, uint32_t arrayIndex = 0)
827 {
828     return hash ^ packBindingArgs(set, binding, arrayIndex);
829 }
830 
831 // The returned vector contains G8 in x and B8R8 in y components (as defined by VK_FORMAT_G8_B8R8_2PLANE_420_UNORM).
getExpectedData_G8_B8R8(uint32_t hash,uint32_t set,uint32_t binding,uint32_t arrayIndex=0)832 tcu::UVec2 getExpectedData_G8_B8R8(uint32_t hash, uint32_t set, uint32_t binding, uint32_t arrayIndex = 0)
833 {
834     // Hash the input data to achieve "randomness" of components.
835     const uint32_t data = deUint32Hash(getExpectedData(hash, set, binding, arrayIndex));
836 
837     return tcu::UVec2((data >> 16) & 0xff, data & 0xffff);
838 }
839 
840 // Convert G8_B8R8_UNORM to float components.
toVec4_G8_B8R8(const tcu::UVec2 & input)841 tcu::Vec4 toVec4_G8_B8R8(const tcu::UVec2 &input)
842 {
843     return tcu::Vec4(float(((input.y() >> 8) & 0xff)) / 255.0f, float(input.x()) / 255.0f,
844                      float((input.y() & 0xff)) / 255.0f, 1.0f);
845 }
846 
847 // Used by shaders.
glslFormat(uint32_t value)848 std::string glslFormat(uint32_t value)
849 {
850     return std::to_string(value) + "u";
851 }
852 
853 // Generate a unique shader resource name for a binding.
glslResourceName(uint32_t set,uint32_t binding)854 std::string glslResourceName(uint32_t set, uint32_t binding)
855 {
856     // A generic name for any accessible shader binding.
857     std::ostringstream str;
858     str << "res_" << set << "_" << binding;
859     return str.str();
860 }
861 
862 // Generate GLSL that declares a descriptor binding.
glslDeclareBinding(VkDescriptorType type,uint32_t set,uint32_t binding,uint32_t count,uint32_t attachmentIndex,uint32_t bufferArraySize,VkFormat format)863 std::string glslDeclareBinding(VkDescriptorType type, uint32_t set, uint32_t binding, uint32_t count,
864                                uint32_t attachmentIndex, uint32_t bufferArraySize,
865                                VkFormat format) // for resources that use it
866 {
867     std::ostringstream str;
868 
869     str << "layout(set = " << set << ", binding = " << binding;
870 
871     std::string imagePrefix;
872     std::string imageFormat;
873 
874     if (format == VK_FORMAT_R32_UINT)
875     {
876         imagePrefix = "u";
877         imageFormat = "r32ui";
878     }
879     else if (format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM)
880     {
881         imagePrefix = "";
882         imageFormat = "rgba8";
883     }
884     else
885     {
886         DE_ASSERT(0);
887     }
888 
889     // Additional layout information
890     switch (type)
891     {
892     case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
893     case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
894         str << ", " << imageFormat << ") ";
895         break;
896     case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
897         str << ", input_attachment_index = " << attachmentIndex << ") ";
898         break;
899     default:
900         str << ") ";
901         break;
902     }
903 
904     switch (type)
905     {
906     case VK_DESCRIPTOR_TYPE_SAMPLER:
907         str << "uniform sampler ";
908         break;
909     case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
910         str << "uniform " << imagePrefix << "sampler2D ";
911         break;
912     case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
913         str << "uniform " << imagePrefix << "texture2D ";
914         break;
915     case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
916         str << "uniform " << imagePrefix << "image2D ";
917         break;
918     case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
919         str << "uniform " << imagePrefix << "textureBuffer ";
920         break;
921     case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
922         str << "uniform " << imagePrefix << "imageBuffer ";
923         break;
924     case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
925     case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
926         DE_ASSERT(bufferArraySize != 0);
927         DE_ASSERT((bufferArraySize % 4) == 0);
928         // std140 layout rules, each array element is aligned to 16 bytes.
929         // Due to this, we will use uvec4 instead to access all dwords.
930         str << "uniform Buffer_" << set << "_" << binding << " {\n"
931             << "    uvec4 data[" << (bufferArraySize / 4) << "];\n"
932             << "} ";
933         break;
934     case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
935         DE_ASSERT(bufferArraySize != 0);
936         str << "buffer Buffer_" << set << "_" << binding << " {\n"
937             << "    uint data[" << bufferArraySize << "];\n"
938             << "} ";
939         break;
940     case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
941         str << "uniform " << imagePrefix << "subpassInput ";
942         break;
943     case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
944         str << "uniform accelerationStructureEXT ";
945         break;
946     default:
947         DE_ASSERT(0);
948         break;
949     }
950 
951     str << glslResourceName(set, binding);
952 
953     if (count > 1)
954     {
955         str << "[" << count << "];\n";
956     }
957     else
958     {
959         str << ";\n";
960     }
961 
962     return str.str();
963 }
964 
965 // Generate all GLSL descriptor set/binding declarations.
glslGlobalDeclarations(const TestParams & params,const std::vector<SimpleBinding> & simpleBindings,bool accStruct)966 std::string glslGlobalDeclarations(const TestParams &params, const std::vector<SimpleBinding> &simpleBindings,
967                                    bool accStruct)
968 {
969     DE_UNREF(params);
970 
971     std::ostringstream str;
972 
973     if (accStruct)
974         str << "#extension GL_EXT_ray_query : require\n";
975 
976     for (const auto &sb : simpleBindings)
977     {
978         const uint32_t arraySize = sb.isResultBuffer                                    ? ConstResultBufferDwords :
979                                    (sb.type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) ? ConstInlineBlockDwords :
980                                                                                           ConstUniformBufferDwords;
981 
982         VkFormat format;
983         if ((params.variant == TestVariant::YCBCR_SAMPLER) && (sb.type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
984         {
985             format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
986         }
987         else
988         {
989             format = VK_FORMAT_R32_UINT;
990         }
991 
992         str << glslDeclareBinding(sb.type, sb.set, sb.binding, sb.count, sb.inputAttachmentIndex, arraySize, format);
993     }
994 
995     if (accStruct)
996     {
997         str << ""
998                "uint queryAS(accelerationStructureEXT rayQueryTopLevelAccelerationStructure)\n"
999                "{\n"
1000                "    const uint  rayFlags = gl_RayFlagsNoOpaqueEXT;\n"
1001                "    const uint  cullMask = 0xFF;\n"
1002                "    const float tmin     = 0.0f;\n"
1003                "    const float tmax     = 524288.0f; // 2^^19\n"
1004                "    const vec3  origin   = vec3(0.0f, 0.0f, 0.0f);\n"
1005                "    const vec3  direct   = vec3(0.0f, 0.0f, 1.0f);\n"
1006                "    rayQueryEXT rayQuery;\n"
1007                "\n"
1008                "    rayQueryInitializeEXT(rayQuery, rayQueryTopLevelAccelerationStructure, rayFlags, cullMask, origin, "
1009                "tmin, direct, tmax);\n"
1010                "\n"
1011                "    if (rayQueryProceedEXT(rayQuery))\n"
1012                "    {\n"
1013                "        if (rayQueryGetIntersectionTypeEXT(rayQuery, false) == "
1014                "gl_RayQueryCandidateIntersectionTriangleEXT)\n"
1015                "        {\n"
1016                "            return uint(round(rayQueryGetIntersectionTEXT(rayQuery, false)));\n"
1017                "        }\n"
1018                "    }\n"
1019                "\n"
1020                "    return 0u;\n"
1021                "}\n"
1022                "\n";
1023     }
1024 
1025     return str.str();
1026 }
1027 
1028 // This function is used to return additional diagnostic information for a failed descriptor binding.
1029 // For example, result Y is the packed binding information and result Z is the array index (for arrayed descriptors, or buffers).
glslResultBlock(const std::string & indent,const std::string & resultY,const std::string & resultZ="")1030 std::string glslResultBlock(const std::string &indent, const std::string &resultY, const std::string &resultZ = "")
1031 {
1032     std::ostringstream str;
1033     str << "{\n"
1034         << indent << "    result.x += 1;\n"
1035         << indent << "} else if (result.y == 0) {\n"
1036         << indent << "    result.y = " << resultY << ";\n";
1037 
1038     if (!resultZ.empty())
1039     {
1040         str << indent << "    result.z = " << resultZ << ";\n";
1041     }
1042 
1043     str << indent << "}\n";
1044     return str.str();
1045 }
1046 
1047 // Get the number of iterations required to access all elements of a buffer.
1048 // This mainly exists because we access UBOs as uvec4.
getBufferLoopIterations(VkDescriptorType type)1049 inline uint32_t getBufferLoopIterations(VkDescriptorType type)
1050 {
1051     switch (type)
1052     {
1053     case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1054         return ConstUniformBufferDwords / 4;
1055 
1056     case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
1057         return ConstInlineBlockDwords / 4;
1058 
1059     case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1060         return ConstUniformBufferDwords;
1061 
1062     case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1063     case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1064         return ConstTexelBufferElements;
1065 
1066     default:
1067         // Ignored
1068         return 0;
1069     }
1070 }
1071 
1072 // Generate GLSL that reads through the binding and compares the value.
1073 // Successful reads increment a counter, while failed read will write back debug information.
glslOutputVerification(const TestParams & params,const std::vector<SimpleBinding> & simpleBindings,bool)1074 std::string glslOutputVerification(const TestParams &params, const std::vector<SimpleBinding> &simpleBindings, bool)
1075 {
1076     std::ostringstream str;
1077 
1078     if ((params.variant == TestVariant::SINGLE) || (params.variant == TestVariant::MULTIPLE) ||
1079         (params.variant == TestVariant::PUSH_DESCRIPTOR) || (params.variant == TestVariant::PUSH_TEMPLATE) ||
1080         (params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR) ||
1081         (params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE) || (params.variant == TestVariant::CAPTURE_REPLAY) ||
1082         (params.variant == TestVariant::YCBCR_SAMPLER))
1083     {
1084         // Read at least one value from a descriptor and compare it.
1085         // For buffers, verify every element.
1086         //
1087         // With null descriptors, reads must always return zero.
1088 
1089         for (const auto &sb : simpleBindings)
1090         {
1091             uint32_t samplerIndex = INDEX_INVALID;
1092 
1093             if (sb.isResultBuffer || sb.isRayTracingAS)
1094             {
1095                 // Used by other bindings.
1096                 continue;
1097             }
1098 
1099             if (sb.type == VK_DESCRIPTOR_TYPE_SAMPLER)
1100             {
1101                 // Used by sampled images.
1102                 continue;
1103             }
1104             else if (sb.type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE)
1105             {
1106                 // Sampled images require a sampler to use.
1107                 // Find a suitable sampler within the same descriptor set.
1108 
1109                 bool found   = false;
1110                 samplerIndex = 0;
1111 
1112                 for (const auto &sb1 : simpleBindings)
1113                 {
1114                     if ((sb.set == sb1.set) && (sb1.type == VK_DESCRIPTOR_TYPE_SAMPLER))
1115                     {
1116                         found = true;
1117                         break;
1118                     }
1119 
1120                     ++samplerIndex;
1121                 }
1122 
1123                 if (!found)
1124                 {
1125                     samplerIndex = INDEX_INVALID;
1126                 }
1127             }
1128 
1129             const uint32_t bufferLoopIterations = getBufferLoopIterations(sb.type);
1130             const uint32_t loopIncrement        = bufferLoopIterations / (ConstChecksPerBuffer - 1);
1131 
1132             // Ensure we won't miss the last check (the index will always be less than the buffer length).
1133             DE_ASSERT((bufferLoopIterations == 0) || ((bufferLoopIterations % (ConstChecksPerBuffer - 1)) != 0));
1134 
1135             const bool isNullDescriptor =
1136                 (params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR) && (sb.type == params.descriptor);
1137             const bool isCustomBorderColor = (params.subcase == SubCase::CAPTURE_REPLAY_CUSTOM_BORDER_COLOR);
1138 
1139             for (uint32_t arrayIndex = 0; arrayIndex < sb.count; ++arrayIndex)
1140             {
1141                 // Input attachment index increases with array index.
1142                 const auto expectedData =
1143                     glslFormat(isNullDescriptor ? 0 :
1144                                                   getExpectedData(params.hash, sb.set, sb.binding,
1145                                                                   sb.inputAttachmentIndex + arrayIndex));
1146                 const auto expectedBorderColor = isNullDescriptor    ? "uvec4(0)" :
1147                                                  isCustomBorderColor ? "uvec4(2, 0, 0, 1)" :
1148                                                                        "uvec4(0, 0, 0, 1)";
1149                 const auto bindingArgs =
1150                     glslFormat(packBindingArgs(sb.set, sb.binding, sb.inputAttachmentIndex + arrayIndex));
1151                 const auto &subscript = (sb.count > 1) ? "[" + std::to_string(arrayIndex) + "]" : "";
1152 
1153                 if (sb.type == VK_DESCRIPTOR_TYPE_SAMPLER)
1154                 {
1155                     TCU_THROW(InternalError, "Sampler is tested implicitly");
1156                 }
1157                 else if (sb.type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR)
1158                 {
1159                     str << "    if (queryAS(" << glslResourceName(sb.set, sb.binding) << subscript
1160                         << ") == " << expectedData << ") " << glslResultBlock("\t", bindingArgs);
1161                 }
1162                 else if (sb.type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1163                 {
1164                     str << "    if (subpassLoad(" << glslResourceName(sb.set, sb.binding) << subscript
1165                         << ").r == " << expectedData << ") " << glslResultBlock("\t", bindingArgs);
1166                 }
1167                 else if (sb.type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE)
1168                 {
1169                     DE_ASSERT(samplerIndex != INDEX_INVALID);
1170                     const auto &samplerSb = simpleBindings[samplerIndex];
1171                     const auto &samplerSubscript =
1172                         (samplerSb.count > 1) ? "[" + std::to_string(arrayIndex % samplerSb.count) + "]" : "";
1173 
1174                     // With samplers, verify the image color and the border color.
1175 
1176                     std::stringstream samplerStr;
1177                     samplerStr << "usampler2D(" << glslResourceName(sb.set, sb.binding) << subscript << ", "
1178                                << glslResourceName(samplerSb.set, samplerSb.binding) << samplerSubscript << ")";
1179 
1180                     str << "    if ((textureLod(" << samplerStr.str() << ", vec2(0, 0), 0).r == " << expectedData
1181                         << ") &&\n"
1182                         << "        (textureLod(" << samplerStr.str() << ", vec2(-1, 0), 0) == " << expectedBorderColor
1183                         << ")) " << glslResultBlock("\t", bindingArgs);
1184                 }
1185                 else if (sb.type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
1186                 {
1187                     if (params.variant == TestVariant::YCBCR_SAMPLER)
1188                     {
1189                         const auto ycbcrData         = getExpectedData_G8_B8R8(params.hash, sb.set, sb.binding,
1190                                                                                sb.inputAttachmentIndex + arrayIndex);
1191                         const auto expectedDataFloat = toVec4_G8_B8R8(ycbcrData);
1192 
1193                         // No border color with ycbcr samplers. 0.005 tolerance is a bit more than 1/255.
1194                         str << "\t{\n"
1195                             << "    vec4 color = textureLod(" << glslResourceName(sb.set, sb.binding) << subscript
1196                             << ", vec2(0, 0), 0);\n"
1197                             << "    if ((abs(" << expectedDataFloat.x() << " - color.r) < 0.005) &&\n"
1198                             << "        (abs(" << expectedDataFloat.y() << " - color.g) < 0.005) &&\n"
1199                             << "        (abs(" << expectedDataFloat.z() << " - color.b) < 0.005) &&\n"
1200                             << "        (color.a == 1.0)) " << glslResultBlock("\t\t", bindingArgs) << "\t}\n";
1201                     }
1202                     else
1203                     {
1204                         str << "    if ((textureLod(" << glslResourceName(sb.set, sb.binding) << subscript
1205                             << ", vec2(0, 0), 0).r == " << expectedData << ") &&\n"
1206                             << "        (textureLod(" << glslResourceName(sb.set, sb.binding) << subscript
1207                             << ", vec2(-1, 0), 0) == " << expectedBorderColor << ")) "
1208                             << glslResultBlock("\t", bindingArgs);
1209                     }
1210                 }
1211                 else if (sb.type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1212                 {
1213                     str << "    if (imageLoad(" << glslResourceName(sb.set, sb.binding) << subscript
1214                         << ", ivec2(0, 0)).r == " << expectedData << ") " << glslResultBlock("\t", bindingArgs);
1215                 }
1216                 else if ((sb.type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
1217                          (sb.type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER))
1218                 {
1219                     const auto loadOp =
1220                         (sb.type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ? "texelFetch" : "imageLoad";
1221                     const auto loopData = isNullDescriptor ? expectedData : "(" + expectedData + " + i)";
1222 
1223                     str << "    for (uint i = 0; i < " << glslFormat(bufferLoopIterations)
1224                         << "; i += " << glslFormat(loopIncrement) << ") {\n"
1225                         << "        uint value = " << loadOp << "(" << glslResourceName(sb.set, sb.binding) << subscript
1226                         << ", int(i)).r;\n"
1227                         << "        if (value == " << loopData << ") " << glslResultBlock("\t\t", bindingArgs, "i")
1228                         << "    }\n";
1229                 }
1230                 else if ((sb.type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
1231                          (sb.type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK))
1232                 {
1233                     const auto loopData0 = isNullDescriptor ? expectedData : "(" + expectedData + " + 4 * i + 0)";
1234                     const auto loopData1 = isNullDescriptor ? expectedData : "(" + expectedData + " + 4 * i + 1)";
1235                     const auto loopData2 = isNullDescriptor ? expectedData : "(" + expectedData + " + 4 * i + 2)";
1236                     const auto loopData3 = isNullDescriptor ? expectedData : "(" + expectedData + " + 4 * i + 3)";
1237 
1238                     str << "    for (uint i = 0; i < " << glslFormat(bufferLoopIterations)
1239                         << "; i += " << glslFormat(loopIncrement) << ") {\n"
1240                         << "        uvec4 value = " << glslResourceName(sb.set, sb.binding) << subscript
1241                         << ".data[i];\n"
1242                         << "        if (value.x == " << loopData0 << ") "
1243                         << glslResultBlock("\t\t", bindingArgs, "4 * i + 0") << "        if (value.y == " << loopData1
1244                         << ") " << glslResultBlock("\t\t", bindingArgs, "4 * i + 1")
1245                         << "        if (value.z == " << loopData2 << ") "
1246                         << glslResultBlock("\t\t", bindingArgs, "4 * i + 2") << "        if (value.w == " << loopData3
1247                         << ") " << glslResultBlock("\t\t", bindingArgs, "4 * i + 3") << "    }\n";
1248                 }
1249                 else if (sb.type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
1250                 {
1251                     const auto loopData = isNullDescriptor ? expectedData : "(" + expectedData + " + i)";
1252 
1253                     str << "    for (uint i = 0; i < " << glslFormat(bufferLoopIterations)
1254                         << "; i += " << glslFormat(loopIncrement) << ") {\n"
1255                         << "        uint value = " << glslResourceName(sb.set, sb.binding) << subscript << ".data[i];\n"
1256                         << "        if (value == " << loopData << ") " << glslResultBlock("\t\t", bindingArgs, "i")
1257                         << "    }\n";
1258                 }
1259                 else
1260                 {
1261                     DE_ASSERT(0);
1262                 }
1263             }
1264         }
1265     }
1266     else if (params.variant == TestVariant::ROBUST_BUFFER_ACCESS)
1267     {
1268         // With robust buffer tests, the buffer is always filled with zeros and we read with an offset that will
1269         // eventually cause us to read past the end of the buffer.
1270 
1271         for (const auto &sb : simpleBindings)
1272         {
1273             if (sb.isResultBuffer || sb.isRayTracingAS)
1274             {
1275                 // Used by other bindings.
1276                 continue;
1277             }
1278 
1279             const uint32_t bufferLoopIterations = getBufferLoopIterations(sb.type);
1280             const uint32_t loopIncrement        = bufferLoopIterations / (ConstChecksPerBuffer - 1);
1281             const auto iterationOffsetStr       = glslFormat(bufferLoopIterations / 2);
1282 
1283             // Ensure we won't miss the last check (the index will always be less than the buffer length).
1284             DE_ASSERT((bufferLoopIterations == 0) || ((bufferLoopIterations % (ConstChecksPerBuffer - 1)) != 0));
1285 
1286             for (uint32_t arrayIndex = 0; arrayIndex < sb.count; ++arrayIndex)
1287             {
1288                 const auto bindingArgs =
1289                     glslFormat(packBindingArgs(sb.set, sb.binding, sb.inputAttachmentIndex + arrayIndex));
1290                 const auto &subscript = (sb.count > 1) ? "[" + std::to_string(arrayIndex) + "]" : "";
1291 
1292                 switch (sb.type)
1293                 {
1294                 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1295                     str << "    for (uint i = 0; i < " << glslFormat(bufferLoopIterations)
1296                         << ";  i += " << glslFormat(loopIncrement) << ") {\n"
1297                         << "        if (texelFetch(" << glslResourceName(sb.set, sb.binding) << subscript
1298                         << ", int(i + " << iterationOffsetStr << ")).r == 0) "
1299                         << glslResultBlock("\t\t", bindingArgs, "i + " + iterationOffsetStr) << "    }\n";
1300                     break;
1301 
1302                 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1303                     str << "    for (uint i = 0; i < " << glslFormat(bufferLoopIterations)
1304                         << ";  i += " << glslFormat(loopIncrement) << ") {\n"
1305                         << "        if (imageLoad(" << glslResourceName(sb.set, sb.binding) << subscript << ", int(i + "
1306                         << iterationOffsetStr << ")).r == 0) "
1307                         << glslResultBlock("\t\t", bindingArgs, "i + " + iterationOffsetStr) << "    }\n";
1308                     break;
1309 
1310                 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1311                     str << "    for (uint i = 0; i < " << glslFormat(bufferLoopIterations)
1312                         << ";  i += " << glslFormat(loopIncrement) << ") {\n"
1313                         << "        if (" << glslResourceName(sb.set, sb.binding) << subscript << ".data[i + "
1314                         << iterationOffsetStr << "].x == 0) "
1315                         << glslResultBlock("\t\t", bindingArgs, "4 * i + " + iterationOffsetStr + " + 0")
1316                         << "        if (" << glslResourceName(sb.set, sb.binding) << subscript << ".data[i + "
1317                         << iterationOffsetStr << "].y == 0) "
1318                         << glslResultBlock("\t\t", bindingArgs, "4 * i + " + iterationOffsetStr + " + 1")
1319                         << "        if (" << glslResourceName(sb.set, sb.binding) << subscript << ".data[i + "
1320                         << iterationOffsetStr << "].z == 0) "
1321                         << glslResultBlock("\t\t", bindingArgs, "4 * i + " + iterationOffsetStr + " + 2")
1322                         << "        if (" << glslResourceName(sb.set, sb.binding) << subscript << ".data[i + "
1323                         << iterationOffsetStr << "].w == 0) "
1324                         << glslResultBlock("\t\t", bindingArgs, "4 * i + " + iterationOffsetStr + " + 3") << "    }\n";
1325                     break;
1326 
1327                 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1328                     str << "    for (uint i = 0; i < " << glslFormat(bufferLoopIterations)
1329                         << ";  i += " << glslFormat(loopIncrement) << ") {\n"
1330                         << "        if (" << glslResourceName(sb.set, sb.binding) << subscript << ".data[i + "
1331                         << iterationOffsetStr << "] == 0) "
1332                         << glslResultBlock("\t\t", bindingArgs, "i + " + iterationOffsetStr) << "    }\n";
1333                     break;
1334 
1335                 default:
1336                     DE_ASSERT(0);
1337                     break;
1338                 }
1339             }
1340         }
1341     }
1342     else if (params.variant == TestVariant::MAX)
1343     {
1344         std::vector<uint32_t> samplerIndices;
1345         std::vector<uint32_t> imageIndices;
1346 
1347         for (uint32_t i = 0; i < u32(simpleBindings.size()); ++i)
1348         {
1349             const auto &binding = simpleBindings[i];
1350 
1351             if (binding.type == VK_DESCRIPTOR_TYPE_SAMPLER)
1352             {
1353                 samplerIndices.emplace_back(i);
1354             }
1355             else if (binding.type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE)
1356             {
1357                 imageIndices.emplace_back(i);
1358             }
1359             // Ignore other descriptors, if any.
1360         }
1361 
1362         // Ensure that all samplers and images are accessed at least once. If we run out of one, simply reuse it.
1363 
1364         const auto maxIndex = deMaxu32(u32(samplerIndices.size()), u32(imageIndices.size()));
1365 
1366         for (uint32_t index = 0; index < maxIndex; ++index)
1367         {
1368             const auto &samplerBinding = simpleBindings[samplerIndices[index % samplerIndices.size()]];
1369             const auto &imageBinding   = simpleBindings[imageIndices[index % imageIndices.size()]];
1370 
1371             const auto expectedData =
1372                 glslFormat(getExpectedData(params.hash, imageBinding.set, imageBinding.binding, 0));
1373             const auto imageBindingArgs   = glslFormat(packBindingArgs(imageBinding.set, imageBinding.binding, 0));
1374             const auto samplerBindingArgs = glslFormat(packBindingArgs(samplerBinding.set, samplerBinding.binding, 0));
1375 
1376             std::stringstream samplerStr;
1377             samplerStr << "usampler2D(" << glslResourceName(imageBinding.set, imageBinding.binding) << ", "
1378                        << glslResourceName(samplerBinding.set, samplerBinding.binding) << ")";
1379 
1380             str << "    if ((textureLod(" << samplerStr.str() << ", vec2(0, 0), 0).r == " << expectedData << ") &&\n"
1381                 << "        (textureLod(" << samplerStr.str() << ", vec2(-1, 0), 0) == uvec4(0, 0, 0, 1))) "
1382                 << glslResultBlock("\t", imageBindingArgs, samplerBindingArgs);
1383         }
1384     }
1385     else if (params.variant == TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS)
1386     {
1387         // The first few sets contain only samplers.
1388         // Then the last set contains only images.
1389         // Optionally, the last binding of that set is the compute result buffer.
1390 
1391         uint32_t firstImageIndex = 0;
1392         uint32_t lastImageIndex  = 0;
1393 
1394         for (uint32_t i = 0; i < u32(simpleBindings.size()); ++i)
1395         {
1396             const auto &binding = simpleBindings[i];
1397 
1398             if (binding.type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE)
1399             {
1400                 if (firstImageIndex == 0)
1401                 {
1402                     firstImageIndex = i;
1403                 }
1404 
1405                 lastImageIndex = i;
1406             }
1407         }
1408 
1409         DE_ASSERT(firstImageIndex == (lastImageIndex + 1 - firstImageIndex)); // same number of images and samplers
1410 
1411         for (uint32_t imageIndex = firstImageIndex; imageIndex <= lastImageIndex; ++imageIndex)
1412         {
1413             const auto &imageBinding = simpleBindings[imageIndex];
1414             const auto expectedData =
1415                 glslFormat(getExpectedData(params.hash, imageBinding.set, imageBinding.binding, 0));
1416             const auto bindingArgs = glslFormat(packBindingArgs(imageBinding.set, imageBinding.binding, 0));
1417 
1418             DE_ASSERT(imageBinding.type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
1419 
1420             const auto &samplerBinding    = simpleBindings[imageIndex - firstImageIndex];
1421             const auto samplerBindingArgs = glslFormat(packBindingArgs(samplerBinding.set, samplerBinding.binding, 0));
1422 
1423             std::stringstream samplerStr;
1424             samplerStr << "usampler2D(" << glslResourceName(imageBinding.set, imageBinding.binding) << ", "
1425                        << glslResourceName(samplerBinding.set, samplerBinding.binding) << ")";
1426 
1427             str << "    if ((textureLod(" << samplerStr.str() << ", vec2(0, 0), 0).r == " << expectedData << ") &&\n"
1428                 << "        (textureLod(" << samplerStr.str() << ", vec2(-1, 0), 0) == uvec4(0, 0, 0, 1))) "
1429                 << glslResultBlock("\t", bindingArgs, samplerBindingArgs);
1430         }
1431     }
1432     else
1433     {
1434         TCU_THROW(InternalError, "Not implemented");
1435     }
1436 
1437     // Compute shaders write the result to a storage buffer.
1438     const uint32_t computeResultBufferIndex = getResultBufferIndex(simpleBindings);
1439 
1440     if (computeResultBufferIndex != INDEX_INVALID)
1441     {
1442         DE_ASSERT(params.isCompute() || params.isRayTracing());
1443         const auto &resultSb = simpleBindings[computeResultBufferIndex];
1444 
1445         str << "    " << glslResourceName(resultSb.set, resultSb.binding) << ".data[0] = result.x;\n";
1446         str << "    " << glslResourceName(resultSb.set, resultSb.binding) << ".data[1] = result.y;\n";
1447         str << "    " << glslResourceName(resultSb.set, resultSb.binding) << ".data[2] = result.z;\n";
1448         str << "    " << glslResourceName(resultSb.set, resultSb.binding) << ".data[3] = result.w;\n";
1449     }
1450 
1451     return str.str();
1452 }
1453 
1454 // Base class for all test cases.
1455 class DescriptorBufferTestCase : public TestCase
1456 {
1457 public:
DescriptorBufferTestCase(tcu::TestContext & testCtx,const std::string & name,const TestParams & params)1458     DescriptorBufferTestCase(tcu::TestContext &testCtx, const std::string &name, const TestParams &params)
1459 
1460         : TestCase(testCtx, name)
1461         , m_params(params)
1462         , m_rng(params.hash)
1463     {
1464     }
1465 
1466     void delayedInit();
1467     void initPrograms(vk::SourceCollections &programCollection) const;
1468     void initPrograms(vk::SourceCollections &programCollection, const std::vector<SimpleBinding> &simpleBinding,
1469                       bool accStruct, bool addService) const;
1470     TestInstance *createInstance(Context &context) const;
1471     void checkSupport(Context &context) const;
1472 
1473 private:
1474     const TestParams m_params;
1475     de::Random m_rng;
1476     std::vector<SimpleBinding> m_simpleBindings;
1477 };
1478 
1479 // Based on the basic test parameters, this function creates a number of sets/bindings that will be tested.
delayedInit()1480 void DescriptorBufferTestCase::delayedInit()
1481 {
1482     if ((m_params.variant == TestVariant::SINGLE) || (m_params.variant == TestVariant::CAPTURE_REPLAY) ||
1483         (m_params.variant == TestVariant::YCBCR_SAMPLER))
1484     {
1485         // Creates a single set with a single binding, unless additional helper resources are required.
1486         {
1487             SimpleBinding sb{};
1488             sb.set     = 0;
1489             sb.binding = 0;
1490             sb.type    = m_params.descriptor;
1491             sb.count   = 1;
1492 
1493             // For inline uniforms we still use count = 1. The byte size is implicit in our tests.
1494 
1495             m_simpleBindings.emplace_back(sb);
1496         }
1497 
1498         if (m_params.subcase == SubCase::YCBCR_SAMPLER_ARRAY)
1499         {
1500             // Add one more arrayed binding to ensure the descriptor offsets are as expected.
1501             SimpleBinding sb{};
1502             sb.set     = 0;
1503             sb.binding = u32(m_simpleBindings.size());
1504             sb.type    = m_params.descriptor;
1505             sb.count   = 2;
1506 
1507             m_simpleBindings.emplace_back(sb);
1508         }
1509 
1510         // Sampled images require a sampler as well.
1511         if (m_params.descriptor == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE)
1512         {
1513             SimpleBinding sb{};
1514             sb.set     = 0;
1515             sb.binding = u32(m_simpleBindings.size());
1516             sb.type    = VK_DESCRIPTOR_TYPE_SAMPLER;
1517             sb.count   = 1;
1518 
1519             m_simpleBindings.emplace_back(sb);
1520         }
1521         else if (m_params.isCaptureReplayDescriptor(VK_DESCRIPTOR_TYPE_SAMPLER))
1522         {
1523             // Samplers are usually tested implicitly, but with capture replay they are the target of specific API commands.
1524             // Add a sampled image to acompany the sampler.
1525 
1526             SimpleBinding sb{};
1527             sb.set     = 0;
1528             sb.binding = u32(m_simpleBindings.size());
1529             sb.type    = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
1530             sb.count   = 1;
1531 
1532             m_simpleBindings.emplace_back(sb);
1533         }
1534 
1535         // For compute shaders add a result buffer as the last binding of the first set.
1536         if (m_params.isCompute() || m_params.isRayTracing())
1537         {
1538             SimpleBinding sb{};
1539             sb.set            = 0;
1540             sb.binding        = u32(m_simpleBindings.size());
1541             sb.type           = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1542             sb.count          = 1;
1543             sb.isResultBuffer = true;
1544             sb.isRayTracingAS = false;
1545 
1546             m_simpleBindings.emplace_back(sb);
1547 
1548             if (m_params.isRayTracing())
1549             {
1550                 SimpleBinding sba{};
1551                 sba.set            = 0;
1552                 sba.binding        = u32(m_simpleBindings.size());
1553                 sba.type           = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
1554                 sba.count          = 1;
1555                 sba.isResultBuffer = false;
1556                 sba.isRayTracingAS = true;
1557 
1558                 m_simpleBindings.emplace_back(sba);
1559             }
1560         }
1561     }
1562     else if ((m_params.variant == TestVariant::MULTIPLE) || (m_params.variant == TestVariant::PUSH_DESCRIPTOR) ||
1563              (m_params.variant == TestVariant::PUSH_TEMPLATE) ||
1564              (m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS) ||
1565              (m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR) ||
1566              (m_params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE))
1567     {
1568         // Generate a descriptor set for each descriptor buffer binding.
1569         // Within a set, add bindings for each descriptor type. Bindings may have 1-3 array elements.
1570         // In this test we include sampler descriptors, they will be used with sampled images, if needed.
1571 
1572         // NOTE: For implementation simplicity, this test doesn't limit the number of descriptors accessed
1573         // in the shaders, which may not work on some implementations.
1574 
1575         // Don't overcomplicate the test logic
1576         DE_ASSERT(!m_params.isPushDescriptorTest() || (m_params.setsPerBuffer == 1));
1577 
1578         // Add one more set for push descriptors (if used)
1579         const auto numSets =
1580             (m_params.bufferBindingCount * m_params.setsPerBuffer) + (m_params.isPushDescriptorTest() ? 1 : 0);
1581         uint32_t attachmentIndex = 0;
1582 
1583         // One set per buffer binding
1584         for (uint32_t set = 0; set < numSets; ++set)
1585         {
1586             std::vector<VkDescriptorType> choiceDescriptors;
1587             choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
1588             choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
1589             choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
1590             choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
1591 
1592             if (m_params.variant != TestVariant::ROBUST_BUFFER_ACCESS)
1593             {
1594                 choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLER);
1595                 choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
1596                 choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
1597                 choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
1598 
1599                 if (m_params.variant != TestVariant::MUTABLE_DESCRIPTOR_TYPE &&
1600                     (m_params.variant != TestVariant::ROBUST_NULL_DESCRIPTOR ||
1601                      (m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR && m_params.isAccelerationStructure())))
1602                 {
1603                     choiceDescriptors.emplace_back(
1604                         VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR); // will be replaced with VK_DESCRIPTOR_TYPE_STORAGE_BUFFER if unsupported
1605                 }
1606 
1607                 if ((m_params.variant != TestVariant::ROBUST_NULL_DESCRIPTOR) &&
1608                     (m_params.variant != TestVariant::MUTABLE_DESCRIPTOR_TYPE) &&
1609                     (!m_params.isPushDescriptorTest() || (set != m_params.pushDescriptorSetIndex)))
1610                 {
1611                     choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK);
1612                 }
1613 
1614                 if (m_params.stage == VK_SHADER_STAGE_FRAGMENT_BIT)
1615                 {
1616                     choiceDescriptors.emplace_back(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
1617                 }
1618             }
1619 
1620             // Randomize the order
1621             m_rng.shuffle(choiceDescriptors.begin(), choiceDescriptors.end());
1622 
1623             for (uint32_t binding = 0; binding < u32(choiceDescriptors.size()); ++binding)
1624             {
1625                 SimpleBinding sb{};
1626                 sb.set     = set;
1627                 sb.binding = binding;
1628                 sb.type    = choiceDescriptors[binding];
1629                 sb.count   = 1 + ((sb.type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) ?
1630                                       m_rng.getUint32() % ConstMaxDescriptorArraySize :
1631                                       0);
1632 
1633                 // For inline uniforms we still use count = 1. The byte size is implicit in our tests.
1634 
1635                 if (sb.type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1636                 {
1637                     sb.inputAttachmentIndex = attachmentIndex;
1638                     attachmentIndex += sb.count;
1639                 }
1640 
1641                 m_simpleBindings.emplace_back(sb);
1642             }
1643 
1644             // For compute shaders add a result buffer as the last binding of the first set.
1645             if (set == 0 && (m_params.isCompute() || m_params.isRayTracing()))
1646             {
1647                 SimpleBinding sb{};
1648                 sb.set            = set;
1649                 sb.binding        = u32(m_simpleBindings.size());
1650                 sb.type           = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1651                 sb.count          = 1;
1652                 sb.isResultBuffer = true;
1653                 sb.isRayTracingAS = false;
1654 
1655                 m_simpleBindings.emplace_back(sb);
1656 
1657                 if (m_params.isRayTracing())
1658                 {
1659                     SimpleBinding sba{};
1660                     sba.set            = set;
1661                     sba.binding        = u32(m_simpleBindings.size());
1662                     sba.type           = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
1663                     sba.count          = 1;
1664                     sba.isResultBuffer = false;
1665                     sba.isRayTracingAS = true;
1666 
1667                     m_simpleBindings.emplace_back(sba);
1668                 }
1669             }
1670         }
1671     }
1672     else if (m_params.variant == TestVariant::MAX)
1673     {
1674         // Create sampler- and resource-only sets, up to specified maxiumums.
1675         // Each set will get its own descriptor buffer binding.
1676 
1677         uint32_t set          = 0;
1678         uint32_t samplerIndex = 0;
1679         uint32_t imageIndex   = 0;
1680 
1681         for (;;)
1682         {
1683             SimpleBinding sb{};
1684             sb.binding = 0;
1685             sb.count   = 1;
1686             sb.set     = set; // save the original set index here
1687 
1688             if (samplerIndex < m_params.samplerBufferBindingCount)
1689             {
1690                 sb.set  = set;
1691                 sb.type = VK_DESCRIPTOR_TYPE_SAMPLER;
1692 
1693                 m_simpleBindings.emplace_back(sb);
1694 
1695                 ++set;
1696                 ++samplerIndex;
1697             }
1698 
1699             if (imageIndex < m_params.resourceBufferBindingCount)
1700             {
1701                 sb.set  = set;
1702                 sb.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
1703 
1704                 m_simpleBindings.emplace_back(sb);
1705 
1706                 // Put the result buffer in the first resource set
1707                 if ((imageIndex == 0) && (m_params.isCompute() || m_params.isRayTracing()))
1708                 {
1709                     sb.binding        = 1;
1710                     sb.type           = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1711                     sb.isResultBuffer = true;
1712 
1713                     m_simpleBindings.emplace_back(sb);
1714 
1715                     if (m_params.isRayTracing())
1716                     {
1717                         sb.binding        = 2;
1718                         sb.type           = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
1719                         sb.isResultBuffer = false;
1720                         sb.isRayTracingAS = true;
1721 
1722                         m_simpleBindings.emplace_back(sb);
1723                     }
1724                 }
1725 
1726                 ++set;
1727                 ++imageIndex;
1728             }
1729 
1730             if (sb.set == set)
1731             {
1732                 // We didn't add a new set, so we must be done.
1733                 break;
1734             }
1735         }
1736     }
1737     else if (m_params.variant == TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS)
1738     {
1739         // Create a number of sampler-only sets across several descriptor buffers, they will be used as embedded
1740         // immutable sampler buffers. Finally, add a set with images that use these samplers.
1741 
1742         // Buffer index maps to a set with embedded immutable samplers
1743         for (uint32_t bufferIndex = 0; bufferIndex < m_params.embeddedImmutableSamplerBufferBindingCount; ++bufferIndex)
1744         {
1745             for (uint32_t samplerIndex = 0; samplerIndex < m_params.embeddedImmutableSamplersPerBuffer; ++samplerIndex)
1746             {
1747                 SimpleBinding sb{};
1748                 sb.set                        = bufferIndex;
1749                 sb.binding                    = samplerIndex;
1750                 sb.count                      = 1;
1751                 sb.type                       = VK_DESCRIPTOR_TYPE_SAMPLER;
1752                 sb.isEmbeddedImmutableSampler = true;
1753 
1754                 m_simpleBindings.emplace_back(sb);
1755             }
1756         }
1757 
1758         // After the samplers come the images
1759         if (!m_simpleBindings.empty())
1760         {
1761             SimpleBinding sb{};
1762             sb.set   = m_simpleBindings.back().set + 1;
1763             sb.count = 1;
1764 
1765             const auto numSamplers =
1766                 m_params.embeddedImmutableSamplerBufferBindingCount * m_params.embeddedImmutableSamplersPerBuffer;
1767 
1768             for (uint32_t samplerIndex = 0; samplerIndex < numSamplers; ++samplerIndex)
1769             {
1770                 sb.type    = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
1771                 sb.binding = samplerIndex;
1772 
1773                 m_simpleBindings.emplace_back(sb);
1774             }
1775 
1776             if (m_params.isCompute() || m_params.isRayTracing())
1777             {
1778                 // Append the result buffer after the images
1779                 sb.binding += 1;
1780                 sb.type           = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1781                 sb.isResultBuffer = true;
1782 
1783                 m_simpleBindings.emplace_back(sb);
1784 
1785                 if (m_params.isRayTracing())
1786                 {
1787                     sb.binding += 1;
1788                     sb.type           = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
1789                     sb.isResultBuffer = false;
1790                     sb.isRayTracingAS = true;
1791 
1792                     m_simpleBindings.emplace_back(sb);
1793                 }
1794             }
1795         }
1796     }
1797 }
1798 
1799 // Generate shaders for both acceleration structures and without them
initPrograms(vk::SourceCollections & programs) const1800 void DescriptorBufferTestCase::initPrograms(vk::SourceCollections &programs) const
1801 {
1802     const bool accStruct = m_params.isAccelerationStructureObligatory() || m_params.isAccelerationStructureOptional();
1803 
1804     initPrograms(programs, m_simpleBindings, accStruct, true);
1805 
1806     if (accStruct)
1807     {
1808         std::vector<SimpleBinding> simpleBindings(m_simpleBindings);
1809 
1810         for (auto &simpleBinding : simpleBindings)
1811             if (simpleBinding.type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR)
1812                 simpleBinding.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1813 
1814         initPrograms(programs, simpleBindings, false, false);
1815     }
1816 }
1817 
1818 // Initialize GLSL shaders used by all test cases.
initPrograms(vk::SourceCollections & programs,const std::vector<SimpleBinding> & simpleBindings,bool accStruct,bool addService) const1819 void DescriptorBufferTestCase::initPrograms(vk::SourceCollections &programs,
1820                                             const std::vector<SimpleBinding> &simpleBindings, bool accStruct,
1821                                             bool addService) const
1822 {
1823     // For vertex pipelines, a verification variable (in_result/out_result) is passed
1824     // through shader interfaces, until it can be output as a color write.
1825     //
1826     // Compute shaders still declare a "result" variable to help unify the verification logic.
1827     std::string extentionDeclarations = std::string(glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_460)) + "\n" +
1828                                         (m_params.isRayTracing() ? "#extension GL_EXT_ray_tracing : require\n" : "");
1829 
1830     if (m_params.isGraphics())
1831     {
1832         std::string srcDeclarations;
1833         std::string srcVerification;
1834         std::string suffix;
1835 
1836         if (m_params.stage == VK_SHADER_STAGE_VERTEX_BIT)
1837         {
1838             srcDeclarations = glslGlobalDeclarations(m_params, simpleBindings, accStruct) + "\n";
1839             srcVerification = glslOutputVerification(m_params, simpleBindings, accStruct) + "\n";
1840             suffix          = accStruct ? "_as" : "";
1841         }
1842 
1843         std::ostringstream str;
1844         str << extentionDeclarations << srcDeclarations
1845             << "\n"
1846                "layout(location = 0) out uvec4 out_result;\n"
1847                "\n"
1848                "void main (void) {\n"
1849                "    switch(gl_VertexIndex) {\n"
1850                "        case 0: gl_Position = vec4(-1, -1, 0, 1); break;\n"
1851                "        case 1: gl_Position = vec4(-1,  1, 0, 1); break;\n"
1852                "        case 2: gl_Position = vec4( 1, -1, 0, 1); break;\n"
1853                "\n"
1854                "        case 3: gl_Position = vec4( 1,  1, 0, 1); break;\n"
1855                "        case 4: gl_Position = vec4( 1, -1, 0, 1); break;\n"
1856                "        case 5: gl_Position = vec4(-1,  1, 0, 1); break;\n"
1857                "    }\n"
1858                "\n"
1859                "    uvec4 result = uvec4(0);\n"
1860                "\n"
1861             << srcVerification
1862             << "\n"
1863                "    out_result = result;\n"
1864                "}\n";
1865 
1866         if (addService || !srcDeclarations.empty())
1867             programs.glslSources.add("vert" + suffix) << glu::VertexSource(str.str());
1868     }
1869 
1870     if (m_params.isGraphics())
1871     {
1872         std::string srcDeclarations;
1873         std::string srcVerification;
1874         std::string suffix;
1875 
1876         if (m_params.stage == VK_SHADER_STAGE_FRAGMENT_BIT)
1877         {
1878             srcDeclarations = glslGlobalDeclarations(m_params, simpleBindings, accStruct) + "\n";
1879             srcVerification = glslOutputVerification(m_params, simpleBindings, accStruct) + "\n";
1880             suffix          = accStruct ? "_as" : "";
1881         }
1882 
1883         std::ostringstream str;
1884         str << extentionDeclarations << srcDeclarations
1885             << "\n"
1886                "layout(location = 0) in flat uvec4 in_result;\n"
1887                "\n"
1888                "layout(location = 0) out uint out_color;\n"
1889                "\n"
1890                "void main (void) {\n"
1891                "    uvec4 result = in_result;\n"
1892                "\n"
1893             << srcVerification
1894             << "\n"
1895                "    if (uint(gl_FragCoord.x) == 0)    out_color = result.x;\n"
1896                "    if (uint(gl_FragCoord.x) == 1)    out_color = result.y;\n"
1897                "    if (uint(gl_FragCoord.x) == 2)    out_color = result.z;\n"
1898                "    if (uint(gl_FragCoord.x) == 3)    out_color = result.w;\n"
1899                "}\n";
1900 
1901         if (addService || !srcDeclarations.empty())
1902             programs.glslSources.add("frag" + suffix) << glu::FragmentSource(str.str());
1903     }
1904 
1905     if (m_params.isGeometry())
1906     {
1907         std::string srcDeclarations = glslGlobalDeclarations(m_params, simpleBindings, accStruct) + "\n";
1908         std::string srcVerification = glslOutputVerification(m_params, simpleBindings, accStruct) + "\n";
1909         std::string suffix          = accStruct ? "_as" : "";
1910 
1911         std::ostringstream str;
1912         str << extentionDeclarations << srcDeclarations
1913             << "\n"
1914                "layout(triangles) in;\n"
1915                "layout(triangle_strip, max_vertices = 3) out;\n"
1916                "\n"
1917                "layout(location = 0) in  uvec4 in_result[];\n"
1918                "layout(location = 0) out uvec4 out_result;\n"
1919                "\n"
1920                "void main (void) {\n"
1921                "    for (uint i = 0; i < gl_in.length(); ++i) {\n"
1922                "        gl_Position = gl_in[i].gl_Position;\n"
1923                "\n"
1924                "        uvec4 result = in_result[i];\n"
1925                "\n"
1926             << srcVerification
1927             << "\n"
1928                "        out_result = result;\n"
1929                "\n"
1930                "        EmitVertex();\n"
1931                "    }\n"
1932                "}\n";
1933 
1934         if (addService || !srcDeclarations.empty())
1935             programs.glslSources.add("geom" + suffix) << glu::GeometrySource(str.str());
1936     }
1937 
1938     if (m_params.isTessellation())
1939     {
1940         std::string srcDeclarations;
1941         std::string srcVerification;
1942         std::string suffix;
1943 
1944         if (m_params.stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
1945         {
1946             srcDeclarations = glslGlobalDeclarations(m_params, simpleBindings, accStruct) + "\n";
1947             srcVerification = glslOutputVerification(m_params, simpleBindings, accStruct) + "\n";
1948             suffix          = accStruct ? "_as" : "";
1949         }
1950 
1951         std::ostringstream str;
1952         str << extentionDeclarations << "#extension GL_EXT_tessellation_shader : require\n"
1953             << srcDeclarations
1954             << "\n"
1955                "layout(vertices = 3) out;\n"
1956                "\n"
1957                "layout(location = 0) in  uvec4 in_result[];\n"
1958                "layout(location = 0) out uvec4 out_result[];\n"
1959                "\n"
1960                "void main (void) {\n"
1961                "    gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
1962                "    \n"
1963                "    gl_TessLevelOuter[0] = 1.0;\n"
1964                "    gl_TessLevelOuter[1] = 1.0;\n"
1965                "    gl_TessLevelOuter[2] = 1.0;\n"
1966                "    gl_TessLevelInner[0] = 1.0;\n"
1967                "\n"
1968                "    uvec4 result = in_result[gl_InvocationID];\n"
1969                "\n"
1970             << srcVerification
1971             << "\n"
1972                "    out_result[gl_InvocationID] = result;\n"
1973                "}\n";
1974 
1975         if (addService || !srcDeclarations.empty())
1976             programs.glslSources.add("tesc" + suffix) << glu::TessellationControlSource(str.str());
1977     }
1978 
1979     if (m_params.isTessellation())
1980     {
1981         std::string srcDeclarations;
1982         std::string srcVerification;
1983         std::string suffix;
1984 
1985         if (m_params.stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
1986         {
1987             srcDeclarations = glslGlobalDeclarations(m_params, simpleBindings, accStruct) + "\n";
1988             srcVerification = glslOutputVerification(m_params, simpleBindings, accStruct) + "\n";
1989             suffix          = accStruct ? "_as" : "";
1990         }
1991 
1992         std::ostringstream str;
1993         str << extentionDeclarations << "#extension GL_EXT_tessellation_shader : require\n"
1994             << srcDeclarations
1995             << "\n"
1996                "layout(triangles) in;\n"
1997                "\n"
1998                "layout(location = 0) in  uvec4 in_result[];\n"
1999                "layout(location = 0) out uvec4 out_result;\n"
2000                "\n"
2001                "void main (void) {\n"
2002                "    gl_Position.xyz = gl_TessCoord.x * gl_in[0].gl_Position.xyz +\n"
2003                "                      gl_TessCoord.y * gl_in[1].gl_Position.xyz +\n"
2004                "                      gl_TessCoord.z * gl_in[2].gl_Position.xyz;\n"
2005                "    gl_Position.w   = 1.0;\n"
2006                "\n"
2007                "    uvec4 result = in_result[0];\n" // Use index 0, all vertices should have the same value
2008                "\n"
2009             << srcVerification
2010             << "\n"
2011                "    out_result = result;\n"
2012                "}\n";
2013 
2014         if (addService || !srcDeclarations.empty())
2015             programs.glslSources.add("tese" + suffix) << glu::TessellationEvaluationSource(str.str());
2016     }
2017 
2018     if (m_params.isCompute())
2019     {
2020         const std::string suffix = accStruct ? "_as" : "";
2021         std::ostringstream str;
2022         str << extentionDeclarations << glslGlobalDeclarations(m_params, simpleBindings, accStruct)
2023             << "\n"
2024                "layout(local_size_x = 1) in;\n"
2025                "\n"
2026                "void main (void) {\n"
2027                "    uvec4 result = uvec4(0);\n"
2028                "\n"
2029             << glslOutputVerification(m_params, simpleBindings, accStruct) << "}\n";
2030 
2031         programs.glslSources.add("comp" + suffix) << glu::ComputeSource(str.str());
2032     }
2033 
2034     if (m_params.isRayTracing())
2035     {
2036         const std::string missPassthrough = extentionDeclarations +
2037                                             "layout(location = 0) rayPayloadInEXT vec3 hitValue;\n"
2038                                             "\n"
2039                                             "void main()\n"
2040                                             "{\n"
2041                                             "}\n";
2042         const std::string hitPassthrough = extentionDeclarations +
2043                                            "hitAttributeEXT vec3 attribs;\n"
2044                                            "layout(location = 0) rayPayloadInEXT vec3 hitValue;\n"
2045                                            "\n"
2046                                            "void main()\n"
2047                                            "{\n"
2048                                            "}\n";
2049         const uint32_t asIndex         = getRayTracingASIndex(simpleBindings);
2050         const auto &asBinding          = simpleBindings[asIndex];
2051         const std::string asName       = glslResourceName(asBinding.set, asBinding.binding);
2052         const std::string raygenCommon = extentionDeclarations +
2053                                          "layout(location = 0) rayPayloadEXT vec3 hitValue;\n"
2054                                          "layout(set = " +
2055                                          de::toString(asBinding.set) +
2056                                          ", binding = " + de::toString(asBinding.binding) +
2057                                          ") uniform accelerationStructureEXT " + asName +
2058                                          ";\n"
2059                                          "\n"
2060                                          "void main()\n"
2061                                          "{\n"
2062                                          "    uint  rayFlags = 0;\n"
2063                                          "    uint  cullMask = 0xFF;\n"
2064                                          "    float tmin     = 0.0f;\n"
2065                                          "    float tmax     = 9.0f;\n"
2066                                          "    vec3  origin   = vec3(0.0f, 0.0f, 0.0f);\n"
2067                                          "    vec3  direct   = vec3(0.0f, 0.0f, -1.0f);\n"
2068                                          "    traceRayEXT(" +
2069                                          asName +
2070                                          ", rayFlags, cullMask, 0, 0, 0, origin, tmin, direct, tmax, 0);\n"
2071                                          "}\n";
2072         const vk::ShaderBuildOptions buildOptions =
2073             vk::ShaderBuildOptions(programs.usedVulkanVersion, vk::SPIRV_VERSION_1_4, 0u, true);
2074         const std::string suffix          = accStruct ? "_as" : "";
2075         const std::string srcDeclarations = glslGlobalDeclarations(m_params, simpleBindings, accStruct) + "\n";
2076         const std::string srcVerification =
2077             "    uvec4 result = uvec4(0);\n" + glslOutputVerification(m_params, simpleBindings, accStruct) + "\n";
2078 
2079         switch (m_params.stage)
2080         {
2081         case VK_SHADER_STAGE_RAYGEN_BIT_KHR:
2082         {
2083             std::stringstream css;
2084             css << extentionDeclarations << "\n"
2085                 << srcDeclarations
2086                 << "\n"
2087                    "void main()\n"
2088                    "{\n"
2089                 << srcVerification << "}\n";
2090 
2091             programs.glslSources.add("rgen" + suffix) << glu::RaygenSource(css.str()) << buildOptions;
2092 
2093             break;
2094         }
2095 
2096         case VK_SHADER_STAGE_ANY_HIT_BIT_KHR:
2097         {
2098             if (addService)
2099                 programs.glslSources.add("rgen") << glu::RaygenSource(raygenCommon) << buildOptions;
2100 
2101             {
2102                 std::stringstream css;
2103                 css << extentionDeclarations << "\n"
2104                     << srcDeclarations
2105                     << "hitAttributeEXT vec3 attribs;\n"
2106                        "layout(location = 0) rayPayloadInEXT vec3 hitValue;\n"
2107                        "\n"
2108                        "void main()\n"
2109                        "{\n"
2110                     << srcVerification << "}\n";
2111 
2112                 programs.glslSources.add("ahit" + suffix) << glu::AnyHitSource(css.str()) << buildOptions;
2113             }
2114 
2115             if (addService)
2116                 programs.glslSources.add("chit") << glu::ClosestHitSource(hitPassthrough) << buildOptions;
2117             if (addService)
2118                 programs.glslSources.add("miss") << glu::MissSource(missPassthrough) << buildOptions;
2119 
2120             break;
2121         }
2122 
2123         case VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR:
2124         {
2125             if (addService)
2126                 programs.glslSources.add("rgen") << glu::RaygenSource(raygenCommon) << buildOptions;
2127 
2128             {
2129                 std::stringstream css;
2130                 css << extentionDeclarations << "\n"
2131                     << srcDeclarations
2132                     << "layout(location = 0) rayPayloadInEXT vec3 hitValue;\n"
2133                        "hitAttributeEXT vec3 attribs;\n"
2134                        "\n"
2135                        "\n"
2136                        "void main()\n"
2137                        "{\n"
2138                     << srcVerification << "}\n";
2139 
2140                 programs.glslSources.add("chit" + suffix) << glu::ClosestHitSource(css.str()) << buildOptions;
2141             }
2142 
2143             if (addService)
2144                 programs.glslSources.add("ahit") << glu::AnyHitSource(hitPassthrough) << buildOptions;
2145             if (addService)
2146                 programs.glslSources.add("miss") << glu::MissSource(missPassthrough) << buildOptions;
2147 
2148             break;
2149         }
2150 
2151         case VK_SHADER_STAGE_INTERSECTION_BIT_KHR:
2152         {
2153             if (addService)
2154                 programs.glslSources.add("rgen") << glu::RaygenSource(raygenCommon) << buildOptions;
2155 
2156             {
2157                 std::stringstream css;
2158                 css << extentionDeclarations << "\n"
2159                     << srcDeclarations
2160                     << "hitAttributeEXT vec3 hitAttribute;\n"
2161                        "\n"
2162                        "void main()\n"
2163                        "{\n"
2164                     << srcVerification
2165                     << "    hitAttribute = vec3(0.0f, 0.0f, 0.0f);\n"
2166                        "    reportIntersectionEXT(1.0f, 0);\n"
2167                        "}\n";
2168 
2169                 programs.glslSources.add("sect" + suffix) << glu::IntersectionSource(css.str()) << buildOptions;
2170             }
2171 
2172             if (addService)
2173                 programs.glslSources.add("ahit") << glu::AnyHitSource(hitPassthrough) << buildOptions;
2174             if (addService)
2175                 programs.glslSources.add("chit") << glu::ClosestHitSource(hitPassthrough) << buildOptions;
2176             if (addService)
2177                 programs.glslSources.add("miss") << glu::MissSource(missPassthrough) << buildOptions;
2178 
2179             break;
2180         }
2181 
2182         case VK_SHADER_STAGE_MISS_BIT_KHR:
2183         {
2184             if (addService)
2185                 programs.glslSources.add("rgen") << glu::RaygenSource(raygenCommon) << buildOptions;
2186 
2187             {
2188                 std::stringstream css;
2189                 css << extentionDeclarations << "\n"
2190                     << srcDeclarations
2191                     << "\n"
2192                        "layout(location = 0) rayPayloadInEXT vec3 hitValue;\n"
2193                        "\n"
2194                        "void main()\n"
2195                        "{\n"
2196                     << srcVerification << "}\n";
2197 
2198                 programs.glslSources.add("miss" + suffix) << glu::MissSource(css.str()) << buildOptions;
2199             }
2200 
2201             if (addService)
2202                 programs.glslSources.add("ahit") << glu::AnyHitSource(hitPassthrough) << buildOptions;
2203             if (addService)
2204                 programs.glslSources.add("chit") << glu::ClosestHitSource(hitPassthrough) << buildOptions;
2205 
2206             break;
2207         }
2208 
2209         case VK_SHADER_STAGE_CALLABLE_BIT_KHR:
2210         {
2211             {
2212                 std::stringstream css;
2213                 css << extentionDeclarations << "\n"
2214                     << (accStruct ? "#extension GL_EXT_ray_query : require\n" : "")
2215                     << "\n"
2216                        "layout(location = 0) callableDataEXT float dummy;"
2217                        "\n"
2218                        "void main()\n"
2219                        "{\n"
2220                        "    executeCallableEXT(0, 0);\n"
2221                        "}\n";
2222 
2223                 if (addService)
2224                     programs.glslSources.add("rgen") << glu::RaygenSource(css.str()) << buildOptions;
2225             }
2226 
2227             {
2228                 std::stringstream css;
2229                 css << extentionDeclarations << "\n"
2230                     << srcDeclarations
2231                     << "\n"
2232                        "layout(location = 0) callableDataInEXT float dummy;"
2233                        "\n"
2234                        "void main()\n"
2235                        "{\n"
2236                     << srcVerification << "}\n";
2237 
2238                 programs.glslSources.add("call" + suffix) << glu::CallableSource(css.str()) << buildOptions;
2239             }
2240 
2241             if (addService)
2242                 programs.glslSources.add("ahit") << glu::AnyHitSource(hitPassthrough) << buildOptions;
2243             if (addService)
2244                 programs.glslSources.add("chit") << glu::ClosestHitSource(hitPassthrough) << buildOptions;
2245             if (addService)
2246                 programs.glslSources.add("miss") << glu::MissSource(missPassthrough) << buildOptions;
2247 
2248             break;
2249         }
2250 
2251         default:
2252             TCU_THROW(InternalError, "Unknown stage");
2253         }
2254     }
2255 }
2256 
checkSupport(Context & context) const2257 void DescriptorBufferTestCase::checkSupport(Context &context) const
2258 {
2259     // Required to test the extension
2260     if (!context.isDeviceFunctionalitySupported("VK_EXT_descriptor_buffer"))
2261     {
2262         TCU_THROW(NotSupportedError, "VK_EXT_descriptor_buffer is not supported");
2263     }
2264 
2265     if (!context.isInstanceFunctionalitySupported("VK_KHR_get_physical_device_properties2"))
2266     {
2267         TCU_THROW(NotSupportedError, "VK_KHR_get_physical_device_properties2 is not supported");
2268     }
2269 
2270     if (!context.isDeviceFunctionalitySupported("VK_KHR_buffer_device_address"))
2271     {
2272         TCU_THROW(NotSupportedError, "VK_KHR_buffer_device_address is not supported");
2273     }
2274 
2275     if (!context.isDeviceFunctionalitySupported("VK_KHR_synchronization2"))
2276     {
2277         TCU_THROW(NotSupportedError, "VK_KHR_synchronization2 is not supported");
2278     }
2279 
2280     if (!context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing"))
2281     {
2282         TCU_THROW(NotSupportedError, "VK_EXT_descriptor_indexing is not supported");
2283     }
2284 
2285     context.requireDeviceFunctionality("VK_KHR_buffer_device_address");
2286     context.requireDeviceFunctionality("VK_KHR_maintenance4");
2287     if (m_params.useMaintenance5)
2288         context.requireDeviceFunctionality("VK_KHR_maintenance5");
2289 
2290     // Optional
2291     if ((m_params.resourceResidency == ResourceResidency::SPARSE_BINDING) &&
2292         (context.getDeviceFeatures().sparseBinding == VK_FALSE))
2293     {
2294         TCU_THROW(NotSupportedError, "sparseBinding feature is not supported");
2295     }
2296 
2297     if ((m_params.resourceResidency == ResourceResidency::SPARSE_RESIDENCY) &&
2298         ((context.getDeviceFeatures().sparseBinding == VK_FALSE) ||
2299          (context.getDeviceFeatures().sparseResidencyBuffer == VK_FALSE)))
2300     {
2301         TCU_THROW(NotSupportedError, "sparseResidencyBuffer feature is not supported");
2302     }
2303 
2304     if ((m_params.descriptor == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) &&
2305         !context.isDeviceFunctionalitySupported("VK_EXT_inline_uniform_block"))
2306     {
2307         TCU_THROW(NotSupportedError, "VK_EXT_inline_uniform_block is not supported");
2308     }
2309 
2310     const auto &descriptorBufferFeatures = context.getDescriptorBufferFeaturesEXT();
2311     const auto &descriptorBufferProps    = context.getDescriptorBufferPropertiesEXT();
2312 
2313     if (!descriptorBufferFeatures.descriptorBuffer)
2314     {
2315         TCU_THROW(NotSupportedError, "descriptorBufferFeatures.descriptorBuffer is not supported");
2316     }
2317 
2318     if (m_params.variant == TestVariant::CAPTURE_REPLAY)
2319     {
2320         if (descriptorBufferFeatures.descriptorBufferCaptureReplay == VK_FALSE)
2321         {
2322             TCU_THROW(NotSupportedError, "descriptorBufferCaptureReplay feature is not supported");
2323         }
2324 
2325         if ((m_params.subcase == SubCase::CAPTURE_REPLAY_CUSTOM_BORDER_COLOR) &&
2326             !context.isDeviceFunctionalitySupported("VK_EXT_custom_border_color"))
2327         {
2328             TCU_THROW(NotSupportedError, "VK_EXT_custom_border_color is not supported");
2329         }
2330     }
2331 
2332     if (m_params.isTessellation() && (context.getDeviceFeatures().tessellationShader == VK_FALSE))
2333     {
2334         TCU_THROW(NotSupportedError, "tessellationShader feature is not supported");
2335     }
2336     else if (m_params.isGeometry() && (context.getDeviceFeatures().geometryShader == VK_FALSE))
2337     {
2338         TCU_THROW(NotSupportedError, "geometryShader feature is not supported");
2339     }
2340 
2341     if (m_params.bufferBindingCount * m_params.setsPerBuffer >
2342         context.getDeviceProperties().limits.maxBoundDescriptorSets)
2343         TCU_THROW(NotSupportedError, "Test requires more descriptor sets than specified in maxBoundDescriptorSets");
2344 
2345     // Test case specific
2346     if (m_params.isPushDescriptorTest())
2347     {
2348         context.requireDeviceFunctionality("VK_KHR_push_descriptor");
2349 
2350         if (descriptorBufferFeatures.descriptorBufferPushDescriptors == VK_FALSE)
2351         {
2352             TCU_THROW(NotSupportedError, "Require descriptorBufferFeatures.descriptorBufferPushDescriptors");
2353         }
2354 
2355         if (m_params.bufferBindingCount + 1 > context.getDeviceProperties().limits.maxBoundDescriptorSets)
2356             TCU_THROW(NotSupportedError, "Test requires more descriptor sets than specified in maxBoundDescriptorSets");
2357 
2358         if (m_params.subcase == SubCase::SINGLE_BUFFER)
2359         {
2360             if (descriptorBufferProps.bufferlessPushDescriptors)
2361                 TCU_THROW(NotSupportedError, "Require bufferlessPushDescriptors to be false");
2362         }
2363         else
2364         {
2365             if (m_params.samplerBufferBindingCount + 1 > descriptorBufferProps.maxSamplerDescriptorBufferBindings)
2366             {
2367                 TCU_THROW(NotSupportedError, "maxSamplerDescriptorBufferBindings is too small");
2368             }
2369 
2370             if (m_params.resourceBufferBindingCount + 1 > descriptorBufferProps.maxResourceDescriptorBufferBindings)
2371             {
2372                 TCU_THROW(NotSupportedError, "maxResourceDescriptorBufferBindings is too small");
2373             }
2374         }
2375     }
2376 
2377     if (m_params.bufferBindingCount > descriptorBufferProps.maxDescriptorBufferBindings)
2378     {
2379         TCU_THROW(NotSupportedError, "maxDescriptorBufferBindings is too small");
2380     }
2381 
2382     if (m_params.samplerBufferBindingCount > descriptorBufferProps.maxSamplerDescriptorBufferBindings)
2383     {
2384         TCU_THROW(NotSupportedError, "maxSamplerDescriptorBufferBindings is too small");
2385     }
2386 
2387     if (m_params.resourceBufferBindingCount > descriptorBufferProps.maxResourceDescriptorBufferBindings)
2388     {
2389         TCU_THROW(NotSupportedError, "maxResourceDescriptorBufferBindings is too small");
2390     }
2391 
2392     if ((m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS) ||
2393         (m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR))
2394     {
2395         if (context.isDeviceFunctionalitySupported("VK_EXT_robustness2"))
2396         {
2397             VkPhysicalDeviceFeatures2 features2                        = initVulkanStructure();
2398             VkPhysicalDeviceRobustness2FeaturesEXT robustness2Features = initVulkanStructure();
2399 
2400             features2.pNext = &robustness2Features;
2401 
2402             context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);
2403 
2404             if ((m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR) &&
2405                 (robustness2Features.nullDescriptor == VK_FALSE))
2406             {
2407                 TCU_THROW(NotSupportedError, "robustness2 nullDescriptor is not supported");
2408             }
2409 
2410             DE_ASSERT(features2.features.robustBufferAccess);
2411         }
2412         else if (m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR)
2413         {
2414             TCU_THROW(NotSupportedError, "VK_EXT_robustness2 is not supported");
2415         }
2416         else if (m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS)
2417         {
2418             VkPhysicalDeviceFeatures features{};
2419             context.getInstanceInterface().getPhysicalDeviceFeatures(context.getPhysicalDevice(), &features);
2420 
2421             if (features.robustBufferAccess == VK_FALSE)
2422             {
2423                 TCU_THROW(NotSupportedError, "robustBufferAccess is not supported");
2424             }
2425         }
2426     }
2427     else if ((m_params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE) &&
2428              !context.isDeviceFunctionalitySupported("VK_EXT_mutable_descriptor_type"))
2429     {
2430         TCU_THROW(NotSupportedError, "VK_EXT_mutable_descriptor_type is not supported");
2431     }
2432 
2433     if ((m_params.descriptor == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) ||
2434         (m_params.variant == TestVariant::MULTIPLE) || m_params.isPushDescriptorTest())
2435     {
2436         const auto &inlineUniformBlockFeatures = context.getInlineUniformBlockFeatures();
2437 
2438         if (!inlineUniformBlockFeatures.inlineUniformBlock)
2439         {
2440             TCU_THROW(NotSupportedError, "inlineUniformBlock is required");
2441         }
2442     }
2443 
2444     if (m_params.variant == TestVariant::MULTIPLE)
2445     {
2446         const VkPhysicalDeviceVulkan13Properties &vulkan13properties =
2447             *findStructure<VkPhysicalDeviceVulkan13Properties>(&context.getDeviceVulkan13Properties());
2448 
2449         if (m_params.bufferBindingCount > vulkan13properties.maxPerStageDescriptorInlineUniformBlocks)
2450             TCU_THROW(NotSupportedError, "Test require more per-stage inline uniform block bindings count. Provided " +
2451                                              de::toString(vulkan13properties.maxPerStageDescriptorInlineUniformBlocks));
2452 
2453         if (m_params.bufferBindingCount > vulkan13properties.maxDescriptorSetInlineUniformBlocks)
2454             TCU_THROW(NotSupportedError, "Test require more inline uniform block bindings among all stages. Provided " +
2455                                              de::toString(vulkan13properties.maxDescriptorSetInlineUniformBlocks));
2456 
2457         if (m_params.bufferBindingCount > vulkan13properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks)
2458             TCU_THROW(NotSupportedError,
2459                       "Test require more per-stage inline uniform block bindings count. Provided " +
2460                           de::toString(vulkan13properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks));
2461 
2462         if (m_params.bufferBindingCount > vulkan13properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks)
2463             TCU_THROW(NotSupportedError,
2464                       "Test require more inline uniform block bindings among all stages. Provided " +
2465                           de::toString(vulkan13properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks));
2466     }
2467 
2468     if (m_params.isAccelerationStructureObligatory())
2469     {
2470         context.requireDeviceFunctionality("VK_KHR_ray_query");
2471     }
2472 
2473     if (m_params.isRayTracing())
2474     {
2475         context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2476         context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
2477     }
2478 
2479     if ((m_params.variant == TestVariant::YCBCR_SAMPLER) &&
2480         !context.isDeviceFunctionalitySupported("VK_KHR_sampler_ycbcr_conversion"))
2481     {
2482         TCU_THROW(NotSupportedError, "VK_KHR_sampler_ycbcr_conversion is not supported");
2483     }
2484 
2485     if (m_params.commands2)
2486     {
2487         context.requireDeviceFunctionality("VK_KHR_maintenance6");
2488     }
2489 }
2490 
2491 // The base class for all test case implementations.
2492 class DescriptorBufferTestInstance : public TestInstance
2493 {
2494 public:
2495     DescriptorBufferTestInstance(Context &context, const TestParams &params,
2496                                  const std::vector<SimpleBinding> &simpleBindings);
2497 
2498     tcu::TestStatus iterate() override;
2499 
2500     void createRayTracingPipeline();
2501     de::MovePtr<BufferWithMemory> createShaderBindingTable(const InstanceInterface &vki, const DeviceInterface &vkd,
2502                                                            const VkDevice device, const VkPhysicalDevice physicalDevice,
2503                                                            const VkPipeline pipeline, Allocator &allocator,
2504                                                            de::MovePtr<RayTracingPipeline> &rayTracingPipeline,
2505                                                            const uint32_t group);
2506     void addRayTracingShader(const VkShaderStageFlagBits stage, const uint32_t group);
2507 
2508     void createGraphicsPipeline();
2509     void createDescriptorSetLayouts();
2510     void createDescriptorBuffers();
2511 
2512     void initializeBinding(const DescriptorSetLayoutHolder &dsl, uint32_t setIndex, Binding &binding);
2513 
2514     void pushDescriptorSet(VkCommandBuffer cmdBuf, VkPipelineBindPoint bindPoint, const DescriptorSetLayoutHolder &dsl,
2515                            uint32_t setIndex) const;
2516 
2517     void bindDescriptorBuffers(VkCommandBuffer cmdBuf, VkPipelineBindPoint bindPoint) const;
2518 
2519     void createBufferForBinding(ResourceHolder &resources, VkDescriptorType descriptorType,
2520                                 VkBufferCreateInfo createInfo, bool isResultBuffer) const;
2521 
2522     void createImageForBinding(ResourceHolder &resources, VkDescriptorType descriptorType) const;
2523 
allocate(const VkMemoryRequirements & memReqs,const MemoryRequirement requirement,const void * pNext=nullptr) const2524     MovePtr<Allocation> allocate(const VkMemoryRequirements &memReqs, const MemoryRequirement requirement,
2525                                  const void *pNext = nullptr) const
2526     {
2527         return allocateExtended(m_context.getInstanceInterface(), *m_deviceInterface, m_context.getPhysicalDevice(),
2528                                 *m_device, memReqs, requirement, pNext);
2529     }
2530 
2531     // Descriptor size is used to determine the stride of a descriptor array (for bindings with multiple descriptors).
2532     VkDeviceSize getDescriptorSize(const Binding &binding) const;
2533     VkDeviceSize getDescriptorTypeSize(VkDescriptorType descriptorType) const;
2534 
addDescriptorSetLayout()2535     uint32_t addDescriptorSetLayout()
2536     {
2537         m_descriptorSetLayouts.emplace_back(makeSharedUniquePtr<DescriptorSetLayoutHolder>());
2538         return u32(m_descriptorSetLayouts.size()) - 1;
2539     }
2540 
2541     // The resources used by descriptors are tracked in a simple array and referenced by an index.
addResource()2542     uint32_t addResource()
2543     {
2544         m_resources.emplace_back(makeSharedUniquePtr<ResourceHolder>());
2545         return u32(m_resources.size()) - 1;
2546     }
2547 
getOrCreateResource(Binding & binding,uint32_t arrayIndex)2548     ResourceHolder &getOrCreateResource(Binding &binding, uint32_t arrayIndex)
2549     {
2550         if (binding.perBindingResourceIndex[arrayIndex] == INDEX_INVALID)
2551         {
2552             binding.perBindingResourceIndex[arrayIndex] = addResource();
2553         }
2554 
2555         ResourceHolder &result = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]);
2556 
2557         return result;
2558     }
2559 
getShaderName(const VkShaderStageFlagBits stage) const2560     const std::string getShaderName(const VkShaderStageFlagBits stage) const
2561     {
2562         return toString(stage) + (m_params.isAccelerationStructure() && (m_params.stage == stage) ? "_as" : "");
2563     }
2564 
getShaderBinary(const VkShaderStageFlagBits stage) const2565     const ProgramBinary &getShaderBinary(const VkShaderStageFlagBits stage) const
2566     {
2567         return m_context.getBinaryCollection().get(getShaderName(stage));
2568     }
2569 
isCaptureDescriptor(VkDescriptorType type) const2570     bool isCaptureDescriptor(VkDescriptorType type) const
2571     {
2572         return (m_testIteration == 0) && m_params.isCaptureReplayDescriptor(type);
2573     }
2574 
isReplayDescriptor(VkDescriptorType type) const2575     bool isReplayDescriptor(VkDescriptorType type) const
2576     {
2577         return (m_testIteration == 1) && m_params.isCaptureReplayDescriptor(type);
2578     }
2579 
2580     // Test cases using compute shaders always declare one binding with a result buffer.
getResultBuffer() const2581     const BufferAlloc &getResultBuffer() const
2582     {
2583         DE_ASSERT(m_params.isCompute() || m_params.isRayTracing());
2584 
2585         const uint32_t resultBufferIndex = getResultBufferIndex(m_simpleBindings);
2586         DE_ASSERT(resultBufferIndex != INDEX_INVALID);
2587         const auto &sb = m_simpleBindings[resultBufferIndex];
2588 
2589         const auto binding = std::find_if((**m_descriptorSetLayouts[sb.set]).bindings.begin(),
2590                                           (**m_descriptorSetLayouts[sb.set]).bindings.end(),
2591                                           [&sb](const Binding &it) { return it.binding == sb.binding; });
2592 
2593         DE_ASSERT(binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
2594 
2595         // There's only one result buffer at this binding
2596         return (**m_resources[binding->perBindingResourceIndex[0]]).buffer;
2597     }
2598 
2599 protected:
2600     TestParams m_params;
2601     std::vector<SimpleBinding> m_simpleBindings;
2602 
2603     Move<VkDevice> m_device;
2604     MovePtr<DeviceDriver> m_deviceInterface;
2605     VkQueue m_queue;
2606     uint32_t m_queueFamilyIndex;
2607     VkQueue m_sparseQueue;
2608     uint32_t m_sparseQueueFamilyIndex;
2609     MovePtr<Allocator> m_allocatorPtr;
2610 
2611     VkPhysicalDeviceMemoryProperties m_memoryProperties;
2612     VkPhysicalDeviceDescriptorBufferFeaturesEXT m_descriptorBufferFeatures;
2613     VkPhysicalDeviceDescriptorBufferPropertiesEXT m_descriptorBufferProperties;
2614 
2615     Move<VkPipeline> m_pipeline;
2616     Move<VkPipelineLayout> m_pipelineLayout;
2617 
2618     // Optional, for graphics pipelines
2619     Move<VkFramebuffer> m_framebuffer;
2620     Move<VkRenderPass> m_renderPass;
2621     VkRect2D m_renderArea;
2622     ImageAlloc m_colorImage;
2623     BufferAlloc m_colorBuffer; // for copying back to host visible memory
2624 
2625     std::vector<DSLPtr> m_descriptorSetLayouts;
2626     std::vector<BufferAllocPtr> m_descriptorBuffers;
2627     BufferAlloc m_descriptorStagingBuffer;
2628 
2629     // Ray Tracing fields
2630     uint32_t m_shaders;
2631     uint32_t m_raygenShaderGroup;
2632     uint32_t m_missShaderGroup;
2633     uint32_t m_hitShaderGroup;
2634     uint32_t m_callableShaderGroup;
2635     uint32_t m_shaderGroupCount;
2636 
2637     de::MovePtr<RayTracingPipeline> m_rayTracingPipeline;
2638 
2639     de::MovePtr<BufferWithMemory> m_raygenShaderBindingTable;
2640     de::MovePtr<BufferWithMemory> m_hitShaderBindingTable;
2641     de::MovePtr<BufferWithMemory> m_missShaderBindingTable;
2642     de::MovePtr<BufferWithMemory> m_callableShaderBindingTable;
2643 
2644     VkStridedDeviceAddressRegionKHR m_raygenShaderBindingTableRegion;
2645     VkStridedDeviceAddressRegionKHR m_missShaderBindingTableRegion;
2646     VkStridedDeviceAddressRegionKHR m_hitShaderBindingTableRegion;
2647     VkStridedDeviceAddressRegionKHR m_callableShaderBindingTableRegion;
2648 
2649     de::SharedPtr<BottomLevelAccelerationStructure> m_bottomLevelAccelerationStructure;
2650     de::SharedPtr<TopLevelAccelerationStructure> m_topLevelAccelerationStructure;
2651 
2652     // Optional, ycbcr conversion test
2653     VkFormat m_imageColorFormat;
2654     uint32_t m_combinedImageSamplerDescriptorCount;
2655 
2656     // Common, but last
2657     std::vector<ResourcePtr> m_resources; // various resources used to test the descriptors
2658     uint32_t m_testIteration;             // for multi-pass tests such as capture/replay
2659 };
2660 
DescriptorBufferTestInstance(Context & context,const TestParams & params,const std::vector<SimpleBinding> & simpleBindings)2661 DescriptorBufferTestInstance::DescriptorBufferTestInstance(Context &context, const TestParams &params,
2662                                                            const std::vector<SimpleBinding> &simpleBindings)
2663     : TestInstance(context)
2664     , m_params(params)
2665     , m_simpleBindings(simpleBindings)
2666     , m_device()
2667     , m_deviceInterface()
2668     , m_queue()
2669     , m_queueFamilyIndex()
2670     , m_sparseQueue()
2671     , m_sparseQueueFamilyIndex()
2672     , m_allocatorPtr(nullptr)
2673     , m_memoryProperties()
2674     , m_descriptorBufferFeatures()
2675     , m_descriptorBufferProperties()
2676     , m_pipeline()
2677     , m_pipelineLayout()
2678     , m_framebuffer()
2679     , m_renderPass()
2680     , m_renderArea(makeRect2D(0, 0, 4, 2)) // 4x2 to support _420 format, if needed
2681     , m_colorImage()
2682     , m_colorBuffer()
2683     , m_descriptorSetLayouts()
2684     , m_descriptorBuffers()
2685     , m_descriptorStagingBuffer()
2686     , m_shaders(0)
2687     , m_raygenShaderGroup(~0u)
2688     , m_missShaderGroup(~0u)
2689     , m_hitShaderGroup(~0u)
2690     , m_callableShaderGroup(~0u)
2691     , m_shaderGroupCount(0)
2692     , m_rayTracingPipeline(nullptr)
2693     , m_raygenShaderBindingTable()
2694     , m_hitShaderBindingTable()
2695     , m_missShaderBindingTable()
2696     , m_callableShaderBindingTable()
2697     , m_raygenShaderBindingTableRegion()
2698     , m_missShaderBindingTableRegion()
2699     , m_hitShaderBindingTableRegion()
2700     , m_callableShaderBindingTableRegion()
2701     , m_bottomLevelAccelerationStructure()
2702     , m_topLevelAccelerationStructure()
2703     , m_imageColorFormat()
2704     , m_combinedImageSamplerDescriptorCount(1)
2705     , m_resources()
2706     , m_testIteration(0)
2707 {
2708     // Need to create a new device because:
2709     // - We want to test graphics and compute queues,
2710     // - We must exclude VK_AMD_shader_fragment_mask from the enabled extensions.
2711 
2712     if (m_params.isAccelerationStructure() && m_params.isAccelerationStructureOptional())
2713     {
2714         if (!m_context.getRayQueryFeatures().rayQuery)
2715         {
2716             // Disable testing of acceleration structures if they ray query is not supported
2717             m_params.descriptor = VK_DESCRIPTOR_TYPE_MAX_ENUM;
2718 
2719             // Replace acceleration structures with storage buffers
2720             for (auto &simpleBinding : m_simpleBindings)
2721                 if ((simpleBinding.type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR) &&
2722                     !simpleBinding.isRayTracingAS)
2723                     simpleBinding.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
2724         }
2725         else
2726         {
2727             context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2728         }
2729     }
2730 
2731     if ((m_params.variant == TestVariant::MULTIPLE) || (m_params.variant == TestVariant::PUSH_DESCRIPTOR) ||
2732         (m_params.variant == TestVariant::PUSH_TEMPLATE) || (m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS) ||
2733         (m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR))
2734     {
2735         const vk::VkPhysicalDeviceLimits &limits = context.getDeviceProperties().limits;
2736         uint32_t maxPerStageDescriptorSamplers =
2737             0; // VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
2738         uint32_t maxPerStageDescriptorUniformBuffers = 0; // VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
2739         uint32_t maxPerStageDescriptorStorageBuffers = 0; // VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
2740         uint32_t maxPerStageDescriptorSampledImages =
2741             0; // VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, or VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
2742         uint32_t maxPerStageDescriptorStorageImages =
2743             0; // VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
2744         uint32_t maxPerStageDescriptorInputAttachments = 0; // VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
2745 
2746         for (const auto &simpleBinding : m_simpleBindings)
2747         {
2748             switch (simpleBinding.type)
2749             {
2750             case VK_DESCRIPTOR_TYPE_SAMPLER:
2751                 maxPerStageDescriptorSamplers += simpleBinding.count;
2752                 break;
2753             case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2754                 maxPerStageDescriptorSamplers += simpleBinding.count;
2755                 maxPerStageDescriptorSampledImages += simpleBinding.count;
2756                 break;
2757             case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2758                 maxPerStageDescriptorUniformBuffers += simpleBinding.count;
2759                 break;
2760             case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2761                 maxPerStageDescriptorStorageBuffers += simpleBinding.count;
2762                 break;
2763             case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2764                 maxPerStageDescriptorSampledImages += simpleBinding.count;
2765                 break;
2766             case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2767                 maxPerStageDescriptorSampledImages += simpleBinding.count;
2768                 break;
2769             case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2770                 maxPerStageDescriptorStorageImages += simpleBinding.count;
2771                 break;
2772             case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2773                 maxPerStageDescriptorStorageImages += simpleBinding.count;
2774                 break;
2775             case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2776                 maxPerStageDescriptorInputAttachments += simpleBinding.count;
2777                 break;
2778             default:
2779                 break;
2780             }
2781         }
2782 
2783 #define VALIDATE_PER_STAGE_LIMIT(NAME)                                                                           \
2784     if (NAME > limits.NAME)                                                                                      \
2785         TCU_THROW(NotSupportedError, std::string(#NAME) + " " + de::toString(NAME) + " is greater than limit " + \
2786                                          de::toString(limits.NAME));
2787         VALIDATE_PER_STAGE_LIMIT(maxPerStageDescriptorSamplers);
2788         VALIDATE_PER_STAGE_LIMIT(maxPerStageDescriptorUniformBuffers);
2789         VALIDATE_PER_STAGE_LIMIT(maxPerStageDescriptorStorageBuffers);
2790         VALIDATE_PER_STAGE_LIMIT(maxPerStageDescriptorSampledImages);
2791         VALIDATE_PER_STAGE_LIMIT(maxPerStageDescriptorStorageImages);
2792         VALIDATE_PER_STAGE_LIMIT(maxPerStageDescriptorInputAttachments);
2793 #undef VALIDATE_PER_STAGE_LIMIT
2794     }
2795 
2796     auto &inst                           = context.getInstanceInterface();
2797     auto physDevice                      = context.getPhysicalDevice();
2798     auto queueProps                      = getPhysicalDeviceQueueFamilyProperties(inst, physDevice);
2799     const bool sparseCompatibilityDevice = !(m_params.resourceResidency == ResourceResidency::TRADITIONAL);
2800 
2801     uint32_t graphicsComputeQueue = VK_QUEUE_FAMILY_IGNORED;
2802     m_queueFamilyIndex            = VK_QUEUE_FAMILY_IGNORED;
2803     m_sparseQueueFamilyIndex      = VK_QUEUE_FAMILY_IGNORED;
2804 
2805     for (uint32_t i = 0; i < queueProps.size(); ++i)
2806     {
2807         // Looking for queue that supports sparse resource operations
2808         if (sparseCompatibilityDevice)
2809         {
2810             if ((queueProps[i].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) != 0)
2811             {
2812                 m_sparseQueueFamilyIndex = i;
2813             }
2814         }
2815 
2816         if (m_params.queue == VK_QUEUE_GRAPHICS_BIT)
2817         {
2818             if ((queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
2819             {
2820                 m_queueFamilyIndex = i;
2821 
2822                 break;
2823             }
2824         }
2825         else if (m_params.queue == VK_QUEUE_COMPUTE_BIT)
2826         {
2827             if (((queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0) &&
2828                 ((queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) != 0))
2829             {
2830                 m_queueFamilyIndex = i;
2831             }
2832             else if (((queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) &&
2833                      ((queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) != 0))
2834             {
2835                 graphicsComputeQueue = i;
2836             }
2837         }
2838     }
2839 
2840     // If a compute only queue could not be found, fall back to a
2841     // graphics & compute one.
2842     if (m_params.queue == VK_QUEUE_COMPUTE_BIT && m_queueFamilyIndex == VK_QUEUE_FAMILY_IGNORED)
2843     {
2844         m_queueFamilyIndex = graphicsComputeQueue;
2845     }
2846 
2847     if (m_queueFamilyIndex == VK_QUEUE_FAMILY_IGNORED)
2848     {
2849         TCU_THROW(NotSupportedError, "Queue not supported");
2850     }
2851 
2852     if (sparseCompatibilityDevice && m_sparseQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED)
2853     {
2854         TCU_THROW(NotSupportedError, "Sparse operations not supported by any queue");
2855     }
2856 
2857     VkPhysicalDeviceFeatures2 features2                                            = initVulkanStructure();
2858     VkPhysicalDeviceDescriptorBufferFeaturesEXT descriptorBufferFeatures           = initVulkanStructure();
2859     VkPhysicalDeviceInlineUniformBlockFeaturesEXT inlineUniformBlockFeatures       = initVulkanStructure();
2860     VkPhysicalDeviceSynchronization2FeaturesKHR synchronization2Features           = initVulkanStructure();
2861     VkPhysicalDeviceRobustness2FeaturesEXT robustness2Features                     = initVulkanStructure();
2862     VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutableDescTypeFeatures       = initVulkanStructure();
2863     VkPhysicalDeviceCustomBorderColorFeaturesEXT customBorderColorFeatures         = initVulkanStructure();
2864     VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR samplerYcbcrConvFeatures     = initVulkanStructure();
2865     VkPhysicalDeviceAccelerationStructureFeaturesKHR accelerationStructureFeatures = initVulkanStructure();
2866     VkPhysicalDeviceRayQueryFeaturesKHR rayQueryFeatures                           = initVulkanStructure();
2867     VkPhysicalDeviceRayTracingPipelineFeaturesKHR rayTracingPipelineFeatures       = initVulkanStructure();
2868     VkPhysicalDeviceBufferDeviceAddressFeatures bufferDeviceAddressFeatures        = initVulkanStructure();
2869     VkPhysicalDeviceMaintenance4Features maintenance4Features                      = initVulkanStructure();
2870     VkPhysicalDeviceMaintenance5FeaturesKHR maintenance5Features                   = initVulkanStructure();
2871     VkPhysicalDeviceMaintenance6FeaturesKHR maintenance6Features                   = initVulkanStructure();
2872 
2873     const float priority[1] = {0.5f};
2874 
2875     uint32_t queueCiCnt = 1;
2876     VkDeviceQueueCreateInfo
2877         queueInfos[2]; // Needed when sparse binding is supported on other queue than descriptor buffer
2878     queueInfos[0]                  = initVulkanStructure();
2879     queueInfos[0].queueFamilyIndex = m_queueFamilyIndex;
2880     queueInfos[0].queueCount       = 1;
2881     queueInfos[0].pQueuePriorities = priority;
2882 
2883     if (sparseCompatibilityDevice && m_sparseQueueFamilyIndex != m_queueFamilyIndex)
2884     {
2885         queueInfos[1]                  = initVulkanStructure();
2886         queueInfos[1].queueFamilyIndex = m_sparseQueueFamilyIndex;
2887         queueInfos[1].queueCount       = 1;
2888         queueInfos[1].pQueuePriorities = priority;
2889 
2890         ++queueCiCnt;
2891     }
2892 
2893     void **nextPtr = &features2.pNext;
2894     addToChainVulkanStructure(&nextPtr, synchronization2Features);
2895     addToChainVulkanStructure(&nextPtr, descriptorBufferFeatures);
2896     addToChainVulkanStructure(&nextPtr, bufferDeviceAddressFeatures);
2897     addToChainVulkanStructure(&nextPtr, maintenance4Features);
2898 
2899     // NOTE: VK_AMD_shader_fragment_mask must not be enabled
2900     std::vector<const char *> extensions;
2901     extensions.push_back("VK_EXT_descriptor_buffer");
2902     extensions.push_back("VK_KHR_buffer_device_address");
2903     extensions.push_back("VK_KHR_synchronization2");
2904     extensions.push_back("VK_EXT_descriptor_indexing");
2905     extensions.push_back("VK_KHR_maintenance4");
2906 
2907     if (m_params.useMaintenance5)
2908     {
2909         addToChainVulkanStructure(&nextPtr, maintenance5Features);
2910         extensions.push_back("VK_KHR_maintenance5");
2911     }
2912 
2913     if ((m_params.descriptor == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) ||
2914         (m_params.variant == TestVariant::MULTIPLE) || m_params.isPushDescriptorTest())
2915     {
2916         extensions.push_back("VK_EXT_inline_uniform_block");
2917         addToChainVulkanStructure(&nextPtr, inlineUniformBlockFeatures);
2918 
2919         if (m_params.isPushDescriptorTest())
2920             extensions.push_back("VK_KHR_push_descriptor");
2921     }
2922     else if (m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR ||
2923              m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS)
2924     {
2925         if (context.isDeviceFunctionalitySupported("VK_EXT_robustness2"))
2926         {
2927             extensions.push_back("VK_EXT_robustness2");
2928             addToChainVulkanStructure(&nextPtr, robustness2Features);
2929         }
2930     }
2931     else if (m_params.subcase == SubCase::CAPTURE_REPLAY_CUSTOM_BORDER_COLOR)
2932     {
2933         extensions.push_back("VK_EXT_custom_border_color");
2934         addToChainVulkanStructure(&nextPtr, customBorderColorFeatures);
2935     }
2936     else if (m_params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE)
2937     {
2938         extensions.push_back("VK_EXT_mutable_descriptor_type");
2939         addToChainVulkanStructure(&nextPtr, mutableDescTypeFeatures);
2940     }
2941     else if (m_params.variant == TestVariant::YCBCR_SAMPLER)
2942     {
2943         extensions.push_back("VK_KHR_sampler_ycbcr_conversion");
2944         addToChainVulkanStructure(&nextPtr, samplerYcbcrConvFeatures);
2945     }
2946 
2947     if (m_params.isAccelerationStructure() || m_params.isRayTracing())
2948     {
2949         extensions.push_back("VK_KHR_acceleration_structure");
2950         addToChainVulkanStructure(&nextPtr, accelerationStructureFeatures);
2951         extensions.push_back("VK_KHR_spirv_1_4");
2952         extensions.push_back("VK_KHR_deferred_host_operations");
2953 
2954         if (m_params.isAccelerationStructure())
2955         {
2956             extensions.push_back("VK_KHR_ray_query");
2957             addToChainVulkanStructure(&nextPtr, rayQueryFeatures);
2958             extensions.push_back("VK_KHR_deferred_host_operations");
2959         }
2960 
2961         if (m_params.isRayTracing())
2962         {
2963             extensions.push_back("VK_KHR_ray_tracing_pipeline");
2964             addToChainVulkanStructure(&nextPtr, rayTracingPipelineFeatures);
2965         }
2966     }
2967 
2968     if (m_params.commands2)
2969     {
2970         extensions.push_back("VK_KHR_maintenance6");
2971         addToChainVulkanStructure(&nextPtr, maintenance6Features);
2972     }
2973 
2974     context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);
2975 
2976     if (m_params.variant != TestVariant::ROBUST_BUFFER_ACCESS)
2977     {
2978         features2.features.robustBufferAccess   = VK_FALSE;
2979         robustness2Features.robustBufferAccess2 = VK_FALSE;
2980         robustness2Features.robustImageAccess2  = VK_FALSE;
2981     }
2982 
2983     if (m_params.variant != TestVariant::ROBUST_NULL_DESCRIPTOR)
2984     {
2985         robustness2Features.nullDescriptor = VK_FALSE;
2986     }
2987 
2988     if (!m_params.isPushDescriptorTest())
2989     {
2990         descriptorBufferFeatures.descriptorBufferPushDescriptors = VK_FALSE;
2991     }
2992 
2993     if (!maintenance4Features.maintenance4)
2994         TCU_THROW(NotSupportedError, "Execution mode LocalSizeId is used, maintenance4 required");
2995 
2996     if (m_params.isAccelerationStructure() || m_params.isRayTracing())
2997     {
2998         if (!accelerationStructureFeatures.accelerationStructure)
2999             TCU_THROW(NotSupportedError, "Require accelerationStructureFeatures.accelerationStructure");
3000 
3001         if (m_params.isCaptureReplayDescriptor(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR))
3002         {
3003             if (!accelerationStructureFeatures.accelerationStructureCaptureReplay)
3004                 TCU_THROW(NotSupportedError,
3005                           "Require accelerationStructureFeatures.accelerationStructureCaptureReplay");
3006         }
3007 
3008         if (m_params.isAccelerationStructure())
3009         {
3010             if (!rayQueryFeatures.rayQuery)
3011                 TCU_THROW(NotSupportedError, "Require rayQueryFeatures.rayQuery");
3012         }
3013 
3014         if (m_params.isRayTracing())
3015         {
3016             if (!rayTracingPipelineFeatures.rayTracingPipeline)
3017                 TCU_THROW(NotSupportedError, "Require rayTracingPipelineFeatures.rayTracingPipeline");
3018 
3019             if (m_params.isCaptureReplayDescriptor(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR))
3020             {
3021                 if (!rayTracingPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplay)
3022                     TCU_THROW(NotSupportedError,
3023                               "Require rayTracingPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplay");
3024             }
3025         }
3026     }
3027 
3028     if (m_params.commands2)
3029     {
3030         if (!maintenance6Features.maintenance6)
3031             TCU_THROW(NotSupportedError, "maintenance6 required");
3032     }
3033 
3034     // Should be enabled by default
3035     DE_ASSERT(descriptorBufferFeatures.descriptorBuffer);
3036     DE_ASSERT(synchronization2Features.synchronization2);
3037 
3038     if (m_params.variant == TestVariant::MULTIPLE || m_params.isPushDescriptorTest())
3039     {
3040         // TODO: Currently these tests assume the feature is available and there's no easy way to make it optional.
3041         // Rather than returning NotSupported, this should be reworked if many implementations have this limitation.
3042         DE_ASSERT(inlineUniformBlockFeatures.inlineUniformBlock);
3043     }
3044     else if (m_params.subcase == SubCase::CAPTURE_REPLAY_CUSTOM_BORDER_COLOR)
3045     {
3046         DE_ASSERT(customBorderColorFeatures.customBorderColors);
3047     }
3048     else if (m_params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE)
3049     {
3050         DE_ASSERT(mutableDescTypeFeatures.mutableDescriptorType);
3051     }
3052     else if (params.variant == TestVariant::YCBCR_SAMPLER)
3053     {
3054         DE_ASSERT(samplerYcbcrConvFeatures.samplerYcbcrConversion);
3055     }
3056 
3057     m_descriptorBufferFeatures       = descriptorBufferFeatures;
3058     m_descriptorBufferFeatures.pNext = nullptr;
3059 
3060     m_descriptorBufferProperties       = context.getDescriptorBufferPropertiesEXT();
3061     m_descriptorBufferProperties.pNext = nullptr;
3062 
3063     VkDeviceCreateInfo createInfo      = initVulkanStructure(&features2);
3064     createInfo.pEnabledFeatures        = nullptr;
3065     createInfo.enabledExtensionCount   = u32(extensions.size());
3066     createInfo.ppEnabledExtensionNames = extensions.data();
3067     createInfo.queueCreateInfoCount    = queueCiCnt;
3068     createInfo.pQueueCreateInfos       = queueInfos;
3069 
3070     m_device =
3071         createCustomDevice(false, context.getPlatformInterface(), context.getInstance(), inst, physDevice, &createInfo);
3072 
3073     context.getDeviceInterface().getDeviceQueue(*m_device, m_queueFamilyIndex, 0, &m_queue);
3074 
3075     if (sparseCompatibilityDevice)
3076     {
3077         context.getDeviceInterface().getDeviceQueue(*m_device, m_sparseQueueFamilyIndex, 0, &m_sparseQueue);
3078     }
3079 
3080     m_deviceInterface =
3081         newMovePtr<DeviceDriver>(context.getPlatformInterface(), context.getInstance(), *m_device,
3082                                  context.getUsedApiVersion(), context.getTestContext().getCommandLine());
3083 
3084     m_memoryProperties = vk::getPhysicalDeviceMemoryProperties(inst, physDevice);
3085 
3086     if (params.variant == TestVariant::YCBCR_SAMPLER)
3087     {
3088         m_imageColorFormat = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
3089 
3090         VkPhysicalDeviceImageFormatInfo2 imageFormatInfo = initVulkanStructure();
3091         imageFormatInfo.format                           = m_imageColorFormat;
3092         imageFormatInfo.type                             = VK_IMAGE_TYPE_2D;
3093         imageFormatInfo.tiling                           = VK_IMAGE_TILING_OPTIMAL;
3094         imageFormatInfo.usage                            = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
3095 
3096         VkSamplerYcbcrConversionImageFormatProperties ycbcrFormatProps = initVulkanStructure();
3097         VkImageFormatProperties2 imageFormatProps                      = initVulkanStructure(&ycbcrFormatProps);
3098 
3099         VK_CHECK(m_context.getInstanceInterface().getPhysicalDeviceImageFormatProperties2(
3100             m_context.getPhysicalDevice(), &imageFormatInfo, &imageFormatProps));
3101 
3102         m_combinedImageSamplerDescriptorCount = ycbcrFormatProps.combinedImageSamplerDescriptorCount;
3103 
3104         DE_ASSERT(m_combinedImageSamplerDescriptorCount != 0);
3105     }
3106     else
3107     {
3108         m_imageColorFormat = VK_FORMAT_R32_UINT;
3109     }
3110 
3111     m_allocatorPtr = de::MovePtr<Allocator>(new SimpleAllocator(*m_deviceInterface, *m_device, m_memoryProperties));
3112 }
3113 
getDescriptorSize(const Binding & binding) const3114 VkDeviceSize DescriptorBufferTestInstance::getDescriptorSize(const Binding &binding) const
3115 {
3116     const auto isRobustBufferAccess = (m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS);
3117 
3118     // To support mutable descriptor type bindings, we pick the max size from the list below.
3119     // For regular descriptors, there will be only one element in the list.
3120     std::vector<VkDescriptorType> typeList;
3121 
3122     if (binding.isMutableType)
3123     {
3124         DE_ASSERT(m_params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE);
3125         typeList = getDescriptorMaskTypes(m_params.mutableDescriptorTypes);
3126     }
3127     else
3128     {
3129         typeList.emplace_back(binding.descriptorType);
3130     }
3131 
3132     std::size_t maxSize = 0u;
3133 
3134     for (const auto &type : typeList)
3135     {
3136         std::size_t size = 0u;
3137 
3138         switch (type)
3139         {
3140         case VK_DESCRIPTOR_TYPE_SAMPLER:
3141             size = m_descriptorBufferProperties.samplerDescriptorSize;
3142             break;
3143 
3144         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
3145             size =
3146                 m_descriptorBufferProperties.combinedImageSamplerDescriptorSize * m_combinedImageSamplerDescriptorCount;
3147             break;
3148 
3149         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
3150             size = m_descriptorBufferProperties.sampledImageDescriptorSize;
3151             break;
3152 
3153         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
3154             size = m_descriptorBufferProperties.storageImageDescriptorSize;
3155             break;
3156 
3157         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
3158             size = isRobustBufferAccess ? m_descriptorBufferProperties.robustUniformTexelBufferDescriptorSize :
3159                                           m_descriptorBufferProperties.uniformTexelBufferDescriptorSize;
3160             break;
3161 
3162         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
3163             size = isRobustBufferAccess ? m_descriptorBufferProperties.robustStorageTexelBufferDescriptorSize :
3164                                           m_descriptorBufferProperties.storageTexelBufferDescriptorSize;
3165             break;
3166 
3167         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
3168             size = isRobustBufferAccess ? m_descriptorBufferProperties.robustUniformBufferDescriptorSize :
3169                                           m_descriptorBufferProperties.uniformBufferDescriptorSize;
3170             break;
3171 
3172         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
3173             size = isRobustBufferAccess ? m_descriptorBufferProperties.robustStorageBufferDescriptorSize :
3174                                           m_descriptorBufferProperties.storageBufferDescriptorSize;
3175             break;
3176 
3177         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
3178             size = m_descriptorBufferProperties.inputAttachmentDescriptorSize;
3179             break;
3180 
3181         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
3182             size = m_descriptorBufferProperties.accelerationStructureDescriptorSize;
3183             break;
3184 
3185         case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
3186             // Inline uniform block has no associated size. This is OK, because it can't be arrayed.
3187             break;
3188 
3189         default:
3190             DE_ASSERT(0);
3191             break;
3192         }
3193 
3194         maxSize = std::max(maxSize, size);
3195     }
3196 
3197     return maxSize;
3198 }
3199 
getDescriptorTypeSize(VkDescriptorType descriptorType) const3200 VkDeviceSize DescriptorBufferTestInstance::getDescriptorTypeSize(VkDescriptorType descriptorType) const
3201 {
3202     const auto isRobustBufferAccess = (m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS);
3203 
3204     switch (descriptorType)
3205     {
3206     case VK_DESCRIPTOR_TYPE_SAMPLER:
3207         return m_descriptorBufferProperties.samplerDescriptorSize;
3208 
3209     case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
3210         return m_descriptorBufferProperties.combinedImageSamplerDescriptorSize * m_combinedImageSamplerDescriptorCount;
3211 
3212     case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
3213         return m_descriptorBufferProperties.sampledImageDescriptorSize;
3214 
3215     case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
3216         return m_descriptorBufferProperties.storageImageDescriptorSize;
3217 
3218     case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
3219         return isRobustBufferAccess ? m_descriptorBufferProperties.robustUniformTexelBufferDescriptorSize :
3220                                       m_descriptorBufferProperties.uniformTexelBufferDescriptorSize;
3221 
3222     case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
3223         return isRobustBufferAccess ? m_descriptorBufferProperties.robustStorageTexelBufferDescriptorSize :
3224                                       m_descriptorBufferProperties.storageTexelBufferDescriptorSize;
3225 
3226     case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
3227         return isRobustBufferAccess ? m_descriptorBufferProperties.robustUniformBufferDescriptorSize :
3228                                       m_descriptorBufferProperties.uniformBufferDescriptorSize;
3229 
3230     case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
3231         return isRobustBufferAccess ? m_descriptorBufferProperties.robustStorageBufferDescriptorSize :
3232                                       m_descriptorBufferProperties.storageBufferDescriptorSize;
3233 
3234     case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
3235         return m_descriptorBufferProperties.inputAttachmentDescriptorSize;
3236 
3237     case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
3238         return m_descriptorBufferProperties.accelerationStructureDescriptorSize;
3239 
3240     case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
3241         // Inline uniform block has no associated size. This is OK, because it can't be arrayed.
3242         break;
3243 
3244     default:
3245         DE_ASSERT(0);
3246         break;
3247     }
3248 
3249     return 0;
3250 }
3251 
createDescriptorSetLayouts()3252 void DescriptorBufferTestInstance::createDescriptorSetLayouts()
3253 {
3254     for (auto &dslPtr : m_descriptorSetLayouts)
3255     {
3256         auto &dsl = **dslPtr;
3257 
3258         DE_ASSERT(!dsl.bindings.empty());
3259 
3260         const auto bindingsCopy = getDescriptorSetLayoutBindings(dsl.bindings);
3261 
3262         VkMutableDescriptorTypeCreateInfoEXT mutableDescTypeCreateInfo = initVulkanStructure();
3263         std::vector<VkMutableDescriptorTypeListEXT> mutableDescTypeLists;
3264         std::vector<VkDescriptorType> mutableDescTypeDescriptors;
3265 
3266         VkDescriptorSetLayoutCreateInfo createInfo = initVulkanStructure();
3267         createInfo.bindingCount                    = u32(bindingsCopy.size());
3268         createInfo.pBindings                       = bindingsCopy.data();
3269         createInfo.flags                           = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT;
3270 
3271         if (dsl.hasEmbeddedImmutableSamplers)
3272         {
3273             createInfo.flags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT;
3274         }
3275         else if (dsl.usePushDescriptors)
3276         {
3277             createInfo.flags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
3278         }
3279 
3280         if (m_params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE)
3281         {
3282             // Prepare mutable descriptor type structures
3283 
3284             // NOTE: This test makes a simplification that each mutable descriptor binding has the same
3285             //       set of possible real descriptor types. Due to this, we can use a single descriptor type list.
3286             mutableDescTypeDescriptors = getDescriptorMaskTypes(m_params.mutableDescriptorTypes);
3287             mutableDescTypeLists.resize(createInfo.bindingCount);
3288 
3289             createInfo.pNext                                         = &mutableDescTypeCreateInfo;
3290             mutableDescTypeCreateInfo.mutableDescriptorTypeListCount = u32(mutableDescTypeLists.size());
3291             mutableDescTypeCreateInfo.pMutableDescriptorTypeLists    = mutableDescTypeLists.data();
3292 
3293             for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
3294             {
3295                 const auto &binding = dsl.bindings[bindingIndex];
3296 
3297                 if (binding.isMutableType)
3298                 {
3299                     DE_ASSERT(binding.immutableSamplers[0] == VK_NULL_HANDLE);
3300                     mutableDescTypeLists[bindingIndex].descriptorTypeCount = u32(mutableDescTypeDescriptors.size());
3301                     mutableDescTypeLists[bindingIndex].pDescriptorTypes    = mutableDescTypeDescriptors.data();
3302                 }
3303                 else
3304                 {
3305                     mutableDescTypeLists[bindingIndex].descriptorTypeCount = 0;
3306                     mutableDescTypeLists[bindingIndex].pDescriptorTypes    = nullptr;
3307                 }
3308             }
3309 
3310             // Check support
3311 
3312             VkDescriptorSetLayoutSupport support = initVulkanStructure();
3313 
3314             m_deviceInterface->getDescriptorSetLayoutSupport(*m_device, &createInfo, &support);
3315 
3316             if (support.supported == VK_FALSE)
3317             {
3318                 TCU_THROW(NotSupportedError, "Descriptor set layout is not supported");
3319             }
3320         }
3321 
3322         dsl.layout = createDescriptorSetLayout(*m_deviceInterface, *m_device, &createInfo);
3323 
3324         m_deviceInterface->getDescriptorSetLayoutSizeEXT(*m_device, *dsl.layout, &dsl.sizeOfLayout);
3325 
3326         for (auto &binding : dsl.bindings)
3327         {
3328             m_deviceInterface->getDescriptorSetLayoutBindingOffsetEXT(*m_device, *dsl.layout, binding.binding,
3329                                                                       &binding.offset);
3330         }
3331     }
3332 }
3333 
3334 // The test may create a variable number of descriptor buffers, based on the parameters.
3335 //
createDescriptorBuffers()3336 void DescriptorBufferTestInstance::createDescriptorBuffers()
3337 {
3338     DE_ASSERT(m_descriptorBuffers.empty());
3339 
3340     const uint32_t bufferInitialMemory            = 0xcc;  // descriptor buffer memory is initially set to this
3341     bool allocateStagingBuffer                    = false; // determined after descriptors are created
3342     VkDeviceSize stagingBufferDescriptorSetOffset = 0;
3343     const uint32_t setsPerBuffer =
3344         m_params.subcase == SubCase::SINGLE_BUFFER ? m_params.bufferBindingCount + 1 : m_params.setsPerBuffer;
3345     const bool sparseCompatibilityDevice = !(m_params.resourceResidency == ResourceResidency::TRADITIONAL);
3346 
3347     // Data tracked per buffer creation
3348     struct
3349     {
3350         uint32_t firstSet;
3351         uint32_t numSets;
3352         VkBufferUsageFlags usage;
3353         VkDeviceSize setOffset;
3354     } currentBuffer;
3355 
3356     currentBuffer = {};
3357 
3358     for (uint32_t setIndex = 0; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
3359     {
3360         auto &dsl = **m_descriptorSetLayouts[setIndex];
3361 
3362         if (dsl.hasEmbeddedImmutableSamplers ||
3363             (dsl.usePushDescriptors && m_descriptorBufferProperties.bufferlessPushDescriptors &&
3364              m_params.subcase != SubCase::SINGLE_BUFFER))
3365         {
3366             // Embedded immutable samplers aren't backed by a descriptor buffer.
3367             // Same goes for the set used with push descriptors.
3368             // Push descriptors might require buffer. If so, don't skip creation of buffer.
3369 
3370             // We musn't have started adding sets to the next buffer yet.
3371             DE_ASSERT(currentBuffer.numSets == 0);
3372             ++currentBuffer.firstSet;
3373 
3374             continue;
3375         }
3376 
3377         // Required for binding
3378         currentBuffer.usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
3379 
3380         for (const auto &binding : dsl.bindings)
3381         {
3382             if (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
3383             {
3384                 currentBuffer.usage |= VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT;
3385             }
3386             else if (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
3387             {
3388                 currentBuffer.usage |= VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT |
3389                                        VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT;
3390             }
3391             else
3392             {
3393                 currentBuffer.usage |=
3394                     VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3395             }
3396         }
3397 
3398         if (!m_descriptorBufferProperties.bufferlessPushDescriptors && dsl.usePushDescriptors)
3399         {
3400             currentBuffer.usage |= VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT;
3401         }
3402 
3403         // Allow descriptor set layout to be size of zero bytes
3404         if (dsl.sizeOfLayout != 0)
3405         {
3406             // Assign this descriptor set to a new buffer
3407             dsl.bufferIndex  = u32(m_descriptorBuffers.size());
3408             dsl.bufferOffset = currentBuffer.setOffset;
3409         }
3410 
3411         currentBuffer.numSets += 1;
3412         currentBuffer.setOffset +=
3413             deAlignSize(static_cast<std::size_t>(dsl.sizeOfLayout),
3414                         static_cast<std::size_t>(m_descriptorBufferProperties.descriptorBufferOffsetAlignment));
3415 
3416         VkMemoryAllocateFlagsInfo allocFlagsInfo = initVulkanStructure();
3417         allocFlagsInfo.flags |= VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
3418 
3419         // We've reached the limit of sets for this descriptor buffer.
3420         if (currentBuffer.numSets == setsPerBuffer)
3421         {
3422             vk::VkBufferCreateInfo bufferCreateInfo =
3423                 makeBufferCreateInfo(currentBuffer.setOffset, currentBuffer.usage);
3424 
3425             const uint32_t queueFamilyIndices[] = {m_queueFamilyIndex, m_sparseQueueFamilyIndex};
3426 
3427             if (sparseCompatibilityDevice)
3428             {
3429                 bufferCreateInfo.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT;
3430 
3431                 // Overrides sharing mode if compute queue does not support sparse operations
3432                 if (m_queueFamilyIndex != m_sparseQueueFamilyIndex)
3433                 {
3434 
3435                     bufferCreateInfo.sharingMode           = VK_SHARING_MODE_CONCURRENT;
3436                     bufferCreateInfo.queueFamilyIndexCount = 2u;
3437                     bufferCreateInfo.pQueueFamilyIndices   = queueFamilyIndices;
3438                 }
3439             }
3440 
3441             if (bufferCreateInfo.size != 0)
3442             {
3443                 m_descriptorBuffers.emplace_back(new BufferAlloc());
3444                 auto &bufferAlloc = *m_descriptorBuffers.back();
3445 
3446                 bufferAlloc.size  = bufferCreateInfo.size;
3447                 bufferAlloc.usage = bufferCreateInfo.usage;
3448 
3449                 vk::VkBufferUsageFlags2CreateInfoKHR bufferUsageFlags2 = initVulkanStructure();
3450                 ;
3451                 if (m_params.useMaintenance5)
3452                 {
3453                     bufferUsageFlags2.usage = (VkBufferUsageFlagBits2KHR)currentBuffer.usage;
3454                     bufferCreateInfo.pNext  = &bufferUsageFlags2;
3455                     bufferCreateInfo.usage  = 0;
3456                 }
3457 
3458                 bufferAlloc.buffer = vk::createBuffer(*m_deviceInterface, *m_device, &bufferCreateInfo);
3459 
3460                 auto bufferMemReqs   = getBufferMemoryRequirements(*m_deviceInterface, *m_device, *bufferAlloc.buffer);
3461                 bool useStagedUpload = false; // write directly to device-local memory, if possible
3462 
3463                 if (DEBUG_FORCE_STAGED_UPLOAD)
3464                 {
3465                     useStagedUpload = true;
3466                 }
3467                 else if (DEBUG_MIX_DIRECT_AND_STAGED_UPLOAD)
3468                 {
3469                     // To avoid adding yet another test case permutation (which may be redundant on some implementations),
3470                     // we are going to always test a mix of direct and staged uploads.
3471                     useStagedUpload = ((dsl.bufferIndex % 2) == 1);
3472                 }
3473 
3474                 if (!useStagedUpload)
3475                 {
3476                     auto memReqs = MemoryRequirement::Local | MemoryRequirement::HostVisible;
3477                     auto compatMask =
3478                         bufferMemReqs.memoryTypeBits & getCompatibleMemoryTypes(m_memoryProperties, memReqs);
3479 
3480                     if (compatMask != 0)
3481                     {
3482                         if (m_params.resourceResidency == ResourceResidency::SPARSE_RESIDENCY)
3483                         {
3484                             bufferMemReqs.size +=
3485                                 bufferMemReqs
3486                                     .alignment; // Allocating bigger chunk to be able to bind not a whole resource at once
3487                             bufferAlloc.alloc = allocate(bufferMemReqs, memReqs, &allocFlagsInfo);
3488                             bufferMemReqs.size -= bufferMemReqs.alignment;
3489                         }
3490                         else
3491                         {
3492                             bufferAlloc.alloc = allocate(bufferMemReqs, memReqs, &allocFlagsInfo);
3493                         }
3494                     }
3495                     else
3496                     {
3497                         // No suitable memory type, fall back to a staged upload
3498                         useStagedUpload = true;
3499                     }
3500                 }
3501 
3502                 if (useStagedUpload)
3503                 {
3504                     DE_ASSERT(!bufferAlloc.alloc);
3505 
3506                     if ((bufferAlloc.usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT) == 0)
3507                     {
3508                         bufferAlloc.buffer = Move<VkBuffer>();
3509                         bufferAlloc.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3510 
3511                         bufferCreateInfo.usage = bufferAlloc.usage;
3512 
3513                         bufferAlloc.buffer = vk::createBuffer(*m_deviceInterface, *m_device, &bufferCreateInfo);
3514 
3515                         bufferMemReqs = getBufferMemoryRequirements(*m_deviceInterface, *m_device, *bufferAlloc.buffer);
3516                     }
3517 
3518                     bufferAlloc.alloc     = allocate(bufferMemReqs, MemoryRequirement::Local, &allocFlagsInfo);
3519                     allocateStagingBuffer = true;
3520 
3521                     // Update staging buffer offsets for all sets in this buffer
3522                     for (uint32_t i = currentBuffer.firstSet; i < currentBuffer.firstSet + currentBuffer.numSets; ++i)
3523                     {
3524                         (**m_descriptorSetLayouts[i]).stagingBufferOffset = stagingBufferDescriptorSetOffset;
3525                         stagingBufferDescriptorSetOffset += (**m_descriptorSetLayouts[i]).sizeOfLayout;
3526                     }
3527                 }
3528 
3529                 if (!sparseCompatibilityDevice)
3530                 {
3531                     VK_CHECK(m_deviceInterface->bindBufferMemory(*m_device, *bufferAlloc.buffer,
3532                                                                  bufferAlloc.alloc->getMemory(),
3533                                                                  bufferAlloc.alloc->getOffset()));
3534                 }
3535                 else // Bind sparse
3536                 {
3537                     // Fence to signal when sparse binding operation ends
3538                     const vk::Unique<vk::VkFence> sparseBindFence(vk::createFence(*m_deviceInterface, *m_device));
3539 
3540                     const VkSparseMemoryBind sparseMemBind = {
3541                         0,                              // VkDeviceSize               resourceOffset;
3542                         bufferMemReqs.size,             // VkDeviceSize               size;
3543                         bufferAlloc.alloc->getMemory(), // VkDeviceMemory             memory;
3544                         bufferAlloc.alloc->getOffset(), // VkDeviceSize               memoryOffset;
3545                         0,                              // VkSparseMemoryBindFlags    flags;
3546                     };
3547 
3548                     const VkSparseBufferMemoryBindInfo sparseBufferMemBindInfo = {
3549                         *bufferAlloc.buffer, // VkBuffer                     buffer;
3550                         1,                   // uint32_t                     bindCount;
3551                         &sparseMemBind,      // const VkSparseMemoryBind*    pBinds;
3552                     };
3553 
3554                     VkBindSparseInfo bindSparseInfo = initVulkanStructure();
3555                     bindSparseInfo.bufferBindCount  = 1;
3556                     bindSparseInfo.pBufferBinds     = &sparseBufferMemBindInfo;
3557 
3558                     vk::VkResult res = VK_SUCCESS;
3559 
3560                     res = m_deviceInterface->queueBindSparse(m_sparseQueue, 1, &bindSparseInfo, *sparseBindFence);
3561 
3562                     VK_CHECK(res);
3563 
3564                     VK_CHECK(m_deviceInterface->waitForFences(*m_device, 1u, &sparseBindFence.get(), true, ~0ull));
3565                 }
3566 
3567                 bufferAlloc.loadDeviceAddress(*m_deviceInterface, *m_device);
3568 
3569                 if (!useStagedUpload)
3570                 {
3571                     // Clear the descriptor buffer memory to ensure there can be no random data there.
3572                     deMemset(bufferAlloc.alloc->getHostPtr(), bufferInitialMemory,
3573                              static_cast<std::size_t>(bufferAlloc.size));
3574                 }
3575             }
3576 
3577             // Start with a new buffer
3578             currentBuffer          = {};
3579             currentBuffer.firstSet = setIndex + 1;
3580         }
3581     }
3582 
3583     if (allocateStagingBuffer)
3584     {
3585         DE_ASSERT(!m_descriptorStagingBuffer.alloc);
3586 
3587         auto bufferCreateInfo =
3588             makeBufferCreateInfo(stagingBufferDescriptorSetOffset, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
3589 
3590         m_descriptorStagingBuffer.buffer = vk::createBuffer(*m_deviceInterface, *m_device, &bufferCreateInfo);
3591         m_descriptorStagingBuffer.size   = bufferCreateInfo.size;
3592 
3593         auto bufferMemReqs =
3594             getBufferMemoryRequirements(*m_deviceInterface, *m_device, *m_descriptorStagingBuffer.buffer);
3595 
3596         m_descriptorStagingBuffer.alloc = allocate(bufferMemReqs, MemoryRequirement::HostVisible);
3597 
3598         VK_CHECK(m_deviceInterface->bindBufferMemory(*m_device, *m_descriptorStagingBuffer.buffer,
3599                                                      m_descriptorStagingBuffer.alloc->getMemory(),
3600                                                      m_descriptorStagingBuffer.alloc->getOffset()));
3601 
3602         // Clear the descriptor buffer memory to ensure there can be no random data there.
3603         deMemset(m_descriptorStagingBuffer.alloc->getHostPtr(), bufferInitialMemory,
3604                  static_cast<std::size_t>(m_descriptorStagingBuffer.size));
3605     }
3606 }
3607 
bindDescriptorBuffers(VkCommandBuffer cmdBuf,VkPipelineBindPoint bindPoint) const3608 void DescriptorBufferTestInstance::bindDescriptorBuffers(VkCommandBuffer cmdBuf, VkPipelineBindPoint bindPoint) const
3609 {
3610     std::vector<uint32_t> bufferIndices;
3611     std::vector<VkDeviceSize> bufferOffsets;
3612     std::vector<VkDescriptorBufferBindingInfoEXT> bufferBindingInfos;
3613     VkDescriptorBufferBindingPushDescriptorBufferHandleEXT bufferBindingPushDescriptorBufferHandleEXT =
3614         initVulkanStructure();
3615 
3616     uint32_t firstSet = 0;
3617 
3618     if (m_params.variant == TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS)
3619     {
3620         // These sampler sets are ordered first, so we can bind them now and increment the firstSet index.
3621         for (uint32_t setIndex = firstSet; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
3622         {
3623             const auto &dsl = **m_descriptorSetLayouts[setIndex];
3624 
3625             if (dsl.hasEmbeddedImmutableSamplers)
3626             {
3627                 if (m_params.commands2)
3628                 {
3629                     vk::VkBindDescriptorBufferEmbeddedSamplersInfoEXT bindDescriptorBufferEmbeddedSamplersInfo = {
3630                         VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, // VkStructureType sType;
3631                         nullptr,                                                             // const void* pNext;
3632                         (VkShaderStageFlags)m_params.stage, // VkShaderStageFlags stageFlags;
3633                         *m_pipelineLayout,                  // VkPipelineLayout layout;
3634                         setIndex                            // uint32_t set;
3635                     };
3636                     m_deviceInterface->cmdBindDescriptorBufferEmbeddedSamplers2EXT(
3637                         cmdBuf, &bindDescriptorBufferEmbeddedSamplersInfo);
3638                 }
3639                 else
3640                 {
3641                     m_deviceInterface->cmdBindDescriptorBufferEmbeddedSamplersEXT(cmdBuf, bindPoint, *m_pipelineLayout,
3642                                                                                   setIndex);
3643                 }
3644 
3645                 // No gaps between sets.
3646                 DE_ASSERT(firstSet == setIndex);
3647 
3648                 firstSet = setIndex + 1;
3649             }
3650         }
3651     }
3652 
3653     for (const auto &buffer : m_descriptorBuffers)
3654     {
3655         VkDescriptorBufferBindingInfoEXT info = initVulkanStructure();
3656 
3657         info.address = buffer->deviceAddress;
3658         info.usage   = buffer->usage;
3659 
3660         if (!m_descriptorBufferProperties.bufferlessPushDescriptors &&
3661             (buffer->usage & VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT) != 0)
3662         {
3663             info.pNext = &bufferBindingPushDescriptorBufferHandleEXT;
3664 
3665             // Make sure there is only one such buffer
3666             DE_ASSERT(bufferBindingPushDescriptorBufferHandleEXT.buffer == VK_NULL_HANDLE);
3667 
3668             bufferBindingPushDescriptorBufferHandleEXT.buffer = *buffer->buffer;
3669 
3670             DE_ASSERT(bufferBindingPushDescriptorBufferHandleEXT.buffer != VK_NULL_HANDLE);
3671         }
3672 
3673         bufferBindingInfos.emplace_back(info);
3674     }
3675 
3676     if (bufferBindingInfos.size() != 0u)
3677     {
3678         m_deviceInterface->cmdBindDescriptorBuffersEXT(cmdBuf, u32(bufferBindingInfos.size()),
3679                                                        bufferBindingInfos.data());
3680     }
3681 
3682     // Next, set the offsets for the bound buffers.
3683 
3684     for (uint32_t setIndex = firstSet; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
3685     {
3686         const auto &dsl       = **m_descriptorSetLayouts[setIndex];
3687         const bool isBoundSet = (dsl.bufferIndex != INDEX_INVALID);
3688         const bool isLastSet  = ((setIndex + 1) == u32(m_descriptorSetLayouts.size()));
3689 
3690         if (isBoundSet)
3691         {
3692             bufferIndices.emplace_back(dsl.bufferIndex);
3693             bufferOffsets.emplace_back(dsl.bufferOffset);
3694         }
3695 
3696         if ((!isBoundSet || isLastSet) && !bufferIndices.empty())
3697         {
3698             if (m_params.commands2)
3699             {
3700                 vk::VkSetDescriptorBufferOffsetsInfoEXT setDescriptorBufferOffsetInfo = {
3701                     VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, // VkStructureType sType;
3702                     nullptr,                                                  // const void* pNext;
3703                     (VkShaderStageFlags)m_params.stage,                       // VkShaderStageFlags stageFlags;
3704                     *m_pipelineLayout,                                        // VkPipelineLayout layout;
3705                     firstSet,                                                 // uint32_t firstSet;
3706                     u32(bufferIndices.size()),                                // uint32_t setCount;
3707                     bufferIndices.data(),                                     // const uint32_t* pBufferIndices;
3708                     bufferOffsets.data()                                      // const VkDeviceSize* pOffsets;
3709                 };
3710                 m_deviceInterface->cmdSetDescriptorBufferOffsets2EXT(cmdBuf, &setDescriptorBufferOffsetInfo);
3711             }
3712             else
3713             {
3714                 m_deviceInterface->cmdSetDescriptorBufferOffsetsEXT(cmdBuf, bindPoint, *m_pipelineLayout, firstSet,
3715                                                                     u32(bufferIndices.size()), bufferIndices.data(),
3716                                                                     bufferOffsets.data());
3717             }
3718 
3719             bufferIndices.clear();
3720             bufferOffsets.clear();
3721 
3722             firstSet = setIndex + 1;
3723         }
3724         else if (!isBoundSet)
3725         {
3726             // Push descriptor sets will have no buffer backing. Skip this set.
3727             ++firstSet;
3728         }
3729     }
3730 }
3731 
makeShaderStageCreateInfo(VkShaderStageFlagBits stage,VkShaderModule shaderModule)3732 VkPipelineShaderStageCreateInfo makeShaderStageCreateInfo(VkShaderStageFlagBits stage, VkShaderModule shaderModule)
3733 {
3734     VkPipelineShaderStageCreateInfo createInfo = initVulkanStructure();
3735     createInfo.stage                           = stage;
3736     createInfo.module                          = shaderModule;
3737     createInfo.pName                           = "main";
3738     createInfo.pSpecializationInfo             = nullptr;
3739     return createInfo;
3740 }
3741 
createShaderBindingTable(const InstanceInterface & vki,const DeviceInterface & vkd,const VkDevice device,const VkPhysicalDevice physicalDevice,const VkPipeline pipeline,Allocator & allocator,de::MovePtr<RayTracingPipeline> & rayTracingPipeline,const uint32_t group)3742 de::MovePtr<BufferWithMemory> DescriptorBufferTestInstance::createShaderBindingTable(
3743     const InstanceInterface &vki, const DeviceInterface &vkd, const VkDevice device,
3744     const VkPhysicalDevice physicalDevice, const VkPipeline pipeline, Allocator &allocator,
3745     de::MovePtr<RayTracingPipeline> &rayTracingPipeline, const uint32_t group)
3746 {
3747     de::MovePtr<BufferWithMemory> shaderBindingTable;
3748 
3749     if (group < m_shaderGroupCount)
3750     {
3751         const uint32_t shaderGroupHandleSize    = getShaderGroupHandleSize(vki, physicalDevice);
3752         const uint32_t shaderGroupBaseAlignment = getShaderGroupBaseAlignment(vki, physicalDevice);
3753 
3754         shaderBindingTable = rayTracingPipeline->createShaderBindingTable(
3755             vkd, device, pipeline, allocator, shaderGroupHandleSize, shaderGroupBaseAlignment, group, 1u);
3756     }
3757 
3758     return shaderBindingTable;
3759 }
3760 
createRayTracingPipeline()3761 void DescriptorBufferTestInstance::createRayTracingPipeline()
3762 {
3763     const InstanceInterface &vki          = m_context.getInstanceInterface();
3764     const DeviceInterface &vkd            = *m_deviceInterface;
3765     const VkDevice device                 = *m_device;
3766     const VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
3767     vk::BinaryCollection &collection      = m_context.getBinaryCollection();
3768     Allocator &allocator                  = *m_allocatorPtr;
3769     const uint32_t shaderGroupHandleSize  = getShaderGroupHandleSize(vki, physicalDevice);
3770     const VkShaderStageFlags hitStages =
3771         VK_SHADER_STAGE_ANY_HIT_BIT_KHR | VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR | VK_SHADER_STAGE_INTERSECTION_BIT_KHR;
3772 
3773     m_shaderGroupCount = 0;
3774 
3775     if (collection.contains(getShaderName(VK_SHADER_STAGE_RAYGEN_BIT_KHR)))
3776         m_shaders |= VK_SHADER_STAGE_RAYGEN_BIT_KHR;
3777     if (collection.contains(getShaderName(VK_SHADER_STAGE_ANY_HIT_BIT_KHR)))
3778         m_shaders |= VK_SHADER_STAGE_ANY_HIT_BIT_KHR;
3779     if (collection.contains(getShaderName(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR)))
3780         m_shaders |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR;
3781     if (collection.contains(getShaderName(VK_SHADER_STAGE_MISS_BIT_KHR)))
3782         m_shaders |= VK_SHADER_STAGE_MISS_BIT_KHR;
3783     if (collection.contains(getShaderName(VK_SHADER_STAGE_INTERSECTION_BIT_KHR)))
3784         m_shaders |= VK_SHADER_STAGE_INTERSECTION_BIT_KHR;
3785     if (collection.contains(getShaderName(VK_SHADER_STAGE_CALLABLE_BIT_KHR)))
3786         m_shaders |= VK_SHADER_STAGE_CALLABLE_BIT_KHR;
3787 
3788     if (0 != (m_shaders & VK_SHADER_STAGE_RAYGEN_BIT_KHR))
3789         m_raygenShaderGroup = m_shaderGroupCount++;
3790 
3791     if (0 != (m_shaders & VK_SHADER_STAGE_MISS_BIT_KHR))
3792         m_missShaderGroup = m_shaderGroupCount++;
3793 
3794     if (0 != (m_shaders & hitStages))
3795         m_hitShaderGroup = m_shaderGroupCount++;
3796 
3797     if (0 != (m_shaders & VK_SHADER_STAGE_CALLABLE_BIT_KHR))
3798         m_callableShaderGroup = m_shaderGroupCount++;
3799 
3800     m_rayTracingPipeline = de::newMovePtr<RayTracingPipeline>();
3801 
3802     m_rayTracingPipeline->setCreateFlags(VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT);
3803 
3804     if (0 != (m_shaders & VK_SHADER_STAGE_RAYGEN_BIT_KHR))
3805         addRayTracingShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, m_raygenShaderGroup);
3806     if (0 != (m_shaders & VK_SHADER_STAGE_ANY_HIT_BIT_KHR))
3807         addRayTracingShader(VK_SHADER_STAGE_ANY_HIT_BIT_KHR, m_hitShaderGroup);
3808     if (0 != (m_shaders & VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR))
3809         addRayTracingShader(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, m_hitShaderGroup);
3810     if (0 != (m_shaders & VK_SHADER_STAGE_MISS_BIT_KHR))
3811         addRayTracingShader(VK_SHADER_STAGE_MISS_BIT_KHR, m_missShaderGroup);
3812     if (0 != (m_shaders & VK_SHADER_STAGE_INTERSECTION_BIT_KHR))
3813         addRayTracingShader(VK_SHADER_STAGE_INTERSECTION_BIT_KHR, m_hitShaderGroup);
3814     if (0 != (m_shaders & VK_SHADER_STAGE_CALLABLE_BIT_KHR))
3815         addRayTracingShader(VK_SHADER_STAGE_CALLABLE_BIT_KHR, m_callableShaderGroup);
3816 
3817     m_pipelineLayout = makePipelineLayout(vkd, device, getDescriptorSetLayouts(m_descriptorSetLayouts));
3818     m_pipeline       = m_rayTracingPipeline->createPipeline(vkd, device, *m_pipelineLayout);
3819 
3820     m_raygenShaderBindingTable   = createShaderBindingTable(vki, vkd, device, physicalDevice, *m_pipeline, allocator,
3821                                                             m_rayTracingPipeline, m_raygenShaderGroup);
3822     m_missShaderBindingTable     = createShaderBindingTable(vki, vkd, device, physicalDevice, *m_pipeline, allocator,
3823                                                             m_rayTracingPipeline, m_missShaderGroup);
3824     m_hitShaderBindingTable      = createShaderBindingTable(vki, vkd, device, physicalDevice, *m_pipeline, allocator,
3825                                                             m_rayTracingPipeline, m_hitShaderGroup);
3826     m_callableShaderBindingTable = createShaderBindingTable(vki, vkd, device, physicalDevice, *m_pipeline, allocator,
3827                                                             m_rayTracingPipeline, m_callableShaderGroup);
3828 
3829     m_raygenShaderBindingTableRegion =
3830         makeStridedDeviceAddressRegion(vkd, device, getVkBuffer(m_raygenShaderBindingTable), shaderGroupHandleSize);
3831     m_missShaderBindingTableRegion =
3832         makeStridedDeviceAddressRegion(vkd, device, getVkBuffer(m_missShaderBindingTable), shaderGroupHandleSize);
3833     m_hitShaderBindingTableRegion =
3834         makeStridedDeviceAddressRegion(vkd, device, getVkBuffer(m_hitShaderBindingTable), shaderGroupHandleSize);
3835     m_callableShaderBindingTableRegion =
3836         makeStridedDeviceAddressRegion(vkd, device, getVkBuffer(m_callableShaderBindingTable), shaderGroupHandleSize);
3837 }
3838 
addRayTracingShader(const VkShaderStageFlagBits stage,const uint32_t group)3839 void DescriptorBufferTestInstance::addRayTracingShader(const VkShaderStageFlagBits stage, const uint32_t group)
3840 {
3841     DE_ASSERT(m_rayTracingPipeline);
3842 
3843     m_rayTracingPipeline->addShader(stage, createShaderModule(*m_deviceInterface, *m_device, getShaderBinary(stage), 0),
3844                                     group);
3845 }
3846 
3847 // The graphics pipeline is very simple for this test.
3848 // The number of shader stages is configurable. There's no vertex input, a single triangle covers the entire viewport.
3849 // The color target uses R32_UINT format and is used to save the verifcation result.
3850 //
createGraphicsPipeline()3851 void DescriptorBufferTestInstance::createGraphicsPipeline()
3852 {
3853     std::vector<VkImageView> framebufferAttachments;
3854 
3855     {
3856         m_colorImage.info                       = initVulkanStructure();
3857         m_colorImage.info.flags                 = 0;
3858         m_colorImage.info.imageType             = VK_IMAGE_TYPE_2D;
3859         m_colorImage.info.format                = VK_FORMAT_R32_UINT;
3860         m_colorImage.info.extent.width          = m_renderArea.extent.width;
3861         m_colorImage.info.extent.height         = m_renderArea.extent.height;
3862         m_colorImage.info.extent.depth          = 1;
3863         m_colorImage.info.mipLevels             = 1;
3864         m_colorImage.info.arrayLayers           = 1;
3865         m_colorImage.info.samples               = VK_SAMPLE_COUNT_1_BIT;
3866         m_colorImage.info.tiling                = VK_IMAGE_TILING_OPTIMAL;
3867         m_colorImage.info.usage                 = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3868         m_colorImage.info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
3869         m_colorImage.info.queueFamilyIndexCount = 0;
3870         m_colorImage.info.pQueueFamilyIndices   = nullptr;
3871         m_colorImage.info.initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED;
3872 
3873         m_colorImage.image = createImage(*m_deviceInterface, *m_device, &m_colorImage.info);
3874 
3875         auto memReqs           = getImageMemoryRequirements(*m_deviceInterface, *m_device, *m_colorImage.image);
3876         m_colorImage.sizeBytes = memReqs.size;
3877         m_colorImage.alloc     = allocate(memReqs, MemoryRequirement::Local);
3878 
3879         VK_CHECK(m_deviceInterface->bindImageMemory(*m_device, *m_colorImage.image, m_colorImage.alloc->getMemory(),
3880                                                     m_colorImage.alloc->getOffset()));
3881     }
3882     {
3883         auto createInfo = makeBufferCreateInfo(m_colorImage.sizeBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
3884 
3885         m_colorBuffer.buffer = createBuffer(*m_deviceInterface, *m_device, &createInfo);
3886 
3887         auto memReqs = getBufferMemoryRequirements(*m_deviceInterface, *m_device, *m_colorBuffer.buffer);
3888 
3889         m_colorBuffer.alloc = allocate(memReqs, MemoryRequirement::HostVisible);
3890         VK_CHECK(m_deviceInterface->bindBufferMemory(*m_device, *m_colorBuffer.buffer, m_colorBuffer.alloc->getMemory(),
3891                                                      m_colorBuffer.alloc->getOffset()));
3892     }
3893     {
3894         VkImageViewCreateInfo createInfo = initVulkanStructure();
3895         createInfo.image                 = *m_colorImage.image;
3896         createInfo.viewType              = VK_IMAGE_VIEW_TYPE_2D;
3897         createInfo.format                = m_colorImage.info.format;
3898         createInfo.components            = ComponentMappingIdentity;
3899         createInfo.subresourceRange      = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
3900 
3901         m_colorImage.imageView = createImageView(*m_deviceInterface, *m_device, &createInfo);
3902     }
3903 
3904     framebufferAttachments.push_back(*m_colorImage.imageView);
3905 
3906     {
3907         std::vector<VkAttachmentDescription> attachments;
3908         std::vector<VkAttachmentReference> colorRefs;
3909         std::vector<VkAttachmentReference> inputRefs;
3910 
3911         {
3912             VkAttachmentDescription colorAttachment{};
3913             colorAttachment.format         = VK_FORMAT_R32_UINT;
3914             colorAttachment.samples        = VK_SAMPLE_COUNT_1_BIT;
3915             colorAttachment.loadOp         = VK_ATTACHMENT_LOAD_OP_CLEAR;
3916             colorAttachment.storeOp        = VK_ATTACHMENT_STORE_OP_STORE;
3917             colorAttachment.stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
3918             colorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
3919             colorAttachment.initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
3920             colorAttachment.finalLayout    = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
3921 
3922             colorRefs.emplace_back(
3923                 makeAttachmentReference(u32(attachments.size()), VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL));
3924             attachments.emplace_back(colorAttachment);
3925         }
3926 
3927         for (uint32_t setIndex = 0; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
3928         {
3929             const auto &dsl = **m_descriptorSetLayouts[setIndex];
3930 
3931             for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
3932             {
3933                 const auto &binding = dsl.bindings[bindingIndex];
3934 
3935                 if (binding.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
3936                 {
3937                     for (uint32_t arrayIndex = 0; arrayIndex < binding.descriptorCount; ++arrayIndex)
3938                     {
3939                         VkAttachmentDescription inputAttachment{};
3940                         inputAttachment.format         = m_imageColorFormat;
3941                         inputAttachment.samples        = VK_SAMPLE_COUNT_1_BIT;
3942                         inputAttachment.loadOp         = VK_ATTACHMENT_LOAD_OP_LOAD;
3943                         inputAttachment.storeOp        = VK_ATTACHMENT_STORE_OP_DONT_CARE;
3944                         inputAttachment.stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
3945                         inputAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
3946                         inputAttachment.initialLayout  = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3947                         inputAttachment.finalLayout    = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3948 
3949                         inputRefs.emplace_back(
3950                             makeAttachmentReference(u32(attachments.size()), VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL));
3951                         attachments.emplace_back(inputAttachment);
3952 
3953                         const auto inputAttachmentResourceIndex = binding.perBindingResourceIndex[arrayIndex];
3954                         framebufferAttachments.push_back(
3955                             *(**m_resources[inputAttachmentResourceIndex]).image.imageView);
3956                     }
3957                 }
3958             }
3959         }
3960 
3961         VkSubpassDescription subpass{};
3962         subpass.pipelineBindPoint       = VK_PIPELINE_BIND_POINT_GRAPHICS;
3963         subpass.inputAttachmentCount    = u32(inputRefs.size());
3964         subpass.pInputAttachments       = inputRefs.data();
3965         subpass.colorAttachmentCount    = u32(colorRefs.size());
3966         subpass.pColorAttachments       = colorRefs.data();
3967         subpass.pResolveAttachments     = nullptr;
3968         subpass.pDepthStencilAttachment = nullptr;
3969         subpass.preserveAttachmentCount = 0;
3970         subpass.pPreserveAttachments    = nullptr;
3971 
3972         VkRenderPassCreateInfo createInfo = initVulkanStructure();
3973         // No explicit dependencies
3974         createInfo.attachmentCount = u32(attachments.size());
3975         createInfo.pAttachments    = attachments.data();
3976         createInfo.subpassCount    = 1;
3977         createInfo.pSubpasses      = &subpass;
3978 
3979         m_renderPass = createRenderPass(*m_deviceInterface, *m_device, &createInfo);
3980     }
3981     {
3982         VkFramebufferCreateInfo createInfo = initVulkanStructure();
3983         createInfo.renderPass              = *m_renderPass;
3984         createInfo.attachmentCount         = u32(framebufferAttachments.size());
3985         createInfo.pAttachments            = framebufferAttachments.data();
3986         createInfo.width                   = m_renderArea.extent.width;
3987         createInfo.height                  = m_renderArea.extent.height;
3988         createInfo.layers                  = 1;
3989 
3990         m_framebuffer = createFramebuffer(*m_deviceInterface, *m_device, &createInfo);
3991     }
3992 
3993     std::vector<VkPipelineShaderStageCreateInfo> shaderStages;
3994 
3995     Move<VkShaderModule> vertModule;
3996     Move<VkShaderModule> tessControlModule;
3997     Move<VkShaderModule> tessEvalModule;
3998     Move<VkShaderModule> geomModule;
3999     Move<VkShaderModule> fragModule;
4000 
4001     vertModule = createShaderModule(*m_deviceInterface, *m_device, getShaderBinary(VK_SHADER_STAGE_VERTEX_BIT), 0u);
4002     fragModule = createShaderModule(*m_deviceInterface, *m_device, getShaderBinary(VK_SHADER_STAGE_FRAGMENT_BIT), 0u);
4003 
4004     shaderStages.emplace_back(makeShaderStageCreateInfo(VK_SHADER_STAGE_VERTEX_BIT, *vertModule));
4005     shaderStages.emplace_back(makeShaderStageCreateInfo(VK_SHADER_STAGE_FRAGMENT_BIT, *fragModule));
4006 
4007     if (m_params.isTessellation())
4008     {
4009         tessControlModule = createShaderModule(*m_deviceInterface, *m_device,
4010                                                getShaderBinary(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT), 0u);
4011         tessEvalModule    = createShaderModule(*m_deviceInterface, *m_device,
4012                                                getShaderBinary(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT), 0u);
4013 
4014         shaderStages.emplace_back(
4015             makeShaderStageCreateInfo(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, *tessControlModule));
4016         shaderStages.emplace_back(
4017             makeShaderStageCreateInfo(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, *tessEvalModule));
4018     }
4019     else if (m_params.isGeometry())
4020     {
4021         geomModule =
4022             createShaderModule(*m_deviceInterface, *m_device, getShaderBinary(VK_SHADER_STAGE_GEOMETRY_BIT), 0u);
4023 
4024         shaderStages.emplace_back(makeShaderStageCreateInfo(VK_SHADER_STAGE_GEOMETRY_BIT, *geomModule));
4025     }
4026 
4027     VkPipelineVertexInputStateCreateInfo vertexInputState = initVulkanStructure();
4028     // No vertex input
4029 
4030     VkPipelineInputAssemblyStateCreateInfo inputAssemblyState = initVulkanStructure();
4031     inputAssemblyState.topology =
4032         !!tessControlModule ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
4033 
4034     VkPipelineTessellationStateCreateInfo tesselationState = initVulkanStructure();
4035     tesselationState.patchControlPoints                    = 3;
4036 
4037     VkViewport viewport = makeViewport(m_renderArea.extent);
4038 
4039     VkPipelineViewportStateCreateInfo viewportState = initVulkanStructure();
4040     viewportState.viewportCount                     = 1;
4041     viewportState.pViewports                        = &viewport;
4042     viewportState.scissorCount                      = 1;
4043     viewportState.pScissors                         = &m_renderArea;
4044 
4045     VkPipelineRasterizationStateCreateInfo rasterizationState = initVulkanStructure();
4046     rasterizationState.depthClampEnable                       = VK_FALSE;
4047     rasterizationState.rasterizerDiscardEnable                = VK_FALSE;
4048     rasterizationState.polygonMode                            = VK_POLYGON_MODE_FILL;
4049     rasterizationState.cullMode                               = VK_CULL_MODE_NONE;
4050     rasterizationState.frontFace                              = VK_FRONT_FACE_COUNTER_CLOCKWISE;
4051     rasterizationState.depthBiasEnable                        = VK_FALSE;
4052     rasterizationState.depthBiasConstantFactor                = 0.0f;
4053     rasterizationState.depthBiasClamp                         = 0.0f;
4054     rasterizationState.depthBiasSlopeFactor                   = 0.0f;
4055     rasterizationState.lineWidth                              = 1.0f;
4056 
4057     VkPipelineMultisampleStateCreateInfo multisampleState = initVulkanStructure();
4058     // Everything else disabled/default
4059     multisampleState.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
4060 
4061     VkPipelineDepthStencilStateCreateInfo depthStencilState = initVulkanStructure();
4062     // Everything else disabled/default
4063     depthStencilState.minDepthBounds = 0.0f;
4064     depthStencilState.maxDepthBounds = 1.0f;
4065 
4066     VkPipelineColorBlendAttachmentState colorAttachment{};
4067     // Everything else disabled/default
4068     colorAttachment.colorWriteMask =
4069         VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
4070 
4071     VkPipelineColorBlendStateCreateInfo colorBlendState = initVulkanStructure();
4072     // Everything else disabled/default
4073     colorBlendState.attachmentCount = 1;
4074     colorBlendState.pAttachments    = &colorAttachment;
4075 
4076     {
4077         VkGraphicsPipelineCreateInfo createInfo = initVulkanStructure();
4078         createInfo.stageCount                   = u32(shaderStages.size());
4079         createInfo.pStages                      = shaderStages.data();
4080         createInfo.pVertexInputState            = &vertexInputState;
4081         createInfo.pInputAssemblyState          = &inputAssemblyState;
4082         createInfo.pTessellationState           = m_params.isTessellation() ? &tesselationState : nullptr;
4083         createInfo.pViewportState               = &viewportState;
4084         createInfo.pRasterizationState          = &rasterizationState;
4085         createInfo.pMultisampleState            = &multisampleState;
4086         createInfo.pDepthStencilState           = &depthStencilState;
4087         createInfo.pColorBlendState             = &colorBlendState;
4088         createInfo.pDynamicState                = nullptr;
4089         createInfo.layout                       = *m_pipelineLayout;
4090         createInfo.renderPass                   = *m_renderPass;
4091         createInfo.subpass                      = 0;
4092         createInfo.basePipelineHandle           = VK_NULL_HANDLE;
4093         createInfo.basePipelineIndex            = -1;
4094         createInfo.flags                        = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT;
4095 
4096         m_pipeline = vk::createGraphicsPipeline(*m_deviceInterface, *m_device,
4097                                                 VK_NULL_HANDLE, // pipeline cache
4098                                                 &createInfo);
4099     }
4100 }
4101 
createBufferForBinding(ResourceHolder & resources,VkDescriptorType descriptorType,VkBufferCreateInfo createInfo,bool isResultBuffer) const4102 void DescriptorBufferTestInstance::createBufferForBinding(ResourceHolder &resources, VkDescriptorType descriptorType,
4103                                                           VkBufferCreateInfo createInfo, bool isResultBuffer) const
4104 {
4105     auto &bufferResource    = resources.buffer;
4106     auto &captureReplayData = resources.captureReplay.bufferData;
4107 
4108     createInfo.usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
4109 
4110     if (!isResultBuffer && isCaptureDescriptor(descriptorType))
4111     {
4112         createInfo.flags |= VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
4113 
4114         DE_ASSERT(!bufferResource.buffer);
4115         bufferResource.buffer = createBuffer(*m_deviceInterface, *m_device, &createInfo);
4116 
4117         VkBufferCaptureDescriptorDataInfoEXT info = initVulkanStructure();
4118         info.buffer                               = *bufferResource.buffer;
4119 
4120         DE_ASSERT(captureReplayData.empty());
4121         captureReplayData.resize(m_descriptorBufferProperties.bufferCaptureReplayDescriptorDataSize);
4122 
4123         VK_CHECK(
4124             m_deviceInterface->getBufferOpaqueCaptureDescriptorDataEXT(*m_device, &info, captureReplayData.data()));
4125     }
4126     else if (!isResultBuffer && isReplayDescriptor(descriptorType))
4127     {
4128         // Free the previous buffer and its memory
4129         reset(bufferResource.buffer);
4130         reset(bufferResource.alloc);
4131 
4132         VkOpaqueCaptureDescriptorDataCreateInfoEXT info = initVulkanStructure();
4133         info.opaqueCaptureDescriptorData                = captureReplayData.data();
4134 
4135         createInfo.flags |= VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
4136         createInfo.pNext = &info;
4137 
4138         bufferResource.buffer = createBuffer(*m_deviceInterface, *m_device, &createInfo);
4139     }
4140     else
4141     {
4142         DE_ASSERT(!bufferResource.buffer);
4143         bufferResource.buffer = createBuffer(*m_deviceInterface, *m_device, &createInfo);
4144     }
4145 
4146     auto memReqs = getBufferMemoryRequirements(*m_deviceInterface, *m_device, *bufferResource.buffer);
4147 
4148     VkMemoryOpaqueCaptureAddressAllocateInfo opaqueCaptureAddressAllocateInfo = initVulkanStructure();
4149     VkMemoryAllocateFlagsInfo allocFlagsInfo                                  = initVulkanStructure();
4150     allocFlagsInfo.flags |= VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
4151 
4152     if (!isResultBuffer && m_params.isCaptureReplayDescriptor(descriptorType))
4153     {
4154         allocFlagsInfo.flags |= VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT;
4155         allocFlagsInfo.pNext = &opaqueCaptureAddressAllocateInfo;
4156 
4157         if (isCaptureDescriptor(descriptorType))
4158         {
4159             opaqueCaptureAddressAllocateInfo.opaqueCaptureAddress = 0ull;
4160         }
4161         else if (isReplayDescriptor(descriptorType))
4162         {
4163             opaqueCaptureAddressAllocateInfo.opaqueCaptureAddress = bufferResource.opaqueCaptureAddress;
4164         }
4165     }
4166 
4167     DE_ASSERT(!bufferResource.alloc);
4168     bufferResource.alloc = allocate(memReqs, MemoryRequirement::HostVisible, &allocFlagsInfo);
4169 
4170     if (isCaptureDescriptor(descriptorType))
4171     {
4172         VkDeviceMemoryOpaqueCaptureAddressInfo memoryOpaqueCaptureAddressInfo = initVulkanStructure();
4173 
4174         memoryOpaqueCaptureAddressInfo.memory = bufferResource.alloc->getMemory();
4175 
4176         bufferResource.opaqueCaptureAddress =
4177             m_deviceInterface->getDeviceMemoryOpaqueCaptureAddress(*m_device, &memoryOpaqueCaptureAddressInfo);
4178     }
4179 
4180     VK_CHECK(m_deviceInterface->bindBufferMemory(*m_device, *bufferResource.buffer, bufferResource.alloc->getMemory(),
4181                                                  bufferResource.alloc->getOffset()));
4182 
4183     bufferResource.loadDeviceAddress(*m_deviceInterface, *m_device);
4184 }
4185 
createImageForBinding(ResourceHolder & resources,VkDescriptorType descriptorType) const4186 void DescriptorBufferTestInstance::createImageForBinding(ResourceHolder &resources,
4187                                                          VkDescriptorType descriptorType) const
4188 {
4189     auto &imageResource = resources.image;
4190 
4191     // Image
4192     auto &captureReplayData = resources.captureReplay.imageData;
4193 
4194     if (isCaptureDescriptor(descriptorType))
4195     {
4196         imageResource.info.flags |= VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
4197 
4198         DE_ASSERT(!imageResource.image);
4199         imageResource.image = createImage(*m_deviceInterface, *m_device, &imageResource.info);
4200 
4201         VkImageCaptureDescriptorDataInfoEXT info = initVulkanStructure();
4202         info.image                               = *imageResource.image;
4203 
4204         DE_ASSERT(captureReplayData.empty());
4205         captureReplayData.resize(m_descriptorBufferProperties.imageCaptureReplayDescriptorDataSize);
4206 
4207         VK_CHECK(m_deviceInterface->getImageOpaqueCaptureDescriptorDataEXT(*m_device, &info, captureReplayData.data()));
4208     }
4209     else if (isReplayDescriptor(descriptorType))
4210     {
4211         // Free the previous image, its memory and the image view
4212         reset(imageResource.image);
4213         reset(imageResource.alloc);
4214         reset(imageResource.imageView);
4215 
4216         VkOpaqueCaptureDescriptorDataCreateInfoEXT info = initVulkanStructure();
4217         info.opaqueCaptureDescriptorData                = captureReplayData.data();
4218 
4219         imageResource.info.flags |= VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
4220         imageResource.info.pNext = &info;
4221 
4222         imageResource.image = createImage(*m_deviceInterface, *m_device, &imageResource.info);
4223     }
4224     else
4225     {
4226         DE_ASSERT(!imageResource.image);
4227         imageResource.image = createImage(*m_deviceInterface, *m_device, &imageResource.info);
4228     }
4229 
4230     // Memory allocation
4231     auto memReqs = getImageMemoryRequirements(*m_deviceInterface, *m_device, *imageResource.image);
4232 
4233     VkMemoryOpaqueCaptureAddressAllocateInfo opaqueCaptureAddressAllocateInfo = initVulkanStructure();
4234     VkMemoryAllocateFlagsInfo allocFlagsInfo                                  = initVulkanStructure();
4235 
4236     if (m_params.isCaptureReplayDescriptor(descriptorType))
4237     {
4238         allocFlagsInfo.flags |=
4239             VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT | VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
4240         allocFlagsInfo.pNext = &opaqueCaptureAddressAllocateInfo;
4241 
4242         if (isCaptureDescriptor(descriptorType))
4243         {
4244             opaqueCaptureAddressAllocateInfo.opaqueCaptureAddress = 0ull;
4245         }
4246         else if (isReplayDescriptor(descriptorType))
4247         {
4248             opaqueCaptureAddressAllocateInfo.opaqueCaptureAddress = imageResource.opaqueCaptureAddress;
4249         }
4250     }
4251 
4252     DE_ASSERT(!imageResource.alloc);
4253     imageResource.sizeBytes = memReqs.size;
4254     imageResource.alloc     = allocate(memReqs, MemoryRequirement::Local, &allocFlagsInfo);
4255 
4256     if (isCaptureDescriptor(descriptorType))
4257     {
4258         VkDeviceMemoryOpaqueCaptureAddressInfo memoryOpaqueCaptureAddressInfo = initVulkanStructure();
4259 
4260         memoryOpaqueCaptureAddressInfo.memory = imageResource.alloc->getMemory();
4261 
4262         imageResource.opaqueCaptureAddress =
4263             m_deviceInterface->getDeviceMemoryOpaqueCaptureAddress(*m_device, &memoryOpaqueCaptureAddressInfo);
4264     }
4265 
4266     VK_CHECK(m_deviceInterface->bindImageMemory(*m_device, *imageResource.image, imageResource.alloc->getMemory(),
4267                                                 imageResource.alloc->getOffset()));
4268 
4269     // Image view
4270     {
4271         auto &captureReplayDataView = resources.captureReplay.imageViewData;
4272 
4273         DE_ASSERT(imageResource.info.imageType == VK_IMAGE_TYPE_2D);
4274 
4275         VkImageViewCreateInfo createInfo = initVulkanStructure();
4276         createInfo.image                 = *imageResource.image;
4277         createInfo.viewType              = VK_IMAGE_VIEW_TYPE_2D;
4278         createInfo.format                = imageResource.info.format;
4279         createInfo.components            = ComponentMappingIdentity;
4280         createInfo.subresourceRange      = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
4281 
4282         if (isCaptureDescriptor(descriptorType))
4283         {
4284             createInfo.flags |= VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
4285 
4286             DE_ASSERT(!imageResource.imageView);
4287             imageResource.imageView = createImageView(*m_deviceInterface, *m_device, &createInfo);
4288 
4289             VkImageViewCaptureDescriptorDataInfoEXT info = initVulkanStructure();
4290             info.imageView                               = *imageResource.imageView;
4291 
4292             DE_ASSERT(captureReplayDataView.empty());
4293             captureReplayDataView.resize(m_descriptorBufferProperties.imageViewCaptureReplayDescriptorDataSize);
4294 
4295             VK_CHECK(m_deviceInterface->getImageViewOpaqueCaptureDescriptorDataEXT(*m_device, &info,
4296                                                                                    captureReplayDataView.data()));
4297         }
4298         else if (isReplayDescriptor(descriptorType))
4299         {
4300             VkOpaqueCaptureDescriptorDataCreateInfoEXT info = initVulkanStructure();
4301             info.opaqueCaptureDescriptorData                = captureReplayDataView.data();
4302 
4303             createInfo.flags |= VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
4304             createInfo.pNext = &info;
4305 
4306             imageResource.imageView = createImageView(*m_deviceInterface, *m_device, &createInfo);
4307         }
4308         else
4309         {
4310             VkSamplerYcbcrConversionInfo samplerYcbcrConvInfo = initVulkanStructure();
4311 
4312             if (resources.samplerYcbcrConversion)
4313             {
4314                 DE_ASSERT(m_params.variant == TestVariant::YCBCR_SAMPLER);
4315 
4316                 samplerYcbcrConvInfo.conversion = *resources.samplerYcbcrConversion;
4317 
4318                 createInfo.pNext = &samplerYcbcrConvInfo;
4319             }
4320 
4321             // No assertion here, as we must create a new view to go with the image.
4322             imageResource.imageView = createImageView(*m_deviceInterface, *m_device, &createInfo);
4323         }
4324     }
4325 }
4326 
4327 // This function prepares a descriptor binding for use:
4328 // - Create necessary buffer/image resources and initialize them
4329 // - Write descriptor data into the descriptor buffer
4330 // - Fix the memory layout of combined image samplers (if needed)
4331 //
initializeBinding(const DescriptorSetLayoutHolder & dsl,uint32_t setIndex,Binding & binding)4332 void DescriptorBufferTestInstance::initializeBinding(const DescriptorSetLayoutHolder &dsl, uint32_t setIndex,
4333                                                      Binding &binding)
4334 {
4335     const auto arrayCount =
4336         (binding.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) ? 1 : binding.descriptorCount;
4337 
4338     const bool mustSplitCombinedImageSampler =
4339         (arrayCount > 1) && (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
4340         (m_descriptorBufferProperties.combinedImageSamplerDescriptorSingleArray == VK_FALSE);
4341 
4342     const bool isRobustBufferAccess = (m_params.variant == TestVariant::ROBUST_BUFFER_ACCESS);
4343     const bool isNullDescriptor     = (m_params.variant == TestVariant::ROBUST_NULL_DESCRIPTOR) &&
4344                                   (binding.descriptorType == m_params.descriptor) && binding.isTestableDescriptor();
4345 
4346     for (uint32_t arrayIndex = 0; arrayIndex < arrayCount; ++arrayIndex)
4347     {
4348         VkDescriptorGetInfoEXT descGetInfo     = initVulkanStructure();
4349         VkDescriptorAddressInfoEXT addressInfo = initVulkanStructure();
4350         VkDescriptorImageInfo imageInfo{
4351             VK_NULL_HANDLE, VK_NULL_HANDLE,
4352             VK_IMAGE_LAYOUT_UNDEFINED}; // must be explicitly initialized due to CTS handles inside
4353 
4354         descGetInfo.type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
4355 
4356         if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
4357             (binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER))
4358         {
4359             auto &resources      = getOrCreateResource(binding, arrayIndex);
4360             auto &bufferResource = resources.buffer;
4361 
4362             const VkBufferUsageFlags usage =
4363                 (binding.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT :
4364                 (binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT :
4365                                                                                 0;
4366             DE_ASSERT(usage);
4367 
4368             bufferResource.size =
4369                 sizeof(uint32_t) * (binding.isResultBuffer ? ConstResultBufferDwords : ConstUniformBufferDwords);
4370 
4371             createBufferForBinding(resources, binding.descriptorType, makeBufferCreateInfo(bufferResource.size, usage),
4372                                    binding.isResultBuffer);
4373 
4374             uint32_t *pBufferData = static_cast<uint32_t *>(bufferResource.alloc->getHostPtr());
4375 
4376             if (binding.isResultBuffer || isRobustBufferAccess)
4377             {
4378                 // We zero the buffer if it's a result buffer or if it's used with robust access.
4379                 deMemset(pBufferData, 0, static_cast<std::size_t>(bufferResource.size));
4380             }
4381             else
4382             {
4383                 const auto data = getExpectedData(m_params.hash, setIndex, binding.binding, arrayIndex);
4384 
4385                 for (uint32_t i = 0; i < ConstUniformBufferDwords; ++i)
4386                 {
4387                     pBufferData[i] = data + i;
4388                 }
4389             }
4390 
4391             addressInfo.address = bufferResource.deviceAddress;
4392             addressInfo.range   = bufferResource.size;
4393             addressInfo.format  = VK_FORMAT_UNDEFINED;
4394 
4395             DE_UNREF(ConstRobustBufferAlignment);
4396             DE_ASSERT(binding.isResultBuffer || !isRobustBufferAccess ||
4397                       ((addressInfo.range % ConstRobustBufferAlignment) == 0));
4398 
4399             descGetInfo.type                = binding.descriptorType;
4400             descGetInfo.data.pUniformBuffer = isNullDescriptor ? nullptr : &addressInfo; // and pStorageBuffer
4401         }
4402         else if (binding.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
4403         {
4404             // Inline uniforms don't use a backing buffer.
4405             DE_ASSERT(binding.perBindingResourceIndex[arrayIndex] == INDEX_INVALID);
4406         }
4407         else if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
4408                  (binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER))
4409         {
4410             auto &resources      = getOrCreateResource(binding, arrayIndex);
4411             auto &bufferResource = resources.buffer;
4412 
4413             const VkBufferUsageFlags usage = (binding.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ?
4414                                                  VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT :
4415                                              (binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) ?
4416                                                  VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT :
4417                                                  0;
4418             DE_ASSERT(usage);
4419 
4420             bufferResource.size = ConstTexelBufferElements * sizeof(uint32_t);
4421 
4422             createBufferForBinding(resources, binding.descriptorType, makeBufferCreateInfo(bufferResource.size, usage),
4423                                    binding.isResultBuffer);
4424 
4425             if (m_params.isPushDescriptorTest())
4426             {
4427                 // Push descriptors use buffer views.
4428                 auto &bufferViewResource = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).bufferView;
4429 
4430                 bufferViewResource = makeBufferView(*m_deviceInterface, *m_device, *bufferResource.buffer,
4431                                                     VK_FORMAT_R32_UINT, 0, bufferResource.size);
4432             }
4433 
4434             uint32_t *pBufferData = static_cast<uint32_t *>(bufferResource.alloc->getHostPtr());
4435 
4436             if (isRobustBufferAccess)
4437             {
4438                 // Zero the buffer used with robust access.
4439                 deMemset(pBufferData, 0, static_cast<std::size_t>(bufferResource.size));
4440             }
4441             else
4442             {
4443                 const auto data = getExpectedData(m_params.hash, setIndex, binding.binding, arrayIndex);
4444 
4445                 for (uint32_t i = 0; i < ConstTexelBufferElements; ++i)
4446                 {
4447                     pBufferData[i] = data + i;
4448                 }
4449             }
4450 
4451             addressInfo.address = bufferResource.deviceAddress;
4452             addressInfo.range   = bufferResource.size;
4453             addressInfo.format  = VK_FORMAT_R32_UINT;
4454 
4455             DE_UNREF(ConstRobustBufferAlignment);
4456             DE_ASSERT(!isRobustBufferAccess || ((addressInfo.range % ConstRobustBufferAlignment) == 0));
4457 
4458             descGetInfo.type                     = binding.descriptorType;
4459             descGetInfo.data.pUniformTexelBuffer = isNullDescriptor ? nullptr : &addressInfo; // and pStorageTexelBuffer
4460         }
4461         else if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
4462                  (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
4463                  (binding.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
4464                  (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
4465         {
4466             // Check if we had already added the resource while handling samplers.
4467             auto &resources     = getOrCreateResource(binding, arrayIndex);
4468             auto &imageResource = resources.image;
4469             auto &stagingBuffer = resources.buffer;
4470 
4471             {
4472                 VkImageLayout layout    = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
4473                 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4474 
4475                 if (binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
4476                 {
4477                     usage |= VK_IMAGE_USAGE_STORAGE_BIT;
4478                     layout = VK_IMAGE_LAYOUT_GENERAL;
4479                 }
4480                 else if (binding.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
4481                 {
4482                     usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
4483                 }
4484                 else
4485                 {
4486                     usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
4487                 }
4488 
4489                 // We ensure the extent matches the render area, for the sake of input attachment case.
4490                 imageResource.info                       = initVulkanStructure();
4491                 imageResource.info.flags                 = 0;
4492                 imageResource.info.imageType             = VK_IMAGE_TYPE_2D;
4493                 imageResource.info.format                = m_imageColorFormat;
4494                 imageResource.info.extent.width          = m_renderArea.extent.width;
4495                 imageResource.info.extent.height         = m_renderArea.extent.height;
4496                 imageResource.info.extent.depth          = 1;
4497                 imageResource.info.mipLevels             = 1;
4498                 imageResource.info.arrayLayers           = 1;
4499                 imageResource.info.samples               = VK_SAMPLE_COUNT_1_BIT;
4500                 imageResource.info.tiling                = VK_IMAGE_TILING_OPTIMAL;
4501                 imageResource.info.usage                 = usage;
4502                 imageResource.info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
4503                 imageResource.info.queueFamilyIndexCount = 0;
4504                 imageResource.info.pQueueFamilyIndices   = nullptr;
4505                 imageResource.info.initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED;
4506 
4507                 createImageForBinding(resources, binding.descriptorType);
4508 
4509                 imageResource.layout = layout;
4510 
4511                 imageInfo.imageLayout = layout;
4512                 imageInfo.imageView   = *imageResource.imageView;
4513 
4514                 descGetInfo.type = binding.descriptorType;
4515 
4516                 if (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
4517                 {
4518                     if (isNullDescriptor)
4519                         imageInfo.imageView = VK_NULL_HANDLE;
4520 
4521                     descGetInfo.data.pCombinedImageSampler = &imageInfo;
4522                 }
4523                 else
4524                     descGetInfo.data.pStorageImage = isNullDescriptor ? nullptr : &imageInfo;
4525             }
4526             {
4527                 const auto numPixels = m_renderArea.extent.width * m_renderArea.extent.height; // plane 0
4528 
4529                 if (m_imageColorFormat == VK_FORMAT_R32_UINT)
4530                 {
4531                     stagingBuffer.size = sizeof(uint32_t) * numPixels;
4532                 }
4533                 else if (m_imageColorFormat == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM)
4534                 {
4535                     DE_ASSERT((m_renderArea.extent.width % 2) == 0);
4536                     DE_ASSERT((m_renderArea.extent.height % 2) == 0);
4537 
4538                     stagingBuffer.size = 1 * numPixels;      // g8
4539                     stagingBuffer.size += 2 * numPixels / 4; // b8r8
4540                 }
4541                 else
4542                 {
4543                     DE_ASSERT(0);
4544                 }
4545 
4546                 auto createInfo = makeBufferCreateInfo(stagingBuffer.size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
4547 
4548                 stagingBuffer.buffer = createBuffer(*m_deviceInterface, *m_device, &createInfo);
4549 
4550                 auto memReqs = getBufferMemoryRequirements(*m_deviceInterface, *m_device, *stagingBuffer.buffer);
4551 
4552                 stagingBuffer.alloc = allocate(memReqs, MemoryRequirement::HostVisible);
4553 
4554                 VK_CHECK(m_deviceInterface->bindBufferMemory(*m_device, *stagingBuffer.buffer,
4555                                                              stagingBuffer.alloc->getMemory(),
4556                                                              stagingBuffer.alloc->getOffset()));
4557 
4558                 // Fill the whole image uniformly
4559                 if (m_imageColorFormat == VK_FORMAT_R32_UINT)
4560                 {
4561                     auto pBufferData = static_cast<uint32_t *>(stagingBuffer.alloc->getHostPtr());
4562                     uint32_t expectedData;
4563 
4564                     if (binding.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
4565                     {
4566                         expectedData = getExpectedData(m_params.hash, setIndex, binding.binding,
4567                                                        binding.inputAttachmentIndex + arrayIndex);
4568                     }
4569                     else
4570                     {
4571                         expectedData = getExpectedData(m_params.hash, setIndex, binding.binding, arrayIndex);
4572                     }
4573 
4574                     std::fill(pBufferData, pBufferData + numPixels, expectedData);
4575                 }
4576                 else if (m_imageColorFormat == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM)
4577                 {
4578                     auto pPlane0 = static_cast<uint8_t *>(stagingBuffer.alloc->getHostPtr());
4579                     auto pPlane1 = static_cast<uint16_t *>(offsetPtr(pPlane0, numPixels));
4580                     const auto expectedData =
4581                         getExpectedData_G8_B8R8(m_params.hash, setIndex, binding.binding, arrayIndex);
4582 
4583                     std::fill(pPlane0, pPlane0 + numPixels, expectedData.x());
4584                     std::fill(pPlane1, pPlane1 + numPixels / 4, expectedData.y());
4585                 }
4586                 else
4587                 {
4588                     DE_ASSERT(0);
4589                 }
4590             }
4591 
4592             if (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
4593             {
4594                 DE_ASSERT(m_params.variant != TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS);
4595 
4596                 DE_ASSERT(binding.perBindingResourceIndex[arrayIndex] != INDEX_INVALID);
4597                 auto &resourceSampler = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).sampler;
4598 
4599                 imageInfo.sampler = *resourceSampler;
4600             }
4601         }
4602         else if (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
4603         {
4604             if (m_params.variant != TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS)
4605             {
4606                 DE_ASSERT(binding.perBindingResourceIndex[arrayIndex] != INDEX_INVALID);
4607                 auto &resourceSampler = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).sampler;
4608 
4609                 descGetInfo.type          = binding.descriptorType;
4610                 descGetInfo.data.pSampler = &*resourceSampler;
4611             }
4612         }
4613         else if (binding.descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR)
4614         {
4615             Allocator &allocator        = *m_allocatorPtr;
4616             const uint32_t expectedData = getExpectedData(m_params.hash, setIndex, binding.binding, arrayIndex);
4617             const float zDepth          = float(expectedData);
4618             const std::vector<tcu::Vec3> vertices{
4619                 tcu::Vec3(-1.0f, -1.0f, zDepth), tcu::Vec3(-1.0f, 1.0f, zDepth), tcu::Vec3(1.0f, -1.0f, zDepth),
4620 
4621                 tcu::Vec3(-1.0f, 1.0f, zDepth),  tcu::Vec3(1.0f, 1.0f, zDepth),  tcu::Vec3(1.0f, -1.0f, zDepth),
4622             };
4623             auto &resources              = getOrCreateResource(binding, arrayIndex);
4624             const bool replayableBinding = binding.isTestableDescriptor();
4625             VkAccelerationStructureCreateFlagsKHR createFlags =
4626                 (m_params.isCaptureReplayDescriptor(binding.descriptorType) && replayableBinding) ?
4627                     static_cast<VkAccelerationStructureCreateFlagsKHR>(
4628                         VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT) :
4629                     static_cast<VkAccelerationStructureCreateFlagsKHR>(0u);
4630             vk::MemoryRequirement memoryReqs =
4631                 (m_params.isCaptureReplayDescriptor(binding.descriptorType) && replayableBinding) ?
4632                     MemoryRequirement::DeviceAddressCaptureReplay :
4633                     MemoryRequirement::Any;
4634             VkOpaqueCaptureDescriptorDataCreateInfoEXT infos[]     = {initVulkanStructure(), initVulkanStructure()};
4635             VkOpaqueCaptureDescriptorDataCreateInfoEXT *infoPtrs[] = {nullptr, nullptr};
4636 
4637             if (isReplayDescriptor(binding.descriptorType) && replayableBinding)
4638             {
4639                 resources.rtBlas.clear();
4640                 resources.rtTlas.clear();
4641 
4642                 std::vector<uint8_t> *captureReplayDatas[] = {&resources.captureReplay.accelerationStructureDataBlas,
4643                                                               &resources.captureReplay.accelerationStructureDataTlas};
4644 
4645                 for (int ndx = 0; ndx < 2; ++ndx)
4646                 {
4647                     std::vector<uint8_t> &captureReplayData          = *captureReplayDatas[ndx];
4648                     VkOpaqueCaptureDescriptorDataCreateInfoEXT &info = infos[ndx];
4649 
4650                     info.opaqueCaptureDescriptorData = captureReplayData.data();
4651                     infoPtrs[ndx]                    = &infos[ndx];
4652                 }
4653             }
4654 
4655             {
4656                 DE_ASSERT(resources.rtBlas.get() == nullptr);
4657 
4658                 resources.rtBlas =
4659                     de::SharedPtr<BottomLevelAccelerationStructure>(makeBottomLevelAccelerationStructure().release());
4660                 if (binding.isRayTracingAS)
4661                     resources.rtBlas->setDefaultGeometryData(m_params.stage);
4662                 else
4663                     resources.rtBlas->setGeometryData(vertices, true);
4664                 resources.rtBlas->setCreateFlags(createFlags);
4665                 resources.rtBlas->create(*m_deviceInterface, *m_device, allocator, 0, 0, infoPtrs[0], memoryReqs);
4666             }
4667 
4668             {
4669                 DE_ASSERT(resources.rtTlas.get() == nullptr);
4670 
4671                 resources.rtTlas = makeTopLevelAccelerationStructure();
4672                 resources.rtTlas->addInstance(resources.rtBlas);
4673                 resources.rtTlas->setCreateFlags(createFlags);
4674                 resources.rtTlas->create(*m_deviceInterface, *m_device, allocator, 0, 0, infoPtrs[1], memoryReqs);
4675             }
4676 
4677             if (isCaptureDescriptor(binding.descriptorType) && replayableBinding)
4678             {
4679                 const VkAccelerationStructureKHR *accelerationStructures[] = {resources.rtBlas->getPtr(),
4680                                                                               resources.rtTlas->getPtr()};
4681                 std::vector<uint8_t> *captureReplayDatas[] = {&resources.captureReplay.accelerationStructureDataBlas,
4682                                                               &resources.captureReplay.accelerationStructureDataTlas};
4683 
4684                 for (int ndx = 0; ndx < 2; ++ndx)
4685                 {
4686                     VkAccelerationStructureCaptureDescriptorDataInfoEXT info = initVulkanStructure();
4687                     const VkAccelerationStructureKHR *accelerationStructure  = accelerationStructures[ndx];
4688                     std::vector<uint8_t> &captureReplayData                  = *captureReplayDatas[ndx];
4689 
4690                     DE_ASSERT(accelerationStructure != nullptr && *accelerationStructure != VK_NULL_HANDLE);
4691                     DE_ASSERT(captureReplayData.empty());
4692 
4693                     info.accelerationStructure = *accelerationStructure;
4694 
4695                     captureReplayData.resize(
4696                         m_descriptorBufferProperties.accelerationStructureCaptureReplayDescriptorDataSize);
4697 
4698                     VK_CHECK(m_deviceInterface->getAccelerationStructureOpaqueCaptureDescriptorDataEXT(
4699                         *m_device, &info, captureReplayData.data()));
4700                 }
4701             }
4702 
4703             descGetInfo.type = binding.descriptorType;
4704             descGetInfo.data.accelerationStructure =
4705                 isNullDescriptor ?
4706                     0 :
4707                     getAccelerationStructureDeviceAddress(*m_deviceInterface, *m_device, *resources.rtTlas->getPtr());
4708         }
4709         else
4710         {
4711             TCU_THROW(InternalError, "Not implemented");
4712         }
4713 
4714         if (dsl.usePushDescriptors || dsl.sizeOfLayout == 0)
4715         {
4716             // Push descriptors don't rely on descriptor buffers, move to the next binding.
4717             continue;
4718         }
4719 
4720         // Write the descriptor at the right offset in the descriptor buffer memory.
4721         // - With inline uniform blocks, we write the uniform data into the descriptor buffer directly.
4722         // - With regular descriptors, the written memory is opaque to us (same goes for null descriptors).
4723         {
4724             void *bindingHostPtr = nullptr;
4725             Allocation *pAlloc   = nullptr;
4726             auto arrayOffset     = arrayIndex * getDescriptorSize(binding);
4727 
4728             if (dsl.stagingBufferOffset == OFFSET_UNUSED)
4729             {
4730                 const auto &descriptorBuffer = *m_descriptorBuffers[dsl.bufferIndex];
4731                 const auto bufferHostPtr     = offsetPtr(descriptorBuffer.alloc->getHostPtr(), dsl.bufferOffset);
4732 
4733                 bindingHostPtr = offsetPtr(bufferHostPtr, binding.offset);
4734                 pAlloc         = descriptorBuffer.alloc.get();
4735             }
4736             else
4737             {
4738                 bindingHostPtr =
4739                     offsetPtr(m_descriptorStagingBuffer.alloc->getHostPtr(), dsl.stagingBufferOffset + binding.offset);
4740 
4741                 pAlloc = m_descriptorStagingBuffer.alloc.get();
4742             }
4743 
4744             if (binding.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
4745             {
4746                 DE_ASSERT(arrayIndex == 0);
4747 
4748                 // Inline uniform data is written in descriptor buffer directly.
4749                 const auto numDwords = binding.descriptorCount / sizeof(uint32_t);
4750                 const auto data      = getExpectedData(m_params.hash, setIndex, binding.binding, arrayIndex);
4751 
4752                 uint32_t *pInlineData = static_cast<uint32_t *>(bindingHostPtr);
4753 
4754                 for (uint32_t i = 0; i < numDwords; ++i)
4755                 {
4756                     pInlineData[i] = data + i;
4757                 }
4758             }
4759             else if (isReplayDescriptor(binding.descriptorType))
4760             {
4761                 // We're expecting that a descriptor based on replayed resources will have exactly the same binary data.
4762                 // Copy it and compare after obtaining the new descriptor.
4763                 //
4764                 auto descriptorPtr        = offsetPtr(bindingHostPtr, arrayOffset);
4765                 const auto descriptorSize = static_cast<size_t>(getDescriptorTypeSize(descGetInfo.type));
4766 
4767                 std::vector<uint8_t> reference(descriptorSize);
4768                 deMemcpy(reference.data(), descriptorPtr, descriptorSize);
4769 
4770                 deMemset(descriptorPtr, 0xcc, descriptorSize);
4771                 m_deviceInterface->getDescriptorEXT(*m_device, &descGetInfo, descriptorSize, descriptorPtr);
4772 
4773                 if (deMemCmp(reference.data(), descriptorPtr, descriptorSize) != 0)
4774                 {
4775                     TCU_THROW(TestError, "Replayed descriptor differs from the captured descriptor");
4776                 }
4777             }
4778             else
4779             {
4780                 auto descriptorPtr        = offsetPtr(bindingHostPtr, arrayOffset);
4781                 const auto descriptorSize = static_cast<size_t>(getDescriptorTypeSize(descGetInfo.type));
4782                 m_deviceInterface->getDescriptorEXT(*m_device, &descGetInfo, descriptorSize, descriptorPtr);
4783             }
4784 
4785             // After writing the last array element, rearrange the split combined image sampler data.
4786             if (mustSplitCombinedImageSampler && ((arrayIndex + 1) == arrayCount))
4787             {
4788                 // We determined the size of the descriptor set layout on the VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type,
4789                 // so it's expected the following holds true.
4790                 DE_ASSERT((m_descriptorBufferProperties.sampledImageDescriptorSize +
4791                            m_descriptorBufferProperties.samplerDescriptorSize) ==
4792                           m_descriptorBufferProperties.combinedImageSamplerDescriptorSize);
4793 
4794                 // Needed buffer memory size depends on combinedImageSamplerDescriptorCount
4795                 auto sampledImageDescriptorSizeInBuffer =
4796                     m_descriptorBufferProperties.sampledImageDescriptorSize * m_combinedImageSamplerDescriptorCount;
4797                 auto samplerDescriptorSizeInBuffer =
4798                     m_descriptorBufferProperties.samplerDescriptorSize * m_combinedImageSamplerDescriptorCount;
4799                 auto combinedImageSamplerDescriptorSizeInBuffer =
4800                     m_descriptorBufferProperties.combinedImageSamplerDescriptorSize *
4801                     m_combinedImageSamplerDescriptorCount;
4802 
4803                 std::vector<uint8_t> scratchSpace(arrayCount * combinedImageSamplerDescriptorSizeInBuffer);
4804 
4805                 const auto descriptorArraySize =
4806                     static_cast<std::size_t>(arrayCount * combinedImageSamplerDescriptorSizeInBuffer);
4807 
4808                 deMemcpy(scratchSpace.data(), bindingHostPtr, descriptorArraySize);
4809                 deMemset(bindingHostPtr, 0, descriptorArraySize);
4810 
4811                 const void *combinedReadPtr = scratchSpace.data();
4812                 void *imageWritePtr         = bindingHostPtr;
4813                 void *samplerWritePtr = offsetPtr(bindingHostPtr, arrayCount * sampledImageDescriptorSizeInBuffer);
4814 
4815                 for (uint32_t i = 0; i < arrayCount; ++i)
4816                 {
4817                     deMemcpy(imageWritePtr, offsetPtr(combinedReadPtr, 0), sampledImageDescriptorSizeInBuffer);
4818                     deMemcpy(samplerWritePtr, offsetPtr(combinedReadPtr, sampledImageDescriptorSizeInBuffer),
4819                              samplerDescriptorSizeInBuffer);
4820 
4821                     combinedReadPtr = offsetPtr(combinedReadPtr, combinedImageSamplerDescriptorSizeInBuffer);
4822                     imageWritePtr   = offsetPtr(imageWritePtr, sampledImageDescriptorSizeInBuffer);
4823                     samplerWritePtr = offsetPtr(samplerWritePtr, samplerDescriptorSizeInBuffer);
4824                 }
4825             }
4826 
4827             flushAlloc(*m_deviceInterface, *m_device, *pAlloc);
4828         }
4829     }
4830 }
4831 
4832 // Update a descriptor set with a push or a push template.
4833 //
pushDescriptorSet(VkCommandBuffer cmdBuf,VkPipelineBindPoint bindPoint,const DescriptorSetLayoutHolder & dsl,uint32_t setIndex) const4834 void DescriptorBufferTestInstance::pushDescriptorSet(VkCommandBuffer cmdBuf, VkPipelineBindPoint bindPoint,
4835                                                      const DescriptorSetLayoutHolder &dsl, uint32_t setIndex) const
4836 {
4837     std::vector<PushDescriptorData> descriptorData(dsl.bindings.size()); // Allocate empty elements upfront
4838     std::vector<VkWriteDescriptorSet> descriptorWrites;
4839     std::vector<VkWriteDescriptorSetAccelerationStructureKHR> descriptorWritesAccelerationStructures;
4840 
4841     descriptorWrites.reserve(dsl.bindings.size());
4842     descriptorWritesAccelerationStructures.reserve(dsl.bindings.size());
4843 
4844     // Fill in the descriptor data structure. It can be used by the regular and templated update path.
4845 
4846     for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
4847     {
4848         const auto &binding = dsl.bindings[bindingIndex];
4849 
4850         VkWriteDescriptorSet write = initVulkanStructure();
4851         write.dstSet               = VK_NULL_HANDLE; // ignored with push descriptors
4852         write.dstBinding           = bindingIndex;
4853         write.dstArrayElement      = 0;
4854         write.descriptorCount      = binding.descriptorCount;
4855         write.descriptorType       = binding.descriptorType;
4856 
4857         for (uint32_t arrayIndex = 0; arrayIndex < write.descriptorCount; ++arrayIndex)
4858         {
4859             DE_ASSERT(binding.perBindingResourceIndex[arrayIndex] != INDEX_INVALID);
4860 
4861             if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
4862                 (binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER))
4863             {
4864                 const auto &bufferResource = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).buffer;
4865 
4866                 auto pInfo    = &descriptorData[bindingIndex].bufferInfos[arrayIndex];
4867                 pInfo->buffer = *bufferResource.buffer;
4868                 pInfo->offset = 0;
4869                 pInfo->range  = bufferResource.size;
4870 
4871                 if (arrayIndex == 0)
4872                 {
4873                     write.pBufferInfo = pInfo;
4874                 }
4875             }
4876             else if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
4877                      (binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER))
4878             {
4879                 const auto &bufferViewResource =
4880                     (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).bufferView;
4881 
4882                 auto pBufferView = &descriptorData[bindingIndex].texelBufferViews[arrayIndex];
4883                 *pBufferView     = *bufferViewResource;
4884 
4885                 if (arrayIndex == 0)
4886                 {
4887                     write.pTexelBufferView = pBufferView;
4888                 }
4889             }
4890             else if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
4891                      (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
4892                      (binding.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
4893                      (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
4894                      (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER))
4895             {
4896                 const auto &imageResource   = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).image;
4897                 const auto &samplerResource = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).sampler;
4898 
4899                 // Dereferencing unused resources will return null handles, so we can treat all these descriptors uniformly.
4900 
4901                 auto pInfo         = &descriptorData[bindingIndex].imageInfos[arrayIndex];
4902                 pInfo->imageView   = *imageResource.imageView;
4903                 pInfo->imageLayout = imageResource.layout;
4904                 pInfo->sampler     = *samplerResource;
4905 
4906                 if (arrayIndex == 0)
4907                 {
4908                     write.pImageInfo = pInfo;
4909                 }
4910             }
4911             else if (binding.descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR)
4912             {
4913                 const ResourceHolder &resources = **m_resources[binding.perBindingResourceIndex[arrayIndex]];
4914                 const VkAccelerationStructureKHR *accelerationStructurePtr = resources.rtTlas.get()->getPtr();
4915 
4916                 DE_ASSERT(accelerationStructurePtr != nullptr && *accelerationStructurePtr != VK_NULL_HANDLE);
4917 
4918                 descriptorData[bindingIndex].accelerationStructures[arrayIndex] = *accelerationStructurePtr;
4919 
4920                 if (arrayIndex == 0)
4921                 {
4922                     VkWriteDescriptorSetAccelerationStructureKHR descriptorWritesAccelerationStructure =
4923                         initVulkanStructure();
4924 
4925                     descriptorWritesAccelerationStructure.accelerationStructureCount = write.descriptorCount;
4926                     descriptorWritesAccelerationStructure.pAccelerationStructures =
4927                         descriptorData[bindingIndex].accelerationStructures;
4928 
4929                     descriptorWritesAccelerationStructures.emplace_back(descriptorWritesAccelerationStructure);
4930 
4931                     write.pNext =
4932                         &descriptorWritesAccelerationStructures[descriptorWritesAccelerationStructures.size() - 1];
4933                 }
4934             }
4935             else
4936             {
4937                 TCU_THROW(InternalError, "Not implemented");
4938             }
4939         }
4940 
4941         if (m_params.variant == TestVariant::PUSH_DESCRIPTOR)
4942         {
4943             descriptorWrites.emplace_back(write);
4944         }
4945     }
4946 
4947     if (m_params.variant == TestVariant::PUSH_DESCRIPTOR)
4948     {
4949         if (m_params.commands2)
4950         {
4951             vk::VkPushDescriptorSetInfoKHR pushDescriptorSetInfo = {
4952                 VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR, // VkStructureType sType;
4953                 nullptr,                                        // const void* pNext;
4954                 (VkShaderStageFlags)m_params.stage,             // VkShaderStageFlags stageFlags;
4955                 *m_pipelineLayout,                              // VkPipelineLayout layout;
4956                 setIndex,                                       // uint32_t set;
4957                 u32(descriptorWrites.size()),                   // uint32_t descriptorWriteCount;
4958                 descriptorWrites.data()                         // const VkWriteDescriptorSet* pDescriptorWrites;
4959             };
4960             m_deviceInterface->cmdPushDescriptorSet2(cmdBuf, &pushDescriptorSetInfo);
4961         }
4962         else
4963         {
4964             m_deviceInterface->cmdPushDescriptorSet(cmdBuf, bindPoint, *m_pipelineLayout, setIndex,
4965                                                     u32(descriptorWrites.size()), descriptorWrites.data());
4966         }
4967     }
4968     else if (m_params.variant == TestVariant::PUSH_TEMPLATE)
4969     {
4970         std::vector<VkDescriptorUpdateTemplateEntry> updateEntries(descriptorData.size()); // preallocate
4971 
4972         const auto dataBasePtr = reinterpret_cast<uint8_t *>(descriptorData.data());
4973 
4974         for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
4975         {
4976             const auto &binding = dsl.bindings[bindingIndex];
4977             const auto &data    = descriptorData[bindingIndex];
4978 
4979             auto &entry           = updateEntries[bindingIndex];
4980             entry.dstBinding      = binding.binding;
4981             entry.dstArrayElement = 0;
4982             entry.descriptorCount = binding.descriptorCount;
4983             entry.descriptorType  = binding.descriptorType;
4984 
4985             switch (binding.descriptorType)
4986             {
4987             case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
4988             case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
4989                 entry.offset = basePtrOffsetOf(dataBasePtr, data.bufferInfos);
4990                 entry.stride = sizeof(data.bufferInfos[0]);
4991                 break;
4992 
4993             case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
4994             case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
4995                 entry.offset = basePtrOffsetOf(dataBasePtr, data.texelBufferViews);
4996                 entry.stride = sizeof(data.texelBufferViews[0]);
4997                 break;
4998 
4999             case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
5000             case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
5001             case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
5002             case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
5003             case VK_DESCRIPTOR_TYPE_SAMPLER:
5004                 entry.offset = basePtrOffsetOf(dataBasePtr, data.imageInfos);
5005                 entry.stride = sizeof(data.imageInfos[0]);
5006                 break;
5007 
5008             case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
5009                 entry.offset = basePtrOffsetOf(dataBasePtr, data.accelerationStructures);
5010                 entry.stride = sizeof(data.accelerationStructures[0]);
5011                 break;
5012 
5013             default:
5014                 DE_ASSERT(0);
5015                 break;
5016             }
5017         }
5018 
5019         VkDescriptorUpdateTemplateCreateInfo createInfo = initVulkanStructure();
5020         createInfo.templateType                         = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
5021         createInfo.descriptorSetLayout                  = *dsl.layout;
5022         createInfo.pipelineBindPoint                    = bindPoint;
5023         createInfo.pipelineLayout                       = *m_pipelineLayout;
5024         createInfo.set                                  = setIndex;
5025         createInfo.descriptorUpdateEntryCount           = u32(updateEntries.size());
5026         createInfo.pDescriptorUpdateEntries             = updateEntries.data();
5027 
5028         auto descriptorUpdateTemplate = createDescriptorUpdateTemplate(*m_deviceInterface, *m_device, &createInfo);
5029 
5030         if (m_params.commands2)
5031         {
5032             vk::VkPushDescriptorSetWithTemplateInfoKHR pushDescriptorSetWithTemplateInfo = {
5033                 VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, // VkStructureType sType;
5034                 nullptr,                                                      // const void* pNext;
5035                 *descriptorUpdateTemplate, // VkDescriptorUpdateTemplate descriptorUpdateTemplate;
5036                 *m_pipelineLayout,         // VkPipelineLayout layout;
5037                 setIndex,                  // uint32_t set;
5038                 dataBasePtr                // const void* pData;
5039             };
5040             m_deviceInterface->cmdPushDescriptorSetWithTemplate2(cmdBuf, &pushDescriptorSetWithTemplateInfo);
5041         }
5042         else
5043         {
5044             m_deviceInterface->cmdPushDescriptorSetWithTemplate(cmdBuf, *descriptorUpdateTemplate, *m_pipelineLayout,
5045                                                                 setIndex, dataBasePtr);
5046         }
5047     }
5048 }
5049 
5050 // Perform the test accoring to the parameters. At high level, all tests perform these steps:
5051 //
5052 // - Create a new device and queues, query extension properties.
5053 // - Fill descriptor set layouts and bindings, based on SimpleBinding's.
5054 // - Create samplers, if needed. Set immutable samplers in bindings.
5055 // - Create descriptor set layouts.
5056 //   - If mutable descriptor types are used, it affects: descriptor size and offset and descriptor set layout creation.
5057 //     However, the rest of the logic is largely unchanged and refers to the specific descriptor type used at runtime.
5058 // - Create descriptor buffers.
5059 // - Iterate over all bindings to:
5060 //   - Create their resources (images, buffers) and initialize them
5061 //   - Write bindings to descriptor buffer memory
5062 //   - Fix combined image samplers for arrayed bindings (if applicable)
5063 // - Create the pipeline layout, shaders, and the pipeline
5064 // - Create the command buffer and record the commands (barriers omitted for brevity):
5065 //   - Bind the pipeline and the descriptor buffers
5066 //   - Upload descriptor buffer data (with staged uploads)
5067 //   - Upload image data (if images are used)
5068 //   - Push descriptors (if used)
5069 //   - Dispatch or draw
5070 //   - Submit the commands
5071 //   - Map the result buffer to a host pointer
5072 //   - Verify the result and log diagnostic on a failure
5073 //
5074 // Verification logic is very simple.
5075 //
5076 // Each successful binding read will increment the result counter. If the shader got an unexpected value, the counter
5077 // will be less than expected. Additionally, the first failed set/binding/array index will be recorded.
5078 //
5079 // With capture/replay tests, iterate() will be called twice, splitting the test into capture and replay passes.
5080 // The capture pass saves the opaque data, while the replay pass uses it and compares the results.
5081 //
iterate()5082 tcu::TestStatus DescriptorBufferTestInstance::iterate()
5083 {
5084     DE_ASSERT(m_params.bufferBindingCount <= m_descriptorBufferProperties.maxDescriptorBufferBindings);
5085 
5086     const auto &vk = *m_deviceInterface;
5087 
5088     if (m_testIteration == 0)
5089     {
5090         uint32_t currentSet = INDEX_INVALID;
5091 
5092         uint32_t inlineUniformSize = 0u;
5093 
5094         for (const auto &sb : m_simpleBindings)
5095         {
5096             if ((currentSet == INDEX_INVALID) || (currentSet < sb.set))
5097             {
5098                 currentSet = sb.set;
5099 
5100                 addDescriptorSetLayout();
5101             }
5102 
5103             auto &dsl                     = **m_descriptorSetLayouts.back();
5104             VkShaderStageFlags stageFlags = sb.isRayTracingAS ?
5105                                                 static_cast<VkShaderStageFlags>(VK_SHADER_STAGE_RAYGEN_BIT_KHR) :
5106                                                 static_cast<VkShaderStageFlags>(0u);
5107 
5108             Binding binding{};
5109             binding.binding              = sb.binding;
5110             binding.descriptorType       = sb.type;
5111             binding.stageFlags           = m_params.stage | stageFlags;
5112             binding.inputAttachmentIndex = sb.inputAttachmentIndex;
5113             binding.isResultBuffer       = sb.isResultBuffer;
5114             binding.isRayTracingAS       = sb.isRayTracingAS;
5115             binding.isMutableType        = (m_params.variant == TestVariant::MUTABLE_DESCRIPTOR_TYPE) &&
5116                                     sb.type != VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR &&
5117                                     maskCheck(m_params.mutableDescriptorTypes, sb.type);
5118 
5119             if (sb.type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
5120             {
5121                 binding.descriptorCount = sizeof(uint32_t) * ConstInlineBlockDwords;
5122                 inlineUniformSize += binding.descriptorCount;
5123             }
5124             else
5125             {
5126                 binding.descriptorCount = sb.count;
5127             }
5128 
5129             if ((sb.type == VK_DESCRIPTOR_TYPE_SAMPLER) || (sb.type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
5130             {
5131                 if (sb.isEmbeddedImmutableSampler)
5132                 {
5133                     dsl.hasEmbeddedImmutableSamplers = true;
5134                 }
5135             }
5136 
5137             if (m_params.isPushDescriptorTest() &&
5138                 (m_params.pushDescriptorSetIndex == (m_descriptorSetLayouts.size() - 1)))
5139             {
5140                 dsl.usePushDescriptors = true;
5141             }
5142 
5143             dsl.bindings.emplace_back(binding);
5144         }
5145 
5146         const VkPhysicalDeviceVulkan13Properties &vulkan13properties = m_context.getDeviceVulkan13Properties();
5147         if (m_context.getUsedApiVersion() >= VK_API_VERSION_1_3 &&
5148             inlineUniformSize > vulkan13properties.maxInlineUniformTotalSize)
5149         {
5150             TCU_THROW(NotSupportedError, "Test require more inline uniform total size among all stages. Provided " +
5151                                              de::toString(vulkan13properties.maxInlineUniformTotalSize));
5152         }
5153     }
5154 
5155     // We create samplers before creating the descriptor set layouts, in case we need to use
5156     // immutable (or embedded) samplers.
5157 
5158     for (uint32_t setIndex = 0; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
5159     {
5160         auto &dsl = **m_descriptorSetLayouts[setIndex];
5161 
5162         for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
5163         {
5164             auto &binding = dsl.bindings[bindingIndex];
5165 
5166             if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
5167                 (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
5168             {
5169                 for (uint32_t arrayIndex = 0; arrayIndex < binding.descriptorCount; ++arrayIndex)
5170                 {
5171                     if (binding.perBindingResourceIndex[arrayIndex] == INDEX_INVALID)
5172                     {
5173                         binding.perBindingResourceIndex[arrayIndex] = addResource();
5174                     }
5175 
5176                     auto &resources         = **m_resources[binding.perBindingResourceIndex[arrayIndex]];
5177                     auto &captureReplayData = resources.captureReplay.samplerData;
5178 
5179                     if (m_params.variant == TestVariant::YCBCR_SAMPLER)
5180                     {
5181                         VkSamplerYcbcrConversionCreateInfo convCreateInfo = initVulkanStructure();
5182                         convCreateInfo.format                             = m_imageColorFormat;
5183                         convCreateInfo.ycbcrModel                  = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
5184                         convCreateInfo.ycbcrRange                  = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
5185                         convCreateInfo.components                  = ComponentMappingIdentity;
5186                         convCreateInfo.xChromaOffset               = VK_CHROMA_LOCATION_COSITED_EVEN;
5187                         convCreateInfo.yChromaOffset               = VK_CHROMA_LOCATION_COSITED_EVEN;
5188                         convCreateInfo.chromaFilter                = VK_FILTER_NEAREST;
5189                         convCreateInfo.forceExplicitReconstruction = VK_FALSE;
5190 
5191                         resources.samplerYcbcrConversion = createSamplerYcbcrConversion(vk, *m_device, &convCreateInfo);
5192                     }
5193 
5194                     // Use CLAMP_TO_BORDER to verify that sampling outside the image will make use of the sampler's
5195                     // properties. The border color used must match the one in glslOutputVerification().
5196 
5197                     VkSamplerCreateInfo createInfo     = initVulkanStructure();
5198                     createInfo.magFilter               = VK_FILTER_NEAREST;
5199                     createInfo.minFilter               = VK_FILTER_NEAREST;
5200                     createInfo.mipmapMode              = VK_SAMPLER_MIPMAP_MODE_NEAREST;
5201                     createInfo.addressModeU            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
5202                     createInfo.addressModeV            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
5203                     createInfo.addressModeW            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
5204                     createInfo.mipLodBias              = 0.0f;
5205                     createInfo.anisotropyEnable        = VK_FALSE;
5206                     createInfo.maxAnisotropy           = 1.0f;
5207                     createInfo.compareEnable           = VK_FALSE;
5208                     createInfo.compareOp               = VK_COMPARE_OP_NEVER;
5209                     createInfo.minLod                  = 0.0;
5210                     createInfo.maxLod                  = 0.0;
5211                     createInfo.borderColor             = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
5212                     createInfo.unnormalizedCoordinates = VK_FALSE;
5213 
5214                     VkSamplerCustomBorderColorCreateInfoEXT customBorderColorInfo = initVulkanStructure();
5215                     VkSamplerYcbcrConversionInfo samplerYcbcrConvInfo             = initVulkanStructure();
5216 
5217                     const void **nextPtr = &createInfo.pNext;
5218 
5219                     if (m_params.variant == TestVariant::YCBCR_SAMPLER)
5220                     {
5221                         createInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
5222                         createInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
5223                         createInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
5224 
5225                         samplerYcbcrConvInfo.conversion = *resources.samplerYcbcrConversion;
5226 
5227                         addToChainVulkanStructure(&nextPtr, samplerYcbcrConvInfo);
5228                     }
5229 
5230                     if (m_params.subcase == SubCase::CAPTURE_REPLAY_CUSTOM_BORDER_COLOR)
5231                     {
5232                         createInfo.borderColor = VK_BORDER_COLOR_INT_CUSTOM_EXT;
5233 
5234                         customBorderColorInfo.format            = VK_FORMAT_R32_UINT;
5235                         customBorderColorInfo.customBorderColor = makeClearValueColorU32(2, 0, 0, 1).color;
5236 
5237                         addToChainVulkanStructure(&nextPtr, customBorderColorInfo);
5238                     }
5239 
5240                     if (isCaptureDescriptor(VK_DESCRIPTOR_TYPE_SAMPLER) ||
5241                         isCaptureDescriptor(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
5242                     {
5243                         createInfo.flags |= VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
5244 
5245                         resources.sampler = createSampler(vk, *m_device, &createInfo);
5246 
5247                         VkSamplerCaptureDescriptorDataInfoEXT info = initVulkanStructure();
5248                         info.sampler                               = *resources.sampler;
5249 
5250                         DE_ASSERT(captureReplayData.empty());
5251                         captureReplayData.resize(m_descriptorBufferProperties.samplerCaptureReplayDescriptorDataSize);
5252 
5253                         VK_CHECK(m_deviceInterface->getSamplerOpaqueCaptureDescriptorDataEXT(*m_device, &info,
5254                                                                                              captureReplayData.data()));
5255                     }
5256                     else if (isReplayDescriptor(VK_DESCRIPTOR_TYPE_SAMPLER) ||
5257                              isReplayDescriptor(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
5258                     {
5259                         reset(resources.sampler);
5260 
5261                         VkOpaqueCaptureDescriptorDataCreateInfoEXT info = initVulkanStructure();
5262                         info.opaqueCaptureDescriptorData                = captureReplayData.data();
5263 
5264                         createInfo.flags |= VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
5265 
5266                         addToChainVulkanStructure(&nextPtr, info);
5267 
5268                         resources.sampler = createSampler(vk, *m_device, &createInfo);
5269                     }
5270                     else if (m_testIteration == 0)
5271                     {
5272                         resources.sampler = createSampler(vk, *m_device, &createInfo);
5273                     }
5274                 }
5275             }
5276         }
5277     }
5278 
5279     if ((m_params.variant == TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS) ||
5280         (m_params.subcase == SubCase::IMMUTABLE_SAMPLERS) || (m_params.variant == TestVariant::YCBCR_SAMPLER))
5281     {
5282         // Patch immutable sampler pointers, now that all memory has been allocated and pointers won't move.
5283 
5284         for (uint32_t setIndex = 0; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
5285         {
5286             auto &dsl = **m_descriptorSetLayouts[setIndex];
5287 
5288             for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
5289             {
5290                 auto &binding = dsl.bindings[bindingIndex];
5291 
5292                 for (uint32_t resourceIndex = 0; resourceIndex < DE_LENGTH_OF_ARRAY(binding.perBindingResourceIndex);
5293                      ++resourceIndex)
5294                 {
5295                     if (binding.perBindingResourceIndex[resourceIndex] != INDEX_INVALID)
5296                     {
5297                         const auto &resources = **m_resources[binding.perBindingResourceIndex[resourceIndex]];
5298 
5299                         if (resources.sampler)
5300                         {
5301                             DE_ASSERT(resourceIndex < DE_LENGTH_OF_ARRAY(binding.immutableSamplers));
5302 
5303                             binding.immutableSamplers[resourceIndex] = *resources.sampler;
5304                         }
5305                     }
5306                 }
5307             }
5308         }
5309     }
5310 
5311     if (m_testIteration == 0)
5312     {
5313         createDescriptorSetLayouts();
5314         createDescriptorBuffers();
5315     }
5316 
5317     for (uint32_t setIndex = 0; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
5318     {
5319         auto &dsl = **m_descriptorSetLayouts[setIndex];
5320 
5321         if (dsl.hasEmbeddedImmutableSamplers)
5322         {
5323             // Embedded samplers are not written to the descriptor buffer directly.
5324             continue;
5325         }
5326 
5327         for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
5328         {
5329             auto &binding = dsl.bindings[bindingIndex];
5330 
5331             // The descriptor bindings are initialized in two situations:
5332             // 1. in the first test iteration (which is also the capture pass of capture/replay test)
5333             // 2. in the replay pass, for the binding with the matching descriptor type
5334             //
5335             if ((m_testIteration == 0) ||
5336                 (binding.isTestableDescriptor() && m_params.isCaptureReplayDescriptor(binding.descriptorType)))
5337             {
5338                 initializeBinding(dsl, setIndex, binding);
5339             }
5340         }
5341     }
5342 
5343     {
5344         VkPipelineLayoutCreateInfo createInfo = initVulkanStructure();
5345         const auto dslCopy                    = getDescriptorSetLayouts(m_descriptorSetLayouts);
5346         createInfo.setLayoutCount             = u32(dslCopy.size());
5347         createInfo.pSetLayouts                = dslCopy.data();
5348 
5349         m_pipelineLayout = createPipelineLayout(vk, *m_device, &createInfo);
5350     }
5351 
5352     if (m_params.isCompute())
5353     {
5354         const auto shaderModule = createShaderModule(vk, *m_device, getShaderBinary(VK_SHADER_STAGE_COMPUTE_BIT), 0u);
5355 
5356         const VkPipelineShaderStageCreateInfo pipelineShaderStageParams{
5357             VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, // VkStructureType sType;
5358             nullptr,                                             // const void* pNext;
5359             0u,                                                  // VkPipelineShaderStageCreateFlags flags;
5360             VK_SHADER_STAGE_COMPUTE_BIT,                         // VkShaderStageFlagBits stage;
5361             *shaderModule,                                       // VkShaderModule module;
5362             "main",                                              // const char* pName;
5363             nullptr,                                             // const VkSpecializationInfo* pSpecializationInfo;
5364         };
5365         VkComputePipelineCreateInfo pipelineCreateInfo{
5366             VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, // VkStructureType sType;
5367             nullptr,                                        // const void* pNext;
5368             VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT,   // VkPipelineCreateFlags flags;
5369             pipelineShaderStageParams,                      // VkPipelineShaderStageCreateInfo stage;
5370             *m_pipelineLayout,                              // VkPipelineLayout layout;
5371             VK_NULL_HANDLE,                                 // VkPipeline basePipelineHandle;
5372             0,                                              // int32_t basePipelineIndex;
5373         };
5374 
5375         vk::VkPipelineCreateFlags2CreateInfoKHR pipelineFlags2CreateInfo = vk::initVulkanStructure();
5376         if (m_params.useMaintenance5)
5377         {
5378             pipelineFlags2CreateInfo.flags = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT;
5379             pipelineCreateInfo.pNext       = &pipelineFlags2CreateInfo;
5380             pipelineCreateInfo.flags       = 0;
5381         }
5382 
5383         m_pipeline = createComputePipeline(vk, *m_device, VK_NULL_HANDLE, &pipelineCreateInfo);
5384     }
5385     else if (m_params.isRayTracing())
5386     {
5387         createRayTracingPipeline();
5388     }
5389     else
5390     {
5391         createGraphicsPipeline();
5392     }
5393 
5394     {
5395         auto cmdPool            = makeCommandPool(vk, *m_device, m_queueFamilyIndex);
5396         auto cmdBuf             = allocateCommandBuffer(vk, *m_device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
5397         const auto bindPoint    = m_params.isCompute()    ? VK_PIPELINE_BIND_POINT_COMPUTE :
5398                                   m_params.isRayTracing() ? VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR :
5399                                   m_params.isGraphics()   ? VK_PIPELINE_BIND_POINT_GRAPHICS :
5400                                                             VK_PIPELINE_BIND_POINT_MAX_ENUM;
5401         const auto dstStageMask = m_params.isCompute()    ? VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT :
5402                                   m_params.isRayTracing() ? VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR :
5403                                   m_params.isGraphics()   ? VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT :
5404                                                             VK_PIPELINE_STAGE_2_NONE;
5405         const auto dstStageMaskUp =
5406             m_params.isCompute()    ? VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT :
5407             m_params.isRayTracing() ? VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR :
5408             m_params.isGraphics()   ? VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT :
5409                                       VK_PIPELINE_STAGE_2_NONE;
5410 
5411         beginCommandBuffer(vk, *cmdBuf);
5412 
5413         vk.cmdBindPipeline(*cmdBuf, bindPoint, *m_pipeline);
5414 
5415         bindDescriptorBuffers(*cmdBuf, bindPoint);
5416 
5417         // Check if we need any staged descriptor set uploads or push descriptors.
5418 
5419         for (uint32_t setIndex = 0; setIndex < m_descriptorSetLayouts.size(); ++setIndex)
5420         {
5421             const auto &dsl = **m_descriptorSetLayouts[setIndex];
5422 
5423             if (dsl.usePushDescriptors)
5424             {
5425                 pushDescriptorSet(*cmdBuf, bindPoint, dsl, setIndex);
5426             }
5427             else if (dsl.stagingBufferOffset != OFFSET_UNUSED)
5428             {
5429                 VkBufferCopy copy{};
5430                 copy.srcOffset = dsl.stagingBufferOffset;
5431                 copy.dstOffset = dsl.bufferOffset;
5432                 copy.size      = dsl.sizeOfLayout;
5433 
5434                 VkBuffer descriptorBuffer = *m_descriptorBuffers[dsl.bufferIndex]->buffer;
5435 
5436                 vk.cmdCopyBuffer(*cmdBuf, *m_descriptorStagingBuffer.buffer, descriptorBuffer,
5437                                  1, // copy regions
5438                                  &copy);
5439 
5440                 VkBufferMemoryBarrier2 barrier = initVulkanStructure();
5441                 barrier.srcStageMask           = VK_PIPELINE_STAGE_2_COPY_BIT;
5442                 barrier.srcAccessMask          = VK_ACCESS_2_TRANSFER_WRITE_BIT;
5443                 barrier.dstStageMask           = dstStageMask;
5444                 barrier.dstAccessMask          = VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT;
5445                 barrier.srcQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5446                 barrier.dstQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5447                 barrier.buffer                 = descriptorBuffer;
5448                 barrier.offset                 = 0;
5449                 barrier.size                   = VK_WHOLE_SIZE;
5450 
5451                 VkDependencyInfo depInfo         = initVulkanStructure();
5452                 depInfo.bufferMemoryBarrierCount = 1;
5453                 depInfo.pBufferMemoryBarriers    = &barrier;
5454 
5455                 vk.cmdPipelineBarrier2(*cmdBuf, &depInfo);
5456             }
5457         }
5458 
5459         // Upload image data
5460 
5461         for (uint32_t setIndex = 0; setIndex < u32(m_descriptorSetLayouts.size()); ++setIndex)
5462         {
5463             const auto &dsl = **m_descriptorSetLayouts[setIndex];
5464 
5465             for (uint32_t bindingIndex = 0; bindingIndex < u32(dsl.bindings.size()); ++bindingIndex)
5466             {
5467                 const auto &binding = dsl.bindings[bindingIndex];
5468 
5469                 if ((binding.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
5470                     (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
5471                     (binding.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
5472                     (binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
5473                 {
5474                     for (uint32_t arrayIndex = 0; arrayIndex < binding.descriptorCount; ++arrayIndex)
5475                     {
5476                         // Need to upload the image data from a staging buffer
5477                         const auto &dstImage  = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).image;
5478                         const auto &srcBuffer = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]).buffer;
5479 
5480                         {
5481                             VkImageMemoryBarrier2 barrier = initVulkanStructure();
5482                             barrier.srcStageMask          = VK_PIPELINE_STAGE_2_NONE;
5483                             barrier.srcAccessMask         = VK_ACCESS_2_NONE;
5484                             barrier.dstStageMask          = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
5485                             barrier.dstAccessMask         = VK_ACCESS_2_TRANSFER_WRITE_BIT;
5486                             barrier.oldLayout             = VK_IMAGE_LAYOUT_UNDEFINED;
5487                             barrier.newLayout             = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
5488                             barrier.srcQueueFamilyIndex   = VK_QUEUE_FAMILY_IGNORED;
5489                             barrier.dstQueueFamilyIndex   = VK_QUEUE_FAMILY_IGNORED;
5490                             barrier.image                 = *dstImage.image;
5491                             barrier.subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
5492 
5493                             VkDependencyInfo depInfo        = initVulkanStructure();
5494                             depInfo.imageMemoryBarrierCount = 1;
5495                             depInfo.pImageMemoryBarriers    = &barrier;
5496 
5497                             vk.cmdPipelineBarrier2(*cmdBuf, &depInfo);
5498                         }
5499 
5500                         if (m_imageColorFormat == VK_FORMAT_R32_UINT)
5501                         {
5502                             VkBufferImageCopy region{};
5503                             // Use default buffer settings
5504                             region.imageSubresource = makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1);
5505                             region.imageOffset      = makeOffset3D(0, 0, 0);
5506                             region.imageExtent = makeExtent3D(m_renderArea.extent.width, m_renderArea.extent.height, 1);
5507 
5508                             vk.cmdCopyBufferToImage(*cmdBuf, *srcBuffer.buffer, *dstImage.image,
5509                                                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
5510                                                     1, // region count
5511                                                     &region);
5512                         }
5513                         else if (m_imageColorFormat == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM)
5514                         {
5515                             std::vector<VkBufferImageCopy> regions(2);
5516 
5517                             regions[0].bufferOffset = 0;
5518                             regions[0].imageSubresource =
5519                                 makeImageSubresourceLayers(VK_IMAGE_ASPECT_PLANE_0_BIT, 0, 0, 1);
5520                             regions[0].imageOffset = makeOffset3D(0, 0, 0);
5521                             regions[0].imageExtent =
5522                                 makeExtent3D(m_renderArea.extent.width, m_renderArea.extent.height, 1);
5523 
5524                             regions[1].bufferOffset = m_renderArea.extent.width * m_renderArea.extent.height;
5525                             regions[1].imageSubresource =
5526                                 makeImageSubresourceLayers(VK_IMAGE_ASPECT_PLANE_1_BIT, 0, 0, 1);
5527                             regions[1].imageOffset = makeOffset3D(0, 0, 0);
5528                             regions[1].imageExtent =
5529                                 makeExtent3D(m_renderArea.extent.width / 2, m_renderArea.extent.height / 2, 1);
5530 
5531                             vk.cmdCopyBufferToImage(*cmdBuf, *srcBuffer.buffer, *dstImage.image,
5532                                                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, u32(regions.size()),
5533                                                     regions.data());
5534                         }
5535                         else
5536                         {
5537                             DE_ASSERT(0);
5538                         }
5539 
5540                         {
5541                             VkImageMemoryBarrier2 barrier = initVulkanStructure();
5542                             barrier.srcStageMask          = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
5543                             barrier.srcAccessMask         = VK_ACCESS_2_TRANSFER_WRITE_BIT;
5544                             barrier.dstStageMask          = dstStageMaskUp; // beginning of the shader pipeline
5545                             barrier.dstAccessMask         = VK_ACCESS_2_SHADER_READ_BIT;
5546                             barrier.oldLayout             = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
5547                             barrier.newLayout             = dstImage.layout;
5548                             barrier.srcQueueFamilyIndex   = VK_QUEUE_FAMILY_IGNORED;
5549                             barrier.dstQueueFamilyIndex   = VK_QUEUE_FAMILY_IGNORED;
5550                             barrier.image                 = *dstImage.image;
5551                             barrier.subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
5552 
5553                             VkDependencyInfo depInfo        = initVulkanStructure();
5554                             depInfo.imageMemoryBarrierCount = 1;
5555                             depInfo.pImageMemoryBarriers    = &barrier;
5556 
5557                             vk.cmdPipelineBarrier2(*cmdBuf, &depInfo);
5558                         }
5559                     }
5560                 }
5561                 else if (binding.descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR)
5562                 {
5563                     for (uint32_t arrayIndex = 0; arrayIndex < binding.descriptorCount; ++arrayIndex)
5564                     {
5565                         ResourceHolder &resource = (**m_resources[binding.perBindingResourceIndex[arrayIndex]]);
5566 
5567                         resource.rtBlas->build(*m_deviceInterface, *m_device, *cmdBuf);
5568                         resource.rtTlas->build(*m_deviceInterface, *m_device, *cmdBuf);
5569                     }
5570                 }
5571             }
5572         }
5573 
5574         if (m_params.isCompute())
5575         {
5576             vk.cmdDispatch(*cmdBuf, 1, 1, 1);
5577 
5578             {
5579                 auto &resultBuffer = getResultBuffer();
5580 
5581                 VkBufferMemoryBarrier2 barrier = initVulkanStructure();
5582                 barrier.srcStageMask           = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
5583                 barrier.srcAccessMask          = VK_ACCESS_2_SHADER_WRITE_BIT;
5584                 barrier.dstStageMask           = VK_PIPELINE_STAGE_2_HOST_BIT;
5585                 barrier.dstAccessMask          = VK_ACCESS_2_HOST_READ_BIT;
5586                 barrier.srcQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5587                 barrier.dstQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5588                 barrier.buffer                 = *resultBuffer.buffer;
5589                 barrier.offset                 = 0;
5590                 barrier.size                   = VK_WHOLE_SIZE;
5591 
5592                 VkDependencyInfo depInfo         = initVulkanStructure();
5593                 depInfo.bufferMemoryBarrierCount = 1;
5594                 depInfo.pBufferMemoryBarriers    = &barrier;
5595 
5596                 vk.cmdPipelineBarrier2(*cmdBuf, &depInfo);
5597             }
5598         }
5599         else if (m_params.isRayTracing())
5600         {
5601             cmdTraceRays(vk, *cmdBuf, &m_raygenShaderBindingTableRegion, &m_missShaderBindingTableRegion,
5602                          &m_hitShaderBindingTableRegion, &m_callableShaderBindingTableRegion, 1, 1, 1);
5603 
5604             {
5605                 auto &resultBuffer = getResultBuffer();
5606 
5607                 VkBufferMemoryBarrier2 barrier = initVulkanStructure();
5608                 barrier.srcStageMask           = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR;
5609                 barrier.srcAccessMask          = VK_ACCESS_2_SHADER_WRITE_BIT;
5610                 barrier.dstStageMask           = VK_PIPELINE_STAGE_2_HOST_BIT;
5611                 barrier.dstAccessMask          = VK_ACCESS_2_HOST_READ_BIT;
5612                 barrier.srcQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5613                 barrier.dstQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5614                 barrier.buffer                 = *resultBuffer.buffer;
5615                 barrier.offset                 = 0;
5616                 barrier.size                   = VK_WHOLE_SIZE;
5617 
5618                 VkDependencyInfo depInfo         = initVulkanStructure();
5619                 depInfo.bufferMemoryBarrierCount = 1;
5620                 depInfo.pBufferMemoryBarriers    = &barrier;
5621 
5622                 vk.cmdPipelineBarrier2(*cmdBuf, &depInfo);
5623             }
5624         }
5625         else
5626         {
5627             beginRenderPass(vk, *cmdBuf, *m_renderPass, *m_framebuffer, m_renderArea, tcu::Vec4());
5628 
5629             vk.cmdDraw(*cmdBuf, 6, 1, 0, 0);
5630 
5631             endRenderPass(vk, *cmdBuf);
5632 
5633             // Copy the rendered image to a host-visible buffer.
5634 
5635             {
5636                 VkImageMemoryBarrier2 barrier = initVulkanStructure();
5637                 barrier.srcStageMask          = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
5638                 barrier.srcAccessMask         = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
5639                 barrier.dstStageMask          = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
5640                 barrier.dstAccessMask         = VK_ACCESS_2_TRANSFER_READ_BIT;
5641                 barrier.oldLayout             = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
5642                 barrier.newLayout             = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
5643                 barrier.srcQueueFamilyIndex   = VK_QUEUE_FAMILY_IGNORED;
5644                 barrier.dstQueueFamilyIndex   = VK_QUEUE_FAMILY_IGNORED;
5645                 barrier.image                 = *m_colorImage.image;
5646                 barrier.subresourceRange      = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
5647 
5648                 VkDependencyInfo depInfo        = initVulkanStructure();
5649                 depInfo.imageMemoryBarrierCount = 1;
5650                 depInfo.pImageMemoryBarriers    = &barrier;
5651 
5652                 vk.cmdPipelineBarrier2(*cmdBuf, &depInfo);
5653             }
5654             {
5655                 VkBufferImageCopy region{};
5656                 // Use default buffer settings
5657                 region.imageSubresource = makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1);
5658                 region.imageOffset      = makeOffset3D(0, 0, 0);
5659                 region.imageExtent      = m_colorImage.info.extent;
5660 
5661                 vk.cmdCopyImageToBuffer(*cmdBuf, *m_colorImage.image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
5662                                         *m_colorBuffer.buffer,
5663                                         1, // region count
5664                                         &region);
5665             }
5666             {
5667                 VkBufferMemoryBarrier2 barrier = initVulkanStructure();
5668                 barrier.srcStageMask           = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
5669                 barrier.srcAccessMask          = VK_ACCESS_2_TRANSFER_WRITE_BIT;
5670                 barrier.dstStageMask           = VK_PIPELINE_STAGE_2_HOST_BIT;
5671                 barrier.dstAccessMask          = VK_ACCESS_2_HOST_READ_BIT;
5672                 barrier.srcQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5673                 barrier.dstQueueFamilyIndex    = VK_QUEUE_FAMILY_IGNORED;
5674                 barrier.buffer                 = *m_colorBuffer.buffer;
5675                 barrier.offset                 = 0;
5676                 barrier.size                   = VK_WHOLE_SIZE;
5677 
5678                 VkDependencyInfo depInfo         = initVulkanStructure();
5679                 depInfo.bufferMemoryBarrierCount = 1;
5680                 depInfo.pBufferMemoryBarriers    = &barrier;
5681 
5682                 vk.cmdPipelineBarrier2(*cmdBuf, &depInfo);
5683             }
5684         }
5685 
5686         endCommandBuffer(vk, *cmdBuf);
5687         submitCommandsAndWait(vk, *m_device, m_queue, *cmdBuf);
5688     }
5689 
5690     // Verification
5691     {
5692         const tcu::UVec4 *pResultData = nullptr;
5693 
5694         if (m_params.isCompute() || m_params.isRayTracing())
5695         {
5696             auto &resultBuffer = getResultBuffer();
5697 
5698             invalidateAlloc(vk, *m_device, *resultBuffer.alloc);
5699 
5700             pResultData = static_cast<const tcu::UVec4 *>(resultBuffer.alloc->getHostPtr());
5701         }
5702         else
5703         {
5704             pResultData = static_cast<const tcu::UVec4 *>(m_colorBuffer.alloc->getHostPtr());
5705         }
5706 
5707         const auto actual = pResultData->x();
5708         uint32_t expected = 0;
5709 
5710         for (const auto &sb : m_simpleBindings)
5711         {
5712             if (!(sb.isResultBuffer || sb.isRayTracingAS))
5713             {
5714                 if (m_params.variant == TestVariant::MAX)
5715                 {
5716                     // We test enough (image, sampler) pairs to access each one at least once.
5717                     expected = deMaxu32(m_params.samplerBufferBindingCount, m_params.resourceBufferBindingCount);
5718                 }
5719                 else
5720                 {
5721                     // Uniform blocks/buffers check 4 elements per iteration.
5722                     if (sb.type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
5723                     {
5724                         expected += ConstChecksPerBuffer * 4;
5725                     }
5726                     else if (sb.type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
5727                     {
5728                         expected += ConstChecksPerBuffer * 4 * sb.count;
5729                     }
5730                     else if ((sb.type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
5731                              (sb.type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) ||
5732                              (sb.type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER))
5733                     {
5734                         expected += ConstChecksPerBuffer * sb.count;
5735                     }
5736                     // Samplers are tested implicitly via sampled images
5737                     else if (sb.type != VK_DESCRIPTOR_TYPE_SAMPLER)
5738                     {
5739                         expected += sb.count;
5740                     }
5741                 }
5742             }
5743         }
5744 
5745         if (actual != expected)
5746         {
5747             uint32_t badSet        = 0;
5748             uint32_t badBinding    = 0;
5749             uint32_t badArrayIndex = 0;
5750 
5751             unpackBindingArgs(pResultData->y(), &badSet, &badBinding, &badArrayIndex);
5752 
5753             std::ostringstream msg;
5754             msg << "Wrong value in result buffer. Expected (" << expected << ") but got (" << actual << ").";
5755             msg << " The first wrong binding is (set = " << badSet << ", binding = " << badBinding << ")";
5756 
5757             if (m_params.variant == TestVariant::MAX)
5758             {
5759                 uint32_t badSamplerSet     = 0;
5760                 uint32_t badSamplerBinding = 0;
5761 
5762                 unpackBindingArgs(pResultData->z(), &badSamplerSet, &badSamplerBinding, nullptr);
5763 
5764                 msg << " which used a sampler (set = " << badSamplerSet << ", binding = " << badSamplerBinding << ")";
5765             }
5766             else if (badArrayIndex > 0)
5767             {
5768                 msg << " at array index " << badArrayIndex;
5769             }
5770 
5771             msg << ".";
5772 
5773             return tcu::TestStatus::fail(msg.str());
5774         }
5775     }
5776 
5777     if ((m_params.variant == TestVariant::CAPTURE_REPLAY) && (m_testIteration == 0))
5778     {
5779         // The first pass succeeded, continue to the next one where we verify replay.
5780         ++m_testIteration;
5781 
5782         return tcu::TestStatus::incomplete();
5783     }
5784 
5785     return tcu::TestStatus::pass("Pass");
5786 }
5787 
createInstance(Context & context) const5788 TestInstance *DescriptorBufferTestCase::createInstance(Context &context) const
5789 {
5790     // Currently all tests follow the same basic execution logic.
5791     return new DescriptorBufferTestInstance(context, m_params, m_simpleBindings);
5792 }
5793 
5794 // This simple tests verifies extension properties against the spec limits.
5795 //
testLimits(Context & context)5796 tcu::TestStatus testLimits(Context &context)
5797 {
5798 #define CHECK_MIN_LIMIT(_struct_, _field_, _limit_)               \
5799     if (_struct_._field_ < _limit_)                               \
5800     {                                                             \
5801         TCU_THROW(TestError, #_field_ " is less than " #_limit_); \
5802     }
5803 
5804 // Max implicitly checks nonzero too
5805 #define CHECK_MAX_LIMIT_NON_ZERO(_struct_, _field_, _limit_)         \
5806     if (_struct_._field_ == 0)                                       \
5807     {                                                                \
5808         TCU_THROW(TestError, #_field_ " is 0");                      \
5809     }                                                                \
5810     if (_struct_._field_ > _limit_)                                  \
5811     {                                                                \
5812         TCU_THROW(TestError, #_field_ " is greater than " #_limit_); \
5813     }
5814 
5815 #define CHECK_MAX_LIMIT(_struct_, _field_, _limit_)                  \
5816     if (_struct_._field_ > _limit_)                                  \
5817     {                                                                \
5818         TCU_THROW(TestError, #_field_ " is greater than " #_limit_); \
5819     }
5820 
5821     if (context.isDeviceFunctionalitySupported("VK_EXT_descriptor_buffer"))
5822     {
5823         const auto &features = context.getDescriptorBufferFeaturesEXT();
5824         const auto &props    = context.getDescriptorBufferPropertiesEXT();
5825         const bool hasRT     = context.isDeviceFunctionalitySupported("VK_KHR_ray_tracing_pipeline") ||
5826                            context.isDeviceFunctionalitySupported("VK_KHR_ray_query");
5827         const size_t maxResourceDescriptorSize = std::max(
5828             props.storageImageDescriptorSize,
5829             std::max(props.sampledImageDescriptorSize,
5830                      std::max(props.robustUniformTexelBufferDescriptorSize,
5831                               std::max(props.robustStorageTexelBufferDescriptorSize,
5832                                        std::max(props.robustUniformBufferDescriptorSize,
5833                                                 std::max(props.robustStorageBufferDescriptorSize,
5834                                                          std::max(props.inputAttachmentDescriptorSize,
5835                                                                   std::max(props.accelerationStructureDescriptorSize,
5836                                                                            size_t(0u)))))))));
5837 
5838         DE_ASSERT(features.descriptorBuffer == VK_TRUE);
5839 
5840         // Must be queried directly from the physical device, the structure cached in the context has robustness disabled.
5841         VkPhysicalDeviceFeatures physDeviceFeatures{};
5842         context.getInstanceInterface().getPhysicalDeviceFeatures(context.getPhysicalDevice(), &physDeviceFeatures);
5843 
5844         if (physDeviceFeatures.robustBufferAccess)
5845         {
5846             CHECK_MAX_LIMIT(props, robustUniformTexelBufferDescriptorSize, 256);
5847             CHECK_MAX_LIMIT(props, robustStorageTexelBufferDescriptorSize, 256);
5848             CHECK_MAX_LIMIT(props, robustUniformBufferDescriptorSize, 256);
5849             CHECK_MAX_LIMIT(props, robustStorageBufferDescriptorSize, 256);
5850         }
5851 
5852         if (features.descriptorBufferCaptureReplay)
5853         {
5854             CHECK_MAX_LIMIT(props, bufferCaptureReplayDescriptorDataSize, 64);
5855             CHECK_MAX_LIMIT(props, imageCaptureReplayDescriptorDataSize, 64);
5856             CHECK_MAX_LIMIT(props, imageViewCaptureReplayDescriptorDataSize, 64);
5857             CHECK_MAX_LIMIT(props, samplerCaptureReplayDescriptorDataSize, 64);
5858 
5859             if (hasRT)
5860             {
5861                 CHECK_MAX_LIMIT(props, accelerationStructureCaptureReplayDescriptorDataSize, 64);
5862             }
5863         }
5864 
5865         if (hasRT)
5866         {
5867             CHECK_MAX_LIMIT_NON_ZERO(props, accelerationStructureDescriptorSize, 256);
5868         }
5869 
5870         CHECK_MAX_LIMIT_NON_ZERO(props, descriptorBufferOffsetAlignment, 256);
5871 
5872         CHECK_MIN_LIMIT(props, maxDescriptorBufferBindings, 3);
5873         CHECK_MIN_LIMIT(props, maxResourceDescriptorBufferBindings, 1);
5874         CHECK_MIN_LIMIT(props, maxSamplerDescriptorBufferBindings, 1);
5875         CHECK_MIN_LIMIT(props, maxEmbeddedImmutableSamplerBindings, 1);
5876         CHECK_MIN_LIMIT(props, maxEmbeddedImmutableSamplers, 2032);
5877 
5878         CHECK_MAX_LIMIT_NON_ZERO(props, samplerDescriptorSize, 256);
5879         CHECK_MAX_LIMIT_NON_ZERO(props, combinedImageSamplerDescriptorSize, 256);
5880         CHECK_MAX_LIMIT_NON_ZERO(props, sampledImageDescriptorSize, 256);
5881         CHECK_MAX_LIMIT_NON_ZERO(props, storageImageDescriptorSize, 256);
5882         CHECK_MAX_LIMIT_NON_ZERO(props, uniformTexelBufferDescriptorSize, 256);
5883         CHECK_MAX_LIMIT_NON_ZERO(props, storageTexelBufferDescriptorSize, 256);
5884         CHECK_MAX_LIMIT_NON_ZERO(props, uniformBufferDescriptorSize, 256);
5885         CHECK_MAX_LIMIT_NON_ZERO(props, storageBufferDescriptorSize, 256);
5886         CHECK_MAX_LIMIT(props, inputAttachmentDescriptorSize, 256);
5887 
5888         CHECK_MIN_LIMIT(props, maxSamplerDescriptorBufferRange, ((1u << 11) * props.samplerDescriptorSize));
5889         CHECK_MIN_LIMIT(props, maxResourceDescriptorBufferRange,
5890                         (((1u << 20) - (1u << 15)) * maxResourceDescriptorSize));
5891         CHECK_MIN_LIMIT(props, samplerDescriptorBufferAddressSpaceSize, (1u << 27));
5892         CHECK_MIN_LIMIT(props, resourceDescriptorBufferAddressSpaceSize, (1u << 27));
5893         CHECK_MIN_LIMIT(props, descriptorBufferAddressSpaceSize, (1u << 27));
5894 
5895         // The following requirement ensures that for split combined image sampler arrays:
5896         // - there's no unnecessary padding at the end, or
5897         // - there's no risk of overrun (if somehow the sum of image and sampler was greater).
5898 
5899         if ((props.combinedImageSamplerDescriptorSingleArray == VK_FALSE) &&
5900             ((props.sampledImageDescriptorSize + props.samplerDescriptorSize) !=
5901              props.combinedImageSamplerDescriptorSize))
5902         {
5903             return tcu::TestStatus::fail(
5904                 "For combinedImageSamplerDescriptorSingleArray, it is expected that the sampled image size "
5905                 "and the sampler size add up to combinedImageSamplerDescriptorSize.");
5906         }
5907     }
5908     else
5909     {
5910         TCU_THROW(NotSupportedError, "VK_EXT_descriptor_buffer is not supported");
5911     }
5912 
5913     return tcu::TestStatus::pass("Pass");
5914 
5915 #undef CHECK_MIN_LIMIT
5916 #undef CHECK_MAX_LIMIT
5917 #undef CHECK_MAX_LIMIT_NON_ZERO
5918 }
5919 
5920 enum class CaptureReplyTestMode
5921 {
5922     Image = 0,
5923     Sparse_Image,
5924     Buffer,
5925     Sparse_Buffer,
5926 };
5927 
5928 class CaptureReplyTestInstance : public TestInstance
5929 {
5930 public:
5931     CaptureReplyTestInstance(Context &context, CaptureReplyTestMode mode);
5932 
5933     tcu::TestStatus iterate() override;
5934 
5935 protected:
5936     const CaptureReplyTestMode m_mode;
5937 };
5938 
CaptureReplyTestInstance(Context & context,CaptureReplyTestMode mode)5939 CaptureReplyTestInstance::CaptureReplyTestInstance(Context &context, CaptureReplyTestMode mode)
5940     : TestInstance(context)
5941     , m_mode(mode)
5942 {
5943 }
5944 
iterate()5945 tcu::TestStatus CaptureReplyTestInstance::iterate()
5946 {
5947     const auto &vki           = m_context.getInstanceInterface();
5948     const DeviceInterface &vk = m_context.getDeviceInterface();
5949     const VkDevice device     = m_context.getDevice();
5950     auto physicalDevice       = m_context.getPhysicalDevice();
5951     MovePtr<Allocation> allocation;
5952     const auto &dbProperties = m_context.getDescriptorBufferPropertiesEXT();
5953     const bool useSparseImage(m_mode == CaptureReplyTestMode::Sparse_Image);
5954     const bool useSparseBuffer(m_mode == CaptureReplyTestMode::Sparse_Buffer);
5955 
5956     VkMemoryOpaqueCaptureAddressAllocateInfo opaqueCaptureAddressAllocateInfo = initVulkanStructure();
5957     VkMemoryAllocateFlagsInfo allocFlagsInfo = initVulkanStructure(&opaqueCaptureAddressAllocateInfo);
5958     uint64_t opaqueCaptureAddress{};
5959     VkMemoryRequirements memoryRequirements;
5960 
5961     if (useSparseImage || (m_mode == CaptureReplyTestMode::Image))
5962     {
5963         const VkImageCreateFlags sparseFlag =
5964             useSparseImage * (VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT);
5965 
5966         VkImageCreateInfo imageCreateInfo = initVulkanStructure();
5967         imageCreateInfo.flags             = sparseFlag | VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
5968         imageCreateInfo.imageType         = VK_IMAGE_TYPE_2D;
5969         imageCreateInfo.format            = VK_FORMAT_R8G8B8A8_UNORM;
5970         imageCreateInfo.extent            = {16, 16, 1};
5971         imageCreateInfo.mipLevels         = 1;
5972         imageCreateInfo.arrayLayers       = 1;
5973         imageCreateInfo.samples           = VK_SAMPLE_COUNT_1_BIT;
5974         imageCreateInfo.usage             = VK_IMAGE_USAGE_STORAGE_BIT;
5975 
5976         // create image with VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT
5977         auto image(createImage(vk, device, &imageCreateInfo));
5978 
5979         if (!useSparseImage)
5980         {
5981             memoryRequirements = getImageMemoryRequirements(vk, device, *image);
5982             allocFlagsInfo.flags =
5983                 VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT | VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
5984 
5985             // allocate memory with VkMemoryOpaqueCaptureAddressAllocateInfo
5986             allocation = allocateExtended(vki, vk, physicalDevice, device, memoryRequirements,
5987                                           MemoryRequirement::DeviceAddressCaptureReplay, &allocFlagsInfo);
5988 
5989             // get data from vkGetDeviceMemoryOpaqueCaptureAddressKHR
5990             VkDeviceMemoryOpaqueCaptureAddressInfo memoryOpaqueCaptureAddressInfo = initVulkanStructure();
5991             memoryOpaqueCaptureAddressInfo.memory                                 = allocation->getMemory();
5992             opaqueCaptureAddress = vk.getDeviceMemoryOpaqueCaptureAddress(device, &memoryOpaqueCaptureAddressInfo);
5993 
5994             // bind image & memory
5995             VK_CHECK(vk.bindImageMemory(device, *image, allocation->getMemory(), allocation->getOffset()));
5996         }
5997 
5998         VkImageViewCreateInfo imageViewCreateInfo = initVulkanStructure();
5999         imageViewCreateInfo.flags                 = VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
6000         imageViewCreateInfo.image                 = *image;
6001         imageViewCreateInfo.viewType              = VK_IMAGE_VIEW_TYPE_2D;
6002         imageViewCreateInfo.format                = VK_FORMAT_R8G8B8A8_UNORM;
6003         imageViewCreateInfo.components            = makeComponentMappingRGBA();
6004         imageViewCreateInfo.subresourceRange      = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
6005         auto imageView                            = createImageView(vk, device, &imageViewCreateInfo);
6006 
6007         // get data from vkGetImageOpaqueCaptureDescriptorDataEXT
6008         VkImageCaptureDescriptorDataInfoEXT imageCaptureDescriptorDataInfo = initVulkanStructure();
6009         imageCaptureDescriptorDataInfo.image                               = *image;
6010         std::vector<uint8_t> imageCaptureReplayData(
6011             de::max(dbProperties.imageCaptureReplayDescriptorDataSize, std::size_t(64)));
6012         VK_CHECK(vk.getImageOpaqueCaptureDescriptorDataEXT(device, &imageCaptureDescriptorDataInfo,
6013                                                            imageCaptureReplayData.data()));
6014 
6015         // get data from vkGetImageViewOpaqueCaptureDescriptorDataEXT
6016         VkImageViewCaptureDescriptorDataInfoEXT imageViewCaptureDescriptorDataInfo = initVulkanStructure();
6017         imageViewCaptureDescriptorDataInfo.imageView                               = *imageView;
6018         std::vector<uint8_t> imageViewCaptureReplayData(
6019             de::max(dbProperties.imageViewCaptureReplayDescriptorDataSize, std::size_t(64)));
6020         VK_CHECK(vk.getImageViewOpaqueCaptureDescriptorDataEXT(device, &imageViewCaptureDescriptorDataInfo,
6021                                                                imageViewCaptureReplayData.data()));
6022 
6023         // call vkGetDescriptorEXT() with the image and store the write descriptor data
6024         const auto descriptorSize = dbProperties.storageImageDescriptorSize;
6025         std::vector<uint8_t> firstDescriptorData(descriptorSize);
6026         VkDescriptorImageInfo imageInfo{};
6027         imageInfo.imageView                = *imageView;
6028         VkDescriptorGetInfoEXT descGetInfo = initVulkanStructure();
6029         descGetInfo.type                   = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
6030         descGetInfo.data.pStorageImage     = &imageInfo;
6031         vk.getDescriptorEXT(device, &descGetInfo, descriptorSize, firstDescriptorData.data());
6032 
6033         // destroy image and free memory
6034         imageView = Move<VkImageView>();
6035         if (!useSparseImage)
6036             allocation = MovePtr<Allocation>();
6037         image = Move<VkImage>();
6038 
6039         // recreate image with VkOpaqueCaptureDescriptorDataCreateInfoEXT with data from imageCaptureDescriptorDataInfo
6040         VkOpaqueCaptureDescriptorDataCreateInfoEXT opaqueCaptureDescriptorDataCreateInfo = initVulkanStructure();
6041         opaqueCaptureDescriptorDataCreateInfo.opaqueCaptureDescriptorData = imageCaptureReplayData.data();
6042         imageCreateInfo.pNext                                             = &opaqueCaptureDescriptorDataCreateInfo;
6043         image                                                             = createImage(vk, device, &imageCreateInfo);
6044 
6045         if (!useSparseImage)
6046         {
6047             // allocate memory with VkMemoryOpaqueCaptureAddressAllocateInfo with data from opaqueCaptureAddress
6048             opaqueCaptureAddressAllocateInfo.opaqueCaptureAddress = opaqueCaptureAddress;
6049             allocation = allocateExtended(vki, vk, physicalDevice, device, memoryRequirements,
6050                                           MemoryRequirement::DeviceAddressCaptureReplay, &allocFlagsInfo);
6051 
6052             // bind image & memory
6053             VK_CHECK(vk.bindImageMemory(device, *image, allocation->getMemory(), allocation->getOffset()));
6054         }
6055 
6056         // recreate imageView with VkOpaqueCaptureDescriptorDataCreateInfoEXT with data from imageViewCaptureDescriptorDataInfo
6057         opaqueCaptureDescriptorDataCreateInfo.opaqueCaptureDescriptorData = imageViewCaptureReplayData.data();
6058         imageViewCreateInfo.pNext                                         = &opaqueCaptureDescriptorDataCreateInfo;
6059         imageViewCreateInfo.image                                         = *image;
6060         imageView = createImageView(vk, device, &imageViewCreateInfo);
6061 
6062         // call vkGetDescriptorEXT() for the second time
6063         std::vector<uint8_t> secondDescriptorData(descriptorSize);
6064         imageInfo.imageView = *imageView;
6065         vk.getDescriptorEXT(device, &descGetInfo, descriptorSize, secondDescriptorData.data());
6066 
6067         if (deMemCmp(firstDescriptorData.data(), secondDescriptorData.data(), descriptorSize) == 0)
6068             return tcu::TestStatus::pass("Pass");
6069     }
6070     else
6071     {
6072         auto bufferCreateInfo =
6073             makeBufferCreateInfo(64, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT);
6074         const VkBufferCreateFlags sparseFlag =
6075             useSparseBuffer * (VK_BUFFER_CREATE_SPARSE_BINDING_BIT | VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT);
6076         bufferCreateInfo.flags = sparseFlag | VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT;
6077 
6078         // create buffer with VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT
6079         auto buffer(createBuffer(vk, device, &bufferCreateInfo));
6080 
6081         if (!useSparseBuffer)
6082         {
6083             memoryRequirements = getBufferMemoryRequirements(vk, device, *buffer);
6084             allocFlagsInfo.flags =
6085                 VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT | VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
6086 
6087             // allocate memory with VkMemoryOpaqueCaptureAddressAllocateInfo
6088             allocation = allocateExtended(vki, vk, physicalDevice, device, memoryRequirements,
6089                                           MemoryRequirement::DeviceAddressCaptureReplay, &allocFlagsInfo);
6090 
6091             // get data from vkGetDeviceMemoryOpaqueCaptureAddressKHR
6092             VkDeviceMemoryOpaqueCaptureAddressInfo memoryOpaqueCaptureAddressInfo = initVulkanStructure();
6093             memoryOpaqueCaptureAddressInfo.memory                                 = allocation->getMemory();
6094             opaqueCaptureAddress = vk.getDeviceMemoryOpaqueCaptureAddress(device, &memoryOpaqueCaptureAddressInfo);
6095 
6096             // bind buffer & memory
6097             VK_CHECK(vk.bindBufferMemory(device, *buffer, allocation->getMemory(), allocation->getOffset()));
6098         }
6099 
6100         // get data from vkGetBufferOpaqueCaptureDescriptorDataEXT
6101         VkBufferCaptureDescriptorDataInfoEXT captureDescriptorDataInfo = initVulkanStructure();
6102         captureDescriptorDataInfo.buffer                               = *buffer;
6103         std::vector<uint8_t> captureReplayData(
6104             de::max(dbProperties.bufferCaptureReplayDescriptorDataSize, std::size_t(64)));
6105         VK_CHECK(
6106             vk.getBufferOpaqueCaptureDescriptorDataEXT(device, &captureDescriptorDataInfo, captureReplayData.data()));
6107 
6108         // call vkGetDescriptorEXT() with the buffer and store the write descriptor data
6109         const auto descriptorSize = dbProperties.storageBufferDescriptorSize;
6110         std::vector<uint8_t> firstDescriptorData(descriptorSize);
6111         VkBufferDeviceAddressInfo bdaInfo                = initVulkanStructure();
6112         bdaInfo.buffer                                   = *buffer;
6113         VkDescriptorAddressInfoEXT descriptorAddressInfo = initVulkanStructure();
6114         descriptorAddressInfo.address                    = vk.getBufferDeviceAddress(device, &bdaInfo);
6115         descriptorAddressInfo.range                      = 64;
6116         VkDescriptorGetInfoEXT descGetInfo               = initVulkanStructure();
6117         descGetInfo.type                                 = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
6118         descGetInfo.data.pStorageBuffer                  = &descriptorAddressInfo;
6119         vk.getDescriptorEXT(device, &descGetInfo, descriptorSize, firstDescriptorData.data());
6120 
6121         // destroy buffer and free memory
6122         buffer = Move<VkBuffer>();
6123         if (!useSparseBuffer)
6124             allocation = MovePtr<Allocation>();
6125 
6126         // recreate buffer with VkOpaqueCaptureDescriptorDataCreateInfoEXT with data from captureDescriptorDataInfo
6127         VkOpaqueCaptureDescriptorDataCreateInfoEXT opaqueCaptureDescriptorDataCreateInfo = initVulkanStructure();
6128         opaqueCaptureDescriptorDataCreateInfo.opaqueCaptureDescriptorData                = captureReplayData.data();
6129         bufferCreateInfo.pNext = &opaqueCaptureDescriptorDataCreateInfo;
6130         buffer                 = createBuffer(vk, device, &bufferCreateInfo);
6131 
6132         if (!useSparseBuffer)
6133         {
6134             // allocate memory with VkMemoryOpaqueCaptureAddressAllocateInfo with data from opaqueCaptureAddress
6135             opaqueCaptureAddressAllocateInfo.opaqueCaptureAddress = opaqueCaptureAddress;
6136             allocation = allocateExtended(vki, vk, physicalDevice, device, memoryRequirements,
6137                                           MemoryRequirement::DeviceAddressCaptureReplay, &allocFlagsInfo);
6138 
6139             // bind image & memory
6140             VK_CHECK(vk.bindBufferMemory(device, *buffer, allocation->getMemory(), allocation->getOffset()));
6141         }
6142 
6143         // call vkGetDescriptorEXT() for the second time
6144         std::vector<uint8_t> secondDescriptorData(descriptorSize);
6145         bdaInfo.buffer                = *buffer;
6146         descriptorAddressInfo.address = vk.getBufferDeviceAddress(device, &bdaInfo);
6147         vk.getDescriptorEXT(device, &descGetInfo, descriptorSize, secondDescriptorData.data());
6148 
6149         if (deMemCmp(firstDescriptorData.data(), secondDescriptorData.data(), descriptorSize) == 0)
6150             return tcu::TestStatus::pass("Pass");
6151     }
6152 
6153     return tcu::TestStatus::fail("descriptor data is not the same between both getDescriptorEXT calls");
6154 }
6155 
6156 class CaptureReplyTestCase : public TestCase
6157 {
6158 public:
6159     CaptureReplyTestCase(tcu::TestContext &testCtx, const std::string &name, CaptureReplyTestMode mode);
6160 
6161     TestInstance *createInstance(Context &context) const override;
6162     void checkSupport(Context &context) const override;
6163 
6164 private:
6165     const CaptureReplyTestMode m_mode;
6166 };
6167 
CaptureReplyTestCase(tcu::TestContext & testCtx,const std::string & name,CaptureReplyTestMode mode)6168 CaptureReplyTestCase::CaptureReplyTestCase(tcu::TestContext &testCtx, const std::string &name,
6169                                            CaptureReplyTestMode mode)
6170     : TestCase(testCtx, name)
6171     , m_mode(mode)
6172 {
6173 }
6174 
createInstance(Context & context) const6175 TestInstance *CaptureReplyTestCase::createInstance(Context &context) const
6176 {
6177     return new CaptureReplyTestInstance(context, m_mode);
6178 }
6179 
checkSupport(Context & context) const6180 void CaptureReplyTestCase::checkSupport(Context &context) const
6181 {
6182     context.requireDeviceFunctionality("VK_EXT_descriptor_buffer");
6183 
6184     const auto &vki     = context.getInstanceInterface();
6185     auto physicalDevice = context.getPhysicalDevice();
6186 
6187     const auto &descriptorBufferFeatures = context.getDescriptorBufferFeaturesEXT();
6188     if (!descriptorBufferFeatures.descriptorBufferCaptureReplay)
6189         TCU_THROW(NotSupportedError, "descriptorBufferCaptureReplay feature is not supported");
6190 
6191     if (m_mode == CaptureReplyTestMode::Sparse_Image)
6192     {
6193         const auto sparseImageFormatPropVec = getPhysicalDeviceSparseImageFormatProperties(
6194             vki, physicalDevice, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_SAMPLE_COUNT_1_BIT,
6195             VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_TILING_OPTIMAL);
6196         if (sparseImageFormatPropVec.size() == 0)
6197             TCU_THROW(NotSupportedError, "Format does not support sparse operations.");
6198     }
6199     else if (m_mode == CaptureReplyTestMode::Sparse_Buffer)
6200         context.requireDeviceCoreFeature(DEVICE_CORE_FEATURE_SPARSE_BINDING);
6201 }
6202 
populateDescriptorBufferTestGroup(tcu::TestCaseGroup * topGroup,ResourceResidency resourceResidency)6203 void populateDescriptorBufferTestGroup(tcu::TestCaseGroup *topGroup, ResourceResidency resourceResidency)
6204 {
6205     tcu::TestContext &testCtx = topGroup->getTestContext();
6206     const uint32_t baseSeed   = static_cast<uint32_t>(testCtx.getCommandLine().getBaseSeed());
6207     ;
6208     std::string caseName;
6209 
6210     const VkQueueFlagBits choiceQueues[]{
6211         VK_QUEUE_GRAPHICS_BIT,
6212         VK_QUEUE_COMPUTE_BIT,
6213     };
6214 
6215     const VkShaderStageFlagBits choiceStages[]{
6216         VK_SHADER_STAGE_VERTEX_BIT,
6217         VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
6218         VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
6219         VK_SHADER_STAGE_GEOMETRY_BIT,
6220         VK_SHADER_STAGE_FRAGMENT_BIT,
6221         VK_SHADER_STAGE_COMPUTE_BIT,
6222         VK_SHADER_STAGE_RAYGEN_BIT_KHR,
6223         VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
6224         VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
6225         VK_SHADER_STAGE_MISS_BIT_KHR,
6226         VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
6227         VK_SHADER_STAGE_CALLABLE_BIT_KHR,
6228     };
6229 
6230     const bool choiceStagesCommands[]{
6231         false,
6232         true,
6233     };
6234 
6235     {
6236         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "basic"));
6237 
6238         addFunctionCase(subGroup.get(), "limits", testLimits);
6239 
6240         topGroup->addChild(subGroup.release());
6241     }
6242 
6243     {
6244         //
6245         // Basic single descriptor cases -- a quick check.
6246         //
6247         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "single"));
6248         const uint32_t subGroupHash = baseSeed ^ deStringHash(subGroup->getName());
6249 
6250         // VK_DESCRIPTOR_TYPE_SAMPLER is tested implicitly by sampled image case.
6251         // *_BUFFER_DYNAMIC are not allowed with descriptor buffers.
6252         //
6253         const VkDescriptorType choiceDescriptors[]{
6254             VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
6255             VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,          VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
6256             VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,   VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6257             VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,         VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
6258             VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,   VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
6259         };
6260 
6261         TestParams params{};
6262         params.variant            = TestVariant::SINGLE;
6263         params.subcase            = SubCase::NONE;
6264         params.bufferBindingCount = 1;
6265         params.setsPerBuffer      = 1;
6266         params.useMaintenance5    = false;
6267         params.resourceResidency  = resourceResidency;
6268 
6269         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6270             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6271                 for (auto pCommands2 = choiceStagesCommands; pCommands2 < DE_ARRAY_END(choiceStagesCommands);
6272                      ++pCommands2)
6273                     for (auto pDescriptor = choiceDescriptors; pDescriptor < DE_ARRAY_END(choiceDescriptors);
6274                          ++pDescriptor)
6275                     {
6276                         if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6277                         {
6278                             // Compute queue can only use compute shaders.
6279                             continue;
6280                         }
6281 
6282                         if ((*pDescriptor == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) &&
6283                             (*pStage != VK_SHADER_STAGE_FRAGMENT_BIT))
6284                         {
6285                             // Subpass loads are only valid in fragment stage.
6286                             continue;
6287                         }
6288 
6289                         params.stage      = *pStage;
6290                         params.queue      = *pQueue;
6291                         params.descriptor = *pDescriptor;
6292                         params.commands2  = *pCommands2;
6293 
6294                         subGroup->addChild(
6295                             new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6296                     }
6297 
6298         params.stage           = VK_SHADER_STAGE_COMPUTE_BIT;
6299         params.queue           = VK_QUEUE_COMPUTE_BIT;
6300         params.descriptor      = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
6301         params.useMaintenance5 = true;
6302 
6303         subGroup->addChild(new DescriptorBufferTestCase(testCtx, "compute_maintenance5", params));
6304         topGroup->addChild(subGroup.release());
6305     }
6306 
6307     {
6308         //
6309         // More complex cases. Multiple sets and bindings per buffer. Immutable samplers.
6310         //
6311         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "multiple"));
6312         const uint32_t subGroupHash = baseSeed ^ deStringHash(subGroup->getName());
6313         const VkShaderStageFlags longTestStages =
6314             VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
6315 
6316         const struct
6317         {
6318             uint32_t bufferBindingCount;
6319             uint32_t setsPerBuffer;
6320         } caseOptions[] = {
6321             {1, 1}, {1, 3},  {2, 4},  {3, 1}, // 3 buffer bindings is spec minimum
6322             {8, 1}, {16, 1}, {32, 1},
6323         };
6324 
6325         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6326             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6327                 for (auto pOptions = caseOptions; pOptions < DE_ARRAY_END(caseOptions); ++pOptions)
6328                 {
6329                     if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6330                     {
6331                         // Compute queue can only use compute shaders.
6332                         continue;
6333                     }
6334 
6335                     if (pOptions->bufferBindingCount >= 16 && ((*pStage) & longTestStages) == 0)
6336                     {
6337                         // Allow long tests for certain stages only, skip on rest stages
6338                         continue;
6339                     }
6340 
6341                     TestParams params{};
6342                     params.variant                    = TestVariant::MULTIPLE;
6343                     params.subcase                    = SubCase::NONE;
6344                     params.stage                      = *pStage;
6345                     params.queue                      = *pQueue;
6346                     params.bufferBindingCount         = pOptions->bufferBindingCount;
6347                     params.samplerBufferBindingCount  = pOptions->bufferBindingCount;
6348                     params.resourceBufferBindingCount = pOptions->bufferBindingCount;
6349                     params.setsPerBuffer              = pOptions->setsPerBuffer;
6350                     params.descriptor =
6351                         VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR; // Optional, will be tested if supported
6352                     params.useMaintenance5   = false;
6353                     params.resourceResidency = resourceResidency;
6354 
6355                     subGroup->addChild(
6356                         new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6357 
6358                     if ((pOptions->setsPerBuffer != 1) && (pOptions->bufferBindingCount < 4))
6359                     {
6360                         // For the smaller binding counts add a subcase with immutable samplers.
6361 
6362                         params.subcase = SubCase::IMMUTABLE_SAMPLERS;
6363 
6364                         subGroup->addChild(
6365                             new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6366                     }
6367                 }
6368 
6369         topGroup->addChild(subGroup.release());
6370     }
6371 
6372     {
6373         //
6374         // These cases exercise buffers of single usage (samplers only and resources only) and tries to use
6375         // all available buffer bindings.
6376         //
6377         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "max"));
6378         const uint32_t subGroupHash = baseSeed ^ deStringHash(subGroup->getName());
6379 
6380         const struct
6381         {
6382             uint32_t samplerBufferBindingCount;
6383             uint32_t resourceBufferBindingCount;
6384         } caseOptions[] = {
6385             {1, 1}, {2, 2}, {4, 4}, {8, 8}, {16, 16}, {1, 7}, {1, 15}, {1, 31}, {7, 1}, {15, 1}, {31, 1},
6386         };
6387 
6388         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6389             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6390                 for (auto pOptions = caseOptions; pOptions < DE_ARRAY_END(caseOptions); ++pOptions)
6391                 {
6392                     if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6393                     {
6394                         // Compute queue can only use compute shaders.
6395                         continue;
6396                     }
6397 
6398                     if (isAllRayTracingStages(*pStage) &&
6399                         (pOptions->samplerBufferBindingCount > 15 || pOptions->resourceBufferBindingCount > 15))
6400                     {
6401                         // Limit ray tracing stages
6402                         continue;
6403                     }
6404 
6405                     TestParams params{};
6406                     params.variant                    = TestVariant::MAX;
6407                     params.subcase                    = SubCase::NONE;
6408                     params.stage                      = *pStage;
6409                     params.queue                      = *pQueue;
6410                     params.samplerBufferBindingCount  = pOptions->samplerBufferBindingCount;
6411                     params.resourceBufferBindingCount = pOptions->resourceBufferBindingCount;
6412                     params.bufferBindingCount =
6413                         pOptions->samplerBufferBindingCount + pOptions->resourceBufferBindingCount;
6414                     params.setsPerBuffer     = 1;
6415                     params.descriptor        = VK_DESCRIPTOR_TYPE_MAX_ENUM;
6416                     params.useMaintenance5   = false;
6417                     params.resourceResidency = resourceResidency;
6418 
6419                     subGroup->addChild(
6420                         new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6421                 }
6422 
6423         topGroup->addChild(subGroup.release());
6424     }
6425 
6426     {
6427         //
6428         // Check embedded immutable sampler buffers/bindings.
6429         //
6430         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "embedded_imm_samplers"));
6431         const uint32_t subGroupHash = baseSeed ^ deStringHash(subGroup->getName());
6432 
6433         const struct
6434         {
6435             uint32_t bufferBindingCount;
6436             uint32_t samplersPerBuffer;
6437         } caseOptions[] = {
6438             {1, 1}, {1, 2}, {1, 4}, {1, 8}, {1, 16}, {2, 1}, {2, 2}, {3, 1}, {3, 3}, {8, 1}, {8, 4},
6439         };
6440 
6441         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6442             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6443                 for (auto pCommands2 = choiceStagesCommands; pCommands2 < DE_ARRAY_END(choiceStagesCommands);
6444                      ++pCommands2)
6445                     for (auto pOptions = caseOptions; pOptions < DE_ARRAY_END(caseOptions); ++pOptions)
6446                     {
6447                         if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6448                         {
6449                             // Compute queue can only use compute shaders.
6450                             continue;
6451                         }
6452 
6453                         TestParams params{};
6454                         params.variant                                    = TestVariant::EMBEDDED_IMMUTABLE_SAMPLERS;
6455                         params.subcase                                    = SubCase::NONE;
6456                         params.stage                                      = *pStage;
6457                         params.queue                                      = *pQueue;
6458                         params.bufferBindingCount                         = pOptions->bufferBindingCount + 1;
6459                         params.samplerBufferBindingCount                  = pOptions->bufferBindingCount;
6460                         params.resourceBufferBindingCount                 = 1;
6461                         params.setsPerBuffer                              = 1;
6462                         params.embeddedImmutableSamplerBufferBindingCount = pOptions->bufferBindingCount;
6463                         params.embeddedImmutableSamplersPerBuffer         = pOptions->samplersPerBuffer;
6464                         params.descriptor                                 = VK_DESCRIPTOR_TYPE_MAX_ENUM;
6465                         params.useMaintenance5                            = false;
6466                         params.resourceResidency                          = resourceResidency;
6467                         params.commands2                                  = *pCommands2;
6468 
6469                         subGroup->addChild(
6470                             new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6471                     }
6472 
6473         topGroup->addChild(subGroup.release());
6474     }
6475 
6476     {
6477         //
6478         // Check push descriptors and push descriptors with template updates
6479         //
6480         MovePtr<tcu::TestCaseGroup> subGroupPush(new tcu::TestCaseGroup(testCtx, "push_descriptor"));
6481         MovePtr<tcu::TestCaseGroup> subGroupPushTemplate(new tcu::TestCaseGroup(testCtx, "push_template"));
6482         const uint32_t subGroupPushHash         = baseSeed ^ deStringHash(subGroupPush->getName());
6483         const uint32_t subGroupPushTemplateHash = baseSeed ^ deStringHash(subGroupPushTemplate->getName());
6484 
6485         const struct
6486         {
6487             uint32_t pushDescriptorSetIndex;
6488             uint32_t bufferBindingCount;
6489 
6490             // The total number of descriptor sets will be bufferBindingCount + 1, where the additional set is used for push descriptors.
6491 
6492         } caseOptions[] = {
6493             {0, 0}, // Only push descriptors
6494             {0, 1}, {0, 3}, {1, 1}, {0, 2},
6495             {1, 2}, {2, 2}, // index = 2 means 3 sets, where the first two are used with descriptor buffer and the last with push descriptors
6496             {3, 3},
6497         };
6498 
6499         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6500             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6501                 for (auto pCommands2 = choiceStagesCommands; pCommands2 < DE_ARRAY_END(choiceStagesCommands);
6502                      ++pCommands2)
6503                     for (auto pOptions = caseOptions; pOptions < DE_ARRAY_END(caseOptions); ++pOptions)
6504                     {
6505                         if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6506                         {
6507                             // Compute queue can only use compute shaders.
6508                             continue;
6509                         }
6510 
6511                         TestParams params{};
6512                         params.variant                    = TestVariant::PUSH_DESCRIPTOR;
6513                         params.subcase                    = SubCase::NONE;
6514                         params.stage                      = *pStage;
6515                         params.queue                      = *pQueue;
6516                         params.bufferBindingCount         = pOptions->bufferBindingCount;
6517                         params.samplerBufferBindingCount  = pOptions->bufferBindingCount;
6518                         params.resourceBufferBindingCount = pOptions->bufferBindingCount;
6519                         params.setsPerBuffer              = 1;
6520                         params.pushDescriptorSetIndex     = pOptions->pushDescriptorSetIndex;
6521                         params.descriptor =
6522                             VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR; // Optional, will be tested if supported
6523                         params.useMaintenance5   = false;
6524                         params.resourceResidency = resourceResidency;
6525                         params.commands2         = *pCommands2;
6526 
6527                         subGroupPush->addChild(new DescriptorBufferTestCase(
6528                             testCtx, getCaseNameUpdateHash(params, subGroupPushHash), params));
6529 
6530                         if (pOptions->bufferBindingCount < 2)
6531                         {
6532                             TestParams paramsSingleBuffer = params;
6533 
6534                             paramsSingleBuffer.subcase = SubCase::SINGLE_BUFFER;
6535 
6536                             subGroupPush->addChild(new DescriptorBufferTestCase(
6537                                 testCtx, getCaseNameUpdateHash(paramsSingleBuffer, subGroupPushHash),
6538                                 paramsSingleBuffer));
6539                         }
6540 
6541                         params.variant = TestVariant::PUSH_TEMPLATE;
6542 
6543                         subGroupPushTemplate->addChild(new DescriptorBufferTestCase(
6544                             testCtx, getCaseNameUpdateHash(params, subGroupPushTemplateHash), params));
6545                     }
6546 
6547         topGroup->addChild(subGroupPush.release());
6548         topGroup->addChild(subGroupPushTemplate.release());
6549     }
6550 
6551     {
6552         //
6553         // Robustness tests
6554         //
6555         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "robust"));
6556         MovePtr<tcu::TestCaseGroup> subGroupBuffer(new tcu::TestCaseGroup(testCtx, "buffer_access"));
6557         MovePtr<tcu::TestCaseGroup> subGroupNullDescriptor(new tcu::TestCaseGroup(testCtx, "null_descriptor"));
6558         const uint32_t subGroupBufferHash         = baseSeed ^ deStringHash(subGroupBuffer->getName());
6559         const uint32_t subGroupNullDescriptorHash = baseSeed ^ deStringHash(subGroupNullDescriptor->getName());
6560 
6561         // Robust buffer access:
6562         // This test will fill the buffers with zeros and always expect to read zero values back (in and out of bounds).
6563 
6564         // Null descriptor cases:
6565         // For each test, one of these descriptors will have its buffer/imageView/etc. set to null handle.
6566         // Reads done through a null descriptor are expected to return zeros.
6567         //
6568         const VkDescriptorType choiceNullDescriptors[]{
6569             VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
6570             VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,          VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
6571             VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,   VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6572             VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,         VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
6573         };
6574 
6575         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6576             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6577             {
6578                 if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6579                 {
6580                     // Compute queue can only use compute shaders.
6581                     continue;
6582                 }
6583 
6584                 TestParams params{};
6585                 params.variant            = TestVariant::ROBUST_BUFFER_ACCESS;
6586                 params.stage              = *pStage;
6587                 params.queue              = *pQueue;
6588                 params.bufferBindingCount = 1;
6589                 params.setsPerBuffer      = 1;
6590                 params.useMaintenance5    = false;
6591                 params.resourceResidency  = resourceResidency;
6592 
6593                 subGroupBuffer->addChild(
6594                     new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupBufferHash), params));
6595 
6596                 for (auto pDescriptor = choiceNullDescriptors; pDescriptor < DE_ARRAY_END(choiceNullDescriptors);
6597                      ++pDescriptor)
6598                 {
6599                     if ((*pDescriptor == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) &&
6600                         (*pStage != VK_SHADER_STAGE_FRAGMENT_BIT))
6601                     {
6602                         // Subpass loads are only valid in fragment stage.
6603                         continue;
6604                     }
6605 
6606                     params.variant    = TestVariant::ROBUST_NULL_DESCRIPTOR;
6607                     params.descriptor = *pDescriptor;
6608 
6609                     subGroupNullDescriptor->addChild(new DescriptorBufferTestCase(
6610                         testCtx, getCaseNameUpdateHash(params, subGroupNullDescriptorHash), params));
6611                 }
6612             }
6613 
6614         subGroup->addChild(subGroupBuffer.release());
6615         subGroup->addChild(subGroupNullDescriptor.release());
6616         topGroup->addChild(subGroup.release());
6617     }
6618 
6619     {
6620         //
6621         // Capture and replay
6622         //
6623         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "capture_replay"));
6624         const uint32_t subGroupHash = baseSeed ^ deStringHash(subGroup->getName());
6625 
6626         const VkDescriptorType choiceDescriptors[]{
6627             VK_DESCRIPTOR_TYPE_SAMPLER,
6628             VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, // both sampler and image are captured
6629             VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
6630             VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
6631             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
6632             VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
6633             VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6634             VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6635             VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
6636             VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
6637         };
6638 
6639         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6640             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6641                 for (auto pDescriptor = choiceDescriptors; pDescriptor < DE_ARRAY_END(choiceDescriptors); ++pDescriptor)
6642                 {
6643                     if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6644                     {
6645                         // Compute queue can only use compute shaders.
6646                         continue;
6647                     }
6648 
6649                     if ((*pDescriptor == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) &&
6650                         (*pStage != VK_SHADER_STAGE_FRAGMENT_BIT))
6651                     {
6652                         // Subpass loads are only valid in fragment stage.
6653                         continue;
6654                     }
6655 
6656                     TestParams params{};
6657                     params.variant            = TestVariant::CAPTURE_REPLAY;
6658                     params.subcase            = SubCase::NONE;
6659                     params.stage              = *pStage;
6660                     params.queue              = *pQueue;
6661                     params.descriptor         = *pDescriptor;
6662                     params.bufferBindingCount = 1;
6663                     params.setsPerBuffer      = 1;
6664                     params.useMaintenance5    = false;
6665                     params.resourceResidency  = resourceResidency;
6666 
6667                     subGroup->addChild(
6668                         new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6669 
6670                     if ((*pDescriptor == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
6671                         (*pDescriptor == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
6672                         (*pDescriptor == VK_DESCRIPTOR_TYPE_SAMPLER))
6673                     {
6674                         params.subcase = SubCase::CAPTURE_REPLAY_CUSTOM_BORDER_COLOR;
6675 
6676                         subGroup->addChild(
6677                             new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6678                     }
6679                 }
6680 
6681         std::pair<std::string, CaptureReplyTestMode> captureReplyModes[]{
6682             {"image", CaptureReplyTestMode::Image},
6683             {"sparse_image", CaptureReplyTestMode::Sparse_Image},
6684             {"buffer", CaptureReplyTestMode::Buffer},
6685             {"sparse_buffer", CaptureReplyTestMode::Sparse_Buffer},
6686         };
6687 
6688         for (const auto &captureReplyMode : captureReplyModes)
6689         {
6690             std::string name = captureReplyMode.first + "_descriptor_data_consistency";
6691             subGroup->addChild(new CaptureReplyTestCase(testCtx, name, captureReplyMode.second));
6692         }
6693 
6694         topGroup->addChild(subGroup.release());
6695     }
6696 
6697     {
6698         //
6699         // VK_EXT_mutable_descriptor_type tests
6700         //
6701         // Similar to multiple test case, but with mutable descriptor type instead.
6702         // Rather than using mutable type for everything, there are a few subcases that determine which descriptor
6703         // types can be replaced with the mutable type.
6704         //
6705         MovePtr<tcu::TestCaseGroup> subGroup(
6706             new tcu::TestCaseGroup(testCtx, "mutable_descriptor", "Mutable descriptor type tests"));
6707         const uint32_t subGroupHash = baseSeed ^ deStringHash(subGroup->getName());
6708 
6709         const DescriptorMask choiceDescriptorMasks[]{
6710             // Single
6711             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLER}),
6712             makeDescriptorMask({VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER}),
6713             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE}),
6714             makeDescriptorMask({VK_DESCRIPTOR_TYPE_STORAGE_IMAGE}),
6715             makeDescriptorMask({VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER}),
6716             makeDescriptorMask({VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER}),
6717             makeDescriptorMask({VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER}),
6718             makeDescriptorMask({VK_DESCRIPTOR_TYPE_STORAGE_BUFFER}),
6719             makeDescriptorMask({VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT}),
6720 
6721             // Multiple - images/samplers
6722             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER}),
6723             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER}),
6724             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE}),
6725             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT}),
6726             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
6727                                 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE}),
6728             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
6729                                 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
6730                                 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT}),
6731 
6732             // Multiple - buffers
6733             makeDescriptorMask({VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER}),
6734             makeDescriptorMask({VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER}),
6735             makeDescriptorMask({VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
6736                                 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER}),
6737 
6738             // Everything
6739             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
6740                                 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
6741                                 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
6742                                 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER}),
6743             makeDescriptorMask({VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
6744                                 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
6745                                 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
6746                                 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6747                                 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT}), // with input attachment
6748         };
6749 
6750         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6751             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6752                 for (auto pMask = choiceDescriptorMasks; pMask < DE_ARRAY_END(choiceDescriptorMasks); ++pMask)
6753                 {
6754                     if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6755                     {
6756                         // Compute queue can only use compute shaders.
6757                         continue;
6758                     }
6759 
6760                     if ((*pStage != VK_SHADER_STAGE_FRAGMENT_BIT) &&
6761                         (maskCheck(*pMask, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)))
6762                     {
6763                         // Subpass loads are only valid in fragment stage.
6764                         continue;
6765                     }
6766 
6767                     TestParams params{};
6768                     params.variant                = TestVariant::MUTABLE_DESCRIPTOR_TYPE;
6769                     params.stage                  = *pStage;
6770                     params.queue                  = *pQueue;
6771                     params.bufferBindingCount     = 1;
6772                     params.setsPerBuffer          = 1;
6773                     params.mutableDescriptorTypes = *pMask;
6774                     params.resourceResidency      = resourceResidency;
6775 
6776                     subGroup->addChild(
6777                         new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6778                 }
6779 
6780         topGroup->addChild(subGroup.release());
6781     }
6782 
6783     {
6784         //
6785         // ycbcr sampler conversion interaction
6786         //
6787         MovePtr<tcu::TestCaseGroup> subGroup(new tcu::TestCaseGroup(testCtx, "ycbcr_sampler", "ycbcr sampler tests"));
6788         const uint32_t subGroupHash = baseSeed ^ deStringHash(subGroup->getName());
6789 
6790         for (auto pQueue = choiceQueues; pQueue < DE_ARRAY_END(choiceQueues); ++pQueue)
6791             for (auto pStage = choiceStages; pStage < DE_ARRAY_END(choiceStages); ++pStage)
6792             {
6793                 if ((*pQueue == VK_QUEUE_COMPUTE_BIT) && (*pStage != VK_SHADER_STAGE_COMPUTE_BIT))
6794                 {
6795                     // Compute queue can only use compute shaders.
6796                     continue;
6797                 }
6798 
6799                 TestParams params{};
6800                 params.variant            = TestVariant::YCBCR_SAMPLER;
6801                 params.subcase            = SubCase::NONE;
6802                 params.stage              = *pStage;
6803                 params.queue              = *pQueue;
6804                 params.descriptor         = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
6805                 params.bufferBindingCount = 1;
6806                 params.setsPerBuffer      = 1;
6807                 params.resourceResidency  = resourceResidency;
6808 
6809                 subGroup->addChild(
6810                     new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6811 
6812                 params.subcase = SubCase::YCBCR_SAMPLER_ARRAY;
6813 
6814                 subGroup->addChild(
6815                     new DescriptorBufferTestCase(testCtx, getCaseNameUpdateHash(params, subGroupHash), params));
6816             }
6817 
6818         topGroup->addChild(subGroup.release());
6819     }
6820 }
6821 
populateDescriptorBufferTests(tcu::TestCaseGroup * testGroup)6822 void populateDescriptorBufferTests(tcu::TestCaseGroup *testGroup)
6823 {
6824     tcu::TestContext &testCtx = testGroup->getTestContext();
6825 
6826     MovePtr<tcu::TestCaseGroup> traditionalGroup(
6827         new tcu::TestCaseGroup(testCtx, "traditional_buffer", "Traditional descriptor buffer tests"));
6828     populateDescriptorBufferTestGroup(traditionalGroup.get(), ResourceResidency::TRADITIONAL);
6829     testGroup->addChild(traditionalGroup.release());
6830 
6831     MovePtr<tcu::TestCaseGroup> sparseBindingGroup(
6832         new tcu::TestCaseGroup(testCtx, "sparse_binding_buffer", "Sparse binding descriptor buffer tests"));
6833     populateDescriptorBufferTestGroup(sparseBindingGroup.get(), ResourceResidency::SPARSE_BINDING);
6834     testGroup->addChild(sparseBindingGroup.release());
6835 
6836     MovePtr<tcu::TestCaseGroup> sparseResidencyGroup(
6837         new tcu::TestCaseGroup(testCtx, "sparse_residency_buffer", "Sparse residency descriptor buffer tests"));
6838     populateDescriptorBufferTestGroup(sparseResidencyGroup.get(), ResourceResidency::SPARSE_RESIDENCY);
6839     testGroup->addChild(sparseResidencyGroup.release());
6840 }
6841 
6842 } // namespace
6843 
createDescriptorBufferTests(tcu::TestContext & testCtx)6844 tcu::TestCaseGroup *createDescriptorBufferTests(tcu::TestContext &testCtx)
6845 {
6846     return createTestGroup(testCtx, "descriptor_buffer", populateDescriptorBufferTests);
6847 }
6848 
6849 } // namespace BindingModel
6850 } // namespace vkt
6851