/*-------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2021 The Khronos Group Inc.
 * Copyright (c) 2021 Valve Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Tests for VK_VALVE_mutable_descriptor_type and VK_EXT_mutable_descriptor_type.
 *//*--------------------------------------------------------------------*/
#include "vktBindingMutableTests.hpp"
#include "vktTestCase.hpp"
#include "vktTestGroupUtil.hpp"
#include "vktCustomInstancesDevices.hpp"

#include "tcuCommandLine.hpp"

#include "vkDefs.hpp"
#include "vkRefUtil.hpp"
#include "vkQueryUtil.hpp"
#include "vkImageWithMemory.hpp"
#include "vkBufferWithMemory.hpp"
#include "vkTypeUtil.hpp"
#include "vkObjUtil.hpp"
#include "vkBarrierUtil.hpp"
#include "vkCmdUtil.hpp"
#include "vkBuilderUtil.hpp"
#include "vkRayTracingUtil.hpp"

#include "deUniquePtr.hpp"
#include "deSTLUtil.hpp"
#include "deStringUtil.hpp"

#include <vector>
#include <algorithm>
#include <iterator>
#include <set>
#include <sstream>
#include <limits>

namespace vkt
{
namespace BindingModel
{

namespace
{

using namespace vk;

de::SharedPtr<Move<vk::VkDevice>> g_singletonDevice;

static std::vector<std::string> removeExtensions (const std::vector<std::string>& a, const std::vector<const char*>& b)
{
    std::vector<std::string> res;
    std::set<std::string>    removeExts (b.begin(), b.end());

    for (std::vector<std::string>::const_iterator aIter = a.begin(); aIter != a.end(); ++aIter)
    {
        if (!de::contains(removeExts, *aIter))
            res.push_back(*aIter);
    }

    return res;
}
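// E.g. removeExtensions({"VK_KHR_swapchain", "VK_EXT_mutable_descriptor_type"}, {"VK_KHR_swapchain"})
// returns {"VK_EXT_mutable_descriptor_type"}.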

VkDevice getDevice(Context& context)
{
    if (!g_singletonDevice)
    {
        const float queuePriority = 1.0f;

        // Create a universal queue that supports graphics and compute
        const VkDeviceQueueCreateInfo queueParams
        {
            VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // VkStructureType          sType;
            DE_NULL,                                    // const void*              pNext;
            0u,                                         // VkDeviceQueueCreateFlags flags;
            context.getUniversalQueueFamilyIndex(),     // deUint32                 queueFamilyIndex;
            1u,                                         // deUint32                 queueCount;
            &queuePriority                              // const float*             pQueuePriorities;
        };

        // \note Extensions in core are not explicitly enabled even though
        //       they are in the extension list advertised to tests.
        std::vector<const char*> extensionPtrs;
        std::vector<const char*> coreExtensions;
        getCoreDeviceExtensions(context.getUsedApiVersion(), coreExtensions);
        std::vector<std::string> nonCoreExtensions(removeExtensions(context.getDeviceExtensions(), coreExtensions));

        extensionPtrs.resize(nonCoreExtensions.size());

        for (size_t ndx = 0; ndx < nonCoreExtensions.size(); ++ndx)
            extensionPtrs[ndx] = nonCoreExtensions[ndx].c_str();

        VkPhysicalDeviceAccelerationStructureFeaturesKHR accelerationStructureFeatures = initVulkanStructure();
        VkPhysicalDeviceBufferDeviceAddressFeatures      bufferDeviceAddressFeatures   = initVulkanStructure(&accelerationStructureFeatures);
        VkPhysicalDeviceRayTracingPipelineFeaturesKHR    rayTracingPipelineFeatures    = initVulkanStructure(&bufferDeviceAddressFeatures);
        VkPhysicalDeviceRayQueryFeaturesKHR              rayQueryFeatures              = initVulkanStructure(&rayTracingPipelineFeatures);
        VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutableDescriptorTypeFeatures = initVulkanStructure(&rayQueryFeatures);
        VkPhysicalDeviceDescriptorIndexingFeatures       descriptorIndexingFeatures    = initVulkanStructure(&mutableDescriptorTypeFeatures);
        VkPhysicalDeviceFeatures2                        features2                     = initVulkanStructure(&descriptorIndexingFeatures);
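
        // The pNext chain hangs off features2 in the order declared above:
        // features2 -> descriptorIndexing -> mutableDescriptorType -> rayQuery
        //           -> rayTracingPipeline -> bufferDeviceAddress -> accelerationStructure.
        // The getPhysicalDeviceFeatures2() query below fills the whole chain, so the custom
        // device is created with every feature the implementation actually supports.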

        context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);

        const VkDeviceCreateInfo deviceCreateInfo
        {
            VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,                   // sType;
            &features2,                                             // pNext;
            (VkDeviceCreateFlags)0u,                                // flags;
            1,                                                      // queueRecordCount;
            &queueParams,                                           // pRequestedQueues;
            0,                                                      // layerCount;
            DE_NULL,                                                // ppEnabledLayerNames;
            (deUint32)extensionPtrs.size(),                         // deUint32 enabledExtensionCount;
            (extensionPtrs.empty() ? DE_NULL : &extensionPtrs[0]),  // const char* const* ppEnabledExtensionNames;
            DE_NULL,                                                // pEnabledFeatures;
        };

        Move<VkDevice> device = createCustomDevice(context.getTestContext().getCommandLine().isValidationEnabled(), context.getPlatformInterface(), context.getInstance(), context.getInstanceInterface(), context.getPhysicalDevice(), &deviceCreateInfo);
        g_singletonDevice = de::SharedPtr<Move<VkDevice>>(new Move<VkDevice>(device));
    }

    return g_singletonDevice->get();
}

deUint32 getDescriptorNumericValue (deUint32 iteration, deUint32 bindingIdx, deUint32 descriptorIdx = 0u)
{
    // When assigning numeric values for the descriptor contents, each descriptor will get 0x5aIIBBDD. II is an octet containing
    // the iteration index, BB is an octet containing the binding index and DD is the descriptor index inside that binding.
    constexpr deUint32 kNumericValueBase = 0x5a000000u;

    return (kNumericValueBase | ((iteration & 0xFFu) << 16) | ((bindingIdx & 0xFFu) << 8) | (descriptorIdx & 0xFFu));
}
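// E.g. getDescriptorNumericValue(1u, 2u, 3u) yields
// 0x5a000000 | (1 << 16) | (2 << 8) | 3 == 0x5a010203u.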

deUint16 getAccelerationStructureOffsetX (deUint32 descriptorNumericValue)
{
    // Keep the lowest 16 bits (binding and descriptor idx) as the offset.
    return static_cast<deUint16>(descriptorNumericValue);
}
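// E.g. for the value 0x5a010203u above the offset is 0x0203u (515): the geometry built in
// makeDefaultAccelerationStructure() is centered on that X coordinate, so each acceleration
// structure can be told apart by where a ray query hits it.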

// Value that will be stored in the output buffer to signal success reading values.
deUint32 getExpectedOutputBufferValue ()
{
    return 2u;
}

// This value will be stored in an image to be sampled when checking descriptors containing samplers alone.
deUint32 getExternalSampledImageValue ()
{
    return 0x41322314u;
}

// Value that will be ORed with the descriptor value before writing.
deUint32 getStoredValueMask ()
{
    return 0xFF000000u;
}

VkFormat getDescriptorImageFormat ()
{
    return VK_FORMAT_R32_UINT;
}

VkExtent3D getDefaultExtent ()
{
    return makeExtent3D(1u, 1u, 1u);
}

// Convert value to hexadecimal.
std::string toHex (deUint32 val)
{
    std::ostringstream s;
    s << "0x" << std::hex << val << "u";
    return s.str();
}
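// E.g. toHex(0x5a010203u) returns the string "0x5a010203u", ready to paste into generated GLSL.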

// Returns the list of descriptor types that cannot be part of a mutable descriptor.
std::vector<VkDescriptorType> getForbiddenMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
    };
}

// Returns the list of descriptor types that are mandatory for the extension.
std::vector<VkDescriptorType> getMandatoryMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
        VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
    };
}

// This helps quickly transform a vector of descriptor types into a bitmask, which makes it easier to check some conditions.
enum DescriptorTypeFlagBits
{
    DTFB_SAMPLER                    = (1 << 0),
    DTFB_COMBINED_IMAGE_SAMPLER     = (1 << 1),
    DTFB_SAMPLED_IMAGE              = (1 << 2),
    DTFB_STORAGE_IMAGE              = (1 << 3),
    DTFB_UNIFORM_TEXEL_BUFFER       = (1 << 4),
    DTFB_STORAGE_TEXEL_BUFFER       = (1 << 5),
    DTFB_UNIFORM_BUFFER             = (1 << 6),
    DTFB_STORAGE_BUFFER             = (1 << 7),
    DTFB_UNIFORM_BUFFER_DYNAMIC     = (1 << 8),
    DTFB_STORAGE_BUFFER_DYNAMIC     = (1 << 9),
    DTFB_INPUT_ATTACHMENT           = (1 << 10),
    DTFB_INLINE_UNIFORM_BLOCK_EXT   = (1 << 11),
    DTFB_ACCELERATION_STRUCTURE_KHR = (1 << 12),
    DTFB_ACCELERATION_STRUCTURE_NV  = (1 << 13),
    DTFB_MUTABLE                    = (1 << 14),
};

using DescriptorTypeFlags = deUint32;

// Convert type to its corresponding flag bit.
DescriptorTypeFlagBits toDescriptorTypeFlagBit (VkDescriptorType descriptorType)
{
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:                    return DTFB_SAMPLER;
    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:     return DTFB_COMBINED_IMAGE_SAMPLER;
    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:              return DTFB_SAMPLED_IMAGE;
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:              return DTFB_STORAGE_IMAGE;
    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:       return DTFB_UNIFORM_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:       return DTFB_STORAGE_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:             return DTFB_UNIFORM_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:             return DTFB_STORAGE_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:     return DTFB_UNIFORM_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:     return DTFB_STORAGE_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:           return DTFB_INPUT_ATTACHMENT;
    case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:   return DTFB_INLINE_UNIFORM_BLOCK_EXT;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: return DTFB_ACCELERATION_STRUCTURE_KHR;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:  return DTFB_ACCELERATION_STRUCTURE_NV;
    case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:                return DTFB_MUTABLE;
    default: break;
    }

    // Unreachable.
    DE_ASSERT(false);
    return DTFB_SAMPLER;
}

// Convert vector of descriptor types to a bitfield.
DescriptorTypeFlags toDescriptorTypeFlags (const std::vector<VkDescriptorType>& types)
{
    DescriptorTypeFlags result = 0u;
    for (const auto& t : types)
        result |= toDescriptorTypeFlagBit(t);
    return result;
}
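// E.g. toDescriptorTypeFlags({VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER})
// == (DTFB_SAMPLER | DTFB_STORAGE_BUFFER) == 0x81u.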

// Convert bitfield to vector of descriptor types.
std::vector<VkDescriptorType> toDescriptorTypeVector (DescriptorTypeFlags bitfield)
{
    std::vector<VkDescriptorType> result;

    if (bitfield & DTFB_SAMPLER)                    result.push_back(VK_DESCRIPTOR_TYPE_SAMPLER);
    if (bitfield & DTFB_COMBINED_IMAGE_SAMPLER)     result.push_back(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (bitfield & DTFB_SAMPLED_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
    if (bitfield & DTFB_STORAGE_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
    if (bitfield & DTFB_UNIFORM_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
    if (bitfield & DTFB_STORAGE_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    if (bitfield & DTFB_STORAGE_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
    if (bitfield & DTFB_STORAGE_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
    if (bitfield & DTFB_INPUT_ATTACHMENT)           result.push_back(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    if (bitfield & DTFB_INLINE_UNIFORM_BLOCK_EXT)   result.push_back(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_KHR) result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_NV)  result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
    if (bitfield & DTFB_MUTABLE)                    result.push_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT);

    return result;
}
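// Note: the vector is produced in flag-bit order, so converting types -> flags -> types
// canonicalizes ordering and drops duplicates. The ArrayBinding constructor below compares
// bindings through toDescriptorTypeFlags(), so mutable type lists that differ only in
// ordering count as the same effective type.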

// How to create the source set when copying descriptors from another set.
// * MUTABLE means to transform bindings into mutable bindings.
// * NONMUTABLE means to transform bindings into non-mutable bindings.
enum class SourceSetStrategy
{
    MUTABLE = 0,
    NONMUTABLE,
    NO_SOURCE,
};

enum class PoolMutableStrategy
{
    KEEP_TYPES = 0,
    EXPAND_TYPES,
    NO_TYPES,
};

// Type of information that's present in VkWriteDescriptorSet.
enum class WriteType
{
    IMAGE_INFO = 0,
    BUFFER_INFO,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE_INFO,
};

struct WriteInfo
{
    WriteType writeType;
    union
    {
        VkDescriptorImageInfo                        imageInfo;
        VkDescriptorBufferInfo                       bufferInfo;
        VkBufferView                                 bufferView;
        VkWriteDescriptorSetAccelerationStructureKHR asInfo;
    };

    explicit WriteInfo (const VkDescriptorImageInfo& info_)
        : writeType(WriteType::IMAGE_INFO)
        , imageInfo(info_)
    {}

    explicit WriteInfo (const VkDescriptorBufferInfo& info_)
        : writeType(WriteType::BUFFER_INFO)
        , bufferInfo(info_)
    {}

    explicit WriteInfo (VkBufferView view_)
        : writeType(WriteType::BUFFER_VIEW)
        , bufferView(view_)
    {}

    explicit WriteInfo (const VkWriteDescriptorSetAccelerationStructureKHR& asInfo_)
        : writeType(WriteType::ACCELERATION_STRUCTURE_INFO)
        , asInfo(asInfo_)
    {}
};
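// Note: WriteInfo is a tagged union; writeType records which member of the anonymous union
// is active, mirroring the pImageInfo/pBufferInfo/pTexelBufferView/pNext choice that must be
// made when filling a VkWriteDescriptorSet.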

// Resource backing up a single binding.
enum class ResourceType
{
    SAMPLER = 0,
    IMAGE,
    COMBINED_IMAGE_SAMPLER,
    BUFFER,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE,
};

// Type of resource backing up a particular descriptor type.
ResourceType toResourceType (VkDescriptorType descriptorType)
{
    ResourceType resourceType = ResourceType::SAMPLER;
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:
        resourceType = ResourceType::SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
        resourceType = ResourceType::COMBINED_IMAGE_SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
        resourceType = ResourceType::IMAGE;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
        resourceType = ResourceType::BUFFER_VIEW;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        resourceType = ResourceType::BUFFER;
        break;

    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
        resourceType = ResourceType::ACCELERATION_STRUCTURE;
        break;

    default:
        DE_ASSERT(false);
        break;
    }

    return resourceType;
}

bool isShaderWritable (VkDescriptorType descriptorType)
{
    return (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
            descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
}

Move<VkSampler> makeDefaultSampler (const DeviceInterface& vkd, VkDevice device)
{
    const VkSamplerCreateInfo samplerCreateInfo = {
        VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,  // VkStructureType      sType;
        nullptr,                                // const void*          pNext;
        0u,                                     // VkSamplerCreateFlags flags;
        VK_FILTER_NEAREST,                      // VkFilter             magFilter;
        VK_FILTER_NEAREST,                      // VkFilter             minFilter;
        VK_SAMPLER_MIPMAP_MODE_NEAREST,         // VkSamplerMipmapMode  mipmapMode;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode addressModeU;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode addressModeV;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode addressModeW;
        0.f,                                    // float                mipLodBias;
        VK_FALSE,                               // VkBool32             anisotropyEnable;
        1.f,                                    // float                maxAnisotropy;
        VK_FALSE,                               // VkBool32             compareEnable;
        VK_COMPARE_OP_ALWAYS,                   // VkCompareOp          compareOp;
        0.f,                                    // float                minLod;
        0.f,                                    // float                maxLod;
        VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,  // VkBorderColor        borderColor;
        VK_FALSE,                               // VkBool32             unnormalizedCoordinates;
    };

    return createSampler(vkd, device, &samplerCreateInfo);
}

de::MovePtr<ImageWithMemory> makeDefaultImage (const DeviceInterface& vkd, VkDevice device, Allocator& alloc)
{
    const auto extent = makeExtent3D(1u, 1u, 1u);
    const VkImageUsageFlags usageFlags = (
        VK_IMAGE_USAGE_SAMPLED_BIT
        | VK_IMAGE_USAGE_STORAGE_BIT
        | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_TRANSFER_SRC_BIT
        | VK_IMAGE_USAGE_TRANSFER_DST_BIT);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,    // VkStructureType       sType;
        nullptr,                                // const void*           pNext;
        0u,                                     // VkImageCreateFlags    flags;
        VK_IMAGE_TYPE_2D,                       // VkImageType           imageType;
        getDescriptorImageFormat(),             // VkFormat              format;
        extent,                                 // VkExtent3D            extent;
        1u,                                     // deUint32              mipLevels;
        1u,                                     // deUint32              arrayLayers;
        VK_SAMPLE_COUNT_1_BIT,                  // VkSampleCountFlagBits samples;
        VK_IMAGE_TILING_OPTIMAL,                // VkImageTiling         tiling;
        usageFlags,                             // VkImageUsageFlags     usage;
        VK_SHARING_MODE_EXCLUSIVE,              // VkSharingMode         sharingMode;
        0u,                                     // deUint32              queueFamilyIndexCount;
        nullptr,                                // const deUint32*       pQueueFamilyIndices;
        VK_IMAGE_LAYOUT_UNDEFINED,              // VkImageLayout         initialLayout;
    };
    return de::MovePtr<ImageWithMemory>(new ImageWithMemory(vkd, device, alloc, imageCreateInfo, MemoryRequirement::Any));
}

Move<VkImageView> makeDefaultImageView (const DeviceInterface& vkd, VkDevice device, VkImage image)
{
    const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
    return makeImageView(vkd, device, image, VK_IMAGE_VIEW_TYPE_2D, getDescriptorImageFormat(), subresourceRange);
}

de::MovePtr<BufferWithMemory> makeDefaultBuffer (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 numElements = 1u)
{
    const VkBufferUsageFlags bufferUsage = (
        VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
        | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_TRANSFER_SRC_BIT
        | VK_BUFFER_USAGE_TRANSFER_DST_BIT);

    const auto bufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numElements));

    const auto bufferCreateInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

    return de::MovePtr<BufferWithMemory>(new BufferWithMemory(vkd, device, alloc, bufferCreateInfo, MemoryRequirement::HostVisible));
}

Move<VkBufferView> makeDefaultBufferView (const DeviceInterface& vkd, VkDevice device, VkBuffer buffer)
{
    const auto bufferOffset = static_cast<VkDeviceSize>(0);
    const auto bufferSize   = static_cast<VkDeviceSize>(sizeof(deUint32));

    return makeBufferView(vkd, device, buffer, getDescriptorImageFormat(), bufferOffset, bufferSize);
}

struct AccelerationStructureData
{
    using TLASPtr = de::MovePtr<TopLevelAccelerationStructure>;
    using BLASPtr = de::MovePtr<BottomLevelAccelerationStructure>;

    TLASPtr tlas;
    BLASPtr blas;

    void swap (AccelerationStructureData& other)
    {
        auto myTlasPtr = tlas.release();
        auto myBlasPtr = blas.release();

        auto otherTlasPtr = other.tlas.release();
        auto otherBlasPtr = other.blas.release();

        tlas = TLASPtr(otherTlasPtr);
        blas = BLASPtr(otherBlasPtr);

        other.tlas = TLASPtr(myTlasPtr);
        other.blas = BLASPtr(myBlasPtr);
    }

    AccelerationStructureData () : tlas(), blas() {}

    AccelerationStructureData (AccelerationStructureData&& other)
        : AccelerationStructureData()
    {
        swap(other);
    }

    AccelerationStructureData& operator= (AccelerationStructureData&& other)
    {
        swap(other);
        return *this;
    }
};
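// Note: both move operations above are implemented in terms of swap(): move construction
// default-constructs and then swaps, leaving the source empty; move assignment swaps the two
// objects' contents, so no TLAS/BLAS pointer is ever duplicated or leaked.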

AccelerationStructureData makeDefaultAccelerationStructure (const DeviceInterface& vkd, VkDevice device, VkCommandBuffer cmdBuffer, Allocator& alloc, bool triangles, deUint16 offsetX)
{
    AccelerationStructureData data;

    // Triangle around (offsetX, 0) with depth 5.0.
    const float middleX = static_cast<float>(offsetX);
    const float leftX   = middleX - 0.5f;
    const float rightX  = middleX + 0.5f;
    const float topY    = 0.5f;
    const float bottomY = -0.5f;
    const float depth   = 5.0f;

    std::vector<tcu::Vec3> vertices;

    if (triangles)
    {
        vertices.reserve(3u);
        vertices.emplace_back(middleX, topY, depth);
        vertices.emplace_back(rightX, bottomY, depth);
        vertices.emplace_back(leftX, bottomY, depth);
    }
    else
    {
        vertices.reserve(2u);
        vertices.emplace_back(leftX, bottomY, depth);
        vertices.emplace_back(rightX, topY, depth);
    }

    data.tlas = makeTopLevelAccelerationStructure();
    data.blas = makeBottomLevelAccelerationStructure();

    VkGeometryInstanceFlagsKHR instanceFlags = 0u;
    if (triangles)
        instanceFlags |= VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR;

    data.blas->addGeometry(vertices, triangles, VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR);
    data.blas->createAndBuild(vkd, device, cmdBuffer, alloc);

    de::SharedPtr<BottomLevelAccelerationStructure> blasSharedPtr (data.blas.release());
    data.tlas->setInstanceCount(1u);
    data.tlas->addInstance(blasSharedPtr, identityMatrix3x4, 0u, 0xFFu, 0u, instanceFlags);
    data.tlas->createAndBuild(vkd, device, cmdBuffer, alloc);

    return data;
}

const auto kShaderAccess = (VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT);

struct Resource
{
    VkDescriptorType              descriptorType;
    ResourceType                  resourceType;
    Move<VkSampler>               sampler;
    de::MovePtr<ImageWithMemory>  imageWithMemory;
    Move<VkImageView>             imageView;
    de::MovePtr<BufferWithMemory> bufferWithMemory;
    Move<VkBufferView>            bufferView;
    AccelerationStructureData     asData;
    deUint32                      initialValue;

    Resource (VkDescriptorType descriptorType_, const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, bool useAABBs, deUint32 initialValue_, deUint32 numElements = 1u)
        : descriptorType   (descriptorType_)
        , resourceType     (toResourceType(descriptorType))
        , sampler          ()
        , imageWithMemory  ()
        , imageView        ()
        , bufferWithMemory ()
        , bufferView       ()
        , asData           ()
        , initialValue     (initialValue_)
    {
        if (numElements != 1u)
            DE_ASSERT(resourceType == ResourceType::BUFFER);

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            sampler = makeDefaultSampler(vkd, device);
            break;

        case ResourceType::IMAGE:
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            sampler         = makeDefaultSampler(vkd, device);
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::BUFFER:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc, numElements);
            break;

        case ResourceType::BUFFER_VIEW:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc);
            bufferView       = makeDefaultBufferView(vkd, device, bufferWithMemory->get());
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
                const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
                const auto cmdBuffer    = cmdBufferPtr.get();
                const bool triangles    = !useAABBs;

                beginCommandBuffer(vkd, cmdBuffer);
                asData = makeDefaultAccelerationStructure(vkd, device, cmdBuffer, alloc, triangles, getAccelerationStructureOffsetX(initialValue));
                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        if (imageWithMemory || bufferWithMemory)
        {
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(initialValue));
                const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
                const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto& bufferAlloc = stagingBuffer.getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                deMemcpy(bufferData, &initialValue, sizeof(initialValue));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Transition and copy image.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));

                // Switch image to TRANSFER_DST_OPTIMAL before copying data to it.
                const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    0u, VK_ACCESS_TRANSFER_WRITE_BIT,
                    VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy data to image.
                vkd.cmdCopyBufferToImage(cmdBuffer, stagingBuffer.get(), imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);

                // Switch image to the GENERAL layout before reading or writing to it from shaders.
                const auto postTransferBarrier = makeImageMemoryBarrier(
                    VK_ACCESS_TRANSFER_WRITE_BIT, kShaderAccess,
                    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &postTransferBarrier);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }

            if (bufferWithMemory)
            {
                auto& bufferAlloc = bufferWithMemory->getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                const std::vector<deUint32> bufferValues(numElements, initialValue);
                deMemcpy(bufferData, bufferValues.data(), de::dataSize(bufferValues));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure host writes happen before shader reads/writes. Note: this barrier is not needed in theory.
                const auto hostToShaderBarrier = makeMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, kShaderAccess);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    1u, &hostToShaderBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
        }
    }

    // Remove problematic copy constructor.
    Resource (const Resource&) = delete;

    // Make it movable.
    Resource (Resource&& other) noexcept
        : descriptorType   (other.descriptorType)
        , resourceType     (other.resourceType)
        , sampler          (other.sampler)
        , imageWithMemory  (other.imageWithMemory.release())
        , imageView        (other.imageView)
        , bufferWithMemory (other.bufferWithMemory.release())
        , bufferView       (other.bufferView)
        , asData           (std::move(other.asData))
        , initialValue     (other.initialValue)
    {}

    ~Resource ()
    {}

    WriteInfo makeWriteInfo () const
    {
        using WriteInfoPtr = de::MovePtr<WriteInfo>;

        WriteInfoPtr writeInfo;

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_UNDEFINED };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::IMAGE:
            {
                const VkDescriptorImageInfo imageInfo = { DE_NULL, imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::BUFFER:
            {
                const VkDescriptorBufferInfo bufferInfo = { bufferWithMemory->get(), 0ull, static_cast<VkDeviceSize>(sizeof(deUint32)) };
                writeInfo = WriteInfoPtr(new WriteInfo(bufferInfo));
            }
            break;

        case ResourceType::BUFFER_VIEW:
            writeInfo = WriteInfoPtr(new WriteInfo(bufferView.get()));
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                VkWriteDescriptorSetAccelerationStructureKHR asWrite = initVulkanStructure();
                asWrite.accelerationStructureCount = 1u;
                asWrite.pAccelerationStructures    = asData.tlas.get()->getPtr();
                writeInfo = WriteInfoPtr(new WriteInfo(asWrite));
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        return *writeInfo;
    }

    tcu::Maybe<deUint32> getStoredValue (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 position = 0u) const
    {
        if (position != 0u)
            DE_ASSERT(static_cast<bool>(bufferWithMemory));

        if (imageWithMemory || bufferWithMemory)
        {
            // Command pool and buffer.
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                deUint32                 result;
                const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(result));
                const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_DST_BIT);
                const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto& bufferAlloc = stagingBuffer.getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                // Copy image value to staging buffer.
                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure shader accesses happen before transfers and prepare image for transfer.
                const auto colorResourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    kShaderAccess, VK_ACCESS_TRANSFER_READ_BIT,
                    VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                    imageWithMemory->get(), colorResourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy image contents to staging buffer.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
                vkd.cmdCopyImageToBuffer(cmdBuffer, imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, stagingBuffer.get(), 1u, &copyRegion);

                // Make sure writes are visible from the host.
                const auto postTransferBarrier = makeMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u, &postTransferBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                // Get value from staging buffer.
                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData, sizeof(result));
                return tcu::just(result);
            }

            if (bufferWithMemory)
            {
                auto&    bufferAlloc = bufferWithMemory->getAllocation();
                auto     bufferData  = reinterpret_cast<const char*>(bufferAlloc.getHostPtr());
                deUint32 result;

                // Make sure shader writes are visible from the host.
                beginCommandBuffer(vkd, cmdBuffer);

                const auto shaderToHostBarrier = makeMemoryBarrier(kShaderAccess, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
                    1u, &shaderToHostBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData + sizeof(deUint32) * static_cast<size_t>(position), sizeof(result));
                return tcu::just(result);
            }
        }

        return tcu::Nothing;
    }
};

struct BindingInterface
{
    virtual ~BindingInterface () {}

    // Minimum number of iterations to test all mutable types.
    virtual deUint32 maxTypes () const = 0;

    // Types that will be used by the binding at a given iteration.
    virtual std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const = 0;

    // Binding's main type.
    virtual VkDescriptorType mainType () const = 0;

    // Binding's list of mutable types, if present.
    virtual std::vector<VkDescriptorType> mutableTypes () const = 0;

    // Descriptor count in the binding.
    virtual size_t size () const = 0;

    // Is the binding an array binding?
    virtual bool isArray () const = 0;

    // Is the binding an unbounded array?
    virtual bool isUnbounded () const = 0;

    // Will the binding use different descriptor types in a given iteration?
    virtual bool needsAliasing (deUint32 iteration) const
    {
        const auto typesVec = typesAtIteration(iteration);
        std::set<VkDescriptorType> descTypes(begin(typesVec), end(typesVec));
        return (descTypes.size() > 1u);
    }
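    // E.g. a SingleBinding never aliases, because its typesAtIteration() returns exactly one
    // type; an ArrayBinding aliases in an iteration where its elements land on different
    // descriptor types.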

    // Will the binding need aliasing on any iteration up to a given number?
    virtual bool needsAliasingUpTo (deUint32 numIterations) const
    {
        std::vector<bool> needsAliasingFlags;
        needsAliasingFlags.reserve(numIterations);

        for (deUint32 iter = 0u; iter < numIterations; ++iter)
            needsAliasingFlags.push_back(needsAliasing(iter));

        return std::any_of(begin(needsAliasingFlags), end(needsAliasingFlags), [] (bool f) { return f; });
    }

private:
    virtual bool hasDescriptorType (deUint32 iteration, VkDescriptorType descriptorType) const
    {
        const auto typesVec = typesAtIteration(iteration);
        return (std::find(begin(typesVec), end(typesVec), descriptorType) != end(typesVec));
    }

public:
    // Convert one particular binding to a mutable or non-mutable equivalent binding, returning the equivalent binding.
    virtual de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const = 0;
    virtual de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const = 0;

    // Create resources needed to back up this binding.
    virtual std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const = 0;

    // Get GLSL binding declarations. Note: no array size means no array, if size is < 0 it means unbounded array.
    virtual std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const = 0;

    // Get GLSL statements to check this binding.
    virtual std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const = 0;
};

// Represents a single binding that will be used in a test.
class SingleBinding : public BindingInterface
{
private:
    VkDescriptorType              type;            // The descriptor type.
    std::vector<VkDescriptorType> mutableTypesVec; // The types that will be used for each iteration of a test if mutable.

public:
    SingleBinding (VkDescriptorType type_, std::vector<VkDescriptorType> mutableTypes_)
        : type            (type_)
        , mutableTypesVec (std::move(mutableTypes_))
    {
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        const auto        kBeginForbidden        = begin(kForbiddenMutableTypes);
        const auto        kEndForbidden          = end(kForbiddenMutableTypes);

        // For release builds.
        DE_UNREF(kBeginForbidden);
        DE_UNREF(kEndForbidden);

        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
        {
            DE_ASSERT(mutableTypesVec.empty());
        }
        else
        {
            DE_ASSERT(!mutableTypesVec.empty());
            DE_ASSERT(std::none_of(begin(mutableTypesVec), end(mutableTypesVec),
                                   [&kBeginForbidden, &kEndForbidden] (VkDescriptorType t) -> bool {
                                       return std::find(kBeginForbidden, kEndForbidden, t) != kEndForbidden;
                                   }));
        }
    }

    deUint32 maxTypes () const override
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
            return 1u;
        const auto vecSize = mutableTypesVec.size();
        DE_ASSERT(vecSize <= std::numeric_limits<deUint32>::max());
        return static_cast<deUint32>(vecSize);
    }

    VkDescriptorType typeAtIteration (deUint32 iteration) const
    {
        return typesAtIteration(iteration)[0];
    }

    std::vector<VkDescriptorType> usedTypes () const
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
            return std::vector<VkDescriptorType>(1u, type);
        return mutableTypesVec;
    }

    std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
    {
        const auto typesVec = usedTypes();
        return std::vector<VkDescriptorType>(1u, typesVec[static_cast<size_t>(iteration) % typesVec.size()]);
    }
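    // E.g. with mutable types {SAMPLED_IMAGE, STORAGE_IMAGE, UNIFORM_BUFFER}, iteration 4
    // uses the type at index 4 % 3 == 1, i.e. STORAGE_IMAGE; a non-mutable binding always
    // reports its fixed type.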

    VkDescriptorType mainType () const override
    {
        return type;
    }

    std::vector<VkDescriptorType> mutableTypes () const override
    {
        return mutableTypesVec;
    }

    size_t size () const override
    {
        return size_t{1u};
    }

    bool isArray () const override
    {
        return false;
    }

    bool isUnbounded () const override
    {
        return false;
    }

    de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
    {
        DE_UNREF(iteration);

        static const auto kMandatoryMutableTypeFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
        if (type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
        {
            const auto descFlags = toDescriptorTypeFlags(mutableTypesVec);
            return de::MovePtr<BindingInterface>(new SingleBinding(type, toDescriptorTypeVector(descFlags)));
        }

        // Make sure it's not a forbidden mutable type.
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        DE_ASSERT(std::find(begin(kForbiddenMutableTypes), end(kForbiddenMutableTypes), type) == end(kForbiddenMutableTypes));

        // Convert the binding to mutable using a wider set of descriptor types if possible, including the binding type.
        const auto descFlags = (kMandatoryMutableTypeFlags | toDescriptorTypeFlagBit(type));

        return de::MovePtr<BindingInterface>(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, toDescriptorTypeVector(descFlags)));
    }

    de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
    {
        return de::MovePtr<BindingInterface>(new SingleBinding(typeAtIteration(iteration), std::vector<VkDescriptorType>()));
    }

    std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
    {
        const auto descriptorType = typeAtIteration(iteration);

        std::vector<Resource> resources;
        resources.emplace_back(descriptorType, vkd, device, alloc, qIndex, queue, useAABBs, baseValue);
        return resources;
    }

    std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
    {
        const auto         descriptorType = typeAtIteration(iteration);
        const std::string  arraySuffix    = ((static_cast<bool>(arraySize)) ? ((arraySize.get() < 0) ? "[]" : ("[" + de::toString(arraySize.get()) + "]")) : "");
        const std::string  layoutAttribs  = "set=" + de::toString(setNum) + ", binding=" + de::toString(bindingNum);
        const std::string  bindingSuffix  = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
        const std::string  nameSuffix     = bindingSuffix + arraySuffix;
        std::ostringstream declarations;

        declarations << "layout (";

        switch (descriptorType)
        {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            declarations << layoutAttribs << ") uniform sampler sampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            declarations << layoutAttribs << ") uniform usampler2D combinedSampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            declarations << layoutAttribs << ") uniform utexture2D sampledImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            declarations << layoutAttribs << ") uniform uboBlock" << bindingSuffix << " { uint val; } ubo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            declarations << layoutAttribs << ") buffer sboBlock" << bindingSuffix << " { uint val; } ssbo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            declarations << layoutAttribs << ") uniform utextureBuffer uniformTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            declarations << layoutAttribs << ", r32ui) uniform uimageBuffer storageTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            declarations << layoutAttribs << ", r32ui) uniform uimage2D storageImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            declarations << layoutAttribs << ", input_attachment_index=" << inputAttachmentIdx << ") uniform usubpassInput inputAttachment" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
            declarations << layoutAttribs << ") uniform accelerationStructureEXT accelerationStructure" << nameSuffix;
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        declarations << ";\n";

        return declarations.str();
    }
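    // E.g. for VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER with setNum=0, bindingNum=1 and no array
    // size, this produces:
    //   layout (set=0, binding=1) uniform uboBlock_0_1 { uint val; } ubo_0_1;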
1155
glslCheckStatements(deUint32 iteration,deUint32 setNum,deUint32 bindingNum,deUint32 baseValue_,tcu::Maybe<deUint32> arrayIndex,bool usePushConstants) const1156 std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1157 {
1158 const auto descriptorType = typeAtIteration(iteration);
1159 const std::string bindingSuffix = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
1160
1161 std::string indexSuffix;
1162 if (arrayIndex)
1163 {
1164 indexSuffix = de::toString(arrayIndex.get());
1165 if (usePushConstants)
1166 indexSuffix += " + pc.zero";
1167 indexSuffix = "[" + indexSuffix + "]";
1168 }
1169
1170 const std::string nameSuffix = bindingSuffix + indexSuffix;
1171 const std::string baseValue = toHex(baseValue_);
1172 const std::string externalImageValue = toHex(getExternalSampledImageValue());
1173 const std::string mask = toHex(getStoredValueMask());
1174
1175 std::ostringstream checks;
1176
1177 // Note: all of these depend on an external anyError uint variable.
1178 switch (descriptorType)
1179 {
1180 case VK_DESCRIPTOR_TYPE_SAMPLER:
1181 // Note this depends on an "externalSampledImage" binding.
1182 checks << " {\n";
1183 checks << " uint readValue = texture(usampler2D(externalSampledImage, sampler" << nameSuffix << "), vec2(0, 0)).r;\n";
1184 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1185 checks << " anyError |= ((readValue == " << externalImageValue << ") ? 0u : 1u);\n";
1186 //checks << " anyError = readValue;\n";
1187 checks << " }\n";
1188 break;
1189
1190 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1191 checks << " {\n";
1192 checks << " uint readValue = texture(combinedSampler" << nameSuffix << ", vec2(0, 0)).r;\n";
1193 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1194 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1195 //checks << " anyError = readValue;\n";
1196 checks << " }\n";
1197 break;
1198
1199 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1200 // Note this depends on an "externalSampler" binding.
1201 checks << " {\n";
1202 checks << " uint readValue = texture(usampler2D(sampledImage" << nameSuffix << ", externalSampler), vec2(0, 0)).r;\n";
1203 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1204 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1205 //checks << " anyError = readValue;\n";
1206 checks << " }\n";
1207 break;
1208
1209 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1210 checks << " {\n";
1211 checks << " uint readValue = ubo" << nameSuffix << ".val;\n";
1212 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1213 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1214 //checks << " anyError = readValue;\n";
1215 checks << " }\n";
1216 break;
1217
1218 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1219 checks << " {\n";
1220 checks << " uint readValue = ssbo" << nameSuffix << ".val;\n";
1221 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1222 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1223 //checks << " anyError = readValue;\n";
1224 // Check writes.
1225 checks << " ssbo" << nameSuffix << ".val = (readValue | " << mask << ");\n";
1226 checks << " }\n";
1227 break;
1228
1229 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1230 checks << " {\n";
1231 checks << " uint readValue = texelFetch(uniformTexel" << nameSuffix << ", 0).x;\n";
1232 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1233 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1234 //checks << " anyError = readValue;\n";
1235 checks << " }\n";
1236 break;
1237
1238 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1239 checks << " {\n";
1240 checks << " uint readValue = imageLoad(storageTexel" << nameSuffix << ", 0).x;\n";
1241 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1242 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1243 //checks << " anyError = readValue;\n";
1244 checks << " readValue |= " << mask << ";\n";
1245 // Check writes.
1246 checks << " imageStore(storageTexel" << nameSuffix << ", 0, uvec4(readValue, 0, 0, 0));\n";
1247 checks << " }\n";
1248 break;
1249
1250 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1251 checks << " {\n";
1252 checks << " uint readValue = imageLoad(storageImage" << nameSuffix << ", ivec2(0, 0)).x;\n";
1253 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1254 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1255 //checks << " anyError = readValue;\n";
1256 checks << " readValue |= " << mask << ";\n";
1257 // Check writes.
1258 checks << " imageStore(storageImage" << nameSuffix << ", ivec2(0, 0), uvec4(readValue, 0, 0, 0));\n";
1259 checks << " }\n";
1260 break;
1261
1262 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1263 checks << " {\n";
1264 checks << " uint readValue = subpassLoad(inputAttachment" << nameSuffix << ").x;\n";
1265 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1266 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1267 //checks << " anyError = readValue;\n";
1268 checks << " }\n";
1269 break;
1270
1271 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
1272 checks << " {\n";
1273 checks << " const uint cullMask = 0xFF;\n";
1274 checks << " const vec3 origin = vec3(" << getAccelerationStructureOffsetX(baseValue_) << ".0, 0.0, 0.0);\n";
1275 checks << " const vec3 direction = vec3(0.0, 0.0, 1.0);\n";
1276 checks << " const float tmin = 1.0;\n";
1277 checks << " const float tmax = 10.0;\n";
1278 checks << " uint candidateFound = 0u;\n";
1279 checks << " rayQueryEXT rq;\n";
1280 checks << " rayQueryInitializeEXT(rq, accelerationStructure" << nameSuffix << ", gl_RayFlagsNoneEXT, cullMask, origin, tmin, direction, tmax);\n";
1281 checks << " while (rayQueryProceedEXT(rq)) {\n";
1282 checks << " const uint candidateType = rayQueryGetIntersectionTypeEXT(rq, false);\n";
1283 checks << " if (candidateType == gl_RayQueryCandidateIntersectionTriangleEXT || candidateType == gl_RayQueryCandidateIntersectionAABBEXT) {\n";
1284 checks << " candidateFound = 1u;\n";
1285 checks << " }\n";
1286 checks << " }\n";
1287 checks << " anyError |= ((candidateFound == 1u) ? 0u : 1u);\n";
1288 checks << " }\n";
1289 break;
1290
1291 default:
1292 DE_ASSERT(false);
1293 break;
1294 }
1295
1296 return checks.str();
1297 }
1298 };
1299
1300 // Represents an array of bindings. Individual bindings are stored as SingleBindings because each one of them may take a different
1301 // type in each iteration (i.e. they can all have different descriptor type vectors).
1302 class ArrayBinding : public BindingInterface
1303 {
1304 private:
1305 bool unbounded;
1306 std::vector<SingleBinding> bindings;
1307
1308 public:
ArrayBinding(bool unbounded_,std::vector<SingleBinding> bindings_)1309 ArrayBinding (bool unbounded_, std::vector<SingleBinding> bindings_)
1310 : unbounded (unbounded_)
1311 , bindings (std::move(bindings_))
1312 {
1313 // We need to check all single bindings have the same effective type, even if mutable descriptors have different orders.
1314 DE_ASSERT(!bindings.empty());
1315
1316 std::set<VkDescriptorType> basicTypes;
1317 std::set<DescriptorTypeFlags> bindingTypes;
1318
1319 for (const auto& b : bindings)
1320 {
1321 basicTypes.insert(b.mainType());
1322 bindingTypes.insert(toDescriptorTypeFlags(b.usedTypes()));
1323 }
1324
1325 DE_ASSERT(basicTypes.size() == 1u);
1326 DE_ASSERT(bindingTypes.size() == 1u);
1327
1328 // For release builds.
1329 DE_UNREF(basicTypes);
1330 DE_UNREF(bindingTypes);
1331 }
1332
maxTypes() const1333 deUint32 maxTypes () const override
1334 {
1335 // Each binding may have the same effective type but a different number of iterations due to repeated types.
1336 std::vector<size_t> bindingSizes;
1337 bindingSizes.reserve(bindings.size());
1338
1339 std::transform(begin(bindings), end(bindings), std::back_inserter(bindingSizes),
1340 [] (const SingleBinding& b) { return b.usedTypes().size(); });
1341
1342 const auto maxElement = std::max_element(begin(bindingSizes), end(bindingSizes));
1343 DE_ASSERT(maxElement != end(bindingSizes));
1344 DE_ASSERT(*maxElement <= std::numeric_limits<deUint32>::max());
1345 return static_cast<deUint32>(*maxElement);
1346 }
1347
typesAtIteration(deUint32 iteration) const1348 std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
1349 {
1350 std::vector<VkDescriptorType> result;
1351 result.reserve(bindings.size());
1352
1353 for (const auto& b : bindings)
1354 result.push_back(b.typeAtIteration(iteration));
1355
1356 return result;
1357 }
1358
mainType() const1359 VkDescriptorType mainType () const override
1360 {
1361 return bindings[0].mainType();
1362 }
1363
mutableTypes() const1364 std::vector<VkDescriptorType> mutableTypes () const override
1365 {
1366 return bindings[0].mutableTypes();
1367 }
1368
size() const1369 size_t size () const override
1370 {
1371 return bindings.size();
1372 }
1373
isArray() const1374 bool isArray () const override
1375 {
1376 return true;
1377 }
1378
isUnbounded() const1379 bool isUnbounded () const override
1380 {
1381 return unbounded;
1382 }
1383
toMutable(deUint32 iteration) const1384 de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
1385 {
1386 // Replicate the first binding once converted, as all are equivalent.
1387 const auto firstBindingPtr = bindings[0].toMutable(iteration);
1388 const auto firstBinding = *dynamic_cast<SingleBinding*>(firstBindingPtr.get());
1389 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1390
1391 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1392 }
1393
toNonMutable(deUint32 iteration) const1394 de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
1395 {
1396 // Make sure this binding can be converted to nonmutable for a given iteration.
1397 DE_ASSERT(!needsAliasing(iteration));
1398
1399 // We could use each SingleBinding's toNonMutable(), but this is the same.
1400 const auto descType = bindings[0].typeAtIteration(iteration);
1401 const SingleBinding firstBinding (descType, std::vector<VkDescriptorType>());
1402 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1403
1404 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1405 }
1406
1407 std::vector<Resource> createResources (
1408 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
1409 deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
1410 {
1411 std::vector<Resource> resources;
1412 const auto numBindings = static_cast<deUint32>(bindings.size());
1413
1414 for (deUint32 i = 0u; i < numBindings; ++i)
1415 {
1416 auto resourceVec = bindings[i].createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, baseValue + i);
1417 resources.emplace_back(std::move(resourceVec[0]));
1418 }
1419
1420 return resources;
1421 }
1422
1423 // We will ignore the array size parameter.
1424 std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
1425 {
1426 const auto descriptorCount = bindings.size();
1427 const auto arraySizeVal = (isUnbounded() ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(descriptorCount)));
1428
1429 DE_UNREF(arraySize);
1430 DE_ASSERT(descriptorCount < static_cast<size_t>(std::numeric_limits<deInt32>::max()));
1431
1432 // Maybe a single declaration is enough.
1433 if (!needsAliasing(iteration))
1434 return bindings[0].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1435
1436 // Aliasing needed. Avoid reusing types.
1437 const auto descriptorTypes = typesAtIteration(iteration);
1438 std::set<VkDescriptorType> usedTypes;
1439 std::ostringstream declarations;
1440
1441 for (size_t descriptorIdx = 0u; descriptorIdx < descriptorCount; ++descriptorIdx)
1442 {
1443 const auto& descriptorType = descriptorTypes[descriptorIdx];
1444 if (usedTypes.count(descriptorType) > 0)
1445 continue;
1446
1447 usedTypes.insert(descriptorType);
1448 declarations << bindings[descriptorIdx].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1449 }
1450
1451 return declarations.str();
1452 }
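// Illustrative sketch of the aliased declarations emitted above. The identifier
// names and exact image types are hypothetical (the real ones come from
// SingleBinding::glslDeclarations, defined earlier): when an array of mutable
// descriptors resolves to two distinct concrete types within one iteration, the
// same set/binding pair is declared once per type, e.g.:
//
//   layout (set=0, binding=1) uniform utexture2D sampledImg_1[4];
//   layout (set=0, binding=1, r32ui) uniform uimage2D storageImg_1[4];
//
// Aliasing the binding like this is legal because the binding is created with
// VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT and each array element is only
// accessed through the declaration matching its actual descriptor type.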
1453
1454 std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1455 {
1456 DE_ASSERT(!arrayIndex);
1457 DE_UNREF(arrayIndex); // For release builds.
1458
1459 std::ostringstream checks;
1460 const auto numDescriptors = static_cast<deUint32>(bindings.size());
1461
1462 for (deUint32 descriptorIdx = 0u; descriptorIdx < numDescriptors; ++descriptorIdx)
1463 {
1464 const auto& binding = bindings[descriptorIdx];
1465 checks << binding.glslCheckStatements(iteration, setNum, bindingNum, baseValue_ + descriptorIdx, tcu::just(descriptorIdx), usePushConstants);
1466 }
1467
1468 return checks.str();
1469 }
1470 };
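// Illustrative example (not used by the tests; assumes each SingleBinding steps
// through its used-type list in declaration order): two SingleBindings that are
// both mutable over {UNIFORM_BUFFER, STORAGE_IMAGE}, but store the list in
// opposite orders, form a valid ArrayBinding because the effective type is the
// same. Per iteration they would yield:
//
//   typesAtIteration(0) == { UNIFORM_BUFFER, STORAGE_IMAGE }
//   typesAtIteration(1) == { STORAGE_IMAGE, UNIFORM_BUFFER }
//
// so elements of the same array may resolve to different concrete types within a
// single iteration, which is exactly what triggers the aliased declarations above.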
1471
1472 class DescriptorSet;
1473
1474 using DescriptorSetPtr = de::SharedPtr<DescriptorSet>;
1475
1476 class DescriptorSet
1477 {
1478 public:
1479 using BindingInterfacePtr = de::MovePtr<BindingInterface>;
1480 using BindingPtrVector = std::vector<BindingInterfacePtr>;
1481
1482 private:
1483 BindingPtrVector bindings;
1484
1485 public:
1486 explicit DescriptorSet (BindingPtrVector& bindings_)
1487 : bindings(std::move(bindings_))
1488 {
1489 DE_ASSERT(!bindings.empty());
1490 }
1491
1492 size_t numBindings () const
1493 {
1494 return bindings.size();
1495 }
1496
1497 const BindingInterface* getBinding (size_t bindingIdx) const
1498 {
1499 return bindings.at(bindingIdx).get();
1500 }
1501
1502 // Maximum number of descriptor types used by any binding in the set.
1503 deUint32 maxTypes () const
1504 {
1505 std::vector<deUint32> maxSizes;
1506 maxSizes.reserve(bindings.size());
1507
1508 std::transform(begin(bindings), end(bindings), std::back_inserter(maxSizes),
1509 [] (const BindingInterfacePtr& b) { return b->maxTypes(); });
1510
1511 const auto maxElement = std::max_element(begin(maxSizes), end(maxSizes));
1512 DE_ASSERT(maxElement != end(maxSizes));
1513 return *maxElement;
1514 }
1515
1516 // Create another descriptor set that can be the source for copies when setting descriptor values.
1517 DescriptorSetPtr genSourceSet (SourceSetStrategy strategy, deUint32 iteration) const
1518 {
1519 BindingPtrVector newBindings;
1520 for (const auto& b : bindings)
1521 {
1522 if (strategy == SourceSetStrategy::MUTABLE)
1523 newBindings.push_back(b->toMutable(iteration));
1524 else
1525 newBindings.push_back(b->toNonMutable(iteration));
1526 }
1527
1528 return DescriptorSetPtr(new DescriptorSet(newBindings));
1529 }
1530
1531 // Makes a descriptor pool that can be used when allocating descriptors for this set.
1532 Move<VkDescriptorPool> makeDescriptorPool (const DeviceInterface& vkd, VkDevice device, PoolMutableStrategy strategy, VkDescriptorPoolCreateFlags flags) const
1533 {
1534 std::vector<VkDescriptorPoolSize> poolSizes;
1535 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1536 std::vector<VkMutableDescriptorTypeListEXT> mutableTypeLists;
1537
1538 // Make vector element addresses stable.
1539 const auto bindingCount = numBindings();
1540 poolSizes.reserve(bindingCount);
1541 mutableTypesVec.reserve(bindingCount);
1542 mutableTypeLists.reserve(bindingCount);
1543
1544 for (const auto& b : bindings)
1545 {
1546 const auto mainType = b->mainType();
1547 const VkDescriptorPoolSize poolSize = {
1548 mainType,
1549 static_cast<deUint32>(b->size()),
1550 };
1551 poolSizes.push_back(poolSize);
1552
1553 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1554 {
1555 if (mainType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
1556 {
1557 if (strategy == PoolMutableStrategy::KEEP_TYPES)
1558 {
1559 mutableTypesVec.emplace_back(b->mutableTypes());
1560 }
1561 else
1562 {
1563 // Expand the type list with the mandatory types.
1564 static const auto mandatoryTypesFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
1565 const auto bindingTypes = toDescriptorTypeVector(mandatoryTypesFlags | toDescriptorTypeFlags(b->mutableTypes()));
1566
1567 mutableTypesVec.emplace_back(bindingTypes);
1568 }
1569
1570 const auto& lastVec = mutableTypesVec.back();
1571 const VkMutableDescriptorTypeListEXT typeList = { static_cast<deUint32>(lastVec.size()), de::dataOrNull(lastVec) };
1572 mutableTypeLists.push_back(typeList);
1573 }
1574 else
1575 {
1576 const VkMutableDescriptorTypeListEXT typeList = { 0u, nullptr };
1577 mutableTypeLists.push_back(typeList);
1578 }
1579 }
1580 else if (strategy == PoolMutableStrategy::NO_TYPES)
1581 ; // Do nothing, we will not use any type list.
1582 else
1583 DE_ASSERT(false);
1584 }
1585
1586 VkDescriptorPoolCreateInfo poolCreateInfo = initVulkanStructure();
1587
1588 poolCreateInfo.maxSets = 1u;
1589 poolCreateInfo.flags = flags;
1590 poolCreateInfo.poolSizeCount = static_cast<deUint32>(poolSizes.size());
1591 poolCreateInfo.pPoolSizes = de::dataOrNull(poolSizes);
1592
1593 VkMutableDescriptorTypeCreateInfoEXT mutableInfo = initVulkanStructure();
1594
1595 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1596 {
1597 mutableInfo.mutableDescriptorTypeListCount = static_cast<deUint32>(mutableTypeLists.size());
1598 mutableInfo.pMutableDescriptorTypeLists = de::dataOrNull(mutableTypeLists);
1599 poolCreateInfo.pNext = &mutableInfo;
1600 }
1601
1602 return createDescriptorPool(vkd, device, &poolCreateInfo);
1603 }
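// For reference, the create info chain assembled above looks like this (sketch,
// for KEEP_TYPES or EXPAND_TYPES):
//
//   VkDescriptorPoolCreateInfo
//     .pNext -> VkMutableDescriptorTypeCreateInfoEXT
//                 .mutableDescriptorTypeListCount = number of bindings
//                 .pMutableDescriptorTypeLists[i] = allowed types for binding i
//                                                   (empty list for non-mutable bindings)
//
// With NO_TYPES, no VkMutableDescriptorTypeCreateInfoEXT is chained at all,
// exercising the case where the implementation must size the pool conservatively
// for whatever types its mutable descriptors may end up holding.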
1604
1605 private:
1606 // Building the descriptor set layout create info structure is cumbersome, so we'll reuse the same procedure to check support
1607 // and create the layout. This structure contains the result. "supported" is created as an enum to avoid the Move<> to bool
1608 // conversion cast in the constructors.
1609 struct DescriptorSetLayoutResult
1610 {
1611 enum class LayoutSupported { NO = 0, YES };
1612
1613 LayoutSupported supported;
1614 Move<VkDescriptorSetLayout> layout;
1615
1616 explicit DescriptorSetLayoutResult (Move<VkDescriptorSetLayout>&& layout_)
1617 : supported (LayoutSupported::YES)
1618 , layout (layout_)
1619 {}
1620
1621 explicit DescriptorSetLayoutResult (LayoutSupported supported_)
1622 : supported (supported_)
1623 , layout ()
1624 {}
1625 };
1626
1627 DescriptorSetLayoutResult makeOrCheckDescriptorSetLayout (bool checkOnly, const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1628 {
1629 const auto numIterations = maxTypes();
1630 std::vector<VkDescriptorSetLayoutBinding> bindingsVec;
1631 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1632 std::vector<VkMutableDescriptorTypeListEXT> mutableTypeLists;
1633
1634 // Make vector element addresses stable.
1635 const auto bindingCount = numBindings();
1636 bindingsVec.reserve(bindingCount);
1637 mutableTypesVec.reserve(bindingCount);
1638 mutableTypeLists.reserve(bindingCount);
1639
1640 for (size_t bindingIdx = 0u; bindingIdx < bindings.size(); ++bindingIdx)
1641 {
1642 const auto& binding = bindings[bindingIdx];
1643 const auto mainType = binding->mainType();
1644
1645 const VkDescriptorSetLayoutBinding layoutBinding = {
1646 static_cast<deUint32>(bindingIdx), // deUint32 binding;
1647 mainType, // VkDescriptorType descriptorType;
1648 static_cast<deUint32>(binding->size()), // deUint32 descriptorCount;
1649 stageFlags, // VkShaderStageFlags stageFlags;
1650 nullptr, // const VkSampler* pImmutableSamplers;
1651 };
1652 bindingsVec.push_back(layoutBinding);
1653
1654 // This list may be empty for non-mutable types, which is fine.
1655 mutableTypesVec.push_back(binding->mutableTypes());
1656 const auto& lastVec = mutableTypesVec.back();
1657
1658 const VkMutableDescriptorTypeListEXT typeList = {
1659 static_cast<deUint32>(lastVec.size()), // deUint32 descriptorTypeCount;
1660 de::dataOrNull(lastVec), // const VkDescriptorType* pDescriptorTypes;
1661 };
1662 mutableTypeLists.push_back(typeList);
1663 }
1664
1665 // Make sure to include the variable-descriptor-count and/or update-after-bind binding flags.
1666 const bool updateAfterBind = ((createFlags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) != 0u);
1667 bool lastIsUnbounded = false;
1668 bool aliasingNeeded = false;
1669 std::vector<bool> bindingNeedsAliasing(bindings.size(), false);
1670
1671 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1672 {
1673 if (bindingIdx < bindings.size() - 1)
1674 DE_ASSERT(!bindings[bindingIdx]->isUnbounded());
1675 else
1676 lastIsUnbounded = bindings[bindingIdx]->isUnbounded();
1677
1678 if (bindings[bindingIdx]->needsAliasingUpTo(numIterations))
1679 {
1680 bindingNeedsAliasing[bindingIdx] = true;
1681 aliasingNeeded = true;
1682 }
1683 }
1684
1685 using FlagsCreateInfoPtr = de::MovePtr<VkDescriptorSetLayoutBindingFlagsCreateInfo>;
1686 using BindingFlagsVecPtr = de::MovePtr<std::vector<VkDescriptorBindingFlags>>;
1687
1688 FlagsCreateInfoPtr flagsCreateInfo;
1689 BindingFlagsVecPtr bindingFlagsVec;
1690
1691 if (updateAfterBind || lastIsUnbounded || aliasingNeeded)
1692 {
1693 flagsCreateInfo = FlagsCreateInfoPtr(new VkDescriptorSetLayoutBindingFlagsCreateInfo);
1694 *flagsCreateInfo = initVulkanStructure();
1695
1696 bindingFlagsVec = BindingFlagsVecPtr(new std::vector<VkDescriptorBindingFlags>(bindingsVec.size(), (updateAfterBind ? VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT : 0)));
1697 if (lastIsUnbounded)
1698 bindingFlagsVec->back() |= VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT;
1699
1700 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1701 {
1702 if (bindingNeedsAliasing[bindingIdx])
1703 bindingFlagsVec->at(bindingIdx) |= VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
1704 }
1705
1706 flagsCreateInfo->bindingCount = static_cast<deUint32>(bindingFlagsVec->size());
1707 flagsCreateInfo->pBindingFlags = de::dataOrNull(*bindingFlagsVec);
1708 }
1709
1710 const VkMutableDescriptorTypeCreateInfoEXT createInfoMutable = {
1711 VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
1712 flagsCreateInfo.get(),
1713 static_cast<deUint32>(mutableTypeLists.size()),
1714 de::dataOrNull(mutableTypeLists),
1715 };
1716
1717 const VkDescriptorSetLayoutCreateInfo layoutCreateInfo = {
1718 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, // VkStructureType sType;
1719 &createInfoMutable, // const void* pNext;
1720 createFlags, // VkDescriptorSetLayoutCreateFlags flags;
1721 static_cast<deUint32>(bindingsVec.size()), // deUint32 bindingCount;
1722 de::dataOrNull(bindingsVec), // const VkDescriptorSetLayoutBinding* pBindings;
1723 };
1724
1725 if (checkOnly)
1726 {
1727 VkDescriptorSetLayoutSupport support = initVulkanStructure();
1728 vkd.getDescriptorSetLayoutSupport(device, &layoutCreateInfo, &support);
1729 DescriptorSetLayoutResult result((support.supported == VK_TRUE) ? DescriptorSetLayoutResult::LayoutSupported::YES
1730 : DescriptorSetLayoutResult::LayoutSupported::NO);
1731 return result;
1732 }
1733 else
1734 {
1735 DescriptorSetLayoutResult result(createDescriptorSetLayout(vkd, device, &layoutCreateInfo));
1736 return result;
1737 }
1738 }
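// Summary of the pNext chain assembled above, from outermost to innermost:
//
//   VkDescriptorSetLayoutCreateInfo
//     -> VkMutableDescriptorTypeCreateInfoEXT         (one type list per binding;
//        empty lists for non-mutable bindings)
//        -> VkDescriptorSetLayoutBindingFlagsCreateInfo (only chained when
//           update-after-bind, a variable-count last binding or partially-bound
//           aliasing is involved; otherwise its pointer is null)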
1739
1740 public:
1741 Move<VkDescriptorSetLayout> makeDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1742 {
1743 return makeOrCheckDescriptorSetLayout(false /*checkOnly*/, vkd, device, stageFlags, createFlags).layout;
1744 }
1745
1746 bool checkDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1747 {
1748 return (makeOrCheckDescriptorSetLayout(true /*checkOnly*/, vkd, device, stageFlags, createFlags).supported == DescriptorSetLayoutResult::LayoutSupported::YES);
1749 }
1750
1751 size_t numDescriptors () const
1752 {
1753 size_t total = 0;
1754 for (const auto& b : bindings)
1755 total += b->size();
1756 return total;
1757 }
1758
1759 std::vector<Resource> createResources (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 iteration, bool useAABBs) const
1760 {
1761 // Create resources for each binding.
1762 std::vector<Resource> result;
1763 result.reserve(numDescriptors());
1764
1765 const auto bindingsCount = static_cast<deUint32>(bindings.size());
1766
1767 for (deUint32 bindingIdx = 0u; bindingIdx < bindingsCount; ++bindingIdx)
1768 {
1769 const auto& binding = bindings[bindingIdx];
1770 auto bindingResources = binding->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, getDescriptorNumericValue(iteration, bindingIdx));
1771
1772 for (auto& resource : bindingResources)
1773 result.emplace_back(std::move(resource));
1774 }
1775
1776 return result;
1777 }
1778
1779 // Updates a descriptor set with the given resources. Note: the set must have been created with a layout that's compatible with this object.
1780 void updateDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet set, deUint32 iteration, const std::vector<Resource>& resources) const
1781 {
1782 // Make sure the number of resources is correct.
1783 const auto numResources = resources.size();
1784 DE_ASSERT(numDescriptors() == numResources);
1785
1786 std::vector<VkWriteDescriptorSet> descriptorWrites;
1787 descriptorWrites.reserve(numResources);
1788
1789 std::vector<VkDescriptorImageInfo> imageInfoVec;
1790 std::vector<VkDescriptorBufferInfo> bufferInfoVec;
1791 std::vector<VkBufferView> bufferViewVec;
1792 std::vector<VkWriteDescriptorSetAccelerationStructureKHR> asWriteVec;
1793 size_t resourceIdx = 0;
1794
1795 // We'll be storing pointers to elements of these vectors as we're appending elements, so we need their addresses to be stable.
1796 imageInfoVec.reserve(numResources);
1797 bufferInfoVec.reserve(numResources);
1798 bufferViewVec.reserve(numResources);
1799 asWriteVec.reserve(numResources);
1800
1801 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1802 {
1803 const auto& binding = bindings[bindingIdx];
1804 const auto descriptorTypes = binding->typesAtIteration(iteration);
1805
1806 for (size_t descriptorIdx = 0; descriptorIdx < binding->size(); ++descriptorIdx)
1807 {
1808 // Make sure the resource type matches the expected value.
1809 const auto& resource = resources[resourceIdx];
1810 const auto& descriptorType = descriptorTypes[descriptorIdx];
1811
1812 DE_ASSERT(resource.descriptorType == descriptorType);
1813
1814 // Obtain the descriptor write info for the resource.
1815 const auto writeInfo = resource.makeWriteInfo();
1816
1817 switch (writeInfo.writeType)
1818 {
1819 case WriteType::IMAGE_INFO: imageInfoVec.push_back(writeInfo.imageInfo); break;
1820 case WriteType::BUFFER_INFO: bufferInfoVec.push_back(writeInfo.bufferInfo); break;
1821 case WriteType::BUFFER_VIEW: bufferViewVec.push_back(writeInfo.bufferView); break;
1822 case WriteType::ACCELERATION_STRUCTURE_INFO: asWriteVec.push_back(writeInfo.asInfo); break;
1823 default: DE_ASSERT(false); break;
1824 }
1825
1826 // Add a new VkWriteDescriptorSet struct or extend the last one with more info. This helps us exercise different implementation code paths.
1827 bool extended = false;
1828
1829 if (!descriptorWrites.empty() && descriptorIdx > 0)
1830 {
1831 auto& last = descriptorWrites.back();
1832 if (last.dstSet == set /* this should always be true */ &&
1833 last.dstBinding == bindingIdx && (last.dstArrayElement + last.descriptorCount) == descriptorIdx &&
1834 last.descriptorType == descriptorType &&
1835 writeInfo.writeType != WriteType::ACCELERATION_STRUCTURE_INFO)
1836 {
1837 // The new write should be in the same vector (imageInfoVec, bufferInfoVec or bufferViewVec) so increasing the count works.
1838 ++last.descriptorCount;
1839 extended = true;
1840 }
1841 }
1842
1843 if (!extended)
1844 {
1845 const VkWriteDescriptorSet write = {
1846 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1847 ((writeInfo.writeType == WriteType::ACCELERATION_STRUCTURE_INFO) ? &asWriteVec.back() : nullptr),
1848 set,
1849 static_cast<deUint32>(bindingIdx),
1850 static_cast<deUint32>(descriptorIdx),
1851 1u,
1852 descriptorType,
1853 (writeInfo.writeType == WriteType::IMAGE_INFO ? &imageInfoVec.back() : nullptr),
1854 (writeInfo.writeType == WriteType::BUFFER_INFO ? &bufferInfoVec.back() : nullptr),
1855 (writeInfo.writeType == WriteType::BUFFER_VIEW ? &bufferViewVec.back() : nullptr),
1856 };
1857 descriptorWrites.push_back(write);
1858 }
1859
1860 ++resourceIdx;
1861 }
1862 }
1863
1864 // Finally, update descriptor set with all the writes.
1865 vkd.updateDescriptorSets(device, static_cast<deUint32>(descriptorWrites.size()), de::dataOrNull(descriptorWrites), 0u, nullptr);
1866 }
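// Example of the write consolidation above (illustrative): if binding 0 holds
// three consecutive storage buffer descriptors, the first one creates
//
//   { dstBinding=0, dstArrayElement=0, descriptorCount=1, pBufferInfo=&bufferInfoVec[k] }
//
// and the next two simply bump descriptorCount to 3. This is valid because their
// VkDescriptorBufferInfo entries were appended contiguously to bufferInfoVec,
// whose capacity was reserved up front so element addresses stay stable.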
1867
1868 // Copies between descriptor sets. Both sets must have been allocated with layouts compatible with this object.
1869 void copyDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet srcSet, VkDescriptorSet dstSet) const
1870 {
1871 std::vector<VkCopyDescriptorSet> copies;
1872
1873 for (size_t bindingIdx = 0; bindingIdx < numBindings(); ++bindingIdx)
1874 {
1875 const auto& binding = getBinding(bindingIdx);
1876 const auto bindingNumber = static_cast<deUint32>(bindingIdx);
1877 const auto descriptorCount = static_cast<deUint32>(binding->size());
1878
1879 const VkCopyDescriptorSet copy =
1880 {
1881 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
1882 nullptr,
1883 // set, binding, array element.
1884 srcSet, bindingNumber, 0u,
1885 dstSet, bindingNumber, 0u,
1886 descriptorCount,
1887 };
1888
1889 copies.push_back(copy);
1890 }
1891
1892 vkd.updateDescriptorSets(device, 0u, nullptr, static_cast<deUint32>(copies.size()), de::dataOrNull(copies));
1893 }
1894
1895 // Does any binding in the set need aliasing in a given iteration?
1896 bool needsAliasing (deUint32 iteration) const
1897 {
1898 std::vector<bool> aliasingNeededFlags;
1899 aliasingNeededFlags.reserve(bindings.size());
1900
1901 std::transform(begin(bindings), end(bindings), std::back_inserter(aliasingNeededFlags),
1902 [iteration] (const BindingInterfacePtr& b) { return b->needsAliasing(iteration); });
1903 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1904 }
1905
1906 // Does any binding in the set need aliasing in any iteration?
1907 bool needsAnyAliasing () const
1908 {
1909 const auto numIterations = maxTypes();
1910 std::vector<bool> aliasingNeededFlags (numIterations, false);
1911
1912 for (deUint32 iteration = 0; iteration < numIterations; ++iteration)
1913 aliasingNeededFlags[iteration] = needsAliasing(iteration);
1914
1915 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1916 }
1917
1918 // Is the last binding an unbounded array?
1919 bool lastBindingIsUnbounded () const
1920 {
1921 if (bindings.empty())
1922 return false;
1923 return bindings.back()->isUnbounded();
1924 }
1925
1926 // Get the variable descriptor count for the last binding if any.
1927 tcu::Maybe<deUint32> getVariableDescriptorCount () const
1928 {
1929 if (lastBindingIsUnbounded())
1930 return tcu::just(static_cast<deUint32>(bindings.back()->size()));
1931 return tcu::Nothing;
1932 }
1933
1934 // Check if the set contains a descriptor type of the given type at the given iteration.
1935 bool containsTypeAtIteration (VkDescriptorType descriptorType, deUint32 iteration) const
1936 {
1937 return std::any_of(begin(bindings), end(bindings),
1938 [descriptorType, iteration] (const BindingInterfacePtr& b) {
1939 const auto types = b->typesAtIteration(iteration);
1940 return de::contains(begin(types), end(types), descriptorType);
1941 });
1942 }
1943
1944 // Is any binding an array?
1945 bool hasArrays () const
1946 {
1947 return std::any_of(begin(bindings), end(bindings), [] (const BindingInterfacePtr& b) { return b->isArray(); });
1948 }
1949 };
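// Minimal construction sketch (hypothetical binding contents; SingleBinding is
// defined earlier in this file):
//
//   DescriptorSet::BindingPtrVector bindings;
//   bindings.push_back(DescriptorSet::BindingInterfacePtr(
//       new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
//           { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE })));
//   DescriptorSetPtr set (new DescriptorSet(bindings)); // moves from "bindings"
//
// Here set->maxTypes() would be 2, and the test would run one iteration per
// mutable type, reusing the same layout with a different concrete type each time.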
1950
1951 enum class UpdateType
1952 {
1953 WRITE = 0,
1954 COPY,
1955 };
1956
1957 enum class SourceSetType
1958 {
1959 NORMAL = 0,
1960 HOST_ONLY,
1961 NO_SOURCE,
1962 };
1963
1964 enum class UpdateMoment
1965 {
1966 NORMAL = 0,
1967 UPDATE_AFTER_BIND,
1968 };
1969
1970 enum class TestingStage
1971 {
1972 COMPUTE = 0,
1973 VERTEX,
1974 TESS_EVAL,
1975 TESS_CONTROL,
1976 GEOMETRY,
1977 FRAGMENT,
1978 RAY_GEN,
1979 INTERSECTION,
1980 ANY_HIT,
1981 CLOSEST_HIT,
1982 MISS,
1983 CALLABLE,
1984 };
1985
1986 enum class ArrayAccessType
1987 {
1988 CONSTANT = 0,
1989 PUSH_CONSTANT,
1990 NO_ARRAY,
1991 };
1992
1993 // Are we testing a ray tracing pipeline stage?
1994 bool isRayTracingStage (TestingStage stage)
1995 {
1996 switch (stage)
1997 {
1998 case TestingStage::RAY_GEN:
1999 case TestingStage::INTERSECTION:
2000 case TestingStage::ANY_HIT:
2001 case TestingStage::CLOSEST_HIT:
2002 case TestingStage::MISS:
2003 case TestingStage::CALLABLE:
2004 return true;
2005 default:
2006 break;
2007 }
2008
2009 return false;
2010 }
2011
2012 struct TestParams
2013 {
2014 DescriptorSetPtr descriptorSet;
2015 UpdateType updateType;
2016 SourceSetStrategy sourceSetStrategy;
2017 SourceSetType sourceSetType;
2018 PoolMutableStrategy poolMutableStrategy;
2019 UpdateMoment updateMoment;
2020 ArrayAccessType arrayAccessType;
2021 TestingStage testingStage;
2022
2023 VkShaderStageFlags getStageFlags () const
2024 {
2025 VkShaderStageFlags flags = 0u;
2026
2027 switch (testingStage)
2028 {
2029 case TestingStage::COMPUTE: flags |= VK_SHADER_STAGE_COMPUTE_BIT; break;
2030 case TestingStage::VERTEX: flags |= VK_SHADER_STAGE_VERTEX_BIT; break;
2031 case TestingStage::TESS_EVAL: flags |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; break;
2032 case TestingStage::TESS_CONTROL: flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; break;
2033 case TestingStage::GEOMETRY: flags |= VK_SHADER_STAGE_GEOMETRY_BIT; break;
2034 case TestingStage::FRAGMENT: flags |= VK_SHADER_STAGE_FRAGMENT_BIT; break;
2035 case TestingStage::RAY_GEN: flags |= VK_SHADER_STAGE_RAYGEN_BIT_KHR; break;
2036 case TestingStage::INTERSECTION: flags |= VK_SHADER_STAGE_INTERSECTION_BIT_KHR; break;
2037 case TestingStage::ANY_HIT: flags |= VK_SHADER_STAGE_ANY_HIT_BIT_KHR; break;
2038 case TestingStage::CLOSEST_HIT: flags |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR; break;
2039 case TestingStage::MISS: flags |= VK_SHADER_STAGE_MISS_BIT_KHR; break;
2040 case TestingStage::CALLABLE: flags |= VK_SHADER_STAGE_CALLABLE_BIT_KHR; break;
2041 default:
2042 DE_ASSERT(false);
2043 break;
2044 }
2045
2046 return flags;
2047 }
2048
2049 VkPipelineStageFlags getPipelineWriteStage () const
2050 {
2051 VkPipelineStageFlags flags = 0u;
2052
2053 switch (testingStage)
2054 {
2055 case TestingStage::COMPUTE: flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break;
2056 case TestingStage::VERTEX: flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; break;
2057 case TestingStage::TESS_EVAL: flags |= VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT; break;
2058 case TestingStage::TESS_CONTROL: flags |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT; break;
2059 case TestingStage::GEOMETRY: flags |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT; break;
2060 case TestingStage::FRAGMENT: flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break;
2061 case TestingStage::RAY_GEN: // fallthrough
2062 case TestingStage::INTERSECTION: // fallthrough
2063 case TestingStage::ANY_HIT: // fallthrough
2064 case TestingStage::CLOSEST_HIT: // fallthrough
2065 case TestingStage::MISS: // fallthrough
2066 case TestingStage::CALLABLE: flags |= VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR; break;
2067 default:
2068 DE_ASSERT(false);
2069 break;
2070 }
2071
2072 return flags;
2073 }
2074
2075 private:
2076 VkDescriptorSetLayoutCreateFlags getLayoutCreateFlags (bool isSourceSet) const
2077 {
2078 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2079 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2080
2081 VkDescriptorSetLayoutCreateFlags createFlags = 0u;
2082
2083 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2084 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT;
2085
2086 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2087 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT;
2088
2089 return createFlags;
2090 }
2091
2092 public:
2093 VkDescriptorSetLayoutCreateFlags getSrcLayoutCreateFlags () const
2094 {
2095 return getLayoutCreateFlags(true);
2096 }
2097
2098 VkDescriptorSetLayoutCreateFlags getDstLayoutCreateFlags () const
2099 {
2100 return getLayoutCreateFlags(false);
2101 }
2102
2103 private:
2104 VkDescriptorPoolCreateFlags getPoolCreateFlags (bool isSourceSet) const
2105 {
2106 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2107 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2108
2109 VkDescriptorPoolCreateFlags poolCreateFlags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
2110
2111 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2112 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT;
2113
2114 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2115 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT;
2116
2117 return poolCreateFlags;
2118 }
2119
2120 public:
2121 VkDescriptorPoolCreateFlags getSrcPoolCreateFlags () const
2122 {
2123 return getPoolCreateFlags(true);
2124 }
2125
2126 VkDescriptorPoolCreateFlags getDstPoolCreateFlags () const
2127 {
2128 return getPoolCreateFlags(false);
2129 }
2130
2131 VkPipelineBindPoint getBindPoint () const
2132 {
2133 if (testingStage == TestingStage::COMPUTE)
2134 return VK_PIPELINE_BIND_POINT_COMPUTE;
2135 if (isRayTracingStage(testingStage))
2136 return VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
2137 return VK_PIPELINE_BIND_POINT_GRAPHICS;
2138 }
2139 };
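// Illustrative parameter combination (hypothetical; the actual combinations are
// generated where the test group is built): copying descriptors from a host-only
// source set, updated before binding.
//
//   TestParams params;
//   params.updateType    = UpdateType::COPY;
//   params.sourceSetType = SourceSetType::HOST_ONLY;
//   params.updateMoment  = UpdateMoment::NORMAL;
//   // ...remaining members filled in as needed...
//   params.getSrcPoolCreateFlags(); // FREE_DESCRIPTOR_SET_BIT | HOST_ONLY_BIT_EXT
//   params.getDstPoolCreateFlags(); // FREE_DESCRIPTOR_SET_BIT only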
2140
2141 class MutableTypesTest : public TestCase
2142 {
2143 public:
2144 MutableTypesTest (tcu::TestContext& testCtx, const std::string& name, const std::string& description, const TestParams& params)
2145 : TestCase(testCtx, name, description)
2146 , m_params(params)
2147 {}
2148
2149 ~MutableTypesTest () override = default;
2150
2151 void initPrograms (vk::SourceCollections& programCollection) const override;
2152 TestInstance* createInstance (Context& context) const override;
2153 void checkSupport (Context& context) const override;
2154
2155 private:
2156 TestParams m_params;
2157 };
2158
2159 class MutableTypesInstance : public TestInstance
2160 {
2161 public:
2162 MutableTypesInstance (Context& context, const TestParams& params)
2163 : TestInstance (context)
2164 , m_params (params)
2165 {}
2166
2167 ~MutableTypesInstance () override = default;
2168
2169 tcu::TestStatus iterate () override;
2170
2171 private:
2172 TestParams m_params;
2173 };
2174
2175 // Check if a descriptor set contains a given descriptor type in any iteration up to maxTypes().
2176 bool containsAnyDescriptorType (const DescriptorSet& descriptorSet, VkDescriptorType descriptorType)
2177 {
2178 const auto numIterations = descriptorSet.maxTypes();
2179
2180 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2181 {
2182 if (descriptorSet.containsTypeAtIteration(descriptorType, iter))
2183 return true;
2184 }
2185
2186 return false;
2187 }
2188
2189 // Check if testing this descriptor set needs an external image (for sampler descriptors).
2190 bool needsExternalImage (const DescriptorSet& descriptorSet)
2191 {
2192 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLER);
2193 }
2194
2195 // Check if testing this descriptor set needs an external sampler (for sampled images).
2196 bool needsExternalSampler (const DescriptorSet& descriptorSet)
2197 {
2198 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
2199 }
2200
2201 // Check if this descriptor set contains input attachments.
2202 bool usesInputAttachments (const DescriptorSet& descriptorSet)
2203 {
2204 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2205 }
2206
2207 // Check if this descriptor set contains acceleration structures.
2208 bool usesAccelerationStructures (const DescriptorSet& descriptorSet)
2209 {
2210 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
2211 }
2212
2213 std::string shaderName (deUint32 iteration)
2214 {
2215 return ("iteration-" + de::toString(iteration));
2216 }
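// e.g. shaderName(0u) == "iteration-0". initPrograms() below registers one GLSL
// source under each of these names, so the instance can later pick the shader
// variant matching the iteration being executed.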
2217
2218 void MutableTypesTest::initPrograms (vk::SourceCollections& programCollection) const
2219 {
2220 const bool usePushConstants = (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT);
2221 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
2222 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
2223 const bool rayQueries = usesAccelerationStructures(*m_params.descriptorSet);
2224 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2225 const auto numIterations = m_params.descriptorSet->maxTypes();
2226 const auto numBindings = m_params.descriptorSet->numBindings();
2227 const vk::ShaderBuildOptions rtBuildOptions (programCollection.usedVulkanVersion, vk::SPIRV_VERSION_1_4, 0u, true);
2228
2229 // Extra set and bindings for external resources.
2230 std::ostringstream extraSet;
2231 deUint32 extraBindings = 0u;
2232
2233 extraSet << "layout (set=1, binding=" << extraBindings++ << ") buffer OutputBufferBlock { uint value[" << numIterations << "]; } outputBuffer;\n";
2234 if (useExternalImage)
2235 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform utexture2D externalSampledImage;\n";
2236 if (useExternalSampler)
2237 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform sampler externalSampler;\n";
2238 // The extra binding below will be declared in the "passthrough" ray generation shader.
2239 #if 0
2240 if (rayTracing)
2241 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform accelerationStructureEXT externalAS;\n";
2242 #endif
2243
2244 // Common vertex preamble.
2245 std::ostringstream vertexPreamble;
2246 vertexPreamble
2247 << "vec2 vertexPositions[3] = vec2[](\n"
2248 << " vec2(0.0, -0.5),\n"
2249 << " vec2(0.5, 0.5),\n"
2250 << " vec2(-0.5, 0.5)\n"
2251 << ");\n"
2252 ;
2253
2254 // Vertex shader body common statements.
2255 std::ostringstream vertexBodyCommon;
2256 vertexBodyCommon << " gl_Position = vec4(vertexPositions[gl_VertexIndex], 0.0, 1.0);\n";
2257
2258 // Common tessellation control preamble.
2259 std::ostringstream tescPreamble;
2260 tescPreamble
2261 << "layout (vertices=3) out;\n"
2262 << "in gl_PerVertex\n"
2263 << "{\n"
2264 << " vec4 gl_Position;\n"
2265 << "} gl_in[gl_MaxPatchVertices];\n"
2266 << "out gl_PerVertex\n"
2267 << "{\n"
2268 << " vec4 gl_Position;\n"
2269 << "} gl_out[];\n"
2270 ;
2271
2272 // Common tessellation control body.
2273 std::ostringstream tescBodyCommon;
2274 tescBodyCommon
2275 << " gl_TessLevelInner[0] = 1.0;\n"
2276 << " gl_TessLevelInner[1] = 1.0;\n"
2277 << " gl_TessLevelOuter[0] = 1.0;\n"
2278 << " gl_TessLevelOuter[1] = 1.0;\n"
2279 << " gl_TessLevelOuter[2] = 1.0;\n"
2280 << " gl_TessLevelOuter[3] = 1.0;\n"
2281 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2282 ;
2283
2284 // Common tessellation evaluation preamble.
2285 std::ostringstream tesePreamble;
2286 tesePreamble
2287 << "layout (triangles, fractional_odd_spacing, cw) in;\n"
2288 << "in gl_PerVertex\n"
2289 << "{\n"
2290 << " vec4 gl_Position;\n"
2291 << "} gl_in[gl_MaxPatchVertices];\n"
2292 << "out gl_PerVertex\n"
2293 << "{\n"
2294 << " vec4 gl_Position;\n"
2295 << "};\n"
2296 ;
2297
2298 // Common tessellation evaluation body.
2299 std::ostringstream teseBodyCommon;
2300 teseBodyCommon
2301 << " gl_Position = (gl_TessCoord.x * gl_in[0].gl_Position) +\n"
2302 << " (gl_TessCoord.y * gl_in[1].gl_Position) +\n"
2303 << " (gl_TessCoord.z * gl_in[2].gl_Position);\n"
2304 ;
2305
2306 // Shader preamble.
2307 std::ostringstream preamble;
2308
2309 preamble
2310 << "#version 460\n"
2311 << "#extension GL_EXT_nonuniform_qualifier : enable\n"
2312 << "#extension GL_EXT_debug_printf : enable\n"
2313 << (rayTracing ? "#extension GL_EXT_ray_tracing : enable\n" : "")
2314 << (rayQueries ? "#extension GL_EXT_ray_query : enable\n" : "")
2315 << "\n"
2316 ;
2317
2318 if (m_params.testingStage == TestingStage::VERTEX)
2319 {
2320 preamble << vertexPreamble.str();
2321 }
2322 else if (m_params.testingStage == TestingStage::COMPUTE)
2323 {
2324 preamble
2325 << "layout (local_size_x=1, local_size_y=1, local_size_z=1) in;\n"
2326 << "\n"
2327 ;
2328 }
2329 else if (m_params.testingStage == TestingStage::GEOMETRY)
2330 {
2331 preamble
2332 << "layout (triangles) in;\n"
2333 << "layout (triangle_strip, max_vertices=3) out;\n"
2334 << "in gl_PerVertex\n"
2335 << "{\n"
2336 << " vec4 gl_Position;\n"
2337 << "} gl_in[3];\n"
2338 << "out gl_PerVertex\n"
2339 << "{\n"
2340 << " vec4 gl_Position;\n"
2341 << "};\n"
2342 ;
2343 }
2344 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2345 {
2346 preamble << tescPreamble.str();
2347 }
2348 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2349 {
2350 preamble << tesePreamble.str();
2351 }
2352 else if (m_params.testingStage == TestingStage::CALLABLE)
2353 {
2354 preamble << "layout (location=0) callableDataInEXT float unusedCallableData;\n";
2355 }
2356 else if (m_params.testingStage == TestingStage::CLOSEST_HIT ||
2357 m_params.testingStage == TestingStage::ANY_HIT ||
2358 m_params.testingStage == TestingStage::MISS)
2359 {
2360 preamble << "layout (location=0) rayPayloadInEXT float unusedRayPayload;\n";
2361 }
2362 else if (m_params.testingStage == TestingStage::INTERSECTION)
2363 {
2364 preamble << "hitAttributeEXT vec3 hitAttribute;\n";
2365 }
2366
2367 preamble << extraSet.str();
2368 if (usePushConstants)
2369 preamble << "layout (push_constant, std430) uniform PushConstantBlock { uint zero; } pc;\n";
2370 preamble << "\n";
2371
2372 // We need to create a shader per iteration.
2373 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2374 {
2375 // Shader preamble.
2376 std::ostringstream shader;
2377 shader << preamble.str();
2378
2379 deUint32 inputAttachmentCount = 0u;
2380
2381 // Descriptor declarations for this iteration.
2382 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2383 {
2384 DE_ASSERT(bindingIdx <= std::numeric_limits<deUint32>::max());
2385
2386 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2387 const auto bindingTypes = binding->typesAtIteration(iter);
2388 const auto hasInputAttachment = de::contains(begin(bindingTypes), end(bindingTypes), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2389 const auto isArray = binding->isArray();
2390 const auto isUnbounded = binding->isUnbounded();
2391 const auto bindingSize = binding->size();
2392
2393 // If the binding is an input attachment, make sure it's not an array.
2394 DE_ASSERT(!hasInputAttachment || !isArray);
2395
2396 // Make sure the descriptor count fits a deInt32 if needed.
2397 DE_ASSERT(!isArray || isUnbounded || bindingSize <= static_cast<size_t>(std::numeric_limits<deInt32>::max()));
2398
2399 const auto arraySize = (isArray ? (isUnbounded ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(bindingSize)))
2400 : tcu::Nothing);
2401
2402 shader << binding->glslDeclarations(iter, 0u, static_cast<deUint32>(bindingIdx), inputAttachmentCount, arraySize);
2403
2404 if (hasInputAttachment)
2405 ++inputAttachmentCount;
2406 }
2407
2408 // Main body.
2409 shader
2410 << "\n"
2411 << "void main() {\n"
2412 // This checks if we are the first invocation to arrive here, so the checks are executed only once.
2413 << " const uint flag = atomicCompSwap(outputBuffer.value[" << iter << "], 0u, 1u);\n"
2414 << " if (flag == 0u) {\n"
2415 << " uint anyError = 0u;\n"
2416 ;
2417
2418 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2419 {
2420 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2421 const auto idx32 = static_cast<deUint32>(bindingIdx);
2422 shader << binding->glslCheckStatements(iter, 0u, idx32, getDescriptorNumericValue(iter, idx32), tcu::Nothing, usePushConstants);
2423 }
2424
2425 shader
2426 << " if (anyError == 0u) {\n"
2427 << " atomicAdd(outputBuffer.value[" << iter << "], 1u);\n"
2428 << " }\n"
2429 << " }\n" // Closes if (flag == 0u).
2430 ;
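		// At this point the generated main() body looks roughly like this
		// (illustrative, with N standing for the iteration number):
		//
		//   void main() {
		//       const uint flag = atomicCompSwap(outputBuffer.value[N], 0u, 1u);
		//       if (flag == 0u) {
		//           uint anyError = 0u;
		//           // ...one glslCheckStatements() block per binding, each of
		//           // which may set anyError...
		//           if (anyError == 0u) {
		//               atomicAdd(outputBuffer.value[N], 1u);
		//           }
		//       }
		//   }
		//
		// On success outputBuffer.value[N] ends up as 2: 1 stored by the
		// compare-swap of the first invocation plus 1 added when all checks pass.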
2431
2432 if (m_params.testingStage == TestingStage::VERTEX)
2433 {
2434 shader << vertexBodyCommon.str();
2435 }
2436 else if (m_params.testingStage == TestingStage::GEOMETRY)
2437 {
2438 shader
2439 << " gl_Position = gl_in[0].gl_Position; EmitVertex();\n"
2440 << " gl_Position = gl_in[1].gl_Position; EmitVertex();\n"
2441 << " gl_Position = gl_in[2].gl_Position; EmitVertex();\n"
2442 ;
2443 }
2444 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2445 {
2446 shader << tescBodyCommon.str();
2447 }
2448 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2449 {
2450 shader << teseBodyCommon.str();
2451 }
2452
2453 shader
2454 << "}\n" // End of main().
2455 ;
2456
2457 {
2458 const auto shaderNameStr = shaderName(iter);
2459 const auto shaderStr = shader.str();
2460 auto& glslSource = programCollection.glslSources.add(shaderNameStr);
2461
2462 if (m_params.testingStage == TestingStage::COMPUTE)
2463 glslSource << glu::ComputeSource(shaderStr);
2464 else if (m_params.testingStage == TestingStage::VERTEX)
2465 glslSource << glu::VertexSource(shaderStr);
2466 else if (m_params.testingStage == TestingStage::FRAGMENT)
2467 glslSource << glu::FragmentSource(shaderStr);
2468 else if (m_params.testingStage == TestingStage::GEOMETRY)
2469 glslSource << glu::GeometrySource(shaderStr);
2470 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2471 glslSource << glu::TessellationControlSource(shaderStr);
2472 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2473 glslSource << glu::TessellationEvaluationSource(shaderStr);
2474 else if (m_params.testingStage == TestingStage::RAY_GEN)
2475 glslSource << glu::RaygenSource(updateRayTracingGLSL(shaderStr));
2476 else if (m_params.testingStage == TestingStage::INTERSECTION)
2477 glslSource << glu::IntersectionSource(updateRayTracingGLSL(shaderStr));
2478 else if (m_params.testingStage == TestingStage::ANY_HIT)
2479 glslSource << glu::AnyHitSource(updateRayTracingGLSL(shaderStr));
2480 else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
2481 glslSource << glu::ClosestHitSource(updateRayTracingGLSL(shaderStr));
2482 else if (m_params.testingStage == TestingStage::MISS)
2483 glslSource << glu::MissSource(updateRayTracingGLSL(shaderStr));
2484 else if (m_params.testingStage == TestingStage::CALLABLE)
2485 glslSource << glu::CallableSource(updateRayTracingGLSL(shaderStr));
2486 else
2487 DE_ASSERT(false);
2488
2489 if (rayTracing || rayQueries)
2490 glslSource << rtBuildOptions;
2491 }
2492 }
2493
2494 if (m_params.testingStage == TestingStage::FRAGMENT
2495 || m_params.testingStage == TestingStage::GEOMETRY
2496 || m_params.testingStage == TestingStage::TESS_CONTROL
2497 || m_params.testingStage == TestingStage::TESS_EVAL)
2498 {
2499 // Add a passthrough vertex shader that works for points.
2500 std::ostringstream vertPassthrough;
2501 vertPassthrough
2502 << "#version 460\n"
2503 << "out gl_PerVertex\n"
2504 << "{\n"
2505 << " vec4 gl_Position;\n"
2506 << "};\n"
2507 << vertexPreamble.str()
2508 << "void main() {\n"
2509 << vertexBodyCommon.str()
2510 << "}\n"
2511 ;
2512 programCollection.glslSources.add("vert") << glu::VertexSource(vertPassthrough.str());
2513 }
2514
2515 if (m_params.testingStage == TestingStage::TESS_CONTROL)
2516 {
2517 // Add passthrough tessellation evaluation shader.
2518 std::ostringstream tesePassthrough;
2519 tesePassthrough
2520 << "#version 460\n"
2521 << tesePreamble.str()
2522 << "void main (void)\n"
2523 << "{\n"
2524 << teseBodyCommon.str()
2525 << "}\n"
2526 ;
2527
2528 programCollection.glslSources.add("tese") << glu::TessellationEvaluationSource(tesePassthrough.str());
2529 }
2530
2531 if (m_params.testingStage == TestingStage::TESS_EVAL)
2532 {
2533 // Add passthrough tessellation control shader.
2534 std::ostringstream tescPassthrough;
2535 tescPassthrough
2536 << "#version 460\n"
2537 << tescPreamble.str()
2538 << "void main (void)\n"
2539 << "{\n"
2540 << tescBodyCommon.str()
2541 << "}\n"
2542 ;
2543
2544 programCollection.glslSources.add("tesc") << glu::TessellationControlSource(tescPassthrough.str());
2545 }
2546
2547 if (rayTracing && m_params.testingStage != TestingStage::RAY_GEN)
2548 {
2549 // Add a "passthrough" ray generation shader.
2550 std::ostringstream rgen;
2551 rgen
2552 << "#version 460 core\n"
2553 << "#extension GL_EXT_ray_tracing : require\n"
2554 << "layout (set=1, binding=" << extraBindings << ") uniform accelerationStructureEXT externalAS;\n"
2555 << ((m_params.testingStage == TestingStage::CALLABLE)
2556 ? "layout (location=0) callableDataEXT float unusedCallableData;\n"
2557 : "layout (location=0) rayPayloadEXT float unusedRayPayload;\n")
2558 << "\n"
2559 << "void main()\n"
2560 << "{\n"
2561 ;
2562
2563 if (m_params.testingStage == TestingStage::INTERSECTION
2564 || m_params.testingStage == TestingStage::ANY_HIT
2565 || m_params.testingStage == TestingStage::CLOSEST_HIT
2566 || m_params.testingStage == TestingStage::MISS)
2567 {
2568 // We need to trace rays in this case to get hits or misses.
2569 const auto zDir = ((m_params.testingStage == TestingStage::MISS) ? "-1.0" : "1.0");
2570
2571 rgen
2572 << " const uint cullMask = 0xFF;\n"
2573 << " const float tMin = 1.0;\n"
2574 << " const float tMax = 10.0;\n"
2575 << " const vec3 origin = vec3(0.0, 0.0, 0.0);\n"
2576 << " const vec3 direction = vec3(0.0, 0.0, " << zDir << ");\n"
2577 << " traceRayEXT(externalAS, gl_RayFlagsNoneEXT, cullMask, 0, 0, 0, origin, tMin, direction, tMax, 0);\n"
2578 ;
2579
2580 }
2581 else if (m_params.testingStage == TestingStage::CALLABLE)
2582 {
2583 rgen << " executeCallableEXT(0, 0);\n";
2584 }
2585
2586 // End of main().
2587 rgen << "}\n";
2588
2589 programCollection.glslSources.add("rgen") << glu::RaygenSource(updateRayTracingGLSL(rgen.str())) << rtBuildOptions;
2590
2591 // Intersection shaders will ignore the intersection, so we need a passthrough miss shader.
2592 if (m_params.testingStage == TestingStage::INTERSECTION)
2593 {
2594 std::ostringstream miss;
2595 miss
2596 << "#version 460 core\n"
2597 << "#extension GL_EXT_ray_tracing : require\n"
2598 << "layout (location=0) rayPayloadEXT float unusedRayPayload;\n"
2599 << "\n"
2600 << "void main()\n"
2601 << "{\n"
2602 << "}\n"
2603 ;
2604
2605 programCollection.glslSources.add("miss") << glu::MissSource(updateRayTracingGLSL(miss.str())) << rtBuildOptions;
2606 }
2607 }
2608 }
2609
2610 TestInstance* MutableTypesTest::createInstance (Context& context) const
2611 {
2612 return new MutableTypesInstance(context, m_params);
2613 }
2614
2615 void requirePartiallyBound (Context& context)
2616 {
2617 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2618 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2619 if (!indexingFeatures.descriptorBindingPartiallyBound)
2620 TCU_THROW(NotSupportedError, "Partially bound bindings not supported");
2621 }
2622
2623 void requireVariableDescriptorCount (Context& context)
2624 {
2625 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2626 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2627 if (!indexingFeatures.descriptorBindingVariableDescriptorCount)
2628 TCU_THROW(NotSupportedError, "Variable descriptor count not supported");
2629 }
2630
2631 // Calculates the set of used descriptor types for a given set and iteration count, for bindings matching a predicate.
2632 std::set<VkDescriptorType> getUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations, bool (*predicate)(const BindingInterface* binding))
2633 {
2634 std::set<VkDescriptorType> usedDescriptorTypes;
2635
2636 for (size_t bindingIdx = 0; bindingIdx < descriptorSet.numBindings(); ++bindingIdx)
2637 {
2638 const auto bindingPtr = descriptorSet.getBinding(bindingIdx);
2639 if (predicate(bindingPtr))
2640 {
2641 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2642 {
2643 const auto descTypes = bindingPtr->typesAtIteration(iter);
2644 usedDescriptorTypes.insert(begin(descTypes), end(descTypes));
2645 }
2646 }
2647 }
2648
2649 return usedDescriptorTypes;
2650 }
2651
2652 std::set<VkDescriptorType> getAllUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2653 {
2654 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface*) { return true; });
2655 }
2656
2657 std::set<VkDescriptorType> getUsedArrayDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2658 {
2659 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface* b) { return b->isArray(); });
2660 }
2661
2662 // Are we testing a vertex pipeline stage?
2663 bool isVertexStage (TestingStage stage)
2664 {
2665 switch (stage)
2666 {
2667 case TestingStage::VERTEX:
2668 case TestingStage::TESS_CONTROL:
2669 case TestingStage::TESS_EVAL:
2670 case TestingStage::GEOMETRY:
2671 return true;
2672 default:
2673 break;
2674 }
2675
2676 return false;
2677 }
2678
2679 void MutableTypesTest::checkSupport (Context& context) const
2680 {
2681 if (!context.isDeviceFunctionalitySupported("VK_VALVE_mutable_descriptor_type") &&
2682 !context.isDeviceFunctionalitySupported("VK_EXT_mutable_descriptor_type"))
2684 TCU_THROW(NotSupportedError, "VK_VALVE_mutable_descriptor_type or VK_EXT_mutable_descriptor_type is not supported");
2685
2686 VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutableDescriptorType = initVulkanStructure();
2687 VkPhysicalDeviceFeatures2KHR features2 = initVulkanStructure(&mutableDescriptorType);
2688
2689 context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);
2690
2691 if (!mutableDescriptorType.mutableDescriptorType)
2692 TCU_THROW(NotSupportedError, "mutableDescriptorType feature is not supported");
2693
2694 // Check ray tracing if needed.
2695 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2696
2697 if (rayTracing)
2698 {
2699 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2700 context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
2701 }
2702
2703 // Check if ray queries are needed. Ray queries are used to verify acceleration structure descriptors.
2704 const bool rayQueriesNeeded = usesAccelerationStructures(*m_params.descriptorSet);
2705 if (rayQueriesNeeded)
2706 {
2707 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2708 context.requireDeviceFunctionality("VK_KHR_ray_query");
2709 }
2710
2711 // We'll use iterations to check each mutable type, as needed.
2712 const auto numIterations = m_params.descriptorSet->maxTypes();
2713
2714 if (m_params.descriptorSet->lastBindingIsUnbounded())
2715 requireVariableDescriptorCount(context);
2716
2717 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2718 {
2719 if (m_params.descriptorSet->needsAliasing(iter))
2720 {
2721 requirePartiallyBound(context);
2722 break;
2723 }
2724 }
2725
2726 if (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2727 {
2728 // Check update after bind for each used descriptor type.
2729 const auto& usedDescriptorTypes = getAllUsedDescriptorTypes(*m_params.descriptorSet, numIterations);
2730 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2731
2732 for (const auto& descType : usedDescriptorTypes)
2733 {
2734 switch (descType)
2735 {
2736 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2737 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2738 if (!indexingFeatures.descriptorBindingUniformBufferUpdateAfterBind)
2739 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform buffers");
2740 break;
2741
2742 case VK_DESCRIPTOR_TYPE_SAMPLER:
2743 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2744 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2745 if (!indexingFeatures.descriptorBindingSampledImageUpdateAfterBind)
2746 TCU_THROW(NotSupportedError, "Update-after-bind not supported for samplers and sampled images");
2747 break;
2748
2749 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2750 if (!indexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
2751 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage images");
2752 break;
2753
2754 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2755 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2756 if (!indexingFeatures.descriptorBindingStorageBufferUpdateAfterBind)
2757 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage buffers");
2758 break;
2759
2760 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2761 if (!indexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind)
2762 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform texel buffers");
2763 break;
2764
2765 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2766 if (!indexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind)
2767 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage texel buffers");
2768 break;
2769
2770 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2771 TCU_THROW(InternalError, "Tests do not support update-after-bind with input attachments");
2772
2773 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
2774 {
2775 // Just in case we ever mix some of these in.
2776 context.requireDeviceFunctionality("VK_EXT_inline_uniform_block");
2777 const auto& iubFeatures = context.getInlineUniformBlockFeatures();
2778 if (!iubFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind)
2779 TCU_THROW(NotSupportedError, "Update-after-bind not supported for inline uniform blocks");
2780 }
2781 break;
2782
2783 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2784 {
2785 // Just in case we ever mix some of these in.
2786 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2787 const auto& asFeatures = context.getAccelerationStructureFeatures();
2788 if (!asFeatures.descriptorBindingAccelerationStructureUpdateAfterBind)
2789 TCU_THROW(NotSupportedError, "Update-after-bind not supported for acceleration structures");
2790 }
2791 break;
2792
2793 case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
2794 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_EXT in list of used descriptor types");
2795
2796 default:
2797 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2798 }
2799 }
2800 }

	if (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT)
	{
		// These require dynamically uniform indices.
		const auto& usedDescriptorTypes = getUsedArrayDescriptorTypes(*m_params.descriptorSet, numIterations);
		const auto& features = context.getDeviceFeatures();
		const auto descriptorIndexingSupported = context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing");
		const auto& indexingFeatures = context.getDescriptorIndexingFeatures();

		for (const auto& descType : usedDescriptorTypes)
		{
			switch (descType)
			{
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
				if (!features.shaderUniformBufferArrayDynamicIndexing)
					TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform buffers");
				break;

			case VK_DESCRIPTOR_TYPE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
				if (!features.shaderSampledImageArrayDynamicIndexing)
					TCU_THROW(NotSupportedError, "Dynamic indexing not supported for samplers and sampled images");
				break;

			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
				if (!features.shaderStorageImageArrayDynamicIndexing)
					TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage images");
				break;

			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
				if (!features.shaderStorageBufferArrayDynamicIndexing)
					TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage buffers");
				break;

			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
				if (!descriptorIndexingSupported || !indexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing)
					TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform texel buffers");
				break;

			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				if (!descriptorIndexingSupported || !indexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing)
					TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage texel buffers");
				break;

			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				if (!descriptorIndexingSupported || !indexingFeatures.shaderInputAttachmentArrayDynamicIndexing)
					TCU_THROW(NotSupportedError, "Dynamic indexing not supported for input attachments");
				break;

			case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
				context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
				break;

			case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
				TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_EXT in list of used array descriptor types");

			default:
				TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
			}
		}
	}
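
	// For reference, the PUSH_CONSTANT access mode corresponds to indexing descriptor arrays with a push
	// constant value in the shaders, roughly like this GLSL sketch (illustrative only; names assumed):
	//
	//     layout(push_constant, std430) uniform PushConstants { uint idx; } pc;
	//     layout(set=0, binding=0) buffer StorageBlock { uint value; } ssbo[ARRAY_SIZE];
	//     ...
	//     ssbo[pc.idx].value = ...; // dynamically uniform index, hence the feature checks above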

	// Check layout support.
	{
		const auto& vkd = context.getDeviceInterface();
		const auto device = getDevice(context);
		const auto stageFlags = m_params.getStageFlags();

		{
			const auto layoutCreateFlags = m_params.getDstLayoutCreateFlags();
			const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);

			if (!supported)
				TCU_THROW(NotSupportedError, "Required descriptor set layout not supported");
		}

		if (m_params.updateType == UpdateType::COPY)
		{
			const auto layoutCreateFlags = m_params.getSrcLayoutCreateFlags();
			const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);

			if (!supported)
				TCU_THROW(NotSupportedError, "Required descriptor set layout for source set not supported");

			// Check specific layouts for the different source sets are supported.
			for (deUint32 iter = 0u; iter < numIterations; ++iter)
			{
				const auto srcSet = m_params.descriptorSet->genSourceSet(m_params.sourceSetStrategy, iter);
				const auto srcLayoutSupported = srcSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);

				if (!srcLayoutSupported)
					TCU_THROW(NotSupportedError, "Descriptor set layout for source set at iteration " + de::toString(iter) + " not supported");
			}
		}
	}
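
	// checkDescriptorSetLayout is expected to boil down to a vkGetDescriptorSetLayoutSupport query,
	// conceptually (sketch only, assuming a fully populated VkDescriptorSetLayoutCreateInfo):
	//
	//     VkDescriptorSetLayoutSupport support = initVulkanStructure();
	//     vkd.getDescriptorSetLayoutSupport(device, &layoutCreateInfo, &support);
	//     return (support.supported == VK_TRUE);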

	// Check supported stores and stages.
	const bool vertexStage = isVertexStage(m_params.testingStage);
	const bool fragmentStage = (m_params.testingStage == TestingStage::FRAGMENT);
	const bool geometryStage = (m_params.testingStage == TestingStage::GEOMETRY);
	const bool tessellation = (m_params.testingStage == TestingStage::TESS_CONTROL || m_params.testingStage == TestingStage::TESS_EVAL);

	const auto& features = context.getDeviceFeatures();

	if (vertexStage && !features.vertexPipelineStoresAndAtomics)
		TCU_THROW(NotSupportedError, "Vertex pipeline stores and atomics not supported");

	if (fragmentStage && !features.fragmentStoresAndAtomics)
		TCU_THROW(NotSupportedError, "Fragment shader stores and atomics not supported");

	if (geometryStage && !features.geometryShader)
		TCU_THROW(NotSupportedError, "Geometry shader not supported");

	if (tessellation && !features.tessellationShader)
		TCU_THROW(NotSupportedError, "Tessellation shaders not supported");
}

// What to do at each iteration step. Used to apply UPDATE_AFTER_BIND or not.
enum class Step
{
	UPDATE = 0,
	BIND,
};
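
// For a given iteration the step sequence is therefore:
//   NORMAL:            UPDATE then BIND (descriptors written before vkCmdBindDescriptorSets).
//   UPDATE_AFTER_BIND: BIND then UPDATE (set bound first, then written, which requires the
//                      *UpdateAfterBind features verified in checkSupport above).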

// Create render pass.
Move<VkRenderPass> buildRenderPass (const DeviceInterface& vkd, VkDevice device, const std::vector<Resource>& resources)
{
	const auto imageFormat = getDescriptorImageFormat();

	std::vector<VkAttachmentDescription> attachmentDescriptions;
	std::vector<VkAttachmentReference> attachmentReferences;
	std::vector<deUint32> attachmentIndices;

	for (const auto& resource : resources)
	{
		if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
		{
			const auto nextIndex = static_cast<deUint32>(attachmentDescriptions.size());

			const VkAttachmentDescription description = {
				0u,									// VkAttachmentDescriptionFlags flags;
				imageFormat,						// VkFormat format;
				VK_SAMPLE_COUNT_1_BIT,				// VkSampleCountFlagBits samples;
				VK_ATTACHMENT_LOAD_OP_LOAD,			// VkAttachmentLoadOp loadOp;
				VK_ATTACHMENT_STORE_OP_DONT_CARE,	// VkAttachmentStoreOp storeOp;
				VK_ATTACHMENT_LOAD_OP_DONT_CARE,	// VkAttachmentLoadOp stencilLoadOp;
				VK_ATTACHMENT_STORE_OP_DONT_CARE,	// VkAttachmentStoreOp stencilStoreOp;
				VK_IMAGE_LAYOUT_GENERAL,			// VkImageLayout initialLayout;
				VK_IMAGE_LAYOUT_GENERAL,			// VkImageLayout finalLayout;
			};

			const VkAttachmentReference reference = { nextIndex, VK_IMAGE_LAYOUT_GENERAL };

			attachmentIndices.push_back(nextIndex);
			attachmentDescriptions.push_back(description);
			attachmentReferences.push_back(reference);
		}
	}

	const auto attachmentCount = static_cast<deUint32>(attachmentDescriptions.size());
	DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentIndices.size()));
	DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentReferences.size()));

	const VkSubpassDescription subpassDescription =
	{
		0u,										// VkSubpassDescriptionFlags flags;
		VK_PIPELINE_BIND_POINT_GRAPHICS,		// VkPipelineBindPoint pipelineBindPoint;
		attachmentCount,						// deUint32 inputAttachmentCount;
		de::dataOrNull(attachmentReferences),	// const VkAttachmentReference* pInputAttachments;
		0u,										// deUint32 colorAttachmentCount;
		nullptr,								// const VkAttachmentReference* pColorAttachments;
		nullptr,								// const VkAttachmentReference* pResolveAttachments;
		nullptr,								// const VkAttachmentReference* pDepthStencilAttachment;
		0u,										// deUint32 preserveAttachmentCount;
		nullptr,								// const deUint32* pPreserveAttachments;
	};

	const VkRenderPassCreateInfo renderPassCreateInfo =
	{
		VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,				// VkStructureType sType;
		nullptr,												// const void* pNext;
		0u,														// VkRenderPassCreateFlags flags;
		static_cast<deUint32>(attachmentDescriptions.size()),	// deUint32 attachmentCount;
		de::dataOrNull(attachmentDescriptions),					// const VkAttachmentDescription* pAttachments;
		1u,														// deUint32 subpassCount;
		&subpassDescription,									// const VkSubpassDescription* pSubpasses;
		0u,														// deUint32 dependencyCount;
		nullptr,												// const VkSubpassDependency* pDependencies;
	};

	return createRenderPass(vkd, device, &renderPassCreateInfo);
}

// Create a graphics pipeline.
Move<VkPipeline> buildGraphicsPipeline (const DeviceInterface& vkd, VkDevice device, VkPipelineLayout pipelineLayout,
										VkShaderModule vertModule,
										VkShaderModule tescModule,
										VkShaderModule teseModule,
										VkShaderModule geomModule,
										VkShaderModule fragModule,
										VkRenderPass renderPass)
{
	const auto extent = getDefaultExtent();
	const std::vector<VkViewport> viewports (1u, makeViewport(extent));
	const std::vector<VkRect2D> scissors (1u, makeRect2D(extent));
	const auto hasTess = (tescModule != DE_NULL || teseModule != DE_NULL);
	const auto topology = (hasTess ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);

	const VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo = initVulkanStructure();

	const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo = {
		VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType sType;
		nullptr,														// const void* pNext;
		0u,																// VkPipelineInputAssemblyStateCreateFlags flags;
		topology,														// VkPrimitiveTopology topology;
		VK_FALSE,														// VkBool32 primitiveRestartEnable;
	};

	const VkPipelineTessellationStateCreateInfo tessellationStateCreateInfo = {
		VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,	// VkStructureType sType;
		nullptr,													// const void* pNext;
		0u,															// VkPipelineTessellationStateCreateFlags flags;
		(hasTess ? 3u : 0u),										// deUint32 patchControlPoints;
	};

	const VkPipelineViewportStateCreateInfo viewportStateCreateInfo = {
		VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,	// VkStructureType sType;
		nullptr,												// const void* pNext;
		0u,														// VkPipelineViewportStateCreateFlags flags;
		static_cast<deUint32>(viewports.size()),				// deUint32 viewportCount;
		de::dataOrNull(viewports),								// const VkViewport* pViewports;
		static_cast<deUint32>(scissors.size()),					// deUint32 scissorCount;
		de::dataOrNull(scissors),								// const VkRect2D* pScissors;
	};

	const VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo = {
		VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,	// VkStructureType sType;
		nullptr,													// const void* pNext;
		0u,															// VkPipelineRasterizationStateCreateFlags flags;
		VK_FALSE,													// VkBool32 depthClampEnable;
		(fragModule == DE_NULL ? VK_TRUE : VK_FALSE),				// VkBool32 rasterizerDiscardEnable;
		VK_POLYGON_MODE_FILL,										// VkPolygonMode polygonMode;
		VK_CULL_MODE_NONE,											// VkCullModeFlags cullMode;
		VK_FRONT_FACE_CLOCKWISE,									// VkFrontFace frontFace;
		VK_FALSE,													// VkBool32 depthBiasEnable;
		0.0f,														// float depthBiasConstantFactor;
		0.0f,														// float depthBiasClamp;
		0.0f,														// float depthBiasSlopeFactor;
		1.0f,														// float lineWidth;
	};

	const VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo = {
		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType sType;
		nullptr,													// const void* pNext;
		0u,															// VkPipelineMultisampleStateCreateFlags flags;
		VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits rasterizationSamples;
		VK_FALSE,													// VkBool32 sampleShadingEnable;
		1.0f,														// float minSampleShading;
		nullptr,													// const VkSampleMask* pSampleMask;
		VK_FALSE,													// VkBool32 alphaToCoverageEnable;
		VK_FALSE,													// VkBool32 alphaToOneEnable;
	};

	const VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo = initVulkanStructure();

	const VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo = initVulkanStructure();

	return makeGraphicsPipeline(vkd, device, pipelineLayout,
								vertModule, tescModule, teseModule, geomModule, fragModule,
								renderPass, 0u, &vertexInputStateCreateInfo, &inputAssemblyStateCreateInfo,
								(hasTess ? &tessellationStateCreateInfo : nullptr), &viewportStateCreateInfo,
								&rasterizationStateCreateInfo, &multisampleStateCreateInfo,
								&depthStencilStateCreateInfo, &colorBlendStateCreateInfo, nullptr);
}

Move<VkFramebuffer> buildFramebuffer (const DeviceInterface& vkd, VkDevice device, VkRenderPass renderPass, const std::vector<Resource>& resources)
{
	const auto extent = getDefaultExtent();

	std::vector<VkImageView> inputAttachments;
	for (const auto& resource : resources)
	{
		if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
			inputAttachments.push_back(resource.imageView.get());
	}

	const VkFramebufferCreateInfo framebufferCreateInfo =
	{
		VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,		// VkStructureType sType;
		nullptr,										// const void* pNext;
		0u,												// VkFramebufferCreateFlags flags;
		renderPass,										// VkRenderPass renderPass;
		static_cast<deUint32>(inputAttachments.size()),	// deUint32 attachmentCount;
		de::dataOrNull(inputAttachments),				// const VkImageView* pAttachments;
		extent.width,									// deUint32 width;
		extent.height,									// deUint32 height;
		extent.depth,									// deUint32 layers;
	};

	return createFramebuffer(vkd, device, &framebufferCreateInfo);
}
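
// Note: buildFramebuffer collects input attachment views in the same resource order used by buildRenderPass,
// so framebuffer attachment indices line up with the render pass attachment references above.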

tcu::TestStatus MutableTypesInstance::iterate ()
{
	const auto& vki = m_context.getInstanceInterface();
	const auto& vkd = m_context.getDeviceInterface();
	const auto device = getDevice(m_context);
	const auto physDev = m_context.getPhysicalDevice();
	const auto qIndex = m_context.getUniversalQueueFamilyIndex();
	const auto queue = getDeviceQueue(vkd, device, qIndex, 0);

	SimpleAllocator alloc (vkd, device, getPhysicalDeviceMemoryProperties(vki, physDev));

	const auto& paramSet = m_params.descriptorSet;
	const auto numIterations = paramSet->maxTypes();
	const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
	const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
	const auto stageFlags = m_params.getStageFlags();
	const bool srcSetNeeded = (m_params.updateType == UpdateType::COPY);
	const bool updateAfterBind = (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND);
	const auto bindPoint = m_params.getBindPoint();
	const bool rayTracing = isRayTracingStage(m_params.testingStage);
	const bool useAABBs = (m_params.testingStage == TestingStage::INTERSECTION);

	// Resources for each iteration.
	std::vector<std::vector<Resource>> allResources;
	allResources.reserve(numIterations);

	// Command pool.
	const auto cmdPool = makeCommandPool(vkd, device, qIndex);

	// Descriptor pool and set for the active (dst) descriptor set.
	const auto dstPoolFlags = m_params.getDstPoolCreateFlags();
	const auto dstLayoutFlags = m_params.getDstLayoutCreateFlags();

	const auto dstPool = paramSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, dstPoolFlags);
	const auto dstLayout = paramSet->makeDescriptorSetLayout(vkd, device, stageFlags, dstLayoutFlags);
	const auto varCount = paramSet->getVariableDescriptorCount();

	using VariableCountInfoPtr = de::MovePtr<VkDescriptorSetVariableDescriptorCountAllocateInfo>;

	VariableCountInfoPtr dstVariableCountInfo;
	if (varCount)
	{
		dstVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
		*dstVariableCountInfo = initVulkanStructure();

		dstVariableCountInfo->descriptorSetCount = 1u;
		dstVariableCountInfo->pDescriptorCounts = &(varCount.get());
	}
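	// makeDescriptorSet is expected to chain this structure into VkDescriptorSetAllocateInfo::pNext when
	// non-null, roughly equivalent to this sketch (illustrative only):
	//
	//     const VkDescriptorSetLayout rawLayout = dstLayout.get();
	//     VkDescriptorSetAllocateInfo allocInfo = initVulkanStructure();
	//     allocInfo.pNext              = dstVariableCountInfo.get(); // may be null
	//     allocInfo.descriptorPool     = dstPool.get();
	//     allocInfo.descriptorSetCount = 1u;
	//     allocInfo.pSetLayouts        = &rawLayout;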
	const auto dstSet = makeDescriptorSet(vkd, device, dstPool.get(), dstLayout.get(), dstVariableCountInfo.get());

	// Source pool and set (optional).
	const auto srcPoolFlags = m_params.getSrcPoolCreateFlags();
	const auto srcLayoutFlags = m_params.getSrcLayoutCreateFlags();
	DescriptorSetPtr iterationSrcSet;
	Move<VkDescriptorPool> srcPool;
	Move<VkDescriptorSetLayout> srcLayout;
	Move<VkDescriptorSet> srcSet;

	// Extra set for external resources and output buffer.
	std::vector<Resource> extraResources;
	extraResources.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue, useAABBs, 0u, numIterations);
	if (useExternalImage)
		extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, vkd, device, alloc, qIndex, queue, useAABBs, getExternalSampledImageValue());
	if (useExternalSampler)
		extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLER, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
	if (rayTracing)
		extraResources.emplace_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, vkd, device, alloc, qIndex, queue, useAABBs, 0u);

	Move<VkDescriptorPool> extraPool;
	{
		DescriptorPoolBuilder poolBuilder;
		poolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
		if (useExternalImage)
			poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
		if (useExternalSampler)
			poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLER);
		if (rayTracing)
			poolBuilder.addType(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
		extraPool = poolBuilder.build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
	}

	Move<VkDescriptorSetLayout> extraLayout;
	{
		DescriptorSetLayoutBuilder layoutBuilder;
		layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, stageFlags, nullptr);
		if (useExternalImage)
			layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1u, stageFlags, nullptr);
		if (useExternalSampler)
			layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLER, 1u, stageFlags, nullptr);
		if (rayTracing)
		{
			// The extra acceleration structure is used from the ray generation shader only.
			layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 1u, VK_SHADER_STAGE_RAYGEN_BIT_KHR, nullptr);
		}
		extraLayout = layoutBuilder.build(vkd, device);
	}

	const auto extraSet = makeDescriptorSet(vkd, device, extraPool.get(), extraLayout.get());

	// Update extra set.
	using DescriptorBufferInfoPtr = de::MovePtr<VkDescriptorBufferInfo>;
	using DescriptorImageInfoPtr = de::MovePtr<VkDescriptorImageInfo>;
	using DescriptorASInfoPtr = de::MovePtr<VkWriteDescriptorSetAccelerationStructureKHR>;

	deUint32 bindingCount = 0u;
	DescriptorBufferInfoPtr bufferInfoPtr;
	DescriptorImageInfoPtr imageInfoPtr;
	DescriptorImageInfoPtr samplerInfoPtr;
	DescriptorASInfoPtr asWriteInfoPtr;

	const auto outputBufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numIterations));
	bufferInfoPtr = DescriptorBufferInfoPtr(new VkDescriptorBufferInfo(makeDescriptorBufferInfo(extraResources[bindingCount++].bufferWithMemory->get(), 0ull, outputBufferSize)));
	if (useExternalImage)
		imageInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(DE_NULL, extraResources[bindingCount++].imageView.get(), VK_IMAGE_LAYOUT_GENERAL)));
	if (useExternalSampler)
		samplerInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(extraResources[bindingCount++].sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_GENERAL)));
	if (rayTracing)
	{
		asWriteInfoPtr = DescriptorASInfoPtr(new VkWriteDescriptorSetAccelerationStructureKHR);
		*asWriteInfoPtr = initVulkanStructure();
		asWriteInfoPtr->accelerationStructureCount = 1u;
		asWriteInfoPtr->pAccelerationStructures = extraResources[bindingCount++].asData.tlas.get()->getPtr();
	}
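
	// Acceleration structures are written through the pNext chain of VkWriteDescriptorSet instead of
	// pBufferInfo/pImageInfo; the update builder below handles that chaining. A raw write would look
	// roughly like this (sketch only):
	//
	//     VkWriteDescriptorSet write = initVulkanStructure();
	//     write.pNext           = asWriteInfoPtr.get();
	//     write.dstSet          = extraSet.get();
	//     write.descriptorCount = 1u;
	//     write.descriptorType  = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
	//     vkd.updateDescriptorSets(device, 1u, &write, 0u, nullptr);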

	{
		bindingCount = 0u;
		DescriptorSetUpdateBuilder updateBuilder;
		updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, bufferInfoPtr.get());
		if (useExternalImage)
			updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, imageInfoPtr.get());
		if (useExternalSampler)
			updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLER, samplerInfoPtr.get());
		if (rayTracing)
			updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, asWriteInfoPtr.get());
		updateBuilder.update(vkd, device);
	}

	// Push constants.
	const deUint32 zero = 0u;
	const VkPushConstantRange pcRange = { stageFlags, 0u /*offset*/, static_cast<deUint32>(sizeof(zero)) /*size*/ };

	// Needed for some test variants.
	Move<VkShaderModule> vertPassthrough;
	Move<VkShaderModule> tesePassthrough;
	Move<VkShaderModule> tescPassthrough;
	Move<VkShaderModule> rgenPassthrough;
	Move<VkShaderModule> missPassthrough;

	if (m_params.testingStage == TestingStage::FRAGMENT
		|| m_params.testingStage == TestingStage::GEOMETRY
		|| m_params.testingStage == TestingStage::TESS_CONTROL
		|| m_params.testingStage == TestingStage::TESS_EVAL)
	{
		vertPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("vert"), 0u);
	}

	if (m_params.testingStage == TestingStage::TESS_CONTROL)
	{
		tesePassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tese"), 0u);
	}

	if (m_params.testingStage == TestingStage::TESS_EVAL)
	{
		tescPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tesc"), 0u);
	}

	if (m_params.testingStage == TestingStage::CLOSEST_HIT
		|| m_params.testingStage == TestingStage::ANY_HIT
		|| m_params.testingStage == TestingStage::INTERSECTION
		|| m_params.testingStage == TestingStage::MISS
		|| m_params.testingStage == TestingStage::CALLABLE)
	{
		rgenPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("rgen"), 0u);
	}

	if (m_params.testingStage == TestingStage::INTERSECTION)
	{
		missPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("miss"), 0u);
	}

	for (deUint32 iteration = 0u; iteration < numIterations; ++iteration)
	{
		// Generate source set for the current iteration.
		if (srcSetNeeded)
		{
			// Free previous descriptor set before rebuilding the pool.
			srcSet = Move<VkDescriptorSet>();
			iterationSrcSet = paramSet->genSourceSet(m_params.sourceSetStrategy, iteration);
			srcPool = iterationSrcSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, srcPoolFlags);
			srcLayout = iterationSrcSet->makeDescriptorSetLayout(vkd, device, stageFlags, srcLayoutFlags);

			const auto srcVarCount = iterationSrcSet->getVariableDescriptorCount();
			VariableCountInfoPtr srcVariableCountInfo;

			if (srcVarCount)
			{
				srcVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
				*srcVariableCountInfo = initVulkanStructure();

				srcVariableCountInfo->descriptorSetCount = 1u;
				srcVariableCountInfo->pDescriptorCounts = &(srcVarCount.get());
			}

			srcSet = makeDescriptorSet(vkd, device, srcPool.get(), srcLayout.get(), srcVariableCountInfo.get());
		}

		// Set layouts and sets used in the pipeline.
		const std::vector<VkDescriptorSetLayout> setLayouts = { dstLayout.get(), extraLayout.get() };
		const std::vector<VkDescriptorSet> usedSets = { dstSet.get(), extraSet.get() };

		// Create resources.
		allResources.emplace_back(paramSet->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs));
		const auto& resources = allResources.back();

		// Make pipeline for the current iteration.
		const auto pipelineLayout = makePipelineLayout(vkd, device, static_cast<deUint32>(setLayouts.size()), de::dataOrNull(setLayouts), 1u, &pcRange);
		const auto moduleName = shaderName(iteration);
		const auto shaderModule = createShaderModule(vkd, device, m_context.getBinaryCollection().get(moduleName), 0u);

		Move<VkPipeline> pipeline;
		Move<VkRenderPass> renderPass;
		Move<VkFramebuffer> framebuffer;

		deUint32 shaderGroupHandleSize = 0u;
		deUint32 shaderGroupBaseAlignment = 1u;

		de::MovePtr<BufferWithMemory> raygenSBT;
		de::MovePtr<BufferWithMemory> missSBT;
		de::MovePtr<BufferWithMemory> hitSBT;
		de::MovePtr<BufferWithMemory> callableSBT;

		VkStridedDeviceAddressRegionKHR raygenSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
		VkStridedDeviceAddressRegionKHR missSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
		VkStridedDeviceAddressRegionKHR hitSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
		VkStridedDeviceAddressRegionKHR callableSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);

		if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
			pipeline = makeComputePipeline(vkd, device, pipelineLayout.get(), 0u, shaderModule.get(), 0u, nullptr);
		else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
		{
			VkShaderModule vertModule = DE_NULL;
			VkShaderModule teseModule = DE_NULL;
			VkShaderModule tescModule = DE_NULL;
			VkShaderModule geomModule = DE_NULL;
			VkShaderModule fragModule = DE_NULL;

			if (m_params.testingStage == TestingStage::VERTEX)
				vertModule = shaderModule.get();
			else if (m_params.testingStage == TestingStage::FRAGMENT)
			{
				vertModule = vertPassthrough.get();
				fragModule = shaderModule.get();
			}
			else if (m_params.testingStage == TestingStage::GEOMETRY)
			{
				vertModule = vertPassthrough.get();
				geomModule = shaderModule.get();
			}
			else if (m_params.testingStage == TestingStage::TESS_CONTROL)
			{
				vertModule = vertPassthrough.get();
				teseModule = tesePassthrough.get();
				tescModule = shaderModule.get();
			}
			else if (m_params.testingStage == TestingStage::TESS_EVAL)
			{
				vertModule = vertPassthrough.get();
				tescModule = tescPassthrough.get();
				teseModule = shaderModule.get();
			}
			else
				DE_ASSERT(false);

			renderPass = buildRenderPass(vkd, device, resources);
			pipeline = buildGraphicsPipeline(vkd, device, pipelineLayout.get(), vertModule, tescModule, teseModule, geomModule, fragModule, renderPass.get());
			framebuffer = buildFramebuffer(vkd, device, renderPass.get(), resources);
		}
		else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
		{
			const auto rayTracingPipeline = de::newMovePtr<RayTracingPipeline>();
			const auto rayTracingPropertiesKHR = makeRayTracingProperties(vki, physDev);
			shaderGroupHandleSize = rayTracingPropertiesKHR->getShaderGroupHandleSize();
			shaderGroupBaseAlignment = rayTracingPropertiesKHR->getShaderGroupBaseAlignment();

			VkShaderModule rgenModule = DE_NULL;
			VkShaderModule isecModule = DE_NULL;
			VkShaderModule ahitModule = DE_NULL;
			VkShaderModule chitModule = DE_NULL;
			VkShaderModule missModule = DE_NULL;
			VkShaderModule callModule = DE_NULL;

			const deUint32 rgenGroup = 0u;
			deUint32 hitGroup = 0u;
			deUint32 missGroup = 0u;
			deUint32 callGroup = 0u;

			if (m_params.testingStage == TestingStage::RAY_GEN)
			{
				rgenModule = shaderModule.get();
				rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
			}
			else if (m_params.testingStage == TestingStage::INTERSECTION)
			{
				hitGroup = 1u;
				missGroup = 2u;
				rgenModule = rgenPassthrough.get();
				missModule = missPassthrough.get();
				isecModule = shaderModule.get();
				rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
				rayTracingPipeline->addShader(VK_SHADER_STAGE_INTERSECTION_BIT_KHR, isecModule, hitGroup);
				rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
			}
			else if (m_params.testingStage == TestingStage::ANY_HIT)
			{
				hitGroup = 1u;
				rgenModule = rgenPassthrough.get();
				ahitModule = shaderModule.get();
				rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
				rayTracingPipeline->addShader(VK_SHADER_STAGE_ANY_HIT_BIT_KHR, ahitModule, hitGroup);
			}
			else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
			{
				hitGroup = 1u;
				rgenModule = rgenPassthrough.get();
				chitModule = shaderModule.get();
				rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
				rayTracingPipeline->addShader(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, chitModule, hitGroup);
			}
			else if (m_params.testingStage == TestingStage::MISS)
			{
				missGroup = 1u;
				rgenModule = rgenPassthrough.get();
				missModule = shaderModule.get();
				rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
				rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
			}
			else if (m_params.testingStage == TestingStage::CALLABLE)
			{
				callGroup = 1u;
				rgenModule = rgenPassthrough.get();
				callModule = shaderModule.get();
				rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
				rayTracingPipeline->addShader(VK_SHADER_STAGE_CALLABLE_BIT_KHR, callModule, callGroup);
			}
			else
				DE_ASSERT(false);

			pipeline = rayTracingPipeline->createPipeline(vkd, device, pipelineLayout.get());

			raygenSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, rgenGroup, 1u);
			raygenSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, raygenSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);

			if (missGroup > 0u)
			{
				missSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, missGroup, 1u);
				missSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, missSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
			}

			if (hitGroup > 0u)
			{
				hitSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, hitGroup, 1u);
				hitSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, hitSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
			}

			if (callGroup > 0u)
			{
				callableSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, callGroup, 1u);
				callableSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, callableSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
			}
		}
		else
			DE_ASSERT(false);
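
		// Each shader binding table built above holds a single group handle, so every used region takes the
		// group handle size as both stride and size; regions left with a null device address and zero size
		// are valid empty miss/hit/callable tables for vkCmdTraceRaysKHR.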

		// Command buffer for the current iteration.
		const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
		const auto cmdBuffer = cmdBufferPtr.get();

		beginCommandBuffer(vkd, cmdBuffer);

		const Step steps[] = {
			(updateAfterBind ? Step::BIND : Step::UPDATE),
			(updateAfterBind ? Step::UPDATE : Step::BIND)
		};

		for (const auto& step : steps)
		{
			if (step == Step::BIND)
			{
				vkd.cmdBindPipeline(cmdBuffer, bindPoint, pipeline.get());
				vkd.cmdBindDescriptorSets(cmdBuffer, bindPoint, pipelineLayout.get(), 0u, static_cast<deUint32>(usedSets.size()), de::dataOrNull(usedSets), 0u, nullptr);
			}
			else // Step::UPDATE
			{
				if (srcSetNeeded)
				{
					// Note: these operations need to be called on paramSet and not iterationSrcSet. The latter
					// is a compatible set that contains compatible bindings but, when a binding has been changed
					// from non-mutable to mutable or to an extended mutable type, the lists of descriptor types
					// for the mutable bindings in iterationSrcSet are not in iteration order like they are in
					// the original set, so they must not be used to update or copy sets.
					paramSet->updateDescriptorSet(vkd, device, srcSet.get(), iteration, resources);
					paramSet->copyDescriptorSet(vkd, device, srcSet.get(), dstSet.get());
				}
				else
				{
					paramSet->updateDescriptorSet(vkd, device, dstSet.get(), iteration, resources);
				}
			}
		}
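
		// The copy path above should amount to one VkCopyDescriptorSet per copied descriptor, roughly as in
		// this sketch (with b/e standing for the binding number and array element; illustrative only):
		//
		//     VkCopyDescriptorSet copy = initVulkanStructure();
		//     copy.srcSet = srcSet.get(); copy.srcBinding = b; copy.srcArrayElement = e;
		//     copy.dstSet = dstSet.get(); copy.dstBinding = b; copy.dstArrayElement = e;
		//     copy.descriptorCount = 1u;
		//     vkd.updateDescriptorSets(device, 0u, nullptr, 1u, &copy);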

		// Run shader.
		vkd.cmdPushConstants(cmdBuffer, pipelineLayout.get(), stageFlags, 0u, static_cast<deUint32>(sizeof(zero)), &zero);

		if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
			vkd.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
		else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
		{
			const auto extent = getDefaultExtent();
			const auto renderArea = makeRect2D(extent);

			beginRenderPass(vkd, cmdBuffer, renderPass.get(), framebuffer.get(), renderArea);
			vkd.cmdDraw(cmdBuffer, 3u, 1u, 0u, 0u);
			endRenderPass(vkd, cmdBuffer);
		}
		else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
		{
			vkd.cmdTraceRaysKHR(cmdBuffer, &raygenSBTRegion, &missSBTRegion, &hitSBTRegion, &callableSBTRegion, 1u, 1u, 1u);
		}
		else
			DE_ASSERT(false);

		endCommandBuffer(vkd, cmdBuffer);
		submitCommandsAndWait(vkd, device, queue, cmdBuffer);

		// Verify output buffer.
		{
			const auto outputBufferVal = extraResources[0].getStoredValue(vkd, device, alloc, qIndex, queue, iteration);
			DE_ASSERT(static_cast<bool>(outputBufferVal));

			const auto expectedValue = getExpectedOutputBufferValue();
			if (outputBufferVal.get() != expectedValue)
			{
				std::ostringstream msg;
				msg << "Iteration " << iteration << ": unexpected value found in output buffer (expected " << expectedValue << " and found " << outputBufferVal.get() << ")";
				TCU_FAIL(msg.str());
			}
		}

		// Verify descriptor writes.
		{
			size_t resourcesOffset = 0;
			const auto writeMask = getStoredValueMask();
			const auto numBindings = paramSet->numBindings();

			for (deUint32 bindingIdx = 0u; bindingIdx < numBindings; ++bindingIdx)
			{
				const auto binding = paramSet->getBinding(bindingIdx);
				const auto bindingTypes = binding->typesAtIteration(iteration);

				for (size_t descriptorIdx = 0; descriptorIdx < bindingTypes.size(); ++descriptorIdx)
				{
					const auto& descriptorType = bindingTypes[descriptorIdx];
					if (!isShaderWritable(descriptorType))
						continue;

					const auto& resource = resources[resourcesOffset + descriptorIdx];
					const auto initialValue = resource.initialValue;
					const auto storedValuePtr = resource.getStoredValue(vkd, device, alloc, qIndex, queue);

					DE_ASSERT(static_cast<bool>(storedValuePtr));
					const auto storedValue = storedValuePtr.get();
					const auto expectedValue = (initialValue | writeMask);
					if (expectedValue != storedValue)
					{
						std::ostringstream msg;
						msg << "Iteration " << iteration << ": descriptor at binding " << bindingIdx << " index " << descriptorIdx
							<< " with type " << de::toString(descriptorType) << " contains unexpected value " << std::hex
							<< storedValue << " (expected " << expectedValue << ")";
						TCU_FAIL(msg.str());
					}
				}

				resourcesOffset += bindingTypes.size();
			}
		}
	}

	return tcu::TestStatus::pass("Pass");
}

using GroupPtr = de::MovePtr<tcu::TestCaseGroup>;

void createMutableTestVariants (tcu::TestContext& testCtx, tcu::TestCaseGroup* parentGroup, const DescriptorSetPtr& descriptorSet, const std::vector<TestingStage>& stagesToTest)
{
	const struct
	{
		UpdateType updateType;
		const char* name;
	} updateTypes[] = {
		{UpdateType::WRITE, "update_write"},
		{UpdateType::COPY, "update_copy"},
	};

	const struct
	{
		SourceSetStrategy sourceSetStrategy;
		const char* name;
	} sourceStrategies[] = {
		{SourceSetStrategy::MUTABLE, "mutable_source"},
		{SourceSetStrategy::NONMUTABLE, "nonmutable_source"},
		{SourceSetStrategy::NO_SOURCE, "no_source"},
	};

	const struct
	{
		SourceSetType sourceSetType;
		const char* name;
	} sourceTypes[] = {
		{SourceSetType::NORMAL, "normal_source"},
		{SourceSetType::HOST_ONLY, "host_only_source"},
		{SourceSetType::NO_SOURCE, "no_source"},
	};

	const struct
	{
		PoolMutableStrategy poolMutableStrategy;
		const char* name;
	} poolStrategies[] = {
		{PoolMutableStrategy::KEEP_TYPES, "pool_same_types"},
		{PoolMutableStrategy::NO_TYPES, "pool_no_types"},
		{PoolMutableStrategy::EXPAND_TYPES, "pool_expand_types"},
	};

	const struct
	{
		UpdateMoment updateMoment;
		const char* name;
	} updateMoments[] = {
		{UpdateMoment::NORMAL, "pre_update"},
		{UpdateMoment::UPDATE_AFTER_BIND, "update_after_bind"},
	};

	const struct
	{
		ArrayAccessType arrayAccessType;
		const char* name;
	} arrayAccessTypes[] = {
		{ArrayAccessType::CONSTANT, "index_constant"},
		{ArrayAccessType::PUSH_CONSTANT, "index_push_constant"},
		{ArrayAccessType::NO_ARRAY, "no_array"},
	};

	const struct StageAndName
	{
		TestingStage testingStage;
		const char* name;
	} testStageList[] = {
		{TestingStage::COMPUTE, "comp"},
		{TestingStage::VERTEX, "vert"},
		{TestingStage::TESS_CONTROL, "tesc"},
		{TestingStage::TESS_EVAL, "tese"},
		{TestingStage::GEOMETRY, "geom"},
		{TestingStage::FRAGMENT, "frag"},
		{TestingStage::RAY_GEN, "rgen"},
		{TestingStage::INTERSECTION, "isec"},
		{TestingStage::ANY_HIT, "ahit"},
		{TestingStage::CLOSEST_HIT, "chit"},
		{TestingStage::MISS, "miss"},
		{TestingStage::CALLABLE, "call"},
	};

	const bool hasArrays = descriptorSet->hasArrays();
	const bool hasInputAttachments = usesInputAttachments(*descriptorSet);

	for (const auto& ut : updateTypes)
	{
		GroupPtr updateGroup(new tcu::TestCaseGroup(testCtx, ut.name, ""));

		for (const auto& srcStrategy : sourceStrategies)
		{
			// Skip combinations that make no sense.
			if (ut.updateType == UpdateType::WRITE && srcStrategy.sourceSetStrategy != SourceSetStrategy::NO_SOURCE)
				continue;

			if (ut.updateType == UpdateType::COPY && srcStrategy.sourceSetStrategy == SourceSetStrategy::NO_SOURCE)
				continue;

			if (srcStrategy.sourceSetStrategy == SourceSetStrategy::NONMUTABLE && descriptorSet->needsAnyAliasing())
				continue;

			GroupPtr srcStrategyGroup(new tcu::TestCaseGroup(testCtx, srcStrategy.name, ""));

			for (const auto& srcType : sourceTypes)
			{
				// Skip combinations that make no sense.
				if (ut.updateType == UpdateType::WRITE && srcType.sourceSetType != SourceSetType::NO_SOURCE)
					continue;

				if (ut.updateType == UpdateType::COPY && srcType.sourceSetType == SourceSetType::NO_SOURCE)
					continue;

				GroupPtr srcTypeGroup(new tcu::TestCaseGroup(testCtx, srcType.name, ""));

				for (const auto& poolStrategy : poolStrategies)
				{
					GroupPtr poolStrategyGroup(new tcu::TestCaseGroup(testCtx, poolStrategy.name, ""));

					for (const auto& moment : updateMoments)
					{
						//if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && srcType.sourceSetType == SourceSetType::HOST_ONLY)
						//	continue;

						if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && hasInputAttachments)
							continue;

						GroupPtr momentGroup(new tcu::TestCaseGroup(testCtx, moment.name, ""));

						for (const auto& accessType : arrayAccessTypes)
						{
							// Skip combinations that make no sense.
							if (hasArrays && accessType.arrayAccessType == ArrayAccessType::NO_ARRAY)
								continue;

							if (!hasArrays && accessType.arrayAccessType != ArrayAccessType::NO_ARRAY)
								continue;

							GroupPtr accessTypeGroup(new tcu::TestCaseGroup(testCtx, accessType.name, ""));

							for (const auto& testStage : stagesToTest)
							{
								const auto beginItr = std::begin(testStageList);
								const auto endItr = std::end(testStageList);
								const auto iter = std::find_if(beginItr, endItr, [testStage] (const StageAndName& ts) { return ts.testingStage == testStage; });

								DE_ASSERT(iter != endItr);
								const auto& stage = *iter;

								if (hasInputAttachments && stage.testingStage != TestingStage::FRAGMENT)
									continue;

								TestParams params = {
									descriptorSet,
									ut.updateType,
									srcStrategy.sourceSetStrategy,
									srcType.sourceSetType,
									poolStrategy.poolMutableStrategy,
									moment.updateMoment,
									accessType.arrayAccessType,
									stage.testingStage,
								};

								accessTypeGroup->addChild(new MutableTypesTest(testCtx, stage.name, "", params));
							}

							momentGroup->addChild(accessTypeGroup.release());
						}

						poolStrategyGroup->addChild(momentGroup.release());
					}

					srcTypeGroup->addChild(poolStrategyGroup.release());
				}

				srcStrategyGroup->addChild(srcTypeGroup.release());
			}

			updateGroup->addChild(srcStrategyGroup.release());
		}

		parentGroup->addChild(updateGroup.release());
	}
}

}

std::string descriptorTypeStr (VkDescriptorType descriptorType)
{
	static const auto prefixLen = std::string("VK_DESCRIPTOR_TYPE_").size();
	return de::toLower(de::toString(descriptorType).substr(prefixLen));
}
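
// E.g. descriptorTypeStr(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) returns "storage_texel_buffer",
// which is used below to derive test group names.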

static void createChildren (tcu::TestCaseGroup* testGroup);

static void cleanupGroup (tcu::TestCaseGroup* testGroup)
{
	DE_UNREF(testGroup);
	// Destroy singleton objects.
	g_singletonDevice.clear();
}

tcu::TestCaseGroup* createDescriptorMutableTests (tcu::TestContext& testCtx)
{
	return createTestGroup(testCtx, "mutable_descriptor", "Tests for VK_VALVE_mutable_descriptor_type and VK_EXT_mutable_descriptor_type", createChildren, cleanupGroup);
}

void createChildren (tcu::TestCaseGroup* mainGroup)
{
	tcu::TestContext& testCtx = mainGroup->getTestContext();

	const VkDescriptorType basicDescriptorTypes[] = {
		VK_DESCRIPTOR_TYPE_SAMPLER,
		VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
		VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
		VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
		VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
		VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
		VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
		VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
		VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
		VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
	};

	static const auto mandatoryTypes = getMandatoryMutableTypes();

	using StageVec = std::vector<TestingStage>;

	const StageVec allStages =
	{
		TestingStage::COMPUTE,
		TestingStage::VERTEX,
		TestingStage::TESS_CONTROL,
		TestingStage::TESS_EVAL,
		TestingStage::GEOMETRY,
		TestingStage::FRAGMENT,
		TestingStage::RAY_GEN,
		TestingStage::INTERSECTION,
		TestingStage::ANY_HIT,
		TestingStage::CLOSEST_HIT,
		TestingStage::MISS,
		TestingStage::CALLABLE,
	};

	const StageVec reducedStages =
	{
		TestingStage::COMPUTE,
		TestingStage::VERTEX,
		TestingStage::FRAGMENT,
		TestingStage::RAY_GEN,
	};

	const StageVec computeOnly =
	{
		TestingStage::COMPUTE,
	};

	// Basic tests with a single mutable descriptor.
	{
		GroupPtr singleCases(new tcu::TestCaseGroup(testCtx, "single", "Basic mutable descriptor tests with a single mutable descriptor"));

		for (const auto& descriptorType : basicDescriptorTypes)
		{
			const auto groupName = descriptorTypeStr(descriptorType);
			const std::vector<VkDescriptorType> actualTypes(1u, descriptorType);

			DescriptorSetPtr setPtr;
			{
				DescriptorSet::BindingPtrVector setBindings;
				setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, actualTypes));
				setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
			}

			GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
			createMutableTestVariants(testCtx, subGroup.get(), setPtr, allStages);

			singleCases->addChild(subGroup.release());
		}

		// Case with a single descriptor that iterates several types.
		{
			DescriptorSetPtr setPtr;
			{
				DescriptorSet::BindingPtrVector setBindings;
				setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mandatoryTypes));
				setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
			}

			GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "all_mandatory", ""));
			createMutableTestVariants(testCtx, subGroup.get(), setPtr, reducedStages);

			singleCases->addChild(subGroup.release());
		}

		// Cases that try to verify switching from any descriptor type to any other is possible.
		{
			GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "switches", "Test switching from one to another descriptor type works as expected"));

			for (const auto& initialDescriptorType : basicDescriptorTypes)
			{
				for (const auto& finalDescriptorType : basicDescriptorTypes)
				{
					if (initialDescriptorType == finalDescriptorType)
						continue;

					const std::vector<VkDescriptorType> mutableTypes { initialDescriptorType, finalDescriptorType };
					DescriptorSet::BindingPtrVector setBindings;
					setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mutableTypes));

					DescriptorSetPtr setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));

					const auto groupName = descriptorTypeStr(initialDescriptorType) + "_" + descriptorTypeStr(finalDescriptorType);
					GroupPtr combinationGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
					createMutableTestVariants(testCtx, combinationGroup.get(), setPtr, reducedStages);
					subGroup->addChild(combinationGroup.release());
				}
			}

			singleCases->addChild(subGroup.release());
		}

		mainGroup->addChild(singleCases.release());
	}

	// Cases with a single non-mutable descriptor. This provides some basic checks to verify copying to non-mutable bindings works.
	{
		GroupPtr singleNonMutableGroup (new tcu::TestCaseGroup(testCtx, "single_nonmutable", "Tests using a single non-mutable descriptor"));

		for (const auto& descriptorType : basicDescriptorTypes)
		{
			DescriptorSet::BindingPtrVector bindings;
			bindings.emplace_back(new SingleBinding(descriptorType, std::vector<VkDescriptorType>()));
			DescriptorSetPtr descriptorSet (new DescriptorSet(bindings));

			const auto groupName = descriptorTypeStr(descriptorType);
			GroupPtr descGroup (new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));

			createMutableTestVariants(testCtx, descGroup.get(), descriptorSet, reducedStages);
			singleNonMutableGroup->addChild(descGroup.release());
		}

		mainGroup->addChild(singleNonMutableGroup.release());
	}

	const struct {
		bool unbounded;
		const char* name;
	} unboundedCases[] = {
		{false, "constant_size"},
		{true, "unbounded"},
	};

	const struct {
		bool aliasing;
		const char* name;
	} aliasingCases[] = {
		{false, "noaliasing"},
		{true, "aliasing"},
	};

	const struct {
		bool oneArrayOnly;
		bool mixNonMutable;
		const char* groupName;
		const char* groupDesc;
	} arrayCountGroups[] = {
		{true, false, "one_array", "Tests using an array of mutable descriptors"},
		{false, false, "multiple_arrays", "Tests using multiple arrays of mutable descriptors"},
		{false, true, "multiple_arrays_mixed", "Tests using multiple arrays of mutable descriptors mixed with arrays of nonmutable ones"},
	};

	for (const auto& variant : arrayCountGroups)
	{
		GroupPtr arrayGroup(new tcu::TestCaseGroup(testCtx, variant.groupName, variant.groupDesc));

		for (const auto& unboundedCase : unboundedCases)
		{
			GroupPtr unboundedGroup(new tcu::TestCaseGroup(testCtx, unboundedCase.name, ""));

			for (const auto& aliasingCase : aliasingCases)
			{
				GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));

				DescriptorSet::BindingPtrVector setBindings;

				// Prepare descriptors for this test variant.
				for (size_t mandatoryTypesRotation = 0; mandatoryTypesRotation < mandatoryTypes.size(); ++mandatoryTypesRotation)
				{
					const bool isLastBinding = (variant.oneArrayOnly || mandatoryTypesRotation == mandatoryTypes.size() - 1u);
					const bool isUnbounded = (unboundedCase.unbounded && isLastBinding);

					// Create a rotation of the mandatory types for each mutable array binding.
					auto mandatoryTypesVector = mandatoryTypes;
					{
						const auto beginPtr = &mandatoryTypesVector[0];
						const auto endPtr = beginPtr + mandatoryTypesVector.size();
						std::rotate(beginPtr, &mandatoryTypesVector[mandatoryTypesRotation], endPtr);
					}
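
					// E.g. with mandatory types {A, B, C} and mandatoryTypesRotation == 1, the resulting
					// vector is {B, C, A}; the same rotation idiom is reused for the aliasing cases below.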

					std::vector<SingleBinding> arrayBindings;

					if (aliasingCase.aliasing)
					{
						// With aliasing, the descriptor types rotate in each descriptor.
						for (size_t typeIdx = 0; typeIdx < mandatoryTypesVector.size(); ++typeIdx)
						{
							auto rotatedTypes = mandatoryTypesVector;
							const auto beginPtr = &rotatedTypes[0];
							const auto endPtr = beginPtr + rotatedTypes.size();

							std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);

							arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes);
						}
					}
					else
					{
						// Without aliasing, all descriptors use the same type at the same time.
						const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mandatoryTypesVector);
						arrayBindings.resize(mandatoryTypesVector.size(), noAliasingBinding);
					}

					setBindings.emplace_back(new ArrayBinding(isUnbounded, arrayBindings));

					if (variant.mixNonMutable && !isUnbounded)
					{
						// Create a non-mutable array binding interleaved with the other ones.
						const SingleBinding nonMutableBinding(mandatoryTypes[mandatoryTypesRotation], std::vector<VkDescriptorType>());
						std::vector<SingleBinding> nonMutableBindings(mandatoryTypes.size(), nonMutableBinding);
						setBindings.emplace_back(new ArrayBinding(false, nonMutableBindings));
					}

					if (variant.oneArrayOnly)
						break;
				}

				DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
				createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);

				unboundedGroup->addChild(aliasingGroup.release());
			}

			arrayGroup->addChild(unboundedGroup.release());
		}

		mainGroup->addChild(arrayGroup.release());
	}

	// Cases with a single mutable binding followed by an array of mutable bindings.
	// The array will use a single type beyond the mandatory ones.
	{
		GroupPtr singleAndArrayGroup(new tcu::TestCaseGroup(testCtx, "single_and_array", "Tests using a single mutable binding followed by a mutable array binding"));

		for (const auto& descriptorType : basicDescriptorTypes)
		{
			// Input attachments will not use arrays.
			if (descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
				continue;

			if (de::contains(begin(mandatoryTypes), end(mandatoryTypes), descriptorType))
				continue;

			const auto groupName = descriptorTypeStr(descriptorType);
			GroupPtr descTypeGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));

			for (const auto& aliasingCase : aliasingCases)
			{
				GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));

				DescriptorSet::BindingPtrVector setBindings;
				std::vector<SingleBinding> arrayBindings;

				// Add single type beyond the mandatory ones.
				auto arrayBindingDescTypes = mandatoryTypes;
				arrayBindingDescTypes.push_back(descriptorType);

				// Single mutable descriptor as the first binding.
				setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, arrayBindingDescTypes));

				// Descriptor array as the second binding.
				if (aliasingCase.aliasing)
				{
					// With aliasing, the descriptor types rotate in each descriptor.
					for (size_t typeIdx = 0; typeIdx < arrayBindingDescTypes.size(); ++typeIdx)
					{
						auto rotatedTypes = arrayBindingDescTypes;
						const auto beginPtr = &rotatedTypes[0];
						const auto endPtr = beginPtr + rotatedTypes.size();

						std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);

						arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes);
					}
				}
				else
				{
					// Without aliasing, all descriptors use the same type at the same time.
					const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, arrayBindingDescTypes);
					arrayBindings.resize(arrayBindingDescTypes.size(), noAliasingBinding);
				}

				// Second binding: array binding.
				setBindings.emplace_back(new ArrayBinding(false/*unbounded*/, arrayBindings));

				// Create set and test variants.
				DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
				createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);

				descTypeGroup->addChild(aliasingGroup.release());
			}

			singleAndArrayGroup->addChild(descTypeGroup.release());
		}

		mainGroup->addChild(singleAndArrayGroup.release());
	}

	// Cases with several mutable non-array bindings.
	{
		GroupPtr multipleGroup (new tcu::TestCaseGroup(testCtx, "multiple", "Tests using multiple mutable bindings"));
		GroupPtr mutableOnlyGroup (new tcu::TestCaseGroup(testCtx, "mutable_only", "Tests using only mutable descriptors"));
		GroupPtr mixedGroup (new tcu::TestCaseGroup(testCtx, "mixed", "Tests mixing mutable descriptors and non-mutable descriptors"));

		// Each descriptor will have a different type in every iteration, like in the one_array aliasing case.
		for (int groupIdx = 0; groupIdx < 2; ++groupIdx)
		{
			const bool mixed = (groupIdx == 1);
			DescriptorSet::BindingPtrVector setBindings;

			for (size_t typeIdx = 0; typeIdx < mandatoryTypes.size(); ++typeIdx)
			{
				auto rotatedTypes = mandatoryTypes;
				const auto beginPtr = &rotatedTypes[0];
				const auto endPtr = beginPtr + rotatedTypes.size();

				std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
				setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes));

				// Additional non-mutable binding interleaved with the mutable ones.
				if (mixed)
					setBindings.emplace_back(new SingleBinding(rotatedTypes[0], std::vector<VkDescriptorType>()));
			}
			DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));

			const auto dstGroup = (mixed ? mixedGroup.get() : mutableOnlyGroup.get());
			createMutableTestVariants(testCtx, dstGroup, descriptorSet, computeOnly);
		}

		multipleGroup->addChild(mutableOnlyGroup.release());
		multipleGroup->addChild(mixedGroup.release());
		mainGroup->addChild(multipleGroup.release());
	}
}

} // BindingModel
} // vkt