/*-------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2021 The Khronos Group Inc.
 * Copyright (c) 2021 Valve Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Tests for VK_VALVE_mutable_descriptor_type and VK_EXT_mutable_descriptor_type.
 *//*--------------------------------------------------------------------*/

#include "vktBindingMutableTests.hpp"
#include "vktTestCase.hpp"
#include "vktTestGroupUtil.hpp"
#include "vktCustomInstancesDevices.hpp"

#include "tcuCommandLine.hpp"

#include "vkDefs.hpp"
#include "vkRefUtil.hpp"
#include "vkQueryUtil.hpp"
#include "vkImageWithMemory.hpp"
#include "vkBufferWithMemory.hpp"
#include "vkTypeUtil.hpp"
#include "vkObjUtil.hpp"
#include "vkBarrierUtil.hpp"
#include "vkCmdUtil.hpp"
#include "vkBuilderUtil.hpp"
#include "vkRayTracingUtil.hpp"

#include "deUniquePtr.hpp"
#include "deSTLUtil.hpp"
#include "deStringUtil.hpp"

#include <vector>
#include <algorithm>
#include <iterator>
#include <set>
#include <sstream>
#include <limits>

namespace vkt
{
namespace BindingModel
{

namespace
{

using namespace vk;

de::SharedPtr<Move<vk::VkDevice>> g_singletonDevice;

static std::vector<std::string> removeExtensions (const std::vector<std::string>& a, const std::vector<const char*>& b)
{
    std::vector<std::string> res;
    std::set<std::string>    removeExts (b.begin(), b.end());

    for (std::vector<std::string>::const_iterator aIter = a.begin(); aIter != a.end(); ++aIter)
    {
        if (!de::contains(removeExts, *aIter))
            res.push_back(*aIter);
    }

    return res;
}

VkDevice getDevice (Context& context)
{
    if (!g_singletonDevice)
    {
        const float queuePriority = 1.0f;

        // Create a universal queue that supports graphics and compute.
        const VkDeviceQueueCreateInfo queueParams
        {
            VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // VkStructureType             sType;
            DE_NULL,                                    // const void*                 pNext;
            0u,                                         // VkDeviceQueueCreateFlags    flags;
            context.getUniversalQueueFamilyIndex(),     // deUint32                    queueFamilyIndex;
            1u,                                         // deUint32                    queueCount;
            &queuePriority                              // const float*                pQueuePriorities;
        };

        // \note Extensions in core are not explicitly enabled even though
        //       they are in the extension list advertised to tests.
        std::vector<const char*> extensionPtrs;
        std::vector<const char*> coreExtensions;
        getCoreDeviceExtensions(context.getUsedApiVersion(), coreExtensions);
        std::vector<std::string> nonCoreExtensions (removeExtensions(context.getDeviceExtensions(), coreExtensions));

        extensionPtrs.resize(nonCoreExtensions.size());

        for (size_t ndx = 0; ndx < nonCoreExtensions.size(); ++ndx)
            extensionPtrs[ndx] = nonCoreExtensions[ndx].c_str();

        VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutableDescriptorTypeFeatures = initVulkanStructure();
        VkPhysicalDeviceFeatures2                        features2                     = initVulkanStructure(&mutableDescriptorTypeFeatures);

        context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);

        const VkDeviceCreateInfo deviceCreateInfo
        {
            VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,                   // VkStructureType                    sType;
            &features2,                                             // const void*                        pNext;
            (VkDeviceCreateFlags)0u,                                // VkDeviceCreateFlags                flags;
            1,                                                      // deUint32                           queueCreateInfoCount;
            &queueParams,                                           // const VkDeviceQueueCreateInfo*     pQueueCreateInfos;
            0,                                                      // deUint32                           enabledLayerCount;
            DE_NULL,                                                // const char* const*                 ppEnabledLayerNames;
            (deUint32)extensionPtrs.size(),                         // deUint32                           enabledExtensionCount;
            (extensionPtrs.empty() ? DE_NULL : &extensionPtrs[0]),  // const char* const*                 ppEnabledExtensionNames;
            DE_NULL,                                                // const VkPhysicalDeviceFeatures*    pEnabledFeatures;
        };

        Move<VkDevice> device = createCustomDevice(
            context.getTestContext().getCommandLine().isValidationEnabled(),
            context.getPlatformInterface(), context.getInstance(), context.getInstanceInterface(),
            context.getPhysicalDevice(), &deviceCreateInfo);
        g_singletonDevice = de::SharedPtr<Move<VkDevice>>(new Move<VkDevice>(device));
    }

    return g_singletonDevice->get();
}
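
// Sketch of typical use from a test (hypothetical local names; since the singleton is a
// custom device, the queue must be re-fetched from it instead of using the default
// context queue):
//
//   const VkDevice device = getDevice(context);
//   const VkQueue  queue  = getDeviceQueue(context.getDeviceInterface(), device,
//                                          context.getUniversalQueueFamilyIndex(), 0u);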

deUint32 getDescriptorNumericValue (deUint32 iteration, deUint32 bindingIdx, deUint32 descriptorIdx = 0u)
{
    // When assigning numeric values for the descriptor contents, each descriptor will get 0x5aIIBBDD, where II is an octet
    // containing the iteration index, BB is an octet containing the binding index and DD is the descriptor index inside
    // that binding.
    constexpr deUint32 kNumericValueBase = 0x5a000000u;

    return (kNumericValueBase | ((iteration & 0xFFu) << 16) | ((bindingIdx & 0xFFu) << 8) | (descriptorIdx & 0xFFu));
}
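
// For example, getDescriptorNumericValue(2u, 3u, 1u) yields 0x5a020301u: iteration 2 in
// bits 16-23, binding 3 in bits 8-15 and descriptor 1 in bits 0-7.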

deUint16 getAccelerationStructureOffsetX (deUint32 descriptorNumericValue)
{
    // Keep the lowest 16 bits (binding and descriptor idx) as the offset.
    return static_cast<deUint16>(descriptorNumericValue);
}
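
// Continuing the example above, 0x5a020301u maps to offset 0x0301 (769), giving each
// binding/descriptor pair a distinct X position for its acceleration structure geometry.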

// Value that will be stored in the output buffer to signal success when reading values.
deUint32 getExpectedOutputBufferValue ()
{
    return 2u;
}

// This value will be stored in an image to be sampled when checking descriptors containing samplers alone.
deUint32 getExternalSampledImageValue ()
{
    return 0x41322314u;
}

// Value that will be ORed with the descriptor value before writing.
deUint32 getStoredValueMask ()
{
    return 0xFF000000u;
}

VkFormat getDescriptorImageFormat ()
{
    return VK_FORMAT_R32_UINT;
}

VkExtent3D getDefaultExtent ()
{
    return makeExtent3D(1u, 1u, 1u);
}

// Convert value to hexadecimal.
std::string toHex (deUint32 val)
{
    std::ostringstream s;
    s << "0x" << std::hex << val << "u";
    return s.str();
}
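
// For example, toHex(getStoredValueMask()) produces "0xff000000u", which can be embedded
// directly in the generated GLSL as an unsigned integer literal.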

// Returns the list of descriptor types that cannot be part of a mutable descriptor.
std::vector<VkDescriptorType> getForbiddenMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
    };
}

// Returns the list of descriptor types that are mandatory for the extension.
std::vector<VkDescriptorType> getMandatoryMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
        VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
    };
}

// This helps quickly transform a vector of descriptor types into a bitmask, which makes it easier to check some conditions.
enum DescriptorTypeFlagBits
{
    DTFB_SAMPLER                    = (1 << 0),
    DTFB_COMBINED_IMAGE_SAMPLER     = (1 << 1),
    DTFB_SAMPLED_IMAGE              = (1 << 2),
    DTFB_STORAGE_IMAGE              = (1 << 3),
    DTFB_UNIFORM_TEXEL_BUFFER       = (1 << 4),
    DTFB_STORAGE_TEXEL_BUFFER       = (1 << 5),
    DTFB_UNIFORM_BUFFER             = (1 << 6),
    DTFB_STORAGE_BUFFER             = (1 << 7),
    DTFB_UNIFORM_BUFFER_DYNAMIC     = (1 << 8),
    DTFB_STORAGE_BUFFER_DYNAMIC     = (1 << 9),
    DTFB_INPUT_ATTACHMENT           = (1 << 10),
    DTFB_INLINE_UNIFORM_BLOCK_EXT   = (1 << 11),
    DTFB_ACCELERATION_STRUCTURE_KHR = (1 << 12),
    DTFB_ACCELERATION_STRUCTURE_NV  = (1 << 13),
    DTFB_MUTABLE                    = (1 << 14),
};

using DescriptorTypeFlags = deUint32;

// Convert type to its corresponding flag bit.
DescriptorTypeFlagBits toDescriptorTypeFlagBit (VkDescriptorType descriptorType)
{
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:                    return DTFB_SAMPLER;
    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:     return DTFB_COMBINED_IMAGE_SAMPLER;
    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:              return DTFB_SAMPLED_IMAGE;
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:              return DTFB_STORAGE_IMAGE;
    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:       return DTFB_UNIFORM_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:       return DTFB_STORAGE_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:             return DTFB_UNIFORM_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:             return DTFB_STORAGE_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:     return DTFB_UNIFORM_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:     return DTFB_STORAGE_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:           return DTFB_INPUT_ATTACHMENT;
    case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:   return DTFB_INLINE_UNIFORM_BLOCK_EXT;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: return DTFB_ACCELERATION_STRUCTURE_KHR;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:  return DTFB_ACCELERATION_STRUCTURE_NV;
    case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:                return DTFB_MUTABLE;
    default: break;
    }

    // Unreachable.
    DE_ASSERT(false);
    return DTFB_SAMPLER;
}

// Convert vector of descriptor types to a bitfield.
DescriptorTypeFlags toDescriptorTypeFlags (const std::vector<VkDescriptorType>& types)
{
    DescriptorTypeFlags result = 0u;
    for (const auto& t : types)
        result |= toDescriptorTypeFlagBit(t);
    return result;
}
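
// For example, toDescriptorTypeFlags({VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER})
// returns (DTFB_SAMPLED_IMAGE | DTFB_STORAGE_BUFFER), i.e. 0x84u.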

// Convert bitfield to vector of descriptor types.
std::vector<VkDescriptorType> toDescriptorTypeVector (DescriptorTypeFlags bitfield)
{
    std::vector<VkDescriptorType> result;

    if (bitfield & DTFB_SAMPLER)                    result.push_back(VK_DESCRIPTOR_TYPE_SAMPLER);
    if (bitfield & DTFB_COMBINED_IMAGE_SAMPLER)     result.push_back(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (bitfield & DTFB_SAMPLED_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
    if (bitfield & DTFB_STORAGE_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
    if (bitfield & DTFB_UNIFORM_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
    if (bitfield & DTFB_STORAGE_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    if (bitfield & DTFB_STORAGE_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
    if (bitfield & DTFB_STORAGE_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
    if (bitfield & DTFB_INPUT_ATTACHMENT)           result.push_back(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    if (bitfield & DTFB_INLINE_UNIFORM_BLOCK_EXT)   result.push_back(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_KHR) result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_NV)  result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
    if (bitfield & DTFB_MUTABLE)                    result.push_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT);

    return result;
}
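
// Note the flags/vector round trip deduplicates repeated types and yields a canonical
// order (the bit order of DescriptorTypeFlagBits); SingleBinding::toMutable() below
// relies on this when merging type lists.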

// How to create the source set when copying descriptors from another set.
// * MUTABLE means to transform bindings into mutable bindings.
// * NONMUTABLE means to transform bindings into non-mutable bindings.
// * NO_SOURCE means no source set is used (descriptors are not copied).
enum class SourceSetStrategy
{
    MUTABLE = 0,
    NONMUTABLE,
    NO_SOURCE,
};

// How to specify mutable descriptor type lists when creating the descriptor pool.
enum class PoolMutableStrategy
{
    KEEP_TYPES = 0,
    EXPAND_TYPES,
    NO_TYPES,
};

// Type of information that's present in VkWriteDescriptorSet.
enum class WriteType
{
    IMAGE_INFO = 0,
    BUFFER_INFO,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE_INFO,
};

struct WriteInfo
{
    WriteType writeType;
    union
    {
        VkDescriptorImageInfo                        imageInfo;
        VkDescriptorBufferInfo                       bufferInfo;
        VkBufferView                                 bufferView;
        VkWriteDescriptorSetAccelerationStructureKHR asInfo;
    };

    explicit WriteInfo (const VkDescriptorImageInfo& info_)
        : writeType(WriteType::IMAGE_INFO)
        , imageInfo(info_)
    {}

    explicit WriteInfo (const VkDescriptorBufferInfo& info_)
        : writeType(WriteType::BUFFER_INFO)
        , bufferInfo(info_)
    {}

    explicit WriteInfo (VkBufferView view_)
        : writeType(WriteType::BUFFER_VIEW)
        , bufferView(view_)
    {}

    explicit WriteInfo (const VkWriteDescriptorSetAccelerationStructureKHR& asInfo_)
        : writeType(WriteType::ACCELERATION_STRUCTURE_INFO)
        , asInfo(asInfo_)
    {}
};
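
// A WriteInfo is later unpacked into the matching VkWriteDescriptorSet member depending
// on writeType (a sketch of the idea, not the actual update code used by the tests):
//
//   VkWriteDescriptorSet w = initVulkanStructure();
//   switch (info.writeType)
//   {
//   case WriteType::IMAGE_INFO:                  w.pImageInfo       = &info.imageInfo;  break;
//   case WriteType::BUFFER_INFO:                 w.pBufferInfo      = &info.bufferInfo; break;
//   case WriteType::BUFFER_VIEW:                 w.pTexelBufferView = &info.bufferView; break;
//   case WriteType::ACCELERATION_STRUCTURE_INFO: w.pNext            = &info.asInfo;     break;
//   }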

// Resource backing a single binding.
enum class ResourceType
{
    SAMPLER = 0,
    IMAGE,
    COMBINED_IMAGE_SAMPLER,
    BUFFER,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE,
};

// Type of resource backing a particular descriptor type.
ResourceType toResourceType (VkDescriptorType descriptorType)
{
    ResourceType resourceType = ResourceType::SAMPLER;
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:
        resourceType = ResourceType::SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
        resourceType = ResourceType::COMBINED_IMAGE_SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
        resourceType = ResourceType::IMAGE;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
        resourceType = ResourceType::BUFFER_VIEW;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        resourceType = ResourceType::BUFFER;
        break;

    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
        resourceType = ResourceType::ACCELERATION_STRUCTURE;
        break;

    default:
        DE_ASSERT(false);
        break;
    }

    return resourceType;
}

bool isShaderWritable (VkDescriptorType descriptorType)
{
    return (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
            descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
}

Move<VkSampler> makeDefaultSampler (const DeviceInterface& vkd, VkDevice device)
{
    const VkSamplerCreateInfo samplerCreateInfo = {
        VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,  // VkStructureType         sType;
        nullptr,                                // const void*             pNext;
        0u,                                     // VkSamplerCreateFlags    flags;
        VK_FILTER_NEAREST,                      // VkFilter                magFilter;
        VK_FILTER_NEAREST,                      // VkFilter                minFilter;
        VK_SAMPLER_MIPMAP_MODE_NEAREST,         // VkSamplerMipmapMode     mipmapMode;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode    addressModeU;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode    addressModeV;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,         // VkSamplerAddressMode    addressModeW;
        0.f,                                    // float                   mipLodBias;
        VK_FALSE,                               // VkBool32                anisotropyEnable;
        1.f,                                    // float                   maxAnisotropy;
        VK_FALSE,                               // VkBool32                compareEnable;
        VK_COMPARE_OP_ALWAYS,                   // VkCompareOp             compareOp;
        0.f,                                    // float                   minLod;
        0.f,                                    // float                   maxLod;
        VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,  // VkBorderColor           borderColor;
        VK_FALSE,                               // VkBool32                unnormalizedCoordinates;
    };

    return createSampler(vkd, device, &samplerCreateInfo);
}

de::MovePtr<ImageWithMemory> makeDefaultImage (const DeviceInterface& vkd, VkDevice device, Allocator& alloc)
{
    const auto extent = makeExtent3D(1u, 1u, 1u);
    const VkImageUsageFlags usageFlags = (
        VK_IMAGE_USAGE_SAMPLED_BIT
        | VK_IMAGE_USAGE_STORAGE_BIT
        | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_TRANSFER_SRC_BIT
        | VK_IMAGE_USAGE_TRANSFER_DST_BIT);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,  // VkStructureType          sType;
        nullptr,                              // const void*              pNext;
        0u,                                   // VkImageCreateFlags       flags;
        VK_IMAGE_TYPE_2D,                     // VkImageType              imageType;
        getDescriptorImageFormat(),           // VkFormat                 format;
        extent,                               // VkExtent3D               extent;
        1u,                                   // deUint32                 mipLevels;
        1u,                                   // deUint32                 arrayLayers;
        VK_SAMPLE_COUNT_1_BIT,                // VkSampleCountFlagBits    samples;
        VK_IMAGE_TILING_OPTIMAL,              // VkImageTiling            tiling;
        usageFlags,                           // VkImageUsageFlags        usage;
        VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode            sharingMode;
        0u,                                   // deUint32                 queueFamilyIndexCount;
        nullptr,                              // const deUint32*          pQueueFamilyIndices;
        VK_IMAGE_LAYOUT_UNDEFINED,            // VkImageLayout            initialLayout;
    };
    return de::MovePtr<ImageWithMemory>(new ImageWithMemory(vkd, device, alloc, imageCreateInfo, MemoryRequirement::Any));
}

Move<VkImageView> makeDefaultImageView (const DeviceInterface& vkd, VkDevice device, VkImage image)
{
    const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
    return makeImageView(vkd, device, image, VK_IMAGE_VIEW_TYPE_2D, getDescriptorImageFormat(), subresourceRange);
}

de::MovePtr<BufferWithMemory> makeDefaultBuffer (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 numElements = 1u)
{
    const VkBufferUsageFlags bufferUsage = (
        VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
        | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_TRANSFER_SRC_BIT
        | VK_BUFFER_USAGE_TRANSFER_DST_BIT);

    const auto bufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numElements));

    const auto bufferCreateInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

    return de::MovePtr<BufferWithMemory>(new BufferWithMemory(vkd, device, alloc, bufferCreateInfo, MemoryRequirement::HostVisible));
}

Move<VkBufferView> makeDefaultBufferView (const DeviceInterface& vkd, VkDevice device, VkBuffer buffer)
{
    const auto bufferOffset = static_cast<VkDeviceSize>(0);
    const auto bufferSize   = static_cast<VkDeviceSize>(sizeof(deUint32));

    return makeBufferView(vkd, device, buffer, getDescriptorImageFormat(), bufferOffset, bufferSize);
}

struct AccelerationStructureData
{
    using TLASPtr = de::MovePtr<TopLevelAccelerationStructure>;
    using BLASPtr = de::MovePtr<BottomLevelAccelerationStructure>;

    TLASPtr tlas;
    BLASPtr blas;

    void swap (AccelerationStructureData& other)
    {
        auto myTlasPtr = tlas.release();
        auto myBlasPtr = blas.release();

        auto otherTlasPtr = other.tlas.release();
        auto otherBlasPtr = other.blas.release();

        tlas = TLASPtr(otherTlasPtr);
        blas = BLASPtr(otherBlasPtr);

        other.tlas = TLASPtr(myTlasPtr);
        other.blas = BLASPtr(myBlasPtr);
    }

    AccelerationStructureData () : tlas(), blas() {}

    AccelerationStructureData (AccelerationStructureData&& other)
        : AccelerationStructureData()
    {
        swap(other);
    }

    AccelerationStructureData& operator= (AccelerationStructureData&& other)
    {
        swap(other);
        return *this;
    }
};

AccelerationStructureData makeDefaultAccelerationStructure (const DeviceInterface& vkd, VkDevice device, VkCommandBuffer cmdBuffer, Allocator& alloc, bool triangles, deUint16 offsetX)
{
    AccelerationStructureData data;

    // Triangle (or AABB, when not using triangles) around (offsetX, 0) at depth 5.0.
    const float middleX = static_cast<float>(offsetX);
    const float leftX   = middleX - 0.5f;
    const float rightX  = middleX + 0.5f;
    const float topY    = 0.5f;
    const float bottomY = -0.5f;
    const float depth   = 5.0f;

    std::vector<tcu::Vec3> vertices;

    if (triangles)
    {
        vertices.reserve(3u);
        vertices.emplace_back(middleX, topY, depth);
        vertices.emplace_back(rightX, bottomY, depth);
        vertices.emplace_back(leftX, bottomY, depth);
    }
    else
    {
        // Two opposite corners of the AABB.
        vertices.reserve(2u);
        vertices.emplace_back(leftX, bottomY, depth);
        vertices.emplace_back(rightX, topY, depth);
    }

    data.tlas = makeTopLevelAccelerationStructure();
    data.blas = makeBottomLevelAccelerationStructure();

    VkGeometryInstanceFlagsKHR instanceFlags = 0u;
    if (triangles)
        instanceFlags |= VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR;

    data.blas->addGeometry(vertices, triangles, VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR);
    data.blas->createAndBuild(vkd, device, cmdBuffer, alloc);

    de::SharedPtr<BottomLevelAccelerationStructure> blasSharedPtr (data.blas.release());
    data.tlas->setInstanceCount(1u);
    data.tlas->addInstance(blasSharedPtr, identityMatrix3x4, 0u, 0xFFu, 0u, instanceFlags);
    data.tlas->createAndBuild(vkd, device, cmdBuffer, alloc);

    return data;
}
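
// The GLSL check code below casts a ray from (offsetX, 0, 0) along +Z with tmin = 1.0
// and tmax = 10.0, so geometry placed at depth 5.0 is always intersected.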

const auto kShaderAccess = (VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT);

struct Resource
{
    VkDescriptorType              descriptorType;
    ResourceType                  resourceType;
    Move<VkSampler>               sampler;
    de::MovePtr<ImageWithMemory>  imageWithMemory;
    Move<VkImageView>             imageView;
    de::MovePtr<BufferWithMemory> bufferWithMemory;
    Move<VkBufferView>            bufferView;
    AccelerationStructureData     asData;
    deUint32                      initialValue;

    Resource (VkDescriptorType descriptorType_, const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, bool useAABBs, deUint32 initialValue_, deUint32 numElements = 1u)
        : descriptorType   (descriptorType_)
        , resourceType     (toResourceType(descriptorType))
        , sampler          ()
        , imageWithMemory  ()
        , imageView        ()
        , bufferWithMemory ()
        , bufferView       ()
        , asData           ()
        , initialValue     (initialValue_)
    {
        if (numElements != 1u)
            DE_ASSERT(resourceType == ResourceType::BUFFER);

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            sampler = makeDefaultSampler(vkd, device);
            break;

        case ResourceType::IMAGE:
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            sampler         = makeDefaultSampler(vkd, device);
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::BUFFER:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc, numElements);
            break;

        case ResourceType::BUFFER_VIEW:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc);
            bufferView       = makeDefaultBufferView(vkd, device, bufferWithMemory->get());
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
                const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
                const auto cmdBuffer    = cmdBufferPtr.get();
                const bool triangles    = !useAABBs;

                beginCommandBuffer(vkd, cmdBuffer);
                asData = makeDefaultAccelerationStructure(vkd, device, cmdBuffer, alloc, triangles, getAccelerationStructureOffsetX(initialValue));
                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        if (imageWithMemory || bufferWithMemory)
        {
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(initialValue));
                const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
                const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer (vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto&            bufferAlloc = stagingBuffer.getAllocation();
                void*            bufferData  = bufferAlloc.getHostPtr();

                deMemcpy(bufferData, &initialValue, sizeof(initialValue));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Transition and copy image.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));

                // Switch image to TRANSFER_DST_OPTIMAL before copying data to it.
                const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    0u, VK_ACCESS_TRANSFER_WRITE_BIT,
                    VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy data to image.
                vkd.cmdCopyBufferToImage(cmdBuffer, stagingBuffer.get(), imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);

                // Switch image to the GENERAL layout before reading or writing to it from shaders.
                const auto postTransferBarrier = makeImageMemoryBarrier(
                    VK_ACCESS_TRANSFER_WRITE_BIT, kShaderAccess,
                    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &postTransferBarrier);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }

            if (bufferWithMemory)
            {
                auto& bufferAlloc = bufferWithMemory->getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                const std::vector<deUint32> bufferValues (numElements, initialValue);
                deMemcpy(bufferData, bufferValues.data(), de::dataSize(bufferValues));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure host writes happen before shader reads/writes. Note: this barrier is not needed in theory.
                const auto hostToShaderBarrier = makeMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, kShaderAccess);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    1u, &hostToShaderBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
        }
    }

    // Remove problematic copy constructor.
    Resource (const Resource&) = delete;

    // Make it movable.
    Resource (Resource&& other) noexcept
        : descriptorType   (other.descriptorType)
        , resourceType     (other.resourceType)
        , sampler          (other.sampler)
        , imageWithMemory  (other.imageWithMemory.release())
        , imageView        (other.imageView)
        , bufferWithMemory (other.bufferWithMemory.release())
        , bufferView       (other.bufferView)
        , asData           (std::move(other.asData))
        , initialValue     (other.initialValue)
    {}

    ~Resource ()
    {}

    WriteInfo makeWriteInfo () const
    {
        using WriteInfoPtr = de::MovePtr<WriteInfo>;

        WriteInfoPtr writeInfo;

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_UNDEFINED };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::IMAGE:
            {
                const VkDescriptorImageInfo imageInfo = { DE_NULL, imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr(new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::BUFFER:
            {
                const VkDescriptorBufferInfo bufferInfo = { bufferWithMemory->get(), 0ull, static_cast<VkDeviceSize>(sizeof(deUint32)) };
                writeInfo = WriteInfoPtr(new WriteInfo(bufferInfo));
            }
            break;

        case ResourceType::BUFFER_VIEW:
            writeInfo = WriteInfoPtr(new WriteInfo(bufferView.get()));
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                VkWriteDescriptorSetAccelerationStructureKHR asWrite = initVulkanStructure();
                asWrite.accelerationStructureCount = 1u;
                asWrite.pAccelerationStructures    = asData.tlas.get()->getPtr();
                writeInfo = WriteInfoPtr(new WriteInfo(asWrite));
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        return *writeInfo;
    }

    tcu::Maybe<deUint32> getStoredValue (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 position = 0u) const
    {
        if (position != 0u)
            DE_ASSERT(static_cast<bool>(bufferWithMemory));

        if (imageWithMemory || bufferWithMemory)
        {
            // Command pool and buffer.
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                deUint32                 result;
                const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(result));
                const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_DST_BIT);
                const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer (vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto&            bufferAlloc = stagingBuffer.getAllocation();
                void*            bufferData  = bufferAlloc.getHostPtr();

                // Copy image value to staging buffer.
                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure shader accesses happen before transfers and prepare image for transfer.
                const auto colorResourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    kShaderAccess, VK_ACCESS_TRANSFER_READ_BIT,
                    VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                    imageWithMemory->get(), colorResourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy image contents to staging buffer.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
                vkd.cmdCopyImageToBuffer(cmdBuffer, imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, stagingBuffer.get(), 1u, &copyRegion);

                // Make sure writes are visible from the host.
                const auto postTransferBarrier = makeMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u, &postTransferBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                // Get value from staging buffer.
                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData, sizeof(result));
                return tcu::just(result);
            }

            if (bufferWithMemory)
            {
                auto&    bufferAlloc = bufferWithMemory->getAllocation();
                auto     bufferData  = reinterpret_cast<const char*>(bufferAlloc.getHostPtr());
                deUint32 result;

                // Make sure shader writes are visible from the host.
                beginCommandBuffer(vkd, cmdBuffer);

                const auto shaderToHostBarrier = makeMemoryBarrier(kShaderAccess, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
                    1u, &shaderToHostBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData + sizeof(deUint32) * static_cast<size_t>(position), sizeof(result));
                return tcu::just(result);
            }
        }

        return tcu::Nothing;
    }
};
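
// Sketch of typical use (hypothetical values; vkd, device, alloc, qIndex and queue come
// from the test context):
//
//   Resource res (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue,
//                 false /*useAABBs*/, getDescriptorNumericValue(0u, 0u));
//   const WriteInfo write = res.makeWriteInfo();                                   // For the descriptor update.
//   const auto      value = res.getStoredValue(vkd, device, alloc, qIndex, queue); // After running the shader.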

struct BindingInterface
{
    virtual ~BindingInterface () {}

    // Maximum number of types used by the binding; this is also the minimum number of iterations needed to test all mutable types.
    virtual deUint32 maxTypes () const = 0;

    // Types that will be used by the binding at a given iteration.
    virtual std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const = 0;

    // Binding's main type.
    virtual VkDescriptorType mainType () const = 0;

    // Binding's list of mutable types, if present.
    virtual std::vector<VkDescriptorType> mutableTypes () const = 0;

    // Descriptor count in the binding.
    virtual size_t size () const = 0;

    // Is the binding an array binding?
    virtual bool isArray () const = 0;

    // Is the binding an unbounded array?
    virtual bool isUnbounded () const = 0;

    // Will the binding use different descriptor types in a given iteration?
    virtual bool needsAliasing (deUint32 iteration) const
    {
        const auto typesVec = typesAtIteration(iteration);
        std::set<VkDescriptorType> descTypes (begin(typesVec), end(typesVec));
        return (descTypes.size() > 1u);
    }

    // Will the binding need aliasing on any iteration up to a given number?
    virtual bool needsAliasingUpTo (deUint32 numIterations) const
    {
        std::vector<bool> needsAliasingFlags;
        needsAliasingFlags.reserve(numIterations);

        for (deUint32 iter = 0u; iter < numIterations; ++iter)
            needsAliasingFlags.push_back(needsAliasing(iter));

        return std::any_of(begin(needsAliasingFlags), end(needsAliasingFlags), [] (bool f) { return f; });
    }

private:
    virtual bool hasDescriptorType (deUint32 iteration, VkDescriptorType descriptorType) const
    {
        const auto typesVec = typesAtIteration(iteration);
        return (std::find(begin(typesVec), end(typesVec), descriptorType) != end(typesVec));
    }

public:
    // Convert one particular binding to a mutable or non-mutable equivalent binding, returning the equivalent binding.
    virtual de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const = 0;
    virtual de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const = 0;

    // Create resources needed to back this binding.
    virtual std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const = 0;

    // Get GLSL binding declarations. Note: no array size means no array; a size < 0 means unbounded array.
    virtual std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const = 0;

    // Get GLSL statements to check this binding.
    virtual std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const = 0;
};

// Represents a single binding that will be used in a test.
class SingleBinding : public BindingInterface
{
private:
    VkDescriptorType              type;            // The descriptor type.
    std::vector<VkDescriptorType> mutableTypesVec; // The types that will be used for each iteration of a test if mutable.

public:
    SingleBinding (VkDescriptorType type_, std::vector<VkDescriptorType> mutableTypes_)
        : type            (type_)
        , mutableTypesVec (std::move(mutableTypes_))
    {
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        const auto        kBeginForbidden        = begin(kForbiddenMutableTypes);
        const auto        kEndForbidden          = end(kForbiddenMutableTypes);

        // For release builds.
        DE_UNREF(kBeginForbidden);
        DE_UNREF(kEndForbidden);

        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
        {
            DE_ASSERT(mutableTypesVec.empty());
        }
        else
        {
            DE_ASSERT(!mutableTypesVec.empty());
            DE_ASSERT(std::none_of(begin(mutableTypesVec), end(mutableTypesVec),
                [&kBeginForbidden, &kEndForbidden] (VkDescriptorType t) -> bool {
                    return std::find(kBeginForbidden, kEndForbidden, t) != kEndForbidden;
                }));
        }
    }

    deUint32 maxTypes () const override
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
            return 1u;
        const auto vecSize = mutableTypesVec.size();
        DE_ASSERT(vecSize <= std::numeric_limits<deUint32>::max());
        return static_cast<deUint32>(vecSize);
    }

    VkDescriptorType typeAtIteration (deUint32 iteration) const
    {
        return typesAtIteration(iteration)[0];
    }

    std::vector<VkDescriptorType> usedTypes () const
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
            return std::vector<VkDescriptorType>(1u, type);
        return mutableTypesVec;
    }

    std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
    {
        const auto typesVec = usedTypes();
        return std::vector<VkDescriptorType>(1u, typesVec[static_cast<size_t>(iteration) % typesVec.size()]);
    }

    VkDescriptorType mainType () const override
    {
        return type;
    }

    std::vector<VkDescriptorType> mutableTypes () const override
    {
        return mutableTypesVec;
    }

    size_t size () const override
    {
        return size_t{1u};
    }

    bool isArray () const override
    {
        return false;
    }

    bool isUnbounded () const override
    {
        return false;
    }

    de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
    {
        DE_UNREF(iteration);

        static const auto kMandatoryMutableTypeFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
        if (type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
        {
            const auto descFlags = toDescriptorTypeFlags(mutableTypesVec);
            return de::MovePtr<BindingInterface>(new SingleBinding(type, toDescriptorTypeVector(descFlags)));
        }

        // Make sure it's not a forbidden mutable type.
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        DE_ASSERT(std::find(begin(kForbiddenMutableTypes), end(kForbiddenMutableTypes), type) == end(kForbiddenMutableTypes));

        // Convert the binding to mutable using a wider set of descriptor types if possible, including the binding type.
        const auto descFlags = (kMandatoryMutableTypeFlags | toDescriptorTypeFlagBit(type));

        return de::MovePtr<BindingInterface>(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, toDescriptorTypeVector(descFlags)));
    }

    de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
    {
        return de::MovePtr<BindingInterface>(new SingleBinding(typeAtIteration(iteration), std::vector<VkDescriptorType>()));
    }

    std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
    {
        const auto descriptorType = typeAtIteration(iteration);

        std::vector<Resource> resources;
        resources.emplace_back(descriptorType, vkd, device, alloc, qIndex, queue, useAABBs, baseValue);
        return resources;
    }

    std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
    {
        const auto         descriptorType = typeAtIteration(iteration);
        const std::string  arraySuffix    = ((static_cast<bool>(arraySize)) ? ((arraySize.get() < 0) ? "[]" : ("[" + de::toString(arraySize.get()) + "]")) : "");
        const std::string  layoutAttribs  = "set=" + de::toString(setNum) + ", binding=" + de::toString(bindingNum);
        const std::string  bindingSuffix  = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
        const std::string  nameSuffix     = bindingSuffix + arraySuffix;
        std::ostringstream declarations;

        declarations << "layout (";

        switch (descriptorType)
        {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            declarations << layoutAttribs << ") uniform sampler sampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            declarations << layoutAttribs << ") uniform usampler2D combinedSampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            declarations << layoutAttribs << ") uniform utexture2D sampledImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            declarations << layoutAttribs << ") uniform uboBlock" << bindingSuffix << " { uint val; } ubo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            declarations << layoutAttribs << ") buffer sboBlock" << bindingSuffix << " { uint val; } ssbo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            declarations << layoutAttribs << ") uniform utextureBuffer uniformTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            declarations << layoutAttribs << ", r32ui) uniform uimageBuffer storageTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            declarations << layoutAttribs << ", r32ui) uniform uimage2D storageImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            declarations << layoutAttribs << ", input_attachment_index=" << inputAttachmentIdx << ") uniform usubpassInput inputAttachment" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
            declarations << layoutAttribs << ") uniform accelerationStructureEXT accelerationStructure" << nameSuffix;
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        declarations << ";\n";

        return declarations.str();
    }

    std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
    {
        const auto        descriptorType = typeAtIteration(iteration);
        const std::string bindingSuffix  = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);

        std::string indexSuffix;
        if (arrayIndex)
        {
            indexSuffix = de::toString(arrayIndex.get());
            if (usePushConstants)
                indexSuffix += " + pc.zero";
            indexSuffix = "[" + indexSuffix + "]";
        }

        const std::string nameSuffix         = bindingSuffix + indexSuffix;
        const std::string baseValue          = toHex(baseValue_);
        const std::string externalImageValue = toHex(getExternalSampledImageValue());
        const std::string mask               = toHex(getStoredValueMask());

        std::ostringstream checks;

        // Note: all of these depend on an external anyError uint variable.
        switch (descriptorType)
        {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            // Note this depends on an "externalSampledImage" binding.
            checks << "    {\n";
            checks << "        uint readValue = texture(usampler2D(externalSampledImage, sampler" << nameSuffix << "), vec2(0, 0)).r;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << externalImageValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            checks << "    {\n";
            checks << "        uint readValue = texture(combinedSampler" << nameSuffix << ", vec2(0, 0)).r;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            // Note this depends on an "externalSampler" binding.
            checks << "    {\n";
            checks << "        uint readValue = texture(usampler2D(sampledImage" << nameSuffix << ", externalSampler), vec2(0, 0)).r;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = ubo" << nameSuffix << ".val;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = ssbo" << nameSuffix << ".val;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            // Check writes.
            checks << "        ssbo" << nameSuffix << ".val = (readValue | " << mask << ");\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = texelFetch(uniformTexel" << nameSuffix << ", 0).x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            checks << "    {\n";
            checks << "        uint readValue = imageLoad(storageTexel" << nameSuffix << ", 0).x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "        readValue |= " << mask << ";\n";
            // Check writes.
            checks << "        imageStore(storageTexel" << nameSuffix << ", 0, uvec4(readValue, 0, 0, 0));\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            checks << "    {\n";
            checks << "        uint readValue = imageLoad(storageImage" << nameSuffix << ", ivec2(0, 0)).x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "        readValue |= " << mask << ";\n";
            // Check writes.
            checks << "        imageStore(storageImage" << nameSuffix << ", ivec2(0, 0), uvec4(readValue, 0, 0, 0));\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            checks << "    {\n";
            checks << "        uint readValue = subpassLoad(inputAttachment" << nameSuffix << ").x;\n";
            checks << "        debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << "        anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << "        anyError = readValue;\n";
            checks << "    }\n";
            break;

        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
            checks << "    {\n";
            checks << "        const uint cullMask = 0xFF;\n";
            checks << "        const vec3 origin = vec3(" << getAccelerationStructureOffsetX(baseValue_) << ".0, 0.0, 0.0);\n";
            checks << "        const vec3 direction = vec3(0.0, 0.0, 1.0);\n";
            checks << "        const float tmin = 1.0;\n";
            checks << "        const float tmax = 10.0;\n";
            checks << "        uint candidateFound = 0u;\n";
            checks << "        rayQueryEXT rq;\n";
            checks << "        rayQueryInitializeEXT(rq, accelerationStructure" << nameSuffix << ", gl_RayFlagsNoneEXT, cullMask, origin, tmin, direction, tmax);\n";
            checks << "        while (rayQueryProceedEXT(rq)) {\n";
            checks << "            const uint candidateType = rayQueryGetIntersectionTypeEXT(rq, false);\n";
            checks << "            if (candidateType == gl_RayQueryCandidateIntersectionTriangleEXT || candidateType == gl_RayQueryCandidateIntersectionAABBEXT) {\n";
            checks << "                candidateFound = 1u;\n";
            checks << "            }\n";
            checks << "        }\n";
            checks << "        anyError |= ((candidateFound == 1u) ? 0u : 1u);\n";
            checks << "    }\n";
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        return checks.str();
    }
};
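
// Example (sketch): a mutable binding that alternates between two types across
// iterations, because typesAtIteration() indexes the used type list modulo its size:
//
//   const SingleBinding binding (VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
//       { VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER });
//   binding.typeAtIteration(0u); // VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
//   binding.typeAtIteration(1u); // VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
//   binding.typeAtIteration(2u); // VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE again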
1294
1295 // Represents an array of bindings. Individual bindings are stored as SingleBindings because each one of them may take a different
1296 // type in each iteration (i.e. they can all have different descriptor type vectors).
1297 class ArrayBinding : public BindingInterface
1298 {
1299 private:
1300 bool unbounded;
1301 std::vector<SingleBinding> bindings;
1302
1303 public:
ArrayBinding(bool unbounded_,std::vector<SingleBinding> bindings_)1304 ArrayBinding (bool unbounded_, std::vector<SingleBinding> bindings_)
1305 : unbounded (unbounded_)
1306 , bindings (std::move(bindings_))
1307 {
1308 // We need to check all single bindings have the same effective type, even if mutable descriptors have different orders.
1309 DE_ASSERT(!bindings.empty());
1310
1311 std::set<VkDescriptorType> basicTypes;
1312 std::set<DescriptorTypeFlags> bindingTypes;
1313
1314 for (const auto& b : bindings)
1315 {
1316 basicTypes.insert(b.mainType());
1317 bindingTypes.insert(toDescriptorTypeFlags(b.usedTypes()));
1318 }
1319
1320 DE_ASSERT(basicTypes.size() == 1u);
1321 DE_ASSERT(bindingTypes.size() == 1u);
1322
1323 // For release builds.
1324 DE_UNREF(basicTypes);
1325 DE_UNREF(bindingTypes);
1326 }
1327
maxTypes() const1328 deUint32 maxTypes () const override
1329 {
1330 // Each binding may have the same effective type but a different number of iterations due to repeated types.
1331 std::vector<size_t> bindingSizes;
1332 bindingSizes.reserve(bindings.size());
1333
1334 std::transform(begin(bindings), end(bindings), std::back_inserter(bindingSizes),
1335 [] (const SingleBinding& b) { return b.usedTypes().size(); });
1336
1337 const auto maxElement = std::max_element(begin(bindingSizes), end(bindingSizes));
1338 DE_ASSERT(maxElement != end(bindingSizes));
1339 DE_ASSERT(*maxElement <= std::numeric_limits<deUint32>::max());
1340 return static_cast<deUint32>(*maxElement);
1341 }
1342
typesAtIteration(deUint32 iteration) const1343 std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
1344 {
1345 std::vector<VkDescriptorType> result;
1346 result.reserve(bindings.size());
1347
1348 for (const auto& b : bindings)
1349 result.push_back(b.typeAtIteration(iteration));
1350
1351 return result;
1352 }
1353
mainType() const1354 VkDescriptorType mainType () const override
1355 {
1356 return bindings[0].mainType();
1357 }
1358
mutableTypes() const1359 std::vector<VkDescriptorType> mutableTypes () const override
1360 {
1361 return bindings[0].mutableTypes();
1362 }
1363
size() const1364 size_t size () const override
1365 {
1366 return bindings.size();
1367 }
1368
isArray() const1369 bool isArray () const override
1370 {
1371 return true;
1372 }
1373
isUnbounded() const1374 bool isUnbounded () const override
1375 {
1376 return unbounded;
1377 }
1378
toMutable(deUint32 iteration) const1379 de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
1380 {
1381 // Replicate the first binding once converted, as all are equivalent.
1382 const auto firstBindingPtr = bindings[0].toMutable(iteration);
1383 const auto firstBinding = *dynamic_cast<SingleBinding*>(firstBindingPtr.get());
1384 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1385
1386 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1387 }
1388
toNonMutable(deUint32 iteration) const1389 de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
1390 {
1391 // Make sure this binding can be converted to nonmutable for a given iteration.
1392 DE_ASSERT(!needsAliasing(iteration));
1393
1394 // We could use each SingleBinding's toNonMutable(), but this is the same.
1395 const auto descType = bindings[0].typeAtIteration(iteration);
1396 const SingleBinding firstBinding (descType, std::vector<VkDescriptorType>());
1397 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1398
1399 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1400 }
1401
createResources(const DeviceInterface & vkd,VkDevice device,Allocator & alloc,deUint32 qIndex,VkQueue queue,deUint32 iteration,bool useAABBs,deUint32 baseValue) const1402 std::vector<Resource> createResources (
1403 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
1404 deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
1405 {
1406 std::vector<Resource> resources;
1407 const auto numBindings = static_cast<deUint32>(bindings.size());
1408
1409 for (deUint32 i = 0u; i < numBindings; ++i)
1410 {
1411 auto resourceVec = bindings[i].createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, baseValue + i);
1412 resources.emplace_back(std::move(resourceVec[0]));
1413 }
1414
1415 return resources;
1416 }
1417
1418 // We will ignore the array size parameter.
1419 std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
1420 {
1421 const auto descriptorCount = bindings.size();
1422 const auto arraySizeVal = (isUnbounded() ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(descriptorCount)));
1423
1424 DE_UNREF(arraySize);
1425 DE_ASSERT(descriptorCount < static_cast<size_t>(std::numeric_limits<deInt32>::max()));
1426
1427 // If no aliasing is needed, a single declaration covers the whole binding.
1428 if (!needsAliasing(iteration))
1429 return bindings[0].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1430
1431 // Aliasing needed. Avoid reusing types.
1432 const auto descriptorTypes = typesAtIteration(iteration);
1433 std::set<VkDescriptorType> usedTypes;
1434 std::ostringstream declarations;
1435
1436 for (size_t descriptorIdx = 0u; descriptorIdx < descriptorCount; ++descriptorIdx)
1437 {
1438 const auto& descriptorType = descriptorTypes[descriptorIdx];
1439 if (usedTypes.count(descriptorType) > 0)
1440 continue;
1441
1442 usedTypes.insert(descriptorType);
1443 declarations << bindings[descriptorIdx].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1444 }
1445
1446 return declarations.str();
1447 }
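// When aliasing is active, the loop above emits one declaration per distinct type, all sharing the same
// set/binding pair. Illustrative output for a binding alternating between two types (the actual identifiers
// come from SingleBinding::glslDeclarations and may differ):
//
//   layout (set=0, binding=0) uniform usamplerBuffer texelBuffer_0[4];
//   layout (set=0, binding=0, r32ui) uniform uimage2D storageImage_0[4];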
1448
1449 std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1450 {
1451 DE_ASSERT(!arrayIndex);
1452 DE_UNREF(arrayIndex); // For release builds.
1453
1454 std::ostringstream checks;
1455 const auto numDescriptors = static_cast<deUint32>(bindings.size());
1456
1457 for (deUint32 descriptorIdx = 0u; descriptorIdx < numDescriptors; ++descriptorIdx)
1458 {
1459 const auto& binding = bindings[descriptorIdx];
1460 checks << binding.glslCheckStatements(iteration, setNum, bindingNum, baseValue_ + descriptorIdx, tcu::just(descriptorIdx), usePushConstants);
1461 }
1462
1463 return checks.str();
1464 }
1465 };
1466
1467 class DescriptorSet;
1468
1469 using DescriptorSetPtr = de::SharedPtr<DescriptorSet>;
1470
1471 class DescriptorSet
1472 {
1473 public:
1474 using BindingInterfacePtr = de::MovePtr<BindingInterface>;
1475 using BindingPtrVector = std::vector<BindingInterfacePtr>;
1476
1477 private:
1478 BindingPtrVector bindings;
1479
1480 public:
1481 explicit DescriptorSet (BindingPtrVector& bindings_)
1482 : bindings(std::move(bindings_))
1483 {
1484 DE_ASSERT(!bindings.empty());
1485 }
1486
1487 size_t numBindings () const
1488 {
1489 return bindings.size();
1490 }
1491
1492 const BindingInterface* getBinding (size_t bindingIdx) const
1493 {
1494 return bindings.at(bindingIdx).get();
1495 }
1496
1497 // Maximum number of descriptor types used by any binding in the set.
1498 deUint32 maxTypes () const
1499 {
1500 std::vector<deUint32> maxSizes;
1501 maxSizes.reserve(bindings.size());
1502
1503 std::transform(begin(bindings), end(bindings), std::back_inserter(maxSizes),
1504 [] (const BindingInterfacePtr& b) { return b->maxTypes(); });
1505
1506 const auto maxElement = std::max_element(begin(maxSizes), end(maxSizes));
1507 DE_ASSERT(maxElement != end(maxSizes));
1508 return *maxElement;
1509 }
1510
1511 // Create another descriptor set that can be the source for copies when setting descriptor values.
1512 DescriptorSetPtr genSourceSet (SourceSetStrategy strategy, deUint32 iteration) const
1513 {
1514 BindingPtrVector newBindings;
1515 for (const auto& b : bindings)
1516 {
1517 if (strategy == SourceSetStrategy::MUTABLE)
1518 newBindings.push_back(b->toMutable(iteration));
1519 else
1520 newBindings.push_back(b->toNonMutable(iteration));
1521 }
1522
1523 return DescriptorSetPtr(new DescriptorSet(newBindings));
1524 }
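// Usage sketch for copy-based updates: resources are written into the set generated here, and
// copyDescriptorSet() below then transfers them into the set under test, exercising mutable-to-mutable or
// concrete-to-mutable copies depending on the strategy (the driving code lives in the test instance).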
1525
1526 // Makes a descriptor pool that can be used when allocating descriptors for this set.
1527 Move<VkDescriptorPool> makeDescriptorPool (const DeviceInterface& vkd, VkDevice device, PoolMutableStrategy strategy, VkDescriptorPoolCreateFlags flags) const
1528 {
1529 std::vector<VkDescriptorPoolSize> poolSizes;
1530 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1531 std::vector<VkMutableDescriptorTypeListEXT> mutableTypeLists;
1532
1533 // Make vector element addresses stable.
1534 const auto bindingCount = numBindings();
1535 poolSizes.reserve(bindingCount);
1536 mutableTypesVec.reserve(bindingCount);
1537 mutableTypeLists.reserve(bindingCount);
1538
1539 for (const auto& b : bindings)
1540 {
1541 const auto mainType = b->mainType();
1542 const VkDescriptorPoolSize poolSize = {
1543 mainType,
1544 static_cast<deUint32>(b->size()),
1545 };
1546 poolSizes.push_back(poolSize);
1547
1548 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1549 {
1550 if (mainType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
1551 {
1552 if (strategy == PoolMutableStrategy::KEEP_TYPES)
1553 {
1554 mutableTypesVec.emplace_back(b->mutableTypes());
1555 }
1556 else
1557 {
1558 // Expand the type list with the mandatory types.
1559 static const auto mandatoryTypesFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
1560 const auto bindingTypes = toDescriptorTypeVector(mandatoryTypesFlags | toDescriptorTypeFlags(b->mutableTypes()));
1561
1562 mutableTypesVec.emplace_back(bindingTypes);
1563 }
1564
1565 const auto& lastVec = mutableTypesVec.back();
1566 const VkMutableDescriptorTypeListEXT typeList = { static_cast<deUint32>(lastVec.size()), de::dataOrNull(lastVec) };
1567 mutableTypeLists.push_back(typeList);
1568 }
1569 else
1570 {
1571 const VkMutableDescriptorTypeListEXT typeList = { 0u, nullptr };
1572 mutableTypeLists.push_back(typeList);
1573 }
1574 }
1575 else if (strategy == PoolMutableStrategy::NO_TYPES)
1576 ; // Do nothing, we will not use any type list.
1577 else
1578 DE_ASSERT(false);
1579 }
1580
1581 VkDescriptorPoolCreateInfo poolCreateInfo = initVulkanStructure();
1582
1583 poolCreateInfo.maxSets = 1u;
1584 poolCreateInfo.flags = flags;
1585 poolCreateInfo.poolSizeCount = static_cast<deUint32>(poolSizes.size());
1586 poolCreateInfo.pPoolSizes = de::dataOrNull(poolSizes);
1587
1588 VkMutableDescriptorTypeCreateInfoEXT mutableInfo = initVulkanStructure();
1589
1590 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1591 {
1592 mutableInfo.mutableDescriptorTypeListCount = static_cast<deUint32>(mutableTypeLists.size());
1593 mutableInfo.pMutableDescriptorTypeLists = de::dataOrNull(mutableTypeLists);
1594 poolCreateInfo.pNext = &mutableInfo;
1595 }
1596
1597 return createDescriptorPool(vkd, device, &poolCreateInfo);
1598 }
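// Resulting pNext chain, for reference:
//   VkDescriptorPoolCreateInfo -> VkMutableDescriptorTypeCreateInfoEXT -> one type list per pool size.
// Pool sizes with a non-mutable main type get an empty { 0, nullptr } list so list indices keep matching pool size indices.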
1599
1600 private:
1601 // Building the descriptor set layout create info structure is cumbersome, so we'll reuse the same procedure to check support
1602 // and create the layout. This structure contains the result. "supported" is created as an enum to avoid the Move<> to bool
1603 // conversion cast in the constructors.
1604 struct DescriptorSetLayoutResult
1605 {
1606 enum class LayoutSupported { NO = 0, YES };
1607
1608 LayoutSupported supported;
1609 Move<VkDescriptorSetLayout> layout;
1610
1611 explicit DescriptorSetLayoutResult (Move<VkDescriptorSetLayout>&& layout_)
1612 : supported (LayoutSupported::YES)
1613 , layout (layout_)
1614 {}
1615
1616 explicit DescriptorSetLayoutResult (LayoutSupported supported_)
1617 : supported (supported_)
1618 , layout ()
1619 {}
1620 };
1621
1622 DescriptorSetLayoutResult makeOrCheckDescriptorSetLayout (bool checkOnly, const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1623 {
1624 const auto numIterations = maxTypes();
1625 std::vector<VkDescriptorSetLayoutBinding> bindingsVec;
1626 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1627 std::vector<VkMutableDescriptorTypeListEXT> mutableTypeLists;
1628
1629 // Make vector element addresses stable.
1630 const auto bindingCount = numBindings();
1631 bindingsVec.reserve(bindingCount);
1632 mutableTypesVec.reserve(bindingCount);
1633 mutableTypeLists.reserve(bindingCount);
1634
1635 for (size_t bindingIdx = 0u; bindingIdx < bindings.size(); ++bindingIdx)
1636 {
1637 const auto& binding = bindings[bindingIdx];
1638 const auto mainType = binding->mainType();
1639
1640 const VkDescriptorSetLayoutBinding layoutBinding = {
1641 static_cast<deUint32>(bindingIdx), // deUint32 binding;
1642 mainType, // VkDescriptorType descriptorType;
1643 static_cast<deUint32>(binding->size()), // deUint32 descriptorCount;
1644 stageFlags, // VkShaderStageFlags stageFlags;
1645 nullptr, // const VkSampler* pImmutableSamplers;
1646 };
1647 bindingsVec.push_back(layoutBinding);
1648
1649 // This list may be empty for non-mutable types, which is fine.
1650 mutableTypesVec.push_back(binding->mutableTypes());
1651 const auto& lastVec = mutableTypesVec.back();
1652
1653 const VkMutableDescriptorTypeListEXT typeList = {
1654 static_cast<deUint32>(lastVec.size()), // deUint32 descriptorTypeCount;
1655 de::dataOrNull(lastVec), // const VkDescriptorType* pDescriptorTypes;
1656 };
1657 mutableTypeLists.push_back(typeList);
1658 }
1659
1660 // Make sure to include the variable descriptor count and/or update after bind binding flags.
1661 const bool updateAfterBind = ((createFlags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) != 0u);
1662 bool lastIsUnbounded = false;
1663 bool aliasingNeeded = false;
1664 std::vector<bool> bindingNeedsAliasing(bindings.size(), false);
1665
1666 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1667 {
1668 if (bindingIdx < bindings.size() - 1)
1669 DE_ASSERT(!bindings[bindingIdx]->isUnbounded());
1670 else
1671 lastIsUnbounded = bindings[bindingIdx]->isUnbounded();
1672
1673 if (bindings[bindingIdx]->needsAliasingUpTo(numIterations))
1674 {
1675 bindingNeedsAliasing[bindingIdx] = true;
1676 aliasingNeeded = true;
1677 }
1678 }
1679
1680 using FlagsCreateInfoPtr = de::MovePtr<VkDescriptorSetLayoutBindingFlagsCreateInfo>;
1681 using BindingFlagsVecPtr = de::MovePtr<std::vector<VkDescriptorBindingFlags>>;
1682
1683 FlagsCreateInfoPtr flagsCreateInfo;
1684 BindingFlagsVecPtr bindingFlagsVec;
1685
1686 if (updateAfterBind || lastIsUnbounded || aliasingNeeded)
1687 {
1688 flagsCreateInfo = FlagsCreateInfoPtr(new VkDescriptorSetLayoutBindingFlagsCreateInfo);
1689 *flagsCreateInfo = initVulkanStructure();
1690
1691 bindingFlagsVec = BindingFlagsVecPtr(new std::vector<VkDescriptorBindingFlags>(bindingsVec.size(), (updateAfterBind ? VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT : 0)));
1692 if (lastIsUnbounded)
1693 bindingFlagsVec->back() |= VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT;
1694
1695 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1696 {
1697 if (bindingNeedsAliasing[bindingIdx])
1698 bindingFlagsVec->at(bindingIdx) |= VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
1699 }
1700
1701 flagsCreateInfo->bindingCount = static_cast<deUint32>(bindingFlagsVec->size());
1702 flagsCreateInfo->pBindingFlags = de::dataOrNull(*bindingFlagsVec);
1703 }
1704
1705 const VkMutableDescriptorTypeCreateInfoEXT createInfoMutable = {
1706 VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
1707 flagsCreateInfo.get(),
1708 static_cast<deUint32>(mutableTypeLists.size()),
1709 de::dataOrNull(mutableTypeLists),
1710 };
1711
1712 const VkDescriptorSetLayoutCreateInfo layoutCreateInfo = {
1713 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, // VkStructureType sType;
1714 &createInfoMutable, // const void* pNext;
1715 createFlags, // VkDescriptorSetLayoutCreateFlags flags;
1716 static_cast<deUint32>(bindingsVec.size()), // deUint32 bindingCount;
1717 de::dataOrNull(bindingsVec), // const VkDescriptorSetLayoutBinding* pBindings;
1718 };
1719
1720 if (checkOnly)
1721 {
1722 VkDescriptorSetLayoutSupport support = initVulkanStructure();
1723 vkd.getDescriptorSetLayoutSupport(device, &layoutCreateInfo, &support);
1724 DescriptorSetLayoutResult result((support.supported == VK_TRUE) ? DescriptorSetLayoutResult::LayoutSupported::YES
1725 : DescriptorSetLayoutResult::LayoutSupported::NO);
1726 return result;
1727 }
1728 else
1729 {
1730 DescriptorSetLayoutResult result(createDescriptorSetLayout(vkd, device, &layoutCreateInfo));
1731 return result;
1732 }
1733 }
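// The same create-info is thus either queried with vkGetDescriptorSetLayoutSupport (checkOnly) or passed to
// vkCreateDescriptorSetLayout, so the support check exactly matches the layout created later.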
1734
1735 public:
1736 Move<VkDescriptorSetLayout> makeDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1737 {
1738 return makeOrCheckDescriptorSetLayout(false /*checkOnly*/, vkd, device, stageFlags, createFlags).layout;
1739 }
1740
1741 bool checkDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1742 {
1743 return (makeOrCheckDescriptorSetLayout(true /*checkOnly*/, vkd, device, stageFlags, createFlags).supported == DescriptorSetLayoutResult::LayoutSupported::YES);
1744 }
1745
1746 size_t numDescriptors () const
1747 {
1748 size_t total = 0;
1749 for (const auto& b : bindings)
1750 total += b->size();
1751 return total;
1752 }
1753
1754 std::vector<Resource> createResources (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 iteration, bool useAABBs) const
1755 {
1756 // Create resources for each binding.
1757 std::vector<Resource> result;
1758 result.reserve(numDescriptors());
1759
1760 const auto bindingsCount = static_cast<deUint32>(bindings.size());
1761
1762 for (deUint32 bindingIdx = 0u; bindingIdx < bindingsCount; ++bindingIdx)
1763 {
1764 const auto& binding = bindings[bindingIdx];
1765 auto bindingResources = binding->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, getDescriptorNumericValue(iteration, bindingIdx));
1766
1767 for (auto& resource : bindingResources)
1768 result.emplace_back(std::move(resource));
1769 }
1770
1771 return result;
1772 }
1773
1774 // Updates a descriptor set with the given resources. Note: the set must have been created with a layout that's compatible with this object.
1775 void updateDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet set, deUint32 iteration, const std::vector<Resource>& resources) const
1776 {
1777 // Make sure the number of resources is correct.
1778 const auto numResources = resources.size();
1779 DE_ASSERT(numDescriptors() == numResources);
1780
1781 std::vector<VkWriteDescriptorSet> descriptorWrites;
1782 descriptorWrites.reserve(numResources);
1783
1784 std::vector<VkDescriptorImageInfo> imageInfoVec;
1785 std::vector<VkDescriptorBufferInfo> bufferInfoVec;
1786 std::vector<VkBufferView> bufferViewVec;
1787 std::vector<VkWriteDescriptorSetAccelerationStructureKHR> asWriteVec;
1788 size_t resourceIdx = 0;
1789
1790 // We'll be storing pointers to elements of these vectors as we're appending elements, so we need their addresses to be stable.
1791 imageInfoVec.reserve(numResources);
1792 bufferInfoVec.reserve(numResources);
1793 bufferViewVec.reserve(numResources);
1794 asWriteVec.reserve(numResources);
1795
1796 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1797 {
1798 const auto& binding = bindings[bindingIdx];
1799 const auto descriptorTypes = binding->typesAtIteration(iteration);
1800
1801 for (size_t descriptorIdx = 0; descriptorIdx < binding->size(); ++descriptorIdx)
1802 {
1803 // Make sure the resource type matches the expected value.
1804 const auto& resource = resources[resourceIdx];
1805 const auto& descriptorType = descriptorTypes[descriptorIdx];
1806
1807 DE_ASSERT(resource.descriptorType == descriptorType);
1808
1809 // Obtain the descriptor write info for the resource.
1810 const auto writeInfo = resource.makeWriteInfo();
1811
1812 switch (writeInfo.writeType)
1813 {
1814 case WriteType::IMAGE_INFO: imageInfoVec.push_back(writeInfo.imageInfo); break;
1815 case WriteType::BUFFER_INFO: bufferInfoVec.push_back(writeInfo.bufferInfo); break;
1816 case WriteType::BUFFER_VIEW: bufferViewVec.push_back(writeInfo.bufferView); break;
1817 case WriteType::ACCELERATION_STRUCTURE_INFO: asWriteVec.push_back(writeInfo.asInfo); break;
1818 default: DE_ASSERT(false); break;
1819 }
1820
1821 // Add a new VkWriteDescriptorSet struct or extend the last one with more info. This helps us exercise different implementation code paths.
1822 bool extended = false;
1823
1824 if (!descriptorWrites.empty() && descriptorIdx > 0)
1825 {
1826 auto& last = descriptorWrites.back();
1827 if (last.dstSet == set /* this should always be true */ &&
1828 last.dstBinding == bindingIdx && (last.dstArrayElement + last.descriptorCount) == descriptorIdx &&
1829 last.descriptorType == descriptorType &&
1830 writeInfo.writeType != WriteType::ACCELERATION_STRUCTURE_INFO)
1831 {
1832 // The new write should be in the same vector (imageInfoVec, bufferInfoVec or bufferViewVec) so increasing the count works.
1833 ++last.descriptorCount;
1834 extended = true;
1835 }
1836 }
1837
1838 if (!extended)
1839 {
1840 const VkWriteDescriptorSet write = {
1841 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1842 ((writeInfo.writeType == WriteType::ACCELERATION_STRUCTURE_INFO) ? &asWriteVec.back() : nullptr),
1843 set,
1844 static_cast<deUint32>(bindingIdx),
1845 static_cast<deUint32>(descriptorIdx),
1846 1u,
1847 descriptorType,
1848 (writeInfo.writeType == WriteType::IMAGE_INFO ? &imageInfoVec.back() : nullptr),
1849 (writeInfo.writeType == WriteType::BUFFER_INFO ? &bufferInfoVec.back() : nullptr),
1850 (writeInfo.writeType == WriteType::BUFFER_VIEW ? &bufferViewVec.back() : nullptr),
1851 };
1852 descriptorWrites.push_back(write);
1853 }
1854
1855 ++resourceIdx;
1856 }
1857 }
1858
1859 // Finally, update descriptor set with all the writes.
1860 vkd.updateDescriptorSets(device, static_cast<deUint32>(descriptorWrites.size()), de::dataOrNull(descriptorWrites), 0u, nullptr);
1861 }
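// Merging means a 3-element array binding may end up updated by a single write such as:
//   { dstBinding=N, dstArrayElement=0, descriptorCount=3, pImageInfo=&imageInfoVec[first] }
// which is valid because the per-descriptor info structs for one binding are appended contiguously above.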
1862
1863 // Copies between descriptor sets. They must be compatible and related to this set.
1864 void copyDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet srcSet, VkDescriptorSet dstSet) const
1865 {
1866 std::vector<VkCopyDescriptorSet> copies;
1867
1868 for (size_t bindingIdx = 0; bindingIdx < numBindings(); ++bindingIdx)
1869 {
1870 const auto& binding = getBinding(bindingIdx);
1871 const auto bindingNumber = static_cast<deUint32>(bindingIdx);
1872 const auto descriptorCount = static_cast<deUint32>(binding->size());
1873
1874 const VkCopyDescriptorSet copy =
1875 {
1876 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
1877 nullptr,
1878 // set, binding, array element.
1879 srcSet, bindingNumber, 0u,
1880 dstSet, bindingNumber, 0u,
1881 descriptorCount,
1882 };
1883
1884 copies.push_back(copy);
1885 }
1886
1887 vkd.updateDescriptorSets(device, 0u, nullptr, static_cast<deUint32>(copies.size()), de::dataOrNull(copies));
1888 }
1889
1890 // Does any binding in the set need aliasing in a given iteration?
1891 bool needsAliasing (deUint32 iteration) const
1892 {
1893 std::vector<bool> aliasingNeededFlags;
1894 aliasingNeededFlags.reserve(bindings.size());
1895
1896 std::transform(begin(bindings), end(bindings), std::back_inserter(aliasingNeededFlags),
1897 [iteration] (const BindingInterfacePtr& b) { return b->needsAliasing(iteration); });
1898 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1899 }
1900
1901 // Does any binding in the set need aliasing in any iteration?
1902 bool needsAnyAliasing () const
1903 {
1904 const auto numIterations = maxTypes();
1905 std::vector<bool> aliasingNeededFlags (numIterations, false);
1906
1907 for (deUint32 iteration = 0; iteration < numIterations; ++iteration)
1908 aliasingNeededFlags[iteration] = needsAliasing(iteration);
1909
1910 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1911 }
1912
1913 // Is the last binding an unbounded array?
1914 bool lastBindingIsUnbounded () const
1915 {
1916 if (bindings.empty())
1917 return false;
1918 return bindings.back()->isUnbounded();
1919 }
1920
1921 // Get the variable descriptor count for the last binding if any.
1922 tcu::Maybe<deUint32> getVariableDescriptorCount () const
1923 {
1924 if (lastBindingIsUnbounded())
1925 return tcu::just(static_cast<deUint32>(bindings.back()->size()));
1926 return tcu::Nothing;
1927 }
1928
1929 // Check if the set contains a descriptor of the given type at the given iteration.
1930 bool containsTypeAtIteration (VkDescriptorType descriptorType, deUint32 iteration) const
1931 {
1932 return std::any_of(begin(bindings), end(bindings),
1933 [descriptorType, iteration] (const BindingInterfacePtr& b) {
1934 const auto types = b->typesAtIteration(iteration);
1935 return de::contains(begin(types), end(types), descriptorType);
1936 });
1937 }
1938
1939 // Is any binding an array?
1940 bool hasArrays () const
1941 {
1942 return std::any_of(begin(bindings), end(bindings), [] (const BindingInterfacePtr& b) { return b->isArray(); });
1943 }
1944 };
1945
1946 enum class UpdateType
1947 {
1948 WRITE = 0,
1949 COPY,
1950 };
1951
1952 enum class SourceSetType
1953 {
1954 NORMAL = 0,
1955 HOST_ONLY,
1956 NO_SOURCE,
1957 };
1958
1959 enum class UpdateMoment
1960 {
1961 NORMAL = 0,
1962 UPDATE_AFTER_BIND,
1963 };
1964
1965 enum class TestingStage
1966 {
1967 COMPUTE = 0,
1968 VERTEX,
1969 TESS_EVAL,
1970 TESS_CONTROL,
1971 GEOMETRY,
1972 FRAGMENT,
1973 RAY_GEN,
1974 INTERSECTION,
1975 ANY_HIT,
1976 CLOSEST_HIT,
1977 MISS,
1978 CALLABLE,
1979 };
1980
1981 enum class ArrayAccessType
1982 {
1983 CONSTANT = 0,
1984 PUSH_CONSTANT,
1985 NO_ARRAY,
1986 };
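// PUSH_CONSTANT makes the shaders index descriptor arrays with a value read from a push constant (a dynamically
// uniform index), which is why MutableTypesTest::checkSupport() verifies the *ArrayDynamicIndexing features.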
1987
1988 // Are we testing a ray tracing pipeline stage?
1989 bool isRayTracingStage (TestingStage stage)
1990 {
1991 switch (stage)
1992 {
1993 case TestingStage::RAY_GEN:
1994 case TestingStage::INTERSECTION:
1995 case TestingStage::ANY_HIT:
1996 case TestingStage::CLOSEST_HIT:
1997 case TestingStage::MISS:
1998 case TestingStage::CALLABLE:
1999 return true;
2000 default:
2001 break;
2002 }
2003
2004 return false;
2005 }
2006
2007 struct TestParams
2008 {
2009 DescriptorSetPtr descriptorSet;
2010 UpdateType updateType;
2011 SourceSetStrategy sourceSetStrategy;
2012 SourceSetType sourceSetType;
2013 PoolMutableStrategy poolMutableStrategy;
2014 UpdateMoment updateMoment;
2015 ArrayAccessType arrayAccessType;
2016 TestingStage testingStage;
2017
2018 VkShaderStageFlags getStageFlags () const
2019 {
2020 VkShaderStageFlags flags = 0u;
2021
2022 switch (testingStage)
2023 {
2024 case TestingStage::COMPUTE: flags |= VK_SHADER_STAGE_COMPUTE_BIT; break;
2025 case TestingStage::VERTEX: flags |= VK_SHADER_STAGE_VERTEX_BIT; break;
2026 case TestingStage::TESS_EVAL: flags |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; break;
2027 case TestingStage::TESS_CONTROL: flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; break;
2028 case TestingStage::GEOMETRY: flags |= VK_SHADER_STAGE_GEOMETRY_BIT; break;
2029 case TestingStage::FRAGMENT: flags |= VK_SHADER_STAGE_FRAGMENT_BIT; break;
2030 case TestingStage::RAY_GEN: flags |= VK_SHADER_STAGE_RAYGEN_BIT_KHR; break;
2031 case TestingStage::INTERSECTION: flags |= VK_SHADER_STAGE_INTERSECTION_BIT_KHR; break;
2032 case TestingStage::ANY_HIT: flags |= VK_SHADER_STAGE_ANY_HIT_BIT_KHR; break;
2033 case TestingStage::CLOSEST_HIT: flags |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR; break;
2034 case TestingStage::MISS: flags |= VK_SHADER_STAGE_MISS_BIT_KHR; break;
2035 case TestingStage::CALLABLE: flags |= VK_SHADER_STAGE_CALLABLE_BIT_KHR; break;
2036 default:
2037 DE_ASSERT(false);
2038 break;
2039 }
2040
2041 return flags;
2042 }
2043
2044 VkPipelineStageFlags getPipelineWriteStage () const
2045 {
2046 VkPipelineStageFlags flags = 0u;
2047
2048 switch (testingStage)
2049 {
2050 case TestingStage::COMPUTE: flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break;
2051 case TestingStage::VERTEX: flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; break;
2052 case TestingStage::TESS_EVAL: flags |= VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT; break;
2053 case TestingStage::TESS_CONTROL: flags |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT; break;
2054 case TestingStage::GEOMETRY: flags |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT; break;
2055 case TestingStage::FRAGMENT: flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break;
2056 case TestingStage::RAY_GEN: // fallthrough
2057 case TestingStage::INTERSECTION: // fallthrough
2058 case TestingStage::ANY_HIT: // fallthrough
2059 case TestingStage::CLOSEST_HIT: // fallthrough
2060 case TestingStage::MISS: // fallthrough
2061 case TestingStage::CALLABLE: flags |= VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR; break;
2062 default:
2063 DE_ASSERT(false);
2064 break;
2065 }
2066
2067 return flags;
2068 }
2069
2070 private:
2071 VkDescriptorSetLayoutCreateFlags getLayoutCreateFlags (bool isSourceSet) const
2072 {
2073 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2074 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2075
2076 VkDescriptorSetLayoutCreateFlags createFlags = 0u;
2077
2078 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2079 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT;
2080
2081 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2082 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT;
2083
2084 return createFlags;
2085 }
2086
2087 public:
2088 VkDescriptorSetLayoutCreateFlags getSrcLayoutCreateFlags () const
2089 {
2090 return getLayoutCreateFlags(true);
2091 }
2092
2093 VkDescriptorSetLayoutCreateFlags getDstLayoutCreateFlags () const
2094 {
2095 return getLayoutCreateFlags(false);
2096 }
2097
2098 private:
2099 VkDescriptorPoolCreateFlags getPoolCreateFlags (bool isSourceSet) const
2100 {
2101 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2102 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2103
2104 VkDescriptorPoolCreateFlags poolCreateFlags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
2105
2106 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2107 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT;
2108
2109 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2110 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT;
2111
2112 return poolCreateFlags;
2113 }
2114
2115 public:
2116 VkDescriptorPoolCreateFlags getSrcPoolCreateFlags () const
2117 {
2118 return getPoolCreateFlags(true);
2119 }
2120
2121 VkDescriptorPoolCreateFlags getDstPoolCreateFlags () const
2122 {
2123 return getPoolCreateFlags(false);
2124 }
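// Summary of the flag getters above: host-only source sets get the *_HOST_ONLY_* bits and never the
// update-after-bind bits; destination sets (and non-host-only sources) get the update-after-bind bits only when
// updateMoment requests them.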
2125
2126 VkPipelineBindPoint getBindPoint () const
2127 {
2128 if (testingStage == TestingStage::COMPUTE)
2129 return VK_PIPELINE_BIND_POINT_COMPUTE;
2130 if (isRayTracingStage(testingStage))
2131 return VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
2132 return VK_PIPELINE_BIND_POINT_GRAPHICS;
2133 }
2134 };
2135
2136 class MutableTypesTest : public TestCase
2137 {
2138 public:
2139 MutableTypesTest (tcu::TestContext& testCtx, const std::string& name, const std::string& description, const TestParams& params)
2140 : TestCase(testCtx, name, description)
2141 , m_params(params)
2142 {}
2143
2144 ~MutableTypesTest () override = default;
2145
2146 void initPrograms (vk::SourceCollections& programCollection) const override;
2147 TestInstance* createInstance (Context& context) const override;
2148 void checkSupport (Context& context) const override;
2149
2150 private:
2151 TestParams m_params;
2152 };
2153
2154 class MutableTypesInstance : public TestInstance
2155 {
2156 public:
2157 MutableTypesInstance (Context& context, const TestParams& params)
2158 : TestInstance (context)
2159 , m_params (params)
2160 {}
2161
2162 ~MutableTypesInstance () override = default;
2163
2164 tcu::TestStatus iterate () override;
2165
2166 private:
2167 TestParams m_params;
2168 };
2169
2170 // Check if a descriptor set contains a given descriptor type in any iteration up to maxTypes().
2171 bool containsAnyDescriptorType (const DescriptorSet& descriptorSet, VkDescriptorType descriptorType)
2172 {
2173 const auto numIterations = descriptorSet.maxTypes();
2174
2175 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2176 {
2177 if (descriptorSet.containsTypeAtIteration(descriptorType, iter))
2178 return true;
2179 }
2180
2181 return false;
2182 }
2183
2184 // Check if testing this descriptor set needs an external image (for sampler descriptors).
2185 bool needsExternalImage (const DescriptorSet& descriptorSet)
2186 {
2187 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLER);
2188 }
2189
2190 // Check if testing this descriptor set needs an external sampler (for sampled images).
2191 bool needsExternalSampler (const DescriptorSet& descriptorSet)
2192 {
2193 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
2194 }
2195
2196 // Check if this descriptor set contains input attachments.
2197 bool usesInputAttachments (const DescriptorSet& descriptorSet)
2198 {
2199 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2200 }
2201
2202 // Check if this descriptor set contains acceleration structures.
2203 bool usesAccelerationStructures (const DescriptorSet& descriptorSet)
2204 {
2205 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
2206 }
2207
2208 std::string shaderName (deUint32 iteration)
2209 {
2210 return ("iteration-" + de::toString(iteration));
2211 }
2212
2213 void MutableTypesTest::initPrograms (vk::SourceCollections& programCollection) const
2214 {
2215 const bool usePushConstants = (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT);
2216 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
2217 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
2218 const bool rayQueries = usesAccelerationStructures(*m_params.descriptorSet);
2219 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2220 const auto numIterations = m_params.descriptorSet->maxTypes();
2221 const auto numBindings = m_params.descriptorSet->numBindings();
2222 const vk::ShaderBuildOptions rtBuildOptions (programCollection.usedVulkanVersion, vk::SPIRV_VERSION_1_4, 0u, true);
2223
2224 // Extra set and bindings for external resources.
2225 std::ostringstream extraSet;
2226 deUint32 extraBindings = 0u;
2227
2228 extraSet << "layout (set=1, binding=" << extraBindings++ << ") buffer OutputBufferBlock { uint value[" << numIterations << "]; } outputBuffer;\n";
2229 if (useExternalImage)
2230 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform utexture2D externalSampledImage;\n";
2231 if (useExternalSampler)
2232 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform sampler externalSampler;\n";
2233 // The extra binding below will be declared in the "passthrough" ray generation shader.
2234 #if 0
2235 if (rayTracing)
2236 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform accelerationStructureEXT externalAS;\n";
2237 #endif
2238
2239 // Common vertex preamble.
2240 std::ostringstream vertexPreamble;
2241 vertexPreamble
2242 << "vec2 vertexPositions[3] = vec2[](\n"
2243 << " vec2(0.0, -0.5),\n"
2244 << " vec2(0.5, 0.5),\n"
2245 << " vec2(-0.5, 0.5)\n"
2246 << ");\n"
2247 ;
2248
2249 // Vertex shader body common statements.
2250 std::ostringstream vertexBodyCommon;
2251 vertexBodyCommon << " gl_Position = vec4(vertexPositions[gl_VertexIndex], 0.0, 1.0);\n";
2252
2253 // Common tessellation control preamble.
2254 std::ostringstream tescPreamble;
2255 tescPreamble
2256 << "layout (vertices=3) out;\n"
2257 << "in gl_PerVertex\n"
2258 << "{\n"
2259 << " vec4 gl_Position;\n"
2260 << "} gl_in[gl_MaxPatchVertices];\n"
2261 << "out gl_PerVertex\n"
2262 << "{\n"
2263 << " vec4 gl_Position;\n"
2264 << "} gl_out[];\n"
2265 ;
2266
2267 // Common tessellation control body.
2268 std::ostringstream tescBodyCommon;
2269 tescBodyCommon
2270 << " gl_TessLevelInner[0] = 1.0;\n"
2271 << " gl_TessLevelInner[1] = 1.0;\n"
2272 << " gl_TessLevelOuter[0] = 1.0;\n"
2273 << " gl_TessLevelOuter[1] = 1.0;\n"
2274 << " gl_TessLevelOuter[2] = 1.0;\n"
2275 << " gl_TessLevelOuter[3] = 1.0;\n"
2276 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2277 ;
2278
2279 // Common tessellation evaluation preamble.
2280 std::ostringstream tesePreamble;
2281 tesePreamble
2282 << "layout (triangles, fractional_odd_spacing, cw) in;\n"
2283 << "in gl_PerVertex\n"
2284 << "{\n"
2285 << " vec4 gl_Position;\n"
2286 << "} gl_in[gl_MaxPatchVertices];\n"
2287 << "out gl_PerVertex\n"
2288 << "{\n"
2289 << " vec4 gl_Position;\n"
2290 << "};\n"
2291 ;
2292
2293 // Common tessellation evaluation body.
2294 std::ostringstream teseBodyCommon;
2295 teseBodyCommon
2296 << " gl_Position = (gl_TessCoord.x * gl_in[0].gl_Position) +\n"
2297 << " (gl_TessCoord.y * gl_in[1].gl_Position) +\n"
2298 << " (gl_TessCoord.z * gl_in[2].gl_Position);\n"
2299 ;
2300
2301 // Shader preamble.
2302 std::ostringstream preamble;
2303
2304 preamble
2305 << "#version 460\n"
2306 << "#extension GL_EXT_nonuniform_qualifier : enable\n"
2307 << "#extension GL_EXT_debug_printf : enable\n"
2308 << (rayTracing ? "#extension GL_EXT_ray_tracing : enable\n" : "")
2309 << (rayQueries ? "#extension GL_EXT_ray_query : enable\n" : "")
2310 << "\n"
2311 ;
2312
2313 if (m_params.testingStage == TestingStage::VERTEX)
2314 {
2315 preamble << vertexPreamble.str();
2316 }
2317 else if (m_params.testingStage == TestingStage::COMPUTE)
2318 {
2319 preamble
2320 << "layout (local_size_x=1, local_size_y=1, local_size_z=1) in;\n"
2321 << "\n"
2322 ;
2323 }
2324 else if (m_params.testingStage == TestingStage::GEOMETRY)
2325 {
2326 preamble
2327 << "layout (triangles) in;\n"
2328 << "layout (triangle_strip, max_vertices=3) out;\n"
2329 << "in gl_PerVertex\n"
2330 << "{\n"
2331 << " vec4 gl_Position;\n"
2332 << "} gl_in[3];\n"
2333 << "out gl_PerVertex\n"
2334 << "{\n"
2335 << " vec4 gl_Position;\n"
2336 << "};\n"
2337 ;
2338 }
2339 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2340 {
2341 preamble << tescPreamble.str();
2342 }
2343 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2344 {
2345 preamble << tesePreamble.str();
2346 }
2347 else if (m_params.testingStage == TestingStage::CALLABLE)
2348 {
2349 preamble << "layout (location=0) callableDataInEXT float unusedCallableData;\n";
2350 }
2351 else if (m_params.testingStage == TestingStage::CLOSEST_HIT ||
2352 m_params.testingStage == TestingStage::ANY_HIT ||
2353 m_params.testingStage == TestingStage::MISS)
2354 {
2355 preamble << "layout (location=0) rayPayloadInEXT float unusedRayPayload;\n";
2356 }
2357 else if (m_params.testingStage == TestingStage::INTERSECTION)
2358 {
2359 preamble << "hitAttributeEXT vec3 hitAttribute;\n";
2360 }
2361
2362 preamble << extraSet.str();
2363 if (usePushConstants)
2364 preamble << "layout (push_constant, std430) uniform PushConstantBlock { uint zero; } pc;\n";
2365 preamble << "\n";
2366
2367 // We need to create a shader per iteration.
2368 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2369 {
2370 // Shader preamble.
2371 std::ostringstream shader;
2372 shader << preamble.str();
2373
2374 deUint32 inputAttachmentCount = 0u;
2375
2376 // Descriptor declarations for this iteration.
2377 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2378 {
2379 DE_ASSERT(bindingIdx <= std::numeric_limits<deUint32>::max());
2380
2381 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2382 const auto bindingTypes = binding->typesAtIteration(iter);
2383 const auto hasInputAttachment = de::contains(begin(bindingTypes), end(bindingTypes), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2384 const auto isArray = binding->isArray();
2385 const auto isUnbounded = binding->isUnbounded();
2386 const auto bindingSize = binding->size();
2387
2388 // If the binding is an input attachment, make sure it's not an array.
2389 DE_ASSERT(!hasInputAttachment || !isArray);
2390
2391 // Make sure the descriptor count fits a deInt32 if needed.
2392 DE_ASSERT(!isArray || isUnbounded || bindingSize <= static_cast<size_t>(std::numeric_limits<deInt32>::max()));
2393
2394 const auto arraySize = (isArray ? (isUnbounded ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(bindingSize)))
2395 : tcu::Nothing);
2396
2397 shader << binding->glslDeclarations(iter, 0u, static_cast<deUint32>(bindingIdx), inputAttachmentCount, arraySize);
2398
2399 if (hasInputAttachment)
2400 ++inputAttachmentCount;
2401 }
2402
2403 // Main body.
2404 shader
2405 << "\n"
2406 << "void main() {\n"
2407 // This checks if we are the first invocation to arrive here, so the checks are executed only once.
2408 << " const uint flag = atomicCompSwap(outputBuffer.value[" << iter << "], 0u, 1u);\n"
2409 << " if (flag == 0u) {\n"
2410 << " uint anyError = 0u;\n"
2411 ;
2412
2413 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2414 {
2415 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2416 const auto idx32 = static_cast<deUint32>(bindingIdx);
2417 shader << binding->glslCheckStatements(iter, 0u, idx32, getDescriptorNumericValue(iter, idx32), tcu::Nothing, usePushConstants);
2418 }
2419
2420 shader
2421 << " if (anyError == 0u) {\n"
2422 << " atomicAdd(outputBuffer.value[" << iter << "], 1u);\n"
2423 << " }\n"
2424 << " }\n" // Closes if (flag == 0u).
2425 ;
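// Expected protocol for outputBuffer.value[iter], assuming the host zero-initializes the buffer: the first
// invocation flips it from 0 to 1 and runs the checks exactly once; the atomicAdd on success leaves 2, which is
// what a passing iteration should read back.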
2426
2427 if (m_params.testingStage == TestingStage::VERTEX)
2428 {
2429 shader << vertexBodyCommon.str();
2430 }
2431 else if (m_params.testingStage == TestingStage::GEOMETRY)
2432 {
2433 shader
2434 << " gl_Position = gl_in[0].gl_Position; EmitVertex();\n"
2435 << " gl_Position = gl_in[1].gl_Position; EmitVertex();\n"
2436 << " gl_Position = gl_in[2].gl_Position; EmitVertex();\n"
2437 ;
2438 }
2439 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2440 {
2441 shader << tescBodyCommon.str();
2442 }
2443 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2444 {
2445 shader << teseBodyCommon.str();
2446 }
2447
2448 shader
2449 << "}\n" // End of main().
2450 ;
2451
2452 {
2453 const auto shaderNameStr = shaderName(iter);
2454 const auto shaderStr = shader.str();
2455 auto& glslSource = programCollection.glslSources.add(shaderNameStr);
2456
2457 if (m_params.testingStage == TestingStage::COMPUTE)
2458 glslSource << glu::ComputeSource(shaderStr);
2459 else if (m_params.testingStage == TestingStage::VERTEX)
2460 glslSource << glu::VertexSource(shaderStr);
2461 else if (m_params.testingStage == TestingStage::FRAGMENT)
2462 glslSource << glu::FragmentSource(shaderStr);
2463 else if (m_params.testingStage == TestingStage::GEOMETRY)
2464 glslSource << glu::GeometrySource(shaderStr);
2465 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2466 glslSource << glu::TessellationControlSource(shaderStr);
2467 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2468 glslSource << glu::TessellationEvaluationSource(shaderStr);
2469 else if (m_params.testingStage == TestingStage::RAY_GEN)
2470 glslSource << glu::RaygenSource(updateRayTracingGLSL(shaderStr));
2471 else if (m_params.testingStage == TestingStage::INTERSECTION)
2472 glslSource << glu::IntersectionSource(updateRayTracingGLSL(shaderStr));
2473 else if (m_params.testingStage == TestingStage::ANY_HIT)
2474 glslSource << glu::AnyHitSource(updateRayTracingGLSL(shaderStr));
2475 else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
2476 glslSource << glu::ClosestHitSource(updateRayTracingGLSL(shaderStr));
2477 else if (m_params.testingStage == TestingStage::MISS)
2478 glslSource << glu::MissSource(updateRayTracingGLSL(shaderStr));
2479 else if (m_params.testingStage == TestingStage::CALLABLE)
2480 glslSource << glu::CallableSource(updateRayTracingGLSL(shaderStr));
2481 else
2482 DE_ASSERT(false);
2483
2484 if (rayTracing || rayQueries)
2485 glslSource << rtBuildOptions;
2486 }
2487 }
2488
2489 if (m_params.testingStage == TestingStage::FRAGMENT
2490 || m_params.testingStage == TestingStage::GEOMETRY
2491 || m_params.testingStage == TestingStage::TESS_CONTROL
2492 || m_params.testingStage == TestingStage::TESS_EVAL)
2493 {
2494 // Add passthrough vertex shader that works for points.
2495 std::ostringstream vertPassthrough;
2496 vertPassthrough
2497 << "#version 460\n"
2498 << "out gl_PerVertex\n"
2499 << "{\n"
2500 << " vec4 gl_Position;\n"
2501 << "};\n"
2502 << vertexPreamble.str()
2503 << "void main() {\n"
2504 << vertexBodyCommon.str()
2505 << "}\n"
2506 ;
2507 programCollection.glslSources.add("vert") << glu::VertexSource(vertPassthrough.str());
2508 }
2509
2510 if (m_params.testingStage == TestingStage::TESS_CONTROL)
2511 {
2512 // Add passthrough tessellation evaluation shader.
2513 std::ostringstream tesePassthrough;
2514 tesePassthrough
2515 << "#version 460\n"
2516 << tesePreamble.str()
2517 << "void main (void)\n"
2518 << "{\n"
2519 << teseBodyCommon.str()
2520 << "}\n"
2521 ;
2522
2523 programCollection.glslSources.add("tese") << glu::TessellationEvaluationSource(tesePassthrough.str());
2524 }
2525
2526 if (m_params.testingStage == TestingStage::TESS_EVAL)
2527 {
2528 // Add passthrough tessellation control shader.
2529 std::ostringstream tescPassthrough;
2530 tescPassthrough
2531 << "#version 460\n"
2532 << tescPreamble.str()
2533 << "void main (void)\n"
2534 << "{\n"
2535 << tescBodyCommon.str()
2536 << "}\n"
2537 ;
2538
2539 programCollection.glslSources.add("tesc") << glu::TessellationControlSource(tescPassthrough.str());
2540 }
2541
2542 if (rayTracing && m_params.testingStage != TestingStage::RAY_GEN)
2543 {
2544 // Add a "passthrough" ray generation shader.
2545 std::ostringstream rgen;
2546 rgen
2547 << "#version 460 core\n"
2548 << "#extension GL_EXT_ray_tracing : require\n"
2549 << "layout (set=1, binding=" << extraBindings << ") uniform accelerationStructureEXT externalAS;\n"
2550 << ((m_params.testingStage == TestingStage::CALLABLE)
2551 ? "layout (location=0) callableDataEXT float unusedCallableData;\n"
2552 : "layout (location=0) rayPayloadEXT float unusedRayPayload;\n")
2553 << "\n"
2554 << "void main()\n"
2555 << "{\n"
2556 ;
2557
2558 if (m_params.testingStage == TestingStage::INTERSECTION
2559 || m_params.testingStage == TestingStage::ANY_HIT
2560 || m_params.testingStage == TestingStage::CLOSEST_HIT
2561 || m_params.testingStage == TestingStage::MISS)
2562 {
2563 // We need to trace rays in this case to get hits or misses.
2564 const auto zDir = ((m_params.testingStage == TestingStage::MISS) ? "-1.0" : "1.0");
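			// Direction is an assumption about the test geometry: rays toward +Z are expected to hit it (running the
			// hit-group stages), while -Z points away from it so the miss shader runs instead; the geometry placement
			// itself is set up outside this function.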
2565
2566 rgen
2567 << " const uint cullMask = 0xFF;\n"
2568 << " const float tMin = 1.0;\n"
2569 << " const float tMax = 10.0;\n"
2570 << " const vec3 origin = vec3(0.0, 0.0, 0.0);\n"
2571 << " const vec3 direction = vec3(0.0, 0.0, " << zDir << ");\n"
2572 << " traceRayEXT(externalAS, gl_RayFlagsNoneEXT, cullMask, 0, 0, 0, origin, tMin, direction, tMax, 0);\n"
2573 ;
2574
2575 }
2576 else if (m_params.testingStage == TestingStage::CALLABLE)
2577 {
2578 rgen << " executeCallableEXT(0, 0);\n";
2579 }
2580
2581 // End of main().
2582 rgen << "}\n";
2583
2584 programCollection.glslSources.add("rgen") << glu::RaygenSource(updateRayTracingGLSL(rgen.str())) << rtBuildOptions;
2585
2586 // Intersection shaders will ignore the intersection, so we need a passthrough miss shader.
2587 if (m_params.testingStage == TestingStage::INTERSECTION)
2588 {
2589 std::ostringstream miss;
2590 miss
2591 << "#version 460 core\n"
2592 << "#extension GL_EXT_ray_tracing : require\n"
2593 << "layout (location=0) rayPayloadEXT float unusedRayPayload;\n"
2594 << "\n"
2595 << "void main()\n"
2596 << "{\n"
2597 << "}\n"
2598 ;
2599
2600 programCollection.glslSources.add("miss") << glu::MissSource(updateRayTracingGLSL(miss.str())) << rtBuildOptions;
2601 }
2602 }
2603 }
2604
2605 TestInstance* MutableTypesTest::createInstance (Context& context) const
2606 {
2607 return new MutableTypesInstance(context, m_params);
2608 }
2609
2610 void requirePartiallyBound (Context& context)
2611 {
2612 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2613 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2614 if (!indexingFeatures.descriptorBindingPartiallyBound)
2615 TCU_THROW(NotSupportedError, "Partially bound bindings not supported");
2616 }
2617
2618 void requireVariableDescriptorCount (Context& context)
2619 {
2620 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2621 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2622 if (!indexingFeatures.descriptorBindingVariableDescriptorCount)
2623 TCU_THROW(NotSupportedError, "Variable descriptor count not supported");
2624 }
2625
2626 // Calculates the set of used descriptor types for a given set and iteration count, for bindings matching a predicate.
2627 std::set<VkDescriptorType> getUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations, bool (*predicate)(const BindingInterface* binding))
2628 {
2629 std::set<VkDescriptorType> usedDescriptorTypes;
2630
2631 for (size_t bindingIdx = 0; bindingIdx < descriptorSet.numBindings(); ++bindingIdx)
2632 {
2633 const auto bindingPtr = descriptorSet.getBinding(bindingIdx);
2634 if (predicate(bindingPtr))
2635 {
2636 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2637 {
2638 const auto descTypes = bindingPtr->typesAtIteration(iter);
2639 usedDescriptorTypes.insert(begin(descTypes), end(descTypes));
2640 }
2641 }
2642 }
2643
2644 return usedDescriptorTypes;
2645 }
2646
2647 std::set<VkDescriptorType> getAllUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2648 {
2649 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface*) { return true; });
2650 }
2651
2652 std::set<VkDescriptorType> getUsedArrayDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2653 {
2654 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface* b) { return b->isArray(); });
2655 }
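// A hypothetical extra filter would follow the same pattern, since capture-less lambdas convert to the plain
// function pointer expected by getUsedDescriptorTypes, e.g.:
//   getUsedDescriptorTypes(ds, iters, [] (const BindingInterface* b) { return b->isUnbounded(); });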
2656
2657 // Are we testing a vertex pipeline stage?
2658 bool isVertexStage (TestingStage stage)
2659 {
2660 switch (stage)
2661 {
2662 case TestingStage::VERTEX:
2663 case TestingStage::TESS_CONTROL:
2664 case TestingStage::TESS_EVAL:
2665 case TestingStage::GEOMETRY:
2666 return true;
2667 default:
2668 break;
2669 }
2670
2671 return false;
2672 }
2673
2674 void MutableTypesTest::checkSupport (Context& context) const
2675 {
2676 if (!context.isDeviceFunctionalitySupported("VK_VALVE_mutable_descriptor_type") &&
2677 !context.isDeviceFunctionalitySupported("VK_EXT_mutable_descriptor_type"))
2678
2679 TCU_THROW(NotSupportedError, "VK_VALVE_mutable_descriptor_type or VK_EXT_mutable_descriptor_type is not supported");
2680
2681 VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutableDescriptorType = initVulkanStructure();
2682 VkPhysicalDeviceFeatures2KHR features2 = initVulkanStructure(&mutableDescriptorType);
2683
2684 context.getInstanceInterface().getPhysicalDeviceFeatures2(context.getPhysicalDevice(), &features2);
2685
2686 if (!mutableDescriptorType.mutableDescriptorType)
2687 TCU_THROW(NotSupportedError, "mutableDescriptorType feature is not supported");
2688
2689 // Check ray tracing if needed.
2690 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2691
2692 if (rayTracing)
2693 {
2694 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2695 context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
2696 }
2697
2698 // Check if ray queries are needed. Ray queries are used to verify acceleration structure descriptors.
2699 const bool rayQueriesNeeded = usesAccelerationStructures(*m_params.descriptorSet);
2700 if (rayQueriesNeeded)
2701 {
2702 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2703 context.requireDeviceFunctionality("VK_KHR_ray_query");
2704 }
2705
2706 // We'll use iterations to check each mutable type, as needed.
2707 const auto numIterations = m_params.descriptorSet->maxTypes();
2708
2709 if (m_params.descriptorSet->lastBindingIsUnbounded())
2710 requireVariableDescriptorCount(context);
2711
2712 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2713 {
2714 if (m_params.descriptorSet->needsAliasing(iter))
2715 {
2716 requirePartiallyBound(context);
2717 break;
2718 }
2719 }
2720
2721 if (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2722 {
2723 // Check update after bind for each used descriptor type.
2724 const auto& usedDescriptorTypes = getAllUsedDescriptorTypes(*m_params.descriptorSet, numIterations);
2725 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2726
2727 for (const auto& descType : usedDescriptorTypes)
2728 {
2729 switch (descType)
2730 {
2731 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2732 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2733 if (!indexingFeatures.descriptorBindingUniformBufferUpdateAfterBind)
2734 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform buffers");
2735 break;
2736
2737 case VK_DESCRIPTOR_TYPE_SAMPLER:
2738 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2739 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2740 if (!indexingFeatures.descriptorBindingSampledImageUpdateAfterBind)
2741 TCU_THROW(NotSupportedError, "Update-after-bind not supported for samplers and sampled images");
2742 break;
2743
2744 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2745 if (!indexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
2746 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage images");
2747 break;
2748
2749 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2750 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2751 if (!indexingFeatures.descriptorBindingStorageBufferUpdateAfterBind)
2752 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage buffers");
2753 break;
2754
2755 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2756 if (!indexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind)
2757 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform texel buffers");
2758 break;
2759
2760 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2761 if (!indexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind)
2762 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage texel buffers");
2763 break;
2764
2765 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2766 TCU_THROW(InternalError, "Tests do not support update-after-bind with input attachments");
2767
2768 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
2769 {
2770 // Just in case we ever mix some of these in.
2771 context.requireDeviceFunctionality("VK_EXT_inline_uniform_block");
2772 const auto& iubFeatures = context.getInlineUniformBlockFeatures();
2773 if (!iubFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind)
2774 TCU_THROW(NotSupportedError, "Update-after-bind not supported for inline uniform blocks");
2775 }
2776 break;
2777
2778 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2779 {
2780 // Just in case we ever mix some of these in.
2781 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2782 const auto& asFeatures = context.getAccelerationStructureFeatures();
2783 if (!asFeatures.descriptorBindingAccelerationStructureUpdateAfterBind)
2784 TCU_THROW(NotSupportedError, "Update-after-bind not supported for acceleration structures");
2785 }
2786 break;
2787
2788 case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
2789 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_EXT in list of used descriptor types");
2790
2791 default:
2792 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2793 }
2794 }
2795 }
2796
2797 if (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT)
2798 {
2799 // These require dynamically uniform indices.
2800 const auto& usedDescriptorTypes = getUsedArrayDescriptorTypes(*m_params.descriptorSet, numIterations);
2801 const auto& features = context.getDeviceFeatures();
2802 const auto descriptorIndexingSupported = context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing");
2803 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2804
2805 for (const auto& descType : usedDescriptorTypes)
2806 {
2807 switch (descType)
2808 {
2809 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2810 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2811 if (!features.shaderUniformBufferArrayDynamicIndexing)
2812 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform buffers");
2813 break;
2814
2815 case VK_DESCRIPTOR_TYPE_SAMPLER:
2816 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2817 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2818 if (!features.shaderSampledImageArrayDynamicIndexing)
2819 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for samplers and sampled images");
2820 break;
2821
2822 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2823 if (!features.shaderStorageImageArrayDynamicIndexing)
2824 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage images");
2825 break;
2826
2827 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2828 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2829 if (!features.shaderStorageBufferArrayDynamicIndexing)
2830 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage buffers");
2831 break;
2832
2833 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2834 if (!descriptorIndexingSupported || !indexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing)
2835 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform texel buffers");
2836 break;
2837
2838 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2839 if (!descriptorIndexingSupported || !indexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing)
2840 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage texel buffers");
2841 break;
2842
2843 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2844 if (!descriptorIndexingSupported || !indexingFeatures.shaderInputAttachmentArrayDynamicIndexing)
2845 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for input attachments");
2846 break;
2847
2848 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2849 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2850 break;
2851
2852 case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
2853 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_EXT in list of used array descriptor types");
2854
2855 default:
2856 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2857 }
2858 }
2859 }
2860
2861 // Check layout support.
2862 {
2863 const auto& vkd = context.getDeviceInterface();
2864 const auto device = getDevice(context);
2865 const auto stageFlags = m_params.getStageFlags();
2866
2867 {
2868 const auto layoutCreateFlags = m_params.getDstLayoutCreateFlags();
2869 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2870
2871 if (!supported)
2872 TCU_THROW(NotSupportedError, "Required descriptor set layout not supported");
2873 }
2874
2875 if (m_params.updateType == UpdateType::COPY)
2876 {
2877 const auto layoutCreateFlags = m_params.getSrcLayoutCreateFlags();
2878 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2879
2880 if (!supported)
2881 TCU_THROW(NotSupportedError, "Required descriptor set layout for source set not supported");
2882
2883 			// Check that the specific layouts for the different source sets are supported.
2884 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2885 {
2886 const auto srcSet = m_params.descriptorSet->genSourceSet(m_params.sourceSetStrategy, iter);
2887 const auto srcLayoutSupported = srcSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2888
2889 if (!srcLayoutSupported)
2890 TCU_THROW(NotSupportedError, "Descriptor set layout for source set at iteration " + de::toString(iter) + " not supported");
2891 }
2892 }
2893 }
2894
2895 	// Check the shader stage features and store/atomic support required by the tested stage.
2896 const bool vertexStage = isVertexStage(m_params.testingStage);
2897 const bool fragmentStage = (m_params.testingStage == TestingStage::FRAGMENT);
2898 const bool geometryStage = (m_params.testingStage == TestingStage::GEOMETRY);
2899 const bool tessellation = (m_params.testingStage == TestingStage::TESS_CONTROL || m_params.testingStage == TestingStage::TESS_EVAL);
2900
2901 const auto& features = context.getDeviceFeatures();
2902
2903 if (vertexStage && !features.vertexPipelineStoresAndAtomics)
2904 TCU_THROW(NotSupportedError, "Vertex pipeline stores and atomics not supported");
2905
2906 if (fragmentStage && !features.fragmentStoresAndAtomics)
2907 TCU_THROW(NotSupportedError, "Fragment shader stores and atomics not supported");
2908
2909 if (geometryStage && !features.geometryShader)
2910 TCU_THROW(NotSupportedError, "Geometry shader not supported");
2911
2912 if (tessellation && !features.tessellationShader)
2913 TCU_THROW(NotSupportedError, "Tessellation shaders not supported");
2914 }
2915
2916 // What to do at each step of an iteration. The step order determines whether update-after-bind is exercised.
2917 enum class Step
2918 {
2919 UPDATE = 0,
2920 BIND,
2921 };
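// Illustrative note on the ordering used below: in the normal case the steps array is
// { UPDATE, BIND }, so the set is written before being bound; with update-after-bind it is
// { BIND, UPDATE }, so the set is written after being bound in the command buffer but before
// submission, which is legal for bindings created with UPDATE_AFTER_BIND flags.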
2922
2923 // Create render pass.
2924 Move<VkRenderPass> buildRenderPass (const DeviceInterface& vkd, VkDevice device, const std::vector<Resource>& resources)
2925 {
2926 const auto imageFormat = getDescriptorImageFormat();
2927
2928 std::vector<VkAttachmentDescription> attachmentDescriptions;
2929 std::vector<VkAttachmentReference> attachmentReferences;
2930 std::vector<deUint32> attachmentIndices;
2931
2932 for (const auto& resource : resources)
2933 {
2934 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2935 {
2936 const auto nextIndex = static_cast<deUint32>(attachmentDescriptions.size());
2937
2938 const VkAttachmentDescription description = {
2939 0u, // VkAttachmentDescriptionFlags flags;
2940 imageFormat, // VkFormat format;
2941 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
2942 VK_ATTACHMENT_LOAD_OP_LOAD, // VkAttachmentLoadOp loadOp;
2943 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp storeOp;
2944 VK_ATTACHMENT_LOAD_OP_DONT_CARE, // VkAttachmentLoadOp stencilLoadOp;
2945 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp stencilStoreOp;
2946 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout initialLayout;
2947 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout finalLayout;
2948 };
2949
2950 const VkAttachmentReference reference = { nextIndex, VK_IMAGE_LAYOUT_GENERAL };
2951
2952 attachmentIndices.push_back(nextIndex);
2953 attachmentDescriptions.push_back(description);
2954 attachmentReferences.push_back(reference);
2955 }
2956 }
2957
2958 const auto attachmentCount = static_cast<deUint32>(attachmentDescriptions.size());
2959 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentIndices.size()));
2960 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentReferences.size()));
2961
2962 const VkSubpassDescription subpassDescription =
2963 {
2964 0u, // VkSubpassDescriptionFlags flags;
2965 VK_PIPELINE_BIND_POINT_GRAPHICS, // VkPipelineBindPoint pipelineBindPoint;
2966 attachmentCount, // deUint32 inputAttachmentCount;
2967 de::dataOrNull(attachmentReferences), // const VkAttachmentReference* pInputAttachments;
2968 0u, // deUint32 colorAttachmentCount;
2969 nullptr, // const VkAttachmentReference* pColorAttachments;
2970 		nullptr,								//	const VkAttachmentReference*	pResolveAttachments;
2971 nullptr, // const VkAttachmentReference* pDepthStencilAttachment;
2972 0u, // deUint32 preserveAttachmentCount;
2973 nullptr, // const deUint32* pPreserveAttachments;
2974 };
2975
2976 const VkRenderPassCreateInfo renderPassCreateInfo =
2977 {
2978 VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, // VkStructureType sType;
2979 nullptr, // const void* pNext;
2980 0u, // VkRenderPassCreateFlags flags;
2981 static_cast<deUint32>(attachmentDescriptions.size()), // deUint32 attachmentCount;
2982 de::dataOrNull(attachmentDescriptions), // const VkAttachmentDescription* pAttachments;
2983 1u, // deUint32 subpassCount;
2984 &subpassDescription, // const VkSubpassDescription* pSubpasses;
2985 0u, // deUint32 dependencyCount;
2986 nullptr, // const VkSubpassDependency* pDependencies;
2987 };
2988
2989 return createRenderPass(vkd, device, &renderPassCreateInfo);
2990 }
2991
2992 // Create a graphics pipeline.
2993 Move<VkPipeline> buildGraphicsPipeline (const DeviceInterface& vkd, VkDevice device, VkPipelineLayout pipelineLayout,
2994 VkShaderModule vertModule,
2995 VkShaderModule tescModule,
2996 VkShaderModule teseModule,
2997 VkShaderModule geomModule,
2998 VkShaderModule fragModule,
2999 VkRenderPass renderPass)
3000 {
3001 const auto extent = getDefaultExtent();
3002 const std::vector<VkViewport> viewports (1u, makeViewport(extent));
3003 const std::vector<VkRect2D> scissors (1u, makeRect2D(extent));
3004 const auto hasTess = (tescModule != DE_NULL || teseModule != DE_NULL);
3005 const auto topology = (hasTess ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
3006
3007
3008 const VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo = initVulkanStructure();
3009
3010 const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo = {
3011 VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, // VkStructureType sType;
3012 nullptr, // const void* pNext;
3013 0u, // VkPipelineInputAssemblyStateCreateFlags flags;
3014 topology, // VkPrimitiveTopology topology;
3015 VK_FALSE, // VkBool32 primitiveRestartEnable;
3016 };
3017
3018 const VkPipelineTessellationStateCreateInfo tessellationStateCreateInfo = {
3019 VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, // VkStructureType sType;
3020 nullptr, // const void* pNext;
3021 0u, // VkPipelineTessellationStateCreateFlags flags;
3022 (hasTess ? 3u : 0u), // deUint32 patchControlPoints;
3023 };
3024
3025 const VkPipelineViewportStateCreateInfo viewportStateCreateInfo = {
3026 VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, // VkStructureType sType;
3027 nullptr, // const void* pNext;
3028 0u, // VkPipelineViewportStateCreateFlags flags;
3029 static_cast<deUint32>(viewports.size()), // deUint32 viewportCount;
3030 de::dataOrNull(viewports), // const VkViewport* pViewports;
3031 static_cast<deUint32>(scissors.size()), // deUint32 scissorCount;
3032 de::dataOrNull(scissors), // const VkRect2D* pScissors;
3033 };
3034
3035 const VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo = {
3036 VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, // VkStructureType sType;
3037 nullptr, // const void* pNext;
3038 0u, // VkPipelineRasterizationStateCreateFlags flags;
3039 VK_FALSE, // VkBool32 depthClampEnable;
3040 (fragModule == DE_NULL ? VK_TRUE : VK_FALSE), // VkBool32 rasterizerDiscardEnable;
3041 VK_POLYGON_MODE_FILL, // VkPolygonMode polygonMode;
3042 VK_CULL_MODE_NONE, // VkCullModeFlags cullMode;
3043 VK_FRONT_FACE_CLOCKWISE, // VkFrontFace frontFace;
3044 VK_FALSE, // VkBool32 depthBiasEnable;
3045 0.0f, // float depthBiasConstantFactor;
3046 0.0f, // float depthBiasClamp;
3047 0.0f, // float depthBiasSlopeFactor;
3048 1.0f, // float lineWidth;
3049 };
3050
3051 const VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo = {
3052 VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, // VkStructureType sType;
3053 nullptr, // const void* pNext;
3054 0u, // VkPipelineMultisampleStateCreateFlags flags;
3055 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits rasterizationSamples;
3056 VK_FALSE, // VkBool32 sampleShadingEnable;
3057 1.0f, // float minSampleShading;
3058 nullptr, // const VkSampleMask* pSampleMask;
3059 VK_FALSE, // VkBool32 alphaToCoverageEnable;
3060 VK_FALSE, // VkBool32 alphaToOneEnable;
3061 };
3062
3063 const VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo = initVulkanStructure();
3064
3065 const VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo = initVulkanStructure();
3066
3067 return makeGraphicsPipeline(vkd, device, pipelineLayout,
3068 vertModule, tescModule, teseModule, geomModule, fragModule,
3069 renderPass, 0u, &vertexInputStateCreateInfo, &inputAssemblyStateCreateInfo,
3070 (hasTess ? &tessellationStateCreateInfo : nullptr), &viewportStateCreateInfo,
3071 &rasterizationStateCreateInfo, &multisampleStateCreateInfo,
3072 &depthStencilStateCreateInfo, &colorBlendStateCreateInfo, nullptr);
3073 }
3074
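// Create a framebuffer for the render pass above, using the image views of the input attachment resources in order.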
3075 Move<VkFramebuffer> buildFramebuffer (const DeviceInterface& vkd, VkDevice device, VkRenderPass renderPass, const std::vector<Resource>& resources)
3076 {
3077 const auto extent = getDefaultExtent();
3078
3079 std::vector<VkImageView> inputAttachments;
3080 for (const auto& resource : resources)
3081 {
3082 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
3083 inputAttachments.push_back(resource.imageView.get());
3084 }
3085
3086 const VkFramebufferCreateInfo framebufferCreateInfo =
3087 {
3088 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, // VkStructureType sType;
3089 nullptr, // const void* pNext;
3090 0u, // VkFramebufferCreateFlags flags;
3091 renderPass, // VkRenderPass renderPass;
3092 static_cast<deUint32>(inputAttachments.size()), // deUint32 attachmentCount;
3093 		de::dataOrNull(inputAttachments),				//	const VkImageView*		pAttachments;
3094 extent.width, // deUint32 width;
3095 extent.height, // deUint32 height;
3096 extent.depth, // deUint32 layers;
3097 };
3098
3099 return createFramebuffer(vkd, device, &framebufferCreateInfo);
3100 }
3101
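// Main check: one iteration per mutable descriptor type. Each iteration builds the pipeline for
// the tested stage, updates (or copies) the descriptor set, runs the shader and verifies both the
// output buffer and the values stored through writable descriptors.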
3102 tcu::TestStatus MutableTypesInstance::iterate ()
3103 {
3104 const auto& vki = m_context.getInstanceInterface();
3105 const auto& vkd = m_context.getDeviceInterface();
3106 const auto device = getDevice(m_context);
3107 const auto physDev = m_context.getPhysicalDevice();
3108 const auto qIndex = m_context.getUniversalQueueFamilyIndex();
3109 	const auto	queue				= getDeviceQueue(vkd, device, qIndex, 0u);
3110
3111 	SimpleAllocator alloc(vkd, device, getPhysicalDeviceMemoryProperties(vki, physDev));
3112
3113 const auto& paramSet = m_params.descriptorSet;
3114 const auto numIterations = paramSet->maxTypes();
3115 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
3116 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
3117 const auto stageFlags = m_params.getStageFlags();
3118 const bool srcSetNeeded = (m_params.updateType == UpdateType::COPY);
3119 const bool updateAfterBind = (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND);
3120 const auto bindPoint = m_params.getBindPoint();
3121 const bool rayTracing = isRayTracingStage(m_params.testingStage);
3122 const bool useAABBs = (m_params.testingStage == TestingStage::INTERSECTION);
3123
3124 // Resources for each iteration.
3125 std::vector<std::vector<Resource>> allResources;
3126 allResources.reserve(numIterations);
3127
3128 // Command pool.
3129 const auto cmdPool = makeCommandPool(vkd, device, qIndex);
3130
3131 // Descriptor pool and set for the active (dst) descriptor set.
3132 const auto dstPoolFlags = m_params.getDstPoolCreateFlags();
3133 const auto dstLayoutFlags = m_params.getDstLayoutCreateFlags();
3134
3135 const auto dstPool = paramSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, dstPoolFlags);
3136 const auto dstLayout = paramSet->makeDescriptorSetLayout(vkd, device, stageFlags, dstLayoutFlags);
3137 const auto varCount = paramSet->getVariableDescriptorCount();
3138
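	// When the last binding is an unbounded array, chain its variable descriptor count into the set allocation.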
3139 using VariableCountInfoPtr = de::MovePtr<VkDescriptorSetVariableDescriptorCountAllocateInfo>;
3140
3141 VariableCountInfoPtr dstVariableCountInfo;
3142 if (varCount)
3143 {
3144 dstVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3145 *dstVariableCountInfo = initVulkanStructure();
3146
3147 dstVariableCountInfo->descriptorSetCount = 1u;
3148 dstVariableCountInfo->pDescriptorCounts = &(varCount.get());
3149 }
3150 const auto dstSet = makeDescriptorSet(vkd, device, dstPool.get(), dstLayout.get(), dstVariableCountInfo.get());
3151
3152 // Source pool and set (optional).
3153 const auto srcPoolFlags = m_params.getSrcPoolCreateFlags();
3154 const auto srcLayoutFlags = m_params.getSrcLayoutCreateFlags();
3155 DescriptorSetPtr iterationSrcSet;
3156 Move<VkDescriptorPool> srcPool;
3157 Move<VkDescriptorSetLayout> srcLayout;
3158 Move<VkDescriptorSet> srcSet;
3159
3160 // Extra set for external resources and output buffer.
3161 std::vector<Resource> extraResources;
3162 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue, useAABBs, 0u, numIterations);
3163 if (useExternalImage)
3164 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, vkd, device, alloc, qIndex, queue, useAABBs, getExternalSampledImageValue());
3165 if (useExternalSampler)
3166 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLER, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
3167 if (rayTracing)
3168 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
3169
3170 Move<VkDescriptorPool> extraPool;
3171 {
3172 DescriptorPoolBuilder poolBuilder;
3173 poolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
3174 if (useExternalImage)
3175 poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
3176 if (useExternalSampler)
3177 poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLER);
3178 if (rayTracing)
3179 poolBuilder.addType(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
3180 extraPool = poolBuilder.build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3181 }
3182
3183 Move<VkDescriptorSetLayout> extraLayout;
3184 {
3185 DescriptorSetLayoutBuilder layoutBuilder;
3186 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, stageFlags, nullptr);
3187 if (useExternalImage)
3188 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1u, stageFlags, nullptr);
3189 if (useExternalSampler)
3190 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLER, 1u, stageFlags, nullptr);
3191 if (rayTracing)
3192 {
3193 // The extra acceleration structure is used from the ray generation shader only.
3194 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 1u, VK_SHADER_STAGE_RAYGEN_BIT_KHR, nullptr);
3195 }
3196 extraLayout = layoutBuilder.build(vkd, device);
3197 }
3198
3199 const auto extraSet = makeDescriptorSet(vkd, device, extraPool.get(), extraLayout.get());
3200
3201 // Update extra set.
3202 using DescriptorBufferInfoPtr = de::MovePtr<VkDescriptorBufferInfo>;
3203 using DescriptorImageInfoPtr = de::MovePtr<VkDescriptorImageInfo>;
3204 using DescriptorASInfoPtr = de::MovePtr<VkWriteDescriptorSetAccelerationStructureKHR>;
3205
3206 deUint32 bindingCount = 0u;
3207 DescriptorBufferInfoPtr bufferInfoPtr;
3208 DescriptorImageInfoPtr imageInfoPtr;
3209 DescriptorImageInfoPtr samplerInfoPtr;
3210 DescriptorASInfoPtr asWriteInfoPtr;
3211
3212 const auto outputBufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numIterations));
3213 bufferInfoPtr = DescriptorBufferInfoPtr(new VkDescriptorBufferInfo(makeDescriptorBufferInfo(extraResources[bindingCount++].bufferWithMemory->get(), 0ull, outputBufferSize)));
3214 if (useExternalImage)
3215 imageInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(DE_NULL, extraResources[bindingCount++].imageView.get(), VK_IMAGE_LAYOUT_GENERAL)));
3216 if (useExternalSampler)
3217 samplerInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(extraResources[bindingCount++].sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_GENERAL)));
3218 if (rayTracing)
3219 {
3220 asWriteInfoPtr = DescriptorASInfoPtr(new VkWriteDescriptorSetAccelerationStructureKHR);
3221 *asWriteInfoPtr = initVulkanStructure();
3222 asWriteInfoPtr->accelerationStructureCount = 1u;
3223 asWriteInfoPtr->pAccelerationStructures = extraResources[bindingCount++].asData.tlas.get()->getPtr();
3224 }
3225
3226 {
3227 bindingCount = 0u;
3228 DescriptorSetUpdateBuilder updateBuilder;
3229 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, bufferInfoPtr.get());
3230 if (useExternalImage)
3231 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, imageInfoPtr.get());
3232 if (useExternalSampler)
3233 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLER, samplerInfoPtr.get());
3234 if (rayTracing)
3235 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, asWriteInfoPtr.get());
3236 updateBuilder.update(vkd, device);
3237 }
3238
3239 // Push constants.
3240 const deUint32 zero = 0u;
3241 const VkPushConstantRange pcRange = {stageFlags, 0u /*offset*/, static_cast<deUint32>(sizeof(zero)) /*size*/ };
3242
3243 // Needed for some test variants.
3244 Move<VkShaderModule> vertPassthrough;
3245 Move<VkShaderModule> tesePassthrough;
3246 Move<VkShaderModule> tescPassthrough;
3247 Move<VkShaderModule> rgenPassthrough;
3248 Move<VkShaderModule> missPassthrough;
3249
3250 if (m_params.testingStage == TestingStage::FRAGMENT
3251 || m_params.testingStage == TestingStage::GEOMETRY
3252 || m_params.testingStage == TestingStage::TESS_CONTROL
3253 || m_params.testingStage == TestingStage::TESS_EVAL)
3254 {
3255 vertPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("vert"), 0u);
3256 }
3257
3258 if (m_params.testingStage == TestingStage::TESS_CONTROL)
3259 {
3260 tesePassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tese"), 0u);
3261 }
3262
3263 if (m_params.testingStage == TestingStage::TESS_EVAL)
3264 {
3265 tescPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tesc"), 0u);
3266 }
3267
3268 if (m_params.testingStage == TestingStage::CLOSEST_HIT
3269 || m_params.testingStage == TestingStage::ANY_HIT
3270 || m_params.testingStage == TestingStage::INTERSECTION
3271 || m_params.testingStage == TestingStage::MISS
3272 || m_params.testingStage == TestingStage::CALLABLE)
3273 {
3274 rgenPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("rgen"), 0u);
3275 }
3276
3277 if (m_params.testingStage == TestingStage::INTERSECTION)
3278 {
3279 missPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("miss"), 0u);
3280 }
3281
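	// Iterate once per descriptor type used by the mutable bindings, recreating resources and pipelines each time.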
3282 for (deUint32 iteration = 0u; iteration < numIterations; ++iteration)
3283 {
3284 // Generate source set for the current iteration.
3285 if (srcSetNeeded)
3286 {
3287 // Free previous descriptor set before rebuilding the pool.
3288 srcSet = Move<VkDescriptorSet>();
3289 iterationSrcSet = paramSet->genSourceSet(m_params.sourceSetStrategy, iteration);
3290 srcPool = iterationSrcSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, srcPoolFlags);
3291 srcLayout = iterationSrcSet->makeDescriptorSetLayout(vkd, device, stageFlags, srcLayoutFlags);
3292
3293 const auto srcVarCount = iterationSrcSet->getVariableDescriptorCount();
3294 VariableCountInfoPtr srcVariableCountInfo;
3295
3296 if (srcVarCount)
3297 {
3298 srcVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3299 *srcVariableCountInfo = initVulkanStructure();
3300
3301 srcVariableCountInfo->descriptorSetCount = 1u;
3302 srcVariableCountInfo->pDescriptorCounts = &(srcVarCount.get());
3303 }
3304
3305 srcSet = makeDescriptorSet(vkd, device, srcPool.get(), srcLayout.get(), srcVariableCountInfo.get());
3306 }
3307
3308 // Set layouts and sets used in the pipeline.
3309 const std::vector<VkDescriptorSetLayout> setLayouts = {dstLayout.get(), extraLayout.get()};
3310 const std::vector<VkDescriptorSet> usedSets = {dstSet.get(), extraSet.get()};
3311
3312 // Create resources.
3313 allResources.emplace_back(paramSet->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs));
3314 const auto& resources = allResources.back();
3315
3316 // Make pipeline for the current iteration.
3317 const auto pipelineLayout = makePipelineLayout(vkd, device, static_cast<deUint32>(setLayouts.size()), de::dataOrNull(setLayouts), 1u, &pcRange);
3318 const auto moduleName = shaderName(iteration);
3319 const auto shaderModule = createShaderModule(vkd, device, m_context.getBinaryCollection().get(moduleName), 0u);
3320
3321 Move<VkPipeline> pipeline;
3322 Move<VkRenderPass> renderPass;
3323 Move<VkFramebuffer> framebuffer;
3324
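		// Ray tracing only: shader binding tables and their device address regions, filled when building the ray tracing pipeline below.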
3325 deUint32 shaderGroupHandleSize = 0u;
3326 deUint32 shaderGroupBaseAlignment = 1u;
3327
3328 de::MovePtr<BufferWithMemory> raygenSBT;
3329 de::MovePtr<BufferWithMemory> missSBT;
3330 de::MovePtr<BufferWithMemory> hitSBT;
3331 de::MovePtr<BufferWithMemory> callableSBT;
3332
3333 VkStridedDeviceAddressRegionKHR raygenSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3334 VkStridedDeviceAddressRegionKHR missSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3335 VkStridedDeviceAddressRegionKHR hitSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3336 VkStridedDeviceAddressRegionKHR callableSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3337
3338 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3339 pipeline = makeComputePipeline(vkd, device, pipelineLayout.get(), 0u, shaderModule.get(), 0u, nullptr);
3340 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
3341 {
3342 VkShaderModule vertModule = DE_NULL;
3343 VkShaderModule teseModule = DE_NULL;
3344 VkShaderModule tescModule = DE_NULL;
3345 VkShaderModule geomModule = DE_NULL;
3346 VkShaderModule fragModule = DE_NULL;
3347
3348 if (m_params.testingStage == TestingStage::VERTEX)
3349 vertModule = shaderModule.get();
3350 else if (m_params.testingStage == TestingStage::FRAGMENT)
3351 {
3352 vertModule = vertPassthrough.get();
3353 fragModule = shaderModule.get();
3354 }
3355 else if (m_params.testingStage == TestingStage::GEOMETRY)
3356 {
3357 vertModule = vertPassthrough.get();
3358 geomModule = shaderModule.get();
3359 }
3360 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
3361 {
3362 vertModule = vertPassthrough.get();
3363 teseModule = tesePassthrough.get();
3364 tescModule = shaderModule.get();
3365 }
3366 else if (m_params.testingStage == TestingStage::TESS_EVAL)
3367 {
3368 vertModule = vertPassthrough.get();
3369 tescModule = tescPassthrough.get();
3370 teseModule = shaderModule.get();
3371 }
3372 else
3373 DE_ASSERT(false);
3374
3375 renderPass = buildRenderPass(vkd, device, resources);
3376 pipeline = buildGraphicsPipeline(vkd, device, pipelineLayout.get(), vertModule, tescModule, teseModule, geomModule, fragModule, renderPass.get());
3377 framebuffer = buildFramebuffer(vkd, device, renderPass.get(), resources);
3378 }
3379 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3380 {
3381 const auto rayTracingPipeline = de::newMovePtr<RayTracingPipeline>();
3382 const auto rayTracingPropertiesKHR = makeRayTracingProperties(vki, physDev);
3383 shaderGroupHandleSize = rayTracingPropertiesKHR->getShaderGroupHandleSize();
3384 shaderGroupBaseAlignment = rayTracingPropertiesKHR->getShaderGroupBaseAlignment();
3385
3386 VkShaderModule rgenModule = DE_NULL;
3387 VkShaderModule isecModule = DE_NULL;
3388 VkShaderModule ahitModule = DE_NULL;
3389 VkShaderModule chitModule = DE_NULL;
3390 VkShaderModule missModule = DE_NULL;
3391 VkShaderModule callModule = DE_NULL;
3392
3393 const deUint32 rgenGroup = 0u;
3394 deUint32 hitGroup = 0u;
3395 deUint32 missGroup = 0u;
3396 deUint32 callGroup = 0u;
3397
3398 if (m_params.testingStage == TestingStage::RAY_GEN)
3399 {
3400 rgenModule = shaderModule.get();
3401 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3402 }
3403 else if (m_params.testingStage == TestingStage::INTERSECTION)
3404 {
3405 hitGroup = 1u;
3406 missGroup = 2u;
3407 rgenModule = rgenPassthrough.get();
3408 missModule = missPassthrough.get();
3409 isecModule = shaderModule.get();
3410 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3411 rayTracingPipeline->addShader(VK_SHADER_STAGE_INTERSECTION_BIT_KHR, isecModule, hitGroup);
3412 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3413 }
3414 else if (m_params.testingStage == TestingStage::ANY_HIT)
3415 {
3416 hitGroup = 1u;
3417 rgenModule = rgenPassthrough.get();
3418 ahitModule = shaderModule.get();
3419 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3420 rayTracingPipeline->addShader(VK_SHADER_STAGE_ANY_HIT_BIT_KHR, ahitModule, hitGroup);
3421 }
3422 else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
3423 {
3424 hitGroup = 1u;
3425 rgenModule = rgenPassthrough.get();
3426 chitModule = shaderModule.get();
3427 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3428 rayTracingPipeline->addShader(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, chitModule, hitGroup);
3429 }
3430 else if (m_params.testingStage == TestingStage::MISS)
3431 {
3432 missGroup = 1u;
3433 rgenModule = rgenPassthrough.get();
3434 missModule = shaderModule.get();
3435 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3436 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3437 }
3438 else if (m_params.testingStage == TestingStage::CALLABLE)
3439 {
3440 callGroup = 1u;
3441 rgenModule = rgenPassthrough.get();
3442 callModule = shaderModule.get();
3443 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3444 rayTracingPipeline->addShader(VK_SHADER_STAGE_CALLABLE_BIT_KHR, callModule, callGroup);
3445 }
3446 else
3447 DE_ASSERT(false);
3448
3449 pipeline = rayTracingPipeline->createPipeline(vkd, device, pipelineLayout.get());
3450
3451 raygenSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, rgenGroup, 1u);
3452 raygenSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, raygenSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3453
3454 if (missGroup > 0u)
3455 {
3456 missSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, missGroup, 1u);
3457 missSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, missSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3458 }
3459
3460 if (hitGroup > 0u)
3461 {
3462 hitSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, hitGroup, 1u);
3463 hitSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, hitSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3464 }
3465
3466 if (callGroup > 0u)
3467 {
3468 callableSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, callGroup, 1u);
3469 callableSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, callableSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3470 }
3471 }
3472 else
3473 DE_ASSERT(false);
3474
3475 // Command buffer for the current iteration.
3476 const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3477 const auto cmdBuffer = cmdBufferPtr.get();
3478
3479 beginCommandBuffer(vkd, cmdBuffer);
3480
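		// Record update and bind in the order required by the update moment (see the Step enum above).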
3481 const Step steps[] = {
3482 (updateAfterBind ? Step::BIND : Step::UPDATE),
3483 (updateAfterBind ? Step::UPDATE : Step::BIND)
3484 };
3485
3486 for (const auto& step : steps)
3487 {
3488 if (step == Step::BIND)
3489 {
3490 vkd.cmdBindPipeline(cmdBuffer, bindPoint, pipeline.get());
3491 vkd.cmdBindDescriptorSets(cmdBuffer, bindPoint, pipelineLayout.get(), 0u, static_cast<deUint32>(usedSets.size()), de::dataOrNull(usedSets), 0u, nullptr);
3492 }
3493 else // Step::UPDATE
3494 {
3495 if (srcSetNeeded)
3496 {
3497 					// Note: these operations need to be called on paramSet and not iterationSrcSet. The latter is a
3498 					// compatible set with compatible bindings but, when a binding has been changed from non-mutable to
3499 					// mutable or to an extended mutable type, the lists of descriptor types for the mutable bindings in
3500 					// iterationSrcSet are not in iteration order like they are in the original set, so they must not be
3501 					// used when updating or copying sets.
3502 paramSet->updateDescriptorSet(vkd, device, srcSet.get(), iteration, resources);
3503 paramSet->copyDescriptorSet(vkd, device, srcSet.get(), dstSet.get());
3504 }
3505 else
3506 {
3507 paramSet->updateDescriptorSet(vkd, device, dstSet.get(), iteration, resources);
3508 }
3509 }
3510 }
3511
3512 // Run shader.
3513 vkd.cmdPushConstants(cmdBuffer, pipelineLayout.get(), stageFlags, 0u, static_cast<deUint32>(sizeof(zero)), &zero);
3514
3515 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3516 vkd.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
3517 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
3518 {
3519 const auto extent = getDefaultExtent();
3520 const auto renderArea = makeRect2D(extent);
3521
3522 beginRenderPass(vkd, cmdBuffer, renderPass.get(), framebuffer.get(), renderArea);
3523 vkd.cmdDraw(cmdBuffer, 3u, 1u, 0u, 0u);
3524 endRenderPass(vkd, cmdBuffer);
3525 }
3526 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3527 {
3528 vkd.cmdTraceRaysKHR(cmdBuffer, &raygenSBTRegion, &missSBTRegion, &hitSBTRegion, &callableSBTRegion, 1u, 1u, 1u);
3529 }
3530 else
3531 DE_ASSERT(false);
3532
3533 endCommandBuffer(vkd, cmdBuffer);
3534 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
3535
3536 // Verify output buffer.
3537 {
3538 const auto outputBufferVal = extraResources[0].getStoredValue(vkd, device, alloc, qIndex, queue, iteration);
3539 DE_ASSERT(static_cast<bool>(outputBufferVal));
3540
3541 const auto expectedValue = getExpectedOutputBufferValue();
3542 if (outputBufferVal.get() != expectedValue)
3543 {
3544 std::ostringstream msg;
3545 msg << "Iteration " << iteration << ": unexpected value found in output buffer (expected " << expectedValue << " and found " << outputBufferVal.get() << ")";
3546 TCU_FAIL(msg.str());
3547 }
3548 }
3549
3550 // Verify descriptor writes.
3551 {
3552 size_t resourcesOffset = 0;
3553 const auto writeMask = getStoredValueMask();
3554 const auto numBindings = paramSet->numBindings();
3555
3556 for (deUint32 bindingIdx = 0u; bindingIdx < numBindings; ++bindingIdx)
3557 {
3558 const auto binding = paramSet->getBinding(bindingIdx);
3559 const auto bindingTypes = binding->typesAtIteration(iteration);
3560
3561 for (size_t descriptorIdx = 0; descriptorIdx < bindingTypes.size(); ++descriptorIdx)
3562 {
3563 const auto& descriptorType = bindingTypes[descriptorIdx];
3564 if (!isShaderWritable(descriptorType))
3565 continue;
3566
3567 const auto& resource = resources[resourcesOffset + descriptorIdx];
3568 const auto initialValue = resource.initialValue;
3569 const auto storedValuePtr = resource.getStoredValue(vkd, device, alloc, qIndex, queue);
3570
3571 DE_ASSERT(static_cast<bool>(storedValuePtr));
3572 const auto storedValue = storedValuePtr.get();
3573 const auto expectedValue = (initialValue | writeMask);
3574 if (expectedValue != storedValue)
3575 {
3576 std::ostringstream msg;
3577 msg << "Iteration " << iteration << ": descriptor at binding " << bindingIdx << " index " << descriptorIdx
3578 << " with type " << de::toString(descriptorType) << " contains unexpected value " << std::hex
3579 << storedValue << " (expected " << expectedValue << ")";
3580 TCU_FAIL(msg.str());
3581 }
3582 }
3583
3584 resourcesOffset += bindingTypes.size();
3585 }
3586 }
3587 }
3588
3589 return tcu::TestStatus::pass("Pass");
3590 }
3591
3592 using GroupPtr = de::MovePtr<tcu::TestCaseGroup>;
3593
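// Creates the full variant tree for the given descriptor set: update type, source set strategy and
// type, pool mutable strategy, update moment, array access type and shader stage.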
3594 void createMutableTestVariants (tcu::TestContext& testCtx, tcu::TestCaseGroup* parentGroup, const DescriptorSetPtr& descriptorSet, const std::vector<TestingStage>& stagesToTest)
3595 {
3596 const struct
3597 {
3598 UpdateType updateType;
3599 const char* name;
3600 } updateTypes[] = {
3601 {UpdateType::WRITE, "update_write"},
3602 {UpdateType::COPY, "update_copy"},
3603 };
3604
3605 const struct
3606 {
3607 SourceSetStrategy sourceSetStrategy;
3608 const char* name;
3609 } sourceStrategies[] = {
3610 {SourceSetStrategy::MUTABLE, "mutable_source"},
3611 {SourceSetStrategy::NONMUTABLE, "nonmutable_source"},
3612 {SourceSetStrategy::NO_SOURCE, "no_source"},
3613 };
3614
3615 const struct
3616 {
3617 SourceSetType sourceSetType;
3618 const char* name;
3619 } sourceTypes[] = {
3620 {SourceSetType::NORMAL, "normal_source"},
3621 {SourceSetType::HOST_ONLY, "host_only_source"},
3622 {SourceSetType::NO_SOURCE, "no_source"},
3623 };
3624
3625 const struct
3626 {
3627 PoolMutableStrategy poolMutableStrategy;
3628 const char* name;
3629 } poolStrategies[] = {
3630 {PoolMutableStrategy::KEEP_TYPES, "pool_same_types"},
3631 {PoolMutableStrategy::NO_TYPES, "pool_no_types"},
3632 {PoolMutableStrategy::EXPAND_TYPES, "pool_expand_types"},
3633 };
3634
3635 const struct
3636 {
3637 UpdateMoment updateMoment;
3638 const char* name;
3639 } updateMoments[] = {
3640 {UpdateMoment::NORMAL, "pre_update"},
3641 {UpdateMoment::UPDATE_AFTER_BIND, "update_after_bind"},
3642 };
3643
3644 const struct
3645 {
3646 ArrayAccessType arrayAccessType;
3647 const char* name;
3648 } arrayAccessTypes[] = {
3649 {ArrayAccessType::CONSTANT, "index_constant"},
3650 {ArrayAccessType::PUSH_CONSTANT, "index_push_constant"},
3651 {ArrayAccessType::NO_ARRAY, "no_array"},
3652 };
3653
3654 const struct StageAndName
3655 {
3656 TestingStage testingStage;
3657 const char* name;
3658 } testStageList[] = {
3659 {TestingStage::COMPUTE, "comp"},
3660 {TestingStage::VERTEX, "vert"},
3661 {TestingStage::TESS_CONTROL, "tesc"},
3662 {TestingStage::TESS_EVAL, "tese"},
3663 {TestingStage::GEOMETRY, "geom"},
3664 {TestingStage::FRAGMENT, "frag"},
3665 {TestingStage::RAY_GEN, "rgen"},
3666 {TestingStage::INTERSECTION, "isec"},
3667 {TestingStage::ANY_HIT, "ahit"},
3668 {TestingStage::CLOSEST_HIT, "chit"},
3669 {TestingStage::MISS, "miss"},
3670 {TestingStage::CALLABLE, "call"},
3671 };
3672
3673 const bool hasArrays = descriptorSet->hasArrays();
3674 const bool hasInputAttachments = usesInputAttachments(*descriptorSet);
3675
3676 for (const auto& ut : updateTypes)
3677 {
3678 GroupPtr updateGroup(new tcu::TestCaseGroup(testCtx, ut.name, ""));
3679
3680 for (const auto& srcStrategy : sourceStrategies)
3681 {
3682 // Skip combinations that make no sense.
3683 if (ut.updateType == UpdateType::WRITE && srcStrategy.sourceSetStrategy != SourceSetStrategy::NO_SOURCE)
3684 continue;
3685
3686 if (ut.updateType == UpdateType::COPY && srcStrategy.sourceSetStrategy == SourceSetStrategy::NO_SOURCE)
3687 continue;
3688
3689 if (srcStrategy.sourceSetStrategy == SourceSetStrategy::NONMUTABLE && descriptorSet->needsAnyAliasing())
3690 continue;
3691
3692 GroupPtr srcStrategyGroup(new tcu::TestCaseGroup(testCtx, srcStrategy.name, ""));
3693
3694 for (const auto& srcType : sourceTypes)
3695 {
3696 // Skip combinations that make no sense.
3697 if (ut.updateType == UpdateType::WRITE && srcType.sourceSetType != SourceSetType::NO_SOURCE)
3698 continue;
3699
3700 if (ut.updateType == UpdateType::COPY && srcType.sourceSetType == SourceSetType::NO_SOURCE)
3701 continue;
3702
3703 GroupPtr srcTypeGroup(new tcu::TestCaseGroup(testCtx, srcType.name, ""));
3704
3705 				for (const auto& poolStrategy : poolStrategies)
3706 {
3707 GroupPtr poolStrategyGroup(new tcu::TestCaseGroup(testCtx, poolStrategy.name, ""));
3708
3709 for (const auto& moment : updateMoments)
3710 {
3713
3714 if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && hasInputAttachments)
3715 continue;
3716
3717 GroupPtr momentGroup(new tcu::TestCaseGroup(testCtx, moment.name, ""));
3718
3719 for (const auto& accessType : arrayAccessTypes)
3720 {
3721 // Skip combinations that make no sense.
3722 if (hasArrays && accessType.arrayAccessType == ArrayAccessType::NO_ARRAY)
3723 continue;
3724
3725 if (!hasArrays && accessType.arrayAccessType != ArrayAccessType::NO_ARRAY)
3726 continue;
3727
3728 GroupPtr accessTypeGroup(new tcu::TestCaseGroup(testCtx, accessType.name, ""));
3729
3730 for (const auto& testStage : stagesToTest)
3731 {
3732 const auto beginItr = std::begin(testStageList);
3733 const auto endItr = std::end(testStageList);
3734 const auto iter = std::find_if(beginItr, endItr, [testStage] (const StageAndName& ts) { return ts.testingStage == testStage; });
3735
3736 DE_ASSERT(iter != endItr);
3737 const auto& stage = *iter;
3738
3739 if (hasInputAttachments && stage.testingStage != TestingStage::FRAGMENT)
3740 continue;
3741
3742 TestParams params = {
3743 descriptorSet,
3744 ut.updateType,
3745 srcStrategy.sourceSetStrategy,
3746 srcType.sourceSetType,
3747 poolStrategy.poolMutableStrategy,
3748 moment.updateMoment,
3749 accessType.arrayAccessType,
3750 stage.testingStage,
3751 };
3752
3753 accessTypeGroup->addChild(new MutableTypesTest(testCtx, stage.name, "", params));
3754 }
3755
3756 momentGroup->addChild(accessTypeGroup.release());
3757 }
3758
3759 poolStrategyGroup->addChild(momentGroup.release());
3760 }
3761
3762 srcTypeGroup->addChild(poolStrategyGroup.release());
3763 }
3764
3765 srcStrategyGroup->addChild(srcTypeGroup.release());
3766 }
3767
3768 updateGroup->addChild(srcStrategyGroup.release());
3769 }
3770
3771 parentGroup->addChild(updateGroup.release());
3772 }
3773 }
3774
3775 }
3776
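// Returns the lowercase descriptor type name without the VK_DESCRIPTOR_TYPE_ prefix, used to name test groups.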
3777 std::string descriptorTypeStr (VkDescriptorType descriptorType)
3778 {
3779 static const auto prefixLen = std::string("VK_DESCRIPTOR_TYPE_").size();
3780 return de::toLower(de::toString(descriptorType).substr(prefixLen));
3781 }
3782
3783 static void createChildren (tcu::TestCaseGroup* testGroup);
3784
3785 static void cleanupGroup (tcu::TestCaseGroup* testGroup)
3786 {
3787 DE_UNREF(testGroup);
3788 // Destroy singleton objects.
3789 g_singletonDevice.clear();
3790 }
3791
3792 tcu::TestCaseGroup* createDescriptorMutableTests (tcu::TestContext& testCtx)
3793 {
3794 return createTestGroup(testCtx, "mutable_descriptor", "Tests for VK_VALVE_mutable_descriptor_type and VK_EXT_mutable_descriptor_type", createChildren, cleanupGroup);
3795 }
3796
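// Populates the mutable_descriptor group with every test case.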
3797 void createChildren (tcu::TestCaseGroup* mainGroup)
3798 {
3799 tcu::TestContext& testCtx = mainGroup->getTestContext();
3800
3801 const VkDescriptorType basicDescriptorTypes[] = {
3802 VK_DESCRIPTOR_TYPE_SAMPLER,
3803 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
3804 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
3805 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
3806 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
3807 VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
3808 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
3809 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
3810 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
3811 VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
3812 };
3813
3814 static const auto mandatoryTypes = getMandatoryMutableTypes();
3815
3816 using StageVec = std::vector<TestingStage>;
3817
3818 const StageVec allStages =
3819 {
3820 TestingStage::COMPUTE,
3821 TestingStage::VERTEX,
3822 TestingStage::TESS_CONTROL,
3823 TestingStage::TESS_EVAL,
3824 TestingStage::GEOMETRY,
3825 TestingStage::FRAGMENT,
3826 TestingStage::RAY_GEN,
3827 TestingStage::INTERSECTION,
3828 TestingStage::ANY_HIT,
3829 TestingStage::CLOSEST_HIT,
3830 TestingStage::MISS,
3831 TestingStage::CALLABLE,
3832 };
3833
3834 const StageVec reducedStages =
3835 {
3836 TestingStage::COMPUTE,
3837 TestingStage::VERTEX,
3838 TestingStage::FRAGMENT,
3839 TestingStage::RAY_GEN,
3840 };
3841
3842 const StageVec computeOnly =
3843 {
3844 TestingStage::COMPUTE,
3845 };
3846
3847 // Basic tests with a single mutable descriptor.
3848 {
3849 GroupPtr singleCases(new tcu::TestCaseGroup(testCtx, "single", "Basic mutable descriptor tests with a single mutable descriptor"));
3850
3851 for (const auto& descriptorType : basicDescriptorTypes)
3852 {
3853 const auto groupName = descriptorTypeStr(descriptorType);
3854 const std::vector<VkDescriptorType> actualTypes(1u, descriptorType);
3855
3856 DescriptorSetPtr setPtr;
3857 {
3858 DescriptorSet::BindingPtrVector setBindings;
3859 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, actualTypes));
3860 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3861 }
3862
3863 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3864 createMutableTestVariants(testCtx, subGroup.get(), setPtr, allStages);
3865
3866 singleCases->addChild(subGroup.release());
3867 }
3868
3869 // Case with a single descriptor that iterates several types.
3870 {
3871 DescriptorSetPtr setPtr;
3872 {
3873 DescriptorSet::BindingPtrVector setBindings;
3874 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mandatoryTypes));
3875 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3876 }
3877
3878 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "all_mandatory", ""));
3879 createMutableTestVariants(testCtx, subGroup.get(), setPtr, reducedStages);
3880
3881 singleCases->addChild(subGroup.release());
3882 }
3883
3884 		// Cases verifying that switching from any descriptor type to any other type works as expected.
3885 {
3886 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "switches", "Test switching from one to another descriptor type works as expected"));
3887
3888 for (const auto& initialDescriptorType : basicDescriptorTypes)
3889 {
3890 for (const auto& finalDescriptorType : basicDescriptorTypes)
3891 {
3892 if (initialDescriptorType == finalDescriptorType)
3893 continue;
3894
3895 const std::vector<VkDescriptorType> mutableTypes { initialDescriptorType, finalDescriptorType };
3896 DescriptorSet::BindingPtrVector setBindings;
3897 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mutableTypes));
3898
3899 DescriptorSetPtr setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3900
3901 const auto groupName = descriptorTypeStr(initialDescriptorType) + "_" + descriptorTypeStr(finalDescriptorType);
3902 GroupPtr combinationGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3903 createMutableTestVariants(testCtx, combinationGroup.get(), setPtr, reducedStages);
3904 subGroup->addChild(combinationGroup.release());
3905 }
3906 }
3907
3908 singleCases->addChild(subGroup.release());
3909 }
3910
3911 mainGroup->addChild(singleCases.release());
3912 }
3913
3914 // Cases with a single non-mutable descriptor. This provides some basic checks to verify copying to non-mutable bindings works.
3915 {
3916 GroupPtr singleNonMutableGroup (new tcu::TestCaseGroup(testCtx, "single_nonmutable", "Tests using a single non-mutable descriptor"));
3917
3918 for (const auto& descriptorType : basicDescriptorTypes)
3919 {
3920 DescriptorSet::BindingPtrVector bindings;
3921 bindings.emplace_back(new SingleBinding(descriptorType, std::vector<VkDescriptorType>()));
3922 DescriptorSetPtr descriptorSet (new DescriptorSet(bindings));
3923
3924 const auto groupName = descriptorTypeStr(descriptorType);
3925 GroupPtr descGroup (new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3926
3927 createMutableTestVariants(testCtx, descGroup.get(), descriptorSet, reducedStages);
3928 singleNonMutableGroup->addChild(descGroup.release());
3929 }
3930
3931 mainGroup->addChild(singleNonMutableGroup.release());
3932 }
3933
3934 const struct {
3935 bool unbounded;
3936 const char* name;
3937 } unboundedCases[] = {
3938 {false, "constant_size"},
3939 {true, "unbounded"},
3940 };
3941
3942 const struct {
3943 bool aliasing;
3944 const char* name;
3945 } aliasingCases[] = {
3946 {false, "noaliasing"},
3947 {true, "aliasing"},
3948 };
3949
3950 const struct {
3951 bool oneArrayOnly;
3952 bool mixNonMutable;
3953 const char* groupName;
3954 const char* groupDesc;
3955 } arrayCountGroups[] = {
3956 {true, false, "one_array", "Tests using an array of mutable descriptors"},
3957 {false, false, "multiple_arrays", "Tests using multiple arrays of mutable descriptors"},
3958 {false, true, "multiple_arrays_mixed", "Tests using multiple arrays of mutable descriptors mixed with arrays of nonmutable ones"},
3959 };
3960
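	// For each variant, build array bindings that rotate the mandatory mutable types so each binding
	// starts its iteration sequence at a different type.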
3961 for (const auto& variant : arrayCountGroups)
3962 {
3963 GroupPtr arrayGroup(new tcu::TestCaseGroup(testCtx, variant.groupName, variant.groupDesc));
3964
3965 for (const auto& unboundedCase : unboundedCases)
3966 {
3967 GroupPtr unboundedGroup(new tcu::TestCaseGroup(testCtx, unboundedCase.name, ""));
3968
3969 for (const auto& aliasingCase : aliasingCases)
3970 {
3971 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));
3972
3973 DescriptorSet::BindingPtrVector setBindings;
3974
3975 // Prepare descriptors for this test variant.
3976 for (size_t mandatoryTypesRotation = 0; mandatoryTypesRotation < mandatoryTypes.size(); ++mandatoryTypesRotation)
3977 {
3978 const bool isLastBinding = (variant.oneArrayOnly || mandatoryTypesRotation == mandatoryTypes.size() - 1u);
3979 const bool isUnbounded = (unboundedCase.unbounded && isLastBinding);
3980
3981 // Create a rotation of the mandatory types for each mutable array binding.
3982 auto mandatoryTypesVector = mandatoryTypes;
3983 {
3984 const auto beginPtr = &mandatoryTypesVector[0];
3985 const auto endPtr = beginPtr + mandatoryTypesVector.size();
3986 std::rotate(beginPtr, &mandatoryTypesVector[mandatoryTypesRotation], endPtr);
3987 }
3988
3989 std::vector<SingleBinding> arrayBindings;
3990
3991 if (aliasingCase.aliasing)
3992 {
3993 // With aliasing, the descriptor types rotate in each descriptor.
3994 for (size_t typeIdx = 0; typeIdx < mandatoryTypesVector.size(); ++typeIdx)
3995 {
3996 auto rotatedTypes = mandatoryTypesVector;
3997 const auto beginPtr = &rotatedTypes[0];
3998 const auto endPtr = beginPtr + rotatedTypes.size();
3999
4000 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
4001
4002 arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes);
4003 }
4004 }
4005 else
4006 {
4007 // Without aliasing, all descriptors use the same type at the same time.
4008 const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mandatoryTypesVector);
4009 arrayBindings.resize(mandatoryTypesVector.size(), noAliasingBinding);
4010 }
4011
4012 setBindings.emplace_back(new ArrayBinding(isUnbounded, arrayBindings));
4013
4014 if (variant.mixNonMutable && !isUnbounded)
4015 {
4016 // Create a non-mutable array binding interleaved with the other ones.
4017 const SingleBinding nonMutableBinding(mandatoryTypes[mandatoryTypesRotation], std::vector<VkDescriptorType>());
4018 std::vector<SingleBinding> nonMutableBindings(mandatoryTypes.size(), nonMutableBinding);
4019 setBindings.emplace_back(new ArrayBinding(false, nonMutableBindings));
4020 }
4021
4022 if (variant.oneArrayOnly)
4023 break;
4024 }
4025
4026 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
4027 createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);
4028
4029 unboundedGroup->addChild(aliasingGroup.release());
4030 }
4031
4032 arrayGroup->addChild(unboundedGroup.release());
4033 }
4034
4035 mainGroup->addChild(arrayGroup.release());
4036 }
4037
4038 // Cases with a single mutable binding followed by an array of mutable bindings.
4039 // The array will use a single type beyond the mandatory ones.
4040 {
4041 GroupPtr singleAndArrayGroup(new tcu::TestCaseGroup(testCtx, "single_and_array", "Tests using a single mutable binding followed by a mutable array binding"));
4042
4043 for (const auto& descriptorType : basicDescriptorTypes)
4044 {
4045 // Input attachments will not use arrays.
4046 if (descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
4047 continue;
4048
4049 if (de::contains(begin(mandatoryTypes), end(mandatoryTypes), descriptorType))
4050 continue;
4051
4052 const auto groupName = descriptorTypeStr(descriptorType);
4053 GroupPtr descTypeGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
4054
4055 for (const auto& aliasingCase : aliasingCases)
4056 {
4057 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));
4058
4059 DescriptorSet::BindingPtrVector setBindings;
4060 std::vector<SingleBinding> arrayBindings;
4061
4062 // Single mutable descriptor as the first binding.
4063 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, mandatoryTypes));
4064
4065 // Descriptor array as the second binding.
4066 auto arrayBindingDescTypes = mandatoryTypes;
4067 arrayBindingDescTypes.push_back(descriptorType);
4068
4069 if (aliasingCase.aliasing)
4070 {
4071 // With aliasing, the descriptor types rotate in each descriptor.
4072 for (size_t typeIdx = 0; typeIdx < arrayBindingDescTypes.size(); ++typeIdx)
4073 {
4074 auto rotatedTypes = arrayBindingDescTypes;
4075 const auto beginPtr = &rotatedTypes[0];
4076 const auto endPtr = beginPtr + rotatedTypes.size();
4077
4078 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
4079
4080 arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes);
4081 }
4082 }
4083 else
4084 {
4085 // Without aliasing, all descriptors use the same type at the same time.
4086 const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, arrayBindingDescTypes);
4087 arrayBindings.resize(arrayBindingDescTypes.size(), noAliasingBinding);
4088 }
4089
4090 // Second binding: array binding.
4091 setBindings.emplace_back(new ArrayBinding(false/*unbounded*/, arrayBindings));
4092
4093 // Create set and test variants.
4094 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
4095 createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);
4096
4097 descTypeGroup->addChild(aliasingGroup.release());
4098 }
4099
4100 singleAndArrayGroup->addChild(descTypeGroup.release());
4101 }
4102
4103 mainGroup->addChild(singleAndArrayGroup.release());
4104 }
4105
4106 // Cases with several mutable non-array bindings.
4107 {
4108 GroupPtr multipleGroup (new tcu::TestCaseGroup(testCtx, "multiple", "Tests using multiple mutable bindings"));
4109 GroupPtr mutableOnlyGroup (new tcu::TestCaseGroup(testCtx, "mutable_only", "Tests using only mutable descriptors"));
4110 		GroupPtr mixedGroup			(new tcu::TestCaseGroup(testCtx, "mixed", "Tests mixing mutable descriptors and non-mutable descriptors"));
4111
4112 // Each descriptor will have a different type in every iteration, like in the one_array aliasing case.
4113 for (int groupIdx = 0; groupIdx < 2; ++groupIdx)
4114 {
4115 const bool mixed = (groupIdx == 1);
4116 DescriptorSet::BindingPtrVector setBindings;
4117
4118 for (size_t typeIdx = 0; typeIdx < mandatoryTypes.size(); ++typeIdx)
4119 {
4120 auto rotatedTypes = mandatoryTypes;
4121 const auto beginPtr = &rotatedTypes[0];
4122 const auto endPtr = beginPtr + rotatedTypes.size();
4123
4124 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
4125 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_EXT, rotatedTypes));
4126
4127 // Additional non-mutable binding interleaved with the mutable ones.
4128 if (mixed)
4129 setBindings.emplace_back(new SingleBinding(rotatedTypes[0], std::vector<VkDescriptorType>()));
4130 }
4131 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
4132
4133 const auto dstGroup = (mixed ? mixedGroup.get() : mutableOnlyGroup.get());
4134 createMutableTestVariants(testCtx, dstGroup, descriptorSet, computeOnly);
4135 }
4136
4137 multipleGroup->addChild(mutableOnlyGroup.release());
4138 multipleGroup->addChild(mixedGroup.release());
4139 mainGroup->addChild(multipleGroup.release());
4140 }
4141 }
4142
4143 } // BindingModel
4144 } // vkt
4145