/*------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2016 The Khronos Group Inc.
 * Copyright (c) 2016 Imagination Technologies Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Robustness Utilities
 *//*--------------------------------------------------------------------*/

#include "vktRobustnessUtil.hpp"
#include "vktCustomInstancesDevices.hpp"
#include "vkDefs.hpp"
#include "vkImageUtil.hpp"
#include "vkPrograms.hpp"
#include "vkQueryUtil.hpp"
#include "vkRefUtil.hpp"
#include "vkTypeUtil.hpp"
#include "vkCmdUtil.hpp"
#include "vkObjUtil.hpp"
#include "vkSafetyCriticalUtil.hpp"
#include "tcuCommandLine.hpp"
#include "vkDeviceUtil.hpp"
#include "deMath.h"
#include <iomanip>
#include <limits>
#include <sstream>

namespace vkt
{
namespace robustness
{

using namespace vk;
using std::vector;
using std::string;

Move<VkDevice> createRobustBufferAccessDevice (Context& context, const VkPhysicalDeviceFeatures2* enabledFeatures2)
{
    const float queuePriority = 1.0f;

    // Create a universal queue that supports graphics and compute
    const VkDeviceQueueCreateInfo queueParams =
    {
        VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // VkStructureType sType;
        DE_NULL,                                    // const void* pNext;
        0u,                                         // VkDeviceQueueCreateFlags flags;
        context.getUniversalQueueFamilyIndex(),     // deUint32 queueFamilyIndex;
        1u,                                         // deUint32 queueCount;
        &queuePriority                              // const float* pQueuePriorities;
    };

    VkPhysicalDeviceFeatures enabledFeatures = context.getDeviceFeatures();
    enabledFeatures.robustBufferAccess = true;

    // \note Extensions in core are not explicitly enabled even though
    //       they are in the extension list advertised to tests.
    const auto& extensionPtrs = context.getDeviceCreationExtensions();

    void* pNext = (void*)enabledFeatures2;
#ifdef CTS_USES_VULKANSC
    VkDeviceObjectReservationCreateInfo memReservationInfo = context.getTestContext().getCommandLine().isSubProcess()
                                                           ? context.getResourceInterface()->getStatMax()
                                                           : resetDeviceObjectReservationCreateInfo();
    memReservationInfo.pNext = pNext;
    pNext = &memReservationInfo;

    VkPhysicalDeviceVulkanSC10Features sc10Features = createDefaultSC10Features();
    sc10Features.pNext = pNext;
    pNext = &sc10Features;

    VkPipelineCacheCreateInfo pcCI;
    std::vector<VkPipelinePoolSize> poolSizes;
    if (context.getTestContext().getCommandLine().isSubProcess())
    {
        if (context.getResourceInterface()->getCacheDataSize() > 0)
        {
            pcCI =
            {
                VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,       // VkStructureType sType;
                DE_NULL,                                            // const void* pNext;
                VK_PIPELINE_CACHE_CREATE_READ_ONLY_BIT |
                    VK_PIPELINE_CACHE_CREATE_USE_APPLICATION_STORAGE_BIT, // VkPipelineCacheCreateFlags flags;
                context.getResourceInterface()->getCacheDataSize(), // deUintptr initialDataSize;
                context.getResourceInterface()->getCacheData()      // const void* pInitialData;
            };
            memReservationInfo.pipelineCacheCreateInfoCount = 1;
            memReservationInfo.pPipelineCacheCreateInfos    = &pcCI;
        }

        poolSizes = context.getResourceInterface()->getPipelinePoolSizes();
        if (!poolSizes.empty())
        {
            memReservationInfo.pipelinePoolSizeCount = deUint32(poolSizes.size());
            memReservationInfo.pPipelinePoolSizes    = poolSizes.data();
        }
    }
#endif

    const VkDeviceCreateInfo deviceParams =
    {
        VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,           // VkStructureType sType;
        pNext,                                          // const void* pNext;
        0u,                                             // VkDeviceCreateFlags flags;
        1u,                                             // deUint32 queueCreateInfoCount;
        &queueParams,                                   // const VkDeviceQueueCreateInfo* pQueueCreateInfos;
        0u,                                             // deUint32 enabledLayerCount;
        nullptr,                                        // const char* const* ppEnabledLayerNames;
        de::sizeU32(extensionPtrs),                     // deUint32 enabledExtensionCount;
        de::dataOrNull(extensionPtrs),                  // const char* const* ppEnabledExtensionNames;
        enabledFeatures2 ? nullptr : &enabledFeatures   // const VkPhysicalDeviceFeatures* pEnabledFeatures;
    };

    // We are creating a custom device with a potentially large amount of extensions and features enabled, using the default device
    // as a reference. Some implementations may only enable certain device extensions if some instance extensions are enabled, so in
    // this case it's important to reuse the context instance when creating the device.
    const auto& vki            = context.getInstanceInterface();
    const auto  instance       = context.getInstance();
    const auto  physicalDevice = chooseDevice(vki, instance, context.getTestContext().getCommandLine());

    return createCustomDevice(context.getTestContext().getCommandLine().isValidationEnabled(), context.getPlatformInterface(),
                              instance, vki, physicalDevice, &deviceParams);
}
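
// Illustrative sketch only (not used by the tests themselves): once the robust device has
// been created, a test would typically wrap it in a Unique handle and fetch its universal
// queue through a DeviceInterface for that device. "deviceDriver" below is a placeholder
// for whatever DeviceInterface implementation the caller sets up.
//
//     const Unique<VkDevice> robustDevice (createRobustBufferAccessDevice(context, DE_NULL));
//     const VkQueue          queue        = getDeviceQueue(deviceDriver, *robustDevice,
//                                                          context.getUniversalQueueFamilyIndex(), 0u);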

bool areEqual (float a, float b)
{
    return deFloatAbs(a - b) <= 0.001f;
}

bool isValueZero (const void* valuePtr, size_t valueSizeInBytes)
{
    const deUint8* bytePtr = reinterpret_cast<const deUint8*>(valuePtr);

    for (size_t i = 0; i < valueSizeInBytes; i++)
    {
        if (bytePtr[i] != 0)
            return false;
    }

    return true;
}

bool isValueWithinBuffer (const void* buffer, VkDeviceSize bufferSize, const void* valuePtr, size_t valueSizeInBytes)
{
    const deUint8* byteBuffer = reinterpret_cast<const deUint8*>(buffer);

    if (bufferSize < ((VkDeviceSize)valueSizeInBytes))
        return false;

    for (VkDeviceSize i = 0; i <= (bufferSize - valueSizeInBytes); i++)
    {
        if (!deMemCmp(&byteBuffer[i], valuePtr, valueSizeInBytes))
            return true;
    }

    return false;
}

bool isValueWithinBufferOrZero (const void* buffer, VkDeviceSize bufferSize, const void* valuePtr, size_t valueSizeInBytes)
{
    return isValueWithinBuffer(buffer, bufferSize, valuePtr, valueSizeInBytes) || isValueZero(valuePtr, valueSizeInBytes);
}
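
// Illustrative sketch: the helpers above are meant to be combined when checking the result
// of an out-of-bounds buffer read. A typical check (variable names hypothetical) accepts the
// fetched value if it is zero or appears anywhere inside the source buffer:
//
//     if (!isValueWithinBufferOrZero(bufferData, bufferSize, &fetchedValue, sizeof(fetchedValue)))
//         return tcu::TestStatus::fail("Out-of-bounds read returned an unexpected value");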

template<typename T>
bool verifyVec4IntegerValues (const void* vecPtr)
{
    const T Tzero = T{0};
    const T Tone  = T{1};
    const T Tmax  = std::numeric_limits<T>::max();

    T values[4];
    deMemcpy(values, vecPtr, 4 * sizeof(T));
    return (values[0] == Tzero && values[1] == Tzero && values[2] == Tzero &&
            (values[3] == Tzero || values[3] == Tone || values[3] == Tmax));
}

bool verifyOutOfBoundsVec4 (const void* vecPtr, VkFormat bufferFormat)
{
    if (isUintFormat(bufferFormat))
    {
        if (bufferFormat == VK_FORMAT_R64_UINT)
            return verifyVec4IntegerValues<deUint64>(vecPtr);
        return verifyVec4IntegerValues<deUint32>(vecPtr);
    }
    else if (isIntFormat(bufferFormat))
    {
        if (bufferFormat == VK_FORMAT_R64_SINT)
            return verifyVec4IntegerValues<deInt64>(vecPtr);
        return verifyVec4IntegerValues<deInt32>(vecPtr);
    }
    else if (isFloatFormat(bufferFormat))
    {
        const float* data = (float*)vecPtr;

        return areEqual(data[0], 0.0f)
            && areEqual(data[1], 0.0f)
            && areEqual(data[2], 0.0f)
            && (areEqual(data[3], 0.0f) || areEqual(data[3], 1.0f));
    }
    else if (bufferFormat == VK_FORMAT_A2B10G10R10_UNORM_PACK32)
    {
        return *((deUint32*)vecPtr) == 0xc0000000u;
    }

    DE_ASSERT(false);
    return false;
}
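
// Illustrative example: for a four-component fetch past the end of a buffer,
// robustBufferAccess allows (0, 0, 0, x) where x is 0 or 1 for float formats and
// 0, 1 or the type maximum for integer formats, which is exactly what the checks
// above accept. A hypothetical readback such as
//
//     const float fetched[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
//
// would therefore pass verifyOutOfBoundsVec4(fetched, VK_FORMAT_R32G32B32A32_SFLOAT).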

void populateBufferWithTestValues (void* buffer, VkDeviceSize size, VkFormat format)
{
    // Assign a sequence of 32-bit values
    for (VkDeviceSize scalarNdx = 0; scalarNdx < size / 4; scalarNdx++)
    {
        const deUint32 valueIndex = (deUint32)(2 + scalarNdx); // Do not use 0 or 1

        if (isUintFormat(format))
        {
            reinterpret_cast<deUint32*>(buffer)[scalarNdx] = valueIndex;
        }
        else if (isIntFormat(format))
        {
            reinterpret_cast<deInt32*>(buffer)[scalarNdx] = -deInt32(valueIndex);
        }
        else if (isFloatFormat(format))
        {
            reinterpret_cast<float*>(buffer)[scalarNdx] = float(valueIndex);
        }
        else if (format == VK_FORMAT_A2B10G10R10_UNORM_PACK32)
        {
            const deUint32 r = ((valueIndex + 0) & ((2u << 10) - 1u));
            const deUint32 g = ((valueIndex + 1) & ((2u << 10) - 1u));
            const deUint32 b = ((valueIndex + 2) & ((2u << 10) - 1u));
            const deUint32 a = ((valueIndex + 0) & ((2u << 2) - 1u));

            reinterpret_cast<deUint32*>(buffer)[scalarNdx] = (a << 30) | (b << 20) | (g << 10) | r;
        }
        else
        {
            DE_ASSERT(false);
        }
    }
}
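
// Worked example of the sequence above: for VK_FORMAT_R32_UINT the buffer starts
// 2, 3, 4, ...; for VK_FORMAT_R32_SINT it starts -2, -3, -4, ...; and for
// VK_FORMAT_R32_SFLOAT it starts 2.0, 3.0, 4.0, ... Skipping the values 0 and 1
// lets the verification code distinguish in-bounds data from the values that robust
// out-of-bounds reads are allowed to return.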

void logValue (std::ostringstream& logMsg, const void* valuePtr, VkFormat valueFormat, size_t valueSize)
{
    if (isUintFormat(valueFormat))
    {
        logMsg << *reinterpret_cast<const deUint32*>(valuePtr);
    }
    else if (isIntFormat(valueFormat))
    {
        logMsg << *reinterpret_cast<const deInt32*>(valuePtr);
    }
    else if (isFloatFormat(valueFormat))
    {
        logMsg << *reinterpret_cast<const float*>(valuePtr);
    }
    else
    {
        const deUint8*           bytePtr     = reinterpret_cast<const deUint8*>(valuePtr);
        const std::ios::fmtflags streamFlags = logMsg.flags();

        logMsg << std::hex;
        for (size_t i = 0; i < valueSize; i++)
        {
            logMsg << " " << (deUint32)bytePtr[i];
        }
        logMsg.flags(streamFlags);
    }
}
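
// Illustrative example: for a value of an unrecognized format the fallback branch prints
// the raw bytes in hex. On a little-endian host, logging the 32-bit value 0xc0000002 with
// valueSize == 4 appends " 2 0 0 c0" to the message.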

// TestEnvironment

TestEnvironment::TestEnvironment (Context&               context,
                                  const DeviceInterface& vk,
                                  VkDevice               device,
                                  VkDescriptorSetLayout  descriptorSetLayout,
                                  VkDescriptorSet        descriptorSet)
    : m_context             (context)
    , m_device              (device)
    , m_descriptorSetLayout (descriptorSetLayout)
    , m_descriptorSet       (descriptorSet)
{
    // Create command pool
    {
        const VkCommandPoolCreateInfo commandPoolParams =
        {
            VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, // VkStructureType sType;
            DE_NULL,                                    // const void* pNext;
            VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,       // VkCommandPoolCreateFlags flags;
            context.getUniversalQueueFamilyIndex()      // deUint32 queueFamilyIndex;
        };

        m_commandPool = createCommandPool(vk, m_device, &commandPoolParams);
    }

    // Create command buffer
    {
        const VkCommandBufferAllocateInfo commandBufferAllocateInfo =
        {
            VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // VkStructureType sType;
            DE_NULL,                                        // const void* pNext;
            *m_commandPool,                                 // VkCommandPool commandPool;
            VK_COMMAND_BUFFER_LEVEL_PRIMARY,                // VkCommandBufferLevel level;
            1u,                                             // deUint32 bufferCount;
        };

        m_commandBuffer = allocateCommandBuffer(vk, m_device, &commandBufferAllocateInfo);
    }
}

VkCommandBuffer TestEnvironment::getCommandBuffer (void)
{
    return *m_commandBuffer;
}

// GraphicsEnvironment

GraphicsEnvironment::GraphicsEnvironment (Context&                context,
                                          const DeviceInterface&  vk,
                                          VkDevice                device,
                                          VkDescriptorSetLayout   descriptorSetLayout,
                                          VkDescriptorSet         descriptorSet,
                                          const VertexBindings&   vertexBindings,
                                          const VertexAttributes& vertexAttributes,
                                          const DrawConfig&       drawConfig,
                                          bool                    testPipelineRobustness)

    : TestEnvironment (context, vk, device, descriptorSetLayout, descriptorSet)
    , m_renderSize    (16, 16)
    , m_colorFormat   (VK_FORMAT_R8G8B8A8_UNORM)
{
    const auto&              vki                  = context.getInstanceInterface();
    const auto               instance             = context.getInstance();
    const deUint32           queueFamilyIndex     = context.getUniversalQueueFamilyIndex();
    const VkComponentMapping componentMappingRGBA = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
    const VkPhysicalDevice   physicalDevice       = chooseDevice(vki, instance, context.getTestContext().getCommandLine());
    SimpleAllocator          memAlloc             (vk, m_device, getPhysicalDeviceMemoryProperties(vki, physicalDevice));

    // Create color image and view
    {
        const VkImageCreateInfo colorImageParams =
        {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,                                    // VkStructureType sType;
            DE_NULL,                                                                // const void* pNext;
            0u,                                                                     // VkImageCreateFlags flags;
            VK_IMAGE_TYPE_2D,                                                       // VkImageType imageType;
            m_colorFormat,                                                          // VkFormat format;
            { (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y(), 1u },         // VkExtent3D extent;
            1u,                                                                     // deUint32 mipLevels;
            1u,                                                                     // deUint32 arrayLayers;
            VK_SAMPLE_COUNT_1_BIT,                                                  // VkSampleCountFlagBits samples;
            VK_IMAGE_TILING_OPTIMAL,                                                // VkImageTiling tiling;
            VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,  // VkImageUsageFlags usage;
            VK_SHARING_MODE_EXCLUSIVE,                                              // VkSharingMode sharingMode;
            1u,                                                                     // deUint32 queueFamilyIndexCount;
            &queueFamilyIndex,                                                      // const deUint32* pQueueFamilyIndices;
            VK_IMAGE_LAYOUT_UNDEFINED                                               // VkImageLayout initialLayout;
        };

        m_colorImage      = createImage(vk, m_device, &colorImageParams);
        m_colorImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, m_device, *m_colorImage), MemoryRequirement::Any);
        VK_CHECK(vk.bindImageMemory(m_device, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));

        const VkImageViewCreateInfo colorAttachmentViewParams =
        {
            VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,       // VkStructureType sType;
            DE_NULL,                                        // const void* pNext;
            0u,                                             // VkImageViewCreateFlags flags;
            *m_colorImage,                                  // VkImage image;
            VK_IMAGE_VIEW_TYPE_2D,                          // VkImageViewType viewType;
            m_colorFormat,                                  // VkFormat format;
            componentMappingRGBA,                           // VkComponentMapping components;
            { VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }   // VkImageSubresourceRange subresourceRange;
        };

        m_colorAttachmentView = createImageView(vk, m_device, &colorAttachmentViewParams);
    }

    // Create render pass
    m_renderPass = makeRenderPass(vk, m_device, m_colorFormat);

    // Create framebuffer
    {
        const VkFramebufferCreateInfo framebufferParams =
        {
            VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,  // VkStructureType sType;
            DE_NULL,                                    // const void* pNext;
            0u,                                         // VkFramebufferCreateFlags flags;
            *m_renderPass,                              // VkRenderPass renderPass;
            1u,                                         // deUint32 attachmentCount;
            &m_colorAttachmentView.get(),               // const VkImageView* pAttachments;
            (deUint32)m_renderSize.x(),                 // deUint32 width;
            (deUint32)m_renderSize.y(),                 // deUint32 height;
            1u                                          // deUint32 layers;
        };

        m_framebuffer = createFramebuffer(vk, m_device, &framebufferParams);
    }

    // Create pipeline layout
    {
        const VkPipelineLayoutCreateInfo pipelineLayoutParams =
        {
            VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,  // VkStructureType sType;
            DE_NULL,                                        // const void* pNext;
            0u,                                             // VkPipelineLayoutCreateFlags flags;
            1u,                                             // deUint32 setLayoutCount;
            &m_descriptorSetLayout,                         // const VkDescriptorSetLayout* pSetLayouts;
            0u,                                             // deUint32 pushConstantRangeCount;
            DE_NULL                                         // const VkPushConstantRange* pPushConstantRanges;
        };

        m_pipelineLayout = createPipelineLayout(vk, m_device, &pipelineLayoutParams);
    }

    m_vertexShaderModule   = createShaderModule(vk, m_device, m_context.getBinaryCollection().get("vertex"), 0);
    m_fragmentShaderModule = createShaderModule(vk, m_device, m_context.getBinaryCollection().get("fragment"), 0);

    // Create pipeline
    {
        const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
        {
            VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,  // VkStructureType sType;
            DE_NULL,                                                    // const void* pNext;
            0u,                                                         // VkPipelineVertexInputStateCreateFlags flags;
            (deUint32)vertexBindings.size(),                            // deUint32 vertexBindingDescriptionCount;
            vertexBindings.data(),                                      // const VkVertexInputBindingDescription* pVertexBindingDescriptions;
            (deUint32)vertexAttributes.size(),                          // deUint32 vertexAttributeDescriptionCount;
            vertexAttributes.data()                                     // const VkVertexInputAttributeDescription* pVertexAttributeDescriptions;
        };

        const std::vector<VkViewport> viewports (1, makeViewport(m_renderSize));
        const std::vector<VkRect2D>   scissors  (1, makeRect2D(m_renderSize));

        const void* pNext = DE_NULL;
#ifndef CTS_USES_VULKANSC
        VkPipelineRobustnessCreateInfoEXT pipelineRobustnessInfo = initVulkanStructure();

        if (testPipelineRobustness)
        {
            pipelineRobustnessInfo.storageBuffers = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT;
            pipelineRobustnessInfo.uniformBuffers = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT;
            pipelineRobustnessInfo.vertexInputs   = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT;
            pipelineRobustnessInfo.images         = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT;
            pNext = &pipelineRobustnessInfo;
        }
#else
        DE_UNREF(testPipelineRobustness);
#endif

        m_graphicsPipeline = makeGraphicsPipeline(vk,                                   // const DeviceInterface&                        vk
                                                  m_device,                             // const VkDevice                                device
                                                  *m_pipelineLayout,                    // const VkPipelineLayout                        pipelineLayout
                                                  *m_vertexShaderModule,                // const VkShaderModule                          vertexShaderModule
                                                  DE_NULL,                              // const VkShaderModule                          tessellationControlShaderModule
                                                  DE_NULL,                              // const VkShaderModule                          tessellationEvalShaderModule
                                                  DE_NULL,                              // const VkShaderModule                          geometryShaderModule
                                                  *m_fragmentShaderModule,              // const VkShaderModule                          fragmentShaderModule
                                                  *m_renderPass,                        // const VkRenderPass                            renderPass
                                                  viewports,                            // const std::vector<VkViewport>&                viewports
                                                  scissors,                             // const std::vector<VkRect2D>&                  scissors
                                                  VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, // const VkPrimitiveTopology                     topology
                                                  0u,                                   // const deUint32                                subpass
                                                  0u,                                   // const deUint32                                patchControlPoints
                                                  &vertexInputStateParams,              // const VkPipelineVertexInputStateCreateInfo*   vertexInputStateCreateInfo
                                                  DE_NULL,                              // const VkPipelineRasterizationStateCreateInfo* rasterizationStateCreateInfo
                                                  DE_NULL,                              // const VkPipelineMultisampleStateCreateInfo*   multisampleStateCreateInfo
                                                  DE_NULL,                              // const VkPipelineDepthStencilStateCreateInfo*  depthStencilStateCreateInfo
                                                  DE_NULL,                              // const VkPipelineColorBlendStateCreateInfo*    colorBlendStateCreateInfo
                                                  DE_NULL,                              // const VkPipelineDynamicStateCreateInfo*       dynamicStateCreateInfo
                                                  pNext);                               // void*                                         pNext
    }

    // Record commands
    {
        const VkImageMemoryBarrier imageLayoutBarrier =
        {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,         // VkStructureType sType;
            DE_NULL,                                        // const void* pNext;
            (VkAccessFlags)0,                               // VkAccessFlags srcAccessMask;
            VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,           // VkAccessFlags dstAccessMask;
            VK_IMAGE_LAYOUT_UNDEFINED,                      // VkImageLayout oldLayout;
            VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,       // VkImageLayout newLayout;
            VK_QUEUE_FAMILY_IGNORED,                        // uint32_t srcQueueFamilyIndex;
            VK_QUEUE_FAMILY_IGNORED,                        // uint32_t dstQueueFamilyIndex;
            *m_colorImage,                                  // VkImage image;
            { VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }   // VkImageSubresourceRange subresourceRange;
        };

        beginCommandBuffer(vk, *m_commandBuffer, 0u);
        {
            vk.cmdPipelineBarrier(*m_commandBuffer,
                                  VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                                  VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                  (VkDependencyFlags)0,
                                  0u, DE_NULL,
                                  0u, DE_NULL,
                                  1u, &imageLayoutBarrier);

            beginRenderPass(vk, *m_commandBuffer, *m_renderPass, *m_framebuffer, makeRect2D(0, 0, m_renderSize.x(), m_renderSize.y()), tcu::Vec4(0.0f));
            {
                const std::vector<VkDeviceSize> vertexBufferOffsets(drawConfig.vertexBuffers.size(), 0ull);

                vk.cmdBindPipeline(*m_commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
                vk.cmdBindDescriptorSets(*m_commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0, 1, &m_descriptorSet, 0, DE_NULL);
                vk.cmdBindVertexBuffers(*m_commandBuffer, 0, (deUint32)drawConfig.vertexBuffers.size(), drawConfig.vertexBuffers.data(), vertexBufferOffsets.data());

                if (drawConfig.indexBuffer == DE_NULL || drawConfig.indexCount == 0)
                {
                    vk.cmdDraw(*m_commandBuffer, drawConfig.vertexCount, drawConfig.instanceCount, 0, 0);
                }
                else
                {
                    vk.cmdBindIndexBuffer(*m_commandBuffer, drawConfig.indexBuffer, 0, VK_INDEX_TYPE_UINT32);
                    vk.cmdDrawIndexed(*m_commandBuffer, drawConfig.indexCount, drawConfig.instanceCount, 0, 0, 0);
                }
            }
            endRenderPass(vk, *m_commandBuffer);
        }
        endCommandBuffer(vk, *m_commandBuffer);
    }
}

// ComputeEnvironment

ComputeEnvironment::ComputeEnvironment (Context&               context,
                                        const DeviceInterface& vk,
                                        VkDevice               device,
                                        VkDescriptorSetLayout  descriptorSetLayout,
                                        VkDescriptorSet        descriptorSet,
                                        bool                   testPipelineRobustness)

    : TestEnvironment (context, vk, device, descriptorSetLayout, descriptorSet)
{
    // Create pipeline layout
    {
        const VkPipelineLayoutCreateInfo pipelineLayoutParams =
        {
            VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,  // VkStructureType sType;
            DE_NULL,                                        // const void* pNext;
            0u,                                             // VkPipelineLayoutCreateFlags flags;
            1u,                                             // deUint32 setLayoutCount;
            &m_descriptorSetLayout,                         // const VkDescriptorSetLayout* pSetLayouts;
            0u,                                             // deUint32 pushConstantRangeCount;
            DE_NULL                                         // const VkPushConstantRange* pPushConstantRanges;
        };

        m_pipelineLayout = createPipelineLayout(vk, m_device, &pipelineLayoutParams);
    }

    // Create compute pipeline
    {
        m_computeShaderModule = createShaderModule(vk, m_device, m_context.getBinaryCollection().get("compute"), 0);

        const VkPipelineShaderStageCreateInfo computeStageParams =
        {
            VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,    // VkStructureType sType;
            DE_NULL,                                                // const void* pNext;
            0u,                                                     // VkPipelineShaderStageCreateFlags flags;
            VK_SHADER_STAGE_COMPUTE_BIT,                            // VkShaderStageFlagBits stage;
            *m_computeShaderModule,                                 // VkShaderModule module;
            "main",                                                 // const char* pName;
            DE_NULL,                                                // const VkSpecializationInfo* pSpecializationInfo;
        };

        const void* pNext = DE_NULL;
#ifndef CTS_USES_VULKANSC
        VkPipelineRobustnessCreateInfoEXT pipelineRobustnessInfo = initVulkanStructure();

        if (testPipelineRobustness)
        {
            pipelineRobustnessInfo.storageBuffers = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT;
            pipelineRobustnessInfo.uniformBuffers = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT;
            pipelineRobustnessInfo.vertexInputs   = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT;
            pipelineRobustnessInfo.images         = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT;
            pNext = &pipelineRobustnessInfo;
        }
#else
        DE_UNREF(testPipelineRobustness);
#endif

        const VkComputePipelineCreateInfo computePipelineParams =
        {
            VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, // VkStructureType sType;
            pNext,                                          // const void* pNext;
            0u,                                             // VkPipelineCreateFlags flags;
            computeStageParams,                             // VkPipelineShaderStageCreateInfo stage;
            *m_pipelineLayout,                              // VkPipelineLayout layout;
            DE_NULL,                                        // VkPipeline basePipelineHandle;
            0u                                              // deInt32 basePipelineIndex;
        };

        m_computePipeline = createComputePipeline(vk, m_device, DE_NULL, &computePipelineParams);
    }

    // Record commands
    {
        beginCommandBuffer(vk, *m_commandBuffer, 0u);
        vk.cmdBindPipeline(*m_commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_computePipeline);
        vk.cmdBindDescriptorSets(*m_commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0, 1, &m_descriptorSet, 0, DE_NULL);
        vk.cmdDispatch(*m_commandBuffer, 32, 32, 1);
        endCommandBuffer(vk, *m_commandBuffer);
    }
}
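
// Illustrative sketch only: both TestEnvironment subclasses leave behind a fully recorded
// primary command buffer. A test typically submits it and waits for completion before
// verifying results (the names "vk", "device", "queue" and "testEnvironment" stand for
// the caller's own handles):
//
//     submitCommandsAndWait(vk, device, queue, testEnvironment->getCommandBuffer());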

} // robustness
} // vkt