1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2016 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Synchronization operation abstraction
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktSynchronizationOperation.hpp"
25 #include "synchronization/vktSynchronizationUtil.hpp"
26 #include "vkDefs.hpp"
27 #include "vktTestCase.hpp"
28 #include "vktTestCaseUtil.hpp"
29 #include "vkRef.hpp"
30 #include "vkRefUtil.hpp"
31 #include "vkMemUtil.hpp"
32 #include "vkBarrierUtil.hpp"
33 #include "vkQueryUtil.hpp"
34 #include "vkTypeUtil.hpp"
35 #include "vkImageUtil.hpp"
36 #include "vkBuilderUtil.hpp"
37 #include "vkCmdUtil.hpp"
38 #include "vkObjUtil.hpp"
39 #include "deUniquePtr.hpp"
40 #include "tcuTestLog.hpp"
41 #include "tcuTextureUtil.hpp"
42 #include <vector>
43 #include <sstream>
44
45 namespace vkt
46 {
47 namespace synchronization
48 {
49 namespace
50 {
51 using namespace vk;
52
53 enum Constants
54 {
55 MAX_IMAGE_DIMENSION_2D = 0x1000u,
56 MAX_UBO_RANGE = 0x4000u,
57 MAX_UPDATE_BUFFER_SIZE = 0x10000u,
58 };
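// These values match Vulkan's minimum guaranteed limits (maxImageDimension2D >= 4096,
// maxUniformBufferRange >= 16384, and the 65536-byte dataSize cap of vkCmdUpdateBuffer),
// so resources sized against them can be created on any conformant implementation.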
59
60 enum BufferType
61 {
62 BUFFER_TYPE_UNIFORM,
63 BUFFER_TYPE_STORAGE,
64 BUFFER_TYPE_UNIFORM_TEXEL,
65 };
66
67 enum AccessMode
68 {
69 ACCESS_MODE_READ,
70 ACCESS_MODE_WRITE,
71 };
72
73 enum PipelineType
74 {
75 PIPELINE_TYPE_GRAPHICS,
76 PIPELINE_TYPE_COMPUTE,
77 };
78
79 static const char *const s_perVertexBlock = "gl_PerVertex {\n"
80 " vec4 gl_Position;\n"
81 "}";
82
83 static const SyncInfo emptySyncInfo = {
84 0, // VkPipelineStageFlags stageMask;
85 0, // VkAccessFlags accessMask;
86 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
87 };
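// Returned by operations that need no execution or memory dependency on the given side;
// for example, a pure transfer write below reports emptySyncInfo as its input synchronization.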
88
89 std::string getShaderStageName(VkShaderStageFlagBits stage)
90 {
91 switch (stage)
92 {
93 default:
94 DE_FATAL("Unhandled stage!");
95 return "";
96 case VK_SHADER_STAGE_COMPUTE_BIT:
97 return "compute";
98 case VK_SHADER_STAGE_FRAGMENT_BIT:
99 return "fragment";
100 case VK_SHADER_STAGE_VERTEX_BIT:
101 return "vertex";
102 case VK_SHADER_STAGE_GEOMETRY_BIT:
103 return "geometry";
104 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
105 return "tess_control";
106 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
107 return "tess_eval";
108 }
109 }
110
111 //! A pipeline that can be embedded inside an operation.
112 class Pipeline
113 {
114 public:
115 virtual ~Pipeline(void)
116 {
117 }
118 virtual void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer,
119 const VkDescriptorSet descriptorSet) = 0;
120 };
121
122 //! Vertex data that covers the whole viewport with two triangles.
123 class VertexGrid
124 {
125 public:
126 VertexGrid(OperationContext &context)
127 : m_vertexFormat(VK_FORMAT_R32G32B32A32_SFLOAT)
128 , m_vertexStride(tcu::getPixelSize(mapVkFormat(m_vertexFormat)))
129 {
130 const DeviceInterface &vk = context.getDeviceInterface();
131 const VkDevice device = context.getDevice();
132 Allocator &allocator = context.getAllocator();
133
134 // Vertex positions
135 {
136 m_vertexData.push_back(tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f));
137 m_vertexData.push_back(tcu::Vec4(-1.0f, 1.0f, 0.0f, 1.0f));
138 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
139
140 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
141 m_vertexData.push_back(tcu::Vec4(1.0f, -1.0f, 0.0f, 1.0f));
142 m_vertexData.push_back(tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f));
143 }
144
145 {
146 const VkDeviceSize vertexDataSizeBytes = m_vertexData.size() * sizeof(m_vertexData[0]);
147
148 m_vertexBuffer = de::MovePtr<Buffer>(new Buffer(
149 vk, device, allocator, makeBufferCreateInfo(vertexDataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
150 MemoryRequirement::HostVisible));
151 DE_ASSERT(sizeof(m_vertexData[0]) == m_vertexStride);
152
153 {
154 const Allocation &alloc = m_vertexBuffer->getAllocation();
155
156 deMemcpy(alloc.getHostPtr(), &m_vertexData[0], static_cast<std::size_t>(vertexDataSizeBytes));
157 flushAlloc(vk, device, alloc);
158 }
159 }
160
161 // Indices
162 {
163 const VkDeviceSize indexBufferSizeBytes = sizeof(uint32_t) * m_vertexData.size();
164 const uint32_t numIndices = static_cast<uint32_t>(m_vertexData.size());
165
166 m_indexBuffer = de::MovePtr<Buffer>(new Buffer(
167 vk, device, allocator, makeBufferCreateInfo(indexBufferSizeBytes, VK_BUFFER_USAGE_INDEX_BUFFER_BIT),
168 MemoryRequirement::HostVisible));
169
170 {
171 const Allocation &alloc = m_indexBuffer->getAllocation();
172 uint32_t *const pData = static_cast<uint32_t *>(alloc.getHostPtr());
173
174 for (uint32_t i = 0; i < numIndices; ++i)
175 pData[i] = i;
176
177 flushAlloc(vk, device, alloc);
178 }
179 }
180 }
181
182 VkFormat getVertexFormat(void) const
183 {
184 return m_vertexFormat;
185 }
186 uint32_t getVertexStride(void) const
187 {
188 return m_vertexStride;
189 }
190 VkIndexType getIndexType(void) const
191 {
192 return VK_INDEX_TYPE_UINT32;
193 }
194 uint32_t getNumVertices(void) const
195 {
196 return static_cast<uint32_t>(m_vertexData.size());
197 }
198 uint32_t getNumIndices(void) const
199 {
200 return getNumVertices();
201 }
202 VkBuffer getVertexBuffer(void) const
203 {
204 return **m_vertexBuffer;
205 }
206 VkBuffer getIndexBuffer(void) const
207 {
208 return **m_indexBuffer;
209 }
210
211 private:
212 const VkFormat m_vertexFormat;
213 const uint32_t m_vertexStride;
214 std::vector<tcu::Vec4> m_vertexData;
215 de::MovePtr<Buffer> m_vertexBuffer;
216 de::MovePtr<Buffer> m_indexBuffer;
217 };
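// Sketch of how a VertexGrid is typically consumed when recording a full-screen draw
// (the concrete bindings happen in the pipeline implementations further below):
//
//   const VkBuffer     vertexBuffer = grid.getVertexBuffer();
//   const VkDeviceSize offset       = 0ull;
//   vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &offset);
//   vk.cmdBindIndexBuffer(cmdBuffer, grid.getIndexBuffer(), 0ull, grid.getIndexType());
//   vk.cmdDrawIndexed(cmdBuffer, grid.getNumIndices(), 1u, 0u, 0, 0u);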
218
219 //! Add flags for all shader stages required to support a particular stage (e.g. fragment requires vertex as well).
220 VkShaderStageFlags getRequiredStages(const VkShaderStageFlagBits stage)
221 {
222 VkShaderStageFlags flags = 0;
223
224 DE_ASSERT(stage == VK_SHADER_STAGE_COMPUTE_BIT || (stage & VK_SHADER_STAGE_COMPUTE_BIT) == 0);
225
226 if (stage & VK_SHADER_STAGE_ALL_GRAPHICS)
227 flags |= VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
228
229 if (stage & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
230 flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
231
232 if (stage & VK_SHADER_STAGE_GEOMETRY_BIT)
233 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
234
235 if (stage & VK_SHADER_STAGE_COMPUTE_BIT)
236 flags |= VK_SHADER_STAGE_COMPUTE_BIT;
237
238 return flags;
239 }
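// For example, requesting VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT yields vertex,
// fragment and both tessellation bits, since the graphics pipelines built here always
// pair tessellation with at least a vertex and a fragment stage.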
240
241 //! Check that SSBO read/write is available and that all shader stages are supported.
242 void requireFeaturesForSSBOAccess(OperationContext &context, const VkShaderStageFlags usedStages)
243 {
244 const InstanceInterface &vki = context.getInstanceInterface();
245 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
246 FeatureFlags flags = (FeatureFlags)0;
247
248 if (usedStages & VK_SHADER_STAGE_FRAGMENT_BIT)
249 flags |= FEATURE_FRAGMENT_STORES_AND_ATOMICS;
250
251 if (usedStages & (VK_SHADER_STAGE_ALL_GRAPHICS & (~VK_SHADER_STAGE_FRAGMENT_BIT)))
252 flags |= FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS;
253
254 if (usedStages & VK_SHADER_STAGE_GEOMETRY_BIT)
255 flags |= FEATURE_GEOMETRY_SHADER;
256
257 if (usedStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
258 flags |= FEATURE_TESSELLATION_SHADER;
259
260 requireFeatures(vki, physDevice, flags);
261 }
262
263 Data getHostBufferData(const OperationContext &context, const Buffer &hostBuffer, const VkDeviceSize size)
264 {
265 const DeviceInterface &vk = context.getDeviceInterface();
266 const VkDevice device = context.getDevice();
267 const Allocation &alloc = hostBuffer.getAllocation();
268 const Data data = {
269 static_cast<std::size_t>(size), // std::size_t size;
270 static_cast<uint8_t *>(alloc.getHostPtr()), // const uint8_t* data;
271 };
272
273 invalidateAlloc(vk, device, alloc);
274
275 return data;
276 }
277
278 void setHostBufferData(const OperationContext &context, const Buffer &hostBuffer, const Data &data)
279 {
280 const DeviceInterface &vk = context.getDeviceInterface();
281 const VkDevice device = context.getDevice();
282 const Allocation &alloc = hostBuffer.getAllocation();
283
284 deMemcpy(alloc.getHostPtr(), data.data, data.size);
285 flushAlloc(vk, device, alloc);
286 }
287
288 void assertValidShaderStage(const VkShaderStageFlagBits stage)
289 {
290 switch (stage)
291 {
292 case VK_SHADER_STAGE_VERTEX_BIT:
293 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
294 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
295 case VK_SHADER_STAGE_GEOMETRY_BIT:
296 case VK_SHADER_STAGE_FRAGMENT_BIT:
297 case VK_SHADER_STAGE_COMPUTE_BIT:
298 // OK
299 break;
300
301 default:
302 DE_FATAL("Invalid shader stage");
303 break;
304 }
305 }
306
307 VkPipelineStageFlags pipelineStageFlagsFromShaderStageFlagBits(const VkShaderStageFlagBits shaderStage)
308 {
309 switch (shaderStage)
310 {
311 case VK_SHADER_STAGE_VERTEX_BIT:
312 return VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR;
313 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
314 return VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR;
315 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
316 return VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR;
317 case VK_SHADER_STAGE_GEOMETRY_BIT:
318 return VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR;
319 case VK_SHADER_STAGE_FRAGMENT_BIT:
320 return VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
321 case VK_SHADER_STAGE_COMPUTE_BIT:
322 return VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR;
323
324 // Other usages are probably an error, so flag that.
325 default:
326 DE_FATAL("Invalid shader stage");
327 return (VkPipelineStageFlags)0;
328 }
329 }
330
331 //! Fill destination buffer with a repeating pattern.
332 void fillPattern(void *const pData, const VkDeviceSize size, bool useIndexPattern = false)
333 {
334 // There are two pattern options - most operations use primePattern,
335 // indexPattern is only needed for testing the vertex index buffer.
336 static const uint8_t primePattern[] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
337 static const uint32_t indexPattern[] = {0, 1, 2, 3, 4};
338
339 const uint8_t *pattern = (useIndexPattern ? reinterpret_cast<const uint8_t *>(indexPattern) : primePattern);
340 const uint32_t patternSize = static_cast<uint32_t>(
341 useIndexPattern ? DE_LENGTH_OF_ARRAY(indexPattern) * sizeof(uint32_t) : DE_LENGTH_OF_ARRAY(primePattern));
342 uint8_t *const pBytes = static_cast<uint8_t *>(pData);
343
344 for (uint32_t i = 0; i < size; ++i)
345 pBytes[i] = pattern[i % patternSize];
346 }
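// Example: fillPattern(ptr, 5) writes the bytes {2, 3, 5, 7, 11}; with useIndexPattern
// set, the destination receives the raw bytes of the uint32 indices {0, 1, 2, 3, 4}
// repeated, so it can be consumed directly as a VK_INDEX_TYPE_UINT32 index buffer.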
347
348 //! Get size in bytes of a pixel buffer with given extent.
349 VkDeviceSize getPixelBufferSize(const VkFormat format, const VkExtent3D &extent)
350 {
351 const int pixelSize = tcu::getPixelSize(mapVkFormat(format));
352 return (pixelSize * extent.width * extent.height * extent.depth);
353 }
354
355 //! Determine the size of a 2D image that can hold sizeBytes data.
356 VkExtent3D get2DImageExtentWithSize(const VkDeviceSize sizeBytes, const uint32_t pixelSize)
357 {
358 const uint32_t size = static_cast<uint32_t>(sizeBytes / pixelSize);
359
360 DE_ASSERT(size <= MAX_IMAGE_DIMENSION_2D * MAX_IMAGE_DIMENSION_2D);
361
362 return makeExtent3D(std::min(size, static_cast<uint32_t>(MAX_IMAGE_DIMENSION_2D)),
363 (size / MAX_IMAGE_DIMENSION_2D) + (size % MAX_IMAGE_DIMENSION_2D != 0 ? 1u : 0u), 1u);
364 }
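// Example with a 4-byte pixel format: 8 KiB of data (2048 pixels) maps to a 2048x1
// image, while 5000 pixels map to 4096x2; the extra row absorbs any remainder, so the
// image may hold slightly more texels than the buffer provides.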
365
366 VkClearValue makeClearValue(const VkFormat format)
367 {
368 if (isDepthStencilFormat(format))
369 return makeClearValueDepthStencil(0.4f, 21u);
370 else
371 {
372 if (isIntFormat(format) || isUintFormat(format))
373 return makeClearValueColorU32(8u, 16u, 24u, 32u);
374 else
375 return makeClearValueColorF32(0.25f, 0.49f, 0.75f, 1.0f);
376 }
377 }
378
379 void clearPixelBuffer(tcu::PixelBufferAccess &pixels, const VkClearValue &clearValue)
380 {
381 const tcu::TextureFormat format = pixels.getFormat();
382 const tcu::TextureChannelClass channelClass = tcu::getTextureChannelClass(format.type);
383
384 if (format.order == tcu::TextureFormat::D)
385 {
386 for (int z = 0; z < pixels.getDepth(); z++)
387 for (int y = 0; y < pixels.getHeight(); y++)
388 for (int x = 0; x < pixels.getWidth(); x++)
389 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
390 }
391 else if (format.order == tcu::TextureFormat::S)
392 {
393 for (int z = 0; z < pixels.getDepth(); z++)
394 for (int y = 0; y < pixels.getHeight(); y++)
395 for (int x = 0; x < pixels.getWidth(); x++)
396 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
397 }
398 else if (format.order == tcu::TextureFormat::DS)
399 {
400 for (int z = 0; z < pixels.getDepth(); z++)
401 for (int y = 0; y < pixels.getHeight(); y++)
402 for (int x = 0; x < pixels.getWidth(); x++)
403 {
404 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
405 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
406 }
407 }
408 else if (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER ||
409 channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
410 {
411 const tcu::UVec4 color(clearValue.color.uint32);
412
413 for (int z = 0; z < pixels.getDepth(); z++)
414 for (int y = 0; y < pixels.getHeight(); y++)
415 for (int x = 0; x < pixels.getWidth(); x++)
416 pixels.setPixel(color, x, y, z);
417 }
418 else
419 {
420 const tcu::Vec4 color(clearValue.color.float32);
421
422 for (int z = 0; z < pixels.getDepth(); z++)
423 for (int y = 0; y < pixels.getHeight(); y++)
424 for (int x = 0; x < pixels.getWidth(); x++)
425 pixels.setPixel(color, x, y, z);
426 }
427 }
428
429 VkImageViewType getImageViewType(const VkImageType imageType)
430 {
431 switch (imageType)
432 {
433 case VK_IMAGE_TYPE_1D:
434 return VK_IMAGE_VIEW_TYPE_1D;
435 case VK_IMAGE_TYPE_2D:
436 return VK_IMAGE_VIEW_TYPE_2D;
437 case VK_IMAGE_TYPE_3D:
438 return VK_IMAGE_VIEW_TYPE_3D;
439
440 default:
441 DE_FATAL("Unknown image type");
442 return VK_IMAGE_VIEW_TYPE_LAST;
443 }
444 }
445
446 std::string getShaderImageType(const VkFormat format, const VkImageType imageType)
447 {
448 const tcu::TextureFormat texFormat = mapVkFormat(format);
449 const std::string formatPart =
450 tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER ? "u" :
451 tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER ? "i" :
452 "";
453 switch (imageType)
454 {
455 case VK_IMAGE_TYPE_1D:
456 return formatPart + "image1D";
457 case VK_IMAGE_TYPE_2D:
458 return formatPart + "image2D";
459 case VK_IMAGE_TYPE_3D:
460 return formatPart + "image3D";
461
462 default:
463 DE_FATAL("Unknown image type");
464 return "";
465 }
466 }
467
468 std::string getShaderImageFormatQualifier(const VkFormat format)
469 {
470 const tcu::TextureFormat texFormat = mapVkFormat(format);
471 const char *orderPart = nullptr;
472 const char *typePart = nullptr;
473
474 switch (texFormat.order)
475 {
476 case tcu::TextureFormat::R:
477 orderPart = "r";
478 break;
479 case tcu::TextureFormat::RG:
480 orderPart = "rg";
481 break;
482 case tcu::TextureFormat::RGB:
483 orderPart = "rgb";
484 break;
485 case tcu::TextureFormat::RGBA:
486 orderPart = "rgba";
487 break;
488
489 default:
490 DE_FATAL("Unsupported texture channel order");
491 break;
492 }
493
494 switch (texFormat.type)
495 {
496 case tcu::TextureFormat::FLOAT:
497 typePart = "32f";
498 break;
499 case tcu::TextureFormat::HALF_FLOAT:
500 typePart = "16f";
501 break;
502
503 case tcu::TextureFormat::UNSIGNED_INT32:
504 typePart = "32ui";
505 break;
506 case tcu::TextureFormat::UNSIGNED_INT16:
507 typePart = "16ui";
508 break;
509 case tcu::TextureFormat::UNSIGNED_INT8:
510 typePart = "8ui";
511 break;
512
513 case tcu::TextureFormat::SIGNED_INT32:
514 typePart = "32i";
515 break;
516 case tcu::TextureFormat::SIGNED_INT16:
517 typePart = "16i";
518 break;
519 case tcu::TextureFormat::SIGNED_INT8:
520 typePart = "8i";
521 break;
522
523 case tcu::TextureFormat::UNORM_INT16:
524 typePart = "16";
525 break;
526 case tcu::TextureFormat::UNORM_INT8:
527 typePart = "8";
528 break;
529
530 case tcu::TextureFormat::SNORM_INT16:
531 typePart = "16_snorm";
532 break;
533 case tcu::TextureFormat::SNORM_INT8:
534 typePart = "8_snorm";
535 break;
536
537 default:
538 DE_FATAL("Unsupported texture channel type");
539 break;
540 }
541
542 return std::string(orderPart) + typePart;
543 }
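// Example: VK_FORMAT_R32G32B32A32_SFLOAT maps to "rgba32f" and VK_FORMAT_R8_UINT to
// "r8ui", matching the GLSL image format layout qualifiers used by the generated shaders.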
544
545 namespace FillUpdateBuffer
546 {
547
548 enum BufferOp
549 {
550 BUFFER_OP_FILL,
551 BUFFER_OP_UPDATE,
552 BUFFER_OP_UPDATE_WITH_INDEX_PATTERN,
553 };
554
555 class Implementation : public Operation
556 {
557 public:
558 Implementation(OperationContext &context, Resource &resource, const BufferOp bufferOp)
559 : m_context(context)
560 , m_resource(resource)
561 , m_fillValue(0x13)
562 , m_bufferOp(bufferOp)
563 {
564 DE_ASSERT((m_resource.getBuffer().size % sizeof(uint32_t)) == 0);
565 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_resource.getBuffer().size <= MAX_UPDATE_BUFFER_SIZE);
566
567 m_data.resize(static_cast<size_t>(m_resource.getBuffer().size));
568
569 if (m_bufferOp == BUFFER_OP_FILL)
570 {
571 const std::size_t size = m_data.size() / sizeof(m_fillValue);
572 uint32_t *const pData = reinterpret_cast<uint32_t *>(&m_data[0]);
573
574 for (uint32_t i = 0; i < size; ++i)
575 pData[i] = m_fillValue;
576 }
577 else if (m_bufferOp == BUFFER_OP_UPDATE)
578 {
579 fillPattern(&m_data[0], m_data.size());
580 }
581 else if (m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN)
582 {
583 fillPattern(&m_data[0], m_data.size(), true);
584 }
585 }
586
587 void recordCommands(const VkCommandBuffer cmdBuffer)
588 {
589 const DeviceInterface &vk = m_context.getDeviceInterface();
590
591 if (m_bufferOp == BUFFER_OP_FILL)
592 {
593 vk.cmdFillBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset,
594 m_resource.getBuffer().size, m_fillValue);
595
596 SynchronizationWrapperPtr synchronizationWrapper =
597 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
598 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
599 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
600 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
601 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
602 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
603 m_resource.getBuffer().handle, // VkBuffer buffer
604 0u, // VkDeviceSize offset
605 m_resource.getBuffer().size // VkDeviceSize size
606 );
607 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
608 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
609 }
610 else
611 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset,
612 m_resource.getBuffer().size, reinterpret_cast<uint32_t *>(&m_data[0]));
613 }
614
615 SyncInfo getInSyncInfo(void) const
616 {
617 return emptySyncInfo;
618 }
619
620 SyncInfo getOutSyncInfo(void) const
621 {
622 const SyncInfo syncInfo = {
623 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
624 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
625 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
626 };
627
628 return syncInfo;
629 }
630
631 Data getData(void) const
632 {
633 const Data data = {
634 m_data.size(), // std::size_t size;
635 &m_data[0], // const uint8_t* data;
636 };
637 return data;
638 }
639
640 void setData(const Data &data)
641 {
642 deMemcpy(&m_data[0], data.data, data.size);
643 }
644
645 private:
646 OperationContext &m_context;
647 Resource &m_resource;
648 std::vector<uint8_t> m_data;
649 const uint32_t m_fillValue;
650 const BufferOp m_bufferOp;
651 };
652
653 class Support : public OperationSupport
654 {
655 public:
656 Support(const ResourceDescription &resourceDesc, const BufferOp bufferOp)
657 : m_resourceDesc(resourceDesc)
658 , m_bufferOp(bufferOp)
659 {
660 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_bufferOp == BUFFER_OP_UPDATE ||
661 m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN);
662 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
663 }
664
665 uint32_t getInResourceUsageFlags(void) const
666 {
667 return 0;
668 }
669
670 uint32_t getOutResourceUsageFlags(void) const
671 {
672 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
673 }
674
675 VkQueueFlags getQueueFlags(const OperationContext &context) const
676 {
677 if (m_bufferOp == BUFFER_OP_FILL && !context.isDeviceFunctionalitySupported("VK_KHR_maintenance1"))
678 {
679 return VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT;
680 }
681
682 return VK_QUEUE_TRANSFER_BIT;
683 }
684
685 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
686 {
687 return de::MovePtr<Operation>(new Implementation(context, resource, m_bufferOp));
688 }
689
690 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
691 {
692 DE_ASSERT(0);
693 return de::MovePtr<Operation>();
694 }
695
696 private:
697 const ResourceDescription m_resourceDesc;
698 const BufferOp m_bufferOp;
699 };
700
701 } // namespace FillUpdateBuffer
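// The remaining operation namespaces follow the same pattern as FillUpdateBuffer: an
// OperationSupport subclass declares resource usage and queue requirements up front,
// and build() creates the Operation that records commands and reports its expected
// synchronization scopes. A rough sketch of how the framework drives such a pair:
//
//   de::MovePtr<Operation> op = support.build(context, resource);
//   op->recordCommands(cmdBuffer);
//   const SyncInfo write = op->getOutSyncInfo(); // stage/access/layout the reader must wait on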
702
703 namespace CopyBuffer
704 {
705
706 class Implementation : public Operation
707 {
708 public:
709 Implementation(OperationContext &context, Resource &resource, const AccessMode mode)
710 : m_context(context)
711 , m_resource(resource)
712 , m_mode(mode)
713 {
714 const DeviceInterface &vk = m_context.getDeviceInterface();
715 const VkDevice device = m_context.getDevice();
716 Allocator &allocator = m_context.getAllocator();
717 const VkBufferUsageFlags hostBufferUsage =
718 (m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
719
720 m_hostBuffer = de::MovePtr<Buffer>(
721 new Buffer(vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, hostBufferUsage),
722 MemoryRequirement::HostVisible));
723
724 const Allocation &alloc = m_hostBuffer->getAllocation();
725
726 if (m_mode == ACCESS_MODE_READ)
727 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
728 else
729 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
730
731 flushAlloc(vk, device, alloc);
732 }
733
734 void recordCommands(const VkCommandBuffer cmdBuffer)
735 {
736 const DeviceInterface &vk = m_context.getDeviceInterface();
737 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_resource.getBuffer().size);
738 SynchronizationWrapperPtr synchronizationWrapper =
739 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
740
741 if (m_mode == ACCESS_MODE_READ)
742 {
743 vk.cmdCopyBuffer(cmdBuffer, m_resource.getBuffer().handle, **m_hostBuffer, 1u, &copyRegion);
744
745 // Insert a barrier so copied data is available to the host
746 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
747 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
748 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
749 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
750 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
751 **m_hostBuffer, // VkBuffer buffer
752 0u, // VkDeviceSize offset
753 m_resource.getBuffer().size // VkDeviceSize size
754 );
755 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
756 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
757 }
758 else
759 {
760 // Insert a barrier so buffer data is available to the device
761 //const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
762 // VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
763 // VK_ACCESS_2_HOST_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
764 // VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
765 // VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
766 // **m_hostBuffer, // VkBuffer buffer
767 // 0u, // VkDeviceSize offset
768 // m_resource.getBuffer().size // VkDeviceSize size
769 //);
770 //VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
771 //synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
772
773 vk.cmdCopyBuffer(cmdBuffer, **m_hostBuffer, m_resource.getBuffer().handle, 1u, &copyRegion);
774 }
775 }
776
777 SyncInfo getInSyncInfo(void) const
778 {
779 const VkAccessFlags access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : 0);
780 const SyncInfo syncInfo = {
781 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
782 access, // VkAccessFlags accessMask;
783 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
784 };
785 return syncInfo;
786 }
787
788 SyncInfo getOutSyncInfo(void) const
789 {
790 const VkAccessFlags access = (m_mode == ACCESS_MODE_WRITE ? VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR : 0);
791 const SyncInfo syncInfo = {
792 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
793 access, // VkAccessFlags accessMask;
794 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
795 };
796 return syncInfo;
797 }
798
799 Data getData(void) const
800 {
801 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
802 }
803
804 void setData(const Data &data)
805 {
806 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
807 setHostBufferData(m_context, *m_hostBuffer, data);
808 }
809
810 private:
811 OperationContext &m_context;
812 Resource &m_resource;
813 const AccessMode m_mode;
814 de::MovePtr<Buffer> m_hostBuffer;
815 };
816
817 class Support : public OperationSupport
818 {
819 public:
820 Support(const ResourceDescription &resourceDesc, const AccessMode mode) : m_mode(mode)
821 {
822 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
823 DE_UNREF(resourceDesc);
824 }
825
826 uint32_t getInResourceUsageFlags(void) const
827 {
828 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
829 }
830
831 uint32_t getOutResourceUsageFlags(void) const
832 {
833 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
834 }
835
836 VkQueueFlags getQueueFlags(const OperationContext &context) const
837 {
838 DE_UNREF(context);
839 return VK_QUEUE_TRANSFER_BIT;
840 }
841
842 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
843 {
844 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
845 }
846
847 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
848 {
849 DE_ASSERT(0);
850 return de::MovePtr<Operation>();
851 }
852
853 private:
854 const AccessMode m_mode;
855 };
856
857 class CopyImplementation : public Operation
858 {
859 public:
860 CopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
861 : m_context(context)
862 , m_inResource(inResource)
863 , m_outResource(outResource)
864 {
865 }
866
867 void recordCommands(const VkCommandBuffer cmdBuffer)
868 {
869 const DeviceInterface &vk = m_context.getDeviceInterface();
870 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_inResource.getBuffer().size);
871
872 vk.cmdCopyBuffer(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getBuffer().handle, 1u, &copyRegion);
873 }
874
875 SyncInfo getInSyncInfo(void) const
876 {
877 const SyncInfo syncInfo = {
878 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
879 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
880 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
881 };
882 return syncInfo;
883 }
884
885 SyncInfo getOutSyncInfo(void) const
886 {
887 const SyncInfo syncInfo = {
888 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
889 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
890 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
891 };
892 return syncInfo;
893 }
894
895 Data getData(void) const
896 {
897 Data data = {0, nullptr};
898 return data;
899 }
900
901 void setData(const Data &)
902 {
903 DE_ASSERT(0);
904 }
905
906 private:
907 OperationContext &m_context;
908 Resource &m_inResource;
909 Resource &m_outResource;
910 de::MovePtr<Buffer> m_hostBuffer;
911 };
912
913 class CopySupport : public OperationSupport
914 {
915 public:
916 CopySupport(const ResourceDescription &resourceDesc)
917 {
918 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
919 DE_UNREF(resourceDesc);
920 }
921
922 uint32_t getInResourceUsageFlags(void) const
923 {
924 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
925 }
926
927 uint32_t getOutResourceUsageFlags(void) const
928 {
929 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
930 }
931
932 VkQueueFlags getQueueFlags(const OperationContext &context) const
933 {
934 DE_UNREF(context);
935 return VK_QUEUE_TRANSFER_BIT;
936 }
937
938 de::MovePtr<Operation> build(OperationContext &, Resource &) const
939 {
940 DE_ASSERT(0);
941 return de::MovePtr<Operation>();
942 }
943
944 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
945 {
946 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
947 }
948 };
949
950 } // namespace CopyBuffer
951
952 namespace CopyBlitResolveImage
953 {
954
955 class ImplementationBase : public Operation
956 {
957 public:
958 //! Copy/Blit/Resolve etc. operation
959 virtual void recordCopyCommand(const VkCommandBuffer cmdBuffer) = 0;
960
961 //! Get the source stage mask used during reads - added to exercise the new synchronization2 stage masks
962 virtual VkPipelineStageFlags2KHR getReadSrcStageMask() const = 0;
963
964 ImplementationBase(OperationContext &context, Resource &resource, const AccessMode mode)
965 : m_context(context)
966 , m_resource(resource)
967 , m_mode(mode)
968 , m_bufferSize(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
969 {
970 const DeviceInterface &vk = m_context.getDeviceInterface();
971 const VkDevice device = m_context.getDevice();
972 Allocator &allocator = m_context.getAllocator();
973
974 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
975 vk, device, allocator,
976 makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
977 MemoryRequirement::HostVisible));
978
979 const Allocation &alloc = m_hostBuffer->getAllocation();
980 if (m_mode == ACCESS_MODE_READ)
981 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
982 else
983 fillPattern(alloc.getHostPtr(), m_bufferSize);
984 flushAlloc(vk, device, alloc);
985
986 // Staging image
987 const auto &imgResource = m_resource.getImage();
988 m_image = de::MovePtr<Image>(
989 new Image(vk, device, allocator,
990 makeImageCreateInfo(imgResource.imageType, imgResource.extent, imgResource.format,
991 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
992 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
993 MemoryRequirement::Any));
994 }
995
996 void recordCommands(const VkCommandBuffer cmdBuffer)
997 {
998 const DeviceInterface &vk = m_context.getDeviceInterface();
999 const VkBufferImageCopy bufferCopyRegion =
1000 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
1001 SynchronizationWrapperPtr synchronizationWrapper =
1002 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1003
1004 // Staging image layout
1005 {
1006 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1007 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1008 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1009 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1010 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1011 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1012 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1013 **m_image, // VkImage image
1014 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1015 );
1016 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
1017 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1018 }
1019
1020 if (m_mode == ACCESS_MODE_READ)
1021 {
1022 // Resource Image -> Staging image
1023 recordCopyCommand(cmdBuffer);
1024
1025 // Staging image layout
1026 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1027 getReadSrcStageMask(), // VkPipelineStageFlags2KHR srcStageMask
1028 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1029 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1030 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1031 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
1032 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
1033 **m_image, // VkImage image
1034 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1035 );
1036 VkDependencyInfoKHR imageDependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
1037 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &imageDependencyInfo);
1038
1039 // Image -> Host buffer
1040 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u,
1041 &bufferCopyRegion);
1042
1043 // Insert a barrier so copied data is available to the host
1044 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
1045 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1046 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1047 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1048 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1049 **m_hostBuffer, // VkBuffer buffer
1050 0u, // VkDeviceSize offset
1051 m_bufferSize // VkDeviceSize size
1052 );
1053 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
1054 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
1055 }
1056 else
1057 {
1058 // Host buffer -> Staging image
1059 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
1060 &bufferCopyRegion);
1061
1062 // Staging image layout
1063 {
1064 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1065 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1066 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1067 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1068 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1069 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
1070 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
1071 **m_image, // VkImage image
1072 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1073 );
1074 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
1075 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1076 }
1077
1078 // Resource image layout
1079 {
1080 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1081 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1082 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1083 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1084 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1085 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1086 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1087 m_resource.getImage().handle, // VkImage image
1088 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1089 );
1090 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
1091 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1092 }
1093
1094 // Staging image -> Resource Image
1095 recordCopyCommand(cmdBuffer);
1096 }
1097 }
1098
1099 SyncInfo getInSyncInfo(void) const
1100 {
1101 const VkAccessFlags2KHR access =
1102 (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1103 const VkImageLayout layout =
1104 (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1105 const SyncInfo syncInfo = {
1106 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1107 access, // VkAccessFlags accessMask;
1108 layout, // VkImageLayout imageLayout;
1109 };
1110 return syncInfo;
1111 }
1112
1113 SyncInfo getOutSyncInfo(void) const
1114 {
1115 const VkAccessFlags2KHR access =
1116 (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1117 const VkImageLayout layout =
1118 (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1119 const SyncInfo syncInfo = {
1120 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1121 access, // VkAccessFlags accessMask;
1122 layout, // VkImageLayout imageLayout;
1123 };
1124 return syncInfo;
1125 }
1126
1127 Data getData(void) const
1128 {
1129 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
1130 }
1131
1132 void setData(const Data &data)
1133 {
1134 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1135 setHostBufferData(m_context, *m_hostBuffer, data);
1136 }
1137
1138 protected:
1139 OperationContext &m_context;
1140 Resource &m_resource;
1141 const AccessMode m_mode;
1142 const VkDeviceSize m_bufferSize;
1143 de::MovePtr<Buffer> m_hostBuffer;
1144 de::MovePtr<Image> m_image;
1145 };
1146
1147 VkOffset3D makeExtentOffset(const Resource &resource)
1148 {
1149 DE_ASSERT(resource.getType() == RESOURCE_TYPE_IMAGE);
1150 const VkExtent3D extent = resource.getImage().extent;
1151
1152 switch (resource.getImage().imageType)
1153 {
1154 case VK_IMAGE_TYPE_1D:
1155 return makeOffset3D(extent.width, 1, 1);
1156 case VK_IMAGE_TYPE_2D:
1157 return makeOffset3D(extent.width, extent.height, 1);
1158 case VK_IMAGE_TYPE_3D:
1159 return makeOffset3D(extent.width, extent.height, extent.depth);
1160 default:
1161 DE_ASSERT(0);
1162 return VkOffset3D();
1163 }
1164 }
1165
1166 VkImageBlit makeBlitRegion(const Resource &resource)
1167 {
1168 const VkImageBlit blitRegion = {
1169 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1170 {makeOffset3D(0, 0, 0), makeExtentOffset(resource)}, // VkOffset3D srcOffsets[2];
1171 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1172 {makeOffset3D(0, 0, 0), makeExtentOffset(resource)}, // VkOffset3D dstOffsets[2];
1173 };
1174 return blitRegion;
1175 }
1176
1177 class BlitImplementation : public ImplementationBase
1178 {
1179 public:
1180 BlitImplementation(OperationContext &context, Resource &resource, const AccessMode mode)
1181 : ImplementationBase(context, resource, mode)
1182 , m_blitRegion(makeBlitRegion(m_resource))
1183 {
1184 const InstanceInterface &vki = m_context.getInstanceInterface();
1185 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1186 const auto &imgResource = m_resource.getImage();
1187 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, imgResource.format);
1188 const auto &features = ((imgResource.tiling == VK_IMAGE_TILING_LINEAR) ? formatProps.linearTilingFeatures :
1189 formatProps.optimalTilingFeatures);
1190 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1191
1192 // The blit image command is not allowed when using --deqp-compute-only=enable
1193 if (m_context.isComputeOnly())
1194 THROW_NOT_SUPPORTED_COMPUTE_ONLY();
1195
1196 // Blit support for both SRC and DST is required because both images use the same format.
1197 if ((features & requiredFlags) != requiredFlags)
1198 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1199 }
1200
1201 void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1202 {
1203 const DeviceInterface &vk = m_context.getDeviceInterface();
1204
1205 if (m_mode == ACCESS_MODE_READ)
1206 {
1207 // Resource Image -> Staging image
1208 vk.cmdBlitImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image,
1209 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_blitRegion, VK_FILTER_NEAREST);
1210 }
1211 else
1212 {
1213 // Staging image -> Resource Image
1214 vk.cmdBlitImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle,
1215 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_blitRegion, VK_FILTER_NEAREST);
1216 }
1217 }
1218
1219 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1220 {
1221 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ?
1222 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR :
1223 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR;
1224 }
1225
1226 private:
1227 const VkImageBlit m_blitRegion;
1228 };
1229
1230 template <typename ImageCopyOrResolve>
1231 ImageCopyOrResolve makeImageRegion(const Resource &resource)
1232 {
1233 return {
1234 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1235 makeOffset3D(0, 0, 0), // VkOffset3D srcOffset;
1236 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1237 makeOffset3D(0, 0, 0), // VkOffset3D dstOffset;
1238 resource.getImage().extent, // VkExtent3D extent;
1239 };
1240 }
1241
1242 class CopyImplementation : public ImplementationBase
1243 {
1244 public:
1245 CopyImplementation(OperationContext &context, Resource &resource, const AccessMode mode)
1246 : ImplementationBase(context, resource, mode)
1247 , m_imageCopyRegion(makeImageRegion<VkImageCopy>(m_resource))
1248 {
1249 }
1250
1251 void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1252 {
1253 const DeviceInterface &vk = m_context.getDeviceInterface();
1254
1255 if (m_mode == ACCESS_MODE_READ)
1256 {
1257 // Resource Image -> Staging image
1258 vk.cmdCopyImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image,
1259 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1260 }
1261 else
1262 {
1263 // Staging image -> Resource Image
1264 vk.cmdCopyImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle,
1265 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1266 }
1267 }
1268
1269 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1270 {
1271 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ?
1272 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR :
1273 VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
1274 }
1275
1276 private:
1277 const VkImageCopy m_imageCopyRegion;
1278 };
1279
1280 class ResolveImplementation : public ImplementationBase
1281 {
1282 public:
1283 ResolveImplementation(OperationContext &context, Resource &resource, const AccessMode mode)
1284 : ImplementationBase(context, resource, mode)
1285 , m_imageResolveRegion(makeImageRegion<VkImageResolve>(resource))
1286 {
1287 DE_ASSERT(m_mode == ACCESS_MODE_READ);
1288 }
1289
1290 void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1291 {
1292 const DeviceInterface &vk = m_context.getDeviceInterface();
1293
1294 // Resource Image -> Staging image
1295 vk.cmdResolveImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image,
1296 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageResolveRegion);
1297 }
1298
1299 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1300 {
1301 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ?
1302 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR :
1303 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR;
1304 }
1305
1306 private:
1307 VkImageResolve m_imageResolveRegion;
1308 };
1309
1310 enum Type
1311 {
1312 TYPE_COPY,
1313 TYPE_BLIT,
1314 TYPE_RESOLVE,
1315 };
1316
1317 class Support : public OperationSupport
1318 {
1319 public:
1320 Support(const ResourceDescription &resourceDesc, const Type type, const AccessMode mode)
1321 : m_type(type)
1322 , m_mode(mode)
1323 {
1324 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1325
1326 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
1327 m_requiredQueueFlags = (isDepthStencil || m_type != TYPE_COPY ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1328
1329 // Don't blit depth/stencil images.
1330 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1331 }
1332
1333 uint32_t getInResourceUsageFlags(void) const
1334 {
1335 return (m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0);
1336 }
1337
1338 uint32_t getOutResourceUsageFlags(void) const
1339 {
1340 return (m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0);
1341 }
1342
1343 VkQueueFlags getQueueFlags(const OperationContext &context) const
1344 {
1345 DE_UNREF(context);
1346 return m_requiredQueueFlags;
1347 }
1348
1349 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
1350 {
1351 if (m_type == TYPE_COPY)
1352 return de::MovePtr<Operation>(new CopyImplementation(context, resource, m_mode));
1353 else if (m_type == TYPE_BLIT)
1354 return de::MovePtr<Operation>(new BlitImplementation(context, resource, m_mode));
1355 else
1356 return de::MovePtr<Operation>(new ResolveImplementation(context, resource, m_mode));
1357 }
1358
1359 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
1360 {
1361 DE_ASSERT(0);
1362 return de::MovePtr<Operation>();
1363 }
1364
1365 private:
1366 const Type m_type;
1367 const AccessMode m_mode;
1368 VkQueueFlags m_requiredQueueFlags;
1369 };
1370
1371 class BlitCopyImplementation : public Operation
1372 {
1373 public:
1374 BlitCopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
1375 : m_context(context)
1376 , m_inResource(inResource)
1377 , m_outResource(outResource)
1378 , m_blitRegion(makeBlitRegion(m_inResource))
1379 {
1380 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1381 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1382
1383 const InstanceInterface &vki = m_context.getInstanceInterface();
1384 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1385 const auto &imgResource = m_inResource.getImage();
1386 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, imgResource.format);
1387 const auto &features = ((imgResource.tiling == VK_IMAGE_TILING_LINEAR) ? formatProps.linearTilingFeatures :
1388 formatProps.optimalTilingFeatures);
1389 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1390
1391 // Blit support for both SRC and DST is required because both images use the same format.
1392 if ((features & requiredFlags) != requiredFlags)
1393 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1394 }
1395
1396 void recordCommands(const VkCommandBuffer cmdBuffer)
1397 {
1398 const DeviceInterface &vk = m_context.getDeviceInterface();
1399 SynchronizationWrapperPtr synchronizationWrapper =
1400 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1401
1402 {
1403 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1404 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1405 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1406 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1407 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1408 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1409 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1410 m_outResource.getImage().handle, // VkImage image
1411 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1412 );
1413 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
1414 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1415 }
1416
1417 vk.cmdBlitImage(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1418 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_blitRegion,
1419 VK_FILTER_NEAREST);
1420 }
1421
1422 SyncInfo getInSyncInfo(void) const
1423 {
1424 const SyncInfo syncInfo = {
1425 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1426 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1427 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1428 };
1429 return syncInfo;
1430 }
1431
1432 SyncInfo getOutSyncInfo(void) const
1433 {
1434 const SyncInfo syncInfo = {
1435 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1436 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1437 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1438 };
1439 return syncInfo;
1440 }
1441
1442 Data getData(void) const
1443 {
1444 Data data = {0, nullptr};
1445 return data;
1446 }
1447
1448 void setData(const Data &)
1449 {
1450 DE_ASSERT(0);
1451 }
1452
1453 private:
1454 OperationContext &m_context;
1455 Resource &m_inResource;
1456 Resource &m_outResource;
1457 const VkImageBlit m_blitRegion;
1458 };
1459
1460 class CopyCopyImplementation : public Operation
1461 {
1462 public:
1463 CopyCopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
1464 : m_context(context)
1465 , m_inResource(inResource)
1466 , m_outResource(outResource)
1467 , m_imageCopyRegion(makeImageRegion<VkImageCopy>(m_inResource))
1468 {
1469 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1470 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1471 }
1472
1473 void recordCommands(const VkCommandBuffer cmdBuffer)
1474 {
1475 const DeviceInterface &vk = m_context.getDeviceInterface();
1476 SynchronizationWrapperPtr synchronizationWrapper =
1477 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1478
1479 {
1480 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1481 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1482 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1483 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1484 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1485 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1486 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1487 m_outResource.getImage().handle, // VkImage image
1488 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1489 );
1490 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
1491 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1492 }
1493
1494 vk.cmdCopyImage(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1495 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1496 }
1497
1498 SyncInfo getInSyncInfo(void) const
1499 {
1500 const SyncInfo syncInfo = {
1501 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1502 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1503 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1504 };
1505 return syncInfo;
1506 }
1507
1508 SyncInfo getOutSyncInfo(void) const
1509 {
1510 const SyncInfo syncInfo = {
1511 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1512 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1513 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1514 };
1515 return syncInfo;
1516 }
1517
1518 Data getData(void) const
1519 {
1520 Data data = {0, nullptr};
1521 return data;
1522 }
1523
1524 void setData(const Data &)
1525 {
1526 DE_ASSERT(0);
1527 }
1528
1529 private:
1530 OperationContext &m_context;
1531 Resource &m_inResource;
1532 Resource &m_outResource;
1533 const VkImageCopy m_imageCopyRegion;
1534 };
1535
1536 class CopySupport : public OperationSupport
1537 {
1538 public:
1539 CopySupport(const ResourceDescription &resourceDesc, const Type type) : m_type(type)
1540 {
1541 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1542
1543 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
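// vkCmdBlitImage is only supported on graphics-capable queues; depth/stencil copies are also kept on the graphics queue here.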
1544 m_requiredQueueFlags = (isDepthStencil || m_type == TYPE_BLIT ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1545
1546 // Don't blit depth/stencil images.
1547 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1548 }
1549
1550 uint32_t getInResourceUsageFlags(void) const
1551 {
1552 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1553 }
1554
1555 uint32_t getOutResourceUsageFlags(void) const
1556 {
1557 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1558 }
1559
1560 VkQueueFlags getQueueFlags(const OperationContext &context) const
1561 {
1562 DE_UNREF(context);
1563 return m_requiredQueueFlags;
1564 }
1565
1566 de::MovePtr<Operation> build(OperationContext &, Resource &) const
1567 {
1568 DE_ASSERT(0);
1569 return de::MovePtr<Operation>();
1570 }
1571
1572 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
1573 {
1574 if (m_type == TYPE_COPY)
1575 return de::MovePtr<Operation>(new CopyCopyImplementation(context, inResource, outResource));
1576 else
1577 return de::MovePtr<Operation>(new BlitCopyImplementation(context, inResource, outResource));
1578 }
1579
1580 private:
1581 const Type m_type;
1582 VkQueueFlags m_requiredQueueFlags;
1583 };
1584
1585 } // namespace CopyBlitResolveImage
1586
1587 namespace ShaderAccess
1588 {
1589
1590 enum DispatchCall
1591 {
1592 DISPATCH_CALL_DISPATCH,
1593 DISPATCH_CALL_DISPATCH_INDIRECT,
1594 };
1595
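//! Graphics pipeline that draws the VertexGrid into a small color attachment; the tested memory access is performed through the bound descriptor set in the shader stage under test.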
1596 class GraphicsPipeline : public Pipeline
1597 {
1598 public:
1599 GraphicsPipeline(OperationContext &context, const VkShaderStageFlagBits stage, const std::string &shaderPrefix,
1600 const VkDescriptorSetLayout descriptorSetLayout)
1601 : m_vertices(context)
1602 {
1603 const DeviceInterface &vk = context.getDeviceInterface();
1604 const VkDevice device = context.getDevice();
1605 Allocator &allocator = context.getAllocator();
1606 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
1607
1608 // Color attachment
1609
1610 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
1611 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
1612 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
1613 m_colorAttachmentImage = de::MovePtr<Image>(new Image(
1614 vk, device, allocator,
1615 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat,
1616 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
1617 MemoryRequirement::Any));
1618
1619 // Pipeline
1620
1621 m_colorAttachmentView = makeImageView(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D,
1622 m_colorFormat, m_colorImageSubresourceRange);
1623 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
1624 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width,
1625 m_colorImageExtent.height);
1626 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1627
1628 GraphicsPipelineBuilder pipelineBuilder;
1629 pipelineBuilder.setRenderSize(tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
1630 .setVertexInputSingleAttribute(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
1631 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"),
1632 nullptr)
1633 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,
1634 context.getBinaryCollection().get(shaderPrefix + "frag"), nullptr);
1635
1636 if (requiredStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
1637 pipelineBuilder.setPatchControlPoints(m_vertices.getNumVertices())
1638 .setShader(vk, device, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
1639 context.getBinaryCollection().get(shaderPrefix + "tesc"), nullptr)
1640 .setShader(vk, device, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
1641 context.getBinaryCollection().get(shaderPrefix + "tese"), nullptr);
1642
1643 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
1644 pipelineBuilder.setShader(vk, device, VK_SHADER_STAGE_GEOMETRY_BIT,
1645 context.getBinaryCollection().get(shaderPrefix + "geom"), nullptr);
1646
1647 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
1648 context.getResourceInterface());
1649 }
1650
1651 void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1652 {
1653 const DeviceInterface &vk = context.getDeviceInterface();
1654 SynchronizationWrapperPtr synchronizationWrapper =
1655 getSynchronizationWrapper(context.getSynchronizationType(), vk, false);
1656
1657 // Change color attachment image layout
1658 {
1659 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1660 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1661 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1662 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, // VkPipelineStageFlags2KHR dstStageMask
1663 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1664 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1665 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
1666 **m_colorAttachmentImage, // VkImage image
1667 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
1668 );
1669 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
1670 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1671 }
1672
1673 {
1674 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
1675 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
1676
1677 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
1678 }
1679
1680 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
1681 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet,
1682 0u, nullptr);
1683 {
1684 const VkDeviceSize vertexBufferOffset = 0ull;
1685 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
1686 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
1687 }
1688
1689 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
1690 endRenderPass(vk, cmdBuffer);
1691 }
1692
1693 private:
1694 const VertexGrid m_vertices;
1695 VkFormat m_colorFormat;
1696 de::MovePtr<Image> m_colorAttachmentImage;
1697 Move<VkImageView> m_colorAttachmentView;
1698 VkExtent3D m_colorImageExtent;
1699 VkImageSubresourceRange m_colorImageSubresourceRange;
1700 Move<VkRenderPass> m_renderPass;
1701 Move<VkFramebuffer> m_framebuffer;
1702 Move<VkPipelineLayout> m_pipelineLayout;
1703 Move<VkPipeline> m_pipeline;
1704 };
1705
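//! Compute pipeline that dispatches a single workgroup, either directly or through an indirect buffer.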
1706 class ComputePipeline : public Pipeline
1707 {
1708 public:
1709 ComputePipeline(OperationContext &context, const DispatchCall dispatchCall, const std::string &shaderPrefix,
1710 const VkDescriptorSetLayout descriptorSetLayout)
1711 : m_dispatchCall(dispatchCall)
1712 {
1713 const DeviceInterface &vk = context.getDeviceInterface();
1714 const VkDevice device = context.getDevice();
1715 Allocator &allocator = context.getAllocator();
1716
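// For indirect dispatch, prepare a host-visible buffer holding a single VkDispatchIndirectCommand of (1, 1, 1).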
1717 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1718 {
1719 m_indirectBuffer = de::MovePtr<Buffer>(
1720 new Buffer(vk, device, allocator,
1721 makeBufferCreateInfo(sizeof(VkDispatchIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
1722 MemoryRequirement::HostVisible));
1723
1724 const Allocation &alloc = m_indirectBuffer->getAllocation();
1725 VkDispatchIndirectCommand *const pIndirectCommand =
1726 static_cast<VkDispatchIndirectCommand *>(alloc.getHostPtr());
1727
1728 pIndirectCommand->x = 1u;
1729 pIndirectCommand->y = 1u;
1730 pIndirectCommand->z = 1u;
1731
1732 flushAlloc(vk, device, alloc);
1733 }
1734
1735 const Unique<VkShaderModule> shaderModule(createShaderModule(
1736 vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
1737
1738 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1739 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, nullptr,
1740 context.getPipelineCacheData(), context.getResourceInterface());
1741 }
1742
1743 void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1744 {
1745 const DeviceInterface &vk = context.getDeviceInterface();
1746
1747 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
1748 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet,
1749 0u, nullptr);
1750
1751 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1752 vk.cmdDispatchIndirect(cmdBuffer, **m_indirectBuffer, 0u);
1753 else
1754 vk.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
1755 }
1756
1757 private:
1758 const DispatchCall m_dispatchCall;
1759 de::MovePtr<Buffer> m_indirectBuffer;
1760 Move<VkPipelineLayout> m_pipelineLayout;
1761 Move<VkPipeline> m_pipeline;
1762 };
1763
1764 //! Read/write operation on a UBO/SSBO in graphics/compute pipeline.
1765 class BufferImplementation : public Operation
1766 {
1767 public:
1768 BufferImplementation(OperationContext &context, Resource &resource, const VkShaderStageFlagBits stage,
1769 const BufferType bufferType, const std::string &shaderPrefix, const AccessMode mode,
1770 const bool specializedAccess, const PipelineType pipelineType, const DispatchCall dispatchCall)
1771 : Operation(specializedAccess)
1772 , m_context(context)
1773 , m_resource(resource)
1774 , m_stage(stage)
1775 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1776 , m_bufferType(bufferType)
1777 , m_mode(mode)
1778 , m_dispatchCall(dispatchCall)
1779 {
1780 requireFeaturesForSSBOAccess(m_context, m_stage);
1781
1782 const DeviceInterface &vk = m_context.getDeviceInterface();
1783 const VkDevice device = m_context.getDevice();
1784 Allocator &allocator = m_context.getAllocator();
1785
1786 m_hostBuffer = de::MovePtr<Buffer>(
1787 new Buffer(vk, device, allocator,
1788 makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
1789 MemoryRequirement::HostVisible));
1790
1791 // Init host buffer data
1792 {
1793 const Allocation &alloc = m_hostBuffer->getAllocation();
1794 if (m_mode == ACCESS_MODE_READ)
1795 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
1796 else
1797 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
1798 flushAlloc(vk, device, alloc);
1799 }
1800 // Prepare descriptors
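// Binding 0 is the shader's input and binding 1 its output: in read mode the tested buffer feeds the host-visible result buffer, in write mode the host buffer feeds the tested buffer.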
1801 {
1802 VkDescriptorType bufferDescriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1803
1804 if (m_bufferType == BUFFER_TYPE_UNIFORM)
1805 bufferDescriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1806 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
1807 bufferDescriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
1808
1809 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1810 .addSingleBinding(bufferDescriptorType, m_stage)
1811 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
1812 .build(vk, device);
1813
1814 m_descriptorPool = DescriptorPoolBuilder()
1815 .addType(bufferDescriptorType)
1816 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
1817 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1818
1819 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1820
1821 if (m_mode == ACCESS_MODE_READ)
1822 {
1823 if ((m_bufferType == BUFFER_TYPE_UNIFORM) || (m_bufferType == BUFFER_TYPE_STORAGE))
1824 {
1825 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(
1826 m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1827 const VkDescriptorBufferInfo hostBufferInfo =
1828 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1829 DescriptorSetUpdateBuilder()
1830 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
1831 bufferDescriptorType, &bufferInfo)
1832 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
1833 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1834 .update(vk, device);
1835 }
1836 else
1837 {
1838 m_pBufferView =
1839 vk::makeBufferView(vk, device, m_resource.getBuffer().handle, VK_FORMAT_R32G32B32A32_UINT,
1840 m_resource.getBuffer().offset, m_resource.getBuffer().size);
1841 const VkDescriptorBufferInfo hostBufferInfo =
1842 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1843 DescriptorSetUpdateBuilder()
1844 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
1845 bufferDescriptorType, &m_pBufferView.get())
1846 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
1847 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1848 .update(vk, device);
1849 }
1850 }
1851 else
1852 {
1853 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(
1854 m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1855 const VkDescriptorBufferInfo hostBufferInfo =
1856 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1857 DescriptorSetUpdateBuilder()
1858 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
1859 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1860 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
1861 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
1862 .update(vk, device);
1863 }
1864 }
1865
1866 // Create pipeline
1867 m_pipeline =
1868 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
1869 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
1870 de::MovePtr<Pipeline>(
1871 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1872 }
1873
1874 void recordCommands(const VkCommandBuffer cmdBuffer)
1875 {
1876 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
1877
1878 // Post draw/dispatch commands
1879
1880 if (m_mode == ACCESS_MODE_READ)
1881 {
1882 const DeviceInterface &vk = m_context.getDeviceInterface();
1883 SynchronizationWrapperPtr synchronizationWrapper =
1884 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1885
1886 // Insert a barrier so data written by the shader is available to the host
1887 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
1888 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
1889 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1890 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
1891 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1892 **m_hostBuffer, // VkBuffer buffer
1893 0u, // VkDeviceSize offset
1894 m_resource.getBuffer().size // VkDeviceSize size
1895 );
1896 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
1897 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1898 }
1899 }
1900
1901 SyncInfo getInSyncInfo(void) const
1902 {
1903 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
1904
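// With specialized access the precise SHADER_STORAGE_/SAMPLED_ access bits are reported; otherwise the generic SHADER_READ/WRITE bits are used.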
1905 if (m_mode == ACCESS_MODE_READ)
1906 {
1907 if (m_bufferType == BUFFER_TYPE_UNIFORM)
1908 accessFlags = VK_ACCESS_2_UNIFORM_READ_BIT_KHR;
1909
1910 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
1911 {
1912 if (m_specializedAccess)
1913 accessFlags = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR;
1914 else
1915 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
1916 }
1917 else
1918 {
1919 if (m_specializedAccess)
1920 accessFlags = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR;
1921 else
1922 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
1923 }
1924 }
1925 else
1926 {
1927 if (m_specializedAccess)
1928 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
1929 else
1930 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
1931 }
1932
1933 const SyncInfo syncInfo = {
1934 m_pipelineStage, // VkPipelineStageFlags stageMask;
1935 accessFlags, // VkAccessFlags accessMask;
1936 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1937 };
1938 return syncInfo;
1939 }
1940
1941 SyncInfo getOutSyncInfo(void) const
1942 {
1943 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
1944
1945 if (m_mode == ACCESS_MODE_WRITE)
1946 {
1947 if (m_specializedAccess)
1948 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
1949 else
1950 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
1951 }
1952
1953 const SyncInfo syncInfo = {
1954 m_pipelineStage, // VkPipelineStageFlags stageMask;
1955 accessFlags, // VkAccessFlags accessMask;
1956 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1957 };
1958 return syncInfo;
1959 }
1960
1961 Data getData(void) const
1962 {
1963 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
1964 }
1965
1966 void setData(const Data &data)
1967 {
1968 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1969 setHostBufferData(m_context, *m_hostBuffer, data);
1970 }
1971
1972 vk::VkShaderStageFlagBits getShaderStage(void)
1973 {
1974 return m_stage;
1975 }
1976
1977 private:
1978 OperationContext &m_context;
1979 Resource &m_resource;
1980 const VkShaderStageFlagBits m_stage;
1981 const VkPipelineStageFlags m_pipelineStage;
1982 const BufferType m_bufferType;
1983 const AccessMode m_mode;
1984 const DispatchCall m_dispatchCall;
1985 de::MovePtr<Buffer> m_hostBuffer;
1986 Move<VkDescriptorPool> m_descriptorPool;
1987 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
1988 Move<VkDescriptorSet> m_descriptorSet;
1989 de::MovePtr<Pipeline> m_pipeline;
1990 Move<VkBufferView> m_pBufferView;
1991 };
1992
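//! Read/write operation on a storage image in graphics/compute pipeline.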
1993 class ImageImplementation : public Operation
1994 {
1995 public:
1996 ImageImplementation(OperationContext &context, Resource &resource, const VkShaderStageFlagBits stage,
1997 const std::string &shaderPrefix, const AccessMode mode, const bool specializedAccess,
1998 const PipelineType pipelineType, const DispatchCall dispatchCall)
1999 : Operation(specializedAccess)
2000 , m_context(context)
2001 , m_resource(resource)
2002 , m_stage(stage)
2003 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2004 , m_mode(mode)
2005 , m_dispatchCall(dispatchCall)
2006 , m_hostBufferSizeBytes(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
2007 {
2008 const DeviceInterface &vk = m_context.getDeviceInterface();
2009 const InstanceInterface &vki = m_context.getInstanceInterface();
2010 const VkDevice device = m_context.getDevice();
2011 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2012 Allocator &allocator = m_context.getAllocator();
2013
2014 // Image stores are always required, in either access mode.
2015 requireFeaturesForSSBOAccess(m_context, m_stage);
2016
2017 // Some storage image formats may not be supported
2018 const auto &imgResource = m_resource.getImage();
2019 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
2020
2021 m_hostBuffer = de::MovePtr<Buffer>(
2022 new Buffer(vk, device, allocator,
2023 makeBufferCreateInfo(m_hostBufferSizeBytes,
2024 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
2025 MemoryRequirement::HostVisible));
2026
2027 // Init host buffer data
2028 {
2029 const Allocation &alloc = m_hostBuffer->getAllocation();
2030 if (m_mode == ACCESS_MODE_READ)
2031 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
2032 else
2033 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
2034 flushAlloc(vk, device, alloc);
2035 }
2036
2037 // Image resources
2038 {
2039 m_image = de::MovePtr<Image>(new Image(
2040 vk, device, allocator,
2041 makeImageCreateInfo(
2042 m_resource.getImage().imageType, m_resource.getImage().extent, m_resource.getImage().format,
2043 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_STORAGE_BIT),
2044 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
2045 MemoryRequirement::Any));
2046
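// In read mode the tested image is the shader's source and the extra image collects the result; in write mode the roles are swapped.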
2047 if (m_mode == ACCESS_MODE_READ)
2048 {
2049 m_srcImage = &m_resource.getImage().handle;
2050 m_dstImage = &(**m_image);
2051 }
2052 else
2053 {
2054 m_srcImage = &(**m_image);
2055 m_dstImage = &m_resource.getImage().handle;
2056 }
2057
2058 const VkImageViewType viewType = getImageViewType(m_resource.getImage().imageType);
2059
2060 m_srcImageView = makeImageView(vk, device, *m_srcImage, viewType, m_resource.getImage().format,
2061 m_resource.getImage().subresourceRange);
2062 m_dstImageView = makeImageView(vk, device, *m_dstImage, viewType, m_resource.getImage().format,
2063 m_resource.getImage().subresourceRange);
2064 }
2065
2066 // Prepare descriptors
2067 {
2068 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2069 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2070 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2071 .build(vk, device);
2072
2073 m_descriptorPool = DescriptorPoolBuilder()
2074 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2075 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2076 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2077
2078 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2079
2080 const VkDescriptorImageInfo srcImageInfo =
2081 makeDescriptorImageInfo(VK_NULL_HANDLE, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
2082 const VkDescriptorImageInfo dstImageInfo =
2083 makeDescriptorImageInfo(VK_NULL_HANDLE, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
2084
2085 DescriptorSetUpdateBuilder()
2086 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
2087 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
2088 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
2089 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
2090 .update(vk, device);
2091 }
2092
2093 // Create pipeline
2094 m_pipeline =
2095 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
2096 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
2097 de::MovePtr<Pipeline>(
2098 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2099 }
2100
2101 void recordCommands(const VkCommandBuffer cmdBuffer)
2102 {
2103 const DeviceInterface &vk = m_context.getDeviceInterface();
2104 const VkBufferImageCopy bufferCopyRegion =
2105 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
2106 SynchronizationWrapperPtr synchronizationWrapper =
2107 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
2108
2109 // Destination image layout
2110 {
2111 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2112 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2113 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2114 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2115 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2116 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2117 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2118 *m_dstImage, // VkImage image
2119 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2120 );
2121 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
2122 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2123 }
2124
2125 // In write mode, the source image must first be filled with the test data.
2126 if (m_mode == ACCESS_MODE_WRITE)
2127 {
2128 // Layout for transfer
2129 {
2130 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2131 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2132 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2133 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
2134 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2135 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2136 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
2137 *m_srcImage, // VkImage image
2138 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2139 );
2140 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
2141 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2142 }
2143
2144 // Host buffer -> Src image
2145 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, *m_srcImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
2146 &bufferCopyRegion);
2147
2148 // Layout for shader reading
2149 {
2150 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2151 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
2152 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2153 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2154 VK_ACCESS_2_SHADER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2155 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
2156 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2157 *m_srcImage, // VkImage image
2158 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2159 );
2160 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
2161 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2162 }
2163 }
2164
2165 // Execute shaders
2166
2167 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2168
2169 // Post draw/dispatch commands
2170
2171 if (m_mode == ACCESS_MODE_READ)
2172 {
2173 // Layout for transfer
2174 {
2175 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2176 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
2177 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2178 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
2179 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2180 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout oldLayout
2181 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
2182 *m_dstImage, // VkImage image
2183 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2184 );
2185 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
2186 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2187 }
2188
2189 // Dst image -> Host buffer
2190 vk.cmdCopyImageToBuffer(cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u,
2191 &bufferCopyRegion);
2192
2193 // Insert a barrier so the data copied into the host buffer is available to the host
2194 {
2195 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
2196 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
2197 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2198 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
2199 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2200 **m_hostBuffer, // VkBuffer buffer
2201 0u, // VkDeviceSize offset
2202 m_hostBufferSizeBytes // VkDeviceSize size
2203 );
2204 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
2205 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2206 }
2207 }
2208 }
2209
2210 SyncInfo getInSyncInfo(void) const
2211 {
2212 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
2213
2214 if (m_mode == ACCESS_MODE_READ)
2215 {
2216 if (m_specializedAccess)
2217 accessFlags = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR;
2218 else
2219 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
2220 }
2221
2222 const SyncInfo syncInfo = {
2223 m_pipelineStage, // VkPipelineStageFlags stageMask;
2224 accessFlags, // VkAccessFlags accessMask;
2225 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2226 };
2227 return syncInfo;
2228 }
2229
2230 SyncInfo getOutSyncInfo(void) const
2231 {
2232 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
2233
2234 if (m_mode == ACCESS_MODE_WRITE)
2235 {
2236 if (m_specializedAccess)
2237 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
2238 else
2239 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
2240 }
2241
2242 const SyncInfo syncInfo = {
2243 m_pipelineStage, // VkPipelineStageFlags stageMask;
2244 accessFlags, // VkAccessFlags accessMask;
2245 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2246 };
2247 return syncInfo;
2248 }
2249
2250 Data getData(void) const
2251 {
2252 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
2253 }
2254
2255 void setData(const Data &data)
2256 {
2257 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
2258 setHostBufferData(m_context, *m_hostBuffer, data);
2259 }
2260
2261 vk::VkShaderStageFlagBits getShaderStage(void)
2262 {
2263 return m_stage;
2264 }
2265
2266 private:
2267 OperationContext &m_context;
2268 Resource &m_resource;
2269 const VkShaderStageFlagBits m_stage;
2270 const VkPipelineStageFlags m_pipelineStage;
2271 const AccessMode m_mode;
2272 const DispatchCall m_dispatchCall;
2273 const VkDeviceSize m_hostBufferSizeBytes;
2274 de::MovePtr<Buffer> m_hostBuffer;
2275 de::MovePtr<Image> m_image; //! Additional image used as src or dst depending on operation mode.
2276 const VkImage *m_srcImage;
2277 const VkImage *m_dstImage;
2278 Move<VkImageView> m_srcImageView;
2279 Move<VkImageView> m_dstImageView;
2280 Move<VkDescriptorPool> m_descriptorPool;
2281 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2282 Move<VkDescriptorSet> m_descriptorSet;
2283 de::MovePtr<Pipeline> m_pipeline;
2284 };
2285
2286 //! Create generic passthrough shaders with bits of custom code inserted in a specific shader stage.
2287 void initPassthroughPrograms(SourceCollections &programCollection, const std::string &shaderPrefix,
2288 const std::string &declCode, const std::string &mainCode,
2289 const VkShaderStageFlagBits stage)
2290 {
2291 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
2292
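// Only the stage under test receives declCode and mainCode; every other required stage is a plain passthrough.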
2293 if (requiredStages & VK_SHADER_STAGE_VERTEX_BIT)
2294 {
2295 std::ostringstream src;
2296 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2297 << "\n"
2298 << "layout(location = 0) in vec4 v_in_position;\n"
2299 << "\n"
2300 << "out " << s_perVertexBlock << ";\n"
2301 << "\n"
2302 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? declCode + "\n" : "") << "void main (void)\n"
2303 << "{\n"
2304 << " gl_Position = v_in_position;\n"
2305 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? mainCode : "") << "}\n";
2306
2307 if (!programCollection.glslSources.contains(shaderPrefix + "vert"))
2308 programCollection.glslSources.add(shaderPrefix + "vert") << glu::VertexSource(src.str());
2309 }
2310
2311 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
2312 {
2313 std::ostringstream src;
2314 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2315 << "\n"
2316 << "layout(vertices = 3) out;\n"
2317 << "\n"
2318 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2319 << "\n"
2320 << "out " << s_perVertexBlock << " gl_out[];\n"
2321 << "\n"
2322 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? declCode + "\n" : "") << "void main (void)\n"
2323 << "{\n"
2324 << " gl_TessLevelInner[0] = 1.0;\n"
2325 << " gl_TessLevelInner[1] = 1.0;\n"
2326 << "\n"
2327 << " gl_TessLevelOuter[0] = 1.0;\n"
2328 << " gl_TessLevelOuter[1] = 1.0;\n"
2329 << " gl_TessLevelOuter[2] = 1.0;\n"
2330 << " gl_TessLevelOuter[3] = 1.0;\n"
2331 << "\n"
2332 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2333 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? "\n" + mainCode : "") << "}\n";
2334
2335 if (!programCollection.glslSources.contains(shaderPrefix + "tesc"))
2336 programCollection.glslSources.add(shaderPrefix + "tesc") << glu::TessellationControlSource(src.str());
2337 }
2338
2339 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
2340 {
2341 std::ostringstream src;
2342 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2343 << "\n"
2344 << "layout(triangles, equal_spacing, ccw) in;\n"
2345 << "\n"
2346 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2347 << "\n"
2348 << "out " << s_perVertexBlock << ";\n"
2349 << "\n"
2350 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? declCode + "\n" : "") << "void main (void)\n"
2351 << "{\n"
2352 << " vec3 px = gl_TessCoord.x * gl_in[0].gl_Position.xyz;\n"
2353 << " vec3 py = gl_TessCoord.y * gl_in[1].gl_Position.xyz;\n"
2354 << " vec3 pz = gl_TessCoord.z * gl_in[2].gl_Position.xyz;\n"
2355 << " gl_Position = vec4(px + py + pz, 1.0);\n"
2356 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? mainCode : "") << "}\n";
2357
2358 if (!programCollection.glslSources.contains(shaderPrefix + "tese"))
2359 programCollection.glslSources.add(shaderPrefix + "tese") << glu::TessellationEvaluationSource(src.str());
2360 }
2361
2362 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
2363 {
2364 std::ostringstream src;
2365 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2366 << "\n"
2367 << "layout(triangles) in;\n"
2368 << "layout(triangle_strip, max_vertices = 3) out;\n"
2369 << "\n"
2370 << "in " << s_perVertexBlock << " gl_in[];\n"
2371 << "\n"
2372 << "out " << s_perVertexBlock << ";\n"
2373 << "\n"
2374 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? declCode + "\n" : "") << "void main (void)\n"
2375 << "{\n"
2376 << " gl_Position = gl_in[0].gl_Position;\n"
2377 << " EmitVertex();\n"
2378 << "\n"
2379 << " gl_Position = gl_in[1].gl_Position;\n"
2380 << " EmitVertex();\n"
2381 << "\n"
2382 << " gl_Position = gl_in[2].gl_Position;\n"
2383 << " EmitVertex();\n"
2384 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? "\n" + mainCode : "") << "}\n";
2385
2386 if (!programCollection.glslSources.contains(shaderPrefix + "geom"))
2387 programCollection.glslSources.add(shaderPrefix + "geom") << glu::GeometrySource(src.str());
2388 }
2389
2390 if (requiredStages & VK_SHADER_STAGE_FRAGMENT_BIT)
2391 {
2392 std::ostringstream src;
2393 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2394 << "\n"
2395 << "layout(location = 0) out vec4 o_color;\n"
2396 << "\n"
2397 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? declCode + "\n" : "") << "void main (void)\n"
2398 << "{\n"
2399 << " o_color = vec4(1.0);\n"
2400 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? "\n" + mainCode : "") << "}\n";
2401
2402 if (!programCollection.glslSources.contains(shaderPrefix + "frag"))
2403 programCollection.glslSources.add(shaderPrefix + "frag") << glu::FragmentSource(src.str());
2404 }
2405
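// For illustration, when the stage under test is the compute stage, the generated compute shader has this shape
// (declCode and mainCode are the caller-provided snippets):
//
//     #version 440
//
//     layout(local_size_x = 1) in;
//
//     <declCode>
//     void main (void)
//     {
//     <mainCode>
//     }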
2406 if (requiredStages & VK_SHADER_STAGE_COMPUTE_BIT)
2407 {
2408 std::ostringstream src;
2409 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2410 << "\n"
2411 << "layout(local_size_x = 1) in;\n"
2412 << "\n"
2413 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? declCode + "\n" : "") << "void main (void)\n"
2414 << "{\n"
2415 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? mainCode : "") << "}\n";
2416
2417 if (!programCollection.glslSources.contains(shaderPrefix + "comp"))
2418 programCollection.glslSources.add(shaderPrefix + "comp") << glu::ComputeSource(src.str());
2419 }
2420 }
2421
2422 class BufferSupport : public OperationSupport
2423 {
2424 public:
2425 BufferSupport(const ResourceDescription &resourceDesc, const BufferType bufferType, const AccessMode mode,
2426 const bool specializedAccess, const VkShaderStageFlagBits stage,
2427 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2428 : OperationSupport(specializedAccess)
2429 , m_resourceDesc(resourceDesc)
2430 , m_bufferType(bufferType)
2431 , m_mode(mode)
2432 , m_stage(stage)
2433 , m_shaderPrefix(std::string(m_mode == ACCESS_MODE_READ ? "read_" : "write_") +
2434 (m_bufferType == BUFFER_TYPE_UNIFORM ?
2435 "ubo_" :
2436 (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL ? "ubo_texel_" : "ssbo_")) +
2437 getResourceName(m_resourceDesc) + "_")
2438 , m_dispatchCall(dispatchCall)
2439 {
2440 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2441 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE ||
2442 m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL);
2443 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2444 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_bufferType == BUFFER_TYPE_STORAGE);
2445 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2446 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2447
2448 assertValidShaderStage(m_stage);
2449 }
2450
2451 void initPrograms(SourceCollections &programCollection) const
2452 {
2453 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2454
2455 std::string bufferTypeStr = "";
2456 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2457 bufferTypeStr = "uniform";
2458 else
2459 {
2460 if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2461 bufferTypeStr = "uniform utextureBuffer";
2462 else
2463 bufferTypeStr = "buffer";
2464 }
2465
2466 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() /
2467 sizeof(tcu::UVec4)); // std140 array elements are aligned to a multiple of 16 bytes
2468 std::ostringstream declSrc;
2469 std::ostringstream copySrc;
2470 std::string outputBuff = "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2471 " uvec4 data[" +
2472 std::to_string(numVecElements) +
2473 "];\n"
2474 "} b_out;\n";
2475 if ((m_bufferType == BUFFER_TYPE_UNIFORM) || (m_bufferType == BUFFER_TYPE_STORAGE))
2476 {
2477 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2478 << " uvec4 data[" << numVecElements << "];\n"
2479 << "} b_in;\n"
2480 << "\n"
2481 << outputBuff;
2482
2483 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2484 << " b_out.data[i] = b_in.data[i];\n"
2485 << " }\n";
2486 }
2487 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2488 {
2489 declSrc << "layout(set = 0, binding = 0) " << bufferTypeStr << " Input;\n"
2490 << "\n"
2491 << outputBuff;
2492
2493 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2494 << " b_out.data[i] = texelFetch(Input, i);\n"
2495 << " }\n";
2496 }
2497
2498 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2499 }
2500
2501 uint32_t getInResourceUsageFlags(void) const
2502 {
2503 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2504 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2505 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2506 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT : 0;
2507 else
2508 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2509 }
2510
2511 uint32_t getOutResourceUsageFlags(void) const
2512 {
2513 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2514 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2515 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2516 return m_mode == ACCESS_MODE_WRITE ? 0 : VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
2517 else
2518 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2519 }
2520
2521 VkQueueFlags getQueueFlags(const OperationContext &context) const
2522 {
2523 DE_UNREF(context);
2524 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2525 }
2526
2527 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
2528 {
2529 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2530 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType,
2531 m_shaderPrefix, m_mode, m_specializedAccess,
2532 PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2533 else
2534 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType,
2535 m_shaderPrefix, m_mode, m_specializedAccess,
2536 PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2537 }
2538
2539 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
2540 {
2541 DE_ASSERT(0);
2542 return de::MovePtr<Operation>();
2543 }
2544
2545 vk::VkShaderStageFlagBits getShaderStage(void)
2546 {
2547 return m_stage;
2548 }
2549
2550 private:
2551 const ResourceDescription m_resourceDesc;
2552 const BufferType m_bufferType;
2553 const AccessMode m_mode;
2554 const VkShaderStageFlagBits m_stage;
2555 const std::string m_shaderPrefix;
2556 const DispatchCall m_dispatchCall;
2557 };
2558
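//! Support class for reading or writing a storage image in a single shader stage.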
2559 class ImageSupport : public OperationSupport
2560 {
2561 public:
2562 ImageSupport(const ResourceDescription &resourceDesc, const AccessMode mode, const bool specializedAccess,
2563 const VkShaderStageFlagBits stage, const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2564 : OperationSupport(specializedAccess)
2565 , m_resourceDesc(resourceDesc)
2566 , m_mode(mode)
2567 , m_stage(stage)
2568 , m_shaderPrefix((m_mode == ACCESS_MODE_READ ? "read_image_" : "write_image_") +
2569 getResourceName(m_resourceDesc) + "_")
2570 , m_dispatchCall(dispatchCall)
2571 {
2572 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2573 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2574 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2575
2576 assertValidShaderStage(m_stage);
2577 }
2578
2579 void initPrograms(SourceCollections &programCollection) const
2580 {
2581 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
2582 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
2583
2584 std::ostringstream declSrc;
2585 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
2586 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType
2587 << " dstImg;\n";
2588
2589 std::ostringstream mainSrc;
2590 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
2591 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2592 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
2593 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
2594 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2595 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2596 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
2597 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
2598 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
2599 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2600 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2601 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
2602 else
2603 DE_ASSERT(0);
2604
2605 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
2606 }
2607
2608 uint32_t getInResourceUsageFlags(void) const
2609 {
2610 return VK_IMAGE_USAGE_STORAGE_BIT;
2611 }
2612
2613 uint32_t getOutResourceUsageFlags(void) const
2614 {
2615 return VK_IMAGE_USAGE_STORAGE_BIT;
2616 }
2617
2618 VkQueueFlags getQueueFlags(const OperationContext &context) const
2619 {
2620 DE_UNREF(context);
2621 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2622 }
2623
2624 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
2625 {
2626 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2627 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode,
2628 m_specializedAccess, PIPELINE_TYPE_COMPUTE,
2629 m_dispatchCall));
2630 else
2631 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode,
2632 m_specializedAccess, PIPELINE_TYPE_GRAPHICS,
2633 m_dispatchCall));
2634 }
2635
2636 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
2637 {
2638 DE_ASSERT(0);
2639 return de::MovePtr<Operation>();
2640 }
2641
2642 vk::VkShaderStageFlagBits getShaderStage(void)
2643 {
2644 return m_stage;
2645 }
2646
2647 private:
2648 const ResourceDescription m_resourceDesc;
2649 const AccessMode m_mode;
2650 const VkShaderStageFlagBits m_stage;
2651 const std::string m_shaderPrefix;
2652 const DispatchCall m_dispatchCall;
2653 };
2654
2655 //! Copy operation on a UBO/SSBO in graphics/compute pipeline.
2656 class BufferCopyImplementation : public Operation
2657 {
2658 public:
2659 BufferCopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource,
2660 const VkShaderStageFlagBits stage, const BufferType bufferType,
2661 const std::string &shaderPrefix, const bool specializedAccess,
2662 const PipelineType pipelineType, const DispatchCall dispatchCall)
2663 : Operation(specializedAccess)
2664 , m_context(context)
2665 , m_inResource(inResource)
2666 , m_outResource(outResource)
2667 , m_stage(stage)
2668 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2669 , m_bufferType(bufferType)
2670 , m_dispatchCall(dispatchCall)
2671 {
2672 requireFeaturesForSSBOAccess(m_context, m_stage);
2673
2674 const DeviceInterface &vk = m_context.getDeviceInterface();
2675 const VkDevice device = m_context.getDevice();
2676
2677 // Prepare descriptors
2678 {
2679 const VkDescriptorType bufferDescriptorType =
2680 (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER :
2681 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
2682
2683 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2684 .addSingleBinding(bufferDescriptorType, m_stage)
2685 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
2686 .build(vk, device);
2687
2688 m_descriptorPool = DescriptorPoolBuilder()
2689 .addType(bufferDescriptorType)
2690 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
2691 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2692
2693 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2694
2695 const VkDescriptorBufferInfo inBufferInfo = makeDescriptorBufferInfo(
2696 m_inResource.getBuffer().handle, m_inResource.getBuffer().offset, m_inResource.getBuffer().size);
2697 const VkDescriptorBufferInfo outBufferInfo = makeDescriptorBufferInfo(
2698 m_outResource.getBuffer().handle, m_outResource.getBuffer().offset, m_outResource.getBuffer().size);
2699
2700 DescriptorSetUpdateBuilder()
2701 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
2702 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &inBufferInfo)
2703 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
2704 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outBufferInfo)
2705 .update(vk, device);
2706 }
2707
2708 // Create pipeline
2709 m_pipeline =
2710 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
2711 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
2712 de::MovePtr<Pipeline>(
2713 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2714 }
2715
2716 void recordCommands(const VkCommandBuffer cmdBuffer)
2717 {
2718 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2719 }
2720
2721 SyncInfo getInSyncInfo(void) const
2722 {
2723 VkAccessFlags2KHR accessFlags =
2724 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR : VK_ACCESS_2_SHADER_READ_BIT_KHR);
2725 const SyncInfo syncInfo = {
2726 m_pipelineStage, // VkPipelineStageFlags stageMask;
2727 accessFlags, // VkAccessFlags accessMask;
2728 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2729 };
2730 return syncInfo;
2731 }
2732
2733 SyncInfo getOutSyncInfo(void) const
2734 {
2735 VkAccessFlags2KHR accessFlags =
2736 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR : VK_ACCESS_2_SHADER_WRITE_BIT_KHR);
2737 const SyncInfo syncInfo = {
2738 m_pipelineStage, // VkPipelineStageFlags stageMask;
2739 accessFlags, // VkAccessFlags accessMask;
2740 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2741 };
2742 return syncInfo;
2743 }
2744
2745 Data getData(void) const
2746 {
2747 Data data = {0, nullptr};
2748 return data;
2749 }
2750
2751 void setData(const Data &)
2752 {
2753 DE_ASSERT(0);
2754 }
2755
2756 vk::VkShaderStageFlagBits getShaderStage(void)
2757 {
2758 return m_stage;
2759 }
2760
2761 private:
2762 OperationContext &m_context;
2763 Resource &m_inResource;
2764 Resource &m_outResource;
2765 const VkShaderStageFlagBits m_stage;
2766 const VkPipelineStageFlags m_pipelineStage;
2767 const BufferType m_bufferType;
2768 const DispatchCall m_dispatchCall;
2769 Move<VkDescriptorPool> m_descriptorPool;
2770 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2771 Move<VkDescriptorSet> m_descriptorSet;
2772 de::MovePtr<Pipeline> m_pipeline;
2773 };
2774
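//! Support class for shader-based buffer-to-buffer copies (UBO or SSBO input, SSBO output).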
2775 class CopyBufferSupport : public OperationSupport
2776 {
2777 public:
2778 CopyBufferSupport(const ResourceDescription &resourceDesc, const BufferType bufferType,
2779 const bool specializedAccess, const VkShaderStageFlagBits stage,
2780 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2781 : OperationSupport(specializedAccess)
2782 , m_resourceDesc(resourceDesc)
2783 , m_bufferType(bufferType)
2784 , m_stage(stage)
2785 , m_shaderPrefix(std::string("copy_") + getShaderStageName(stage) +
2786 (m_bufferType == BUFFER_TYPE_UNIFORM ? "_ubo_" : "_ssbo_") + getResourceName(m_resourceDesc) +
2787 "_")
2788 , m_dispatchCall(dispatchCall)
2789 {
2790 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2791 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE);
2792 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2793 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2794
2795 assertValidShaderStage(m_stage);
2796 }
2797
2798 void initPrograms(SourceCollections &programCollection) const
2799 {
2800 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2801
2802 const std::string bufferTypeStr = (m_bufferType == BUFFER_TYPE_UNIFORM ? "uniform" : "buffer");
2803 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() /
2804 sizeof(tcu::UVec4)); // std140 array elements are aligned to a multiple of 16 bytes
2805
2806 std::ostringstream declSrc;
2807 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2808 << " uvec4 data[" << numVecElements << "];\n"
2809 << "} b_in;\n"
2810 << "\n"
2811 << "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2812 << " uvec4 data[" << numVecElements << "];\n"
2813 << "} b_out;\n";
2814
2815 std::ostringstream copySrc;
2816 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2817 << " b_out.data[i] = b_in.data[i];\n"
2818 << " }\n";
2819
2820 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2821 }
2822
2823 uint32_t getInResourceUsageFlags(void) const
2824 {
2825 return (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT :
2826 VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
2827 }
2828
2829 uint32_t getOutResourceUsageFlags(void) const
2830 {
2831 return VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
2832 }
2833
2834 VkQueueFlags getQueueFlags(const OperationContext &context) const
2835 {
2836 DE_UNREF(context);
2837 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2838 }
2839
2840 de::MovePtr<Operation> build(OperationContext &, Resource &) const
2841 {
2842 DE_ASSERT(0);
2843 return de::MovePtr<Operation>();
2844 }
2845
2846 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
2847 {
2848 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2849 return de::MovePtr<Operation>(
2850 new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix,
2851 m_specializedAccess, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2852 else
2853 return de::MovePtr<Operation>(
2854 new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix,
2855 m_specializedAccess, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2856 }
2857
2858 vk::VkShaderStageFlagBits getShaderStage(void)
2859 {
2860 return m_stage;
2861 }
2862
2863 private:
2864 const ResourceDescription m_resourceDesc;
2865 const BufferType m_bufferType;
2866 const VkShaderStageFlagBits m_stage;
2867 const std::string m_shaderPrefix;
2868 const DispatchCall m_dispatchCall;
2869 };
2870
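//! Copies the input image to the output image from a single shader stage, reading the source with
//! imageLoad() and writing the destination with imageStore() through storage image descriptors.
//! Used by CopyImageSupport below; the copy runs in either a graphics or a compute pipeline.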
2871 class CopyImageImplementation : public Operation
2872 {
2873 public:
2874 CopyImageImplementation(OperationContext &context, Resource &inResource, Resource &outResource,
2875 const VkShaderStageFlagBits stage, const std::string &shaderPrefix,
2876 const bool specializedAccess, const PipelineType pipelineType,
2877 const DispatchCall dispatchCall)
2878 : Operation(specializedAccess)
2879 , m_context(context)
2880 , m_inResource(inResource)
2881 , m_outResource(outResource)
2882 , m_stage(stage)
2883 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2884 , m_dispatchCall(dispatchCall)
2885 {
2886 const DeviceInterface &vk = m_context.getDeviceInterface();
2887 const InstanceInterface &vki = m_context.getInstanceInterface();
2888 const VkDevice device = m_context.getDevice();
2889 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2890
2891 // Image stores are always required, in either access mode.
2892 requireFeaturesForSSBOAccess(m_context, m_stage);
2893
2894 // Some storage image formats may not be supported
2895 const auto &imgResource = m_inResource.getImage();
2896 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
2897
2898 // Image resources
2899 {
2900 const VkImageViewType viewType = getImageViewType(m_inResource.getImage().imageType);
2901
2902 m_srcImageView = makeImageView(vk, device, m_inResource.getImage().handle, viewType,
2903 m_inResource.getImage().format, m_inResource.getImage().subresourceRange);
2904 m_dstImageView = makeImageView(vk, device, m_outResource.getImage().handle, viewType,
2905 m_outResource.getImage().format, m_outResource.getImage().subresourceRange);
2906 }
2907
2908 // Prepare descriptors
2909 {
2910 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2911 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2912 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2913 .build(vk, device);
2914
2915 m_descriptorPool = DescriptorPoolBuilder()
2916 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2917 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2918 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2919
2920 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2921
2922 const VkDescriptorImageInfo srcImageInfo =
2923 makeDescriptorImageInfo(VK_NULL_HANDLE, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
2924 const VkDescriptorImageInfo dstImageInfo =
2925 makeDescriptorImageInfo(VK_NULL_HANDLE, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
2926
2927 DescriptorSetUpdateBuilder()
2928 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
2929 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
2930 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
2931 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
2932 .update(vk, device);
2933 }
2934
2935 // Create pipeline
2936 m_pipeline =
2937 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
2938 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
2939 de::MovePtr<Pipeline>(
2940 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2941 }
2942
2943 void recordCommands(const VkCommandBuffer cmdBuffer)
2944 {
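        // Transition the destination image to the GENERAL layout required for shader image stores.
        // The source image is expected to already be in GENERAL layout, with its data made visible
        // by the synchronization mechanism under test (see getInSyncInfo()).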
2945 {
2946 const DeviceInterface &vk = m_context.getDeviceInterface();
2947 SynchronizationWrapperPtr synchronizationWrapper =
2948 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
2949
2950 const VkImageMemoryBarrier2KHR imageMemoryBarriers2 = makeImageMemoryBarrier2(
2951 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2952 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2953 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2954 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2955 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2956 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2957 m_outResource.getImage().handle, // VkImage image
2958 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2959 );
2960 VkDependencyInfoKHR dependencyInfo{
2961 VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, // VkStructureType sType
2962 nullptr, // const void* pNext
2963 VK_DEPENDENCY_BY_REGION_BIT, // VkDependencyFlags dependencyFlags
2964 0u, // uint32_t memoryBarrierCount
2965 nullptr, // const VkMemoryBarrier2KHR* pMemoryBarriers
2966 0u, // uint32_t bufferMemoryBarrierCount
2967 nullptr, // const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers
2968 1, // uint32_t imageMemoryBarrierCount
2969 &imageMemoryBarriers2 // const VkImageMemoryBarrier2KHR* pImageMemoryBarriers
2970 };
2971 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2972 }
2973
2974 // Execute shaders
2975 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2976 }
2977
2978 SyncInfo getInSyncInfo(void) const
2979 {
2980 VkAccessFlags2KHR accessFlags =
2981 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR : VK_ACCESS_2_SHADER_READ_BIT_KHR);
2982 const SyncInfo syncInfo = {
2983 m_pipelineStage, // VkPipelineStageFlags stageMask;
2984 accessFlags, // VkAccessFlags accessMask;
2985 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2986 };
2987 return syncInfo;
2988 }
2989
2990 SyncInfo getOutSyncInfo(void) const
2991 {
2992 VkAccessFlags2KHR accessFlags =
2993 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR : VK_ACCESS_2_SHADER_WRITE_BIT_KHR);
2994 const SyncInfo syncInfo = {
2995 m_pipelineStage, // VkPipelineStageFlags stageMask;
2996 accessFlags, // VkAccessFlags accessMask;
2997 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2998 };
2999 return syncInfo;
3000 }
3001
3002 Data getData(void) const
3003 {
3004 Data data = {0, nullptr};
3005 return data;
3006 }
3007
3008 void setData(const Data &)
3009 {
3010 DE_ASSERT(0);
3011 }
3012
3013 vk::VkShaderStageFlagBits getShaderStage(void)
3014 {
3015 return m_stage;
3016 }
3017
3018 private:
3019 OperationContext &m_context;
3020 Resource &m_inResource;
3021 Resource &m_outResource;
3022 const VkShaderStageFlagBits m_stage;
3023 const VkPipelineStageFlags m_pipelineStage;
3024 const DispatchCall m_dispatchCall;
3025 Move<VkImageView> m_srcImageView;
3026 Move<VkImageView> m_dstImageView;
3027 Move<VkDescriptorPool> m_descriptorPool;
3028 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
3029 Move<VkDescriptorSet> m_descriptorSet;
3030 de::MovePtr<Pipeline> m_pipeline;
3031 };
3032
3033 class CopyImageSupport : public OperationSupport
3034 {
3035 public:
3036 CopyImageSupport(const ResourceDescription &resourceDesc, const VkShaderStageFlagBits stage,
3037 const bool specializedAccess, const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
3038 : OperationSupport(specializedAccess)
3039 , m_resourceDesc(resourceDesc)
3040 , m_stage(stage)
3041 , m_shaderPrefix(std::string("copy_image_") + getShaderStageName(stage) + "_" +
3042 getResourceName(m_resourceDesc) + "_")
3043 , m_dispatchCall(dispatchCall)
3044 {
3045 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
3046 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
3047
3048 assertValidShaderStage(m_stage);
3049 }
3050
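    // Generates the image-copy shader body. For a 2D image the emitted GLSL is a nested loop over
    // all texels, roughly:
    //
    //     for (int y = 0; y < H; ++y)
    //         for (int x = 0; x < W; ++x)
    //             imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));
    //
    // with analogous 1D and 3D variants, so a single invocation copies the whole image.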
3051 void initPrograms(SourceCollections &programCollection) const
3052 {
3053 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
3054 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
3055
3056 std::ostringstream declSrc;
3057 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
3058 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType
3059 << " dstImg;\n";
3060
3061 std::ostringstream mainSrc;
3062 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
3063 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
3064 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
3065 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
3066 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
3067 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
3068 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
3069 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
3070 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
3071 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
3072 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
3073 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
3074 else
3075 DE_ASSERT(0);
3076
3077 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
3078 }
3079
3080 uint32_t getInResourceUsageFlags(void) const
3081 {
3082 return VK_IMAGE_USAGE_STORAGE_BIT;
3083 }
3084
3085 uint32_t getOutResourceUsageFlags(void) const
3086 {
3087 return VK_IMAGE_USAGE_STORAGE_BIT;
3088 }
3089
3090 VkQueueFlags getQueueFlags(const OperationContext &context) const
3091 {
3092 DE_UNREF(context);
3093 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
3094 }
3095
3096 de::MovePtr<Operation> build(OperationContext &, Resource &) const
3097 {
3098 DE_ASSERT(0);
3099 return de::MovePtr<Operation>();
3100 }
3101
3102 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
3103 {
3104 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
3105 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage,
3106 m_shaderPrefix, m_specializedAccess,
3107 PIPELINE_TYPE_COMPUTE, m_dispatchCall));
3108 else
3109 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage,
3110 m_shaderPrefix, m_specializedAccess,
3111 PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
3112 }
3113
3114 vk::VkShaderStageFlagBits getShaderStage(void)
3115 {
3116 return m_stage;
3117 }
3118
3119 private:
3120 const ResourceDescription m_resourceDesc;
3121 const VkShaderStageFlagBits m_stage;
3122 const std::string m_shaderPrefix;
3123 const DispatchCall m_dispatchCall;
3124 };
3125
3126 class MSImageImplementation : public Operation
3127 {
3128 public:
3129 MSImageImplementation(OperationContext &context, Resource &resource)
3130 : m_context(context)
3131 , m_resource(resource)
3132 , m_hostBufferSizeBytes(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3133 {
3134 const DeviceInterface &vk = m_context.getDeviceInterface();
3135 const InstanceInterface &vki = m_context.getInstanceInterface();
3136 const VkDevice device = m_context.getDevice();
3137 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
3138 const VkPhysicalDeviceFeatures features = getPhysicalDeviceFeatures(vki, physDevice);
3139 Allocator &allocator = m_context.getAllocator();
3140
3141 const auto &imgResource = m_resource.getImage();
3142 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
3143 if (!features.shaderStorageImageMultisample)
3144 TCU_THROW(NotSupportedError, "Using multisample images as storage is not supported");
3145
3146 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(
3147 m_hostBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
3148 m_hostBuffer =
3149 de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::HostVisible));
3150 const Allocation &alloc = m_hostBuffer->getAllocation();
3151 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
3152 flushAlloc(vk, device, alloc);
3153
3154 const ImageResource &image = m_resource.getImage();
3155 const VkImageViewType viewType = getImageViewType(image.imageType);
3156 m_imageView = makeImageView(vk, device, image.handle, viewType, image.format, image.subresourceRange);
3157
3158 // Prepare descriptors
3159 {
3160 m_descriptorSetLayout =
3161 DescriptorSetLayoutBuilder()
3162 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
3163 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
3164 .build(vk, device);
3165
3166 m_descriptorPool = DescriptorPoolBuilder()
3167 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
3168 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
3169 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3170
3171 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
3172
3173 const VkDescriptorBufferInfo bufferInfo =
3174 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
3175 const VkDescriptorImageInfo imageInfo =
3176 makeDescriptorImageInfo(VK_NULL_HANDLE, *m_imageView, VK_IMAGE_LAYOUT_GENERAL);
3177
3178 DescriptorSetUpdateBuilder()
3179 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
3180 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
3181 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
3182 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageInfo)
3183 .update(vk, device);
3184 }
3185
3186 // Create pipeline
3187 const Unique<VkShaderModule> shaderModule(
3188 createShaderModule(vk, device, context.getBinaryCollection().get("comp"), (VkShaderModuleCreateFlags)0));
3189 m_pipelineLayout = makePipelineLayout(vk, device, *m_descriptorSetLayout);
3190 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, nullptr,
3191 context.getPipelineCacheData(), context.getResourceInterface());
3192 }
3193
3194 void recordCommands(const VkCommandBuffer cmdBuffer)
3195 {
3196 const DeviceInterface &vk = m_context.getDeviceInterface();
3197 SynchronizationWrapperPtr synchronizationWrapper =
3198 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3199
3200 // change image layout
3201 {
3202 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3203 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3204 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3205 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3206 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3207 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3208 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
3209 m_resource.getImage().handle, // VkImage image
3210 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3211 );
3212 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
3213 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3214 }
3215
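        // One invocation is dispatched per texel (width x height work groups with local size 1),
        // matching the indexing used by the "comp" shader generated in MSImageSupport below.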
3216 // execute shader
3217 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
3218 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u,
3219 &*m_descriptorSet, 0u, nullptr);
3220 vk.cmdDispatch(cmdBuffer, m_resource.getImage().extent.width, m_resource.getImage().extent.height, 1u);
3221 }
3222
3223 SyncInfo getInSyncInfo(void) const
3224 {
3225 DE_ASSERT(false);
3226 return emptySyncInfo;
3227 }
3228
3229 SyncInfo getOutSyncInfo(void) const
3230 {
3231 return {
3232 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags stageMask;
3233 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3234 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
3235 };
3236 }
3237
3238 Data getData(void) const
3239 {
3240 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
3241 }
3242
3243 void setData(const Data &)
3244 {
3245 DE_ASSERT(false);
3246 }
3247
3248 private:
3249 OperationContext &m_context;
3250 Resource &m_resource;
3251 Move<VkImageView> m_imageView;
3252
3253 const VkDeviceSize m_hostBufferSizeBytes;
3254 de::MovePtr<Buffer> m_hostBuffer;
3255
3256 Move<VkDescriptorPool> m_descriptorPool;
3257 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
3258 Move<VkDescriptorSet> m_descriptorSet;
3259 Move<VkPipelineLayout> m_pipelineLayout;
3260 Move<VkPipeline> m_pipeline;
3261 };
3262
3263 class MSImageSupport : public OperationSupport
3264 {
3265 public:
3266 MSImageSupport(const ResourceDescription &resourceDesc) : m_resourceDesc(resourceDesc)
3267 {
3268 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
3269 }
3270
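    // Builds a compute shader that reads one uint per texel from the host-filled SSBO, indexed as
    // data[gy * width + gx], and stores it into every sample of an r32ui multisample storage image.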
3271 void initPrograms(SourceCollections &programCollection) const
3272 {
3273 std::stringstream source;
3274 source << "#version 440\n"
3275 "\n"
3276 "layout(local_size_x = 1) in;\n"
3277 "layout(set = 0, binding = 0, std430) readonly buffer Input {\n"
3278 " uint data[];\n"
3279 "} inData;\n"
3280 "layout(set = 0, binding = 1, r32ui) writeonly uniform uimage2DMS msImage;\n"
3281 "\n"
3282 "void main (void)\n"
3283 "{\n"
3284 " int gx = int(gl_GlobalInvocationID.x);\n"
3285 " int gy = int(gl_GlobalInvocationID.y);\n"
3286 " uint value = inData.data[gy * "
3287 << m_resourceDesc.size.x()
3288 << " + gx];\n"
3289 " for (int sampleNdx = 0; sampleNdx < "
3290 << m_resourceDesc.imageSamples
3291 << "; ++sampleNdx)\n"
3292 " imageStore(msImage, ivec2(gx, gy), sampleNdx, uvec4(value));\n"
3293 "}\n";
3294 programCollection.glslSources.add("comp") << glu::ComputeSource(source.str().c_str());
3295 }
3296
3297 uint32_t getInResourceUsageFlags(void) const
3298 {
3299 return 0;
3300 }
3301
3302 uint32_t getOutResourceUsageFlags(void) const
3303 {
3304 return VK_IMAGE_USAGE_STORAGE_BIT;
3305 }
3306
3307 VkQueueFlags getQueueFlags(const OperationContext &) const
3308 {
3309 return VK_QUEUE_COMPUTE_BIT;
3310 }
3311
3312 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
3313 {
3314 return de::MovePtr<Operation>(new MSImageImplementation(context, resource));
3315 }
3316
3317 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
3318 {
3319 DE_ASSERT(0);
3320 return de::MovePtr<Operation>();
3321 }
3322
3323 private:
3324 const ResourceDescription m_resourceDesc;
3325 };
3326
3327 } // namespace ShaderAccess
3328
3329 namespace CopyBufferToImage
3330 {
3331
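//! Writes an image resource: fills a host-visible staging buffer with a test pattern and copies it
//! into the image with vkCmdCopyBufferToImage after transitioning the image to TRANSFER_DST_OPTIMAL.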
3332 class WriteImplementation : public Operation
3333 {
3334 public:
3335 WriteImplementation(OperationContext &context, Resource &resource)
3336 : m_context(context)
3337 , m_resource(resource)
3338 , m_bufferSize(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3339 {
3340 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3341
3342 const DeviceInterface &vk = m_context.getDeviceInterface();
3343 const VkDevice device = m_context.getDevice();
3344 Allocator &allocator = m_context.getAllocator();
3345
3346 m_hostBuffer = de::MovePtr<Buffer>(
3347 new Buffer(vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
3348 MemoryRequirement::HostVisible));
3349
3350 const Allocation &alloc = m_hostBuffer->getAllocation();
3351 fillPattern(alloc.getHostPtr(), m_bufferSize);
3352 flushAlloc(vk, device, alloc);
3353 }
3354
3355 void recordCommands(const VkCommandBuffer cmdBuffer)
3356 {
3357 const DeviceInterface &vk = m_context.getDeviceInterface();
3358 const VkBufferImageCopy copyRegion =
3359 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3360 SynchronizationWrapperPtr synchronizationWrapper =
3361 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3362
3363 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3364 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3365 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3366 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3367 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3368 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3369 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3370 m_resource.getImage().handle, // VkImage image
3371 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3372 );
3373 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
3374 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3375
3376 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, m_resource.getImage().handle,
3377 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3378 }
3379
3380 SyncInfo getInSyncInfo(void) const
3381 {
3382 return emptySyncInfo;
3383 }
3384
3385 SyncInfo getOutSyncInfo(void) const
3386 {
3387 const SyncInfo syncInfo = {
3388 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3389 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3390 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3391 };
3392 return syncInfo;
3393 }
3394
3395 Data getData(void) const
3396 {
3397 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3398 }
3399
3400 void setData(const Data &data)
3401 {
3402 setHostBufferData(m_context, *m_hostBuffer, data);
3403 }
3404
3405 private:
3406 OperationContext &m_context;
3407 Resource &m_resource;
3408 de::MovePtr<Buffer> m_hostBuffer;
3409 const VkDeviceSize m_bufferSize;
3410 };
3411
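//! Reads a buffer resource: the buffer contents are copied into a temporary 2D color image and then
//! back into a host-visible buffer so they can be verified on the host. The buffer size must be a
//! multiple of the R8G8B8A8 texel size; any unused space at the end of the image is ignored.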
3412 class ReadImplementation : public Operation
3413 {
3414 public:
3415 ReadImplementation(OperationContext &context, Resource &resource)
3416 : m_context(context)
3417 , m_resource(resource)
3418 , m_subresourceRange(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3419 , m_subresourceLayers(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3420 {
3421 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3422
3423 const DeviceInterface &vk = m_context.getDeviceInterface();
3424 const VkDevice device = m_context.getDevice();
3425 Allocator &allocator = m_context.getAllocator();
3426 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3427 const uint32_t pixelSize = tcu::getPixelSize(mapVkFormat(format));
3428
3429 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3430 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size,
3431 pixelSize); // there may be some unused space at the end
3432
3433 // Copy destination image.
3434 m_image = de::MovePtr<Image>(
3435 new Image(vk, device, allocator,
3436 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format,
3437 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
3438 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
3439 MemoryRequirement::Any));
3440
3441 // Image data will be copied here, so it can be read on the host.
3442 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3443 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
3444 MemoryRequirement::HostVisible));
3445 }
3446
3447 void recordCommands(const VkCommandBuffer cmdBuffer)
3448 {
3449 const DeviceInterface &vk = m_context.getDeviceInterface();
3450 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3451 SynchronizationWrapperPtr synchronizationWrapper =
3452 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3453
3454 // Resource -> Image
3455 {
3456 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3457 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3458 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3459 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3460 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3461 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3462 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3463 **m_image, // VkImage image
3464 m_subresourceRange // VkImageSubresourceRange subresourceRange
3465 );
3466 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
3467 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3468
3469 vk.cmdCopyBufferToImage(cmdBuffer, m_resource.getBuffer().handle, **m_image,
3470 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3471 }
3472 // Image -> Host buffer
3473 {
3474 const VkImageMemoryBarrier2KHR imageLayoutBarrier2 = makeImageMemoryBarrier2(
3475 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3476 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3477 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3478 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3479 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3480 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3481 **m_image, // VkImage image
3482 m_subresourceRange // VkImageSubresourceRange subresourceRange
3483 );
3484 VkDependencyInfoKHR layoutDependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageLayoutBarrier2);
3485 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &layoutDependencyInfo);
3486
3487 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u,
3488 &copyRegion);
3489
3490 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3491 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3492 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3493 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3494 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3495 **m_hostBuffer, // VkBuffer buffer
3496 0u, // VkDeviceSize offset
3497 m_resource.getBuffer().size // VkDeviceSize size
3498 );
3499 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
3500 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
3501 }
3502 }
3503
3504 SyncInfo getInSyncInfo(void) const
3505 {
3506 const SyncInfo syncInfo = {
3507 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3508 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3509 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3510 };
3511 return syncInfo;
3512 }
3513
3514 SyncInfo getOutSyncInfo(void) const
3515 {
3516 return emptySyncInfo;
3517 }
3518
3519 Data getData(void) const
3520 {
3521 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3522 }
3523
3524 void setData(const Data &data)
3525 {
3526 setHostBufferData(m_context, *m_hostBuffer, data);
3527 }
3528
3529 private:
3530 OperationContext &m_context;
3531 Resource &m_resource;
3532 const VkImageSubresourceRange m_subresourceRange;
3533 const VkImageSubresourceLayers m_subresourceLayers;
3534 de::MovePtr<Buffer> m_hostBuffer;
3535 de::MovePtr<Image> m_image;
3536 VkExtent3D m_imageExtent;
3537 };
3538
3539 class Support : public OperationSupport
3540 {
3541 public:
3542 Support(const ResourceDescription &resourceDesc, const AccessMode mode)
3543 : m_mode(mode)
3544 , m_resourceType(resourceDesc.type)
3545 , m_requiredQueueFlags(resourceDesc.type == RESOURCE_TYPE_IMAGE &&
3546 isDepthStencilFormat(resourceDesc.imageFormat) ?
3547 VK_QUEUE_GRAPHICS_BIT :
3548 VK_QUEUE_TRANSFER_BIT)
3549 {
3550 // From spec:
3551 // Because depth or stencil aspect buffer to image copies may require format conversions on some implementations,
3552 // they are not supported on queues that do not support graphics.
3553
3554 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
3555 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_BUFFER);
3556 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_IMAGE);
3557 }
3558
3559 uint32_t getInResourceUsageFlags(void) const
3560 {
3561 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3562 return m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0;
3563 else
3564 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
3565 }
3566
3567 uint32_t getOutResourceUsageFlags(void) const
3568 {
3569 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3570 return m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0;
3571 else
3572 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
3573 }
3574
3575 VkQueueFlags getQueueFlags(const OperationContext &context) const
3576 {
3577 DE_UNREF(context);
3578 return m_requiredQueueFlags;
3579 }
3580
3581 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
3582 {
3583 if (m_mode == ACCESS_MODE_READ)
3584 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
3585 else
3586 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
3587 }
3588
3589 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
3590 {
3591 DE_ASSERT(0);
3592 return de::MovePtr<Operation>();
3593 }
3594
3595 private:
3596 const AccessMode m_mode;
3597 const enum ResourceType m_resourceType;
3598 const VkQueueFlags m_requiredQueueFlags;
3599 };
3600
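//! Copies the input buffer resource directly into the output image resource with a single
//! vkCmdCopyBufferToImage, transitioning the destination image to TRANSFER_DST_OPTIMAL first.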
3601 class CopyImplementation : public Operation
3602 {
3603 public:
3604 CopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
3605 : m_context(context)
3606 , m_inResource(inResource)
3607 , m_outResource(outResource)
3608 {
3609 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_BUFFER);
3610 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
3611 }
3612
3613 void recordCommands(const VkCommandBuffer cmdBuffer)
3614 {
3615 const DeviceInterface &vk = m_context.getDeviceInterface();
3616 const VkBufferImageCopy copyRegion =
3617 makeBufferImageCopy(m_outResource.getImage().extent, m_outResource.getImage().subresourceLayers);
3618 SynchronizationWrapperPtr synchronizationWrapper =
3619 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3620
3621 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3622 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3623 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3624 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3625 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3626 m_inResource.getBuffer().handle, // VkBuffer buffer
3627 0u, // VkDeviceSize offset
3628 m_inResource.getBuffer().size // VkDeviceSize size
3629 );
3630 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3631 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3632 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3633 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3634 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3635 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3636 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3637 m_outResource.getImage().handle, // VkImage image
3638 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3639 );
3640 VkDependencyInfoKHR dependencyInfo =
3641 makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3642 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3643
3644 vk.cmdCopyBufferToImage(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getImage().handle,
3645 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3646 }
3647
3648 SyncInfo getInSyncInfo(void) const
3649 {
3650 const SyncInfo syncInfo = {
3651 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3652 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3653 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3654 };
3655 return syncInfo;
3656 }
3657
3658 SyncInfo getOutSyncInfo(void) const
3659 {
3660 const SyncInfo syncInfo = {
3661 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3662 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3663 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3664 };
3665 return syncInfo;
3666 }
3667
3668 Data getData(void) const
3669 {
3670 Data data = {0, nullptr};
3671 return data;
3672 }
3673
3674 void setData(const Data &)
3675 {
3676 DE_ASSERT(0);
3677 }
3678
3679 private:
3680 OperationContext &m_context;
3681 Resource &m_inResource;
3682 Resource &m_outResource;
3683 };
3684
3685 class CopySupport : public OperationSupport
3686 {
3687 public:
3688 CopySupport(const ResourceDescription &resourceDesc)
3689 : m_resourceType(resourceDesc.type)
3690 , m_requiredQueueFlags(resourceDesc.type == RESOURCE_TYPE_IMAGE &&
3691 isDepthStencilFormat(resourceDesc.imageFormat) ?
3692 VK_QUEUE_GRAPHICS_BIT :
3693 VK_QUEUE_TRANSFER_BIT)
3694 {
3695 }
3696
3697 uint32_t getInResourceUsageFlags(void) const
3698 {
3699 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3700 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3701 else
3702 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3703 }
3704
3705 uint32_t getOutResourceUsageFlags(void) const
3706 {
3707 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3708 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3709 else
3710 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3711 }
3712
3713 VkQueueFlags getQueueFlags(const OperationContext &context) const
3714 {
3715 DE_UNREF(context);
3716 return m_requiredQueueFlags;
3717 }
3718
3719 de::MovePtr<Operation> build(OperationContext &, Resource &) const
3720 {
3721 DE_ASSERT(0);
3722 return de::MovePtr<Operation>();
3723 }
3724
3725 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
3726 {
3727 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
3728 }
3729
3730 private:
3731 const enum ResourceType m_resourceType;
3732 const VkQueueFlags m_requiredQueueFlags;
3733 };
3734
3735 } // namespace CopyBufferToImage
3736
3737 namespace CopyImageToBuffer
3738 {
3739
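//! Writes a buffer resource: a test pattern is uploaded into a staging image and then copied into
//! the buffer with vkCmdCopyImageToBuffer (host buffer -> staging image -> buffer resource).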
3740 class WriteImplementation : public Operation
3741 {
3742 public:
3743 WriteImplementation(OperationContext &context, Resource &resource)
3744 : m_context(context)
3745 , m_resource(resource)
3746 , m_subresourceRange(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3747 , m_subresourceLayers(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3748 {
3749 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3750
3751 const DeviceInterface &vk = m_context.getDeviceInterface();
3752 const VkDevice device = m_context.getDevice();
3753 Allocator &allocator = m_context.getAllocator();
3754 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3755 const uint32_t pixelSize = tcu::getPixelSize(mapVkFormat(format));
3756
3757 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3758 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize);
3759
3760 // Source data staging buffer
3761 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3762 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
3763 MemoryRequirement::HostVisible));
3764
3765 const Allocation &alloc = m_hostBuffer->getAllocation();
3766 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
3767 flushAlloc(vk, device, alloc);
3768
3769 // Source data image
3770 m_image = de::MovePtr<Image>(
3771 new Image(vk, device, allocator,
3772 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format,
3773 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
3774 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
3775 MemoryRequirement::Any));
3776 }
3777
3778 void recordCommands(const VkCommandBuffer cmdBuffer)
3779 {
3780 const DeviceInterface &vk = m_context.getDeviceInterface();
3781 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3782 SynchronizationWrapperPtr synchronizationWrapper =
3783 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3784
3785 // Host buffer -> Image
3786 {
3787 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3788 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3789 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3790 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3791 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3792 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3793 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3794 **m_image, // VkImage image
3795 m_subresourceRange // VkImageSubresourceRange subresourceRange
3796 );
3797 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
3798 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3799
3800 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
3801 &copyRegion);
3802 }
3803 // Image -> Resource
3804 {
3805 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3806 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3807 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3808 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3809 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3810 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3811 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3812 **m_image, // VkImage image
3813 m_subresourceRange // VkImageSubresourceRange subresourceRange
3814 );
3815 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
3816 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3817
3818 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3819 m_resource.getBuffer().handle, 1u, &copyRegion);
3820 }
3821 }
3822
3823 SyncInfo getInSyncInfo(void) const
3824 {
3825 return emptySyncInfo;
3826 }
3827
3828 SyncInfo getOutSyncInfo(void) const
3829 {
3830 const SyncInfo syncInfo = {
3831 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3832 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3833 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3834 };
3835 return syncInfo;
3836 }
3837
3838 Data getData(void) const
3839 {
3840 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3841 }
3842
3843 void setData(const Data &data)
3844 {
3845 setHostBufferData(m_context, *m_hostBuffer, data);
3846 }
3847
3848 private:
3849 OperationContext &m_context;
3850 Resource &m_resource;
3851 const VkImageSubresourceRange m_subresourceRange;
3852 const VkImageSubresourceLayers m_subresourceLayers;
3853 de::MovePtr<Buffer> m_hostBuffer;
3854 de::MovePtr<Image> m_image;
3855 VkExtent3D m_imageExtent;
3856 };
3857
3858 class ReadImplementation : public Operation
3859 {
3860 public:
3861 ReadImplementation(OperationContext &context, Resource &resource)
3862 : m_context(context)
3863 , m_resource(resource)
3864 , m_bufferSize(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3865 {
3866 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3867
3868 const DeviceInterface &vk = m_context.getDeviceInterface();
3869 const VkDevice device = m_context.getDevice();
3870 Allocator &allocator = m_context.getAllocator();
3871
3872 m_hostBuffer = de::MovePtr<Buffer>(
3873 new Buffer(vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
3874 MemoryRequirement::HostVisible));
3875
3876 const Allocation &alloc = m_hostBuffer->getAllocation();
3877 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
3878 flushAlloc(vk, device, alloc);
3879 }
3880
3881 void recordCommands(const VkCommandBuffer cmdBuffer)
3882 {
3883 const DeviceInterface &vk = m_context.getDeviceInterface();
3884 const VkBufferImageCopy copyRegion =
3885 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3886 SynchronizationWrapperPtr synchronizationWrapper =
3887 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3888
3889 vk.cmdCopyImageToBuffer(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3890 **m_hostBuffer, 1u, &copyRegion);
3891
3892 // Insert a barrier so data written by the transfer is available to the host
3893 {
3894 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3895 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3896 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3897 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3898 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3899 **m_hostBuffer, // VkBuffer buffer
3900 0u, // VkDeviceSize offset
3901 VK_WHOLE_SIZE // VkDeviceSize size
3902 );
3903 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
3904 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3905 }
3906 }
3907
3908 SyncInfo getInSyncInfo(void) const
3909 {
3910 const SyncInfo syncInfo = {
3911 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3912 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3913 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3914 };
3915 return syncInfo;
3916 }
3917
3918 SyncInfo getOutSyncInfo(void) const
3919 {
3920 return emptySyncInfo;
3921 }
3922
3923 Data getData(void) const
3924 {
3925 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3926 }
3927
3928 void setData(const Data &)
3929 {
3930 DE_ASSERT(0);
3931 }
3932
3933 private:
3934 OperationContext &m_context;
3935 Resource &m_resource;
3936 de::MovePtr<Buffer> m_hostBuffer;
3937 const VkDeviceSize m_bufferSize;
3938 };
3939
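//! Copies the input image resource directly into the output buffer resource with a single
//! vkCmdCopyImageToBuffer.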
3940 class CopyImplementation : public Operation
3941 {
3942 public:
3943 CopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
3944 : m_context(context)
3945 , m_inResource(inResource)
3946 , m_outResource(outResource)
3947 , m_subresourceRange(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3948 , m_subresourceLayers(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3949 {
3950 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
3951 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_BUFFER);
3952 }
3953
3954 void recordCommands(const VkCommandBuffer cmdBuffer)
3955 {
3956 const DeviceInterface &vk = m_context.getDeviceInterface();
3957 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_inResource.getImage().extent, m_subresourceLayers);
3958 SynchronizationWrapperPtr synchronizationWrapper =
3959 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3960
3961 {
3962 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3963 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3964 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3965 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3966 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3967 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3968 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3969 m_inResource.getImage().handle, // VkImage image
3970 m_inResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3971 );
3972 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3973 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3974 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3975 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3976 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3977 m_outResource.getBuffer().handle, // VkBuffer buffer
3978 0u, // VkDeviceSize offset
3979 m_outResource.getBuffer().size // VkDeviceSize size
3980 );
3981 VkDependencyInfoKHR dependencyInfo =
3982 makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3983 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3984 }
3985
3986 vk.cmdCopyImageToBuffer(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3987 m_outResource.getBuffer().handle, 1u, &copyRegion);
3988 }
3989
3990 SyncInfo getInSyncInfo(void) const
3991 {
3992 const SyncInfo syncInfo = {
3993 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3994 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3995 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3996 };
3997 return syncInfo;
3998 }
3999
4000 SyncInfo getOutSyncInfo(void) const
4001 {
4002 const SyncInfo syncInfo = {
4003 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
4004 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4005 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4006 };
4007 return syncInfo;
4008 }
4009
4010 Data getData(void) const
4011 {
4012 Data data = {0, nullptr};
4013 return data;
4014 }
4015
4016 void setData(const Data &)
4017 {
4018 DE_ASSERT(0);
4019 }
4020
4021 private:
4022 OperationContext &m_context;
4023 Resource &m_inResource;
4024 Resource &m_outResource;
4025 const VkImageSubresourceRange m_subresourceRange;
4026 const VkImageSubresourceLayers m_subresourceLayers;
4027 };
4028
4029 class Support : public OperationSupport
4030 {
4031 public:
4032 Support(const ResourceDescription &resourceDesc, const AccessMode mode)
4033 : m_mode(mode)
4034 , m_requiredQueueFlags(resourceDesc.type == RESOURCE_TYPE_IMAGE &&
4035 isDepthStencilFormat(resourceDesc.imageFormat) ?
4036 VK_QUEUE_GRAPHICS_BIT :
4037 VK_QUEUE_TRANSFER_BIT)
4038 {
4039 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
4040 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_IMAGE);
4041 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_BUFFER);
4042 }
4043
4044 uint32_t getInResourceUsageFlags(void) const
4045 {
4046 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
4047 }
4048
4049 uint32_t getOutResourceUsageFlags(void) const
4050 {
4051 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
4052 }
4053
4054 VkQueueFlags getQueueFlags(const OperationContext &context) const
4055 {
4056 DE_UNREF(context);
4057 return m_requiredQueueFlags;
4058 }
4059
4060 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4061 {
4062 if (m_mode == ACCESS_MODE_READ)
4063 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
4064 else
4065 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
4066 }
4067
4068 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4069 {
4070 DE_ASSERT(0);
4071 return de::MovePtr<Operation>();
4072 }
4073
4074 private:
4075 const AccessMode m_mode;
4076 const VkQueueFlags m_requiredQueueFlags;
4077 };
4078
4079 } // namespace CopyImageToBuffer
4080
4081 namespace ClearImage
4082 {
4083
4084 enum ClearMode
4085 {
4086 CLEAR_MODE_COLOR,
4087 CLEAR_MODE_DEPTH_STENCIL,
4088 };
4089
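//! Clears the whole image with vkCmdClearColorImage or vkCmdClearDepthStencilImage and keeps a
//! host-side copy of the expected post-clear texel data (filled via clearPixelBuffer) so it can be
//! retrieved through getData() for verification.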
4090 class Implementation : public Operation
4091 {
4092 public:
4093 Implementation(OperationContext &context, Resource &resource, const ClearMode mode)
4094 : m_context(context)
4095 , m_resource(resource)
4096 , m_clearValue(makeClearValue(m_resource.getImage().format))
4097 , m_mode(mode)
4098 {
4099 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
4100 const VkExtent3D &extent = m_resource.getImage().extent;
4101 const VkFormat format = m_resource.getImage().format;
4102 const tcu::TextureFormat texFormat = mapVkFormat(format);
4103
4104 m_data.resize(static_cast<std::size_t>(size));
4105 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
4106 clearPixelBuffer(imagePixels, m_clearValue);
4107 }
4108
4109 void recordCommands(const VkCommandBuffer cmdBuffer)
4110 {
4111 const DeviceInterface &vk = m_context.getDeviceInterface();
4112 SynchronizationWrapperPtr synchronizationWrapper =
4113 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
4114
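        // With VK_KHR_synchronization2, clear commands have a dedicated pipeline stage
        // (VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR); with legacy synchronization they belong to the
        // transfer stage, so the barrier destination stage is chosen accordingly.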
4115 VkPipelineStageFlags2KHR dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
4116 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
4117 dstStageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
4118
4119 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4120 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4121 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4122 dstStageMask, // VkPipelineStageFlags2KHR dstStageMask
4123 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4124 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4125 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
4126 m_resource.getImage().handle, // VkImage image
4127 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
4128 );
4129 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
4130 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4131
4132 if (m_mode == CLEAR_MODE_COLOR)
4133 vk.cmdClearColorImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4134 &m_clearValue.color, 1u, &m_resource.getImage().subresourceRange);
4135 else
4136 vk.cmdClearDepthStencilImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4137 &m_clearValue.depthStencil, 1u, &m_resource.getImage().subresourceRange);
4138 }
4139
4140 SyncInfo getInSyncInfo(void) const
4141 {
4142 return emptySyncInfo;
4143 }
4144
4145 SyncInfo getOutSyncInfo(void) const
4146 {
4147 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
4148 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
4149 stageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
4150
4151 return {
4152 stageMask, // VkPipelineStageFlags stageMask;
4153 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4154 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
4155 };
4156 }
4157
4158 Data getData(void) const
4159 {
4160 const Data data = {
4161 m_data.size(), // std::size_t size;
4162 &m_data[0], // const uint8_t* data;
4163 };
4164 return data;
4165 }
4166
4167 void setData(const Data &)
4168 {
4169 DE_ASSERT(0);
4170 }
4171
4172 private:
4173 OperationContext &m_context;
4174 Resource &m_resource;
4175 std::vector<uint8_t> m_data;
4176 const VkClearValue m_clearValue;
4177 const ClearMode m_mode;
4178 };
4179
4180 class Support : public OperationSupport
4181 {
4182 public:
4183 Support(const ResourceDescription &resourceDesc, const ClearMode mode) : m_resourceDesc(resourceDesc), m_mode(mode)
4184 {
4185 DE_ASSERT(m_mode == CLEAR_MODE_COLOR || m_mode == CLEAR_MODE_DEPTH_STENCIL);
4186 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
4187 DE_ASSERT(m_resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT || (m_mode != CLEAR_MODE_COLOR));
4188 DE_ASSERT((m_resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) ||
4189 (m_mode != CLEAR_MODE_DEPTH_STENCIL));
4190 }
4191
4192 uint32_t getInResourceUsageFlags(void) const
4193 {
4194 return 0;
4195 }
4196
4197 uint32_t getOutResourceUsageFlags(void) const
4198 {
4199 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4200 }
4201
4202 VkQueueFlags getQueueFlags(const OperationContext &context) const
4203 {
4204 DE_UNREF(context);
4205 if (m_mode == CLEAR_MODE_COLOR)
4206 return VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
4207 else
4208 return VK_QUEUE_GRAPHICS_BIT;
4209 }
4210
4211 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4212 {
4213 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
4214 }
4215
4216 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4217 {
4218 DE_ASSERT(0);
4219 return de::MovePtr<Operation>();
4220 }
4221
4222 private:
4223 const ResourceDescription m_resourceDesc;
4224 const ClearMode m_mode;
4225 };
4226
4227 } // namespace ClearImage
4228
4229 namespace Draw
4230 {
4231
4232 enum DrawCall
4233 {
4234 DRAW_CALL_DRAW,
4235 DRAW_CALL_DRAW_INDEXED,
4236 DRAW_CALL_DRAW_INDIRECT,
4237 DRAW_CALL_DRAW_INDEXED_INDIRECT,
4238 };
4239
4240 //! A write operation that is a result of drawing to an image.
4241 //! \todo Add support for depth/stencil too?
4242 class Implementation : public Operation
4243 {
4244 public:
4245 Implementation(OperationContext &context, Resource &resource, const DrawCall drawCall)
4246 : m_context(context)
4247 , m_resource(resource)
4248 , m_drawCall(drawCall)
4249 , m_vertices(context)
4250 {
4251 const DeviceInterface &vk = context.getDeviceInterface();
4252 const VkDevice device = context.getDevice();
4253 Allocator &allocator = context.getAllocator();
4254
4255 // Indirect buffer
4256
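        // Fill the indirect draw parameters in a host-visible buffer up front and flush them, so the
        // GPU reads a valid VkDrawIndirectCommand / VkDrawIndexedIndirectCommand at draw time.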
4257 if (m_drawCall == DRAW_CALL_DRAW_INDIRECT)
4258 {
4259 m_indirectBuffer = de::MovePtr<Buffer>(
4260 new Buffer(vk, device, allocator,
4261 makeBufferCreateInfo(sizeof(VkDrawIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
4262 MemoryRequirement::HostVisible));
4263
4264 const Allocation &alloc = m_indirectBuffer->getAllocation();
4265 VkDrawIndirectCommand *const pIndirectCommand = static_cast<VkDrawIndirectCommand *>(alloc.getHostPtr());
4266
4267 pIndirectCommand->vertexCount = m_vertices.getNumVertices();
4268 pIndirectCommand->instanceCount = 1u;
4269 pIndirectCommand->firstVertex = 0u;
4270 pIndirectCommand->firstInstance = 0u;
4271
4272 flushAlloc(vk, device, alloc);
4273 }
4274 else if (m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
4275 {
4276 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(
4277 vk, device, allocator,
4278 makeBufferCreateInfo(sizeof(VkDrawIndexedIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
4279 MemoryRequirement::HostVisible));
4280
4281 const Allocation &alloc = m_indirectBuffer->getAllocation();
4282 VkDrawIndexedIndirectCommand *const pIndirectCommand =
4283 static_cast<VkDrawIndexedIndirectCommand *>(alloc.getHostPtr());
4284
4285 pIndirectCommand->indexCount = m_vertices.getNumIndices();
4286 pIndirectCommand->instanceCount = 1u;
4287 pIndirectCommand->firstIndex = 0u;
4288 pIndirectCommand->vertexOffset = 0u;
4289 pIndirectCommand->firstInstance = 0u;
4290
4291 flushAlloc(vk, device, alloc);
4292 }
4293
4294 // Resource image is the color attachment
4295
4296 m_colorFormat = m_resource.getImage().format;
4297 m_colorSubresourceRange = m_resource.getImage().subresourceRange;
4298 m_colorImage = m_resource.getImage().handle;
4299 m_attachmentExtent = m_resource.getImage().extent;
4300
4301 // Pipeline
4302
4303 m_colorAttachmentView =
4304 makeImageView(vk, device, m_colorImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorSubresourceRange);
4305 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
4306 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_attachmentExtent.width,
4307 m_attachmentExtent.height);
4308 m_pipelineLayout = makePipelineLayout(vk, device);
4309
4310 GraphicsPipelineBuilder pipelineBuilder;
4311 pipelineBuilder.setRenderSize(tcu::IVec2(m_attachmentExtent.width, m_attachmentExtent.height))
4312 .setVertexInputSingleAttribute(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
4313 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("draw_vert"), nullptr)
4314 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("draw_frag"),
4315 nullptr);
4316
4317 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
4318 context.getResourceInterface());
4319
4320 // Set expected draw values
4321
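        // The generated fragment shader writes the same value that makeClearValue() produces for this
        // format (see Support::initPrograms below), so the expected image contents can be reproduced
        // on the host by clearing a pixel buffer to that value.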
4322 m_expectedData.resize(
4323 static_cast<size_t>(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent)));
4324 tcu::PixelBufferAccess imagePixels(mapVkFormat(m_colorFormat), m_attachmentExtent.width,
4325 m_attachmentExtent.height, m_attachmentExtent.depth, &m_expectedData[0]);
4326 clearPixelBuffer(imagePixels, makeClearValue(m_colorFormat));
4327 }
4328
4329 void recordCommands(const VkCommandBuffer cmdBuffer)
4330 {
4331 const DeviceInterface &vk = m_context.getDeviceInterface();
4332 SynchronizationWrapperPtr synchronizationWrapper =
4333 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
4334
4335 // Change color attachment image layout
4336 {
4337 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4338 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4339 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4340 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4341 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4342 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4343 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4344 m_colorImage, // VkImage image
4345 m_colorSubresourceRange // VkImageSubresourceRange subresourceRange
4346 );
4347 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
4348 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4349 }
4350
4351 {
4352 const VkRect2D renderArea = makeRect2D(m_attachmentExtent);
4353 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4354
4355 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4356 }
4357
4358 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4359 {
4360 const VkDeviceSize vertexBufferOffset = 0ull;
4361 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
4362 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
4363 }
4364
4365 if (m_drawCall == DRAW_CALL_DRAW_INDEXED || m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
4366 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
4367
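        // All four draw call variants draw the same vertex grid; only the command used to issue the
        // draw differs.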
4368 switch (m_drawCall)
4369 {
4370 case DRAW_CALL_DRAW:
4371 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
4372 break;
4373
4374 case DRAW_CALL_DRAW_INDEXED:
4375 vk.cmdDrawIndexed(cmdBuffer, m_vertices.getNumIndices(), 1u, 0u, 0, 0u);
4376 break;
4377
4378 case DRAW_CALL_DRAW_INDIRECT:
4379 vk.cmdDrawIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4380 break;
4381
4382 case DRAW_CALL_DRAW_INDEXED_INDIRECT:
4383 vk.cmdDrawIndexedIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4384 break;
4385 }
4386
4387 endRenderPass(vk, cmdBuffer);
4388 }
4389
4390 SyncInfo getInSyncInfo(void) const
4391 {
4392 return emptySyncInfo;
4393 }
4394
4395 SyncInfo getOutSyncInfo(void) const
4396 {
4397 const SyncInfo syncInfo = {
4398 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags stageMask;
4399 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4400 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout imageLayout;
4401 };
4402 return syncInfo;
4403 }
4404
4405 Data getData(void) const
4406 {
4407 const Data data = {
4408 m_expectedData.size(), // std::size_t size;
4409 &m_expectedData[0], // const uint8_t* data;
4410 };
4411 return data;
4412 }
4413
4414 void setData(const Data &data)
4415 {
4416 DE_ASSERT(m_expectedData.size() == data.size);
4417 deMemcpy(&m_expectedData[0], data.data, data.size);
4418 }
4419
4420 private:
4421 OperationContext &m_context;
4422 Resource &m_resource;
4423 const DrawCall m_drawCall;
4424 const VertexGrid m_vertices;
4425 std::vector<uint8_t> m_expectedData;
4426 de::MovePtr<Buffer> m_indirectBuffer;
4427 VkFormat m_colorFormat;
4428 VkImage m_colorImage;
4429 Move<VkImageView> m_colorAttachmentView;
4430 VkImageSubresourceRange m_colorSubresourceRange;
4431 VkExtent3D m_attachmentExtent;
4432 Move<VkRenderPass> m_renderPass;
4433 Move<VkFramebuffer> m_framebuffer;
4434 Move<VkPipelineLayout> m_pipelineLayout;
4435 Move<VkPipeline> m_pipeline;
4436 };
4437
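//! Formats a clear-value component array as a comma-separated list, used to embed the expected color
//! constant in the generated fragment shader.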
4438 template <typename T, std::size_t N>
4439 std::string toString(const T (&values)[N])
4440 {
4441 std::ostringstream str;
4442 for (std::size_t i = 0; i < N; ++i)
4443 str << (i != 0 ? ", " : "") << values[i];
4444 return str.str();
4445 }
4446
4447 class Support : public OperationSupport
4448 {
4449 public:
4450 Support(const ResourceDescription &resourceDesc, const DrawCall drawCall)
4451 : m_resourceDesc(resourceDesc)
4452 , m_drawCall(drawCall)
4453 {
4454 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE && m_resourceDesc.imageType == VK_IMAGE_TYPE_2D);
4455 DE_ASSERT(!isDepthStencilFormat(m_resourceDesc.imageFormat));
4456 }
4457
4458 void initPrograms(SourceCollections &programCollection) const
4459 {
4460 // Vertex
4461 {
4462 std::ostringstream src;
4463 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4464 << "\n"
4465 << "layout(location = 0) in vec4 v_in_position;\n"
4466 << "\n"
4467 << "out " << s_perVertexBlock << ";\n"
4468 << "\n"
4469 << "void main (void)\n"
4470 << "{\n"
4471 << " gl_Position = v_in_position;\n"
4472 << "}\n";
4473
4474 programCollection.glslSources.add("draw_vert") << glu::VertexSource(src.str());
4475 }
4476
4477 // Fragment
4478 {
4479 const VkClearValue clearValue = makeClearValue(m_resourceDesc.imageFormat);
4480 const bool isIntegerFormat =
4481 isIntFormat(m_resourceDesc.imageFormat) || isUintFormat(m_resourceDesc.imageFormat);
4482 const std::string colorType = (isIntegerFormat ? "uvec4" : "vec4");
4483
4484 std::ostringstream src;
4485 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4486 << "\n"
4487 << "layout(location = 0) out " << colorType << " o_color;\n"
4488 << "\n"
4489 << "void main (void)\n"
4490 << "{\n"
4491 << " o_color = " << colorType << "("
4492 << (isIntegerFormat ? toString(clearValue.color.uint32) : toString(clearValue.color.float32)) << ");\n"
4493 << "}\n";
4494
4495 programCollection.glslSources.add("draw_frag") << glu::FragmentSource(src.str());
4496 }
4497 }
4498
4499 uint32_t getInResourceUsageFlags(void) const
4500 {
4501 return 0;
4502 }
4503
4504 uint32_t getOutResourceUsageFlags(void) const
4505 {
4506 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4507 }
4508
4509 VkQueueFlags getQueueFlags(const OperationContext &context) const
4510 {
4511 DE_UNREF(context);
4512 return VK_QUEUE_GRAPHICS_BIT;
4513 }
4514
4515 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4516 {
4517 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawCall));
4518 }
4519
4520 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4521 {
4522 DE_ASSERT(0);
4523 return de::MovePtr<Operation>();
4524 }
4525
4526 private:
4527 const ResourceDescription m_resourceDesc;
4528 const DrawCall m_drawCall;
4529 };
4530
4531 } // namespace Draw
4532
4533 namespace ClearAttachments
4534 {
4535
4536 class Implementation : public Operation
4537 {
4538 public:
4539 Implementation(OperationContext &context, Resource &resource)
4540 : m_context(context)
4541 , m_resource(resource)
4542 , m_clearValue(makeClearValue(m_resource.getImage().format))
4543 {
4544 const DeviceInterface &vk = context.getDeviceInterface();
4545 const VkDevice device = context.getDevice();
4546
4547 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
4548 const VkExtent3D &extent = m_resource.getImage().extent;
4549 const VkFormat format = m_resource.getImage().format;
4550 const tcu::TextureFormat texFormat = mapVkFormat(format);
4551 const SyncInfo syncInfo = getOutSyncInfo();
4552
4553 m_data.resize(static_cast<std::size_t>(size));
4554 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
4555 clearPixelBuffer(imagePixels, m_clearValue);
4556
4557 m_attachmentView =
4558 makeImageView(vk, device, m_resource.getImage().handle, getImageViewType(m_resource.getImage().imageType),
4559 m_resource.getImage().format, m_resource.getImage().subresourceRange);
4560
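        // Create a render pass that contains only the aspect being cleared: a color attachment for
        // color images, a depth/stencil attachment otherwise. The layout reported by getOutSyncInfo()
        // is passed to makeRenderPass() so the image ends up in the expected layout.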
4561 switch (m_resource.getImage().subresourceRange.aspectMask)
4562 {
4563 case VK_IMAGE_ASPECT_COLOR_BIT:
4564 m_renderPass = makeRenderPass(vk, device, m_resource.getImage().format, VK_FORMAT_UNDEFINED,
4565 VK_ATTACHMENT_LOAD_OP_DONT_CARE, syncInfo.imageLayout);
4566 break;
4567 case VK_IMAGE_ASPECT_STENCIL_BIT:
4568 case VK_IMAGE_ASPECT_DEPTH_BIT:
4569 m_renderPass = makeRenderPass(vk, device, VK_FORMAT_UNDEFINED, m_resource.getImage().format,
4570 VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
4571 syncInfo.imageLayout);
4572 break;
4573 default:
4574 DE_ASSERT(0);
4575 break;
4576 }
4577
4578 m_frameBuffer = makeFramebuffer(vk, device, *m_renderPass, *m_attachmentView,
4579 m_resource.getImage().extent.width, m_resource.getImage().extent.height);
4580 }
4581
4582 void recordCommands(const VkCommandBuffer cmdBuffer)
4583 {
4584 const DeviceInterface &vk = m_context.getDeviceInterface();
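        // Depth/stencil images are explicitly transitioned to DEPTH_STENCIL_ATTACHMENT_OPTIMAL before
        // the render pass begins; no such barrier is recorded for color images.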
4585 if ((m_resource.getImage().subresourceRange.aspectMask &
4586 (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != 0)
4587 {
4588 const VkImageMemoryBarrier imageBarrier = {
4589 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType
4590 nullptr, // pNext
4591 0u, // srcAccessMask
4592 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, // dstAccessMask
4593 VK_IMAGE_LAYOUT_UNDEFINED, // oldLayout
4594 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, // newLayout
4595 VK_QUEUE_FAMILY_IGNORED, // srcQueueFamilyIndex
4596 VK_QUEUE_FAMILY_IGNORED, // dstQueueFamilyIndex
4597 m_resource.getImage().handle, // image
4598 m_resource.getImage().subresourceRange // subresourceRange
4599 };
4600 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT,
4601 VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
4602 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
4603 0u, 0u, nullptr, 0u, nullptr, 1u, &imageBarrier);
4604 }
4605 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_frameBuffer,
4606 makeRect2D(0, 0, m_resource.getImage().extent.width, m_resource.getImage().extent.height),
4607 m_clearValue);
4608
4609 const VkClearAttachment clearAttachment = {
4610 m_resource.getImage().subresourceRange.aspectMask, // VkImageAspectFlags aspectMask;
4611 0, // uint32_t colorAttachment;
4612 m_clearValue // VkClearValue clearValue;
4613 };
4614
4615 const VkRect2D rect2D = makeRect2D(m_resource.getImage().extent);
4616
4617 const VkClearRect clearRect = {
4618 rect2D, // VkRect2D rect;
4619 0u, // uint32_t baseArrayLayer;
4620 m_resource.getImage().subresourceLayers.layerCount // uint32_t layerCount;
4621 };
4622
4623 vk.cmdClearAttachments(cmdBuffer, 1, &clearAttachment, 1, &clearRect);
4624
4625 endRenderPass(vk, cmdBuffer);
4626 }
4627
4628 SyncInfo getInSyncInfo(void) const
4629 {
4630 return emptySyncInfo;
4631 }
4632
4633 SyncInfo getOutSyncInfo(void) const
4634 {
4635 SyncInfo syncInfo;
4636 syncInfo.stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR;
4637
4638 switch (m_resource.getImage().subresourceRange.aspectMask)
4639 {
4640 case VK_IMAGE_ASPECT_COLOR_BIT:
4641 syncInfo.accessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
4642 syncInfo.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
4643 break;
4644 case VK_IMAGE_ASPECT_STENCIL_BIT:
4645 case VK_IMAGE_ASPECT_DEPTH_BIT:
4646 syncInfo.accessMask = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR;
4647 syncInfo.imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
4648 break;
4649 default:
4650 DE_ASSERT(0);
4651 break;
4652 }
4653
4654 return syncInfo;
4655 }
4656
4657 Data getData(void) const
4658 {
4659 const Data data = {
4660 m_data.size(), // std::size_t size;
4661 &m_data[0], // const uint8_t* data;
4662 };
4663 return data;
4664 }
4665
4666 void setData(const Data &)
4667 {
4668 DE_ASSERT(0);
4669 }
4670
4671 private:
4672 OperationContext &m_context;
4673 Resource &m_resource;
4674 std::vector<uint8_t> m_data;
4675 const VkClearValue m_clearValue;
4676 Move<VkImageView> m_attachmentView;
4677 Move<VkRenderPass> m_renderPass;
4678 Move<VkFramebuffer> m_frameBuffer;
4679 };
4680
4681 class Support : public OperationSupport
4682 {
4683 public:
4684 Support(const ResourceDescription &resourceDesc) : m_resourceDesc(resourceDesc)
4685 {
4686 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
4687 }
4688
4689 uint32_t getInResourceUsageFlags(void) const
4690 {
4691 return 0;
4692 }
4693
4694 uint32_t getOutResourceUsageFlags(void) const
4695 {
4696 switch (m_resourceDesc.imageAspect)
4697 {
4698 case VK_IMAGE_ASPECT_COLOR_BIT:
4699 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4700 case VK_IMAGE_ASPECT_STENCIL_BIT:
4701 case VK_IMAGE_ASPECT_DEPTH_BIT:
4702 return VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
4703 default:
4704 DE_ASSERT(0);
4705 }
4706 return 0u;
4707 }
4708
4709 VkQueueFlags getQueueFlags(const OperationContext &context) const
4710 {
4711 DE_UNREF(context);
4712 return VK_QUEUE_GRAPHICS_BIT;
4713 }
4714
4715 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4716 {
4717 return de::MovePtr<Operation>(new Implementation(context, resource));
4718 }
4719
4720 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4721 {
4722 DE_ASSERT(0);
4723 return de::MovePtr<Operation>();
4724 }
4725
4726 private:
4727 const ResourceDescription m_resourceDesc;
4728 };
4729
4730 } // namespace ClearAttachments
4731
4732 namespace IndirectBuffer
4733 {
4734
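//! Issues an indirect draw sourced from the given buffer, rendering the vertex grid into a small
//! throw-away color attachment.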
4735 class GraphicsPipeline : public Pipeline
4736 {
4737 public:
4738 GraphicsPipeline(OperationContext &context, const ResourceType resourceType, const VkBuffer indirectBuffer,
4739 const std::string &shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
4740 : m_resourceType(resourceType)
4741 , m_indirectBuffer(indirectBuffer)
4742 , m_vertices(context)
4743 {
4744 const DeviceInterface &vk = context.getDeviceInterface();
4745 const VkDevice device = context.getDevice();
4746 Allocator &allocator = context.getAllocator();
4747
4748 // Color attachment
4749
4750 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
4751 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
4752 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
4753 m_colorAttachmentImage = de::MovePtr<Image>(new Image(
4754 vk, device, allocator,
4755 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat,
4756 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
4757 MemoryRequirement::Any));
4758
4759 // Pipeline
4760
4761 m_colorAttachmentView = makeImageView(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D,
4762 m_colorFormat, m_colorImageSubresourceRange);
4763 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
4764 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width,
4765 m_colorImageExtent.height);
4766 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4767
4768 GraphicsPipelineBuilder pipelineBuilder;
4769 pipelineBuilder.setRenderSize(tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
4770 .setVertexInputSingleAttribute(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
4771 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"),
4772 nullptr)
4773 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,
4774 context.getBinaryCollection().get(shaderPrefix + "frag"), nullptr);
4775
4776 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
4777 context.getResourceInterface());
4778 }
4779
4780 void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4781 {
4782 const DeviceInterface &vk = context.getDeviceInterface();
4783 SynchronizationWrapperPtr synchronizationWrapper =
4784 getSynchronizationWrapper(context.getSynchronizationType(), vk, false);
4785
4786 // Change color attachment image layout
4787 {
4788 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4789 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4790 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4791 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4792 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4793 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4794 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4795 **m_colorAttachmentImage, // VkImage image
4796 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
4797 );
4798 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
4799 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4800 }
4801
4802 {
4803 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
4804 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4805
4806 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4807 }
4808
4809 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4810 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet,
4811 0u, nullptr);
4812 {
4813 const VkDeviceSize vertexBufferOffset = 0ull;
4814 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
4815 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
4816 }
4817
4818 switch (m_resourceType)
4819 {
4820 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
4821 vk.cmdDrawIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4822 break;
4823
4824 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
4825 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
4826 vk.cmdDrawIndexedIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4827 break;
4828
4829 default:
4830 DE_ASSERT(0);
4831 break;
4832 }
4833 endRenderPass(vk, cmdBuffer);
4834 }
4835
4836 private:
4837 const ResourceType m_resourceType;
4838 const VkBuffer m_indirectBuffer;
4839 const VertexGrid m_vertices;
4840 VkFormat m_colorFormat;
4841 de::MovePtr<Image> m_colorAttachmentImage;
4842 Move<VkImageView> m_colorAttachmentView;
4843 VkExtent3D m_colorImageExtent;
4844 VkImageSubresourceRange m_colorImageSubresourceRange;
4845 Move<VkRenderPass> m_renderPass;
4846 Move<VkFramebuffer> m_framebuffer;
4847 Move<VkPipelineLayout> m_pipelineLayout;
4848 Move<VkPipeline> m_pipeline;
4849 };
4850
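//! Issues an indirect dispatch sourced from the given buffer.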
4851 class ComputePipeline : public Pipeline
4852 {
4853 public:
4854 ComputePipeline(OperationContext &context, const VkBuffer indirectBuffer, const std::string &shaderPrefix,
4855 const VkDescriptorSetLayout descriptorSetLayout)
4856 : m_indirectBuffer(indirectBuffer)
4857 {
4858 const DeviceInterface &vk = context.getDeviceInterface();
4859 const VkDevice device = context.getDevice();
4860
4861 const Unique<VkShaderModule> shaderModule(createShaderModule(
4862 vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
4863
4864 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4865 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, nullptr,
4866 context.getPipelineCacheData(), context.getResourceInterface());
4867 }
4868
4869 void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4870 {
4871 const DeviceInterface &vk = context.getDeviceInterface();
4872
4873 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
4874 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet,
4875 0u, nullptr);
4876 vk.cmdDispatchIndirect(cmdBuffer, m_indirectBuffer, 0u);
4877 }
4878
4879 private:
4880 const VkBuffer m_indirectBuffer;
4881 Move<VkPipelineLayout> m_pipelineLayout;
4882 Move<VkPipeline> m_pipeline;
4883 };
4884
4885 //! Read indirect buffer by executing an indirect draw or dispatch command.
4886 class ReadImplementation : public Operation
4887 {
4888 public:
4889 ReadImplementation(OperationContext &context, Resource &resource)
4890 : m_context(context)
4891 , m_resource(resource)
4892 , m_stage(resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_SHADER_STAGE_COMPUTE_BIT :
4893 VK_SHADER_STAGE_VERTEX_BIT)
4894 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
4895 , m_hostBufferSizeBytes(sizeof(uint32_t))
4896 {
4897 requireFeaturesForSSBOAccess(m_context, m_stage);
4898
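        // The host buffer holds a single uint32_t counter. The read_ib_* shaders atomically increment
        // it once per invocation, so after the indirect draw/dispatch it should equal the value
        // written by WriteImplementation (vertex/index count or total work group count).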
4899 const DeviceInterface &vk = m_context.getDeviceInterface();
4900 const VkDevice device = m_context.getDevice();
4901 Allocator &allocator = m_context.getAllocator();
4902
4903 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
4904 vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
4905 MemoryRequirement::HostVisible));
4906
4907 // Init host buffer data
4908 {
4909 const Allocation &alloc = m_hostBuffer->getAllocation();
4910 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
4911 flushAlloc(vk, device, alloc);
4912 }
4913
4914 // Prepare descriptors
4915 {
4916 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
4917 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
4918 .build(vk, device);
4919
4920 m_descriptorPool = DescriptorPoolBuilder()
4921 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
4922 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
4923
4924 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
4925
4926 const VkDescriptorBufferInfo hostBufferInfo =
4927 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
4928
4929 DescriptorSetUpdateBuilder()
4930 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
4931 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
4932 .update(vk, device);
4933 }
4934
4935 // Create pipeline
4936 m_pipeline = (m_resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ?
4937 de::MovePtr<Pipeline>(new ComputePipeline(context, m_resource.getBuffer().handle, "read_ib_",
4938 *m_descriptorSetLayout)) :
4939 de::MovePtr<Pipeline>(new GraphicsPipeline(context, m_resource.getType(),
4940 m_resource.getBuffer().handle, "read_ib_",
4941 *m_descriptorSetLayout)));
4942 }
4943
4944 void recordCommands(const VkCommandBuffer cmdBuffer)
4945 {
4946 const DeviceInterface &vk = m_context.getDeviceInterface();
4947 SynchronizationWrapperPtr synchronizationWrapper =
4948 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
4949
4950 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
4951
4952 // Insert a barrier so data written by the shader is available to the host
4953 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
4954 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
4955 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
4956 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4957 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4958 **m_hostBuffer, // VkBuffer buffer
4959 0u, // VkDeviceSize offset
4960 m_hostBufferSizeBytes // VkDeviceSize size
4961 );
4962 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
4963 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4964 }
4965
4966 SyncInfo getInSyncInfo(void) const
4967 {
4968 const SyncInfo syncInfo = {
4969 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, // VkPipelineStageFlags stageMask;
4970 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, // VkAccessFlags accessMask;
4971 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4972 };
4973 return syncInfo;
4974 }
4975
4976 SyncInfo getOutSyncInfo(void) const
4977 {
4978 return emptySyncInfo;
4979 }
4980
4981 Data getData(void) const
4982 {
4983 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
4984 }
4985
4986 void setData(const Data &)
4987 {
4988 DE_ASSERT(0);
4989 }
4990
4991 vk::VkShaderStageFlagBits getShaderStage(void)
4992 {
4993 return m_stage;
4994 }
4995
4996 private:
4997 OperationContext &m_context;
4998 Resource &m_resource;
4999 const VkShaderStageFlagBits m_stage;
5000 const VkPipelineStageFlags m_pipelineStage;
5001 const VkDeviceSize m_hostBufferSizeBytes;
5002 de::MovePtr<Buffer> m_hostBuffer;
5003 Move<VkDescriptorPool> m_descriptorPool;
5004 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
5005 Move<VkDescriptorSet> m_descriptorSet;
5006 de::MovePtr<Pipeline> m_pipeline;
5007 };
5008
5009 //! Prepare indirect buffer for a draw/dispatch call.
5010 class WriteImplementation : public Operation
5011 {
5012 public:
5013 WriteImplementation(OperationContext &context, Resource &resource) : m_context(context), m_resource(resource)
5014 {
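        // m_expectedValue is the count the read operation's shaders should accumulate with one
        // atomicAdd() per invocation: 6 for the draw cases, 7 * 2 * 1 = 14 for the dispatch case
        // (the compute shader uses local_size_x = 1).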
5015 switch (m_resource.getType())
5016 {
5017 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
5018 {
5019 m_drawIndirect.vertexCount = 6u;
5020 m_drawIndirect.instanceCount = 1u;
5021 m_drawIndirect.firstVertex = 0u;
5022 m_drawIndirect.firstInstance = 0u;
5023
5024 m_indirectData = reinterpret_cast<uint32_t *>(&m_drawIndirect);
5025 m_expectedValue = 6u;
5026 }
5027 break;
5028
5029 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
5030 {
5031 m_drawIndexedIndirect.indexCount = 6u;
5032 m_drawIndexedIndirect.instanceCount = 1u;
5033 m_drawIndexedIndirect.firstIndex = 0u;
5034 m_drawIndexedIndirect.vertexOffset = 0u;
5035 m_drawIndexedIndirect.firstInstance = 0u;
5036
5037 m_indirectData = reinterpret_cast<uint32_t *>(&m_drawIndexedIndirect);
5038 m_expectedValue = 6u;
5039 }
5040 break;
5041
5042 case RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH:
5043 {
5044 m_dispatchIndirect.x = 7u;
5045 m_dispatchIndirect.y = 2u;
5046 m_dispatchIndirect.z = 1u;
5047
5048 m_indirectData = reinterpret_cast<uint32_t *>(&m_dispatchIndirect);
5049 m_expectedValue = 14u;
5050 }
5051 break;
5052
5053 default:
5054 DE_ASSERT(0);
5055 break;
5056 }
5057 }
5058
5059 void recordCommands(const VkCommandBuffer cmdBuffer)
5060 {
5061 const DeviceInterface &vk = m_context.getDeviceInterface();
5062
5063 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset,
5064 m_resource.getBuffer().size, m_indirectData);
5065 }
5066
5067 SyncInfo getInSyncInfo(void) const
5068 {
5069 return emptySyncInfo;
5070 }
5071
5072 SyncInfo getOutSyncInfo(void) const
5073 {
5074 const SyncInfo syncInfo = {
5075 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
5076 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
5077 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
5078 };
5079 return syncInfo;
5080 }
5081
5082 Data getData(void) const
5083 {
5084 const Data data = {
5085 sizeof(uint32_t), // std::size_t size;
5086 reinterpret_cast<const uint8_t *>(&m_expectedValue), // const uint8_t* data;
5087 };
5088 return data;
5089 }
5090
5091 void setData(const Data &)
5092 {
5093 DE_ASSERT(0);
5094 }
5095
5096 private:
5097 OperationContext &m_context;
5098 Resource &m_resource;
5099 VkDrawIndirectCommand m_drawIndirect;
5100 VkDrawIndexedIndirectCommand m_drawIndexedIndirect;
5101 VkDispatchIndirectCommand m_dispatchIndirect;
5102 uint32_t *m_indirectData;
5103 uint32_t m_expectedValue; //! Side-effect value expected to be computed by a read (draw/dispatch) command.
5104 };
5105
5106 class ReadSupport : public OperationSupport
5107 {
5108 public:
5109 ReadSupport(const ResourceDescription &resourceDesc) : m_resourceDesc(resourceDesc)
5110 {
5111 DE_ASSERT(isIndirectBuffer(m_resourceDesc.type));
5112 }
5113
5114 void initPrograms(SourceCollections &programCollection) const
5115 {
5116 std::ostringstream decl;
5117 decl << "layout(set = 0, binding = 0, std140) coherent buffer Data {\n"
5118 << " uint value;\n"
5119 << "} sb_out;\n";
5120
5121 std::ostringstream main;
5122 main << " atomicAdd(sb_out.value, 1u);\n";
5123
5124 // Vertex
5125 {
5126 std::ostringstream src;
5127 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5128 << "\n"
5129 << "layout(location = 0) in vec4 v_in_position;\n"
5130 << "\n"
5131 << "out " << s_perVertexBlock << ";\n"
5132 << "\n"
5133 << decl.str() << "\n"
5134 << "void main (void)\n"
5135 << "{\n"
5136 << " gl_Position = v_in_position;\n"
5137 << main.str() << "}\n";
5138
5139 programCollection.glslSources.add("read_ib_vert") << glu::VertexSource(src.str());
5140 }
5141
5142 // Fragment
5143 {
5144 std::ostringstream src;
5145 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5146 << "\n"
5147 << "layout(location = 0) out vec4 o_color;\n"
5148 << "\n"
5149 << "void main (void)\n"
5150 << "{\n"
5151 << " o_color = vec4(1.0);\n"
5152 << "}\n";
5153
5154 programCollection.glslSources.add("read_ib_frag") << glu::FragmentSource(src.str());
5155 }
5156
5157 // Compute
5158 {
5159 std::ostringstream src;
5160 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5161 << "\n"
5162 << "layout(local_size_x = 1) in;\n"
5163 << "\n"
5164 << decl.str() << "\n"
5165 << "void main (void)\n"
5166 << "{\n"
5167 << main.str() << "}\n";
5168
5169 programCollection.glslSources.add("read_ib_comp") << glu::ComputeSource(src.str());
5170 }
5171 }
5172
5173 uint32_t getInResourceUsageFlags(void) const
5174 {
5175 return VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
5176 }
5177
5178 uint32_t getOutResourceUsageFlags(void) const
5179 {
5180 return 0;
5181 }
5182
5183 VkQueueFlags getQueueFlags(const OperationContext &context) const
5184 {
5185 DE_UNREF(context);
5186 return (m_resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_QUEUE_COMPUTE_BIT :
5187 VK_QUEUE_GRAPHICS_BIT);
5188 }
5189
5190 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
5191 {
5192 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
5193 }
5194
5195 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
5196 {
5197 DE_ASSERT(0);
5198 return de::MovePtr<Operation>();
5199 }
5200
5201 private:
5202 const ResourceDescription m_resourceDesc;
5203 };
5204
5205 class WriteSupport : public OperationSupport
5206 {
5207 public:
5208 WriteSupport(const ResourceDescription &resourceDesc)
5209 {
5210 DE_ASSERT(isIndirectBuffer(resourceDesc.type));
5211 DE_UNREF(resourceDesc);
5212 }
5213
5214 uint32_t getInResourceUsageFlags(void) const
5215 {
5216 return 0;
5217 }
5218
5219 uint32_t getOutResourceUsageFlags(void) const
5220 {
5221 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
5222 }
5223
5224 VkQueueFlags getQueueFlags(const OperationContext &context) const
5225 {
5226 DE_UNREF(context);
5227 return VK_QUEUE_TRANSFER_BIT;
5228 }
5229
5230 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
5231 {
5232 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
5233 }
5234
5235 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
5236 {
5237 DE_ASSERT(0);
5238 return de::MovePtr<Operation>();
5239 }
5240 };
5241
5242 } // namespace IndirectBuffer
5243
5244 namespace VertexInput
5245 {
5246
5247 enum DrawMode
5248 {
5249 DRAW_MODE_VERTEX = 0,
5250 DRAW_MODE_INDEXED,
5251 };
5252
5253 class Implementation : public Operation
5254 {
5255 public:
5256 Implementation(OperationContext &context, Resource &resource, DrawMode drawMode)
5257 : m_context(context)
5258 , m_resource(resource)
5259 , m_drawMode(drawMode)
5260 {
5261 requireFeaturesForSSBOAccess(m_context, VK_SHADER_STAGE_VERTEX_BIT);
5262
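        // The tested buffer is consumed either directly as the vertex buffer (DRAW_MODE_VERTEX) or as
        // the index buffer (DRAW_MODE_INDEXED, with a separate attribute buffer created below). The
        // vertex shader copies whatever it reads into the output SSBO so the host can verify the data.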
5263 const DeviceInterface &vk = context.getDeviceInterface();
5264 const VkDevice device = context.getDevice();
5265 Allocator &allocator = context.getAllocator();
5266 VkFormat attributeFormat = VK_FORMAT_R32G32B32A32_UINT;
5267 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
5268
5269 // Allocate an SSBO that will store the data used for verification
5270 {
5271 m_outputBuffer = de::MovePtr<Buffer>(new Buffer(
5272 vk, device, allocator, makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
5273 MemoryRequirement::HostVisible));
5274
5275 const Allocation &alloc = m_outputBuffer->getAllocation();
5276 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(dataSizeBytes));
5277 flushAlloc(vk, device, alloc);
5278 }
5279
5280 // Allocate a buffer that will provide the vertex attributes when the resource is used as an index buffer
5281 if (m_drawMode == DRAW_MODE_INDEXED)
5282 {
5283 attributeFormat = VK_FORMAT_R32_UINT;
5284
5285 m_inputBuffer = de::MovePtr<Buffer>(new Buffer(
5286 vk, device, allocator, makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
5287 MemoryRequirement::HostVisible));
5288
5289 const Allocation &alloc = m_inputBuffer->getAllocation();
5290 fillPattern(alloc.getHostPtr(), dataSizeBytes, true);
5291 flushAlloc(vk, device, alloc);
5292 }
5293
5294 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
5295 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_VERTEX_BIT)
5296 .build(vk, device);
5297
5298 m_descriptorPool = DescriptorPoolBuilder()
5299 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
5300 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
5301
5302 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
5303
5304 const VkDescriptorBufferInfo outputBufferDescriptorInfo =
5305 makeDescriptorBufferInfo(m_outputBuffer->get(), 0ull, dataSizeBytes);
5306 DescriptorSetUpdateBuilder()
5307 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
5308 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
5309 .update(vk, device);
5310
5311 // Color attachment
5312 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
5313 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
5314 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
5315 m_colorAttachmentImage = de::MovePtr<Image>(new Image(
5316 vk, device, allocator,
5317 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat,
5318 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
5319 MemoryRequirement::Any));
5320
5321 // Pipeline
5322 m_colorAttachmentView = makeImageView(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D,
5323 m_colorFormat, m_colorImageSubresourceRange);
5324 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
5325 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width,
5326 m_colorImageExtent.height);
5327 m_pipelineLayout = makePipelineLayout(vk, device, *m_descriptorSetLayout);
5328
5329 m_pipeline =
5330 GraphicsPipelineBuilder()
5331 .setPrimitiveTopology(VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
5332 .setRenderSize(
5333 tcu::IVec2(static_cast<int>(m_colorImageExtent.width), static_cast<int>(m_colorImageExtent.height)))
5334 .setVertexInputSingleAttribute(attributeFormat, tcu::getPixelSize(mapVkFormat(attributeFormat)))
5335 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("input_vert"),
5336 nullptr)
5337 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("input_frag"),
5338 nullptr)
5339 .build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
5340 context.getResourceInterface());
5341 }
5342
5343 void recordCommands(const VkCommandBuffer cmdBuffer)
5344 {
5345 const DeviceInterface &vk = m_context.getDeviceInterface();
5346 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
5347 SynchronizationWrapperPtr synchronizationWrapper =
5348 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
5349
5350 // Change color attachment image layout
5351 {
5352 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
5353 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
5354 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
5355 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
5356 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
5357 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
5358 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
5359 **m_colorAttachmentImage, // VkImage image
5360 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
5361 );
5362 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, nullptr, &imageMemoryBarrier2);
5363 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
5364 }
5365
5366 {
5367 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
5368 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
5369
5370 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
5371 }
5372
5373 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
5374 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u,
5375 &m_descriptorSet.get(), 0u, nullptr);
5376
5377 const VkDeviceSize vertexBufferOffset = 0ull;
5378 if (m_drawMode == DRAW_MODE_VERTEX)
5379 {
5380 const uint32_t count = static_cast<uint32_t>(dataSizeBytes / sizeof(tcu::UVec4));
5381 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &m_resource.getBuffer().handle, &vertexBufferOffset);
5382 vk.cmdDraw(cmdBuffer, count, 1u, 0u, 0u);
5383 }
5384 else // (m_drawMode == DRAW_MODE_INDEXED)
5385 {
5386 const uint32_t count = static_cast<uint32_t>(dataSizeBytes / sizeof(uint32_t));
5387 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &**m_inputBuffer, &vertexBufferOffset);
5388 vk.cmdBindIndexBuffer(cmdBuffer, m_resource.getBuffer().handle, 0u, VK_INDEX_TYPE_UINT32);
5389 vk.cmdDrawIndexed(cmdBuffer, count, 1, 0, 0, 0);
5390 }
5391
5392 endRenderPass(vk, cmdBuffer);
5393
5394 // Insert a barrier so data written by the shader is available to the host
5395 {
5396 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
5397 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
5398 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
5399 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
5400 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
5401 **m_outputBuffer, // VkBuffer buffer
5402 0u, // VkDeviceSize offset
5403 m_resource.getBuffer().size // VkDeviceSize size
5404 );
5405 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(nullptr, &bufferMemoryBarrier2);
5406 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
5407 }
5408 }
5409
5410 SyncInfo getInSyncInfo(void) const
5411 {
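        // Synchronization2 splits vertex input into separate INDEX_INPUT and VERTEX_ATTRIBUTE_INPUT
        // stages; legacy synchronization only has the combined VERTEX_INPUT stage.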
5412 const bool usingIndexedDraw = (m_drawMode == DRAW_MODE_INDEXED);
5413 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR;
5414 VkAccessFlags2KHR accessMask =
5415 usingIndexedDraw ? VK_ACCESS_2_INDEX_READ_BIT_KHR : VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR;
5416
5417 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
5418 {
5419 stageMask = usingIndexedDraw ? VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR :
5420 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR;
5421 }
5422
5423 const SyncInfo syncInfo = {
5424 stageMask, // VkPipelineStageFlags stageMask;
5425 accessMask, // VkAccessFlags accessMask;
5426 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
5427 };
5428 return syncInfo;
5429 }
5430
5431 SyncInfo getOutSyncInfo(void) const
5432 {
5433 return emptySyncInfo;
5434 }
5435
5436 Data getData(void) const
5437 {
5438 return getHostBufferData(m_context, *m_outputBuffer, m_resource.getBuffer().size);
5439 }
5440
5441 void setData(const Data &data)
5442 {
5443 setHostBufferData(m_context, *m_outputBuffer, data);
5444 }
5445
5446 private:
5447 OperationContext &m_context;
5448 Resource &m_resource;
5449 DrawMode m_drawMode;
5450 de::MovePtr<Buffer> m_inputBuffer;
5451 de::MovePtr<Buffer> m_outputBuffer;
5452 Move<VkRenderPass> m_renderPass;
5453 Move<VkFramebuffer> m_framebuffer;
5454 Move<VkPipelineLayout> m_pipelineLayout;
5455 Move<VkPipeline> m_pipeline;
5456 VkFormat m_colorFormat;
5457 de::MovePtr<Image> m_colorAttachmentImage;
5458 Move<VkImageView> m_colorAttachmentView;
5459 VkExtent3D m_colorImageExtent;
5460 VkImageSubresourceRange m_colorImageSubresourceRange;
5461 Move<VkDescriptorPool> m_descriptorPool;
5462 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
5463 Move<VkDescriptorSet> m_descriptorSet;
5464 };
5465
5466 class Support : public OperationSupport
5467 {
5468 public:
5469 Support(const ResourceDescription &resourceDesc, DrawMode drawMode)
5470 : m_resourceDesc(resourceDesc)
5471 , m_drawMode(drawMode)
5472 {
5473 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
5474 }
5475
5476 void initPrograms(SourceCollections &programCollection) const
5477 {
5478 // Vertex
5479 {
5480 std::ostringstream src;
5481 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n";
5482 if (m_drawMode == DRAW_MODE_VERTEX)
5483 {
5484 src << "layout(location = 0) in uvec4 v_in_data;\n"
5485 << "layout(set = 0, binding = 0, std140) writeonly buffer Output {\n"
5486 << " uvec4 data[" << m_resourceDesc.size.x() / sizeof(tcu::UVec4) << "];\n"
5487 << "} b_out;\n"
5488 << "\n"
5489 << "void main (void)\n"
5490 << "{\n"
5491 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5492 << " gl_PointSize = 1.0f;\n"
5493 << "}\n";
5494 }
5495 else // DRAW_MODE_INDEXED
5496 {
5497 src << "layout(location = 0) in uint v_in_data;\n"
5498 << "layout(set = 0, binding = 0, std430) writeonly buffer Output {\n"
5499 << " uint data[" << m_resourceDesc.size.x() / sizeof(uint32_t) << "];\n"
5500 << "} b_out;\n"
5501 << "\n"
5502 << "void main (void)\n"
5503 << "{\n"
5504 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5505 << " gl_PointSize = 1.0f;\n"
5506 << "}\n";
5507 }
5508 programCollection.glslSources.add("input_vert") << glu::VertexSource(src.str());
5509 }
5510
5511 // Fragment
5512 {
5513 std::ostringstream src;
5514 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5515 << "\n"
5516 << "layout(location = 0) out vec4 o_color;\n"
5517 << "\n"
5518 << "void main (void)\n"
5519 << "{\n"
5520 << " o_color = vec4(1.0);\n"
5521 << "}\n";
5522 programCollection.glslSources.add("input_frag") << glu::FragmentSource(src.str());
5523 }
5524 }
5525
5526 uint32_t getInResourceUsageFlags(void) const
5527 {
5528 return (m_drawMode == DRAW_MODE_VERTEX) ? VK_BUFFER_USAGE_VERTEX_BUFFER_BIT : VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
5529 }
5530
5531 uint32_t getOutResourceUsageFlags(void) const
5532 {
5533 return VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5534 }
5535
5536 VkQueueFlags getQueueFlags(const OperationContext &) const
5537 {
5538 return VK_QUEUE_GRAPHICS_BIT;
5539 }
5540
5541 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
5542 {
5543 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawMode));
5544 }
5545
5546 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
5547 {
5548 DE_ASSERT(0);
5549 return de::MovePtr<Operation>();
5550 }
5551
5552 private:
5553 const ResourceDescription m_resourceDesc;
5554 const DrawMode m_drawMode;
5555 };
5556
5557 } // namespace VertexInput
5558
5559 } // namespace
5560
5561 OperationContext::OperationContext(Context &context, SynchronizationType syncType, PipelineCacheData &pipelineCacheData)
5562 : m_context(context)
5563 , m_syncType(syncType)
5564 , m_vki(context.getInstanceInterface())
5565 , m_vk(context.getDeviceInterface())
5566 , m_physicalDevice(context.getPhysicalDevice())
5567 , m_device(context.getDevice())
5568 , m_allocator(context.getDefaultAllocator())
5569 , m_progCollection(context.getBinaryCollection())
5570 , m_pipelineCacheData(pipelineCacheData)
5571 {
5572 }
5573
5574 OperationContext::OperationContext(Context &context, SynchronizationType syncType, const DeviceInterface &vk,
5575 const VkDevice device, vk::Allocator &allocator,
5576 PipelineCacheData &pipelineCacheData)
5577 : m_context(context)
5578 , m_syncType(syncType)
5579 , m_vki(context.getInstanceInterface())
5580 , m_vk(vk)
5581 , m_physicalDevice(context.getPhysicalDevice())
5582 , m_device(device)
5583 , m_allocator(allocator)
5584 , m_progCollection(context.getBinaryCollection())
5585 , m_pipelineCacheData(pipelineCacheData)
5586 {
5587 }
5588
5589 OperationContext::OperationContext(Context &context, SynchronizationType syncType, const vk::InstanceInterface &vki,
5590 const vk::DeviceInterface &vkd, vk::VkPhysicalDevice physicalDevice,
5591 vk::VkDevice device, vk::Allocator &allocator,
5592 vk::BinaryCollection &programCollection, PipelineCacheData &pipelineCacheData)
5593 : m_context(context)
5594 , m_syncType(syncType)
5595 , m_vki(vki)
5596 , m_vk(vkd)
5597 , m_physicalDevice(physicalDevice)
5598 , m_device(device)
5599 , m_allocator(allocator)
5600 , m_progCollection(programCollection)
5601 , m_pipelineCacheData(pipelineCacheData)
5602 {
5603 }
5604
5605 Resource::Resource(OperationContext &context, const ResourceDescription &desc, const uint32_t usage,
5606 const vk::VkSharingMode sharingMode, const std::vector<uint32_t> &queueFamilyIndex)
5607 : m_type(desc.type)
5608 {
5609 const DeviceInterface &vk = context.getDeviceInterface();
5610 const InstanceInterface &vki = context.getInstanceInterface();
5611 const VkDevice device = context.getDevice();
5612 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
5613 Allocator &allocator = context.getAllocator();
5614
5615 if (m_type == RESOURCE_TYPE_BUFFER || m_type == RESOURCE_TYPE_INDEX_BUFFER || isIndirectBuffer(m_type))
5616 {
5617 m_bufferData = de::MovePtr<BufferResource>(
5618 new BufferResource(VK_NULL_HANDLE, 0u, static_cast<VkDeviceSize>(desc.size.x())));
5619 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(m_bufferData->size, usage);
5620 bufferCreateInfo.sharingMode = sharingMode;
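        // VK_SHARING_MODE_CONCURRENT requires the full list of queue families that will access the buffer.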
5621 if (queueFamilyIndex.size() > 0)
5622 {
5623 bufferCreateInfo.queueFamilyIndexCount = static_cast<uint32_t>(queueFamilyIndex.size());
5624 bufferCreateInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5625 }
5626 m_buffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::Any));
5627 m_bufferData->handle = **m_buffer;
5628 }
5629 else if (m_type == RESOURCE_TYPE_IMAGE)
5630 {
5631 m_imageData = de::MovePtr<ImageResource>(new ImageResource(
5632 VK_NULL_HANDLE, makeExtent3D(desc.size.x(), std::max(1, desc.size.y()), std::max(1, desc.size.z())),
5633 desc.imageType, desc.imageFormat, makeImageSubresourceRange(desc.imageAspect, 0u, 1u, 0u, 1u),
5634 makeImageSubresourceLayers(desc.imageAspect, 0u, 0u, 1u), vk::VK_IMAGE_TILING_OPTIMAL));
5635 VkImageCreateInfo imageInfo =
5636 makeImageCreateInfo(m_imageData->imageType, m_imageData->extent, m_imageData->format, usage,
5637 desc.imageSamples, m_imageData->tiling);
5638 imageInfo.sharingMode = sharingMode;
5639 if (queueFamilyIndex.size() > 0)
5640 {
5641 imageInfo.queueFamilyIndexCount = static_cast<uint32_t>(queueFamilyIndex.size());
5642 imageInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5643 }
5644
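        // Query format support up front so unsupported combinations cause a NotSupportedError instead
        // of a failed image creation.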
5645 VkImageFormatProperties imageFormatProperties;
5646 const VkResult formatResult = vki.getPhysicalDeviceImageFormatProperties(
5647 physDevice, imageInfo.format, imageInfo.imageType, imageInfo.tiling, imageInfo.usage, imageInfo.flags,
5648 &imageFormatProperties);
5649
5650 if (formatResult != VK_SUCCESS)
5651 TCU_THROW(NotSupportedError, "Image format is not supported");
5652
5653 if ((imageFormatProperties.sampleCounts & desc.imageSamples) != desc.imageSamples)
5654 TCU_THROW(NotSupportedError, "Requested sample count is not supported");
5655
5656 m_image = de::MovePtr<Image>(new Image(vk, device, allocator, imageInfo, MemoryRequirement::Any));
5657 m_imageData->handle = **m_image;
5658 }
5659 else
5660 DE_ASSERT(0);
5661 }
5662
5663 Resource::Resource(ResourceType type, vk::Move<vk::VkBuffer> buffer, de::MovePtr<vk::Allocation> allocation,
5664 vk::VkDeviceSize offset, vk::VkDeviceSize size)
5665 : m_type(type)
5666 , m_buffer(new Buffer(buffer, allocation))
5667 , m_bufferData(de::MovePtr<BufferResource>(new BufferResource(m_buffer->get(), offset, size)))
5668 {
5669 DE_ASSERT(type != RESOURCE_TYPE_IMAGE);
5670 }
5671
5672 Resource::Resource(vk::Move<vk::VkImage> image, de::MovePtr<vk::Allocation> allocation, const vk::VkExtent3D &extent,
5673 vk::VkImageType imageType, vk::VkFormat format, vk::VkImageSubresourceRange subresourceRange,
5674 vk::VkImageSubresourceLayers subresourceLayers, vk::VkImageTiling tiling)
5675 : m_type(RESOURCE_TYPE_IMAGE)
5676 , m_image(new Image(image, allocation))
5677 , m_imageData(de::MovePtr<ImageResource>(
5678 new ImageResource(m_image->get(), extent, imageType, format, subresourceRange, subresourceLayers, tiling)))
5679 {
5680 }
5681
5682 vk::VkDeviceMemory Resource::getMemory(void) const
5683 {
5684 if (m_type == RESOURCE_TYPE_IMAGE)
5685 return m_image->getAllocation().getMemory();
5686 else
5687 return m_buffer->getAllocation().getMemory();
5688 }
5689
5690 //! \note This function exists for performance reasons: a very large number of tests is generated, and rejecting
5691 //! unsupported resource/operation combinations here is cheaper than constructing an OperationSupport object first (see the usage sketch after this function).
5692 bool isResourceSupported(const OperationName opName, const ResourceDescription &resourceDesc)
5693 {
5694 switch (opName)
5695 {
5696 case OPERATION_NAME_WRITE_FILL_BUFFER:
5697 case OPERATION_NAME_WRITE_COPY_BUFFER:
5698 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
5699 case OPERATION_NAME_WRITE_SSBO_VERTEX:
5700 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
5701 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
5702 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
5703 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
5704 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
5705 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
5706 case OPERATION_NAME_READ_COPY_BUFFER:
5707 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
5708 case OPERATION_NAME_READ_SSBO_VERTEX:
5709 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
5710 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
5711 case OPERATION_NAME_READ_SSBO_GEOMETRY:
5712 case OPERATION_NAME_READ_SSBO_FRAGMENT:
5713 case OPERATION_NAME_READ_SSBO_COMPUTE:
5714 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
5715 case OPERATION_NAME_READ_VERTEX_INPUT:
5716 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5717
5718 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
5719 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
5720 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW;
5721
5722 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
5723 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
5724 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED;
5725
5726 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
5727 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
5728 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH;
5729
5730 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
5731 case OPERATION_NAME_READ_INDEX_INPUT:
5732 return resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER;
5733
5734 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
5735 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UPDATE_BUFFER_SIZE;
5736
5737 case OPERATION_NAME_WRITE_COPY_IMAGE:
5738 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
5739 case OPERATION_NAME_READ_COPY_IMAGE:
5740 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
5741 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5742
5743 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
5744 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType != VK_IMAGE_TYPE_3D &&
5745 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5746
5747 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
5748 case OPERATION_NAME_READ_RESOLVE_IMAGE:
5749 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5750 resourceDesc.imageSamples != VK_SAMPLE_COUNT_1_BIT;
5751
5752 case OPERATION_NAME_WRITE_BLIT_IMAGE:
5753 case OPERATION_NAME_READ_BLIT_IMAGE:
5754 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
5755 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
5756 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
5757 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
5758 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
5759 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
5760 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
5761 case OPERATION_NAME_READ_IMAGE_VERTEX:
5762 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
5763 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
5764 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
5765 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
5766 case OPERATION_NAME_READ_IMAGE_COMPUTE:
5767 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
5768 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5769 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5770
5771 case OPERATION_NAME_READ_UBO_VERTEX:
5772 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
5773 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
5774 case OPERATION_NAME_READ_UBO_GEOMETRY:
5775 case OPERATION_NAME_READ_UBO_FRAGMENT:
5776 case OPERATION_NAME_READ_UBO_COMPUTE:
5777 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
5778 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
5779 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
5780 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
5781 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
5782 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
5783 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
5784 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
5785 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UBO_RANGE;
5786
5787 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
5788 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5789 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5790
5791 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
5792 return resourceDesc.type == RESOURCE_TYPE_IMAGE &&
5793 (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) &&
5794 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5795
5796 case OPERATION_NAME_WRITE_DRAW:
5797 case OPERATION_NAME_WRITE_DRAW_INDEXED:
5798 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
5799 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
5800 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType == VK_IMAGE_TYPE_2D &&
5801 (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0 &&
5802 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5803
5804 case OPERATION_NAME_COPY_BUFFER:
5805 case OPERATION_NAME_COPY_SSBO_VERTEX:
5806 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
5807 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
5808 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
5809 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
5810 case OPERATION_NAME_COPY_SSBO_COMPUTE:
5811 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
5812 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5813
5814 case OPERATION_NAME_COPY_IMAGE:
5815 case OPERATION_NAME_BLIT_IMAGE:
5816 case OPERATION_NAME_COPY_IMAGE_VERTEX:
5817 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
5818 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
5819 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
5820 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
5821 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
5822 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
5823 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5824 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5825
5826 default:
5827 DE_ASSERT(0);
5828 return false;
5829 }
5830 }
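
//! \note A minimal usage sketch of the filter-before-create pattern described above: isResourceSupported() rejects
//!       unsupported resource/operation combinations cheaply before makeOperationSupport() is paid for. The helper
//!       name tryMakeSupport and the way the specialized-access flag is chosen are assumptions for illustration only;
//!       only functions defined in this file are used.
//! \code
//! de::MovePtr<OperationSupport> tryMakeSupport (const OperationName opName, const ResourceDescription &resourceDesc)
//! {
//!     if (!isResourceSupported(opName, resourceDesc))
//!         return de::MovePtr<OperationSupport>(); // cheap rejection: no OperationSupport object is constructed
//!
//!     // Test variants may toggle the specialized-access flag; deriving it from the query below is just one option.
//!     const bool specializedAccess = isSpecializedAccessFlagSupported(opName);
//!     return makeOperationSupport(opName, resourceDesc, specializedAccess);
//! }
//! \endcode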
5831
5832 std::string getOperationName(const OperationName opName)
5833 {
5834 switch (opName)
5835 {
5836 case OPERATION_NAME_WRITE_FILL_BUFFER:
5837 return "write_fill_buffer";
5838 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
5839 return "write_update_buffer";
5840 case OPERATION_NAME_WRITE_COPY_BUFFER:
5841 return "write_copy_buffer";
5842 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
5843 return "write_copy_buffer_to_image";
5844 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
5845 return "write_copy_image_to_buffer";
5846 case OPERATION_NAME_WRITE_COPY_IMAGE:
5847 return "write_copy_image";
5848 case OPERATION_NAME_WRITE_BLIT_IMAGE:
5849 return "write_blit_image";
5850 case OPERATION_NAME_WRITE_SSBO_VERTEX:
5851 return "write_ssbo_vertex";
5852 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
5853 return "write_ssbo_tess_control";
5854 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
5855 return "write_ssbo_tess_eval";
5856 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
5857 return "write_ssbo_geometry";
5858 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
5859 return "write_ssbo_fragment";
5860 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
5861 return "write_ssbo_compute";
5862 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
5863 return "write_ssbo_compute_indirect";
5864 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
5865 return "write_image_vertex";
5866 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
5867 return "write_image_tess_control";
5868 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
5869 return "write_image_tess_eval";
5870 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
5871 return "write_image_geometry";
5872 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
5873 return "write_image_fragment";
5874 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
5875 return "write_image_compute";
5876 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
5877 return "write_image_compute_multisample";
5878 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
5879 return "write_image_compute_indirect";
5880 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
5881 return "write_clear_color_image";
5882 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
5883 return "write_clear_depth_stencil_image";
5884 case OPERATION_NAME_WRITE_DRAW:
5885 return "write_draw";
5886 case OPERATION_NAME_WRITE_DRAW_INDEXED:
5887 return "write_draw_indexed";
5888 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
5889 return "write_draw_indirect";
5890 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
5891 return "write_draw_indexed_indirect";
5892 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
5893 return "write_clear_attachments";
5894 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
5895 return "write_indirect_buffer_draw";
5896 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
5897 return "write_indirect_buffer_draw_indexed";
5898 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
5899 return "write_indirect_buffer_dispatch";
5900 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
5901 return "write_update_index_buffer";
5902
5903 case OPERATION_NAME_READ_COPY_BUFFER:
5904 return "read_copy_buffer";
5905 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
5906 return "read_copy_buffer_to_image";
5907 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
5908 return "read_copy_image_to_buffer";
5909 case OPERATION_NAME_READ_COPY_IMAGE:
5910 return "read_copy_image";
5911 case OPERATION_NAME_READ_BLIT_IMAGE:
5912 return "read_blit_image";
5913 case OPERATION_NAME_READ_RESOLVE_IMAGE:
5914 return "read_resolve_image";
5915 case OPERATION_NAME_READ_UBO_VERTEX:
5916 return "read_ubo_vertex";
5917 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
5918 return "read_ubo_tess_control";
5919 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
5920 return "read_ubo_tess_eval";
5921 case OPERATION_NAME_READ_UBO_GEOMETRY:
5922 return "read_ubo_geometry";
5923 case OPERATION_NAME_READ_UBO_FRAGMENT:
5924 return "read_ubo_fragment";
5925 case OPERATION_NAME_READ_UBO_COMPUTE:
5926 return "read_ubo_compute";
5927 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
5928 return "read_ubo_compute_indirect";
5929 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
5930 return "read_ubo_texel_vertex";
5931 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
5932 return "read_ubo_texel_tess_control";
5933 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
5934 return "read_ubo_texel_tess_eval";
5935 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
5936 return "read_ubo_texel_geometry";
5937 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
5938 return "read_ubo_texel_fragment";
5939 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
5940 return "read_ubo_texel_compute";
5941 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
5942 return "read_ubo_texel_compute_indirect";
5943 case OPERATION_NAME_READ_SSBO_VERTEX:
5944 return "read_ssbo_vertex";
5945 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
5946 return "read_ssbo_tess_control";
5947 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
5948 return "read_ssbo_tess_eval";
5949 case OPERATION_NAME_READ_SSBO_GEOMETRY:
5950 return "read_ssbo_geometry";
5951 case OPERATION_NAME_READ_SSBO_FRAGMENT:
5952 return "read_ssbo_fragment";
5953 case OPERATION_NAME_READ_SSBO_COMPUTE:
5954 return "read_ssbo_compute";
5955 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
5956 return "read_ssbo_compute_indirect";
5957 case OPERATION_NAME_READ_IMAGE_VERTEX:
5958 return "read_image_vertex";
5959 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
5960 return "read_image_tess_control";
5961 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
5962 return "read_image_tess_eval";
5963 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
5964 return "read_image_geometry";
5965 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
5966 return "read_image_fragment";
5967 case OPERATION_NAME_READ_IMAGE_COMPUTE:
5968 return "read_image_compute";
5969 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
5970 return "read_image_compute_indirect";
5971 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
5972 return "read_indirect_buffer_draw";
5973 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
5974 return "read_indirect_buffer_draw_indexed";
5975 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
5976 return "read_indirect_buffer_dispatch";
5977 case OPERATION_NAME_READ_VERTEX_INPUT:
5978 return "read_vertex_input";
5979 case OPERATION_NAME_READ_INDEX_INPUT:
5980 return "read_index_input";
5981
5982 case OPERATION_NAME_COPY_BUFFER:
5983 return "copy_buffer";
5984 case OPERATION_NAME_COPY_IMAGE:
5985 return "copy_image";
5986 case OPERATION_NAME_BLIT_IMAGE:
5987 return "blit_image";
5988 case OPERATION_NAME_COPY_SSBO_VERTEX:
5989 return "copy_buffer_vertex";
5990 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
5991 return "copy_ssbo_tess_control";
5992 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
5993 return "copy_ssbo_tess_eval";
5994 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
5995 return "copy_ssbo_geometry";
5996 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
5997 return "copy_ssbo_fragment";
5998 case OPERATION_NAME_COPY_SSBO_COMPUTE:
5999 return "copy_ssbo_compute";
6000 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
6001 return "copy_ssbo_compute_indirect";
6002 case OPERATION_NAME_COPY_IMAGE_VERTEX:
6003 return "copy_image_vertex";
6004 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
6005 return "copy_image_tess_control";
6006 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
6007 return "copy_image_tess_eval";
6008 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
6009 return "copy_image_geometry";
6010 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
6011 return "copy_image_fragment";
6012 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
6013 return "copy_image_compute";
6014 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
6015 return "copy_image_compute_indirect";
6016 default:
6017 DE_ASSERT(0);
6018 return "";
6019 }
6020 }
6021
6022 bool isSpecializedAccessFlagSupported(const OperationName opName)
6023 {
6024 switch (opName)
6025 {
6026 case OPERATION_NAME_WRITE_SSBO_VERTEX:
6027 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
6028 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
6029 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
6030 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
6031 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
6032 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
6033 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
6034 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
6035 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
6036 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
6037 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
6038 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
6039 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
6040 case OPERATION_NAME_READ_UBO_VERTEX:
6041 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
6042 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
6043 case OPERATION_NAME_READ_UBO_GEOMETRY:
6044 case OPERATION_NAME_READ_UBO_FRAGMENT:
6045 case OPERATION_NAME_READ_UBO_COMPUTE:
6046 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
6047 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
6048 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
6049 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
6050 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
6051 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
6052 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
6053 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
6054 case OPERATION_NAME_READ_SSBO_VERTEX:
6055 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
6056 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
6057 case OPERATION_NAME_READ_SSBO_GEOMETRY:
6058 case OPERATION_NAME_READ_SSBO_FRAGMENT:
6059 case OPERATION_NAME_READ_SSBO_COMPUTE:
6060 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
6061 case OPERATION_NAME_READ_IMAGE_VERTEX:
6062 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
6063 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
6064 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
6065 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
6066 case OPERATION_NAME_READ_IMAGE_COMPUTE:
6067 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
6068 case OPERATION_NAME_COPY_SSBO_VERTEX:
6069 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
6070 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
6071 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
6072 case OPERATION_NAME_COPY_SSBO_COMPUTE:
6073 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
6074 case OPERATION_NAME_COPY_IMAGE_VERTEX:
6075 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
6076 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
6077 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
6078 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
6079 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
6080 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
6081 return true;
6082 default:
6083 return false;
6084 }
6085 }
6086 de::MovePtr<OperationSupport> makeOperationSupport(const OperationName opName, const ResourceDescription &resourceDesc,
6087 const bool specializedAccess)
6088 {
6089 switch (opName)
6090 {
6091 case OPERATION_NAME_WRITE_FILL_BUFFER:
6092 return de::MovePtr<OperationSupport>(
6093 new FillUpdateBuffer ::Support(resourceDesc, FillUpdateBuffer::BUFFER_OP_FILL));
6094 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
6095 return de::MovePtr<OperationSupport>(
6096 new FillUpdateBuffer ::Support(resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE));
6097 case OPERATION_NAME_WRITE_COPY_BUFFER:
6098 return de::MovePtr<OperationSupport>(new CopyBuffer ::Support(resourceDesc, ACCESS_MODE_WRITE));
6099 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
6100 return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support(resourceDesc, ACCESS_MODE_WRITE));
6101 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
6102 return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support(resourceDesc, ACCESS_MODE_WRITE));
6103 case OPERATION_NAME_WRITE_COPY_IMAGE:
6104 return de::MovePtr<OperationSupport>(
6105 new CopyBlitResolveImage ::Support(resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_WRITE));
6106 case OPERATION_NAME_WRITE_BLIT_IMAGE:
6107 return de::MovePtr<OperationSupport>(
6108 new CopyBlitResolveImage ::Support(resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_WRITE));
6109 case OPERATION_NAME_WRITE_SSBO_VERTEX:
6110 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6111 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6112 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
6113 return de::MovePtr<OperationSupport>(
6114 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess,
6115 VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6116 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
6117 return de::MovePtr<OperationSupport>(
6118 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess,
6119 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6120 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
6121 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6122 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6123 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
6124 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6125 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6126 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
6127 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6128 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6129 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
6130 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6131 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6132 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6133 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
6134 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6135 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6136 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
6137 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6138 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6139 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
6140 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6141 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6142 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
6143 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6144 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6145 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
6146 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6147 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6148 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
6149 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6150 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6151 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
6152 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6153 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6154 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6155 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
6156 return de::MovePtr<OperationSupport>(new ShaderAccess ::MSImageSupport(resourceDesc));
6157 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
6158 return de::MovePtr<OperationSupport>(new ClearImage ::Support(resourceDesc, ClearImage::CLEAR_MODE_COLOR));
6159 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
6160 return de::MovePtr<OperationSupport>(
6161 new ClearImage ::Support(resourceDesc, ClearImage::CLEAR_MODE_DEPTH_STENCIL));
6162 case OPERATION_NAME_WRITE_DRAW:
6163 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW));
6164 case OPERATION_NAME_WRITE_DRAW_INDEXED:
6165 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED));
6166 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
6167 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW_INDIRECT));
6168 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
6169 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED_INDIRECT));
6170 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
6171 return de::MovePtr<OperationSupport>(new ClearAttachments ::Support(resourceDesc));
6172 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
6173 return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport(resourceDesc));
6174 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
6175 return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport(resourceDesc));
6176 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
6177 return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport(resourceDesc));
6178 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
6179 return de::MovePtr<OperationSupport>(
6180 new FillUpdateBuffer ::Support(resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE_WITH_INDEX_PATTERN));
6181
6182 case OPERATION_NAME_READ_COPY_BUFFER:
6183 return de::MovePtr<OperationSupport>(new CopyBuffer ::Support(resourceDesc, ACCESS_MODE_READ));
6184 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
6185 return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support(resourceDesc, ACCESS_MODE_READ));
6186 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
6187 return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support(resourceDesc, ACCESS_MODE_READ));
6188 case OPERATION_NAME_READ_COPY_IMAGE:
6189 return de::MovePtr<OperationSupport>(
6190 new CopyBlitResolveImage::Support(resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_READ));
6191 case OPERATION_NAME_READ_BLIT_IMAGE:
6192 return de::MovePtr<OperationSupport>(
6193 new CopyBlitResolveImage::Support(resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_READ));
6194 case OPERATION_NAME_READ_RESOLVE_IMAGE:
6195 return de::MovePtr<OperationSupport>(
6196 new CopyBlitResolveImage::Support(resourceDesc, CopyBlitResolveImage::TYPE_RESOLVE, ACCESS_MODE_READ));
6197 case OPERATION_NAME_READ_UBO_VERTEX:
6198 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6199 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6200 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
6201 return de::MovePtr<OperationSupport>(
6202 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess,
6203 VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6204 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
6205 return de::MovePtr<OperationSupport>(
6206 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess,
6207 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6208 case OPERATION_NAME_READ_UBO_GEOMETRY:
6209 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6210 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6211 case OPERATION_NAME_READ_UBO_FRAGMENT:
6212 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6213 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6214 case OPERATION_NAME_READ_UBO_COMPUTE:
6215 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6216 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6217 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
6218 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6219 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6220 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6221 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
6222 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6223 resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6224 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
6225 return de::MovePtr<OperationSupport>(
6226 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ,
6227 specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6228 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
6229 return de::MovePtr<OperationSupport>(
6230 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ,
6231 specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6232 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
6233 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL,
6234 ACCESS_MODE_READ, specializedAccess,
6235 VK_SHADER_STAGE_GEOMETRY_BIT));
6236 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
6237 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL,
6238 ACCESS_MODE_READ, specializedAccess,
6239 VK_SHADER_STAGE_FRAGMENT_BIT));
6240 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
6241 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6242 resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6243 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
6244 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6245 resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6246 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6247 case OPERATION_NAME_READ_SSBO_VERTEX:
6248 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6249 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6250 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
6251 return de::MovePtr<OperationSupport>(
6252 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess,
6253 VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6254 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
6255 return de::MovePtr<OperationSupport>(
6256 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess,
6257 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6258 case OPERATION_NAME_READ_SSBO_GEOMETRY:
6259 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6260 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6261 case OPERATION_NAME_READ_SSBO_FRAGMENT:
6262 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6263 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6264 case OPERATION_NAME_READ_SSBO_COMPUTE:
6265 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6266 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6267 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
6268 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6269 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6270 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6271 case OPERATION_NAME_READ_IMAGE_VERTEX:
6272 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6273 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6274 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
6275 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6276 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6277 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
6278 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6279 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6280 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
6281 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6282 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6283 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
6284 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6285 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6286 case OPERATION_NAME_READ_IMAGE_COMPUTE:
6287 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6288 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6289 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
6290 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6291 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6292 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6293 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
6294 return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport(resourceDesc));
6295 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
6296 return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport(resourceDesc));
6297 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
6298 return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport(resourceDesc));
6299 case OPERATION_NAME_READ_VERTEX_INPUT:
6300 return de::MovePtr<OperationSupport>(new VertexInput ::Support(resourceDesc, VertexInput::DRAW_MODE_VERTEX));
6301 case OPERATION_NAME_READ_INDEX_INPUT:
6302 return de::MovePtr<OperationSupport>(new VertexInput ::Support(resourceDesc, VertexInput::DRAW_MODE_INDEXED));
6303
6304 case OPERATION_NAME_COPY_BUFFER:
6305 return de::MovePtr<OperationSupport>(new CopyBuffer ::CopySupport(resourceDesc));
6306 case OPERATION_NAME_COPY_IMAGE:
6307 return de::MovePtr<OperationSupport>(
6308 new CopyBlitResolveImage::CopySupport(resourceDesc, CopyBlitResolveImage::TYPE_COPY));
6309 case OPERATION_NAME_BLIT_IMAGE:
6310 return de::MovePtr<OperationSupport>(
6311 new CopyBlitResolveImage::CopySupport(resourceDesc, CopyBlitResolveImage::TYPE_BLIT));
6312 case OPERATION_NAME_COPY_SSBO_VERTEX:
6313 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6314 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6315 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
6316 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6317 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6318 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
6319 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6320 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6321 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
6322 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6323 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6324 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
6325 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6326 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6327 case OPERATION_NAME_COPY_SSBO_COMPUTE:
6328 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6329 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6330 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
6331 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6332 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6333 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6334 case OPERATION_NAME_COPY_IMAGE_VERTEX:
6335 return de::MovePtr<OperationSupport>(
6336 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_VERTEX_BIT, specializedAccess));
6337 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
6338 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport(
6339 resourceDesc, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, specializedAccess));
6340 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
6341 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport(
6342 resourceDesc, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, specializedAccess));
6343 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
6344 return de::MovePtr<OperationSupport>(
6345 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_GEOMETRY_BIT, specializedAccess));
6346 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
6347 return de::MovePtr<OperationSupport>(
6348 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_FRAGMENT_BIT, specializedAccess));
6349 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
6350 return de::MovePtr<OperationSupport>(
6351 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT, specializedAccess));
6352 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
6353 return de::MovePtr<OperationSupport>(
6354 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT, specializedAccess,
6355 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6356
6357 default:
6358 DE_ASSERT(0);
6359 return de::MovePtr<OperationSupport>();
6360 }
6361 }
6362
6363 bool isStageSupported(const vk::VkShaderStageFlagBits stage, const vk::VkQueueFlags queueFlags)
6364 {
6365 switch (stage)
6366 {
6367 case vk::VK_SHADER_STAGE_VERTEX_BIT:
6368 case vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
6369 case vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
6370 case vk::VK_SHADER_STAGE_GEOMETRY_BIT:
6371 case vk::VK_SHADER_STAGE_FRAGMENT_BIT:
6372 if ((queueFlags & (vk::VK_QUEUE_GRAPHICS_BIT)) == 0)
6373 return false;
6374 break;
6375 case vk::VK_SHADER_STAGE_COMPUTE_BIT:
6376 if ((queueFlags & (vk::VK_QUEUE_COMPUTE_BIT)) == 0)
6377 return false;
6378 break;
6379 default:
6380 break;
6381 }
6382 return true;
6383 }
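
//! \note A minimal sketch of how the queue-capability check above might be used; the helper canRunOnQueue is an
//!       assumption for illustration only.
//! \code
//! bool canRunOnQueue (const vk::VkQueueFamilyProperties &queueProps, const vk::VkShaderStageFlagBits stage)
//! {
//!     // Graphics stages require a graphics-capable queue and compute requires a compute-capable queue;
//!     // stages not covered by isStageSupported() pass through unconditionally.
//!     return isStageSupported(stage, queueProps.queueFlags);
//! }
//! \endcode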
6384
6385 } // namespace synchronization
6386 } // namespace vkt
6387