/*------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2019 The Khronos Group Inc.
 * Copyright (c) 2019 Valve Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Vulkan Memory Model padding access tests
 *//*--------------------------------------------------------------------*/

#include "vktMemoryModelPadding.hpp"
#include "vktTestCase.hpp"

#include "vkBufferWithMemory.hpp"
#include "vkBarrierUtil.hpp"
#include "vkObjUtil.hpp"
#include "vkBuilderUtil.hpp"
#include "vkTypeUtil.hpp"
#include "vkCmdUtil.hpp"

#include "deMemory.h"

namespace vkt
{
namespace MemoryModel
{

namespace
{
// The structures below match the shader declarations but add explicit padding members at the end, so their contents can
// be checked easily after the shader runs. Under the std140 layout, each structure's base alignment is rounded up to
// 16 bytes, so every array element occupies exactly 16 bytes; the size checks below verify this on the host side.

// Structure with a 12-byte padding at the end.
struct Pad12
{
    deInt32 a;
    deUint8 padding[12];
};

// Structure with an 8-byte padding at the end.
struct Pad8
{
    deInt32 a, b;
    deUint8 padding[8];
};

// Structure with a 4-byte padding at the end.
struct Pad4
{
    deInt32 a, b, c;
    deUint8 padding[4];
};

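// Every member above has 4-byte alignment, so the host compiler should add no implicit padding and each structure is
// expected to occupy exactly the 16 bytes of its std140 array element stride.
static_assert(sizeof(Pad12) == 16, "Unexpected host size for Pad12");
static_assert(sizeof(Pad8) == 16, "Unexpected host size for Pad8");
static_assert(sizeof(Pad4) == 16, "Unexpected host size for Pad4");
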
// Buffer structure for the input and output buffers.
struct BufferStructure
{
    static constexpr deUint32 kArrayLength = 3u;

    Pad12 subA[kArrayLength];
    Pad8 subB[kArrayLength];
    Pad4 subC[kArrayLength];

    // Pre-fill substructures with the given data.
    BufferStructure (deInt32 a, deInt32 b, deInt32 c, deUint8 paddingByte)
    {
        for (deUint32 i = 0; i < kArrayLength; ++i)
        {
            subA[i].a = a;
            subB[i].a = a;
            subC[i].a = a;
            subB[i].b = b;
            subC[i].b = b;
            subC[i].c = c;
            deMemset(subA[i].padding, static_cast<int>(paddingByte), sizeof(subA[i].padding));
            deMemset(subB[i].padding, static_cast<int>(paddingByte), sizeof(subB[i].padding));
            deMemset(subC[i].padding, static_cast<int>(paddingByte), sizeof(subC[i].padding));
        }
    }

    // Pre-fill data members with zeros and padding bytes with the given value.
    BufferStructure (deUint8 paddingByte)
        : BufferStructure (0, 0, 0, paddingByte)
    {}

    // Verify members and padding bytes.
    bool checkValues (deInt32 a, deInt32 b, deInt32 c, deUint8 paddingByte) const
    {
        for (deUint32 i = 0; i < kArrayLength; ++i)
        {
            if (subA[i].a != a || subB[i].a != a || subC[i].a != a ||
                subB[i].b != b || subC[i].b != b ||
                subC[i].c != c)
                return false;
        }
        return checkPaddingBytes(paddingByte);
    }

    // Verify padding bytes have a known value.
    bool checkPaddingBytes (deUint8 value) const
    {
        for (deUint32 j = 0; j < kArrayLength; ++j)
        {
            for (int i = 0; i < DE_LENGTH_OF_ARRAY(subA[j].padding); ++i)
            {
                if (subA[j].padding[i] != value)
                    return false;
            }
            for (int i = 0; i < DE_LENGTH_OF_ARRAY(subB[j].padding); ++i)
            {
                if (subB[j].padding[i] != value)
                    return false;
            }
            for (int i = 0; i < DE_LENGTH_OF_ARRAY(subC[j].padding); ++i)
            {
                if (subC[j].padding[i] != value)
                    return false;
            }
        }
        return true;
    }
};

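// Under the std140 layout the shader-side BufferStructure should place subA at offset 0, subB at offset 48 and subC at
// offset 96 (three sub-arrays of kArrayLength elements, 16 bytes per element), and the host-side structure is expected
// to match that size exactly.
static_assert(sizeof(BufferStructure) == 3u * BufferStructure::kArrayLength * 16u, "Unexpected host size for BufferStructure");
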
class PaddingTest : public vkt::TestCase
{
public:
    PaddingTest (tcu::TestContext& testCtx, const std::string& name, const std::string& description);
    virtual ~PaddingTest (void) {}

    virtual void initPrograms (vk::SourceCollections& programCollection) const;
    virtual TestInstance* createInstance (Context& context) const;
    virtual void checkSupport (Context& context) const;

    IterateResult iterate (void) { DE_ASSERT(false); return STOP; } // Deprecated in this module
};

class PaddingTestInstance : public vkt::TestInstance
{
public:
    PaddingTestInstance (Context& context)
        : vkt::TestInstance(context)
    {}
    virtual ~PaddingTestInstance (void) {}

    virtual tcu::TestStatus iterate (void);
};

PaddingTest::PaddingTest (tcu::TestContext& testCtx, const std::string& name, const std::string& description)
    : vkt::TestCase(testCtx, name, description)
{
}

TestInstance* PaddingTest::createInstance (Context& context) const
{
    return new PaddingTestInstance(context);
}

void PaddingTest::initPrograms (vk::SourceCollections& programCollection) const
{
    const std::string arrayLengthStr = std::to_string(BufferStructure::kArrayLength);

    std::ostringstream shaderSrc;
    shaderSrc
        << "#version 450\n"
        << "#pragma use_vulkan_memory_model\n"
        << "\n"
        << "layout (local_size_x=1, local_size_y=1, local_size_z=1) in;\n"
        << "\n"
        << "struct A {\n"
        << " int a;\n"
        << "};\n"
        << "\n"
        << "struct B {\n"
        << " int a, b;\n"
        << "};\n"
        << "\n"
        << "struct C {\n"
        << " int a, b, c;\n"
        << "};\n"
        << "\n"
        << "struct BufferStructure {\n"
        << " A subA[" << arrayLengthStr << "];\n"
        << " B subB[" << arrayLengthStr << "];\n"
        << " C subC[" << arrayLengthStr << "];\n"
        << "};\n"
        << "\n"
        << "layout (set=0, binding=0, std140) uniform InputBlock\n"
        << "{\n"
        << " BufferStructure inBlock;\n"
        << "};\n"
        << "\n"
        << "layout (set=0, binding=1, std140) buffer OutputBlock\n"
        << "{\n"
        << " BufferStructure outBlock;\n"
        << "};\n"
        << "\n"
        << "void main()\n"
        << "{\n"
        << " const uint idx = gl_GlobalInvocationID.x;\n"
        << " outBlock.subA[idx] = inBlock.subA[idx];\n"
        << " outBlock.subB[idx] = inBlock.subB[idx];\n"
        << " outBlock.subC[idx] = inBlock.subC[idx];\n"
        << "}\n";

    programCollection.glslSources.add("comp") << glu::ComputeSource(shaderSrc.str());
}

void PaddingTest::checkSupport (Context& context) const
{
    context.requireDeviceFunctionality("VK_KHR_vulkan_memory_model");
    if (!context.getVulkanMemoryModelFeatures().vulkanMemoryModel)
    {
        TCU_THROW(NotSupportedError, "Vulkan memory model not supported");
    }
}

tcu::TestStatus PaddingTestInstance::iterate (void)
{
    const auto& vkd = m_context.getDeviceInterface();
    const auto device = m_context.getDevice();
    auto& allocator = m_context.getDefaultAllocator();
    const auto queue = m_context.getUniversalQueue();
    const auto queueIndex = m_context.getUniversalQueueFamilyIndex();

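    // The input buffer is seeded with known member values and one padding byte pattern, while the output buffer is
    // initialized with a different padding byte pattern; the final check can then tell whether the shader copy wrote
    // anything into the output padding bytes.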
    constexpr vk::VkDeviceSize kBufferSize = static_cast<vk::VkDeviceSize>(sizeof(BufferStructure));
    constexpr deInt32 kA = 1;
    constexpr deInt32 kB = 2;
    constexpr deInt32 kC = 3;
    constexpr deUint8 kInputPaddingByte = 0xFEu;
    constexpr deUint8 kOutputPaddingByte = 0x7Fu;

    // Create input and output buffers.
    auto inputBufferInfo = vk::makeBufferCreateInfo(kBufferSize, vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
    auto outputBufferInfo = vk::makeBufferCreateInfo(kBufferSize, vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);

    vk::BufferWithMemory inputBuffer {vkd, device, allocator, inputBufferInfo, vk::MemoryRequirement::HostVisible};
    vk::BufferWithMemory outputBuffer {vkd, device, allocator, outputBufferInfo, vk::MemoryRequirement::HostVisible};

    // Fill buffers with initial contents.
    BufferStructure inputValues {kA, kB, kC, kInputPaddingByte};
    BufferStructure outputInit {kOutputPaddingByte};

    auto& inputAlloc = inputBuffer.getAllocation();
    auto& outputAlloc = outputBuffer.getAllocation();

    void* inputBufferPtr = static_cast<deUint8*>(inputAlloc.getHostPtr()) + inputAlloc.getOffset();
    void* outputBufferPtr = static_cast<deUint8*>(outputAlloc.getHostPtr()) + outputAlloc.getOffset();

    deMemcpy(inputBufferPtr, &inputValues, sizeof(inputValues));
    deMemcpy(outputBufferPtr, &outputInit, sizeof(outputInit));

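    // Flush the mapped ranges so the host writes are visible to the device even if the allocations are non-coherent.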
    vk::flushAlloc(vkd, device, inputAlloc);
    vk::flushAlloc(vkd, device, outputAlloc);

    // Descriptor set layout.
    vk::DescriptorSetLayoutBuilder layoutBuilder;
    layoutBuilder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
    layoutBuilder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
    auto descriptorSetLayout = layoutBuilder.build(vkd, device);

    // Descriptor pool.
    vk::DescriptorPoolBuilder poolBuilder;
    poolBuilder.addType(vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    poolBuilder.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
    auto descriptorPool = poolBuilder.build(vkd, device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);

    // Descriptor set.
    const auto descriptorSet = vk::makeDescriptorSet(vkd, device, descriptorPool.get(), descriptorSetLayout.get());

    // Update descriptor set using the buffers.
    const auto inputBufferDescriptorInfo = vk::makeDescriptorBufferInfo(inputBuffer.get(), 0ull, VK_WHOLE_SIZE);
    const auto outputBufferDescriptorInfo = vk::makeDescriptorBufferInfo(outputBuffer.get(), 0ull, VK_WHOLE_SIZE);

    vk::DescriptorSetUpdateBuilder updateBuilder;
    updateBuilder.writeSingle(descriptorSet.get(), vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &inputBufferDescriptorInfo);
    updateBuilder.writeSingle(descriptorSet.get(), vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo);
    updateBuilder.update(vkd, device);

    // Create compute pipeline.
    auto shaderModule = vk::createShaderModule(vkd, device, m_context.getBinaryCollection().get("comp"), 0u);
    auto pipelineLayout = vk::makePipelineLayout(vkd, device, descriptorSetLayout.get());

    const vk::VkComputePipelineCreateInfo pipelineCreateInfo =
    {
        vk::VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        nullptr,
        0u,                                                           // flags
        {                                                             // compute shader
            vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,  // VkStructureType sType;
            nullptr,                                                  // const void* pNext;
            0u,                                                       // VkPipelineShaderStageCreateFlags flags;
            vk::VK_SHADER_STAGE_COMPUTE_BIT,                          // VkShaderStageFlagBits stage;
            shaderModule.get(),                                       // VkShaderModule module;
            "main",                                                   // const char* pName;
            nullptr,                                                  // const VkSpecializationInfo* pSpecializationInfo;
        },
        pipelineLayout.get(),                                         // layout
        DE_NULL,                                                      // basePipelineHandle
        0,                                                            // basePipelineIndex
    };
    auto pipeline = vk::createComputePipeline(vkd, device, DE_NULL, &pipelineCreateInfo);

    // Synchronization barriers.
    auto inputBufferHostToDevBarrier = vk::makeBufferMemoryBarrier(vk::VK_ACCESS_HOST_WRITE_BIT, vk::VK_ACCESS_SHADER_READ_BIT, inputBuffer.get(), 0ull, VK_WHOLE_SIZE);
    auto outputBufferHostToDevBarrier = vk::makeBufferMemoryBarrier(vk::VK_ACCESS_HOST_WRITE_BIT, vk::VK_ACCESS_SHADER_WRITE_BIT, outputBuffer.get(), 0ull, VK_WHOLE_SIZE);
    auto outputBufferDevToHostBarrier = vk::makeBufferMemoryBarrier(vk::VK_ACCESS_SHADER_WRITE_BIT, vk::VK_ACCESS_HOST_READ_BIT, outputBuffer.get(), 0ull, VK_WHOLE_SIZE);

    // Command buffer.
    auto cmdPool = vk::makeCommandPool(vkd, device, queueIndex);
    auto cmdBufferPtr = vk::allocateCommandBuffer(vkd, device, cmdPool.get(), vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
    auto cmdBuffer = cmdBufferPtr.get();

    // Record and submit commands.
    vk::beginCommandBuffer(vkd, cmdBuffer);
    vkd.cmdBindPipeline(cmdBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.get());
    vkd.cmdBindDescriptorSets(cmdBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout.get(), 0, 1u, &descriptorSet.get(), 0u, nullptr);
    vkd.cmdPipelineBarrier(cmdBuffer, vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0u, 0u, nullptr, 1u, &inputBufferHostToDevBarrier, 0u, nullptr);
    vkd.cmdPipelineBarrier(cmdBuffer, vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0u, 0u, nullptr, 1u, &outputBufferHostToDevBarrier, 0u, nullptr);
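    // One workgroup per array element; the shader copies the sub-structure entries selected by gl_GlobalInvocationID.x.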
    vkd.cmdDispatch(cmdBuffer, BufferStructure::kArrayLength, 1u, 1u);
    vkd.cmdPipelineBarrier(cmdBuffer, vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0u, 0u, nullptr, 1u, &outputBufferDevToHostBarrier, 0u, nullptr);
    vk::endCommandBuffer(vkd, cmdBuffer);
    vk::submitCommandsAndWait(vkd, device, queue, cmdBuffer);

    // Verify output buffer contents: member values must have been copied from the input, while the padding bytes must
    // still contain the output initialization byte.
    vk::invalidateAlloc(vkd, device, outputAlloc);
    BufferStructure* outputData = reinterpret_cast<BufferStructure*>(outputBufferPtr);
    return (outputData->checkValues(kA, kB, kC, kOutputPaddingByte) ? tcu::TestStatus::pass("Pass") : tcu::TestStatus::fail("Unexpected values in output data"));
}

} // anonymous

tcu::TestCaseGroup* createPaddingTests (tcu::TestContext& testCtx)
{
    de::MovePtr<tcu::TestCaseGroup> paddingGroup(new tcu::TestCaseGroup(testCtx, "padding", "Padding bytes tests"));
    paddingGroup->addChild(new PaddingTest(testCtx, "test", "Check padding bytes at the end of structures are not touched on copy"));

    return paddingGroup.release();
}

} // MemoryModel
} // vkt