1 /*-------------------------------------------------------------------------
2 * Vulkan CTS Framework
3 * --------------------
4 *
5 * Copyright (c) 2019 Google Inc.
6 * Copyright (c) 2019 The Khronos Group Inc.
7 *
8 * Licensed under the Apache License, Version 2.0 (the "License");
9 * you may not use this file except in compliance with the License.
10 * You may obtain a copy of the License at
11 *
12 * http://www.apache.org/licenses/LICENSE-2.0
13 *
14 * Unless required by applicable law or agreed to in writing, software
15 * distributed under the License is distributed on an "AS IS" BASIS,
16 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 * See the License for the specific language governing permissions and
18 * limitations under the License.
19 *
20 *//*!
21 * \file
22 * \brief Memory management utilities.
23 *//*--------------------------------------------------------------------*/
24
25 #include "vkMemUtil.hpp"
26 #include "deDefs.h"
27 #include "vkStrUtil.hpp"
28 #include "vkQueryUtil.hpp"
29 #include "vkRef.hpp"
30 #include "vkRefUtil.hpp"
31 #include "vkImageUtil.hpp"
32 #include "deInt32.h"
33
34 #include <sstream>
35
36 namespace vk
37 {
38
39 using de::UniquePtr;
40 using de::MovePtr;
41 using std::vector;
42
43 typedef de::SharedPtr<Allocation> AllocationSp;
44
45 namespace
46 {
47
48 class HostPtr
49 {
50 public:
51 HostPtr (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags);
52 ~HostPtr (void);
53
get(void) const54 void* get (void) const { return m_ptr; }
55
56 private:
57 const DeviceInterface& m_vkd;
58 const VkDevice m_device;
59 const VkDeviceMemory m_memory;
60 void* const m_ptr;
61 };
62
HostPtr(const DeviceInterface & vkd,VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags)63 HostPtr::HostPtr (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags)
64 : m_vkd (vkd)
65 , m_device (device)
66 , m_memory (memory)
67 , m_ptr (mapMemory(vkd, device, memory, offset, size, flags))
68 {
69 }
70
~HostPtr(void)71 HostPtr::~HostPtr (void)
72 {
73 m_vkd.unmapMemory(m_device, m_memory);
74 }
75
isHostVisibleMemory(const VkPhysicalDeviceMemoryProperties & deviceMemProps,deUint32 memoryTypeNdx)76 bool isHostVisibleMemory (const VkPhysicalDeviceMemoryProperties& deviceMemProps, deUint32 memoryTypeNdx)
77 {
78 DE_ASSERT(memoryTypeNdx < deviceMemProps.memoryTypeCount);
79 return (deviceMemProps.memoryTypes[memoryTypeNdx].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0u;
80 }
81
82 } // anonymous
83
84 // Allocation
85
Allocation(VkDeviceMemory memory,VkDeviceSize offset,void * hostPtr)86 Allocation::Allocation (VkDeviceMemory memory, VkDeviceSize offset, void* hostPtr)
87 : m_memory (memory)
88 , m_offset (offset)
89 , m_hostPtr (hostPtr)
90 {
91 }
92
~Allocation(void)93 Allocation::~Allocation (void)
94 {
95 }
96
flushAlloc(const DeviceInterface & vkd,VkDevice device,const Allocation & alloc)97 void flushAlloc (const DeviceInterface& vkd, VkDevice device, const Allocation& alloc)
98 {
99 flushMappedMemoryRange(vkd, device, alloc.getMemory(), alloc.getOffset(), VK_WHOLE_SIZE);
100 }
101
invalidateAlloc(const DeviceInterface & vkd,VkDevice device,const Allocation & alloc)102 void invalidateAlloc (const DeviceInterface& vkd, VkDevice device, const Allocation& alloc)
103 {
104 invalidateMappedMemoryRange(vkd, device, alloc.getMemory(), alloc.getOffset(), VK_WHOLE_SIZE);
105 }
106
107 // MemoryRequirement
108
// Predefined requirement instances; callers combine them with the bitwise
// operators declared in the header (e.g. HostVisible | Coherent).
const MemoryRequirement MemoryRequirement::Any = MemoryRequirement(0x0u);
const MemoryRequirement MemoryRequirement::HostVisible = MemoryRequirement(MemoryRequirement::FLAG_HOST_VISIBLE);
const MemoryRequirement MemoryRequirement::Coherent = MemoryRequirement(MemoryRequirement::FLAG_COHERENT);
const MemoryRequirement MemoryRequirement::LazilyAllocated = MemoryRequirement(MemoryRequirement::FLAG_LAZY_ALLOCATION);
const MemoryRequirement MemoryRequirement::Protected = MemoryRequirement(MemoryRequirement::FLAG_PROTECTED);
const MemoryRequirement MemoryRequirement::Local = MemoryRequirement(MemoryRequirement::FLAG_LOCAL);
const MemoryRequirement MemoryRequirement::Cached = MemoryRequirement(MemoryRequirement::FLAG_CACHED);
const MemoryRequirement MemoryRequirement::NonLocal = MemoryRequirement(MemoryRequirement::FLAG_NON_LOCAL);
const MemoryRequirement MemoryRequirement::DeviceAddress = MemoryRequirement(MemoryRequirement::FLAG_DEVICE_ADDRESS);
const MemoryRequirement MemoryRequirement::DeviceAddressCaptureReplay = MemoryRequirement(MemoryRequirement::FLAG_DEVICE_ADDRESS_CAPTURE_REPLAY);
119
matchesHeap(VkMemoryPropertyFlags heapFlags) const120 bool MemoryRequirement::matchesHeap (VkMemoryPropertyFlags heapFlags) const
121 {
122 // Quick check
123 if ((m_flags & FLAG_COHERENT) && !(m_flags & FLAG_HOST_VISIBLE))
124 DE_FATAL("Coherent memory must be host-visible");
125 if ((m_flags & FLAG_HOST_VISIBLE) && (m_flags & FLAG_LAZY_ALLOCATION))
126 DE_FATAL("Lazily allocated memory cannot be mappable");
127 if ((m_flags & FLAG_PROTECTED) && (m_flags & FLAG_HOST_VISIBLE))
128 DE_FATAL("Protected memory cannot be mappable");
129
130 // host-visible
131 if ((m_flags & FLAG_HOST_VISIBLE) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
132 return false;
133
134 // coherent
135 if ((m_flags & FLAG_COHERENT) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
136 return false;
137
138 // lazy
139 if ((m_flags & FLAG_LAZY_ALLOCATION) && !(heapFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT))
140 return false;
141
142 // protected
143 if ((m_flags & FLAG_PROTECTED) && !(heapFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT))
144 return false;
145
146 // local
147 if ((m_flags & FLAG_LOCAL) && !(heapFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT))
148 return false;
149
150 // cached
151 if ((m_flags & FLAG_CACHED) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT))
152 return false;
153
154 // non-local
155 if ((m_flags & FLAG_NON_LOCAL) && (heapFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT))
156 return false;
157
158 return true;
159 }
160
// Private constructor: clients are expected to use the predefined static
// instances above and combine them with the bitwise operators.
MemoryRequirement::MemoryRequirement (deUint32 flags)
	: m_flags(flags)
{
}
165
166 // SimpleAllocator
167
//! Allocation implementation used by SimpleAllocator: owns a whole
//! VkDeviceMemory object and, for host-visible memory, a persistent
//! mapping of it. Both are released automatically on destruction.
class SimpleAllocation : public Allocation
{
public:
	SimpleAllocation (Move<VkDeviceMemory> mem, MovePtr<HostPtr> hostPtr, size_t offset);
	virtual ~SimpleAllocation (void);

private:
	const Unique<VkDeviceMemory> m_memHolder;	// Owns and frees the memory object.
	const UniquePtr<HostPtr> m_hostPtr;			// Null when memory is not host-visible.
};
178
// Note on initialization order: the Allocation base is given the raw handle
// (*mem) BEFORE m_memHolder consumes the Move<> object and takes ownership.
// This is well-defined because the base class is always initialized before
// the members, and members follow declaration order.
SimpleAllocation::SimpleAllocation (Move<VkDeviceMemory> mem, MovePtr<HostPtr> hostPtr, size_t offset)
	: Allocation (*mem, offset, hostPtr ? hostPtr->get() : DE_NULL)
	, m_memHolder (mem)
	, m_hostPtr (hostPtr)
{
}

SimpleAllocation::~SimpleAllocation (void)
{
}
189
SimpleAllocator(const DeviceInterface & vk,VkDevice device,const VkPhysicalDeviceMemoryProperties & deviceMemProps,const OptionalOffsetParams & offsetParams)190 SimpleAllocator::SimpleAllocator (const DeviceInterface& vk, VkDevice device, const VkPhysicalDeviceMemoryProperties& deviceMemProps, const OptionalOffsetParams& offsetParams)
191 : m_vk (vk)
192 , m_device (device)
193 , m_memProps (deviceMemProps)
194 , m_offsetParams (offsetParams)
195 {
196 if (m_offsetParams)
197 {
198 const auto zero = VkDeviceSize{0};
199 DE_UNREF(zero); // For release builds.
200 // If an offset is provided, a non-coherent atom size must be provided too.
201 DE_ASSERT(m_offsetParams->offset == zero || m_offsetParams->nonCoherentAtomSize != zero);
202 }
203 }
204
allocate(const VkMemoryAllocateInfo & allocInfo,VkDeviceSize alignment)205 MovePtr<Allocation> SimpleAllocator::allocate (const VkMemoryAllocateInfo& allocInfo, VkDeviceSize alignment)
206 {
207 // Align the offset to the requirements.
208 // Aligning to the non coherent atom size prevents flush and memory invalidation valid usage errors.
209 const auto requiredAlignment = (m_offsetParams ? de::lcm(m_offsetParams->nonCoherentAtomSize, alignment) : alignment);
210 const auto offset = (m_offsetParams ? de::roundUp(m_offsetParams->offset, requiredAlignment) : 0);
211
212 VkMemoryAllocateInfo info = allocInfo;
213 info.allocationSize += offset;
214
215 Move<VkDeviceMemory> mem = allocateMemory(m_vk, m_device, &info);
216 MovePtr<HostPtr> hostPtr;
217
218 if (isHostVisibleMemory(m_memProps, info.memoryTypeIndex))
219 hostPtr = MovePtr<HostPtr>(new HostPtr(m_vk, m_device, *mem, offset, info.allocationSize, 0u));
220
221 return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr, static_cast<size_t>(offset)));
222 }
223
allocate(const VkMemoryRequirements & memReqs,MemoryRequirement requirement)224 MovePtr<Allocation> SimpleAllocator::allocate (const VkMemoryRequirements& memReqs, MemoryRequirement requirement)
225 {
226 const auto memoryTypeNdx = selectMatchingMemoryType(m_memProps, memReqs.memoryTypeBits, requirement);
227
228 // Align the offset to the requirements.
229 // Aligning to the non coherent atom size prevents flush and memory invalidation valid usage errors.
230 const auto requiredAlignment = (m_offsetParams ? de::lcm(m_offsetParams->nonCoherentAtomSize, memReqs.alignment) : memReqs.alignment);
231 const auto offset = (m_offsetParams ? de::roundUp(m_offsetParams->offset, requiredAlignment) : 0);
232
233 VkMemoryAllocateInfo allocInfo =
234 {
235 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType sType;
236 DE_NULL, // const void* pNext;
237 memReqs.size + offset, // VkDeviceSize allocationSize;
238 memoryTypeNdx, // deUint32 memoryTypeIndex;
239 };
240
241 VkMemoryAllocateFlagsInfo allocFlagsInfo =
242 {
243 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, // VkStructureType sType
244 DE_NULL, // const void* pNext
245 0, // VkMemoryAllocateFlags flags
246 0, // uint32_t deviceMask
247 };
248
249 if (requirement & MemoryRequirement::DeviceAddress)
250 allocFlagsInfo.flags |= VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
251
252 if (requirement & MemoryRequirement::DeviceAddressCaptureReplay)
253 allocFlagsInfo.flags |= VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT;
254
255 if (allocFlagsInfo.flags)
256 allocInfo.pNext = &allocFlagsInfo;
257
258 Move<VkDeviceMemory> mem = allocateMemory(m_vk, m_device, &allocInfo);
259 MovePtr<HostPtr> hostPtr;
260
261 if (requirement & MemoryRequirement::HostVisible)
262 {
263 DE_ASSERT(isHostVisibleMemory(m_memProps, allocInfo.memoryTypeIndex));
264 hostPtr = MovePtr<HostPtr>(new HostPtr(m_vk, m_device, *mem, offset, memReqs.size, 0u));
265 }
266
267 return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr, static_cast<size_t>(offset)));
268 }
269
allocateExtended(const InstanceInterface & vki,const DeviceInterface & vkd,const VkPhysicalDevice & physDevice,const VkDevice device,const VkMemoryRequirements & memReqs,const MemoryRequirement requirement,const void * pNext)270 MovePtr<Allocation> allocateExtended (const InstanceInterface& vki,
271 const DeviceInterface& vkd,
272 const VkPhysicalDevice& physDevice,
273 const VkDevice device,
274 const VkMemoryRequirements& memReqs,
275 const MemoryRequirement requirement,
276 const void* pNext)
277 {
278 const VkPhysicalDeviceMemoryProperties memoryProperties = getPhysicalDeviceMemoryProperties(vki, physDevice);
279 const deUint32 memoryTypeNdx = selectMatchingMemoryType(memoryProperties, memReqs.memoryTypeBits, requirement);
280 const VkMemoryAllocateInfo allocInfo =
281 {
282 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType sType
283 pNext, // const void* pNext
284 memReqs.size, // VkDeviceSize allocationSize
285 memoryTypeNdx, // deUint32 memoryTypeIndex
286 };
287 Move<VkDeviceMemory> mem = allocateMemory(vkd, device, &allocInfo);
288 MovePtr<HostPtr> hostPtr;
289
290 if (requirement & MemoryRequirement::HostVisible)
291 {
292 DE_ASSERT(isHostVisibleMemory(memoryProperties, allocInfo.memoryTypeIndex));
293 hostPtr = MovePtr<HostPtr>(new HostPtr(vkd, device, *mem, 0u, allocInfo.allocationSize, 0u));
294 }
295
296 return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr, 0u));
297 }
298
allocateDedicated(const InstanceInterface & vki,const DeviceInterface & vkd,const VkPhysicalDevice & physDevice,const VkDevice device,const VkBuffer buffer,MemoryRequirement requirement)299 de::MovePtr<Allocation> allocateDedicated (const InstanceInterface& vki,
300 const DeviceInterface& vkd,
301 const VkPhysicalDevice& physDevice,
302 const VkDevice device,
303 const VkBuffer buffer,
304 MemoryRequirement requirement)
305 {
306 const VkMemoryRequirements memoryRequirements = getBufferMemoryRequirements(vkd, device, buffer);
307 const VkMemoryDedicatedAllocateInfo dedicatedAllocationInfo =
308 {
309 VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, // VkStructureType sType
310 DE_NULL, // const void* pNext
311 DE_NULL, // VkImage image
312 buffer // VkBuffer buffer
313 };
314
315 return allocateExtended(vki, vkd, physDevice, device, memoryRequirements, requirement, &dedicatedAllocationInfo);
316 }
317
allocateDedicated(const InstanceInterface & vki,const DeviceInterface & vkd,const VkPhysicalDevice & physDevice,const VkDevice device,const VkImage image,MemoryRequirement requirement)318 de::MovePtr<Allocation> allocateDedicated (const InstanceInterface& vki,
319 const DeviceInterface& vkd,
320 const VkPhysicalDevice& physDevice,
321 const VkDevice device,
322 const VkImage image,
323 MemoryRequirement requirement)
324 {
325 const VkMemoryRequirements memoryRequirements = getImageMemoryRequirements(vkd, device, image);
326 const VkMemoryDedicatedAllocateInfo dedicatedAllocationInfo =
327 {
328 VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, // VkStructureType sType
329 DE_NULL, // const void* pNext
330 image, // VkImage image
331 DE_NULL // VkBuffer buffer
332 };
333
334 return allocateExtended(vki, vkd, physDevice, device, memoryRequirements, requirement, &dedicatedAllocationInfo);
335 }
336
mapMemory(const DeviceInterface & vkd,VkDevice device,VkDeviceMemory mem,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags)337 void* mapMemory (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags)
338 {
339 void* hostPtr = DE_NULL;
340 VK_CHECK(vkd.mapMemory(device, mem, offset, size, flags, &hostPtr));
341 TCU_CHECK(hostPtr);
342 return hostPtr;
343 }
344
flushMappedMemoryRange(const DeviceInterface & vkd,VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size)345 void flushMappedMemoryRange (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size)
346 {
347 const VkMappedMemoryRange range =
348 {
349 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
350 DE_NULL,
351 memory,
352 offset,
353 size
354 };
355
356 VK_CHECK(vkd.flushMappedMemoryRanges(device, 1u, &range));
357 }
358
invalidateMappedMemoryRange(const DeviceInterface & vkd,VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size)359 void invalidateMappedMemoryRange (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size)
360 {
361 const VkMappedMemoryRange range =
362 {
363 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
364 DE_NULL,
365 memory,
366 offset,
367 size
368 };
369
370 VK_CHECK(vkd.invalidateMappedMemoryRanges(device, 1u, &range));
371 }
372
selectMatchingMemoryType(const VkPhysicalDeviceMemoryProperties & deviceMemProps,deUint32 allowedMemTypeBits,MemoryRequirement requirement)373 deUint32 selectMatchingMemoryType (const VkPhysicalDeviceMemoryProperties& deviceMemProps, deUint32 allowedMemTypeBits, MemoryRequirement requirement)
374 {
375 const deUint32 compatibleTypes = getCompatibleMemoryTypes(deviceMemProps, requirement);
376 deUint32 candidates = allowedMemTypeBits & compatibleTypes;
377 #ifdef CTS_USES_VULKANSC
378 // in case of Vulkan SC: prefer memory types from SEU-safe heaps ( SEU = single event upsets )
379 const deUint32 seuSafeTypes = getSEUSafeMemoryTypes(deviceMemProps);
380 deUint32 seuSafeCandidates = candidates & seuSafeTypes;
381 if (seuSafeCandidates != 0u)
382 candidates = seuSafeCandidates;
383 #endif // CTS_USES_VULKANSC
384
385 if (candidates == 0u)
386 TCU_THROW(NotSupportedError, "No compatible memory type found");
387
388 return (deUint32)deCtz32(candidates);
389 }
390
getCompatibleMemoryTypes(const VkPhysicalDeviceMemoryProperties & deviceMemProps,MemoryRequirement requirement)391 deUint32 getCompatibleMemoryTypes (const VkPhysicalDeviceMemoryProperties& deviceMemProps, MemoryRequirement requirement)
392 {
393 deUint32 compatibleTypes = 0u;
394
395 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < deviceMemProps.memoryTypeCount; memoryTypeNdx++)
396 {
397 if (requirement.matchesHeap(deviceMemProps.memoryTypes[memoryTypeNdx].propertyFlags))
398 compatibleTypes |= (1u << memoryTypeNdx);
399 }
400
401 return compatibleTypes;
402 }
403
404 #ifdef CTS_USES_VULKANSC
405
getSEUSafeMemoryTypes(const VkPhysicalDeviceMemoryProperties & deviceMemProps)406 deUint32 getSEUSafeMemoryTypes (const VkPhysicalDeviceMemoryProperties& deviceMemProps)
407 {
408 deUint32 seuSafeTypes = 0u;
409
410 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < deviceMemProps.memoryTypeCount; memoryTypeNdx++)
411 {
412 if( ( deviceMemProps.memoryHeaps[deviceMemProps.memoryTypes[memoryTypeNdx].heapIndex].flags & VK_MEMORY_HEAP_SEU_SAFE_BIT ) != 0u )
413 seuSafeTypes |= (1u << memoryTypeNdx);
414 }
415 return seuSafeTypes;
416 }
417
418 #endif // CTS_USES_VULKANSC
419
bindImagePlanesMemory(const DeviceInterface & vkd,const VkDevice device,const VkImage image,const deUint32 numPlanes,vector<AllocationSp> & allocations,vk::Allocator & allocator,const vk::MemoryRequirement requirement)420 void bindImagePlanesMemory (const DeviceInterface& vkd,
421 const VkDevice device,
422 const VkImage image,
423 const deUint32 numPlanes,
424 vector<AllocationSp>& allocations,
425 vk::Allocator& allocator,
426 const vk::MemoryRequirement requirement)
427 {
428 vector<VkBindImageMemoryInfo> coreInfos;
429 vector<VkBindImagePlaneMemoryInfo> planeInfos;
430 coreInfos.reserve(numPlanes);
431 planeInfos.reserve(numPlanes);
432
433 for (deUint32 planeNdx = 0; planeNdx < numPlanes; ++planeNdx)
434 {
435 const VkImageAspectFlagBits planeAspect = getPlaneAspect(planeNdx);
436 const VkMemoryRequirements reqs = getImagePlaneMemoryRequirements(vkd, device, image, planeAspect);
437
438 allocations.push_back(AllocationSp(allocator.allocate(reqs, requirement).release()));
439
440 VkBindImagePlaneMemoryInfo planeInfo =
441 {
442 VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
443 DE_NULL,
444 planeAspect
445 };
446 planeInfos.push_back(planeInfo);
447
448 VkBindImageMemoryInfo coreInfo =
449 {
450 VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
451 &planeInfos.back(),
452 image,
453 allocations.back()->getMemory(),
454 allocations.back()->getOffset(),
455 };
456 coreInfos.push_back(coreInfo);
457 }
458
459 VK_CHECK(vkd.bindImageMemory2(device, numPlanes, coreInfos.data()));
460 }
461
bindImage(const DeviceInterface & vk,const VkDevice device,Allocator & allocator,const VkImage image,const MemoryRequirement requirement)462 MovePtr<Allocation> bindImage (const DeviceInterface& vk,
463 const VkDevice device,
464 Allocator& allocator,
465 const VkImage image,
466 const MemoryRequirement requirement)
467 {
468 MovePtr<Allocation> alloc = allocator.allocate(getImageMemoryRequirements(vk, device, image), requirement);
469 VK_CHECK(vk.bindImageMemory(device, image, alloc->getMemory(), alloc->getOffset()));
470 return alloc;
471 }
472
bindBuffer(const DeviceInterface & vk,const VkDevice device,Allocator & allocator,const VkBuffer buffer,const MemoryRequirement requirement)473 MovePtr<Allocation> bindBuffer (const DeviceInterface& vk,
474 const VkDevice device,
475 Allocator& allocator,
476 const VkBuffer buffer,
477 const MemoryRequirement requirement)
478 {
479 MovePtr<Allocation> alloc(allocator.allocate(getBufferMemoryRequirements(vk, device, buffer), requirement));
480 VK_CHECK(vk.bindBufferMemory(device, buffer, alloc->getMemory(), alloc->getOffset()));
481 return alloc;
482 }
483
zeroBuffer(const DeviceInterface & vk,const VkDevice device,const Allocation & alloc,const VkDeviceSize size)484 void zeroBuffer (const DeviceInterface& vk,
485 const VkDevice device,
486 const Allocation& alloc,
487 const VkDeviceSize size)
488 {
489 deMemset(alloc.getHostPtr(), 0, static_cast<std::size_t>(size));
490 flushAlloc(vk, device, alloc);
491 }
492
493 } // vk
494