/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/VulkanMemory.h"

#include "include/gpu/vk/VulkanMemoryAllocator.h"

namespace skgpu {

using BufferUsage = VulkanMemoryAllocator::BufferUsage;

AllocBufferMemory(VulkanMemoryAllocator * allocator,VkBuffer buffer,BufferUsage usage,bool shouldPersistentlyMapCpuToGpu,const std::function<CheckResult> & checkResult,VulkanAlloc * alloc)16 bool VulkanMemory::AllocBufferMemory(VulkanMemoryAllocator* allocator,
17 VkBuffer buffer,
18 BufferUsage usage,
19 bool shouldPersistentlyMapCpuToGpu,
20 const std::function<CheckResult>& checkResult,
21 VulkanAlloc* alloc) {
22 VulkanBackendMemory memory = 0;
23 uint32_t propFlags;
24 if (usage == BufferUsage::kTransfersFromCpuToGpu ||
25 (usage == BufferUsage::kCpuWritesGpuReads && shouldPersistentlyMapCpuToGpu)) {
26 // In general it is always fine (and often better) to keep buffers always mapped that we are
27 // writing to on the cpu.
28 propFlags = VulkanMemoryAllocator::kPersistentlyMapped_AllocationPropertyFlag;
29 } else {
30 propFlags = VulkanMemoryAllocator::kNone_AllocationPropertyFlag;
31 }
32
33 VkResult result = allocator->allocateBufferMemory(buffer, usage, propFlags, &memory);
34 if (!checkResult(result)) {
35 return false;
36 }
37 allocator->getAllocInfo(memory, alloc);
38 return true;
39 }
40
FreeBufferMemory(VulkanMemoryAllocator * allocator,const VulkanAlloc & alloc)41 void VulkanMemory::FreeBufferMemory(VulkanMemoryAllocator* allocator, const VulkanAlloc& alloc) {
42 SkASSERT(alloc.fBackendMemory);
43 allocator->freeMemory(alloc.fBackendMemory);
44 }
45
AllocImageMemory(VulkanMemoryAllocator * allocator,VkImage image,Protected isProtected,bool forceDedicatedMemory,bool useLazyAllocation,const std::function<CheckResult> & checkResult,VulkanAlloc * alloc)46 bool VulkanMemory::AllocImageMemory(VulkanMemoryAllocator* allocator,
47 VkImage image,
48 Protected isProtected,
49 bool forceDedicatedMemory,
50 bool useLazyAllocation,
51 const std::function<CheckResult>& checkResult,
52 VulkanAlloc* alloc) {
53 VulkanBackendMemory memory = 0;
54
55 uint32_t propFlags;
56 // If we ever find that our allocator is not aggressive enough in using dedicated image
57 // memory we can add a size check here to force the use of dedicate memory. However for now,
58 // we let the allocators decide. The allocator can query the GPU for each image to see if the
59 // GPU recommends or requires the use of dedicated memory.
60 if (forceDedicatedMemory) {
61 propFlags = VulkanMemoryAllocator::kDedicatedAllocation_AllocationPropertyFlag;
62 } else {
63 propFlags = VulkanMemoryAllocator::kNone_AllocationPropertyFlag;
64 }
65
66 if (isProtected == Protected::kYes) {
67 propFlags = propFlags | VulkanMemoryAllocator::kProtected_AllocationPropertyFlag;
68 }
69
70 if (useLazyAllocation) {
71 propFlags = propFlags | VulkanMemoryAllocator::kLazyAllocation_AllocationPropertyFlag;
72 }
73
74 VkResult result = allocator->allocateImageMemory(image, propFlags, &memory);
75 if (!checkResult(result)) {
76 return false;
77 }
78
79 allocator->getAllocInfo(memory, alloc);
80 return true;
81 }
82
FreeImageMemory(VulkanMemoryAllocator * allocator,const VulkanAlloc & alloc)83 void VulkanMemory::FreeImageMemory(VulkanMemoryAllocator* allocator,
84 const VulkanAlloc& alloc) {
85 SkASSERT(alloc.fBackendMemory);
86 allocator->freeMemory(alloc.fBackendMemory);
87 }
88
MapAlloc(VulkanMemoryAllocator * allocator,const VulkanAlloc & alloc,const std::function<CheckResult> & checkResult)89 void* VulkanMemory::MapAlloc(VulkanMemoryAllocator* allocator,
90 const VulkanAlloc& alloc,
91 const std::function<CheckResult>& checkResult) {
92 SkASSERT(VulkanAlloc::kMappable_Flag & alloc.fFlags);
93 SkASSERT(alloc.fBackendMemory);
94 void* mapPtr;
95 VkResult result = allocator->mapMemory(alloc.fBackendMemory, &mapPtr);
96 if (!checkResult(result)) {
97 return nullptr;
98 }
99 return mapPtr;
100 }
101
UnmapAlloc(VulkanMemoryAllocator * allocator,const VulkanAlloc & alloc)102 void VulkanMemory::UnmapAlloc(VulkanMemoryAllocator* allocator,
103 const VulkanAlloc& alloc) {
104 SkASSERT(alloc.fBackendMemory);
105 allocator->unmapMemory(alloc.fBackendMemory);
106 }
107
GetNonCoherentMappedMemoryRange(const VulkanAlloc & alloc,VkDeviceSize offset,VkDeviceSize size,VkDeviceSize alignment,VkMappedMemoryRange * range)108 void VulkanMemory::GetNonCoherentMappedMemoryRange(const VulkanAlloc& alloc,
109 VkDeviceSize offset,
110 VkDeviceSize size,
111 VkDeviceSize alignment,
112 VkMappedMemoryRange* range) {
113 SkASSERT(alloc.fFlags & VulkanAlloc::kNoncoherent_Flag);
114 offset = offset + alloc.fOffset;
115 VkDeviceSize offsetDiff = offset & (alignment -1);
116 offset = offset - offsetDiff;
117 size = (size + alignment - 1) & ~(alignment - 1);
118 #ifdef SK_DEBUG
119 SkASSERT(offset >= alloc.fOffset);
120 SkASSERT(offset + size <= alloc.fOffset + alloc.fSize);
121 SkASSERT(0 == (offset & (alignment-1)));
122 SkASSERT(size > 0);
123 SkASSERT(0 == (size & (alignment-1)));
124 #endif
125
126 memset(range, 0, sizeof(VkMappedMemoryRange));
127 range->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
128 range->memory = alloc.fMemory;
129 range->offset = offset;
130 range->size = size;
131 }
132
FlushMappedAlloc(VulkanMemoryAllocator * allocator,const VulkanAlloc & alloc,VkDeviceSize offset,VkDeviceSize size,const std::function<CheckResult> & checkResult)133 void VulkanMemory::FlushMappedAlloc(VulkanMemoryAllocator* allocator,
134 const VulkanAlloc& alloc,
135 VkDeviceSize offset,
136 VkDeviceSize size,
137 const std::function<CheckResult>& checkResult) {
138 if (alloc.fFlags & VulkanAlloc::kNoncoherent_Flag) {
139 SkASSERT(offset == 0);
140 SkASSERT(size <= alloc.fSize);
141 SkASSERT(alloc.fBackendMemory);
142 VkResult result = allocator->flushMemory(alloc.fBackendMemory, offset, size);
143 checkResult(result);
144 }
145 }
146
InvalidateMappedAlloc(VulkanMemoryAllocator * allocator,const VulkanAlloc & alloc,VkDeviceSize offset,VkDeviceSize size,const std::function<CheckResult> & checkResult)147 void VulkanMemory::InvalidateMappedAlloc(VulkanMemoryAllocator* allocator,
148 const VulkanAlloc& alloc,
149 VkDeviceSize offset,
150 VkDeviceSize size,
151 const std::function<CheckResult>& checkResult) {
152 if (alloc.fFlags & VulkanAlloc::kNoncoherent_Flag) {
153 SkASSERT(offset == 0);
154 SkASSERT(size <= alloc.fSize);
155 SkASSERT(alloc.fBackendMemory);
156 VkResult result = allocator->invalidateMemory(alloc.fBackendMemory, offset, size);
157 checkResult(result);
158 }
159 }

} // namespace skgpu