/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkTypes.h"
#include "include/gpu/GpuTypes.h"
#include "include/gpu/vk/VulkanMemoryAllocator.h"
#include "include/gpu/vk/VulkanTypes.h"
#include "src/gpu/vk/VulkanMemory.h"

#include <cstdint>
#include <cstring>

namespace skgpu {

using BufferUsage = VulkanMemoryAllocator::BufferUsage;

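// Allocates memory for the given buffer through the client-supplied VulkanMemoryAllocator,
// reporting failures via checkResult. On success the allocation details are written to alloc.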
bool VulkanMemory::AllocBufferMemory(VulkanMemoryAllocator* allocator,
                                     VkBuffer buffer,
                                     BufferUsage usage,
                                     bool shouldPersistentlyMapCpuToGpu,
                                     const std::function<CheckResult>& checkResult,
                                     VulkanAlloc* alloc) {
    VulkanBackendMemory memory = 0;
    uint32_t propFlags;
    if (usage == BufferUsage::kTransfersFromCpuToGpu ||
        (usage == BufferUsage::kCpuWritesGpuReads && shouldPersistentlyMapCpuToGpu)) {
        // In general it is always fine (and often better) to keep buffers that we write to on the
        // CPU persistently mapped.
        propFlags = VulkanMemoryAllocator::kPersistentlyMapped_AllocationPropertyFlag;
    } else {
        propFlags = VulkanMemoryAllocator::kNone_AllocationPropertyFlag;
    }

    VkResult result = allocator->allocateBufferMemory(buffer, usage, propFlags, &memory);
    if (!checkResult(result)) {
        return false;
    }
    allocator->getAllocInfo(memory, alloc);
    return true;
}

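// Returns buffer memory previously obtained from AllocBufferMemory to the allocator.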
void VulkanMemory::FreeBufferMemory(VulkanMemoryAllocator* allocator, const VulkanAlloc& alloc) {
    SkASSERT(alloc.fBackendMemory);
    allocator->freeMemory(alloc.fBackendMemory);
}

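// Allocates memory for the given image, optionally requesting dedicated, protected, or lazily
// allocated memory. Failures are reported via checkResult; on success the allocation details are
// written to alloc.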
bool VulkanMemory::AllocImageMemory(VulkanMemoryAllocator* allocator,
                                    VkImage image,
                                    Protected isProtected,
                                    bool forceDedicatedMemory,
                                    bool useLazyAllocation,
                                    const std::function<CheckResult>& checkResult,
                                    VulkanAlloc* alloc) {
    VulkanBackendMemory memory = 0;

    uint32_t propFlags;
    // If we ever find that our allocator is not aggressive enough in using dedicated image
    // memory, we can add a size check here to force the use of dedicated memory. However, for
    // now, we let the allocator decide. The allocator can query the GPU for each image to see if
    // the GPU recommends or requires the use of dedicated memory.
    if (forceDedicatedMemory) {
        propFlags = VulkanMemoryAllocator::kDedicatedAllocation_AllocationPropertyFlag;
    } else {
        propFlags = VulkanMemoryAllocator::kNone_AllocationPropertyFlag;
    }

    if (isProtected == Protected::kYes) {
        propFlags = propFlags | VulkanMemoryAllocator::kProtected_AllocationPropertyFlag;
    }

    if (useLazyAllocation) {
        propFlags = propFlags | VulkanMemoryAllocator::kLazyAllocation_AllocationPropertyFlag;
    }

    VkResult result = allocator->allocateImageMemory(image, propFlags, &memory);
    if (!checkResult(result)) {
        return false;
    }

    allocator->getAllocInfo(memory, alloc);
    return true;
}

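// Returns image memory previously obtained from AllocImageMemory to the allocator.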
void VulkanMemory::FreeImageMemory(VulkanMemoryAllocator* allocator,
                                   const VulkanAlloc& alloc) {
    SkASSERT(alloc.fBackendMemory);
    allocator->freeMemory(alloc.fBackendMemory);
}

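// Maps a host-visible (mappable) allocation and returns a CPU-accessible pointer, or nullptr on
// failure.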
void* VulkanMemory::MapAlloc(VulkanMemoryAllocator* allocator,
                             const VulkanAlloc& alloc,
                             const std::function<CheckResult>& checkResult) {
    SkASSERT(VulkanAlloc::kMappable_Flag & alloc.fFlags);
    SkASSERT(alloc.fBackendMemory);
    void* mapPtr;
    VkResult result = allocator->mapMemory(alloc.fBackendMemory, &mapPtr);
    if (!checkResult(result)) {
        return nullptr;
    }
    return mapPtr;
}

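// Unmaps an allocation previously mapped with MapAlloc.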
void VulkanMemory::UnmapAlloc(VulkanMemoryAllocator* allocator,
                              const VulkanAlloc& alloc) {
    SkASSERT(alloc.fBackendMemory);
    allocator->unmapMemory(alloc.fBackendMemory);
}

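// Fills out a VkMappedMemoryRange describing the requested slice of a non-coherent allocation,
// rounding the offset down and the size up to the given alignment.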
void VulkanMemory::GetNonCoherentMappedMemoryRange(const VulkanAlloc& alloc,
                                                   VkDeviceSize offset,
                                                   VkDeviceSize size,
                                                   VkDeviceSize alignment,
                                                   VkMappedMemoryRange* range) {
    SkASSERT(alloc.fFlags & VulkanAlloc::kNoncoherent_Flag);
    offset = offset + alloc.fOffset;
    VkDeviceSize offsetDiff = offset & (alignment - 1);
    offset = offset - offsetDiff;
    size = (size + alignment - 1) & ~(alignment - 1);
#ifdef SK_DEBUG
    SkASSERT(offset >= alloc.fOffset);
    SkASSERT(offset + size <= alloc.fOffset + alloc.fSize);
    SkASSERT(0 == (offset & (alignment - 1)));
    SkASSERT(size > 0);
    SkASSERT(0 == (size & (alignment - 1)));
#endif

    std::memset(range, 0, sizeof(VkMappedMemoryRange));
    range->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    range->memory = alloc.fMemory;
    range->offset = offset;
    range->size = size;
}

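// Flushes a range of a mapped, non-coherent allocation so CPU writes become visible to the GPU.
// Coherent allocations need no explicit flush, so this is a no-op for them.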
void VulkanMemory::FlushMappedAlloc(VulkanMemoryAllocator* allocator,
                                    const VulkanAlloc& alloc,
                                    VkDeviceSize offset,
                                    VkDeviceSize size,
                                    const std::function<CheckResult>& checkResult) {
    if (alloc.fFlags & VulkanAlloc::kNoncoherent_Flag) {
        SkASSERT(offset == 0);
        SkASSERT(size <= alloc.fSize);
        SkASSERT(alloc.fBackendMemory);
        VkResult result = allocator->flushMemory(alloc.fBackendMemory, offset, size);
        checkResult(result);
    }
}

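// Invalidates a range of a mapped, non-coherent allocation so GPU writes become visible to the
// CPU. As with FlushMappedAlloc, this is a no-op for coherent allocations.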
void VulkanMemory::InvalidateMappedAlloc(VulkanMemoryAllocator* allocator,
                                         const VulkanAlloc& alloc,
                                         VkDeviceSize offset,
                                         VkDeviceSize size,
                                         const std::function<CheckResult>& checkResult) {
    if (alloc.fFlags & VulkanAlloc::kNoncoherent_Flag) {
        SkASSERT(offset == 0);
        SkASSERT(size <= alloc.fSize);
        SkASSERT(alloc.fBackendMemory);
        VkResult result = allocator->invalidateMemory(alloc.fBackendMemory, offset, size);
        checkResult(result);
    }
}

}  // namespace skgpu
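
// A minimal caller-side sketch of the lifecycle these helpers implement (illustrative only, not
// part of this file's API surface; the allocator, buffer, and checkResult objects below are
// assumed to be supplied by the caller, e.g. a Vulkan-backed GPU context):
//
//     skgpu::VulkanAlloc alloc;
//     if (skgpu::VulkanMemory::AllocBufferMemory(allocator, buffer,
//                                                BufferUsage::kCpuWritesGpuReads,
//                                                /*shouldPersistentlyMapCpuToGpu=*/true,
//                                                checkResult, &alloc)) {
//         if (void* ptr = skgpu::VulkanMemory::MapAlloc(allocator, alloc, checkResult)) {
//             // ... write data through ptr ...
//             skgpu::VulkanMemory::FlushMappedAlloc(allocator, alloc, 0, alloc.fSize, checkResult);
//             skgpu::VulkanMemory::UnmapAlloc(allocator, alloc);
//         }
//         skgpu::VulkanMemory::FreeBufferMemory(allocator, alloc);
//     }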