// Copyright 2024 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <vulkan/vulkan.h>

#ifdef _WIN32
#include <malloc.h>
#endif

#include <stdlib.h>

#include <algorithm>
#include <memory>
#include <optional>
#include <set>
#include <string>
#include <unordered_map>
#include <vector>

#include "DeviceOpTracker.h"
#include "Handle.h"
#include "VkEmulatedPhysicalDeviceMemory.h"
#include "aemu/base/files/Stream.h"
#include "aemu/base/memory/SharedMemory.h"
#include "aemu/base/synchronization/ConditionVariable.h"
#include "aemu/base/synchronization/Lock.h"
#include "common/goldfish_vk_deepcopy.h"
#include "vulkan/VkAndroidNativeBuffer.h"
#include "vulkan/VkFormatUtils.h"
#include "vulkan/emulated_textures/CompressedImageInfo.h"

namespace gfxstream {
namespace vk {

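// Reuse pool for external fences. add() returns a fence to the pool, pop() hands back a
// fence that has already signaled (resetting it unless the new create info requests
// VK_FENCE_CREATE_SIGNALED_BIT), and popAll() drains the pool so the owner can destroy
// the remaining fences.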
template <class TDispatch>
class ExternalFencePool {
   public:
    ExternalFencePool(TDispatch* dispatch, VkDevice device)
        : m_vk(dispatch), mDevice(device), mMaxSize(5) {}

    ~ExternalFencePool() {
        if (!mPool.empty()) {
            GFXSTREAM_ABORT(emugl::FatalError(emugl::ABORT_REASON_OTHER))
                << "External fence pool for device " << static_cast<void*>(mDevice)
                << " destroyed but " << mPool.size() << " fences still not destroyed.";
        }
    }

    void add(VkFence fence) {
        android::base::AutoLock lock(mLock);
        mPool.push_back(fence);
        if (mPool.size() > mMaxSize) {
            INFO("External fence pool for %p has increased to size %zu", mDevice, mPool.size());
            mMaxSize = mPool.size();
        }
    }

    VkFence pop(const VkFenceCreateInfo* pCreateInfo) {
        VkFence fence = VK_NULL_HANDLE;
        {
            android::base::AutoLock lock(mLock);
            auto it = std::find_if(mPool.begin(), mPool.end(), [this](const VkFence& fence) {
                VkResult status = m_vk->vkGetFenceStatus(mDevice, fence);
                if (status != VK_SUCCESS) {
                    if (status != VK_NOT_READY) {
                        VK_CHECK(status);
                    }

                    // Status is valid, but fence is not yet signaled
                    return false;
                }
                return true;
            });
            if (it == mPool.end()) {
                return VK_NULL_HANDLE;
            }

            fence = *it;
            mPool.erase(it);
        }

        if (!(pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT)) {
            VK_CHECK(m_vk->vkResetFences(mDevice, 1, &fence));
        }

        return fence;
    }

    std::vector<VkFence> popAll() {
        android::base::AutoLock lock(mLock);
        std::vector<VkFence> popped = mPool;
        mPool.clear();
        return popped;
    }

   private:
    TDispatch* m_vk;
    VkDevice mDevice;
    android::base::Lock mLock;
    std::vector<VkFence> mPool;
    int mMaxSize;
};

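// RAII wrapper around a platform-specific aligned host allocation (_aligned_malloc on
// Windows, aligned_alloc elsewhere). Note that the C11 aligned_alloc contract expects
// size to be a multiple of alignment, so callers are assumed to round the requested
// size up accordingly.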
class PrivateMemory {
public:
    PrivateMemory(size_t alignment, size_t size) {
#ifdef _WIN32
        mAddr = _aligned_malloc(size, alignment);
#else
        mAddr = aligned_alloc(alignment, size);
#endif
    }
    ~PrivateMemory() {
        if (mAddr) {
#ifdef _WIN32
            _aligned_free(mAddr);
#else
            free(mAddr);
#endif
            mAddr = nullptr;
        }
    }
    void* getAddr() {
        return mAddr;
    }
private:
    void* mAddr{nullptr};
};

// We always map the whole size on host.
// This makes it much easier to implement
// the memory map API.
struct MemoryInfo {
    // This indicates whether the VkDecoderGlobalState needs to clean up
    // and unmap the mapped memory; only the owner of the mapped memory
    // should call unmap.
    bool needUnmap = false;
    // When ptr is null, it means the VkDeviceMemory object
    // was not allocated with the HOST_VISIBLE property.
    void* ptr = nullptr;
    VkDeviceSize size;
    // GLDirectMem info
    bool directMapped = false;
    bool virtioGpuMapped = false;
    uint32_t caching = 0;
    uint64_t guestPhysAddr = 0;
    void* pageAlignedHva = nullptr;
    uint64_t sizeToPage = 0;
    uint64_t hostmemId = 0;
    VkDevice device = VK_NULL_HANDLE;
    MTLTextureRef mtlTexture = nullptr;
    uint32_t memoryIndex = 0;
    // Set if the memory is backed by shared memory.
    std::optional<android::base::SharedMemory> sharedMemory;

    std::shared_ptr<PrivateMemory> privateMemory;
    // virtio-gpu blobs
    uint64_t blobId = 0;

    // Buffer, provided via vkAllocateMemory().
    std::optional<HandleType> boundBuffer;
    // ColorBuffer, provided via vkAllocateMemory().
    std::optional<HandleType> boundColorBuffer;
};

struct InstanceInfo {
    std::vector<std::string> enabledExtensionNames;
    uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
    VkInstance boxed = nullptr;
    bool isAngle = false;
    std::string applicationName;
    std::string engineName;
};

struct PhysicalDeviceInfo {
    VkInstance instance = VK_NULL_HANDLE;
    VkPhysicalDeviceProperties props;
    std::unique_ptr<EmulatedPhysicalDeviceMemoryProperties> memoryPropertiesHelper;
    std::vector<VkQueueFamilyProperties> queueFamilyProperties;
    VkPhysicalDevice boxed = nullptr;
};

struct DeviceInfo {
    std::unordered_map<uint32_t, std::vector<VkQueue>> queues;
    std::vector<std::string> enabledExtensionNames;
    bool emulateTextureEtc2 = false;
    bool emulateTextureAstc = false;
    bool useAstcCpuDecompression = false;
    VkPhysicalDevice physicalDevice;
    VkDevice boxed = nullptr;
    DebugUtilsHelper debugUtilsHelper = DebugUtilsHelper::withUtilsDisabled();
    std::unique_ptr<ExternalFencePool<VulkanDispatch>> externalFencePool = nullptr;
    std::set<VkFormat> imageFormats = {};  // image formats used on this device
    std::unique_ptr<GpuDecompressionPipelineManager> decompPipelines = nullptr;
    std::optional<DeviceOpTracker> deviceOpTracker;

    // True if this is a compressed image that needs to be decompressed on the GPU (with our
    // compute shader)
    bool needGpuDecompression(const CompressedImageInfo& cmpInfo) {
        return ((cmpInfo.isEtc2() && emulateTextureEtc2) ||
                (cmpInfo.isAstc() && emulateTextureAstc && !useAstcCpuDecompression));
    }
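    // True if this compressed image needs emulated decompression at all, whether it ends up
    // being handled on the CPU or on the GPU.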
    bool needEmulatedDecompression(const CompressedImageInfo& cmpInfo) {
        return ((cmpInfo.isEtc2() && emulateTextureEtc2) ||
                (cmpInfo.isAstc() && emulateTextureAstc));
    }
    bool needEmulatedDecompression(VkFormat format) {
        return (gfxstream::vk::isEtc2(format) && emulateTextureEtc2) ||
               (gfxstream::vk::isAstc(format) && emulateTextureAstc);
    }
};

struct QueueInfo {
    android::base::Lock* lock = nullptr;
    VkDevice device;
    uint32_t queueFamilyIndex;
    VkQueue boxed = nullptr;
    uint32_t sequenceNumber = 0;
};

struct BufferInfo {
    VkDevice device;
    VkBufferUsageFlags usage;
    VkDeviceMemory memory = 0;
    VkDeviceSize memoryOffset = 0;
    VkDeviceSize size;
    std::shared_ptr<bool> alive{new bool(true)};
};

struct ImageInfo {
    VkDevice device;
    VkImageCreateInfo imageCreateInfoShallow;
    std::shared_ptr<AndroidNativeBufferInfo> anbInfo;
    CompressedImageInfo cmpInfo;
    // ColorBuffer, provided via vkAllocateMemory().
    std::optional<HandleType> boundColorBuffer;
    // TODO: might need to use an array of layouts to represent each subresource
    VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED;
    VkDeviceMemory memory = VK_NULL_HANDLE;
};

struct ImageViewInfo {
    VkDevice device;
    bool needEmulatedAlpha = false;

    // Color buffer, provided via vkAllocateMemory().
    std::optional<HandleType> boundColorBuffer;
    std::shared_ptr<bool> alive{new bool(true)};
};

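// Note: the custom copy-assignment below deep-copies createInfo (including its pNext chain)
// into this struct's own BumpPool, so the copy does not keep pointers into the source
// SamplerInfo's pool storage.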
struct SamplerInfo {
    VkDevice device;
    bool needEmulatedAlpha = false;
    VkSamplerCreateInfo createInfo = {};
    VkSampler emulatedborderSampler = VK_NULL_HANDLE;
    android::base::BumpPool pool = android::base::BumpPool(256);
    SamplerInfo() = default;
    SamplerInfo& operator=(const SamplerInfo& other) {
        deepcopy_VkSamplerCreateInfo(&pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
                                     &other.createInfo, &createInfo);
        device = other.device;
        needEmulatedAlpha = other.needEmulatedAlpha;
        emulatedborderSampler = other.emulatedborderSampler;
        return *this;
    }
    SamplerInfo(const SamplerInfo& other) { *this = other; }
    SamplerInfo(SamplerInfo&& other) = delete;
    SamplerInfo& operator=(SamplerInfo&& other) = delete;
    std::shared_ptr<bool> alive{new bool(true)};
};

struct FenceInfo {
    VkDevice device = VK_NULL_HANDLE;
    VkFence boxed = VK_NULL_HANDLE;
    VulkanDispatch* vk = nullptr;

    android::base::StaticLock lock;
    android::base::ConditionVariable cv;

    enum class State {
        kWaitable,
        kNotWaitable,
        kWaiting,
    };
    State state = State::kNotWaitable;

    bool external = false;

    // If this fence was used in an additional host operation that must be waited
    // upon before destruction (e.g. as part of a vkAcquireImageANDROID() call),
    // this is the waitable tracking that host operation.
    std::optional<DeviceOpWaitable> latestUse;
};

struct SemaphoreInfo {
    VkDevice device;
    int externalHandleId = 0;
    VK_EXT_SYNC_HANDLE externalHandle = VK_EXT_SYNC_HANDLE_INVALID;
    // If this semaphore was used in an additional host operation that must be waited
    // upon before destruction (e.g. as part of a vkAcquireImageANDROID() call),
    // this is the waitable tracking that host operation.
    std::optional<DeviceOpWaitable> latestUse;
};

struct DescriptorSetLayoutInfo {
    VkDevice device = 0;
    VkDescriptorSetLayout boxed = 0;
    VkDescriptorSetLayoutCreateInfo createInfo;
    std::vector<VkDescriptorSetLayoutBinding> bindings;
};

struct DescriptorPoolInfo {
    VkDevice device = 0;
    VkDescriptorPool boxed = 0;
    struct PoolState {
        VkDescriptorType type;
        uint32_t descriptorCount;
        uint32_t used;
    };

    VkDescriptorPoolCreateInfo createInfo;
    uint32_t maxSets;
    uint32_t usedSets;
    std::vector<PoolState> pools;

    std::unordered_map<VkDescriptorSet, VkDescriptorSet> allocedSetsToBoxed;
    std::vector<uint64_t> poolIds;
};

struct DescriptorSetInfo {
    enum DescriptorWriteType {
        Empty = 0,
        ImageInfo = 1,
        BufferInfo = 2,
        BufferView = 3,
        InlineUniformBlock = 4,
        AccelerationStructure = 5,
    };

    struct DescriptorWrite {
        VkDescriptorType descriptorType;
        DescriptorWriteType writeType = DescriptorWriteType::Empty;
        uint32_t dstArrayElement;  // Only used for inlineUniformBlock and accelerationStructure.

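        // writeType above selects which member of the union below holds valid data.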
        union {
            VkDescriptorImageInfo imageInfo;
            VkDescriptorBufferInfo bufferInfo;
            VkBufferView bufferView;
            VkWriteDescriptorSetInlineUniformBlockEXT inlineUniformBlock;
            VkWriteDescriptorSetAccelerationStructureKHR accelerationStructure;
        };

        std::vector<uint8_t> inlineUniformBlockBuffer;
        // Weak pointer(s) to detect if all objects on the dependency chain are alive.
        std::vector<std::weak_ptr<bool>> alives;
    };

    VkDescriptorPool pool;
    VkDescriptorSetLayout unboxedLayout = 0;
    std::vector<std::vector<DescriptorWrite>> allWrites;
    std::vector<VkDescriptorSetLayoutBinding> bindings;
};

struct ShaderModuleInfo {
    VkDevice device;
};

struct PipelineCacheInfo {
    VkDevice device;
};

struct PipelineInfo {
    VkDevice device;
};

struct RenderPassInfo {
    VkDevice device;
};

struct FramebufferInfo {
    VkDevice device;
    std::vector<HandleType> attachedColorBuffers;
};

}  // namespace vk
}  // namespace gfxstream